|
@@ -15,58 +15,143 @@ from utils.tools import get_host_ip
|
|
|
|
|
|
class Scheduler:
|
|
|
|
|
|
- def __init__(self, query: dict):
|
|
|
- self.query = query
|
|
|
- self.account_tab = mongo_table('py_spider', 'match_account')
|
|
|
- self.crawl_error_tab = mongo_table('py_spider', 'crawl_error')
|
|
|
+ def __init__(self, site, crawl_type, **kwargs):
|
|
|
+ self.site = site
|
|
|
+ self.crawl_type = crawl_type
|
|
|
self.crawl_start = False
|
|
|
+ self.count = None # 日采集数量
|
|
|
+ self.total = None # 日采集上限
|
|
|
self.account_id = None
|
|
|
+ self.record_id = None
|
|
|
self.user = None
|
|
|
self.spider_code = None
|
|
|
self.crawl_url = None
|
|
|
self.crawl_params = None
|
|
|
self.crawl_exception = None
|
|
|
- self.crawl_type = None
|
|
|
- self.__records = None
|
|
|
+ self.kwargs = kwargs
|
|
|
+
|
|
|
+ self.account_tab = mongo_table('py_spider', 'match_account')
|
|
|
+ self.record_tab = mongo_table('py_spider', 'match_account_record')
|
|
|
+ self.crawl_error_tab = mongo_table('py_spider', 'crawl_error')
|
|
|
+
|
|
|
+ def crawl_counter(self, number: int):
|
|
|
+ """采集计数器"""
|
|
|
+ records = self.record_tab.find_one({'_id': self.record_id})
|
|
|
+ records['count'] += number  # NOTE(review): read-modify-write is not atomic; concurrent schedulers can lose increments — prefer find_one_and_update with {'$inc': {'count': number}}
|
|
|
+ self.count = records['count']
|
|
|
+ self._update_tab(self.record_tab, self.record_id, records)
|
|
|
|
|
|
- def _update_data(self, item):
|
|
|
+ def query_user(self, account: str):
|
|
|
+ query = {'account': account}
|
|
|
+ item = self.account_tab.find_one(query)
|
|
|
+ if item is None:
|
|
|
+ return None
|
|
|
+ return User(item['account'], item['password'])
|
|
|
+
|
|
|
+ def finished(self, execute_next_time=None):
|
|
|
+ logger.info("任务结束")
|
|
|
+ self._release_account()
|
|
|
+ self.sleep(execute_next_time)
|
|
|
+
|
|
|
+ def err_record(self, e: JyBasicException):
|
|
|
+ rows = {
|
|
|
+ 'account': self.user.username if self.user is not None else '',
|
|
|
+ 'spidercode': self.spider_code,
|
|
|
+ 'url': self.crawl_url,
|
|
|
+ 'status_code': e.code,
|
|
|
+ 'reason': e.reason,
|
|
|
+ 'params': getattr(e, 'title', ''),
|
|
|
+ 'crawl_time': int2long(int(time.time())),
|
|
|
+ 'crawl_type': self.crawl_type,
|
|
|
+ }
|
|
|
+ self.crawl_error_tab.insert_one(rows)
|
|
|
+
|
|
|
+ def _update_tab(self, mgo_coll, _id, item):
|
|
|
"""
|
|
|
- 更新账号所属的采集数据信息
|
|
|
+ 更新mongo表
|
|
|
|
|
|
- :param item: 最新数据
|
|
|
+ :param mgo_coll: mongo表
|
|
|
+ :param _id: mongo_id
|
|
|
+ :param item: 数据
|
|
|
"""
|
|
|
- item['ip'] = get_host_ip()
|
|
|
- item['update_time'] = datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S')
|
|
|
- self.account_tab.update_one(
|
|
|
- {'_id': self.account_id},
|
|
|
- {'$set': item}
|
|
|
- )
|
|
|
+ item['update_time'] = self.current_time
|
|
|
+ mgo_coll.update_one({'_id': _id}, {'$set': item})
|
|
|
|
|
|
def _release_account(self):
|
|
|
- rows = dict(
|
|
|
- used=False,
|
|
|
- update_time=datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S')
|
|
|
- )
|
|
|
+ if self.crawl_type == 'detail':
|
|
|
+ rows = dict(crawl_detail=False,)
|
|
|
+ else:
|
|
|
+ rows = dict(crawl_list=False,)
|
|
|
if self.account_id is not None:
|
|
|
- self._update_data(rows)
|
|
|
+ self._update_tab(self.account_tab, self.account_id, rows)
|
|
|
|
|
|
- @staticmethod
|
|
|
- def sleep(wait_time=None):
|
|
|
- sleep_time = (wait_time or 600)
|
|
|
- time.sleep(sleep_time)
|
|
|
+ def __enter__(self):
|
|
|
+ logger.info('[开启调度]')
|
|
|
+ '''获取闲置账号'''
|
|
|
+ if self.account is not None:  # NOTE(review): 'account' is a property that issues a fresh Mongo query per access (used again below and in account_records) — bind to a local so the doc checked here is the doc used; confirm
|
|
|
+ self.account_id = self.account['_id']
|
|
|
+ self.user = User(self.account['account'], self.account['password'])
|
|
|
+ logger.info(f'[启用账号]{self.user.username}')
|
|
|
+ '''初始化记录表'''
|
|
|
+ records = self.account_records
|
|
|
+ if self.crawl_type == 'detail':
|
|
|
+ item = {'crawl_detail': True}
|
|
|
+ self.total = records['total']
|
|
|
+ self.count = records['count']
|
|
|
+ else:
|
|
|
+ item = {'crawl_list': True}
|
|
|
+ '''初始化采集账号记录'''
|
|
|
+ self._update_tab(self.account_tab, self.account_id, item)
|
|
|
+ self.crawl_start = True
|
|
|
+ else:
|
|
|
+ logger.warning(f'[{self.site}]暂无闲置账号')
|
|
|
+ return self
|
|
|
|
|
|
@staticmethod
|
|
|
def wait_for_next_task(wait_time=None):
|
|
|
_sleep = (wait_time or random.choice(range(5, 15)))
|
|
|
time.sleep(_sleep)
|
|
|
|
|
|
+ @staticmethod
|
|
|
+ def sleep(wait_time=None):
|
|
|
+ sleep_time = (wait_time or 600)
|
|
|
+ time.sleep(sleep_time)
|
|
|
+
|
|
|
@property
|
|
|
- def today(self):
|
|
|
- return datetime.datetime.today().strftime('%Y-%m-%d')
|
|
|
+ def account_records(self):
|
|
|
+ """账号使用记录"""
|
|
|
+ query = dict(
|
|
|
+ account=self.account['account'],
|
|
|
+ date=self.today,
|
|
|
+ type=self.crawl_type,
|
|
|
+ site=self.site
|
|
|
+ )
|
|
|
+ item = self.record_tab.find_one(query)
|
|
|
+ if item is None:
|
|
|
+ item = dict(
|
|
|
+ site=self.site,
|
|
|
+ account=self.account['account'],
|
|
|
+ type=self.crawl_type,
|
|
|
+ total=self.account.get('total', 0), # 任务总数不设置时,默认为:0
|
|
|
+ count=0,
|
|
|
+ ip=get_host_ip(),
|
|
|
+ date=self.today,
|
|
|
+ update_time=self.current_time,
|
|
|
+ )
|
|
|
+ result = self.record_tab.insert_one(item)
|
|
|
+ item['_id'] = result.inserted_id
|
|
|
+ self.record_id = item['_id']  # NOTE(review): indentation lost in this patch view — ensure this runs for found records too (not only after insert), otherwise crawl_counter queries _id=None
|
|
|
+ return item
|
|
|
|
|
|
@property
|
|
|
- def yesterday(self):
|
|
|
- return (date.today() - timedelta(days=1)).strftime("%Y-%m-%d")
|
|
|
+ def account(self):
|
|
|
+ """账号"""
|
|
|
+ query = dict(site=self.site)
|
|
|
+ if self.crawl_type == 'detail':
|
|
|
+ query['crawl_detail'] = False
|
|
|
+ else:
|
|
|
+ query['crawl_list'] = False
|
|
|
+ return self.account_tab.find_one(query, sort=[('update_time', 1)])
|
|
|
|
|
|
@property
|
|
|
def crawl_task(self):
|
|
@@ -86,60 +171,17 @@ class Scheduler:
|
|
|
except requests.RequestException:
|
|
|
return results
|
|
|
|
|
|
- def err_record(self, e: JyBasicException):
|
|
|
- rows = {
|
|
|
- 'account': self.user.username if self.user is not None else '',
|
|
|
- 'spidercode': self.spider_code,
|
|
|
- 'url': self.crawl_url,
|
|
|
- 'status_code': e.code,
|
|
|
- 'reason': e.reason,
|
|
|
- 'params': getattr(e, 'title', ''),
|
|
|
- 'crawl_time': int2long(int(time.time())),
|
|
|
- 'crawl_type': self.crawl_type,
|
|
|
- }
|
|
|
- self.crawl_error_tab.insert_one(rows)
|
|
|
-
|
|
|
- def query_user(self, account: str):
|
|
|
- query = {'account': account}
|
|
|
- rows = self.account_tab.find_one(query)
|
|
|
- if rows is None:
|
|
|
- raise
|
|
|
- return User(rows['account'], rows['password'])
|
|
|
-
|
|
|
- def finished(self, execute_next_time=None):
|
|
|
- logger.info("任务结束")
|
|
|
- self._release_account()
|
|
|
- self.sleep(execute_next_time)
|
|
|
+ @property
|
|
|
+ def today(self):
|
|
|
+ return datetime.datetime.today().strftime('%Y-%m-%d')
|
|
|
|
|
|
- def update_count(self, number):
|
|
|
- rows = self.account_tab.find_one({'_id': self.account_id})
|
|
|
- records = rows.get('records', {self.today: 0})
|
|
|
- '''采集记录历史保存7天'''
|
|
|
- count = records.get(self.today, 0)
|
|
|
- count += number
|
|
|
- if len(records) > 7:
|
|
|
- records.clear()
|
|
|
- records.setdefault(self.today, count)
|
|
|
- else:
|
|
|
- records.update({self.today: count})
|
|
|
- rows.update({'records': records})
|
|
|
- self._update_data(rows)
|
|
|
+ @property
|
|
|
+ def current_time(self):
|
|
|
+ return datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S')
|
|
|
|
|
|
- def __enter__(self):
|
|
|
- logger.info(f'[开启调度]')
|
|
|
- rows = self.account_tab.find_one(self.query, sort=[('update_time', 1)])
|
|
|
- if rows is not None:
|
|
|
- self.account_id = rows['_id']
|
|
|
- self.user = User(rows['account'], rows['password'])
|
|
|
- logger.info(f'[启用账号] {self.user.username}')
|
|
|
- rows['used'] = True
|
|
|
- records = rows.get('records', {self.today: 0})
|
|
|
- rows.update({'records': records})
|
|
|
- self._update_data(rows)
|
|
|
- self.crawl_start = True # 控制调度的状态
|
|
|
- else:
|
|
|
- logger.warning(f'[{self.query.get("site")}采集]暂无闲置账号')
|
|
|
- return self
|
|
|
+ @property
|
|
|
+ def yesterday(self):
|
|
|
+ return (date.today() - timedelta(days=1)).strftime("%Y-%m-%d")
|
|
|
|
|
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
|
|
logger.info(f'[关闭调度]')
|