basics.py

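"""Base service shared by the crawler components.

Wires together the scheduler, downloader, parser and validators, and provides
helpers for building Task items and pushing query/domain/record results into
the MongoDB collections configured in settings.
"""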
import threading
import time
from typing import List, Mapping

from common.databases import insert_one, update_one_by_domain
from common.log import logger
from constants import (
    ORGANIZATION,
    KEYWORD,
    SEED_URL,
    COMPETING_GOODS,
    VISIT_CLASSIFY,
    QUERY_CLASSIFY
)
from crawler.Task import Task
from crawler.analysis import Parser
from crawler.download import Downloader
from crawler.schedule import Scheduler
from crawler.validate import Validator
from settings import (
    MGO_URLS,
    MGO_ORGS,
    MGO_KEYWORDS,
    MGO_COMPETING_GOODS,
    MGO_DATA_GARBAGE,
    MGO_DOMAIN,
    MGO_QUERY,
    MGO_RECORDS
)

class BasicService:

    def __init__(
            self,
            scheduler=None,
            validator=None,
            downloader=None,
            collector=None,
            parser=None,
            **kwargs
    ):
        self.scheduler = scheduler or Scheduler()
        self.validator = validator or Validator(redis_key='RemovalDuplicate_')
        self.collector = collector or Validator(redis_key='CollectUrl_')
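        # Note: two Validator instances back different Redis sets:
        # 'RemovalDuplicate_' remembers URLs already pushed to the
        # duplicate-removal table, while 'CollectUrl_' remembers domains
        # already saved as discovery results (see push_remove / push_domain).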
        self.downloader = downloader or Downloader()
        self.parser = parser or Parser()
        # MongoDB query conditions
        self.query = {'enable_added': {'$exists': False}}
        self.projection = {'name': 1}
        self.sort = [('_id', -1)]
        # weights
        self.org_weight = kwargs.pop('org_weight', None) or 7
        self.url_weight = kwargs.pop('url_weight', None) or 8
        self.keyword_weight = kwargs.pop('keyword_weight', None) or 9
        # classifications
        self.visit_classify = VISIT_CLASSIFY
        self.query_classify = QUERY_CLASSIFY
        # owning groups
        self.org_groups = ORGANIZATION
        self.keyword_groups = KEYWORD
        self.url_groups = SEED_URL
        self.competing_groups = COMPETING_GOODS
    @property
    def thread_name(self):
        return threading.current_thread().name

    def loops_interval(self, interval, enable_debug_log=False):
        if enable_debug_log:
            logger.debug(f'Thread-<{self.thread_name}> is closed.')
        time.sleep(interval)
    @staticmethod
    def make_task(**kwargs):
        """Create a Task object."""
        return Task(**kwargs)
    @staticmethod
    def make_retrieve_item(task: Task):
        """Build the document saved for a query result."""
        item = {
            'name': task['name'],
            'url': task['url'],
            'domain': task['domain'],
            'origin': task['origin'],
            'groups': task['groups'],
            'create_at': task['create_at'],
            'update_at': task['update_at'],
        }
        return item
    @staticmethod
    def make_domain_item(task: Task):
        """Build the document saved for a domain discovery result."""
        item = {
            'name': task['name'],
            'url': task['url'],
            'domain': task['domain'],
            'depth': task['depth'],
            'origin': task['origin'],
            'groups': task['groups'],
            'create_at': task['create_at'],
            'update_at': task['update_at'],
        }
        return item
    @staticmethod
    def make_duplicate_removal(task: Task):
        """Build the document saved to the duplicate-removal (filter) table."""
        item = {
            'domain': task['domain'],
            'origin': task['origin'],
            'create_at': task['update_at'],
        }
        return item
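    # _push_data below dispatches on `purpose`: 'query', 'domain' and 'remove'
    # pick one of the item builders above; any other value inserts the raw
    # task. The caller supplies the destination collection (see push_query /
    # push_domain / push_remove / push_records).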
    def _push_data(self, purpose: str, task: Task, collection):
        if purpose == 'query':
            item = self.make_retrieve_item(task)
            insert_one(collection, item)
            logger.info(f'<{self.thread_name}> - query result - {item["_id"]}')
        elif purpose == 'domain':
            item = self.make_domain_item(task)
            insert_one(collection, item)
            logger.info(f'<{self.thread_name}> - domain discovery result - {task["domain"]}')
        elif purpose == 'remove':
            item = self.make_duplicate_removal(task)
            update_one_by_domain(collection, item)
            logger.info(f'<{self.thread_name}> - added to filter - {task["url"]}')
        else:
            insert_one(collection, task)
            logger.info(f'<{self.thread_name}> - data recorded - {task["_id"]}')
    def push_remove(self, task: Task):
        """Push the task URL to the duplicate-removal table."""
        if not self.validator.data(task['url']):
            self._push_data('remove', task, MGO_DATA_GARBAGE)
            self.validator.add_data(task['url'])
            return True
        return False
    def push_domain(self, task: Task):
        """Push and save a data-mining (domain discovery) result."""
        if not self.collector.data(task['domain']):
            self._push_data('domain', task, MGO_DOMAIN)
            self.collector.add_data(task['domain'])
            return True
        return False
    def push_query(self, task: Task):
        """Push and save an organization search result."""
        self._push_data('query', task, MGO_QUERY)
    def push_records(self, task: Task):
        """Record mined data, truncating the name to 50 characters."""
        if len(task['name']) > 50:
            task['name'] = '{:.50s}'.format(task['name'])
        self._push_data('records', task, MGO_RECORDS)
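    # The *_table helpers below load task sources from MongoDB, newest first.
    # Except for keywords_table, they filter with self.query so documents
    # already flagged with 'enable_added' are skipped, and they return the
    # projected documents.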
    def orgs_table(self) -> List[Mapping]:
        """Organizations/units table."""
        search_orgs = []
        cursor = MGO_ORGS.find(self.query, projection=self.projection)
        for item in cursor.sort(self.sort):
            search_orgs.append(item)
        return search_orgs
    def keywords_table(self):
        """Keywords table."""
        search_keywords = []
        cursor = MGO_KEYWORDS.find(projection=self.projection)
        for item in cursor.sort(self.sort):
            search_keywords.append(item['name'])
        return search_keywords
    def seed_urls_table(self) -> List[Mapping]:
        """Seed URL table."""
        search_urls = []
        cursor = MGO_URLS.find(self.query, projection=self.projection)
        for item in cursor.sort(self.sort):
            search_urls.append(item)
        return search_urls
    def competing_goods_table(self):
        """Competing goods table."""
        competing_goods = []
        cursor = MGO_COMPETING_GOODS.find(self.query, projection=self.projection)
        for item in cursor.sort(self.sort):
            competing_goods.append(item)
        return competing_goods
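
# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative, not part of the service). It assumes
# live MongoDB/Redis connections behind settings and Validator, and that Task
# accepts the field names used by the item builders above; the field values
# here are placeholders, so adapt them to the real Task signature.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    service = BasicService(org_weight=5)
    task = service.make_task(
        name='Example Organization',
        url='https://www.example.com/page',
        domain='example.com',
        depth=1,
        origin='https://www.example.com',
        groups=service.org_groups,
        create_at=int(time.time()),
        update_at=int(time.time()),
    )
    # push_domain returns True the first time a domain is seen and False for
    # domains the collector has already recorded.
    if service.push_domain(task):
        service.push_records(task)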