浏览代码

千里马 - update

dongzhaorui 2 年之前
父节点
当前提交
818342da7c
共有 1 个文件被更改,包括 35 次插入、47 次删除
  1. 35 47
      qlm/source_qianlima.py

+ 35 - 47
qlm/source_qianlima.py

@@ -49,11 +49,19 @@ cookies = {
     "userInfo": "{%22userId%22:10609848%2C%22username%22:%2217610673271%22%2C%22userIcon%22:%22%22%2C%22linkName%22:%22%E8%91%A3%E5%85%88%E7%94%9F%22%2C%22companyName%22:%22%E5%90%88%E8%82%A5%E6%8B%93%E6%99%AE%E7%BD%91%E7%BB%9C%E7%B3%BB%E7%BB%9F%E5%B7%A5%E7%A8%8B%E6%9C%89%E9%99%90%E8%B4%A3%E4%BB%BB%E5%85%AC%E5%8F%B8%22%2C%22areaId%22:%222703%22%2C%22areaName%22:%22%E5%85%A8%E5%9B%BD%22%2C%22roleId%22:1%2C%22roleName%22:%22%E7%AE%A1%E7%90%86%E5%91%98%22%2C%22sex%22:%22m%22%2C%22expireDate%22:%22%E6%97%A0%22%2C%22isExpired%22:null%2C%22maxChildCount%22:0%2C%22isUsedCount%22:0%2C%22userStatus%22:1%2C%22memberLevel%22:5%2C%22memberLevelName%22:%22%E5%85%8D%E8%B4%B9%E6%B3%A8%E5%86%8C%E4%BC%9A%E5%91%98%22%2C%22registerTime%22:%222022-05-30%22%2C%22isSuperSupplier%22:0%2C%22isNewUser%22:1%2C%22welcomeMsg%22:%22%E6%AC%A2%E8%BF%8E%E8%BF%9B%E5%85%A5%E5%8D%83%E9%87%8C%E9%A9%AC%E6%8B%9B%E6%A0%87%E7%BD%91%EF%BD%9E%22%2C%22customerServiceInfo%22:{%22id%22:42%2C%22customerServiceName%22:%22%E5%8D%83%E9%87%8C%E9%A9%AC%E5%AE%A2%E6%9C%8D%22%2C%22weChatIcon%22:%22http://img_al.qianlima.com/invoice/1588986761_8ebeade70a.jpg%22%2C%22customerServicePhone%22:%2217718573953%22%2C%22customerServiceQQ%22:%22%22%2C%22customerServiceEmail%22:%22qianlima_service@qianlima.com%22%2C%22deptType%22:0}%2C%22shouji%22:%2217610673271%22%2C%22email%22:%22%22%2C%22dwmc%22:%22%E5%90%88%E8%82%A5%E6%8B%93%E6%99%AE%E7%BD%91%E7%BB%9C%E7%B3%BB%E7%BB%9F%E5%B7%A5%E7%A8%8B%E6%9C%89%E9%99%90%E8%B4%A3%E4%BB%BB%E5%85%AC%E5%8F%B8%22%2C%22zhiwu%22:%22%E4%BA%A7%E5%93%81%E7%BB%8F%E7%90%86%22%2C%22types%22:1%2C%22isPayBefore%22:0%2C%22memberOpenTime%22:null%2C%22businessUserType%22:null%2C%22businessCompanyName%22:null%2C%22isBusinessUser%22:null}"
 }
 session = requests.session()
-INFO_TYPES_MAPS = {
-    0: {"allType": -1, "types": "-1", "hasTenderTransferProject": 1, "searchDataType": 0},  # 全部
-    1: {"allType": 0, "progIdList": [0, 1], "types": -1, "searchDataType": 1},  # 招标信息
-    2: {"allType": 3, "progIdList": [3], "types": 3, "searchDataType": 1},  # 中标信息
-    3: {"allType": 99, "progIdList": [99], "types": 99, "searchDataType": 1}  # 采购意向
+
+'''
+招标阶段
+0 = 全部
+1 = 招标信息
+2 = 中标信息
+3 = 采购意向
+'''
+REQUEST_DATA_MAP = {
+    0: {"keywords": "", "timeType": 4, "beginTime": "2022-12-07", "endTime": "2022-12-07", "filtermode": "8", "searchMode": 0, "currentPage": 1, "numPerPage": 20, "sortType": 1, "allType": -1, "noticeSegmentTypeStr": "", "beginAmount": "", "endAmount": "", "purchasingUnitIdList": "", "threeClassifyTagStr": "", "fourLevelCategoryIdListStr": "", "threeLevelCategoryIdListStr": "", "levelId": "", "searchDataType": 0, "types": "-1", "showContent": 1, "hasTenderTransferProject": 1, "newAreas": "1", "hasChooseSortType": 1, "summaryType": 0},
+    1: {"keywords": "", "timeType": 4, "beginTime": "2022-12-07", "endTime": "2022-12-07", "filtermode": "8", "searchMode": 0, "currentPage": 1, "numPerPage": 20, "sortType": 1, "allType": "0", "beginAmount": "", "endAmount": "", "purchasingUnitIdList": "", "threeClassifyTagStr": "", "fourLevelCategoryIdListStr": "", "threeLevelCategoryIdListStr": "", "levelId": "", "searchDataType": 1, "types": -1, "showContent": 1, "newAreas": "", "hasChooseSortType": 1, "progIdAndNoticeSegmentTypeMaps": {"0": [], "1": []}, "summaryType": 0},
+    2: {"keywords": "", "timeType": 4, "beginTime": "2022-12-07", "endTime": "2022-12-07", "filtermode": "8", "searchMode": 0, "currentPage": 1, "numPerPage": 20, "sortType": 1, "allType": 3, "beginAmount": "", "endAmount": "", "purchasingUnitIdList": "", "threeClassifyTagStr": "", "fourLevelCategoryIdListStr": "", "threeLevelCategoryIdListStr": "", "levelId": "", "searchDataType": 1, "types": 3, "showContent": 1, "newAreas": "", "hasChooseSortType": 1, "progIdAndNoticeSegmentTypeMaps": {"3": []}, "summaryType": 0},
+    3: {"keywords": "", "timeType": 4, "beginTime": "2022-12-07", "endTime": "2022-12-07", "filtermode": "8", "searchMode": 0, "currentPage": 1, "numPerPage": 20, "sortType": 1, "allType": 99, "beginAmount": "", "endAmount": "", "purchasingUnitIdList": "", "threeClassifyTagStr": "", "fourLevelCategoryIdListStr": "", "threeLevelCategoryIdListStr": "", "levelId": "", "searchDataType": 1, "types": 99, "showContent": 1, "newAreas": "", "hasChooseSortType": 1, "progIdAndNoticeSegmentTypeMaps": {"99": []}, "summaryType": 0}
 }
 
 
@@ -97,7 +105,7 @@ def crawl_request(url, data, retries=5):
     return resp
 
 
-def crawl_spider(area: str, types: int, page: int, **kwargs):
+def crawl_spider(area: str, type_: int, page: int, **kwargs):
     results = []
     request_status = 'failure'  # 资源请求结果, 成功=success 失败=failure 停止=stop 账号封停=disable
 
@@ -105,31 +113,13 @@ def crawl_spider(area: str, types: int, page: int, **kwargs):
     begin_time = kwargs.pop('begin_time', curr_date)
     end_time = kwargs.pop('end_time', curr_date)
     max_per_page = kwargs.pop('max_page', 20)
-    types_map = INFO_TYPES_MAPS[types]
-    data = {
-        **types_map,
-        "keywords": "",
-        "timeType": 4,  # 自定义时间参数
-        "beginTime": begin_time,  # 格式: xxxx-xx-xxx
-        "endTime": end_time,  # 格式: xxxx-xx-xxx
-        "filtermode": 8,
-        "searchMode": 0,
-        "currentPage": page,  # 页码
-        "numPerPage": max_per_page,  # 每页的条目数
-        "sortType": 1,
-        "noticeSegmentTypeStr": "",
-        "beginAmount": "",
-        "endAmount": "",
-        "purchasingUnitIdList": "",
-        "threeClassifyTagStr": "",
-        "fourLevelCategoryIdListStr": "",
-        "threeLevelCategoryIdListStr": "",
-        "levelId": "",
-        "showContent": 1,
-        "newAreas": area,  # 设置地区
-        "hasChooseSortType": 1,
-        "summaryType": 0
-    }
+    data = REQUEST_DATA_MAP[type_]
+    data['newAreas'] = area  # 设置地区
+    data['currentPage'] = page  # 页码
+    data['numPerPage'] = max_per_page  # 每页的条目数
+    data['timeType'] = 4  # 自定义时间参数
+    data['beginTime'] = begin_time  # 开始时间,格式:xxxx-xx-xxx
+    data['endTime'] = end_time  # 结束时间,格式:xxxx-xx-xxx
     data = json.dumps(data)
     url = "https://search.vip.qianlima.com/rest/service/website/search/solr"
     response = crawl_request(url, data)
@@ -185,10 +175,10 @@ def crawl_spider(area: str, types: int, page: int, **kwargs):
         qlm.insert_many(results)
 
     if request_status in ['stop', 'success']:
-        logger.info("{}-第{}区-第{}类-第{}页{}条,成功上传{}条数据".format(
+        logger.info("{}-第{}区-第{}类{}条-第{}页,成功上传{}条数据".format(
             begin_time,
             area,
-            types,
+            type_,
             page,
             row_count,
             len(results))
@@ -196,10 +186,10 @@ def crawl_spider(area: str, types: int, page: int, **kwargs):
     return request_status
 
 
-def by_area_crawl_data(area="", types=0, **kwargs):
+def by_area_crawl_data(area="", type_=0, **kwargs):
     close_spider = False
     disable_page, max_disable_page = 0, 3
-    pages = list(range(1, 11))  # 目前仅支持前10000数据的搜索
+    pages = list(range(1, 101))  # 目前仅支持前10000数据的搜索
     while len(pages) > 0:
         if close_spider:
             break
@@ -208,39 +198,37 @@ def by_area_crawl_data(area="", types=0, **kwargs):
             break
 
         page = pages.pop(0)
-        logger.info(f"访问第{area}区-第{types}类-第{page}页数据")
+        logger.info(f"访问第{area}区-第{type_}类-第{page}页数据")
         while True:
-            success = crawl_spider(area, types, page, **kwargs)
+            success = crawl_spider(area, type_, page, **kwargs)
             if success == 'failure':
-                n = random.randint(100, 2400)
-                interval = math.log(n, 2)
-                logger.debug(f'等待{interval}s')
+                interval = math.log(random.randint(100, 2400), 2)
+                logger.debug(f'异常重试,等待{interval}s')
                 time.sleep(interval)
                 continue
             elif success == 'disable':
                 logger.warning(f"账号被禁止访问第{area}区-第{page}页数据")
                 disable_page += 1
-                break
             elif success == 'method_not_allowed':
                 logger.warning("服务器禁止使用当前 HTTP 方法的请求")
                 disable_page += 1
-                break
             elif success == 'stop':
                 close_spider = True
-            logger.info(f"第{area}区-第{page}页数据采集成功")
+            else:
+                logger.info(f"第{area}区-第{page}页数据采集成功")
             break
 
 
 def select_types(date: str, area: str):
-    for types in [1, 2, 3]:
+    for type_ in [1, 2, 3]:
         by_area_crawl_data(
             area=area,
-            types=types,
+            type_=type_,
             begin_time=date,
             end_time=date,
-            max_page=1000
+            max_page=100
         )
-        logger.info(f"{date}-第{area}区-第{types}类采集结束")
+        logger.info(f"{date}-第{area}区-第{type_}类采集结束")
 
 
 def select_area(date: str):