Gtcgpt.py

# -*- coding: utf-8 -*-
"""
Created on 2024-09-19
---------
@summary: 国铁采购平台 (China Railway procurement platform, cg.95306.cn)
---------
@author: lzz
"""
from collections import namedtuple

import feapder
import feapder.utils.tools as tools
from items.spider_item import BidingListItem
from untils.tools import get_proxy
from fingerprint import fetch_alteon_pcgmh, get_fingerprint, check_fingerprint
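

# Listing data comes from the portal's elasticSearch API. Judging from the code
# below, every request must carry a browser-fingerprint id (mhId) together with
# the AlteonPcgmh cookie; when the server replies with code '0-0203' the
# fingerprint is re-verified and the request is replayed (see validate()).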
class Spider(feapder.BiddingListSpider):

    def start_callback(self):
        Menu = namedtuple('Menu', ['channel', 'code', 'noticeType', 'tid', 'crawl_page'])
        self.site = "国铁采购平台"
        self.menus = [
            Menu('采购结果', 'a_gtcgpt_cgjg', '001', 'queryProcurementResultsList', 20),
        ]
        self.headers = {
            "Accept": "application/json, text/javascript, */*; q=0.01",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36",
            "X-Requested-With": "XMLHttpRequest",
        }
        self.alteon_pcgmh = None
        self.cookies = None
        self.proxy = get_proxy()
        self.fp = get_fingerprint()
        self.dt = tools.get_current_date("%Y-%m-%d")  # date used in the listing search filter
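
    # One request is generated per menu and per page (crawl_page pages in total).
    # The Referer header is rebuilt for each menu so that it matches the public
    # listing page of that notice type; the POST body itself is attached later in
    # download_midware().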
    def start_requests(self):
        for menu in self.menus:
            referer = "https://cg.95306.cn/baseinfor/notice/procurementNotice"
            params = {
                "bidType": "",
                "noticeType": f"{menu.noticeType}",
                "transactionType": "01",
                "wzType": "",
                "title": "",
                "bidding": "",
                "navigation": ""
            }
            self.headers["Referer"] = tools.joint_url(referer, params)
            url = f"https://cg.95306.cn/proxy/portal/elasticSearch/{menu.tid}"
            for page in range(1, menu.crawl_page + 1):
                yield feapder.Request(url, proxies=False, item=menu._asdict(), page=page)

    def download_midware(self, request):
        if self.alteon_pcgmh is None:
            self.alteon_pcgmh = fetch_alteon_pcgmh(self.proxy)
        if self.cookies is None:
            self.cookies = {
                'AlteonPcgmh': self.alteon_pcgmh,
                'mhId': self.fp,
            }
        if request.page > 1:
            # deeper pages: the API expects an explicit page number plus a date range
            data = {
                'mhId': self.fp,
                'projBidType': '01',
                'bidType': '',
                'noticeType': f"{request.item['noticeType']}",
                'title': '',
                'inforCode': '',
                'startDate': self.dt,
                'endDate': self.dt,
                'pageNum': request.page,
                'projType': '',
                'professionalCode': '',
                'createPeopUnit': '',
            }
        else:
            # first page: default listing query without pagination or date filters
            data = {
                'mhId': self.fp,
                'projBidType': '01',
                'bidType': '',
                'noticeType': f"{request.item['noticeType']}",
                'wzType': '',
                'title': '',
            }
        request.data = data
        request.headers = self.headers
        request.cookies = self.cookies
        request.proxies = self.proxy
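
    # Response code '0-0203' appears to mean the fingerprint was rejected; in that
    # case check_fingerprint() re-verifies it and the same request is replayed once
    # through request_retry(). Any other response must contain a non-empty 'data'
    # field to be considered valid.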
    def validate(self, request, response):
        if response.json['code'] == '0-0203':
            referer = self.headers["Referer"]
            check_fingerprint(self.fp, self.cookies, referer, self.proxy)
            request.callback = self.request_retry
            return True
        elif response.json.get('data'):
            return True
        else:
            raise ValueError('数据不能为空!')  # "data must not be empty"

    def request_retry(self, request, response):
        request.callback = self.parse
        yield request
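
    # Expected response shape, inferred from the field accesses below (assumption):
    # {"code": "...", "data": {"resultData": {"result": [
    #     {"id": "...", "notTitle": "...", "checkTime": "..."}, ...]}}}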
    def parse(self, request, response):
        menu = request.item
        info_list = response.json.get('data')
        for info in info_list.get('resultData').get('result'):
            href_id = info.get('id')
            href = f"https://cg.95306.cn/baseinfor/notice/informationShow?id={href_id}"
            title = info.get('notTitle').strip()
            create_time = info.get('checkTime')
            area = "全国"  # province/region
            city = ""  # city
            list_item = BidingListItem()  # item pipeline that stores the record
            list_item.href = href  # link to the notice
            list_item.channel = menu.get("channel")  # crawl channel defined in the menus above (set by the editor)
            list_item.spidercode = menu.get("code")  # spider code defined in the menus above (set by the editor)
            list_item.title = title  # title
            list_item.publishtime = create_time  # publish time of the notice
            list_item.site = self.site
            list_item.area = area  # region, defaults to "全国" (nationwide)
            list_item.city = city  # city, empty by default
            list_item.unique_key = ("href",)
            list_item.parse = "self.detail_get"
            list_item.proxies = False
            params_d = {
                "noticeId": f"{href_id}",
                "mhId": self.fp,
            }
            list_item.request_params = {"params": params_d}
            list_item.parse_url = "https://cg.95306.cn/proxy/portal/elasticSearch/indexView"
            yield list_item
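
    # On a failed request, drop the cached cookie, fingerprint and proxy so the
    # next attempt starts with a fresh session, then re-queue the request.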
    def exception_request(self, request, response):
        self.alteon_pcgmh = None
        self.cookies = None
        self.proxy = get_proxy()
        self.fp = get_fingerprint()
        yield request


if __name__ == "__main__":
    Spider(redis_key="lzz:Gtcgpt").start()