# Gtcgpt.py
# -*- coding: utf-8 -*-
"""
Created on 2024-09-19
---------
@summary: 国铁采购平台 — China State Railway procurement platform (cg.95306.cn)
---------
@author: lzz
"""
from collections import namedtuple

import feapder
from feapder.utils.tools import joint_url

from items.spider_item import BidingListItem
from untils.tools import get_proxy
from fingerprint import get_fingerprint


class Spider(feapder.BiddingListSpider):

    def start_callback(self):
        """Initialize crawl menus, HTTP headers and per-session state.

        Runs once before requests are generated.  ``self.cookies`` starts
        as ``None`` so ``download_midware`` builds it lazily on first use;
        ``self.fp`` is a browser-fingerprint id sent both as the ``mhId``
        cookie and in the POST body.
        """
        # channel: listing-section display name
        # code: spider code used by the downstream pipeline
        # noticeType: site-side notice-type filter value
        # tid: API endpoint suffix under /proxy/portal/elasticSearch/
        # crawl_page: number of listing pages to crawl
        Menu = namedtuple('Menu', ['channel', 'code', 'noticeType', 'tid', 'crawl_page'])
        self.site = "国铁采购平台"
        self.menus = [
            Menu('采购公告', 'a_gtcgpt_cggg', '000', 'queryProcurementNoticeList', 20),
        ]
        self.headers = {
            "Accept": "application/json, text/javascript, */*; q=0.01",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36",
            "X-Requested-With": "XMLHttpRequest",
        }
        self.cookies = None          # built lazily in download_midware
        self.proxy = get_proxy()     # proxy shared by all requests until an error rotates it
        self.fp = get_fingerprint()  # browser fingerprint id (mhId)
  32. def start_requests(self):
  33. for menu in self.menus:
  34. referer = "https://cg.95306.cn/baseinfor/notice/procurementNotice"
  35. params = {
  36. "bidType": "",
  37. "noticeType": f"{menu.noticeType}",
  38. "transactionType": "01",
  39. "wzType": "",
  40. "title": "",
  41. "bidding": "",
  42. "navigation": ""
  43. }
  44. self.headers["Referer"] = joint_url(referer, params)
  45. url = f"https://cg.95306.cn/proxy/portal/elasticSearch/{menu.tid}"
  46. yield feapder.Request(url, item=menu._asdict(), page=1, proxies=False)
  47. def download_midware(self, request):
  48. if self.cookies is None:
  49. self.cookies = {
  50. 'AlteonPcgmh': '0a03b7f3bb36ad3f1f41',
  51. 'mhId': self.fp,
  52. }
  53. data = {
  54. 'mhId': self.fp,
  55. 'projBidType': '01',
  56. 'bidType': '',
  57. 'noticeType': '000',
  58. 'wzType': '',
  59. 'title': '',
  60. }
  61. request.data = data
  62. request.headers = self.headers
  63. request.cookies = self.cookies
  64. request.proxies = self.proxy
  65. def validate(self, request, response):
  66. data = response.json.get('data')
  67. if not data:
  68. raise ValueError('数据不能为空!')
  69. return True
  70. def parse(self, request, response):
  71. menu = request.item
  72. info_list = response.json.get('data')
  73. for info in info_list.get('resultData').get('result'):
  74. href_id = info.get('id')
  75. href = f"https://cg.95306.cn/baseinfor/notice/informationShow?id={href_id}"
  76. title = info.get('notTitle').strip()
  77. create_time = info.get('checkTime')
  78. area = "全国" # 省份
  79. city = "" # 城市
  80. list_item = BidingListItem() # 存储数据的管道
  81. list_item.href = href # 标书链接
  82. list_item.channel = menu.get("channel") # 最上方定义的抓取栏目 (编辑器定的)
  83. list_item.spidercode = menu.get("code") # 最上方定义的爬虫code(编辑器定的)
  84. list_item.title = title # 标题
  85. list_item.publishtime = create_time # 标书发布时间
  86. list_item.site = self.site
  87. list_item.area = area # 城市默认:全国
  88. list_item.city = city # 城市 默认为空
  89. list_item.unique_key = ("href",)
  90. list_item.parse = "self.detail_get"
  91. list_item.proxies = False
  92. params_d = {
  93. "noticeId": f"{href_id}",
  94. "mhId": self.fp,
  95. }
  96. list_item.request_params = {"params": params_d}
  97. list_item.parse_url = "https://cg.95306.cn/proxy/portal/elasticSearch/indexView"
  98. yield list_item
  99. # 无限翻页设置
  100. request = self.infinite_pages(request, response)
  101. yield request
  102. def exception_request(self, request, response):
  103. self.cookies = None
  104. self.proxy = get_proxy()
  105. self.fp = get_fingerprint()
  106. yield request
if __name__ == "__main__":
    # redis_key namespaces this spider's task/dedup queues in Redis.
    Spider(redis_key="lzz:Gtcgpt").start()