# source file: 中介服务-服务终止公示-列表页.py
  1. # -*- coding: utf-8 -*-
  2. """
  3. Created on 2024-01-04
  4. ---------
  5. @summary: 广东省公共资源交易平台
  6. ---------
  7. @author: lzz
  8. """
  9. import json
  10. from collections import namedtuple
  11. import feapder
  12. from items.spider_item import BidingListItem
  13. from gd_utils import *
  14. class Spider(feapder.BiddingListSpider):
  15. def start_callback(self):
  16. Menu = namedtuple('Menu', ['channel', 'code', 'crawl_page'])
  17. self.site = "广东省公共资源交易平台"
  18. self.menus = [
  19. Menu('中介服务-服务终止公示', 'gd_gdsggzyjypt_zjfw_fwzzgs', 1),
  20. ]
  21. def start_requests(self):
  22. url = "https://ygp.gdzwfw.gov.cn/ggzy-portal/search/v2/items"
  23. for menu in self.menus:
  24. yield feapder.Request(url, item=menu._asdict(), page=1)
  25. def download_midware(self, request):
  26. page = request.page
  27. data = {
  28. "type": "trading-type",
  29. "openConvert": False,
  30. "keyword": "",
  31. "siteCode": "44",
  32. "secondType": "R",
  33. "tradingProcess": "2181,3181",
  34. "thirdType": "[]",
  35. "projectType": "",
  36. "publishStartTime": "",
  37. "publishEndTime": "",
  38. "pageNo": page,
  39. "pageSize": 10
  40. }
  41. en_str = get_enstr(data)
  42. data = json.dumps(data, separators=(',', ':'))
  43. request.data = data
  44. request.headers = {
  45. "Accept": "application/json, text/plain, */*",
  46. "Accept-Language": "zh-CN,zh;q=0.9",
  47. "Cache-Control": "no-cache",
  48. "Connection": "keep-alive",
  49. "Content-Type": "application/json",
  50. "Origin": "https://ygp.gdzwfw.gov.cn",
  51. "Pragma": "no-cache",
  52. "Referer": "https://ygp.gdzwfw.gov.cn/ggzy-portal/",
  53. "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
  54. "X-Dgi-Req-App": en_str.get('X-Dgi-Req-App'),
  55. "X-Dgi-Req-Nonce": en_str.get('X-Dgi-Req-Nonce'),
  56. "X-Dgi-Req-Signature": en_str.get('X-Dgi-Req-Signature'),
  57. "X-Dgi-Req-Timestamp": en_str.get('X-Dgi-Req-Timestamp'),
  58. }
  59. def parse(self, request, response):
  60. menu = request.item
  61. info_list = response.json.get('data').get('pageData')
  62. for info in info_list:
  63. noticeSecondType = info.get('noticeSecondType')
  64. edition = info.get('edition')
  65. noticeId = info.get('noticeId')
  66. projectCode = info.get('projectCode')
  67. tradingProcess = info.get('tradingProcess')
  68. siteCode = info.get('regionCode')
  69. publishDate = info.get('publishDate')
  70. params = {
  71. "siteCode": f"{siteCode}",
  72. "tradingType": f"{noticeSecondType}",
  73. "bizCode": f"{tradingProcess}",
  74. "projectCode": f"{projectCode}"
  75. }
  76. nodeId = get_nodeId(params,proxies=request.get_proxies()).get(noticeId)
  77. info['nodeId'] = nodeId
  78. href = create_href(info)
  79. title = info.get('noticeTitle').strip()
  80. create_time = deal_time(publishDate)
  81. regionName = info.get('regionName','').strip()
  82. area = "广东"
  83. city = regionName
  84. list_item = BidingListItem() # 存储数据的管道
  85. list_item.href = href # 标书链接
  86. list_item.unique_key = ('href',)
  87. list_item.channel = menu.get("channel") # 最上方定义的抓取栏目 (编辑器定的)
  88. list_item.spidercode = menu.get("code") # 最上方定义的爬虫code(编辑器定的)
  89. list_item.title = title # 标题
  90. list_item.site = self.site
  91. list_item.publishtime = create_time
  92. list_item.area = area # 城市默认:全国
  93. list_item.city = city # 城市 默认为空
  94. list_item.parse = "self.detail_get" # 详情页回调方法
  95. dparams = {
  96. "nodeId": f"{nodeId}",
  97. "version": f"{edition}",
  98. "tradingType": f"{noticeSecondType}",
  99. "noticeId": f"{noticeId}",
  100. "bizCode": f"{tradingProcess}",
  101. "projectCode": f"{projectCode}",
  102. "siteCode": f"{siteCode}"
  103. }
  104. list_item.request_params = {"params":dparams}
  105. list_item.deal_detail = [] # 抽取正文xpath
  106. list_item.proxies = False
  107. list_item.parse_url = "https://ygp.gdzwfw.gov.cn/ggzy-portal/center/apis/trading-notice/new/detail"
  108. yield list_item
  109. # 无限翻页
  110. request = self.infinite_pages(request, response)
  111. yield request
  112. if __name__ == "__main__":
  113. Spider(redis_key="lzz:gdsggzyjypt_zjfw_fwzzgs").start()