Dfxwszhcgpt.py

# -*- coding: utf-8 -*-
"""
Created on 2025-01-06
---------
@summary: 东方希望数字化采购平台 (East Hope digital procurement platform)
---------
@author: lzz
"""
from collections import namedtuple

import feapder
from items.spider_item import BidingListItem


class Spider(feapder.BiddingListSpider):

    def start_callback(self):
        Menu = namedtuple('Menu', ['channel', 'code', 'sourceType', 'purchaseType', 'crawl_page'])

        self.site = "东方希望数字化采购平台"

        self.menus = [
            Menu('招标公告', 'a_dfxwszhcgpt_zbgg', 'BR', '1', 3),
        ]

        self.headers = {
            "Accept": "application/json, text/javascript, */*; q=0.01",
            "Accept-Language": "zh-CN,zh;q=0.9",
            "Cache-Control": "no-cache",
            "Origin": "https://srm.easthope.cn",
            "Referer": "https://srm.easthope.cn/",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36",
        }
    def start_requests(self):
        url = "https://srm-gw.easthope.cn/ssrc/v1/53/source-notices/br-list/advance/public"
        for menu in self.menus:
            yield feapder.Request(url, item=menu._asdict(), page=1)
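
    # NOTE: feapder's request.page counter starts at 1, while this list API is
    # paginated from 0, so download_midware below sends "page - 1".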
    def download_midware(self, request):
        page = request.page
        sourceType = request.item.get('sourceType')
        purchaseType = request.item.get('purchaseType')
        params = {
            "sourceType": f"{sourceType}",
            "page": f"{page - 1}",
            "size": "10",
            "bidTitle": "",
            "projectType": "",
            "startDate": "",
            "endDate": "",
            "industry": "",
            "purchaseType": f"{purchaseType}"
        }
        request.params = params
        request.headers = self.headers
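
    # Each list entry below becomes a BidingListItem. The detail page is not
    # requested here: the item carries parse_url and the deal_detail XPath so
    # that the detail step (detail_get) can fetch and extract the notice body.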
    def parse(self, request, response):
        menu = request.item
        info_list = response.json.get('content')
        for info in info_list:
            sourceType = info.get('sourceType')
            noticeId = info.get('noticeId')
            sourceHeaderId = info.get('sourceHeaderId')
            advanceNoticeId = info.get('advanceNoticeId')
            sourceFrom = info.get('sourceFrom')
            href = f"https://srm.easthope.cn/oauth/public/default/source_notice_advance_detail.html?type={sourceType}&from=ZBXX_HOME&noticeId={noticeId}&sourceHeaderId={sourceHeaderId}&advanceNoticeId={advanceNoticeId}&sourceFrom={sourceFrom}"
            title = info.get('bidTitle').strip()
            create_time = info.get('approvedDate')

            area = "全国"  # province
            city = ""      # city

            list_item = BidingListItem()  # item (pipeline) that stores the data
            list_item.href = href  # notice link
            list_item.channel = menu.get("channel")  # crawl channel defined in the menus above (set by the editor)
            list_item.spidercode = menu.get("code")  # spider code defined in the menus above (set by the editor)
            list_item.title = title  # title
            list_item.publishtime = create_time  # notice publish time
            list_item.site = self.site
            list_item.area = area  # province, default: 全国 (nationwide)
            list_item.city = city  # city, empty by default
            list_item.unique_key = ('href', 'title')
            list_item.parse = "self.detail_get"
            list_item.deal_detail = ['//div[@class="notice-detail-body"]']
            list_item.proxies = False
            list_item.parse_url = href

            yield list_item

        # Paginate: keep requesting list pages until crawl_page is reached
        request = self.infinite_pages(request, response)
        yield request


if __name__ == "__main__":
    Spider(redis_key="lzz:DfxwszhcgptNew").start()