123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115 |
# -*- coding: utf-8 -*-
"""
Created on 2025-04-29
---------
@summary: 中国联通采购与招标网
---------
@author: lzz
"""
- import json
- from collections import namedtuple
- import feapder
- from items.spider_item import BidingListItem
- from untils.WebCookiePool import WebCookiePool
- from untils.tools import get_proxy
class Spider(feapder.BiddingListSpider):
    """Bidding-list spider for 中国联通采购与招标网 (China Unicom procurement site).

    Fetches paginated announcement lists from the site's JSON API, emits one
    ``BidingListItem`` per announcement, and paginates via ``infinite_pages``.
    """

    def start_callback(self):
        """Initialize site metadata, crawl menus, request headers and the cookie pool."""
        Menu = namedtuple('Menu', ['channel', 'code', 'tid', 'crawl_page'])
        self.site = "中国联通采购与招标网"
        self.menus = [
            Menu('变更公告', 'a_zgltcgyzbw_bggg', '001003', 1),
        ]
        self.headers = {
            "Accept": "application/json",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Authorization": "null",
            "Connection": "keep-alive",
            "Content-Type": "application/json;charset=UTF-8",
            "Origin": "http://www.chinaunicombidding.cn",
            "Referer": "http://www.chinaunicombidding.cn/bidInformation",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
            # NOTE(review): trailing semicolon in this header name looks odd but is
            # kept byte-identical — presumably matches captured traffic; confirm.
            "roleId;": ""
        }
        # Anti-bot cookie ("jqmEwVYRfTEJT") is minted by a headless browser pool
        # keyed in Redis; the list API rejects requests without it.
        self.cookie_pool = WebCookiePool(redis_key="zgydcgyzbw_ck",
                                         page_url="http://www.chinaunicombidding.cn/bidInformation",
                                         cookie_key="jqmEwVYRfTEJT")
        self.cookie_pool.user_agent("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36")

    def start_requests(self):
        """Yield one seed request per menu, each starting at page 1 with a fresh proxy."""
        url = "http://www.cupb.cn/api/v1/bizAnno/getAnnoList"
        for menu in self.menus:
            proxies = get_proxy()
            yield feapder.Request(url, item=menu._asdict(), page=1, proxies=proxies)

    def download_midware(self, request):
        """Attach the JSON POST payload, anti-bot cookies and headers before download."""
        page = request.page
        menu = request.item
        data = {
            "current": page,
            "pageSize": 10,
            "modeNo": "BizAnnoVoMtable",
            "pageNo": page,
            "annoType": menu.get('tid')
        }
        # Compact separators: the endpoint expects a raw JSON body.
        request.data = json.dumps(data, separators=(',', ':'))
        # Mint the cookie through the same proxy the request will use,
        # so the IP seen by the anti-bot check is consistent.
        self.cookie_pool.proxies(proxy=request.get_proxy())
        request.cookies = self.cookie_pool.create_cookie()
        request.headers = self.headers

    def validate(self, request, response):
        """Reject non-200 responses so feapder retries with a new proxy/cookie."""
        if response.status_code != 200:
            raise ConnectionRefusedError
        return True

    def parse(self, request, response):
        """Parse one page of the announcement list and emit list items.

        Yields a ``BidingListItem`` per record, then re-yields the request via
        ``infinite_pages`` for the next page.
        """
        menu = request.item
        # Guard against a 200 response whose body lacks data/records
        # (anti-bot challenge pages return 200 with a different payload).
        info_list = (response.json.get('data') or {}).get('records') or []
        for info in info_list:
            hid = info.get('id')
            href = f"http://www.chinaunicombidding.cn/bidInformation/detail?id={hid}"
            # annoName may be absent on malformed records; default to "".
            title = (info.get('annoName') or '').strip()
            create_time = info.get('createDate')
            cty = info.get('provinceName', '').replace('其他', '')
            area = cty
            city = ""

            list_item = BidingListItem()          # storage pipeline item
            list_item.href = href                 # announcement detail link
            list_item.unique_key = ('href',)      # dedupe key (was assigned twice)
            list_item.channel = menu.get("channel")     # crawl channel (from menus)
            list_item.spidercode = menu.get("code")     # spider code (from menus)
            list_item.title = title               # announcement title
            list_item.site = self.site
            list_item.publishtime = create_time
            list_item.area = area or "全国"       # province; defaults to 全国
            list_item.city = city                 # city, intentionally empty
            list_item.parse = "self.detail_get"   # detail-page callback name
            list_item.deal_detail = []            # body-extraction xpaths (none)
            list_item.proxies = False
            list_item.parse_url = f"http://www.chinaunicombidding.cn/api/v1/bizAnno/getAnnoDetailed/{hid}"
            yield list_item

        # Infinite pagination: re-yield the request bumped to the next page.
        request = self.infinite_pages(request, response)
        yield request

    def exception_request(self, request, response):
        """On failure, swap in a fresh proxy and retry the same request."""
        request.proxies = get_proxy()
        yield request
if __name__ == "__main__":
    # Entry point: run the spider using its dedicated Redis task key.
    Spider(redis_key="lzz:zgydcgyzbw_cgxqgs").start()
|