- # -*- coding: utf-8 -*-
- """
- Created on 2025-05-26
- ---------
- @summary: 华能燃料交易网站
- ---------
- @author: lzz
- """
- import feapder
- from items.spider_item import MgpListItem
- from collections import namedtuple
- import requests
def get_cookeis(proxies=False):
    """Fetch the session cookies required by fec.hpi.com.cn.

    Hits the login page, then warms up the two bulletin-topic URLs so the
    server attaches/refreshes any session cookies. A ``requests.Session`` is
    used so cookies set by ANY of the three responses are captured — the
    previous implementation only kept the cookies from the first response
    and discarded anything the warm-up requests set.

    :param proxies: requests-style proxies mapping, or ``False`` for a
                    direct connection (feapder convention; requests treats
                    a falsy value the same as ``None``).
    :return: dict mapping cookie name -> value.
    """
    headers = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
        "priority": "u=0, i",
        "referer": "https://fec.hpi.com.cn/Home/Login",
        "upgrade-insecure-requests": "1",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36"
    }
    session = requests.Session()
    # Login page: issues the initial session cookies.
    session.get("https://fec.hpi.com.cn/Home/Login",
                headers=headers, timeout=30, proxies=proxies)
    # Warm-up requests: the bulletin pages may set or refresh session state.
    session.get("https://fec.hpi.com.cn/Topic/Index?currmenuId=100&topiccode=BULLETIN",
                headers=headers, timeout=30, proxies=proxies)
    session.get("https://fec.hpi.com.cn/Topic/Search?topiccode=BULLETIN",
                headers=headers, timeout=30, proxies=proxies)
    return session.cookies.get_dict()
class Hnrljywz(feapder.BiddingListSpider):
    """List spider for the Huaneng fuel trading site (华能燃料交易网站).

    Crawls the bid-result listing pages, normalizes each row into a
    ``MgpListItem`` and hands detail parsing off to ``self.detail_get``.
    On HTTP 500 (expired session) it refreshes cookies and retries, giving
    up after 5 consecutive failures.
    """

    def start_callback(self):
        # Menu fields: channel display name, spider code, URL path segment,
        # 1-based column index of the winning-company cell, pages to crawl.
        Menu = namedtuple('Menu', ['channel', 'code', 'typeone', 'order', 'crawl_page'])
        self.site = "华能燃料交易网站"
        self.menus = [
            Menu('直达中标结果', 'a_hnrljywz_zdzbjg', '', '3', 1),
        ]
        # Consecutive cookie-refresh retries after HTTP 500 responses.
        self.count = 0
        self.headers = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
            "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
            "priority": "u=0, i",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36"
        }
        self.cookies = get_cookeis()

    def start_requests(self):
        # One seed request per configured menu; the URL is filled in by
        # download_midware based on the menu code and page number.
        for menu in self.menus:
            yield feapder.Request(item=menu._asdict(), headers=self.headers,
                                  proxies=False, page=1)

    def download_midware(self, request):
        # Build the target URL and attach the shared headers/cookies.
        menu = request.item
        if menu["code"] == 'a_hnrljywz_zdzbjg':
            url = "https://fec.hpi.com.cn/Evaluate/Index?currmenuId=100"
        else:
            url = f"https://fec.hpi.com.cn/{menu['typeone']}?page={request.page}"
        request.url = url
        request.headers = self.headers
        request.cookies = self.cookies

    @staticmethod
    def _normalize_time(raw):
        """Normalize a raw timestamp cell to 'YYYY-MM-DD HH:MM:SS'.

        Accepts '2025/05/26 10:00'- or '2025-05-26 10:00'-style stamps;
        appends ':00' when the seconds are missing. Returns '' when the
        cell contains no recognizable date separator.
        """
        if '/' not in raw and '-' not in raw:
            return ""
        ts = raw.replace('/', '-').strip()
        if len(ts) == 16:  # 'YYYY-MM-DD HH:MM' — seconds missing
            ts += ":00"
        return ts

    def parse(self, request, response):
        if self.count > 5:
            # Too many consecutive cookie refreshes failed; stop retrying.
            return
        if response.status_code == 500:
            # Session expired: refresh cookies and re-queue the same request.
            self.count += 1
            self.cookies = get_cookeis()
            yield request
        else:
            self.count = 0
            menu = request.item
            info_list = response.xpath('//table[@class="contain_table"]/tr')
            order = menu.get('order')
            # Only the last 20 rows are the current listing entries.
            for info in info_list[-20:]:
                title = info.xpath('./td[1]/a/text()').extract_first()
                href = info.xpath('./td[1]/a/@href').extract_first()
                if not title or not href:
                    # Header or malformed rows carry no anchor in the first
                    # cell; the old code raised AttributeError on None here.
                    continue
                title = title.strip()
                # Winning company name, used to disambiguate duplicate hrefs.
                zbgs = "".join(info.xpath(f'./td[{order}]/text()').extract()).strip()
                create_time_org = "".join(info.xpath('./td[last()]/text()').extract())
                create_time = self._normalize_time(create_time_org)

                area = "全国"  # province; site lists nationwide notices
                city = ""      # city unknown from the listing page

                list_item = MgpListItem()            # pipeline item
                list_item.href = href                # notice URL
                list_item.channel = menu.get("channel")  # crawl channel (editor-defined)
                list_item.spidercode = menu.get("code")  # spider code (editor-defined)
                list_item.title = title              # notice title
                list_item.publishtime = create_time  # publish timestamp
                list_item.site = self.site
                list_item.area = area
                list_item.city = city
                # href alone may repeat across winners, so key on both.
                list_item.unique_key = ("href", zbgs)
                list_item.parse = "self.detail_get"
                list_item.proxies = False
                list_item.parse_url = href
                yield list_item

            # Infinite paging: framework decides whether a next-page
            # request is needed based on crawl_page and the results above.
            request = self.infinite_pages(request, response)
            yield request
if __name__ == "__main__":
    # Standalone entry point: run the spider against its redis task key.
    spider = Hnrljywz(redis_key="lzz:hnrljywz_xsgg")
    spider.start()