lizongze 1 year ago
parent
commit
0db188d6d8
1 file changed with 5 additions and 3 deletions

+ 5 - 3
FworkSpider/feapder/templates/njpc_detail_template.tmpl

@@ -21,7 +21,7 @@ redis_key = "njpc_details"
 
 
 # Download attachments for the planned-construction (njpc) spider
-def njpc_get_files(html, file_type="", s_key="http", proxies=False):
+def njpc_get_files(html, headers, file_type="", s_key="http", proxies=False):
     def parse_filetype(response, filetypes):
         val = response.headers.get("content-disposition")
         filetype = val.split('.')[-1].replace('"', '').replace("'", "")
@@ -44,6 +44,7 @@ def njpc_get_files(html, file_type="", s_key="http", proxies=False):
                         download_url=file_url,
                         callback=parse_filetype,
                         proxies=proxies,
+                        headers=headers,
                     )
                     attachments[str(len(attachments) + 1)] = attachment
             else:
@@ -56,7 +57,7 @@ def njpc_get_files(html, file_type="", s_key="http", proxies=False):
                     file_name = file_name.strip()
                     attachment = AD().fetch_attachment(
                         file_name=file_name, file_type=file_tp, download_url=file_url,
-                        proxies=proxies)
+                        proxies=proxies, headers=headers,)
                     attachments[str(len(attachments) + 1)] = attachment
         return attachments
 
@@ -124,7 +125,8 @@ class Details(feapder.PlanToBuildDetailSpider):
             html,
             file_type=fp.get("file_type", ""),
             s_key=fp.get("s_key", "http"),
-            proxies=fp.get("proxies", False)
+            proxies=fp.get("proxies", False),
+            headers=fp.get('headers', {}),
         )
         if attachments:
             data_item.projectinfo = {"attachments": attachments}
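For context, a minimal sketch of how a spider generated from this template could feed per-site request headers through to attachment downloads after this change. Only njpc_get_files, the fp dict keys, and the shape of the call come from the diff above; the sample html snippet, the attachment link, and the header values are illustrative assumptions.

# Illustrative sketch only: the page snippet and header values below are
# assumptions, not part of the commit.
html = '<a href="http://www.example.com/notice.pdf">附件</a>'  # assumed detail-page fragment

fp = {
    "file_type": "pdf",
    "s_key": "http",
    "proxies": False,
    # New in this commit: request headers forwarded to every attachment download.
    "headers": {
        "User-Agent": "Mozilla/5.0",
        "Referer": "http://www.example.com/",
    },
}

attachments = njpc_get_files(
    html,
    file_type=fp.get("file_type", ""),
    s_key=fp.get("s_key", "http"),
    proxies=fp.get("proxies", False),
    headers=fp.get("headers", {}),
)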