dongzhaorui 1 day ago
Parent commit 0843335cee

+ 122 - 0
FworkSpider/crawl_func/DP_ali.py

@@ -0,0 +1,122 @@
+# -*- coding: utf-8 -*-
+"""
+Created on 2025-08-12
+---------
+@summary: Alibaba slider CAPTCHA verification (acw_sc__v2)
+---------
+@author: Lzz
+"""
+import random
+import re
+import time
+
+import execjs
+from feapder.utils.log import log
+from feapder.utils.webdriver import DrissionPageDriver
+
+
+def get_acw_sc_v2(html):
+    arg1 = "".join(re.findall("arg1='(.*?)'", html))
+    if not arg1:
+        return {}
+
+    try:
+        js_script = '''
+            function getAcw_sc__v2(obt_arg1) {
+                String["prototype"]["hexXor"] = function (_0x4e08d8) {
+                    var _0x5a5d3b = '';
+                    for (var _0xe89588 = 0x0; _0xe89588 < this["length"] && _0xe89588 < _0x4e08d8["length"]; _0xe89588 += 2) {
+                        var _0x401af1 = parseInt(this["slice"](_0xe89588, _0xe89588 + 2), 16);
+                        var _0x105f59 = parseInt(_0x4e08d8["slice"](_0xe89588, _0xe89588 + 2), 16);
+                        var _0x189e2c = (_0x401af1 ^ _0x105f59)["toString"](16);
+                        if (_0x189e2c["length"] == 1) {
+                            _0x189e2c = '0' + _0x189e2c;
+                        }
+                        _0x5a5d3b += _0x189e2c;
+                    }
+                    return _0x5a5d3b;
+                };
+                String["prototype"]["unsbox"] = function () {
+                    var _0x4b082b = [15, 35,29, 24, 33, 16, 1, 38, 10, 9, 19, 31, 40, 27, 22, 23, 25, 13, 6, 11, 39, 18, 20, 8, 14, 21, 32, 26, 2, 30, 7, 4, 17, 5, 3, 28, 34, 37, 12, 36];
+                    var _0x4da0dc = [];
+                    var _0x12605e = '';
+                    for (var _0x20a7bf = 0x0; _0x20a7bf < this["length"]; _0x20a7bf++) {
+                        var _0x385ee3 = this[_0x20a7bf];
+                        for (var _0x217721 = 0; _0x217721 < _0x4b082b["length"]; _0x217721++) {
+                            if (_0x4b082b[_0x217721] == _0x20a7bf + 1) {
+                                _0x4da0dc[_0x217721] = _0x385ee3;
+                            }
+                        }
+                    }
+                    _0x12605e = _0x4da0dc["join"]('');
+                    return _0x12605e;
+                };
+
+                var _0x5e8b26 = "3000176000856006061501533003690027800375";
+                // var arg1 = "0A5F01F50F9BC66FB28038F18B99B7B10CFF4667"
+                var arg1 = obt_arg1
+                var _0x23a392 = arg1["unsbox"]();
+                arg2 = _0x23a392["hexXor"](_0x5e8b26);
+                return arg2
+            }
+        '''
+        ctx = execjs.compile(js_script)
+        arg2 = ctx.call('getAcw_sc__v2', arg1)
+        return {"acw_sc__v2": arg2}
+    except Exception:
+        return {}
+
+
+def create_cookie(page_url):
+    with DrissionPageDriver(headless=True) as driver:
+        try:
+            cookies = {}
+            driver.tab.get(page_url)
+            time.sleep(5)
+
+            acw_cookie = get_acw_sc_v2(driver.tab.html)
+            if acw_cookie:
+                cookies.update(acw_cookie)
+
+            for i in range(6):
+                try:
+                    slider = driver.tab.ele("x://div[contains(@id, 'aliyunCaptcha-sliding-slider')]")
+                    if slider:
+                        b1 = driver.tab.actions.move_to(slider, duration=0.5).hold()
+                        time.sleep(random.random())
+                        b2 = b1.move(50, random.uniform(-6, 6), duration=0.15)
+                        time.sleep(0.02)
+                        b3 = b2.move(100, random.uniform(-8, 8), duration=round(random.random() / 10, 2))
+                        time.sleep(0.03)
+                        b4 = b3.move(30, random.uniform(-10, 10), duration=round(random.random() / 10, 2))
+                        time.sleep(0.01)
+                        b5 = b4.move(70, random.uniform(-8, 8), duration=round(random.random() / 10, 2))
+                        time.sleep(0.03)
+                        b6 = b5.move(32, random.uniform(-6, 6), duration=0.2)
+                        time.sleep(0.2)
+                        b6.release()
+                        driver.tab.wait(2)
+                        if "验证失败,请刷新重试" in driver.tab.html:
+                            sec_ = driver.tab.ele('x://span[contains(@id, "aliyunCaptcha-sliding-refresh")]')
+                            sec_.click()
+                            time.sleep(.6)
+                        else:
+                            break
+                    else:
+                        break
+
+                except Exception as e:
+                    log.error(e)
+                    break
+                time.sleep(2)
+
+            cookies.update(driver.tab.cookies().as_dict())
+            return cookies
+
+        except Exception as e:
+            log.error(f"failed to obtain cookies: {e}")
+            driver.tab.clear_cache()
+            return {}
+
+
+# if __name__ == '__main__':
+#     cookies = create_cookie(page_url="https://fixedhall.zcygov.cn/universal/announcement-list")
+#     log.debug(cookies)
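A minimal usage sketch for the acw_sc__v2 half of this module, assuming it is importable as crawl_func.DP_ali; the URL, headers, and helper name below are placeholders, not part of this commit. The first response carries the obfuscated JS with arg1, get_acw_sc_v2 derives the cookie value, and the request is retried with it.

import requests

from crawl_func.DP_ali import get_acw_sc_v2


def fetch_with_acw_sc_v2(url, headers=None):
    # hypothetical helper: derive acw_sc__v2 from the challenge page, then retry once
    session = requests.Session()
    first = session.get(url, headers=headers, timeout=30)
    cookie = get_acw_sc_v2(first.text)  # {} when the page carries no arg1 challenge
    if cookie:
        session.cookies.update(cookie)
        return session.get(url, headers=headers, timeout=30)
    return first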

+ 78 - 37
FworkSpider/crawl_func/PYCCS_cookies.py

@@ -12,43 +12,84 @@ import execjs
 import requests
 
 
-def get_PYCCS_ck(url, headers, proxies=False):
-    session = requests.session()
-    session.proxies = proxies
+def get_ck_by_js(*args):
     ex_js = '''
-    function get_ck(a,b,c) {
-        var x08c924 = parseInt(a);
-        x08c924 = x08c924 * parseInt(b);
-        x08c924 = x08c924 + parseInt(c);
-        x08c924 = (x08c924 * 0x3 + 0x7);
-        if (x08c924 < 0x7b)
-            x08c924 = x08c924 + 0x929;
-        if (x08c924 > 0x929)
-            x08c924 = Math['floor'](x08c924 / 0x7b);
-        return x08c924
-    }
+        function get_ck(a,b,c) {
+            var x08c924 = parseInt(a);
+            x08c924 = x08c924 * parseInt(b);
+            x08c924 = x08c924 + parseInt(c);
+            x08c924 = (x08c924 * 0x3 + 0x7);
+            if (x08c924 < 0x7b)
+                x08c924 = x08c924 + 0x929;
+            if (x08c924 > 0x929)
+                x08c924 = Math['floor'](x08c924 / 0x7b);
+            return x08c924
+        }
     '''
+    a, b, c = args
     ctx = execjs.compile(ex_js)
-    count = 0
-    while count < 3:
-        try:
-            res = session.get(url, headers=headers, timeout=60,verify=False)
-
-            pm_data = "".join(re.findall('\|function\|(.*?)\|version\|',res.text,re.S)).split('|')
-
-            answer = ctx.call('get_ck',pm_data[1],pm_data[3],pm_data[-1])
-
-            data = {
-                "answer": f"{answer}"
-            }
-            resp = session.post(url.split('?')[0], headers=headers, data=data,timeout=60,verify=False)
-            cookies = session.cookies.get_dict()
-
-            if re.findall('\|function\|(.*?)\|version\|',resp.text,re.S):
-                print(f"请求解析异常!重试 {count} 次")
-                count += 1
-            else:
-                return cookies
-        except:
-            print("cookies_PYCCS 获取失败!")
-            return {}
+    return ctx.call('get_ck', a, b, c)
+
+
+def get_ck_by_py(*args):
+    a, b, c = args
+
+    # convert the inputs to integers
+    x08c924 = int(a)
+    x08c924 *= int(b)
+    x08c924 += int(c)
+
+    # apply the arithmetic
+    x08c924 = x08c924 * 0x3 + 0x7  # 0x3 == 3, 0x7 == 7
+
+    # conditional adjustments
+    if x08c924 < 0x7b:  # 0x7b == 123
+        x08c924 += 0x929  # 0x929 == 2345
+
+    if x08c924 > 0x929:
+        x08c924 = x08c924 // 0x7b  # integer floor division mirrors Math.floor
+
+    return x08c924
+
+
+def get_ck(*args, platform='py'):
+    if platform == 'js':
+        return get_ck_by_js(*args)
+    else:
+        return get_ck_by_py(*args)
+
+
+def find_pyccs(url, headers, proxies=None, platform=None):
+    cookies = {}
+
+    request_params = dict(headers=headers, timeout=60, verify=False)
+    for i in range(3):
+        with requests.Session() as session:
+            session.proxies = proxies
+            try:
+                res = session.get(url, **request_params)
+                pm_data = "".join(re.findall(r'\|function\|(.*?)\|version\|', res.text, re.S)).split('|')
+                answer = get_ck(pm_data[1], pm_data[3], pm_data[-1], platform=platform)
+                data = {"answer": f"{answer}"}
+
+                url2 = url.split('?')[0]
+                resp = session.post(url2, data=data, **request_params)
+                cookies = session.cookies.get_dict()
+                if re.findall(r'\|function\|(.*?)\|version\|', resp.text, re.S):
+                    print(f"request parsing error! retry {i + 1}")
+                else:
+                    return cookies
+            except Exception as e:
+                print(f"cookies_PYCCS fetch failed! {e}")
+                break
+
+    return cookies
+
+
+get_PYCCS_ck = find_pyccs
+
+
+if __name__ == "__main__":
+    assert get_ck_by_py(1, 2, 3) == get_ck_by_js(1, 2, 3)
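A worked example of the arithmetic both implementations share; the inputs are arbitrary, not taken from a real challenge page, and these lines could sit alongside the assert above.

# 12 * 34 = 408; 408 + 56 = 464; 464 * 3 + 7 = 1399
# 1399 is neither < 0x7b (123) nor > 0x929 (2345), so it is returned unchanged
assert get_ck_by_py(12, 34, 56) == 1399
assert get_ck_by_js(12, 34, 56) == 1399
assert get_ck(12, 34, 56, platform='js') == get_ck(12, 34, 56)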

+ 55 - 35
FworkSpider/crawl_func/YunSuoAutoJump.py

@@ -8,15 +8,51 @@ Created on 2023-09-12
 """
 import time
 
-import execjs
 import requests
+import execjs
+
+
+def yun_suo_auto_jump_by_js(url):
+    ctx = execjs.compile('''
+        function YunSuoAutoJump(url) {
+            function stringToHex(str) {
+                var val = "";
+                for (var i = 0; i < str.length; i++) {
+                    if (val == "") val = str.charCodeAt(i).toString(16); else val += str.charCodeAt(i).toString(16);
+                }
+                return val;
+            }
+            var width = 1536;
+            var height = 864;
+            var screendate = width + "," + height;
+            location = url + "?security_verify_data=" + stringToHex(screendate);
+            return location
+        }
+    ''')
+    return ctx.call("YunSuoAutoJump", url)
+
 
+def string_to_hex(s):
+    # convert each character of the string to hex, without the 0x prefix
+    return ''.join([format(ord(char), 'x') for char in s])
 
-def get_mid_code(security_verify_data_url, proxies=False):
 
-    session = requests.session()
-    session.proxies = proxies
+def yun_suo_auto_jump_by_py(url):
+    width = 1536
+    height = 864
+    screen_data = f"{width},{height}"
+    # build the URL that carries the security verification data
+    return f"{url}?security_verify_data={string_to_hex(screen_data)}"
 
+
+def yun_suo_auto_jump(url, platform='py'):
+    if platform == 'js':
+        return yun_suo_auto_jump_by_js(url)
+    else:
+        return yun_suo_auto_jump_by_py(url)
+
+
+def get_mid_code(url, proxies=None, platform=None):
     headers = {
         "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
         "Accept-Language": "zh-CN,zh;q=0.9",
@@ -26,34 +62,18 @@ def get_mid_code(security_verify_data_url, proxies=False):
         "Upgrade-Insecure-Requests": "1",
         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36"
     }
-    res = session.get(security_verify_data_url, headers=headers, timeout=60, verify=False)
-
-    ex_js = '''
-    function YunSuoAutoJump(url) {
-        function stringToHex(str) {
-            var val = "";
-            for (var i = 0; i < str.length; i++) {
-                if (val == "") val = str.charCodeAt(i).toString(16); else val += str.charCodeAt(i).toString(16);
-            }
-            return val;
-        }
-        var width = 1536;
-        var height = 864;
-        var screendate = width + "," + height;
-        location = url + "?security_verify_data=" + stringToHex(screendate);
-        return location
-    }
-    '''
-    ctx = execjs.compile(ex_js)
-    yz_url = ctx.call("YunSuoAutoJump",security_verify_data_url)
-
-    num = 0
-    cookies = {}
-    while num < 10:
-        response = session.get(yz_url, headers=headers, timeout=60, verify=False)
-        cookies = session.cookies.get_dict()
-        if cookies.get('security_session_mid_verify'):
-            break
-        num += 1
-        time.sleep(2)
-    return cookies
+    with requests.Session() as session:
+        session.proxies = proxies
+        request_params = dict(headers=headers, timeout=60, verify=False)
+        session.get(url, **request_params)
+        yz_url = yun_suo_auto_jump(url, platform=platform)
+
+        count, cookies = 0, {}
+        while count < 10:
+            session.get(yz_url, **request_params)
+            cookies = session.cookies.get_dict()
+            if cookies.get('security_session_mid_verify'):
+                break
+            count += 1
+            time.sleep(2)
+        return cookies
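For reference, the hex payload both paths produce for the hard-coded 1536x864 screen size; the URL below is a placeholder, not a real target.

url = "https://example.org/index"
assert string_to_hex("1536,864") == "313533362c383634"
assert yun_suo_auto_jump_by_py(url) == url + "?security_verify_data=313533362c383634"
assert yun_suo_auto_jump_by_py(url) == yun_suo_auto_jump_by_js(url)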

+ 0 - 2
FworkSpider/crawl_func/ali_slide_verify.py

@@ -84,7 +84,6 @@ class WebCookiePool(PageCookiePool):
 
 
     def create_cookie(self):
-
         with WebDriver(**self._kwargs) as driver_pool:
             import time
             try:
@@ -105,7 +104,6 @@ class WebCookiePool(PageCookiePool):
                             ActionChains(driver_pool).click_and_hold(on_element=slider).perform()
                             ActionChains(driver_pool).move_by_offset(xoffset=252, yoffset=0).perform()
                             ActionChains(driver_pool).pause(1).release().perform()
-
                             cookies.update(driver_pool.cookies)
                     except Exception as e:
                         log.info(e)

+ 60 - 61
FworkSpider/crawl_func/jsl_5s.py

@@ -28,72 +28,71 @@ class DTCookiePool(PageCookiePool):
         self.is_save_js = save_js
 
     def create_cookie(self):
-        proxies = self.proxies
-        try:
-            session = requests.Session()
-            session.proxies = proxies
-            start_url = self.page_url
-            res = session.get(start_url, headers=self.headers,timeout=120, verify=False)
-            js_func = "".join(re.findall("document.cookie=(.*?)location.href", res.text))
-            js_func = 'function sd() { return ' + js_func + "}"
-            ctx = execjs.compile(js_func)
-            sss = ctx.call("sd")
-            cookie = {}
-            for temp, index in res.cookies.get_dict().items():
-                cookie[temp] = index
+        request_params = dict(headers=self.headers, timeout=120, verify=False)
+        with requests.session() as session:
+            session.proxies = self.proxies
+            try:
+                res = session.get(self.page_url, **request_params)
+                js_func = "".join(re.findall("document.cookie=(.*?)location.href", res.content.decode()))
+                js_func = 'function sd() { return ' + js_func + "}"
+                ctx = execjs.compile(js_func)
+                sss = ctx.call("sd")
+                cookie = {}
+                for temp, index in res.cookies.get_dict().items():
+                    cookie[temp] = index
 
-            for item in sss.split(";"):
-                if '=' in item:
-                    cookie[item.split("=")[0]] = item.split("=")[-1]
+                for item in sss.split(";"):
+                    if '=' in item:
+                        cookie[item.split("=")[0]] = item.split("=")[-1]
 
-            res = session.get(start_url, cookies=cookie,headers=self.headers,timeout=120,verify=False)
-            html_str = res.content.decode()
-            if "<!DOCTYPE html>" in html_str:
-                html_str = re.sub("<!DOCTYPE html>[\s\S]*?</html>", "", html_str.strip(),re.S)
+                res = session.get(self.page_url, cookies=cookie, **request_params)
+                html_str = res.content.decode()
+                if "<!DOCTYPE html>" in html_str:
+                    html_str = re.sub(r"<!DOCTYPE html>[\s\S]*?</html>", "", html_str.strip(), flags=re.S)
 
-            if self.is_save_js:
-                with open('./source_code.js', 'w+', encoding='utf-8') as f:
-                    f.write(html_str)
+                if self.is_save_js:
+                    with open('./source_code.js', 'w+', encoding='utf-8') as f:
+                        f.write(html_str)
 
-            js_do_data = "".join(re.findall('};go\((.*?)\)', html_str))
-            js_func = re.sub("<(/*?)script>", "", html_str)
-            location = re.compile('location(.*?)}}else')
-            location2 = re.compile('location(.*?)}else')
-            setTimeout = re.compile('0x5dc;}}(.*?)setTimeout,function\(\)\{')
-            setTimeout2 = re.compile('0x5dc;}(.*?)setTimeout\(function\(\)\{')
-            gox = re.compile('};go(.*?)\)')
-            js_func = re.sub(location, "}}else", js_func)
-            js_func = re.sub(location2, "}else", js_func)
-            js_func = re.sub(setTimeout, "0x5dc;}}", js_func)
-            js_func = re.sub(setTimeout2, "0x5dc;}", js_func)
-            js_func = re.sub(gox, "return document['cookie']\n};", js_func)
+                js_do_data = "".join(re.findall(r'};go\((.*?)\)', html_str))
+                js_func = re.sub("<(/*?)script>", "", html_str)
+                location = re.compile('location(.*?)}}else')
+                location2 = re.compile('location(.*?)}else')
+                setTimeout = re.compile(r'0x5dc;}}(.*?)setTimeout,function\(\)\{')
+                setTimeout2 = re.compile(r'0x5dc;}(.*?)setTimeout\(function\(\)\{')
+                gox = re.compile(r'};go(.*?)\)')
+                js_func = re.sub(location, "}}else", js_func)
+                js_func = re.sub(location2, "}else", js_func)
+                js_func = re.sub(setTimeout, "0x5dc;}}", js_func)
+                js_func = re.sub(setTimeout2, "0x5dc;}", js_func)
+                js_func = re.sub(gox, "return document['cookie']\n};", js_func)
 
-            js_func = '''const jsdom = require("jsdom");
-                        const {JSDOM} = jsdom;
-                        const dom = new JSDOM(`<!DOCTYPE html><p>Hello world</p>`,
-                                            {
-                                                url: "https://example.org/",
-                                                referrer: "https://example.com/",
-                                                contentType: "text/html",
-                                            });
-                        window = dom.window;
-                        document = window.document;
-                        location = window.location;
-                        ''' + js_func
-            ctx = execjs.compile(js_func,cwd=self.cwd)
+                js_func = '''const jsdom = require("jsdom");
+                            const {JSDOM} = jsdom;
+                            const dom = new JSDOM(`<!DOCTYPE html><p>Hello world</p>`,
+                                                {
+                                                    url: "https://example.org/",
+                                                    referrer: "https://example.com/",
+                                                    contentType: "text/html",
+                                                });
+                            window = dom.window;
+                            document = window.document;
+                            location = window.location;
+                            ''' + js_func
+                ctx = execjs.compile(js_func, cwd=self.cwd)
 
-            if self.is_save_js:
-                with open('./clean_code.js', 'w+', encoding='utf-8') as f:
-                    f.write(js_func)
+                if self.is_save_js:
+                    with open('./clean_code.js', 'w+', encoding='utf-8') as f:
+                        f.write(js_func)
 
-            ss = ctx.call("go", json.loads(js_do_data))
-            for item in ss.split(";"):
-                if '=' in item:
-                    session.cookies.setdefault(item.split("=")[0], item.split("=")[-1])
-            session.get(start_url,headers=self.headers,timeout=120,verify=False)
-            cookies = requests.utils.dict_from_cookiejar(session.cookies)
-            return cookies
-        except Exception as e:
-            print("cookie生产错误:",e)
-            return {}
+                ss = ctx.call("go", json.loads(js_do_data))
+                for item in ss.split(";"):
+                    if '=' in item:
+                        session.cookies.setdefault(item.split("=")[0], item.split("=")[-1])
 
+                session.get(self.page_url, **request_params)
+                cookies = requests.utils.dict_from_cookiejar(session.cookies)
+                return cookies
+            except Exception as e:
+                print("cookie generation error:", e)
+                return {}
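Both jsl helpers parse document.cookie-style strings the same way; below is a small standalone sketch of that step with an illustrative sample value. It uses partition so a value containing '=' survives intact, whereas the split("=")[-1] form in the diff keeps only the part after the last '='.

def parse_cookie_string(cookie_str):
    # split a "k=v; k2=v2" style document.cookie string into a dict (sketch)
    cookies = {}
    for item in cookie_str.split(";"):
        if "=" in item:
            key, _, value = item.strip().partition("=")
            cookies[key] = value
    return cookies


# parse_cookie_string("__jsl_clearance_s=1700000000.123|0|abc; Max-Age=3600; Path=/")
# -> {'__jsl_clearance_s': '1700000000.123|0|abc', 'Max-Age': '3600', 'Path': '/'}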

+ 54 - 51
FworkSpider/crawl_func/jsl_clearance_s.py

@@ -26,58 +26,61 @@ class DTCookiePool(PageCookiePool):
         self.proxies = kwargs.get('proxies') or False
 
     def create_cookie(self):
-        session = requests.Session()
-        session.proxies = self.proxies
-        start_url = self.page_url
-        res = session.get(start_url, headers=self.headers, timeout=120, verify=False)
-        js_func = "".join(re.findall("document.cookie=(.*?)location.href", res.text))
-        js_func = 'function sd() { return ' + js_func + "}"
-        ctx = execjs.compile(js_func)
-        sss = ctx.call("sd")
-        cookie = {}
-        for temp, index in res.cookies.get_dict().items():
-            cookie[temp] = index
 
-        for item in sss.split(";"):
-            if '=' in item:
-                cookie[item.split("=")[0]] = item.split("=")[-1]
+        request_params = dict(headers=self.headers, timeout=120, verify=False)
+        with requests.Session() as session:
+            session.proxies = self.proxies
+            res = session.get(self.page_url, **request_params)
+            js_func = "".join(re.findall("document.cookie=(.*?)location.href", res.text))
+            js_func = 'function sd() { return ' + js_func + "}"
+            ctx = execjs.compile(js_func)
+            sss = ctx.call("sd")
+            cookie = {}
+            for temp, index in res.cookies.get_dict().items():
+                cookie[temp] = index
 
-        res = session.get(start_url, cookies=cookie,headers=self.headers,timeout=120,verify=False)
-        html_str = res.content.decode()
-        js_do_data = "".join(re.findall('};go\((.*?)\)', html_str))
-        js_func = re.sub("<(/*?)script>", "", html_str)
-        location = re.compile('location(.*?)}}else')
-        location2 = re.compile('location(.*?)}else')
-        setTimeout = re.compile('setTimeout(.*?)document')
-        gox = re.compile('};go(.*?)\)')
-        js_func = re.sub(location, "}}else", js_func)
-        js_func = re.sub(location2, "}else", js_func)
-        js_func = re.sub(setTimeout, "document", js_func)
-        js_func = re.sub('0x5dc;}(.*?)\(document', "0x5dc;}document", js_func)
-        js_func = re.sub(gox, "return document['cookie']\n};", js_func)
-        js_func = '''const jsdom = require("jsdom");
-                    const {JSDOM} = jsdom;
-                    const dom = new JSDOM(`<!DOCTYPE html><p>Hello world</p>`,
-                                        {
-                                            url: "https://example.org/",
-                                            referrer: "https://example.com/",
-                                            contentType: "text/html",
-                                        });
-                    window = dom.window;
-                    document = window.document;
-                    location = window.location;
-                    ''' + js_func
-        ctx = execjs.compile(js_func)
-        # with open('wzjyjt_xxgg_pm.js', 'w+', encoding='utf-8') as f:
-        #     f.write(js_func)
-        try:
-            ss = ctx.call("go", json.loads(js_do_data))
-            for item in ss.split(";"):
+            for item in sss.split(";"):
                 if '=' in item:
-                    session.cookies.setdefault(item.split("=")[0], item.split("=")[-1])
-            session.get(start_url,headers=self.headers,timeout=120,verify=False)
-            cookies = requests.utils.dict_from_cookiejar(session.cookies)
-            return cookies
-        except Exception as e:
-            pass
+                    cookie[item.split("=")[0]] = item.split("=")[-1]
 
+            res = session.get(self.page_url, cookies=cookie, **request_params)
+            html_str = res.content.decode()
+            js_do_data = "".join(re.findall(r'};go\((.*?)\)', html_str))
+            js_func = re.sub("<(/*?)script>", "", html_str)
+            location = re.compile('location(.*?)}}else')
+            location2 = re.compile('location(.*?)}else')
+            setTimeout = re.compile('setTimeout(.*?)document')
+            gox = re.compile(r'};go(.*?)\)')
+            js_func = re.sub(location, "}}else", js_func)
+            js_func = re.sub(location2, "}else", js_func)
+            js_func = re.sub(setTimeout, "document", js_func)
+            js_func = re.sub(r'0x5dc;}(.*?)\(document', "0x5dc;}document", js_func)
+            js_func = re.sub(gox, "return document['cookie']\n};", js_func)
+            js_func = '''const jsdom = require("jsdom");
+                        const {JSDOM} = jsdom;
+                        const dom = new JSDOM(`<!DOCTYPE html><p>Hello world</p>`,
+                                            {
+                                                url: "https://example.org/",
+                                                referrer: "https://example.com/",
+                                                contentType: "text/html",
+                                            });
+                        window = dom.window;
+                        document = window.document;
+                        location = window.location;
+                        ''' + js_func
+            ctx = execjs.compile(js_func)
+            # with open('wzjyjt_xxgg_pm.js', 'w+', encoding='utf-8') as f:
+            #     f.write(js_func)
+            try:
+                ss = ctx.call("go", json.loads(js_do_data))
+                for item in ss.split(";"):
+                    if '=' in item:
+                        session.cookies.setdefault(item.split("=")[0], item.split("=")[-1])
+
+                session.get(self.page_url, **request_params)
+                cookies = requests.utils.dict_from_cookiejar(session.cookies)
+                return cookies
+            except Exception:
+                return None
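The first stage that jsl_5s.py and jsl_clearance_s.py both repeat, isolated as a standalone sketch; the function name is illustrative and, unlike the diff, the value split keeps everything after the first '='.

import re

import execjs


def first_stage_cookie(page_text):
    # evaluate the inline "document.cookie=..." JS that the anti-bot page returns
    expr = "".join(re.findall("document.cookie=(.*?)location.href", page_text))
    if not expr:
        return {}
    ctx = execjs.compile("function sd() { return " + expr + "}")
    cookie_str = ctx.call("sd")
    return {p.split("=")[0]: p.split("=", 1)[1] for p in cookie_str.split(";") if "=" in p}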