
Remove proxy service

dzr 8 months ago
parent
commit
78167e78f4
1 changed files with 4 additions and 52 deletions

+ 4 - 52
routes.py

@@ -6,9 +6,7 @@ from common.databases import mongo_table
 from common.log import logger
 from services import (
     accountManagePool,
-    get_base_url,
-    socks5ProxyPool,
-    httpProxyPool,
+    get_base_url
 )
 
 '''The modules below are loaded dynamically into global variables; do not delete'''
@@ -16,7 +14,6 @@ try:
     from services import zbytb
     from services import ybw
     # from services import nmpa
-    from services import site_monitor
 except ImportError as e:
     print(f"缺少全局变量, 原因:{e.args}")
 
@@ -34,9 +31,7 @@ Users = mongo_table('py_spider', 'spider_scheduler_auth')
 def verify_password(username, password):
     item = Users.find_one({'username': username})
     if item is not None:
-        user = {
-            item['username']: generate_password_hash(item['password'])
-        }
+        user = {item['username']: generate_password_hash(item['password'])}
         if username in user and check_password_hash(user.get(username), password):
             return username
     return None
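
For context, the verify_password hook above follows the standard flask_httpauth callback pattern; a minimal runnable sketch is below, with an in-memory user store standing in for the mongo-backed Users collection (the store and credentials are assumptions, not the repo's data):

    from flask import Flask
    from flask_httpauth import HTTPBasicAuth
    from werkzeug.security import generate_password_hash, check_password_hash

    app = Flask(__name__)
    auth = HTTPBasicAuth()

    # Hypothetical in-memory stand-in for the mongo-backed Users collection.
    USERS = {'spider': generate_password_hash('secret')}

    @auth.verify_password
    def verify_password(username, password):
        # flask_httpauth treats any truthy return value as the authenticated user.
        if username in USERS and check_password_hash(USERS[username], password):
            return username
        return None

    @app.route('/ping')
    @auth.login_required
    def ping():
        return 'pong'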
@@ -92,49 +87,6 @@ def show_chrome_proxy_plugin_user():
     return jsonify(data=ChromeUser)
 
 
-@app.route('/crawl/proxy/<scheme>/fetch', methods=['GET'])
-@auth.login_required
-def get_proxy(scheme):
-    # logger.info(f'[client ip]{request.remote_addr}, class:{scheduler_class_name}')
-    pk = request.args.get('pk')  # proxy type: 1 = lua downloader deployment nodes, 2 = python-only proxies
-
-    result = {}
-    try:
-        if scheme == 'http':
-            proxies = httpProxyPool.proxies(pk=pk)
-        elif scheme == 'socks5':
-            proxies = socks5ProxyPool.proxies(pk=pk)
-        else:
-            abort(404)
-
-        # logger.debug(f'[calling {scheme} proxy]{proxies}')
-        if proxies is not None:
-            result.update(proxies)
-    except (KeyError, IndexError):
-        pass
-
-    return jsonify(data=result)
-
-
-@app.route('/crawl/proxy/query', methods=['GET'])
-@auth.login_required
-def show_proxy():
-    pk = request.args.get('pk')
-    socks_pool = socks5ProxyPool.get_proxy_pool(pk=pk)
-    http_pool = httpProxyPool.get_proxy_pool(pk=pk)
-    pool = [*socks_pool, *http_pool]
-    return jsonify(data=pool)
-
-
-@app.route('/crawl/proxy/getips', methods=['GET'])
-@auth.login_required
-def show_proxy_ips():
-    pk = request.args.get('pk')
-    socks_ips = socks5ProxyPool.get_all_proxy_ip('socks5', pk=pk)
-    http_ips = httpProxyPool.get_all_proxy_ip('http', pk=pk)
-    return jsonify(data={'socks': socks_ips, 'http': http_ips})
-
-
 @app.route('/upload/data/<scheduler_class_name>/<table>', methods=['POST'])
 @auth.login_required
 def upload_data(scheduler_class_name, table):
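
Only the decorator and signature of the upload route above survive in this diff (its tail appears in the next hunk); a hedged client-side sketch of posting to it, where the host, credentials, and table name are assumptions for illustration:

    import requests

    resp = requests.post(
        'http://localhost:5000/upload/data/zbytb/data_bak',  # hypothetical host and table
        json={'title': 'example item'},
        auth=('spider', 'secret'),  # route is protected by @auth.login_required
    )
    print(resp.text)  # the next hunk shows a 'failure' return path in this handler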
@@ -148,7 +100,7 @@ def upload_data(scheduler_class_name, table):
         return 'failure'
 
 
-@app.route('/crawl/<scheduler_class_name>/task/fetch', methods=['GET'])
+@app.route('/task/<scheduler_class_name>/fetch', methods=['GET'])
 def get_crawl_task(scheduler_class_name):
     task = {}
     try:
@@ -161,7 +113,7 @@ def get_crawl_task(scheduler_class_name):
     return jsonify(data=task)
 
 
-@app.route('/crawl/<scheduler_class_name>/task/total', methods=['GET'])
+@app.route('/task/<scheduler_class_name>/total', methods=['GET'])
 def get_crawl_task_total(scheduler_class_name):
     total = {'total': 0}
     try:
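
After the route rename in the last two hunks, task endpoints live under /task/ rather than /crawl/, and neither carries @auth.login_required in this diff, so no credentials are passed. A minimal client sketch, where the host and port are assumptions:

    import requests

    BASE = 'http://localhost:5000'  # hypothetical service address

    # Formerly /crawl/zbytb/task/fetch and /crawl/zbytb/task/total.
    task = requests.get(f'{BASE}/task/zbytb/fetch').json()['data']
    total = requests.get(f'{BASE}/task/zbytb/total').json()['data']
    print(task, total)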