# proxy.py
  1. # -*- coding: utf-8 -*-
  2. """
  3. Created on 2023-05-11
  4. ---------
  5. @summary: 代理池
  6. ---------
  7. @author: Dzr
  8. """
  9. import ast
  10. import multiprocessing
  11. import random
  12. import threading
  13. from collections import deque
  14. from concurrent.futures import ThreadPoolExecutor, as_completed
  15. from operator import itemgetter
  16. from urllib.parse import urlparse
  17. import requests
  18. import setting as settings
  19. from base_server import BaseServer, tools
  20. from common.log import logger
  21. from common.redis_lock import acquire_lock_with_timeout, release_lock
  22. DEFAULT_UA = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36'
  23. def decrypt(input_str: str) -> str:
  24. """
  25. 定义base64解密函数
  26. :param input_str:
  27. :return:
  28. """
  29. # 对前面不是“=”的字节取索引,然后转换为2进制
  30. key = settings.jy_proxy['socks5']['decrypt']
  31. ascii_list = ['{:0>6}'.format(str(bin(key.index(i))).replace('0b', '')) for i in input_str if i != '=']
  32. output_str = ''
  33. # 补齐“=”的个数
  34. equal_num = input_str.count('=')
  35. while ascii_list:
  36. temp_list = ascii_list[:4]
  37. # 转换成2进制字符串
  38. temp_str = ''.join(temp_list)
  39. # 对没有8位2进制的字符串补够8位2进制
  40. if len(temp_str) % 8 != 0:
  41. temp_str = temp_str[0:-1 * equal_num * 2]
  42. # 4个6字节的二进制 转换 为三个8字节的二进制
  43. temp_str_list = [temp_str[x:x + 8] for x in [0, 8, 16]]
  44. # 二进制转为10进制
  45. temp_str_list = [int(x, 2) for x in temp_str_list if x]
  46. # 连接成字符串
  47. output_str += ''.join([chr(x) for x in temp_str_list])
  48. ascii_list = ascii_list[4:]
  49. return output_str
  50. def get_base_url():
  51. return settings.jy_proxy['socks5']['base_url']
  52. def get_netloc(proxy, default=None):
  53. proxies = None
  54. if isinstance(proxy, dict):
  55. proxies = proxy.get('proxies')
  56. if isinstance(proxies, str):
  57. proxies = tools.json_loads(proxies)
  58. # proxies = proxy.get('proxies') if isinstance(proxy, dict) else None
  59. if proxies is not None:
  60. parser = urlparse(proxies.get('http'))
  61. default = parser.netloc
  62. return default
  63. class BaseProxyPool(BaseServer):
  64. def __init__(self, name, redis_label, scheme):
  65. super(BaseProxyPool, self).__init__(server=name, label=redis_label)
  66. self.scheme = scheme.lower()
  67. self.proxy_name = self.scheme + self.server
  68. self.proxy_queue = f'{redis_label}_{self.scheme}'
  69. self.unique_key = ('ip', 'port') # 组合 proxy 指纹的字段名称
  70. def get_redis_name(self, proxy):
  71. return f"{self.proxy_queue}_{proxy['fingerprint']}"
  72. def get_redis_name_lst(self, pattern='*'):
  73. return self.redis_db.keys(self.proxy_queue + pattern)
  74. def get_proxy(self, name):
  75. items = self.redis_db.hgetall(name)
  76. if items is None or 'proxies' not in items:
  77. return None
  78. proxy = {
  79. 'proxies': ast.literal_eval(items['proxies']),
  80. 'fingerprint': items['fingerprint'],
  81. 'start_time': int(items['start_time']),
  82. 'end_time': int(items['end_time']),
  83. 'last_time': int(items['last_time']),
  84. 'usage': int(items['usage']),
  85. }
  86. return proxy
  87. def get(self, name, key):
  88. return self.redis_db.hget(name, key)
  89. def exists(self, proxy):
  90. return self.redis_db.exists(self.get_redis_name(proxy))
  91. def check(self, proxy):
  92. is_ok = False
  93. # url = 'https://myip.ipip.net'
  94. url = 'https://www.baidu.com/'
  95. netloc = get_netloc(proxy)
  96. try:
  97. requests_param = {
  98. "headers": {'User-Agent': DEFAULT_UA},
  99. "proxies": proxy['proxies'],
  100. "timeout": 5
  101. }
  102. requests.get(url, **requests_param)
  103. is_ok = True
  104. except requests.RequestException:
  105. pass
  106. msg = "正常" if is_ok else "失效"
  107. logger.debug(f"[{self.proxy_name}]检查代理Ip - {netloc} --通信{msg}")
  108. return proxy, is_ok
  109. def remove_proxy(self, proxy):
  110. netloc = get_netloc(proxy)
  111. logger.debug(f"[{self.proxy_name}]代理Ip - {netloc} --删除")
  112. if self.exists(proxy):
  113. redis_name = self.get_redis_name(proxy)
  114. self.redis_db.delete(redis_name)
  115. def add_proxy(self, proxy):
  116. netloc = get_netloc(proxy)
  117. logger.debug(f"[{self.proxy_name}]代理Ip - {netloc} --添加")
  118. if not self.exists(proxy):
  119. redis_name = self.get_redis_name(proxy)
  120. self.redis_db.hset(redis_name, None, None, mapping=proxy)
  121. expire_ts = proxy['end_time'] - tools.now_ts()
  122. self.redis_db.expire(redis_name, expire_ts)
  123. class ProxyPoolServer(BaseProxyPool, threading.Thread):
  124. def __init__(self, name, redis_label, scheme: str):
  125. threading.Thread.__init__(self)
  126. super(ProxyPoolServer, self).__init__(name, redis_label, scheme)
  127. self.label = f'{self.proxy_name}_{self.getName()}'
  128. self.ports = ['8862', '8863'] if self.scheme == "http" else ['8860', '8861']
  129. self.load_interval = 60 # 轮询访问vps代理服务的时间间隔
  130. def remove_failure_proxy(self, proxy_lst):
  131. """删除失效/故障代理ip"""
  132. logger.info(f"[{self.label}]清除无效代理Ip")
  133. proxy_fingerprints = set([proxy['fingerprint'] for proxy in proxy_lst])
  134. for redis_name in self.get_redis_name_lst():
  135. fingerprint = self.get(redis_name, 'fingerprint')
  136. if fingerprint not in proxy_fingerprints:
  137. self.redis_db.delete(redis_name)
  138. def request_proxy(self):
  139. logger.info(f"[{self.label}]请求vps服务")
  140. proxy_lst = []
  141. try:
  142. url = settings.jy_proxy['socks5']['url']
  143. response = requests.get(url, timeout=10)
  144. for item in response.json():
  145. ports = list(filter(lambda p: p in self.ports, item['ports']))
  146. if not ports:
  147. continue
  148. ip = decrypt(item['ip'])
  149. port = int(ports[random.randint(0, len(ports) - 1)])
  150. start_time = tools.now_ts()
  151. end_time = item['lifetime']
  152. if end_time - start_time > 0:
  153. proxy = {
  154. 'proxies': {
  155. 'http': '{}://{}:{}'.format(self.scheme, ip, port),
  156. 'https': '{}://{}:{}'.format(self.scheme, ip, port)
  157. },
  158. 'fingerprint': self.fingerprint(ip=ip, port=port),
  159. 'start_time': start_time,
  160. 'end_time': end_time,
  161. 'last_time': 0,
  162. 'usage': 0,
  163. }
  164. proxy_lst.append(proxy)
  165. except Exception as e:
  166. logger.error(f"[{self.label}]vps服务访问异常,原因:{e.args}")
  167. return proxy_lst
  168. def manage_proxy(self, proxy_lst: list, workers=1):
  169. self.remove_failure_proxy(proxy_lst)
  170. with ThreadPoolExecutor(max_workers=workers) as Executor:
  171. fs = [Executor.submit(self.check, proxy) for proxy in proxy_lst]
  172. for f in as_completed(fs):
  173. proxy, is_ok = f.result()
  174. if is_ok:
  175. self.add_proxy(proxy)
  176. else:
  177. self.remove_proxy(proxy)
  178. def run(self):
  179. logger.info(f'[{self.label}]开始生产代理Ip')
  180. while True:
  181. try:
  182. proxy_lst = self.request_proxy()
  183. if not proxy_lst:
  184. tools.delay(2)
  185. continue
  186. dynamic_workers = min((int(len(proxy_lst) / 2) or 1), 10)
  187. self.manage_proxy(proxy_lst, workers=dynamic_workers) # 线程池上限10
  188. tools.delay(self.load_interval)
  189. except Exception as e:
  190. logger.exception(e)
  191. class ProxyPoolClient(BaseProxyPool):
  192. def __init__(self, name: str, redis_label: str, scheme: str):
  193. super(ProxyPoolClient, self).__init__(name, redis_label, scheme)
  194. current_process = multiprocessing.current_process()
  195. sub_label = f'{tools.get_localhost_ip()}:{current_process.pid}'
  196. self.lock_label = f'{redis_label}:{sub_label}'
  197. @property
  198. def proxy_total(self):
  199. return len(self.get_redis_name_lst())
  200. def get_all_proxy(self):
  201. proxy_lst = deque([])
  202. for redis_name in self.get_redis_name_lst():
  203. proxy = self.get_proxy(redis_name)
  204. if isinstance(proxy, dict):
  205. proxy_lst.append(proxy)
  206. if len(proxy_lst) > 0:
  207. '''按照使用次数大小从低到高(左小右大)排序'''
  208. proxy_lst = deque(sorted(proxy_lst, key=itemgetter('usage')))
  209. return proxy_lst
  210. def get_proxy_pool(self):
  211. _pool_proxy = []
  212. for proxy in self.get_all_proxy():
  213. last_time = proxy['last_time']
  214. end_time = proxy['end_time']
  215. expire = end_time - tools.now_ts()
  216. _pool_proxy.append({
  217. 'proxies': proxy['proxies'],
  218. 'start_time': tools.ts2dt(proxy['start_time']),
  219. 'end_time': tools.ts2dt(end_time),
  220. 'last_time': tools.ts2dt(last_time) if last_time != 0 else '',
  221. 'expire': expire,
  222. 'usage': proxy['usage'],
  223. })
  224. # 展示时按照过期时间从大到小排列
  225. return list(sorted(_pool_proxy, key=lambda x: x['expire'], reverse=True))
  226. def get_all_proxy_ip(self, protocol):
  227. return [proxy['proxies']['http'].replace(f'{protocol}://', '') for proxy in self.get_all_proxy()]
  228. def proxies(self):
  229. lock = acquire_lock_with_timeout(self.redis_db, self.lock_label)
  230. if lock:
  231. proxy = {}
  232. if self.proxy_total > 0:
  233. proxy_lst = self.get_all_proxy()
  234. proxy = proxy_lst.popleft()
  235. name = self.get_redis_name(proxy)
  236. self.redis_db.hset(name, 'usage', proxy['usage'] + 1)
  237. self.redis_db.hset(name, 'last_time', tools.now_ts())
  238. release_lock(self.redis_db, self.lock_label, lock)
  239. return proxy.get('proxies')