# proxy.py

# -*- coding: utf-8 -*-
"""
Created on 2023-05-11
---------
@summary: proxy pool
---------
@author: Dzr
"""
import ast
import multiprocessing
import random
import threading
from collections import deque
from concurrent.futures import ThreadPoolExecutor, as_completed
from operator import itemgetter
from urllib.parse import urlparse

import requests

import setting as settings
from base_server import BaseServer, tools
from common.log import logger
from common.redis_lock import acquire_lock_with_timeout, release_lock

DEFAULT_UA = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36'


def decrypt(input_str: str) -> str:
    """
    Base64-style decoding against the custom alphabet configured in settings.
    :param input_str:
    :return:
    """
    # Map every non-padding ('=') character to its index in the key, rendered as a 6-bit binary string
    key = settings.jy_proxy['socks5']['decrypt']
    ascii_list = ['{:0>6}'.format(str(bin(key.index(i))).replace('0b', '')) for i in input_str if i != '=']
    output_str = ''
    # Number of '=' padding characters
    equal_num = input_str.count('=')
    while ascii_list:
        temp_list = ascii_list[:4]
        # Join the group into one binary string
        temp_str = ''.join(temp_list)
        # Drop the padding bits when the group is not a multiple of 8 bits
        if len(temp_str) % 8 != 0:
            temp_str = temp_str[0:-1 * equal_num * 2]
        # Four 6-bit groups become three 8-bit groups
        temp_str_list = [temp_str[x:x + 8] for x in [0, 8, 16]]
        # Convert each 8-bit group to its decimal value
        temp_str_list = [int(x, 2) for x in temp_str_list if x]
        # Append the decoded characters
        output_str += ''.join([chr(x) for x in temp_str_list])
        ascii_list = ascii_list[4:]
    return output_str
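

# Sanity-check sketch for decrypt(), assuming (hypothetically) that the key stored at
# settings.jy_proxy['socks5']['decrypt'] is the standard base64 alphabet 'A-Za-z0-9+/'.
# With that key the routine matches base64.b64decode on ASCII input, e.g.:
#
#   import base64
#   base64.b64encode(b'127.0.0.1').decode()  # -> 'MTI3LjAuMC4x'
#   decrypt('MTI3LjAuMC4x')                  # -> '127.0.0.1'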


def get_base_url():
    return settings.jy_proxy['socks5']['base_url']


def get_netloc(proxy, default=None):
    proxies = None
    if isinstance(proxy, dict):
        proxies = proxy.get('proxies')
        if isinstance(proxies, str):
            proxies = tools.json_loads(proxies)

    if proxies is not None:
        parser = urlparse(proxies.get('http'))
        default = parser.netloc

    return default
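

# Example of what get_netloc() extracts (values are illustrative only):
#
#   get_netloc({'proxies': {'http': 'socks5://1.2.3.4:8860',
#                           'https': 'socks5://1.2.3.4:8860'}})
#   # -> '1.2.3.4:8860'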


class BaseProxyPool(BaseServer):

    def __init__(self, name, redis_label, scheme):
        super(BaseProxyPool, self).__init__(server=name, label=redis_label)
        self.scheme = scheme.lower()
        self.proxy_name = self.scheme + self.server
        self.proxy_queue = f'{redis_label}_{self.scheme}'
        self.unique_key = ('ip', 'port')  # field names used to build the proxy fingerprint

    def get_redis_name(self, proxy):
        return f"{self.proxy_queue}_{proxy['fingerprint']}"

    def get_redis_name_lst(self, pattern='*'):
        return self.redis_db.keys(self.proxy_queue + pattern)

    def get_proxy(self, name):
        items = self.redis_db.hgetall(name)
        if not items or 'proxies' not in items:
            return None

        proxy = {
            'proxies': ast.literal_eval(items['proxies']),
            'fingerprint': items['fingerprint'],
            'start_time': int(items['start_time']),
            'end_time': int(items['end_time']),
            'last_time': int(items['last_time']),
            'usage': int(items['usage']),
        }
        return proxy
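
    # Illustrative shape of one stored hash, matching what get_proxy() parses back out
    # (values are made up; the redis key is built by get_redis_name()):
    #
    #   {proxy_queue}_{fingerprint} -> {
    #       'proxies': "{'http': 'socks5://1.2.3.4:8860', 'https': 'socks5://1.2.3.4:8860'}",
    #       'fingerprint': '<fingerprint(ip, port)>',
    #       'start_time': 1683770000, 'end_time': 1683773600, 'last_time': 0, 'usage': 0,
    #   }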

    def get(self, name, key):
        return self.redis_db.hget(name, key)

    def exists(self, proxy):
        return self.redis_db.exists(self.get_redis_name(proxy))

    def check(self, proxy):
        is_ok = False
        url = 'https://myip.ipip.net'
        netloc = get_netloc(proxy)
        try:
            requests_param = {
                "headers": {'User-Agent': DEFAULT_UA},
                "proxies": proxy['proxies'],
                "timeout": 5
            }
            requests.get(url, **requests_param)
            is_ok = True
        except requests.RequestException:
            pass

        msg = "OK" if is_ok else "failed"
        logger.debug(f"[{self.proxy_name}] check proxy IP - {netloc} -- connectivity {msg}")
        return proxy, is_ok

    def remove_proxy(self, proxy):
        netloc = get_netloc(proxy)
        logger.debug(f"[{self.proxy_name}] proxy IP - {netloc} -- remove")
        if self.exists(proxy):
            redis_name = self.get_redis_name(proxy)
            self.redis_db.delete(redis_name)

    def add_proxy(self, proxy):
        netloc = get_netloc(proxy)
        logger.debug(f"[{self.proxy_name}] proxy IP - {netloc} -- add")
        if not self.exists(proxy):
            redis_name = self.get_redis_name(proxy)
            self.redis_db.hset(redis_name, None, None, mapping=proxy)
            # expire the redis key together with the proxy itself
            expire_ts = proxy['end_time'] - tools.now_ts()
            self.redis_db.expire(redis_name, expire_ts)


class ProxyPoolServer(BaseProxyPool, threading.Thread):

    def __init__(self, name, redis_label, scheme: str):
        threading.Thread.__init__(self)
        super(ProxyPoolServer, self).__init__(name, redis_label, scheme)
        self.label = f'{self.proxy_name}_{self.getName()}'
        self.ports = ['8862', '8863'] if self.scheme == "http" else ['8860', '8861']
        self.load_interval = 60  # polling interval (seconds) for the VPS proxy service

    def remove_failure_proxy(self, proxy_lst):
        """Remove expired/broken proxy IPs."""
        logger.info(f"[{self.label}] clearing invalid proxy IPs")
        proxy_fingerprints = set([proxy['fingerprint'] for proxy in proxy_lst])
        for redis_name in self.get_redis_name_lst():
            fingerprint = self.get(redis_name, 'fingerprint')
            if fingerprint not in proxy_fingerprints:
                self.redis_db.delete(redis_name)

    def request_proxy(self):
        logger.info(f"[{self.label}] requesting the VPS service")
        proxy_lst = []
        try:
            url = settings.jy_proxy['socks5']['url']
            response = requests.get(url, timeout=10)
            for item in response.json():
                ports = list(filter(lambda p: p in self.ports, item['ports']))
                if not ports:
                    continue

                ip = decrypt(item['ip'])
                port = int(ports[random.randint(0, len(ports) - 1)])
                start_time = tools.now_ts()
                end_time = item['lifetime']
                if end_time - start_time > 0:
                    proxy = {
                        'proxies': {
                            'http': '{}://{}:{}'.format(self.scheme, ip, port),
                            'https': '{}://{}:{}'.format(self.scheme, ip, port)
                        },
                        'fingerprint': self.fingerprint(ip=ip, port=port),
                        'start_time': start_time,
                        'end_time': end_time,
                        'last_time': 0,
                        'usage': 0,
                    }
                    proxy_lst.append(proxy)
        except Exception as e:
            logger.error(f"[{self.label}] VPS service request failed, reason: {e.args}")

        return proxy_lst
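
    # Assumed shape of one item in the VPS service response, inferred from the parsing
    # above (field values are illustrative only):
    #
    #   {'ip': '<encoded with the custom alphabet understood by decrypt()>',
    #    'ports': ['8860', '8861'],
    #    'lifetime': 1683773600}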

    def manage_proxy(self, proxy_lst: list, workers=1):
        self.remove_failure_proxy(proxy_lst)
        with ThreadPoolExecutor(max_workers=workers) as executor:
            fs = [executor.submit(self.check, proxy) for proxy in proxy_lst]
            for f in as_completed(fs):
                proxy, is_ok = f.result()
                if is_ok:
                    self.add_proxy(proxy)
                else:
                    self.remove_proxy(proxy)

    def run(self):
        logger.info(f'[{self.label}] start producing proxy IPs')
        while True:
            proxy_lst = self.request_proxy()
            if not proxy_lst:
                tools.delay(2)
                continue

            dynamic_workers = min(len(proxy_lst) // 2 or 1, 10)  # cap the thread pool at 10 workers
            self.manage_proxy(proxy_lst, workers=dynamic_workers)
            tools.delay(self.load_interval)
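

# Illustrative wiring of the producer thread; the constructor arguments below are
# made-up placeholders, not values taken from settings:
#
#   server = ProxyPoolServer(name='_proxy_pool', redis_label='jy', scheme='socks5')
#   server.start()  # refreshes the redis-backed pool every load_interval seconds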


class ProxyPoolClient(BaseProxyPool):

    def __init__(self, name: str, redis_label: str, scheme: str):
        super(ProxyPoolClient, self).__init__(name, redis_label, scheme)
        current_process = multiprocessing.current_process()
        sub_label = f'{tools.get_localhost_ip()}:{current_process.pid}'
        self.lock_label = f'{redis_label}:{sub_label}'

    @property
    def proxy_total(self):
        return len(self.get_redis_name_lst())

    def get_all_proxy(self):
        proxy_lst = deque([])
        for redis_name in self.get_redis_name_lst():
            proxy = self.get_proxy(redis_name)
            if isinstance(proxy, dict):
                proxy_lst.append(proxy)

        if len(proxy_lst) > 0:
            # sort by usage count, ascending (least used on the left)
            proxy_lst = deque(sorted(proxy_lst, key=itemgetter('usage')))

        return proxy_lst

    def get_proxy_pool(self):
        _pool_proxy = []
        for proxy in self.get_all_proxy():
            last_time = proxy['last_time']
            end_time = proxy['end_time']
            expire = end_time - tools.now_ts()
            _pool_proxy.append({
                'proxies': proxy['proxies'],
                'start_time': tools.ts2dt(proxy['start_time']),
                'end_time': tools.ts2dt(end_time),
                'last_time': tools.ts2dt(last_time) if last_time != 0 else '',
                'expire': expire,
                'usage': proxy['usage'],
            })

        # for display, order by time to expiry, longest first
        return list(sorted(_pool_proxy, key=lambda x: x['expire'], reverse=True))

    def get_all_proxy_ip(self, protocol):
        return [proxy['proxies']['http'].replace(f'{protocol}://', '') for proxy in self.get_all_proxy()]

    def proxies(self):
        proxy = {}
        lock = acquire_lock_with_timeout(self.redis_db, self.lock_label)
        if lock:
            if self.proxy_total > 0:
                proxy_lst = self.get_all_proxy()
                # hand out the least-used proxy and update its bookkeeping fields
                proxy = proxy_lst.popleft()
                name = self.get_redis_name(proxy)
                self.redis_db.hset(name, 'usage', proxy['usage'] + 1)
                self.redis_db.hset(name, 'last_time', tools.now_ts())
            release_lock(self.redis_db, self.lock_label, lock)

        return proxy.get('proxies')
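

# Illustrative consumer-side usage (placeholder arguments again; they must match
# whatever the ProxyPoolServer instance was started with):
#
#   client = ProxyPoolClient(name='_proxy_pool', redis_label='jy', scheme='socks5')
#   proxies = client.proxies()  # e.g. {'http': 'socks5://1.2.3.4:8860', 'https': 'socks5://1.2.3.4:8860'}
#   if proxies:
#       requests.get('https://example.com', proxies=proxies, timeout=10)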