proxy.py
# -*- coding: utf-8 -*-
"""
Created on 2023-05-11
---------
@summary: Proxy pool
---------
@author: Dzr
"""
import ast
import multiprocessing
import random
import threading
from collections import deque
from concurrent.futures import ThreadPoolExecutor, as_completed
from operator import itemgetter
from urllib.parse import urlparse

import requests

import setting as settings
from base_server import BaseServer, tools
from common.log import logger
from common.redis_lock import OptimisticLock

DEFAULT_UA = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36'


def decrypt(input_str: str) -> str:
    """
    Base64-style decoding with the custom alphabet configured in settings.

    :param input_str: encoded string
    :return: decoded string
    """
    # Look up every non-'=' character in the alphabet and convert its index to a 6-bit binary string
    key = settings.jy_proxy['socks5']['decrypt']
    ascii_list = ['{:0>6}'.format(str(bin(key.index(i))).replace('0b', '')) for i in input_str if i != '=']
    output_str = ''
    # Number of '=' padding characters
    equal_num = input_str.count('=')
    while ascii_list:
        temp_list = ascii_list[:4]
        # Join the group into one binary string
        temp_str = ''.join(temp_list)
        # Drop the bits contributed by the '=' padding when the group is not a whole number of bytes
        if len(temp_str) % 8 != 0:
            temp_str = temp_str[0:-1 * equal_num * 2]
        # Four 6-bit chunks become three 8-bit bytes
        temp_str_list = [temp_str[x:x + 8] for x in [0, 8, 16]]
        # Convert each byte from binary to decimal
        temp_str_list = [int(x, 2) for x in temp_str_list if x]
        # Append the decoded characters to the output
        output_str += ''.join([chr(x) for x in temp_str_list])
        ascii_list = ascii_list[4:]
    return output_str
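
# Illustrative note (not part of the original module): if the configured alphabet
# settings.jy_proxy['socks5']['decrypt'] were the standard base64 alphabet
# 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/', decrypt()
# would behave like plain base64 decoding, e.g. decrypt('MTkyLjE2OC4xLjE=') -> '192.168.1.1'.
# The real alphabet is deployment-specific and comes from settings.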


def get_base_url():
    return settings.jy_proxy['socks5']['base_url']


def get_netloc(proxy, default=None):
    proxies = None
    if isinstance(proxy, dict):
        proxies = proxy.get('proxies')
        if isinstance(proxies, str):
            proxies = tools.json_loads(proxies)

    # proxies = proxy.get('proxies') if isinstance(proxy, dict) else None
    if proxies is not None:
        parser = urlparse(proxies.get('http'))
        default = parser.netloc
    return default
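
# Example (illustrative): get_netloc({'proxies': {'http': 'socks5://1.2.3.4:8860'}})
# returns '1.2.3.4:8860'; anything without a usable 'proxies' mapping returns `default`.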


class BaseProxyPool(BaseServer):
    def __init__(self, name, redis_label, scheme):
        super(BaseProxyPool, self).__init__(server=name, label=redis_label)
        self.scheme = scheme.lower()
        self.proxy_name = self.scheme + self.server
        self.proxy_queue = f'{redis_label}_{self.scheme}'
        self.unique_key = ('ip', 'port')  # field names that make up a proxy fingerprint

    def get_redis_name(self, proxy):
        return f"{self.proxy_queue}_{proxy['fingerprint']}"

    def str_scan(self, pattern, count=1000):
        cursor = 0
        while True:
            cursor, keys = self.redis_db.scan(cursor, pattern, count)
            if len(keys) > 0:
                yield from keys
            if cursor == 0:
                break

    def get_redis_name_lst(self, pattern='*'):
        results = []
        pattern = self.proxy_queue + pattern
        for key in self.str_scan(pattern, count=5000):
            results.append(key)
        return results

    def get_proxy(self, name):
        items = self.redis_db.hgetall(name)
        if items is None or 'proxies' not in items:
            return None

        proxy = {
            'proxies': ast.literal_eval(items['proxies']),
            'fingerprint': items['fingerprint'],
            'start_time': int(items['start_time']),
            'end_time': int(items['end_time']),
            'last_time': int(items['last_time']),
            'usage': int(items['usage']),
            'pk': int(items.get('pk', 1))
        }
        return proxy

    def get(self, name, key):
        return self.redis_db.hget(name, key)

    def exists(self, proxy):
        return self.redis_db.exists(self.get_redis_name(proxy))

    def check(self, proxy):
        is_ok = False
        # url = 'https://myip.ipip.net'
        url = 'https://www.baidu.com/'
        netloc = get_netloc(proxy)
        try:
            requests_param = {
                "headers": {'User-Agent': DEFAULT_UA},
                "proxies": proxy['proxies'],
                "timeout": 5
            }
            requests.get(url, **requests_param)
            is_ok = True
        except requests.RequestException:
            pass

        msg = "ok" if is_ok else "dead"
        logger.debug(f"[{self.proxy_name}] proxy IP check - {netloc} -- connection {msg}")
        return proxy, is_ok

    def remove_proxy(self, proxy):
        netloc = get_netloc(proxy)
        logger.debug(f"[{self.proxy_name}] proxy IP - {netloc} -- removing")
        if self.exists(proxy):
            self.redis_db.delete(self.get_redis_name(proxy))

    def add_proxy(self, proxy):
        netloc = get_netloc(proxy)
        logger.debug(f"[{self.proxy_name}] proxy IP - {netloc} -- adding")
        if not self.exists(proxy):
            redis_name = self.get_redis_name(proxy)
            # proxy['proxies'] is read back via ast.literal_eval in get_proxy, so the
            # redis client is expected to store the nested dict as its string repr
            self.redis_db.hset(redis_name, None, None, mapping=proxy)
            expire_ts = proxy['end_time'] - tools.now_ts()
            self.redis_db.expire(redis_name, expire_ts)
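
# Storage sketch (inferred from the methods above; the key below is illustrative):
# each proxy lives in its own Redis hash named f"{redis_label}_{scheme}_{fingerprint}", e.g.
#   HGETALL jy_proxy_socks5_<fingerprint>
#   -> {'proxies': "{'http': 'socks5://1.2.3.4:8860', ...}", 'usage': '3', 'pk': '1', ...}
# and the hash expires on its own once the proxy's end_time passes.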


class ProxyPoolServer(BaseProxyPool, threading.Thread):
    def __init__(self, name, redis_label, scheme):
        """
        Proxy pool producer.

        @param str name: service name
        @param str redis_label: redis key prefix
        @param str scheme: protocol scheme
        """
        threading.Thread.__init__(self)
        super(ProxyPoolServer, self).__init__(name, redis_label, scheme)
        self.label = f'{self.proxy_name}_{self.name}'
        self.ports = ['8862', '8863'] if self.scheme == 'http' else ['8860', '8861']
        self.load_interval = 60  # interval between polls of the vps proxy service, in seconds

    def remove_failure_proxy(self, proxy_lst):
        """Remove expired or broken proxy IPs."""
        logger.info(f"[{self.label}] removing invalid proxy IPs")
        proxy_fingerprints = set([proxy['fingerprint'] for proxy in proxy_lst])
        for redis_name in self.get_redis_name_lst():
            fingerprint = self.get(redis_name, 'fingerprint')
            if fingerprint not in proxy_fingerprints:
                self.redis_db.delete(redis_name)

    def request_proxy(self):
        logger.info(f"[{self.label}] requesting the vps service")
        proxy_lst = []
        for idx, url in enumerate(settings.jy_proxy['socks5']['url']):
            try:
                response = requests.get(url, timeout=10)
                for item in response.json():
                    ports = list(filter(lambda p: p in self.ports, item['ports']))
                    if not ports:
                        continue

                    ip = decrypt(item['ip'])
                    port = int(random.choice(ports))
                    start_time = tools.now_ts()
                    end_time = item['lifetime']
                    if end_time - start_time > 0:
                        proxy = {
                            'proxies': {
                                'http': '{}://{}:{}'.format(self.scheme, ip, port),
                                'https': '{}://{}:{}'.format(self.scheme, ip, port)
                            },
                            'fingerprint': self.fingerprint(ip=ip, port=port),
                            'start_time': start_time,
                            'end_time': end_time,
                            'last_time': 0,
                            'usage': 0,
                            'pk': idx + 1
                        }
                        proxy_lst.append(proxy)
            except Exception as e:
                logger.error(f'[{self.label}] vps service request failed [{url}], reason: {e.args}')
        return proxy_lst
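
    # Expected response shape (inferred from the parsing in request_proxy; values are
    # illustrative): the vps service returns a JSON list such as
    #   [{'ip': '<encoded, see decrypt()>', 'ports': ['8860', '8861'], 'lifetime': 1683792000}]
    # where 'lifetime' is the expiry timestamp stored as the proxy's end_time.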

    def manage_proxy(self, proxy_lst: list, workers=1):
        self.remove_failure_proxy(proxy_lst)
        with ThreadPoolExecutor(max_workers=workers) as executor:
            fs = [executor.submit(self.check, proxy) for proxy in proxy_lst]
            for f in as_completed(fs):
                proxy, is_ok = f.result()
                if is_ok:
                    self.add_proxy(proxy)
                else:
                    self.remove_proxy(proxy)

    def run(self):
        logger.info(f'[{self.label}] start producing proxy IPs')
        while True:
            try:
                proxy_lst = self.request_proxy()
                if not proxy_lst:
                    tools.delay(2)
                    continue

                dynamic_workers = min((int(len(proxy_lst) / 2) or 1), 10)
                self.manage_proxy(proxy_lst, workers=dynamic_workers)  # thread pool capped at 10 workers
                tools.delay(self.load_interval)
            except Exception as e:
                logger.exception(e)


class ProxyPoolClient(BaseProxyPool):
    def __init__(self, name: str, redis_label: str, scheme: str):
        """
        Proxy pool consumer.
        """
        super(ProxyPoolClient, self).__init__(name, redis_label, scheme)
        current_process = multiprocessing.current_process()
        sub_name = f'{tools.get_localhost_ip()}:{current_process.pid}'
        self.lock_label = f'{redis_label}:{sub_name}'

    def proxy_total(self):
        return len(self.get_redis_name_lst())

    def get_all_proxy(self, pk=None):
        proxy_lst = deque([])
        for redis_name in self.get_redis_name_lst():
            proxy = self.get_proxy(redis_name)
            if isinstance(proxy, dict):
                proxy_lst.append(proxy)

        if len(proxy_lst) > 0:
            if pk is not None:
                # filter by proxy type (pk) first, then sort by usage count in ascending order
                special = deque(filter(lambda x: x['pk'] == int(pk), proxy_lst))
                proxy_lst = deque(sorted(special, key=itemgetter('usage')))
            else:
                # sort by usage count in ascending order (least used first)
                proxy_lst = deque(sorted(proxy_lst, key=itemgetter('usage')))
        return proxy_lst

    def get_proxy_pool(self, **kwargs):
        proxy_lst = []
        for proxy in self.get_all_proxy(**kwargs):
            last_time = proxy['last_time']
            end_time = proxy['end_time']
            expire = end_time - tools.now_ts()
            proxy_lst.append({
                'proxies': proxy['proxies'],
                'start_time': tools.ts2dt(proxy['start_time']),
                'end_time': tools.ts2dt(end_time),
                'last_time': tools.ts2dt(last_time) if last_time != 0 else '',
                'expire': expire,
                'usage': proxy['usage'],
                'pk': proxy.get('pk', 1),
            })
        # for display, order by remaining lifetime, longest first
        return sorted(proxy_lst, key=lambda x: x['expire'], reverse=True)
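
    # Return shape (illustrative; assumes tools.ts2dt renders a timestamp as a datetime
    # string): get_proxy_pool() yields rows such as
    #   {'proxies': {'http': 'socks5://1.2.3.4:8860', ...}, 'start_time': '2023-05-11 10:00:00',
    #    'end_time': '2023-05-11 10:30:00', 'last_time': '', 'expire': 1650, 'usage': 0, 'pk': 1}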

    def get_all_proxy_ip(self, protocol, **kwargs):
        return [
            proxy['proxies']['http'].replace(f'{protocol}://', '')
            for proxy in self.get_all_proxy(**kwargs)
        ]

    def proxies(self, **kwargs):
        with OptimisticLock(self.redis_db, self.lock_label):
            proxy = {}
            if self.proxy_total() > 0:
                proxy_lst = self.get_all_proxy(**kwargs)
                proxy = proxy_lst.popleft()  # take the least-used proxy
                name = self.get_redis_name(proxy)
                mapping = {
                    'usage': proxy['usage'] + 1,
                    'last_time': tools.now_ts()
                }
                self.redis_db.hset(name, None, None, mapping=mapping)
            return proxy.get('proxies')
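

# Usage sketch (illustrative, not part of the original module). The service name
# 'proxy_pool' and redis label 'jy_proxy' below are assumptions; real values come from
# the deployment's settings. The server thread keeps refreshing the pool, while the
# client hands out the least-used live proxy.
if __name__ == '__main__':
    server = ProxyPoolServer(name='proxy_pool', redis_label='jy_proxy', scheme='socks5')
    server.start()  # producer thread: polls the vps service every load_interval seconds

    client = ProxyPoolClient(name='proxy_pool', redis_label='jy_proxy', scheme='socks5')
    proxies = client.proxies()  # e.g. {'http': 'socks5://1.2.3.4:8860', 'https': 'socks5://...'}
    if proxies:
        requests.get('https://www.baidu.com/', proxies=proxies, timeout=5)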