network.py 11 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302
# SPDX-License-Identifier: AGPL-3.0-or-later
import atexit
import asyncio
import ipaddress
from itertools import cycle
import httpx
from .client import new_client, LOOP

# Registry key of the fallback network (see get_network()).
DEFAULT_NAME = '__DEFAULT__'
# Global registry: network name (engine name, 'ipv4', 'ipv6', ...) -> Network.
NETWORKS = {}
# requests compatibility when reading proxy settings from settings.yml:
# maps requests-style proxy keys (with or without trailing ':') to the
# httpx URL-pattern form used as keys in the client's proxy mounts.
PROXY_PATTERN_MAPPING = {
    'http': 'http://',
    'https': 'https://',
    'socks4': 'socks4://',
    'socks5': 'socks5://',
    'socks5h': 'socks5h://',
    'http:': 'http://',
    'https:': 'https://',
    'socks4:': 'socks4://',
    'socks5:': 'socks5://',
    'socks5h:': 'socks5h://',
}
# Wildcard bind address per IP family.
# NOTE(review): not referenced anywhere in this module — presumably used by
# importers; confirm before removing.
ADDRESS_MAPPING = {
    'ipv4': '0.0.0.0',
    'ipv6': '::'
}
  27. class Network:
  28. __slots__ = ('enable_http', 'verify', 'enable_http2',
  29. 'max_connections', 'max_keepalive_connections', 'keepalive_expiry',
  30. 'local_addresses', 'proxies', 'max_redirects', 'retries', 'retry_on_http_error',
  31. '_local_addresses_cycle', '_proxies_cycle', '_clients')
  32. def __init__(self,
  33. enable_http=True,
  34. verify=True,
  35. enable_http2=False,
  36. max_connections=None,
  37. max_keepalive_connections=None,
  38. keepalive_expiry=None,
  39. proxies=None,
  40. local_addresses=None,
  41. retries=0,
  42. retry_on_http_error=None,
  43. max_redirects=30):
  44. self.enable_http = enable_http
  45. self.verify = verify
  46. self.enable_http2 = enable_http2
  47. self.max_connections = max_connections
  48. self.max_keepalive_connections = max_keepalive_connections
  49. self.keepalive_expiry = keepalive_expiry
  50. self.proxies = proxies
  51. self.local_addresses = local_addresses
  52. self.retries = retries
  53. self.retry_on_http_error = retry_on_http_error
  54. self.max_redirects = max_redirects
  55. self._local_addresses_cycle = self.get_ipaddress_cycle()
  56. self._proxies_cycle = self.get_proxy_cycles()
  57. self._clients = {}
  58. self.check_parameters()
  59. def check_parameters(self):
  60. for address in self.iter_ipaddresses():
  61. if '/' in address:
  62. ipaddress.ip_network(address, False)
  63. else:
  64. ipaddress.ip_address(address)
  65. if self.proxies is not None and not isinstance(self.proxies, (str, dict)):
  66. raise ValueError('proxies type has to be str, dict or None')
  67. def iter_ipaddresses(self):
  68. local_addresses = self.local_addresses
  69. if not local_addresses:
  70. return
  71. elif isinstance(local_addresses, str):
  72. local_addresses = [local_addresses]
  73. for address in local_addresses:
  74. yield address
  75. def get_ipaddress_cycle(self):
  76. while True:
  77. count = 0
  78. for address in self.iter_ipaddresses():
  79. if '/' in address:
  80. for a in ipaddress.ip_network(address, False).hosts():
  81. yield str(a)
  82. count += 1
  83. else:
  84. a = ipaddress.ip_address(address)
  85. yield str(a)
  86. count += 1
  87. if count == 0:
  88. yield None
  89. def iter_proxies(self):
  90. if not self.proxies:
  91. return
  92. # https://www.python-httpx.org/compatibility/#proxy-keys
  93. if isinstance(self.proxies, str):
  94. yield 'all://', [self.proxies]
  95. else:
  96. for pattern, proxy_url in self.proxies.items():
  97. pattern = PROXY_PATTERN_MAPPING.get(pattern, pattern)
  98. if isinstance(proxy_url, str):
  99. proxy_url = [proxy_url]
  100. yield pattern, proxy_url
  101. def get_proxy_cycles(self):
  102. proxy_settings = {}
  103. for pattern, proxy_urls in self.iter_proxies():
  104. proxy_settings[pattern] = cycle(proxy_urls)
  105. while True:
  106. yield tuple((pattern, next(proxy_url_cycle)) for pattern, proxy_url_cycle in proxy_settings.items())
  107. def get_client(self, verify=None, max_redirects=None):
  108. verify = self.verify if verify is None else verify
  109. max_redirects = self.max_redirects if max_redirects is None else max_redirects
  110. local_address = next(self._local_addresses_cycle)
  111. proxies = next(self._proxies_cycle) # is a tuple so it can be part of the key
  112. key = (verify, max_redirects, local_address, proxies)
  113. if key not in self._clients or self._clients[key].is_closed:
  114. self._clients[key] = new_client(self.enable_http,
  115. verify,
  116. self.enable_http2,
  117. self.max_connections,
  118. self.max_keepalive_connections,
  119. self.keepalive_expiry,
  120. dict(proxies),
  121. local_address,
  122. 0,
  123. max_redirects)
  124. return self._clients[key]
  125. async def aclose(self):
  126. async def close_client(client):
  127. try:
  128. await client.aclose()
  129. except httpx.HTTPError:
  130. pass
  131. await asyncio.gather(*[close_client(client) for client in self._clients.values()], return_exceptions=False)
  132. @staticmethod
  133. def get_kwargs_clients(kwargs):
  134. kwargs_clients = {}
  135. if 'verify' in kwargs:
  136. kwargs_clients['verify'] = kwargs.pop('verify')
  137. if 'max_redirects' in kwargs:
  138. kwargs_clients['max_redirects'] = kwargs.pop('max_redirects')
  139. return kwargs_clients
  140. def is_valid_respones(self, response):
  141. if (self.retry_on_http_error is True and 400 <= response.status_code <= 599) \
  142. or (isinstance(self.retry_on_http_error, list) and response.status_code in self.retry_on_http_error) \
  143. or (isinstance(self.retry_on_http_error, int) and response.status_code == self.retry_on_http_error):
  144. return False
  145. return True
  146. async def request(self, method, url, **kwargs):
  147. retries = self.retries
  148. while retries >= 0: # pragma: no cover
  149. kwargs_clients = Network.get_kwargs_clients(kwargs)
  150. client = self.get_client(**kwargs_clients)
  151. try:
  152. response = await client.request(method, url, **kwargs)
  153. if self.is_valid_respones(response) or retries <= 0:
  154. return response
  155. except (httpx.RequestError, httpx.HTTPStatusError) as e:
  156. if retries <= 0:
  157. raise e
  158. retries -= 1
  159. def stream(self, method, url, **kwargs):
  160. retries = self.retries
  161. while retries >= 0: # pragma: no cover
  162. kwargs_clients = Network.get_kwargs_clients(kwargs)
  163. client = self.get_client(**kwargs_clients)
  164. try:
  165. response = client.stream(method, url, **kwargs)
  166. if self.is_valid_respones(response) or retries <= 0:
  167. return response
  168. except (httpx.RequestError, httpx.HTTPStatusError) as e:
  169. if retries <= 0:
  170. raise e
  171. retries -= 1
  172. @classmethod
  173. async def aclose_all(cls):
  174. await asyncio.gather(*[network.aclose() for network in NETWORKS.values()], return_exceptions=False)
  175. def get_network(name=None):
  176. global NETWORKS
  177. return NETWORKS.get(name or DEFAULT_NAME)
def initialize(settings_engines=None, settings_outgoing=None):
    """(Re)build the global NETWORKS registry from the settings.

    Registers the default network, the 'ipv4'/'ipv6' networks, one network
    per entry in ``outgoing.networks``, and one per engine — built from the
    engine's own attributes, from an inline ``network`` dict, or (in a
    second pass) as a reference to an already registered network by name.

    Args:
        settings_engines: list of engine specs; defaults to settings['engines'].
        settings_outgoing: the ``outgoing`` settings dict; defaults to
            settings['outgoing'].
    """
    # imported here (not at module level) — presumably to avoid an import
    # cycle at load time; confirm before moving to the top of the file.
    from searx.engines import engines
    from searx import settings
    global NETWORKS
    settings_engines = settings_engines or settings.get('engines')
    settings_outgoing = settings_outgoing or settings.get('outgoing')
    # default parameters for AsyncHTTPTransport
    # see https://github.com/encode/httpx/blob/e05a5372eb6172287458b37447c30f650047e1b8/httpx/_transports/default.py#L108-L121 # noqa
    default_params = {
        'enable_http': False,
        'verify': True,
        'enable_http2': settings_outgoing.get('enable_http2', True),
        # Magic number kept from previous code
        'max_connections': settings_outgoing.get('pool_connections', 100),
        # Picked from constructor
        'max_keepalive_connections': settings_outgoing.get('pool_maxsize', 10),
        #
        'keepalive_expiry': settings_outgoing.get('keepalive_expiry', 5.0),
        'local_addresses': settings_outgoing.get('source_ips'),
        'proxies': settings_outgoing.get('proxies'),
        # default maximum redirect
        # from https://github.com/psf/requests/blob/8c211a96cdbe9fe320d63d9e1ae15c5c07e179f8/requests/models.py#L55
        'max_redirects': settings_outgoing.get('max_redirects', 30),
        #
        'retries': settings_outgoing.get('retries', 0),
        'retry_on_http_error': None,
    }

    def new_network(params):
        # Build a Network from default_params overridden by params.
        nonlocal default_params
        result = {}
        result.update(default_params)
        result.update(params)
        return Network(**result)

    def iter_networks():
        # Yield (engine name, engine module, engine.network attribute or None)
        # for every engine spec that resolves to a loaded engine.
        nonlocal settings_engines
        for engine_spec in settings_engines:
            engine_name = engine_spec['name']
            engine = engines.get(engine_name)
            if engine is None:
                continue
            network = getattr(engine, 'network', None)
            yield engine_name, engine, network

    # Re-initialization: close the existing clients before rebuilding.
    if NETWORKS:
        done()
    NETWORKS.clear()
    NETWORKS[DEFAULT_NAME] = new_network({})
    NETWORKS['ipv4'] = new_network({'local_addresses': '0.0.0.0'})
    NETWORKS['ipv6'] = new_network({'local_addresses': '::'})
    # define networks from outgoing.networks
    for network_name, network in settings_outgoing.get('networks', {}).items():
        NETWORKS[network_name] = new_network(network)
    # define networks from engines.[i].network (except references)
    for engine_name, engine, network in iter_networks():
        if network is None:
            # No explicit network: collect the engine's own attributes,
            # falling back to default_params for attributes it lacks.
            network = {}
            for attribute_name, attribute_value in default_params.items():
                if hasattr(engine, attribute_name):
                    network[attribute_name] = getattr(engine, attribute_name)
                else:
                    network[attribute_name] = attribute_value
            NETWORKS[engine_name] = new_network(network)
        elif isinstance(network, dict):
            NETWORKS[engine_name] = new_network(network)
    # define networks from engines.[i].network (references)
    for engine_name, engine, network in iter_networks():
        if isinstance(network, str):
            NETWORKS[engine_name] = NETWORKS[network]
  245. @atexit.register
  246. def done():
  247. """Close all HTTP client
  248. Avoid a warning at exit
  249. see https://github.com/encode/httpx/blob/1a6e254f72d9fd5694a1c10a28927e193ab4f76b/httpx/_client.py#L1785
  250. Note: since Network.aclose has to be async, it is not possible to call this method on Network.__del__
  251. So Network.aclose is called here using atexit.register
  252. """
  253. try:
  254. if LOOP:
  255. future = asyncio.run_coroutine_threadsafe(Network.aclose_all(), LOOP)
  256. # wait 3 seconds to close the HTTP clients
  257. future.result(3)
  258. finally:
  259. NETWORKS.clear()
  260. NETWORKS[DEFAULT_NAME] = Network()