network.py 12 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337
  1. # SPDX-License-Identifier: AGPL-3.0-or-later
  2. # lint: pylint
  3. # pylint: disable=global-statement
  4. # pylint: disable=missing-module-docstring, missing-class-docstring
  5. import atexit
  6. import asyncio
  7. import ipaddress
  8. from itertools import cycle
  9. import httpx
  10. from searx import logger, searx_debug
  11. from .client import new_client, get_loop
logger = logger.getChild('network')

# Name under which the fallback network is registered in NETWORKS.
DEFAULT_NAME = '__DEFAULT__'
# Global registry of Network instances, keyed by network / engine name.
# Populated by initialize(), reset by done().
NETWORKS = {}

# requests compatibility when reading proxy settings from settings.yml:
# normalize requests-style proxy keys (scheme with or without trailing
# colon) to the httpx URL-pattern form.
PROXY_PATTERN_MAPPING = {
    'http': 'http://',
    'https': 'https://',
    'socks4': 'socks4://',
    'socks5': 'socks5://',
    'socks5h': 'socks5h://',
    'http:': 'http://',
    'https:': 'https://',
    'socks4:': 'socks4://',
    'socks5:': 'socks5://',
    'socks5h:': 'socks5h://',
}

# Address-family name -> wildcard source address (mirrors the 'ipv4' /
# 'ipv6' networks created in initialize()).
ADDRESS_MAPPING = {
    'ipv4': '0.0.0.0',
    'ipv6': '::'
}
  32. class Network:
  33. __slots__ = (
  34. 'enable_http', 'verify', 'enable_http2',
  35. 'max_connections', 'max_keepalive_connections', 'keepalive_expiry',
  36. 'local_addresses', 'proxies', 'max_redirects', 'retries', 'retry_on_http_error',
  37. '_local_addresses_cycle', '_proxies_cycle', '_clients', '_logger'
  38. )
  39. def __init__(
  40. # pylint: disable=too-many-arguments
  41. self,
  42. enable_http=True,
  43. verify=True,
  44. enable_http2=False,
  45. max_connections=None,
  46. max_keepalive_connections=None,
  47. keepalive_expiry=None,
  48. proxies=None,
  49. local_addresses=None,
  50. retries=0,
  51. retry_on_http_error=None,
  52. max_redirects=30,
  53. logger_name=None):
  54. self.enable_http = enable_http
  55. self.verify = verify
  56. self.enable_http2 = enable_http2
  57. self.max_connections = max_connections
  58. self.max_keepalive_connections = max_keepalive_connections
  59. self.keepalive_expiry = keepalive_expiry
  60. self.proxies = proxies
  61. self.local_addresses = local_addresses
  62. self.retries = retries
  63. self.retry_on_http_error = retry_on_http_error
  64. self.max_redirects = max_redirects
  65. self._local_addresses_cycle = self.get_ipaddress_cycle()
  66. self._proxies_cycle = self.get_proxy_cycles()
  67. self._clients = {}
  68. self._logger = logger.getChild(logger_name) if logger_name else logger
  69. self.check_parameters()
  70. def check_parameters(self):
  71. for address in self.iter_ipaddresses():
  72. if '/' in address:
  73. ipaddress.ip_network(address, False)
  74. else:
  75. ipaddress.ip_address(address)
  76. if self.proxies is not None and not isinstance(self.proxies, (str, dict)):
  77. raise ValueError('proxies type has to be str, dict or None')
  78. def iter_ipaddresses(self):
  79. local_addresses = self.local_addresses
  80. if not local_addresses:
  81. return
  82. if isinstance(local_addresses, str):
  83. local_addresses = [local_addresses]
  84. for address in local_addresses:
  85. yield address
  86. def get_ipaddress_cycle(self):
  87. while True:
  88. count = 0
  89. for address in self.iter_ipaddresses():
  90. if '/' in address:
  91. for a in ipaddress.ip_network(address, False).hosts():
  92. yield str(a)
  93. count += 1
  94. else:
  95. a = ipaddress.ip_address(address)
  96. yield str(a)
  97. count += 1
  98. if count == 0:
  99. yield None
  100. def iter_proxies(self):
  101. if not self.proxies:
  102. return
  103. # https://www.python-httpx.org/compatibility/#proxy-keys
  104. if isinstance(self.proxies, str):
  105. yield 'all://', [self.proxies]
  106. else:
  107. for pattern, proxy_url in self.proxies.items():
  108. pattern = PROXY_PATTERN_MAPPING.get(pattern, pattern)
  109. if isinstance(proxy_url, str):
  110. proxy_url = [proxy_url]
  111. yield pattern, proxy_url
  112. def get_proxy_cycles(self):
  113. proxy_settings = {}
  114. for pattern, proxy_urls in self.iter_proxies():
  115. proxy_settings[pattern] = cycle(proxy_urls)
  116. while True:
  117. # pylint: disable=stop-iteration-return
  118. yield tuple((pattern, next(proxy_url_cycle)) for pattern, proxy_url_cycle in proxy_settings.items())
  119. async def log_response(self, response: httpx.Response):
  120. request = response.request
  121. status = f"{response.status_code} {response.reason_phrase}"
  122. response_line = f"{response.http_version} {status}"
  123. if hasattr(response, "_elapsed"):
  124. elapsed_time = f"{response.elapsed.total_seconds()} sec"
  125. else:
  126. elapsed_time = "stream"
  127. self._logger.debug(
  128. f'HTTP Request: {request.method} {request.url} "{response_line}" ({elapsed_time})'
  129. )
  130. def get_client(self, verify=None, max_redirects=None):
  131. verify = self.verify if verify is None else verify
  132. max_redirects = self.max_redirects if max_redirects is None else max_redirects
  133. local_address = next(self._local_addresses_cycle)
  134. proxies = next(self._proxies_cycle) # is a tuple so it can be part of the key
  135. key = (verify, max_redirects, local_address, proxies)
  136. hook_log_response = self.log_response if searx_debug else None
  137. if key not in self._clients or self._clients[key].is_closed:
  138. self._clients[key] = new_client(
  139. self.enable_http,
  140. verify,
  141. self.enable_http2,
  142. self.max_connections,
  143. self.max_keepalive_connections,
  144. self.keepalive_expiry,
  145. dict(proxies),
  146. local_address,
  147. 0,
  148. max_redirects,
  149. hook_log_response
  150. )
  151. return self._clients[key]
  152. async def aclose(self):
  153. async def close_client(client):
  154. try:
  155. await client.aclose()
  156. except httpx.HTTPError:
  157. pass
  158. await asyncio.gather(*[close_client(client) for client in self._clients.values()], return_exceptions=False)
  159. @staticmethod
  160. def get_kwargs_clients(kwargs):
  161. kwargs_clients = {}
  162. if 'verify' in kwargs:
  163. kwargs_clients['verify'] = kwargs.pop('verify')
  164. if 'max_redirects' in kwargs:
  165. kwargs_clients['max_redirects'] = kwargs.pop('max_redirects')
  166. return kwargs_clients
  167. def is_valid_respones(self, response):
  168. # pylint: disable=too-many-boolean-expressions
  169. if ((self.retry_on_http_error is True and 400 <= response.status_code <= 599)
  170. or (isinstance(self.retry_on_http_error, list) and response.status_code in self.retry_on_http_error)
  171. or (isinstance(self.retry_on_http_error, int) and response.status_code == self.retry_on_http_error)
  172. ):
  173. return False
  174. return True
  175. async def request(self, method, url, **kwargs):
  176. retries = self.retries
  177. while retries >= 0: # pragma: no cover
  178. kwargs_clients = Network.get_kwargs_clients(kwargs)
  179. client = self.get_client(**kwargs_clients)
  180. try:
  181. response = await client.request(method, url, **kwargs)
  182. if self.is_valid_respones(response) or retries <= 0:
  183. return response
  184. except (httpx.RequestError, httpx.HTTPStatusError) as e:
  185. if retries <= 0:
  186. raise e
  187. retries -= 1
  188. def stream(self, method, url, **kwargs):
  189. retries = self.retries
  190. while retries >= 0: # pragma: no cover
  191. kwargs_clients = Network.get_kwargs_clients(kwargs)
  192. client = self.get_client(**kwargs_clients)
  193. try:
  194. response = client.stream(method, url, **kwargs)
  195. if self.is_valid_respones(response) or retries <= 0:
  196. return response
  197. except (httpx.RequestError, httpx.HTTPStatusError) as e:
  198. if retries <= 0:
  199. raise e
  200. retries -= 1
  201. @classmethod
  202. async def aclose_all(cls):
  203. await asyncio.gather(*[network.aclose() for network in NETWORKS.values()], return_exceptions=False)
  204. def get_network(name=None):
  205. return NETWORKS.get(name or DEFAULT_NAME)
def initialize(settings_engines=None, settings_outgoing=None):
    """(Re)build the global NETWORKS registry from the settings.

    Creates the default / ipv4 / ipv6 networks, one network per entry of
    ``outgoing.networks``, one per engine (honouring per-engine overrides
    of the default parameters), resolves string references between engine
    networks, and adds a dedicated HTTP/1 network for /image_proxy.
    """
    # pylint: disable=import-outside-toplevel
    from searx.engines import engines
    from searx import settings
    # pylint: enable=import-outside-toplevel
    settings_engines = settings_engines or settings['engines']
    settings_outgoing = settings_outgoing or settings['outgoing']
    # default parameters for AsyncHTTPTransport
    # see https://github.com/encode/httpx/blob/e05a5372eb6172287458b37447c30f650047e1b8/httpx/_transports/default.py#L108-L121 # pylint: disable=line-too-long
    default_params = {
        'enable_http': False,
        'verify': True,
        'enable_http2': settings_outgoing['enable_http2'],
        'max_connections': settings_outgoing['pool_connections'],
        'max_keepalive_connections': settings_outgoing['pool_maxsize'],
        'keepalive_expiry': settings_outgoing['keepalive_expiry'],
        'local_addresses': settings_outgoing['source_ips'],
        'proxies': settings_outgoing['proxies'],
        'max_redirects': settings_outgoing['max_redirects'],
        'retries': settings_outgoing['retries'],
        'retry_on_http_error': None,
    }

    def new_network(params, logger_name=None):
        # Build a Network from default_params overridden by params.
        nonlocal default_params
        result = {}
        result.update(default_params)
        result.update(params)
        if logger_name:
            result['logger_name'] = logger_name
        return Network(**result)

    def iter_networks():
        # Yield (name, engine module, engine's 'network' attribute) for
        # every configured engine that was successfully loaded.
        nonlocal settings_engines
        for engine_spec in settings_engines:
            engine_name = engine_spec['name']
            engine = engines.get(engine_name)
            if engine is None:
                continue
            network = getattr(engine, 'network', None)
            yield engine_name, engine, network

    if NETWORKS:
        done()  # close clients left over from a previous initialization
    NETWORKS.clear()
    NETWORKS[DEFAULT_NAME] = new_network({}, logger_name='default')
    NETWORKS['ipv4'] = new_network({'local_addresses': '0.0.0.0'}, logger_name='ipv4')
    NETWORKS['ipv6'] = new_network({'local_addresses': '::'}, logger_name='ipv6')
    # define networks from outgoing.networks
    for network_name, network in settings_outgoing['networks'].items():
        NETWORKS[network_name] = new_network(network, logger_name=network_name)
    # define networks from engines.[i].network (except references)
    for engine_name, engine, network in iter_networks():
        if network is None:
            # no explicit network: collect per-engine attribute overrides
            # of the default parameters
            network = {}
            for attribute_name, attribute_value in default_params.items():
                if hasattr(engine, attribute_name):
                    network[attribute_name] = getattr(engine, attribute_name)
                else:
                    network[attribute_name] = attribute_value
            NETWORKS[engine_name] = new_network(network, logger_name=engine_name)
        elif isinstance(network, dict):
            NETWORKS[engine_name] = new_network(network, logger_name=engine_name)
    # define networks from engines.[i].network (references) — second pass
    # so a reference can point at any network created above
    for engine_name, engine, network in iter_networks():
        if isinstance(network, str):
            NETWORKS[engine_name] = NETWORKS[network]
    # the /image_proxy endpoint has a dedicated network.
    # same parameters than the default network, but HTTP/2 is disabled.
    # It decreases the CPU load average, and the total time is more or less the same
    if 'image_proxy' not in NETWORKS:
        image_proxy_params = default_params.copy()
        image_proxy_params['enable_http2'] = False
        NETWORKS['image_proxy'] = new_network(image_proxy_params, logger_name='image_proxy')
  277. @atexit.register
  278. def done():
  279. """Close all HTTP client
  280. Avoid a warning at exit
  281. see https://github.com/encode/httpx/blob/1a6e254f72d9fd5694a1c10a28927e193ab4f76b/httpx/_client.py#L1785
  282. Note: since Network.aclose has to be async, it is not possible to call this method on Network.__del__
  283. So Network.aclose is called here using atexit.register
  284. """
  285. try:
  286. loop = get_loop()
  287. if loop:
  288. future = asyncio.run_coroutine_threadsafe(Network.aclose_all(), loop)
  289. # wait 3 seconds to close the HTTP clients
  290. future.result(3)
  291. finally:
  292. NETWORKS.clear()
  293. NETWORKS[DEFAULT_NAME] = Network()