client.py

# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring, global-statement

import asyncio
import logging
from ssl import SSLContext
import threading
from typing import Any, Dict

import httpx
from httpx_socks import AsyncProxyTransport
from python_socks import parse_proxy_url, ProxyConnectionError, ProxyTimeoutError, ProxyError

from searx import logger

# Optional uvloop (support Python 3.6)
try:
    import uvloop
except ImportError:
    pass
else:
    uvloop.install()


logger = logger.getChild('searx.network.client')

LOOP = None
SSLCONTEXTS: Dict[Any, SSLContext] = {}
TRANSPORT_KWARGS = {
    'trust_env': False,
}


def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
    key = (proxy_url, cert, verify, trust_env, http2)
    if key not in SSLCONTEXTS:
        SSLCONTEXTS[key] = httpx.create_ssl_context(cert, verify, trust_env, http2)
    return SSLCONTEXTS[key]
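
# Usage sketch: the cache key covers every TLS-relevant parameter, so repeated calls
# with identical arguments hand back the same SSLContext instance instead of building
# a new one per transport.  A minimal illustration, assuming the default arguments:
#
#     ctx_a = get_sslcontexts(verify=True)   # key: (None, None, True, True, False)
#     ctx_b = get_sslcontexts(verify=True)
#     assert ctx_a is ctx_b                  # the cached context object is reused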


class AsyncHTTPTransportNoHttp(httpx.AsyncHTTPTransport):
    """Block HTTP request"""

    async def handle_async_request(self, request):
        raise httpx.UnsupportedProtocol('HTTP protocol is disabled')
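
# Sketch of the intended effect (hypothetical client; new_client() below mounts this
# transport on the 'http://' pattern the same way): a plain-HTTP request fails fast
# with httpx.UnsupportedProtocol instead of ever reaching the network:
#
#     client = httpx.AsyncClient(mounts={'http://': AsyncHTTPTransportNoHttp()})
#     await client.get('http://example.org')   # raises httpx.UnsupportedProtocol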


class AsyncProxyTransportFixed(AsyncProxyTransport):
    """Fix httpx_socks.AsyncProxyTransport

    Map python_socks exceptions to httpx.ProxyError exceptions
    """

    async def handle_async_request(self, request):
        try:
            return await super().handle_async_request(request)
        except ProxyConnectionError as e:
            raise httpx.ProxyError("ProxyConnectionError: " + e.strerror, request=request) from e
        except ProxyTimeoutError as e:
            raise httpx.ProxyError("ProxyTimeoutError: " + e.args[0], request=request) from e
        except ProxyError as e:
            raise httpx.ProxyError("ProxyError: " + e.args[0], request=request) from e
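
# Net effect for callers (a sketch, assuming a SOCKS proxy that refuses or times out):
# only httpx exceptions escape the transport, so proxy failures can be handled like
# any other transport error:
#
#     try:
#         await client.get('https://example.org')
#     except httpx.ProxyError:
#         ...   # ProxyConnectionError / ProxyTimeoutError / ProxyError all land here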


def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
    # support socks5h (requests compatibility):
    # https://requests.readthedocs.io/en/master/user/advanced/#socks
    # socks5://  hostname is resolved on client side
    # socks5h:// hostname is resolved on proxy side
    rdns = False
    socks5h = 'socks5h://'
    if proxy_url.startswith(socks5h):
        proxy_url = 'socks5://' + proxy_url[len(socks5h) :]
        rdns = True

    proxy_type, proxy_host, proxy_port, proxy_username, proxy_password = parse_proxy_url(proxy_url)
    verify = get_sslcontexts(proxy_url, None, True, False, http2) if verify is True else verify
    return AsyncProxyTransportFixed(
        proxy_type=proxy_type,
        proxy_host=proxy_host,
        proxy_port=proxy_port,
        username=proxy_username,
        password=proxy_password,
        rdns=rdns,
        loop=get_loop(),
        verify=verify,
        http2=http2,
        local_address=local_address,
        limits=limit,
        retries=retries,
        **TRANSPORT_KWARGS,
    )
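
# Example of the socks5h rewrite above (a sketch, the proxy address is illustrative):
# a requests-style URL such as 'socks5h://127.0.0.1:9050' is handed to python_socks
# as 'socks5://127.0.0.1:9050' with rdns=True, so hostnames are resolved by the proxy
# rather than on the client side:
#
#     transport = get_transport_for_socks_proxy(
#         True, False, None, 'socks5h://127.0.0.1:9050', httpx.Limits(), 0
#     )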


def get_transport(verify, http2, local_address, proxy_url, limit, retries):
    verify = get_sslcontexts(None, None, True, False, http2) if verify is True else verify
    return httpx.AsyncHTTPTransport(
        # pylint: disable=protected-access
        verify=verify,
        http2=http2,
        limits=limit,
        proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
        local_address=local_address,
        retries=retries,
        **TRANSPORT_KWARGS,
    )


def new_client(
    # pylint: disable=too-many-arguments
    enable_http,
    verify,
    enable_http2,
    max_connections,
    max_keepalive_connections,
    keepalive_expiry,
    proxies,
    local_address,
    retries,
    max_redirects,
    hook_log_response,
):
    limit = httpx.Limits(
        max_connections=max_connections,
        max_keepalive_connections=max_keepalive_connections,
        keepalive_expiry=keepalive_expiry,
    )
    # See https://www.python-httpx.org/advanced/#routing
    mounts = {}
    for pattern, proxy_url in proxies.items():
        if not enable_http and pattern.startswith('http://'):
            continue
        if proxy_url.startswith('socks4://') or proxy_url.startswith('socks5://') or proxy_url.startswith('socks5h://'):
            mounts[pattern] = get_transport_for_socks_proxy(
                verify, enable_http2, local_address, proxy_url, limit, retries
            )
        else:
            mounts[pattern] = get_transport(verify, enable_http2, local_address, proxy_url, limit, retries)

    if not enable_http:
        mounts['http://'] = AsyncHTTPTransportNoHttp()

    transport = get_transport(verify, enable_http2, local_address, None, limit, retries)

    event_hooks = None
    if hook_log_response:
        event_hooks = {'response': [hook_log_response]}

    return httpx.AsyncClient(
        transport=transport,
        mounts=mounts,
        max_redirects=max_redirects,
        event_hooks=event_hooks,
    )
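
# Construction sketch (hypothetical values): route all outgoing traffic through one
# SOCKS proxy, keep plain HTTP disabled and skip the response hook:
#
#     client = new_client(
#         enable_http=False,
#         verify=True,
#         enable_http2=True,
#         max_connections=100,
#         max_keepalive_connections=10,
#         keepalive_expiry=5.0,
#         proxies={'all://': 'socks5h://127.0.0.1:9050'},
#         local_address=None,
#         retries=0,
#         max_redirects=30,
#         hook_log_response=None,
#     )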


def get_loop():
    return LOOP


def init():
    # log
    for logger_name in ('hpack.hpack', 'hpack.table', 'httpx._client'):
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # loop
    def loop_thread():
        global LOOP
        LOOP = asyncio.new_event_loop()
        LOOP.run_forever()

    thread = threading.Thread(
        target=loop_thread,
        name='asyncio_loop',
        daemon=True,
    )
    thread.start()


init()
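
# The module starts its own asyncio loop in a daemon thread at import time.  A minimal
# sketch of how synchronous code can drive it (assuming a client built with new_client();
# asyncio.run_coroutine_threadsafe is the standard way to submit a coroutine to a loop
# owned by another thread):
#
#     future = asyncio.run_coroutine_threadsafe(client.get('https://example.org'), get_loop())
#     response = future.result(timeout=30)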