# client.py
  1. # SPDX-License-Identifier: AGPL-3.0-or-later
  2. # pylint: disable=missing-module-docstring, global-statement
  3. import asyncio
  4. import logging
  5. import random
  6. from ssl import SSLContext
  7. import threading
  8. from typing import Any, Dict
  9. import httpx
  10. from httpx_socks import AsyncProxyTransport
  11. from python_socks import parse_proxy_url, ProxyConnectionError, ProxyTimeoutError, ProxyError
  12. from searx import logger
# Optional uvloop (support Python 3.6)
try:
    import uvloop
except ImportError:
    pass
else:
    uvloop.install()

logger = logger.getChild('searx.network.client')

# Shared asyncio event loop; created lazily by init() in a dedicated daemon thread.
LOOP = None
# Cache of SSLContext objects keyed by (proxy_url, cert, verify, trust_env, http2).
SSLCONTEXTS: Dict[Any, SSLContext] = {}
  23. def shuffle_ciphers(ssl_context):
  24. """Shuffle httpx's default ciphers of a SSL context randomly.
  25. From `What Is TLS Fingerprint and How to Bypass It`_
  26. > When implementing TLS fingerprinting, servers can't operate based on a
  27. > locked-in whitelist database of fingerprints. New fingerprints appear
  28. > when web clients or TLS libraries release new versions. So, they have to
  29. > live off a blocklist database instead.
  30. > ...
  31. > It's safe to leave the first three as is but shuffle the remaining ciphers
  32. > and you can bypass the TLS fingerprint check.
  33. .. _What Is TLS Fingerprint and How to Bypass It:
  34. https://www.zenrows.com/blog/what-is-tls-fingerprint#how-to-bypass-tls-fingerprinting
  35. """
  36. c_list = httpx._config.DEFAULT_CIPHERS.split(':') # pylint: disable=protected-access
  37. sc_list, c_list = c_list[:3], c_list[3:]
  38. random.shuffle(c_list)
  39. ssl_context.set_ciphers(":".join(sc_list + c_list))
  40. def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
  41. key = (proxy_url, cert, verify, trust_env, http2)
  42. if key not in SSLCONTEXTS:
  43. SSLCONTEXTS[key] = httpx.create_ssl_context(cert, verify, trust_env, http2)
  44. shuffle_ciphers(SSLCONTEXTS[key])
  45. return SSLCONTEXTS[key]
  46. class AsyncHTTPTransportNoHttp(httpx.AsyncHTTPTransport):
  47. """Block HTTP request
  48. The constructor is blank because httpx.AsyncHTTPTransport.__init__ creates an SSLContext unconditionally:
  49. https://github.com/encode/httpx/blob/0f61aa58d66680c239ce43c8cdd453e7dc532bfc/httpx/_transports/default.py#L271
  50. Each SSLContext consumes more than 500kb of memory, since there is about one network per engine.
  51. In consequence, this class overrides all public methods
  52. For reference: https://github.com/encode/httpx/issues/2298
  53. """
  54. def __init__(self, *args, **kwargs):
  55. # pylint: disable=super-init-not-called
  56. # this on purpose if the base class is not called
  57. pass
  58. async def handle_async_request(self, request):
  59. raise httpx.UnsupportedProtocol('HTTP protocol is disabled')
  60. async def aclose(self) -> None:
  61. pass
  62. async def __aenter__(self):
  63. return self
  64. async def __aexit__(
  65. self,
  66. exc_type=None,
  67. exc_value=None,
  68. traceback=None,
  69. ) -> None:
  70. pass
  71. class AsyncProxyTransportFixed(AsyncProxyTransport):
  72. """Fix httpx_socks.AsyncProxyTransport
  73. Map python_socks exceptions to httpx.ProxyError exceptions
  74. """
  75. async def handle_async_request(self, request):
  76. try:
  77. return await super().handle_async_request(request)
  78. except ProxyConnectionError as e:
  79. raise httpx.ProxyError("ProxyConnectionError: " + e.strerror, request=request) from e
  80. except ProxyTimeoutError as e:
  81. raise httpx.ProxyError("ProxyTimeoutError: " + e.args[0], request=request) from e
  82. except ProxyError as e:
  83. raise httpx.ProxyError("ProxyError: " + e.args[0], request=request) from e
  84. def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
  85. # support socks5h (requests compatibility):
  86. # https://requests.readthedocs.io/en/master/user/advanced/#socks
  87. # socks5:// hostname is resolved on client side
  88. # socks5h:// hostname is resolved on proxy side
  89. rdns = False
  90. socks5h = 'socks5h://'
  91. if proxy_url.startswith(socks5h):
  92. proxy_url = 'socks5://' + proxy_url[len(socks5h) :]
  93. rdns = True
  94. proxy_type, proxy_host, proxy_port, proxy_username, proxy_password = parse_proxy_url(proxy_url)
  95. verify = get_sslcontexts(proxy_url, None, verify, True, http2) if verify is True else verify
  96. return AsyncProxyTransportFixed(
  97. proxy_type=proxy_type,
  98. proxy_host=proxy_host,
  99. proxy_port=proxy_port,
  100. username=proxy_username,
  101. password=proxy_password,
  102. rdns=rdns,
  103. loop=get_loop(),
  104. verify=verify,
  105. http2=http2,
  106. local_address=local_address,
  107. limits=limit,
  108. retries=retries,
  109. )
  110. def get_transport(verify, http2, local_address, proxy_url, limit, retries):
  111. verify = get_sslcontexts(None, None, verify, True, http2) if verify is True else verify
  112. return httpx.AsyncHTTPTransport(
  113. # pylint: disable=protected-access
  114. verify=verify,
  115. http2=http2,
  116. limits=limit,
  117. proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
  118. local_address=local_address,
  119. retries=retries,
  120. )
  121. def new_client(
  122. # pylint: disable=too-many-arguments
  123. enable_http,
  124. verify,
  125. enable_http2,
  126. max_connections,
  127. max_keepalive_connections,
  128. keepalive_expiry,
  129. proxies,
  130. local_address,
  131. retries,
  132. max_redirects,
  133. hook_log_response,
  134. ):
  135. limit = httpx.Limits(
  136. max_connections=max_connections,
  137. max_keepalive_connections=max_keepalive_connections,
  138. keepalive_expiry=keepalive_expiry,
  139. )
  140. # See https://www.python-httpx.org/advanced/#routing
  141. mounts = {}
  142. for pattern, proxy_url in proxies.items():
  143. if not enable_http and pattern.startswith('http://'):
  144. continue
  145. if proxy_url.startswith('socks4://') or proxy_url.startswith('socks5://') or proxy_url.startswith('socks5h://'):
  146. mounts[pattern] = get_transport_for_socks_proxy(
  147. verify, enable_http2, local_address, proxy_url, limit, retries
  148. )
  149. else:
  150. mounts[pattern] = get_transport(verify, enable_http2, local_address, proxy_url, limit, retries)
  151. if not enable_http:
  152. mounts['http://'] = AsyncHTTPTransportNoHttp()
  153. transport = get_transport(verify, enable_http2, local_address, None, limit, retries)
  154. event_hooks = None
  155. if hook_log_response:
  156. event_hooks = {'response': [hook_log_response]}
  157. return httpx.AsyncClient(
  158. transport=transport,
  159. mounts=mounts,
  160. max_redirects=max_redirects,
  161. event_hooks=event_hooks,
  162. )
  163. def get_loop():
  164. return LOOP
  165. def init():
  166. # log
  167. for logger_name in (
  168. 'httpx',
  169. 'httpcore.proxy',
  170. 'httpcore.connection',
  171. 'httpcore.http11',
  172. 'httpcore.http2',
  173. 'hpack.hpack',
  174. 'hpack.table',
  175. ):
  176. logging.getLogger(logger_name).setLevel(logging.WARNING)
  177. # loop
  178. def loop_thread():
  179. global LOOP
  180. LOOP = asyncio.new_event_loop()
  181. LOOP.run_forever()
  182. thread = threading.Thread(
  183. target=loop_thread,
  184. name='asyncio_loop',
  185. daemon=True,
  186. )
  187. thread.start()
  188. init()