# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring, global-statement
import asyncio
import logging
import random
import threading
from ssl import SSLContext
from typing import Any, Dict

import httpx
from httpx_socks import AsyncProxyTransport
from python_socks import parse_proxy_url, ProxyConnectionError, ProxyTimeoutError, ProxyError
  13. from searx import logger
  14. # Optional uvloop (support Python 3.6)
  15. try:
  16. import uvloop
  17. except ImportError:
  18. pass
  19. else:
  20. uvloop.install()
  21. logger = logger.getChild('searx.network.client')
  22. LOOP = None
  23. SSLCONTEXTS: Dict[Any, SSLContext] = {}
  24. def shuffle_ciphers(ssl_context):
  25. """Shuffle httpx's default ciphers of a SSL context randomly.
  26. From `What Is TLS Fingerprint and How to Bypass It`_
  27. > When implementing TLS fingerprinting, servers can't operate based on a
  28. > locked-in whitelist database of fingerprints. New fingerprints appear
  29. > when web clients or TLS libraries release new versions. So, they have to
  30. > live off a blocklist database instead.
  31. > ...
  32. > It's safe to leave the first three as is but shuffle the remaining ciphers
  33. > and you can bypass the TLS fingerprint check.
  34. .. _What Is TLS Fingerprint and How to Bypass It:
  35. https://www.zenrows.com/blog/what-is-tls-fingerprint#how-to-bypass-tls-fingerprinting
  36. """
  37. c_list = httpx._config.DEFAULT_CIPHERS.split(':') # pylint: disable=protected-access
  38. sc_list, c_list = c_list[:3], c_list[3:]
  39. random.shuffle(c_list)
  40. ssl_context.set_ciphers(":".join(sc_list + c_list))
  41. def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
  42. key = (proxy_url, cert, verify, trust_env, http2)
  43. if key not in SSLCONTEXTS:
  44. SSLCONTEXTS[key] = httpx.create_ssl_context(cert, verify, trust_env, http2)
  45. shuffle_ciphers(SSLCONTEXTS[key])
  46. return SSLCONTEXTS[key]
  47. class AsyncHTTPTransportNoHttp(httpx.AsyncHTTPTransport):
  48. """Block HTTP request
  49. The constructor is blank because httpx.AsyncHTTPTransport.__init__ creates an SSLContext unconditionally:
  50. https://github.com/encode/httpx/blob/0f61aa58d66680c239ce43c8cdd453e7dc532bfc/httpx/_transports/default.py#L271
  51. Each SSLContext consumes more than 500kb of memory, since there is about one network per engine.
  52. In consequence, this class overrides all public methods
  53. For reference: https://github.com/encode/httpx/issues/2298
  54. """
  55. def __init__(self, *args, **kwargs):
  56. # pylint: disable=super-init-not-called
  57. # this on purpose if the base class is not called
  58. pass
  59. async def handle_async_request(self, request):
  60. raise httpx.UnsupportedProtocol('HTTP protocol is disabled')
  61. async def aclose(self) -> None:
  62. pass
  63. async def __aenter__(self):
  64. return self
  65. async def __aexit__(
  66. self,
  67. exc_type=None,
  68. exc_value=None,
  69. traceback=None,
  70. ) -> None:
  71. pass
  72. class AsyncProxyTransportFixed(AsyncProxyTransport):
  73. """Fix httpx_socks.AsyncProxyTransport
  74. Map python_socks exceptions to httpx.ProxyError exceptions
  75. """
  76. async def handle_async_request(self, request):
  77. try:
  78. return await super().handle_async_request(request)
  79. except ProxyConnectionError as e:
  80. raise httpx.ProxyError("ProxyConnectionError: " + e.strerror, request=request) from e
  81. except ProxyTimeoutError as e:
  82. raise httpx.ProxyError("ProxyTimeoutError: " + e.args[0], request=request) from e
  83. except ProxyError as e:
  84. raise httpx.ProxyError("ProxyError: " + e.args[0], request=request) from e
  85. def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
  86. # support socks5h (requests compatibility):
  87. # https://requests.readthedocs.io/en/master/user/advanced/#socks
  88. # socks5:// hostname is resolved on client side
  89. # socks5h:// hostname is resolved on proxy side
  90. rdns = False
  91. socks5h = 'socks5h://'
  92. if proxy_url.startswith(socks5h):
  93. proxy_url = 'socks5://' + proxy_url[len(socks5h) :]
  94. rdns = True
  95. proxy_type, proxy_host, proxy_port, proxy_username, proxy_password = parse_proxy_url(proxy_url)
  96. verify = get_sslcontexts(proxy_url, None, verify, True, http2) if verify is True else verify
  97. return AsyncProxyTransportFixed(
  98. proxy_type=proxy_type,
  99. proxy_host=proxy_host,
  100. proxy_port=proxy_port,
  101. username=proxy_username,
  102. password=proxy_password,
  103. rdns=rdns,
  104. loop=get_loop(),
  105. verify=verify,
  106. http2=http2,
  107. local_address=local_address,
  108. limits=limit,
  109. retries=retries,
  110. )
  111. def get_transport(verify, http2, local_address, proxy_url, limit, retries):
  112. verify = get_sslcontexts(None, None, verify, True, http2) if verify is True else verify
  113. return httpx.AsyncHTTPTransport(
  114. # pylint: disable=protected-access
  115. verify=verify,
  116. http2=http2,
  117. limits=limit,
  118. proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
  119. local_address=local_address,
  120. retries=retries,
  121. )
  122. def new_client(
  123. # pylint: disable=too-many-arguments
  124. enable_http,
  125. verify,
  126. enable_http2,
  127. max_connections,
  128. max_keepalive_connections,
  129. keepalive_expiry,
  130. proxies,
  131. local_address,
  132. retries,
  133. max_redirects,
  134. hook_log_response,
  135. ):
  136. limit = httpx.Limits(
  137. max_connections=max_connections,
  138. max_keepalive_connections=max_keepalive_connections,
  139. keepalive_expiry=keepalive_expiry,
  140. )
  141. # See https://www.python-httpx.org/advanced/#routing
  142. mounts = {}
  143. for pattern, proxy_url in proxies.items():
  144. if not enable_http and pattern.startswith('http://'):
  145. continue
  146. if proxy_url.startswith('socks4://') or proxy_url.startswith('socks5://') or proxy_url.startswith('socks5h://'):
  147. mounts[pattern] = get_transport_for_socks_proxy(
  148. verify, enable_http2, local_address, proxy_url, limit, retries
  149. )
  150. else:
  151. mounts[pattern] = get_transport(verify, enable_http2, local_address, proxy_url, limit, retries)
  152. if not enable_http:
  153. mounts['http://'] = AsyncHTTPTransportNoHttp()
  154. transport = get_transport(verify, enable_http2, local_address, None, limit, retries)
  155. event_hooks = None
  156. if hook_log_response:
  157. event_hooks = {'response': [hook_log_response]}
  158. return httpx.AsyncClient(
  159. transport=transport,
  160. mounts=mounts,
  161. max_redirects=max_redirects,
  162. event_hooks=event_hooks,
  163. )
  164. def get_loop():
  165. return LOOP
  166. def init():
  167. # log
  168. for logger_name in (
  169. 'httpx',
  170. 'httpcore.proxy',
  171. 'httpcore.connection',
  172. 'httpcore.http11',
  173. 'httpcore.http2',
  174. 'hpack.hpack',
  175. 'hpack.table',
  176. ):
  177. logging.getLogger(logger_name).setLevel(logging.WARNING)
  178. # loop
  179. def loop_thread():
  180. global LOOP
  181. LOOP = asyncio.new_event_loop()
  182. LOOP.run_forever()
  183. thread = threading.Thread(
  184. target=loop_thread,
  185. name='asyncio_loop',
  186. daemon=True,
  187. )
  188. thread.start()
  189. init()