Browse Source

[pylint] searx/network/client.py & add global (TRANSPORT_KWARGS)

No functional change!

- fix messages from pylint
- add ``global TRANSPORT_KWARGS``
- normalized python_socks imports

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
Markus Heiser 3 years ago
parent
commit
b595c482d0
1 changed file with 35 additions and 20 deletions
  1. 35 20
      searx/network/client.py

+ 35 - 20
searx/network/client.py

@@ -1,14 +1,19 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+# lint: pylint
+# pylint: disable=missing-module-docstring, missing-function-docstring, global-statement
 
 import asyncio
 import logging
 import threading
-
 import httpcore
 import httpx
 from httpx_socks import AsyncProxyTransport
-from python_socks import parse_proxy_url
-import python_socks._errors
+from python_socks import (
+    parse_proxy_url,
+    ProxyConnectionError,
+    ProxyTimeoutError,
+    ProxyError
+)
 
 from searx import logger
 
@@ -30,7 +35,11 @@ TRANSPORT_KWARGS = {
 }
 
 
-async def close_connections_for_url(connection_pool: httpcore.AsyncConnectionPool, url: httpcore._utils.URL):
+# pylint: disable=protected-access
+async def close_connections_for_url(
+        connection_pool: httpcore.AsyncConnectionPool,
+        url: httpcore._utils.URL ):
+
     origin = httpcore._utils.url_to_origin(url)
     logger.debug('Drop connections for %r', origin)
     connections_to_close = connection_pool._connections_for_origin(origin)
@@ -40,6 +49,7 @@ async def close_connections_for_url(connection_pool: httpcore.AsyncConnectionPoo
             await connection.aclose()
         except httpcore.NetworkError as e:
             logger.warning('Error closing an existing connection', exc_info=e)
+# pylint: enable=protected-access
 
 
 def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
@@ -80,9 +90,7 @@ class AsyncProxyTransportFixed(AsyncProxyTransport):
             retry -= 1
             try:
                 return await super().arequest(method, url, headers, stream, ext)
-            except (python_socks._errors.ProxyConnectionError,
-                    python_socks._errors.ProxyTimeoutError,
-                    python_socks._errors.ProxyError) as e:
+            except (ProxyConnectionError, ProxyTimeoutError, ProxyError) as e:
                 raise httpcore.ProxyError(e)
             except OSError as e:
                 # socket.gaierror when DNS resolution fails
@@ -114,7 +122,7 @@ class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
             except httpcore.CloseError as e:
                 # httpcore.CloseError: [Errno 104] Connection reset by peer
                 # raised by _keepalive_sweep()
-                #   from https://github.com/encode/httpcore/blob/4b662b5c42378a61e54d673b4c949420102379f5/httpcore/_backends/asyncio.py#L198  # noqa
+                #   from https://github.com/encode/httpcore/blob/4b662b5c42378a61e54d673b4c949420102379f5/httpcore/_backends/asyncio.py#L198  # pylint: disable=line-too-long
                 await close_connections_for_url(self._pool, url)
                 logger.warning('httpcore.CloseError: retry', exc_info=e)
                 # retry
@@ -129,6 +137,7 @@ class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
 
 
 def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
+    global TRANSPORT_KWARGS
     # support socks5h (requests compatibility):
     # https://requests.readthedocs.io/en/master/user/advanced/#socks
     # socks5://   hostname is resolved on client side
@@ -156,14 +165,18 @@ def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit
 
 
 def get_transport(verify, http2, local_address, proxy_url, limit, retries):
+    global TRANSPORT_KWARGS
     verify = get_sslcontexts(None, None, True, False, http2) if verify is True else verify
-    return AsyncHTTPTransportFixed(verify=verify,
-                                   http2=http2,
-                                   local_address=local_address,
-                                   proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
-                                   limits=limit,
-                                   retries=retries,
-                                   **TRANSPORT_KWARGS)
+    return AsyncHTTPTransportFixed(
+        # pylint: disable=protected-access
+        verify=verify,
+        http2=http2,
+        local_address=local_address,
+        proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
+        limits=limit,
+        retries=retries,
+        **TRANSPORT_KWARGS
+    )
 
 
 def iter_proxies(proxies):
@@ -175,9 +188,11 @@ def iter_proxies(proxies):
             yield pattern, proxy_url
 
 
-def new_client(enable_http, verify, enable_http2,
-               max_connections, max_keepalive_connections, keepalive_expiry,
-               proxies, local_address, retries, max_redirects):
+def new_client(
+        # pylint: disable=too-many-arguments
+        enable_http, verify, enable_http2,
+        max_connections, max_keepalive_connections, keepalive_expiry,
+        proxies, local_address, retries, max_redirects  ):
     limit = httpx.Limits(max_connections=max_connections,
                          max_keepalive_connections=max_keepalive_connections,
                          keepalive_expiry=keepalive_expiry)
@@ -217,12 +232,12 @@ def init():
         LOOP = asyncio.new_event_loop()
         LOOP.run_forever()
 
-    th = threading.Thread(
+    thread = threading.Thread(
         target=loop_thread,
         name='asyncio_loop',
         daemon=True,
     )
-    th.start()
+    thread.start()
 
 
 init()