# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring, missing-function-docstring, global-statement

import asyncio
import threading
import concurrent.futures
from timeit import default_timer

import httpx
import h2.exceptions

from .network import get_network, initialize
from .client import get_loop
from .raise_for_httperror import raise_for_httperror

# queue.SimpleQueue: Support Python 3.6
try:
    from queue import SimpleQueue
except ImportError:
    from queue import Empty
    from collections import deque

    class SimpleQueue:
        """Minimal backport of queue.SimpleQueue"""

        def __init__(self):
            self._queue = deque()
            self._count = threading.Semaphore(0)

        def put(self, item):
            self._queue.append(item)
            self._count.release()

        def get(self):
            if not self._count.acquire(True):  # pylint: disable=consider-using-with
                raise Empty
            return self._queue.popleft()


THREADLOCAL = threading.local()
"""Thread-local storage for per-thread values (timeout, start_time, network, total_time)."""


def reset_time_for_thread():
    global THREADLOCAL
    THREADLOCAL.total_time = 0


def get_time_for_thread():
    """Return the thread's total HTTP time, or None if unset."""
    global THREADLOCAL
    return THREADLOCAL.__dict__.get('total_time')


def set_timeout_for_thread(timeout, start_time=None):
    global THREADLOCAL
    THREADLOCAL.timeout = timeout
    THREADLOCAL.start_time = start_time


def set_context_network_name(network_name):
    global THREADLOCAL
    THREADLOCAL.network = get_network(network_name)


def get_context_network():
    """If set, return the thread's network.

    If unset, return the value from :py:obj:`get_network`.
    """
    global THREADLOCAL
    return THREADLOCAL.__dict__.get('network') or get_network()
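
# Illustrative sketch (not part of the upstream module): how a caller thread
# might combine the thread-local helpers above. The network name below is a
# hypothetical example.
#
#     reset_time_for_thread()                       # start accounting HTTP time
#     set_timeout_for_thread(3.0, start_time=default_timer())
#     set_context_network_name('example_engine')    # hypothetical network name
#     resp = get('https://example.org')             # uses the thread's network and timeout
#     elapsed = get_time_for_thread()               # seconds spent in HTTP by this thread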


def request(method, url, **kwargs):
    """Same as requests/requests/api.py request(...)"""
    global THREADLOCAL
    time_before_request = default_timer()

    # timeout (httpx)
    if 'timeout' in kwargs:
        timeout = kwargs['timeout']
    else:
        timeout = getattr(THREADLOCAL, 'timeout', None)
        if timeout is not None:
            kwargs['timeout'] = timeout

    # 2 minutes timeout for the requests without timeout
    timeout = timeout or 120

    # adjust actual timeout
    timeout += 0.2  # overhead
    start_time = getattr(THREADLOCAL, 'start_time', time_before_request)
    if start_time:
        timeout -= default_timer() - start_time

    # raise_for_error
    check_for_httperror = True
    if 'raise_for_httperror' in kwargs:
        check_for_httperror = kwargs['raise_for_httperror']
        del kwargs['raise_for_httperror']

    # requests compatibility
    if isinstance(url, bytes):
        url = url.decode()

    # network
    network = get_context_network()

    # do request
    future = asyncio.run_coroutine_threadsafe(network.request(method, url, **kwargs), get_loop())
    try:
        response = future.result(timeout)
    except concurrent.futures.TimeoutError as e:
        raise httpx.TimeoutException('Timeout', request=None) from e

    # requests compatibility
    # see also https://www.python-httpx.org/compatibility/#checking-for-4xx5xx-responses
    response.ok = not response.is_error

    # update total_time.
    # See get_time_for_thread() and reset_time_for_thread()
    if hasattr(THREADLOCAL, 'total_time'):
        time_after_request = default_timer()
        THREADLOCAL.total_time += time_after_request - time_before_request

    # raise an exception
    if check_for_httperror:
        raise_for_httperror(response)

    return response


def get(url, **kwargs):
    kwargs.setdefault('allow_redirects', True)
    return request('get', url, **kwargs)


def options(url, **kwargs):
    kwargs.setdefault('allow_redirects', True)
    return request('options', url, **kwargs)


def head(url, **kwargs):
    kwargs.setdefault('allow_redirects', False)
    return request('head', url, **kwargs)


def post(url, data=None, **kwargs):
    return request('post', url, data=data, **kwargs)


def put(url, data=None, **kwargs):
    return request('put', url, data=data, **kwargs)


def patch(url, data=None, **kwargs):
    return request('patch', url, data=data, **kwargs)


def delete(url, **kwargs):
    return request('delete', url, **kwargs)
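
# Illustrative sketch (not part of the upstream module): the verb helpers accept
# the same keyword arguments as request(), plus 'raise_for_httperror' to skip the
# HTTP status check. The URL and parameters below are placeholders.
#
#     resp = get('https://example.org/search', params={'q': 'test'}, raise_for_httperror=False)
#     if resp.ok:              # requests-style attribute set in request()
#         data = resp.json()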


async def stream_chunk_to_queue(network, queue, method, url, **kwargs):
    try:
        async with network.stream(method, url, **kwargs) as response:
            queue.put(response)
            async for chunk in response.aiter_bytes(65536):
                if len(chunk) > 0:
                    queue.put(chunk)
    except (httpx.HTTPError, OSError, h2.exceptions.ProtocolError) as e:
        # hand the exception to the consuming thread; stream() re-raises it
        queue.put(e)
    finally:
        # None is the sentinel telling the consumer the stream has ended
        queue.put(None)


def stream(method, url, **kwargs):
    """Replace httpx.stream.

    Usage:
    stream = poolrequests.stream(...)
    response = next(stream)
    for chunk in stream:
        ...

    httpx.Client.stream requires writing the httpx.HTTPTransport version of the
    httpx.AsyncHTTPTransport declared above.
    """
    queue = SimpleQueue()
    future = asyncio.run_coroutine_threadsafe(
        stream_chunk_to_queue(get_network(), queue, method, url, **kwargs),
        get_loop()
    )
    chunk_or_exception = queue.get()
    while chunk_or_exception is not None:
        if isinstance(chunk_or_exception, Exception):
            raise chunk_or_exception
        yield chunk_or_exception
        chunk_or_exception = queue.get()
    return future.result()
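
# Illustrative sketch (not part of the upstream module): consuming stream().
# The first item yielded is the httpx.Response (headers only, body not read);
# the remaining items are byte chunks of up to 64 KiB.
#
#     chunks = stream('GET', 'https://example.org/large-file')   # placeholder URL
#     response = next(chunks)                                    # httpx.Response
#     for chunk in chunks:
#         handle(chunk)                                          # 'handle' is hypothetical caller code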