__init__.py

# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring, global-statement

import asyncio
import threading
import concurrent.futures
from types import MethodType
from timeit import default_timer

import httpx
import h2.exceptions

from .network import get_network, initialize
from .client import get_loop
from .raise_for_httperror import raise_for_httperror

# queue.SimpleQueue: Support Python 3.6
try:
    from queue import SimpleQueue
except ImportError:
    from queue import Empty
    from collections import deque

    class SimpleQueue:
        """Minimal backport of queue.SimpleQueue"""

        def __init__(self):
            self._queue = deque()
            self._count = threading.Semaphore(0)

        def put(self, item):
            self._queue.append(item)
            self._count.release()

        def get(self):
            if not self._count.acquire(True):  # pylint: disable=consider-using-with
                raise Empty
            return self._queue.popleft()


THREADLOCAL = threading.local()
"""Thread-local storage holding per-thread values: ``total_time``, ``timeout``,
``start_time`` and ``network``."""


def reset_time_for_thread():
    THREADLOCAL.total_time = 0


def get_time_for_thread():
    """returns thread's total time or None"""
    return THREADLOCAL.__dict__.get('total_time')


def set_timeout_for_thread(timeout, start_time=None):
    THREADLOCAL.timeout = timeout
    THREADLOCAL.start_time = start_time


def set_context_network_name(network_name):
    THREADLOCAL.network = get_network(network_name)


def get_context_network():
    """If set, return the thread's network.

    If unset, return the value from :py:obj:`get_network`.
    """
    return THREADLOCAL.__dict__.get('network') or get_network()
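
# Usage sketch (illustrative, not part of the module API): a worker thread
# typically prepares its thread-local state before issuing requests, then
# reads back the accumulated HTTP time afterwards.
#
#     reset_time_for_thread()
#     set_timeout_for_thread(3.0, start_time=default_timer())
#     set_context_network_name('default')  # assumes a network named 'default' is configured
#     # ... call request() / get() / post() defined below ...
#     elapsed_in_http = get_time_for_thread()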


def request(method, url, **kwargs):
    """same as requests/requests/api.py request(...)"""
    time_before_request = default_timer()

    # timeout (httpx)
    if 'timeout' in kwargs:
        timeout = kwargs['timeout']
    else:
        timeout = getattr(THREADLOCAL, 'timeout', None)
        if timeout is not None:
            kwargs['timeout'] = timeout

    # 2 minutes timeout for the requests without timeout
    timeout = timeout or 120

    # adjust actual timeout
    timeout += 0.2  # overhead
    start_time = getattr(THREADLOCAL, 'start_time', time_before_request)
    if start_time:
        timeout -= default_timer() - start_time

    # raise_for_error
    check_for_httperror = True
    if 'raise_for_httperror' in kwargs:
        check_for_httperror = kwargs['raise_for_httperror']
        del kwargs['raise_for_httperror']

    # requests compatibility
    if isinstance(url, bytes):
        url = url.decode()

    # network
    network = get_context_network()

    # do request
    future = asyncio.run_coroutine_threadsafe(network.request(method, url, **kwargs), get_loop())
    try:
        response = future.result(timeout)
    except concurrent.futures.TimeoutError as e:
        raise httpx.TimeoutException('Timeout', request=None) from e

    # requests compatibility
    # see also https://www.python-httpx.org/compatibility/#checking-for-4xx5xx-responses
    response.ok = not response.is_error

    # update total_time.
    # See get_time_for_thread() and reset_time_for_thread()
    if hasattr(THREADLOCAL, 'total_time'):
        time_after_request = default_timer()
        THREADLOCAL.total_time += time_after_request - time_before_request

    # raise an exception
    if check_for_httperror:
        raise_for_httperror(response)

    return response
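
# Usage sketch (illustrative): request() blocks the calling thread while the
# coroutine runs on the shared event loop from get_loop(); the URLs below are
# placeholders.
#
#     resp = request('get', 'https://example.org/', timeout=2.0)
#     assert resp.ok  # requests-style flag set above
#     # skip raise_for_httperror() for responses that are expected to fail:
#     resp = request('get', 'https://example.org/missing', raise_for_httperror=False)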


def get(url, **kwargs):
    kwargs.setdefault('allow_redirects', True)
    return request('get', url, **kwargs)


def options(url, **kwargs):
    kwargs.setdefault('allow_redirects', True)
    return request('options', url, **kwargs)


def head(url, **kwargs):
    kwargs.setdefault('allow_redirects', False)
    return request('head', url, **kwargs)


def post(url, data=None, **kwargs):
    return request('post', url, data=data, **kwargs)


def put(url, data=None, **kwargs):
    return request('put', url, data=data, **kwargs)


def patch(url, data=None, **kwargs):
    return request('patch', url, data=data, **kwargs)


def delete(url, **kwargs):
    return request('delete', url, **kwargs)
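
# Usage sketch (illustrative): the helpers mirror the requests API; get() and
# options() follow redirects by default, head() does not.  Extra keyword
# arguments are passed through to the underlying httpx request.
#
#     resp = get('https://example.org/search', params={'q': 'test'})
#     resp = post('https://example.org/api', data={'key': 'value'})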


async def stream_chunk_to_queue(network, queue, method, url, **kwargs):
    try:
        async with network.stream(method, url, **kwargs) as response:
            queue.put(response)
            # aiter_raw: access the raw bytes on the response without applying any HTTP content decoding
            # https://www.python-httpx.org/quickstart/#streaming-responses
            async for chunk in response.aiter_raw(65536):
                if len(chunk) > 0:
                    queue.put(chunk)
    except httpx.ResponseClosed:
        # the response was queued before the exception.
        # the exception was raised on aiter_raw.
        # we do nothing here: in the finally block, None will be queued
        # so the stream(method, url, **kwargs) generator can stop
        pass
    except Exception as e:  # pylint: disable=broad-except
        # broad except to avoid this scenario:
        #   an exception is raised in network.stream(method, url, **kwargs)
        #   -> the exception is not caught here
        #   -> None is queued (in the finally block)
        #   -> the stream(method, url, **kwargs) generator below has nothing to return
        queue.put(e)
    finally:
        queue.put(None)


def _close_response_method(self):
    asyncio.run_coroutine_threadsafe(
        self.aclose(),
        get_loop()
    )


def stream(method, url, **kwargs):
    """Replace httpx.stream.

    Usage:
    stream = poolrequests.stream(...)
    response = next(stream)
    for chunk in stream:
        ...

    httpx.Client.stream requires writing the httpx.HTTPTransport version of
    the httpx.AsyncHTTPTransport declared above.
    """
    queue = SimpleQueue()
    network = get_context_network()
    future = asyncio.run_coroutine_threadsafe(
        stream_chunk_to_queue(network, queue, method, url, **kwargs),
        get_loop()
    )

    # yield response
    response = queue.get()
    if isinstance(response, Exception):
        raise response
    response.close = MethodType(_close_response_method, response)
    yield response

    # yield chunks
    chunk_or_exception = queue.get()
    while chunk_or_exception is not None:
        if isinstance(chunk_or_exception, Exception):
            raise chunk_or_exception
        yield chunk_or_exception
        chunk_or_exception = queue.get()
    future.result()
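
# Usage sketch (illustrative): the generator first yields the httpx Response
# (headers only), then the raw body chunks; process() is a hypothetical consumer.
#
#     s = stream('get', 'https://example.org/large-file')
#     response = next(s)
#     for chunk in s:
#         process(chunk)
#     # response.close() is only needed when the body is not read to the end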