# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Processors for engine-type: ``online``
  4. """
  5. from timeit import default_timer
  6. import asyncio
  7. import httpx
  8. import searx.network
  9. from searx.utils import gen_useragent
  10. from searx.exceptions import (
  11. SearxEngineAccessDeniedException,
  12. SearxEngineCaptchaException,
  13. SearxEngineTooManyRequestsException,
  14. )
  15. from searx.metrics.error_recorder import count_error
  16. from .abstract import EngineProcessor
def default_request_params():
    """Default request parameters for ``online`` engines."""
    return {
        # fmt: off
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True,
        'auth': None
        # fmt: on
    }


class OnlineProcessor(EngineProcessor):
    """Processor class for ``online`` engines."""

    engine_type = 'online'

    def initialize(self):
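        """Set the per-thread HTTP timeout and network name before running the
        base class initialization."""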
        # set timeout for all HTTP requests
        searx.network.set_timeout_for_thread(self.engine.timeout, start_time=default_timer())
        # reset the HTTP total time
        searx.network.reset_time_for_thread()
        # set the network
        searx.network.set_context_network_name(self.engine_name)
        super().initialize()

    def get_params(self, search_query, engine_category):
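        """Build the request parameters for this engine.

        Returns ``None`` when the base class yields no parameters; otherwise
        the parameters are extended with ``default_request_params()`` and a
        generated ``User-Agent`` header (see ``gen_useragent``).
        """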
        params = super().get_params(search_query, engine_category)
        if params is None:
            return None

        # add default params
        params.update(default_request_params())

        # add a user agent
        params['headers']['User-Agent'] = gen_useragent()

        return params

    def _send_http_request(self, params):
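        """Send the HTTP request described by ``params`` via ``searx.network``.

        ``max_redirects``, ``allow_redirects`` and ``raise_for_httperror`` are
        honored when present in ``params``.  If the response followed more
        redirects than ``soft_max_redirects``, a secondary error is recorded
        for the engine, but the response is still returned.
        """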
        # create a dictionary that contains all
        # information about the request
        request_args = dict(
            headers=params['headers'], cookies=params['cookies'], verify=params['verify'], auth=params['auth']
        )

        # max_redirects
        max_redirects = params.get('max_redirects')
        if max_redirects:
            request_args['max_redirects'] = max_redirects

        # allow_redirects
        if 'allow_redirects' in params:
            request_args['allow_redirects'] = params['allow_redirects']

        # soft_max_redirects
        soft_max_redirects = params.get('soft_max_redirects', max_redirects or 0)

        # raise_for_status
        request_args['raise_for_httperror'] = params.get('raise_for_httperror', True)

        # specific type of request (GET or POST)
        if params['method'] == 'GET':
            req = searx.network.get
        else:
            req = searx.network.post
            request_args['data'] = params['data']

        # send the request
        response = req(params['url'], **request_args)

        # check the soft limit of the redirect count
        if len(response.history) > soft_max_redirects:
            # unexpected redirect: record an error
            # but the engine might still return valid results.
            status_code = str(response.status_code or '')
            reason = response.reason_phrase or ''
            hostname = response.url.host
            count_error(
                self.engine_name,
                '{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects),
                (status_code, reason, hostname),
                secondary=True,
            )

        return response

    def _search_basic(self, query, params):
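        """Let the engine's ``request()`` function fill in ``params``, send the
        HTTP request and return the results parsed by the engine's
        ``response()`` function.  Returns ``None`` when the engine sets no URL.
        """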
        # update request parameters dependent on
        # search-engine (contained in engines folder)
        self.engine.request(query, params)

        # ignoring empty urls
        if params['url'] is None:
            return None

        if not params['url']:
            return None

        # send request
        response = self._send_http_request(params)

        # parse the response
        response.search_params = params
        return self.engine.response(response)

    def search(self, query, params, result_container, start_time, timeout_limit):
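        """Run one search request in the current thread and add the results to
        ``result_container``.

        Timeouts, HTTP errors, CAPTCHA, *too many requests* and *access
        denied* responses are handled with ``suspend=True``; any other
        exception is handled without suspending the engine.
        """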
        # set timeout for all HTTP requests
        searx.network.set_timeout_for_thread(timeout_limit, start_time=start_time)
        # reset the HTTP total time
        searx.network.reset_time_for_thread()
        # set the network
        searx.network.set_context_network_name(self.engine_name)

        try:
            # send requests and parse the results
            search_results = self._search_basic(query, params)
            self.extend_container(result_container, start_time, search_results)
        except (httpx.TimeoutException, asyncio.TimeoutError) as e:
            # requests timeout (connect or read)
            self.handle_exception(result_container, e, suspend=True)
            self.logger.error(
                "HTTP requests timeout (search duration : {0} s, timeout: {1} s) : {2}".format(
                    default_timer() - start_time, timeout_limit, e.__class__.__name__
                )
            )
        except (httpx.HTTPError, httpx.StreamError) as e:
            # other requests exception
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception(
                "requests exception (search duration : {0} s, timeout: {1} s) : {2}".format(
                    default_timer() - start_time, timeout_limit, e
                )
            )
        except SearxEngineCaptchaException as e:
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception('CAPTCHA')
        except SearxEngineTooManyRequestsException as e:
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception('Too many requests')
        except SearxEngineAccessDeniedException as e:
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception('Searx is blocked')
        except Exception as e:  # pylint: disable=broad-except
            self.handle_exception(result_container, e)
            self.logger.exception('exception : {0}'.format(e))

    def get_default_tests(self):
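        """Build the default set of self-tests for this engine.

        A simple query test is always included; paging, time-range, language
        and safe-search tests are added only when the engine declares the
        corresponding capability.
        """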
        tests = {}

        tests['simple'] = {
            'matrix': {'query': ('life', 'computer')},
            'result_container': ['not_empty'],
        }

        if getattr(self.engine, 'paging', False):
            tests['paging'] = {
                'matrix': {'query': 'time', 'pageno': (1, 2, 3)},
                'result_container': ['not_empty'],
                'test': ['unique_results'],
            }
            if 'general' in self.engine.categories:
                # avoid documentation about HTML tags (<time> and <input type="time">)
                tests['paging']['matrix']['query'] = 'news'

        if getattr(self.engine, 'time_range', False):
            tests['time_range'] = {
                'matrix': {'query': 'news', 'time_range': (None, 'day')},
                'result_container': ['not_empty'],
                'test': ['unique_results'],
            }

        if getattr(self.engine, 'supported_languages', []):
            tests['lang_fr'] = {
                'matrix': {'query': 'paris', 'lang': 'fr'},
                'result_container': ['not_empty', ('has_language', 'fr')],
            }
            tests['lang_en'] = {
                'matrix': {'query': 'paris', 'lang': 'en'},
                'result_container': ['not_empty', ('has_language', 'en')],
            }

        if getattr(self.engine, 'safesearch', False):
            tests['safesearch'] = {'matrix': {'query': 'porn', 'safesearch': (0, 2)}, 'test': ['unique_results']}

        return tests