# SPDX-License-Identifier: AGPL-3.0-or-later

from urllib.parse import urlparse
from time import time
import threading

import requests.exceptions

import searx.poolrequests as poolrequests
from searx.engines import settings
from searx import logger
from searx.utils import gen_useragent
from searx.exceptions import (SearxEngineAccessDeniedException, SearxEngineCaptchaException,
                              SearxEngineTooManyRequestsException,)
from searx.metrology.error_recorder import record_exception, record_error

from searx.search.processors.abstract import EngineProcessor


logger = logger.getChild('search.processor.online')

# Shared lock protecting the per-engine statistic counters.  The previous
# code did ``with threading.RLock():`` at every update site, which acquires
# a freshly created (and therefore uncontended) lock and synchronizes
# nothing; a single module-level lock is required.
stats_lock = threading.Lock()
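
# Skeleton of the request parameter dict handed to each engine's
# ``request()`` hook; engines fill in ``url``, ``method``, ``data``, etc.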
def default_request_params():
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True,
        'auth': None
    }
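
# Processor for "online" engines: engines that build an outgoing HTTP
# request, send it through searx's connection pool (poolrequests) and
# parse the response into results.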
class OnlineProcessor(EngineProcessor):

    engine_type = 'online'

    def get_params(self, search_query, engine_category):
        params = super().get_params(search_query, engine_category)
        if params is None:
            return None

        # skip suspended engines
        if self.engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', self.engine_name)
            return None

        # add default params
        params.update(default_request_params())

        # add a user agent
        params['headers']['User-Agent'] = gen_useragent()

        return params
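
    # Build the keyword arguments for the pooled HTTP call from ``params``
    # and send the request; GET and POST are the only supported methods.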
    def _send_http_request(self, params):
        # create a dictionary containing all
        # information about the request
        request_args = dict(
            headers=params['headers'],
            cookies=params['cookies'],
            verify=params['verify'],
            auth=params['auth']
        )

        # setting engine-based proxies
        if hasattr(self.engine, 'proxies'):
            request_args['proxies'] = poolrequests.get_proxies(self.engine.proxies)

        # max_redirects
        max_redirects = params.get('max_redirects')
        if max_redirects:
            request_args['max_redirects'] = max_redirects

        # soft_max_redirects
        soft_max_redirects = params.get('soft_max_redirects', max_redirects or 0)

        # raise_for_httperror
        request_args['raise_for_httperror'] = params.get('raise_for_httperror', False)

        # specific type of request (GET or POST)
        if params['method'] == 'GET':
            req = poolrequests.get
        else:
            req = poolrequests.post

        request_args['data'] = params['data']

        # send the request
        response = req(params['url'], **request_args)

        # check the soft limit of the redirect count
        if len(response.history) > soft_max_redirects:
            # unexpected redirect: record an error,
            # but the engine might still return valid results.
            status_code = str(response.status_code or '')
            reason = response.reason or ''
            hostname = str(urlparse(response.url or '').netloc)
            record_error(self.engine_name,
                         '{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects),
                         (status_code, reason, hostname))

        return response
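
    # Run one engine round-trip: let the engine fill in the request
    # parameters, send the HTTP request and hand the raw response to the
    # engine's ``response()`` parser.  Returns None when the engine opts out.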
    def _search_basic(self, query, params):
        # update the request parameters dependent on the
        # search engine (modules in the engines folder)
        self.engine.request(query, params)

        # ignore empty urls (None or '')
        if not params['url']:
            return None

        # send the request
        response = self._send_http_request(params)

        # parse the response
        response.search_params = params
        return self.engine.response(response)
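
    # Entry point called by the search framework (typically in a per-engine
    # thread): run the engine, collect results and timings, record errors
    # and suspend the engine when it keeps failing.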
    def search(self, query, params, result_container, start_time, timeout_limit):
        # set the timeout for all HTTP requests
        poolrequests.set_timeout_for_thread(timeout_limit, start_time=start_time)
        # reset the HTTP total time
        poolrequests.reset_time_for_thread()

        # suppose everything will be alright
        requests_exception = False
        suspended_time = None

        try:
            # send requests and parse the results
            search_results = self._search_basic(query, params)

            # check if the engine accepted the request
            if search_results is not None:
                # yes, so add results
                result_container.extend(self.engine_name, search_results)

                # update the engine time when there is no exception
                engine_time = time() - start_time
                page_load_time = poolrequests.get_time_for_thread()
                result_container.add_timing(self.engine_name, engine_time, page_load_time)
                with stats_lock:
                    self.engine.stats['engine_time'] += engine_time
                    self.engine.stats['engine_time_count'] += 1
                    # update stats with the total HTTP time
                    self.engine.stats['page_load_time'] += page_load_time
                    self.engine.stats['page_load_count'] += 1
        except Exception as e:
            record_exception(self.engine_name, e)

            # timing
            engine_time = time() - start_time
            page_load_time = poolrequests.get_time_for_thread()
            result_container.add_timing(self.engine_name, engine_time, page_load_time)

            # record the error
            with stats_lock:
                self.engine.stats['errors'] += 1

            if isinstance(e, requests.exceptions.Timeout):
                result_container.add_unresponsive_engine(self.engine_name, 'HTTP timeout')
                # requests timeout (connect or read)
                logger.error("engine {0} : HTTP requests timeout "
                             "(search duration : {1} s, timeout: {2} s) : {3}"
                             .format(self.engine_name, engine_time, timeout_limit, e.__class__.__name__))
                requests_exception = True
            elif isinstance(e, requests.exceptions.RequestException):
                result_container.add_unresponsive_engine(self.engine_name, 'HTTP error')
                # other requests exception
                logger.exception("engine {0} : requests exception "
                                 "(search duration : {1} s, timeout: {2} s) : {3}"
                                 .format(self.engine_name, engine_time, timeout_limit, e))
                requests_exception = True
            elif isinstance(e, SearxEngineCaptchaException):
                result_container.add_unresponsive_engine(self.engine_name, 'CAPTCHA required')
                logger.exception('engine {0} : CAPTCHA'.format(self.engine_name))
                suspended_time = e.suspended_time  # pylint: disable=no-member
            elif isinstance(e, SearxEngineTooManyRequestsException):
                result_container.add_unresponsive_engine(self.engine_name, 'too many requests')
                logger.exception('engine {0} : Too many requests'.format(self.engine_name))
                suspended_time = e.suspended_time  # pylint: disable=no-member
            elif isinstance(e, SearxEngineAccessDeniedException):
                result_container.add_unresponsive_engine(self.engine_name, 'blocked')
                logger.exception('engine {0} : Searx is blocked'.format(self.engine_name))
                suspended_time = e.suspended_time  # pylint: disable=no-member
            else:
                result_container.add_unresponsive_engine(self.engine_name, 'unexpected crash')
                # other errors
                logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e))
        else:
            if getattr(threading.current_thread(), '_timeout', False):
                record_error(self.engine_name, 'Timeout')

        # suspend the engine if there is an HTTP error
        # or suspended_time is defined
        with stats_lock:
            if requests_exception or suspended_time:
                # update continuous_errors / suspend_end_time
                self.engine.continuous_errors += 1
                if suspended_time is None:
                    suspended_time = min(settings['search']['max_ban_time_on_fail'],
                                         self.engine.continuous_errors * settings['search']['ban_time_on_fail'])
                self.engine.suspend_end_time = time() + suspended_time
            else:
                # reset the suspend variables
                self.engine.continuous_errors = 0
                self.engine.suspend_end_time = 0
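
    # Build the default self-test matrix for this engine, depending on which
    # features (paging, time_range, lang, safesearch) the engine declares.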
    def get_default_tests(self):
        tests = {}

        tests['simple'] = {
            'matrix': {'query': ('life', 'computer')},
            'result_container': ['not_empty'],
        }

        if getattr(self.engine, 'paging', False):
            tests['paging'] = {
                'matrix': {'query': 'time',
                           'pageno': (1, 2, 3)},
                'result_container': ['not_empty'],
                'test': ['unique_results']
            }
            if 'general' in self.engine.categories:
                # avoid documentation about HTML tags (<time> and <input type="time">)
                tests['paging']['matrix']['query'] = 'news'

        if getattr(self.engine, 'time_range', False):
            tests['time_range'] = {
                'matrix': {'query': 'news',
                           'time_range': (None, 'day')},
                'result_container': ['not_empty'],
                'test': ['unique_results']
            }

        if getattr(self.engine, 'lang', False):
            tests['lang_fr'] = {
                'matrix': {'query': 'paris', 'lang': 'fr'},
                'result_container': ['not_empty', ('has_lang', 'fr')],
            }
            tests['lang_en'] = {
                'matrix': {'query': 'paris', 'lang': 'en'},
                'result_container': ['not_empty', ('has_lang', 'en')],
            }

        if getattr(self.engine, 'safesearch', False):
            tests['safesearch'] = {
                'matrix': {'query': 'porn',
                           'safesearch': (0, 2)},
                'test': ['unique_results']
            }

        return tests
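
# A rough sketch of how this processor is driven (for orientation only; the
# real call site lives in searx/search/__init__.py and may differ in detail):
#
#     processor = processors[engine_name]   # per-engine registry built at startup
#     params = processor.get_params(search_query, engine_category)
#     if params is not None:
#         processor.search(query, params, result_container, start_time, timeout_limit)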