search.py

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
import typing
import gc
import threading
from time import time
from uuid import uuid4
from _thread import start_new_thread

import requests.exceptions

import searx.poolrequests as requests_lib
from searx.engines import engines, settings
from searx.answerers import ask
from searx.external_bang import get_bang_url
from searx.utils import gen_useragent
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins


logger = logger.getChild('search')

max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout', None)
if max_request_timeout is None:
    logger.info('max_request_timeout={0}'.format(max_request_timeout))
else:
    if isinstance(max_request_timeout, float):
        logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
    else:
        logger.critical('outgoing.max_request_timeout if defined has to be float')
        from sys import exit
        exit(1)


class EngineRef:
    """Reference to a search engine and the category it was selected in."""

    __slots__ = 'name', 'category', 'from_bang'

    def __init__(self, name: str, category: str, from_bang: bool=False):
        self.name = name
        self.category = category
        self.from_bang = from_bang

    def __str__(self):
        return "(" + self.name + "," + self.category + "," + str(self.from_bang) + ")"


class SearchQuery:
    """Container for all the search parameters (query, language, etc...)"""

    __slots__ = 'query', 'engineref_list', 'categories', 'lang', 'safesearch', 'pageno', 'time_range',\
        'timeout_limit', 'external_bang'

    def __init__(self,
                 query: str,
                 engineref_list: typing.List[EngineRef],
                 categories: typing.List[str],
                 lang: str,
                 safesearch: bool,
                 pageno: int,
                 time_range: typing.Optional[str],
                 timeout_limit: typing.Optional[float]=None,
                 external_bang: typing.Optional[str]=None):
        self.query = query
        self.engineref_list = engineref_list
        self.categories = categories
        self.lang = lang
        self.safesearch = safesearch
        self.pageno = pageno
        self.time_range = time_range
        self.timeout_limit = timeout_limit
        self.external_bang = external_bang

    def __str__(self):
        return self.query + ";" + str(self.engineref_list)
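
# Illustrative only (not part of the original searx source): a search over the
# "general" category could be built and run roughly like this, assuming an engine
# named 'wikipedia' is configured in settings.yml:
#
#   engineref_list = [EngineRef('wikipedia', 'general')]
#   search_query = SearchQuery('test query', engineref_list, ['general'],
#                              'en-US', False, 1, None)
#   result_container = Search(search_query).search()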


def send_http_request(engine, request_params):
    # create a dictionary containing all the information about the HTTP request
    request_args = dict(
        headers=request_params['headers'],
        cookies=request_params['cookies'],
        verify=request_params['verify']
    )

    # setting engine based proxies
    if hasattr(engine, 'proxies'):
        request_args['proxies'] = engine.proxies

    # specific type of request (GET or POST)
    if request_params['method'] == 'GET':
        req = requests_lib.get
    else:
        req = requests_lib.post
        request_args['data'] = request_params['data']

    # send the request
    return req(request_params['url'], **request_args)


def search_one_http_request(engine, query, request_params):
    # update request parameters dependent on
    # search-engine (contained in engines folder)
    engine.request(query, request_params)

    # ignoring empty urls
    if request_params['url'] is None:
        return None

    if not request_params['url']:
        return None

    # send request
    response = send_http_request(engine, request_params)

    # parse the response
    response.search_params = request_params
    return engine.response(response)


def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    # set timeout for all HTTP requests
    requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
    # reset the HTTP total time
    requests_lib.reset_time_for_thread()

    engine = engines[engine_name]

    # suppose everything will be alright
    requests_exception = False

    try:
        # send requests and parse the results
        search_results = search_one_http_request(engine, query, request_params)

        # check if the engine accepted the request
        if search_results is not None:
            # yes, so add results
            result_container.extend(engine_name, search_results)

            # update engine time when there is no exception
            engine_time = time() - start_time
            page_load_time = requests_lib.get_time_for_thread()
            result_container.add_timing(engine_name, engine_time, page_load_time)
            with threading.RLock():
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1
                # update stats with the total HTTP time
                engine.stats['page_load_time'] += page_load_time
                engine.stats['page_load_count'] += 1
    except Exception as e:
        # Timing
        engine_time = time() - start_time
        page_load_time = requests_lib.get_time_for_thread()
        result_container.add_timing(engine_name, engine_time, page_load_time)

        # Record the errors
        with threading.RLock():
            engine.stats['errors'] += 1

        if (issubclass(e.__class__, requests.exceptions.Timeout)):
            result_container.add_unresponsive_engine(engine_name, 'timeout')
            # requests timeout (connect or read)
            logger.error("engine {0} : HTTP requests timeout "
                         "(search duration : {1} s, timeout: {2} s) : {3}"
                         .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
            requests_exception = True
        elif (issubclass(e.__class__, requests.exceptions.RequestException)):
            result_container.add_unresponsive_engine(engine_name, 'request exception')
            # other requests exception
            logger.exception("engine {0} : requests exception "
                             "(search duration : {1} s, timeout: {2} s) : {3}"
                             .format(engine_name, engine_time, timeout_limit, e))
            requests_exception = True
        else:
            result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
            # other errors
            logger.exception('engine {0} : exception : {1}'.format(engine_name, e))

    # suspend or not the engine if there are HTTP errors
    with threading.RLock():
        if requests_exception:
            # update continuous_errors / suspend_end_time
            engine.continuous_errors += 1
            engine.suspend_end_time = time() + min(settings['search']['max_ban_time_on_fail'],
                                                   engine.continuous_errors * settings['search']['ban_time_on_fail'])
        else:
            # no HTTP error (perhaps an engine error)
            # anyway, reset the suspend variables
            engine.continuous_errors = 0
            engine.suspend_end_time = 0
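
# Illustrative only (not part of the original searx source): assuming the default
# settings.yml values ban_time_on_fail: 5 and max_ban_time_on_fail: 120, an engine
# that has failed three times in a row is suspended for min(120, 3 * 5) = 15 seconds.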


def record_offline_engine_stats_on_error(engine, result_container, start_time):
    engine_time = time() - start_time
    result_container.add_timing(engine.name, engine_time, engine_time)

    with threading.RLock():
        engine.stats['errors'] += 1


def search_one_offline_request(engine, query, request_params):
    return engine.search(query, request_params)


def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    engine = engines[engine_name]

    try:
        search_results = search_one_offline_request(engine, query, request_params)

        if search_results:
            result_container.extend(engine_name, search_results)

            engine_time = time() - start_time
            result_container.add_timing(engine_name, engine_time, engine_time)
            with threading.RLock():
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1

    except ValueError as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
    except Exception as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
        logger.exception('engine {0} : exception : {1}'.format(engine_name, e))


def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    if engines[engine_name].offline:
        return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)  # noqa
    return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)


def search_multiple_requests(requests, result_container, start_time, timeout_limit):
    # all threads of this search share the same name (a random uuid),
    # so they can be found again below among all running threads
    search_id = uuid4().__str__()

    # start one thread per engine request
    for engine_name, query, request_params in requests:
        th = threading.Thread(
            target=search_one_request_safe,
            args=(engine_name, query, request_params, result_container, start_time, timeout_limit),
            name=search_id,
        )
        th._engine_name = engine_name
        th.start()

    # wait for the threads of this search, but never longer than timeout_limit overall
    for th in threading.enumerate():
        if th.name == search_id:
            remaining_time = max(0.0, timeout_limit - (time() - start_time))
            th.join(remaining_time)
            if th.is_alive():
                result_container.add_unresponsive_engine(th._engine_name, 'timeout')
                logger.warning('engine timeout: {0}'.format(th._engine_name))


# get default request parameters
def default_request_params():
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True
    }
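
# Note (added for clarity, not in the original searx source): each engine's request()
# hook receives this dict (with 'language', 'safesearch', 'time_range', 'category' and
# 'pageno' added by Search._get_params) and is expected to fill in at least 'url';
# a None or empty 'url' makes search_one_http_request() skip the engine.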


class Search:
    """Search information container"""

    __slots__ = "search_query", "result_container", "start_time", "actual_timeout"

    def __init__(self, search_query):
        # init vars
        super().__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
        self.start_time = None
        self.actual_timeout = None

    def search_external_bang(self):
        """
        Check if there is an external bang.
        If yes, update self.result_container and return True
        """
        if self.search_query.external_bang:
            self.result_container.redirect_url = get_bang_url(self.search_query)

            # This means there was a valid bang and the
            # rest of the search does not need to be continued
            if isinstance(self.result_container.redirect_url, str):
                return True
        return False

    def search_answerers(self):
        """
        Check if an answerer returns a result.
        If yes, update self.result_container and return True
        """
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return True
        return False

    def _is_accepted(self, engine_name, engine):
        # skip suspended engines
        if engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', engine_name)
            return False

        # if paging is not supported, skip
        if self.search_query.pageno > 1 and not engine.paging:
            return False

        # if time_range is not supported, skip
        if self.search_query.time_range and not engine.time_range_support:
            return False

        return True

    def _get_params(self, engineref, user_agent):
        if engineref.name not in engines:
            return None, None

        engine = engines[engineref.name]

        if not self._is_accepted(engineref.name, engine):
            return None, None

        # set default request parameters
        request_params = {}
        if not engine.offline:
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = self.search_query.lang

            request_params['safesearch'] = self.search_query.safesearch
            request_params['time_range'] = self.search_query.time_range

        request_params['category'] = engineref.category
        request_params['pageno'] = self.search_query.pageno

        return request_params, engine.timeout

    # do search-request
    def _get_requests(self):
        # init vars
        requests = []

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        # max of all selected engine timeout
        default_timeout = 0

        # start search-request for all selected engines
        for engineref in self.search_query.engineref_list:
            # set default request parameters
            request_params, engine_timeout = self._get_params(engineref, user_agent)
            if request_params is None:
                continue

            # append request to list
            requests.append((engineref.name, self.search_query.query, request_params))

            # update default_timeout
            default_timeout = max(default_timeout, engine_timeout)

        # adjust timeout
        actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: From user query except if above default
            actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: Default except if above max
            actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: From user query except if above max
            actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                     .format(actual_timeout, default_timeout, query_timeout, max_request_timeout))

        return requests, actual_timeout
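
    # Illustrative only (not part of the original searx source): with a selected-engine
    # maximum timeout of 3.0 s (default_timeout), a user-supplied timeout_limit of 5.0 s
    # and outgoing.max_request_timeout set to 10.0 s, the branches above yield
    # actual_timeout = min(5.0, 10.0) = 5.0 s; without max_request_timeout the user value
    # is capped by the engines' default instead: min(3.0, 5.0) = 3.0 s.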

    def search_standard(self):
        """
        Update self.result_container, self.actual_timeout
        """
        requests, self.actual_timeout = self._get_requests()

        # send all search-request
        if requests:
            search_multiple_requests(requests, self.result_container, self.start_time, self.actual_timeout)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return True

    # do search-request
    def search(self):
        self.start_time = time()

        if not self.search_external_bang():
            if not self.search_answerers():
                self.search_standard()

        return self.result_container


class SearchWithPlugins(Search):
    """Similar to the Search class but calls the plugins."""

    __slots__ = 'ordered_plugin_list', 'request'

    def __init__(self, search_query, ordered_plugin_list, request):
        super().__init__(search_query)
        self.ordered_plugin_list = ordered_plugin_list
        self.request = request

    def search(self):
        if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
            super().search()

        plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)

        results = self.result_container.get_ordered_results()

        for result in results:
            plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)

        return self.result_container
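
# Note (added for clarity, not in the original searx source): SearchWithPlugins wraps a
# normal Search with the plugin hooks: 'pre_search' can veto the whole search by returning
# a falsy value, 'post_search' runs once after the engines have responded, and 'on_result'
# is called for every ordered result before the container is returned to the caller.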