search.py

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
import typing
import gc
import threading
from time import time
from uuid import uuid4
from _thread import start_new_thread

import requests.exceptions

import searx.poolrequests as requests_lib
from searx.engines import engines, settings
from searx.answerers import ask
from searx.external_bang import get_bang_url
from searx.utils import gen_useragent
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins


logger = logger.getChild('search')

# a single module-level lock protects the per-engine stats counters;
# creating a fresh RLock for every update (`with threading.RLock():`)
# would not provide any mutual exclusion
engine_stats_lock = threading.RLock()

max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout', None)
if max_request_timeout is None:
    logger.info('max_request_timeout={0}'.format(max_request_timeout))
else:
    if isinstance(max_request_timeout, float):
        logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
    else:
        logger.critical('outgoing.max_request_timeout if defined has to be float')
        from sys import exit

        exit(1)
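# The cap above comes from the instance configuration; a minimal sketch of
# the relevant settings.yml section (the value is illustrative, not a
# default shipped by searx):
#
#   outgoing:
#       max_request_timeout: 10.0   # hard upper bound for any search, in seconds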
class EngineRef:

    __slots__ = 'name', 'category', 'from_bang'

    def __init__(self, name: str, category: str, from_bang: bool = False):
        self.name = name
        self.category = category
        self.from_bang = from_bang

    def __repr__(self):
        return "EngineRef({!r}, {!r}, {!r})".format(self.name, self.category, self.from_bang)

    def __eq__(self, other):
        return self.name == other.name and self.category == other.category and self.from_bang == other.from_bang
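# A short usage sketch (engine name is illustrative): an EngineRef only
# records which engine was selected and from which category, so equal refs
# compare equal and can be deduplicated.
#
#   EngineRef('wikipedia', 'general')                   # chosen from the UI
#   EngineRef('wikipedia', 'general', from_bang=True)   # chosen via a !bang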
class SearchQuery:
    """container for all the search parameters (query, language, etc...)"""

    __slots__ = 'query', 'engineref_list', 'categories', 'lang', 'safesearch', 'pageno', 'time_range',\
                'timeout_limit', 'external_bang'

    def __init__(self,
                 query: str,
                 engineref_list: typing.List[EngineRef],
                 categories: typing.List[str],
                 lang: str,
                 safesearch: int,
                 pageno: int,
                 time_range: typing.Optional[str],
                 timeout_limit: typing.Optional[float] = None,
                 external_bang: typing.Optional[str] = None):
        self.query = query
        self.engineref_list = engineref_list
        self.categories = categories
        self.lang = lang
        self.safesearch = safesearch
        self.pageno = pageno
        self.time_range = time_range
        self.timeout_limit = timeout_limit
        self.external_bang = external_bang

    def __repr__(self):
        return "SearchQuery({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r})".\
               format(self.query, self.engineref_list, self.categories, self.lang, self.safesearch,
                      self.pageno, self.time_range, self.timeout_limit, self.external_bang)

    def __eq__(self, other):
        return self.query == other.query\
            and self.engineref_list == other.engineref_list\
            and self.categories == other.categories\
            and self.lang == other.lang\
            and self.safesearch == other.safesearch\
            and self.pageno == other.pageno\
            and self.time_range == other.time_range\
            and self.timeout_limit == other.timeout_limit\
            and self.external_bang == other.external_bang
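# A minimal construction sketch (all values illustrative): the first seven
# arguments are positional; timeout_limit and external_bang default to None.
#
#   sq = SearchQuery('searx', [EngineRef('wikipedia', 'general')],
#                    ['general'], 'en-US', 0, 1, None)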
def send_http_request(engine, request_params):
    # create a dictionary which contains all
    # information about the request
    request_args = dict(
        headers=request_params['headers'],
        cookies=request_params['cookies'],
        verify=request_params['verify'],
        auth=request_params['auth']
    )

    # setting engine based proxies
    if hasattr(engine, 'proxies'):
        request_args['proxies'] = requests_lib.get_proxies(engine.proxies)

    # specific type of request (GET or POST)
    if request_params['method'] == 'GET':
        req = requests_lib.get
    else:
        req = requests_lib.post
        request_args['data'] = request_params['data']

    # send the request
    return req(request_params['url'], **request_args)
def search_one_http_request(engine, query, request_params):
    # update request parameters dependent on
    # search-engine (contained in engines folder)
    engine.request(query, request_params)

    # ignoring empty urls
    if request_params['url'] is None:
        return None

    if not request_params['url']:
        return None

    # send request
    response = send_http_request(engine, request_params)

    # parse the response
    response.search_params = request_params
    return engine.response(response)
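# A minimal engine-module sketch, assuming the request/response contract
# used above: request() mutates the params dict (at minimum 'url'),
# response() turns the HTTP response into a list of result dicts. The module
# name, URL and result fields are illustrative, not a real searx engine.
#
#   # engines/example.py
#   def request(query, params):
#       params['url'] = 'https://example.org/search?q=' + query
#       return params
#
#   def response(resp):
#       # parse resp.text here; resp.search_params carries the request params
#       return [{'url': 'https://example.org/hit', 'title': 'Example',
#                'content': 'an illustrative result'}]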
def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    # set timeout for all HTTP requests
    requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
    # reset the HTTP total time
    requests_lib.reset_time_for_thread()

    engine = engines[engine_name]

    # suppose everything will be alright
    requests_exception = False

    try:
        # send requests and parse the results
        search_results = search_one_http_request(engine, query, request_params)

        # check if the engine accepted the request
        if search_results is not None:
            # yes, so add results
            result_container.extend(engine_name, search_results)

            # update engine time when there is no exception
            engine_time = time() - start_time
            page_load_time = requests_lib.get_time_for_thread()
            result_container.add_timing(engine_name, engine_time, page_load_time)
            with engine_stats_lock:
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1
                # update stats with the total HTTP time
                engine.stats['page_load_time'] += page_load_time
                engine.stats['page_load_count'] += 1
    except Exception as e:
        # Timing
        engine_time = time() - start_time
        page_load_time = requests_lib.get_time_for_thread()
        result_container.add_timing(engine_name, engine_time, page_load_time)

        # Record the errors
        with engine_stats_lock:
            engine.stats['errors'] += 1

        if issubclass(e.__class__, requests.exceptions.Timeout):
            result_container.add_unresponsive_engine(engine_name, 'timeout')
            # requests timeout (connect or read)
            logger.error("engine {0} : HTTP requests timeout "
                         "(search duration : {1} s, timeout: {2} s) : {3}"
                         .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
            requests_exception = True
        elif issubclass(e.__class__, requests.exceptions.RequestException):
            result_container.add_unresponsive_engine(engine_name, 'request exception')
            # other requests exception
            logger.exception("engine {0} : requests exception "
                             "(search duration : {1} s, timeout: {2} s) : {3}"
                             .format(engine_name, engine_time, timeout_limit, e))
            requests_exception = True
        else:
            result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
            # other errors
            logger.exception('engine {0} : exception : {1}'.format(engine_name, e))

    # suspend or not the engine if there are HTTP errors
    with engine_stats_lock:
        if requests_exception:
            # update continuous_errors / suspend_end_time
            engine.continuous_errors += 1
            engine.suspend_end_time = time() + min(settings['search']['max_ban_time_on_fail'],
                                                   engine.continuous_errors * settings['search']['ban_time_on_fail'])
        else:
            # no HTTP error (perhaps an engine error)
            # anyway, reset the suspend variables
            engine.continuous_errors = 0
            engine.suspend_end_time = 0
def record_offline_engine_stats_on_error(engine, result_container, start_time):
    engine_time = time() - start_time
    result_container.add_timing(engine.name, engine_time, engine_time)

    with engine_stats_lock:
        engine.stats['errors'] += 1
def search_one_offline_request(engine, query, request_params):
    return engine.search(query, request_params)
def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    engine = engines[engine_name]

    try:
        search_results = search_one_offline_request(engine, query, request_params)

        if search_results:
            result_container.extend(engine_name, search_results)

            engine_time = time() - start_time
            result_container.add_timing(engine_name, engine_time, engine_time)
            with engine_stats_lock:
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1
    except ValueError as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
    except Exception as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
        logger.exception('engine {0} : exception : {1}'.format(engine_name, e))
def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    if engines[engine_name].offline:
        return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)  # noqa
    return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
def search_multiple_requests(requests, result_container, start_time, timeout_limit):
    search_id = str(uuid4())

    for engine_name, query, request_params in requests:
        th = threading.Thread(
            target=search_one_request_safe,
            args=(engine_name, query, request_params, result_container, start_time, timeout_limit),
            name=search_id,
        )
        th._engine_name = engine_name
        th.start()

    for th in threading.enumerate():
        if th.name == search_id:
            remaining_time = max(0.0, timeout_limit - (time() - start_time))
            th.join(remaining_time)
            if th.is_alive():
                result_container.add_unresponsive_engine(th._engine_name, 'timeout')
                logger.warning('engine timeout: {0}'.format(th._engine_name))
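# Worked example of the join loop above (numbers illustrative): with
# timeout_limit=3.0 and three engine threads, each join() only waits for
# whatever is left of the shared deadline. If the first thread finishes
# after 2.5 s, the second join gets at most 0.5 s and the third possibly
# 0.0 s, so the total wait never exceeds timeout_limit regardless of how
# many engines were queried.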
# get default request parameters
def default_request_params():
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True,
        'auth': None
    }
class Search:
    """Search information container"""

    __slots__ = "search_query", "result_container", "start_time", "actual_timeout"

    def __init__(self, search_query):
        # init vars
        super().__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
        self.start_time = None
        self.actual_timeout = None

    def search_external_bang(self):
        """
        Check if there is an external bang.
        If yes, update self.result_container and return True
        """
        if self.search_query.external_bang:
            self.result_container.redirect_url = get_bang_url(self.search_query)

            # This means there was a valid bang and the
            # rest of the search does not need to be continued
            if isinstance(self.result_container.redirect_url, str):
                return True
        return False
    def search_answerers(self):
        """
        Check if an answerer returns a result.
        If yes, update self.result_container and return True
        """
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return True
        return False
    def _is_accepted(self, engine_name, engine):
        # skip suspended engines
        if engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', engine_name)
            return False

        # if paging is not supported, skip
        if self.search_query.pageno > 1 and not engine.paging:
            return False

        # if time_range is not supported, skip
        if self.search_query.time_range and not engine.time_range_support:
            return False

        return True
    def _get_params(self, engineref, user_agent):
        if engineref.name not in engines:
            return None, None

        engine = engines[engineref.name]

        if not self._is_accepted(engineref.name, engine):
            return None, None

        # set default request parameters
        request_params = {}
        if not engine.offline:
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = self.search_query.lang

            request_params['safesearch'] = self.search_query.safesearch
            request_params['time_range'] = self.search_query.time_range

        request_params['category'] = engineref.category
        request_params['pageno'] = self.search_query.pageno

        return request_params, engine.timeout
    # do search-request
    def _get_requests(self):
        # init vars
        requests = []

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        # max of all selected engine timeout
        default_timeout = 0

        # start search-request for all selected engines
        for engineref in self.search_query.engineref_list:
            # set default request parameters
            request_params, engine_timeout = self._get_params(engineref, user_agent)
            if request_params is None:
                continue

            # append request to list
            requests.append((engineref.name, self.search_query.query, request_params))

            # update default_timeout
            default_timeout = max(default_timeout, engine_timeout)

        # adjust timeout
        actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: From user query except if above default
            actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: Default except if above max
            actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: From user query except if above max
            actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                     .format(actual_timeout, default_timeout, query_timeout, max_request_timeout))

        return requests, actual_timeout
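    # Worked example of the timeout arbitration above (numbers illustrative):
    # with engine timeouts of 2.0 and 3.0 s, default_timeout is 3.0, then:
    #   - no max, no user limit             -> actual_timeout = 3.0
    #   - user limit 1.5, no max            -> min(3.0, 1.5)  = 1.5
    #   - max_request_timeout 2.5, no user  -> min(3.0, 2.5)  = 2.5
    #   - user limit 10.0, max 2.5          -> min(10.0, 2.5) = 2.5
    # i.e. a user can shorten a search but never exceed the configured cap.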
    def search_standard(self):
        """
        Update self.result_container, self.actual_timeout
        """
        requests, self.actual_timeout = self._get_requests()

        # send all search-request
        if requests:
            search_multiple_requests(requests, self.result_container, self.start_time, self.actual_timeout)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return True
    # do search-request
    def search(self):
        self.start_time = time()

        if not self.search_external_bang():
            if not self.search_answerers():
                self.search_standard()

        return self.result_container
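# A minimal end-to-end sketch (values illustrative): build a SearchQuery,
# wrap it in a Search, and read results from the returned ResultContainer.
#
#   search = Search(SearchQuery('searx', [EngineRef('wikipedia', 'general')],
#                               ['general'], 'en-US', 0, 1, None))
#   container = search.search()
#   results = container.get_ordered_results()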
class SearchWithPlugins(Search):
    """Similar to the Search class but also calls the plugins."""

    __slots__ = 'ordered_plugin_list', 'request'

    def __init__(self, search_query, ordered_plugin_list, request):
        super().__init__(search_query)
        self.ordered_plugin_list = ordered_plugin_list
        self.request = request

    def search(self):
        if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
            super().search()

        plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)

        results = self.result_container.get_ordered_results()

        for result in results:
            plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)

        return self.result_container
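# A minimal plugin sketch, assuming the hook contract implied by the
# plugins.call() dispatch above (each hook is looked up by name on the
# plugin and receives the request, the search, and for on_result the
# individual result). Module name and metadata are hypothetical.
#
#   # plugins/log_results.py
#   name = 'Log results'
#   description = 'Log the url of every result'
#   default_on = False
#
#   def pre_search(request, search):
#       return True      # returning a falsy value would veto the search
#
#   def post_search(request, search):
#       return True
#
#   def on_result(request, search, result):
#       logger.debug('result url: %s', result.get('url'))
#       return True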