search.py

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

import gc
import threading
from time import time
from uuid import uuid4
from _thread import start_new_thread

import requests.exceptions

import searx.poolrequests as requests_lib
from searx.engines import engines, settings
from searx.answerers import ask
from searx.external_bang import get_bang_url
from searx.utils import gen_useragent
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins


logger = logger.getChild('search')
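

# The optional outgoing.max_request_timeout setting (usually settings.yml)
# caps the per-query timeout; if present it must be a float number of seconds.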
max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout')
if max_request_timeout is None:
    logger.info('max_request_timeout={0}'.format(max_request_timeout))
else:
    if isinstance(max_request_timeout, float):
        logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
    else:
        logger.critical('outgoing.max_request_timeout if defined has to be float')
        from sys import exit
        exit(1)
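

# An EngineRef identifies one selected engine by name and by the category it
# was selected from; from_bang marks an engine chosen through a !bang in the
# query rather than through the user's category selection.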
class EngineRef:
    def __init__(self, name, category, from_bang=False):
        self.name = name
        self.category = category
        self.from_bang = from_bang

    def __str__(self):
        return "(" + self.name + "," + self.category + "," + str(self.from_bang) + ")"


class SearchQuery:
    """container for all the search parameters (query, language, etc...)"""

    def __init__(self, query, engineref_list, categories, lang, safesearch, pageno, time_range,
                 timeout_limit=None, preferences=None, external_bang=None):
        self.query = query
        self.engineref_list = engineref_list
        self.categories = categories
        self.lang = lang
        self.safesearch = safesearch
        self.pageno = pageno
        self.time_range = None if time_range in ('', 'None', None) else time_range
        self.timeout_limit = timeout_limit
        self.preferences = preferences
        self.external_bang = external_bang

    def __str__(self):
        return self.query + ";" + str(self.engineref_list)
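

# A minimal sketch of how a query is represented (illustrative values; in
# searx the webapp builds the SearchQuery from the HTTP request and the user's
# preferences):
#
#     sq = SearchQuery('paris', [EngineRef('wikipedia', 'general')],
#                      ['general'], 'en-US', 0, 1, None,
#                      preferences=user_preferences)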


def send_http_request(engine, request_params):
    # create a dictionary with all the information about the request
    request_args = dict(
        headers=request_params['headers'],
        cookies=request_params['cookies'],
        verify=request_params['verify']
    )

    # set engine-specific proxies
    if hasattr(engine, 'proxies'):
        request_args['proxies'] = engine.proxies

    # specific type of request (GET or POST)
    if request_params['method'] == 'GET':
        req = requests_lib.get
    else:
        req = requests_lib.post
        request_args['data'] = request_params['data']

    # send the request
    return req(request_params['url'], **request_args)


def search_one_http_request(engine, query, request_params):
    # update request parameters dependent on
    # search-engine (contained in engines folder)
    engine.request(query, request_params)

    # ignoring empty urls
    if request_params['url'] is None:
        return None

    if not request_params['url']:
        return None

    # send request
    response = send_http_request(engine, request_params)

    # parse the response
    response.search_params = request_params
    return engine.response(response)
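

# The *_safe wrapper below never lets an engine exception escape: it records
# timing and error statistics, reports unresponsive engines to the result
# container, and temporarily suspends an engine after repeated HTTP failures
# (see ban_time_on_fail / max_ban_time_on_fail in the search settings).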
def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    # set timeout for all HTTP requests
    requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)

    # reset the HTTP total time
    requests_lib.reset_time_for_thread()

    engine = engines[engine_name]

    # suppose everything will be alright
    requests_exception = False

    try:
        # send requests and parse the results
        search_results = search_one_http_request(engine, query, request_params)

        # check if the engine accepted the request
        if search_results is not None:
            # yes, so add results
            result_container.extend(engine_name, search_results)

            # update engine time when there is no exception
            engine_time = time() - start_time
            page_load_time = requests_lib.get_time_for_thread()
            result_container.add_timing(engine_name, engine_time, page_load_time)
            with threading.RLock():
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1
                # update stats with the total HTTP time
                engine.stats['page_load_time'] += page_load_time
                engine.stats['page_load_count'] += 1

    except Exception as e:
        # timing
        engine_time = time() - start_time
        page_load_time = requests_lib.get_time_for_thread()
        result_container.add_timing(engine_name, engine_time, page_load_time)

        # record the error
        with threading.RLock():
            engine.stats['errors'] += 1

        if issubclass(e.__class__, requests.exceptions.Timeout):
            result_container.add_unresponsive_engine(engine_name, 'timeout')
            # requests timeout (connect or read)
            logger.error("engine {0} : HTTP requests timeout "
                         "(search duration : {1} s, timeout: {2} s) : {3}"
                         .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
            requests_exception = True
        elif issubclass(e.__class__, requests.exceptions.RequestException):
            result_container.add_unresponsive_engine(engine_name, 'request exception')
            # other requests exception
            logger.exception("engine {0} : requests exception "
                             "(search duration : {1} s, timeout: {2} s) : {3}"
                             .format(engine_name, engine_time, timeout_limit, e))
            requests_exception = True
        else:
            result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
            # other errors
            logger.exception('engine {0} : exception : {1}'.format(engine_name, e))

    # suspend the engine or not, depending on whether there were HTTP errors
    with threading.RLock():
        if requests_exception:
            # update continuous_errors / suspend_end_time
            engine.continuous_errors += 1
            engine.suspend_end_time = time() + min(settings['search']['max_ban_time_on_fail'],
                                                   engine.continuous_errors * settings['search']['ban_time_on_fail'])
        else:
            # no HTTP error (perhaps an engine error)
            # anyway, reset the suspend variables
            engine.continuous_errors = 0
            engine.suspend_end_time = 0
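

# Offline engines (engines that do not issue outgoing HTTP requests) implement
# search() directly instead of the request()/response() pair used above, so
# they get their own, simpler error handling.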
def record_offline_engine_stats_on_error(engine, result_container, start_time):
    engine_time = time() - start_time
    result_container.add_timing(engine.name, engine_time, engine_time)

    with threading.RLock():
        engine.stats['errors'] += 1


def search_one_offline_request(engine, query, request_params):
    return engine.search(query, request_params)


def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    engine = engines[engine_name]

    try:
        search_results = search_one_offline_request(engine, query, request_params)

        if search_results:
            result_container.extend(engine_name, search_results)

            engine_time = time() - start_time
            result_container.add_timing(engine_name, engine_time, engine_time)
            with threading.RLock():
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1

    except ValueError as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
    except Exception as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
        logger.exception('engine {0} : exception : {1}'.format(engine_name, e))


def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    if engines[engine_name].offline:
        return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)  # noqa
    return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
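

# Each engine request runs in its own thread; all the threads of one query
# share a freshly generated uuid as their name. The caller then joins each of
# those threads with whatever time remains of the overall timeout and flags
# the ones that are still alive as timed out.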
def search_multiple_requests(requests, result_container, start_time, timeout_limit):
    search_id = str(uuid4())

    for engine_name, query, request_params in requests:
        th = threading.Thread(
            target=search_one_request_safe,
            args=(engine_name, query, request_params, result_container, start_time, timeout_limit),
            name=search_id,
        )
        th._engine_name = engine_name
        th.start()

    for th in threading.enumerate():
        if th.name == search_id:
            remaining_time = max(0.0, timeout_limit - (time() - start_time))
            th.join(remaining_time)
            if th.is_alive():
                result_container.add_unresponsive_engine(th._engine_name, 'timeout')
                logger.warning('engine timeout: {0}'.format(th._engine_name))


# get default request parameters
def default_request_params():
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True
    }
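

# Overall flow (see Search.search below): an external !bang redirect is
# checked first, then the local answerers, and only then are the selected
# engines queried. A minimal usage sketch (names are illustrative; in searx
# the webapp builds search_query and the plugin list from the HTTP request and
# the user's preferences):
#
#     search = SearchWithPlugins(search_query, ordered_plugin_list, flask_request)
#     result_container = search.search()
#     results = result_container.get_ordered_results()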
class Search:
    """Search information container"""

    def __init__(self, search_query):
        # init vars
        super().__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
        self.start_time = None
        self.actual_timeout = None

    def search_external_bang(self):
        """
        Check if there is an external bang.
        If yes, update self.result_container and return True
        """
        if self.search_query.external_bang:
            self.result_container.redirect_url = get_bang_url(self.search_query)

            # This means there was a valid bang and the
            # rest of the search does not need to be continued
            if isinstance(self.result_container.redirect_url, str):
                return True
        return False

    def search_answerers(self):
        """
        Check if an answerer returns a result.
        If yes, update self.result_container and return True
        """
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return True
        return False

    def _is_accepted(self, engine_name, engine):
        if not self.search_query.preferences.validate_token(engine):
            return False

        # skip suspended engines
        if engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', engine_name)
            return False

        # if paging is not supported, skip
        if self.search_query.pageno > 1 and not engine.paging:
            return False

        # if time_range is not supported, skip
        if self.search_query.time_range and not engine.time_range_support:
            return False

        return True

    def _get_params(self, engineref, user_agent):
        if engineref.name not in engines:
            return None, None

        engine = engines[engineref.name]

        if not self._is_accepted(engineref.name, engine):
            return None, None

        # set default request parameters
        request_params = {}
        if not engine.offline:
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = self.search_query.lang

            request_params['safesearch'] = self.search_query.safesearch
            request_params['time_range'] = self.search_query.time_range

        request_params['category'] = engineref.category
        request_params['pageno'] = self.search_query.pageno

        return request_params, engine.timeout
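
    # For an online engine the returned request_params therefore look roughly
    # like (values illustrative):
    #     {'method': 'GET', 'headers': {'User-Agent': ...}, 'data': {},
    #      'url': '', 'cookies': {}, 'verify': True, 'language': 'en-US',
    #      'safesearch': 0, 'time_range': None, 'category': 'general',
    #      'pageno': 1}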

    # do search-request
    def _get_requests(self):
        # init vars
        requests = []

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        # max of all selected engine timeouts
        default_timeout = 0

        # start search-request for all selected engines
        for engineref in self.search_query.engineref_list:
            # set default request parameters
            request_params, engine_timeout = self._get_params(engineref, user_agent)
            if request_params is None:
                continue

            # append request to list
            requests.append((engineref.name, self.search_query.query, request_params))

            # update default_timeout
            default_timeout = max(default_timeout, engine_timeout)

        # adjust timeout
        actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: From user query except if above default
            actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: Default except if above max
            actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: From user query except if above max
            actual_timeout = min(query_timeout, max_request_timeout)
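
        # Worked example (illustrative numbers): with engine timeouts of 3.0 s
        # and 5.0 s, default_timeout is 5.0 s; a user timeout_limit of 2.0 s
        # lowers actual_timeout to 2.0 s, while a timeout_limit of 8.0 s is
        # capped at default_timeout (5.0 s) when no max_request_timeout is
        # configured, or at max_request_timeout when one is.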

        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                     .format(actual_timeout, default_timeout, query_timeout, max_request_timeout))

        return requests, actual_timeout

    def search_standard(self):
        """
        Update self.result_container, self.actual_timeout
        """
        requests, self.actual_timeout = self._get_requests()

        # send all search-requests
        if requests:
            search_multiple_requests(requests, self.result_container, self.start_time, self.actual_timeout)
            start_new_thread(gc.collect, tuple())

        # the result_container now holds results, suggestions, answers and infoboxes
        return True

    # do search-request
    def search(self):
        self.start_time = time()

        if not self.search_external_bang():
            if not self.search_answerers():
                self.search_standard()

        return self.result_container
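

# Plugin hooks: 'pre_search' runs first and the engines are only queried when
# it returns a truthy value, 'post_search' runs once the search has finished,
# and 'on_result' is called once per ordered result.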
class SearchWithPlugins(Search):
    """Similar to the Search class but calls the plugins."""

    def __init__(self, search_query, ordered_plugin_list, request):
        super().__init__(search_query)
        self.ordered_plugin_list = ordered_plugin_list
        self.request = request

    def search(self):
        if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
            super().search()

            plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)

            results = self.result_container.get_ordered_results()

            for result in results:
                plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)

        return self.result_container