search.py

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
import gc
import sys
import threading
from time import time
from uuid import uuid4
from _thread import start_new_thread

from flask_babel import gettext
import requests.exceptions

import searx.poolrequests as requests_lib
from searx.engines import (
    categories, engines, settings
)
from searx.answerers import ask
from searx.external_bang import get_bang_url
from searx.utils import gen_useragent
from searx.query import RawTextQuery, SearchQuery, VALID_LANGUAGE_CODE
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins
from searx.exceptions import SearxParameterException


logger = logger.getChild('search')

max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout', None)
if max_request_timeout is None:
    logger.info('max_request_timeout={0}'.format(max_request_timeout))
else:
    if isinstance(max_request_timeout, float):
        logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
    else:
        logger.critical('outgoing.max_request_timeout if defined has to be float')
        sys.exit(1)
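
# NOTE: max_request_timeout comes from the instance settings. Assuming the usual
# settings.yml layout, it would be configured roughly like this (values are
# illustrative only):
#
#   outgoing:
#       max_request_timeout: 10.0   # hard upper bound, in seconds, for a whole search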


def send_http_request(engine, request_params):
    # create a dictionary that contains all
    # information about the request
    request_args = dict(
        headers=request_params['headers'],
        cookies=request_params['cookies'],
        verify=request_params['verify']
    )

    # setting engine based proxies
    if hasattr(engine, 'proxies'):
        request_args['proxies'] = engine.proxies

    # specific type of request (GET or POST)
    if request_params['method'] == 'GET':
        req = requests_lib.get
    else:
        req = requests_lib.post
        request_args['data'] = request_params['data']

    # send the request
    return req(request_params['url'], **request_args)


def search_one_http_request(engine, query, request_params):
    # update request parameters dependent on
    # search-engine (contained in engines folder)
    engine.request(query, request_params)

    # ignoring empty urls
    if request_params['url'] is None:
        return None

    if not request_params['url']:
        return None

    # send request
    response = send_http_request(engine, request_params)

    # parse the response
    response.search_params = request_params
    return engine.response(response)
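
# Each engine module in searx/engines is expected to provide the two hooks used
# above: request(query, params) fills in params['url'] (and related fields) and
# response(resp) turns the HTTP response into a list of result dicts. A minimal
# sketch of that contract, for a hypothetical engine module, looks like:
#
#   def request(query, params):
#       params['url'] = 'https://example.org/search?q=' + query
#       return params
#
#   def response(resp):
#       return [{'url': 'https://example.org/hit', 'title': 'hit', 'content': ''}]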


def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    # set timeout for all HTTP requests
    requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)

    # reset the HTTP total time
    requests_lib.reset_time_for_thread()

    engine = engines[engine_name]

    # suppose everything will be alright
    requests_exception = False

    try:
        # send requests and parse the results
        search_results = search_one_http_request(engine, query, request_params)

        # check if the engine accepted the request
        if search_results is not None:
            # yes, so add results
            result_container.extend(engine_name, search_results)

            # update engine time when there is no exception
            engine_time = time() - start_time
            page_load_time = requests_lib.get_time_for_thread()
            result_container.add_timing(engine_name, engine_time, page_load_time)
            with threading.RLock():
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1
                # update stats with the total HTTP time
                engine.stats['page_load_time'] += page_load_time
                engine.stats['page_load_count'] += 1
    except Exception as e:
        # timing
        engine_time = time() - start_time
        page_load_time = requests_lib.get_time_for_thread()
        result_container.add_timing(engine_name, engine_time, page_load_time)

        # record the errors
        with threading.RLock():
            engine.stats['errors'] += 1

        if issubclass(e.__class__, requests.exceptions.Timeout):
            result_container.add_unresponsive_engine(engine_name, 'timeout')
            # requests timeout (connect or read)
            logger.error("engine {0} : HTTP requests timeout "
                         "(search duration : {1} s, timeout: {2} s) : {3}"
                         .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
            requests_exception = True
        elif issubclass(e.__class__, requests.exceptions.RequestException):
            result_container.add_unresponsive_engine(engine_name, 'request exception')
            # other requests exception
            logger.exception("engine {0} : requests exception "
                             "(search duration : {1} s, timeout: {2} s) : {3}"
                             .format(engine_name, engine_time, timeout_limit, e))
            requests_exception = True
        else:
            result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
            # other errors
            logger.exception('engine {0} : exception : {1}'.format(engine_name, e))

    # suspend or not the engine if there are HTTP errors
    with threading.RLock():
        if requests_exception:
            # update continuous_errors / suspend_end_time
            engine.continuous_errors += 1
            engine.suspend_end_time = time() + min(settings['search']['max_ban_time_on_fail'],
                                                   engine.continuous_errors * settings['search']['ban_time_on_fail'])
        else:
            # no HTTP error (perhaps an engine error)
            # anyway, reset the suspend variables
            engine.continuous_errors = 0
            engine.suspend_end_time = 0
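
# Suspension grows with consecutive HTTP failures. Assuming the common defaults
# ban_time_on_fail = 5 and max_ban_time_on_fail = 120 (seconds, in the 'search'
# section of settings.yml), the third consecutive error would suspend the engine
# for min(120, 3 * 5) = 15 seconds from now.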


def record_offline_engine_stats_on_error(engine, result_container, start_time):
    engine_time = time() - start_time
    result_container.add_timing(engine.name, engine_time, engine_time)

    with threading.RLock():
        engine.stats['errors'] += 1


def search_one_offline_request(engine, query, request_params):
    return engine.search(query, request_params)


def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    engine = engines[engine_name]

    try:
        search_results = search_one_offline_request(engine, query, request_params)

        if search_results:
            result_container.extend(engine_name, search_results)

            engine_time = time() - start_time
            result_container.add_timing(engine_name, engine_time, engine_time)
            with threading.RLock():
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1

    except ValueError as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
    except Exception as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
        logger.exception('engine {0} : exception : {1}'.format(engine_name, e))


def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    if engines[engine_name].offline:
        return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)  # noqa
    return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)


def search_multiple_requests(requests, result_container, start_time, timeout_limit):
    search_id = uuid4().__str__()

    for engine_name, query, request_params in requests:
        th = threading.Thread(
            target=search_one_request_safe,
            args=(engine_name, query, request_params, result_container, start_time, timeout_limit),
            name=search_id,
        )
        th._engine_name = engine_name
        th.start()

    for th in threading.enumerate():
        if th.name == search_id:
            remaining_time = max(0.0, timeout_limit - (time() - start_time))
            th.join(remaining_time)
            if th.is_alive():
                result_container.add_unresponsive_engine(th._engine_name, 'timeout')
                logger.warning('engine timeout: {0}'.format(th._engine_name))
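
# All engine threads share one deadline (start_time + timeout_limit) rather than
# each getting timeout_limit on its own: every join() recomputes the time left
# before that deadline. For example, with timeout_limit = 3.0 s, a thread joined
# 2.4 s after start_time may only block for the remaining 0.6 s.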


# get default request parameters
def default_request_params():
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True
    }


# remove duplicate queries.
# FIXME: does not fix "!music !soundcloud", because the categories are 'none' and 'music'
def deduplicate_query_engines(query_engines):
    uniq_query_engines = {q["category"] + '|' + q["name"]: q for q in query_engines}
    return uniq_query_engines.values()
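
# Deduplication keys on "category|name", so selecting the same engine twice in the
# same category collapses to one entry. For example (illustrative input):
#
#   deduplicate_query_engines([
#       {'category': 'general', 'name': 'wikipedia'},
#       {'category': 'general', 'name': 'wikipedia'},
#       {'category': 'music', 'name': 'soundcloud'},
#   ])
#
# keeps one 'general|wikipedia' entry and the 'music|soundcloud' entry.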


def get_search_query_from_webapp(preferences, form):
    # no text for the query ?
    if not form.get('q'):
        raise SearxParameterException('q', '')

    # set blocked engines
    disabled_engines = preferences.engines.get_disabled()

    # parse query, if tags are set, which change
    # the search engine or search language
    raw_text_query = RawTextQuery(form['q'], disabled_engines)

    # set query
    query = raw_text_query.getQuery()

    # get and check page number
    pageno_param = form.get('pageno', '1')
    if not pageno_param.isdigit() or int(pageno_param) < 1:
        raise SearxParameterException('pageno', pageno_param)
    query_pageno = int(pageno_param)

    # get language
    # set specific language if set on request, query or preferences
    # TODO support search with multiple languages
    if len(raw_text_query.languages):
        query_lang = raw_text_query.languages[-1]
    elif 'language' in form:
        query_lang = form.get('language')
    else:
        query_lang = preferences.get_value('language')

    # check language
    if not VALID_LANGUAGE_CODE.match(query_lang):
        raise SearxParameterException('language', query_lang)

    # get safesearch
    if 'safesearch' in form:
        query_safesearch = form.get('safesearch')
        # first check safesearch
        if not query_safesearch.isdigit():
            raise SearxParameterException('safesearch', query_safesearch)
        query_safesearch = int(query_safesearch)
    else:
        query_safesearch = preferences.get_value('safesearch')

    # safesearch : second check
    if query_safesearch < 0 or query_safesearch > 2:
        raise SearxParameterException('safesearch', query_safesearch)

    # get time_range
    query_time_range = form.get('time_range')
    # check time_range
    if query_time_range not in ('None', None, '', 'day', 'week', 'month', 'year'):
        raise SearxParameterException('time_range', query_time_range)

    # query_engines
    query_engines = raw_text_query.engines

    # timeout_limit
    query_timeout = raw_text_query.timeout_limit
    if query_timeout is None and 'timeout_limit' in form:
        raw_time_limit = form.get('timeout_limit')
        if raw_time_limit in ['None', '']:
            raw_time_limit = None
        else:
            try:
                query_timeout = float(raw_time_limit)
            except ValueError:
                raise SearxParameterException('timeout_limit', raw_time_limit)

    # query_categories
    query_categories = []

    # if engines are calculated from query,
    # set categories by using that information
    if query_engines and raw_text_query.specific:
        additional_categories = set()
        for engine in query_engines:
            if 'from_bang' in engine and engine['from_bang']:
                additional_categories.add('none')
            else:
                additional_categories.add(engine['category'])
        query_categories = list(additional_categories)

    # otherwise, use the selected categories to
    # calculate which engines should be used
    else:
        # set categories/engines
        load_default_categories = True
        for pd_name, pd in form.items():
            if pd_name == 'categories':
                query_categories.extend(categ for categ in map(str.strip, pd.split(',')) if categ in categories)
            elif pd_name == 'engines':
                pd_engines = [{'category': engines[engine].categories[0],
                               'name': engine}
                              for engine in map(str.strip, pd.split(',')) if engine in engines]
                if pd_engines:
                    query_engines.extend(pd_engines)
                    load_default_categories = False
            elif pd_name.startswith('category_'):
                category = pd_name[9:]

                # if category is not found in list, skip
                if category not in categories:
                    continue

                if pd != 'off':
                    # add category to list
                    query_categories.append(category)
                elif category in query_categories:
                    # remove category from list if property is set to 'off'
                    query_categories.remove(category)

        if not load_default_categories:
            if not query_categories:
                query_categories = list(set(engine['category']
                                            for engine in query_engines))
        else:
            # if no category is specified for this search,
            # use the user-defined default configuration
            # (which is stored in a cookie)
            if not query_categories:
                cookie_categories = preferences.get_value('categories')
                for ccateg in cookie_categories:
                    if ccateg in categories:
                        query_categories.append(ccateg)

            # if still no category is specified, use general
            # as the default category
            if not query_categories:
                query_categories = ['general']

            # use all engines for this search that are
            # declared under the specified categories
            for categ in query_categories:
                query_engines.extend({'category': categ,
                                      'name': engine.name}
                                     for engine in categories[categ]
                                     if (engine.name, categ) not in disabled_engines)

    query_engines = deduplicate_query_engines(query_engines)
    external_bang = raw_text_query.external_bang

    return (SearchQuery(query, query_engines, query_categories,
                        query_lang, query_safesearch, query_pageno,
                        query_time_range, query_timeout, preferences,
                        external_bang=external_bang),
            raw_text_query)
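
# Rough sketch of the inputs this parser expects: a Flask/Werkzeug form mapping
# such as (illustrative values)
#
#   {'q': '!wikipedia searx', 'pageno': '1', 'language': 'en-US',
#    'safesearch': '1', 'time_range': '', 'category_general': 'on'}
#
# together with the user's saved preferences; it returns the SearchQuery consumed
# by the Search class below plus the parsed RawTextQuery.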


class Search:
    """Search information container"""

    def __init__(self, search_query):
        # init vars
        super().__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
        self.start_time = None
        self.actual_timeout = None

    def search_external_bang(self):
        """
        Check if there is an external bang.
        If yes, update self.result_container and return True
        """
        if self.search_query.external_bang:
            self.result_container.redirect_url = get_bang_url(self.search_query)

            # This means there was a valid bang and the
            # rest of the search does not need to be continued
            if isinstance(self.result_container.redirect_url, str):
                return True
        return False

    def search_answerers(self):
        """
        Check if an answerer returns a result.
        If yes, update self.result_container and return True
        """
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return True
        return False

    def _is_accepted(self, engine_name, engine):
        if not self.search_query.preferences.validate_token(engine):
            return False

        # skip suspended engines
        if engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', engine_name)
            return False

        # if paging is not supported, skip
        if self.search_query.pageno > 1 and not engine.paging:
            return False

        # if time_range is not supported, skip
        if self.search_query.time_range and not engine.time_range_support:
            return False

        return True

    def _get_params(self, selected_engine, user_agent):
        if selected_engine['name'] not in engines:
            return None, None

        engine = engines[selected_engine['name']]

        if not self._is_accepted(selected_engine['name'], engine):
            return None, None

        # set default request parameters
        request_params = {}
        if not engine.offline:
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = self.search_query.lang

            request_params['safesearch'] = self.search_query.safesearch
            request_params['time_range'] = self.search_query.time_range

        request_params['category'] = selected_engine['category']
        request_params['pageno'] = self.search_query.pageno

        return request_params, engine.timeout

    # do search-request
    def _get_requests(self):
        global number_of_searches

        # init vars
        requests = []

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        # max of all selected engine timeout
        default_timeout = 0

        # start search request for all selected engines
        for selected_engine in self.search_query.engines:
            # set default request parameters
            request_params, engine_timeout = self._get_params(selected_engine, user_agent)
            if request_params is None:
                continue

            # append request to list
            requests.append((selected_engine['name'], self.search_query.query, request_params))

            # update default_timeout
            default_timeout = max(default_timeout, engine_timeout)

        # adjust timeout
        actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: From user query except if above default
            actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: Default except if above max
            actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: From user query except if above max
            actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                     .format(actual_timeout, default_timeout, query_timeout, max_request_timeout))

        return requests, actual_timeout
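
    # Timeout selection example (illustrative numbers): if the slowest selected
    # engine declares a 3.0 s timeout, the user asks for 5.0 s via timeout_limit
    # and the instance caps requests at max_request_timeout = 10.0 s, the actual
    # timeout becomes min(5.0, 10.0) = 5.0 s; without a user value it would be
    # min(3.0, 10.0) = 3.0 s.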

    def search_standard(self):
        """
        Update self.result_container, self.actual_timeout
        """
        requests, self.actual_timeout = self._get_requests()

        # send all search-request
        if requests:
            search_multiple_requests(requests, self.result_container, self.start_time, self.actual_timeout)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return True

    # do search-request
    def search(self):
        self.start_time = time()

        if not self.search_external_bang():
            if not self.search_answerers():
                self.search_standard()

        return self.result_container


class SearchWithPlugins(Search):
    """Similar to the Search class but calls the plugins."""

    def __init__(self, search_query, ordered_plugin_list, request):
        super().__init__(search_query)
        self.ordered_plugin_list = ordered_plugin_list
        self.request = request

    def search(self):
        if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
            super().search()

        plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)

        results = self.result_container.get_ordered_results()

        for result in results:
            plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)

        return self.result_container
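
# Typical usage from the web frontend, roughly (the webapp-side variable names are
# illustrative):
#
#   search_query, raw_text_query = get_search_query_from_webapp(preferences, flask.request.form)
#   search = SearchWithPlugins(search_query, ordered_plugin_list, flask.request)
#   result_container = search.search()
#
# A plain Search(search_query) works the same way but skips the plugin hooks.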