search.py

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
import gc
import threading
from thread import start_new_thread
from time import time
from uuid import uuid4

import searx.poolrequests as requests_lib
from searx.engines import (
    categories, engines
)
from searx.answerers import ask
from searx.utils import gen_useragent
from searx.query import RawTextQuery, SearchQuery
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins

logger = logger.getChild('search')

number_of_searches = 0
def send_http_request(engine, request_params, timeout_limit):
    response = None
    try:
        # create a dictionary which contains all
        # information about the request
        request_args = dict(
            headers=request_params['headers'],
            cookies=request_params['cookies'],
            timeout=timeout_limit,
            verify=request_params['verify']
        )

        # specific type of request (GET or POST)
        if request_params['method'] == 'GET':
            req = requests_lib.get
        else:
            req = requests_lib.post
            request_args['data'] = request_params['data']

        # for page_load_time stats
        time_before_request = time()

        # send the request
        response = req(request_params['url'], **request_args)

        with threading.RLock():
            # no error: reset the suspend variables
            engine.continuous_errors = 0
            engine.suspend_end_time = 0
            # update stats with current page-load-time
            # only the HTTP request
            engine.stats['page_load_time'] += time() - time_before_request
            engine.stats['page_load_count'] += 1

        # is there a timeout? (no parsing in this case)
        timeout_overhead = 0.2  # seconds
        search_duration = time() - request_params['started']
        if search_duration > timeout_limit + timeout_overhead:
            logger.exception('engine timeout on HTTP request: '
                             '{0} (search duration: {1} s, timeout: {2} s)'
                             .format(engine.name, search_duration, timeout_limit))
            with threading.RLock():
                engine.stats['errors'] += 1
            return False

        # everything is ok: return the response
        return response

    except Exception:
        # increase error stats
        with threading.RLock():
            engine.stats['errors'] += 1
            engine.continuous_errors += 1
            engine.suspend_end_time = time() + min(60, engine.continuous_errors)

        # print engine name and specific error message
        logger.exception('engine crash: {0}'.format(engine.name))
        return False
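
# Illustrative sketch, not part of searx: the shape of the request_params
# dict that send_http_request() consumes. The values below are assumptions,
# based on default_request_params() further down plus the keys that
# Search.search() and the engine wrappers fill in.
def _example_request_params():
    return {
        'method': 'GET',                      # or 'POST', with 'data' set
        'url': 'https://example.com/search',  # hypothetical endpoint
        'headers': {'User-Agent': 'Mozilla/5.0'},
        'cookies': {},
        'data': {},
        'verify': True,                       # TLS certificate verification
        'started': time(),                    # used for timeout accounting
    }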
def search_one_request(engine_name, query, request_params, result_container, timeout_limit):
    engine = engines[engine_name]

    # update request parameters dependent on
    # search-engine (contained in engines folder)
    engine.request(query, request_params)

    # TODO add support of offline engines
    if request_params['url'] is None:
        return False

    # ignoring empty urls
    if not request_params['url']:
        return False

    # send request
    response = send_http_request(engine, request_params, timeout_limit)

    # parse response
    success = None
    if response:
        # parse the response
        response.search_params = request_params
        try:
            search_results = engine.response(response)
        except Exception:
            logger.exception('engine crash: {0}'.format(engine.name))
            search_results = []

        # add results
        for result in search_results:
            result['engine'] = engine.name
        result_container.extend(engine.name, search_results)

        success = True
    else:
        success = False

    with threading.RLock():
        # update stats: total time
        engine.stats['engine_time'] += time() - request_params['started']
        engine.stats['engine_time_count'] += 1

    return success
def search_multiple_requests(requests, result_container, timeout_limit):
    start_time = time()
    search_id = str(uuid4())

    for engine_name, query, request_params in requests:
        th = threading.Thread(
            target=search_one_request,
            args=(engine_name, query, request_params, result_container, timeout_limit),
            name=search_id,
        )
        th._engine_name = engine_name
        th.start()

    for th in threading.enumerate():
        if th.name == search_id:
            remaining_time = max(0.0, timeout_limit - (time() - start_time))
            th.join(remaining_time)
            if th.isAlive():
                logger.warning('engine timeout: {0}'.format(th._engine_name))
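
# Standalone sketch of the pattern used above (illustrative, not searx code):
# tag every worker thread with one shared id, then join each one against the
# remaining share of a single global deadline, so the slowest worker cannot
# push the whole batch past timeout_limit.
def _example_deadline_join(workers, timeout_limit):
    tag = str(uuid4())
    threads = [threading.Thread(target=worker, name=tag) for worker in workers]
    for th in threads:
        th.start()
    start_time = time()
    for th in threads:
        # each join gets only the time left on the shared deadline
        th.join(max(0.0, timeout_limit - (time() - start_time)))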
# get default request parameters
def default_request_params():
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True
    }
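
# Hypothetical engine request() hook, for illustration only: modules in
# searx/engines/ mutate the params dict built from these defaults, chiefly
# by setting params['url'] (the URL template and parameter names below are
# assumptions, not a real engine).
def _example_engine_request(query, params):
    params['url'] = 'https://example.com/search?q={0}&p={1}'.format(query, params['pageno'])
    return params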
def get_search_query_from_webapp(preferences, form):
    query = None
    query_engines = []
    query_categories = []
    query_paging = False
    query_pageno = 1
    query_lang = 'all'
    query_time_range = None

    # set blocked engines
    disabled_engines = preferences.engines.get_disabled()

    # set specific language if set
    query_lang = preferences.get_value('language')

    # safesearch
    query_safesearch = preferences.get_value('safesearch')

    # TODO better exceptions
    if not form.get('q'):
        raise Exception('noquery')

    # set page number
    pageno_param = form.get('pageno', '1')
    if not pageno_param.isdigit() or int(pageno_param) < 1:
        pageno_param = 1

    query_pageno = int(pageno_param)

    # parse query, if tags are set, which change
    # the search engine or search language
    raw_text_query = RawTextQuery(form['q'], disabled_engines)
    raw_text_query.parse_query()

    # set query
    query = raw_text_query.getSearchQuery()

    # get last selected language in query, if possible
    # TODO support search with multiple languages
    if len(raw_text_query.languages):
        query_lang = raw_text_query.languages[-1]

    query_time_range = form.get('time_range')

    query_engines = raw_text_query.engines

    # if engines are calculated from the query,
    # set categories using that information
    if query_engines and raw_text_query.specific:
        query_categories = list(set(engine['category']
                                    for engine in query_engines))

    # otherwise, use the defined categories to
    # calculate which engines should be used
    else:
        # set categories/engines
        load_default_categories = True
        for pd_name, pd in form.items():
            if pd_name == 'categories':
                query_categories.extend(categ for categ in map(unicode.strip, pd.split(',')) if categ in categories)
            elif pd_name == 'engines':
                pd_engines = [{'category': engines[engine].categories[0],
                               'name': engine}
                              for engine in map(unicode.strip, pd.split(',')) if engine in engines]
                if pd_engines:
                    query_engines.extend(pd_engines)
                    load_default_categories = False
            elif pd_name.startswith('category_'):
                category = pd_name[9:]

                # if category is not found in list, skip
                if category not in categories:
                    continue

                if pd != 'off':
                    # add category to list
                    query_categories.append(category)
                elif category in query_categories:
                    # remove category from list if property is set to 'off'
                    query_categories.remove(category)

        if not load_default_categories:
            if not query_categories:
                query_categories = list(set(engine['category']
                                            for engine in query_engines))
        else:
            # if no category is specified for this search,
            # use the user-defined default configuration
            # (which is stored in a cookie)
            if not query_categories:
                cookie_categories = preferences.get_value('categories')
                for ccateg in cookie_categories:
                    if ccateg in categories:
                        query_categories.append(ccateg)

            # if still no category is specified, use 'general'
            # as the default category
            if not query_categories:
                query_categories = ['general']

            # use all engines for that search which are
            # declared under the selected categories
            for categ in query_categories:
                query_engines.extend({'category': categ,
                                      'name': engine.name}
                                     for engine in categories[categ]
                                     if (engine.name, categ) not in disabled_engines)

    return SearchQuery(query, query_engines, query_categories,
                       query_lang, query_safesearch, query_pageno, query_time_range)
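
# Illustrative sketch: the kind of form mapping get_search_query_from_webapp()
# parses (in the webapp this is the submitted form data; the concrete values
# below are made up).
_EXAMPLE_FORM = {
    u'q': u'!wikipedia test',    # raw query, may carry engine/language tags
    u'pageno': u'2',             # page number arrives as a string
    u'time_range': u'month',     # optional time-range filter
    u'category_general': u'on',  # per-category toggles use the category_ prefix
}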
class Search(object):
    """Search information container"""

    def __init__(self, search_query):
        # init vars
        super(Search, self).__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()

    # do search-request
    def search(self):
        global number_of_searches

        # start time
        start_time = time()

        # answers?
        answerers_results = ask(self.search_query)
        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return self.result_container

        # init vars
        requests = []

        # increase number of searches
        number_of_searches += 1

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        search_query = self.search_query

        # max of all selected engine timeouts
        timeout_limit = 0

        # start search request for all selected engines
        for selected_engine in search_query.engines:
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            # skip suspended engines
            if engine.suspend_end_time >= time():
                logger.debug('Engine currently suspended: %s', selected_engine['name'])
                continue

            # if paging is not supported, skip
            if search_query.pageno > 1 and not engine.paging:
                continue

            # if search-language is set and engine does not
            # provide language-support, skip
            if search_query.lang != 'all' and not engine.language_support:
                continue

            # if time_range is not supported, skip
            if search_query.time_range and not engine.time_range_support:
                continue

            # set default request parameters
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent
            request_params['category'] = selected_engine['category']
            request_params['started'] = start_time
            request_params['pageno'] = search_query.pageno

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = search_query.lang

            # 0 = None, 1 = Moderate, 2 = Strict
            request_params['safesearch'] = search_query.safesearch
            request_params['time_range'] = search_query.time_range

            # append request to list
            requests.append((selected_engine['name'], search_query.query.encode('utf-8'), request_params))

            # update timeout_limit
            timeout_limit = max(timeout_limit, engine.timeout)

        if requests:
            # send all search requests
            search_multiple_requests(requests, self.result_container, timeout_limit - (time() - start_time))
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return self.result_container
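
# Usage sketch (illustrative): running a plain Search outside the webapp.
# The engine name below is an assumption and must match a configured engine;
# the positional arguments follow the SearchQuery call above.
def _example_plain_search():
    search_query = SearchQuery(u'test query',
                               [{'category': 'general', 'name': 'wikipedia'}],
                               ['general'], 'all', 0, 1, None)
    return Search(search_query).search()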
class SearchWithPlugins(Search):
    """Similar to the Search class but calls the plugins."""

    def __init__(self, search_query, request):
        super(SearchWithPlugins, self).__init__(search_query)
        self.request = request

    def search(self):
        if plugins.call('pre_search', self.request, self):
            super(SearchWithPlugins, self).search()

        plugins.call('post_search', self.request, self)

        results = self.result_container.get_ordered_results()

        for result in results:
            plugins.call('on_result', self.request, self, result)

        return self.result_container
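
# Hypothetical glue, for illustration only: roughly how the webapp ties the
# pieces together per query (the real call site is assumed to live in
# webapp.py; 'request' is assumed to be the framework request object).
def _example_webapp_search(preferences, request):
    search_query = get_search_query_from_webapp(preferences, request.form)
    return SearchWithPlugins(search_query, request).search()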