search.py

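# searx/search.py -- builds one request per enabled engine for a query,
# fires them concurrently through grequests and merges, deduplicates and
# scores the responses (see score_results() below).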
import grequests
from itertools import izip_longest, chain
from datetime import datetime
from operator import itemgetter
from urlparse import urlparse, unquote
from searx.engines import (
    categories, engines, engine_shortcuts
)
from searx.languages import language_codes
from searx.utils import gen_useragent

number_of_searches = 0


def default_request_params():
    return {
        'method': 'GET', 'headers': {}, 'data': {}, 'url': '', 'cookies': {}}


def make_callback(engine_name, results, suggestions, callback, params):
    # creating a callback wrapper for the search engine results
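    # process_callback closes over the shared `results` dict and
    # `suggestions` set, so each engine's grequests response hook can record
    # its parsed results and error statistics under its own engine name.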
    def process_callback(response, **kwargs):
        cb_res = []
        response.search_params = params
        engines[engine_name].stats['page_load_time'] += \
            (datetime.now() - params['started']).total_seconds()

        try:
            search_results = callback(response)
        except Exception, e:
            engines[engine_name].stats['errors'] += 1
            results[engine_name] = cb_res
            print '[E] Error with engine "{0}":\n\t{1}'.format(
                engine_name, str(e))
            return

        for result in search_results:
            result['engine'] = engine_name

            if 'suggestion' in result:
                # TODO type checks
                suggestions.add(result['suggestion'])
                continue

            cb_res.append(result)

        results[engine_name] = cb_res

    return process_callback


def score_results(results):
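    # Pass 1 interleaves the per-engine result lists and scores each entry by
    # position: score = int((flat_len - i) / engines_len) * weight + 1.
    # With, say, 3 engines, 30 interleaved results and weight 1.0, the first
    # entry gets int(30 / 3) * 1.0 + 1 = 11 and the last one 1.
    # Results with the same host, path, query string and template are merged,
    # their scores are added, and the https variant of a duplicated URL wins.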
    flat_res = filter(
        None, chain.from_iterable(izip_longest(*results.values())))
    flat_len = len(flat_res)
    engines_len = len(results)
    results = []

    # pass 1: deduplication + scoring
    for i, res in enumerate(flat_res):
        res['parsed_url'] = urlparse(res['url'])
        res['host'] = res['parsed_url'].netloc

        if res['host'].startswith('www.'):
            res['host'] = res['host'].replace('www.', '', 1)

        res['engines'] = [res['engine']]
        weight = 1.0

        if hasattr(engines[res['engine']], 'weight'):
            weight = float(engines[res['engine']].weight)

        score = int((flat_len - i) / engines_len) * weight + 1
        duplicated = False

        for new_res in results:
            p1 = res['parsed_url'].path[:-1] if res['parsed_url'].path.endswith('/') else res['parsed_url'].path  # noqa
            p2 = new_res['parsed_url'].path[:-1] if new_res['parsed_url'].path.endswith('/') else new_res['parsed_url'].path  # noqa
            if res['host'] == new_res['host'] and\
               unquote(p1) == unquote(p2) and\
               res['parsed_url'].query == new_res['parsed_url'].query and\
               res.get('template') == new_res.get('template'):
                duplicated = new_res
                break

        if duplicated:
            if res.get('content') > duplicated.get('content'):
                duplicated['content'] = res['content']

            duplicated['score'] += score
            duplicated['engines'].append(res['engine'])

            if duplicated['parsed_url'].scheme == 'https':
                continue
            elif res['parsed_url'].scheme == 'https':
                duplicated['url'] = res['parsed_url'].geturl()
                duplicated['parsed_url'] = res['parsed_url']
        else:
            res['score'] = score
            results.append(res)

    results = sorted(results, key=itemgetter('score'), reverse=True)

    # pass 2 : group results by category and template
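    # Results sharing a category:template key are kept adjacent: each group
    # accepts up to 8 follow-up entries, and only while its insertion point
    # is fewer than 20 positions behind the end of the grouped list.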
    gresults = []
    categoryPositions = {}

    for i, res in enumerate(results):
        # FIXME : handle more than one category per engine
        category = engines[res['engine']].categories[0] + ':'\
            + ('' if 'template' not in res else res['template'])

        current = None if category not in categoryPositions\
            else categoryPositions[category]

        # group with previous results of the same category if the group can
        # accept more results and is not too far from the current position
        if current is not None and (current['count'] > 0)\
                and (len(gresults) - current['index'] < 20):
            # insert this result next to the previous ones of its category
            index = current['index']
            gresults.insert(index, res)

            # update every index after the current one
            # (including the current one)
            for k in categoryPositions:
                v = categoryPositions[k]['index']
                if v >= index:
                    categoryPositions[k]['index'] = v + 1

            # one more slot of this group has been used
            current['count'] -= 1

        else:
            # start a new group for this category
            gresults.append(res)

            # remember its position and remaining capacity
            categoryPositions[category] = {'index': len(gresults), 'count': 8}

    return gresults


class Search(object):
    """Search information container"""

    def __init__(self, request):
        super(Search, self).__init__()
        self.query = None
        self.engines = []
        self.categories = []
        self.paging = False
        self.pageno = 1
        self.lang = 'all'

        if request.cookies.get('blocked_engines'):
            self.blocked_engines = request.cookies['blocked_engines'].split(',')  # noqa
        else:
            self.blocked_engines = []

        self.results = []
        self.suggestions = []

        self.request_data = {}

        if request.cookies.get('language')\
           and request.cookies['language'] in (x[0] for x in language_codes):
            self.lang = request.cookies['language']

        if request.method == 'POST':
            self.request_data = request.form
        else:
            self.request_data = request.args

        # TODO better exceptions
        if not self.request_data.get('q'):
            raise Exception('noquery')

        self.query = self.request_data['q']

        pageno_param = self.request_data.get('pageno', '1')
        if not pageno_param.isdigit() or int(pageno_param) < 1:
            raise Exception('wrong pagenumber')

        self.pageno = int(pageno_param)

        self.parse_query()

        self.categories = []

        if self.engines:
            self.categories = list(set(engine['category']
                                       for engine in self.engines))
        else:
            for pd_name, pd in self.request_data.items():
                if pd_name.startswith('category_'):
                    category = pd_name[9:]
                    if category not in categories:
                        continue
                    self.categories.append(category)

            if not self.categories:
                cookie_categories = request.cookies.get('categories', '')
                cookie_categories = cookie_categories.split(',')
                for ccateg in cookie_categories:
                    if ccateg in categories:
                        self.categories.append(ccateg)

            if not self.categories:
                self.categories = ['general']

            for categ in self.categories:
                self.engines.extend({'category': categ,
                                     'name': x.name}
                                    for x in categories[categ]
                                    if x.name not in self.blocked_engines)

    def parse_query(self):
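        # The first query word may carry a modifier:
        #   ':<lang>'   force the search language, e.g. ':fr'
        #   '!<prefix>' restrict the search to an engine shortcut, an engine
        #               name or a whole category, e.g. '!images'
        # A recognised modifier is stripped from the query and parse_query()
        # recurses, so several modifiers can be chained.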
        query_parts = self.query.split()
        modified = False

        if query_parts[0].startswith(':'):
            lang = query_parts[0][1:].lower()

            for lc in language_codes:
                lang_id, lang_name, country = map(str.lower, lc)
                if lang == lang_id\
                   or lang_id.startswith(lang)\
                   or lang == lang_name\
                   or lang == country:
                    self.lang = lang
                    modified = True
                    break

        elif query_parts[0].startswith('!'):
            prefix = query_parts[0][1:].replace('_', ' ')

            if prefix in engine_shortcuts\
               and engine_shortcuts[prefix] not in self.blocked_engines:
                modified = True
                self.engines.append({'category': 'none',
                                     'name': engine_shortcuts[prefix]})
            elif prefix in engines\
                    and prefix not in self.blocked_engines:
                modified = True
                self.engines.append({'category': 'none',
                                     'name': prefix})
            elif prefix in categories:
                modified = True
                self.engines.extend({'category': prefix,
                                     'name': engine.name}
                                    for engine in categories[prefix]
                                    if engine.name not in self.blocked_engines)

        if modified:
            self.query = self.query.replace(query_parts[0], '', 1).strip()
            self.parse_query()

    def search(self, request):
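        # Build one grequests request per selected engine, each with a
        # response hook created by make_callback(); grequests.map() fires
        # them concurrently, then score_results() merges the collected
        # per-engine results.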
        global number_of_searches
        requests = []
        results = {}
        suggestions = set()
        number_of_searches += 1
        #user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        for selected_engine in self.engines:
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            if self.pageno > 1 and not engine.paging:
                continue

            if self.lang != 'all' and not engine.language_support:
                continue

            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent
            request_params['category'] = selected_engine['category']
            request_params['started'] = datetime.now()
            request_params['pageno'] = self.pageno
            request_params['language'] = self.lang

            request_params = engine.request(self.query.encode('utf-8'),
                                            request_params)

            if request_params['url'] is None:
                # TODO add support of offline engines
                pass

            callback = make_callback(
                selected_engine['name'],
                results,
                suggestions,
                engine.response,
                request_params
            )

            request_args = dict(
                headers=request_params['headers'],
                hooks=dict(response=callback),
                cookies=request_params['cookies'],
                timeout=engine.timeout
            )

            if request_params['method'] == 'GET':
                req = grequests.get
            else:
                req = grequests.post
                request_args['data'] = request_params['data']

            # ignoring empty urls
            if not request_params['url']:
                continue

            requests.append(req(request_params['url'], **request_args))

        grequests.map(requests)

        for engine_name, engine_results in results.items():
            engines[engine_name].stats['search_count'] += 1
            engines[engine_name].stats['result_count'] += len(engine_results)

        results = score_results(results)

        for result in results:
            for res_engine in result['engines']:
                engines[res_engine]\
                    .stats['score_count'] += result['score']

        return results, suggestions