'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

from os.path import realpath, dirname, splitext, join
from imp import load_source
import grequests
from itertools import izip_longest, chain
from operator import itemgetter
from urlparse import urlparse
from searx import settings
from searx.utils import gen_useragent
import ConfigParser
import sys
from datetime import datetime

engine_dir = dirname(realpath(__file__))
searx_dir = join(engine_dir, '../../')

engines_config = ConfigParser.SafeConfigParser()
engines_config.read(join(searx_dir, 'engines.cfg'))
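
# engines.cfg is an INI file with one section per engine instance; the
# section name becomes the engine's display name. An illustrative section
# (example values, not shipped defaults):
#
#   [duckduckgo]
#   engine = duckduckgo
#   categories = general
#   weight = 1
#
# 'engine' names the module file (searx/engines/duckduckgo.py here),
# 'categories' is a comma separated list (or 'none'), and 'weight' is an
# optional ranking multiplier read by score_results() below.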

number_of_searches = 0

engines = {}

categories = {'general': []}


def load_module(filename):
    modname = splitext(filename)[0]
    if modname in sys.modules:
        del sys.modules[modname]
    filepath = join(engine_dir, filename)
    module = load_source(modname, filepath)
    module.name = modname
    return module
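
# e.g. load_module('duckduckgo.py') (re)loads searx/engines/duckduckgo.py as
# the module 'duckduckgo'; dropping a previously imported module of the same
# name from sys.modules first means engine modules can be reloaded in place.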

if not engines_config.sections():
    print '[E] Error: no engines found. Edit your engines.cfg'
    sys.exit(2)

for engine_config_name in engines_config.sections():
    engine_data = engines_config.options(engine_config_name)
    engine = load_module(engines_config.get(engine_config_name, 'engine')+'.py')
    engine.name = engine_config_name
    for param_name in engine_data:
        if param_name == 'engine':
            continue
        if param_name == 'categories':
            if engines_config.get(engine_config_name, param_name) == 'none':
                engine.categories = []
            else:
                engine.categories = map(
                    str.strip,
                    engines_config.get(engine_config_name, param_name).split(','))
            continue
        setattr(engine, param_name,
                engines_config.get(engine_config_name, param_name))
    for engine_attr in dir(engine):
        if engine_attr.startswith('_'):
            continue
        if getattr(engine, engine_attr) is None:
            print '[E] Engine config error: Missing attribute "{0}.{1}"'\
                .format(engine.name, engine_attr)
            sys.exit(1)
    engines[engine.name] = engine
    engine.stats = {'result_count': 0,
                    'search_count': 0,
                    'page_load_time': 0,
                    'score_count': 0,
                    'errors': 0}
    if hasattr(engine, 'categories'):
        for category_name in engine.categories:
            categories.setdefault(category_name, []).append(engine)
    else:
        categories['general'].append(engine)
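
# After this loop, categories maps each category name to the engine modules
# serving it, e.g. (illustrative): {'general': [duckduckgo, wikipedia],
# 'news': [...]}; engines without a 'categories' option land in 'general'.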


def default_request_params():
    return {'method': 'GET', 'headers': {}, 'data': {}, 'url': '', 'cookies': {}}
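
# Each engine module's request(query, params) hook receives a dict built by
# this factory and is expected to fill in at least 'url' (and 'data' for POST
# requests); search() below skips engines that leave 'url' empty.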


def make_callback(engine_name, results, suggestions, callback, params):
    # creating a callback wrapper for the search engine results
    def process_callback(response, **kwargs):
        cb_res = []
        response.search_params = params
        engines[engine_name].stats['page_load_time'] += \
            (datetime.now() - params['started']).total_seconds()
        try:
            search_results = callback(response)
        except Exception, e:
            engines[engine_name].stats['errors'] += 1
            results[engine_name] = cb_res
            print '[E] Error with engine "{0}":\n\t{1}'.format(engine_name, str(e))
            return
        for result in search_results:
            result['engine'] = engine_name
            if 'suggestion' in result:
                # TODO type checks
                suggestions.add(result['suggestion'])
                continue
            cb_res.append(result)
        results[engine_name] = cb_res
    return process_callback
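
# The wrapper returned here is registered as a grequests response hook in
# search() below, so it runs once per engine as soon as that engine's HTTP
# response arrives; results and suggestions act as shared accumulators for
# one search request.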


def score_results(results):
    flat_res = filter(None, chain.from_iterable(izip_longest(*results.values())))
    flat_len = len(flat_res)
    engines_len = len(results)
    results = []
    # deduplication + scoring
    for i, res in enumerate(flat_res):
        res['parsed_url'] = urlparse(res['url'])
        res['engines'] = [res['engine']]
        weight = 1.0
        if hasattr(engines[res['engine']], 'weight'):
            weight = float(engines[res['engine']].weight)
        elif res['engine'] in settings.weights:
            weight = float(settings.weights[res['engine']])
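        # Positional scoring: results that appear earlier in the interleaved
        # list score higher. Illustrative numbers: with 30 results from 3
        # engines and the default weight of 1.0, the first result (i == 0)
        # scores int(30 / 3) * 1.0 + 1 == 11, the last (i == 29) scores
        # int(1 / 3) * 1.0 + 1 == 1.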
        score = int((flat_len - i)/engines_len)*weight + 1
        duplicated = False
        for new_res in results:
            p1 = res['parsed_url'].path[:-1] if res['parsed_url'].path.endswith('/') else res['parsed_url'].path
            p2 = new_res['parsed_url'].path[:-1] if new_res['parsed_url'].path.endswith('/') else new_res['parsed_url'].path
            if res['parsed_url'].netloc == new_res['parsed_url'].netloc and\
               p1 == p2 and\
               res['parsed_url'].query == new_res['parsed_url'].query and\
               res.get('template') == new_res.get('template'):
                duplicated = new_res
                break
        if duplicated:
            if len(res.get('content', '')) > len(duplicated.get('content', '')):
                duplicated['content'] = res['content']
            duplicated['score'] += score
            duplicated['engines'].append(res['engine'])
            if duplicated['parsed_url'].scheme == 'https':
                continue
            elif res['parsed_url'].scheme == 'https':
                duplicated['url'] = res['parsed_url'].geturl()
                duplicated['parsed_url'] = res['parsed_url']
        else:
            res['score'] = score
            results.append(res)
    return sorted(results, key=itemgetter('score'), reverse=True)


def search(query, request, selected_engines):
    global engines, categories, number_of_searches
    requests = []
    results = {}
    suggestions = set()
    number_of_searches += 1
    #user_agent = request.headers.get('User-Agent', '')
    user_agent = gen_useragent()

    for selected_engine in selected_engines:
        if selected_engine['name'] not in engines:
            continue
        engine = engines[selected_engine['name']]
        request_params = default_request_params()
        request_params['headers']['User-Agent'] = user_agent
        request_params['category'] = selected_engine['category']
        request_params['started'] = datetime.now()
        request_params = engine.request(query, request_params)
        callback = make_callback(selected_engine['name'],
                                 results,
                                 suggestions,
                                 engine.response,
                                 request_params)
        request_args = dict(headers=request_params['headers'],
                            hooks=dict(response=callback),
                            cookies=request_params['cookies'],
                            timeout=settings.request_timeout)
        if request_params['method'] == 'GET':
            req = grequests.get
        else:
            req = grequests.post
            request_args['data'] = request_params['data']
        # ignoring empty urls
        if not request_params['url']:
            continue
        requests.append(req(request_params['url'], **request_args))
    grequests.map(requests)

    for engine_name, engine_results in results.items():
        engines[engine_name].stats['search_count'] += 1
        engines[engine_name].stats['result_count'] += len(engine_results)

    results = score_results(results)
    for result in results:
        for res_engine in result['engines']:
            engines[res_engine].stats['score_count'] += result['score']
    return results, suggestions
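
# A minimal usage sketch (hypothetical engine selection; the incoming HTTP
# request object is currently unused because the User-Agent is generated):
#
#   selected = [{'name': 'duckduckgo', 'category': 'general'}]
#   results, suggestions = search('free software', request, selected)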


def get_engines_stats():
    # TODO refactor
    pageloads = []
    results = []
    scores = []
    errors = []
    scores_per_result = []

    max_pageload = max_results = max_score = max_errors = max_score_per_result = 0
    for engine in engines.values():
        if engine.stats['search_count'] == 0:
            continue
        results_num = engine.stats['result_count']/float(engine.stats['search_count'])
        load_times = engine.stats['page_load_time']/float(engine.stats['search_count'])
        if results_num:
            score = engine.stats['score_count']/float(engine.stats['search_count'])
            score_per_result = score/results_num
        else:
            score = score_per_result = 0.0
        max_results = max(results_num, max_results)
        max_pageload = max(load_times, max_pageload)
        max_score = max(score, max_score)
        max_score_per_result = max(score_per_result, max_score_per_result)
        max_errors = max(max_errors, engine.stats['errors'])
        pageloads.append({'avg': load_times, 'name': engine.name})
        results.append({'avg': results_num, 'name': engine.name})
        scores.append({'avg': score, 'name': engine.name})
        errors.append({'avg': engine.stats['errors'], 'name': engine.name})
        scores_per_result.append({'avg': score_per_result, 'name': engine.name})

    for engine in pageloads:
        engine['percentage'] = int(engine['avg']/max_pageload*100)
    for engine in results:
        engine['percentage'] = int(engine['avg']/max_results*100)
    for engine in scores:
        engine['percentage'] = int(engine['avg']/max_score*100)
    for engine in scores_per_result:
        engine['percentage'] = int(engine['avg']/max_score_per_result*100)
    for engine in errors:
        if max_errors:
            engine['percentage'] = int(float(engine['avg'])/max_errors*100)
        else:
            engine['percentage'] = 0

    return [('Page loads (sec)', sorted(pageloads, key=itemgetter('avg'))),
            ('Number of results', sorted(results, key=itemgetter('avg'), reverse=True)),
            ('Scores', sorted(scores, key=itemgetter('avg'), reverse=True)),
            ('Scores per result', sorted(scores_per_result, key=itemgetter('avg'), reverse=True)),
            ('Errors', sorted(errors, key=itemgetter('avg'), reverse=True))]
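
# get_engines_stats() returns a list of (title, rows) pairs, one pair per
# metric, where each row looks like (illustrative values):
#   {'avg': 0.42, 'name': 'duckduckgo', 'percentage': 73}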