'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
import sys
import threading
from os.path import realpath, dirname
from babel.localedata import locale_identifiers
from urllib.parse import urlparse
from flask_babel import gettext
from operator import itemgetter
from searx import settings
from searx import logger
from searx.data import ENGINES_LANGUAGES
from searx.poolrequests import get, get_proxy_cycles
from searx.utils import load_module, match_language, get_engine_from_settings


logger = logger.getChild('engines')

engine_dir = dirname(realpath(__file__))

engines = {}

categories = {'general': []}

babel_langs = [lang_parts[0] + '-' + lang_parts[-1] if len(lang_parts) > 1 else lang_parts[0]
               for lang_parts in (lang_code.split('_') for lang_code in locale_identifiers())]

engine_shortcuts = {}
engine_default_args = {'paging': False,
                       'categories': ['general'],
                       'language_support': True,
                       'supported_languages': [],
                       'safesearch': False,
                       'timeout': settings['outgoing']['request_timeout'],
                       'shortcut': '-',
                       'disabled': False,
                       'suspend_end_time': 0,
                       'continuous_errors': 0,
                       'time_range_support': False,
                       'offline': False,
                       'display_error_messages': True,
                       'tokens': []}
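
# Illustrative sketch, not part of the original module: each entry of the
# ``engines:`` list in settings.yml reaches load_engine() below as a plain dict.
# The keys and values here are assumptions chosen only to show the expected
# shape; anything omitted falls back to engine_default_args above.
#
#   example_engine_data = {
#       'name': 'example',        # lowercase, no underscore (checked below)
#       'engine': 'example',      # loads example.py from engine_dir
#       'shortcut': 'ex',         # bang shortcut, must be unique across engines
#       'categories': 'general',  # comma separated string, split into a list
#       'timeout': 3.0,           # overrides settings['outgoing']['request_timeout']
#   }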


def load_engine(engine_data):
    '''Load one engine module described by a settings entry (a dict).

    Returns the configured engine module, or None if the engine is inactive
    or could not be loaded.
    '''
    engine_name = engine_data['name']
    if '_' in engine_name:
        logger.error('Engine name contains underscore: "{}"'.format(engine_name))
        sys.exit(1)

    if engine_name.lower() != engine_name:
        logger.warn('Engine name is not lowercase: "{}", converting to lowercase'.format(engine_name))
        engine_name = engine_name.lower()
        engine_data['name'] = engine_name

    engine_module = engine_data['engine']

    try:
        engine = load_module(engine_module + '.py', engine_dir)
    except (SyntaxError, KeyboardInterrupt, SystemExit, SystemError, ImportError, RuntimeError):
        logger.exception('Fatal exception in engine "{}"'.format(engine_module))
        sys.exit(1)
    except:
        logger.exception('Cannot load engine "{}"'.format(engine_module))
        return None

    for param_name, param_value in engine_data.items():
        if param_name == 'engine':
            pass
        elif param_name == 'categories':
            if param_value == 'none':
                engine.categories = []
            else:
                engine.categories = list(map(str.strip, param_value.split(',')))
        elif param_name == 'proxies':
            engine.proxies = get_proxy_cycles(param_value)
        else:
            setattr(engine, param_name, param_value)

    for arg_name, arg_value in engine_default_args.items():
        if not hasattr(engine, arg_name):
            setattr(engine, arg_name, arg_value)

    # checking required variables
    for engine_attr in dir(engine):
        if engine_attr.startswith('_'):
            continue
        if engine_attr == 'inactive' and getattr(engine, engine_attr) is True:
            return None
        if getattr(engine, engine_attr) is None:
            logger.error('Missing engine config attribute: "{0}.{1}"'
                         .format(engine.name, engine_attr))
            sys.exit(1)

    # assign supported languages from json file
    if engine_data['name'] in ENGINES_LANGUAGES:
        setattr(engine, 'supported_languages', ENGINES_LANGUAGES[engine_data['name']])

    # find custom aliases for non standard language codes
    if hasattr(engine, 'supported_languages'):
        if hasattr(engine, 'language_aliases'):
            language_aliases = getattr(engine, 'language_aliases')
        else:
            language_aliases = {}

        for engine_lang in getattr(engine, 'supported_languages'):
            iso_lang = match_language(engine_lang, babel_langs, fallback=None)
            if iso_lang and iso_lang != engine_lang and not engine_lang.startswith(iso_lang) and \
                    iso_lang not in getattr(engine, 'supported_languages'):
                language_aliases[iso_lang] = engine_lang

        setattr(engine, 'language_aliases', language_aliases)

    # assign language fetching method if auxiliary method exists
    if hasattr(engine, '_fetch_supported_languages'):
        setattr(engine, 'fetch_supported_languages',
                lambda: engine._fetch_supported_languages(get(engine.supported_languages_url)))

    engine.stats = {
        'sent_search_count': 0,  # sent search
        'search_count': 0,  # successful search
        'result_count': 0,
        'engine_time': 0,
        'engine_time_count': 0,
        'score_count': 0,
        'errors': 0
    }

    if not engine.offline:
        engine.stats['page_load_time'] = 0
        engine.stats['page_load_count'] = 0

    # tor related settings
    if settings['outgoing'].get('using_tor_proxy'):
        # use onion url if using tor.
        if hasattr(engine, 'onion_url'):
            engine.search_url = engine.onion_url + getattr(engine, 'search_path', '')
    elif 'onions' in engine.categories:
        # exclude onion engines if not using tor.
        return None

    engine.timeout += settings['outgoing'].get('extra_proxy_timeout', 0)

    for category_name in engine.categories:
        categories.setdefault(category_name, []).append(engine)

    if engine.shortcut in engine_shortcuts:
        logger.error('Engine config error: ambiguous shortcut: {0}'.format(engine.shortcut))
        sys.exit(1)

    engine_shortcuts[engine.shortcut] = engine.name

    return engine
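
# Illustrative usage sketch (assumption, not in the original file): load_engine()
# is normally driven by load_engines() further down, but a single settings-style
# dict is enough to exercise it.  'dummy' refers to searx's bundled dummy engine
# module; the shortcut value is hypothetical.
#
#   engine = load_engine({'name': 'dummy', 'engine': 'dummy', 'shortcut': 'dmy'})
#   if engine is not None:
#       print(engine.name, engine.shortcut, engine.categories, engine.timeout)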


def to_percentage(stats, maxvalue):
    for engine_stat in stats:
        if maxvalue:
            engine_stat['percentage'] = int(engine_stat['avg'] / maxvalue * 100)
        else:
            engine_stat['percentage'] = 0
    return stats
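
# Worked example (illustrative only): to_percentage() scales each entry's 'avg'
# against the supplied maximum, so
#
#   to_percentage([{'avg': 0.5, 'name': 'a'}, {'avg': 2.0, 'name': 'b'}], 2.0)
#
# adds 'percentage': 25 to the first entry and 'percentage': 100 to the second;
# with maxvalue = 0 (no data yet) every 'percentage' is set to 0.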


def get_engines_stats(preferences):
    # TODO refactor
    pageloads = []
    engine_times = []
    results = []
    scores = []
    errors = []
    scores_per_result = []

    max_pageload = max_engine_times = max_results = max_score = max_errors = max_score_per_result = 0  # noqa
    for engine in engines.values():
        if not preferences.validate_token(engine):
            continue

        if engine.stats['search_count'] == 0:
            continue

        results_num = \
            engine.stats['result_count'] / float(engine.stats['search_count'])

        if engine.stats['engine_time_count'] != 0:
            this_engine_time = engine.stats['engine_time'] / float(engine.stats['engine_time_count'])  # noqa
        else:
            this_engine_time = 0

        if results_num:
            score = engine.stats['score_count'] / float(engine.stats['search_count'])  # noqa
            score_per_result = score / results_num
        else:
            score = score_per_result = 0.0

        if not engine.offline:
            load_times = 0
            if engine.stats['page_load_count'] != 0:
                load_times = engine.stats['page_load_time'] / float(engine.stats['page_load_count'])  # noqa
            max_pageload = max(load_times, max_pageload)
            pageloads.append({'avg': load_times, 'name': engine.name})

        max_engine_times = max(this_engine_time, max_engine_times)
        max_results = max(results_num, max_results)
        max_score = max(score, max_score)
        max_score_per_result = max(score_per_result, max_score_per_result)
        max_errors = max(max_errors, engine.stats['errors'])

        engine_times.append({'avg': this_engine_time, 'name': engine.name})
        results.append({'avg': results_num, 'name': engine.name})
        scores.append({'avg': score, 'name': engine.name})
        errors.append({'avg': engine.stats['errors'], 'name': engine.name})
        scores_per_result.append({
            'avg': score_per_result,
            'name': engine.name
        })

    pageloads = to_percentage(pageloads, max_pageload)
    engine_times = to_percentage(engine_times, max_engine_times)
    results = to_percentage(results, max_results)
    scores = to_percentage(scores, max_score)
    scores_per_result = to_percentage(scores_per_result, max_score_per_result)
    errors = to_percentage(errors, max_errors)

    return [
        (
            gettext('Engine time (sec)'),
            sorted(engine_times, key=itemgetter('avg'))
        ),
        (
            gettext('Page loads (sec)'),
            sorted(pageloads, key=itemgetter('avg'))
        ),
        (
            gettext('Number of results'),
            sorted(results, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Scores'),
            sorted(scores, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Scores per result'),
            sorted(scores_per_result, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Errors'),
            sorted(errors, key=itemgetter('avg'), reverse=True)
        ),
    ]
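
# Illustrative note (assumption about the caller, not in the original file):
# get_engines_stats() returns a list of (label, rows) pairs that searx's stats
# page iterates over; after to_percentage() each row looks roughly like
#
#   {'name': 'example', 'avg': 0.42, 'percentage': 21}
#
# where 'percentage' is relative to the best value in that column.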


def load_engines(engine_list):
    '''(Re)load all engines from a list of settings entries.'''
    global engines, engine_shortcuts
    engines.clear()
    engine_shortcuts.clear()
    for engine_data in engine_list:
        engine = load_engine(engine_data)
        if engine is not None:
            engines[engine.name] = engine
    return engines
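
# Illustrative sketch (assumption): searx itself feeds load_engines() the parsed
# ``engines:`` section of settings.yml, i.e. the settings dict imported at the
# top of this module.  Shown only to make the call and its side effects
# (populated engines, categories and engine_shortcuts) explicit.
#
#   load_engines(settings['engines'])
#   print(sorted(categories.keys()))
#   print(engine_shortcuts)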


def initialize_engines(engine_list):
    '''Load all engines and run each engine's optional init() in a background thread.'''
    load_engines(engine_list)

    def engine_init(engine_name, init_fn):
        init_fn(get_engine_from_settings(engine_name))
        logger.debug('%s engine: Initialized', engine_name)

    for engine_name, engine in engines.items():
        if hasattr(engine, 'init'):
            init_fn = getattr(engine, 'init')
            if init_fn:
                logger.debug('%s engine: Starting background initialization', engine_name)
                threading.Thread(target=engine_init, args=(engine_name, init_fn)).start()

        _set_https_support_for_engine(engine)


def _set_https_support_for_engine(engine):
    # check HTTPS support if it is not disabled
    if not engine.offline and not hasattr(engine, 'https_support'):
        params = engine.request('http_test', {
            'method': 'GET',
            'headers': {},
            'data': {},
            'url': '',
            'cookies': {},
            'verify': True,
            'auth': None,
            'pageno': 1,
            'time_range': None,
            'language': '',
            'safesearch': False,
            'is_test': True,
            'category': 'files',
            'raise_for_status': True,
        })

        if 'url' not in params:
            return

        parsed_url = urlparse(params['url'])
        https_support = parsed_url.scheme == 'https'
        setattr(engine, 'https_support', https_support)
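
# Illustrative note (not in the original file): no HTTP request is made by the
# check above; engine.request() only fills in the outgoing parameters, and the
# scheme of the resulting URL decides https_support.  A minimal sketch of that
# last step, with a hypothetical URL:
#
#   from urllib.parse import urlparse
#   print(urlparse('https://example.org/search?q=test').scheme == 'https')  # True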