#!/usr/bin/env python

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

if __name__ == '__main__':
    from sys import path
    from os.path import realpath, dirname
    path.append(realpath(dirname(realpath(__file__)) + '/../'))

import json
import cStringIO
import os
import hashlib

from datetime import datetime, timedelta
from urllib import urlencode
from flask import (
    Flask, request, render_template, url_for, Response, make_response,
    redirect, send_from_directory
)
from flask.ext.babel import Babel, gettext, format_date
from searx import settings, searx_dir
from searx.poolrequests import get as http_get
from searx.engines import (
    categories, engines, get_engines_stats, engine_shortcuts
)
from searx.utils import (
    UnicodeWriter, highlight_content, html_to_text, get_themes,
    get_static_files, get_result_templates, gen_useragent, dict_subset,
    prettify_url, get_blocked_engines
)
from searx.version import VERSION_STRING
from searx.languages import language_codes
from searx.https_rewrite import https_url_rewrite
from searx.search import Search
from searx.query import Query
from searx.autocomplete import searx_bang, backends as autocomplete_backends
from searx import logger

try:
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter
except:
    logger.critical("cannot import dependency: pygments")
    from sys import exit
    exit(1)


logger = logger.getChild('webapp')

static_path, templates_path, themes =\
    get_themes(settings['themes_path']
               if settings.get('themes_path')
               else searx_dir)

default_theme = settings['server'].get('default_theme', 'default')

static_files = get_static_files(searx_dir)

result_templates = get_result_templates(searx_dir)

app = Flask(
    __name__,
    static_folder=static_path,
    template_folder=templates_path
)

app.secret_key = settings['server']['secret_key']

babel = Babel(app)

global_favicons = []
for indice, theme in enumerate(themes):
    global_favicons.append([])
    theme_img_path = searx_dir + "/static/themes/" + theme + "/img/icons/"
    for (dirpath, dirnames, filenames) in os.walk(theme_img_path):
        global_favicons[indice].extend(filenames)

cookie_max_age = 60 * 60 * 24 * 365 * 5  # 5 years
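
# Note on locale selection below: get_locale() checks, in increasing
# priority, the Accept-Language header, the configured default_locale, the
# 'locale' cookie, a 'locale' GET argument and finally a 'locale' form
# field; each later match simply overrides the earlier choice.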


@babel.localeselector
def get_locale():
    locale = request.accept_languages.best_match(settings['locales'].keys())

    if settings['server'].get('default_locale'):
        locale = settings['server']['default_locale']

    if request.cookies.get('locale', '') in settings['locales']:
        locale = request.cookies.get('locale', '')

    if 'locale' in request.args\
       and request.args['locale'] in settings['locales']:
        locale = request.args['locale']

    if 'locale' in request.form\
       and request.form['locale'] in settings['locales']:
        locale = request.form['locale']

    return locale
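
# The template filter below expects 'codelines' to be an iterable of
# (line_number, source_line) pairs; a gap in the line numbers starts a new
# highlighted block.  Illustrative input (hypothetical values):
#
#   codelines = [(1, 'def f():'), (2, '    return 1'), (10, 'f()')]
#
# would yield two highlighted blocks, the second numbered from line 10.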


# code-highlighter
@app.template_filter('code_highlighter')
def code_highlighter(codelines, language=None):
    if not language:
        language = 'text'

    try:
        # find lexer by programming language
        lexer = get_lexer_by_name(language, stripall=True)
    except:
        # if the lexer is not found, fall back to the plain-text one
        logger.debug('highlighter cannot find lexer for {0}'.format(language))
        lexer = get_lexer_by_name('text', stripall=True)

    html_code = ''
    tmp_code = ''
    last_line = None

    # parse lines
    for line, code in codelines:
        if not last_line:
            line_code_start = line

        # a new codeblock is detected when the line numbers are not contiguous
        if last_line is not None and\
           last_line + 1 != line:

            # highlight the previous codepart
            formatter = HtmlFormatter(linenos='inline',
                                      linenostart=line_code_start)
            html_code = html_code + highlight(tmp_code, lexer, formatter)

            # reset conditions for the next codepart
            tmp_code = ''
            line_code_start = line

        # add codepart
        tmp_code += code + '\n'

        # update line
        last_line = line

    # highlight the last codepart
    formatter = HtmlFormatter(linenos='inline', linenostart=line_code_start)
    html_code = html_code + highlight(tmp_code, lexer, formatter)

    return html_code


def get_base_url():
    if settings['server']['base_url']:
        hostname = settings['server']['base_url']
    else:
        scheme = 'http'
        if request.is_secure:
            scheme = 'https'
        hostname = url_for('index', _external=True, _scheme=scheme)
    return hostname


def get_current_theme_name(override=None):
    """Returns the theme name.

    Checks in this order:
    1. override
    2. the 'theme' GET argument
    3. the 'theme' cookie
    4. settings (default_theme)"""

    if override and override in themes:
        return override
    theme_name = request.args.get('theme',
                                  request.cookies.get('theme',
                                                      default_theme))
    if theme_name not in themes:
        theme_name = default_theme
    return theme_name


def get_result_template(theme, template_name):
    themed_path = theme + '/result_templates/' + template_name
    if themed_path in result_templates:
        return themed_path
    return 'result_templates/' + template_name


def url_for_theme(endpoint, override_theme=None, **values):
    if endpoint == 'static' and values.get('filename'):
        theme_name = get_current_theme_name(override=override_theme)
        filename_with_theme = "themes/{}/{}".format(theme_name, values['filename'])
        if filename_with_theme in static_files:
            values['filename'] = filename_with_theme
    return url_for(endpoint, **values)
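
# image_proxify() rewrites image URLs so they are fetched through the local
# /image_proxy endpoint when the image proxy is enabled.  The 'h' parameter
# is sha256(url + secret_key) and is verified again in image_proxy(), so
# proxied URLs cannot be forged.  Illustrative result (hypothetical values):
#
#   image_proxify('//example.org/img.png')
#   -> '/image_proxy?url=https%3A%2F%2Fexample.org%2Fimg.png&h=<sha256 hexdigest>'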


def image_proxify(url):

    if url.startswith('//'):
        url = 'https:' + url

    url = url.encode('utf-8')

    if not settings['server'].get('image_proxy') and not request.cookies.get('image_proxy'):
        return url

    h = hashlib.sha256(url + settings['server']['secret_key'].encode('utf-8')).hexdigest()
    return '{0}?{1}'.format(url_for('image_proxy'),
                            urlencode(dict(url=url, h=h)))
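
# render() wraps flask.render_template(): it resolves the active theme, fills
# in the category lists (from 'category_*' GET arguments or the 'categories'
# cookie) and injects helpers such as url_for, image_proxify and
# get_result_template into the template context.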


def render(template_name, override_theme=None, **kwargs):
    blocked_engines = get_blocked_engines(engines, request.cookies)

    autocomplete = request.cookies.get('autocomplete')

    if autocomplete not in autocomplete_backends:
        autocomplete = None

    nonblocked_categories = set(category for engine_name in engines
                                for category in engines[engine_name].categories
                                if (engine_name, category) not in blocked_engines)

    if 'categories' not in kwargs:
        kwargs['categories'] = ['general']
        kwargs['categories'].extend(x for x in
                                    sorted(categories.keys())
                                    if x != 'general'
                                    and x in nonblocked_categories)

    if 'selected_categories' not in kwargs:
        kwargs['selected_categories'] = []
        for arg in request.args:
            if arg.startswith('category_'):
                c = arg.split('_', 1)[1]
                if c in categories:
                    kwargs['selected_categories'].append(c)

    if not kwargs['selected_categories']:
        cookie_categories = request.cookies.get('categories', '').split(',')
        for ccateg in cookie_categories:
            if ccateg in categories:
                kwargs['selected_categories'].append(ccateg)

    if not kwargs['selected_categories']:
        kwargs['selected_categories'] = ['general']

    if 'autocomplete' not in kwargs:
        kwargs['autocomplete'] = autocomplete

    kwargs['searx_version'] = VERSION_STRING

    kwargs['method'] = request.cookies.get('method', 'POST')

    # override url_for function in templates
    kwargs['url_for'] = url_for_theme

    kwargs['image_proxify'] = image_proxify

    kwargs['get_result_template'] = get_result_template

    kwargs['theme'] = get_current_theme_name(override=override_theme)

    kwargs['template_name'] = template_name

    kwargs['cookies'] = request.cookies

    return render_template(
        '{}/{}'.format(kwargs['theme'], template_name), **kwargs)
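
# The index view dispatches on the requested output format: 'json' and 'csv'
# serialize the results directly, 'rss' renders opensearch_response_rss.xml,
# and anything else falls through to the themed results.html template.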


@app.route('/search', methods=['GET', 'POST'])
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render index page.

    Supported outputs: html, json, csv, rss.
    """

    if not request.args and not request.form:
        return render(
            'index.html',
        )

    try:
        search = Search(request)
    except:
        return render(
            'index.html',
        )

    search.results, search.suggestions,\
        search.answers, search.infoboxes = search.search(request)

    for result in search.results:

        if not search.paging and engines[result['engine']].paging:
            search.paging = True

        # check if HTTPS rewrite is required
        if settings['server']['https_rewrite']\
           and result['parsed_url'].scheme == 'http':

            result = https_url_rewrite(result)

        if search.request_data.get('format', 'html') == 'html':
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
                                                      search.query.encode('utf-8'))  # noqa
            result['title'] = highlight_content(result['title'],
                                                search.query.encode('utf-8'))
        else:
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            # remove HTML tags and collapse duplicated whitespace
            result['title'] = ' '.join(html_to_text(result['title'])
                                       .strip().split())

        result['pretty_url'] = prettify_url(result['url'])

        # TODO: check if the timezone is calculated correctly
        if 'publishedDate' in result:
            result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')
            if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):
                timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)
                minutes = int((timedifference.seconds / 60) % 60)
                hours = int(timedifference.seconds / 60 / 60)
                if hours == 0:
                    result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)  # noqa
                else:
                    result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes)  # noqa
            else:
                result['publishedDate'] = format_date(result['publishedDate'])

    if search.request_data.get('format') == 'json':
        return Response(json.dumps({'query': search.query,
                                    'results': search.results}),
                        mimetype='application/json')
    elif search.request_data.get('format') == 'csv':
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if search.results:
            csv.writerow(keys)
            for row in search.results:
                row['host'] = row['parsed_url'].netloc
                csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
        response.headers.add('Content-Disposition', cont_disp)
        return response
    elif search.request_data.get('format') == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=search.results,
            q=search.request_data['q'],
            number_of_results=len(search.results),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    return render(
        'results.html',
        results=search.results,
        q=search.request_data['q'],
        selected_categories=search.categories,
        paging=search.paging,
        pageno=search.pageno,
        base_url=get_base_url(),
        suggestions=search.suggestions,
        answers=search.answers,
        infoboxes=search.infoboxes,
        theme=get_current_theme_name(),
        favicons=global_favicons[themes.index(get_current_theme_name())]
    )


@app.route('/about', methods=['GET'])
def about():
    """Render about page"""
    return render(
        'about.html',
    )
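
# The autocompleter first lets searx_bang() expand searx-specific syntax
# (engine !bangs, :language prefixes) and only asks the backend selected via
# the 'autocomplete' cookie for suggestions if at most three such results
# were produced.  Output is either a plain JSON list or, when
# format=x-suggestions is requested, the [query, completions] pair used by
# browser search boxes.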


@app.route('/autocompleter', methods=['GET', 'POST'])
def autocompleter():
    """Return autocompleter results"""
    request_data = {}

    # select request method
    if request.method == 'POST':
        request_data = request.form
    else:
        request_data = request.args

    # set blocked engines
    blocked_engines = get_blocked_engines(engines, request.cookies)

    # parse query
    query = Query(request_data.get('q', '').encode('utf-8'), blocked_engines)
    query.parse_query()

    # check if search query is set
    if not query.getSearchQuery():
        return '', 400

    # run autocompleter
    completer = autocomplete_backends.get(request.cookies.get('autocomplete'))

    # parse searx-specific autocompleter results like !bang
    raw_results = searx_bang(query)

    # only add normal autocompletion results if at most 3 inner results were returned
    if len(raw_results) <= 3 and completer:
        # run autocompletion
        raw_results.extend(completer(query.getSearchQuery()))

    # parse results (write :language and !engine back to result string)
    results = []
    for result in raw_results:
        query.changeSearchQuery(result)

        # add parsed result
        results.append(query.getFullQuery())

    # return autocompleter results
    if request_data.get('format') == 'x-suggestions':
        return Response(json.dumps([query.query, results]),
                        mimetype='application/json')

    return Response(json.dumps(results),
                    mimetype='application/json')
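
# Preferences are persisted purely in cookies: blocked engines are stored as
# comma-separated 'enginename__category' pairs under 'blocked_engines', and
# locale, language, categories, autocomplete, method, image_proxy and theme
# each get their own cookie, all with the same five-year lifetime.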


@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    """Render preferences page.

    Settings that are going to be saved as cookies."""
    lang = None
    image_proxy = request.cookies.get('image_proxy', settings['server'].get('image_proxy'))

    if request.cookies.get('language')\
       and request.cookies['language'] in (x[0] for x in language_codes):
        lang = request.cookies['language']

    blocked_engines = []

    resp = make_response(redirect(url_for('index')))

    if request.method == 'GET':
        blocked_engines = get_blocked_engines(engines, request.cookies)
    else:  # on save
        selected_categories = []
        locale = None
        autocomplete = ''
        method = 'POST'
        # fall back to the default theme if the form does not submit one
        theme = default_theme
        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if category not in categories:
                    continue
                selected_categories.append(category)
            elif pd_name == 'locale' and pd in settings['locales']:
                locale = pd
            elif pd_name == 'image_proxy':
                image_proxy = pd
            elif pd_name == 'autocomplete':
                autocomplete = pd
            elif pd_name == 'language' and (pd == 'all' or
                                            pd in (x[0] for
                                                   x in language_codes)):
                lang = pd
            elif pd_name == 'method':
                method = pd
            elif pd_name.startswith('engine_'):
                if pd_name.find('__') > -1:
                    engine_name, category = pd_name.replace('engine_', '', 1).split('__', 1)
                    if engine_name in engines and category in engines[engine_name].categories:
                        blocked_engines.append((engine_name, category))
            elif pd_name == 'theme':
                theme = pd if pd in themes else default_theme
            else:
                resp.set_cookie(pd_name, pd, max_age=cookie_max_age)

        resp.set_cookie(
            'blocked_engines', ','.join('__'.join(e) for e in blocked_engines),
            max_age=cookie_max_age
        )

        if locale:
            resp.set_cookie(
                'locale', locale,
                max_age=cookie_max_age
            )

        if lang:
            resp.set_cookie(
                'language', lang,
                max_age=cookie_max_age
            )

        if selected_categories:
            resp.set_cookie(
                'categories', ','.join(selected_categories),
                max_age=cookie_max_age
            )

        resp.set_cookie(
            'autocomplete', autocomplete,
            max_age=cookie_max_age
        )

        resp.set_cookie('method', method, max_age=cookie_max_age)

        resp.set_cookie('image_proxy', image_proxy, max_age=cookie_max_age)

        resp.set_cookie('theme', theme, max_age=cookie_max_age)

        return resp
    return render('preferences.html',
                  locales=settings['locales'],
                  current_locale=get_locale(),
                  current_language=lang or 'all',
                  image_proxy=image_proxy,
                  language_codes=language_codes,
                  categs=categories.items(),
                  blocked_engines=blocked_engines,
                  autocomplete_backends=autocomplete_backends,
                  shortcuts={y: x for x, y in engine_shortcuts.items()},
                  themes=themes,
                  theme=get_current_theme_name())
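
# image_proxy() refuses to fetch anything unless the 'h' parameter matches
# sha256(url + secret_key), then checks that the upstream response really is
# an image and reads at most ~5 MB of it before answering 502.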


@app.route('/image_proxy', methods=['GET'])
def image_proxy():
    url = request.args.get('url')

    if not url:
        return '', 400

    url = url.encode('utf-8')

    h = hashlib.sha256(url + settings['server']['secret_key'].encode('utf-8')).hexdigest()

    if h != request.args.get('h'):
        return '', 400

    headers = dict_subset(request.headers, {'If-Modified-Since', 'If-None-Match'})
    headers['User-Agent'] = gen_useragent()

    resp = http_get(url,
                    stream=True,
                    timeout=settings['server'].get('request_timeout', 2),
                    headers=headers)

    if resp.status_code == 304:
        return '', resp.status_code

    if resp.status_code != 200:
        logger.debug('image-proxy: wrong response code: {0}'.format(resp.status_code))
        if resp.status_code >= 400:
            return '', resp.status_code
        return '', 400

    if not resp.headers.get('content-type', '').startswith('image/'):
        logger.debug('image-proxy: wrong content-type: {0}'.format(resp.headers.get('content-type')))
        return '', 400

    img = ''
    chunk_counter = 0

    for chunk in resp.iter_content(1024 * 1024):
        chunk_counter += 1
        if chunk_counter > 5:
            return '', 502  # Bad gateway - file is too big (>5M)
        img += chunk

    headers = dict_subset(resp.headers, {'Content-Length', 'Length', 'Date', 'Last-Modified', 'Expires', 'Etag'})

    return Response(img, mimetype=resp.headers['content-type'], headers=headers)


@app.route('/stats', methods=['GET'])
def stats():
    """Render engine statistics page."""
    stats = get_engines_stats()
    return render(
        'stats.html',
        stats=stats,
    )


@app.route('/robots.txt', methods=['GET'])
def robots():
    return Response("""User-agent: *
Allow: /
Allow: /about
Disallow: /stats
Disallow: /preferences
""", mimetype='text/plain')


@app.route('/opensearch.xml', methods=['GET'])
def opensearch():
    method = 'post'
    # chrome/chromium only supports HTTP GET....
    if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
        method = 'get'

    ret = render('opensearch.xml',
                 opensearch_method=method,
                 host=get_base_url())

    resp = Response(response=ret,
                    status=200,
                    mimetype="text/xml")
    return resp


@app.route('/favicon.ico')
def favicon():
    return send_from_directory(os.path.join(app.root_path,
                                            'static/themes',
                                            get_current_theme_name(),
                                            'img'),
                               'favicon.png',
                               mimetype='image/vnd.microsoft.icon')
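
# 'application' below is the WSGI callable; WSGI servers such as uwsgi or
# gunicorn are typically pointed at searx.webapp:application, while run()
# only starts Flask's built-in development server.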


def run():
    app.run(
        debug=settings['server']['debug'],
        use_debugger=settings['server']['debug'],
        port=settings['server']['port']
    )


application = app


if __name__ == "__main__":
    run()