webapp.py

#!/usr/bin/env python
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

if __name__ == '__main__':
    from sys import path
    from os.path import realpath, dirname
    path.append(realpath(dirname(realpath(__file__)) + '/../'))

import json
import cStringIO
import os
import hashlib

from datetime import datetime, timedelta
from urllib import urlencode
from werkzeug.contrib.fixers import ProxyFix
from flask import (
    Flask, request, render_template, url_for, Response, make_response,
    redirect, send_from_directory
)
from flask.ext.babel import Babel, gettext, format_date
from searx import settings, searx_dir
from searx.poolrequests import get as http_get
from searx.engines import (
    categories, engines, get_engines_stats, engine_shortcuts
)
from searx.utils import (
    UnicodeWriter, highlight_content, html_to_text, get_themes,
    get_static_files, get_result_templates, gen_useragent, dict_subset,
    prettify_url, get_blocked_engines
)
from searx.version import VERSION_STRING
from searx.languages import language_codes
from searx.https_rewrite import https_url_rewrite
from searx.search import Search
from searx.query import Query
from searx.autocomplete import searx_bang, backends as autocomplete_backends
from searx import logger

try:
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter
except:
    logger.critical("cannot import dependency: pygments")
    from sys import exit
    exit(1)

# check if the pyopenssl, ndg-httpsclient, pyasn1 packages are installed.
# They are needed for SSL connections without trouble, see #298
try:
    import OpenSSL.SSL  # NOQA
    import ndg.httpsclient  # NOQA
    import pyasn1  # NOQA
except ImportError:
    logger.critical("The pyopenssl, ndg-httpsclient, pyasn1 packages have to be installed.\n"
                    "Some HTTPS connections will fail")

logger = logger.getChild('webapp')

static_path, templates_path, themes =\
    get_themes(settings['themes_path']
               if settings.get('themes_path')
               else searx_dir)

default_theme = settings['server'].get('default_theme', 'default')

static_files = get_static_files(searx_dir)

result_templates = get_result_templates(searx_dir)

app = Flask(
    __name__,
    static_folder=static_path,
    template_folder=templates_path
)

app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
app.secret_key = settings['server']['secret_key']

babel = Babel(app)

rtl_locales = ['ar', 'arc', 'bcc', 'bqi', 'ckb', 'dv', 'fa', 'glk', 'he',
               'ku', 'mzn', 'pnb', 'ps', 'sd', 'ug', 'ur', 'yi']

global_favicons = []
for indice, theme in enumerate(themes):
    global_favicons.append([])
    theme_img_path = searx_dir + "/static/themes/" + theme + "/img/icons/"
    for (dirpath, dirnames, filenames) in os.walk(theme_img_path):
        global_favicons[indice].extend(filenames)

cookie_max_age = 60 * 60 * 24 * 365 * 5  # 5 years

_category_names = (gettext('files'),
                   gettext('general'),
                   gettext('music'),
                   gettext('social media'),
                   gettext('images'),
                   gettext('videos'),
                   gettext('it'),
                   gettext('news'),
                   gettext('map'))


@babel.localeselector
def get_locale():
    locale = request.accept_languages.best_match(settings['locales'].keys())

    if settings['server'].get('default_locale'):
        locale = settings['server']['default_locale']

    if request.cookies.get('locale', '') in settings['locales']:
        locale = request.cookies.get('locale', '')

    if 'locale' in request.args\
       and request.args['locale'] in settings['locales']:
        locale = request.args['locale']

    if 'locale' in request.form\
       and request.form['locale'] in settings['locales']:
        locale = request.form['locale']

    return locale


# code-highlighter
@app.template_filter('code_highlighter')
def code_highlighter(codelines, language=None):
    if not language:
        language = 'text'

    try:
        # find lexer by programming language
        lexer = get_lexer_by_name(language, stripall=True)
    except:
        # if the lexer is not found, fall back to the default one
        logger.debug('highlighter cannot find lexer for {0}'.format(language))
        lexer = get_lexer_by_name('text', stripall=True)

    html_code = ''
    tmp_code = ''
    last_line = None

    # parse lines
    for line, code in codelines:
        if not last_line:
            line_code_start = line

        # new codeblock is detected
        if last_line is not None and\
           last_line + 1 != line:

            # highlight last codepart
            formatter = HtmlFormatter(linenos='inline',
                                      linenostart=line_code_start)
            html_code = html_code + highlight(tmp_code, lexer, formatter)

            # reset conditions for next codepart
            tmp_code = ''
            line_code_start = line

        # add codepart
        tmp_code += code + '\n'

        # update line
        last_line = line

    # highlight last codepart
    formatter = HtmlFormatter(linenos='inline', linenostart=line_code_start)
    html_code = html_code + highlight(tmp_code, lexer, formatter)

    return html_code


def get_base_url():
    if settings['server']['base_url']:
        hostname = settings['server']['base_url']
    else:
        scheme = 'http'
        if request.is_secure:
            scheme = 'https'
        hostname = url_for('index', _external=True, _scheme=scheme)
    return hostname


def get_current_theme_name(override=None):
    """Returns theme name.

    Checks in this order:
    1. override
    2. cookies
    3. settings"""

    if override and override in themes:
        return override
    theme_name = request.args.get('theme',
                                  request.cookies.get('theme',
                                                      default_theme))
    if theme_name not in themes:
        theme_name = default_theme
    return theme_name


def get_result_template(theme, template_name):
    themed_path = theme + '/result_templates/' + template_name
    if themed_path in result_templates:
        return themed_path
    return 'result_templates/' + template_name
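

# url_for_theme replaces url_for inside the templates: for the 'static'
# endpoint it rewrites the filename to the active theme's copy of the file
# when such a themed file exists.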
def url_for_theme(endpoint, override_theme=None, **values):
    if endpoint == 'static' and values.get('filename'):
        theme_name = get_current_theme_name(override=override_theme)
        filename_with_theme = "themes/{}/{}".format(theme_name, values['filename'])
        if filename_with_theme in static_files:
            values['filename'] = filename_with_theme
    return url_for(endpoint, **values)
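

# image_proxify turns an image URL into a link through the local /image_proxy
# endpoint; the sha256 of the URL and the instance's secret_key serves as a
# token, so only URLs generated by this instance get proxied.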
def image_proxify(url):
    if url.startswith('//'):
        url = 'https:' + url

    if not settings['server'].get('image_proxy') and not request.cookies.get('image_proxy'):
        return url

    hash_string = url + settings['server']['secret_key']
    h = hashlib.sha256(hash_string.encode('utf-8')).hexdigest()

    return '{0}?{1}'.format(url_for('image_proxy'),
                            urlencode(dict(url=url.encode('utf-8'), h=h)))
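

# render wraps Flask's render_template: it fills in the category lists, the
# active theme and the user preferences taken from cookies, and exposes the
# helpers url_for_theme, image_proxify and get_result_template to the
# Jinja templates.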
def render(template_name, override_theme=None, **kwargs):
    blocked_engines = get_blocked_engines(engines, request.cookies)

    autocomplete = request.cookies.get('autocomplete')
    if autocomplete not in autocomplete_backends:
        autocomplete = None

    nonblocked_categories = set(category for engine_name in engines
                                for category in engines[engine_name].categories
                                if (engine_name, category) not in blocked_engines)

    if 'categories' not in kwargs:
        kwargs['categories'] = ['general']
        kwargs['categories'].extend(x for x in
                                    sorted(categories.keys())
                                    if x != 'general'
                                    and x in nonblocked_categories)

    if 'selected_categories' not in kwargs:
        kwargs['selected_categories'] = []
        for arg in request.args:
            if arg.startswith('category_'):
                c = arg.split('_', 1)[1]
                if c in categories:
                    kwargs['selected_categories'].append(c)

        if not kwargs['selected_categories']:
            cookie_categories = request.cookies.get('categories', '').split(',')
            for ccateg in cookie_categories:
                if ccateg in categories:
                    kwargs['selected_categories'].append(ccateg)

        if not kwargs['selected_categories']:
            kwargs['selected_categories'] = ['general']

    if 'autocomplete' not in kwargs:
        kwargs['autocomplete'] = autocomplete

    if get_locale() in rtl_locales and 'rtl' not in kwargs:
        kwargs['rtl'] = True

    kwargs['searx_version'] = VERSION_STRING
    kwargs['method'] = request.cookies.get('method', 'POST')
    kwargs['safesearch'] = request.cookies.get('safesearch', '1')

    # override url_for function in templates
    kwargs['url_for'] = url_for_theme
    kwargs['image_proxify'] = image_proxify
    kwargs['get_result_template'] = get_result_template
    kwargs['theme'] = get_current_theme_name(override=override_theme)
    kwargs['template_name'] = template_name
    kwargs['cookies'] = request.cookies

    return render_template(
        '{}/{}'.format(kwargs['theme'], template_name), **kwargs)


@app.route('/search', methods=['GET', 'POST'])
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render index page.

    Supported outputs: html, json, csv, rss.
    """

    if not request.args and not request.form:
        return render(
            'index.html',
        )

    try:
        search = Search(request)
    except:
        return render(
            'index.html',
        )

    search.results, search.suggestions,\
        search.answers, search.infoboxes = search.search(request)

    for result in search.results:
        if not search.paging and engines[result['engine']].paging:
            search.paging = True

        # check if HTTPS rewrite is required
        if settings['server']['https_rewrite']\
           and result['parsed_url'].scheme == 'http':
            result = https_url_rewrite(result)

        if search.request_data.get('format', 'html') == 'html':
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
                                                      search.query.encode('utf-8'))  # noqa
            result['title'] = highlight_content(result['title'],
                                                search.query.encode('utf-8'))
        else:
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            # removing html content and whitespace duplications
            result['title'] = ' '.join(html_to_text(result['title'])
                                       .strip().split())

        result['pretty_url'] = prettify_url(result['url'])

        # TODO, check if timezone is calculated right
        if 'publishedDate' in result:
            result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')
            if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):
                timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)
                minutes = int((timedifference.seconds / 60) % 60)
                hours = int(timedifference.seconds / 60 / 60)
                if hours == 0:
                    result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)  # noqa
                else:
                    result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes)  # noqa
            else:
                result['publishedDate'] = format_date(result['publishedDate'])

    if search.request_data.get('format') == 'json':
        return Response(json.dumps({'query': search.query,
                                    'results': search.results}),
                        mimetype='application/json')
    elif search.request_data.get('format') == 'csv':
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if search.results:
            csv.writerow(keys)
            for row in search.results:
                row['host'] = row['parsed_url'].netloc
                csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
        response.headers.add('Content-Disposition', cont_disp)
        return response
    elif search.request_data.get('format') == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=search.results,
            q=search.request_data['q'],
            number_of_results=len(search.results),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    return render(
        'results.html',
        results=search.results,
        q=search.request_data['q'],
        selected_categories=search.categories,
        paging=search.paging,
        pageno=search.pageno,
        base_url=get_base_url(),
        suggestions=search.suggestions,
        answers=search.answers,
        infoboxes=search.infoboxes,
        theme=get_current_theme_name(),
        favicons=global_favicons[themes.index(get_current_theme_name())]
    )


@app.route('/about', methods=['GET'])
def about():
    """Render about page"""
    return render(
        'about.html',
    )


@app.route('/autocompleter', methods=['GET', 'POST'])
def autocompleter():
    """Return autocompleter results"""
    request_data = {}

    # select request method
    if request.method == 'POST':
        request_data = request.form
    else:
        request_data = request.args

    # set blocked engines
    blocked_engines = get_blocked_engines(engines, request.cookies)

    # parse query
    query = Query(request_data.get('q', '').encode('utf-8'), blocked_engines)
    query.parse_query()

    # check if search query is set
    if not query.getSearchQuery():
        return '', 400

    # run autocompleter
    completer = autocomplete_backends.get(request.cookies.get('autocomplete'))

    # parse searx specific autocompleter results like !bang
    raw_results = searx_bang(query)

    # normal autocompletion results are only appended if the bang parser
    # returned at most 3 results
    if len(raw_results) <= 3 and completer:
        # run autocompletion
        raw_results.extend(completer(query.getSearchQuery()))

    # parse results (write :language and !engine back to result string)
    results = []
    for result in raw_results:
        query.changeSearchQuery(result)

        # add parsed result
        results.append(query.getFullQuery())

    # return autocompleter results
    if request_data.get('format') == 'x-suggestions':
        return Response(json.dumps([query.query, results]),
                        mimetype='application/json')

    return Response(json.dumps(results),
                    mimetype='application/json')


@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    """Render preferences page.

    Settings that are going to be saved as cookies."""
    lang = None
    image_proxy = request.cookies.get('image_proxy', settings['server'].get('image_proxy'))

    if request.cookies.get('language')\
       and request.cookies['language'] in (x[0] for x in language_codes):
        lang = request.cookies['language']

    blocked_engines = []

    resp = make_response(redirect(url_for('index')))

    if request.method == 'GET':
        blocked_engines = get_blocked_engines(engines, request.cookies)
    else:  # on save
        selected_categories = []
        locale = None
        autocomplete = ''
        method = 'POST'
        safesearch = '1'
        # fall back to the default theme if the form does not submit one
        theme = default_theme

        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if category not in categories:
                    continue
                selected_categories.append(category)
            elif pd_name == 'locale' and pd in settings['locales']:
                locale = pd
            elif pd_name == 'image_proxy':
                image_proxy = pd
            elif pd_name == 'autocomplete':
                autocomplete = pd
            elif pd_name == 'language' and (pd == 'all' or
                                            pd in (x[0] for
                                                   x in language_codes)):
                lang = pd
            elif pd_name == 'method':
                method = pd
            elif pd_name == 'safesearch':
                safesearch = pd
            elif pd_name.startswith('engine_'):
                if pd_name.find('__') > -1:
                    engine_name, category = pd_name.replace('engine_', '', 1).split('__', 1)
                    if engine_name in engines and category in engines[engine_name].categories:
                        blocked_engines.append((engine_name, category))
            elif pd_name == 'theme':
                theme = pd if pd in themes else default_theme
            else:
                resp.set_cookie(pd_name, pd, max_age=cookie_max_age)

        resp.set_cookie(
            'blocked_engines', ','.join('__'.join(e) for e in blocked_engines),
            max_age=cookie_max_age
        )

        if locale:
            resp.set_cookie(
                'locale', locale,
                max_age=cookie_max_age
            )

        if lang:
            resp.set_cookie(
                'language', lang,
                max_age=cookie_max_age
            )

        if selected_categories:
            resp.set_cookie(
                'categories', ','.join(selected_categories),
                max_age=cookie_max_age
            )

        resp.set_cookie(
            'autocomplete', autocomplete,
            max_age=cookie_max_age
        )
        resp.set_cookie('method', method, max_age=cookie_max_age)
        resp.set_cookie('safesearch', safesearch, max_age=cookie_max_age)
        resp.set_cookie('image_proxy', image_proxy, max_age=cookie_max_age)
        resp.set_cookie('theme', theme, max_age=cookie_max_age)

        return resp

    return render('preferences.html',
                  locales=settings['locales'],
                  current_locale=get_locale(),
                  current_language=lang or 'all',
                  image_proxy=image_proxy,
                  language_codes=language_codes,
                  categs=categories.items(),
                  blocked_engines=blocked_engines,
                  autocomplete_backends=autocomplete_backends,
                  shortcuts={y: x for x, y in engine_shortcuts.items()},
                  themes=themes,
                  theme=get_current_theme_name())
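

# /image_proxy verifies the sha256 token generated by image_proxify, fetches
# the image with a generated User-Agent, and refuses responses that are not
# images or are larger than about 5 MB.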
@app.route('/image_proxy', methods=['GET'])
def image_proxy():
    url = request.args.get('url', '').encode('utf-8')

    if not url:
        return '', 400

    h = hashlib.sha256(url + settings['server']['secret_key'].encode('utf-8')).hexdigest()

    if h != request.args.get('h'):
        return '', 400

    headers = dict_subset(request.headers, {'If-Modified-Since', 'If-None-Match'})
    headers['User-Agent'] = gen_useragent()

    resp = http_get(url,
                    stream=True,
                    timeout=settings['server'].get('request_timeout', 2),
                    headers=headers)

    if resp.status_code == 304:
        return '', resp.status_code

    if resp.status_code != 200:
        logger.debug('image-proxy: wrong response code: {0}'.format(resp.status_code))
        if resp.status_code >= 400:
            return '', resp.status_code
        return '', 400

    if not resp.headers.get('content-type', '').startswith('image/'):
        logger.debug('image-proxy: wrong content-type: {0}'.format(resp.headers.get('content-type')))
        return '', 400

    img = ''
    chunk_counter = 0

    for chunk in resp.iter_content(1024 * 1024):
        chunk_counter += 1
        if chunk_counter > 5:
            return '', 502  # Bad gateway - file is too big (>5M)
        img += chunk

    headers = dict_subset(resp.headers, {'Content-Length', 'Length', 'Date', 'Last-Modified', 'Expires', 'Etag'})

    return Response(img, mimetype=resp.headers['content-type'], headers=headers)


@app.route('/stats', methods=['GET'])
def stats():
    """Render engine statistics page."""
    stats = get_engines_stats()
    return render(
        'stats.html',
        stats=stats,
    )


@app.route('/robots.txt', methods=['GET'])
def robots():
    return Response("""User-agent: *
Allow: /
Allow: /about
Disallow: /stats
Disallow: /preferences
""", mimetype='text/plain')


@app.route('/opensearch.xml', methods=['GET'])
def opensearch():
    method = 'post'
    # chrome/chromium only supports HTTP GET....
    if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
        method = 'get'

    ret = render('opensearch.xml',
                 opensearch_method=method,
                 host=get_base_url())

    resp = Response(response=ret,
                    status=200,
                    mimetype="text/xml")
    return resp


@app.route('/favicon.ico')
def favicon():
    return send_from_directory(os.path.join(app.root_path,
                                            'static/themes',
                                            get_current_theme_name(),
                                            'img'),
                               'favicon.png',
                               mimetype='image/vnd.microsoft.icon')


def run():
    app.run(
        debug=settings['server']['debug'],
        use_debugger=settings['server']['debug'],
        port=settings['server']['port']
    )
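

# expose the WSGI application; ProxyFix lets the app pick up the real client
# address from the X-Forwarded-For header when running behind a reverse proxy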
application = app
app.wsgi_app = ProxyFix(application.wsgi_app)


if __name__ == "__main__":
    run()