#!/usr/bin/env python
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
  15. if __name__ == '__main__':
  16. from sys import path
  17. from os.path import realpath, dirname
  18. path.append(realpath(dirname(realpath(__file__)) + '/../'))
  19. import json
  20. import cStringIO
  21. import os
  22. import hashlib
  23. from datetime import datetime, timedelta
  24. from urllib import urlencode
  25. from urlparse import urlparse
  26. from werkzeug.contrib.fixers import ProxyFix
  27. from flask import (
  28. Flask, request, render_template, url_for, Response, make_response,
  29. redirect, send_from_directory
  30. )
  31. from flask.ext.babel import Babel, gettext, format_date
  32. from searx import settings, searx_dir
  33. from searx.poolrequests import get as http_get
  34. from searx.engines import (
  35. categories, engines, get_engines_stats, engine_shortcuts
  36. )
  37. from searx.utils import (
  38. UnicodeWriter, highlight_content, html_to_text, get_themes,
  39. get_static_files, get_result_templates, gen_useragent, dict_subset,
  40. prettify_url, get_blocked_engines
  41. )
  42. from searx.version import VERSION_STRING
  43. from searx.languages import language_codes
  44. from searx.https_rewrite import https_url_rewrite
  45. from searx.search import Search
  46. from searx.query import Query
  47. from searx.autocomplete import searx_bang, backends as autocomplete_backends
  48. from searx import logger
  49. try:
  50. from pygments import highlight
  51. from pygments.lexers import get_lexer_by_name
  52. from pygments.formatters import HtmlFormatter
  53. except:
  54. logger.critical("cannot import dependency: pygments")
  55. from sys import exit
  56. exit(1)
  57. logger = logger.getChild('webapp')
  58. static_path, templates_path, themes =\
  59. get_themes(settings['themes_path']
  60. if settings.get('themes_path')
  61. else searx_dir)
  62. default_theme = settings['server'].get('default_theme', 'default')
  63. static_files = get_static_files(searx_dir)
  64. result_templates = get_result_templates(searx_dir)
  65. app = Flask(
  66. __name__,
  67. static_folder=static_path,
  68. template_folder=templates_path
  69. )
  70. app.secret_key = settings['server']['secret_key']
  71. babel = Babel(app)
  72. rtl_locales = ['ar', 'arc', 'bcc', 'bqi', 'ckb', 'dv', 'fa', 'glk', 'he',
  73. 'ku', 'mzn', 'pnb'', ''ps', 'sd', 'ug', 'ur', 'yi']
  74. global_favicons = []
  75. for indice, theme in enumerate(themes):
  76. global_favicons.append([])
  77. theme_img_path = searx_dir + "/static/themes/" + theme + "/img/icons/"
  78. for (dirpath, dirnames, filenames) in os.walk(theme_img_path):
  79. global_favicons[indice].extend(filenames)
  80. cookie_max_age = 60 * 60 * 24 * 365 * 5 # 5 years
  81. _category_names = (gettext('files'),
  82. gettext('general'),
  83. gettext('music'),
  84. gettext('social media'),
  85. gettext('images'),
  86. gettext('videos'),
  87. gettext('it'),
  88. gettext('news'),
  89. gettext('map'))
  90. @babel.localeselector
  91. def get_locale():
  92. locale = request.accept_languages.best_match(settings['locales'].keys())
  93. if settings['server'].get('default_locale'):
  94. locale = settings['server']['default_locale']
  95. if request.cookies.get('locale', '') in settings['locales']:
  96. locale = request.cookies.get('locale', '')
  97. if 'locale' in request.args\
  98. and request.args['locale'] in settings['locales']:
  99. locale = request.args['locale']
  100. if 'locale' in request.form\
  101. and request.form['locale'] in settings['locales']:
  102. locale = request.form['locale']
  103. return locale
  104. # code-highlighter
  105. @app.template_filter('code_highlighter')
  106. def code_highlighter(codelines, language=None):
  107. if not language:
  108. language = 'text'
  109. try:
  110. # find lexer by programing language
  111. lexer = get_lexer_by_name(language, stripall=True)
  112. except:
  113. # if lexer is not found, using default one
  114. logger.debug('highlighter cannot find lexer for {0}'.format(language))
  115. lexer = get_lexer_by_name('text', stripall=True)
  116. html_code = ''
  117. tmp_code = ''
  118. last_line = None
  119. # parse lines
  120. for line, code in codelines:
  121. if not last_line:
  122. line_code_start = line
  123. # new codeblock is detected
  124. if last_line is not None and\
  125. last_line + 1 != line:
  126. # highlight last codepart
  127. formatter = HtmlFormatter(linenos='inline',
  128. linenostart=line_code_start)
  129. html_code = html_code + highlight(tmp_code, lexer, formatter)
  130. # reset conditions for next codepart
  131. tmp_code = ''
  132. line_code_start = line
  133. # add codepart
  134. tmp_code += code + '\n'
  135. # update line
  136. last_line = line
  137. # highlight last codepart
  138. formatter = HtmlFormatter(linenos='inline', linenostart=line_code_start)
  139. html_code = html_code + highlight(tmp_code, lexer, formatter)
  140. return html_code
  141. # Extract domain from url
  142. @app.template_filter('extract_domain')
  143. def extract_domain(url):
  144. return urlparse(url)[1]
  145. def get_base_url():
  146. if settings['server']['base_url']:
  147. hostname = settings['server']['base_url']
  148. else:
  149. scheme = 'http'
  150. if request.is_secure:
  151. scheme = 'https'
  152. hostname = url_for('index', _external=True, _scheme=scheme)
  153. return hostname
  154. def get_current_theme_name(override=None):
  155. """Returns theme name.
  156. Checks in this order:
  157. 1. override
  158. 2. cookies
  159. 3. settings"""
  160. if override and override in themes:
  161. return override
  162. theme_name = request.args.get('theme',
  163. request.cookies.get('theme',
  164. default_theme))
  165. if theme_name not in themes:
  166. theme_name = default_theme
  167. return theme_name
  168. def get_result_template(theme, template_name):
  169. themed_path = theme + '/result_templates/' + template_name
  170. if themed_path in result_templates:
  171. return themed_path
  172. return 'result_templates/' + template_name
  173. def url_for_theme(endpoint, override_theme=None, **values):
  174. if endpoint == 'static' and values.get('filename'):
  175. theme_name = get_current_theme_name(override=override_theme)
  176. filename_with_theme = "themes/{}/{}".format(theme_name, values['filename'])
  177. if filename_with_theme in static_files:
  178. values['filename'] = filename_with_theme
  179. return url_for(endpoint, **values)
  180. def image_proxify(url):
  181. if url.startswith('//'):
  182. url = 'https:' + url
  183. if not settings['server'].get('image_proxy') and not request.cookies.get('image_proxy'):
  184. return url
  185. hash_string = url + settings['server']['secret_key']
  186. h = hashlib.sha256(hash_string.encode('utf-8')).hexdigest()
  187. return '{0}?{1}'.format(url_for('image_proxy'),
  188. urlencode(dict(url=url.encode('utf-8'), h=h)))
  189. def render(template_name, override_theme=None, **kwargs):
  190. blocked_engines = get_blocked_engines(engines, request.cookies)
  191. autocomplete = request.cookies.get('autocomplete')
  192. if autocomplete not in autocomplete_backends:
  193. autocomplete = None
  194. nonblocked_categories = set(category for engine_name in engines
  195. for category in engines[engine_name].categories
  196. if (engine_name, category) not in blocked_engines)
  197. if 'categories' not in kwargs:
  198. kwargs['categories'] = ['general']
  199. kwargs['categories'].extend(x for x in
  200. sorted(categories.keys())
  201. if x != 'general'
  202. and x in nonblocked_categories)
  203. if 'selected_categories' not in kwargs:
  204. kwargs['selected_categories'] = []
  205. for arg in request.args:
  206. if arg.startswith('category_'):
  207. c = arg.split('_', 1)[1]
  208. if c in categories:
  209. kwargs['selected_categories'].append(c)
  210. if not kwargs['selected_categories']:
  211. cookie_categories = request.cookies.get('categories', '').split(',')
  212. for ccateg in cookie_categories:
  213. if ccateg in categories:
  214. kwargs['selected_categories'].append(ccateg)
  215. if not kwargs['selected_categories']:
  216. kwargs['selected_categories'] = ['general']
  217. if 'autocomplete' not in kwargs:
  218. kwargs['autocomplete'] = autocomplete
  219. if get_locale() in rtl_locales and 'rtl' not in kwargs:
  220. kwargs['rtl'] = True
  221. kwargs['searx_version'] = VERSION_STRING
  222. kwargs['method'] = request.cookies.get('method', 'POST')
  223. kwargs['safesearch'] = request.cookies.get('safesearch', '1')
  224. # override url_for function in templates
  225. kwargs['url_for'] = url_for_theme
  226. kwargs['image_proxify'] = image_proxify
  227. kwargs['get_result_template'] = get_result_template
  228. kwargs['theme'] = get_current_theme_name(override=override_theme)
  229. kwargs['template_name'] = template_name
  230. kwargs['cookies'] = request.cookies
  231. return render_template(
  232. '{}/{}'.format(kwargs['theme'], template_name), **kwargs)
  233. @app.route('/search', methods=['GET', 'POST'])
  234. @app.route('/', methods=['GET', 'POST'])
  235. def index():
  236. """Render index page.
  237. Supported outputs: html, json, csv, rss.
  238. """
  239. if not request.args and not request.form:
  240. return render(
  241. 'index.html',
  242. )
  243. try:
  244. search = Search(request)
  245. except:
  246. return render(
  247. 'index.html',
  248. )
  249. search.results, search.suggestions,\
  250. search.answers, search.infoboxes = search.search(request)
  251. for result in search.results:
  252. if not search.paging and engines[result['engine']].paging:
  253. search.paging = True
  254. # check if HTTPS rewrite is required
  255. if settings['server']['https_rewrite']\
  256. and result['parsed_url'].scheme == 'http':
  257. result = https_url_rewrite(result)
  258. if search.request_data.get('format', 'html') == 'html':
  259. if 'content' in result:
  260. result['content'] = highlight_content(result['content'],
  261. search.query.encode('utf-8')) # noqa
  262. result['title'] = highlight_content(result['title'],
  263. search.query.encode('utf-8'))
  264. else:
  265. if 'content' in result:
  266. result['content'] = html_to_text(result['content']).strip()
  267. # removing html content and whitespace duplications
  268. result['title'] = ' '.join(html_to_text(result['title'])
  269. .strip().split())
  270. result['pretty_url'] = prettify_url(result['url'])
  271. # TODO, check if timezone is calculated right
  272. if 'publishedDate' in result:
  273. result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')
  274. if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):
  275. timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)
  276. minutes = int((timedifference.seconds / 60) % 60)
  277. hours = int(timedifference.seconds / 60 / 60)
  278. if hours == 0:
  279. result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes) # noqa
  280. else:
  281. result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes) # noqa
  282. else:
  283. result['publishedDate'] = format_date(result['publishedDate'])
  284. if search.request_data.get('format') == 'json':
  285. return Response(json.dumps({'query': search.query,
  286. 'results': search.results}),
  287. mimetype='application/json')
  288. elif search.request_data.get('format') == 'csv':
  289. csv = UnicodeWriter(cStringIO.StringIO())
  290. keys = ('title', 'url', 'content', 'host', 'engine', 'score')
  291. if search.results:
  292. csv.writerow(keys)
  293. for row in search.results:
  294. row['host'] = row['parsed_url'].netloc
  295. csv.writerow([row.get(key, '') for key in keys])
  296. csv.stream.seek(0)
  297. response = Response(csv.stream.read(), mimetype='application/csv')
  298. cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
  299. response.headers.add('Content-Disposition', cont_disp)
  300. return response
  301. elif search.request_data.get('format') == 'rss':
  302. response_rss = render(
  303. 'opensearch_response_rss.xml',
  304. results=search.results,
  305. q=search.request_data['q'],
  306. number_of_results=len(search.results),
  307. base_url=get_base_url()
  308. )
  309. return Response(response_rss, mimetype='text/xml')
  310. return render(
  311. 'results.html',
  312. results=search.results,
  313. q=search.request_data['q'],
  314. selected_categories=search.categories,
  315. paging=search.paging,
  316. pageno=search.pageno,
  317. base_url=get_base_url(),
  318. suggestions=search.suggestions,
  319. answers=search.answers,
  320. infoboxes=search.infoboxes,
  321. theme=get_current_theme_name(),
  322. favicons=global_favicons[themes.index(get_current_theme_name())]
  323. )
  324. @app.route('/about', methods=['GET'])
  325. def about():
  326. """Render about page"""
  327. return render(
  328. 'about.html',
  329. )
  330. @app.route('/autocompleter', methods=['GET', 'POST'])
  331. def autocompleter():
  332. """Return autocompleter results"""
  333. request_data = {}
  334. # select request method
  335. if request.method == 'POST':
  336. request_data = request.form
  337. else:
  338. request_data = request.args
  339. # set blocked engines
  340. blocked_engines = get_blocked_engines(engines, request.cookies)
  341. # parse query
  342. query = Query(request_data.get('q', '').encode('utf-8'), blocked_engines)
  343. query.parse_query()
  344. # check if search query is set
  345. if not query.getSearchQuery():
  346. return '', 400
  347. # run autocompleter
  348. completer = autocomplete_backends.get(request.cookies.get('autocomplete'))
  349. # parse searx specific autocompleter results like !bang
  350. raw_results = searx_bang(query)
  351. # normal autocompletion results only appear if max 3 inner results returned
  352. if len(raw_results) <= 3 and completer:
  353. # run autocompletion
  354. raw_results.extend(completer(query.getSearchQuery()))
  355. # parse results (write :language and !engine back to result string)
  356. results = []
  357. for result in raw_results:
  358. query.changeSearchQuery(result)
  359. # add parsed result
  360. results.append(query.getFullQuery())
  361. # return autocompleter results
  362. if request_data.get('format') == 'x-suggestions':
  363. return Response(json.dumps([query.query, results]),
  364. mimetype='application/json')
  365. return Response(json.dumps(results),
  366. mimetype='application/json')
  367. @app.route('/preferences', methods=['GET', 'POST'])
  368. def preferences():
  369. """Render preferences page.
  370. Settings that are going to be saved as cookies."""
  371. lang = None
  372. image_proxy = request.cookies.get('image_proxy', settings['server'].get('image_proxy'))
  373. if request.cookies.get('language')\
  374. and request.cookies['language'] in (x[0] for x in language_codes):
  375. lang = request.cookies['language']
  376. blocked_engines = []
  377. resp = make_response(redirect(url_for('index')))
  378. if request.method == 'GET':
  379. blocked_engines = get_blocked_engines(engines, request.cookies)
  380. else: # on save
  381. selected_categories = []
  382. locale = None
  383. autocomplete = ''
  384. method = 'POST'
  385. safesearch = '1'
  386. for pd_name, pd in request.form.items():
  387. if pd_name.startswith('category_'):
  388. category = pd_name[9:]
  389. if category not in categories:
  390. continue
  391. selected_categories.append(category)
  392. elif pd_name == 'locale' and pd in settings['locales']:
  393. locale = pd
  394. elif pd_name == 'image_proxy':
  395. image_proxy = pd
  396. elif pd_name == 'autocomplete':
  397. autocomplete = pd
  398. elif pd_name == 'language' and (pd == 'all' or
  399. pd in (x[0] for
  400. x in language_codes)):
  401. lang = pd
  402. elif pd_name == 'method':
  403. method = pd
  404. elif pd_name == 'safesearch':
  405. safesearch = pd
  406. elif pd_name.startswith('engine_'):
  407. if pd_name.find('__') > -1:
  408. engine_name, category = pd_name.replace('engine_', '', 1).split('__', 1)
  409. if engine_name in engines and category in engines[engine_name].categories:
  410. blocked_engines.append((engine_name, category))
  411. elif pd_name == 'theme':
  412. theme = pd if pd in themes else default_theme
  413. else:
  414. resp.set_cookie(pd_name, pd, max_age=cookie_max_age)
  415. resp.set_cookie(
  416. 'blocked_engines', ','.join('__'.join(e) for e in blocked_engines),
  417. max_age=cookie_max_age
  418. )
  419. if locale:
  420. resp.set_cookie(
  421. 'locale', locale,
  422. max_age=cookie_max_age
  423. )
  424. if lang:
  425. resp.set_cookie(
  426. 'language', lang,
  427. max_age=cookie_max_age
  428. )
  429. if selected_categories:
  430. # cookie max age: 4 weeks
  431. resp.set_cookie(
  432. 'categories', ','.join(selected_categories),
  433. max_age=cookie_max_age
  434. )
  435. resp.set_cookie(
  436. 'autocomplete', autocomplete,
  437. max_age=cookie_max_age
  438. )
  439. resp.set_cookie('method', method, max_age=cookie_max_age)
  440. resp.set_cookie('safesearch', safesearch, max_age=cookie_max_age)
  441. resp.set_cookie('image_proxy', image_proxy, max_age=cookie_max_age)
  442. resp.set_cookie('theme', theme, max_age=cookie_max_age)
  443. return resp
  444. return render('preferences.html',
  445. locales=settings['locales'],
  446. current_locale=get_locale(),
  447. current_language=lang or 'all',
  448. image_proxy=image_proxy,
  449. language_codes=language_codes,
  450. categs=categories.items(),
  451. blocked_engines=blocked_engines,
  452. autocomplete_backends=autocomplete_backends,
  453. shortcuts={y: x for x, y in engine_shortcuts.items()},
  454. themes=themes,
  455. theme=get_current_theme_name())
  456. @app.route('/image_proxy', methods=['GET'])
  457. def image_proxy():
  458. url = request.args.get('url').encode('utf-8')
  459. if not url:
  460. return '', 400
  461. h = hashlib.sha256(url + settings['server']['secret_key'].encode('utf-8')).hexdigest()
  462. if h != request.args.get('h'):
  463. return '', 400
  464. headers = dict_subset(request.headers, {'If-Modified-Since', 'If-None-Match'})
  465. headers['User-Agent'] = gen_useragent()
  466. resp = http_get(url,
  467. stream=True,
  468. timeout=settings['server'].get('request_timeout', 2),
  469. headers=headers)
  470. if resp.status_code == 304:
  471. return '', resp.status_code
  472. if resp.status_code != 200:
  473. logger.debug('image-proxy: wrong response code: {0}'.format(resp.status_code))
  474. if resp.status_code >= 400:
  475. return '', resp.status_code
  476. return '', 400
  477. if not resp.headers.get('content-type', '').startswith('image/'):
  478. logger.debug('image-proxy: wrong content-type: {0}'.format(resp.get('content-type')))
  479. return '', 400
  480. img = ''
  481. chunk_counter = 0
  482. for chunk in resp.iter_content(1024 * 1024):
  483. chunk_counter += 1
  484. if chunk_counter > 5:
  485. return '', 502 # Bad gateway - file is too big (>5M)
  486. img += chunk
  487. headers = dict_subset(resp.headers, {'Content-Length', 'Length', 'Date', 'Last-Modified', 'Expires', 'Etag'})
  488. return Response(img, mimetype=resp.headers['content-type'], headers=headers)
  489. @app.route('/stats', methods=['GET'])
  490. def stats():
  491. """Render engine statistics page."""
  492. stats = get_engines_stats()
  493. return render(
  494. 'stats.html',
  495. stats=stats,
  496. )
  497. @app.route('/robots.txt', methods=['GET'])
  498. def robots():
  499. return Response("""User-agent: *
  500. Allow: /
  501. Allow: /about
  502. Disallow: /stats
  503. Disallow: /preferences
  504. """, mimetype='text/plain')
  505. @app.route('/opensearch.xml', methods=['GET'])
  506. def opensearch():
  507. method = 'post'
  508. # chrome/chromium only supports HTTP GET....
  509. if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
  510. method = 'get'
  511. ret = render('opensearch.xml',
  512. opensearch_method=method,
  513. host=get_base_url())
  514. resp = Response(response=ret,
  515. status=200,
  516. mimetype="text/xml")
  517. return resp
  518. @app.route('/favicon.ico')
  519. def favicon():
  520. return send_from_directory(os.path.join(app.root_path,
  521. 'static/themes',
  522. get_current_theme_name(),
  523. 'img'),
  524. 'favicon.png',
  525. mimetype='image/vnd.microsoft.icon')
  526. def run():
  527. app.run(
  528. debug=settings['server']['debug'],
  529. use_debugger=settings['server']['debug'],
  530. port=settings['server']['port']
  531. )
  532. application = app
  533. app.wsgi_app = ProxyFix(application.wsgi_app)
  534. if __name__ == "__main__":
  535. run()