#!/usr/bin/env python
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
if __name__ == '__main__':
    # When run directly from a source checkout, put the repository root on
    # sys.path so the `searx` package below is importable.
    from sys import path
    from os.path import realpath, dirname
    path.append(realpath(dirname(realpath(__file__))+'/../'))

import json
import cStringIO
import os

from datetime import datetime, timedelta
from itertools import chain
from flask import (
    Flask, request, render_template, url_for, Response, make_response,
    redirect, send_from_directory
)
from flask.ext.babel import Babel, gettext, format_date
from searx import settings, searx_dir
from searx.engines import (
    search as do_search, categories, engines, get_engines_stats,
    engine_shortcuts
)
from searx.utils import UnicodeWriter, highlight_content, html_to_text
from searx.languages import language_codes
from searx.search import Search
from searx.autocomplete import backends as autocomplete_backends

# Flask application; static files and templates are served from the
# installed searx package directory, not relative to this module.
app = Flask(
    __name__,
    static_folder=os.path.join(searx_dir, 'static'),
    template_folder=os.path.join(searx_dir, 'templates')
)
app.secret_key = settings['server']['secret_key']

babel = Babel(app)

# TODO configurable via settings.yml
# Engine names that double as favicon file names shown next to results.
favicons = ['wikipedia', 'youtube', 'vimeo', 'soundcloud',
            'twitter', 'stackoverflow', 'github']

# Lifetime for all preference cookies set by /preferences.
cookie_max_age = 60 * 60 * 24 * 365 * 23  # 23 years
  49. @babel.localeselector
  50. def get_locale():
  51. locale = request.accept_languages.best_match(settings['locales'].keys())
  52. if request.cookies.get('locale', '') in settings['locales']:
  53. locale = request.cookies.get('locale', '')
  54. if 'locale' in request.args\
  55. and request.args['locale'] in settings['locales']:
  56. locale = request.args['locale']
  57. if 'locale' in request.form\
  58. and request.form['locale'] in settings['locales']:
  59. locale = request.form['locale']
  60. return locale
  61. def get_base_url():
  62. if settings['server']['base_url']:
  63. hostname = settings['server']['base_url']
  64. else:
  65. scheme = 'http'
  66. if request.is_secure:
  67. scheme = 'https'
  68. hostname = url_for('index', _external=True, _scheme=scheme)
  69. return hostname
  70. def render(template_name, **kwargs):
  71. blocked_engines = request.cookies.get('blocked_engines', '').split(',')
  72. autocomplete = request.cookies.get('autocomplete')
  73. if autocomplete not in autocomplete_backends:
  74. autocomplete = None
  75. nonblocked_categories = (engines[e].categories
  76. for e in engines
  77. if e not in blocked_engines)
  78. nonblocked_categories = set(chain.from_iterable(nonblocked_categories))
  79. if not 'categories' in kwargs:
  80. kwargs['categories'] = ['general']
  81. kwargs['categories'].extend(x for x in
  82. sorted(categories.keys())
  83. if x != 'general'
  84. and x in nonblocked_categories)
  85. if not 'selected_categories' in kwargs:
  86. kwargs['selected_categories'] = []
  87. cookie_categories = request.cookies.get('categories', '').split(',')
  88. for ccateg in cookie_categories:
  89. if ccateg in categories:
  90. kwargs['selected_categories'].append(ccateg)
  91. if not kwargs['selected_categories']:
  92. kwargs['selected_categories'] = ['general']
  93. if not 'autocomplete' in kwargs:
  94. kwargs['autocomplete'] = autocomplete
  95. return render_template(template_name, **kwargs)
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render index page.

    Supported outputs: html, json, csv, rss.
    """
    # no parameters at all -> plain landing page
    if not request.args and not request.form:
        return render(
            'index.html',
        )

    # NOTE(review): bare except — any parse failure of the request
    # silently falls back to the landing page
    try:
        search = Search(request)
    except:
        return render(
            'index.html',
        )

    # TODO moar refactor - do_search integration into Search class
    search.results, search.suggestions = do_search(search.query,
                                                   request,
                                                   search.engines,
                                                   search.pageno,
                                                   search.lang)

    # post-process every result: paging flag, highlighting / plain text,
    # pretty URL, favicon, human-readable publication date
    for result in search.results:

        # enable paging as soon as one participating engine supports it
        if not search.paging and engines[result['engine']].paging:
            search.paging = True

        if search.request_data.get('format', 'html') == 'html':
            # HTML output: wrap query terms in the content/title
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
                                                      search.query.encode('utf-8'))  # noqa
            result['title'] = highlight_content(result['title'],
                                                search.query.encode('utf-8'))
        else:
            # non-HTML output: strip markup instead
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            # removing html content and whitespace duplications
            result['title'] = ' '.join(html_to_text(result['title'])
                                       .strip().split())

        # shorten long URLs for display: first 35 + '[...]' + last 35 chars
        if len(result['url']) > 74:
            url_parts = result['url'][:35], result['url'][-35:]
            result['pretty_url'] = u'{0}[...]{1}'.format(*url_parts)
        else:
            result['pretty_url'] = result['url']

        for engine in result['engines']:
            if engine in favicons:
                result['favicon'] = engine

        # TODO, check if timezone is calculated right
        # NOTE(review): compares tz-stripped publishedDate against naive
        # datetime.now() — mixes timezones; confirm
        if 'publishedDate' in result:
            if result['publishedDate'].replace(tzinfo=None)\
               >= datetime.now() - timedelta(days=1):
                # less than a day old: show a relative "N ago" string
                timedifference = datetime.now() - result['publishedDate']\
                    .replace(tzinfo=None)
                minutes = int((timedifference.seconds / 60) % 60)
                hours = int(timedifference.seconds / 60 / 60)
                if hours == 0:
                    result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)  # noqa
                else:
                    result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes)  # noqa
            else:
                # older: keep an RFC-822 date for RSS and a localized one
                result['pubdate'] = result['publishedDate']\
                    .strftime('%a, %d %b %Y %H:%M:%S %z')
                result['publishedDate'] = format_date(result['publishedDate'])

    if search.request_data.get('format') == 'json':
        return Response(json.dumps({'query': search.query,
                                    'results': search.results}),
                        mimetype='application/json')
    elif search.request_data.get('format') == 'csv':
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if search.results:
            csv.writerow(keys)
            for row in search.results:
                row['host'] = row['parsed_url'].netloc
                csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
        response.headers.add('Content-Disposition', cont_disp)
        return response
    elif search.request_data.get('format') == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=search.results,
            q=search.request_data['q'],
            number_of_results=len(search.results),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    # default: regular HTML results page
    return render(
        'results.html',
        results=search.results,
        q=search.request_data['q'],
        selected_categories=search.categories,
        paging=search.paging,
        pageno=search.pageno,
        base_url=get_base_url(),
        suggestions=search.suggestions
    )
  192. @app.route('/about', methods=['GET'])
  193. def about():
  194. """Render about page"""
  195. return render(
  196. 'about.html',
  197. )
  198. @app.route('/autocompleter', methods=['GET', 'POST'])
  199. def autocompleter():
  200. """Return autocompleter results"""
  201. request_data = {}
  202. if request.method == 'POST':
  203. request_data = request.form
  204. else:
  205. request_data = request.args
  206. # TODO fix XSS-vulnerability
  207. query = request_data.get('q')
  208. if not query:
  209. return
  210. completer = autocomplete_backends.get(request.cookies.get('autocomplete'))
  211. if not completer:
  212. return
  213. try:
  214. results = completer(query)
  215. except Exception, e:
  216. print e
  217. results = []
  218. if request_data.get('format') == 'x-suggestions':
  219. return Response(json.dumps([query, results]),
  220. mimetype='application/json')
  221. else:
  222. return Response(json.dumps(results),
  223. mimetype='application/json')
@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    """Render preferences page.

    Settings that are going to be saved as cookies.

    GET shows the form pre-filled from cookies; POST parses the form,
    writes the cookies and redirects to the index page.
    """
    lang = None

    # current language preference from cookie, validated against the
    # known language codes
    if request.cookies.get('language')\
       and request.cookies['language'] in (x[0] for x in language_codes):
        lang = request.cookies['language']

    blocked_engines = []

    if request.method == 'GET':
        blocked_engines = request.cookies.get('blocked_engines', '').split(',')
    else:
        selected_categories = []
        locale = None
        autocomplete = ''
        # form field names encode their meaning: 'category_<name>',
        # 'engine_<name>', or plain 'locale'/'autocomplete'/'language'
        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]  # strip 'category_' prefix
                if not category in categories:
                    continue
                selected_categories.append(category)
            elif pd_name == 'locale' and pd in settings['locales']:
                locale = pd
            elif pd_name == 'autocomplete':
                autocomplete = pd
            elif pd_name == 'language' and (pd == 'all' or
                                            pd in (x[0] for
                                                   x in language_codes)):
                lang = pd
            elif pd_name.startswith('engine_'):
                # a checked engine checkbox means "block this engine"
                engine_name = pd_name.replace('engine_', '', 1)
                if engine_name in engines:
                    blocked_engines.append(engine_name)

        resp = make_response(redirect(url_for('index')))

        user_blocked_engines = request.cookies.get('blocked_engines', '').split(',')  # noqa

        # only rewrite the cookie when the selection actually changed
        if sorted(blocked_engines) != sorted(user_blocked_engines):
            resp.set_cookie(
                'blocked_engines', ','.join(blocked_engines),
                max_age=cookie_max_age
            )

        if locale:
            resp.set_cookie(
                'locale', locale,
                max_age=cookie_max_age
            )

        if lang:
            resp.set_cookie(
                'language', lang,
                max_age=cookie_max_age
            )

        if selected_categories:
            # NOTE(review): old comment said "4 weeks", but
            # cookie_max_age is actually 23 years
            resp.set_cookie(
                'categories', ','.join(selected_categories),
                max_age=cookie_max_age
            )

        resp.set_cookie(
            'autocomplete', autocomplete,
            max_age=cookie_max_age
        )

        return resp
    return render('preferences.html',
                  locales=settings['locales'],
                  current_locale=get_locale(),
                  current_language=lang or 'all',
                  language_codes=language_codes,
                  categs=categories.items(),
                  blocked_engines=blocked_engines,
                  autocomplete_backends=autocomplete_backends,
                  shortcuts={y: x for x, y in engine_shortcuts.items()})
  294. @app.route('/stats', methods=['GET'])
  295. def stats():
  296. """Render engine statistics page."""
  297. global categories
  298. stats = get_engines_stats()
  299. return render(
  300. 'stats.html',
  301. stats=stats,
  302. )
  303. @app.route('/robots.txt', methods=['GET'])
  304. def robots():
  305. return Response("""User-agent: *
  306. Allow: /
  307. Allow: /about
  308. Disallow: /stats
  309. Disallow: /preferences
  310. """, mimetype='text/plain')
  311. @app.route('/opensearch.xml', methods=['GET'])
  312. def opensearch():
  313. method = 'post'
  314. # chrome/chromium only supports HTTP GET....
  315. if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
  316. method = 'get'
  317. ret = render('opensearch.xml',
  318. method=method,
  319. host=get_base_url())
  320. resp = Response(response=ret,
  321. status=200,
  322. mimetype="application/xml")
  323. return resp
  324. @app.route('/favicon.ico')
  325. def favicon():
  326. return send_from_directory(os.path.join(app.root_path, 'static/img'),
  327. 'favicon.png',
  328. mimetype='image/vnd.microsoft.icon')
def run():
    """Start the development/standalone server."""
    # monkey-patch the stdlib so the blocking engine HTTP requests become
    # cooperative under gevent
    from gevent import monkey
    monkey.patch_all()

    app.run(
        debug=settings['server']['debug'],
        use_debugger=settings['server']['debug'],
        port=settings['server']['port']
    )


if __name__ == "__main__":
    run()