#!/usr/bin/env python
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
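
# when executed directly, add the parent directory to sys.path so the
# searx package can be imported without being installed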
if __name__ == '__main__':
    from sys import path
    from os.path import realpath, dirname
    path.append(realpath(dirname(realpath(__file__)) + '/../'))

import json
import cStringIO
import os

from datetime import datetime, timedelta
from itertools import chain
from flask import (
    Flask, request, render_template, url_for, Response, make_response,
    redirect, send_from_directory
)
from flask.ext.babel import Babel, gettext, format_date
from searx import settings, searx_dir
from searx.engines import (
    search as do_search, categories, engines, get_engines_stats,
    engine_shortcuts
)
from searx.utils import UnicodeWriter, highlight_content, html_to_text
from searx.languages import language_codes
from searx.search import Search
from searx.autocomplete import backends as autocomplete_backends

app = Flask(
    __name__,
    static_folder=os.path.join(searx_dir, 'static'),
    template_folder=os.path.join(searx_dir, 'templates')
)

app.secret_key = settings['server']['secret_key']

babel = Babel(app)

# TODO configurable via settings.yml
favicons = ['wikipedia', 'youtube', 'vimeo', 'soundcloud',
            'twitter', 'stackoverflow', 'github']

cookie_max_age = 60 * 60 * 24 * 365 * 23  # 23 years
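

# locale precedence: form field > 'locale' URL parameter > 'locale' cookie >
# best Accept-Language match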
@babel.localeselector
def get_locale():
    locale = request.accept_languages.best_match(settings['locales'].keys())

    if request.cookies.get('locale', '') in settings['locales']:
        locale = request.cookies.get('locale', '')

    if 'locale' in request.args\
       and request.args['locale'] in settings['locales']:
        locale = request.args['locale']

    if 'locale' in request.form\
       and request.form['locale'] in settings['locales']:
        locale = request.form['locale']

    return locale


def get_base_url():
    if settings['server']['base_url']:
        hostname = settings['server']['base_url']
    else:
        scheme = 'http'
        if request.is_secure:
            scheme = 'https'
        hostname = url_for('index', _external=True, _scheme=scheme)
    return hostname
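

# render() wraps render_template() and fills in the shared template context:
# the categories of the non-blocked engines, the selected categories, the
# autocompletion backend from the cookies and the preferred HTTP method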
def render(template_name, **kwargs):
    blocked_engines = request.cookies.get('blocked_engines', '').split(',')

    autocomplete = request.cookies.get('autocomplete')

    if autocomplete not in autocomplete_backends:
        autocomplete = None

    nonblocked_categories = (engines[e].categories
                             for e in engines
                             if e not in blocked_engines)

    nonblocked_categories = set(chain.from_iterable(nonblocked_categories))

    if 'categories' not in kwargs:
        kwargs['categories'] = ['general']
        kwargs['categories'].extend(x for x in
                                    sorted(categories.keys())
                                    if x != 'general'
                                    and x in nonblocked_categories)

    if 'selected_categories' not in kwargs:
        kwargs['selected_categories'] = []
        cookie_categories = request.cookies.get('categories', '').split(',')
        for ccateg in cookie_categories:
            if ccateg in categories:
                kwargs['selected_categories'].append(ccateg)

    if not kwargs['selected_categories']:
        kwargs['selected_categories'] = ['general']

    if 'autocomplete' not in kwargs:
        kwargs['autocomplete'] = autocomplete

    kwargs['method'] = request.cookies.get('method', 'POST')

    return render_template(template_name, **kwargs)


@app.route('/', methods=['GET', 'POST'])
def index():
    """Render index page.

    Supported outputs: html, json, csv, rss.
    """

    if not request.args and not request.form:
        return render(
            'index.html',
        )

    try:
        search = Search(request)
    except:
        return render(
            'index.html',
        )

    # TODO moar refactor - do_search integration into Search class
    search.results, search.suggestions = do_search(search.query,
                                                   request,
                                                   search.engines,
                                                   search.pageno,
                                                   search.lang)

    for result in search.results:

        if not search.paging and engines[result['engine']].paging:
            search.paging = True

        if search.request_data.get('format', 'html') == 'html':
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
                                                      search.query.encode('utf-8'))  # noqa
            result['title'] = highlight_content(result['title'],
                                                search.query.encode('utf-8'))
        else:
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            # removing html content and whitespace duplications
            result['title'] = ' '.join(html_to_text(result['title'])
                                       .strip().split())

        if len(result['url']) > 74:
            url_parts = result['url'][:35], result['url'][-35:]
            result['pretty_url'] = u'{0}[...]{1}'.format(*url_parts)
        else:
            result['pretty_url'] = result['url']

        for engine in result['engines']:
            if engine in favicons:
                result['favicon'] = engine

        # TODO, check if timezone is calculated right
        if 'publishedDate' in result:
            if result['publishedDate'].replace(tzinfo=None)\
               >= datetime.now() - timedelta(days=1):
                timedifference = datetime.now() - result['publishedDate']\
                    .replace(tzinfo=None)
                minutes = int((timedifference.seconds / 60) % 60)
                hours = int(timedifference.seconds / 60 / 60)
                if hours == 0:
                    result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)  # noqa
                else:
                    result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes)  # noqa
            else:
                result['pubdate'] = result['publishedDate']\
                    .strftime('%a, %d %b %Y %H:%M:%S %z')
                result['publishedDate'] = format_date(result['publishedDate'])

    if search.request_data.get('format') == 'json':
        return Response(json.dumps({'query': search.query,
                                    'results': search.results}),
                        mimetype='application/json')
    elif search.request_data.get('format') == 'csv':
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if search.results:
            csv.writerow(keys)
            for row in search.results:
                row['host'] = row['parsed_url'].netloc
                csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
        response.headers.add('Content-Disposition', cont_disp)
        return response
    elif search.request_data.get('format') == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=search.results,
            q=search.request_data['q'],
            number_of_results=len(search.results),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    return render(
        'results.html',
        results=search.results,
        q=search.request_data['q'],
        selected_categories=search.categories,
        paging=search.paging,
        pageno=search.pageno,
        base_url=get_base_url(),
        suggestions=search.suggestions
    )


@app.route('/about', methods=['GET'])
def about():
    """Render about page"""
    return render(
        'about.html',
    )
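

# the autocompletion backend is chosen by the 'autocomplete' cookie; the
# 'x-suggestions' format returns the [query, completions] pair that browser
# search boxes expect, otherwise a plain JSON list of completions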
@app.route('/autocompleter', methods=['GET', 'POST'])
def autocompleter():
    """Return autocompleter results"""
    request_data = {}

    if request.method == 'POST':
        request_data = request.form
    else:
        request_data = request.args

    # TODO fix XSS-vulnerability
    query = request_data.get('q', '').encode('utf-8')

    if not query:
        return ''

    completer = autocomplete_backends.get(request.cookies.get('autocomplete'))

    if not completer:
        return ''

    results = completer(query)

    if request_data.get('format') == 'x-suggestions':
        return Response(json.dumps([query, results]),
                        mimetype='application/json')
    else:
        return Response(json.dumps(results),
                        mimetype='application/json')
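

# the preferences form maps fields to cookies: 'category_<name>' checkboxes
# select result categories, 'engine_<name>' checkboxes block engines, and
# 'locale', 'language', 'autocomplete' and 'method' are stored directly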
@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    """Render preferences page.

    Settings that are going to be saved as cookies."""
    lang = None

    if request.cookies.get('language')\
       and request.cookies['language'] in (x[0] for x in language_codes):
        lang = request.cookies['language']

    blocked_engines = []

    if request.method == 'GET':
        blocked_engines = request.cookies.get('blocked_engines', '').split(',')
    else:
        selected_categories = []
        locale = None
        autocomplete = ''
        method = 'POST'

        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if category not in categories:
                    continue
                selected_categories.append(category)
            elif pd_name == 'locale' and pd in settings['locales']:
                locale = pd
            elif pd_name == 'autocomplete':
                autocomplete = pd
            elif pd_name == 'language' and (pd == 'all' or
                                            pd in (x[0] for
                                                   x in language_codes)):
                lang = pd
            elif pd_name == 'method':
                method = pd
            elif pd_name.startswith('engine_'):
                engine_name = pd_name.replace('engine_', '', 1)
                if engine_name in engines:
                    blocked_engines.append(engine_name)

        resp = make_response(redirect(url_for('index')))

        user_blocked_engines = request.cookies.get('blocked_engines', '').split(',')  # noqa

        if sorted(blocked_engines) != sorted(user_blocked_engines):
            resp.set_cookie(
                'blocked_engines', ','.join(blocked_engines),
                max_age=cookie_max_age
            )

        if locale:
            resp.set_cookie(
                'locale', locale,
                max_age=cookie_max_age
            )

        if lang:
            resp.set_cookie(
                'language', lang,
                max_age=cookie_max_age
            )

        if selected_categories:
            resp.set_cookie(
                'categories', ','.join(selected_categories),
                max_age=cookie_max_age
            )

        resp.set_cookie(
            'autocomplete', autocomplete,
            max_age=cookie_max_age
        )

        resp.set_cookie('method', method, max_age=cookie_max_age)

        return resp

    return render('preferences.html',
                  locales=settings['locales'],
                  current_locale=get_locale(),
                  current_language=lang or 'all',
                  language_codes=language_codes,
                  categs=categories.items(),
                  blocked_engines=blocked_engines,
                  autocomplete_backends=autocomplete_backends,
                  shortcuts={y: x for x, y in engine_shortcuts.items()})


@app.route('/stats', methods=['GET'])
def stats():
    """Render engine statistics page."""
    global categories
    stats = get_engines_stats()
    return render(
        'stats.html',
        stats=stats,
    )


@app.route('/robots.txt', methods=['GET'])
def robots():
    return Response("""User-agent: *
Allow: /
Allow: /about
Disallow: /stats
Disallow: /preferences
""", mimetype='text/plain')


@app.route('/opensearch.xml', methods=['GET'])
def opensearch():
    method = 'post'
    # chrome/chromium only supports HTTP GET....
    if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
        method = 'get'
    ret = render('opensearch.xml',
                 method=method,
                 host=get_base_url())
    resp = Response(response=ret,
                    status=200,
                    mimetype="application/xml")
    return resp


@app.route('/favicon.ico')
def favicon():
    return send_from_directory(os.path.join(app.root_path, 'static/img'),
                               'favicon.png',
                               mimetype='image/vnd.microsoft.icon')
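

# run() is the development entry point: gevent's monkey.patch_all() makes
# blocking I/O (sockets, threads) cooperative before the Flask server starts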
def run():
    from gevent import monkey
    monkey.patch_all()

    app.run(
        debug=settings['server']['debug'],
        use_debugger=settings['server']['debug'],
        port=settings['server']['port']
    )


if __name__ == "__main__":
    run()