webapp.py 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390
  1. #!/usr/bin/env python
  2. '''
  3. searx is free software: you can redistribute it and/or modify
  4. it under the terms of the GNU Affero General Public License as published by
  5. the Free Software Foundation, either version 3 of the License, or
  6. (at your option) any later version.
  7. searx is distributed in the hope that it will be useful,
  8. but WITHOUT ANY WARRANTY; without even the implied warranty of
  9. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  10. GNU Affero General Public License for more details.
  11. You should have received a copy of the GNU Affero General Public License
  12. along with searx. If not, see < http://www.gnu.org/licenses/ >.
  13. (C) 2013- by Adam Tauber, <asciimoo@gmail.com>
  14. '''
if __name__ == '__main__':
    from sys import path
    from os.path import realpath, dirname
    # When executed directly (not imported), add the parent directory to
    # sys.path so that ``import searx`` resolves to the sibling package.
    path.append(realpath(dirname(realpath(__file__))+'/../'))
  19. import json
  20. import cStringIO
  21. import os
  22. from datetime import datetime, timedelta
  23. from itertools import chain
  24. from flask import (
  25. Flask, request, render_template, url_for, Response, make_response,
  26. redirect, send_from_directory
  27. )
  28. from flask.ext.babel import Babel, gettext, format_date
  29. from searx import settings, searx_dir
  30. from searx.engines import (
  31. search as do_search, categories, engines, get_engines_stats,
  32. engine_shortcuts
  33. )
  34. from searx.utils import UnicodeWriter, highlight_content, html_to_text
  35. from searx.languages import language_codes
  36. from searx.search import Search
# Flask application serving searx' static files and templates from the
# installed package directory rather than relative to this module.
app = Flask(
    __name__,
    static_folder=os.path.join(searx_dir, 'static'),
    template_folder=os.path.join(searx_dir, 'templates')
)
app.secret_key = settings['server']['secret_key']
babel = Babel(app)
# Engines for which a bundled favicon exists under static/img.
# TODO configurable via settings.yml
favicons = ['wikipedia', 'youtube', 'vimeo', 'soundcloud',
            'twitter', 'stackoverflow', 'github']
# Lifetime for preference cookies (effectively "forever").
cookie_max_age = 60 * 60 * 24 * 365 * 23  # 23 years
  48. @babel.localeselector
  49. def get_locale():
  50. locale = request.accept_languages.best_match(settings['locales'].keys())
  51. if request.cookies.get('locale', '') in settings['locales']:
  52. locale = request.cookies.get('locale', '')
  53. if 'locale' in request.args\
  54. and request.args['locale'] in settings['locales']:
  55. locale = request.args['locale']
  56. if 'locale' in request.form\
  57. and request.form['locale'] in settings['locales']:
  58. locale = request.form['locale']
  59. return locale
  60. def get_base_url():
  61. if settings['server']['base_url']:
  62. hostname = settings['server']['base_url']
  63. else:
  64. scheme = 'http'
  65. if request.is_secure:
  66. scheme = 'https'
  67. hostname = url_for('index', _external=True, _scheme=scheme)
  68. return hostname
  69. def render(template_name, **kwargs):
  70. blocked_engines = request.cookies.get('blocked_engines', '').split(',')
  71. nonblocked_categories = (engines[e].categories
  72. for e in engines
  73. if e not in blocked_engines)
  74. nonblocked_categories = set(chain.from_iterable(nonblocked_categories))
  75. if not 'categories' in kwargs:
  76. kwargs['categories'] = ['general']
  77. kwargs['categories'].extend(x for x in
  78. sorted(categories.keys())
  79. if x != 'general'
  80. and x in nonblocked_categories)
  81. if not 'selected_categories' in kwargs:
  82. kwargs['selected_categories'] = []
  83. cookie_categories = request.cookies.get('categories', '').split(',')
  84. for ccateg in cookie_categories:
  85. if ccateg in categories:
  86. kwargs['selected_categories'].append(ccateg)
  87. if not kwargs['selected_categories']:
  88. kwargs['selected_categories'] = ['general']
  89. return render_template(template_name, **kwargs)
  90. @app.route('/', methods=['GET', 'POST'])
  91. def index():
  92. """Render index page.
  93. Supported outputs: html, json, csv, rss.
  94. """
  95. if not request.args and not request.form:
  96. return render(
  97. 'index.html',
  98. client=settings.get('client', None)
  99. )
  100. try:
  101. search = Search(request)
  102. except:
  103. return render(
  104. 'index.html',
  105. client=settings.get('client', None)
  106. )
  107. # TODO moar refactor - do_search integration into Search class
  108. search.results, search.suggestions = do_search(search.query,
  109. request,
  110. search.engines,
  111. search.pageno,
  112. search.lang)
  113. for result in search.results:
  114. if not search.paging and engines[result['engine']].paging:
  115. search.paging = True
  116. if search.request_data.get('format', 'html') == 'html':
  117. if 'content' in result:
  118. result['content'] = highlight_content(result['content'],
  119. search.query.encode('utf-8')) # noqa
  120. result['title'] = highlight_content(result['title'],
  121. search.query.encode('utf-8'))
  122. else:
  123. if 'content' in result:
  124. result['content'] = html_to_text(result['content']).strip()
  125. # removing html content and whitespace duplications
  126. result['title'] = ' '.join(html_to_text(result['title'])
  127. .strip().split())
  128. if len(result['url']) > 74:
  129. url_parts = result['url'][:35], result['url'][-35:]
  130. result['pretty_url'] = u'{0}[...]{1}'.format(*url_parts)
  131. else:
  132. result['pretty_url'] = result['url']
  133. for engine in result['engines']:
  134. if engine in favicons:
  135. result['favicon'] = engine
  136. # TODO, check if timezone is calculated right
  137. if 'publishedDate' in result:
  138. if result['publishedDate'].replace(tzinfo=None)\
  139. >= datetime.now() - timedelta(days=1):
  140. timedifference = datetime.now() - result['publishedDate']\
  141. .replace(tzinfo=None)
  142. minutes = int((timedifference.seconds / 60) % 60)
  143. hours = int(timedifference.seconds / 60 / 60)
  144. if hours == 0:
  145. result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes) # noqa
  146. else:
  147. result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes) # noqa
  148. else:
  149. result['pubdate'] = result['publishedDate']\
  150. .strftime('%a, %d %b %Y %H:%M:%S %z')
  151. result['publishedDate'] = format_date(result['publishedDate'])
  152. if search.request_data.get('format') == 'json':
  153. return Response(json.dumps({'query': search.query,
  154. 'results': search.results}),
  155. mimetype='application/json')
  156. elif search.request_data.get('format') == 'csv':
  157. csv = UnicodeWriter(cStringIO.StringIO())
  158. keys = ('title', 'url', 'content', 'host', 'engine', 'score')
  159. if search.results:
  160. csv.writerow(keys)
  161. for row in search.results:
  162. row['host'] = row['parsed_url'].netloc
  163. csv.writerow([row.get(key, '') for key in keys])
  164. csv.stream.seek(0)
  165. response = Response(csv.stream.read(), mimetype='application/csv')
  166. cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
  167. response.headers.add('Content-Disposition', cont_disp)
  168. return response
  169. elif search.request_data.get('format') == 'rss':
  170. response_rss = render(
  171. 'opensearch_response_rss.xml',
  172. results=search.results,
  173. q=search.request_data['q'],
  174. number_of_results=len(search.results),
  175. base_url=get_base_url()
  176. )
  177. return Response(response_rss, mimetype='text/xml')
  178. return render(
  179. 'results.html',
  180. results=search.results,
  181. client=settings.get('client', None),
  182. q=search.request_data['q'],
  183. selected_categories=search.categories,
  184. paging=search.paging,
  185. pageno=search.pageno,
  186. base_url=get_base_url(),
  187. suggestions=search.suggestions
  188. )
  189. @app.route('/about', methods=['GET'])
  190. def about():
  191. """Render about page"""
  192. return render(
  193. 'about.html',
  194. client=settings.get('client', None)
  195. )
  196. @app.route('/autocompleter', methods=['GET', 'POST'])
  197. def autocompleter():
  198. """Return autocompleter results"""
  199. request_data = {}
  200. if request.method == 'POST':
  201. request_data = request.form
  202. else:
  203. request_data = request.args
  204. # TODO fix XSS-vulnerability
  205. autocompleter.querry = request_data.get('q')
  206. autocompleter.results = []
  207. if settings['client']['autocompleter']:
  208. #TODO remove test code and add real autocompletion
  209. if autocompleter.querry:
  210. autocompleter.results = [autocompleter.querry + " result-1",autocompleter.querry + " result-2",autocompleter.querry + " result-3",autocompleter.querry + " result-4"]
  211. if request_data.get('format') == 'x-suggestions':
  212. return Response(json.dumps([autocompleter.querry,autocompleter.results]),
  213. mimetype='application/json')
  214. else:
  215. return Response(json.dumps(autocompleter.results),
  216. mimetype='application/json')
@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    """Render preferences page.

    Settings that are going to be saved as cookies."""
    # current language preference, if the cookie names a known language code
    lang = None
    if request.cookies.get('language')\
       and request.cookies['language'] in (x[0] for x in language_codes):
        lang = request.cookies['language']

    blocked_engines = []

    if request.method == 'GET':
        # just displaying the page: show the engines currently blocked
        blocked_engines = request.cookies.get('blocked_engines', '').split(',')
    else:
        # form submitted: collect new preferences from the posted fields
        selected_categories = []
        locale = None
        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                # field name is 'category_<name>'; strip the 9-char prefix
                category = pd_name[9:]
                if not category in categories:
                    continue
                selected_categories.append(category)
            elif pd_name == 'locale' and pd in settings['locales']:
                locale = pd
            elif pd_name == 'language' and (pd == 'all' or
                                            pd in (x[0] for
                                                   x in language_codes)):
                lang = pd
            elif pd_name.startswith('engine_'):
                # checked engine checkboxes mark engines to block
                engine_name = pd_name.replace('engine_', '', 1)
                if engine_name in engines:
                    blocked_engines.append(engine_name)

        resp = make_response(redirect(url_for('index')))

        user_blocked_engines = request.cookies.get('blocked_engines', '').split(',')  # noqa

        # only rewrite the cookie when the selection actually changed
        if sorted(blocked_engines) != sorted(user_blocked_engines):
            resp.set_cookie(
                'blocked_engines', ','.join(blocked_engines),
                max_age=cookie_max_age
            )

        if locale:
            resp.set_cookie(
                'locale', locale,
                max_age=cookie_max_age
            )

        if lang:
            resp.set_cookie(
                'language', lang,
                max_age=cookie_max_age
            )

        if selected_categories:
            # cookie max age: 4 weeks
            resp.set_cookie(
                'categories', ','.join(selected_categories),
                max_age=60 * 60 * 24 * 7 * 4
            )
        return resp
    return render('preferences.html',
                  client=settings.get('client', None),
                  locales=settings['locales'],
                  current_locale=get_locale(),
                  current_language=lang or 'all',
                  language_codes=language_codes,
                  categs=categories.items(),
                  blocked_engines=blocked_engines,
                  shortcuts={y: x for x, y in engine_shortcuts.items()})
  280. @app.route('/stats', methods=['GET'])
  281. def stats():
  282. """Render engine statistics page."""
  283. global categories
  284. stats = get_engines_stats()
  285. return render(
  286. 'stats.html',
  287. stats=stats,
  288. client=settings.get('client', None)
  289. )
  290. @app.route('/robots.txt', methods=['GET'])
  291. def robots():
  292. return Response("""User-agent: *
  293. Allow: /
  294. Allow: /about
  295. Disallow: /stats
  296. Disallow: /preferences
  297. """, mimetype='text/plain')
  298. @app.route('/opensearch.xml', methods=['GET'])
  299. def opensearch():
  300. method = 'post'
  301. # chrome/chromium only supports HTTP GET....
  302. if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
  303. method = 'get'
  304. ret = render('opensearch.xml', method=method, host=get_base_url(),client=settings['client'])
  305. resp = Response(response=ret,
  306. status=200,
  307. mimetype="application/xml")
  308. return resp
  309. @app.route('/favicon.ico')
  310. def favicon():
  311. return send_from_directory(os.path.join(app.root_path, 'static/img'),
  312. 'favicon.png',
  313. mimetype='image/vnd.microsoft.icon')
  314. def run():
  315. from gevent import monkey
  316. monkey.patch_all()
  317. app.run(
  318. debug=settings['server']['debug'],
  319. use_debugger=settings['server']['debug'],
  320. port=settings['server']['port']
  321. )
  322. if __name__ == "__main__":
  323. run()