webapp.py 12 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366
  1. #!/usr/bin/env python
  2. '''
  3. searx is free software: you can redistribute it and/or modify
  4. it under the terms of the GNU Affero General Public License as published by
  5. the Free Software Foundation, either version 3 of the License, or
  6. (at your option) any later version.
  7. searx is distributed in the hope that it will be useful,
  8. but WITHOUT ANY WARRANTY; without even the implied warranty of
  9. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  10. GNU Affero General Public License for more details.
  11. You should have received a copy of the GNU Affero General Public License
  12. along with searx. If not, see < http://www.gnu.org/licenses/ >.
  13. (C) 2013- by Adam Tauber, <asciimoo@gmail.com>
  14. '''
# When executed as a script (not imported), make the parent directory
# importable so the ``searx`` package is found without installation.
if __name__ == '__main__':
    from sys import path
    from os.path import realpath, dirname
    # append <this file's dir>/../ to sys.path
    path.append(realpath(dirname(realpath(__file__))+'/../'))
  19. import json
  20. import cStringIO
  21. import os
  22. from datetime import datetime, timedelta
  23. from itertools import chain
  24. from flask import (
  25. Flask, request, render_template, url_for, Response, make_response,
  26. redirect, send_from_directory
  27. )
  28. from flask.ext.babel import Babel, gettext, format_date
  29. from searx import settings, searx_dir
  30. from searx.engines import (
  31. search as do_search, categories, engines, get_engines_stats,
  32. engine_shortcuts
  33. )
  34. from searx.utils import UnicodeWriter, highlight_content, html_to_text
  35. from searx.languages import language_codes
  36. from searx.search import Search
# Flask application; static files and templates are served from the searx
# package directory rather than this module's own location.
app = Flask(
    __name__,
    static_folder=os.path.join(searx_dir, 'static'),
    template_folder=os.path.join(searx_dir, 'templates')
)

# secret key for cookie/session signing, configured in settings.yml
app.secret_key = settings['server']['secret_key']

babel = Babel(app)

# engines that have a bundled favicon under static/img
# TODO configurable via settings.yml
favicons = ['wikipedia', 'youtube', 'vimeo', 'soundcloud',
            'twitter', 'stackoverflow', 'github']

cookie_max_age = 60 * 60 * 24 * 365 * 23  # 23 years
  48. @babel.localeselector
  49. def get_locale():
  50. locale = request.accept_languages.best_match(settings['locales'].keys())
  51. if request.cookies.get('locale', '') in settings['locales']:
  52. locale = request.cookies.get('locale', '')
  53. if 'locale' in request.args\
  54. and request.args['locale'] in settings['locales']:
  55. locale = request.args['locale']
  56. if 'locale' in request.form\
  57. and request.form['locale'] in settings['locales']:
  58. locale = request.form['locale']
  59. return locale
  60. def get_base_url():
  61. if settings['server']['base_url']:
  62. hostname = settings['server']['base_url']
  63. else:
  64. scheme = 'http'
  65. if request.is_secure:
  66. scheme = 'https'
  67. hostname = url_for('index', _external=True, _scheme=scheme)
  68. return hostname
  69. def render(template_name, **kwargs):
  70. blocked_engines = request.cookies.get('blocked_engines', '').split(',')
  71. nonblocked_categories = (engines[e].categories
  72. for e in engines
  73. if e not in blocked_engines)
  74. nonblocked_categories = set(chain.from_iterable(nonblocked_categories))
  75. if not 'categories' in kwargs:
  76. kwargs['categories'] = ['general']
  77. kwargs['categories'].extend(x for x in
  78. sorted(categories.keys())
  79. if x != 'general'
  80. and x in nonblocked_categories)
  81. if not 'selected_categories' in kwargs:
  82. kwargs['selected_categories'] = []
  83. cookie_categories = request.cookies.get('categories', '').split(',')
  84. for ccateg in cookie_categories:
  85. if ccateg in categories:
  86. kwargs['selected_categories'].append(ccateg)
  87. if not kwargs['selected_categories']:
  88. kwargs['selected_categories'] = ['general']
  89. return render_template(template_name, **kwargs)
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render index page.

    Supported outputs: html, json, csv, rss.
    """
    # no parameters at all: show the plain search page
    if not request.args and not request.form:
        return render('index.html')

    try:
        search = Search(request)
    except:
        # NOTE(review): bare except silently maps any parse failure to the
        # empty index page — confirm this is intentional
        return render('index.html')

    # TODO moar refactor - do_search integration into Search class
    search.results, search.suggestions = do_search(search.query,
                                                   request,
                                                   search.engines,
                                                   search.pageno,
                                                   search.lang)

    # post-process every result for the chosen output format
    for result in search.results:

        # enable paging as soon as one participating engine supports it
        if not search.paging and engines[result['engine']].paging:
            search.paging = True

        if search.request_data.get('format', 'html') == 'html':
            # html output: highlight the query terms in content and title
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
                                                      search.query.encode('utf-8'))  # noqa
            result['title'] = highlight_content(result['title'],
                                                search.query.encode('utf-8'))
        else:
            # non-html output: strip markup instead of highlighting
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            # removing html content and whitespace duplications
            result['title'] = ' '.join(html_to_text(result['title'])
                                       .strip().split())

        # shorten overly long urls for display: keep first/last 35 chars
        if len(result['url']) > 74:
            url_parts = result['url'][:35], result['url'][-35:]
            result['pretty_url'] = u'{0}[...]{1}'.format(*url_parts)
        else:
            result['pretty_url'] = result['url']

        # attach a favicon if any contributing engine has a bundled one
        for engine in result['engines']:
            if engine in favicons:
                result['favicon'] = engine

        # TODO, check if timezone is calculated right
        if 'publishedDate' in result:
            # results newer than one day get a relative "x ago" label
            if result['publishedDate'].replace(tzinfo=None)\
               >= datetime.now() - timedelta(days=1):
                timedifference = datetime.now() - result['publishedDate']\
                    .replace(tzinfo=None)
                minutes = int((timedifference.seconds / 60) % 60)
                hours = int(timedifference.seconds / 60 / 60)
                if hours == 0:
                    result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)  # noqa
                else:
                    result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes)  # noqa
            else:
                # older results: RFC-822 date for rss plus localized display
                result['pubdate'] = result['publishedDate']\
                    .strftime('%a, %d %b %Y %H:%M:%S %z')
                result['publishedDate'] = format_date(result['publishedDate'])

    if search.request_data.get('format') == 'json':
        return Response(json.dumps({'query': search.query,
                                    'results': search.results}),
                        mimetype='application/json')
    elif search.request_data.get('format') == 'csv':
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if search.results:
            csv.writerow(keys)
            for row in search.results:
                row['host'] = row['parsed_url'].netloc
                csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
        response.headers.add('Content-Disposition', cont_disp)
        return response
    elif search.request_data.get('format') == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=search.results,
            q=search.request_data['q'],
            number_of_results=len(search.results),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    # default: regular html results page
    return render(
        'results.html',
        results=search.results,
        q=search.request_data['q'],
        selected_categories=search.categories,
        paging=search.paging,
        pageno=search.pageno,
        base_url=get_base_url(),
        suggestions=search.suggestions
    )
  182. @app.route('/about', methods=['GET'])
  183. def about():
  184. """Render about page"""
  185. return render('about.html')
  186. @app.route('/autocompleter', methods=['GET', 'POST'])
  187. def autocompleter():
  188. """Return autocompleter results"""
  189. request_data = {}
  190. if request.method == 'POST':
  191. request_data = request.form
  192. else:
  193. request_data = request.args
  194. # TODO fix XSS-vulnerability, remove test code
  195. autocompleter.querry = request_data.get('q')
  196. autocompleter.results = [autocompleter.querry]
  197. return Response(json.dumps(autocompleter.results),
  198. mimetype='application/json')
@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    """Render preferences page.

    Settings that are going to be saved as cookies."""
    lang = None

    # current language preference from the cookie, if it is a known code
    if request.cookies.get('language')\
       and request.cookies['language'] in (x[0] for x in language_codes):
        lang = request.cookies['language']

    blocked_engines = []

    if request.method == 'GET':
        # GET: just show the form pre-filled from cookies
        blocked_engines = request.cookies.get('blocked_engines', '').split(',')
    else:
        # POST: collect submitted settings, then persist them as cookies
        selected_categories = []
        locale = None
        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                # field name is 'category_<name>'; strip the 9-char prefix
                category = pd_name[9:]
                if not category in categories:
                    continue
                selected_categories.append(category)
            elif pd_name == 'locale' and pd in settings['locales']:
                locale = pd
            elif pd_name == 'language' and (pd == 'all' or
                                            pd in (x[0] for
                                                   x in language_codes)):
                lang = pd
            elif pd_name.startswith('engine_'):
                # checked engine checkboxes mark engines to BLOCK
                engine_name = pd_name.replace('engine_', '', 1)
                if engine_name in engines:
                    blocked_engines.append(engine_name)

        resp = make_response(redirect(url_for('index')))

        user_blocked_engines = request.cookies.get('blocked_engines', '').split(',')  # noqa

        # only rewrite the cookie when the blocked set actually changed
        if sorted(blocked_engines) != sorted(user_blocked_engines):
            resp.set_cookie(
                'blocked_engines', ','.join(blocked_engines),
                max_age=cookie_max_age
            )

        if locale:
            resp.set_cookie(
                'locale', locale,
                max_age=cookie_max_age
            )

        if lang:
            resp.set_cookie(
                'language', lang,
                max_age=cookie_max_age
            )

        if selected_categories:
            # cookie max age: 4 weeks
            resp.set_cookie(
                'categories', ','.join(selected_categories),
                max_age=60 * 60 * 24 * 7 * 4
            )

        return resp
    return render('preferences.html',
                  locales=settings['locales'],
                  current_locale=get_locale(),
                  current_language=lang or 'all',
                  language_codes=language_codes,
                  categs=categories.items(),
                  blocked_engines=blocked_engines,
                  shortcuts={y: x for x, y in engine_shortcuts.items()})
  261. @app.route('/stats', methods=['GET'])
  262. def stats():
  263. """Render engine statistics page."""
  264. global categories
  265. stats = get_engines_stats()
  266. return render('stats.html', stats=stats)
  267. @app.route('/robots.txt', methods=['GET'])
  268. def robots():
  269. return Response("""User-agent: *
  270. Allow: /
  271. Allow: /about
  272. Disallow: /stats
  273. Disallow: /preferences
  274. """, mimetype='text/plain')
  275. @app.route('/opensearch.xml', methods=['GET'])
  276. def opensearch():
  277. method = 'post'
  278. # chrome/chromium only supports HTTP GET....
  279. if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
  280. method = 'get'
  281. ret = render('opensearch.xml', method=method, host=get_base_url())
  282. resp = Response(response=ret,
  283. status=200,
  284. mimetype="application/xml")
  285. return resp
  286. @app.route('/favicon.ico')
  287. def favicon():
  288. return send_from_directory(os.path.join(app.root_path, 'static/img'),
  289. 'favicon.png',
  290. mimetype='image/vnd.microsoft.icon')
def run():
    """Start the development server.

    gevent monkey-patching must happen before the server starts, which is
    why the import is deferred to call time rather than module import.
    """
    from gevent import monkey
    monkey.patch_all()

    app.run(
        debug=settings['server']['debug'],
        use_debugger=settings['server']['debug'],
        port=settings['server']['port']
    )
# script entry point
if __name__ == "__main__":
    run()