
[fix] unicode and pep8 fixes

asciimoo, 11 years ago
parent commit 9eb31c0186
3 changed files with 11 additions and 8 deletions
  1. searx/engines/__init__.py   +1 -1
  2. searx/search.py             +2 -2
  3. searx/webapp.py             +8 -5

+ 1 - 1
searx/engines/__init__.py

@@ -205,7 +205,7 @@ def search(query, request, selected_engines, pageno=1, lang='all'):
         request_params['started'] = datetime.now()
         request_params['pageno'] = pageno
         request_params['language'] = lang
-        request_params = engine.request(query, request_params)
+        request_params = engine.request(query.encode('utf-8'), request_params)
 
         callback = make_callback(
             selected_engine['name'],
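
The change above encodes the query to UTF-8 before it reaches each engine's request() builder. The point of the encode is easiest to see with Python 2 (which searx targeted at the time): helpers such as urllib.urlencode() call str() on their values, so a unicode query containing non-ASCII characters raises UnicodeEncodeError unless it is turned into bytes first. A minimal sketch, assuming Python 2 and an invented query; whether a particular engine actually hits this depends on how it builds its request URL.

    # Python 2 sketch: urlencode() str()-ifies its values, so a raw
    # unicode query with non-ASCII characters blows up, while the same
    # query encoded to UTF-8 bytes goes through cleanly.
    from urllib import urlencode

    query = u'caf\xe9'

    try:
        urlencode({'q': query})
    except UnicodeEncodeError as exc:
        print('raw unicode query fails: %s' % exc)

    print(urlencode({'q': query.encode('utf-8')}))  # q=caf%C3%A9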

+ 2 - 2
searx/search.py

@@ -18,7 +18,7 @@ class Search(object):
         self.pageno = 1
         self.lang = 'all'
         if request.cookies.get('blocked_engines'):
-            self.blocked_engines = request.cookies['blocked_engines'].split(',')
+            self.blocked_engines = request.cookies['blocked_engines'].split(',')  # noqa
         else:
             self.blocked_engines = []
         self.results = []
@@ -70,7 +70,7 @@ class Search(object):
 
         if len(self.engines):
             self.categories = list(set(engine['category']
-                                           for engine in self.engines))
+                                       for engine in self.engines))
         else:
             for pd_name, pd in self.request_data.items():
                 if pd_name.startswith('category_'):
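
Both search.py edits are PEP 8 cleanups rather than behaviour changes: the cookie line gains a # noqa marker so the line-length check (E501 in flake8/pep8) stops flagging it, and the continuation line of the generator expression is re-aligned with the opening delimiter, which is what the visual-indent rules (the E127/E128 family in pycodestyle) expect. A standalone sketch of the aligned form, with a made-up engines list:

    # Made-up engines list, only to show the continuation style the
    # patch switches to: the second line of the generator expression
    # lines up under the first argument after the opening parenthesis.
    engines = [{'category': 'general'},
               {'category': 'images'},
               {'category': 'general'}]

    categories = list(set(engine['category']
                          for engine in engines))
    print(categories)  # e.g. ['images', 'general']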

+ 8 - 5
searx/webapp.py

@@ -120,8 +120,10 @@ def index():
             search.paging = True
         if search.request_data.get('format', 'html') == 'html':
             if 'content' in result:
-                result['content'] = highlight_content(result['content'], search.query)
-            result['title'] = highlight_content(result['title'], search.query)
+                result['content'] = highlight_content(result['content'],
+                                                      search.query.encode('utf-8'))  # noqa
+            result['title'] = highlight_content(result['title'],
+                                                search.query.encode('utf-8'))
         else:
             if 'content' in result:
                 result['content'] = html_to_text(result['content']).strip()
@@ -139,7 +141,8 @@ def index():
                 result['favicon'] = engine
 
     if search.request_data.get('format') == 'json':
-        return Response(json.dumps({'query': search.query, 'results': search.results}),
+        return Response(json.dumps({'query': search.query,
+                                    'results': search.results}),
                         mimetype='application/json')
     elif search.request_data.get('format') == 'csv':
         csv = UnicodeWriter(cStringIO.StringIO())
@@ -151,8 +154,8 @@ def index():
                 csv.writerow([row.get(key, '') for key in keys])
         csv.stream.seek(0)
         response = Response(csv.stream.read(), mimetype='application/csv')
-        content_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
-        response.headers.add('Content-Disposition', content_disp)
+        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
+        response.headers.add('Content-Disposition', cont_disp)
         return response
     elif search.request_data.get('format') == 'rss':
         response_rss = render(
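
The webapp.py hunks combine the same two themes. The query is UTF-8 encoded before highlight_content() for the same unicode reason as in engines/__init__.py, while the remaining edits only re-wrap long lines: the json.dumps() call is split across two lines and content_disp is shortened to cont_disp so the Content-Disposition line fits within 79 columns. A minimal sketch of the JSON branch, assuming Flask's Response and invented query/results data:

    # Minimal sketch of the JSON response branch; json.dumps() handles
    # unicode on its own, so the two-line split in the patch is purely
    # about the 79-character limit.
    import json
    from flask import Response

    query = u'caf\xe9'
    results = [{'title': u'example', 'url': u'https://example.org/'}]

    response = Response(json.dumps({'query': query,
                                    'results': results}),
                        mimetype='application/json')
    print(response.get_data())  # {"query": "caf\u00e9", "results": ...}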