Browse Source

Revert "remove 'all' option from search languages"

This reverts commit 4d1770398a6af8902e75c0bd885781584d39e796.
Noémi Ványi 6 years ago
parent
commit
b63d645a52
51 changed files with 245 additions and 70 deletions
  1. 2 2
      searx/engines/archlinux.py
  2. 4 1
      searx/engines/bing.py
  3. 4 1
      searx/engines/bing_news.py
  4. 4 1
      searx/engines/dailymotion.py
  5. 9 3
      searx/engines/duckduckgo.py
  6. 6 2
      searx/engines/duckduckgo_images.py
  7. 4 1
      searx/engines/faroo.py
  8. 6 3
      searx/engines/gigablast.py
  9. 5 1
      searx/engines/google.py
  10. 4 3
      searx/engines/google_news.py
  11. 4 1
      searx/engines/mediawiki.py
  12. 4 3
      searx/engines/photon.py
  13. 3 2
      searx/engines/qwant.py
  14. 3 2
      searx/engines/startpage.py
  15. 1 1
      searx/engines/subtitleseeker.py
  16. 6 2
      searx/engines/swisscows.py
  17. 6 1
      searx/engines/twitter.py
  18. 4 1
      searx/engines/wikidata.py
  19. 3 0
      searx/engines/wikipedia.py
  20. 3 1
      searx/engines/yacy.py
  21. 13 4
      searx/engines/yahoo.py
  22. 4 1
      searx/engines/yahoo_news.py
  23. 3 1
      searx/engines/youtube_api.py
  24. 1 0
      searx/preferences.py
  25. 1 5
      searx/search.py
  26. 1 1
      searx/settings_robot.yml
  27. 1 0
      searx/templates/courgette/preferences.html
  28. 1 0
      searx/templates/legacy/preferences.html
  29. 6 5
      searx/templates/oscar/languages.html
  30. 1 0
      searx/templates/pix-art/preferences.html
  31. 11 6
      searx/webapp.py
  32. 1 1
      tests/unit/engines/test_archlinux.py
  33. 4 0
      tests/unit/engines/test_bing.py
  34. 4 0
      tests/unit/engines/test_bing_news.py
  35. 4 0
      tests/unit/engines/test_dailymotion.py
  36. 1 1
      tests/unit/engines/test_duckduckgo_images.py
  37. 4 0
      tests/unit/engines/test_faroo.py
  38. 6 1
      tests/unit/engines/test_gigablast.py
  39. 5 0
      tests/unit/engines/test_google_news.py
  40. 4 0
      tests/unit/engines/test_mediawiki.py
  41. 1 2
      tests/unit/engines/test_qwant.py
  42. 4 0
      tests/unit/engines/test_startpage.py
  43. 36 0
      tests/unit/engines/test_subtitleseeker.py
  44. 5 0
      tests/unit/engines/test_swisscows.py
  45. 6 0
      tests/unit/engines/test_twitter.py
  46. 2 0
      tests/unit/engines/test_wikidata.py
  47. 6 0
      tests/unit/engines/test_wikipedia.py
  48. 5 0
      tests/unit/engines/test_yacy.py
  49. 7 0
      tests/unit/engines/test_yahoo.py
  50. 7 0
      tests/unit/engines/test_yahoo_news.py
  51. 5 10
      tests/unit/test_preferences.py

+ 2 - 2
searx/engines/archlinux.py

@@ -36,7 +36,7 @@ def locale_to_lang_code(locale):
 # wikis for some languages were moved off from the main site, we need to make
 # requests to correct URLs to be able to get results in those languages
 lang_urls = {
-    'en': {
+    'all': {
         'base': 'https://wiki.archlinux.org',
         'search': '/index.php?title=Special:Search&offset={offset}&{query}'
     },
@@ -67,7 +67,7 @@ lang_urls = {
 def get_lang_urls(language):
     if language in lang_urls:
         return lang_urls[language]
-    return lang_urls['en']
+    return lang_urls['all']
 
 
 # Language names to build search requests for

+ 4 - 1
searx/engines/bing.py

@@ -34,7 +34,10 @@ search_string = 'search?{query}&first={offset}'
 def request(query, params):
     offset = (params['pageno'] - 1) * 10 + 1
 
-    lang = match_language(params['language'], supported_languages, language_aliases)
+    if params['language'] == 'all':
+        lang = 'EN'
+    else:
+        lang = match_language(params['language'], supported_languages, language_aliases)
 
     query = u'language:{} {}'.format(lang.split('-')[0].upper(), query.decode('utf-8')).encode('utf-8')
 

+ 4 - 1
searx/engines/bing_news.py

@@ -71,7 +71,10 @@ def request(query, params):
 
     offset = (params['pageno'] - 1) * 10 + 1
 
-    language = match_language(params['language'], supported_languages, language_aliases)
+    if params['language'] == 'all':
+        language = 'en-US'
+    else:
+        language = match_language(params['language'], supported_languages, language_aliases)
 
     params['url'] = _get_url(query, language, offset, params['time_range'])
 

+ 4 - 1
searx/engines/dailymotion.py

@@ -33,7 +33,10 @@ supported_languages_url = 'https://api.dailymotion.com/languages'
 
 # do search-request
 def request(query, params):
-    locale = match_language(params['language'], supported_languages)
+    if params['language'] == 'all':
+        locale = 'en-US'
+    else:
+        locale = match_language(params['language'], supported_languages)
 
     params['url'] = search_url.format(
         query=urlencode({'search': query, 'localization': locale}),

+ 9 - 3
searx/engines/duckduckgo.py

@@ -54,6 +54,9 @@ content_xpath = './/a[@class="result__snippet"]'
 
 # match query's language to a region code that duckduckgo will accept
 def get_region_code(lang, lang_list=[]):
+    if lang == 'all':
+        return None
+
     lang_code = match_language(lang, lang_list, language_aliases, 'wt-WT')
     lang_parts = lang_code.split('-')
 
@@ -61,7 +64,6 @@ def get_region_code(lang, lang_list=[]):
     return lang_parts[1].lower() + '-' + lang_parts[0].lower()
 
 
-# do search-request
 def request(query, params):
     if params['time_range'] and params['time_range'] not in time_range_dict:
         return params
@@ -69,8 +71,12 @@ def request(query, params):
     offset = (params['pageno'] - 1) * 30
 
     region_code = get_region_code(params['language'], supported_languages)
-    params['url'] = url.format(
-        query=urlencode({'q': query, 'kl': region_code}), offset=offset, dc_param=offset)
+    if region_code:
+        params['url'] = url.format(
+            query=urlencode({'q': query, 'kl': region_code}), offset=offset, dc_param=offset)
+    else:
+        params['url'] = url.format(
+            query=urlencode({'q': query}), offset=offset, dc_param=offset)
 
     if params['time_range'] in time_range_dict:
         params['url'] += time_range_url.format(range=time_range_dict[params['time_range']])

+ 6 - 2
searx/engines/duckduckgo_images.py

@@ -56,8 +56,12 @@ def request(query, params):
     safesearch = params['safesearch'] - 1
 
     region_code = get_region_code(params['language'], lang_list=supported_languages)
-    params['url'] = images_url.format(
-        query=urlencode({'q': query, 'l': region_code}), offset=offset, safesearch=safesearch, vqd=vqd)
+    if region_code:
+        params['url'] = images_url.format(
+            query=urlencode({'q': query, 'l': region_code}), offset=offset, safesearch=safesearch, vqd=vqd)
+    else:
+        params['url'] = images_url.format(
+            query=urlencode({'q': query}), offset=offset, safesearch=safesearch, vqd=vqd)
 
     return params
 

+ 4 - 1
searx/engines/faroo.py

@@ -40,7 +40,10 @@ def request(query, params):
     offset = (params['pageno'] - 1) * number_of_results + 1
     categorie = search_category.get(params['category'], 'web')
 
-    language = params['language'].split('-')[0]
+    if params['language'] == 'all':
+        language = 'en'
+    else:
+        language = params['language'].split('-')[0]
 
     # if language is not supported, put it in english
     if language != 'en' and\

+ 6 - 3
searx/engines/gigablast.py

@@ -50,9 +50,12 @@ supported_languages_url = 'https://gigablast.com/search?&rxikd=1'
 def request(query, params):
     offset = (params['pageno'] - 1) * number_of_results
 
-    language = params['language'].replace('-', '_').lower()
-    if language.split('-')[0] != 'zh':
-        language = language.split('-')[0]
+    if params['language'] == 'all':
+        language = 'xx'
+    else:
+        language = params['language'].replace('-', '_').lower()
+        if language.split('-')[0] != 'zh':
+            language = language.split('-')[0]
 
     if params['safesearch'] >= 1:
         safesearch = 1

+ 5 - 1
searx/engines/google.py

@@ -166,7 +166,11 @@ def extract_text_from_dom(result, xpath):
 def request(query, params):
     offset = (params['pageno'] - 1) * 10
 
-    language = match_language(params['language'], supported_languages, language_aliases)
+    if params['language'] == 'all' or params['language'] == 'en-US':
+        language = 'en-GB'
+    else:
+        language = match_language(params['language'], supported_languages, language_aliases)
+
     language_array = language.split('-')
     if params['language'].find('-') > 0:
         country = params['language'].split('-')[1]

+ 4 - 3
searx/engines/google_news.py

@@ -51,9 +51,10 @@ def request(query, params):
     params['url'] = search_url.format(query=urlencode({'q': query}),
                                       search_options=urlencode(search_options))
 
-    language = match_language(params['language'], supported_languages, language_aliases).split('-')[0]
-    if language:
-        params['url'] += '&lr=lang_' + language
+    if params['language'] != 'all':
+        language = match_language(params['language'], supported_languages, language_aliases).split('-')[0]
+        if language:
+            params['url'] += '&lr=lang_' + language
 
     return params
 

+ 4 - 1
searx/engines/mediawiki.py

@@ -45,7 +45,10 @@ def request(query, params):
 
     format_strings = list(Formatter().parse(base_url))
 
-    language = params['language'].split('-')[0]
+    if params['language'] == 'all':
+        language = 'en'
+    else:
+        language = params['language'].split('-')[0]
 
     # format_string [('https://', 'language', '', None), ('.wikipedia.org/', None, None, None)]
     if any(x[1] == 'language' for x in format_strings):

+ 4 - 3
searx/engines/photon.py

@@ -35,9 +35,10 @@ def request(query, params):
         search_string.format(query=urlencode({'q': query}),
                              limit=number_of_results)
 
-    language = params['language'].split('-')[0]
-    if language in supported_languages:
-        params['url'] = params['url'] + "&lang=" + language
+    if params['language'] != 'all':
+        language = params['language'].split('-')[0]
+        if language in supported_languages:
+            params['url'] = params['url'] + "&lang=" + language
 
     # using searx User-Agent
     params['headers']['User-Agent'] = searx_useragent()

+ 3 - 2
searx/engines/qwant.py

@@ -46,8 +46,9 @@ def request(query, params):
                                    offset=offset)
 
     # add language tag
-    language = match_language(params['language'], supported_languages, language_aliases)
-    params['url'] += '&locale=' + language.replace('-', '_').lower()
+    if params['language'] != 'all':
+        language = match_language(params['language'], supported_languages, language_aliases)
+        params['url'] += '&locale=' + language.replace('-', '_').lower()
 
     return params
 

+ 3 - 2
searx/engines/startpage.py

@@ -46,8 +46,9 @@ def request(query, params):
     params['data'] = {'query': query,
                       'startat': offset}
 
-    # set language
-    params['data']['with_language'] = ('lang_' + params['language'].split('-')[0])
+    # set language if specified
+    if params['language'] != 'all':
+        params['data']['with_language'] = ('lang_' + params['language'].split('-')[0])
 
     return params
 

+ 1 - 1
searx/engines/subtitleseeker.py

@@ -48,7 +48,7 @@ def response(resp):
         search_lang = 'Farsi'
     elif resp.search_params['language'] == 'pt-BR':
         search_lang = 'Brazilian'
-    else:
+    elif resp.search_params['language'] != 'all':
         search_lang = [lc[3]
                        for lc in language_codes
                        if lc[0].split('-')[0] == resp.search_params['language'].split('-')[0]]

+ 6 - 2
searx/engines/swisscows.py

@@ -36,8 +36,12 @@ regex_img_url_remove_start = re.compile(b'^https?://i\.swisscows\.ch/\?link=')
 
 # do search-request
 def request(query, params):
-    region = match_language(params['language'], supported_languages, language_aliases)
-    ui_language = region.split('-')[0]
+    if params['language'] == 'all':
+        ui_language = 'browser'
+        region = 'browser'
+    else:
+        region = match_language(params['language'], supported_languages, language_aliases)
+        ui_language = region.split('-')[0]
 
     search_path = search_string.format(
         query=urlencode({'query': query, 'uiLanguage': ui_language, 'region': region}),

+ 6 - 1
searx/engines/twitter.py

@@ -37,7 +37,12 @@ timestamp_xpath = './/span[contains(@class,"_timestamp")]'
 # do search-request
 def request(query, params):
     params['url'] = search_url + urlencode({'q': query})
-    params['cookies']['lang'] = params['language'].split('-')[0]
+
+    # set language if specified
+    if params['language'] != 'all':
+        params['cookies']['lang'] = params['language'].split('-')[0]
+    else:
+        params['cookies']['lang'] = 'en'
 
     return params
 

+ 4 - 1
searx/engines/wikidata.py

@@ -68,7 +68,10 @@ def response(resp):
     html = fromstring(resp.text)
     search_results = html.xpath(wikidata_ids_xpath)
 
-    language = match_language(resp.search_params['language'], supported_languages, language_aliases).split('-')[0]
+    if resp.search_params['language'].split('-')[0] == 'all':
+        language = 'en'
+    else:
+        language = match_language(resp.search_params['language'], supported_languages, language_aliases).split('-')[0]
 
     # TODO: make requests asynchronous to avoid timeout when result_count > 1
     for search_result in search_results[:result_count]:

+ 3 - 0
searx/engines/wikipedia.py

@@ -31,6 +31,9 @@ supported_languages_url = 'https://meta.wikimedia.org/wiki/List_of_Wikipedias'
 
 # set language in base_url
 def url_lang(lang):
+    lang_pre = lang.split('-')[0]
+    if lang_pre == 'all' or lang_pre not in supported_languages:
+        return 'en'
     return match_language(lang, supported_languages, language_aliases).split('-')[0]
 
 

+ 3 - 1
searx/engines/yacy.py

@@ -51,7 +51,9 @@ def request(query, params):
                           limit=number_of_results,
                           search_type=search_type)
 
-    params['url'] += '&lr=lang_' + params['language'].split('-')[0]
+    # add language tag if specified
+    if params['language'] != 'all':
+        params['url'] += '&lr=lang_' + params['language'].split('-')[0]
 
     return params
 

+ 13 - 4
searx/engines/yahoo.py

@@ -73,16 +73,25 @@ def _get_url(query, offset, language, time_range):
                                         lang=language)
 
 
+def _get_language(params):
+    if params['language'] == 'all':
+        return 'en'
+
+    language = match_language(params['language'], supported_languages, language_aliases)
+    if language not in language_aliases.values():
+        language = language.split('-')[0]
+    language = language.replace('-', '_').lower()
+
+    return language
+
+
 # do search-request
 def request(query, params):
     if params['time_range'] and params['time_range'] not in time_range_dict:
         return params
 
     offset = (params['pageno'] - 1) * 10 + 1
-    language = match_language(params['language'], supported_languages, language_aliases)
-    if language not in language_aliases.values():
-        language = language.split('-')[0]
-    language = language.replace('-', '_').lower()
+    language = _get_language(params)
 
     params['url'] = _get_url(query, offset, language, params['time_range'])
 

+ 4 - 1
searx/engines/yahoo_news.py

@@ -41,7 +41,10 @@ suggestion_xpath = '//div[contains(@class,"VerALSOTRY")]//a'
 def request(query, params):
     offset = (params['pageno'] - 1) * 10 + 1
 
-    language = match_language(params['language'], supported_languages, language_aliases).split('-')[0]
+    if params['language'] == 'all':
+        language = 'en'
+    else:
+        language = match_language(params['language'], supported_languages, language_aliases).split('-')[0]
 
     params['url'] = search_url.format(offset=offset,
                                       query=urlencode({'p': query}),

+ 3 - 1
searx/engines/youtube_api.py

@@ -34,7 +34,9 @@ def request(query, params):
     params['url'] = search_url.format(query=urlencode({'q': query}),
                                       api_key=api_key)
 
-    params['url'] += '&relevanceLanguage=' + params['language'].split('-')[0]
+    # add language tag if specified
+    if params['language'] != 'all':
+        params['url'] += '&relevanceLanguage=' + params['language'].split('-')[0]
 
     return params
 

+ 1 - 0
searx/preferences.py

@@ -12,6 +12,7 @@ if version[0] == '3':
 
 COOKIE_MAX_AGE = 60 * 60 * 24 * 365 * 5  # 5 years
 LANGUAGE_CODES = [l[0] for l in languages]
+LANGUAGE_CODES.append('all')
 DISABLED = 0
 ENABLED = 1
 DOI_RESOLVERS = list(settings['doi_resolvers'])

+ 1 - 5
searx/search.py

@@ -24,7 +24,7 @@ from flask_babel import gettext
 import requests.exceptions
 import searx.poolrequests as requests_lib
 from searx.engines import (
-    categories, engines, settings
+    categories, engines
 )
 from searx.answerers import ask
 from searx.utils import gen_useragent
@@ -221,10 +221,6 @@ def get_search_query_from_webapp(preferences, form):
     else:
         query_lang = preferences.get_value('language')
 
-    # provides backwards compatibility for requests using old language default
-    if query_lang == 'all':
-        query_lang = settings['search']['language']
-
     # check language
     if not VALID_LANGUAGE_CODE.match(query_lang):
         raise SearxParameterException('language', query_lang)

+ 1 - 1
searx/settings_robot.yml

@@ -5,7 +5,7 @@ general:
 search:
     safe_search : 0
     autocomplete : ""
-    language: "en-US"
+    language: "all"
 
 server:
     port : 11111

+ 1 - 0
searx/templates/courgette/preferences.html

@@ -13,6 +13,7 @@
         <legend>{{ _('Search language') }}</legend>
         <p>
             <select name='language'>
+                <option value="all" {% if current_language == 'all' %}selected="selected"{% endif %}>{{ _('Default language') }}</option>
                 {% for lang_id,lang_name,country_name,english_name in language_codes | sort(attribute=1) %}
                 <option value="{{ lang_id }}" {% if lang_id == current_language %}selected="selected"{% endif %}>{{ lang_name }} {% if country_name %}({{ country_name }}) {% endif %}- {{ lang_id }}</option>
                 {% endfor %}

+ 1 - 0
searx/templates/legacy/preferences.html

@@ -14,6 +14,7 @@
         <legend>{{ _('Search language') }}</legend>
         <p>
         <select name='language'>
+            <option value="all" {% if current_language == 'all' %}selected="selected"{% endif %}>{{ _('Default language') }}</option>
             {% for lang_id,lang_name,country_name,english_name in language_codes | sort(attribute=1) %}
             <option value="{{ lang_id }}" {% if lang_id == current_language %}selected="selected"{% endif %}>{{ lang_name }} {% if country_name %}({{ country_name }}) {% endif %}- {{ lang_id }}</option>
             {% endfor %}

+ 6 - 5
searx/templates/oscar/languages.html

@@ -3,9 +3,10 @@
 {% else %}
 <select class="time_range custom-select form-control" id='language' name='language'>
 {% endif %}
-	{% for lang_id,lang_name,country_name,english_name in language_codes | sort(attribute=1) %}
-	<option value="{{ lang_id }}" {% if lang_id == current_language %}selected="selected"{% endif %}>
-		{{ lang_name }} {% if country_name %}({{ country_name }}) {% endif %}- {{ lang_id }}
-	</option>
-	{% endfor %}
+	<option value="all" {% if current_language == 'all' %}selected="selected"{% endif %}>{{ _('Default language') }}</option>
+		{% for lang_id,lang_name,country_name,english_name in language_codes | sort(attribute=1) %}
+		<option value="{{ lang_id }}" {% if lang_id == current_language %}selected="selected"{% endif %}>
+			{{ lang_name }} {% if country_name %}({{ country_name }}) {% endif %}- {{ lang_id }}
+		</option>
+		{% endfor %}
 </select>

+ 1 - 0
searx/templates/pix-art/preferences.html

@@ -9,6 +9,7 @@
         <legend>{{ _('Search language') }}</legend>
         <p>
         <select name='language'>
+            <option value="all" {% if current_language == 'all' %}selected="selected"{% endif %}>{{ _('Default language') }}</option>
             {% for lang_id,lang_name,country_name,english_name in language_codes | sort(attribute=1) %}
             <option value="{{ lang_id }}" {% if lang_id == current_language %}selected="selected"{% endif %}>{{ lang_name }} {% if country_name %}({{ country_name }}) {% endif %}- {{ lang_id }}</option>
             {% endfor %}

+ 11 - 6
searx/webapp.py

@@ -637,8 +637,8 @@ def autocompleter():
     if len(raw_results) <= 3 and completer:
         # get language from cookie
         language = request.preferences.get_value('language')
-        if not language:
-            language = settings['search']['language']
+        if not language or language == 'all':
+            language = 'en'
         else:
             language = language.split('-')[0]
         # run autocompletion
@@ -691,10 +691,7 @@ def preferences():
                              'warn_time': False}
             if e.timeout > settings['outgoing']['request_timeout']:
                 stats[e.name]['warn_timeout'] = True
-            if match_language(request.preferences.get_value('language'),
-                              getattr(e, 'supported_languages', []),
-                              getattr(e, 'language_aliases', {}), None):
-                stats[e.name]['supports_selected_language'] = True
+            stats[e.name]['supports_selected_language'] = _is_selected_language_supported(e, request.preferences)
 
     # get first element [0], the engine time,
     # and then the second element [1] : the time (the first one is the label)
@@ -725,6 +722,14 @@ def preferences():
                   preferences=True)
 
 
+def _is_selected_language_supported(engine, preferences):
+    language = preferences.get_value('language')
+    return (language == 'all'
+            or match_language(language,
+                              getattr(engine, 'supported_languages', []),
+                              getattr(engine, 'language_aliases', {}), None))
+
+
 @app.route('/image_proxy', methods=['GET'])
 def image_proxy():
     url = request.args.get('url').encode('utf-8')

+ 1 - 1
tests/unit/engines/test_archlinux.py

@@ -4,7 +4,7 @@ from searx.engines import archlinux
 from searx.testing import SearxTestCase
 
 domains = {
-    'en': 'https://wiki.archlinux.org',
+    'all': 'https://wiki.archlinux.org',
     'de': 'https://wiki.archlinux.de',
     'fr': 'https://wiki.archlinux.fr',
     'ja': 'https://wiki.archlinuxjp.org',

+ 4 - 0
tests/unit/engines/test_bing.py

@@ -18,6 +18,10 @@ class TestBingEngine(SearxTestCase):
         self.assertTrue('language%3AFR' in params['url'])
         self.assertTrue('bing.com' in params['url'])
 
+        dicto['language'] = 'all'
+        params = bing.request(query.encode('utf-8'), dicto)
+        self.assertTrue('language' in params['url'])
+
     def test_response(self):
         self.assertRaises(AttributeError, bing.response, None)
         self.assertRaises(AttributeError, bing.response, [])

+ 4 - 0
tests/unit/engines/test_bing_news.py

@@ -20,6 +20,10 @@ class TestBingNewsEngine(SearxTestCase):
         self.assertIn('bing.com', params['url'])
         self.assertIn('fr', params['url'])
 
+        dicto['language'] = 'all'
+        params = bing_news.request(query, dicto)
+        self.assertIn('en', params['url'])
+
     def test_no_url_in_request_year_time_range(self):
         dicto = defaultdict(dict)
         query = 'test_query'

+ 4 - 0
tests/unit/engines/test_dailymotion.py

@@ -19,6 +19,10 @@ class TestDailymotionEngine(SearxTestCase):
         self.assertTrue('dailymotion.com' in params['url'])
         self.assertTrue('fr' in params['url'])
 
+        dicto['language'] = 'all'
+        params = dailymotion.request(query, dicto)
+        self.assertTrue('en' in params['url'])
+
     def test_response(self):
         self.assertRaises(AttributeError, dailymotion.response, None)
         self.assertRaises(AttributeError, dailymotion.response, [])

+ 1 - 1
tests/unit/engines/test_duckduckgo_images.py

@@ -14,7 +14,7 @@ class TestDuckduckgoImagesEngine(SearxTestCase):
         dicto['is_test'] = True
         dicto['pageno'] = 1
         dicto['safesearch'] = 0
-        dicto['language'] = 'en-US'
+        dicto['language'] = 'all'
         params = duckduckgo_images.request(query, dicto)
         self.assertIn('url', params)
         self.assertIn(query, params['url'])

+ 4 - 0
tests/unit/engines/test_faroo.py

@@ -20,6 +20,10 @@ class TestFarooEngine(SearxTestCase):
         self.assertIn('en', params['url'])
         self.assertIn('web', params['url'])
 
+        dicto['language'] = 'all'
+        params = faroo.request(query, dicto)
+        self.assertIn('en', params['url'])
+
         dicto['language'] = 'de-DE'
         params = faroo.request(query, dicto)
         self.assertIn('de', params['url'])

+ 6 - 1
tests/unit/engines/test_gigablast.py

@@ -11,11 +11,16 @@ class TestGigablastEngine(SearxTestCase):
         dicto = defaultdict(dict)
         dicto['pageno'] = 0
         dicto['safesearch'] = 0
-        dicto['language'] = 'en-US'
+        dicto['language'] = 'all'
         params = gigablast.request(query, dicto)
         self.assertTrue('url' in params)
         self.assertTrue(query in params['url'])
         self.assertTrue('gigablast.com' in params['url'])
+        self.assertTrue('xx' in params['url'])
+
+        dicto['language'] = 'en-US'
+        params = gigablast.request(query, dicto)
+        self.assertTrue('en' in params['url'])
         self.assertFalse('en-US' in params['url'])
 
     def test_response(self):

+ 5 - 0
tests/unit/engines/test_google_news.py

@@ -21,6 +21,11 @@ class TestGoogleNewsEngine(SearxTestCase):
         self.assertIn(query, params['url'])
         self.assertIn('fr', params['url'])
 
+        dicto['language'] = 'all'
+        params = google_news.request(query, dicto)
+        self.assertIn('url', params)
+        self.assertNotIn('fr', params['url'])
+
     def test_response(self):
         self.assertRaises(AttributeError, google_news.response, None)
         self.assertRaises(AttributeError, google_news.response, [])

+ 4 - 0
tests/unit/engines/test_mediawiki.py

@@ -18,6 +18,10 @@ class TestMediawikiEngine(SearxTestCase):
         self.assertIn('wikipedia.org', params['url'])
         self.assertIn('fr', params['url'])
 
+        dicto['language'] = 'all'
+        params = mediawiki.request(query, dicto)
+        self.assertIn('en', params['url'])
+
         mediawiki.base_url = "http://test.url/"
         mediawiki.search_url = mediawiki.base_url +\
                                  'w/api.php?action=query'\

+ 1 - 2
tests/unit/engines/test_qwant.py

@@ -21,11 +21,10 @@ class TestQwantEngine(SearxTestCase):
         self.assertIn('qwant.com', params['url'])
         self.assertIn('fr_fr', params['url'])
 
-        dicto['language'] = 'en-US'
+        dicto['language'] = 'all'
         qwant.categories = ['news']
         params = qwant.request(query, dicto)
         self.assertFalse('fr' in params['url'])
-        self.assertIn('en_us', params['url'])
         self.assertIn('news', params['url'])
 
         dicto['language'] = 'fr'

+ 4 - 0
tests/unit/engines/test_startpage.py

@@ -21,6 +21,10 @@ class TestStartpageEngine(SearxTestCase):
         self.assertIn('with_language', params['data'])
         self.assertIn('lang_fr', params['data']['with_language'])
 
+        dicto['language'] = 'all'
+        params = startpage.request(query, dicto)
+        self.assertNotIn('with_language', params['data'])
+
     def test_response(self):
         self.assertRaises(AttributeError, startpage.response, None)
         self.assertRaises(AttributeError, startpage.response, [])

+ 36 - 0
tests/unit/engines/test_subtitleseeker.py

@@ -73,6 +73,42 @@ class TestSubtitleseekerEngine(SearxTestCase):
         results = subtitleseeker.response(response)
         self.assertEqual(results[0]['url'], 'http://this.is.the.url/Brazilian/')
 
+        html = """
+        <div class="boxRows">
+            <div class="boxRowsInner" style="width:600px;">
+                <img src="http://static.subtitleseeker.com/images/movie.gif"
+                    style="width:16px; height:16px;" class="icon">
+                <a href="http://this.is.the.url/"
+                    class="blue" title="Title subtitle" >
+                    This is the Title
+                </a>
+            </div>
+            <div class="boxRowsInner f12b red" style="width:70px;">
+                1998
+            </div>
+            <div class="boxRowsInner grey-web f12" style="width:120px;">
+                <img src="http://static.subtitleseeker.com/images/basket_put.png"
+                    style="width:16px; height:16px;" class="icon">
+                1039 Subs
+            </div>
+            <div class="boxRowsInner grey-web f10" style="width:130px;">
+                <img src="http://static.subtitleseeker.com/images/arrow_refresh_small.png"
+                    style="width:16px; height:16px;" class="icon">
+                1 hours ago
+            </div>
+            <div class="clear"></div>
+        </div>
+        """
+        dicto['language'] = 'all'
+        response = mock.Mock(text=html, search_params=dicto)
+        results = subtitleseeker.response(response)
+        self.assertEqual(type(results), list)
+        self.assertEqual(len(results), 1)
+        self.assertEqual(results[0]['title'], 'This is the Title')
+        self.assertEqual(results[0]['url'], 'http://this.is.the.url/')
+        self.assertIn('1998', results[0]['content'])
+        self.assertIn('1039 Subs', results[0]['content'])
+
         html = """
         <div class="boxRows">
             <div class="boxRowsInner" style="width:600px;">

+ 5 - 0
tests/unit/engines/test_swisscows.py

@@ -20,6 +20,11 @@ class TestSwisscowsEngine(SearxTestCase):
         self.assertTrue('uiLanguage=de' in params['url'])
         self.assertTrue('region=de-DE' in params['url'])
 
+        dicto['language'] = 'all'
+        params = swisscows.request(query, dicto)
+        self.assertTrue('uiLanguage=browser' in params['url'])
+        self.assertTrue('region=browser' in params['url'])
+
         dicto['category'] = 'images'
         params = swisscows.request(query, dicto)
         self.assertIn('image', params['url'])

+ 6 - 0
tests/unit/engines/test_twitter.py

@@ -20,6 +20,12 @@ class TestTwitterEngine(SearxTestCase):
         self.assertIn('lang', params['cookies'])
         self.assertIn('fr', params['cookies']['lang'])
 
+        dicto['language'] = 'all'
+        params = twitter.request(query, dicto)
+        self.assertIn('cookies', params)
+        self.assertIn('lang', params['cookies'])
+        self.assertIn('en', params['cookies']['lang'])
+
     def test_response(self):
         self.assertRaises(AttributeError, twitter.response, None)
         self.assertRaises(AttributeError, twitter.response, [])

+ 2 - 0
tests/unit/engines/test_wikidata.py

@@ -11,11 +11,13 @@ class TestWikidataEngine(SearxTestCase):
     def test_request(self):
         query = 'test_query'
         dicto = defaultdict(dict)
+        dicto['language'] = 'all'
         params = wikidata.request(query, dicto)
         self.assertIn('url', params)
         self.assertIn(query, params['url'])
         self.assertIn('wikidata.org', params['url'])
 
+        dicto['language'] = 'es_ES'
         params = wikidata.request(query, dicto)
         self.assertIn(query, params['url'])
 

+ 6 - 0
tests/unit/engines/test_wikipedia.py

@@ -26,9 +26,15 @@ class TestWikipediaEngine(SearxTestCase):
         self.assertIn('Test_Query', params['url'])
         self.assertNotIn('test_query', params['url'])
 
+        dicto['language'] = 'nb'
+        params = wikipedia.request(query, dicto)
+        self.assertIn('no.wikipedia.org', params['url'])
+        dicto['language'] = 'all'
+        params = wikipedia.request(query, dicto)
+        self.assertIn('en', params['url'])
 
         dicto['language'] = 'xx'
         params = wikipedia.request(query, dicto)

+ 5 - 0
tests/unit/engines/test_yacy.py

@@ -17,6 +17,11 @@ class TestYacyEngine(SearxTestCase):
         self.assertIn('localhost', params['url'])
         self.assertIn('fr', params['url'])
 
+        dicto['language'] = 'all'
+        params = yacy.request(query, dicto)
+        self.assertIn('url', params)
+        self.assertNotIn('lr=lang_', params['url'])
+
     def test_response(self):
         self.assertRaises(AttributeError, yacy.response, None)
         self.assertRaises(AttributeError, yacy.response, [])

+ 7 - 0
tests/unit/engines/test_yahoo.py

@@ -50,6 +50,13 @@ class TestYahooEngine(SearxTestCase):
         self.assertIn('zh_cht', params['url'])
         self.assertIn('zh_cht', params['cookies']['sB'])
 
+        dicto['language'] = 'all'
+        params = yahoo.request(query, dicto)
+        self.assertIn('cookies', params)
+        self.assertIn('sB', params['cookies'])
+        self.assertIn('en', params['cookies']['sB'])
+        self.assertIn('en', params['url'])
+
     def test_no_url_in_request_year_time_range(self):
         dicto = defaultdict(dict)
         query = 'test_query'

+ 7 - 0
tests/unit/engines/test_yahoo_news.py

@@ -23,6 +23,13 @@ class TestYahooNewsEngine(SearxTestCase):
         self.assertIn('sB', params['cookies'])
         self.assertIn('fr', params['cookies']['sB'])
 
+        dicto['language'] = 'all'
+        params = yahoo_news.request(query, dicto)
+        self.assertIn('cookies', params)
+        self.assertIn('sB', params['cookies'])
+        self.assertIn('en', params['cookies']['sB'])
+        self.assertIn('en', params['url'])
+
     def test_sanitize_url(self):
         url = "test.url"
         self.assertEqual(url, yahoo_news.sanitize_url(url))

+ 5 - 10
tests/unit/test_preferences.py

@@ -90,30 +90,25 @@ class TestSettings(SearxTestCase):
 
     # search language settings
     def test_lang_setting_valid_choice(self):
-        setting = SearchLanguageSetting('en', choices=['de', 'en'])
+        setting = SearchLanguageSetting('all', choices=['all', 'de', 'en'])
         setting.parse('de')
         self.assertEquals(setting.get_value(), 'de')
 
     def test_lang_setting_invalid_choice(self):
-        setting = SearchLanguageSetting('en', choices=['de', 'en'])
+        setting = SearchLanguageSetting('all', choices=['all', 'de', 'en'])
         setting.parse('xx')
-        self.assertEquals(setting.get_value(), 'en')
+        self.assertEquals(setting.get_value(), 'all')
 
     def test_lang_setting_old_cookie_choice(self):
-        setting = SearchLanguageSetting('en', choices=['en', 'es', 'es-ES'])
+        setting = SearchLanguageSetting('all', choices=['all', 'es', 'es-ES'])
         setting.parse('es_XA')
         self.assertEquals(setting.get_value(), 'es')
 
     def test_lang_setting_old_cookie_format(self):
-        setting = SearchLanguageSetting('en', choices=['en', 'es', 'es-ES'])
+        setting = SearchLanguageSetting('all', choices=['all', 'es', 'es-ES'])
         setting.parse('es_ES')
         self.assertEquals(setting.get_value(), 'es-ES')
 
-    def test_lang_setting_old_default(self):
-        setting = SearchLanguageSetting('en', choices=['en', 'es', 'de'])
-        setting.parse('all')
-        self.assertEquals(setting.get_value(), 'en')
-
     # plugins settings
     def test_plugins_setting_all_default_enabled(self):
         plugin1 = PluginStub('plugin1', True)