@@ -1,73 +1,207 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # lint: pylint
-"""DuckDuckGo Lite
+"""
+DuckDuckGo Lite
+~~~~~~~~~~~~~~~
 """

+from typing import TYPE_CHECKING
+from urllib.parse import urlencode
 import json
-from lxml import html
+import babel
+import lxml.html

+from searx import (
+    network,
+    locales,
+    redislib,
+)
+from searx import redisdb
 from searx.utils import (
-    dict_subset,
     eval_xpath,
     eval_xpath_getindex,
     extract_text,
-    match_language,
 )
-from searx import network
 from searx.enginelib.traits import EngineTraits
+from searx.exceptions import SearxEngineAPIException
+
+if TYPE_CHECKING:
+    import logging
+
+    logger: logging.Logger

 traits: EngineTraits

-# about
 about = {
     "website": 'https://lite.duckduckgo.com/lite/',
     "wikidata_id": 'Q12805',
-    "official_api_documentation": 'https://duckduckgo.com/api',
     "use_official_api": False,
     "require_api_key": False,
     "results": 'HTML',
 }

+send_accept_language_header = True
+"""DuckDuckGo-Lite tries to guess the user's preferred language from the HTTP
+``Accept-Language`` header.  Optionally the user can select a region filter
+(but not a language).
+"""
+
 # engine dependent config
 categories = ['general', 'web']
 paging = True
-supported_languages_url = 'https://duckduckgo.com/util/u588.js'
 time_range_support = True
-send_accept_language_header = True
+safesearch = True  # user can't select but the results are filtered

-language_aliases = {
-    'ar-SA': 'ar-XA',
-    'es-419': 'es-XL',
-    'ja': 'jp-JP',
-    'ko': 'kr-KR',
-    'sl-SI': 'sl-SL',
-    'zh-TW': 'tzh-TW',
-    'zh-HK': 'tzh-HK',
-}
+url = 'https://lite.duckduckgo.com/lite/'
+# url_ping = 'https://duckduckgo.com/t/sl_l'

 time_range_dict = {'day': 'd', 'week': 'w', 'month': 'm', 'year': 'y'}
+form_data = {'v': 'l', 'api': 'd.js', 'o': 'json'}
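+# default values of the paging form fields; response() updates them from the
+# form of the result page and request() sends them with follow-up page requests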

-# search-url
-url = 'https://lite.duckduckgo.com/lite/'
-url_ping = 'https://duckduckgo.com/t/sl_l'

-# match query's language to a region code that duckduckgo will accept
-def get_region_code(lang, lang_list=None):
-    if lang == 'all':
-        return None
+def cache_vqd(query, value):
+    """Caches a ``vqd`` value from a query.
+
+    The vqd value depends on the query string and is needed for the follow-up
+    pages or the images loaded by a XMLHttpRequest:
+
+    - DuckDuckGo Web: `https://links.duckduckgo.com/d.js?q=...&vqd=...`
+    - DuckDuckGo Images: `https://duckduckgo.com/i.js?q=...&vqd=...`
+
+    """
+    c = redisdb.client()
+    if c:
+        logger.debug("cache vqd value: %s", value)
+        key = 'SearXNG_ddg_vqd' + redislib.secret_hash(query)
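+        # the cached value expires after 600 seconds; get_vqd() re-uses it for
+        # follow-up requests made within that window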
+        c.set(key, value, ex=600)
+
+
+def get_vqd(query, headers):
+    """Returns the ``vqd`` that fits the *query*.  If there is no ``vqd`` cached
+    (:py:obj:`cache_vqd`) the query is sent to DDG to get a vqd value from the
+    response.
+
+    """
+    value = None
+    c = redisdb.client()
+    if c:
+        key = 'SearXNG_ddg_vqd' + redislib.secret_hash(query)
+        value = c.get(key)
+        if value:
+            value = value.decode('utf-8')
+            logger.debug("re-use cached vqd value: %s", value)
+            return value

-    lang_code = match_language(lang, lang_list or [], language_aliases, 'wt-WT')
-    lang_parts = lang_code.split('-')
+    query_url = 'https://duckduckgo.com/?{query}&iar=images'.format(query=urlencode({'q': query}))
+    res = network.get(query_url, headers=headers)
+    content = res.text
+    if content.find('vqd=\'') == -1:
+        raise SearxEngineAPIException('Request failed')
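+    # the vqd token is embedded in the HTML as  vqd='...' ; cut out the value
+    # between the two single quotes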
+    value = content[content.find('vqd=\'') + 5 :]
+    value = value[: value.find('\'')]
+    logger.debug("new vqd value: %s", value)
+    cache_vqd(query, value)
+    return value

-    # country code goes first
-    return lang_parts[1].lower() + '-' + lang_parts[0].lower()
+
+def get_ddg_lang(eng_traits: EngineTraits, sxng_locale, default='en_US'):
+    """Get DuckDuckGo's language identifier from SearXNG's locale.
+
+    DuckDuckGo defines its languages by region codes (see
+    :py:obj:`fetch_traits`).
+
+    To get region and language of a DDG service use:
+
+    .. code:: python
+
+       eng_region = traits.get_region(params['searxng_locale'], traits.all_locale)
+       eng_lang = get_ddg_lang(traits, params['searxng_locale'])
+
+    It might be confusing, but the ``l`` value of the cookie is what SearXNG
+    calls the *region*:
+
+    .. code:: python
+
+        # !ddi paris :es-AR --> {'ad': 'es_AR', 'ah': 'ar-es', 'l': 'ar-es'}
+        params['cookies']['ad'] = eng_lang
+        params['cookies']['ah'] = eng_region
+        params['cookies']['l'] = eng_region
+
+    .. hint::
+
+       `DDG-lite <https://lite.duckduckgo.com/lite>`__ does not offer a language
+       selection to the user; only a region can be selected
+       (``eng_region`` from the example above).  DDG-lite stores the selected
+       region in a cookie::
+
+         params['cookies']['kl'] = eng_region  # 'ar-es'
+
+    """
+    return eng_traits.custom['lang_region'].get(sxng_locale, eng_traits.get_language(sxng_locale, default))
+
+
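+# DDG region codes for which the generic mapping in fetch_traits
+# (territory-lang --> lang_TERRITORY) does not give a valid babel tag;
+# 'skip' drops the region entirely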
+ddg_reg_map = {
+    'tw-tzh': 'zh_TW',
+    'hk-tzh': 'zh_HK',
+    'ct-ca': 'skip',  # ct-ca and es-ca both map to ca_ES
+    'es-ca': 'ca_ES',
+    'id-en': 'id_ID',
+    'no-no': 'nb_NO',
+    'jp-jp': 'ja_JP',
+    'kr-kr': 'ko_KR',
+    'xa-ar': 'ar_SA',
+    'sl-sl': 'sl_SI',
+    'th-en': 'th_TH',
+    'vn-en': 'vi_VN',
+}
+
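+# DDG languages that can not be passed to babel as-is: 'lang_region' entries
+# are registered in engine_traits.custom['lang_region'] instead of the plain
+# language list, 'skip' entries are ignored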
+ddg_lang_map = {
+    # use ar --> ar_EG (Egypt's Arabic)
+    "ar_DZ": 'lang_region',
+    "ar_JO": 'lang_region',
+    "ar_SA": 'lang_region',
+    # use bn --> bn_BD
+    'bn_IN': 'lang_region',
+    # use de --> de_DE
+    'de_CH': 'lang_region',
+    # use en --> en_US
+    'en_AU': 'lang_region',
+    'en_CA': 'lang_region',
+    'en_GB': 'lang_region',
+    # Esperanto
+    'eo_XX': 'eo',
+    # use es --> es_ES
+    'es_AR': 'lang_region',
+    'es_CL': 'lang_region',
+    'es_CO': 'lang_region',
+    'es_CR': 'lang_region',
+    'es_EC': 'lang_region',
+    'es_MX': 'lang_region',
+    'es_PE': 'lang_region',
+    'es_UY': 'lang_region',
+    'es_VE': 'lang_region',
+    # use fr --> fr_FR
+    'fr_CA': 'lang_region',
+    'fr_CH': 'lang_region',
+    'fr_BE': 'lang_region',
+    # use nl --> nl_NL
+    'nl_BE': 'lang_region',
+    # use pt --> pt_PT
+    'pt_BR': 'lang_region',
+    # skip these languages
+    'od_IN': 'skip',
+    'io_XX': 'skip',
+    'tokipona_XX': 'skip',
+}


 def request(query, params):

+    eng_region = traits.get_region(params['searxng_locale'], traits.all_locale)
+    # eng_lang = get_ddg_lang(traits, params['searxng_locale'])
+
     params['url'] = url
     params['method'] = 'POST'
-
     params['data']['q'] = query

     # The API is not documented, so we do some reverse engineering and emulate
@@ -90,23 +224,19 @@ def request(query, params):
     params['data']['s'] = offset
     params['data']['dc'] = offset + 1

+    # request needs a vqd argument
+    params['data']['vqd'] = get_vqd(query, params["headers"])
+
     # initial page does not have additional data in the input form
     if params['pageno'] > 1:
-        # request the second page (and more pages) needs 'o' and 'api' arguments
-        params['data']['o'] = 'json'
-        params['data']['api'] = 'd.js'

-    # initial page does not have additional data in the input form
-    if params['pageno'] > 2:
-        # request the third page (and more pages) needs some more arguments
-        params['data']['nextParams'] = ''
-        params['data']['v'] = ''
-        params['data']['vqd'] = ''
+        params['data']['o'] = form_data.get('o', 'json')
+        params['data']['api'] = form_data.get('api', 'd.js')
+        params['data']['nextParams'] = form_data.get('nextParams', '')
+        params['data']['v'] = form_data.get('v', 'l')

-    region_code = get_region_code(params['language'], supported_languages)
-    if region_code:
-        params['data']['kl'] = region_code
-        params['cookies']['kl'] = region_code
+    params['data']['kl'] = eng_region
+    params['cookies']['kl'] = eng_region

     params['data']['df'] = ''
     if params['time_range'] in time_range_dict:
@@ -118,26 +248,40 @@
     return params


-# get response from search-request
 def response(resp):

-    headers_ping = dict_subset(resp.request.headers, ['User-Agent', 'Accept-Encoding', 'Accept', 'Cookie'])
-    network.get(url_ping, headers=headers_ping)
-
     if resp.status_code == 303:
         return []

     results = []
-    doc = html.fromstring(resp.text)
+    doc = lxml.html.fromstring(resp.text)

     result_table = eval_xpath(doc, '//html/body/form/div[@class="filters"]/table')
-    if not len(result_table) >= 3:
+
+    if len(result_table) == 2:
+        # some locales (at least China) do not have a "next page" button and
+        # the layout of the HTML tables is different.
+        result_table = result_table[1]
+    elif not len(result_table) >= 3:
         # no more results
         return []
-    result_table = result_table[2]
+    else:
+        result_table = result_table[2]
+        # update form data from response
+        form = eval_xpath(doc, '//html/body/form/div[@class="filters"]/table//input/..')
+        if len(form):
+
+            form = form[0]
+            form_data['v'] = eval_xpath(form, '//input[@name="v"]/@value')[0]
+            form_data['api'] = eval_xpath(form, '//input[@name="api"]/@value')[0]
+            form_data['o'] = eval_xpath(form, '//input[@name="o"]/@value')[0]
+            logger.debug('form_data: %s', form_data)
+
+            value = eval_xpath(form, '//input[@name="vqd"]/@value')[0]
+            query = resp.search_params['data']['q']
+            cache_vqd(query, value)

     tr_rows = eval_xpath(result_table, './/tr')
-
     # In the last <tr> is the form of the 'previous/next page' links
     tr_rows = tr_rows[:-1]
@@ -174,32 +318,35 @@ def response(resp):
     return results


-# get supported languages from their site
-def _fetch_supported_languages(resp):
-
-    # response is a js file with regions as an embedded object
-    response_page = resp.text
-    response_page = response_page[response_page.find('regions:{') + 8 :]
-    response_page = response_page[: response_page.find('}') + 1]
-
-    regions_json = json.loads(response_page)
-    supported_languages = map((lambda x: x[3:] + '-' + x[:2].upper()), regions_json.keys())
+def fetch_traits(engine_traits: EngineTraits):
+    """Fetch languages & regions from DuckDuckGo.

-    return list(supported_languages)
+    SearXNG's ``all`` locale maps DuckDuckGo's "All Regions" (``wt-wt``).
+    DuckDuckGo's language "Browsers preferred language" (``wt_WT``) makes no
+    sense in a SearXNG request since SearXNG's ``all`` will not add an
+    ``Accept-Language`` HTTP header.  The value in ``engine_traits.all_locale``
+    is ``wt-wt`` (the region).

+    Besides regions, DuckDuckGo also defines its languages by region codes.  For
+    example, these are the English languages in DuckDuckGo:

-def fetch_traits(engine_traits: EngineTraits):
-    """Fetch regions from DuckDuckGo."""
-    # pylint: disable=import-outside-toplevel
+    - en_US
+    - en_AU
+    - en_CA
+    - en_GB

-    engine_traits.data_type = 'supported_languages'  # deprecated
+    The function :py:obj:`get_ddg_lang` evaluates DuckDuckGo's language from
+    SearXNG's locale.

-    import babel
-    from searx.locales import region_tag
+    """
+    # pylint: disable=too-many-branches, too-many-statements
+    # fetch regions

     engine_traits.all_locale = 'wt-wt'

-    resp = network.get('https://duckduckgo.com/util/u588.js')
+    # updated from u588 to u661 / should be updated automatically?
+    resp = network.get('https://duckduckgo.com/util/u661.js')
+
     if not resp.ok:
         print("ERROR: response from DuckDuckGo is not OK.")
@@ -208,28 +355,13 @@ def fetch_traits(engine_traits: EngineTraits):
     pos = js_code.find('}') + 1
     regions = json.loads(js_code[:pos])

-    reg_map = {
-        'tw-tzh': 'zh_TW',
-        'hk-tzh': 'zh_HK',
-        'ct-ca': 'skip',  # ct-ca and es-ca both map to ca_ES
-        'es-ca': 'ca_ES',
-        'id-en': 'id_ID',
-        'no-no': 'nb_NO',
-        'jp-jp': 'ja_JP',
-        'kr-kr': 'ko_KR',
-        'xa-ar': 'ar_SA',
-        'sl-sl': 'sl_SI',
-        'th-en': 'th_TH',
-        'vn-en': 'vi_VN',
-    }
-
     for eng_tag, name in regions.items():

         if eng_tag == 'wt-wt':
             engine_traits.all_locale = 'wt-wt'
             continue

-        region = reg_map.get(eng_tag)
+        region = ddg_reg_map.get(eng_tag)
         if region == 'skip':
             continue
@@ -238,7 +370,7 @@ def fetch_traits(engine_traits: EngineTraits):
         region = eng_lang + '_' + eng_territory.upper()

         try:
-            sxng_tag = region_tag(babel.Locale.parse(region))
+            sxng_tag = locales.region_tag(babel.Locale.parse(region))
         except babel.UnknownLocaleError:
             print("ERROR: %s (%s) -> %s is unknown by babel" % (name, eng_tag, region))
             continue
@@ -249,3 +381,42 @@ def fetch_traits(engine_traits: EngineTraits):
                 print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, eng_tag))
             continue
         engine_traits.regions[sxng_tag] = eng_tag
+
+    # fetch languages
+
+    engine_traits.custom['lang_region'] = {}
+
+    pos = resp.text.find('languages:{') + 10
+    js_code = resp.text[pos:]
+    pos = js_code.find('}') + 1
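+    # unlike the regions object, the keys of the languages object are not
+    # quoted; add the quotes so json.loads() can parse the JS object literal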
+    js_code = '{"' + js_code[1:pos].replace(':', '":').replace(',', ',"')
+    languages = json.loads(js_code)
+
+    for eng_lang, name in languages.items():
+
+        if eng_lang == 'wt_WT':
+            continue
+
+        babel_tag = ddg_lang_map.get(eng_lang, eng_lang)
+        if babel_tag == 'skip':
+            continue
+
+        try:
+
+            if babel_tag == 'lang_region':
+                sxng_tag = locales.region_tag(babel.Locale.parse(eng_lang))
+                engine_traits.custom['lang_region'][sxng_tag] = eng_lang
+                continue
+
+            sxng_tag = locales.language_tag(babel.Locale.parse(babel_tag))
+
+        except babel.UnknownLocaleError:
+            print("ERROR: language %s (%s) is unknown by babel" % (name, eng_lang))
+            continue
+
+        conflict = engine_traits.languages.get(sxng_tag)
+        if conflict:
+            if conflict != eng_lang:
+                print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, eng_lang))
+            continue
+        engine_traits.languages[sxng_tag] = eng_lang