# SPDX-License-Identifier: AGPL-3.0-or-later
"""This module implements functions needed for the autocompleter."""
# pylint: disable=use-dict-literal

import json
import html
from urllib.parse import urlencode, quote_plus

import lxml.etree
import lxml.html
from httpx import HTTPError

from searx.extended_types import SXNG_Response
from searx import settings
from searx.engines import (
    engines,
    google,
)
from searx.network import get as http_get, post as http_post
from searx.exceptions import SearxEngineResponseException
from searx.utils import extr


def update_kwargs(**kwargs):
    """Return the request keyword arguments with the autocompleter defaults
    applied: the configured outgoing request timeout (only when the caller did
    not pass one) and ``raise_for_httperror`` forced to ``True``.

    BUGFIX(review): the previous version mutated only the *local* dict created
    by the ``**kwargs`` packing and returned nothing, so neither the timeout
    nor ``raise_for_httperror`` ever reached the actual HTTP call.  The
    function now returns the updated dict and the callers re-bind it.
    """
    if 'timeout' not in kwargs:
        kwargs['timeout'] = settings['outgoing']['request_timeout']
    kwargs['raise_for_httperror'] = True
    return kwargs


def get(*args, **kwargs) -> SXNG_Response:
    """HTTP GET through the SearXNG network layer, with the defaults from
    :py:func:`update_kwargs` applied."""
    kwargs = update_kwargs(**kwargs)
    return http_get(*args, **kwargs)


def post(*args, **kwargs) -> SXNG_Response:
    """HTTP POST through the SearXNG network layer, with the defaults from
    :py:func:`update_kwargs` applied."""
    kwargs = update_kwargs(**kwargs)
    return http_post(*args, **kwargs)


def baidu(query, _lang):
    """Autocomplete from Baidu (``sugrec`` JSON endpoint)."""
    base_url = "https://www.baidu.com/sugrec?"
    response = get(base_url + urlencode({'ie': 'utf-8', 'json': 1, 'prod': 'pc', 'wd': query}))

    results = []
    if response.ok:
        data = response.json()
        # suggestions live in the 'g' list, one dict per suggestion
        if 'g' in data:
            for item in data['g']:
                results.append(item['q'])
    return results


def brave(query, _lang):
    """Autocomplete from Brave search."""
    url = 'https://search.brave.com/api/suggest?'
    url += urlencode({'q': query})
    # the 'country' cookie selects the region; 'all' disables regional results
    country = 'all'
    # if lang in _brave:
    #    country = lang
    kwargs = {'cookies': {'country': country}}
    resp = get(url, **kwargs)

    results = []
    if resp.ok:
        data = resp.json()
        # OpenSearch-style pair: [query, [suggestions...]]
        for item in data[1]:
            results.append(item)
    return results


def dbpedia(query, _lang):
    """Autocomplete from DBpedia's keyword lookup service (XML response)."""
    autocomplete_url = 'https://lookup.dbpedia.org/api/search.asmx/KeywordSearch?'
    response = get(autocomplete_url + urlencode(dict(QueryString=query)))

    results = []
    if response.ok:
        dom = lxml.etree.fromstring(response.content)
        results = dom.xpath('//Result/Label//text()')
    return results


def duckduckgo(query, sxng_locale):
    """Autocomplete from DuckDuckGo. Supports DuckDuckGo's languages."""
    traits = engines['duckduckgo'].traits
    args = {
        'q': query,
        'kl': traits.get_region(sxng_locale, traits.all_locale),
    }
    url = 'https://duckduckgo.com/ac/?type=list&' + urlencode(args)
    resp = get(url)

    ret_val = []
    if resp.ok:
        j = resp.json()
        # OpenSearch-style pair: [query, [suggestions...]]
        if len(j) > 1:
            ret_val = j[1]
    return ret_val


def google_complete(query, sxng_locale):
    """Autocomplete from Google.  Supports Google's languages and subdomains
    (:py:obj:`searx.engines.google.get_google_info`) by using the async REST
    API::

        https://{subdomain}/complete/search?{args}

    """
    google_info = google.get_google_info({'searxng_locale': sxng_locale}, engines['google'].traits)

    url = 'https://{subdomain}/complete/search?{args}'
    args = urlencode(
        {
            'q': query,
            'client': 'gws-wiz',
            'hl': google_info['params']['hl'],
        }
    )

    results = []
    resp = get(url.format(subdomain=google_info['subdomain'], args=args))
    if resp and resp.ok:
        # payload is JSON wrapped in a JS callback; cut out the outermost
        # [...] array before parsing
        json_txt = resp.text[resp.text.find('[') : resp.text.find(']', -3) + 1]
        data = json.loads(json_txt)
        for item in data[0]:
            # suggestion text may carry HTML markup (e.g. <b>...</b>)
            results.append(lxml.html.fromstring(item[0]).text_content())
    return results


def mwmbl(query, _lang):
    """Autocomplete from Mwmbl_."""

    # mwmbl autocompleter
    url = 'https://api.mwmbl.org/search/complete?{query}'

    results = get(url.format(query=urlencode({'q': query}))).json()[1]

    # results starting with `go:` are direct urls and not useful for auto completion
    return [result for result in results if not result.startswith("go: ") and not result.startswith("search: ")]


def qihu360search(query, _lang):
    """Autocomplete from 360Search (JSON endpoint)."""
    url = f"https://sug.so.360.cn/suggest?{urlencode({'format': 'json', 'word': query})}"
    response = get(url)

    results = []
    if response.ok:
        data = response.json()
        if 'result' in data:
            for item in data['result']:
                results.append(item['word'])
    return results


def seznam(query, _lang):
    """Autocomplete from Seznam (czech search engine)."""
    url = 'https://suggest.seznam.cz/fulltext/cs?{query}'

    resp = get(
        url.format(
            query=urlencode(
                {'phrase': query, 'cursorPosition': len(query), 'format': 'json-2', 'highlight': '1', 'count': '6'}
            )
        )
    )

    if not resp.ok:
        return []

    data = resp.json()
    # each TEXT item carries its suggestion split into highlighted parts;
    # join the parts back into one plain string
    return [
        ''.join([part.get('text', '') for part in item.get('text', [])])
        for item in data.get('result', [])
        if item.get('itemType', None) == 'ItemType.TEXT'
    ]


def sogou(query, _lang):
    """Autocomplete from Sogou."""
    base_url = "https://sor.html5.qq.com/api/getsug?"
    response = get(base_url + urlencode({'m': 'searxng', 'key': query}))

    if response.ok:
        # response is a JS callback; extract the inner array and re-wrap it
        # so it parses as JSON again
        raw_json = extr(response.text, "[", "]", default="")
        try:
            data = json.loads(f"[{raw_json}]]")
            return data[1]
        except json.JSONDecodeError:
            return []
    return []


def stract(query, _lang):
    """Autocomplete from stract (beta API)."""
    url = f"https://stract.com/beta/api/autosuggest?q={quote_plus(query)}"
    resp = post(url)

    if not resp.ok:
        return []

    # suggestions come HTML-escaped in the 'raw' field
    return [html.unescape(suggestion['raw']) for suggestion in resp.json()]


def swisscows(query, _lang):
    """Autocomplete from Swisscows."""
    url = 'https://swisscows.ch/api/suggest?{query}&itemsCount=5'

    # the endpoint returns a plain JSON list of suggestion strings
    resp = json.loads(get(url.format(query=urlencode({'query': query}))).text)
    return resp


def qwant(query, sxng_locale):
    """Autocomplete from Qwant. Supports Qwant's regions."""
    results = []

    locale = engines['qwant'].traits.get_region(sxng_locale, 'en_US')
    url = 'https://api.qwant.com/v3/suggest?{query}'
    resp = get(url.format(query=urlencode({'q': query, 'locale': locale, 'version': '2'})))

    if resp.ok:
        data = resp.json()
        if data['status'] == 'success':
            for item in data['data']['items']:
                results.append(item['value'])
    return results


def wikipedia(query, sxng_locale):
    """Autocomplete from Wikipedia. Supports Wikipedia's languages (aka netloc)."""
    results = []
    eng_traits = engines['wikipedia'].traits
    wiki_lang = eng_traits.get_language(sxng_locale, 'en')
    # map the language to the language-specific wiki host, default to en
    wiki_netloc = eng_traits.custom['wiki_netloc'].get(wiki_lang, 'en.wikipedia.org')  # type: ignore

    url = 'https://{wiki_netloc}/w/api.php?{args}'
    args = urlencode(
        {
            'action': 'opensearch',
            'format': 'json',
            'formatversion': '2',
            'search': query,
            'namespace': '0',
            'limit': '10',
        }
    )
    resp = get(url.format(args=args, wiki_netloc=wiki_netloc))
    if resp.ok:
        data = resp.json()
        # OpenSearch response: [query, [titles...], ...]
        if len(data) > 1:
            results = data[1]

    return results


def yandex(query, _lang):
    """Autocomplete from Yandex."""
    url = "https://suggest.yandex.com/suggest-ff.cgi?{0}"

    resp = json.loads(get(url.format(urlencode(dict(part=query)))).text)
    if len(resp) > 1:
        return resp[1]
    return []


# registry mapping the backend name (settings value) to its implementation
backends = {
    '360search': qihu360search,
    'baidu': baidu,
    'brave': brave,
    'dbpedia': dbpedia,
    'duckduckgo': duckduckgo,
    'google': google_complete,
    'mwmbl': mwmbl,
    'qwant': qwant,
    'seznam': seznam,
    'sogou': sogou,
    'stract': stract,
    'swisscows': swisscows,
    'wikipedia': wikipedia,
    'yandex': yandex,
}


def search_autocomplete(backend_name, query, sxng_locale):
    """Return a list of suggestion strings for *query* from the backend
    registered under *backend_name*; an unknown backend or a network /
    engine-response error yields an empty list (best effort, never raises)."""
    backend = backends.get(backend_name)
    if backend is None:
        return []
    try:
        return backend(query, sxng_locale)
    except (HTTPError, SearxEngineResponseException):
        return []