@@ -1,16 +1,26 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""
- Wikipedia (Web)
+# lint: pylint
+"""This module implements the Wikipedia engine. Parts of this implementation
+are shared by other engines:
+
+- :ref:`wikidata engine`
+
+The list of supported languages is fetched from the article linked by
+:py:obj:`wikipedia_article_depth`. Unlike traditional search engines, there is
+not one Wikipedia for all languages; instead there is a separate Wikipedia for
+every language (:py:obj:`fetch_traits`).
 """

-from urllib.parse import quote
-from json import loads
+import urllib.parse
+import babel
+
 from lxml import html
-from searx.utils import match_language, searx_useragent
+
 from searx import network
+from searx.locales import language_tag
 from searx.enginelib.traits import EngineTraits

-engine_traits: EngineTraits
+traits: EngineTraits

 # about
 about = {
@@ -22,32 +32,40 @@ about = {
     "results": 'JSON',
 }

-
 send_accept_language_header = True

-# search-url
-search_url = 'https://{language}.wikipedia.org/api/rest_v1/page/summary/{title}'
-supported_languages_url = 'https://meta.wikimedia.org/wiki/List_of_Wikipedias'
-language_variants = {"zh": ("zh-cn", "zh-hk", "zh-mo", "zh-my", "zh-sg", "zh-tw")}
+wikipedia_article_depth = 'https://meta.wikimedia.org/wiki/Wikipedia_article_depth'
+"""The *editing depth* of Wikipedia is one of several possible rough indicators
+of the encyclopedia's collaborative quality, showing how frequently its articles
+are updated. The measurement of depth was introduced after some limitations of
+the classic measurement of article count were realized.
+"""
+
+# example: https://zh-classical.wikipedia.org/api/rest_v1/page/summary/日
+rest_v1_summary_url = 'https://{wiki_netloc}/api/rest_v1/page/summary/{title}'
+"""`wikipedia rest_v1 summary API`_: The summary response includes an extract of
+the first paragraph of the page in plain text and HTML as well as the type of
+page. This is useful for page previews (fka. Hovercards, aka. Popups) on the web
+and link previews in the apps.
+
+.. _wikipedia rest_v1 summary API: https://en.wikipedia.org/api/rest_v1/#/Page%20content/get_page_summary__title_
+
-# set language in base_url
-def url_lang(lang):
-    lang_pre = lang.split('-')[0]
-    if lang_pre == 'all' or lang_pre not in supported_languages and lang_pre not in language_aliases:
-        return 'en'
-    return match_language(lang, supported_languages, language_aliases).split('-')[0]
+"""


-# do search-request
 def request(query, params):
+    """Assemble a request (`wikipedia rest_v1 summary API`_)."""
     if query.islower():
         query = query.title()

-    language = url_lang(params['language'])
-    params['url'] = search_url.format(title=quote(query), language=language)
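+    # map the SearXNG locale to a language tag known by the engine
+    # (traits.languages) and that tag to the netloc of the matching Wikipedia
+    # site (traits.custom['wiki_netloc'], filled by fetch_traits)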
+    engine_language = traits.get_language(params['searxng_locale'], 'en')
+    wiki_netloc = traits.custom['wiki_netloc'].get(engine_language, 'en.wikipedia.org')
+    title = urllib.parse.quote(query)
+
+    # '!wikipedia 日 :zh-TW' --> https://zh-classical.wikipedia.org/
+    # '!wikipedia 日 :zh' --> https://zh.wikipedia.org/
+    params['url'] = rest_v1_summary_url.format(wiki_netloc=wiki_netloc, title=title)

-    params['headers']['User-Agent'] = searx_useragent()
     params['raise_for_httperror'] = False
     params['soft_max_redirects'] = 2

@@ -56,13 +74,14 @@ def request(query, params):

 # get response from search-request
 def response(resp):
+
+    results = []
     if resp.status_code == 404:
         return []
-
     if resp.status_code == 400:
         try:
-            api_result = loads(resp.text)
-        except:
+            api_result = resp.json()
+        except Exception:  # pylint: disable=broad-except
             pass
         else:
             if (
@@ -73,52 +92,25 @@ def response(resp):

     network.raise_for_httperror(resp)

-    results = []
-    api_result = loads(resp.text)
-
-    # skip disambiguation pages
-    if api_result.get('type') != 'standard':
-        return []
-
+    api_result = resp.json()
     title = api_result['title']
     wikipedia_link = api_result['content_urls']['desktop']['page']
-
-    results.append({'url': wikipedia_link, 'title': title})
-
-    results.append(
-        {
-            'infobox': title,
-            'id': wikipedia_link,
-            'content': api_result.get('extract', ''),
-            'img_src': api_result.get('thumbnail', {}).get('source'),
-            'urls': [{'title': 'Wikipedia', 'url': wikipedia_link}],
-        }
-    )
+    results.append({'url': wikipedia_link, 'title': title, 'content': api_result.get('description', '')})
+
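+    # an infobox is only added for 'standard' pages; other page types
+    # (e.g. disambiguation pages) just get the plain result above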
+    if api_result.get('type') == 'standard':
+        results.append(
+            {
+                'infobox': title,
+                'id': wikipedia_link,
+                'content': api_result.get('extract', ''),
+                'img_src': api_result.get('thumbnail', {}).get('source'),
+                'urls': [{'title': 'Wikipedia', 'url': wikipedia_link}],
+            }
+        )

     return results


-# get supported languages from their site
-def _fetch_supported_languages(resp):
-    supported_languages = {}
-    dom = html.fromstring(resp.text)
-    tables = dom.xpath('//table[contains(@class,"sortable")]')
-    for table in tables:
-        # exclude header row
-        trs = table.xpath('.//tr')[1:]
-        for tr in trs:
-            td = tr.xpath('./td')
-            code = td[3].xpath('./a')[0].text
-            name = td[1].xpath('./a')[0].text
-            english_name = td[1].xpath('./a')[0].text
-            articles = int(td[4].xpath('./a')[0].text.replace(',', ''))
-            # exclude languages with too few articles
-            if articles >= 100:
-                supported_languages[code] = {"name": name, "english_name": english_name}
-
-    return supported_languages
-
-
 # Nonstandard language codes
 #
 # These Wikipedias use language codes that do not conform to the ISO 639
@@ -135,104 +127,57 @@ lang_map = {
     'nrm': 'nrf',
     'roa-rup': 'rup',
     'nds-nl': 'nds',
-    #'roa-tara: – invented code used for the Tarantino Wikipedia (again, roa is the standard code for the large family of Romance languages that the Tarantino dialect falls within)
     #'simple: – invented code used for the Simple English Wikipedia (not the official IETF code en-simple)
-    'zh-classical': 'zh_Hant',
     'zh-min-nan': 'nan',
     'zh-yue': 'yue',
     'an': 'arg',
+    'zh-classical': 'zh-Hant',  # babel maps classical to zh-Hans (for whatever reason)
 }

 unknown_langs = [
-    'ab',  # Abkhazian
-    'alt',  # Southern Altai
     'an',  # Aragonese
-    'ang',  # Anglo-Saxon
-    'arc',  # Aramaic
-    'ary',  # Moroccan Arabic
-    'av',  # Avar
     'ba',  # Bashkir
-    'be-tarask',
     'bar',  # Bavarian
     'bcl',  # Central Bicolano
-    'bh',  # Bhojpuri
-    'bi',  # Bislama
-    'bjn',  # Banjar
-    'blk',  # Pa'O
-    'bpy',  # Bishnupriya Manipuri
-    'bxr',  # Buryat
-    'cbk-zam',  # Zamboanga Chavacano
-    'co',  # Corsican
-    'cu',  # Old Church Slavonic
-    'dty',  # Doteli
-    'dv',  # Divehi
-    'ext',  # Extremaduran
-    'fj',  # Fijian
-    'frp',  # Franco-Provençal
-    'gan',  # Gan
-    'gom',  # Goan Konkani
+    'be-tarask',  # Belarusian variant / Belarusian is already covered by 'be'
+    'bpy',  # Bishnupriya Manipuri is unknown by babel
     'hif',  # Fiji Hindi
     'ilo',  # Ilokano
-    'inh',  # Ingush
-    'jbo',  # Lojban
-    'kaa',  # Karakalpak
-    'kbd',  # Kabardian Circassian
-    'kg',  # Kongo
-    'koi',  # Komi-Permyak
-    'krc',  # Karachay-Balkar
-    'kv',  # Komi
-    'lad',  # Ladino
-    'lbe',  # Lak
-    'lez',  # Lezgian
     'li',  # Limburgish
-    'ltg',  # Latgalian
-    'mdf',  # Moksha
-    'mnw',  # Mon
-    'mwl',  # Mirandese
-    'myv',  # Erzya
-    'na',  # Nauruan
-    'nah',  # Nahuatl
-    'nov',  # Novial
-    'nrm',  # Norman
-    'pag',  # Pangasinan
-    'pam',  # Kapampangan
-    'pap',  # Papiamentu
-    'pdc',  # Pennsylvania German
-    'pfl',  # Palatinate German
-    'roa-rup',  # Aromanian
-    'sco',  # Scots
-    'sco',  # Scots (https://sco.wikipedia.org) is not known by babel, Scottish Gaelic (https://gd.wikipedia.org) is known by babel
+    'sco',  # Scots (sco) is not known by babel, Scottish Gaelic (gd) is known by babel
     'sh',  # Serbo-Croatian
     'simple',  # simple english is not know as a natural language different to english (babel)
-    'sm',  # Samoan
-    'srn',  # Sranan
-    'stq',  # Saterland Frisian
-    'szy',  # Sakizaya
-    'tcy',  # Tulu
-    'tet',  # Tetum
-    'tpi',  # Tok Pisin
-    'trv',  # Seediq
-    'ty',  # Tahitian
-    'tyv',  # Tuvan
-    'udm',  # Udmurt
-    'vep',  # Vepsian
-    'vls',  # West Flemish
     'vo',  # Volapük
     'wa',  # Walloon
-    'xal',  # Kalmyk
 ]


 def fetch_traits(engine_traits: EngineTraits):
-    """Fetch languages from Wikipedia"""
-    # pylint: disable=import-outside-toplevel
+    """Fetch languages from Wikipedia.
+
+    The netloc of each language's Wikipedia site is mapped in a
+    :py:obj:`custom field <searx.enginelib.traits.EngineTraits.custom>`
+    (``wiki_netloc``). Here is a reduced example:
+
+    .. code:: python
+
-    engine_traits.data_type = 'supported_languages'  # deprecated
+       traits.custom['wiki_netloc'] = {
+           "en": "en.wikipedia.org",
+           ..
+           "gsw": "als.wikipedia.org",
+           ..
+           "zh": "zh.wikipedia.org",
+           "zh-classical": "zh-classical.wikipedia.org"
+       }
+
-    import babel
-    from searx.locales import language_tag
+    """

-    resp = network.get('https://meta.wikimedia.org/wiki/List_of_Wikipedias')
+    engine_traits.custom['wiki_netloc'] = {}
+
+    # insert alias to map from a region like zh-CN to a language zh_Hans
+    engine_traits.languages['zh_Hans'] = 'zh'
+
+    resp = network.get(wikipedia_article_depth)
     if not resp.ok:
         print("ERROR: response from Wikipedia is not OK.")

@@ -242,34 +187,31 @@ def fetch_traits(engine_traits: EngineTraits):
         cols = row.xpath('./td')
         if not cols:
             continue
-
         cols = [c.text_content().strip() for c in cols]
-        articles = int(cols[4].replace(',', '').replace('-', '0'))
-        users = int(cols[8].replace(',', '').replace('-', '0'))
-        depth = cols[11].strip('-')
-
-        if articles < 1000:
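+        # column layout of the article depth table assumed here: cols[2] is
+        # the language tag, cols[3] the depth and cols[4] the article count
+        # ('-' marks a missing value)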
+        depth = float(cols[3].replace('-', '0').replace(',', ''))
+        articles = int(cols[4].replace(',', ''))
+
+        if articles < 10000:
             # exclude languages with too few articles
             continue

-        # depth: rough indicator of a Wikipedia’s quality, showing how
-        # frequently its articles are updated.
-        if depth == '':
-            if users < 1000:
-                # depth is not calculated --> at least 1000 user should registered
-                continue
-        elif int(depth) < 20:
+        if int(depth) < 20:
+            # Rough indicator of a Wikipedia’s quality, showing how frequently
+            # its articles are updated.
             continue

-        eng_tag = cols[3]
+        eng_tag = cols[2]
+        wiki_url = row.xpath('./td[3]/a/@href')[0]
+        wiki_url = urllib.parse.urlparse(wiki_url)

         if eng_tag in unknown_langs:
             continue

         try:
-            sxng_tag = language_tag(babel.Locale.parse(lang_map.get(eng_tag, eng_tag)))
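+            # tags in lang_map use '-' as separator, while babel's default
+            # separator is '_'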
+            sxng_tag = language_tag(babel.Locale.parse(lang_map.get(eng_tag, eng_tag), sep='-'))
         except babel.UnknownLocaleError:
-            print("ERROR: %s -> %s is unknown by babel" % (cols[1], eng_tag))
+            print("ERROR: %s [%s] is unknown by babel" % (cols[0], eng_tag))
             continue

         conflict = engine_traits.languages.get(sxng_tag)
@@ -277,6 +219,6 @@ def fetch_traits(engine_traits: EngineTraits):
             if conflict != eng_tag:
                 print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, eng_tag))
             continue
-        engine_traits.languages[sxng_tag] = eng_tag

-    engine_traits.languages['zh_Hans'] = 'zh'
+        engine_traits.languages[sxng_tag] = eng_tag
+        engine_traits.custom['wiki_netloc'][eng_tag] = wiki_url.netloc