  1. #!/usr/bin/env python
  2. # lint: pylint
  3. # SPDX-License-Identifier: AGPL-3.0-or-later
  4. """This script generates languages.py from intersecting each engine's supported
  5. languages.
  6. Output files: :origin:`searx/data/engines_languages.json` and
  7. :origin:`searx/languages.py` (:origin:`CI Update data ...
  8. <.github/workflows/data-update.yml>`).
  9. """
  10. # pylint: disable=invalid-name
  11. import json
  12. from pathlib import Path
  13. from pprint import pformat
  14. from babel import Locale, UnknownLocaleError
  15. from babel.languages import get_global
  16. from searx import settings, searx_dir
  17. from searx.engines import load_engines, engines
  18. from searx.network import set_timeout_for_thread
  19. # Output files.
  20. engines_languages_file = Path(searx_dir) / 'data' / 'engines_languages.json'
  21. languages_file = Path(searx_dir) / 'languages.py'
  22. # Fetchs supported languages for each engine and writes json file with those.
  23. def fetch_supported_languages():
  24. set_timeout_for_thread(10.0)
  25. engines_languages = {}
  26. names = list(engines)
  27. names.sort()
  28. for engine_name in names:
  29. if hasattr(engines[engine_name], 'fetch_supported_languages'):
  30. engines_languages[engine_name] = engines[engine_name].fetch_supported_languages()
  31. print("fetched %s languages from engine %s" % (len(engines_languages[engine_name]), engine_name))
  32. if type(engines_languages[engine_name]) == list: # pylint: disable=unidiomatic-typecheck
  33. engines_languages[engine_name] = sorted(engines_languages[engine_name])
  34. print("fetched languages from %s engines" % len(engines_languages))
  35. # write json file
  36. with open(engines_languages_file, 'w', encoding='utf-8') as f:
  37. json.dump(engines_languages, f, indent=2, sort_keys=True)
  38. return engines_languages
  39. # Get babel Locale object from lang_code if possible.
  40. def get_locale(lang_code):
  41. try:
  42. locale = Locale.parse(lang_code, sep='-')
  43. return locale
  44. except (UnknownLocaleError, ValueError):
  45. return None
# Join all language lists.
def join_language_lists(engines_languages):
    """Merge the per-engine language lists into one dict keyed by short
    language code.

    Each value holds ``name``/``english_name``, a ``counter`` set of engine
    names supporting the bare language, and a ``countries`` dict mapping
    ``lang-COUNTRY`` codes to their own ``country_name`` and ``counter`` set.
    """
    language_list = {}
    for engine_name in engines_languages:
        for lang_code in engines_languages[engine_name]:

            # apply custom fixes if necessary: map an engine-specific alias
            # back to the canonical code it stands for
            if lang_code in getattr(engines[engine_name], 'language_aliases', {}).values():
                lang_code = next(
                    lc for lc, alias in engines[engine_name].language_aliases.items() if lang_code == alias
                )

            locale = get_locale(lang_code)

            # ensure that lang_code uses standard language and country codes
            if locale and locale.territory:
                lang_code = "{lang}-{country}".format(lang=locale.language, country=locale.territory)
            short_code = lang_code.split('-')[0]

            # add language without country if not in list
            if short_code not in language_list:
                if locale:
                    # get language's data from babel's Locale object
                    language_name = locale.get_language_name().title()
                    english_name = locale.english_name.split(' (')[0]
                elif short_code in engines_languages['wikipedia']:
                    # get language's data from wikipedia if not known by babel
                    # (assumes the wikipedia engine result is a dict keyed by
                    # short code with 'name'/'english_name' entries)
                    language_name = engines_languages['wikipedia'][short_code]['name']
                    english_name = engines_languages['wikipedia'][short_code]['english_name']
                else:
                    language_name = None
                    english_name = None

                # add language to list
                language_list[short_code] = {
                    'name': language_name,
                    'english_name': english_name,
                    'counter': set(),
                    'countries': {},
                }

            # add language with country if not in list
            if lang_code != short_code and lang_code not in language_list[short_code]['countries']:
                country_name = ''
                if locale:
                    # get country name from babel's Locale object; babel may
                    # raise FileNotFoundError for locales missing CLDR data
                    try:
                        country_name = locale.get_territory_name()
                    except FileNotFoundError as exc:
                        print("ERROR: %s --> %s" % (locale, exc))
                        locale = None

                language_list[short_code]['countries'][lang_code] = {'country_name': country_name, 'counter': set()}

            # count engine for both language_country combination and language alone
            language_list[short_code]['counter'].add(engine_name)
            if lang_code != short_code:
                language_list[short_code]['countries'][lang_code]['counter'].add(engine_name)

    return language_list
# Filter language list so it only includes the most supported languages and countries
def filter_language_list(all_languages):
    """Reduce the joined language list to well-supported entries.

    Keeps a language when enough engines support it (or all default general
    engines do), then keeps the country variants with enough engine support.
    Returns a flat dict mapping ``lang`` / ``lang-COUNTRY`` codes to dicts
    with ``name``, ``english_name`` and optionally ``country_name``.
    """
    # thresholds chosen by the project; tune here to widen/narrow the list
    min_engines_per_lang = 13
    min_engines_per_country = 7
    # pylint: disable=consider-using-dict-items, consider-iterating-dictionary
    main_engines = [
        engine_name
        for engine_name in engines.keys()
        if 'general' in engines[engine_name].categories
        and engines[engine_name].supported_languages
        and not engines[engine_name].disabled
    ]

    # filter list to include only languages supported by most engines or all default general engines
    filtered_languages = {
        code: lang
        for code, lang in all_languages.items()
        if (
            len(lang['counter']) >= min_engines_per_lang
            or all(main_engine in lang['counter'] for main_engine in main_engines)
        )
    }

    # copy only the output-relevant fields (drops the 'counter' sets)
    def _copy_lang_data(lang, country_name=None):
        new_dict = {}
        new_dict['name'] = all_languages[lang]['name']
        new_dict['english_name'] = all_languages[lang]['english_name']
        if country_name:
            new_dict['country_name'] = country_name
        return new_dict

    # for each language get country codes supported by most engines or at least one country code
    filtered_languages_with_countries = {}
    for lang, lang_data in filtered_languages.items():
        countries = lang_data['countries']
        filtered_countries = {}

        # get language's country codes with enough supported engines
        for lang_country, country_data in countries.items():
            if len(country_data['counter']) >= min_engines_per_country:
                filtered_countries[lang_country] = _copy_lang_data(lang, country_data['country_name'])

        # add language without countries too if there's more than one country to choose from
        if len(filtered_countries) > 1:
            filtered_countries[lang] = _copy_lang_data(lang)
        elif len(filtered_countries) == 1:
            # if there's only one country per language, it's not necessary to show country name
            lang_country = next(iter(filtered_countries))
            filtered_countries[lang_country]['country_name'] = None

        # if no country has enough engines try to get most likely country code from babel
        if not filtered_countries:
            lang_country = None
            subtags = get_global('likely_subtags').get(lang)
            if subtags:
                # likely_subtags values look like 'lang_Script_COUNTRY';
                # take the last part and keep it only if it is a country code
                country_code = subtags.split('_')[-1]
                if len(country_code) == 2:
                    lang_country = "{lang}-{country}".format(lang=lang, country=country_code)

            if lang_country:
                filtered_countries[lang_country] = _copy_lang_data(lang)
            else:
                filtered_countries[lang] = _copy_lang_data(lang)

        filtered_languages_with_countries.update(filtered_countries)

    return filtered_languages_with_countries
  155. # Write languages.py.
  156. def write_languages_file(languages):
  157. file_headers = (
  158. "# -*- coding: utf-8 -*-",
  159. "# list of language codes",
  160. "# this file is generated automatically by utils/fetch_languages.py",
  161. "language_codes = (\n",
  162. )
  163. language_codes = []
  164. for code in sorted(languages):
  165. name = languages[code]['name']
  166. if name is None:
  167. print("ERROR: languages['%s'] --> %s" % (code, languages[code]))
  168. continue
  169. item = (
  170. code,
  171. languages[code]['name'].split(' (')[0],
  172. languages[code].get('country_name') or '',
  173. languages[code].get('english_name') or '',
  174. )
  175. language_codes.append(item)
  176. language_codes = tuple(language_codes)
  177. with open(languages_file, 'w', encoding='utf-8') as new_file:
  178. file_content = "{file_headers} {language_codes},\n)\n".format(
  179. # fmt: off
  180. file_headers = '\n'.join(file_headers),
  181. language_codes = pformat(language_codes, indent=4)[1:-1]
  182. # fmt: on
  183. )
  184. new_file.write(file_content)
  185. new_file.close()
if __name__ == "__main__":
    # Script entry point: load the configured engines, fetch and merge the
    # languages they support, filter the result, then write languages.py.
    load_engines(settings['engines'])
    _engines_languages = fetch_supported_languages()
    _all_languages = join_language_lists(_engines_languages)
    _filtered_languages = filter_language_list(_all_languages)
    write_languages_file(_filtered_languages)