archlinux.py

# -*- coding: utf-8 -*-
"""
Arch Linux Wiki

@website      https://wiki.archlinux.org
@provide-api  no (MediaWiki provides an API, but the Arch Wiki blocks access to it)
@using-api    no
@results      HTML
@stable       no (HTML can change)
@parse        url, title
"""

from urlparse import urljoin
from urllib import urlencode
from lxml import html
from searx.engines.xpath import extract_text

# engine dependent config
categories = ['it']
language_support = True
paging = True
base_url = 'https://wiki.archlinux.org'

# xpath queries
xpath_results = '//ul[@class="mw-search-results"]/li'
xpath_link = './/div[@class="mw-search-result-heading"]/a'


# cut 'en' from 'en-US', 'de' from 'de-CH', and so on
def locale_to_lang_code(locale):
    if locale.find('-') >= 0:
        locale = locale.split('-')[0]
    return locale
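
# Illustrative examples (added comments, not in the original source):
#   locale_to_lang_code('de-CH')  ->  'de'
#   locale_to_lang_code('en')     ->  'en'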


# wikis for some languages were moved off from the main site, we need to make
# requests to correct URLs to be able to get results in those languages
lang_urls = {
    'all': {
        'base': 'https://wiki.archlinux.org',
        'search': '/index.php?title=Special:Search&offset={offset}&{query}'
    },
    'de': {
        'base': 'https://wiki.archlinux.de',
        'search': '/index.php?title=Spezial:Suche&offset={offset}&{query}'
    },
    'fr': {
        'base': 'https://wiki.archlinux.fr',
        'search': '/index.php?title=Spécial:Recherche&offset={offset}&{query}'
    },
    'ja': {
        'base': 'https://wiki.archlinuxjp.org',
        'search': '/index.php?title=特別:検索&offset={offset}&{query}'
    },
    'ro': {
        'base': 'http://wiki.archlinux.ro',
        'search': '/index.php?title=Special:Căutare&offset={offset}&{query}'
    },
    'tr': {
        'base': 'http://archtr.org/wiki',
        'search': '/index.php?title=Özel:Ara&offset={offset}&{query}'
    }
}


# get base & search URLs for selected language
def get_lang_urls(language):
    if language in lang_urls:
        return lang_urls[language]
    return lang_urls['all']
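
# Illustrative example (added comment): get_lang_urls('de') returns the
# wiki.archlinux.de entry above, while a code without its own wiki, e.g. 'es',
# falls back to the 'all' entry for the main site.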


# Language names to build search requests for
# those languages which are hosted on the main site.
main_langs = {
    'ar': 'العربية',
    'bg': 'Български',
    'cs': 'Česky',
    'da': 'Dansk',
    'el': 'Ελληνικά',
    'es': 'Español',
    'he': 'עברית',
    'hr': 'Hrvatski',
    'hu': 'Magyar',
    'it': 'Italiano',
    'ko': '한국어',
    'lt': 'Lietuviškai',
    'nl': 'Nederlands',
    'pl': 'Polski',
    'pt': 'Português',
    'ru': 'Русский',
    'sl': 'Slovenský',
    'th': 'ไทย',
    'uk': 'Українська',
    'zh': '简体中文'
}
supported_languages = dict(lang_urls, **main_langs)
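
# Added comment: supported_languages advertises every language code this engine
# handles - the codes with dedicated wikis (lang_urls) plus the codes hosted on
# the main site (main_langs).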


# do search-request
def request(query, params):
    # translate the locale (e.g. 'en-US') to language code ('en')
    language = locale_to_lang_code(params['language'])

    # if our language is hosted on the main site, we need to add its name
    # to the query in order to narrow the results to that language
    if language in main_langs:
        query += ' (' + main_langs[language] + ')'

    # prepare the request parameters
    query = urlencode({'search': query})
    offset = (params['pageno'] - 1) * 20

    # get request URLs for our language of choice
    urls = get_lang_urls(language)
    search_url = urls['base'] + urls['search']

    params['url'] = search_url.format(query=query, offset=offset)

    return params
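
# Illustrative example (added comment, inputs are assumptions): with
# params = {'language': 'de-CH', 'pageno': 1} and query 'pacman', params['url']
# becomes 'https://wiki.archlinux.de/index.php?title=Spezial:Suche&offset=0&search=pacman'.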


# get response from search-request
def response(resp):
    # get the base URL for the language in which the request was made
    language = locale_to_lang_code(resp.search_params['language'])
    base_url = get_lang_urls(language)['base']

    results = []

    dom = html.fromstring(resp.text)

    # parse results
    for result in dom.xpath(xpath_results):
        link = result.xpath(xpath_link)[0]
        href = urljoin(base_url, link.attrib.get('href'))
        title = extract_text(link)

        results.append({'url': href,
                        'title': title})

    return results
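
# Illustrative example (added comment): each appended result has the shape
# {'url': 'https://wiki.archlinux.org/index.php/Pacman', 'title': 'Pacman'};
# actual values depend on the HTML returned by the wiki.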