# duckduckgo.py
  1. """
  2. DuckDuckGo (Web)
  3. @website https://duckduckgo.com/
  4. @provide-api yes (https://duckduckgo.com/api),
  5. but not all results from search-site
  6. @using-api no
  7. @results HTML (using search portal)
  8. @stable no (HTML can change)
  9. @parse url, title, content
  10. @todo rewrite to api
  11. """
  12. from urllib import urlencode
  13. from lxml.html import fromstring
  14. from searx.engines.xpath import extract_text
  15. from searx.languages import language_codes
  16. # engine dependent config
  17. categories = ['general']
  18. paging = True
  19. language_support = True
  20. supported_languages = ["es-AR", "en-AU", "de-AT", "fr-BE", "nl-BE", "pt-BR", "bg-BG", "en-CA", "fr-CA", "ca-CT",
  21. "es-CL", "zh-CN", "es-CO", "hr-HR", "cs-CZ", "da-DK", "et-EE", "fi-FI", "fr-FR", "de-DE",
  22. "el-GR", "tzh-HK", "hu-HU", "en-IN", "id-ID", "en-ID", "en-IE", "he-IL", "it-IT", "jp-JP",
  23. "kr-KR", "es-XL", "lv-LV", "lt-LT", "ms-MY", "en-MY", "es-MX", "nl-NL", "en-NZ", "no-NO",
  24. "es-PE", "en-PH", "tl-PH", "pl-PL", "pt-PT", "ro-RO", "ru-RU", "ar-XA", "en-XA", "en-SG",
  25. "sk-SK", "sl-SL", "en-ZA", "es-ES", "ca-ES", "sv-SE", "de-CH", "fr-CH", "it-CH", "tzh-TW",
  26. "th-TH", "tr-TR", "uk-UA", "en-UK", "en-US", "es-US", "vi-VN"]
  27. time_range_support = True
  28. # search-url
  29. url = 'https://duckduckgo.com/html?{query}&s={offset}'
  30. time_range_url = '&df={range}'
  31. time_range_dict = {'day': 'd',
  32. 'week': 'w',
  33. 'month': 'm'}
  34. # specific xpath variables
  35. result_xpath = '//div[@class="result results_links results_links_deep web-result "]' # noqa
  36. url_xpath = './/a[@class="result__a"]/@href'
  37. title_xpath = './/a[@class="result__a"]'
  38. content_xpath = './/a[@class="result__snippet"]'
  39. # do search-request
  40. def request(query, params):
  41. if params['time_range'] and params['time_range'] not in time_range_dict:
  42. return params
  43. offset = (params['pageno'] - 1) * 30
  44. # custom fixes for languages
  45. if params['language'] == 'all':
  46. locale = None
  47. elif params['language'][:2] == 'ja':
  48. locale = 'jp-jp'
  49. elif params['language'] == 'zh-TW':
  50. locale = 'tw-tzh'
  51. elif params['language'] == 'zh-HK':
  52. locale = 'hk-tzh'
  53. elif params['language'][-2:] == 'SA':
  54. locale = 'xa' + params['language'].split('-')[0]
  55. elif params['language'][-2:] == 'GB':
  56. locale = 'uk' + params['language'].split('-')[0]
  57. elif params['language'] == 'es-419':
  58. locale = 'xl-es'
  59. else:
  60. locale = params['language'].split('-')
  61. if len(locale) == 2:
  62. # country code goes first
  63. locale = locale[1].lower() + '-' + locale[0].lower()
  64. else:
  65. # tries to get a country code from language
  66. locale = locale[0].lower()
  67. lang_codes = [x[0] for x in language_codes]
  68. for lc in lang_codes:
  69. lc = lc.split('-')
  70. if locale == lc[0] and len(lc) == 2:
  71. locale = lc[1].lower() + '-' + lc[0].lower()
  72. break
  73. if locale:
  74. params['url'] = url.format(
  75. query=urlencode({'q': query, 'kl': locale}), offset=offset)
  76. else:
  77. locale = params['language'].split('-')
  78. if len(locale) == 2:
  79. # country code goes first
  80. locale = locale[1].lower() + '-' + locale[0].lower()
  81. else:
  82. # tries to get a country code from language
  83. locale = locale[0].lower()
  84. lang_codes = [x[0] for x in language_codes]
  85. for lc in lang_codes:
  86. lc = lc.split('-')
  87. if locale == lc[0]:
  88. locale = lc[1].lower() + '-' + lc[0].lower()
  89. break
  90. if locale:
  91. params['url'] = url.format(
  92. query=urlencode({'q': query, 'kl': locale}), offset=offset)
  93. else:
  94. params['url'] = url.format(
  95. query=urlencode({'q': query}), offset=offset)
  96. if params['time_range'] in time_range_dict:
  97. params['url'] += time_range_url.format(range=time_range_dict[params['time_range']])
  98. return params
  99. # get response from search-request
  100. def response(resp):
  101. results = []
  102. doc = fromstring(resp.text)
  103. # parse results
  104. for r in doc.xpath(result_xpath):
  105. try:
  106. res_url = r.xpath(url_xpath)[-1]
  107. except:
  108. continue
  109. if not res_url:
  110. continue
  111. title = extract_text(r.xpath(title_xpath))
  112. content = extract_text(r.xpath(content_xpath))
  113. # append result
  114. results.append({'title': title,
  115. 'content': content,
  116. 'url': res_url})
  117. # return results
  118. return results