dictzone.py

# SPDX-License-Identifier: AGPL-3.0-or-later
"""
Dictzone
"""

from lxml import html

from searx.utils import eval_xpath

# about
about = {
    "website": 'https://dictzone.com/',
    "wikidata_id": None,
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

engine_type = 'online_dictionary'
categories = ['general', 'translate']
url = 'https://dictzone.com/{from_lang}-{to_lang}-dictionary/{query}'
weight = 100
results_xpath = './/table[@id="r"]/tr'
https_support = True

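# request() only builds the dictionary URL. With the 'online_dictionary' engine
# type, params['from_lang'] and params['to_lang'] are the language tuples filled
# in by searx; index 2 is expected to hold the language's full English name
# (e.g. 'english'), which is the form dictzone uses in its URL path.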
def request(query, params):  # pylint: disable=unused-argument
    params['url'] = url.format(from_lang=params['from_lang'][2], to_lang=params['to_lang'][2], query=params['query'])
    return params

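# response() scrapes the translation table selected by results_xpath: in each
# row, the first cell holds the source term and the second cell holds its
# translations as links.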
def response(resp):
    dom = html.fromstring(resp.text)

    translations = []
    # skip the first row of the result table (header row)
    for result in eval_xpath(dom, results_xpath)[1:]:
        try:
            from_result, to_results_raw = eval_xpath(result, './td')
        except:  # pylint: disable=bare-except
            # row does not contain exactly two cells -> not a translation row
            continue

        to_results = []
        for to_result in eval_xpath(to_results_raw, './p/a'):
            t = to_result.text_content()
            if t.strip():
                to_results.append(t)

        translations.append(
            {
                'text': f"{from_result.text_content()} - {'; '.join(to_results)}",
            }
        )

    if translations:
        result = {
            'answer': translations[0]['text'],
            'translations': translations,
            'answer_type': 'translations',
        }
        return [result]

    # nothing found -> return an empty result list
    return []
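
# Illustrative sketch (not part of the engine), assuming searx passes language
# tuples of the form (True, 'en', 'english'):
#
#   params = {'from_lang': (True, 'en', 'english'),
#             'to_lang': (True, 'de', 'german'),
#             'query': 'cat'}
#   request('cat', params)
#   # params['url'] == 'https://dictzone.com/english-german-dictionary/cat'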