etools.py

# SPDX-License-Identifier: AGPL-3.0-or-later
"""
eTools (Web)
"""

from lxml import html
from urllib.parse import quote
from searx.utils import extract_text, eval_xpath

# about
about = {
    "website": 'https://www.etools.ch',
    "wikidata_id": None,
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

# engine configuration
categories = ['general']
paging = False
language_support = False
safesearch = True

base_url = 'https://www.etools.ch'
search_path = '/searchAdvancedSubmit.do'\
    '?query={search_term}'\
    '&pageResults=20'\
    '&safeSearch={safesearch}'


def request(query, params):
    # map SearXNG's safesearch setting onto eTools' boolean flag
    if params['safesearch']:
        safesearch = 'true'
    else:
        safesearch = 'false'

    params['url'] = base_url + search_path.format(search_term=quote(query), safesearch=safesearch)

    return params


def response(resp):
    results = []

    dom = html.fromstring(resp.text)

    # each hit is a <td class="record"> cell inside the results table
    for result in eval_xpath(dom, '//table[@class="result"]//td[@class="record"]'):
        url = eval_xpath(result, './a/@href')[0]
        title = extract_text(eval_xpath(result, './a//text()'))
        content = extract_text(eval_xpath(result, './/div[@class="text"]//text()'))

        results.append({'url': url,
                        'title': title,
                        'content': content})

    return results
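For reference, here is a minimal sketch of how these two hooks could be exercised by hand, outside the SearXNG engine machinery. It assumes a checkout where the searx package is importable (so the module loads as searx.engines.etools) and that eTools still serves the table/record markup the XPath expressions above expect; the canned HTML and the SimpleNamespace response stand-in are illustrative only, not part of this module.

# Sketch only: drive request()/response() manually (assumes `searx` is importable).
from types import SimpleNamespace

from searx.engines import etools

# Build the outgoing request parameters as the engine core would.
params = etools.request('free software', {'safesearch': 1})
print(params['url'])
# https://www.etools.ch/searchAdvancedSubmit.do?query=free%20software&pageResults=20&safeSearch=true

# Feed response() a canned body; it only reads the .text attribute.
canned = SimpleNamespace(text='''
<table class="result">
  <tr><td class="record">
    <a href="https://example.org">Example Domain</a>
    <div class="text">Illustrative snippet text.</div>
  </td></tr>
</table>
''')
for item in etools.response(canned):
    print(item['url'], item['title'], item['content'])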