sogou.py

# SPDX-License-Identifier: AGPL-3.0-or-later
"""Sogou search engine for searxng"""

from urllib.parse import urlencode
from lxml import html

from searx.utils import extract_text

# Metadata
about = {
    "website": "https://www.sogou.com/",
    "wikidata_id": "Q7554565",
    "use_official_api": False,
    "require_api_key": False,
    "results": "HTML",
    "language": "zh",
}

# Engine configuration
categories = ["general"]
paging = True
time_range_support = True

time_range_dict = {
    'day': 'inttime_day',
    'week': 'inttime_week',
    'month': 'inttime_month',
    'year': 'inttime_year',
}

# Base URL
base_url = "https://www.sogou.com"


def request(query, params):
    """Build the Sogou search URL from the query, page number and optional time range."""
    query_params = {
        "query": query,
        "page": params["pageno"],
    }

    # Narrow results by time range when one is requested.
    if time_range_dict.get(params['time_range']):
        query_params["s_from"] = time_range_dict.get(params['time_range'])
        query_params["tsn"] = 1

    params["url"] = f"{base_url}/web?{urlencode(query_params)}"
    return params


def response(resp):
    """Parse the Sogou result page into a list of result dicts."""
    dom = html.fromstring(resp.text)
    results = []

    for item in dom.xpath('//div[contains(@class, "vrwrap")]'):
        title = extract_text(item.xpath('.//h3[contains(@class, "vr-title")]/a'))
        url = extract_text(item.xpath('.//h3[contains(@class, "vr-title")]/a/@href'))

        # Sogou returns relative redirect links; make them absolute.
        if url.startswith("/link?url="):
            url = f"{base_url}{url}"

        content = extract_text(item.xpath('.//div[contains(@class, "text-layout")]//p[contains(@class, "star-wiki")]'))
        if not content:
            content = extract_text(item.xpath('.//div[contains(@class, "fz-mid space-txt")]'))

        if title and url:
            results.append(
                {
                    "title": title,
                    "url": url,
                    "content": content,
                }
            )

    return results
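
For illustration, a minimal sketch of how request() and response() could be exercised outside SearXNG; the stub response object, the params dict and the sample HTML below are assumptions, not part of the engine:

from types import SimpleNamespace

# Build a request: params carry "pageno" and "time_range", as SearXNG would pass them.
params = request("hello world", {"pageno": 1, "time_range": "week"})
print(params["url"])
# -> https://www.sogou.com/web?query=hello+world&page=1&s_from=inttime_week&tsn=1

# Parse a hand-written result page (markup assumed for illustration only).
sample_html = """
<div class="vrwrap">
  <h3 class="vr-title"><a href="/link?url=abc">Example title</a></h3>
  <div class="text-layout"><p class="star-wiki">Example snippet</p></div>
</div>
"""
print(response(SimpleNamespace(text=sample_html)))
# -> [{'title': 'Example title', 'url': 'https://www.sogou.com/link?url=abc',
#      'content': 'Example snippet'}]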