360search_videos.py

# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=invalid-name
"""360Search-Videos: A search engine for retrieving videos from 360Search."""

from urllib.parse import urlencode
from datetime import datetime

from searx.exceptions import SearxEngineAPIException
from searx.utils import html_to_text, get_embeded_stream_url

about = {
    "website": "https://tv.360kan.com/",
    "use_official_api": False,
    "require_api_key": False,
    "results": "JSON",
}

paging = True
results_per_page = 10
categories = ["videos"]

base_url = "https://tv.360kan.com"


def request(query, params):
    # build a paged request against the 360kan video list API
    query_params = {"count": 10, "q": query, "start": params["pageno"] * 10}
    params["url"] = f"{base_url}/v1/video/list?{urlencode(query_params)}"
    return params
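
# Example (illustrative): for the query "cat" on page 1, request() sets
# params["url"] to https://tv.360kan.com/v1/video/list?count=10&q=cat&start=10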


def response(resp):
    try:
        data = resp.json()
    except Exception as e:
        raise SearxEngineAPIException(f"Invalid response: {e}") from e

    results = []

    if "data" not in data or "result" not in data["data"]:
        raise SearxEngineAPIException("Invalid response")

    for entry in data["data"]["result"]:
        # skip entries without the fields required to render a video result
        if not entry.get("title") or not entry.get("play_url"):
            continue

        # publish_time is treated as a Unix timestamp
        published_date = None
        if entry.get("publish_time"):
            try:
                published_date = datetime.fromtimestamp(int(entry["publish_time"]))
            except (ValueError, TypeError):
                published_date = None

        results.append(
            {
                'url': entry["play_url"],
                'title': html_to_text(entry["title"]),
                'content': html_to_text(entry["description"]),
                'template': 'videos.html',
                'publishedDate': published_date,
                'thumbnail': entry["cover_img"],
                "iframe_src": get_embeded_stream_url(entry["play_url"]),
            }
        )

    return results
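

# --- Illustrative usage sketch (not part of the upstream engine) -------------
# A minimal manual check that feeds response() a stubbed HTTP response.  The
# _FakeResponse class and the sample payload are hypothetical; the payload
# shape is only inferred from the parsing logic above, and real API responses
# may differ.
if __name__ == "__main__":

    class _FakeResponse:
        """Hypothetical stand-in for the HTTP response object passed to response()."""

        def __init__(self, payload):
            self._payload = payload

        def json(self):
            return self._payload

    sample = {
        "data": {
            "result": [
                {
                    "title": "Example <b>video</b>",
                    "play_url": "https://tv.360kan.com/play/example",
                    "description": "Example description",
                    "cover_img": "https://example.org/cover.jpg",
                    "publish_time": 1700000000,
                }
            ]
        }
    }

    for item in response(_FakeResponse(sample)):
        print(item["title"], "->", item["url"])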