abstract.py

# SPDX-License-Identifier: AGPL-3.0-or-later

import threading
from abc import abstractmethod, ABC
from time import time

from searx import logger
from searx.engines import settings
from searx.network import get_time_for_thread, get_network
from searx.metrology.error_recorder import record_exception, record_error
from searx.exceptions import SearxEngineAccessDeniedException


logger = logger.getChild('searx.search.processor')

SUSPENDED_STATUS = {}


class SuspendedStatus:
    """Hold the suspension state of an engine after repeated errors."""

    __slots__ = 'suspend_end_time', 'suspend_reason', 'continuous_errors', 'lock'

    def __init__(self):
        self.lock = threading.Lock()
        self.continuous_errors = 0
        self.suspend_end_time = 0
        self.suspend_reason = None

    @property
    def is_suspended(self):
        return self.suspend_end_time >= time()

    def suspend(self, suspended_time, suspend_reason):
        with self.lock:
            # update continuous_errors / suspend_end_time
            self.continuous_errors += 1
            if suspended_time is None:
                suspended_time = min(settings['search']['max_ban_time_on_fail'],
                                     self.continuous_errors * settings['search']['ban_time_on_fail'])
            self.suspend_end_time = time() + suspended_time
            self.suspend_reason = suspend_reason
            logger.debug('Suspend engine for %i seconds', suspended_time)

    def resume(self):
        with self.lock:
            # reset the suspend variables
            self.continuous_errors = 0
            self.suspend_end_time = 0
            self.suspend_reason = None
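
# Worked example for SuspendedStatus.suspend() above, a sketch only: the actual
# values come from the instance settings. Assuming ban_time_on_fail = 5 and
# max_ban_time_on_fail = 120, the third consecutive error suspends the engine
# for min(120, 3 * 5) = 15 seconds; from the 24th consecutive error on, the
# 120 second cap applies.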


class EngineProcessor(ABC):
    """Base class of the engine processors."""

    __slots__ = 'engine', 'engine_name', 'lock', 'suspended_status'

    def __init__(self, engine, engine_name):
        self.engine = engine
        self.engine_name = engine_name
        self.lock = threading.Lock()
        # engines sharing the same network also share their suspended status
        key = get_network(self.engine_name)
        key = id(key) if key else self.engine_name
        self.suspended_status = SUSPENDED_STATUS.setdefault(key, SuspendedStatus())

    def handle_exception(self, result_container, reason, exception, suspend=False, display_exception=True):
        # update result_container
        error_message = str(exception) if display_exception and exception else None
        result_container.add_unresponsive_engine(self.engine_name, reason, error_message)
        # metrics
        with self.lock:
            self.engine.stats['errors'] += 1
        if exception:
            record_exception(self.engine_name, exception)
        else:
            record_error(self.engine_name, reason)
        # suspend the engine ?
        if suspend:
            suspended_time = None
            if isinstance(exception, SearxEngineAccessDeniedException):
                suspended_time = exception.suspended_time
            self.suspended_status.suspend(suspended_time, reason)  # pylint: disable=no-member

    def _extend_container_basic(self, result_container, start_time, search_results):
        # update result_container
        result_container.extend(self.engine_name, search_results)
        engine_time = time() - start_time
        page_load_time = get_time_for_thread()
        result_container.add_timing(self.engine_name, engine_time, page_load_time)
        # metrics
        with self.lock:
            self.engine.stats['engine_time'] += engine_time
            self.engine.stats['engine_time_count'] += 1
            # update stats with the total HTTP time
            if page_load_time is not None and 'page_load_time' in self.engine.stats:
                self.engine.stats['page_load_time'] += page_load_time
                self.engine.stats['page_load_count'] += 1

    def extend_container(self, result_container, start_time, search_results):
        if getattr(threading.current_thread(), '_timeout', False):
            # the main thread is not waiting anymore
            self.handle_exception(result_container, 'Timeout', None)
        else:
            # check if the engine accepted the request
            if search_results is not None:
                self._extend_container_basic(result_container, start_time, search_results)
            self.suspended_status.resume()

    def extend_container_if_suspended(self, result_container):
        if self.suspended_status.is_suspended:
            result_container.add_unresponsive_engine(self.engine_name,
                                                     self.suspended_status.suspend_reason,
                                                     suspended=True)
            return True
        return False

    def get_params(self, search_query, engine_category):
        # if paging is not supported, skip
        if search_query.pageno > 1 and not self.engine.paging:
            return None

        # if time_range is not supported, skip
        if search_query.time_range and not self.engine.time_range_support:
            return None

        params = {}
        params['category'] = engine_category
        params['pageno'] = search_query.pageno
        params['safesearch'] = search_query.safesearch
        params['time_range'] = search_query.time_range
        params['engine_data'] = search_query.engine_data.get(self.engine_name, {})

        if hasattr(self.engine, 'language') and self.engine.language:
            params['language'] = self.engine.language
        else:
            params['language'] = search_query.lang
        return params
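
    # For illustration only (hypothetical values): for a paging-capable engine
    # in the 'general' category, a request for page 2 with safesearch=1, no
    # time range and request language 'en-US' yields
    #   {'category': 'general', 'pageno': 2, 'safesearch': 1,
    #    'time_range': None, 'engine_data': {}, 'language': 'en-US'}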

    @abstractmethod
    def search(self, query, params, result_container, start_time, timeout_limit):
        pass

    def get_tests(self):
        tests = getattr(self.engine, 'tests', None)
        if tests is None:
            tests = getattr(self.engine, 'additional_tests', {})
            tests.update(self.get_default_tests())
            return tests
        else:
            return tests

    def get_default_tests(self):
        return {}
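

# Illustrative sketch only, not part of the original module: a minimal concrete
# processor showing how the pieces above fit together. The engine attribute
# `fetch_results` is a hypothetical stand-in for the request/response handling
# a real processor performs.
class ExampleProcessor(EngineProcessor):

    def search(self, query, params, result_container, start_time, timeout_limit):
        try:
            # the concrete engine builds the request and parses the response
            search_results = self.engine.fetch_results(query, params)  # hypothetical helper
        except Exception as e:  # pylint: disable=broad-except
            # record the failure; metrics and the result container are updated
            self.handle_exception(result_container, 'unexpected crash', e)
        else:
            # store results and timings, and clear any previous suspension
            self.extend_container(result_container, start_time, search_results)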