# searx/search/processors/abstract.py
  1. # SPDX-License-Identifier: AGPL-3.0-or-later
  2. import threading
  3. from abc import abstractmethod, ABC
  4. from timeit import default_timer
  5. from searx import logger
  6. from searx.engines import settings
  7. from searx.network import get_time_for_thread, get_network
  8. from searx.metrics import histogram_observe, counter_inc, count_exception, count_error
  9. from searx.exceptions import SearxEngineAccessDeniedException
# child logger for every search processor in this package
logger = logger.getChild('searx.search.processor')

# module-wide suspension registry shared by all processors; keyed by the
# network object's id (or by engine name when the engine has no network),
# so engines sharing a network share one SuspendedStatus — see
# EngineProcessor.__init__
SUSPENDED_STATUS = {}
  12. class SuspendedStatus:
  13. __slots__ = 'suspend_end_time', 'suspend_reason', 'continuous_errors', 'lock'
  14. def __init__(self):
  15. self.lock = threading.Lock()
  16. self.continuous_errors = 0
  17. self.suspend_end_time = 0
  18. self.suspend_reason = None
  19. @property
  20. def is_suspended(self):
  21. return self.suspend_end_time >= default_timer()
  22. def suspend(self, suspended_time, suspend_reason):
  23. with self.lock:
  24. # update continuous_errors / suspend_end_time
  25. self.continuous_errors += 1
  26. if suspended_time is None:
  27. suspended_time = min(settings['search']['max_ban_time_on_fail'],
  28. self.continuous_errors * settings['search']['ban_time_on_fail'])
  29. self.suspend_end_time = default_timer() + suspended_time
  30. self.suspend_reason = suspend_reason
  31. logger.debug('Suspend engine for %i seconds', suspended_time)
  32. def resume(self):
  33. with self.lock:
  34. # reset the suspend variables
  35. self.continuous_errors = 0
  36. self.suspend_end_time = 0
  37. self.suspend_reason = None
  38. class EngineProcessor(ABC):
  39. __slots__ = 'engine', 'engine_name', 'lock', 'suspended_status'
  40. def __init__(self, engine, engine_name):
  41. self.engine = engine
  42. self.engine_name = engine_name
  43. key = get_network(self.engine_name)
  44. key = id(key) if key else self.engine_name
  45. self.suspended_status = SUSPENDED_STATUS.setdefault(key, SuspendedStatus())
  46. def handle_exception(self, result_container, reason, exception, suspend=False, display_exception=True):
  47. # update result_container
  48. error_message = str(exception) if display_exception and exception else None
  49. result_container.add_unresponsive_engine(self.engine_name, reason, error_message)
  50. # metrics
  51. counter_inc('engine', self.engine_name, 'search', 'count', 'error')
  52. if exception:
  53. count_exception(self.engine_name, exception)
  54. else:
  55. count_error(self.engine_name, reason)
  56. # suspend the engine ?
  57. if suspend:
  58. suspended_time = None
  59. if isinstance(exception, SearxEngineAccessDeniedException):
  60. suspended_time = exception.suspended_time
  61. self.suspended_status.suspend(suspended_time, reason) # pylint: disable=no-member
  62. def _extend_container_basic(self, result_container, start_time, search_results):
  63. # update result_container
  64. result_container.extend(self.engine_name, search_results)
  65. engine_time = default_timer() - start_time
  66. page_load_time = get_time_for_thread()
  67. result_container.add_timing(self.engine_name, engine_time, page_load_time)
  68. # metrics
  69. counter_inc('engine', self.engine_name, 'search', 'count', 'successful')
  70. histogram_observe(engine_time, 'engine', self.engine_name, 'time', 'total')
  71. if page_load_time is not None:
  72. histogram_observe(page_load_time, 'engine', self.engine_name, 'time', 'http')
  73. def extend_container(self, result_container, start_time, search_results):
  74. if getattr(threading.current_thread(), '_timeout', False):
  75. # the main thread is not waiting anymore
  76. self.handle_exception(result_container, 'Timeout', None)
  77. else:
  78. # check if the engine accepted the request
  79. if search_results is not None:
  80. self._extend_container_basic(result_container, start_time, search_results)
  81. self.suspended_status.resume()
  82. def extend_container_if_suspended(self, result_container):
  83. if self.suspended_status.is_suspended:
  84. result_container.add_unresponsive_engine(self.engine_name,
  85. self.suspended_status.suspend_reason,
  86. suspended=True)
  87. return True
  88. return False
  89. def get_params(self, search_query, engine_category):
  90. # if paging is not supported, skip
  91. if search_query.pageno > 1 and not self.engine.paging:
  92. return None
  93. # if time_range is not supported, skip
  94. if search_query.time_range and not self.engine.time_range_support:
  95. return None
  96. params = {}
  97. params['category'] = engine_category
  98. params['pageno'] = search_query.pageno
  99. params['safesearch'] = search_query.safesearch
  100. params['time_range'] = search_query.time_range
  101. params['engine_data'] = search_query.engine_data.get(self.engine_name, {})
  102. if hasattr(self.engine, 'language') and self.engine.language:
  103. params['language'] = self.engine.language
  104. else:
  105. params['language'] = search_query.lang
  106. return params
  107. @abstractmethod
  108. def search(self, query, params, result_container, start_time, timeout_limit):
  109. pass
  110. def get_tests(self):
  111. tests = getattr(self.engine, 'tests', None)
  112. if tests is None:
  113. tests = getattr(self.engine, 'additional_tests', {})
  114. tests.update(self.get_default_tests())
  115. return tests
  116. else:
  117. return tests
  118. def get_default_tests(self):
  119. return {}