# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring, too-few-public-methods

import typing
import threading
from timeit import default_timer
from uuid import uuid4

from searx import settings
from searx.answerers import ask
from searx.external_bang import get_bang_url
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins
from searx.search.models import EngineRef, SearchQuery
from searx.engines import load_engines
from searx.network import initialize as initialize_network, check_network_configuration
from searx.metrics import initialize as initialize_metrics, counter_inc, histogram_observe_time
from searx.search.processors import PROCESSORS, initialize as initialize_processors
from searx.search.checker import initialize as initialize_checker

logger = logger.getChild('search')


def initialize(settings_engines=None, enable_checker=False, check_network=False):
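    """Initialize the search subsystem: load the configured engines, set up the
    outgoing network (optionally verifying its configuration), register
    per-engine metrics, create the engine processors and, if requested, start
    the search checker."""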
    settings_engines = settings_engines or settings['engines']
    load_engines(settings_engines)
    initialize_network(settings_engines, settings['outgoing'])
    if check_network:
        check_network_configuration()
    initialize_metrics([engine['name'] for engine in settings_engines])
    initialize_processors(settings_engines)
    if enable_checker:
        initialize_checker()


class Search:
    """Search information container"""

    __slots__ = "search_query", "result_container", "start_time", "actual_timeout"

    def __init__(self, search_query: SearchQuery):
        # init vars
        super().__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
        self.start_time = None
        self.actual_timeout = None

    def search_external_bang(self):
        """
        Check if there is an external bang.
        If yes, update self.result_container and return True.
        """
        if self.search_query.external_bang:
            self.result_container.redirect_url = get_bang_url(self.search_query)

            # This means there was a valid bang and the
            # rest of the search does not need to be continued
            if isinstance(self.result_container.redirect_url, str):
                return True
        return False

    def search_answerers(self):
        """
        Check if an answerer returns a result.
        If yes, update self.result_container and return True.
        """
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return True
        return False

    # do search-request
    def _get_requests(self):
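        """Build the per-engine request list and compute the actual timeout.

        Returns a tuple ``(requests, actual_timeout)`` where ``requests`` is a
        list of ``(engine_name, query, request_params)`` tuples.
        """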
        # init vars
        requests = []

        # max of all selected engine timeouts
        default_timeout = 0

        # start search-request for all selected engines
        for engineref in self.search_query.engineref_list:
            processor = PROCESSORS[engineref.name]

            # stop the request now if the engine is suspended
            if processor.extend_container_if_suspended(self.result_container):
                continue

            # set default request parameters
            request_params = processor.get_params(self.search_query, engineref.category)
            if request_params is None:
                continue

            counter_inc('engine', engineref.name, 'search', 'count', 'sent')

            # append request to list
            requests.append((engineref.name, self.search_query.query, request_params))

            # update default_timeout
            default_timeout = max(default_timeout, processor.engine.timeout)

        # adjust timeout
        max_request_timeout = settings['outgoing']['max_request_timeout']
        actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit
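
        # Worked example: two selected engines with timeouts 3.0 s and 5.0 s
        # give default_timeout = 5.0; a user limit of 2.0 s and no configured
        # max_request_timeout then yield actual_timeout = min(5.0, 2.0) = 2.0.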
        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: From user query except if above default
            actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: Default except if above max
            actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: From user query except if above max
            actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                     .format(actual_timeout, default_timeout, query_timeout, max_request_timeout))

        return requests, actual_timeout

    def search_multiple_requests(self, requests):
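        """Start one thread per request and wait for all of them to finish or
        time out; engines that exceed the deadline are recorded as
        unresponsive in the result container."""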
        # pylint: disable=protected-access
        search_id = uuid4().__str__()

        for engine_name, query, request_params in requests:
            th = threading.Thread(  # pylint: disable=invalid-name
                target=PROCESSORS[engine_name].search,
                args=(query, request_params, self.result_container, self.start_time, self.actual_timeout),
                name=search_id,
            )
            th._timeout = False
            th._engine_name = engine_name
            th.start()
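
        # All threads of this search share the name `search_id`, so they can be
        # found again via threading.enumerate(). Each join() only gets the time
        # still remaining from the overall deadline, so the total wait never
        # exceeds self.actual_timeout.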
        for th in threading.enumerate():  # pylint: disable=invalid-name
            if th.name == search_id:
                remaining_time = max(0.0, self.actual_timeout - (default_timer() - self.start_time))
                th.join(remaining_time)
                if th.is_alive():
                    th._timeout = True
                    self.result_container.add_unresponsive_engine(th._engine_name, 'timeout')
                    PROCESSORS[th._engine_name].logger.error('engine timeout')

    def search_standard(self):
        """
        Update self.result_container and self.actual_timeout.
        """
        requests, self.actual_timeout = self._get_requests()

        # send all search-requests
        if requests:
            self.search_multiple_requests(requests)

        # return results, suggestions, answers and infoboxes
        return True

    # do search-request
    def search(self) -> ResultContainer:
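        """Run the search: try an external bang first, then the answerers and
        finally the engines; always return self.result_container."""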
        self.start_time = default_timer()
        if not self.search_external_bang():
            if not self.search_answerers():
                self.search_standard()
        return self.result_container


class SearchWithPlugins(Search):
    """Inherit from the Search class, add calls to the plugins."""

    __slots__ = 'ordered_plugin_list', 'request'

    def __init__(self, search_query: SearchQuery, ordered_plugin_list, request: "flask.Request"):
        super().__init__(search_query)
        self.ordered_plugin_list = ordered_plugin_list
        self.result_container.on_result = self._on_result
        # pylint: disable=line-too-long
        # get the "real" request to use it outside the Flask context.
        # see
        # * https://github.com/pallets/flask/blob/d01d26e5210e3ee4cbbdef12f05c886e08e92852/src/flask/globals.py#L55
        # * https://github.com/pallets/werkzeug/blob/3c5d3c9bd0d9ce64590f0af8997a38f3823b368d/src/werkzeug/local.py#L548-L559
        # * https://werkzeug.palletsprojects.com/en/2.0.x/local/#werkzeug.local.LocalProxy._get_current_object
        # pylint: enable=line-too-long
        self.request = request._get_current_object()

    def _on_result(self, result):
        return plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)

    def search(self) -> ResultContainer:
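        """Same as Search.search(), wrapped in the plugin hooks: 'pre_search'
        may cancel the engine search, 'on_result' filters every result via
        self._on_result, and 'post_search' runs once the engines are done."""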
        if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
            super().search()

        plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)

        self.result_container.close()

        return self.result_container
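

# Typical call flow (a sketch; `search_query`, `ordered_plugin_list` and
# `flask_request` are placeholders built by the web layer, not defined here):
#
#   initialize(settings['engines'])
#   search = SearchWithPlugins(search_query, ordered_plugin_list, flask_request)
#   result_container = search.search()
#
# Plain `Search` works the same way when no plugin hooks or Flask request are
# involved.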