Browse Source

Merge pull request #302 from dalf/mod_plugin_on_result

[mod] plugin: call on_result for each result of each engine.
Markus Heiser 3 years ago
parent
commit
b671e0364f

+ 45 - 13
docs/dev/plugins.rst

@@ -26,8 +26,8 @@ Example plugin
    # attach callback to the post search hook
    #  request: flask request object
    #  ctx: the whole local context of the post search hook
-   def post_search(request, ctx):
-       ctx['search'].suggestions.add('example')
+   def post_search(request, search):
+       search.result_container.suggestions.add('example')
        return True
 
 External plugins
@@ -50,20 +50,52 @@ Plugin entry points
 
 Entry points (hooks) define when a plugin runs. Right now only three hooks are
 implemented. So feel free to implement a hook if it fits the behaviour of your
-plugin.
+plugin. A plugin doesn't need to implement all the hooks.
 
-Pre search hook
----------------
 
-Runs BEFORE the search request. Function to implement: ``pre_search``
+.. py:function:: pre_search(request, search) -> bool
 
-Post search hook
-----------------
+   Runs BEFORE the search request.
 
-Runs AFTER the search request. Function to implement: ``post_search``
+   `search.result_container` can be changed.
 
-Result hook
------------
+   Return a boolean:
 
-Runs when a new result is added to the result list. Function to implement:
-``on_result``
+   * True to continue the search
+   * False to stop the search
+
+   :param flask.request request:
+   :param searx.search.SearchWithPlugins search:
+   :return: False to stop the search
+   :rtype: bool
+
+
+.. py:function:: post_search(request, search) -> None
+
+   Runs AFTER the search request.
+
+   :param flask.request request: Flask request.
+   :param searx.search.SearchWithPlugins search: Context.
+
+
+.. py:function:: on_result(request, search, result) -> bool
+
+   Runs for each result of each engine.
+
+   `result` can be changed.
+
+   If `result["url"]` is defined, then `result["parsed_url"] = urlparse(result['url'])`
+
+   .. warning::
+      `result["url"]` can be changed, but `result["parsed_url"]` must be updated too.
+
+   Return a boolean:
+
+   * True to keep the result
+   * False to remove the result
+
+   :param flask.request request:
+   :param searx.search.SearchWithPlugins search:
+   :param typing.Dict result: Result, see :ref:`engine results`
+   :return: True to keep the result
+   :rtype: bool

+ 38 - 0
docs/src/searx.search.rst

@@ -0,0 +1,38 @@
+.. _searx.search:
+
+======
+Search
+======
+
+.. autoclass:: searx.search.EngineRef
+  :members:
+
+.. autoclass:: searx.search.SearchQuery
+  :members:
+
+.. autoclass:: searx.search.Search
+
+  .. attribute:: search_query
+    :type: searx.search.SearchQuery
+
+  .. attribute:: result_container
+    :type: searx.results.ResultContainer
+
+  .. automethod:: search() -> searx.results.ResultContainer
+
+.. autoclass:: searx.search.SearchWithPlugins
+  :members:
+
+  .. attribute:: search_query
+    :type: searx.search.SearchQuery
+
+  .. attribute:: result_container
+    :type: searx.results.ResultContainer
+
+  .. attribute:: ordered_plugin_list
+    :type: typing.List
+
+  .. attribute:: request
+    :type: flask.request
+
+  .. automethod:: search() -> searx.results.ResultContainer

+ 1 - 7
searx/plugins/ahmia_filter.py

@@ -20,14 +20,8 @@ def get_ahmia_blacklist():
     return ahmia_blacklist
 
 
-def not_blacklisted(result):
+def on_result(request, search, result):
     if not result.get('is_onion') or not result.get('parsed_url'):
         return True
     result_hash = md5(result['parsed_url'].hostname.encode()).hexdigest()
     return result_hash not in get_ahmia_blacklist()
-
-
-def post_search(request, search):
-    filtered_results = list(filter(not_blacklisted, search.result_container._merged_results))
-    search.result_container._merged_results = filtered_results
-    return True

+ 6 - 9
searx/plugins/oa_doi_rewrite.py

@@ -11,8 +11,6 @@ description = gettext('Avoid paywalls by redirecting to open-access versions of
 default_on = False
 preference_section = 'general'
 
-doi_resolvers = settings['doi_resolvers']
-
 
 def extract_doi(url):
     match = regex.search(url.path)
@@ -25,13 +23,12 @@ def extract_doi(url):
     return None
 
 
-def get_doi_resolver(args, preference_doi_resolver):
+def get_doi_resolver(preferences):
     doi_resolvers = settings['doi_resolvers']
-    doi_resolver = args.get('doi_resolver', preference_doi_resolver)[0]
-    if doi_resolver not in doi_resolvers:
-        doi_resolver = settings['default_doi_resolver']
-    doi_resolver_url = doi_resolvers[doi_resolver]
-    return doi_resolver_url
+    selected_resolver = preferences.get_value('doi_resolver')[0]
+    if selected_resolver not in doi_resolvers:
+        selected_resolver = settings['default_doi_resolver']
+    return doi_resolvers[selected_resolver]
 
 
 def on_result(request, search, result):
@@ -43,6 +40,6 @@ def on_result(request, search, result):
         for suffix in ('/', '.pdf', '.xml', '/full', '/meta', '/abstract'):
             if doi.endswith(suffix):
                 doi = doi[:-len(suffix)]
-        result['url'] = get_doi_resolver(request.args, request.preferences.get_value('doi_resolver')) + doi
+        result['url'] = get_doi_resolver(request.preferences) + doi
         result['parsed_url'] = urlparse(result['url'])
     return True

+ 58 - 36
searx/results.py

@@ -145,7 +145,7 @@ class ResultContainer:
     """docstring for ResultContainer"""
 
     __slots__ = '_merged_results', 'infoboxes', 'suggestions', 'answers', 'corrections', '_number_of_results',\
-                '_ordered', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data'
+                '_closed', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data', 'on_result'
 
     def __init__(self):
         super().__init__()
@@ -156,43 +156,48 @@ class ResultContainer:
         self.corrections = set()
         self._number_of_results = []
         self.engine_data = defaultdict(dict)
-        self._ordered = False
+        self._closed = False
         self.paging = False
         self.unresponsive_engines = set()
         self.timings = []
         self.redirect_url = None
+        self.on_result = lambda _: True
 
     def extend(self, engine_name, results):
+        if self._closed:
+            return
+
         standard_result_count = 0
         error_msgs = set()
         for result in list(results):
             result['engine'] = engine_name
-            if 'suggestion' in result:
+            if 'suggestion' in result and self.on_result(result):
                 self.suggestions.add(result['suggestion'])
-            elif 'answer' in result:
+            elif 'answer' in result and self.on_result(result):
                 self.answers[result['answer']] = result
-            elif 'correction' in result:
+            elif 'correction' in result and self.on_result(result):
                 self.corrections.add(result['correction'])
-            elif 'infobox' in result:
+            elif 'infobox' in result and self.on_result(result):
                 self._merge_infobox(result)
-            elif 'number_of_results' in result:
+            elif 'number_of_results' in result and self.on_result(result):
                 self._number_of_results.append(result['number_of_results'])
-            elif 'engine_data' in result:
+            elif 'engine_data' in result and self.on_result(result):
                 self.engine_data[engine_name][result['key']] = result['engine_data']
-            else:
+            elif 'url' in result:
                 # standard result (url, title, content)
-                if 'url' in result and not isinstance(result['url'], str):
-                    logger.debug('result: invalid URL: %s', str(result))
-                    error_msgs.add('invalid URL')
-                elif 'title' in result and not isinstance(result['title'], str):
-                    logger.debug('result: invalid title: %s', str(result))
-                    error_msgs.add('invalid title')
-                elif 'content' in result and not isinstance(result['content'], str):
-                    logger.debug('result: invalid content: %s', str(result))
-                    error_msgs.add('invalid content')
-                else:
-                    self._merge_result(result, standard_result_count + 1)
-                    standard_result_count += 1
+                if not self._is_valid_url_result(result, error_msgs):
+                    continue
+                # normalize the result
+                self._normalize_url_result(result)
+                # on_result calls searx.search.SearchWithPlugins._on_result,
+                # which in turn calls the plugins
+                if not self.on_result(result):
+                    continue
+                self.__merge_url_result(result, standard_result_count + 1)
+                standard_result_count += 1
+            elif self.on_result(result):
+                self.__merge_result_no_url(result, standard_result_count + 1)
+                standard_result_count += 1
 
         if len(error_msgs) > 0:
             for msg in error_msgs:
@@ -219,14 +224,29 @@ class ResultContainer:
         if add_infobox:
             self.infoboxes.append(infobox)
 
-    def _merge_result(self, result, position):
+    def _is_valid_url_result(self, result, error_msgs):
         if 'url' in result:
-            self.__merge_url_result(result, position)
-            return
-
-        self.__merge_result_no_url(result, position)
-
-    def __merge_url_result(self, result, position):
+            if not isinstance(result['url'], str):
+                logger.debug('result: invalid URL: %s', str(result))
+                error_msgs.add('invalid URL')
+                return False
+
+        if 'title' in result and not isinstance(result['title'], str):
+            logger.debug('result: invalid title: %s', str(result))
+            error_msgs.add('invalid title')
+            return False
+
+        if 'content' in result:
+            if not isinstance(result['content'], str):
+                logger.debug('result: invalid content: %s', str(result))
+                error_msgs.add('invalid content')
+                return False
+
+        return True
+
+    def _normalize_url_result(self, result):
+        """Normalize the result in place: parse the URL, default to the
+        http scheme, and collapse whitespace in the content. Always returns True.
+        """
         result['parsed_url'] = urlparse(result['url'])
 
         # if the result has no scheme, use http as default
@@ -234,12 +254,13 @@ class ResultContainer:
             result['parsed_url'] = result['parsed_url']._replace(scheme="http")
             result['url'] = result['parsed_url'].geturl()
 
-        result['engines'] = set([result['engine']])
-
         # strip multiple spaces and cariage returns from content
-        if result.get('content'):
-            result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])
+        result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])
+
+        return True
 
+    def __merge_url_result(self, result, position):
+        result['engines'] = set([result['engine']])
         duplicated = self.__find_duplicated_http_result(result)
         if duplicated:
             self.__merge_duplicated_http_result(duplicated, result, position)
@@ -295,7 +316,9 @@ class ResultContainer:
         with RLock():
             self._merged_results.append(result)
 
-    def order_results(self):
+    def close(self):
+        self._closed = True
+
         for result in self._merged_results:
             score = result_score(result)
             result['score'] = score
@@ -349,12 +372,11 @@ class ResultContainer:
                 categoryPositions[category] = {'index': len(gresults), 'count': 8}
 
         # update _merged_results
-        self._ordered = True
         self._merged_results = gresults
 
     def get_ordered_results(self):
-        if not self._ordered:
-            self.order_results()
+        if not self._closed:
+            self.close()
         return self._merged_results
 
     def results_length(self):

+ 20 - 12
searx/search/__init__.py

@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # lint: pylint
-# pylint: disable=missing-module-docstring
+# pylint: disable=missing-module-docstring, too-few-public-methods
 
 import typing
 import threading
@@ -39,7 +39,7 @@ class Search:
 
     __slots__ = "search_query", "result_container", "start_time", "actual_timeout"
 
-    def __init__(self, search_query):
+    def __init__(self, search_query: SearchQuery):
         # init vars
         super().__init__()
         self.search_query = search_query
@@ -163,7 +163,7 @@ class Search:
         return True
 
     # do search-request
-    def search(self):
+    def search(self) -> ResultContainer:
         self.start_time = default_timer()
         if not self.search_external_bang():
             if not self.search_answerers():
@@ -172,24 +172,32 @@ class Search:
 
 
 class SearchWithPlugins(Search):
-    """Similar to the Search class but call the plugins."""
+    """Inherit from the Search class, add calls to the plugins."""
 
     __slots__ = 'ordered_plugin_list', 'request'
 
-    def __init__(self, search_query, ordered_plugin_list, request):
+    def __init__(self, search_query: SearchQuery, ordered_plugin_list, request: "flask.Request"):
         super().__init__(search_query)
         self.ordered_plugin_list = ordered_plugin_list
-        self.request = request
-
-    def search(self):
+        self.result_container.on_result = self._on_result
+        # pylint: disable=line-too-long
+        # get the "real" request to use it outside the Flask context.
+        # see
+        # * https://github.com/pallets/flask/blob/d01d26e5210e3ee4cbbdef12f05c886e08e92852/src/flask/globals.py#L55
+        # * https://github.com/pallets/werkzeug/blob/3c5d3c9bd0d9ce64590f0af8997a38f3823b368d/src/werkzeug/local.py#L548-L559
+        # * https://werkzeug.palletsprojects.com/en/2.0.x/local/#werkzeug.local.LocalProxy._get_current_object
+        # pylint: enable=line-too-long
+        self.request = request._get_current_object()
+
+    def _on_result(self, result):
+        return plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
+
+    def search(self) -> ResultContainer:
         if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
             super().search()
 
         plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)
 
-        results = self.result_container.get_ordered_results()
-
-        for result in results:
-            plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
+        self.result_container.close()
 
         return self.result_container

+ 1 - 0
searx/search/models.py

@@ -4,6 +4,7 @@ import typing
 
 
 class EngineRef:
+    """Reference by names to an engine and category"""
 
     __slots__ = 'name', 'category'
 

+ 1 - 3
searx/webapp.py

@@ -1040,9 +1040,7 @@ def preferences():
         themes = themes,
         plugins = plugins,
         doi_resolvers = settings['doi_resolvers'],
-        current_doi_resolver = get_doi_resolver(
-            request.args, request.preferences.get_value('doi_resolver')
-        ),
+        current_doi_resolver = get_doi_resolver(request.preferences),
         allowed_plugins = allowed_plugins,
         theme = get_current_theme_name(),
         preferences_url_params = request.preferences.get_as_url_params(),