Browse Source

[refactor] typification of SearXNG / EngineResults

In [1] and [2] we discussed the need for a Result.results property and how we can
avoid unclear code.  This patch implements a class for the result lists of
engines::

    searx.result_types.EngineResults

A simple example for the usage in engine development::

    from searx.result_types import EngineResults
    ...
    def response(resp) -> EngineResults:
        res = EngineResults()
        ...
        res.add( res.types.Answer(answer="lorem ipsum ..", url="https://example.org") )
        ...
        return res

[1] https://github.com/searxng/searxng/pull/4183#pullrequestreview-257400034
[2] https://github.com/searxng/searxng/pull/4183#issuecomment-2614301580
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
Markus Heiser 3 months ago
parent
commit
36a1ef1239

+ 8 - 0
docs/dev/engines/index.rst

@@ -19,6 +19,14 @@ Engine Implementations
    engine_overview
 
 
+ResultList and engines
+======================
+
+.. autoclass:: searx.result_types.ResultList
+
+.. autoclass:: searx.result_types.EngineResults
+
+
 Engine Types
 ============
 

+ 11 - 11
searx/engines/brave.py

@@ -139,7 +139,7 @@ from searx.utils import (
     get_embeded_stream_url,
 )
 from searx.enginelib.traits import EngineTraits
-from searx.result_types import Answer
+from searx.result_types import EngineResults
 
 if TYPE_CHECKING:
     import logging
@@ -249,7 +249,7 @@ def _extract_published_date(published_date_raw):
         return None
 
 
-def response(resp):
+def response(resp) -> EngineResults:
 
     if brave_category in ('search', 'goggles'):
         return _parse_search(resp)
@@ -270,9 +270,9 @@ def response(resp):
     raise ValueError(f"Unsupported brave category: {brave_category}")
 
 
-def _parse_search(resp):
+def _parse_search(resp) -> EngineResults:
+    result_list = EngineResults()
 
-    result_list = []
     dom = html.fromstring(resp.text)
 
     # I doubt that Brave is still providing the "answer" class / I haven't seen
@@ -282,7 +282,7 @@ def _parse_search(resp):
         url = eval_xpath_getindex(dom, '//div[@id="featured_snippet"]/a[@class="result-header"]/@href', 0, default=None)
         answer = extract_text(answer_tag)
         if answer is not None:
-            Answer(results=result_list, answer=answer, url=url)
+            result_list.add(result_list.types.Answer(answer=answer, url=url))
 
     # xpath_results = '//div[contains(@class, "snippet fdb") and @data-type="web"]'
     xpath_results = '//div[contains(@class, "snippet ")]'
@@ -339,8 +339,8 @@ def _parse_search(resp):
     return result_list
 
 
-def _parse_news(json_resp):
-    result_list = []
+def _parse_news(json_resp) -> EngineResults:
+    result_list = EngineResults()
 
     for result in json_resp["results"]:
         item = {
@@ -356,8 +356,8 @@ def _parse_news(json_resp):
     return result_list
 
 
-def _parse_images(json_resp):
-    result_list = []
+def _parse_images(json_resp) -> EngineResults:
+    result_list = EngineResults()
 
     for result in json_resp["results"]:
         item = {
@@ -375,8 +375,8 @@ def _parse_images(json_resp):
     return result_list
 
 
-def _parse_videos(json_resp):
-    result_list = []
+def _parse_videos(json_resp) -> EngineResults:
+    result_list = EngineResults()
 
     for result in json_resp["results"]:
 

+ 9 - 10
searx/engines/deepl.py

@@ -1,7 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Deepl translation engine"""
 
-from searx.result_types import Translations
+from searx.result_types import EngineResults
 
 about = {
     "website": 'https://deepl.com',
@@ -39,15 +39,14 @@ def request(_query, params):
     return params
 
 
-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
 
-    result = resp.json()
+    res = EngineResults()
+    data = resp.json()
+    if not data.get('translations'):
+        return res
 
-    if not result.get('translations'):
-        return results
+    translations = [res.types.Translations.Item(text=t['text']) for t in data['translations']]
+    res.add(res.types.Translations(translations=translations))
 
-    translations = [Translations.Item(text=t['text']) for t in result['translations']]
-    Translations(results=results, translations=translations)
-
-    return results
+    return res

+ 5 - 4
searx/engines/demo_offline.py

@@ -13,6 +13,7 @@ close to the implementation, its just a simple example.  To get in use of this
 """
 
 import json
+from searx.result_types import EngineResults
 
 engine_type = 'offline'
 categories = ['general']
@@ -48,14 +49,14 @@ def init(engine_settings=None):
     )
 
 
-def search(query, request_params):
+def search(query, request_params) -> EngineResults:
     """Query (offline) engine and return results.  Assemble the list of results from
     your local engine.  In this demo engine we ignore the 'query' term, usual
     you would pass the 'query' term to your local engine to filter out the
     results.
 
     """
-    ret_val = []
+    res = EngineResults()
 
     result_list = json.loads(_my_offline_engine)
 
@@ -67,6 +68,6 @@ def search(query, request_params):
             # choose a result template or comment out to use the *default*
             'template': 'key-value.html',
         }
-        ret_val.append(entry)
+        res.append(entry)
 
-    return ret_val
+    return res

+ 12 - 4
searx/engines/demo_online.py

@@ -17,6 +17,7 @@ list in ``settings.yml``:
 
 from json import loads
 from urllib.parse import urlencode
+from searx.result_types import EngineResults
 
 engine_type = 'online'
 send_accept_language_header = True
@@ -70,21 +71,28 @@ def request(query, params):
     return params
 
 
-def response(resp):
+def response(resp) -> EngineResults:
     """Parse out the result items from the response.  In this example we parse the
     response from `api.artic.edu <https://artic.edu>`__ and filter out all
     images.
 
     """
-    results = []
+    res = EngineResults()
     json_data = loads(resp.text)
 
+    res.add(
+        res.types.Answer(
+            answer="this is a dummy answer ..",
+            url="https://example.org",
+        )
+    )
+
     for result in json_data['data']:
 
         if not result['image_id']:
             continue
 
-        results.append(
+        res.append(
             {
                 'url': 'https://artic.edu/artworks/%(id)s' % result,
                 'title': result['title'] + " (%(date_display)s) // %(artist_display)s" % result,
@@ -95,4 +103,4 @@ def response(resp):
             }
         )
 
-    return results
+    return res

+ 6 - 6
searx/engines/dictzone.py

@@ -7,7 +7,7 @@ import urllib.parse
 from lxml import html
 
 from searx.utils import eval_xpath, extract_text
-from searx.result_types import Translations
+from searx.result_types import EngineResults
 from searx.network import get as http_get  # https://github.com/searxng/searxng/issues/762
 
 # about
@@ -43,9 +43,9 @@ def _clean_up_node(node):
             n.getparent().remove(n)
 
 
-def response(resp):
+def response(resp) -> EngineResults:
+    results = EngineResults()
 
-    results = []
     item_list = []
 
     if not resp.ok:
@@ -85,7 +85,7 @@ def response(resp):
 
             synonyms.append(p_text)
 
-        item = Translations.Item(text=text, synonyms=synonyms)
+        item = results.types.Translations.Item(text=text, synonyms=synonyms)
         item_list.append(item)
 
     # the "autotranslate" of dictzone is loaded by the JS from URL:
@@ -98,7 +98,7 @@ def response(resp):
     # works only sometimes?
     autotranslate = http_get(f"{base_url}/trans/{query}/{from_lang}_{to_lang}", timeout=1.0)
     if autotranslate.ok and autotranslate.text:
-        item_list.insert(0, Translations.Item(text=autotranslate.text))
+        item_list.insert(0, results.types.Translations.Item(text=autotranslate.text))
 
-    Translations(results=results, translations=item_list, url=resp.search_params["url"])
+    results.add(results.types.Translations(translations=item_list, url=resp.search_params["url"]))
     return results

+ 13 - 6
searx/engines/duckduckgo.py

@@ -27,7 +27,7 @@ from searx.network import get  # see https://github.com/searxng/searxng/issues/7
 from searx import redisdb
 from searx.enginelib.traits import EngineTraits
 from searx.exceptions import SearxEngineCaptchaException
-from searx.result_types import Answer
+from searx.result_types import EngineResults
 
 if TYPE_CHECKING:
     import logging
@@ -355,12 +355,12 @@ def is_ddg_captcha(dom):
     return bool(eval_xpath(dom, "//form[@id='challenge-form']"))
 
 
-def response(resp):
+def response(resp) -> EngineResults:
+    results = EngineResults()
 
     if resp.status_code == 303:
-        return []
+        return results
 
-    results = []
     doc = lxml.html.fromstring(resp.text)
 
     if is_ddg_captcha(doc):
@@ -398,8 +398,15 @@ def response(resp):
         and "URL Decoded:" not in zero_click
     ):
         current_query = resp.search_params["data"].get("q")
-
-        Answer(results=results, answer=zero_click, url="https://duckduckgo.com/?" + urlencode({"q": current_query}))
+        results.add(
+            results.types.Answer(
+                answer=zero_click,
+                url="https://duckduckgo.com/?"
+                + urlencode(
+                    {"q": current_query},
+                ),
+            )
+        )
 
     return results
 

+ 9 - 4
searx/engines/duckduckgo_definitions.py

@@ -21,7 +21,7 @@ from lxml import html
 from searx.data import WIKIDATA_UNITS
 from searx.utils import extract_text, html_to_text, get_string_replaces_function
 from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
-from searx.result_types import Answer
+from searx.result_types import EngineResults
 
 if TYPE_CHECKING:
     import logging
@@ -76,9 +76,9 @@ def request(query, params):
     return params
 
 
-def response(resp):
+def response(resp) -> EngineResults:
     # pylint: disable=too-many-locals, too-many-branches, too-many-statements
-    results = []
+    results = EngineResults()
 
     search_res = resp.json()
 
@@ -103,7 +103,12 @@ def response(resp):
         answer_type = search_res.get('AnswerType')
         logger.debug('AnswerType="%s" Answer="%s"', answer_type, answer)
         if isinstance(answer, str) and answer_type not in ['calc', 'ip']:
-            Answer(results=results, answer=html_to_text(answer), url=search_res.get('AbstractURL', ''))
+            results.add(
+                results.types.Answer(
+                    answer=html_to_text(answer),
+                    url=search_res.get('AbstractURL', ''),
+                )
+            )
 
     # add infobox
     if 'Definition' in search_res:

+ 9 - 4
searx/engines/google.py

@@ -25,7 +25,7 @@ from searx.locales import language_tag, region_tag, get_official_locales
 from searx.network import get  # see https://github.com/searxng/searxng/issues/762
 from searx.exceptions import SearxEngineCaptchaException
 from searx.enginelib.traits import EngineTraits
-from searx.result_types import Answer
+from searx.result_types import EngineResults
 
 if TYPE_CHECKING:
     import logging
@@ -316,12 +316,12 @@ def _parse_data_images(dom):
     return data_image_map
 
 
-def response(resp):
+def response(resp) -> EngineResults:
     """Get response from google's search request"""
     # pylint: disable=too-many-branches, too-many-statements
     detect_google_sorry(resp)
 
-    results = []
+    results = EngineResults()
 
     # convert the text to dom
     dom = html.fromstring(resp.text)
@@ -332,7 +332,12 @@ def response(resp):
     for item in answer_list:
         for bubble in eval_xpath(item, './/div[@class="nnFGuf"]'):
             bubble.drop_tree()
-        Answer(results=results, answer=extract_text(item), url=(eval_xpath(item, '../..//a/@href') + [None])[0])
+        results.add(
+            results.types.Answer(
+                answer=extract_text(item),
+                url=(eval_xpath(item, '../..//a/@href') + [None])[0],
+            )
+        )
 
     # parse results
 

+ 5 - 5
searx/engines/libretranslate.py

@@ -3,7 +3,7 @@
 
 import random
 import json
-from searx.result_types import Translations
+from searx.result_types import EngineResults
 
 about = {
     "website": 'https://libretranslate.com',
@@ -45,15 +45,15 @@ def request(_query, params):
     return params
 
 
-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
+    results = EngineResults()
 
     json_resp = resp.json()
     text = json_resp.get('translatedText')
     if not text:
         return results
 
-    item = Translations.Item(text=text, examples=json_resp.get('alternatives', []))
-    Translations(results=results, translations=[item])
+    item = results.types.Translations.Item(text=text, examples=json_resp.get('alternatives', []))
+    results.add(results.types.Translations(translations=[item]))
 
     return results

+ 11 - 10
searx/engines/lingva.py

@@ -1,7 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Lingva (alternative Google Translate frontend)"""
 
-from searx.result_types import Translations
+from searx.result_types import EngineResults
 
 about = {
     "website": 'https://lingva.ml',
@@ -23,8 +23,8 @@ def request(_query, params):
     return params
 
 
-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
+    results = EngineResults()
 
     result = resp.json()
     info = result["info"]
@@ -44,7 +44,7 @@ def response(resp):
     for definition in info['definitions']:
         for translation in definition['list']:
             data.append(
-                Translations.Item(
+                results.types.Translations.Item(
                     text=result['translation'],
                     definitions=[translation['definition']] if translation['definition'] else [],
                     examples=[translation['example']] if translation['example'] else [],
@@ -55,19 +55,20 @@ def response(resp):
     for translation in info["extraTranslations"]:
         for word in translation["list"]:
             data.append(
-                Translations.Item(
+                results.types.Translations.Item(
                     text=word['word'],
                     definitions=word['meanings'],
                 )
             )
 
     if not data and result['translation']:
-        data.append(Translations.Item(text=result['translation']))
+        data.append(results.types.Translations.Item(text=result['translation']))
 
     params = resp.search_params
-    Translations(
-        results=results,
-        translations=data,
-        url=f"{url}/{params['from_lang'][1]}/{params['to_lang'][1]}/{params['query']}",
+    results.add(
+        results.types.Translations(
+            translations=data,
+            url=f"{url}/{params['from_lang'][1]}/{params['to_lang'][1]}/{params['query']}",
+        )
     )
     return results

+ 6 - 6
searx/engines/mozhi.py

@@ -5,7 +5,7 @@ import random
 import re
 import urllib.parse
 
-from searx.result_types import Translations
+from searx.result_types import EngineResults
 
 about = {
     "website": 'https://codeberg.org/aryak/mozhi',
@@ -33,11 +33,11 @@ def request(_query, params):
     return params
 
 
-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
+    res = EngineResults()
     translation = resp.json()
 
-    item = Translations.Item(text=translation['translated-text'])
+    item = res.types.Translations.Item(text=translation['translated-text'])
 
     if translation['target_transliteration'] and not re.match(
         re_transliteration_unsupported, translation['target_transliteration']
@@ -57,5 +57,5 @@ def response(resp):
     url = urllib.parse.urlparse(resp.search_params["url"])
     # remove the api path
     url = url._replace(path="", fragment="").geturl()
-    Translations(results=results, translations=[item], url=url)
-    return results
+    res.add(res.types.Translations(translations=[item], url=url))
+    return res

+ 9 - 7
searx/engines/openstreetmap.py

@@ -13,7 +13,7 @@ from flask_babel import gettext
 from searx.data import OSM_KEYS_TAGS, CURRENCIES
 from searx.external_urls import get_external_url
 from searx.engines.wikidata import send_wikidata_query, sparql_string_escape, get_thumbnail
-from searx.result_types import Answer
+from searx.result_types import EngineResults
 
 # about
 about = {
@@ -141,8 +141,8 @@ def request(query, params):
     return params
 
 
-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
+    results = EngineResults()
 
     nominatim_json = resp.json()
     user_language = resp.search_params['language']
@@ -152,10 +152,12 @@ def response(resp):
         l = re.findall(r"\s*(.*)\s+to\s+(.+)", resp.search_params["query"])
     if l:
         point1, point2 = [urllib.parse.quote_plus(p) for p in l[0]]
-        Answer(
-            results=results,
-            answer=gettext('Show route in map ..'),
-            url=f"{route_url}/?point={point1}&point={point2}",
+
+        results.add(
+            results.types.Answer(
+                answer=gettext('Show route in map ..'),
+                url=f"{route_url}/?point={point1}&point={point2}",
+            )
         )
 
     # simplify the code below: make sure extratags is a dictionary

+ 5 - 4
searx/engines/tineye.py

@@ -19,6 +19,8 @@ from urllib.parse import urlencode
 from datetime import datetime
 from flask_babel import gettext
 
+from searx.result_types import EngineResults
+
 if TYPE_CHECKING:
     import logging
 
@@ -154,9 +156,9 @@ def parse_tineye_match(match_json):
     }
 
 
-def response(resp):
+def response(resp) -> EngineResults:
     """Parse HTTP response from TinEye."""
-    results = []
+    results = EngineResults()
 
     # handle the 422 client side errors, and the possible 400 status code error
     if resp.status_code in (400, 422):
@@ -183,8 +185,7 @@ def response(resp):
                 message = ','.join(description)
 
         # see https://github.com/searxng/searxng/pull/1456#issuecomment-1193105023
-        # from searx.result_types import Answer
-        # Answer(results=results, answer=message)
+        # results.add(results.types.Answer(answer=message))
         logger.info(message)
         return results
 

+ 5 - 5
searx/engines/translated.py

@@ -5,7 +5,7 @@
 
 import urllib.parse
 
-from searx.result_types import Translations
+from searx.result_types import EngineResults
 
 # about
 about = {
@@ -37,8 +37,8 @@ def request(query, params):  # pylint: disable=unused-argument
     return params
 
 
-def response(resp):
-    results = []
+def response(resp) -> EngineResults:
+    results = EngineResults()
     data = resp.json()
 
     args = {
@@ -53,7 +53,7 @@ def response(resp):
 
     examples = [f"{m['segment']} : {m['translation']}" for m in data['matches'] if m['translation'] != text]
 
-    item = Translations.Item(text=text, examples=examples)
-    Translations(results=results, translations=[item], url=link)
+    item = results.types.Translations.Item(text=text, examples=examples)
+    results.add(results.types.Translations(translations=[item], url=link))
 
     return results

+ 6 - 5
searx/engines/xpath.py

@@ -74,6 +74,7 @@ from urllib.parse import urlencode
 from lxml import html
 from searx.utils import extract_text, extract_url, eval_xpath, eval_xpath_list
 from searx.network import raise_for_httperror
+from searx.result_types import EngineResults
 
 search_url = None
 """
@@ -261,15 +262,15 @@ def request(query, params):
     return params
 
 
-def response(resp):  # pylint: disable=too-many-branches
-    '''Scrap *results* from the response (see :ref:`result types`).'''
+def response(resp) -> EngineResults:  # pylint: disable=too-many-branches
+    """Scrap *results* from the response (see :ref:`result types`)."""
+    results = EngineResults()
+
     if no_result_for_http_status and resp.status_code in no_result_for_http_status:
-        return []
+        return results
 
     raise_for_httperror(resp)
 
-    results = []
-
     if not resp.text:
         return results
 

+ 4 - 4
searx/plugins/calculator.py

@@ -14,7 +14,7 @@ import babel
 import babel.numbers
 from flask_babel import gettext
 
-from searx.result_types import Answer
+from searx.result_types import EngineResults
 
 name = "Basic Calculator"
 description = gettext("Calculate mathematical expressions via the search bar")
@@ -94,8 +94,8 @@ def timeout_func(timeout, func, *args, **kwargs):
     return ret_val
 
 
-def post_search(request, search) -> list[Answer]:
-    results = []
+def post_search(request, search) -> EngineResults:
+    results = EngineResults()
 
     # only show the result of the expression on the first page
     if search.search_query.pageno > 1:
@@ -135,6 +135,6 @@ def post_search(request, search) -> list[Answer]:
         return results
 
     res = babel.numbers.format_decimal(res, locale=ui_locale)
-    Answer(results=results, answer=f"{search.search_query.query} = {res}")
+    results.add(results.types.Answer(answer=f"{search.search_query.query} = {res}"))
 
     return results

+ 4 - 4
searx/plugins/hash_plugin.py

@@ -9,7 +9,7 @@ import hashlib
 from flask_babel import gettext
 
 from searx.plugins import Plugin, PluginInfo
-from searx.result_types import Answer
+from searx.result_types import EngineResults
 
 if typing.TYPE_CHECKING:
     from searx.search import SearchWithPlugins
@@ -37,9 +37,9 @@ class SXNGPlugin(Plugin):
             preference_section="query",
         )
 
-    def post_search(self, request: "SXNG_Request", search: "SearchWithPlugins") -> list[Answer]:
+    def post_search(self, request: "SXNG_Request", search: "SearchWithPlugins") -> EngineResults:
         """Returns a result list only for the first page."""
-        results = []
+        results = EngineResults()
 
         if search.search_query.pageno > 1:
             return results
@@ -61,6 +61,6 @@ class SXNGPlugin(Plugin):
         f.update(string.encode("utf-8").strip())
         answer = function + " " + gettext("hash digest") + ": " + f.hexdigest()
 
-        Answer(results=results, answer=answer)
+        results.add(results.types.Answer(answer=answer))
 
         return results

+ 5 - 5
searx/plugins/self_info.py

@@ -7,7 +7,7 @@ import re
 from flask_babel import gettext
 
 from searx.botdetection._helpers import get_real_ip
-from searx.result_types import Answer
+from searx.result_types import EngineResults
 
 from . import Plugin, PluginInfo
 
@@ -41,17 +41,17 @@ class SXNGPlugin(Plugin):
             preference_section="query",
         )
 
-    def post_search(self, request: "SXNG_Request", search: "SearchWithPlugins") -> list[Answer]:
+    def post_search(self, request: "SXNG_Request", search: "SearchWithPlugins") -> EngineResults:
         """Returns a result list only for the first page."""
-        results = []
+        results = EngineResults()
 
         if search.search_query.pageno > 1:
             return results
 
         if self.ip_regex.search(search.search_query.query):
-            Answer(results=results, answer=gettext("Your IP is: ") + get_real_ip(request))
+            results.add(results.types.Answer(answer=gettext("Your IP is: ") + get_real_ip(request)))
 
         if self.ua_regex.match(search.search_query.query):
-            Answer(results=results, answer=gettext("Your user-agent is: ") + str(request.user_agent))
+            results.add(results.types.Answer(answer=gettext("Your user-agent is: ") + str(request.user_agent)))
 
         return results

+ 41 - 1
searx/result_types/__init__.py

@@ -9,10 +9,50 @@
    gradually.  For more, please read :ref:`result types`.
 
 """
+# pylint: disable=too-few-public-methods
 
 from __future__ import annotations
 
-__all__ = ["Result", "AnswerSet", "Answer", "Translations"]
+__all__ = ["Result", "EngineResults", "AnswerSet", "Answer", "Translations"]
+
+import abc
+
+from searx import enginelib
 
 from ._base import Result, LegacyResult
 from .answer import AnswerSet, Answer, Translations
+
+
+class ResultList(list, abc.ABC):
+    """Base class of all result lists (abstract)."""
+
+    class types:  # pylint: disable=invalid-name
+        """The collection of result types (which have already been implemented)."""
+
+        Answer = Answer
+        Translations = Translations
+
+    def __init__(self):
+        # pylint: disable=useless-parent-delegation
+        super().__init__()
+
+    def add(self, result: Result):
+        """Add a :py:`Result` item to the result list."""
+        self.append(result)
+
+
+class EngineResults(ResultList):
+    """Result list that should be used by engine developers.  For convenience,
+    engine developers don't need to import types / see :py:obj:`ResultList.types`.
+
+    .. code:: python
+
+       from searx.result_types import EngineResults
+       ...
+       def response(resp) -> EngineResults:
+           res = EngineResults()
+           ...
+           res.add( res.types.Answer(answer="lorem ipsum ..", url="https://example.org") )
+           ...
+           return res
+    """

+ 1 - 24
searx/result_types/_base.py

@@ -53,27 +53,6 @@ class Result(msgspec.Struct, kw_only=True):
     The field is optional and is initialized from the context if necessary.
     """
 
-    results: list = []  # https://jcristharif.com/msgspec/structs.html#default-values
-    """Result list of an :origin:`engine <searx/engines>` response or a
-    :origin:`answerer <searx/answerers>` to which the answer should be added.
-
-    This field is only present for the sake of simplicity.  Typically, the
-    response function of an engine has a result list that is returned at the
-    end. By specifying the result list in the constructor of the result, this
-    result is then immediately added to the list (this parameter does not have
-    another function).
-
-    .. code:: python
-
-       def response(resp):
-           results = []
-           ...
-           Answer(results=results, answer=answer, url=url)
-           ...
-           return results
-
-    """
-
     def normalize_result_fields(self):
         """Normalize a result ..
 
@@ -92,9 +71,7 @@ class Result(msgspec.Struct, kw_only=True):
                 self.url = self.parsed_url.geturl()
 
     def __post_init__(self):
-        """Add *this* result to the result list."""
-
-        self.results.append(self)
+        pass
 
     def __hash__(self) -> int:
         """Generates a hash value that uniquely identifies the content of *this*

+ 4 - 4
tests/unit/engines/test_xpath.py

@@ -70,7 +70,7 @@ class TestXpathEngine(SearxTestCase):
 
         response = mock.Mock(text=self.html, status_code=200)
         results = xpath.response(response)
-        self.assertEqual(type(results), list)
+        self.assertIsInstance(results, list)
         self.assertEqual(len(results), 2)
         self.assertEqual(results[0]['title'], 'Result 1')
         self.assertEqual(results[0]['url'], 'https://result1.com/')
@@ -82,7 +82,7 @@ class TestXpathEngine(SearxTestCase):
         # with cached urls, without results_xpath
         xpath.cached_xpath = '//div[@class="search_result"]//a[@class="cached"]/@href'
         results = xpath.response(response)
-        self.assertEqual(type(results), list)
+        self.assertIsInstance(results, list)
         self.assertEqual(len(results), 2)
         self.assertEqual(results[0]['cached_url'], 'https://cachedresult1.com')
         self.assertEqual(results[1]['cached_url'], 'https://cachedresult2.com')
@@ -112,7 +112,7 @@ class TestXpathEngine(SearxTestCase):
 
         response = mock.Mock(text=self.html, status_code=200)
         results = xpath.response(response)
-        self.assertEqual(type(results), list)
+        self.assertIsInstance(results, list)
         self.assertEqual(len(results), 2)
         self.assertEqual(results[0]['title'], 'Result 1')
         self.assertEqual(results[0]['url'], 'https://result1.com/')
@@ -124,7 +124,7 @@ class TestXpathEngine(SearxTestCase):
         # with cached urls, with results_xpath
         xpath.cached_xpath = './/a[@class="cached"]/@href'
         results = xpath.response(response)
-        self.assertEqual(type(results), list)
+        self.assertIsInstance(results, list)
         self.assertEqual(len(results), 2)
         self.assertEqual(results[0]['cached_url'], 'https://cachedresult1.com')
         self.assertEqual(results[1]['cached_url'], 'https://cachedresult2.com')

+ 2 - 2
tests/unit/test_plugin_calculator.py

@@ -38,7 +38,7 @@ class PluginCalculator(SearxTestCase):
         with self.app.test_request_context():
             sxng_request.preferences = self.pref
             query = "1+1"
-            answer = Answer(results=[], answer=f"{query} = {eval(query)}")  # pylint: disable=eval-used
+            answer = Answer(answer=f"{query} = {eval(query)}")  # pylint: disable=eval-used
 
             search = do_post_search(query, self.storage, pageno=1)
             self.assertIn(answer, search.result_container.answers)
@@ -81,7 +81,7 @@ class PluginCalculator(SearxTestCase):
         with self.app.test_request_context():
             self.pref.parse_dict({"locale": lang})
             sxng_request.preferences = self.pref
-            answer = Answer(results=[], answer=f"{query} = {res}")
+            answer = Answer(answer=f"{query} = {res}")
 
             search = do_post_search(query, self.storage)
             self.assertIn(answer, search.result_container.answers)

+ 2 - 2
tests/unit/test_plugin_hash.py

@@ -51,7 +51,7 @@ class PluginHashTest(SearxTestCase):
     def test_hash_digest_new(self, query: str, res: str):
         with self.app.test_request_context():
             sxng_request.preferences = self.pref
-            answer = Answer(results=[], answer=res)
+            answer = Answer(answer=res)
 
             search = do_post_search(query, self.storage)
             self.assertIn(answer, search.result_container.answers)
@@ -60,7 +60,7 @@ class PluginHashTest(SearxTestCase):
         with self.app.test_request_context():
             sxng_request.preferences = self.pref
             query, res = query_res[0]
-            answer = Answer(results=[], answer=res)
+            answer = Answer(answer=res)
 
             search = do_post_search(query, self.storage, pageno=1)
             self.assertIn(answer, search.result_container.answers)

+ 2 - 2
tests/unit/test_plugin_self_info.py

@@ -39,7 +39,7 @@ class PluginIPSelfInfo(SearxTestCase):
             sxng_request.preferences = self.pref
             sxng_request.remote_addr = "127.0.0.1"
             sxng_request.headers = {"X-Forwarded-For": "1.2.3.4, 127.0.0.1", "X-Real-IP": "127.0.0.1"}  # type: ignore
-            answer = Answer(results=[], answer=gettext("Your IP is: ") + "127.0.0.1")
+            answer = Answer(answer=gettext("Your IP is: ") + "127.0.0.1")
 
             search = do_post_search("ip", self.storage, pageno=1)
             self.assertIn(answer, search.result_container.answers)
@@ -60,7 +60,7 @@ class PluginIPSelfInfo(SearxTestCase):
         with self.app.test_request_context():
             sxng_request.preferences = self.pref
             sxng_request.user_agent = "Dummy agent"  # type: ignore
-            answer = Answer(results=[], answer=gettext("Your user-agent is: ") + "Dummy agent")
+            answer = Answer(answer=gettext("Your user-agent is: ") + "Dummy agent")
 
             search = do_post_search(query, self.storage, pageno=1)
             self.assertIn(answer, search.result_container.answers)

+ 1 - 1
tests/unit/test_plugins.py

@@ -101,6 +101,6 @@ class PluginStorage(SearxTestCase):
             ret = self.storage.on_result(
                 sxng_request,
                 get_search_mock("lorem ipsum", user_plugins=["plg001", "plg002"]),
-                Result(results=[]),
+                Result(),
             )
             self.assertFalse(ret)