
[mod] pylint all files with one profile / drop PYLINT_SEARXNG_DISABLE_OPTION

In the past, some files were linted with the standard profile, others with a
profile in which most messages were switched off, and some files were not
checked at all.

- ``PYLINT_SEARXNG_DISABLE_OPTION`` has been dropped
- the ``# lint: pylint`` marker comment is no longer needed
- the pylint tasks have been reduced from three to two

  1. ./searx/engines -> lint engines with additional builtins
  2. ./searx ./searxng_extra ./tests -> lint all other python files (see the sketch below)
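
A rough sketch of the two resulting pylint calls (the option values here are
illustrative assumptions, not taken from this diff; the authoritative invocation
lives in the ``test.pylint()`` task of the ``manage`` script)::

    # 1. engines: names such as logger or supported_languages are injected into
    #    the engine modules at runtime, so pylint has to treat them as builtins
    pylint --additional-builtins=logger,supported_languages,categories searx/engines

    # 2. all other python files; the engines directory is already covered above
    pylint --ignore=engines searx searxng_extra tests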

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
commit 542f7d0d7b
100 changed files with 154 additions and 283 deletions
  1. .pylintrc (+1 -1)
  2. docs/conf.py (+0 -1)
  3. examples/basic_engine.py (+0 -25)
  4. manage (+0 -21)
  5. searx/__init__.py (+1 -2)
  6. searx/answerers/__init__.py (+10 -5)
  7. searx/answerers/random/answerer.py (+2 -0)
  8. searx/answerers/statistics/answerer.py (+1 -0)
  9. searx/autocomplete.py (+0 -1)
  10. searx/babel_extract.py (+0 -1)
  11. searx/botdetection/__init__.py (+0 -1)
  12. searx/botdetection/_helpers.py (+0 -1)
  13. searx/botdetection/config.py (+0 -1)
  14. searx/botdetection/http_accept.py (+0 -1)
  15. searx/botdetection/http_accept_encoding.py (+0 -1)
  16. searx/botdetection/http_accept_language.py (+0 -1)
  17. searx/botdetection/http_connection.py (+0 -1)
  18. searx/botdetection/http_user_agent.py (+0 -1)
  19. searx/botdetection/ip_limit.py (+0 -1)
  20. searx/botdetection/ip_lists.py (+0 -1)
  21. searx/botdetection/link_token.py (+0 -1)
  22. searx/data/__init__.py (+0 -1)
  23. searx/enginelib/__init__.py (+0 -1)
  24. searx/enginelib/traits.py (+1 -2)
  25. searx/engines/bandcamp.py (+0 -13)
  26. searx/engines/duden.py (+2 -14)
  27. searx/engines/presearch.py (+1 -2)
  28. searx/exceptions.py (+0 -2)
  29. searx/external_bang.py (+2 -1)
  30. searx/external_urls.py (+4 -2)
  31. searx/flaskfix.py (+0 -1)
  32. searx/infopage/__init__.py (+0 -2)
  33. searx/limiter.py (+0 -1)
  34. searx/locales.py (+0 -2)
  35. searx/metrics/__init__.py (+0 -1)
  36. searx/metrics/error_recorder.py (+10 -5)
  37. searx/metrics/models.py (+14 -15)
  38. searx/network/__init__.py (+0 -1)
  39. searx/network/client.py (+0 -1)
  40. searx/network/network.py (+0 -1)
  41. searx/network/raise_for_httperror.py (+0 -1)
  42. searx/plugins/__init__.py (+0 -1)
  43. searx/plugins/ahmia_filter.py (+4 -5)
  44. searx/plugins/hash_plugin.py (+6 -20)
  45. searx/plugins/hostname_replace.py (+5 -2)
  46. searx/plugins/oa_doi_rewrite.py (+6 -3)
  47. searx/plugins/self_info.py (+0 -1)
  48. searx/plugins/tor_check.py (+0 -1)
  49. searx/plugins/tracker_url_remover.py (+5 -18)
  50. searx/preferences.py (+0 -1)
  51. searx/query.py (+2 -1)
  52. searx/redisdb.py (+0 -1)
  53. searx/redislib.py (+0 -1)
  54. searx/results.py (+16 -14)
  55. searx/search/__init__.py (+0 -1)
  56. searx/search/checker/__init__.py (+1 -0)
  57. searx/search/checker/__main__.py (+0 -1)
  58. searx/search/checker/background.py (+1 -3)
  59. searx/search/checker/impl.py (+6 -6)
  60. searx/search/checker/scheduler.py (+0 -1)
  61. searx/search/models.py (+2 -1)
  62. searx/search/processors/__init__.py (+0 -1)
  63. searx/search/processors/abstract.py (+0 -2)
  64. searx/search/processors/offline.py (+0 -2)
  65. searx/search/processors/online.py (+0 -2)
  66. searx/search/processors/online_currency.py (+0 -1)
  67. searx/search/processors/online_dictionary.py (+0 -1)
  68. searx/search/processors/online_url_search.py (+0 -1)
  69. searx/settings_defaults.py (+0 -1)
  70. searx/settings_loader.py (+1 -0)
  71. searx/sxng_locales.py (+1 -1)
  72. searx/unixthreadname.py (+0 -1)
  73. searx/utils.py (+2 -4)
  74. searx/version.py (+1 -1)
  75. searx/webadapter.py (+10 -4)
  76. searx/webapp.py (+0 -2)
  77. searx/webutils.py (+9 -10)
  78. searxng_extra/__init__.py (+2 -0)
  79. searxng_extra/docs_prebuild (+5 -7)
  80. searxng_extra/standalone_searx.py (+0 -3)
  81. searxng_extra/update/__init__.py (+2 -0)
  82. searxng_extra/update/update_ahmia_blacklist.py (+0 -1)
  83. searxng_extra/update/update_currencies.py (+0 -2)
  84. searxng_extra/update/update_engine_descriptions.py (+0 -2)
  85. searxng_extra/update/update_engine_traits.py (+1 -2)
  86. searxng_extra/update/update_external_bangs.py (+0 -1)
  87. searxng_extra/update/update_firefox_version.py (+0 -1)
  88. searxng_extra/update/update_locales.py (+0 -1)
  89. searxng_extra/update/update_osm_keys_tags.py (+0 -1)
  90. searxng_extra/update/update_pygments.py (+0 -1)
  91. searxng_extra/update/update_wikidata_units.py (+0 -3)
  92. setup.py (+1 -1)
  93. tests/__init__.py (+3 -1)
  94. tests/robot/__init__.py (+2 -0)
  95. tests/robot/__main__.py (+0 -1)
  96. tests/robot/test_webapp.py (+0 -1)
  97. tests/unit/__init__.py (+3 -0)
  98. tests/unit/engines/test_command.py (+3 -0)
  99. tests/unit/engines/test_xpath.py (+3 -1)
  100. tests/unit/network/__init__.py (+2 -0)

+ 1 - 1
.pylintrc

@@ -27,7 +27,7 @@ ignore-patterns=
 #init-hook=

 # Use multiple processes to speed up Pylint.
-jobs=1
+jobs=0

 # List of plugins (as comma separated values of python modules names) to load,
 # usually to register additional checkers.
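With ``jobs=0`` pylint auto-detects the number of available CPU cores and runs its checks in parallel, instead of being pinned to a single process.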

+ 0 - 1
docs/conf.py

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: AGPL-3.0-or-later

 import  sys, os

+ 0 - 25
examples/basic_engine.py

@@ -1,25 +0,0 @@
-
-categories = ['general']  # optional
-
-
-def request(query, params):
-    '''pre-request callback
-    params<dict>:
-      method  : POST/GET
-      headers : {}
-      data    : {} # if method == POST
-      url     : ''
-      category: 'search category'
-      pageno  : 1 # number of the requested page
-    '''
-
-    params['url'] = 'https://host/%s' % query
-
-    return params
-
-
-def response(resp):
-    '''post-response callback
-    resp: requests response object
-    '''
-    return [{'url': '', 'title': '', 'content': ''}]

+ 0 - 21
manage

@@ -52,23 +52,6 @@ if [ -S "${_dev_redis_sock}" ] && [ -z "${SEARXNG_REDIS_URL}" ]; then
     export SEARXNG_REDIS_URL="unix://${_dev_redis_sock}?db=0"
 fi

-pylint.FILES() {
-
-    # List files tagged by comment:
-    #
-    #   # lint: pylint
-    #
-    # These py files are linted by test.pylint()
-
-    grep -l -r --include \*.py '^#[[:blank:]]*lint:[[:blank:]]*pylint' searx searxng_extra tests
-    find . -name searxng.msg
-}
-
-PYLINT_FILES=()
-while IFS= read -r line; do
-   PYLINT_FILES+=("$line")
-done <<< "$(pylint.FILES)"
-
 YAMLLINT_FILES=()
 while IFS= read -r line; do
    YAMLLINT_FILES+=("$line")
@@ -78,10 +61,6 @@ RST_FILES=(
    'README.rst'
 )

-PYLINT_SEARXNG_DISABLE_OPTION="\
-I,C,R,\
-W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401,\
-E1136"
 help() {
     nvm.help
     cat <<EOF

+ 1 - 2
searx/__init__.py

@@ -1,6 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-# pylint: disable=missing-module-docstring
+# pylint: disable=missing-module-docstring, cyclic-import

 import sys
 import os

+ 10 - 5
searx/answerers/__init__.py

@@ -1,25 +1,30 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
+
+import sys
 from os import listdir
 from os.path import realpath, dirname, join, isdir
-from searx.utils import load_module
 from collections import defaultdict

+from searx.utils import load_module

 answerers_dir = dirname(realpath(__file__))


 def load_answerers():
-    answerers = []
+    answerers = []  # pylint: disable=redefined-outer-name
+
     for filename in listdir(answerers_dir):
         if not isdir(join(answerers_dir, filename)) or filename.startswith('_'):
             continue
         module = load_module('answerer.py', join(answerers_dir, filename))
-        if not hasattr(module, 'keywords') or not isinstance(module.keywords, tuple) or not len(module.keywords):
-            exit(2)
+        if not hasattr(module, 'keywords') or not isinstance(module.keywords, tuple) or not module.keywords:
+            sys.exit(2)
         answerers.append(module)
     return answerers


-def get_answerers_by_keywords(answerers):
+def get_answerers_by_keywords(answerers):  # pylint:disable=redefined-outer-name
     by_keyword = defaultdict(list)
     for answerer in answerers:
         for keyword in answerer.keywords:

+ 2 - 0
searx/answerers/random/answerer.py

@@ -1,3 +1,5 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
 import hashlib
 import random
 import string

+ 1 - 0
searx/answerers/statistics/answerer.py

@@ -1,3 +1,4 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
 from functools import reduce
 from operator import mul


+ 0 - 1
searx/autocomplete.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """This module implements functions needed for the autocompleter.

 """

+ 0 - 1
searx/babel_extract.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """This module implements the :origin:`searxng_msg <babel.cfg>` extractor to
 extract messages from:

+ 0 - 1
searx/botdetection/__init__.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """.. _botdetection src:

 Implementations used for bot detection.

+ 0 - 1
searx/botdetection/_helpers.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring, invalid-name
 from __future__ import annotations

+ 0 - 1
searx/botdetection/config.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Configuration class :py:class:`Config` with deep-update, schema validation
 and deprecated names.

+ 0 - 1
searx/botdetection/http_accept.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 Method ``http_accept``
 ----------------------

+ 0 - 1
searx/botdetection/http_accept_encoding.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 Method ``http_accept_encoding``
 -------------------------------

+ 0 - 1
searx/botdetection/http_accept_language.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 Method ``http_accept_language``
 -------------------------------

+ 0 - 1
searx/botdetection/http_connection.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 Method ``http_connection``
 --------------------------

+ 0 - 1
searx/botdetection/http_user_agent.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 Method ``http_user_agent``
 --------------------------

+ 0 - 1
searx/botdetection/ip_limit.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """.. _botdetection.ip_limit:

 Method ``ip_limit``

+ 0 - 1
searx/botdetection/ip_lists.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """.. _botdetection.ip_lists:

 Method ``ip_lists``

+ 0 - 1
searx/botdetection/link_token.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 Method ``link_token``
 ---------------------

+ 0 - 1
searx/data/__init__.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """This module holds the *data* created by::

   make data.all

+ 0 - 1
searx/enginelib/__init__.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Implementations of the framework for the SearXNG engines.

 .. hint::

+ 1 - 2
searx/enginelib/traits.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Engine's traits are fetched from the origin engines and stored in a JSON file
 in the *data folder*.  Most often traits are languages and region codes and
 their mapping from SearXNG's representation to the representation in the origin
@@ -167,7 +166,7 @@ class EngineTraits:
         #   - name: google italian
         #     engine: google
         #     language: it
-        #     region: it-IT
+        #     region: it-IT                                      # type: ignore

         traits = self.copy()


+ 0 - 13
searx/engines/bandcamp.py

@@ -37,16 +37,6 @@ iframe_src = "https://bandcamp.com/EmbeddedPlayer/{type}={result_id}/size=large/


 def request(query, params):
-    '''pre-request callback
-
-    params<dict>:
-      method  : POST/GET
-      headers : {}
-      data    : {} # if method == POST
-      url     : ''
-      category: 'search category'
-      pageno  : 1 # number of the requested page
-    '''

     search_path = search_string.format(query=urlencode({'q': query}), page=params['pageno'])
     params['url'] = base_url + search_path
@@ -54,10 +44,7 @@ def request(query, params):


 def response(resp):
-    '''post-response callback

-    resp: requests response object
-    '''
     results = []
     dom = html.fromstring(resp.text)


+ 2 - 14
searx/engines/duden.py

@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""
- Duden
+"""Duden
+
 """

 import re
@@ -29,15 +29,6 @@ search_url = base_url + 'suchen/dudenonline/{query}?search_api_fulltext=&page={o


 def request(query, params):
-    '''pre-request callback
-    params<dict>:
-      method  : POST/GET
-      headers : {}
-      data    : {} # if method == POST
-      url     : ''
-      category: 'search category'
-      pageno  : 1 # number of the requested page
-    '''

     offset = params['pageno'] - 1
     if offset == 0:
@@ -53,9 +44,6 @@ def request(query, params):


 def response(resp):
-    '''post-response callback
-    resp: requests response object
-    '''
     results = []

     if resp.status_code == 404:

+ 1 - 2
searx/engines/presearch.py

@@ -120,8 +120,7 @@ def _get_request_id(query, params):
         l = locales.get_locale(params['searxng_locale'])

         # Presearch narrows down the search by region.  In SearXNG when the user
-        # does not set a region (e.g. 'en-CA' / canada) we cannot hand over a
-        # region.
+        # does not set a region (e.g. 'en-CA' / canada) we cannot hand over a region.

         # We could possibly use searx.locales.get_official_locales to determine
         # in which regions this language is an official one, but then we still

+ 0 - 2
searx/exceptions.py

@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Exception types raised by SearXNG modules.
 """


+ 2 - 1
searx/external_bang.py

@@ -1,4 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

 from urllib.parse import quote_plus, urlparse
 from searx.data import EXTERNAL_BANGS
@@ -53,7 +54,7 @@ def resolve_bang_definition(bang_definition, query):
     return (url, rank)


-def get_bang_definition_and_autocomplete(bang, external_bangs_db=None):
+def get_bang_definition_and_autocomplete(bang, external_bangs_db=None):  # pylint: disable=invalid-name
     if external_bangs_db is None:
         external_bangs_db = EXTERNAL_BANGS


+ 4 - 2
searx/external_urls.py

@@ -1,3 +1,6 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
+
 import math

 from searx.data import EXTERNAL_URLS
@@ -46,8 +49,7 @@ def get_external_url(url_id, item_id, alternative="default"):
         if url_template is not None:
             if item_id is not None:
                 return url_template.replace('$1', item_id)
-            else:
-                return url_template
+            return url_template
     return None


+ 0 - 1
searx/flaskfix.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring

 from urllib.parse import urlparse

+ 0 - 2
searx/infopage/__init__.py

@@ -1,6 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-# pyright: basic
 """Render SearXNG instance documentation.

 Usage in a Flask app route:

+ 0 - 1
searx/limiter.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Bot protection / IP rate limitation.  The intention of rate limitation is to
 limit suspicious requests from an IP.  The motivation behind this is the fact
 that SearXNG passes through requests from bots and is thus classified as a bot

+ 0 - 2
searx/locales.py

@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 SearXNG’s locale data
 =====================

+ 0 - 1
searx/metrics/__init__.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring

 import typing

+ 10 - 5
searx/metrics/error_recorder.py

@@ -1,3 +1,6 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring, invalid-name
+
 import typing
 import inspect
 from json import JSONDecodeError
@@ -16,7 +19,7 @@ from searx.engines import engines
 errors_per_engines = {}


-class ErrorContext:
+class ErrorContext:  # pylint: disable=missing-class-docstring

     __slots__ = (
         'filename',
@@ -29,7 +32,9 @@ class ErrorContext:
         'secondary',
     )

-    def __init__(self, filename, function, line_no, code, exception_classname, log_message, log_parameters, secondary):
+    def __init__(  # pylint: disable=too-many-arguments
+        self, filename, function, line_no, code, exception_classname, log_message, log_parameters, secondary
+    ):
         self.filename = filename
         self.function = function
         self.line_no = line_no
@@ -39,7 +44,7 @@ class ErrorContext:
         self.log_parameters = log_parameters
         self.secondary = secondary

-    def __eq__(self, o) -> bool:
+    def __eq__(self, o) -> bool:  # pylint: disable=invalid-name
         if not isinstance(o, ErrorContext):
             return False
         return (
@@ -109,7 +114,7 @@ def get_request_exception_messages(
     status_code = None
     reason = None
     hostname = None
-    if hasattr(exc, '_request') and exc._request is not None:
+    if hasattr(exc, '_request') and exc._request is not None:  # pylint: disable=protected-access
         # exc.request is property that raise an RuntimeException
         # if exc._request is not defined.
         url = exc.request.url
@@ -123,7 +128,7 @@ def get_request_exception_messages(
     return (status_code, reason, hostname)


-def get_messages(exc, filename) -> typing.Tuple:
+def get_messages(exc, filename) -> typing.Tuple:  # pylint: disable=too-many-return-statements
     if isinstance(exc, JSONDecodeError):
         return (exc.msg,)
     if isinstance(exc, TypeError):

+ 14 - 15
searx/metrics/models.py

@@ -1,4 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

 import decimal
 import threading
@@ -11,7 +12,7 @@ __all__ = ["Histogram", "HistogramStorage", "CounterStorage"]
 logger = logger.getChild('searx.metrics')


-class Histogram:
+class Histogram:  # pylint: disable=missing-class-docstring

     _slots__ = '_lock', '_size', '_sum', '_quartiles', '_count', '_width'

@@ -25,11 +26,11 @@ class Histogram:

     def observe(self, value):
         q = int(value / self._width)
-        if q < 0:
-            """Value below zero is ignored"""
+        if q < 0:  # pylint: disable=consider-using-max-builtin
+            # Value below zero is ignored
             q = 0
         if q >= self._size:
-            """Value above the maximum is replaced by the maximum"""
+            # Value above the maximum is replaced by the maximum
             q = self._size - 1
         with self._lock:
             self._quartiles[q] += 1
@@ -53,8 +54,7 @@ class Histogram:
         with self._lock:
             if self._count != 0:
                 return self._sum / self._count
-            else:
-                return 0
+            return 0

     @property
     def quartile_percentage(self):
@@ -62,8 +62,7 @@ class Histogram:
         with self._lock:
             if self._count > 0:
                 return [int(q * 100 / self._count) for q in self._quartiles]
-            else:
-                return self._quartiles
+            return self._quartiles

     @property
     def quartile_percentage_map(self):
@@ -75,7 +74,7 @@ class Histogram:
         with self._lock:
             if self._count > 0:
                 for y in self._quartiles:
-                    yp = int(y * 100 / self._count)
+                    yp = int(y * 100 / self._count)  # pylint: disable=invalid-name
                     if yp != 0:
                         result[round(float(x), width_exponent)] = yp
                     x += width
@@ -100,7 +99,7 @@ class Histogram:
         return "Histogram<avg: " + str(self.average) + ", count: " + str(self._count) + ">"


-class HistogramStorage:
+class HistogramStorage:  # pylint: disable=missing-class-docstring

     __slots__ = 'measures', 'histogram_class'

@@ -121,12 +120,12 @@ class HistogramStorage:

     def dump(self):
         logger.debug("Histograms:")
-        ks = sorted(self.measures.keys(), key='/'.join)
+        ks = sorted(self.measures.keys(), key='/'.join)  # pylint: disable=invalid-name
        for k in ks:
            logger.debug("- %-60s %s", '|'.join(k), self.measures[k])


-class CounterStorage:
+class CounterStorage:  # pylint: disable=missing-class-docstring

     __slots__ = 'counters', 'lock'

@@ -151,17 +150,17 @@ class CounterStorage:

     def dump(self):
         with self.lock:
-            ks = sorted(self.counters.keys(), key='/'.join)
+            ks = sorted(self.counters.keys(), key='/'.join)  # pylint: disable=invalid-name
         logger.debug("Counters:")
         for k in ks:
             logger.debug("- %-60s %s", '|'.join(k), self.counters[k])


-class VoidHistogram(Histogram):
+class VoidHistogram(Histogram):  # pylint: disable=missing-class-docstring
     def observe(self, value):
         pass


-class VoidCounterStorage(CounterStorage):
+class VoidCounterStorage(CounterStorage):  # pylint: disable=missing-class-docstring
     def add(self, value, *args):
         pass

+ 0 - 1
searx/network/__init__.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring, global-statement

 import asyncio

+ 0 - 1
searx/network/client.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring, global-statement

 import asyncio

+ 0 - 1
searx/network/network.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=global-statement
 # pylint: disable=missing-module-docstring, missing-class-docstring


+ 0 - 1
searx/network/raise_for_httperror.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Raise exception for an HTTP response is an error.

 """

+ 0 - 1
searx/plugins/__init__.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring, missing-class-docstring

 import sys

+ 4 - 5
searx/plugins/ahmia_filter.py

@@ -1,6 +1,5 @@
-'''
- SPDX-License-Identifier: AGPL-3.0-or-later
-'''
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

 from hashlib import md5
 from searx.data import ahmia_blacklist_loader
@@ -13,14 +12,14 @@ preference_section = 'onions'
 ahmia_blacklist = None


-def on_result(request, search, result):
+def on_result(_request, _search, result):
     if not result.get('is_onion') or not result.get('parsed_url'):
         return True
     result_hash = md5(result['parsed_url'].hostname.encode()).hexdigest()
     return result_hash not in ahmia_blacklist


-def init(app, settings):
+def init(_app, settings):
     global ahmia_blacklist  # pylint: disable=global-statement
     if not settings['outgoing']['using_tor_proxy']:
         # disable the plugin

+ 6 - 20
searx/plugins/hash_plugin.py

@@ -1,25 +1,11 @@
-'''
-searx is free software: you can redistribute it and/or modify
-it under the terms of the GNU Affero General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

-searx is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-GNU Affero General Public License for more details.
-
-You should have received a copy of the GNU Affero General Public License
-along with searx. If not, see < http://www.gnu.org/licenses/ >.
-
-(C) 2015 by Adam Tauber, <asciimoo@gmail.com>
-(C) 2018, 2020 by Vaclav Zouzalik
-'''
-
-from flask_babel import gettext
 import hashlib
 import re

+from flask_babel import gettext
+
 name = "Hash plugin"
 description = gettext("Converts strings to different hash digests.")
 default_on = True
@@ -30,7 +16,7 @@ query_examples = 'sha512 The quick brown fox jumps over the lazy dog'
 parser_re = re.compile('(md5|sha1|sha224|sha256|sha384|sha512) (.*)', re.I)


-def post_search(request, search):
+def post_search(_request, search):
     # process only on first page
     if search.search_query.pageno > 1:
         return True
@@ -40,7 +26,7 @@ def post_search(request, search):
         return True

     function, string = m.groups()
-    if string.strip().__len__() == 0:
+    if not string.strip():
         # end if the string is empty
         return True


+ 5 - 2
searx/plugins/hostname_replace.py

@@ -1,10 +1,13 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

 import re
 from urllib.parse import urlunparse, urlparse
+
+from flask_babel import gettext
+
 from searx import settings
 from searx.plugins import logger
-from flask_babel import gettext

 name = gettext('Hostname replace')
 description = gettext('Rewrite result hostnames or remove results based on the hostname')
@@ -20,7 +23,7 @@ parsed = 'parsed_url'
 _url_fields = ['iframe_src', 'audio_src']


-def on_result(request, search, result):
+def on_result(_request, _search, result):

     for pattern, replacement in replacements.items():


+ 6 - 3
searx/plugins/oa_doi_rewrite.py

@@ -1,9 +1,12 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
+
+import re
 from urllib.parse import urlparse, parse_qsl
+
 from flask_babel import gettext
-import re
 from searx import settings

-
 regex = re.compile(r'10\.\d{4,9}/[^\s]+')

 name = gettext('Open Access DOI rewrite')
@@ -31,7 +34,7 @@ def get_doi_resolver(preferences):
     return doi_resolvers[selected_resolver]


-def on_result(request, search, result):
+def on_result(request, _search, result):
     if 'parsed_url' not in result:
         return True


+ 0 - 1
searx/plugins/self_info.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring,invalid-name

 import re

+ 0 - 1
searx/plugins/tor_check.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """A plugin to check if the ip address of the request is a Tor exit-node if the
 user searches for ``tor-check``.  It fetches the tor exit node list from
 https://check.torproject.org/exit-addresses and parses all the IPs into a list,

+ 5 - 18
searx/plugins/tracker_url_remover.py

@@ -1,24 +1,11 @@
-'''
-searx is free software: you can redistribute it and/or modify
-it under the terms of the GNU Affero General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

-searx is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-GNU Affero General Public License for more details.
-
-You should have received a copy of the GNU Affero General Public License
-along with searx. If not, see < http://www.gnu.org/licenses/ >.
-
-(C) 2015 by Adam Tauber, <asciimoo@gmail.com>
-'''
-
-from flask_babel import gettext
 import re
 from urllib.parse import urlunparse, parse_qsl, urlencode

+from flask_babel import gettext
+
 regexes = {
     re.compile(r'utm_[^&]+'),
     re.compile(r'(wkey|wemail)[^&]*'),
@@ -32,7 +19,7 @@ default_on = True
 preference_section = 'privacy'


-def on_result(request, search, result):
+def on_result(_request, _search, result):
     if 'parsed_url' not in result:
         return True


+ 0 - 1
searx/preferences.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Searx preferences implementation.
 """


+ 2 - 1
searx/query.py

@@ -1,4 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=invalid-name, missing-module-docstring, missing-class-docstring

 from abc import abstractmethod, ABC
 import re
@@ -191,7 +192,7 @@ class BangParser(QueryPartParser):

     def _parse(self, value):
         # check if prefix is equal with engine shortcut
-        if value in engine_shortcuts:
+        if value in engine_shortcuts:  # pylint: disable=consider-using-get
             value = engine_shortcuts[value]

         # check if prefix is equal with engine name

+ 0 - 1
searx/redisdb.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Implementation of the redis client (redis-py_).

 .. _redis-py: https://github.com/redis/redis-py

+ 0 - 1
searx/redislib.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """A collection of convenient functions and redis/lua scripts.

 This code was partial inspired by the `Bullet-Proofing Lua Scripts in RedisPy`_

+ 16 - 14
searx/results.py

@@ -1,3 +1,6 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
+
 import re
 from collections import defaultdict
 from operator import itemgetter
@@ -19,8 +22,7 @@ WHITESPACE_REGEX = re.compile('( |\t|\n)+', re.M | re.U)
 def result_content_len(content):
     if isinstance(content, str):
         return len(CONTENT_LEN_IGNORED_CHARS_REGEX.sub('', content))
-    else:
-        return 0
+    return 0


 def compare_urls(url_a, url_b):
@@ -56,7 +58,7 @@ def compare_urls(url_a, url_b):
     return unquote(path_a) == unquote(path_b)


-def merge_two_infoboxes(infobox1, infobox2):
+def merge_two_infoboxes(infobox1, infobox2):  # pylint: disable=too-many-branches, too-many-statements
     # get engines weights
     if hasattr(engines[infobox1['engine']], 'weight'):
         weight1 = engines[infobox1['engine']].weight
@@ -140,13 +142,13 @@ def result_score(result):
     return sum((occurrences * weight) / position for position in result['positions'])


-class Timing(NamedTuple):
+class Timing(NamedTuple):  # pylint: disable=missing-class-docstring
     engine: str
     total: float
     load: float


-class UnresponsiveEngine(NamedTuple):
+class UnresponsiveEngine(NamedTuple):  # pylint: disable=missing-class-docstring
     engine: str
     error_type: str
     suspended: bool
@@ -189,7 +191,7 @@ class ResultContainer:
         self.on_result = lambda _: True
         self._lock = RLock()

-    def extend(self, engine_name, results):
+    def extend(self, engine_name, results):  # pylint: disable=too-many-branches
         if self._closed:
             return

@@ -314,11 +316,11 @@ class ResultContainer:
                 if result_template != 'images.html':
                     # not an image, same template, same url : it's a duplicate
                     return merged_result
-                else:
-                    # it's an image
-                    # it's a duplicate if the parsed_url, template and img_src are different
-                    if result.get('img_src', '') == merged_result.get('img_src', ''):
-                        return merged_result
+
+                # it's an image
+                # it's a duplicate if the parsed_url, template and img_src are different
+                if result.get('img_src', '') == merged_result.get('img_src', ''):
+                    return merged_result
         return None

     def __merge_duplicated_http_result(self, duplicated, result, position):
@@ -371,11 +373,11 @@ class ResultContainer:
         categoryPositions = {}

         for res in results:
-            # FIXME : handle more than one category per engine
+            # do we need to handle more than one category per engine?
             engine = engines[res['engine']]
             res['category'] = engine.categories[0] if len(engine.categories) > 0 else ''

-            # FIXME : handle more than one category per engine
+            # do we need to handle more than one category per engine?
             category = (
                 res['category']
                 + ':'
@@ -397,7 +399,7 @@ class ResultContainer:

                 # update every index after the current one
                 # (including the current one)
-                for k in categoryPositions:
+                for k in categoryPositions:  # pylint: disable=consider-using-dict-items
                     v = categoryPositions[k]['index']
                     if v >= index:
                         categoryPositions[k]['index'] = v + 1

+ 0 - 1
searx/search/__init__.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring, too-few-public-methods

 import threading

+ 1 - 0
searx/search/checker/__init__.py

@@ -1,4 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

 from .impl import Checker
 from .background import initialize, get_result

+ 0 - 1
searx/search/checker/__main__.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring

 import sys

+ 1 - 3
searx/search/checker/background.py

@@ -1,7 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-# pylint: disable=missing-module-docstring
-# pyright: basic
+# pylint: disable=missing-module-docstring, cyclic-import

 import json
 import time

+ 6 - 6
searx/search/checker/impl.py

@@ -1,4 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring, invalid-name

 import gc
 import typing
@@ -149,7 +150,7 @@ def _search_query_diff(
     return (common, diff)


-class TestResults:
+class TestResults:  # pylint: disable=missing-class-docstring

     __slots__ = 'errors', 'logs', 'languages'

@@ -181,7 +182,7 @@ class TestResults:
                 yield (test_name, error)


-class ResultContainerTests:
+class ResultContainerTests:  # pylint: disable=missing-class-docstring

     __slots__ = 'test_name', 'search_query', 'result_container', 'languages', 'stop_test', 'test_results'

@@ -210,7 +211,6 @@ class ResultContainerTests:
         if langStr:
             self.languages.add(langStr)
             self.test_results.add_language(langStr)
-        return None

     def _check_result(self, result):
         if not _check_no_html(result.get('title', '')):
@@ -319,7 +319,7 @@ class ResultContainerTests:
         self._record_error(('{!r} not found in the title'.format(title)))


-class CheckerTests:
+class CheckerTests:  # pylint: disable=missing-class-docstring, too-few-public-methods

     __slots__ = 'test_results', 'test_name', 'result_container_tests_list'

@@ -351,7 +351,7 @@ class CheckerTests:
                         )


-class Checker:
+class Checker:  # pylint: disable=missing-class-docstring

     __slots__ = 'processor', 'tests', 'test_results'

@@ -377,7 +377,7 @@ class Checker:
             p.append(l)

         for kwargs in itertools.product(*p):
-            kwargs = {k: v for k, v in kwargs}
+            kwargs = dict(kwargs)
             query = kwargs['query']
             params = dict(kwargs)
             del params['query']

+ 0 - 1
searx/search/checker/scheduler.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring
 """Lame scheduler which use Redis as a source of truth:
 * the Redis key SearXNG_checker_next_call_ts contains the next time the embedded checker should run.

+ 2 - 1
searx/search/models.py

@@ -1,4 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

 import typing
 import babel
@@ -52,7 +53,7 @@ class SearchQuery:
         external_bang: typing.Optional[str] = None,
         engine_data: typing.Optional[typing.Dict[str, str]] = None,
         redirect_to_first_result: typing.Optional[bool] = None,
-    ):
+    ):  # pylint:disable=too-many-arguments
         self.query = query
         self.engineref_list = engineref_list
         self.lang = lang

+ 0 - 1
searx/search/processors/__init__.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint

 """Implement request processors used by engine-types.


+ 0 - 2
searx/search/processors/abstract.py

@@ -1,6 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-
 """Abstract base classes for engine request processors.

 """

+ 0 - 2
searx/search/processors/offline.py

@@ -1,6 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-
 """Processors for engine-type: ``offline``

 """

+ 0 - 2
searx/search/processors/online.py

@@ -1,6 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-
 """Processors for engine-type: ``online``

 """

+ 0 - 1
searx/search/processors/online_currency.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Processors for engine-type: ``online_currency``

 """

+ 0 - 1
searx/search/processors/online_dictionary.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Processors for engine-type: ``online_dictionary``

 """

+ 0 - 1
searx/search/processors/online_url_search.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Processors for engine-type: ``online_url_search``

 """

+ 0 - 1
searx/settings_defaults.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Implementation of the default settings.

 """

+ 1 - 0
searx/settings_loader.py

@@ -1,4 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring, too-many-branches

 from typing import Optional
 from os import environ

+ 1 - 1
searx/sxng_locales.py

@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: AGPL-3.0-or-later
 '''List of SearXNG's locale codes.

 .. hint::

+ 0 - 1
searx/unixthreadname.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 if setproctitle is installed.
 set Unix thread name with the Python thread name

+ 2 - 4
searx/utils.py

@@ -1,6 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-# pyright: basic
 """Utility functions for the engines

 """
@@ -56,7 +54,7 @@ _STORAGE_UNIT_VALUE: Dict[str, int] = {
 _XPATH_CACHE: Dict[str, XPath] = {}
 _LANG_TO_LC_CACHE: Dict[str, Dict[str, str]] = {}

-_FASTTEXT_MODEL: Optional["fasttext.FastText._FastText"] = None
+_FASTTEXT_MODEL: Optional["fasttext.FastText._FastText"] = None  # type: ignore
 """fasttext model to predict laguage of a search term"""

 SEARCH_LANGUAGE_CODES = frozenset([searxng_locale[0].split('-')[0] for searxng_locale in sxng_locales])
@@ -595,7 +593,7 @@ def eval_xpath_getindex(elements: ElementBase, xpath_spec: XPathSpecType, index:
     return default


-def _get_fasttext_model() -> "fasttext.FastText._FastText":
+def _get_fasttext_model() -> "fasttext.FastText._FastText":  # type: ignore
     global _FASTTEXT_MODEL  # pylint: disable=global-statement
     if _FASTTEXT_MODEL is None:
         import fasttext  # pylint: disable=import-outside-toplevel

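
The two ``searx/utils.py`` hunks above combine a lazily imported optional dependency with a module-level cache; the ``# type: ignore`` keeps type checkers from trying to resolve the string annotation when the package is not installed. A rough sketch of the same pattern (hypothetical ``heavylib`` package, not the SearXNG code):

    from typing import Optional

    _MODEL: Optional["heavylib.Model"] = None  # type: ignore  # string annotation; heavylib may be absent


    def _get_model() -> "heavylib.Model":  # type: ignore
        """Load the model once and reuse it on later calls."""
        global _MODEL  # pylint: disable=global-statement
        if _MODEL is None:
            import heavylib  # pylint: disable=import-outside-toplevel

            _MODEL = heavylib.Model()
        return _MODEL
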
+ 1 - 1
searx/version.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=,missing-module-docstring,missing-class-docstring
 
 import os
@@ -108,6 +107,7 @@ if __name__ == "__main__":
     if len(sys.argv) >= 2 and sys.argv[1] == "freeze":
         # freeze the version (to create an archive outside a git repository)
         python_code = f"""# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
 # this file is generated automatically by searx/version.py
 
 VERSION_STRING = "{VERSION_STRING}"

+ 10 - 4
searx/webadapter.py

@@ -1,3 +1,6 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
+
 from collections import defaultdict
 from typing import Dict, List, Optional, Tuple
 from searx.exceptions import SearxParameterException
@@ -10,7 +13,7 @@ from searx.utils import detect_language
 
 
 # remove duplicate queries.
-# FIXME: does not fix "!music !soundcloud", because the categories are 'none' and 'music'
+# HINT: does not fix "!music !soundcloud", because the categories are 'none' and 'music'
 def deduplicate_engineref_list(engineref_list: List[EngineRef]) -> List[EngineRef]:
     engineref_dict = {q.category + '|' + q.name: q for q in engineref_list}
     return list(engineref_dict.values())
@@ -55,7 +58,7 @@ def parse_lang(preferences: Preferences, form: Dict[str, str], raw_text_query: R
         return preferences.get_value('language')
     # get language
     # set specific language if set on request, query or preferences
-    # TODO support search with multiple languages
+    # search with multiple languages is not supported (by most engines)
     if len(raw_text_query.languages):
         query_lang = raw_text_query.languages[-1]
     elif 'language' in form:
@@ -153,7 +156,10 @@ def get_selected_categories(preferences: Preferences, form: Optional[Dict[str, s
     return selected_categories
 
 
-def get_engineref_from_category_list(category_list: List[str], disabled_engines: List[str]) -> List[EngineRef]:
+def get_engineref_from_category_list(  # pylint: disable=invalid-name
+    category_list: List[str],
+    disabled_engines: List[str],
+) -> List[EngineRef]:
     result = []
     for categ in category_list:
         result.extend(
@@ -172,7 +178,7 @@ def parse_generic(preferences: Preferences, form: Dict[str, str], disabled_engin
     explicit_engine_list = False
     if not is_locked('categories'):
         # parse the form only if the categories are not locked
-        for pd_name, pd in form.items():
+        for pd_name, pd in form.items():  # pylint: disable=invalid-name
             if pd_name == 'engines':
                 pd_engines = [
                     EngineRef(engine_name, engines[engine_name].categories[0])

+ 0 - 2
searx/webapp.py

@@ -1,7 +1,5 @@
 #!/usr/bin/env python
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-# pyright: basic
 """WebbApp
 
 """

+ 9 - 10
searx/webutils.py

@@ -1,4 +1,6 @@
-# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring, invalid-name
+
 from __future__ import annotations
 
 import os
@@ -108,7 +110,7 @@ class CSVWriter:
             self.writerow(row)
 
 
-def write_csv_response(csv: CSVWriter, rc: ResultContainer) -> None:
+def write_csv_response(csv: CSVWriter, rc: ResultContainer) -> None:  # pylint: disable=redefined-outer-name
     """Write rows of the results to a query (``application/csv``) into a CSV
     table (:py:obj:`CSVWriter`).  First line in the table contain the column
     names.  The column "type" specifies the type, the following types are
@@ -143,7 +145,7 @@ def write_csv_response(csv: CSVWriter, rc: ResultContainer) -> None:
         csv.writerow([row.get(key, '') for key in keys])
 
 
-class JSONEncoder(json.JSONEncoder):
+class JSONEncoder(json.JSONEncoder):  # pylint: disable=missing-class-docstring
     def default(self, o):
         if isinstance(o, datetime):
             return o.isoformat()
@@ -226,8 +228,7 @@ def prettify_url(url, max_length=74):
     if len(url) > max_length:
         chunk_len = int(max_length / 2 + 1)
         return '{0}[...]{1}'.format(url[:chunk_len], url[-chunk_len:])
-    else:
-        return url
+    return url
 
 
 def contains_cjko(s: str) -> bool:
@@ -269,8 +270,7 @@ def regex_highlight_cjk(word: str) -> str:
     rword = re.escape(word)
     if contains_cjko(rword):
         return fr'({rword})'
-    else:
-        return fr'\b({rword})(?!\w)'
+    return fr'\b({rword})(?!\w)'
 
 
 def highlight_content(content, query):
@@ -279,7 +279,6 @@ def highlight_content(content, query):
         return None
 
     # ignoring html contents
-    # TODO better html content detection
     if content.find('<') != -1:
         return content
 
@@ -353,8 +352,8 @@ def group_engines_in_tab(engines: Iterable[Engine]) -> List[Tuple[str, Iterable[
     sorted_groups = sorted(((name, list(engines)) for name, engines in subgroups), key=group_sort_key)
 
     ret_val = []
-    for groupname, engines in sorted_groups:
+    for groupname, _engines in sorted_groups:
         group_bang = '!' + groupname.replace(' ', '_') if groupname != NO_SUBGROUPING else ''
-        ret_val.append((groupname, group_bang, sorted(engines, key=engine_sort_key)))
+        ret_val.append((groupname, group_bang, sorted(_engines, key=engine_sort_key)))
 
     return ret_val

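
The ``else:`` removals in the two ``prettify_url`` and ``regex_highlight_cjk`` hunks above follow pylint's ``no-else-return`` advice: once a branch returns, the ``else`` only adds an indentation level. A minimal before/after sketch (hypothetical helper, not from this diff):

    def clamp_before(value, limit):
        """Flagged by pylint's no-else-return: the else branch is redundant."""
        if value > limit:
            return limit
        else:  # pylint suggests dropping this level
            return value


    def clamp_after(value, limit):
        """Same behaviour; the early return makes the else unnecessary."""
        if value > limit:
            return limit
        return value
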
+ 2 - 0
searxng_extra/__init__.py

@@ -0,0 +1,2 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

+ 5 - 7
searxng_extra/docs_prebuild

@@ -1,7 +1,5 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
-
 """Script that implements some prebuild tasks needed by target docs.prebuild
 """
 
@@ -9,10 +7,10 @@ import sys
 import os.path
 import time
 from contextlib import contextmanager
+
 from searx import settings, get_setting, locales
 from searx.infopage import InfoPageSet, InfoPage
 
-
 _doc_user = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'docs', 'user'))
 
 
@@ -27,13 +25,13 @@ def main():
     with infopageset_ctx as infopageset:
         for _, _, page in infopageset.iter_pages('en'):
             fname = os.path.join(_doc_user, os.path.basename(page.fname))
-            with open(fname, 'w') as f:
+            with open(fname, 'w', encoding='utf-8') as f:
                 f.write(page.content)
 
 
-class OfflinePage(InfoPage):
+class OfflinePage(InfoPage):  # pylint: disable=missing-class-docstring
 
-    def get_ctx(self):  # pylint: disable=no-self-use
+    def get_ctx(self):
         """Jinja context to render :py:obj:`DocPage.content` for offline purpose (no
         links to SearXNG instance)"""
 
@@ -55,7 +53,7 @@ def _instance_infosetset_ctx(base_url):
     # registered in the Flask app.
 
     settings['server']['secret_key'] = ''
-    from searx.webapp import app
+    from searx.webapp import app  # pylint: disable=import-outside-toplevel
 
     # Specify base_url so that url_for() works for base_urls.  If base_url is
     # specified, then these values from are given preference over any Flask's

+ 0 - 3
searxng_extra/standalone_searx.py

@@ -1,8 +1,5 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# (C) Copyright Contributors to the SearXNG project.
-
 """Script to run SearXNG from terminal.
 
   DON'T USE THIS SCRIPT!!

+ 2 - 0
searxng_extra/update/__init__.py

@@ -0,0 +1,2 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

+ 0 - 1
searxng_extra/update/update_ahmia_blacklist.py

@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """This script saves `Ahmia's blacklist`_ for onion sites.
 

+ 0 - 2
searxng_extra/update/update_currencies.py

@@ -1,7 +1,5 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
-
 """Fetch currencies from :origin:`searx/engines/wikidata.py` engine.
 
 Output file: :origin:`searx/data/currencies.json` (:origin:`CI Update data ...

+ 0 - 2
searxng_extra/update/update_engine_descriptions.py

@@ -1,7 +1,5 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
-
 """Fetch website description from websites and from
 :origin:`searx/engines/wikidata.py` engine.
 

+ 1 - 2
searxng_extra/update/update_engine_traits.py

@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Update :py:obj:`searx.enginelib.traits.EngineTraitsMap` and :origin:`searx/languages.py`
 
@@ -28,7 +27,7 @@ from searx.enginelib.traits import EngineTraitsMap
 # Output files.
 languages_file = Path(searx_dir) / 'sxng_locales.py'
 languages_file_header = """\
-# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: AGPL-3.0-or-later
 '''List of SearXNG's locale codes.
 
 .. hint::

+ 0 - 1
searxng_extra/update/update_external_bangs.py

@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Update :origin:`searx/data/external_bangs.json` using the duckduckgo bangs
 from :py:obj:`BANGS_URL`.

+ 0 - 1
searxng_extra/update/update_firefox_version.py

@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Fetch firefox useragent signatures
 

+ 0 - 1
searxng_extra/update/update_locales.py

@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Update locale names in :origin:`searx/data/locales.json` used by
 :ref:`searx.locales`

+ 0 - 1
searxng_extra/update/update_osm_keys_tags.py

@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Fetch OSM keys and tags.
 

+ 0 - 1
searxng_extra/update/update_pygments.py

@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Update pygments style
 

+ 0 - 3
searxng_extra/update/update_wikidata_units.py

@@ -1,8 +1,5 @@
 #!/usr/bin/env python
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-# pylint: disable=missing-module-docstring
-
 """Fetch units from :origin:`searx/engines/wikidata.py` engine.
 
 Output file: :origin:`searx/data/wikidata_units.json` (:origin:`CI Update data

+ 1 - 1
setup.py

@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: AGPL-3.0-or-later
 """Installer for SearXNG package."""
 """Installer for SearXNG package."""
 
 
 from setuptools import setup, find_packages
 from setuptools import setup, find_packages

+ 3 - 1
tests/__init__.py

@@ -1,5 +1,7 @@
-import os
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
 
+import os
 import aiounittest
 
 os.environ.pop('SEARX_DEBUG', None)

+ 2 - 0
tests/robot/__init__.py

@@ -0,0 +1,2 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring, cyclic-import

+ 0 - 1
tests/robot/__main__.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Shared testing code."""
 
 # pylint: disable=missing-function-docstring

+ 0 - 1
tests/robot/test_webapp.py

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=missing-module-docstring,missing-function-docstring
 
 from time import sleep

+ 3 - 0
tests/unit/__init__.py

@@ -1,3 +1,6 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
+
 import os
 from os.path import dirname, sep, abspath
 

+ 3 - 0
tests/unit/engines/test_command.py

@@ -1,3 +1,6 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
+
 '''
 searx is free software: you can redistribute it and/or modify
 it under the terms of the GNU Affero General Public License as published by

+ 3 - 1
tests/unit/engines/test_xpath.py

@@ -1,4 +1,6 @@
-# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
+
 from collections import defaultdict
 import mock
 from searx.engines import xpath

+ 2 - 0
tests/unit/network/__init__.py

@@ -0,0 +1,2 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring

Some files were not shown because too many files changed in this diff