Browse Source

Merge pull request #211 from MarcAbonce/onions_v3_fix_searxng

Update onion engines to v3
Alexandre Flament 3 years ago
parent
commit
f523fd3ea7
3 changed files with 2 additions and 76 deletions
  1. +2 −2
      searx/engines/ahmia.py
  2. +0 −67
      searx/engines/not_evil.py
  3. +0 −7
      searx/settings.yml

+ 2 - 2
searx/engines/ahmia.py

@@ -9,7 +9,7 @@ from searx.engines.xpath import extract_url, extract_text, eval_xpath_list, eval
 
 # about
 about = {
-    "website": 'http://msydqstlz2kzerdg.onion',
+    "website": 'http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion',
     "wikidata_id": 'Q18693938',
     "official_api_documentation": None,
     "use_official_api": False,
@@ -23,7 +23,7 @@ paging = True
 page_size = 10
 
 # search url
-search_url = 'http://msydqstlz2kzerdg.onion/search/?{query}'
+search_url = 'http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion/search/?{query}'
 time_range_support = True
 time_range_dict = {'day': 1,
                    'week': 7,

+ 0 - 67
searx/engines/not_evil.py

@@ -1,67 +0,0 @@
-# SPDX-License-Identifier: AGPL-3.0-or-later
-"""
- not Evil (Onions)
-"""
-
-from urllib.parse import urlencode
-from lxml import html
-from searx.engines.xpath import extract_text
-
-# about
-about = {
-    "website": 'http://hss3uro2hsxfogfq.onion',
-    "wikidata_id": None,
-    "official_api_documentation": 'http://hss3uro2hsxfogfq.onion/api.htm',
-    "use_official_api": False,
-    "require_api_key": False,
-    "results": 'HTML',
-}
-
-# engine dependent config
-categories = ['onions']
-paging = True
-page_size = 20
-
-# search-url
-base_url = 'http://hss3uro2hsxfogfq.onion/'
-search_url = 'index.php?{query}&hostLimit=20&start={pageno}&numRows={page_size}'
-
-# specific xpath variables
-results_xpath = '//*[@id="content"]/div/p'
-url_xpath = './span[1]'
-title_xpath = './a[1]'
-content_xpath = './text()'
-
-
-# do search-request
-def request(query, params):
-    offset = (params['pageno'] - 1) * page_size
-
-    params['url'] = base_url + search_url.format(pageno=offset,
-                                                 query=urlencode({'q': query}),
-                                                 page_size=page_size)
-
-    return params
-
-
-# get response from search-request
-def response(resp):
-    results = []
-
-    # needed because otherwise requests guesses wrong encoding
-    resp.encoding = 'utf8'
-    dom = html.fromstring(resp.text)
-
-    # parse results
-    for result in dom.xpath(results_xpath):
-        url = extract_text(result.xpath(url_xpath)[0])
-        title = extract_text(result.xpath(title_xpath)[0])
-        content = extract_text(result.xpath(content_xpath))
-
-        # append result
-        results.append({'url': url,
-                        'title': title,
-                        'content': content,
-                        'is_onion': True})
-
-    return results

+ 0 - 7
searx/settings.yml

@@ -845,13 +845,6 @@ engines:
       require_api_key: false
       results: JSON
 
-  # Requires Tor
-  - name: not evil
-    engine: not_evil
-    categories: onions
-    enable_http: true
-    shortcut: ne
-
   - name: nyaa
     engine: nyaa
     shortcut: nt