
Merge pull request #16 from return42/add-core.ac

Add a search engine for core.ac.uk
Alexandre Flament · 4 years ago
commit 0603b043ce
3 changed files with 97 additions and 9 deletions
  1. manage (+1 / -0)
  2. searx/engines/core.py (+82 / -0)
  3. searx/settings.yml (+14 / -9)

+ 1 - 0
manage

@@ -38,6 +38,7 @@ PYLINT_FILES=(
     searx/engines/yahoo_news.py
     searx/engines/apkmirror.py
     searx/engines/artic.py
+    searx/engines/core.py
     searx_extra/update/update_external_bangs.py
     searx/metrics/__init__.py
 )

+ 82 - 0
searx/engines/core.py

@@ -0,0 +1,82 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+"""CORE (science)
+
+"""
+# pylint: disable=missing-function-docstring
+
+from json import loads
+from datetime import datetime
+from urllib.parse import urlencode
+
+from searx import logger
+from searx.exceptions import SearxEngineAPIException
+
+logger = logger.getChild('CORE engine')
+
+about = {
+    "website": 'https://core.ac.uk',
+    "wikidata_id": 'Q22661180',
+    "official_api_documentation": 'https://core.ac.uk/documentation/api/',
+    "use_official_api": True,
+    "require_api_key": True,
+    "results": 'JSON',
+}
+
+categories = ['science']
+paging = True
+nb_per_page = 10
+
+api_key = 'unset'
+
+base_url = 'https://core.ac.uk:443/api-v2/search/'
+search_string = '{query}?page={page}&pageSize={nb_per_page}&apiKey={apikey}'
+
+def request(query, params):
+
+    if api_key == 'unset':
+        raise SearxEngineAPIException('missing CORE API key')
+
+    search_path = search_string.format(
+        query = urlencode({'q': query}),
+        nb_per_page = nb_per_page,
+        page = params['pageno'],
+        apikey = api_key,
+    )
+    params['url'] = base_url + search_path
+
+    logger.debug("query_url --> %s", params['url'])
+    return params
+
+def response(resp):
+    results = []
+    json_data = loads(resp.text)
+
+    for result in json_data['data']:
+
+        source = result['_source']
+        time = source['publishedDate'] or source['depositedDate']
+        if time:
+            date = datetime.fromtimestamp(time / 1000)
+        else:
+            date = None
+
+        metadata = []
+        if source['publisher'] and len(source['publisher']) > 3:
+            metadata.append(source['publisher'])
+        if source['topics']:
+            metadata.append(source['topics'][0])
+        if source['doi']:
+            metadata.append(source['doi'])
+        metadata = ' / '.join(metadata)
+
+        results.append({
+            'url': source['urls'][0].replace('http://', 'https://', 1),
+            'title': source['title'],
+            'content': source['description'],
+            'publishedDate': date,
+            'metadata': metadata,
+        })
+
+    return results
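
For illustration, a minimal sketch (not part of the commit) of the URL that the request() function above assembles. The query string, the placeholder API key, and the direct module import are assumptions made only for this example; in searx the api_key value is normally injected from settings.yml:

    from searx.engines import core

    core.api_key = 'YOUR-KEY'   # placeholder; configured via settings.yml in practice
    params = core.request('machine learning', {'pageno': 2})
    print(params['url'])
    # https://core.ac.uk:443/api-v2/search/q=machine+learning?page=2&pageSize=10&apiKey=YOUR-KEY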

+ 14 - 9
searx/settings.yml

@@ -272,6 +272,13 @@ engines:
     categories : images
     shortcut : cce
 
+  # - name : core.ac.uk
+  #   engine : core
+  #   categories : science
+  #   shortcut : cor
+  #   # get your API key from: https://core.ac.uk/api-keys/register/
+  #   api_key : 'unset'
+
   - name : crossref
     engine : json_engine
     paging : True
@@ -965,15 +972,13 @@ engines:
 #    query_fields : '' # query fields
 #    enable_http : True
 
-  - name : springer nature
-    engine : springer
-    # get your API key from: https://dev.springernature.com/signup
-    # api_key : "a69685087d07eca9f13db62f65b8f601" # working API key, for test & debug
-    # set api_key and comment out disabled ..
-    disabled: True
-    shortcut : springer
-    categories : science
-    timeout : 6.0
+  # - name : springer nature
+  #   engine : springer
+  #   # get your API key from: https://dev.springernature.com/signup
+  #   api_key : 'unset' # working API key, for test & debug: "a69685087d07eca9f13db62f65b8f601"
+  #   shortcut : springer
+  #   categories : science
+  #   timeout : 6.0
 
   - name : startpage
     engine : startpage
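
Back in searx/engines/core.py, response() only reads a handful of fields from each '_source' record. A rough sketch (not part of the commit) with a fabricated payload restricted to exactly those fields; all values are invented, purely to show how one CORE hit maps onto a searx result:

    from types import SimpleNamespace
    from searx.engines import core

    fake = SimpleNamespace(text='''{"data": [{"_source": {
        "publishedDate": 1262304000000, "depositedDate": null,
        "publisher": "Example Press", "topics": ["Biology"], "doi": "10.1000/example",
        "urls": ["http://example.org/paper"], "title": "A sample paper",
        "description": "Abstract text ..."}}]}''')

    print(core.response(fake))
    # [{'url': 'https://example.org/paper', 'title': 'A sample paper',
    #   'content': 'Abstract text ...',
    #   'publishedDate': datetime around 2010-01-01 (local time),
    #   'metadata': 'Example Press / Biology / 10.1000/example'}]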