@@ -2,13 +2,25 @@ import json
 from requests import get
 from urllib import urlencode

-resultCount=1
-urlSearch = 'https://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectiontitle&{query}'
-urlDetail = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=labels%7Cinfo%7Csitelinks%7Csitelinks%2Furls%7Cdescriptions%7Cclaims&{query}'
-urlMap = 'https://www.openstreetmap.org/?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'
+result_count = 1
+wikidata_host = 'https://www.wikidata.org'
+wikidata_api = wikidata_host + '/w/api.php'
+url_search = wikidata_api \
+    + '?action=query&list=search&format=json'\
+    + '&srnamespace=0&srprop=sectiontitle&{query}'
+url_detail = wikidata_api\
+    + '?action=wbgetentities&format=json'\
+    + '&props=labels%7Cinfo%7Csitelinks'\
+    + '%7Csitelinks%2Furls%7Cdescriptions%7Cclaims'\
+    + '&{query}'
+url_map = 'https://www.openstreetmap.org/'\
+    + '?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'
+

 def request(query, params):
-    params['url'] = urlSearch.format(query=urlencode({'srsearch': query, 'srlimit': resultCount}))
+    params['url'] = url_search.format(
+        query=urlencode({'srsearch': query,
+                         'srlimit': result_count}))
     return params
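
(Illustration only, not part of the patch.) With an assumed query 'searx',
the rewritten request() produces the same URL as before, just built from the
split-up constants. This assumes Python 2, where urlencode lives in urllib
(it moved to urllib.parse in Python 3):

    params = request('searx', {})
    print(params['url'])
    # https://www.wikidata.org/w/api.php?action=query&list=search&format=json
    # &srnamespace=0&srprop=sectiontitle&srsearch=searx&srlimit=1
    # (urlencode may emit srlimit before srsearch; dict order is arbitrary)
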
@@ -23,7 +35,8 @@ def response(resp):
     language = resp.search_params['language'].split('_')[0]
     if language == 'all':
         language = 'en'
-    url = urlDetail.format(query=urlencode({'ids': '|'.join(wikidata_ids), 'languages': language + '|en'}))
+    url = url_detail.format(query=urlencode({'ids': '|'.join(wikidata_ids),
+                                             'languages': language + '|en'}))

     htmlresponse = get(url)
     jsonresponse = json.loads(htmlresponse.content)
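
(For reference, not part of the patch.) url_detail receives the entity ids
joined with '|', and urlencode percent-escapes that separator. With assumed
ids ['Q42', 'Q5'] and language 'fr':

    urlencode({'ids': '|'.join(['Q42', 'Q5']), 'languages': 'fr|en'})
    # -> 'ids=Q42%7CQ5&languages=fr%7Cen'  ('|' encoded as %7C; order may vary)
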
@@ -32,6 +45,7 @@ def response(resp):

     return results

+
 def getDetail(jsonresponse, wikidata_id, language):
     results = []
     urls = []
@@ -40,60 +54,103 @@ def getDetail(jsonresponse, wikidata_id, language):
     result = jsonresponse.get('entities', {}).get(wikidata_id, {})

     title = result.get('labels', {}).get(language, {}).get('value', None)
-    if title == None:
+    if title is None:
         title = result.get('labels', {}).get('en', {}).get('value', None)
-    if title == None:
+    if title is None:
         return results

-    description = result.get('descriptions', {}).get(language, {}).get('value', None)
-    if description == None:
-        description = result.get('descriptions', {}).get('en', {}).get('value', '')
+    description = result\
+        .get('descriptions', {})\
+        .get(language, {})\
+        .get('value', None)
+
+    if description is None:
+        description = result\
+            .get('descriptions', {})\
+            .get('en', {})\
+            .get('value', '')

     claims = result.get('claims', {})
     official_website = get_string(claims, 'P856', None)
-    if official_website != None:
-        urls.append({ 'title' : 'Official site', 'url': official_website })
-        results.append({ 'title': title, 'url' : official_website })
+    if official_website is not None:
+        urls.append({'title': 'Official site', 'url': official_website})
+        results.append({'title': title, 'url': official_website})

     wikipedia_link_count = 0
     if language != 'en':
-        wikipedia_link_count += add_url(urls, 'Wikipedia (' + language + ')', get_wikilink(result, language + 'wiki'))
+        wikipedia_link_count += add_url(urls,
+                                        'Wikipedia (' + language + ')',
+                                        get_wikilink(result, language +
+                                                     'wiki'))
     wikipedia_en_link = get_wikilink(result, 'enwiki')
-    wikipedia_link_count += add_url(urls, 'Wikipedia (en)', wikipedia_en_link)
+    wikipedia_link_count += add_url(urls,
+                                    'Wikipedia (en)',
+                                    wikipedia_en_link)
     if wikipedia_link_count == 0:
         misc_language = get_wiki_firstlanguage(result, 'wiki')
-        if misc_language != None:
-            add_url(urls, 'Wikipedia (' + misc_language + ')', get_wikilink(result, misc_language + 'wiki'))
+        if misc_language is not None:
+            add_url(urls,
+                    'Wikipedia (' + misc_language + ')',
+                    get_wikilink(result, misc_language + 'wiki'))

     if language != 'en':
-        add_url(urls, 'Wiki voyage (' + language + ')', get_wikilink(result, language + 'wikivoyage'))
-    add_url(urls, 'Wiki voyage (en)', get_wikilink(result, 'enwikivoyage'))
+        add_url(urls,
+                'Wiki voyage (' + language + ')',
+                get_wikilink(result, language + 'wikivoyage'))
+
+    add_url(urls,
+            'Wiki voyage (en)',
+            get_wikilink(result, 'enwikivoyage'))

     if language != 'en':
-        add_url(urls, 'Wikiquote (' + language + ')', get_wikilink(result, language + 'wikiquote'))
-    add_url(urls, 'Wikiquote (en)', get_wikilink(result, 'enwikiquote'))
+        add_url(urls,
+                'Wikiquote (' + language + ')',
+                get_wikilink(result, language + 'wikiquote'))

-    add_url(urls, 'Commons wiki', get_wikilink(result, 'commonswiki'))
+    add_url(urls,
+            'Wikiquote (en)',
+            get_wikilink(result, 'enwikiquote'))

-    add_url(urls, 'Location', get_geolink(claims, 'P625', None))
+    add_url(urls,
+            'Commons wiki',
+            get_wikilink(result, 'commonswiki'))

-    add_url(urls, 'Wikidata', 'https://www.wikidata.org/wiki/' + wikidata_id + '?uselang='+ language)
+    add_url(urls,
+            'Location',
+            get_geolink(claims, 'P625', None))
+
+    add_url(urls,
+            'Wikidata',
+            'https://www.wikidata.org/wiki/'
+            + wikidata_id + '?uselang=' + language)

     musicbrainz_work_id = get_string(claims, 'P435')
-    if musicbrainz_work_id != None:
-        add_url(urls, 'MusicBrainz', 'http://musicbrainz.org/work/' + musicbrainz_work_id)
+    if musicbrainz_work_id is not None:
+        add_url(urls,
+                'MusicBrainz',
+                'http://musicbrainz.org/work/'
+                + musicbrainz_work_id)

     musicbrainz_artist_id = get_string(claims, 'P434')
-    if musicbrainz_artist_id != None:
-        add_url(urls, 'MusicBrainz', 'http://musicbrainz.org/artist/' + musicbrainz_artist_id)
+    if musicbrainz_artist_id is not None:
+        add_url(urls,
+                'MusicBrainz',
+                'http://musicbrainz.org/artist/'
+                + musicbrainz_artist_id)

     musicbrainz_release_group_id = get_string(claims, 'P436')
-    if musicbrainz_release_group_id != None:
-        add_url(urls, 'MusicBrainz', 'http://musicbrainz.org/release-group/' + musicbrainz_release_group_id)
+    if musicbrainz_release_group_id is not None:
+        add_url(urls,
+                'MusicBrainz',
+                'http://musicbrainz.org/release-group/'
+                + musicbrainz_release_group_id)

     musicbrainz_label_id = get_string(claims, 'P966')
-    if musicbrainz_label_id != None:
-        add_url(urls, 'MusicBrainz', 'http://musicbrainz.org/label/' + musicbrainz_label_id)
+    if musicbrainz_label_id is not None:
+        add_url(urls,
+                'MusicBrainz',
+                'http://musicbrainz.org/label/'
+                + musicbrainz_label_id)

     # musicbrainz_area_id = get_string(claims, 'P982')
     # P1407 MusicBrainz series ID
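
A note on the counting idiom above (illustration, not part of the patch):
add_url() returns 1 when it appends a link and 0 otherwise, so
wikipedia_link_count only grows for sitelinks that actually exist. A minimal
sketch, with a hypothetical URL:

    urls = []
    n = add_url(urls, 'Wikipedia (en)', None)  # no sitelink -> returns 0
    n += add_url(urls, 'Wikipedia (en)',
                 'https://en.wikipedia.org/wiki/Example')  # appended -> 1
    # n == 1; urls == [{'title': 'Wikipedia (en)',
    #                   'url': 'https://en.wikipedia.org/wiki/Example'}]
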
@@ -102,42 +159,43 @@ def getDetail(jsonresponse, wikidata_id, language):
     # P1407 MusicBrainz series ID

     postal_code = get_string(claims, 'P281', None)
-    if postal_code != None:
-        attributes.append({'label' : 'Postal code(s)', 'value' : postal_code})
+    if postal_code is not None:
+        attributes.append({'label': 'Postal code(s)', 'value': postal_code})

     date_of_birth = get_time(claims, 'P569', None)
-    if date_of_birth != None:
-        attributes.append({'label' : 'Date of birth', 'value' : date_of_birth})
+    if date_of_birth is not None:
+        attributes.append({'label': 'Date of birth', 'value': date_of_birth})

     date_of_death = get_time(claims, 'P570', None)
-    if date_of_death != None:
-        attributes.append({'label' : 'Date of death', 'value' : date_of_death})
+    if date_of_death is not None:
+        attributes.append({'label': 'Date of death', 'value': date_of_death})

-    if len(attributes)==0 and len(urls)==2 and len(description)==0:
+    if len(attributes) == 0 and len(urls) == 2 and len(description) == 0:
         results.append({
-                       'url': urls[0]['url'],
-                       'title': title,
-                       'content': description
-                       })
+            'url': urls[0]['url'],
+            'title': title,
+            'content': description
+            })
     else:
         results.append({
-                       'infobox' : title,
-                       'id' : wikipedia_en_link,
-                       'content' : description,
-                       'attributes' : attributes,
-                       'urls' : urls
-                       })
+            'infobox': title,
+            'id': wikipedia_en_link,
+            'content': description,
+            'attributes': attributes,
+            'urls': urls
+            })

     return results


 def add_url(urls, title, url):
-    if url != None:
-        urls.append({'title' : title, 'url' : url})
+    if url is not None:
+        urls.append({'title': title, 'url': url})
         return 1
     else:
         return 0

+
 def get_mainsnak(claims, propertyName):
     propValue = claims.get(propertyName, {})
     if len(propValue) == 0:
@@ -157,7 +215,7 @@ def get_string(claims, propertyName, defaultValue=None):
         mainsnak = e.get('mainsnak', {})

         datavalue = mainsnak.get('datavalue', {})
-        if datavalue != None:
+        if datavalue is not None:
             result.append(datavalue.get('value', ''))

     if len(result) == 0:
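
(Sketch, not part of the patch.) get_string() walks
claims[property][i]['mainsnak']['datavalue']['value'] and falls back to
defaultValue when the property is absent. With an assumed claim shape for
P856, the 'official website' property used earlier:

    claims = {'P856': [{'mainsnak':
                        {'datavalue': {'value': 'https://example.org'}}}]}
    get_string(claims, 'P856')             # -> 'https://example.org'
    get_string(claims, 'P000', 'default')  # absent property -> 'default'
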
@@ -177,7 +235,7 @@ def get_time(claims, propertyName, defaultValue=None):
         mainsnak = e.get('mainsnak', {})

         datavalue = mainsnak.get('datavalue', {})
-        if datavalue != None:
+        if datavalue is not None:
             value = datavalue.get('value', '')
             result.append(value.get('time', ''))
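
get_time() does the same walk one level deeper: for time properties the
datavalue holds a dict, and the raw Wikidata timestamp is read from its
'time' key. A sketch with an assumed shape (the timestamp format may vary
by API version):

    claims = {'P569': [{'mainsnak': {'datavalue':
                        {'value': {'time': '+1952-03-11T00:00:00Z'}}}}]}
    get_time(claims, 'P569')  # -> '+1952-03-11T00:00:00Z'
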
@@ -190,7 +248,7 @@ def get_time(claims, propertyName, defaultValue=None):
 def get_geolink(claims, propertyName, defaultValue=''):
     mainsnak = get_mainsnak(claims, propertyName)

-    if mainsnak == None:
+    if mainsnak is None:
         return defaultValue

     datatype = mainsnak.get('datatype', '')
@@ -209,21 +267,25 @@ def get_geolink(claims, propertyName, defaultValue=''):
     # 1 --> 6
     # 0.016666666666667 --> 9
     # 0.00027777777777778 --> 19
-    # wolframalpha : quadratic fit { {13, 5}, {1, 6}, {0.0166666, 9}, {0.0002777777,19}}
+    # wolframalpha :
+    # quadratic fit { {13, 5}, {1, 6}, {0.0166666, 9}, {0.0002777777,19}}
     # 14.1186-8.8322 x+0.625447 x^2
     if precision < 0.0003:
         zoom = 19
     else:
         zoom = int(15 - precision*8.8322 + precision*precision*0.625447)

-    url = urlMap.replace('{latitude}', str(value.get('latitude',0))).replace('{longitude}', str(value.get('longitude',0))).replace('{zoom}', str(zoom))
+    url = url_map\
+        .replace('{latitude}', str(value.get('latitude', 0)))\
+        .replace('{longitude}', str(value.get('longitude', 0)))\
+        .replace('{zoom}', str(zoom))

     return url


 def get_wikilink(result, wikiid):
     url = result.get('sitelinks', {}).get(wikiid, {}).get('url', None)
-    if url == None:
+    if url is None:
         return url
     elif url.startswith('http://'):
         url = url.replace('http://', 'https://')
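
Worked example of the zoom fit above (illustration, not part of the patch):
a coordinate precision of 1 degree gives zoom = int(15 - 8.8322 + 0.625447)
= int(6.79) = 6, matching the '# 1 --> 6' comment, while precisions below
0.0003 clamp to the maximum zoom of 19. With assumed values latitude=48.85,
longitude=2.35 and precision=1, the substitution chain then yields:

    'https://www.openstreetmap.org/?lat=48.85&lon=2.35&zoom=6&layers=M'
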
@@ -231,8 +293,9 @@ def get_wikilink(result, wikiid):
         url = 'https:' + url
     return url

+
 def get_wiki_firstlanguage(result, wikipatternid):
     for k in result.get('sitelinks', {}).keys():
-        if k.endswith(wikipatternid) and len(k)==(2+len(wikipatternid)):
+        if k.endswith(wikipatternid) and len(k) == (2+len(wikipatternid)):
             return k[0:2]
     return None
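
Finally, a sketch of get_wiki_firstlanguage() on an assumed sitelinks dict:
a key matches only if it ends with the pattern and is exactly two characters
longer, so two-letter language editions are picked up while e.g.
'commonswiki' is skipped:

    result = {'sitelinks': {'commonswiki': {}, 'frwiki': {}}}
    get_wiki_firstlanguage(result, 'wiki')  # -> 'fr'
    # 'commonswiki' ends with 'wiki', but len('commonswiki') != 2 + len('wiki')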