# wikidata.py

import json

try:
    from urllib.parse import urlencode  # Python 3
except ImportError:
    from urllib import urlencode  # Python 2

from searx.poolrequests import get
from searx.utils import format_date_by_locale

result_count = 1

wikidata_host = 'https://www.wikidata.org'
wikidata_api = wikidata_host + '/w/api.php'

# full-text search for entities matching the query
url_search = wikidata_api \
    + '?action=query&list=search&format=json'\
    + '&srnamespace=0&srprop=sectiontitle&{query}'

# fetch labels, descriptions, claims and sitelinks for a set of entity ids
url_detail = wikidata_api\
    + '?action=wbgetentities&format=json'\
    + '&props=labels%7Cinfo%7Csitelinks'\
    + '%7Csitelinks%2Furls%7Cdescriptions%7Cclaims'\
    + '&{query}'

url_map = 'https://www.openstreetmap.org/'\
    + '?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'
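
# For illustration only (not part of the engine): with the query 'Berlin'
# the templates above expand roughly as follows (exact parameter order
# depends on urlencode):
#   url_search.format(query=urlencode({'srsearch': 'Berlin', 'srlimit': 1}))
#   -> https://www.wikidata.org/w/api.php?action=query&list=search
#      &format=json&srnamespace=0&srprop=sectiontitle&srsearch=Berlin&srlimit=1
#   url_detail.format(query=urlencode({'ids': 'Q64', 'languages': 'de|en'}))
#   -> ...?action=wbgetentities&format=json&props=...&ids=Q64&languages=de%7Cen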


def request(query, params):
    # build the search URL; searx fills in the rest of the params dict
    params['url'] = url_search.format(
        query=urlencode({'srsearch': query,
                         'srlimit': result_count}))
    return params
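
# Minimal usage sketch, assuming searx passes in a params dict that this
# engine only has to fill with the request URL:
#   params = request('Berlin', {})
#   params['url']  # -> full-text search URL for the top wikidata match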


def response(resp):
    results = []
    search_res = json.loads(resp.text)

    # gather the wikidata ids returned by the full-text search
    wikidata_ids = set()
    for r in search_res.get('query', {}).get('search', []):
        wikidata_ids.add(r.get('title', ''))

    language = resp.search_params['language'].split('_')[0]
    if language == 'all':
        language = 'en'

    # second request: fetch the details of all matched entities at once
    url = url_detail.format(query=urlencode({'ids': '|'.join(wikidata_ids),
                                             'languages': language + '|en'}))

    htmlresponse = get(url)
    jsonresponse = json.loads(htmlresponse.content)
    for wikidata_id in wikidata_ids:
        results += getDetail(jsonresponse, wikidata_id, language,
                             resp.search_params['language'])

    return results


def getDetail(jsonresponse, wikidata_id, language, locale):
    results = []
    urls = []
    attributes = []

    result = jsonresponse.get('entities', {}).get(wikidata_id, {})

    # label in the requested language, with an English fallback
    title = result.get('labels', {}).get(language, {}).get('value', None)
    if title is None:
        title = result.get('labels', {}).get('en', {}).get('value', None)
    if title is None:
        return results

    description = result\
        .get('descriptions', {})\
        .get(language, {})\
        .get('value', None)

    if description is None:
        description = result\
            .get('descriptions', {})\
            .get('en', {})\
            .get('value', '')

    claims = result.get('claims', {})
    official_website = get_string(claims, 'P856', None)
    if official_website is not None:
        urls.append({'title': 'Official site', 'url': official_website})
        results.append({'title': title, 'url': official_website})

    wikipedia_link_count = 0
    if language != 'en':
        wikipedia_link_count += add_url(urls,
                                        'Wikipedia (' + language + ')',
                                        get_wikilink(result, language +
                                                     'wiki'))
    wikipedia_en_link = get_wikilink(result, 'enwiki')
    wikipedia_link_count += add_url(urls,
                                    'Wikipedia (en)',
                                    wikipedia_en_link)
    if wikipedia_link_count == 0:
        # no article in the requested language or in English:
        # fall back to the first available Wikipedia language edition
        misc_language = get_wiki_firstlanguage(result, 'wiki')
        if misc_language is not None:
            add_url(urls,
                    'Wikipedia (' + misc_language + ')',
                    get_wikilink(result, misc_language + 'wiki'))

    if language != 'en':
        add_url(urls,
                'Wiki voyage (' + language + ')',
                get_wikilink(result, language + 'wikivoyage'))
    add_url(urls,
            'Wiki voyage (en)',
            get_wikilink(result, 'enwikivoyage'))

    if language != 'en':
        add_url(urls,
                'Wikiquote (' + language + ')',
                get_wikilink(result, language + 'wikiquote'))
    add_url(urls,
            'Wikiquote (en)',
            get_wikilink(result, 'enwikiquote'))

    add_url(urls,
            'Commons wiki',
            get_wikilink(result, 'commonswiki'))

    # P625: coordinate location
    add_url(urls,
            'Location',
            get_geolink(claims, 'P625', None))

    add_url(urls,
            'Wikidata',
            'https://www.wikidata.org/wiki/'
            + wikidata_id + '?uselang=' + language)

    musicbrainz_work_id = get_string(claims, 'P435')
    if musicbrainz_work_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/work/'
                + musicbrainz_work_id)

    musicbrainz_artist_id = get_string(claims, 'P434')
    if musicbrainz_artist_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/artist/'
                + musicbrainz_artist_id)

    musicbrainz_release_group_id = get_string(claims, 'P436')
    if musicbrainz_release_group_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/release-group/'
                + musicbrainz_release_group_id)

    musicbrainz_label_id = get_string(claims, 'P966')
    if musicbrainz_label_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/label/'
                + musicbrainz_label_id)

    # musicbrainz_area_id = get_string(claims, 'P982')
    # P1407 MusicBrainz series ID
    # P1004 MusicBrainz place ID
    # P1330 MusicBrainz instrument ID

    postal_code = get_string(claims, 'P281', None)
    if postal_code is not None:
        attributes.append({'label': 'Postal code(s)', 'value': postal_code})

    date_of_birth = get_time(claims, 'P569', None)
    if date_of_birth is not None:
        # drop the sign and zero-padding of the year,
        # e.g. '+00000001952-03-11T00:00:00Z' -> '1952-03-11T00:00:00Z'
        date_of_birth = format_date_by_locale(date_of_birth[8:], locale)
        attributes.append({'label': 'Date of birth', 'value': date_of_birth})

    date_of_death = get_time(claims, 'P570', None)
    if date_of_death is not None:
        date_of_death = format_date_by_locale(date_of_death[8:], locale)
        attributes.append({'label': 'Date of death', 'value': date_of_death})

    if len(attributes) == 0 and len(urls) == 2 and len(description) == 0:
        # nothing but the Wikidata link and one other url:
        # emit a plain result instead of an infobox
        results.append({
            'url': urls[0]['url'],
            'title': title,
            'content': description
        })
    else:
        results.append({
            'infobox': title,
            'id': wikipedia_en_link,
            'content': description,
            'attributes': attributes,
            'urls': urls
        })

    return results


def add_url(urls, title, url):
    if url is not None:
        urls.append({'title': title, 'url': url})
        # report whether a link was actually added, so callers can count
        return 1
    else:
        return 0


def get_mainsnak(claims, propertyName):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return None

    propValue = propValue[0].get('mainsnak', None)
    return propValue
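
# Shape of the 'claims' structure these helpers parse (abridged from the
# wbgetentities JSON, shown for orientation only):
#   claims = {
#       'P856': [                                # one list per property
#           {'mainsnak': {
#               'datatype': 'url',
#               'datavalue': {'value': 'https://example.org'}}},
#       ],
#   }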


def get_string(claims, propertyName, defaultValue=None):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return defaultValue

    result = []
    for e in propValue:
        mainsnak = e.get('mainsnak', {})

        datavalue = mainsnak.get('datavalue', {})
        if datavalue is not None:
            result.append(datavalue.get('value', ''))

    if len(result) == 0:
        return defaultValue
    else:
        # TODO handle multiple urls
        return result[0]


def get_time(claims, propertyName, defaultValue=None):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return defaultValue

    result = []
    for e in propValue:
        mainsnak = e.get('mainsnak', {})

        datavalue = mainsnak.get('datavalue', {})
        if datavalue is not None:
            value = datavalue.get('value', '')
            result.append(value.get('time', ''))

    if len(result) == 0:
        return defaultValue
    else:
        return ', '.join(result)
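
# Time values use the Wikibase format with a signed, zero-padded year,
# e.g. {'time': '+00000001952-03-11T00:00:00Z'}; getDetail() slices off
# the first 8 characters ('+0000000') before locale formatting.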


def get_geolink(claims, propertyName, defaultValue=''):
    mainsnak = get_mainsnak(claims, propertyName)

    if mainsnak is None:
        return defaultValue

    datatype = mainsnak.get('datatype', '')
    datavalue = mainsnak.get('datavalue', {})

    if datatype != 'globe-coordinate':
        return defaultValue

    value = datavalue.get('value', {})

    precision = value.get('precision', 0.0002)

    # there is no zoom information, deduce from precision (error prone)
    # samples :
    # 13 --> 5
    # 1 --> 6
    # 0.016666666666667 --> 9
    # 0.00027777777777778 --> 19
    # wolframalpha :
    # quadratic fit { {13, 5}, {1, 6}, {0.0166666, 9}, {0.0002777777, 19} }
    # 14.1186-8.8322 x+0.625447 x^2
    if precision < 0.0003:
        # highest-precision coordinates: hard-code the maximum zoom
        zoom = 19
    else:
        zoom = int(15 - precision*8.8322 + precision*precision*0.625447)

    url = url_map\
        .replace('{latitude}', str(value.get('latitude', 0)))\
        .replace('{longitude}', str(value.get('longitude', 0)))\
        .replace('{zoom}', str(zoom))

    return url
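
# Worked example of the zoom heuristic: a precision of 1 degree yields
# int(15 - 8.8322 + 0.625447) = int(6.79...) = 6, matching the '1 --> 6'
# sample above.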


def get_wikilink(result, wikiid):
    url = result.get('sitelinks', {}).get(wikiid, {}).get('url', None)
    if url is None:
        return url
    elif url.startswith('http://'):
        # force https; replace only the scheme, not later occurrences
        url = url.replace('http://', 'https://', 1)
    elif url.startswith('//'):
        # protocol-relative link
        url = 'https:' + url
    return url
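
# Sitelinks look like (abridged, for orientation only):
#   result['sitelinks'] = {
#       'enwiki': {'url': 'https://en.wikipedia.org/wiki/Berlin'},
#       'dewiki': {'url': 'https://de.wikipedia.org/wiki/Berlin'},
#   }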


def get_wiki_firstlanguage(result, wikipatternid):
    # find the first sitelink named '<xx>wiki' and return its language code
    for k in result.get('sitelinks', {}).keys():
        if k.endswith(wikipatternid) and len(k) == (2 + len(wikipatternid)):
            return k[0:2]
    return None