wikidata.py

import json
import locale
from urllib.parse import urlencode

import dateutil.parser
from requests import get

result_count = 1

wikidata_host = 'https://www.wikidata.org'
wikidata_api = wikidata_host + '/w/api.php'

url_search = wikidata_api \
    + '?action=query&list=search&format=json'\
    + '&srnamespace=0&srprop=sectiontitle&{query}'

url_detail = wikidata_api\
    + '?action=wbgetentities&format=json'\
    + '&props=labels%7Cinfo%7Csitelinks'\
    + '%7Csitelinks%2Furls%7Cdescriptions%7Cclaims'\
    + '&{query}'

url_map = 'https://www.openstreetmap.org/'\
    + '?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'

def request(query, params):
    params['url'] = url_search.format(
        query=urlencode({'srsearch': query,
                         'srlimit': result_count}))
    return params
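
# Illustrative only (not part of the engine): for the query 'Berlin' with
# result_count = 1, request() would set params['url'] to
#   https://www.wikidata.org/w/api.php?action=query&list=search&format=json
#   &srnamespace=0&srprop=sectiontitle&srsearch=Berlin&srlimit=1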

def response(resp):
    results = []
    search_res = json.loads(resp.text)

    wikidata_ids = set()
    for r in search_res.get('query', {}).get('search', {}):
        wikidata_ids.add(r.get('title', ''))

    language = resp.search_params['language'].split('_')[0]
    if language == 'all':
        language = 'en'

    # try the requested locale for date formatting,
    # falling back to en_US, then to the process default
    try:
        locale.setlocale(locale.LC_ALL, str(resp.search_params['language']))
    except locale.Error:
        try:
            locale.setlocale(locale.LC_ALL, 'en_US')
        except locale.Error:
            pass

    url = url_detail.format(query=urlencode({'ids': '|'.join(wikidata_ids),
                                             'languages': language + '|en'}))
    htmlresponse = get(url)
    jsonresponse = json.loads(htmlresponse.content)
    for wikidata_id in wikidata_ids:
        results = results + getDetail(jsonresponse, wikidata_id, language)

    return results
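
# For reference, resp.text parses to something like the following (abridged);
# in namespace 0 the page 'title' is the Wikidata item id:
#   {'query': {'search': [{'ns': 0, 'title': 'Q42'}]}}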

def getDetail(jsonresponse, wikidata_id, language):
    results = []
    urls = []
    attributes = []

    result = jsonresponse.get('entities', {}).get(wikidata_id, {})

    title = result.get('labels', {}).get(language, {}).get('value', None)
    if title is None:
        title = result.get('labels', {}).get('en', {}).get('value', None)
    if title is None:
        return results

    description = result\
        .get('descriptions', {})\
        .get(language, {})\
        .get('value', None)
    if description is None:
        description = result\
            .get('descriptions', {})\
            .get('en', {})\
            .get('value', '')

    claims = result.get('claims', {})
    official_website = get_string(claims, 'P856', None)
    if official_website is not None:
        urls.append({'title': 'Official site', 'url': official_website})
        results.append({'title': title, 'url': official_website})

    wikipedia_link_count = 0
    if language != 'en':
        wikipedia_link_count += add_url(urls,
                                        'Wikipedia (' + language + ')',
                                        get_wikilink(result, language +
                                                     'wiki'))
    wikipedia_en_link = get_wikilink(result, 'enwiki')
    wikipedia_link_count += add_url(urls,
                                    'Wikipedia (en)',
                                    wikipedia_en_link)
    if wikipedia_link_count == 0:
        misc_language = get_wiki_firstlanguage(result, 'wiki')
        if misc_language is not None:
            add_url(urls,
                    'Wikipedia (' + misc_language + ')',
                    get_wikilink(result, misc_language + 'wiki'))

    if language != 'en':
        add_url(urls,
                'Wiki voyage (' + language + ')',
                get_wikilink(result, language + 'wikivoyage'))
    add_url(urls,
            'Wiki voyage (en)',
            get_wikilink(result, 'enwikivoyage'))

    if language != 'en':
        add_url(urls,
                'Wikiquote (' + language + ')',
                get_wikilink(result, language + 'wikiquote'))
    add_url(urls,
            'Wikiquote (en)',
            get_wikilink(result, 'enwikiquote'))

    add_url(urls,
            'Commons wiki',
            get_wikilink(result, 'commonswiki'))

    add_url(urls,
            'Location',
            get_geolink(claims, 'P625', None))

    add_url(urls,
            'Wikidata',
            'https://www.wikidata.org/wiki/'
            + wikidata_id + '?uselang=' + language)

    musicbrainz_work_id = get_string(claims, 'P435')
    if musicbrainz_work_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/work/'
                + musicbrainz_work_id)

    musicbrainz_artist_id = get_string(claims, 'P434')
    if musicbrainz_artist_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/artist/'
                + musicbrainz_artist_id)

    musicbrainz_release_group_id = get_string(claims, 'P436')
    if musicbrainz_release_group_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/release-group/'
                + musicbrainz_release_group_id)

    musicbrainz_label_id = get_string(claims, 'P966')
    if musicbrainz_label_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/label/'
                + musicbrainz_label_id)

    # musicbrainz_area_id = get_string(claims, 'P982')
    # P1407 MusicBrainz series ID
    # P1004 MusicBrainz place ID
    # P1330 MusicBrainz instrument ID

    postal_code = get_string(claims, 'P281', None)
    if postal_code is not None:
        attributes.append({'label': 'Postal code(s)', 'value': postal_code})

    date_of_birth = get_time(claims, 'P569', None)
    if date_of_birth is not None:
        # Wikidata times carry a '+0000000' year padding;
        # slice it off before parsing
        date_of_birth = dateutil.parser.parse(date_of_birth[8:])\
            .strftime(locale.nl_langinfo(locale.D_FMT))
        attributes.append({'label': 'Date of birth', 'value': date_of_birth})

    date_of_death = get_time(claims, 'P570', None)
    if date_of_death is not None:
        date_of_death = dateutil.parser.parse(date_of_death[8:])\
            .strftime(locale.nl_langinfo(locale.D_FMT))
        attributes.append({'label': 'Date of death', 'value': date_of_death})

    if len(attributes) == 0 and len(urls) == 2 and len(description) == 0:
        results.append({
            'url': urls[0]['url'],
            'title': title,
            'content': description
        })
    else:
        results.append({
            'infobox': title,
            'id': wikipedia_en_link,
            'content': description,
            'attributes': attributes,
            'urls': urls
        })

    return results
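
# Illustrative only: for an entity with sitelinks and a description,
# getDetail() returns a single infobox dict shaped roughly like
#   [{'infobox': 'Douglas Adams', 'id': 'https://en.wikipedia.org/wiki/...',
#     'content': 'English writer and humorist', 'attributes': [],
#     'urls': [{'title': 'Wikipedia (en)', 'url': '...'}, ...]}]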

def add_url(urls, title, url):
    if url is not None:
        urls.append({'title': title, 'url': url})
        return 1
    else:
        return 0

def get_mainsnak(claims, propertyName):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return None

    propValue = propValue[0].get('mainsnak', None)
    return propValue
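
# For reference, a claim list in the entity JSON is shaped roughly like this
# (abridged), so the value of interest sits under mainsnak/datavalue/value:
#   {'P856': [{'mainsnak': {'datatype': 'url',
#                           'datavalue': {'type': 'string',
#                                         'value': 'http://douglasadams.com'}}}]}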

def get_string(claims, propertyName, defaultValue=None):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return defaultValue

    result = []
    for e in propValue:
        mainsnak = e.get('mainsnak', {})
        datavalue = mainsnak.get('datavalue', {})
        if datavalue is not None:
            result.append(datavalue.get('value', ''))

    if len(result) == 0:
        return defaultValue
    else:
        # TODO handle multiple urls
        return result[0]

def get_time(claims, propertyName, defaultValue=None):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return defaultValue

    result = []
    for e in propValue:
        mainsnak = e.get('mainsnak', {})
        datavalue = mainsnak.get('datavalue', {})
        if datavalue is not None:
            value = datavalue.get('value', '')
            result.append(value.get('time', ''))

    if len(result) == 0:
        return defaultValue
    else:
        return ', '.join(result)
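
# For reference, a Wikidata time datavalue is shaped roughly like
#   {'time': '+00000001952-03-11T00:00:00Z', 'precision': 11, ...}
# which is why getDetail() slices off the first 8 characters ('+0000000')
# before handing the string to dateutil.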

def get_geolink(claims, propertyName, defaultValue=''):
    mainsnak = get_mainsnak(claims, propertyName)
    if mainsnak is None:
        return defaultValue

    datatype = mainsnak.get('datatype', '')
    datavalue = mainsnak.get('datavalue', {})

    if datatype != 'globe-coordinate':
        return defaultValue

    value = datavalue.get('value', {})
    precision = value.get('precision', 0.0002)

    # there is no zoom information, deduce from precision (error prone)
    # samples :
    # 13 --> 5
    # 1 --> 6
    # 0.016666666666667 --> 9
    # 0.00027777777777778 --> 19
    # wolframalpha :
    # quadratic fit { {13, 5}, {1, 6}, {0.0166666, 9}, {0.0002777777, 19} }
    # 14.1186 - 8.8322 x + 0.625447 x^2
    # (the code below rounds the fit's constant term up from 14.1186 to 15)
    if precision < 0.0003:
        zoom = 19
    else:
        zoom = int(15 - precision * 8.8322 + precision * precision * 0.625447)

    url = url_map\
        .replace('{latitude}', str(value.get('latitude', 0)))\
        .replace('{longitude}', str(value.get('longitude', 0)))\
        .replace('{zoom}', str(zoom))

    return url
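
# Worked example of the zoom heuristic: a coarse precision of 1 degree gives
# int(15 - 1*8.8322 + 1*1*0.625447) = int(6.79) = 6, matching the second
# sample above; anything finer than 0.0003 degrees is clamped to zoom 19.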

def get_wikilink(result, wikiid):
    url = result.get('sitelinks', {}).get(wikiid, {}).get('url', None)
    if url is None:
        return url
    elif url.startswith('http://'):
        # upgrade only the scheme, not other occurrences in the URL
        url = url.replace('http://', 'https://', 1)
    elif url.startswith('//'):
        url = 'https:' + url
    return url

def get_wiki_firstlanguage(result, wikipatternid):
    for k in result.get('sitelinks', {}).keys():
        if k.endswith(wikipatternid) and len(k) == (2 + len(wikipatternid)):
            return k[0:2]
    return None
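
# Minimal standalone sketch (illustration only; assumes network access).
# request()/response() are normally driven by the host application, which
# also sets search_params on the response object, so we attach it by hand.
if __name__ == '__main__':
    params = request('Douglas Adams', {})
    resp = get(params['url'])
    resp.search_params = {'language': 'en'}  # normally set by the caller
    for r in response(resp):
        print(r)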