# wikidata.py

import json
import locale

import dateutil.parser
from requests import get
from urllib.parse import urlencode

result_count = 1

wikidata_host = 'https://www.wikidata.org'
wikidata_api = wikidata_host + '/w/api.php'

url_search = wikidata_api \
    + '?action=query&list=search&format=json' \
    + '&srnamespace=0&srprop=sectiontitle&{query}'

url_detail = wikidata_api \
    + '?action=wbgetentities&format=json' \
    + '&props=labels%7Cinfo%7Csitelinks' \
    + '%7Csitelinks%2Furls%7Cdescriptions%7Cclaims' \
    + '&{query}'

url_map = 'https://www.openstreetmap.org/' \
    + '?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'


def request(query, params):
    params['url'] = url_search.format(
        query=urlencode({'srsearch': query,
                         'srlimit': result_count}))
    return params
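
# Example of what request() produces: request('Paris', {}) sets params['url']
# to the search endpoint with the query encoded, e.g.
#   https://www.wikidata.org/w/api.php?action=query&list=search&format=json
#   &srnamespace=0&srprop=sectiontitle&srsearch=Paris&srlimit=1
# i.e. a full-text entity search capped at result_count hits.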


def response(resp):
    results = []
    search_res = json.loads(resp.text)

    # Collect the entity ids (e.g. 'Q42') returned by the search request.
    wikidata_ids = set()
    for r in search_res.get('query', {}).get('search', []):
        wikidata_ids.add(r.get('title', ''))

    language = resp.search_params['language'].split('_')[0]
    if language == 'all':
        language = 'en'

    # Dates below are rendered with the locale's date format; fall back to
    # en_US if the search language is not an installed locale.
    try:
        locale.setlocale(locale.LC_ALL, str(resp.search_params['language']))
    except locale.Error:
        try:
            locale.setlocale(locale.LC_ALL, 'en_US')
        except locale.Error:
            pass

    # Fetch the full entity records (labels, descriptions, sitelinks,
    # claims) for all ids in a single wbgetentities call.
    url = url_detail.format(query=urlencode({'ids': '|'.join(wikidata_ids),
                                             'languages': language + '|en'}))
    htmlresponse = get(url)
    jsonresponse = json.loads(htmlresponse.text)

    for wikidata_id in wikidata_ids:
        results = results + getDetail(jsonresponse, wikidata_id, language)

    return results


def getDetail(jsonresponse, wikidata_id, language):
    results = []
    urls = []
    attributes = []

    result = jsonresponse.get('entities', {}).get(wikidata_id, {})

    # Prefer the label and description in the search language,
    # fall back to English.
    title = result.get('labels', {}).get(language, {}).get('value', None)
    if title is None:
        title = result.get('labels', {}).get('en', {}).get('value', None)
    if title is None:
        return results

    description = result\
        .get('descriptions', {})\
        .get(language, {})\
        .get('value', None)
    if description is None:
        description = result\
            .get('descriptions', {})\
            .get('en', {})\
            .get('value', '')

    claims = result.get('claims', {})
    official_website = get_string(claims, 'P856', None)
    if official_website is not None:
        urls.append({'title': 'Official site', 'url': official_website})
        results.append({'title': title, 'url': official_website})

    wikipedia_link_count = 0
    if language != 'en':
        wikipedia_link_count += add_url(urls,
                                        'Wikipedia (' + language + ')',
                                        get_wikilink(result,
                                                     language + 'wiki'))
    wikipedia_en_link = get_wikilink(result, 'enwiki')
    wikipedia_link_count += add_url(urls,
                                    'Wikipedia (en)',
                                    wikipedia_en_link)
    if wikipedia_link_count == 0:
        # No Wikipedia article in the search language or English:
        # link the first available language edition instead.
        misc_language = get_wiki_firstlanguage(result, 'wiki')
        if misc_language is not None:
            add_url(urls,
                    'Wikipedia (' + misc_language + ')',
                    get_wikilink(result, misc_language + 'wiki'))

    if language != 'en':
        add_url(urls,
                'Wiki voyage (' + language + ')',
                get_wikilink(result, language + 'wikivoyage'))
    add_url(urls,
            'Wiki voyage (en)',
            get_wikilink(result, 'enwikivoyage'))

    if language != 'en':
        add_url(urls,
                'Wikiquote (' + language + ')',
                get_wikilink(result, language + 'wikiquote'))
    add_url(urls,
            'Wikiquote (en)',
            get_wikilink(result, 'enwikiquote'))

    add_url(urls,
            'Commons wiki',
            get_wikilink(result, 'commonswiki'))

    # P625: coordinate location
    add_url(urls,
            'Location',
            get_geolink(claims, 'P625', None))

    add_url(urls,
            'Wikidata',
            'https://www.wikidata.org/wiki/'
            + wikidata_id + '?uselang=' + language)

    musicbrainz_work_id = get_string(claims, 'P435')
    if musicbrainz_work_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/work/'
                + musicbrainz_work_id)

    musicbrainz_artist_id = get_string(claims, 'P434')
    if musicbrainz_artist_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/artist/'
                + musicbrainz_artist_id)

    musicbrainz_release_group_id = get_string(claims, 'P436')
    if musicbrainz_release_group_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/release-group/'
                + musicbrainz_release_group_id)

    musicbrainz_label_id = get_string(claims, 'P966')
    if musicbrainz_label_id is not None:
        add_url(urls,
                'MusicBrainz',
                'http://musicbrainz.org/label/'
                + musicbrainz_label_id)

    # musicbrainz_area_id = get_string(claims, 'P982')
    # P1407 MusicBrainz series ID
    # P1004 MusicBrainz place ID
    # P1330 MusicBrainz instrument ID

    postal_code = get_string(claims, 'P281', None)
    if postal_code is not None:
        attributes.append({'label': 'Postal code(s)', 'value': postal_code})

    date_of_birth = get_time(claims, 'P569', None)
    if date_of_birth is not None:
        # Wikidata times look like '+00000001952-03-11T00:00:00Z'; strip the
        # padded sign/year prefix before parsing, then format per locale.
        date_of_birth = dateutil.parser.parse(date_of_birth[8:])\
            .strftime(locale.nl_langinfo(locale.D_FMT))
        attributes.append({'label': 'Date of birth', 'value': date_of_birth})

    date_of_death = get_time(claims, 'P570', None)
    if date_of_death is not None:
        date_of_death = dateutil.parser.parse(date_of_death[8:])\
            .strftime(locale.nl_langinfo(locale.D_FMT))
        attributes.append({'label': 'Date of death', 'value': date_of_death})

    # An entry with no attributes, no description and only two links would
    # make a near-empty infobox; return a plain link result instead.
    if len(attributes) == 0 and len(urls) == 2 and len(description) == 0:
        results.append({
            'url': urls[0]['url'],
            'title': title,
            'content': description
        })
    else:
        results.append({
            'infobox': title,
            'id': wikipedia_en_link,
            'content': description,
            'attributes': attributes,
            'urls': urls
        })

    return results


def add_url(urls, title, url):
    if url is not None:
        urls.append({'title': title, 'url': url})
        return 1
    else:
        return 0


def get_mainsnak(claims, propertyName):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return None

    propValue = propValue[0].get('mainsnak', None)
    return propValue


def get_string(claims, propertyName, defaultValue=None):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return defaultValue

    result = []
    for e in propValue:
        mainsnak = e.get('mainsnak', {})
        datavalue = mainsnak.get('datavalue', {})
        if datavalue:
            result.append(datavalue.get('value', ''))

    if len(result) == 0:
        return defaultValue
    else:
        # TODO handle multiple urls
        return result[0]


def get_time(claims, propertyName, defaultValue=None):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return defaultValue

    result = []
    for e in propValue:
        mainsnak = e.get('mainsnak', {})
        datavalue = mainsnak.get('datavalue', {})
        if datavalue:
            value = datavalue.get('value', {})
            result.append(value.get('time', ''))

    if len(result) == 0:
        return defaultValue
    else:
        return ', '.join(result)
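
# Shape of the claim records the two helpers above walk, abbreviated from a
# wbgetentities response (times carry a sign and a zero-padded year):
#
#   claims = {'P569': [{'mainsnak': {'datavalue': {'value': {
#       'time': '+00000001952-03-11T00:00:00Z', 'precision': 11}}}}]}
#   get_time(claims, 'P569')  # -> '+00000001952-03-11T00:00:00Z'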


def get_geolink(claims, propertyName, defaultValue=None):
    mainsnak = get_mainsnak(claims, propertyName)

    if mainsnak is None:
        return defaultValue

    datatype = mainsnak.get('datatype', '')
    datavalue = mainsnak.get('datavalue', {})

    if datatype != 'globe-coordinate':
        return defaultValue

    value = datavalue.get('value', {})
    precision = value.get('precision', 0.0002)

    # there is no zoom information, deduce from precision (error prone)
    # samples:
    # 13 --> 5
    # 1 --> 6
    # 0.016666666666667 --> 9
    # 0.00027777777777778 --> 19
    # wolframalpha:
    # quadratic fit { {13, 5}, {1, 6}, {0.0166666, 9}, {0.0002777777, 19} }
    # 14.1186 - 8.8322 x + 0.625447 x^2
    if precision < 0.0003:
        zoom = 19
    else:
        zoom = int(15 - precision * 8.8322 + precision * precision * 0.625447)

    url = url_map\
        .replace('{latitude}', str(value.get('latitude', 0)))\
        .replace('{longitude}', str(value.get('longitude', 0)))\
        .replace('{zoom}', str(zoom))

    return url


def get_wikilink(result, wikiid):
    url = result.get('sitelinks', {}).get(wikiid, {}).get('url', None)
    if url is None:
        return url
    elif url.startswith('http://'):
        # Normalize sitelink URLs to https.
        url = url.replace('http://', 'https://', 1)
    elif url.startswith('//'):
        url = 'https:' + url
    return url


def get_wiki_firstlanguage(result, wikipatternid):
    # Return the first two-letter language code that has a sitelink of the
    # given kind (e.g. 'fr' for 'frwiki'), or None if there is none.
    for k in result.get('sitelinks', {}).keys():
        if k.endswith(wikipatternid) and len(k) == (2 + len(wikipatternid)):
            return k[0:2]
    return None
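

# Minimal standalone sketch of the request/response round trip. The
# SimpleNamespace stand-in and the hard-coded 'en' language are assumptions:
# in normal operation the surrounding framework builds the real response
# object and search parameters. Requires network access to wikidata.org.
if __name__ == '__main__':
    from types import SimpleNamespace

    params = request('Douglas Adams', {})          # build the search URL
    raw = get(params['url'])                       # fetch it ourselves
    resp = SimpleNamespace(text=raw.text,          # fake response object
                           search_params={'language': 'en'})
    for r in response(resp):
        print(r.get('title') or r.get('infobox'), '->', r.get('url', ''))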