# wikidata.py

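
# Wikidata results module: request() builds a search query against the
# www.wikidata.org API for the user's terms, and response() resolves the
# matching entity IDs into infobox-style results with links and attributes.
# The request()/response() pair and the resp.search_params attribute suggest
# a searx-style engine module (an assumption; the file itself does not say
# which host application it belongs to).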
import json
from requests import get
from urllib import urlencode
from datetime import datetime

resultCount = 2

urlSearch = 'https://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectiontitle&{query}'
urlDetail = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=labels%7Cinfo%7Csitelinks%7Csitelinks%2Furls%7Cdescriptions%7Cclaims&{query}'
urlMap = 'https://www.openstreetmap.org/?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'
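
# urlSearch finds matching entity IDs through the MediaWiki search API,
# urlDetail fetches labels, descriptions, sitelinks and claims for those IDs
# via wbgetentities, and urlMap is used by get_geolink() to turn a coordinate
# claim into an OpenStreetMap link.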

def request(query, params):
    params['url'] = urlSearch.format(query=urlencode({'srsearch': query, 'srlimit': resultCount}))
    print params['url']  # debug: generated search URL
    return params
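
# For a query such as 'Berlin', the generated URL looks roughly like
# (urlencode may order the parameters differently):
#   https://www.wikidata.org/w/api.php?action=query&list=search&format=json
#       &srnamespace=0&srprop=sectiontitle&srsearch=Berlin&srlimit=2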

def response(resp):
    results = []
    search_res = json.loads(resp.text)

    wikidata_ids = set()
    for r in search_res.get('query', {}).get('search', {}):
        wikidata_ids.add(r.get('title', ''))

    language = resp.search_params['language'].split('_')[0]
    if language == 'all':
        language = 'en'

    url = urlDetail.format(query=urlencode({'ids': '|'.join(wikidata_ids), 'languages': language + '|en'}))

    before = datetime.now()
    htmlresponse = get(url)
    print datetime.now() - before  # debug: time spent on the detail request
    jsonresponse = json.loads(htmlresponse.content)
    for wikidata_id in wikidata_ids:
        results = results + getDetail(jsonresponse, wikidata_id, language)

    return results

def getDetail(jsonresponse, wikidata_id, language):
    result = jsonresponse.get('entities', {}).get(wikidata_id, {})

    title = result.get('labels', {}).get(language, {}).get('value', None)
    if title is None:
        title = result.get('labels', {}).get('en', {}).get('value', wikidata_id)

    results = []
    urls = []
    attributes = []

    description = result.get('descriptions', {}).get(language, {}).get('value', '')
    if description == '':
        description = result.get('descriptions', {}).get('en', {}).get('value', '')

    claims = result.get('claims', {})

    official_website = get_string(claims, 'P856', None)
    if official_website is not None:
        urls.append({'title': 'Official site', 'url': official_website})
        results.append({'title': title, 'url': official_website})

    if language != 'en':
        add_url(urls, 'Wikipedia (' + language + ')', get_wikilink(result, language + 'wiki'))
    wikipedia_en_link = get_wikilink(result, 'enwiki')
    add_url(urls, 'Wikipedia (en)', wikipedia_en_link)

    if language != 'en':
        add_url(urls, 'Wiki voyage (' + language + ')', get_wikilink(result, language + 'wikivoyage'))
    add_url(urls, 'Wiki voyage (en)', get_wikilink(result, 'enwikivoyage'))

    if language != 'en':
        add_url(urls, 'Wikiquote (' + language + ')', get_wikilink(result, language + 'wikiquote'))
    add_url(urls, 'Wikiquote (en)', get_wikilink(result, 'enwikiquote'))

    add_url(urls, 'Commons wiki', get_wikilink(result, 'commonswiki'))

    add_url(urls, 'Location', get_geolink(claims, 'P625', None))

    add_url(urls, 'Wikidata', 'https://www.wikidata.org/wiki/' + wikidata_id + '?uselang=' + language)

    musicbrainz_work_id = get_string(claims, 'P435')
    if musicbrainz_work_id is not None:
        add_url(urls, 'MusicBrainz', 'http://musicbrainz.org/work/' + musicbrainz_work_id)

    musicbrainz_artist_id = get_string(claims, 'P434')
    if musicbrainz_artist_id is not None:
        add_url(urls, 'MusicBrainz', 'http://musicbrainz.org/artist/' + musicbrainz_artist_id)

    musicbrainz_release_group_id = get_string(claims, 'P436')
    if musicbrainz_release_group_id is not None:
        add_url(urls, 'MusicBrainz', 'http://musicbrainz.org/release-group/' + musicbrainz_release_group_id)

    musicbrainz_label_id = get_string(claims, 'P966')
    if musicbrainz_label_id is not None:
        add_url(urls, 'MusicBrainz', 'http://musicbrainz.org/label/' + musicbrainz_label_id)

    # musicbrainz_area_id = get_string(claims, 'P982')
    # P1407 MusicBrainz series ID
    # P1004 MusicBrainz place ID
    # P1330 MusicBrainz instrument ID
    # P1407 MusicBrainz series ID

    postal_code = get_string(claims, 'P281', None)
    if postal_code is not None:
        attributes.append({'label': 'Postal code(s)', 'value': postal_code})

    date_of_birth = get_time(claims, 'P569', None)
    if date_of_birth is not None:
        attributes.append({'label': 'Date of birth', 'value': date_of_birth})

    date_of_death = get_time(claims, 'P570', None)
    if date_of_death is not None:
        attributes.append({'label': 'Date of death', 'value': date_of_death})

    results.append({
        'infobox': title,
        'id': wikipedia_en_link,
        'content': description,
        'attributes': attributes,
        'urls': urls
    })

    return results
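
# getDetail() returns at most two result dicts: an optional plain link result
# pointing at the official website (P856), plus one infobox result carrying
# the description, the attributes and the collected URLs.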

def add_url(urls, title, url):
    if url is not None:
        urls.append({'title': title, 'url': url})
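
# The helpers below walk the claim structures returned by wbgetentities,
# which look roughly like:
#   claims['P856'] = [{'mainsnak': {'datatype': 'url',
#                                   'datavalue': {'value': 'https://...'}}}]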

def get_mainsnak(claims, propertyName):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return None

    propValue = propValue[0].get('mainsnak', None)
    return propValue

def get_string(claims, propertyName, defaultValue=None):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return defaultValue

    result = []
    for e in propValue:
        mainsnak = e.get('mainsnak', {})

        datatype = mainsnak.get('datatype', '')
        datavalue = mainsnak.get('datavalue', {})
        if datavalue is not None:
            result.append(datavalue.get('value', ''))

    if len(result) == 0:
        return defaultValue
    else:
        return ', '.join(result)

def get_time(claims, propertyName, defaultValue=None):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return defaultValue

    result = []
    for e in propValue:
        mainsnak = e.get('mainsnak', {})

        datatype = mainsnak.get('datatype', '')
        datavalue = mainsnak.get('datavalue', {})
        if datavalue is not None:
            value = datavalue.get('value', '')
            result.append(value.get('time', ''))

    if len(result) == 0:
        return defaultValue
    else:
        return ', '.join(result)

def get_geolink(claims, propertyName, defaultValue=''):
    mainsnak = get_mainsnak(claims, propertyName)

    if mainsnak is None:
        return defaultValue

    datatype = mainsnak.get('datatype', '')
    datavalue = mainsnak.get('datavalue', {})

    if datatype != 'globe-coordinate':
        return defaultValue

    value = datavalue.get('value', {})
    precision = value.get('precision', 0.0002)

    # there is no zoom information, deduce from precision (error prone)
    # samples :
    # 13 --> 5
    # 1 --> 6
    # 0.016666666666667 --> 9
    # 0.00027777777777778 --> 19
    # wolframalpha :
    # quadratic fit { {13, 5}, {1, 6}, {0.0166666, 9}, {0.0002777777, 19} }
    # 14.1186 - 8.8322 x + 0.625447 x^2
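    # for example, precision 1 (coordinates rounded to whole degrees) gives
    # int(15 - 8.8322 + 0.625447) = 6 and precision 13 gives 5, matching the
    # samples above; the constant 15 below sits slightly above the fitted
    # 14.1186, nudging the zoom up before truncation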
    if precision < 0.0003:
        zoom = 19
    else:
        zoom = int(15 - precision*8.8322 + precision*precision*0.625447)

    url = urlMap.replace('{latitude}', str(value.get('latitude', 0))).replace('{longitude}', str(value.get('longitude', 0))).replace('{zoom}', str(zoom))

    return url

def get_wikilink(result, wikiid):
    url = result.get('sitelinks', {}).get(wikiid, {}).get('url', None)
    if url is None:
        return url
    elif url.startswith('http://'):
        # only rewrite the scheme, not any later occurrence in the URL
        url = url.replace('http://', 'https://', 1)
    elif url.startswith('//'):
        url = 'https:' + url

    return url
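
# Minimal standalone usage sketch (an assumption: outside of the host
# application the search parameters have to be attached to the response
# object by hand, because response() reads resp.search_params):
if __name__ == '__main__':
    sketch_params = request('Douglas Adams', {})
    sketch_resp = get(sketch_params['url'])
    sketch_resp.search_params = {'language': 'en'}
    print json.dumps(response(sketch_resp), indent=2)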