import json
from datetime import datetime
from requests import get
from urllib.parse import urlencode

resultCount = 2

urlSearch = 'https://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectionsnippet&{query}'
urlDetail = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=labels%7Cinfo%7Csitelinks%7Csitelinks%2Furls%7Cdescriptions%7Cclaims&{query}'

# find the right URL for urlMap
urlMap = 'http://www.openstreetmap.org/?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'
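# Note: the {latitude}, {longitude} and {zoom} placeholders in urlMap are
# filled with str.replace() in get_geolink() below rather than str.format().
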
def request(query, params):
    params['url'] = urlSearch.format(query=urlencode({'srsearch': query,
                                                      'srlimit': resultCount}))
    print(params['url'])
    return params

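# Illustrative only: for query='Paris' with resultCount=2, the built URL ends
# with the encoded pair 'srsearch=Paris&srlimit=2', i.e. roughly
# https://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectionsnippet&srsearch=Paris&srlimit=2
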
def response(resp):
    results = []
    search_res = json.loads(resp.text)

    # TODO parallel http queries
    before = datetime.now()
    for r in search_res.get('query', {}).get('search', []):
        wikidata_id = r.get('title', '')
        results = results + getDetail(wikidata_id)
    after = datetime.now()
    print(str(after - before) + " second(s)")

    return results

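# For reference, the parsed search response looks roughly like (abridged):
# {"query": {"search": [{"ns": 0, "title": "Q90", ...}, ...]}}
# where each "title" in namespace 0 is a Wikidata entity id such as Q90.
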
def getDetail(wikidata_id):
    language = 'fr'  # TODO: take the language from the search preferences

    url = urlDetail.format(query=urlencode({'ids': wikidata_id,
                                            'languages': language + '|en'}))
    print(url)
    api_response = get(url)
    result = json.loads(api_response.content)
    result = result.get('entities', {}).get(wikidata_id, {})

    title = result.get('labels', {}).get(language, {}).get('value', None)
    if title is None:
        title = result.get('labels', {}).get('en', {}).get('value', wikidata_id)

    results = []
    urls = []
    attributes = []

    description = result.get('descriptions', {}).get(language, {}).get('value', '')
    if description == '':
        description = result.get('descriptions', {}).get('en', {}).get('value', '')

    claims = result.get('claims', {})
    official_website = get_string(claims, 'P856', None)
    print(official_website)
    if official_website is not None:
        urls.append({'title': 'Official site', 'url': official_website})
        results.append({'title': title, 'url': official_website})

    if language != 'en':
        add_url(urls, 'Wikipedia (' + language + ')',
                get_wikilink(result, language + 'wiki'))
    wikipedia_en_link = get_wikilink(result, 'enwiki')
    add_url(urls, 'Wikipedia (en)', wikipedia_en_link)

    if language != 'en':
        add_url(urls, 'Wiki voyage (' + language + ')',
                get_wikilink(result, language + 'wikivoyage'))
    add_url(urls, 'Wiki voyage (en)', get_wikilink(result, 'enwikivoyage'))

    if language != 'en':
        add_url(urls, 'Wikiquote (' + language + ')',
                get_wikilink(result, language + 'wikiquote'))
    add_url(urls, 'Wikiquote (en)', get_wikilink(result, 'enwikiquote'))

    add_url(urls, 'Commons wiki', get_wikilink(result, 'commonswiki'))

    add_url(urls, 'Location', get_geolink(claims, 'P625', None))

    add_url(urls, 'Wikidata',
            'https://www.wikidata.org/wiki/' + wikidata_id + '?uselang=' + language)

    postal_code = get_string(claims, 'P281', None)
    if postal_code is not None:
        attributes.append({'label': 'Postal code(s)', 'value': postal_code})

    date_of_birth = get_time(claims, 'P569', None)
    if date_of_birth is not None:
        attributes.append({'label': 'Date of birth', 'value': date_of_birth})

    date_of_death = get_time(claims, 'P570', None)
    if date_of_death is not None:
        attributes.append({'label': 'Date of death', 'value': date_of_death})

    results.append({
        'infobox': title,
        'id': wikipedia_en_link,
        'content': description,
        'attributes': attributes,
        'urls': urls
    })

    return results

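# Shape of the infobox entry built by getDetail() (illustrative values for a
# person item that has an enwiki sitelink):
# {'infobox': 'Douglas Adams',
#  'id': 'https://en.wikipedia.org/wiki/Douglas_Adams',
#  'content': 'English writer and humorist',
#  'attributes': [{'label': 'Date of birth', 'value': '+1952-03-11T00:00:00Z'}],
#  'urls': [{'title': 'Wikipedia (en)', 'url': '...'}, ...]}
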
def add_url(urls, title, url):
    if url is not None:
        urls.append({'title': title, 'url': url})

def get_mainsnak(claims, propertyName):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return None

    propValue = propValue[0].get('mainsnak', None)
    return propValue

def get_string(claims, propertyName, defaultValue=None):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return defaultValue

    result = []
    for e in propValue:
        mainsnak = e.get('mainsnak', {})
        datavalue = mainsnak.get('datavalue', {})
        if datavalue is not None:
            result.append(datavalue.get('value', ''))

    if len(result) == 0:
        return defaultValue
    else:
        return ', '.join(result)

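# Example with a hypothetical claims fragment:
# claims = {'P856': [{'mainsnak': {'datatype': 'url',
#                                  'datavalue': {'value': 'https://example.org'}}}]}
# get_string(claims, 'P856') returns 'https://example.org'; multiple claim
# values are joined with ', '.
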
def get_time(claims, propertyName, defaultValue=None):
    propValue = claims.get(propertyName, {})
    if len(propValue) == 0:
        return defaultValue

    result = []
    for e in propValue:
        mainsnak = e.get('mainsnak', {})
        datavalue = mainsnak.get('datavalue', {})
        if datavalue is not None:
            value = datavalue.get('value', {})
            result.append(value.get('time', ''))

    if len(result) == 0:
        return defaultValue
    else:
        return ', '.join(result)

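# Wikidata serializes time datavalues as '+YYYY-MM-DDThh:mm:ssZ', so get_time()
# returns raw strings such as '+1952-03-11T00:00:00Z', not formatted dates.
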
def get_geolink(claims, propertyName, defaultValue=''):
    mainsnak = get_mainsnak(claims, propertyName)
    if mainsnak is None:
        return defaultValue

    datatype = mainsnak.get('datatype', '')
    datavalue = mainsnak.get('datavalue', {})
    if datatype != 'globe-coordinate':
        return defaultValue

    value = datavalue.get('value', {})
    precision = value.get('precision', 0.0002)

    # there is no zoom information, deduce from precision (error prone)
    # samples :
    # 13 --> 5
    # 1 --> 6
    # 0.016666666666667 --> 9
    # 0.00027777777777778 --> 19
    # wolframalpha : quadratic fit {{13, 5}, {1, 6}, {0.0166666, 9}, {0.00027777, 19}}
    # 14.1186 - 8.8322 x + 0.625447 x^2
    if precision < 0.0003:
        zoom = 19
    else:
        zoom = int(15 - precision * 8.8322 + precision * precision * 0.625447)

    url = urlMap\
        .replace('{latitude}', str(value.get('latitude', 0)))\
        .replace('{longitude}', str(value.get('longitude', 0)))\
        .replace('{zoom}', str(zoom))

    return url

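# Worked example of the zoom heuristic: a precision of 1 degree gives
# zoom = int(15 - 1 * 8.8322 + 1 * 1 * 0.625447) = int(6.79...) = 6,
# matching the sample pair "1 --> 6" above.
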
def get_wikilink(result, wikiid):
    url = result.get('sitelinks', {}).get(wikiid, {}).get('url', None)
    if url is None:
        return url
    elif url.startswith('http://'):
        url = url.replace('http://', 'https://', 1)
    elif url.startswith('//'):
        url = 'https:' + url
    return url

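# Minimal manual smoke test; not part of the engine itself, and it assumes
# network access to wikidata.org. Q90 is the Wikidata item for Paris.
if __name__ == '__main__':
    for entry in getDetail('Q90'):
        print(entry)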