openstreetmap.py

# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""OpenStreetMap (Map)
"""

import re
from json import loads
from urllib.parse import urlencode
from functools import partial

from flask_babel import gettext

from searx.data import OSM_KEYS_TAGS, CURRENCIES
from searx.utils import searx_useragent
from searx.external_urls import get_external_url
from searx.engines.wikidata import send_wikidata_query, sparql_string_escape, get_thumbnail

# about
about = {
    "website": 'https://www.openstreetmap.org/',
    "wikidata_id": 'Q936',
    "official_api_documentation": 'http://wiki.openstreetmap.org/wiki/Nominatim',
    "use_official_api": True,
    "require_api_key": False,
    "results": 'JSON',
}

# engine dependent config
categories = ['map']
paging = False
language_support = True
send_accept_language_header = True

# search-url
base_url = 'https://nominatim.openstreetmap.org/'
search_string = 'search?{query}&polygon_geojson=1&format=jsonv2&addressdetails=1&extratags=1&dedupe=1'
result_id_url = 'https://openstreetmap.org/{osm_type}/{osm_id}'
result_lat_lon_url = 'https://www.openstreetmap.org/?mlat={lat}&mlon={lon}&zoom={zoom}&layers=M'

route_url = 'https://graphhopper.com/maps/?point={}&point={}&locale=en-US&vehicle=car&weighting=fastest&turn_costs=true&use_miles=false&layer=Omniscale'  # pylint: disable=line-too-long
route_re = re.compile('(?:from )?(.+) to (.+)')
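
# Illustrative note (not part of the upstream code): route_re is matched against the raw
# query string, so a query like "from Berlin to Hamburg" (or just "Berlin to Hamburg")
# yields two groups that request()/response() interpolate into route_url, roughly:
#
#   m = route_re.match('from Berlin to Hamburg')
#   # m.groups() == ('Berlin', 'Hamburg')
#   # route_url.format(*m.groups())
#   # -> 'https://graphhopper.com/maps/?point=Berlin&point=Hamburg&...'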

wikidata_image_sparql = """
select ?item ?itemLabel ?image ?sign ?symbol ?website ?wikipediaName
where {
  hint:Query hint:optimizer "None".
  values ?item { %WIKIDATA_IDS% }
  OPTIONAL { ?item wdt:P18|wdt:P8517|wdt:P4291|wdt:P5252|wdt:P3451|wdt:P4640|wdt:P5775|wdt:P2716|wdt:P1801|wdt:P4896 ?image }
  OPTIONAL { ?item wdt:P1766|wdt:P8505|wdt:P8667 ?sign }
  OPTIONAL { ?item wdt:P41|wdt:P94|wdt:P154|wdt:P158|wdt:P2910|wdt:P4004|wdt:P5962|wdt:P8972 ?symbol }
  OPTIONAL { ?item wdt:P856 ?website }
  SERVICE wikibase:label {
    bd:serviceParam wikibase:language "%LANGUAGE%,en".
    ?item rdfs:label ?itemLabel .
  }
  OPTIONAL {
    ?wikipediaUrl schema:about ?item;
                  schema:isPartOf/wikibase:wikiGroup "wikipedia";
                  schema:name ?wikipediaName;
                  schema:inLanguage "%LANGUAGE%" .
  }
}
ORDER by ?item
"""
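
# Illustrative note: %WIKIDATA_IDS% and %LANGUAGE% are plain text placeholders replaced via
# str.replace() in fetch_wikidata() below (they are not SPARQL bind parameters).  For two
# hypothetical items and a French UI language the substituted lines would read roughly:
#
#   values ?item { wd:Q243 wd:Q64 }
#   bd:serviceParam wikibase:language "fr,en".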

# keys whose values are links: mapping functions
# 'mapillary': P1947
# but https://github.com/kartaview/openstreetcam.org/issues/60
# but https://taginfo.openstreetmap.org/keys/kartaview ...


def value_to_https_link(value):
    http = 'http://'
    if value.startswith(http):
        value = 'https://' + value[len(http) :]
    return (value, value)


def value_to_website_link(value):
    value = value.split(';')[0]
    return (value, value)


def value_wikipedia_link(value):
    value = value.split(':', 1)
    return ('https://{0}.wikipedia.org/wiki/{1}'.format(*value), '{1} ({0})'.format(*value))


def value_with_prefix(prefix, value):
    return (prefix + value, value)


VALUE_TO_LINK = {
    'website': value_to_website_link,
    'contact:website': value_to_website_link,
    'email': partial(value_with_prefix, 'mailto:'),
    'contact:email': partial(value_with_prefix, 'mailto:'),
    'contact:phone': partial(value_with_prefix, 'tel:'),
    'phone': partial(value_with_prefix, 'tel:'),
    'fax': partial(value_with_prefix, 'fax:'),
    'contact:fax': partial(value_with_prefix, 'fax:'),
    'contact:mastodon': value_to_https_link,
    'facebook': value_to_https_link,
    'contact:facebook': value_to_https_link,
    'contact:foursquare': value_to_https_link,
    'contact:instagram': value_to_https_link,
    'contact:linkedin': value_to_https_link,
    'contact:pinterest': value_to_https_link,
    'contact:telegram': value_to_https_link,
    'contact:tripadvisor': value_to_https_link,
    'contact:twitter': value_to_https_link,
    'contact:yelp': value_to_https_link,
    'contact:youtube': value_to_https_link,
    'contact:webcam': value_to_website_link,
    'wikipedia': value_wikipedia_link,
    'wikidata': partial(value_with_prefix, 'https://wikidata.org/wiki/'),
    'brand:wikidata': partial(value_with_prefix, 'https://wikidata.org/wiki/'),
}
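
# Illustrative sketch (assumed tag values): every VALUE_TO_LINK entry maps a raw extratags
# value to an (url, label) pair, for example:
#
#   VALUE_TO_LINK['email']('contact@example.org')
#   # -> ('mailto:contact@example.org', 'contact@example.org')
#   value_wikipedia_link('en:Eiffel Tower')
#   # -> ('https://en.wikipedia.org/wiki/Eiffel Tower', 'Eiffel Tower (en)')
#   value_to_https_link('http://example.org')
#   # -> ('https://example.org', 'https://example.org')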

KEY_ORDER = [
    'cuisine',
    'organic',
    'delivery',
    'delivery:covid19',
    'opening_hours',
    'opening_hours:covid19',
    'fee',
    'payment:*',
    'currency:*',
    'outdoor_seating',
    'bench',
    'wheelchair',
    'level',
    'building:levels',
    'bin',
    'public_transport',
    'internet_access:ssid',
]

KEY_RANKS = {k: i for i, k in enumerate(KEY_ORDER)}
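
# Illustrative note: KEY_RANKS maps each KEY_ORDER entry to its index, i.e.
# {'cuisine': 0, 'organic': 1, ..., 'internet_access:ssid': 16}; get_key_rank() below uses
# it to sort the displayed extratags and to drop keys that are not listed at all.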


def request(query, params):
    """do search-request"""
    params['url'] = base_url + search_string.format(query=urlencode({'q': query}))
    params['route'] = route_re.match(query)
    params['headers']['User-Agent'] = searx_useragent()
    if 'Accept-Language' not in params['headers']:
        params['headers']['Accept-Language'] = 'en'
    return params
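
# Illustrative sketch of the request this builds: for the query "Eiffel Tower",
# params['url'] becomes
#   https://nominatim.openstreetmap.org/search?q=Eiffel+Tower&polygon_geojson=1
#       &format=jsonv2&addressdetails=1&extratags=1&dedupe=1
# and params['route'] is None because the query contains no " to " part.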


def response(resp):
    """get response from search-request"""
    results = []
    nominatim_json = loads(resp.text)
    user_language = resp.search_params['language']

    if resp.search_params['route']:
        results.append(
            {
                'answer': gettext('Get directions'),
                'url': route_url.format(*resp.search_params['route'].groups()),
            }
        )

    # simplify the code below: make sure extratags is a dictionary
    for result in nominatim_json:
        if not isinstance(result.get('extratags'), dict):
            result["extratags"] = {}

    # fetch data from wikidata
    fetch_wikidata(nominatim_json, user_language)

    # create results
    for result in nominatim_json:
        title, address = get_title_address(result)

        # ignore result without title
        if not title:
            continue

        url, osm, geojson = get_url_osm_geojson(result)
        img_src = get_thumbnail(get_img_src(result))
        links, link_keys = get_links(result, user_language)
        data = get_data(result, user_language, link_keys)

        results.append(
            {
                'template': 'map.html',
                'title': title,
                'address': address,
                'address_label': get_key_label('addr', user_language),
                'url': url,
                'osm': osm,
                'geojson': geojson,
                'img_src': img_src,
                'links': links,
                'data': data,
                'type': get_tag_label(result.get('category'), result.get('type', ''), user_language),
                'type_icon': result.get('icon'),
                'content': '',
                'longitude': result['lon'],
                'latitude': result['lat'],
                'boundingbox': result['boundingbox'],
            }
        )

    return results
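
# Illustrative sketch (assumed shape, not a real answer) of the Nominatim jsonv2 fields
# response() relies on; real results carry many more keys:
#
#   {
#       "osm_type": "node", "osm_id": 123, "lat": "48.85", "lon": "2.29",
#       "category": "tourism", "type": "attraction", "icon": "...",
#       "display_name": "...", "boundingbox": ["48.84", "48.86", "2.28", "2.30"],
#       "address": {...}, "extratags": {...}, "geojson": {...}
#   }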


def get_wikipedia_image(raw_value):
    if not raw_value:
        return None
    return get_external_url('wikimedia_image', raw_value)


def fetch_wikidata(nominatim_json, user_language):
    """Update nominatim_json using the result of a single query to wikidata

    For result in nominatim_json:
        If result['extratags']['wikidata'] or r['extratags']['wikidata link']:
            Set result['wikidata'] to { 'image': ..., 'image_sign': ..., 'image_symbol': ... }
            Set result['extratags']['wikipedia'] if not defined
            Set result['extratags']['contact:website'] if not defined
    """
    wikidata_ids = []
    wd_to_results = {}
    for result in nominatim_json:
        extratags = result['extratags']
        # ignore brand:wikidata
        wd_id = extratags.get('wikidata', extratags.get('wikidata link'))
        if wd_id and wd_id not in wikidata_ids:
            wikidata_ids.append('wd:' + wd_id)
            wd_to_results.setdefault(wd_id, []).append(result)

    if wikidata_ids:
        user_language = 'en' if user_language == 'all' else user_language.split('-')[0]
        wikidata_ids_str = " ".join(wikidata_ids)
        query = wikidata_image_sparql.replace('%WIKIDATA_IDS%', sparql_string_escape(wikidata_ids_str)).replace(
            '%LANGUAGE%', sparql_string_escape(user_language)
        )
        wikidata_json = send_wikidata_query(query)
        for wd_result in wikidata_json.get('results', {}).get('bindings', {}):
            wd_id = wd_result['item']['value'].replace('http://www.wikidata.org/entity/', '')
            for result in wd_to_results.get(wd_id, []):
                result['wikidata'] = {
                    'itemLabel': wd_result['itemLabel']['value'],
                    'image': get_wikipedia_image(wd_result.get('image', {}).get('value')),
                    'image_sign': get_wikipedia_image(wd_result.get('sign', {}).get('value')),
                    'image_symbol': get_wikipedia_image(wd_result.get('symbol', {}).get('value')),
                }
                # overwrite wikipedia link
                wikipedia_name = wd_result.get('wikipediaName', {}).get('value')
                if wikipedia_name:
                    result['extratags']['wikipedia'] = user_language + ':' + wikipedia_name
                # get website if not already defined
                website = wd_result.get('website', {}).get('value')
                if (
                    website
                    and not result['extratags'].get('contact:website')
                    and not result['extratags'].get('website')
                ):
                    result['extratags']['contact:website'] = website
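
# Illustrative note: send_wikidata_query() is expected to return standard SPARQL JSON
# results, so each binding looks roughly like
#   {'item': {'value': 'http://www.wikidata.org/entity/Q243'},
#    'itemLabel': {'value': '...'}, 'image': {'value': '...'}, ...}
# which is why fetch_wikidata() strips the entity URL prefix to recover the plain Q-id.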


def get_title_address(result):
    """Return title and address

    title may be None
    """
    address_raw = result.get('address')
    address_name = None
    address = {}

    # get name
    if (
        result['category'] == 'amenity'
        or result['category'] == 'shop'
        or result['category'] == 'tourism'
        or result['category'] == 'leisure'
    ):
        if address_raw.get('address29'):
            # https://github.com/osm-search/Nominatim/issues/1662
            address_name = address_raw.get('address29')
        else:
            address_name = address_raw.get(result['category'])
    elif result['type'] in address_raw:
        address_name = address_raw.get(result['type'])

    # add the rest of the address data, if a name was found
    if address_name:
        title = address_name
        address.update(
            {
                'name': address_name,
                'house_number': address_raw.get('house_number'),
                'road': address_raw.get('road'),
                'locality': address_raw.get(
                    'city', address_raw.get('town', address_raw.get('village'))  # noqa
                ),  # noqa
                'postcode': address_raw.get('postcode'),
                'country': address_raw.get('country'),
                'country_code': address_raw.get('country_code'),
            }
        )
    else:
        title = result.get('display_name')

    return title, address
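
# Illustrative sketch (assumed input): for a result with category 'amenity' whose Nominatim
# address object is {'amenity': 'Chez Example', 'road': 'Rue X', 'city': 'Paris', ...},
# get_title_address() returns title='Chez Example' plus an address dict filled from the
# name/house_number/road/locality/postcode/country fields; when no name can be derived it
# falls back to display_name and an empty address dict.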


def get_url_osm_geojson(result):
    """Get url, osm and geojson"""
    osm_type = result.get('osm_type', result.get('type'))
    if 'osm_id' not in result:
        # see https://github.com/osm-search/Nominatim/issues/1521
        # query example: "EC1M 5RF London"
        url = result_lat_lon_url.format(lat=result['lat'], lon=result['lon'], zoom=12)
        osm = {}
    else:
        url = result_id_url.format(osm_type=osm_type, osm_id=result['osm_id'])
        osm = {'type': osm_type, 'id': result['osm_id']}

    geojson = result.get('geojson')
    # if no geojson is found and osm_type is a node, add geojson Point
    if not geojson and osm_type == 'node':
        geojson = {'type': 'Point', 'coordinates': [result['lon'], result['lat']]}

    return url, osm, geojson
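
# Illustrative sketch: a result with osm_type='node' and osm_id=123 maps to
#   ('https://openstreetmap.org/node/123',
#    {'type': 'node', 'id': 123},
#    {'type': 'Point', 'coordinates': [lon, lat]})  # Point only if Nominatim sent no geojson
# while a result without osm_id falls back to a plain lat/lon map URL and an empty osm dict.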


def get_img_src(result):
    """Get image URL from either wikidata or r['extratags']"""
    # wikidata
    img_src = None
    if 'wikidata' in result:
        img_src = result['wikidata']['image']
        if not img_src:
            img_src = result['wikidata']['image_symbol']
        if not img_src:
            img_src = result['wikidata']['image_sign']

    # img_src
    extratags = result['extratags']
    if not img_src and extratags.get('image'):
        img_src = extratags['image']
        del extratags['image']
    if not img_src and extratags.get('wikimedia_commons'):
        img_src = get_external_url('wikimedia_image', extratags['wikimedia_commons'])
        del extratags['wikimedia_commons']

    return img_src


def get_links(result, user_language):
    """Return links from result['extratags']"""
    links = []
    link_keys = set()
    extratags = result['extratags']
    if not extratags:
        # minor optimization: no need to check VALUE_TO_LINK if extratags is empty
        return links, link_keys
    for k, mapping_function in VALUE_TO_LINK.items():
        raw_value = extratags.get(k)
        if not raw_value:
            continue
        url, url_label = mapping_function(raw_value)
        if url.startswith('https://wikidata.org'):
            url_label = result.get('wikidata', {}).get('itemLabel') or url_label
        links.append(
            {
                'label': get_key_label(k, user_language),
                'url': url,
                'url_label': url_label,
            }
        )
        link_keys.add(k)
    return links, link_keys
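
# Illustrative sketch (assumed extratags): with extratags={'website': 'https://example.org/'}
# get_links() yields a single entry such as
#   {'label': <localized label for 'website'>, 'url': 'https://example.org/',
#    'url_label': 'https://example.org/'}
# and link_keys={'website'}, which get_data() later uses to skip that key.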


def get_data(result, user_language, ignore_keys):
    """Return key, value of result['extratags']

    Must be called after get_links

    Note: the values are not translated
    """
    data = []
    for k, v in result['extratags'].items():
        if k in ignore_keys:
            continue
        if get_key_rank(k) is None:
            continue
        k_label = get_key_label(k, user_language)
        if k_label:
            data.append(
                {
                    'label': k_label,
                    'key': k,
                    'value': v,
                }
            )
    data.sort(key=lambda entry: (get_key_rank(entry['key']), entry['label']))
    return data


def get_key_rank(k):
    """Get OSM key rank

    The rank defines in which order the keys are displayed in the HTML result
    """
    key_rank = KEY_RANKS.get(k)
    if key_rank is None:
        # "payment:*" in KEY_ORDER matches "payment:cash", "payment:debit card", etc...
        key_rank = KEY_RANKS.get(k.split(':')[0] + ':*')
    return key_rank
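
# Illustrative note: thanks to the wildcard fallback, get_key_rank('payment:cash') and
# get_key_rank('payment:mastercard') both resolve to the rank of 'payment:*' (7 with the
# current KEY_ORDER), while a key absent from KEY_ORDER returns None and is not displayed.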


def get_label(labels, lang):
    """Get label from labels in OSM_KEYS_TAGS

    in OSM_KEYS_TAGS, labels have key == '*'
    """
    tag_label = labels.get(lang.lower())
    if tag_label is None:
        # example: if 'zh-hk' is not found, check 'zh'
        tag_label = labels.get(lang.split('-')[0])
    if tag_label is None and lang != 'en':
        # example: if 'zh' is not found, check 'en'
        tag_label = labels.get('en')
    if tag_label is None and len(labels.values()) > 0:
        # example: if still not found, use the first entry
        # (dict views are not subscriptable, so iterate instead of indexing)
        tag_label = next(iter(labels.values()))
    return tag_label


def get_tag_label(tag_category, tag_name, lang):
    """Get tag label from OSM_KEYS_TAGS"""
    tag_name = '' if tag_name is None else tag_name
    tag_labels = OSM_KEYS_TAGS['tags'].get(tag_category, {}).get(tag_name, {})
    return get_label(tag_labels, lang)


def get_key_label(key_name, lang):
    """Get key label from OSM_KEYS_TAGS"""
    if key_name.startswith('currency:'):
        # currency:EUR --> get the name from the CURRENCIES variable
        # see https://wiki.openstreetmap.org/wiki/Key%3Acurrency
        # and for example https://taginfo.openstreetmap.org/keys/currency:EUR#values
        # but there is also currency=EUR (currently not handled)
        # https://taginfo.openstreetmap.org/keys/currency#values
        currency = key_name.split(':')
        if len(currency) > 1:
            o = CURRENCIES['iso4217'].get(currency[1])
            if o:
                return get_label(o, lang).lower()
            return currency[1]

    labels = OSM_KEYS_TAGS['keys']
    for k in key_name.split(':') + ['*']:
        labels = labels.get(k)
        if labels is None:
            return None
    return get_label(labels, lang)
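
# Illustrative sketch (assumed CURRENCIES content): get_key_label('currency:EUR', 'en') is
# expected to return the lower-cased label stored under CURRENCIES['iso4217']['EUR'], or the
# bare code 'EUR' when that table has no entry; a regular key such as 'contact:website'
# instead walks OSM_KEYS_TAGS['keys']['contact']['website']['*'] to find its label.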