  1. # SPDX-License-Identifier: AGPL-3.0-or-later
  2. """
  3. Jisho (the Japanese-English dictionary)
  4. """
  5. import json
  6. from urllib.parse import urlencode, urljoin
  7. # about
  8. about = {
  9. "website": 'https://jisho.org',
  10. "wikidata_id": 'Q24568389',
  11. "official_api_documentation": "https://jisho.org/forum/54fefc1f6e73340b1f160000-is-there-any-kind-of-search-api",
  12. "use_official_api": True,
  13. "require_api_key": False,
  14. "results": 'JSON',
  15. }
  16. categories = ['dictionaries']
  17. paging = False
  18. URL = 'https://jisho.org'
  19. BASE_URL = 'https://jisho.org/word/'
  20. SEARCH_URL = URL + '/api/v1/search/words?{query}'
  21. def request(query, params):
  22. query = urlencode({'keyword': query})
  23. params['url'] = SEARCH_URL.format(query=query)
  24. logger.debug(f"query_url --> {params['url']}")
  25. return params
  26. def response(resp):
  27. results = []
  28. infoboxed = False
  29. search_results = json.loads(resp.text)
  30. pages = search_results.get('data', [])
  31. for page in pages:
  32. # Entries that are purely from Wikipedia are excluded.
  33. if page['senses'][0]['parts_of_speech'][0] != 'Wikipedia definition':
  34. # Process alternative forms
  35. japanese = page['japanese']
  36. alt_forms = []
  37. for title_raw in japanese:
  38. if 'word' not in title_raw:
  39. alt_forms.append(title_raw['reading'])
  40. else:
  41. title = title_raw['word']
  42. if 'reading' in title_raw:
  43. title += ' (' + title_raw['reading'] + ')'
  44. alt_forms.append(title)
  45. # Process definitions
  46. definitions = []
  47. def_raw = page['senses']
  48. for defn_raw in def_raw:
  49. extra = ''
  50. if not infoboxed:
  51. # Extra data. Since they're not documented, this implementation is based solely by the author's assumptions.
  52. if defn_raw['tags'] != []:
  53. if defn_raw['info'] != []:
  54. extra += defn_raw['tags'][0] + ', ' + defn_raw['info'][0] + '. ' # "usually written as kana: <kana>"
  55. else:
  56. extra += ', '.join(defn_raw['tags']) + '. ' # abbreviation, archaism, etc.
  57. elif defn_raw['info'] != []:
  58. extra += ', '.join(defn_raw['info']).capitalize() + '. ' # inconsistent
  59. if defn_raw['restrictions'] != []:
  60. extra += 'Only applies to: ' + ', '.join(defn_raw['restrictions']) + '. '
  61. extra = extra[:-1]
  62. definitions.append((
  63. ', '.join(defn_raw['parts_of_speech']),
  64. '; '.join(defn_raw['english_definitions']),
  65. extra
  66. ))
  67. content = ''
  68. infobox_content = '''
  69. <small><a href="https://www.edrdg.org/wiki/index.php/JMdict-EDICT_Dictionary_Project">JMdict</a>
  70. and <a href="https://www.edrdg.org/enamdict/enamdict_doc.html">JMnedict</a>
  71. by <a href="https://www.edrdg.org/edrdg/licence.html">EDRDG</a>, CC BY-SA 3.0.</small><ul>
  72. '''
  73. for pos, engdef, extra in definitions:
  74. if pos == 'Wikipedia definition':
  75. infobox_content += '</ul><small>Wikipedia, CC BY-SA 3.0.</small><ul>'
  76. if pos == '':
  77. infobox_content += f"<li>{engdef}"
  78. else:
  79. infobox_content += f"<li><i>{pos}</i>: {engdef}"
  80. if extra != '':
  81. infobox_content += f" ({extra})"
  82. infobox_content += '</li>'
  83. content += f"{engdef}. "
  84. infobox_content += '</ul>'
  85. # For results, we'll return the URL, all alternative forms (as title),
  86. # and all definitions (as description) truncated to 300 characters.
  87. results.append({
  88. 'url': urljoin(BASE_URL, page['slug']),
  89. 'title': ", ".join(alt_forms),
  90. 'content': content[:300] + (content[300:] and '...')
  91. })
  92. # Like Wordnik, we'll return the first result in an infobox too.
  93. if not infoboxed:
  94. infoboxed = True
  95. infobox_urls = []
  96. infobox_urls.append({
  97. 'title': 'Jisho.org',
  98. 'url': urljoin(BASE_URL, page['slug'])
  99. })
  100. infobox = {
  101. 'infobox': alt_forms[0],
  102. 'urls': infobox_urls
  103. }
  104. alt_forms.pop(0)
  105. alt_content = ''
  106. if len(alt_forms) > 0:
  107. alt_content = '<p><i>Other forms:</i> '
  108. alt_content += ", ".join(alt_forms)
  109. alt_content += '</p>'
  110. infobox['content'] = alt_content + infobox_content
  111. results.append(infobox)
  112. return results