# test_wikipedia.py
  1. # -*- coding: utf-8 -*-
  2. from collections import defaultdict
  3. import mock
  4. from searx.engines import wikipedia
  5. from searx.testing import SearxTestCase
  6. class TestWikipediaEngine(SearxTestCase):
  7. def test_request(self):
  8. wikipedia.supported_languages = ['fr', 'en']
  9. query = 'test_query'
  10. dicto = defaultdict(dict)
  11. dicto['language'] = 'fr-FR'
  12. params = wikipedia.request(query, dicto)
  13. self.assertIn('url', params)
  14. self.assertIn(query, params['url'])
  15. self.assertIn('test_query', params['url'])
  16. self.assertIn('Test_Query', params['url'])
  17. self.assertIn('fr.wikipedia.org', params['url'])
  18. query = 'Test_Query'
  19. params = wikipedia.request(query, dicto)
  20. self.assertIn('Test_Query', params['url'])
  21. self.assertNotIn('test_query', params['url'])
  22. dicto['language'] = 'all'
  23. params = wikipedia.request(query, dicto)
  24. self.assertIn('en', params['url'])
  25. dicto['language'] = 'xx'
  26. params = wikipedia.request(query, dicto)
  27. self.assertIn('en', params['url'])
  28. def test_response(self):
  29. dicto = defaultdict(dict)
  30. dicto['language'] = 'fr'
  31. self.assertRaises(AttributeError, wikipedia.response, None)
  32. self.assertRaises(AttributeError, wikipedia.response, [])
  33. self.assertRaises(AttributeError, wikipedia.response, '')
  34. self.assertRaises(AttributeError, wikipedia.response, '[]')
  35. # page not found
  36. json = """
  37. {
  38. "batchcomplete": "",
  39. "query": {
  40. "normalized": [],
  41. "pages": {
  42. "-1": {
  43. "ns": 0,
  44. "title": "",
  45. "missing": ""
  46. }
  47. }
  48. }
  49. }"""
  50. response = mock.Mock(content=json, search_params=dicto)
  51. self.assertEqual(wikipedia.response(response), [])
  52. # normal case
  53. json = """
  54. {
  55. "batchcomplete": "",
  56. "query": {
  57. "normalized": [],
  58. "pages": {
  59. "12345": {
  60. "pageid": 12345,
  61. "ns": 0,
  62. "title": "The Title",
  63. "extract": "The Title is...",
  64. "thumbnail": {
  65. "source": "img_src.jpg"
  66. },
  67. "pageimage": "img_name.jpg"
  68. }
  69. }
  70. }
  71. }"""
  72. response = mock.Mock(content=json, search_params=dicto)
  73. results = wikipedia.response(response)
  74. self.assertEqual(type(results), list)
  75. self.assertEqual(len(results), 2)
  76. self.assertEqual(results[0]['title'], u'The Title')
  77. self.assertIn('fr.wikipedia.org/wiki/The_Title', results[0]['url'])
  78. self.assertEqual(results[1]['infobox'], u'The Title')
  79. self.assertIn('fr.wikipedia.org/wiki/The_Title', results[1]['id'])
  80. self.assertIn('The Title is...', results[1]['content'])
  81. self.assertEqual(results[1]['img_src'], 'img_src.jpg')
  82. # disambiguation page
  83. json = """
  84. {
  85. "batchcomplete": "",
  86. "query": {
  87. "normalized": [],
  88. "pages": {
  89. "12345": {
  90. "pageid": 12345,
  91. "ns": 0,
  92. "title": "The Title",
  93. "extract": "The Title can be:\\nThe Title 1\\nThe Title 2\\nThe Title 3\\nThe Title 4......................................................................................................................................." """ # noqa
  94. json += """
  95. }
  96. }
  97. }
  98. }"""
  99. response = mock.Mock(content=json, search_params=dicto)
  100. results = wikipedia.response(response)
  101. self.assertEqual(type(results), list)
  102. self.assertEqual(len(results), 0)
  103. # no image
  104. json = """
  105. {
  106. "batchcomplete": "",
  107. "query": {
  108. "normalized": [],
  109. "pages": {
  110. "12345": {
  111. "pageid": 12345,
  112. "ns": 0,
  113. "title": "The Title",
  114. "extract": "The Title is......................................................................................................................................................................................." """ # noqa
  115. json += """
  116. }
  117. }
  118. }
  119. }"""
  120. response = mock.Mock(content=json, search_params=dicto)
  121. results = wikipedia.response(response)
  122. self.assertEqual(type(results), list)
  123. self.assertEqual(len(results), 2)
  124. self.assertIn('The Title is...', results[1]['content'])
  125. self.assertEqual(results[1]['img_src'], None)
  126. # title not in first paragraph
  127. json = u"""
  128. {
  129. "batchcomplete": "",
  130. "query": {
  131. "normalized": [],
  132. "pages": {
  133. "12345": {
  134. "pageid": 12345,
  135. "ns": 0,
  136. "title": "披頭四樂隊",
  137. "extract": "披头士乐队....................................................................................................................................................................................................\\n披頭四樂隊...", """ # noqa
  138. json += """
  139. "thumbnail": {
  140. "source": "img_src.jpg"
  141. },
  142. "pageimage": "img_name.jpg"
  143. }
  144. }
  145. }
  146. }"""
  147. response = mock.Mock(content=json, search_params=dicto)
  148. results = wikipedia.response(response)
  149. self.assertEqual(type(results), list)
  150. self.assertEqual(len(results), 2)
  151. self.assertEqual(results[1]['infobox'], u'披頭四樂隊')
  152. self.assertIn(u'披头士乐队...', results[1]['content'])