google_news.py

## Google (News)
#
# @website     https://www.google.com
# @provide-api yes (https://developers.google.com/web-search/docs/),
#              deprecated!
#
# @using-api   yes
# @results     JSON
# @stable      yes (but deprecated)
# @parse       url, title, content, publishedDate

from urllib import urlencode
from json import loads
from dateutil import parser

# engine dependent config
categories = ['news']
paging = True
language_support = True

# search-url
url = 'https://ajax.googleapis.com/'
search_url = url + 'ajax/services/search/news?v=2.0&start={offset}&rsz=large&safe=off&filter=off&{query}&hl={language}'  # noqa
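# For example (illustrative values, not taken from a real request), a query
# "searx" on page 1 with language en-US would expand to:
#   https://ajax.googleapis.com/ajax/services/search/news?v=2.0&start=0&rsz=large&safe=off&filter=off&q=searx&hl=en-US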


# do search-request
def request(query, params):
    offset = (params['pageno'] - 1) * 8

    language = 'en-US'
    if params['language'] != 'all':
        language = params['language'].replace('_', '-')

    params['url'] = search_url.format(offset=offset,
                                      query=urlencode({'q': query}),
                                      language=language)

    return params
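
# Illustrative usage sketch (assumed caller, not part of this module): searx
# passes in a params dict it has already prepared, roughly like
#
#   params = {'pageno': 2, 'language': 'de_DE'}
#   request('searx', params)
#
# after which params['url'] points at the endpoint above with start=8,
# q=searx and hl=de-DE.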


# get response from search-request
def response(resp):
    results = []

    search_res = loads(resp.text)

    # return empty array if there are no results
    if not search_res.get('responseData', {}).get('results'):
        return []

    # parse results
    for result in search_res['responseData']['results']:
        # parse publishedDate
        publishedDate = parser.parse(result['publishedDate'])

        # append result
        results.append({'url': result['unescapedUrl'],
                        'title': result['titleNoFormatting'],
                        'publishedDate': publishedDate,
                        'content': result['content']})

    # return results
    return results
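
# Illustrative sketch of the JSON shape response() expects; the field names
# come from the parsing code above, the values are made up:
#
#   {"responseData": {"results": [
#       {"unescapedUrl": "https://example.com/article",
#        "titleNoFormatting": "Example headline",
#        "publishedDate": "Mon, 06 Jan 2014 08:00:00 -0800",
#        "content": "Snippet text ..."}]}}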