"""
Google (News)

@website     https://www.google.com
@provide-api yes (https://developers.google.com/web-search/docs/),
             deprecated!

@using-api   yes
@results     JSON
@stable      yes (but deprecated)
@parse       url, title, content, publishedDate
"""

from urllib import urlencode
from json import loads
from dateutil import parser
  14. # search-url
  15. categories = ['news']
  16. paging = True
  17. language_support = True
  18. # engine dependent config
  19. url = 'https://ajax.googleapis.com/'
  20. search_url = url + 'ajax/services/search/news?v=2.0&start={offset}&rsz=large&safe=off&filter=off&{query}&hl={lang}'
  21. # do search-request
  22. def request(query, params):
  23. offset = (params['pageno'] - 1) * 8
  24. language = 'en-US'
  25. if params['language'] != 'all':
  26. language = params['language'].replace('_', '-')
  27. params['url'] = search_url.format(offset=offset,
  28. query=urlencode({'q': query}),
  29. lang=language)
  30. return params
  31. # get response from search-request
  32. def response(resp):
  33. results = []
  34. search_res = loads(resp.text)
  35. # return empty array if there are no results
  36. if not search_res.get('responseData', {}).get('results'):
  37. return []
  38. # parse results
  39. for result in search_res['responseData']['results']:
  40. # parse publishedDate
  41. publishedDate = parser.parse(result['publishedDate'])
  42. if 'url' not in result:
  43. continue
  44. # append result
  45. results.append({'url': result['unescapedUrl'],
  46. 'title': result['titleNoFormatting'],
  47. 'publishedDate': publishedDate,
  48. 'content': result['content']})
  49. # return results
  50. return results