google.py 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403
  1. # Google (Web)
  2. #
  3. # @website https://www.google.com
  4. # @provide-api yes (https://developers.google.com/custom-search/)
  5. #
  6. # @using-api no
  7. # @results HTML
  8. # @stable no (HTML can change)
  9. # @parse url, title, content, suggestion
  10. import re
  11. from urllib import urlencode
  12. from urlparse import urlparse, parse_qsl
  13. from lxml import html
  14. from searx.poolrequests import get
  15. from searx.engines.xpath import extract_text, extract_url
# engine dependent config
categories = ['general']
paging = True
language_support = True
use_locale_domain = True

# based on https://en.wikipedia.org/wiki/List_of_Google_domains and tests
default_hostname = 'www.google.com'

# map a country code (derived from the selected language) to the
# matching google search domain
country_to_hostname = {
    'BG': 'www.google.bg',  # Bulgaria
    'CZ': 'www.google.cz',  # Czech Republic
    'DE': 'www.google.de',  # Germany
    'DK': 'www.google.dk',  # Denmark
    'AT': 'www.google.at',  # Austria
    'CH': 'www.google.ch',  # Switzerland
    'GR': 'www.google.gr',  # Greece
    'AU': 'www.google.com.au',  # Australia
    'CA': 'www.google.ca',  # Canada
    'GB': 'www.google.co.uk',  # United Kingdom
    'ID': 'www.google.co.id',  # Indonesia
    'IE': 'www.google.ie',  # Ireland
    'IN': 'www.google.co.in',  # India
    'MY': 'www.google.com.my',  # Malaysia
    'NZ': 'www.google.co.nz',  # New Zealand
    'PH': 'www.google.com.ph',  # Philippines
    'SG': 'www.google.com.sg',  # Singapore
    # 'US': 'www.google.us',  # United States, redirect to .com
    'ZA': 'www.google.co.za',  # South Africa
    'AR': 'www.google.com.ar',  # Argentina
    'CL': 'www.google.cl',  # Chile
    'ES': 'www.google.es',  # Spain
    'MX': 'www.google.com.mx',  # Mexico
    'EE': 'www.google.ee',  # Estonia
    'FI': 'www.google.fi',  # Finland
    'BE': 'www.google.be',  # Belgium
    'FR': 'www.google.fr',  # France
    'IL': 'www.google.co.il',  # Israel
    'HR': 'www.google.hr',  # Croatia
    'HU': 'www.google.hu',  # Hungary
    'IT': 'www.google.it',  # Italy
    'JP': 'www.google.co.jp',  # Japan
    'KR': 'www.google.co.kr',  # South Korea
    'LT': 'www.google.lt',  # Lithuania
    'LV': 'www.google.lv',  # Latvia
    'NO': 'www.google.no',  # Norway
    'NL': 'www.google.nl',  # Netherlands
    'PL': 'www.google.pl',  # Poland
    'BR': 'www.google.com.br',  # Brazil
    'PT': 'www.google.pt',  # Portugal
    'RO': 'www.google.ro',  # Romania
    'RU': 'www.google.ru',  # Russia
    'SK': 'www.google.sk',  # Slovakia
    'SL': 'www.google.si',  # Slovenia (SL -> si)
    'SE': 'www.google.se',  # Sweden
    'TH': 'www.google.co.th',  # Thailand
    'TR': 'www.google.com.tr',  # Turkey
    'UA': 'www.google.com.ua',  # Ukraine
    # 'CN': 'www.google.cn',  # China, only from china ?
    'HK': 'www.google.com.hk',  # Hong Kong
    'TW': 'www.google.com.tw'  # Taiwan
}

# osm : link template used for map results
url_map = 'https://www.openstreetmap.org/'\
    + '?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'

# search-url (gbv=1 asks for the basic HTML interface)
search_path = '/search'
search_url = ('https://{hostname}' +
              search_path +
              '?{query}&start={offset}&gbv=1')

# other URLs
map_hostname_start = 'maps.google.'
maps_path = '/maps'
redirect_path = '/url'
images_path = '/images'

# specific xpath variables
results_xpath = '//li[@class="g"]'
url_xpath = './/h3/a/@href'
title_xpath = './/h3'
content_xpath = './/span[@class="st"]'
content_misc_xpath = './/div[@class="f slp"]'
suggestion_xpath = '//p[@class="_Bmc"]'

# map : detail location
map_address_xpath = './/div[@class="s"]//table//td[2]/span/text()'
map_phone_xpath = './/div[@class="s"]//table//td[2]/span/span'
map_website_url_xpath = 'h3[2]/a/@href'
map_website_title_xpath = 'h3[2]'

# map : near the location
map_near = 'table[@class="ts"]//tr'
map_near_title = './/h4'
map_near_url = './/h4/a/@href'
map_near_phone = './/span[@class="nobr"]'

# images
images_xpath = './/div/a'
image_url_xpath = './@href'
image_img_src_xpath = './img/@src'

# property names
# FIXME : no translation
property_address = "Address"
property_phone = "Phone number"
property_location = "Location"
property_website = "Web site"
property_gplus_website = "Google plus"

# cookies : lazily-filled caches
pref_cookie = ''
nid_cookie = {}
  120. # see https://support.google.com/websearch/answer/873?hl=en
  121. def get_google_pref_cookie():
  122. global pref_cookie
  123. if pref_cookie == '':
  124. resp = get('https://www.google.com/ncr', allow_redirects=False)
  125. pref_cookie = resp.cookies["PREF"]
  126. return pref_cookie
  127. def get_google_nid_cookie(google_hostname):
  128. global nid_cookie
  129. if google_hostname not in nid_cookie:
  130. resp = get('https://' + google_hostname)
  131. nid_cookie[google_hostname] = resp.cookies.get("NID", None)
  132. return nid_cookie[google_hostname]
  133. # remove google-specific tracking-url
  134. def parse_url(url_string, google_hostname):
  135. # sanity check
  136. if url_string is None:
  137. return url_string
  138. # normal case
  139. parsed_url = urlparse(url_string)
  140. if (parsed_url.netloc in [google_hostname, '']
  141. and parsed_url.path == redirect_path):
  142. query = dict(parse_qsl(parsed_url.query))
  143. return query['q']
  144. else:
  145. return url_string
  146. # URL : get label
  147. def url_get_label(url_string):
  148. # sanity check
  149. if url_string is None:
  150. return url_string
  151. # normal case
  152. parsed_url = urlparse(url_string)
  153. if parsed_url.netloc == 'plus.google.com':
  154. return property_gplus_website
  155. return property_website
  156. # returns extract_text on the first result selected by the xpath or None
  157. def extract_text_from_dom(result, xpath):
  158. r = result.xpath(xpath)
  159. if len(r) > 0:
  160. return extract_text(r[0])
  161. return None
  162. # do search-request
  163. def request(query, params):
  164. offset = (params['pageno'] - 1) * 10
  165. if params['language'] == 'all':
  166. language = 'en'
  167. country = 'US'
  168. else:
  169. language_array = params['language'].lower().split('_')
  170. if len(language_array) == 2:
  171. country = language_array[1]
  172. else:
  173. country = 'US'
  174. language = language_array[0] + ',' + language_array[0] + '-' + country
  175. if use_locale_domain:
  176. google_hostname = country_to_hostname.get(country.upper(), default_hostname)
  177. else:
  178. google_hostname = default_hostname
  179. params['url'] = search_url.format(offset=offset,
  180. query=urlencode({'q': query}),
  181. hostname=google_hostname)
  182. params['headers']['Accept-Language'] = language
  183. params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
  184. if google_hostname == default_hostname:
  185. params['cookies']['PREF'] = get_google_pref_cookie()
  186. params['cookies']['NID'] = get_google_nid_cookie(google_hostname)
  187. params['google_hostname'] = google_hostname
  188. return params
  189. # get response from search-request
  190. def response(resp):
  191. results = []
  192. # detect google sorry
  193. resp_url = urlparse(resp.url)
  194. if resp_url.netloc == 'sorry.google.com' or resp_url.path == '/sorry/IndexRedirect':
  195. raise RuntimeWarning('sorry.google.com')
  196. # which hostname ?
  197. google_hostname = resp.search_params.get('google_hostname')
  198. google_url = "https://" + google_hostname
  199. # convert the text to dom
  200. dom = html.fromstring(resp.text)
  201. # parse results
  202. for result in dom.xpath(results_xpath):
  203. title = extract_text(result.xpath(title_xpath)[0])
  204. try:
  205. url = parse_url(extract_url(result.xpath(url_xpath), google_url), google_hostname)
  206. parsed_url = urlparse(url, google_hostname)
  207. # map result
  208. if ((parsed_url.netloc == google_hostname and parsed_url.path.startswith(maps_path))
  209. or (parsed_url.netloc.startswith(map_hostname_start))):
  210. x = result.xpath(map_near)
  211. if len(x) > 0:
  212. # map : near the location
  213. results = results + parse_map_near(parsed_url, x, google_hostname)
  214. else:
  215. # map : detail about a location
  216. results = results + parse_map_detail(parsed_url, result, google_hostname)
  217. # google news
  218. elif (parsed_url.netloc == google_hostname
  219. and parsed_url.path == search_path):
  220. # skipping news results
  221. pass
  222. # images result
  223. elif (parsed_url.netloc == google_hostname
  224. and parsed_url.path == images_path):
  225. # only thumbnail image provided,
  226. # so skipping image results
  227. # results = results + parse_images(result, google_hostname)
  228. pass
  229. else:
  230. # normal result
  231. content = extract_text_from_dom(result, content_xpath)
  232. if content is None:
  233. continue
  234. content_misc = extract_text_from_dom(result, content_misc_xpath)
  235. if content_misc is not None:
  236. content = content_misc + "<br />" + content
  237. # append result
  238. results.append({'url': url,
  239. 'title': title,
  240. 'content': content})
  241. except:
  242. continue
  243. # parse suggestion
  244. for suggestion in dom.xpath(suggestion_xpath):
  245. # append suggestion
  246. results.append({'suggestion': extract_text(suggestion)})
  247. # return results
  248. return results
  249. def parse_images(result, google_hostname):
  250. results = []
  251. for image in result.xpath(images_xpath):
  252. url = parse_url(extract_text(image.xpath(image_url_xpath)[0]), google_hostname)
  253. img_src = extract_text(image.xpath(image_img_src_xpath)[0])
  254. # append result
  255. results.append({'url': url,
  256. 'title': '',
  257. 'content': '',
  258. 'img_src': img_src,
  259. 'template': 'images.html'})
  260. return results
  261. def parse_map_near(parsed_url, x, google_hostname):
  262. results = []
  263. for result in x:
  264. title = extract_text_from_dom(result, map_near_title)
  265. url = parse_url(extract_text_from_dom(result, map_near_url), google_hostname)
  266. phone = extract_text_from_dom(result, map_near_phone)
  267. if phone is not None:
  268. phone = property_phone + ": " + phone
  269. results.append({'url': url,
  270. 'title': title,
  271. 'content': phone})
  272. return results
  273. def parse_map_detail(parsed_url, result, google_hostname):
  274. results = []
  275. # try to parse the geoloc
  276. m = re.search('@([0-9\.]+),([0-9\.]+),([0-9]+)', parsed_url.path)
  277. if m is None:
  278. m = re.search('ll\=([0-9\.]+),([0-9\.]+)\&z\=([0-9]+)', parsed_url.query)
  279. if m is not None:
  280. # geoloc found
  281. lon = float(m.group(2))
  282. lat = float(m.group(1))
  283. zoom = int(m.group(3))
  284. # TODO : map zoom to dlon / dlat
  285. dlon = 0.000001
  286. dlat = 0.000001
  287. boundingbox = [round(lat - dlat, 7), round(lat + dlat, 7), round(lon - dlon, 7), round(lon + dlon, 7)]
  288. map_url = url_map\
  289. .replace('{latitude}', str(lat))\
  290. .replace('{longitude}', str(lon))\
  291. .replace('{zoom}', str(zoom+2))
  292. geojson = {u'type': u'Point',
  293. u'coordinates': [lon, lat]
  294. }
  295. # attributes
  296. attributes = []
  297. add_attributes(attributes, property_address, extract_text_from_dom(result, map_address_xpath))
  298. add_attributes(attributes, property_phone, extract_text_from_dom(result, map_phone_xpath))
  299. # title / content / url
  300. website_title = extract_text_from_dom(result, map_website_title_xpath)
  301. content = extract_text_from_dom(result, content_xpath)
  302. website_url = parse_url(extract_text_from_dom(result, map_website_url_xpath), google_hostname)
  303. # add an infobox if there is a website
  304. if website_url is not None:
  305. results.append({'infobox': website_title,
  306. 'id': website_url,
  307. 'content': content,
  308. 'attributes': attributes,
  309. 'urls': [
  310. {'title': url_get_label(website_url), 'url': website_url},
  311. {'title': property_location, 'url': map_url}
  312. ]
  313. })
  314. # usefull because user can see the map directly into searx
  315. results.append({'template': 'map.html',
  316. 'title': website_title,
  317. 'content': (content + '<br />' if content is not None else '')
  318. + attributes_to_html(attributes),
  319. 'longitude': lon,
  320. 'latitude': lat,
  321. 'boundingbox': boundingbox,
  322. 'geojson': geojson,
  323. 'url': website_url if website_url is not None else map_url
  324. })
  325. return results
  326. def add_attributes(attributes, name, value):
  327. if value is not None and len(value) > 0:
  328. attributes.append({'label': name, 'value': value})
  329. def attributes_to_html(attributes):
  330. retval = '<table class="table table-striped">'
  331. for a in attributes:
  332. retval = retval + '<tr><th>' + a.get('label') + '</th><td>' + a.get('value') + '</td></tr>'
  333. retval = retval + '</table>'
  334. return retval