startpage.py 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497
  1. # SPDX-License-Identifier: AGPL-3.0-or-later
  2. # lint: pylint
  3. """Startpage's language & region selectors are a mess ..
  4. .. _startpage regions:
  5. Startpage regions
  6. =================
  7. In the list of regions there are tags we need to map to common region tags::
  8. pt-BR_BR --> pt_BR
  9. zh-CN_CN --> zh_Hans_CN
  10. zh-TW_TW --> zh_Hant_TW
  11. zh-TW_HK --> zh_Hant_HK
  12. en-GB_GB --> en_GB
  13. and there is at least one tag with a three letter language tag (ISO 639-2)::
  14. fil_PH --> fil_PH
15. The locale code ``no_NO`` from Startpage does not exist and is mapped to
16. ``nb-NO``::
  17. babel.core.UnknownLocaleError: unknown locale 'no_NO'
  18. For reference see languages-subtag at iana; ``no`` is the macrolanguage [1]_ and
  19. W3C recommends subtag over macrolanguage [2]_.
  20. .. [1] `iana: language-subtag-registry
  21. <https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry>`_ ::
  22. type: language
  23. Subtag: nb
  24. Description: Norwegian Bokmål
  25. Added: 2005-10-16
  26. Suppress-Script: Latn
  27. Macrolanguage: no
  28. .. [2]
  29. Use macrolanguages with care. Some language subtags have a Scope field set to
  30. macrolanguage, i.e. this primary language subtag encompasses a number of more
  31. specific primary language subtags in the registry. ... As we recommended for
  32. the collection subtags mentioned above, in most cases you should try to use
  33. the more specific subtags ... `W3: The primary language subtag
  34. <https://www.w3.org/International/questions/qa-choosing-language-tags#langsubtag>`_
  35. .. _startpage languages:
  36. Startpage languages
  37. ===================
  38. :py:obj:`send_accept_language_header`:
39. The displayed name in Startpage's settings page depends on the location of the
  40. IP when ``Accept-Language`` HTTP header is unset. In :py:obj:`fetch_traits`
  41. we use::
  42. 'Accept-Language': "en-US,en;q=0.5",
  43. ..
44. to get uniform names independent from the IP.
  45. .. _startpage categories:
  46. Startpage categories
  47. ====================
  48. Startpage's category (for Web-search, News, Videos, ..) is set by
  49. :py:obj:`startpage_categ` in settings.yml::
  50. - name: startpage
  51. engine: startpage
  52. startpage_categ: web
  53. ...
  54. .. hint::
  55. The default category is ``web`` .. and other categories than ``web`` are not
  56. yet implemented.
  57. """
  58. from typing import TYPE_CHECKING
  59. from collections import OrderedDict
  60. import re
  61. from unicodedata import normalize, combining
  62. from time import time
  63. from datetime import datetime, timedelta
  64. import dateutil.parser
  65. import lxml.html
  66. import babel
  67. from searx.utils import extract_text, eval_xpath, gen_useragent
  68. from searx.network import get # see https://github.com/searxng/searxng/issues/762
  69. from searx.exceptions import SearxEngineCaptchaException
  70. from searx.locales import region_tag
  71. from searx.enginelib.traits import EngineTraits
  72. if TYPE_CHECKING:
  73. import logging
  74. logger: logging.Logger
  75. traits: EngineTraits
  76. # about
  77. about = {
  78. "website": 'https://startpage.com',
  79. "wikidata_id": 'Q2333295',
  80. "official_api_documentation": None,
  81. "use_official_api": False,
  82. "require_api_key": False,
  83. "results": 'HTML',
  84. }
  85. startpage_categ = 'web'
  86. """Startpage's category, visit :ref:`startpage categories`.
  87. """
  88. send_accept_language_header = True
  89. """Startpage tries to guess user's language and territory from the HTTP
  90. ``Accept-Language``. Optional the user can select a search-language (can be
  91. different to the UI language) and a region filter.
  92. """
  93. # engine dependent config
  94. categories = ['general', 'web']
  95. paging = True
  96. max_page = 18
  97. """Tested 18 pages maximum (argument ``page``), to be save max is set to 20."""
  98. time_range_support = True
  99. safesearch = True
  100. time_range_dict = {'day': 'd', 'week': 'w', 'month': 'm', 'year': 'y'}
  101. safesearch_dict = {0: '0', 1: '1', 2: '1'}
  102. # search-url
  103. base_url = 'https://www.startpage.com'
  104. search_url = base_url + '/sp/search'
  105. # specific xpath variables
  106. # ads xpath //div[@id="results"]/div[@id="sponsored"]//div[@class="result"]
  107. # not ads: div[@class="result"] are the direct childs of div[@id="results"]
  108. results_xpath = '//div[@class="w-gl__result__main"]'
  109. link_xpath = './/a[@class="w-gl__result-title result-link"]'
  110. content_xpath = './/p[@class="w-gl__description"]'
  111. search_form_xpath = '//form[@id="search"]'
  112. """XPath of Startpage's origin search form
  113. .. code: html
  114. <form action="/sp/search" method="post">
  115. <input type="text" name="query" value="" ..>
  116. <input type="hidden" name="t" value="device">
  117. <input type="hidden" name="lui" value="english">
  118. <input type="hidden" name="sc" value="Q7Mt5TRqowKB00">
  119. <input type="hidden" name="cat" value="web">
  120. <input type="hidden" class="abp" id="abp-input" name="abp" value="1">
  121. </form>
  122. """
  123. # timestamp of the last fetch of 'sc' code
  124. sc_code_ts = 0
  125. sc_code = ''
  126. sc_code_cache_sec = 30
  127. """Time in seconds the sc-code is cached in memory :py:obj:`get_sc_code`."""
def get_sc_code(searxng_locale, params):
    """Get an actual ``sc`` argument from Startpage's search form (HTML page).

    Startpage puts a ``sc`` argument on every HTML :py:obj:`search form
    <search_form_xpath>`.  Without this argument Startpage considers the
    request is from a bot.  We do not know what is encoded in the value of the
    ``sc`` argument, but it seems to be a kind of a *time-stamp*.

    Startpage's search form generates a new sc-code on each request.  This
    function scrapes a new sc-code from Startpage's home page every
    :py:obj:`sc_code_cache_sec` seconds and caches it in the module-level
    ``sc_code`` / ``sc_code_ts`` globals.

    :param searxng_locale: SearXNG locale tag (``'all'`` is mapped to
        ``en-US``); used to build the ``Accept-Language`` header.
    :param params: engine request parameters; only ``params['headers']`` is
        read here (and copied, not mutated).
    :raises SearxEngineCaptchaException: when Startpage redirects to its
        CAPTCHA page, or when no ``sc`` value can be scraped from the form.
    """
    global sc_code_ts, sc_code  # pylint: disable=global-statement

    # serve the cached sc-code as long as it is fresh enough
    if sc_code and (time() < (sc_code_ts + sc_code_cache_sec)):
        logger.debug("get_sc_code: reuse '%s'", sc_code)
        return sc_code

    # copy the engine's headers so params['headers'] is not mutated in place
    headers = {**params['headers']}
    headers['Origin'] = base_url
    headers['Referer'] = base_url + '/'
    # headers['Connection'] = 'keep-alive'
    # headers['Accept-Encoding'] = 'gzip, deflate, br'
    # headers['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8'
    # headers['User-Agent'] = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0'

    # add Accept-Language header
    if searxng_locale == 'all':
        searxng_locale = 'en-US'
    locale = babel.Locale.parse(searxng_locale, sep='-')

    if send_accept_language_header:
        ac_lang = locale.language
        if locale.territory:
            ac_lang = "%s-%s,%s;q=0.9,*;q=0.5" % (
                locale.language,
                locale.territory,
                locale.language,
            )
        headers['Accept-Language'] = ac_lang

    # the (possibly stale or empty) previous sc-code is sent along with the
    # request for a fresh one
    get_sc_url = base_url + '/?sc=%s' % (sc_code)
    logger.debug("query new sc time-stamp ... %s", get_sc_url)
    logger.debug("headers: %s", headers)
    resp = get(get_sc_url, headers=headers)

    # ?? x = network.get('https://www.startpage.com/sp/cdn/images/filter-chevron.svg', headers=headers)
    # ?? https://www.startpage.com/sp/cdn/images/filter-chevron.svg
    # ?? ping-back URL: https://www.startpage.com/sp/pb?sc=TLsB0oITjZ8F21

    # a redirect to the captcha page means Startpage flagged us as a bot
    if str(resp.url).startswith('https://www.startpage.com/sp/captcha'):  # type: ignore
        raise SearxEngineCaptchaException(
            message="get_sc_code: got redirected to https://www.startpage.com/sp/captcha",
        )

    dom = lxml.html.fromstring(resp.text)  # type: ignore

    try:
        # scrape the sc value from the hidden <input name="sc"> of the form
        sc_code = eval_xpath(dom, search_form_xpath + '//input[@name="sc"]/@value')[0]
    except IndexError as exc:
        # no sc input found -- treat as CAPTCHA / API suspension
        logger.debug("suspend startpage API --> https://github.com/searxng/searxng/pull/695")
        raise SearxEngineCaptchaException(
            message="get_sc_code: [PR-695] query new sc time-stamp failed! (%s)" % resp.url,  # type: ignore
        ) from exc

    sc_code_ts = time()
    logger.debug("get_sc_code: new value is: %s", sc_code)
    return sc_code
  184. def request(query, params):
  185. """Assemble a Startpage request.
  186. To avoid CAPTCHA we need to send a well formed HTTP POST request with a
  187. cookie. We need to form a request that is identical to the request build by
  188. Startpage's search form:
  189. - in the cookie the **region** is selected
  190. - in the HTTP POST data the **language** is selected
  191. Additionally the arguments form Startpage's search form needs to be set in
  192. HTML POST data / compare ``<input>`` elements: :py:obj:`search_form_xpath`.
  193. """
  194. if startpage_categ == 'web':
  195. return _request_cat_web(query, params)
  196. logger.error("Startpages's category '%' is not yet implemented.", startpage_categ)
  197. return params
  198. def _request_cat_web(query, params):
  199. engine_region = traits.get_region(params['searxng_locale'], 'en-US')
  200. engine_language = traits.get_language(params['searxng_locale'], 'en')
  201. # build arguments
  202. args = {
  203. 'query': query,
  204. 'cat': 'web',
  205. 't': 'device',
  206. 'sc': get_sc_code(params['searxng_locale'], params), # hint: this func needs HTTP headers,
  207. 'with_date': time_range_dict.get(params['time_range'], ''),
  208. }
  209. if engine_language:
  210. args['language'] = engine_language
  211. args['lui'] = engine_language
  212. args['abp'] = '1'
  213. if params['pageno'] > 1:
  214. args['page'] = params['pageno']
  215. # build cookie
  216. lang_homepage = 'en'
  217. cookie = OrderedDict()
  218. cookie['date_time'] = 'world'
  219. cookie['disable_family_filter'] = safesearch_dict[params['safesearch']]
  220. cookie['disable_open_in_new_window'] = '0'
  221. cookie['enable_post_method'] = '1' # hint: POST
  222. cookie['enable_proxy_safety_suggest'] = '1'
  223. cookie['enable_stay_control'] = '1'
  224. cookie['instant_answers'] = '1'
  225. cookie['lang_homepage'] = 's/device/%s/' % lang_homepage
  226. cookie['num_of_results'] = '10'
  227. cookie['suggestions'] = '1'
  228. cookie['wt_unit'] = 'celsius'
  229. if engine_language:
  230. cookie['language'] = engine_language
  231. cookie['language_ui'] = engine_language
  232. if engine_region:
  233. cookie['search_results_region'] = engine_region
  234. params['cookies']['preferences'] = 'N1N'.join(["%sEEE%s" % x for x in cookie.items()])
  235. logger.debug('cookie preferences: %s', params['cookies']['preferences'])
  236. # POST request
  237. logger.debug("data: %s", args)
  238. params['data'] = args
  239. params['method'] = 'POST'
  240. params['url'] = search_url
  241. params['headers']['Origin'] = base_url
  242. params['headers']['Referer'] = base_url + '/'
  243. # is the Accept header needed?
  244. # params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
  245. return params
  246. # get response from search-request
  247. def response(resp):
  248. dom = lxml.html.fromstring(resp.text)
  249. if startpage_categ == 'web':
  250. return _response_cat_web(dom)
  251. logger.error("Startpages's category '%' is not yet implemented.", startpage_categ)
  252. return []
def _response_cat_web(dom):
    """Extract *web* search results from the parsed result page.

    Walks the result nodes (:py:obj:`results_xpath`), skips ad/self-referring
    links, and -- when the content snippet starts with a date prefix like
    ``"2 Sep 2014 ... "`` or ``"5 days ago ... "`` -- strips that prefix and
    reports it as ``publishedDate``.

    :param dom: lxml DOM of the result page
    :return: list of result dicts (``url``, ``title``, ``content`` and
        optionally ``publishedDate``)
    """
    results = []

    # parse results
    for result in eval_xpath(dom, results_xpath):
        links = eval_xpath(result, link_xpath)
        if not links:
            continue
        link = links[0]
        url = link.attrib.get('href')

        # block google-ad url's
        if re.match(r"^http(s|)://(www\.)?google\.[a-z]+/aclk.*$", url):
            continue

        # block startpage search url's
        if re.match(r"^http(s|)://(www\.)?startpage\.com/do/search\?.*$", url):
            continue

        title = extract_text(link)

        if eval_xpath(result, content_xpath):
            content: str = extract_text(eval_xpath(result, content_xpath))  # type: ignore
        else:
            content = ''

        published_date = None

        # check if search result starts with something like: "2 Sep 2014 ... "
        if re.match(r"^([1-9]|[1-2][0-9]|3[0-1]) [A-Z][a-z]{2} [0-9]{4} \.\.\. ", content):
            # date_pos points just past "... "; the date itself is everything
            # before the "..." separator (hence the -5: len('... ') + 1)
            date_pos = content.find('...') + 4
            date_string = content[0 : date_pos - 5]
            # fix content string: drop the date prefix
            content = content[date_pos:]

            try:
                published_date = dateutil.parser.parse(date_string, dayfirst=True)
            except ValueError:
                # unparsable date -- keep the result without a date
                pass

        # check if search result starts with something like: "5 days ago ... "
        elif re.match(r"^[0-9]+ days? ago \.\.\. ", content):
            date_pos = content.find('...') + 4
            date_string = content[0 : date_pos - 5]

            # calculate datetime from the leading "<n> days" count
            published_date = datetime.now() - timedelta(days=int(re.match(r'\d+', date_string).group()))  # type: ignore

            # fix content string: drop the date prefix
            content = content[date_pos:]

        if published_date:
            # append result
            results.append({'url': url, 'title': title, 'content': content, 'publishedDate': published_date})
        else:
            # append result
            results.append({'url': url, 'title': title, 'content': content})

    # return results
    return results
def fetch_traits(engine_traits: EngineTraits):
    """Fetch :ref:`languages <startpage languages>` and :ref:`regions <startpage
    regions>` from Startpage.

    Scrapes Startpage's settings page and fills ``engine_traits.regions`` and
    ``engine_traits.languages`` with mappings from SearXNG tags to Startpage's
    own tags.
    """
    # pylint: disable=too-many-branches

    headers = {
        'User-Agent': gen_useragent(),
        # fixed English Accept-Language so the displayed names are uniform and
        # independent of the requesting IP (see module docstring)
        'Accept-Language': "en-US,en;q=0.5",
    }
    resp = get('https://www.startpage.com/do/settings', headers=headers)

    if not resp.ok:  # type: ignore
        print("ERROR: response from Startpage is not OK.")

    dom = lxml.html.fromstring(resp.text)  # type: ignore

    # regions: collect the values of the region <select> options
    sp_region_names = []
    for option in dom.xpath('//form[@name="settings"]//select[@name="search_results_region"]/option'):
        sp_region_names.append(option.get('value'))

    for eng_tag in sp_region_names:
        if eng_tag == 'all':
            continue
        # 'no_NO' is unknown to babel; map it to 'nb_NO' (norway, see module docstring)
        babel_region_tag = {'no_NO': 'nb_NO'}.get(eng_tag, eng_tag)

        if '-' in babel_region_tag:
            # tags like 'pt-BR_BR': language part before '-', region after '_'
            l, r = babel_region_tag.split('-')
            r = r.split('_')[-1]
            sxng_tag = region_tag(babel.Locale.parse(l + '_' + r, sep='_'))

        else:
            try:
                sxng_tag = region_tag(babel.Locale.parse(babel_region_tag, sep='_'))

            except babel.UnknownLocaleError:
                print("ERROR: can't determine babel locale of startpage's locale %s" % eng_tag)
                continue

        # keep the first mapping; only report genuinely conflicting duplicates
        conflict = engine_traits.regions.get(sxng_tag)
        if conflict:
            if conflict != eng_tag:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, eng_tag))
            continue
        engine_traits.regions[sxng_tag] = eng_tag

    # languages: build a catalog of (english and native) language names -> code
    catalog_engine2code = {name.lower(): lang_code for lang_code, name in babel.Locale('en').languages.items()}

    # get the native name of every language known by babel
    for lang_code in filter(
        lambda lang_code: lang_code.find('_') == -1, babel.localedata.locale_identifiers()  # type: ignore
    ):
        native_name = babel.Locale(lang_code).get_language_name().lower()  # type: ignore
        # add native name exactly as it is
        catalog_engine2code[native_name] = lang_code

        # add "normalized" language name (i.e. français becomes francais and español becomes espanol)
        unaccented_name = ''.join(filter(lambda c: not combining(c), normalize('NFKD', native_name)))
        if len(unaccented_name) == len(unaccented_name.encode()):
            # add only if result is ascii (otherwise "normalization" didn't work)
            catalog_engine2code[unaccented_name] = lang_code

    # values that can't be determined by babel's languages names
    catalog_engine2code.update(
        {
            # traditional chinese used in ..
            'fantizhengwen': 'zh_Hant',
            # Korean alphabet
            'hangul': 'ko',
            # Malayalam is one of 22 scheduled languages of India.
            'malayam': 'ml',
            'norsk': 'nb',
            'sinhalese': 'si',
        }
    )

    skip_eng_tags = {
        'english_uk',  # SearXNG lang 'en' already maps to 'english'
    }

    for option in dom.xpath('//form[@name="settings"]//select[@name="language"]/option'):
        eng_tag = option.get('value')
        if eng_tag in skip_eng_tags:
            continue
        name = extract_text(option).lower()  # type: ignore

        # prefer a direct match on the tag; fall back to the displayed name
        sxng_tag = catalog_engine2code.get(eng_tag)
        if sxng_tag is None:
            sxng_tag = catalog_engine2code[name]

        conflict = engine_traits.languages.get(sxng_tag)
        if conflict:
            if conflict != eng_tag:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, eng_tag))
            continue
        engine_traits.languages[sxng_tag] = eng_tag