startpage.py 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496
  1. # SPDX-License-Identifier: AGPL-3.0-or-later
  2. """Startpage's language & region selectors are a mess ..
  3. .. _startpage regions:
  4. Startpage regions
  5. =================
  6. In the list of regions there are tags we need to map to common region tags::
  7. pt-BR_BR --> pt_BR
  8. zh-CN_CN --> zh_Hans_CN
  9. zh-TW_TW --> zh_Hant_TW
  10. zh-TW_HK --> zh_Hant_HK
  11. en-GB_GB --> en_GB
  12. and there is at least one tag with a three letter language tag (ISO 639-2)::
  13. fil_PH --> fil_PH
  14. The locale code ``no_NO`` from Startpage does not exist and is mapped to
  15. ``nb-NO``::
  16. babel.core.UnknownLocaleError: unknown locale 'no_NO'
  17. For reference see languages-subtag at iana; ``no`` is the macrolanguage [1]_ and
  18. W3C recommends subtag over macrolanguage [2]_.
  19. .. [1] `iana: language-subtag-registry
  20. <https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry>`_ ::
  21. type: language
  22. Subtag: nb
  23. Description: Norwegian Bokmål
  24. Added: 2005-10-16
  25. Suppress-Script: Latn
  26. Macrolanguage: no
  27. .. [2]
  28. Use macrolanguages with care. Some language subtags have a Scope field set to
  29. macrolanguage, i.e. this primary language subtag encompasses a number of more
  30. specific primary language subtags in the registry. ... As we recommended for
  31. the collection subtags mentioned above, in most cases you should try to use
  32. the more specific subtags ... `W3: The primary language subtag
  33. <https://www.w3.org/International/questions/qa-choosing-language-tags#langsubtag>`_
  34. .. _startpage languages:
  35. Startpage languages
  36. ===================
  37. :py:obj:`send_accept_language_header`:
  38. The displayed name in Startpage's settings page depend on the location of the
  39. IP when ``Accept-Language`` HTTP header is unset. In :py:obj:`fetch_traits`
  40. we use::
  41. 'Accept-Language': "en-US,en;q=0.5",
  42. ..
  43. to get uniform names independent of the IP.
  44. .. _startpage categories:
  45. Startpage categories
  46. ====================
  47. Startpage's category (for Web-search, News, Videos, ..) is set by
  48. :py:obj:`startpage_categ` in settings.yml::
  49. - name: startpage
  50. engine: startpage
  51. startpage_categ: web
  52. ...
  53. .. hint::
  54. The default category is ``web`` .. and other categories than ``web`` are not
  55. yet implemented.
  56. """
  57. from typing import TYPE_CHECKING
  58. from collections import OrderedDict
  59. import re
  60. from unicodedata import normalize, combining
  61. from time import time
  62. from datetime import datetime, timedelta
  63. import dateutil.parser
  64. import lxml.html
  65. import babel
  66. from searx.utils import extract_text, eval_xpath, gen_useragent
  67. from searx.network import get # see https://github.com/searxng/searxng/issues/762
  68. from searx.exceptions import SearxEngineCaptchaException
  69. from searx.locales import region_tag
  70. from searx.enginelib.traits import EngineTraits
  71. if TYPE_CHECKING:
  72. import logging
  73. logger: logging.Logger
  74. traits: EngineTraits
# about
about = {
    "website": 'https://startpage.com',
    "wikidata_id": 'Q2333295',
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

startpage_categ = 'web'
"""Startpage's category, visit :ref:`startpage categories`.
"""

send_accept_language_header = True
"""Startpage tries to guess user's language and territory from the HTTP
``Accept-Language``.  Optionally the user can select a search-language (can be
different to the UI language) and a region filter.
"""

# engine dependent config
categories = ['general', 'web']
paging = True
max_page = 18
"""Tested 18 pages maximum (argument ``page``)."""

time_range_support = True
safesearch = True

# mapping of SearXNG's time-range / safe-search values to Startpage's arguments
time_range_dict = {'day': 'd', 'week': 'w', 'month': 'm', 'year': 'y'}
safesearch_dict = {0: '0', 1: '1', 2: '1'}

# search-url
base_url = 'https://www.startpage.com'
search_url = base_url + '/sp/search'

# specific xpath variables
# ads xpath //div[@id="results"]/div[@id="sponsored"]//div[@class="result"]
# not ads: div[@class="result"] are the direct childs of div[@id="results"]
results_xpath = '//div[@class="w-gl__result__main"]'
link_xpath = './/a[@class="w-gl__result-title result-link"]'
content_xpath = './/p[@class="w-gl__description"]'
search_form_xpath = '//form[@id="search"]'
"""XPath of Startpage's origin search form

.. code: html

    <form action="/sp/search" method="post">
      <input type="text" name="query" value="" ..>
      <input type="hidden" name="t" value="device">
      <input type="hidden" name="lui" value="english">
      <input type="hidden" name="sc" value="Q7Mt5TRqowKB00">
      <input type="hidden" name="cat" value="web">
      <input type="hidden" class="abp" id="abp-input" name="abp" value="1">
    </form>
"""

# timestamp of the last fetch of 'sc' code
sc_code_ts = 0
sc_code = ''
sc_code_cache_sec = 30
"""Time in seconds the sc-code is cached in memory :py:obj:`get_sc_code`."""
def get_sc_code(searxng_locale, params):
    """Get an actual ``sc`` argument from Startpage's search form (HTML page).

    Startpage puts a ``sc`` argument on every HTML :py:obj:`search form
    <search_form_xpath>`.  Without this argument Startpage considers the
    request is from a bot.  We do not know what is encoded in the value of the
    ``sc`` argument, but it seems to be a kind of a *time-stamp*.

    Startpage's search form generates a new sc-code on each request.  This
    function scrapes a new sc-code from Startpage's home page every
    :py:obj:`sc_code_cache_sec` seconds.

    :raises SearxEngineCaptchaException: when the request is redirected to
        Startpage's CAPTCHA page or no ``sc`` value is found in the form.
    """
    global sc_code_ts, sc_code  # pylint: disable=global-statement

    # reuse the cached sc-code while it has not expired
    if sc_code and (time() < (sc_code_ts + sc_code_cache_sec)):
        logger.debug("get_sc_code: reuse '%s'", sc_code)
        return sc_code

    # start from the headers of the outgoing search request so the fetch of
    # the home page looks like it comes from the same client
    headers = {**params['headers']}
    headers['Origin'] = base_url
    headers['Referer'] = base_url + '/'
    # headers['Connection'] = 'keep-alive'
    # headers['Accept-Encoding'] = 'gzip, deflate, br'
    # headers['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8'
    # headers['User-Agent'] = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0'

    # add Accept-Language header
    if searxng_locale == 'all':
        searxng_locale = 'en-US'
    locale = babel.Locale.parse(searxng_locale, sep='-')

    if send_accept_language_header:
        ac_lang = locale.language
        if locale.territory:
            ac_lang = "%s-%s,%s;q=0.9,*;q=0.5" % (
                locale.language,
                locale.territory,
                locale.language,
            )
        headers['Accept-Language'] = ac_lang

    # pass the old (possibly empty) sc value while requesting a fresh one
    get_sc_url = base_url + '/?sc=%s' % (sc_code)
    logger.debug("query new sc time-stamp ... %s", get_sc_url)
    logger.debug("headers: %s", headers)
    resp = get(get_sc_url, headers=headers)

    # ?? x = network.get('https://www.startpage.com/sp/cdn/images/filter-chevron.svg', headers=headers)
    # ?? https://www.startpage.com/sp/cdn/images/filter-chevron.svg
    # ?? ping-back URL: https://www.startpage.com/sp/pb?sc=TLsB0oITjZ8F21

    # a redirect to the captcha page means Startpage classified us as a bot
    if str(resp.url).startswith('https://www.startpage.com/sp/captcha'):  # type: ignore
        raise SearxEngineCaptchaException(
            message="get_sc_code: got redirected to https://www.startpage.com/sp/captcha",
        )

    dom = lxml.html.fromstring(resp.text)  # type: ignore

    try:
        sc_code = eval_xpath(dom, search_form_xpath + '//input[@name="sc"]/@value')[0]
    except IndexError as exc:
        logger.debug("suspend startpage API --> https://github.com/searxng/searxng/pull/695")
        raise SearxEngineCaptchaException(
            message="get_sc_code: [PR-695] query new sc time-stamp failed! (%s)" % resp.url,  # type: ignore
        ) from exc

    # refresh the cache timestamp only after a successful scrape
    sc_code_ts = time()
    logger.debug("get_sc_code: new value is: %s", sc_code)
    return sc_code
  183. def request(query, params):
  184. """Assemble a Startpage request.
  185. To avoid CAPTCHA we need to send a well formed HTTP POST request with a
  186. cookie. We need to form a request that is identical to the request build by
  187. Startpage's search form:
  188. - in the cookie the **region** is selected
  189. - in the HTTP POST data the **language** is selected
  190. Additionally the arguments form Startpage's search form needs to be set in
  191. HTML POST data / compare ``<input>`` elements: :py:obj:`search_form_xpath`.
  192. """
  193. if startpage_categ == 'web':
  194. return _request_cat_web(query, params)
  195. logger.error("Startpages's category '%' is not yet implemented.", startpage_categ)
  196. return params
  197. def _request_cat_web(query, params):
  198. engine_region = traits.get_region(params['searxng_locale'], 'en-US')
  199. engine_language = traits.get_language(params['searxng_locale'], 'en')
  200. # build arguments
  201. args = {
  202. 'query': query,
  203. 'cat': 'web',
  204. 't': 'device',
  205. 'sc': get_sc_code(params['searxng_locale'], params), # hint: this func needs HTTP headers,
  206. 'with_date': time_range_dict.get(params['time_range'], ''),
  207. }
  208. if engine_language:
  209. args['language'] = engine_language
  210. args['lui'] = engine_language
  211. args['abp'] = '1'
  212. if params['pageno'] > 1:
  213. args['page'] = params['pageno']
  214. # build cookie
  215. lang_homepage = 'en'
  216. cookie = OrderedDict()
  217. cookie['date_time'] = 'world'
  218. cookie['disable_family_filter'] = safesearch_dict[params['safesearch']]
  219. cookie['disable_open_in_new_window'] = '0'
  220. cookie['enable_post_method'] = '1' # hint: POST
  221. cookie['enable_proxy_safety_suggest'] = '1'
  222. cookie['enable_stay_control'] = '1'
  223. cookie['instant_answers'] = '1'
  224. cookie['lang_homepage'] = 's/device/%s/' % lang_homepage
  225. cookie['num_of_results'] = '10'
  226. cookie['suggestions'] = '1'
  227. cookie['wt_unit'] = 'celsius'
  228. if engine_language:
  229. cookie['language'] = engine_language
  230. cookie['language_ui'] = engine_language
  231. if engine_region:
  232. cookie['search_results_region'] = engine_region
  233. params['cookies']['preferences'] = 'N1N'.join(["%sEEE%s" % x for x in cookie.items()])
  234. logger.debug('cookie preferences: %s', params['cookies']['preferences'])
  235. # POST request
  236. logger.debug("data: %s", args)
  237. params['data'] = args
  238. params['method'] = 'POST'
  239. params['url'] = search_url
  240. params['headers']['Origin'] = base_url
  241. params['headers']['Referer'] = base_url + '/'
  242. # is the Accept header needed?
  243. # params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
  244. return params
  245. # get response from search-request
  246. def response(resp):
  247. dom = lxml.html.fromstring(resp.text)
  248. if startpage_categ == 'web':
  249. return _response_cat_web(dom)
  250. logger.error("Startpages's category '%' is not yet implemented.", startpage_categ)
  251. return []
def _response_cat_web(dom):
    """Parse the result list of Startpage's ``web`` category page.

    Returns a list of result dicts (``url``, ``title``, ``content`` and,
    when a leading date could be parsed out of the content, ``publishedDate``).
    """
    results = []

    # parse results
    for result in eval_xpath(dom, results_xpath):
        links = eval_xpath(result, link_xpath)
        if not links:
            continue
        link = links[0]
        url = link.attrib.get('href')

        # block google-ad url's
        if re.match(r"^http(s|)://(www\.)?google\.[a-z]+/aclk.*$", url):
            continue

        # block startpage search url's
        if re.match(r"^http(s|)://(www\.)?startpage\.com/do/search\?.*$", url):
            continue

        title = extract_text(link)

        if eval_xpath(result, content_xpath):
            content: str = extract_text(eval_xpath(result, content_xpath))  # type: ignore
        else:
            content = ''

        published_date = None

        # check if search result starts with something like: "2 Sep 2014 ... "
        if re.match(r"^([1-9]|[1-2][0-9]|3[0-1]) [A-Z][a-z]{2} [0-9]{4} \.\.\. ", content):
            # '+ 4' skips past "... ", '- 5' strips " ... " from the date part
            date_pos = content.find('...') + 4
            date_string = content[0 : date_pos - 5]
            # fix content string: drop the date prefix
            content = content[date_pos:]

            try:
                published_date = dateutil.parser.parse(date_string, dayfirst=True)
            except ValueError:
                # unparsable date -- keep the result without a date
                pass

        # check if search result starts with something like: "5 days ago ... "
        elif re.match(r"^[0-9]+ days? ago \.\.\. ", content):
            date_pos = content.find('...') + 4
            date_string = content[0 : date_pos - 5]

            # calculate datetime from the relative "N days ago" phrase
            published_date = datetime.now() - timedelta(days=int(re.match(r'\d+', date_string).group()))  # type: ignore

            # fix content string: drop the date prefix
            content = content[date_pos:]

        if published_date:
            # append result
            results.append({'url': url, 'title': title, 'content': content, 'publishedDate': published_date})
        else:
            # append result
            results.append({'url': url, 'title': title, 'content': content})

    # return results
    return results
def fetch_traits(engine_traits: EngineTraits):
    """Fetch :ref:`languages <startpage languages>` and :ref:`regions <startpage
    regions>` from Startpage."""
    # pylint: disable=too-many-branches

    headers = {
        'User-Agent': gen_useragent(),
        # force English names so they are uniform, independent of the IP
        'Accept-Language': "en-US,en;q=0.5",
    }
    resp = get('https://www.startpage.com/do/settings', headers=headers)

    if not resp.ok:  # type: ignore
        print("ERROR: response from Startpage is not OK.")

    dom = lxml.html.fromstring(resp.text)  # type: ignore

    # regions

    sp_region_names = []
    for option in dom.xpath('//form[@name="settings"]//select[@name="search_results_region"]/option'):
        sp_region_names.append(option.get('value'))

    for eng_tag in sp_region_names:
        if eng_tag == 'all':
            continue
        # Startpage's 'no_NO' is unknown to babel; map it to 'nb_NO' (norway)
        babel_region_tag = {'no_NO': 'nb_NO'}.get(eng_tag, eng_tag)

        if '-' in babel_region_tag:
            # tags like 'pt-BR_BR' / 'zh-TW_HK': language part before '-',
            # territory is the last '_' component
            l, r = babel_region_tag.split('-')
            r = r.split('_')[-1]
            sxng_tag = region_tag(babel.Locale.parse(l + '_' + r, sep='_'))

        else:
            try:
                sxng_tag = region_tag(babel.Locale.parse(babel_region_tag, sep='_'))

            except babel.UnknownLocaleError:
                print("ERROR: can't determine babel locale of startpage's locale %s" % eng_tag)
                continue

        conflict = engine_traits.regions.get(sxng_tag)
        if conflict:
            if conflict != eng_tag:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, eng_tag))
            continue
        engine_traits.regions[sxng_tag] = eng_tag

    # languages

    # Startpage's settings page lists languages by (native) name, not by tag;
    # build a name --> babel language-code catalog to map them back
    catalog_engine2code = {name.lower(): lang_code for lang_code, name in babel.Locale('en').languages.items()}

    # get the native name of every language known by babel
    for lang_code in filter(
        lambda lang_code: lang_code.find('_') == -1, babel.localedata.locale_identifiers()  # type: ignore
    ):
        native_name = babel.Locale(lang_code).get_language_name().lower()  # type: ignore
        # add native name exactly as it is
        catalog_engine2code[native_name] = lang_code

        # add "normalized" language name (i.e. français becomes francais and español becomes espanol)
        unaccented_name = ''.join(filter(lambda c: not combining(c), normalize('NFKD', native_name)))
        if len(unaccented_name) == len(unaccented_name.encode()):
            # add only if result is ascii (otherwise "normalization" didn't work)
            catalog_engine2code[unaccented_name] = lang_code

    # values that can't be determined by babel's languages names
    catalog_engine2code.update(
        {
            # traditional chinese used in ..
            'fantizhengwen': 'zh_Hant',
            # Korean alphabet
            'hangul': 'ko',
            # Malayalam is one of 22 scheduled languages of India.
            'malayam': 'ml',
            'norsk': 'nb',
            'sinhalese': 'si',
        }
    )

    skip_eng_tags = {
        'english_uk',  # SearXNG lang 'en' already maps to 'english'
    }

    for option in dom.xpath('//form[@name="settings"]//select[@name="language"]/option'):

        eng_tag = option.get('value')
        if eng_tag in skip_eng_tags:
            continue
        name = extract_text(option).lower()  # type: ignore

        # prefer a direct match on Startpage's value, fall back to the
        # displayed (English) name -- KeyError here means a new, unmapped name
        sxng_tag = catalog_engine2code.get(eng_tag)
        if sxng_tag is None:
            sxng_tag = catalog_engine2code[name]

        conflict = engine_traits.languages.get(sxng_tag)
        if conflict:
            if conflict != eng_tag:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, eng_tag))
            continue
        engine_traits.languages[sxng_tag] = eng_tag