
Merge pull request #1552 from unixfox/fix-consent-google

Replace the ucbcb=1 consent-bypass parameter with the CONSENT=YES+ cookie for the Google engines
Markus Heiser 2 years ago
parent commit b14ec7286d
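What the diffs below have in common: instead of appending the ucbcb=1 consent-bypass parameter to each Google/YouTube request URL, every engine now sets the CONSENT cookie to "YES+" on the outgoing request. A minimal sketch of the new pattern, standalone and outside the real engines (the URL and query value here are illustrative only; the request()/params convention follows SearXNG's engine interface):

    # Sketch only, not part of the diff: the shared pattern applied to the
    # Google-family engines in searx/engines/*.py.
    from urllib.parse import urlencode

    def request(query, params):
        # build the search URL without the former 'ucbcb': 1 parameter
        params['url'] = 'https://www.google.com/search?' + urlencode({'q': query})
        # assert consent via cookie instead of the URL parameter
        params['cookies']['CONSENT'] = "YES+"
        return params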

+ 1 - 1
searx/engines/google.py

@@ -287,7 +287,6 @@ def request(query, params):
                 'oe': "utf8",
                 'start': offset,
                 'filter': '0',
-                'ucbcb': 1,
                 **additional_parameters,
             }
         )
@@ -299,6 +298,7 @@ def request(query, params):
         query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
     params['url'] = query_url
 
+    params['cookies']['CONSENT'] = "YES+"
     params['headers'].update(lang_info['headers'])
     if use_mobile_ui:
         params['headers']['Accept'] = '*/*'

+ 2 - 3
searx/engines/google_images.py

@@ -132,9 +132,7 @@ def request(query, params):
         + lang_info['subdomain']
         + '/search'
         + "?"
-        + urlencode(
-            {'q': query, 'tbm': "isch", **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'num': 30, 'ucbcb': 1}
-        )
+        + urlencode({'q': query, 'tbm': "isch", **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'num': 30})
     )
 
     if params['time_range'] in time_range_dict:
@@ -143,6 +141,7 @@ def request(query, params):
         query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
     params['url'] = query_url
 
+    params['cookies']['CONSENT'] = "YES+"
     params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
     return params

+ 2 - 3
searx/engines/google_news.py

@@ -97,13 +97,12 @@ def request(query, params):
         + lang_info['subdomain']
         + '/search'
         + "?"
-        + urlencode(
-            {'q': query, **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'gl': lang_info['country'], 'ucbcb': 1}
-        )
+        + urlencode({'q': query, **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'gl': lang_info['country']})
         + ('&ceid=%s' % ceid)
     )  # ceid includes a ':' character which must not be urlencoded
     params['url'] = query_url
 
+    params['cookies']['CONSENT'] = "YES+"
     params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
 

+ 2 - 1
searx/engines/google_play_apps.py

@@ -22,11 +22,12 @@ about = {
 }
 
 categories = ["files", "apps"]
-search_url = "https://play.google.com/store/search?{query}&c=apps&ucbcb=1"
+search_url = "https://play.google.com/store/search?{query}&c=apps"
 
 
 def request(query, params):
     params["url"] = search_url.format(query=urlencode({"q": query}))
+    params['cookies']['CONSENT'] = "YES+"
 
     return params
 

+ 2 - 1
searx/engines/google_scholar.py

@@ -85,12 +85,13 @@ def request(query, params):
         + lang_info['subdomain']
         + '/scholar'
         + "?"
-        + urlencode({'q': query, **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'start': offset, 'ucbcb': 1})
+        + urlencode({'q': query, **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'start': offset})
     )
 
     query_url += time_range_url(params)
     params['url'] = query_url
 
+    params['cookies']['CONSENT'] = "YES+"
     params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
 

+ 2 - 1
searx/engines/google_videos.py

@@ -118,7 +118,7 @@ def request(query, params):
         + lang_info['subdomain']
         + '/search'
         + "?"
-        + urlencode({'q': query, 'tbm': "vid", **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'ucbcb': 1})
+        + urlencode({'q': query, 'tbm': "vid", **lang_info['params'], 'ie': "utf8", 'oe': "utf8"})
     )
 
     if params['time_range'] in time_range_dict:
@@ -127,6 +127,7 @@ def request(query, params):
         query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
     params['url'] = query_url
 
+    params['cookies']['CONSENT'] = "YES+"
     params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
     return params

+ 2 - 1
searx/engines/youtube_noapi.py

@@ -25,7 +25,7 @@ time_range_support = True
 
 # search-url
 base_url = 'https://www.youtube.com/results'
-search_url = base_url + '?search_query={query}&page={page}&ucbcb=1'
+search_url = base_url + '?search_query={query}&page={page}'
 time_range_url = '&sp=EgII{time_range}%253D%253D'
 # the key seems to be constant
 next_page_url = 'https://www.youtube.com/youtubei/v1/search?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'
@@ -36,6 +36,7 @@ base_youtube_url = 'https://www.youtube.com/watch?v='
 
 # do search-request
 def request(query, params):
+    params['cookies']['CONSENT'] = "YES+"
     if not params['engine_data'].get('next_page_token'):
         params['url'] = search_url.format(query=quote_plus(query), page=params['pageno'])
         if params['time_range'] in time_range_dict:
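
A quick standalone sanity check of the cookie behaviour, outside SearXNG, can be done with the requests library. This is a sketch only; whether Google serves a consent interstitial without the cookie depends on the client's region (EU clients are typically redirected to consent.google.com).

    import requests

    # Sketch only: compare a Google search request with and without the
    # CONSENT cookie and inspect the redirect target, if any.
    for cookies in ({}, {"CONSENT": "YES+"}):
        resp = requests.get(
            "https://www.google.com/search",
            params={"q": "searxng"},
            cookies=cookies,
            headers={"User-Agent": "Mozilla/5.0"},
            allow_redirects=False,
        )
        print(cookies, resp.status_code, resp.headers.get("location"))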