@@ -10,9 +10,8 @@ import re
 from collections.abc import Iterator
 from urllib.parse import urlparse, urlunparse, parse_qsl, urlencode
 
-import httpx
-
 from searx.data.core import get_cache, log
+from searx.network import get as http_get
 
 RuleType = tuple[str, list[str], list[str]]
 
@@ -44,7 +43,7 @@ class TrackerPatternsDB:
             self.cache.properties.set("tracker_patterns loaded", "OK")
             self.load()
         # F I X M E:
-        #     do we need a maintenance .. rember: database is stored
+        #     do we need a maintenance .. remember: database is stored
         #     in /tmp and will be rebuild during the reboot anyway
 
     def load(self):
@@ -71,7 +70,7 @@ class TrackerPatternsDB:
     def iter_clear_list(self) -> Iterator[RuleType]:
         resp = None
         for url in self.CLEAR_LIST_URL:
-            resp = httpx.get(url, timeout=3)
+            resp = http_get(url, timeout=3)
             if resp.status_code == 200:
                 break
             log.warning(f"TRACKER_PATTERNS: ClearURL ignore HTTP {resp.status_code} {url}")
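For context: the patched `iter_clear_list()` keeps its existing behaviour of trying each mirror in `CLEAR_LIST_URL` until one answers with HTTP 200; only the transport changes from a direct `httpx.get()` call to `searx.network.get()`, which, as I understand it, routes the download through SearXNG's own outgoing-request handling instead of a bare httpx call. Below is a minimal sketch of that fallback pattern, with hypothetical mirror URLs and the HTTP getter passed in as a parameter so the snippet also runs outside a SearXNG instance:

```python
from collections.abc import Callable

# Hypothetical mirror list, for illustration only (the real URLs live in
# TrackerPatternsDB.CLEAR_LIST_URL).
CLEAR_LIST_URL = [
    "https://rules.example.org/data.minify.json",
    "https://mirror.example.com/data.minify.json",
]


def fetch_first_ok(urls: list[str], http_get: Callable, timeout: int = 3):
    """Return the first response with status 200, or None if every mirror fails."""
    for url in urls:
        resp = http_get(url, timeout=timeout)
        if resp.status_code == 200:
            return resp
        print(f"ignore HTTP {resp.status_code} {url}")
    return None


# Inside SearXNG:   from searx.network import get as http_get
# Outside SearXNG:  import httpx; http_get = httpx.get
# resp = fetch_first_ok(CLEAR_LIST_URL, http_get)
```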