network.py 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428
  1. # SPDX-License-Identifier: AGPL-3.0-or-later
  2. # pylint: disable=global-statement
  3. # pylint: disable=missing-module-docstring, missing-class-docstring
  4. import atexit
  5. import asyncio
  6. import ipaddress
  7. from itertools import cycle
  8. from typing import Dict
  9. import httpx
  10. from searx import logger, searx_debug
  11. from .client import new_client, get_loop, AsyncHTTPTransportNoHttp
  12. from .raise_for_httperror import raise_for_httperror
# dedicated sub-logger for the network package
logger = logger.getChild('network')

# key under which the fallback network is registered in NETWORKS
DEFAULT_NAME = '__DEFAULT__'

# registry of all configured networks, keyed by name
# (populated by initialize(): 'ipv4', 'ipv6', engine names, 'image_proxy', ...)
NETWORKS: Dict[str, 'Network'] = {}

# requests compatibility when reading proxy settings from settings.yml
# (maps requests-style proxy keys, with or without trailing colon, to
# httpx URL-pattern prefixes)
PROXY_PATTERN_MAPPING = {
    'http': 'http://',
    'https': 'https://',
    'socks4': 'socks4://',
    'socks5': 'socks5://',
    'socks5h': 'socks5h://',
    'http:': 'http://',
    'https:': 'https://',
    'socks4:': 'socks4://',
    'socks5:': 'socks5://',
    'socks5h:': 'socks5h://',
}

# wildcard bind addresses per IP family (used as `local_addresses` values)
ADDRESS_MAPPING = {'ipv4': '0.0.0.0', 'ipv6': '::'}
class Network:
    """A named pool of ``httpx.AsyncClient`` objects that share one outgoing
    configuration (TLS verification, HTTP/2, connection limits, proxies,
    source IP addresses, retry policy).

    Local addresses and proxy URLs are rotated round-robin via infinite
    generators, and one client is cached per resulting combination
    (see :py:meth:`get_client`).
    """

    __slots__ = (
        'enable_http',
        'verify',
        'enable_http2',
        'max_connections',
        'max_keepalive_connections',
        'keepalive_expiry',
        'local_addresses',
        'proxies',
        'using_tor_proxy',
        'max_redirects',
        'retries',
        'retry_on_http_error',
        '_local_addresses_cycle',
        '_proxies_cycle',
        '_clients',
        '_logger',
    )

    # class-level cache: proxies tuple -> bool, shared by every Network,
    # so the check.torproject.org round-trip happens once per proxy setup
    _TOR_CHECK_RESULT = {}

    def __init__(
        # pylint: disable=too-many-arguments
        self,
        enable_http=True,
        verify=True,
        enable_http2=False,
        max_connections=None,
        max_keepalive_connections=None,
        keepalive_expiry=None,
        proxies=None,
        using_tor_proxy=False,
        local_addresses=None,
        retries=0,
        retry_on_http_error=None,
        max_redirects=30,
        logger_name=None,
    ):
        """Store the configuration, build the round-robin generators and
        validate the parameters (see :py:meth:`check_parameters`).

        ``proxies`` may be a single URL (str), a dict of pattern -> URL(s),
        or None.  ``local_addresses`` may be a single IP/CIDR (str), a list
        of them, or None.  ``retry_on_http_error`` may be True (retry on any
        4xx/5xx), a status code (int), a list of status codes, or None.
        """
        self.enable_http = enable_http
        self.verify = verify
        self.enable_http2 = enable_http2
        self.max_connections = max_connections
        self.max_keepalive_connections = max_keepalive_connections
        self.keepalive_expiry = keepalive_expiry
        self.proxies = proxies
        self.using_tor_proxy = using_tor_proxy
        self.local_addresses = local_addresses
        self.retries = retries
        self.retry_on_http_error = retry_on_http_error
        self.max_redirects = max_redirects
        # infinite generators; each get_client() call advances both
        self._local_addresses_cycle = self.get_ipaddress_cycle()
        self._proxies_cycle = self.get_proxy_cycles()
        # cache of AsyncClient keyed by (verify, max_redirects, local_address, proxies)
        self._clients = {}
        self._logger = logger.getChild(logger_name) if logger_name else logger
        self.check_parameters()

    def check_parameters(self):
        """Validate the configuration.

        Raises ``ValueError`` from the ``ipaddress`` module when a local
        address is not a valid IP address / network, and a ``ValueError``
        when ``proxies`` has an unsupported type.
        """
        for address in self.iter_ipaddresses():
            if '/' in address:
                # CIDR notation -> must parse as a network (host bits allowed)
                ipaddress.ip_network(address, False)
            else:
                ipaddress.ip_address(address)

        if self.proxies is not None and not isinstance(self.proxies, (str, dict)):
            raise ValueError('proxies type has to be str, dict or None')

    def iter_ipaddresses(self):
        """Yield the configured local addresses; a bare str is treated as a
        one-element list."""
        local_addresses = self.local_addresses
        if not local_addresses:
            return
        if isinstance(local_addresses, str):
            local_addresses = [local_addresses]
        yield from local_addresses

    def get_ipaddress_cycle(self):
        """Infinite generator cycling over every configured source address.

        CIDR entries are expanded to their host addresses.  When no address
        is configured, yields ``None`` forever (httpx then picks the
        default source address).
        """
        while True:
            count = 0
            for address in self.iter_ipaddresses():
                if '/' in address:
                    for a in ipaddress.ip_network(address, False).hosts():
                        yield str(a)
                        count += 1
                else:
                    a = ipaddress.ip_address(address)
                    yield str(a)
                    count += 1
            if count == 0:
                yield None

    def iter_proxies(self):
        """Yield ``(pattern, [proxy_url, ...])`` pairs from ``self.proxies``,
        normalizing requests-style keys through PROXY_PATTERN_MAPPING."""
        if not self.proxies:
            return
        # https://www.python-httpx.org/compatibility/#proxy-keys
        if isinstance(self.proxies, str):
            yield 'all://', [self.proxies]
        else:
            for pattern, proxy_url in self.proxies.items():
                pattern = PROXY_PATTERN_MAPPING.get(pattern, pattern)
                if isinstance(proxy_url, str):
                    proxy_url = [proxy_url]
                yield pattern, proxy_url

    def get_proxy_cycles(self):
        """Infinite generator; each iteration yields a tuple of
        ``(pattern, proxy_url)`` pairs, advancing one URL per pattern
        round-robin.  A tuple (hashable) so it can be part of a client
        cache key."""
        proxy_settings = {}
        for pattern, proxy_urls in self.iter_proxies():
            proxy_settings[pattern] = cycle(proxy_urls)
        while True:
            # pylint: disable=stop-iteration-return
            yield tuple((pattern, next(proxy_url_cycle)) for pattern, proxy_url_cycle in proxy_settings.items())

    async def log_response(self, response: httpx.Response):
        """Debug-log one request/response line (installed as an httpx
        response event hook when searx_debug is set)."""
        request = response.request
        status = f"{response.status_code} {response.reason_phrase}"
        response_line = f"{response.http_version} {status}"
        content_type = response.headers.get("Content-Type")
        content_type = f' ({content_type})' if content_type else ''
        self._logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"{content_type}')

    @staticmethod
    async def check_tor_proxy(client: httpx.AsyncClient, proxies) -> bool:
        """Return True when *client* actually routes through Tor.

        Every mounted transport must resolve DNS remotely (rdns), and
        https://check.torproject.org/api/ip must report ``IsTor``.  Results
        are cached per *proxies* tuple in ``Network._TOR_CHECK_RESULT``.

        NOTE(review): relies on httpx private attributes (``_mounts``,
        ``_pool``, ``_rdns``) — may break on httpx upgrades.
        """
        if proxies in Network._TOR_CHECK_RESULT:
            return Network._TOR_CHECK_RESULT[proxies]

        result = True
        # ignore client._transport because it is not used with all://
        for transport in client._mounts.values():  # pylint: disable=protected-access
            if isinstance(transport, AsyncHTTPTransportNoHttp):
                # the "http disabled" placeholder transport — irrelevant here
                continue
            if getattr(transport, "_pool") and getattr(
                transport._pool, "_rdns", False  # pylint: disable=protected-access
            ):
                continue
            # a transport without remote DNS would leak lookups outside Tor
            return False
        response = await client.get("https://check.torproject.org/api/ip", timeout=60)
        if not response.json()["IsTor"]:
            result = False
        Network._TOR_CHECK_RESULT[proxies] = result
        return result

    async def get_client(self, verify=None, max_redirects=None):
        """Return a (possibly cached) AsyncClient for the next
        local-address / proxy combination.

        Raises ``httpx.ProxyError`` when ``using_tor_proxy`` is set but the
        Tor check fails.
        """
        verify = self.verify if verify is None else verify
        max_redirects = self.max_redirects if max_redirects is None else max_redirects
        local_address = next(self._local_addresses_cycle)
        proxies = next(self._proxies_cycle)  # is a tuple so it can be part of the key
        key = (verify, max_redirects, local_address, proxies)
        hook_log_response = self.log_response if searx_debug else None
        if key not in self._clients or self._clients[key].is_closed:
            client = new_client(
                self.enable_http,
                verify,
                self.enable_http2,
                self.max_connections,
                self.max_keepalive_connections,
                self.keepalive_expiry,
                dict(proxies),
                local_address,
                0,
                max_redirects,
                hook_log_response,
            )
            if self.using_tor_proxy and not await self.check_tor_proxy(client, proxies):
                await client.aclose()
                raise httpx.ProxyError('Network configuration problem: not using Tor')
            self._clients[key] = client
        return self._clients[key]

    async def aclose(self):
        """Close every cached client, swallowing httpx.HTTPError."""

        async def close_client(client):
            try:
                await client.aclose()
            except httpx.HTTPError:
                pass

        await asyncio.gather(*[close_client(client) for client in self._clients.values()], return_exceptions=False)

    @staticmethod
    def extract_kwargs_clients(kwargs):
        """Pop client-level options (verify, max_redirects) out of *kwargs*
        and return them; also rewrites requests' ``allow_redirects`` to
        httpx' ``follow_redirects`` in place."""
        kwargs_clients = {}
        if 'verify' in kwargs:
            kwargs_clients['verify'] = kwargs.pop('verify')
        if 'max_redirects' in kwargs:
            kwargs_clients['max_redirects'] = kwargs.pop('max_redirects')
        if 'allow_redirects' in kwargs:
            # see https://github.com/encode/httpx/pull/1808
            kwargs['follow_redirects'] = kwargs.pop('allow_redirects')
        return kwargs_clients

    @staticmethod
    def extract_do_raise_for_httperror(kwargs):
        """Pop ``raise_for_httperror`` out of *kwargs*; default is True."""
        do_raise_for_httperror = True
        if 'raise_for_httperror' in kwargs:
            do_raise_for_httperror = kwargs['raise_for_httperror']
            del kwargs['raise_for_httperror']
        return do_raise_for_httperror

    def patch_response(self, response, do_raise_for_httperror):
        """Add requests-compatible ``.ok`` to *response* and, when asked,
        run raise_for_httperror (logging a warning before re-raising)."""
        if isinstance(response, httpx.Response):
            # requests compatibility (response is not streamed)
            # see also https://www.python-httpx.org/compatibility/#checking-for-4xx5xx-responses
            response.ok = not response.is_error

            # raise an exception
            if do_raise_for_httperror:
                try:
                    raise_for_httperror(response)
                except:  # noqa: E722 — logs, then always re-raises
                    self._logger.warning(f"HTTP Request failed: {response.request.method} {response.request.url}")
                    raise

        return response

    def is_valid_response(self, response) -> bool:
        """Return False when the status code matches the configured
        ``retry_on_http_error`` policy (True / int / list of int), i.e. the
        request should be retried."""
        # pylint: disable=too-many-boolean-expressions
        if (
            (self.retry_on_http_error is True and 400 <= response.status_code <= 599)
            or (isinstance(self.retry_on_http_error, list) and response.status_code in self.retry_on_http_error)
            or (isinstance(self.retry_on_http_error, int) and response.status_code == self.retry_on_http_error)
        ):
            return False
        return True

    async def call_client(self, stream, method, url, **kwargs):
        """Perform one request (or open a stream) with retries.

        Retries up to ``self.retries`` times on request errors or on
        responses rejected by :py:meth:`is_valid_response`.  A first
        ``httpx.RemoteProtocolError`` (server closed the connection) gets
        one free retry with a fresh client.
        """
        retries = self.retries
        was_disconnected = False
        do_raise_for_httperror = Network.extract_do_raise_for_httperror(kwargs)
        kwargs_clients = Network.extract_kwargs_clients(kwargs)
        while retries >= 0:  # pragma: no cover
            client = await self.get_client(**kwargs_clients)
            try:
                if stream:
                    # NOTE(review): not awaited — client.stream(...) returns a
                    # context manager the caller must enter
                    response = client.stream(method, url, **kwargs)
                else:
                    response = await client.request(method, url, **kwargs)
                if self.is_valid_response(response) or retries <= 0:
                    return self.patch_response(response, do_raise_for_httperror)
            except httpx.RemoteProtocolError as e:
                if not was_disconnected:
                    # the server has closed the connection:
                    # try again without decreasing the retries variable & with a new HTTP client
                    was_disconnected = True
                    await client.aclose()
                    self._logger.warning('httpx.RemoteProtocolError: the server has disconnected, retrying')
                    continue
                if retries <= 0:
                    raise e
            except (httpx.RequestError, httpx.HTTPStatusError) as e:
                if retries <= 0:
                    raise e
            retries -= 1

    async def request(self, method, url, **kwargs):
        """Send a regular (non-streamed) request through the pool."""
        return await self.call_client(False, method, url, **kwargs)

    async def stream(self, method, url, **kwargs):
        """Open a streamed request through the pool."""
        return await self.call_client(True, method, url, **kwargs)

    @classmethod
    async def aclose_all(cls):
        """Close the clients of every registered network."""
        await asyncio.gather(*[network.aclose() for network in NETWORKS.values()], return_exceptions=False)
  266. def get_network(name=None):
  267. return NETWORKS.get(name or DEFAULT_NAME)
  268. def check_network_configuration():
  269. async def check():
  270. exception_count = 0
  271. for network in NETWORKS.values():
  272. if network.using_tor_proxy:
  273. try:
  274. await network.get_client()
  275. except Exception: # pylint: disable=broad-except
  276. network._logger.exception('Error') # pylint: disable=protected-access
  277. exception_count += 1
  278. return exception_count
  279. future = asyncio.run_coroutine_threadsafe(check(), get_loop())
  280. exception_count = future.result()
  281. if exception_count > 0:
  282. raise RuntimeError("Invalid network configuration")
def initialize(settings_engines=None, settings_outgoing=None):
    """(Re)build the NETWORKS registry from the settings.

    Creates the default / ipv4 / ipv6 networks, the networks declared in
    ``outgoing.networks``, one network per engine (inline dict or named
    reference) and a dedicated ``image_proxy`` network.  Existing networks
    are closed first via ``done()``.
    """
    # pylint: disable=import-outside-toplevel)
    from searx.engines import engines
    from searx import settings

    # pylint: enable=import-outside-toplevel)
    settings_engines = settings_engines or settings['engines']
    settings_outgoing = settings_outgoing or settings['outgoing']

    # default parameters for AsyncHTTPTransport
    # see https://github.com/encode/httpx/blob/e05a5372eb6172287458b37447c30f650047e1b8/httpx/_transports/default.py#L108-L121  # pylint: disable=line-too-long
    default_params = {
        'enable_http': False,
        'verify': settings_outgoing['verify'],
        'enable_http2': settings_outgoing['enable_http2'],
        'max_connections': settings_outgoing['pool_connections'],
        'max_keepalive_connections': settings_outgoing['pool_maxsize'],
        'keepalive_expiry': settings_outgoing['keepalive_expiry'],
        'local_addresses': settings_outgoing['source_ips'],
        'using_tor_proxy': settings_outgoing['using_tor_proxy'],
        'proxies': settings_outgoing['proxies'],
        'max_redirects': settings_outgoing['max_redirects'],
        'retries': settings_outgoing['retries'],
        'retry_on_http_error': None,
    }

    def new_network(params, logger_name=None):
        # build a Network from default_params overridden by params
        nonlocal default_params
        result = {}
        result.update(default_params)
        result.update(params)
        if logger_name:
            result['logger_name'] = logger_name
        return Network(**result)

    def iter_networks():
        # yield (name, engine module, engine.network attribute) for every
        # configured engine that is actually loaded
        nonlocal settings_engines
        for engine_spec in settings_engines:
            engine_name = engine_spec['name']
            engine = engines.get(engine_name)
            if engine is None:
                continue
            network = getattr(engine, 'network', None)
            yield engine_name, engine, network

    if NETWORKS:
        # re-initialization: close the previous clients first
        done()
    NETWORKS.clear()
    NETWORKS[DEFAULT_NAME] = new_network({}, logger_name='default')
    NETWORKS['ipv4'] = new_network({'local_addresses': '0.0.0.0'}, logger_name='ipv4')
    NETWORKS['ipv6'] = new_network({'local_addresses': '::'}, logger_name='ipv6')

    # define networks from outgoing.networks
    for network_name, network in settings_outgoing['networks'].items():
        NETWORKS[network_name] = new_network(network, logger_name=network_name)

    # define networks from engines.[i].network (except references)
    for engine_name, engine, network in iter_networks():
        if network is None:
            # no network attribute: build one from engine-level attributes,
            # falling back to the defaults
            network = {}
            for attribute_name, attribute_value in default_params.items():
                if hasattr(engine, attribute_name):
                    network[attribute_name] = getattr(engine, attribute_name)
                else:
                    network[attribute_name] = attribute_value
            NETWORKS[engine_name] = new_network(network, logger_name=engine_name)
        elif isinstance(network, dict):
            NETWORKS[engine_name] = new_network(network, logger_name=engine_name)

    # define networks from engines.[i].network (references)
    for engine_name, engine, network in iter_networks():
        if isinstance(network, str):
            NETWORKS[engine_name] = NETWORKS[network]

    # the /image_proxy endpoint has a dedicated network.
    # same parameters than the default network, but HTTP/2 is disabled.
    # It decreases the CPU load average, and the total time is more or less the same
    if 'image_proxy' not in NETWORKS:
        image_proxy_params = default_params.copy()
        image_proxy_params['enable_http2'] = False
        NETWORKS['image_proxy'] = new_network(image_proxy_params, logger_name='image_proxy')
  355. @atexit.register
  356. def done():
  357. """Close all HTTP client
  358. Avoid a warning at exit
  359. See https://github.com/encode/httpx/pull/2026
  360. Note: since Network.aclose has to be async, it is not possible to call this method on Network.__del__
  361. So Network.aclose is called here using atexit.register
  362. """
  363. try:
  364. loop = get_loop()
  365. if loop:
  366. future = asyncio.run_coroutine_threadsafe(Network.aclose_all(), loop)
  367. # wait 3 seconds to close the HTTP clients
  368. future.result(3)
  369. finally:
  370. NETWORKS.clear()
  371. NETWORKS[DEFAULT_NAME] = Network()