# client.py
  1. # SPDX-License-Identifier: AGPL-3.0-or-later
  2. # pylint: disable=missing-module-docstring, global-statement
  3. import asyncio
  4. import logging
  5. import random
  6. from ssl import SSLContext
  7. import threading
  8. from typing import Any, Dict
  9. import httpx
  10. from httpx_socks import AsyncProxyTransport
  11. from python_socks import parse_proxy_url, ProxyConnectionError, ProxyTimeoutError, ProxyError
  12. import uvloop
  13. from searx import logger
  14. uvloop.install()
  15. logger = logger.getChild('searx.network.client')
  16. LOOP = None
  17. SSLCONTEXTS: Dict[Any, SSLContext] = {}
  18. def shuffle_ciphers(ssl_context):
  19. """Shuffle httpx's default ciphers of a SSL context randomly.
  20. From `What Is TLS Fingerprint and How to Bypass It`_
  21. > When implementing TLS fingerprinting, servers can't operate based on a
  22. > locked-in whitelist database of fingerprints. New fingerprints appear
  23. > when web clients or TLS libraries release new versions. So, they have to
  24. > live off a blocklist database instead.
  25. > ...
  26. > It's safe to leave the first three as is but shuffle the remaining ciphers
  27. > and you can bypass the TLS fingerprint check.
  28. .. _What Is TLS Fingerprint and How to Bypass It:
  29. https://www.zenrows.com/blog/what-is-tls-fingerprint#how-to-bypass-tls-fingerprinting
  30. """
  31. c_list = httpx._config.DEFAULT_CIPHERS.split(':') # pylint: disable=protected-access
  32. sc_list, c_list = c_list[:3], c_list[3:]
  33. random.shuffle(c_list)
  34. ssl_context.set_ciphers(":".join(sc_list + c_list))
  35. def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
  36. key = (proxy_url, cert, verify, trust_env, http2)
  37. if key not in SSLCONTEXTS:
  38. SSLCONTEXTS[key] = httpx.create_ssl_context(cert, verify, trust_env, http2)
  39. shuffle_ciphers(SSLCONTEXTS[key])
  40. return SSLCONTEXTS[key]
  41. class AsyncHTTPTransportNoHttp(httpx.AsyncHTTPTransport):
  42. """Block HTTP request
  43. The constructor is blank because httpx.AsyncHTTPTransport.__init__ creates an SSLContext unconditionally:
  44. https://github.com/encode/httpx/blob/0f61aa58d66680c239ce43c8cdd453e7dc532bfc/httpx/_transports/default.py#L271
  45. Each SSLContext consumes more than 500kb of memory, since there is about one network per engine.
  46. In consequence, this class overrides all public methods
  47. For reference: https://github.com/encode/httpx/issues/2298
  48. """
  49. def __init__(self, *args, **kwargs):
  50. # pylint: disable=super-init-not-called
  51. # this on purpose if the base class is not called
  52. pass
  53. async def handle_async_request(self, request):
  54. raise httpx.UnsupportedProtocol('HTTP protocol is disabled')
  55. async def aclose(self) -> None:
  56. pass
  57. async def __aenter__(self):
  58. return self
  59. async def __aexit__(
  60. self,
  61. exc_type=None,
  62. exc_value=None,
  63. traceback=None,
  64. ) -> None:
  65. pass
  66. class AsyncProxyTransportFixed(AsyncProxyTransport):
  67. """Fix httpx_socks.AsyncProxyTransport
  68. Map python_socks exceptions to httpx.ProxyError exceptions
  69. """
  70. async def handle_async_request(self, request):
  71. try:
  72. return await super().handle_async_request(request)
  73. except ProxyConnectionError as e:
  74. raise httpx.ProxyError("ProxyConnectionError: " + e.strerror, request=request) from e
  75. except ProxyTimeoutError as e:
  76. raise httpx.ProxyError("ProxyTimeoutError: " + e.args[0], request=request) from e
  77. except ProxyError as e:
  78. raise httpx.ProxyError("ProxyError: " + e.args[0], request=request) from e
  79. def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
  80. # support socks5h (requests compatibility):
  81. # https://requests.readthedocs.io/en/master/user/advanced/#socks
  82. # socks5:// hostname is resolved on client side
  83. # socks5h:// hostname is resolved on proxy side
  84. rdns = False
  85. socks5h = 'socks5h://'
  86. if proxy_url.startswith(socks5h):
  87. proxy_url = 'socks5://' + proxy_url[len(socks5h) :]
  88. rdns = True
  89. proxy_type, proxy_host, proxy_port, proxy_username, proxy_password = parse_proxy_url(proxy_url)
  90. verify = get_sslcontexts(proxy_url, None, verify, True, http2) if verify is True else verify
  91. return AsyncProxyTransportFixed(
  92. proxy_type=proxy_type,
  93. proxy_host=proxy_host,
  94. proxy_port=proxy_port,
  95. username=proxy_username,
  96. password=proxy_password,
  97. rdns=rdns,
  98. loop=get_loop(),
  99. verify=verify,
  100. http2=http2,
  101. local_address=local_address,
  102. limits=limit,
  103. retries=retries,
  104. )
  105. def get_transport(verify, http2, local_address, proxy_url, limit, retries):
  106. verify = get_sslcontexts(None, None, verify, True, http2) if verify is True else verify
  107. return httpx.AsyncHTTPTransport(
  108. # pylint: disable=protected-access
  109. verify=verify,
  110. http2=http2,
  111. limits=limit,
  112. proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
  113. local_address=local_address,
  114. retries=retries,
  115. )
  116. def new_client(
  117. # pylint: disable=too-many-arguments
  118. enable_http,
  119. verify,
  120. enable_http2,
  121. max_connections,
  122. max_keepalive_connections,
  123. keepalive_expiry,
  124. proxies,
  125. local_address,
  126. retries,
  127. max_redirects,
  128. hook_log_response,
  129. ):
  130. limit = httpx.Limits(
  131. max_connections=max_connections,
  132. max_keepalive_connections=max_keepalive_connections,
  133. keepalive_expiry=keepalive_expiry,
  134. )
  135. # See https://www.python-httpx.org/advanced/#routing
  136. mounts = {}
  137. for pattern, proxy_url in proxies.items():
  138. if not enable_http and pattern.startswith('http://'):
  139. continue
  140. if proxy_url.startswith('socks4://') or proxy_url.startswith('socks5://') or proxy_url.startswith('socks5h://'):
  141. mounts[pattern] = get_transport_for_socks_proxy(
  142. verify, enable_http2, local_address, proxy_url, limit, retries
  143. )
  144. else:
  145. mounts[pattern] = get_transport(verify, enable_http2, local_address, proxy_url, limit, retries)
  146. if not enable_http:
  147. mounts['http://'] = AsyncHTTPTransportNoHttp()
  148. transport = get_transport(verify, enable_http2, local_address, None, limit, retries)
  149. event_hooks = None
  150. if hook_log_response:
  151. event_hooks = {'response': [hook_log_response]}
  152. return httpx.AsyncClient(
  153. transport=transport,
  154. mounts=mounts,
  155. max_redirects=max_redirects,
  156. event_hooks=event_hooks,
  157. )
def get_loop():
    """Return the shared asyncio event loop started by :py:obj:`init` (None before init ran)."""
    return LOOP
  160. def init():
  161. # log
  162. for logger_name in (
  163. 'httpx',
  164. 'httpcore.proxy',
  165. 'httpcore.connection',
  166. 'httpcore.http11',
  167. 'httpcore.http2',
  168. 'hpack.hpack',
  169. 'hpack.table',
  170. ):
  171. logging.getLogger(logger_name).setLevel(logging.WARNING)
  172. # loop
  173. def loop_thread():
  174. global LOOP
  175. LOOP = asyncio.new_event_loop()
  176. LOOP.run_forever()
  177. thread = threading.Thread(
  178. target=loop_thread,
  179. name='asyncio_loop',
  180. daemon=True,
  181. )
  182. thread.start()
  183. init()