diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-05-30 03:10:22 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-05-30 03:10:22 +0000 |
commit | bb3682b5a9a4d0e8e45f74de8c21dba3d5e6e0ab (patch) | |
tree | d7890656a89a7d2f3497a5793dd65aa746f7cabd /yt_dlp/networking | |
parent | Adding upstream version 2024.04.09. (diff) | |
download | yt-dlp-upstream/2024.05.26.tar.xz yt-dlp-upstream/2024.05.26.zip |
Adding upstream version 2024.05.26. (tag: upstream/2024.05.26)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'yt_dlp/networking')
-rw-r--r-- | yt_dlp/networking/_curlcffi.py | 24 | ||||
-rw-r--r-- | yt_dlp/networking/_requests.py | 18 | ||||
-rw-r--r-- | yt_dlp/networking/common.py | 10 |
3 files changed, 47 insertions, 5 deletions
diff --git a/yt_dlp/networking/_curlcffi.py b/yt_dlp/networking/_curlcffi.py index 39d1f70..f2df399 100644 --- a/yt_dlp/networking/_curlcffi.py +++ b/yt_dlp/networking/_curlcffi.py @@ -21,7 +21,7 @@ from .exceptions import ( TransportError, ) from .impersonate import ImpersonateRequestHandler, ImpersonateTarget -from ..dependencies import curl_cffi +from ..dependencies import curl_cffi, certifi from ..utils import int_or_none if curl_cffi is None: @@ -132,6 +132,16 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin): extensions.pop('cookiejar', None) extensions.pop('timeout', None) + def send(self, request: Request) -> Response: + target = self._get_request_target(request) + try: + response = super().send(request) + except HTTPError as e: + e.response.extensions['impersonate'] = target + raise + response.extensions['impersonate'] = target + return response + def _send(self, request: Request): max_redirects_exceeded = False session: curl_cffi.requests.Session = self._get_instance( @@ -156,6 +166,13 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin): # See: https://curl.se/libcurl/c/CURLOPT_HTTPPROXYTUNNEL.html session.curl.setopt(CurlOpt.HTTPPROXYTUNNEL, 1) + # curl_cffi does not currently set these for proxies + session.curl.setopt(CurlOpt.PROXY_CAINFO, certifi.where()) + + if not self.verify: + session.curl.setopt(CurlOpt.PROXY_SSL_VERIFYPEER, 0) + session.curl.setopt(CurlOpt.PROXY_SSL_VERIFYHOST, 0) + headers = self._get_impersonate_headers(request) if self._client_cert: @@ -203,7 +220,10 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin): max_redirects_exceeded = True curl_response = e.response - elif e.code == CurlECode.PROXY: + elif ( + e.code == CurlECode.PROXY + or (e.code == CurlECode.RECV_ERROR and 'Received HTTP code 407 from proxy after CONNECT' in str(e)) + ): raise ProxyError(cause=e) from e else: raise TransportError(cause=e) from e diff --git a/yt_dlp/networking/_requests.py 
b/yt_dlp/networking/_requests.py index e3edc77..6397a2c 100644 --- a/yt_dlp/networking/_requests.py +++ b/yt_dlp/networking/_requests.py @@ -28,6 +28,7 @@ import requests.adapters import requests.utils import urllib3.connection import urllib3.exceptions +import urllib3.util from ._helper import ( InstanceStoreMixin, @@ -180,10 +181,25 @@ class RequestsHTTPAdapter(requests.adapters.HTTPAdapter): extra_kwargs['proxy_ssl_context'] = self._proxy_ssl_context return super().proxy_manager_for(proxy, **proxy_kwargs, **self._pm_args, **extra_kwargs) + # Skip `requests` internal verification; we use our own SSLContext + # requests 2.31.0+ def cert_verify(*args, **kwargs): - # lean on SSLContext for cert verification pass + # requests 2.31.0-2.32.1 + def _get_connection(self, request, *_, proxies=None, **__): + return self.get_connection(request.url, proxies) + + # requests 2.32.2+: Reimplementation without `_urllib3_request_context` + def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None): + url = urllib3.util.parse_url(request.url).url + + manager = self.poolmanager + if proxy := select_proxy(url, proxies): + manager = self.proxy_manager_for(proxy) + + return manager.connection_from_url(url) + class RequestsSession(requests.sessions.Session): """ diff --git a/yt_dlp/networking/common.py b/yt_dlp/networking/common.py index 4c66ba6..d473e16 100644 --- a/yt_dlp/networking/common.py +++ b/yt_dlp/networking/common.py @@ -31,6 +31,8 @@ from ..utils import ( ) from ..utils.networking import HTTPHeaderDict, normalize_url +DEFAULT_TIMEOUT = 20 + def register_preference(*handlers: type[RequestHandler]): assert all(issubclass(handler, RequestHandler) for handler in handlers) @@ -235,7 +237,7 @@ class RequestHandler(abc.ABC): self._logger = logger self.headers = headers or {} self.cookiejar = cookiejar if cookiejar is not None else YoutubeDLCookieJar() - self.timeout = float(timeout or 20) + self.timeout = float(timeout or DEFAULT_TIMEOUT) self.proxies = 
proxies or {} self.source_address = source_address self.verbose = verbose @@ -497,6 +499,7 @@ class Response(io.IOBase): @param headers: response headers. @param status: Response HTTP status code. Default is 200 OK. @param reason: HTTP status reason. Will use built-in reasons based on status code if not provided. + @param extensions: Dictionary of handler-specific response extensions. """ def __init__( @@ -505,7 +508,9 @@ class Response(io.IOBase): url: str, headers: Mapping[str, str], status: int = 200, - reason: str = None): + reason: str = None, + extensions: dict = None + ): self.fp = fp self.headers = Message() @@ -517,6 +522,7 @@ class Response(io.IOBase): self.reason = reason or HTTPStatus(status).phrase except ValueError: self.reason = None + self.extensions = extensions or {} def readable(self): return self.fp.readable() |