author     Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-17 07:54:10 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-17 07:54:10 +0000
commit     51b156e61c2761f69bb4ca39e115077705083406 (patch)
tree       25bcc4265216ca429a2275c577fc631098a181c0 /test
parent     Releasing progress-linux version 2024.03.10-1~progress7.99u1. (diff)
download   yt-dlp-51b156e61c2761f69bb4ca39e115077705083406.tar.xz
           yt-dlp-51b156e61c2761f69bb4ca39e115077705083406.zip
Merging upstream version 2024.04.09.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'test')
-rw-r--r--   test/test_YoutubeDL.py     4
-rw-r--r--   test/test_cookies.py       4
-rw-r--r--   test/test_networking.py    632
-rw-r--r--   test/test_socks.py         33
-rw-r--r--   test/test_traversal.py     444
-rw-r--r--   test/test_utils.py         383
-rw-r--r--   test/test_websockets.py    53
7 files changed, 1004 insertions, 549 deletions
diff --git a/test/test_YoutubeDL.py b/test/test_YoutubeDL.py
index 6be47af..5242cf8 100644
--- a/test/test_YoutubeDL.py
+++ b/test/test_YoutubeDL.py
@@ -183,7 +183,7 @@ class TestFormatSelection(unittest.TestCase):
]
info_dict = _make_result(formats)
- ydl = YDL({'format': 'best'})
+ ydl = YDL({'format': 'best', 'format_sort': ['abr', 'ext']})
ydl.sort_formats(info_dict)
ydl.process_ie_result(copy.deepcopy(info_dict))
downloaded = ydl.downloaded_info_dicts[0]
@@ -195,7 +195,7 @@ class TestFormatSelection(unittest.TestCase):
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'mp3-64')
- ydl = YDL({'prefer_free_formats': True})
+ ydl = YDL({'prefer_free_formats': True, 'format_sort': ['abr', 'ext']})
ydl.sort_formats(info_dict)
ydl.process_ie_result(copy.deepcopy(info_dict))
downloaded = ydl.downloaded_info_dicts[0]
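
The test_YoutubeDL.py hunks above pin an explicit 'format_sort': ['abr', 'ext'] next to 'format': 'best' and 'prefer_free_formats', so the expected winner no longer depends on the default sort order. As a minimal sketch of how the same two options combine in normal use (the URL is a placeholder, not from this diff; 'simulate' just avoids downloading):

    from yt_dlp import YoutubeDL

    # Prefer higher audio bitrate first, then extension, when resolving 'best'
    opts = {
        'format': 'best',
        'format_sort': ['abr', 'ext'],
        'simulate': True,
    }
    with YoutubeDL(opts) as ydl:
        info = ydl.extract_info('https://example.com/video', download=False)
        print(info.get('format_id'))
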
diff --git a/test/test_cookies.py b/test/test_cookies.py
index 5282ef6..bd61f30 100644
--- a/test/test_cookies.py
+++ b/test/test_cookies.py
@@ -1,5 +1,5 @@
+import datetime as dt
import unittest
-from datetime import datetime, timezone
from yt_dlp import cookies
from yt_dlp.cookies import (
@@ -138,7 +138,7 @@ class TestCookies(unittest.TestCase):
self.assertEqual(cookie.name, 'foo')
self.assertEqual(cookie.value, 'test%20%3Bcookie')
self.assertFalse(cookie.secure)
- expected_expiration = datetime(2021, 6, 18, 21, 39, 19, tzinfo=timezone.utc)
+ expected_expiration = dt.datetime(2021, 6, 18, 21, 39, 19, tzinfo=dt.timezone.utc)
self.assertEqual(cookie.expires, int(expected_expiration.timestamp()))
def test_pbkdf2_sha1(self):
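
The test_cookies.py change replaces the from-imports with a module-level 'import datetime as dt', so the call sites read dt.datetime/dt.timezone. The expiry assertion itself is unchanged; a quick sketch of what the updated line computes:

    import datetime as dt

    # Aware UTC datetime -> Unix timestamp, as asserted against cookie.expires above
    expected_expiration = dt.datetime(2021, 6, 18, 21, 39, 19, tzinfo=dt.timezone.utc)
    print(int(expected_expiration.timestamp()))  # 1624052359
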
diff --git a/test/test_networking.py b/test/test_networking.py
index 628f1f1..b50f70d 100644
--- a/test/test_networking.py
+++ b/test/test_networking.py
@@ -27,9 +27,10 @@ import zlib
from email.message import Message
from http.cookiejar import CookieJar
+from test.conftest import validate_and_send
from test.helper import FakeYDL, http_server_port, verify_address_availability
from yt_dlp.cookies import YoutubeDLCookieJar
-from yt_dlp.dependencies import brotli, requests, urllib3
+from yt_dlp.dependencies import brotli, curl_cffi, requests, urllib3
from yt_dlp.networking import (
HEADRequest,
PUTRequest,
@@ -50,10 +51,13 @@ from yt_dlp.networking.exceptions import (
TransportError,
UnsupportedRequest,
)
+from yt_dlp.networking.impersonate import (
+ ImpersonateRequestHandler,
+ ImpersonateTarget,
+)
+from yt_dlp.utils import YoutubeDLError
from yt_dlp.utils._utils import _YDLLogger as FakeLogger
-from yt_dlp.utils.networking import HTTPHeaderDict
-
-from test.conftest import validate_and_send
+from yt_dlp.utils.networking import HTTPHeaderDict, std_headers
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
@@ -75,6 +79,7 @@ def _build_proxy_handler(name):
class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
protocol_version = 'HTTP/1.1'
+ default_request_version = 'HTTP/1.1'
def log_message(self, format, *args):
pass
@@ -112,6 +117,8 @@ class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
def _read_data(self):
if 'Content-Length' in self.headers:
return self.rfile.read(int(self.headers['Content-Length']))
+ else:
+ return b''
def do_POST(self):
data = self._read_data() + str(self.headers).encode()
@@ -195,7 +202,8 @@ class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
self._headers()
elif self.path.startswith('/308-to-headers'):
self.send_response(308)
- self.send_header('Location', '/headers')
+ # redirect to "localhost" for testing cookie redirection handling
+ self.send_header('Location', f'http://localhost:{self.connection.getsockname()[1]}/headers')
self.send_header('Content-Length', '0')
self.end_headers()
elif self.path == '/trailing_garbage':
@@ -310,7 +318,7 @@ class TestRequestHandlerBase:
class TestHTTPRequestHandler(TestRequestHandlerBase):
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_verify_cert(self, handler):
with handler() as rh:
with pytest.raises(CertificateVerifyError):
@@ -321,7 +329,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
assert r.status == 200
r.close()
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_ssl_error(self, handler):
# HTTPS server with too old TLS version
# XXX: is there a better way to test this than to create a new server?
@@ -335,11 +343,11 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
https_server_thread.start()
with handler(verify=False) as rh:
- with pytest.raises(SSLError, match=r'ssl(?:v3|/tls) alert handshake failure') as exc_info:
+ with pytest.raises(SSLError, match=r'(?i)ssl(?:v3|/tls).alert.handshake.failure') as exc_info:
validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
assert not issubclass(exc_info.type, CertificateVerifyError)
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_percent_encode(self, handler):
with handler() as rh:
# Unicode characters should be encoded with uppercase percent-encoding
@@ -351,7 +359,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
assert res.status == 200
res.close()
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
@pytest.mark.parametrize('path', [
'/a/b/./../../headers',
'/redirect_dotsegments',
@@ -367,6 +375,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
assert res.url == f'http://127.0.0.1:{self.http_port}/headers'
res.close()
+ # Not supported by CurlCFFI (non-standard)
@pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
def test_unicode_path_redirection(self, handler):
with handler() as rh:
@@ -374,7 +383,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
assert r.url == f'http://127.0.0.1:{self.http_port}/%E4%B8%AD%E6%96%87.html'
r.close()
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_raise_http_error(self, handler):
with handler() as rh:
for bad_status in (400, 500, 599, 302):
@@ -384,7 +393,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
# Should not raise an error
validate_and_send(rh, Request('http://127.0.0.1:%d/gen_200' % self.http_port)).close()
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_response_url(self, handler):
with handler() as rh:
# Response url should be that of the last url in redirect chain
@@ -395,62 +404,50 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
assert res2.url == f'http://127.0.0.1:{self.http_port}/gen_200'
res2.close()
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
- def test_redirect(self, handler):
+ # Covers some basic cases we expect some level of consistency between request handlers for
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+ @pytest.mark.parametrize('redirect_status,method,expected', [
+ # A 303 must either use GET or HEAD for subsequent request
+ (303, 'POST', ('', 'GET', False)),
+ (303, 'HEAD', ('', 'HEAD', False)),
+
+ # 301 and 302 turn POST only into a GET
+ (301, 'POST', ('', 'GET', False)),
+ (301, 'HEAD', ('', 'HEAD', False)),
+ (302, 'POST', ('', 'GET', False)),
+ (302, 'HEAD', ('', 'HEAD', False)),
+
+ # 307 and 308 should not change method
+ (307, 'POST', ('testdata', 'POST', True)),
+ (308, 'POST', ('testdata', 'POST', True)),
+ (307, 'HEAD', ('', 'HEAD', False)),
+ (308, 'HEAD', ('', 'HEAD', False)),
+ ])
+ def test_redirect(self, handler, redirect_status, method, expected):
with handler() as rh:
- def do_req(redirect_status, method, assert_no_content=False):
- data = b'testdata' if method in ('POST', 'PUT') else None
- res = validate_and_send(
- rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_{redirect_status}', method=method, data=data))
-
- headers = b''
- data_sent = b''
- if data is not None:
- data_sent += res.read(len(data))
- if data_sent != data:
- headers += data_sent
- data_sent = b''
-
- headers += res.read()
-
- if assert_no_content or data is None:
- assert b'Content-Type' not in headers
- assert b'Content-Length' not in headers
- else:
- assert b'Content-Type' in headers
- assert b'Content-Length' in headers
-
- return data_sent.decode(), res.headers.get('method', '')
-
- # A 303 must either use GET or HEAD for subsequent request
- assert do_req(303, 'POST', True) == ('', 'GET')
- assert do_req(303, 'HEAD') == ('', 'HEAD')
-
- assert do_req(303, 'PUT', True) == ('', 'GET')
-
- # 301 and 302 turn POST only into a GET
- assert do_req(301, 'POST', True) == ('', 'GET')
- assert do_req(301, 'HEAD') == ('', 'HEAD')
- assert do_req(302, 'POST', True) == ('', 'GET')
- assert do_req(302, 'HEAD') == ('', 'HEAD')
-
- assert do_req(301, 'PUT') == ('testdata', 'PUT')
- assert do_req(302, 'PUT') == ('testdata', 'PUT')
+ data = b'testdata' if method == 'POST' else None
+ headers = {}
+ if data is not None:
+ headers['Content-Type'] = 'application/test'
+ res = validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_{redirect_status}', method=method, data=data,
+ headers=headers))
- # 307 and 308 should not change method
- for m in ('POST', 'PUT'):
- assert do_req(307, m) == ('testdata', m)
- assert do_req(308, m) == ('testdata', m)
+ headers = b''
+ data_recv = b''
+ if data is not None:
+ data_recv += res.read(len(data))
+ if data_recv != data:
+ headers += data_recv
+ data_recv = b''
- assert do_req(307, 'HEAD') == ('', 'HEAD')
- assert do_req(308, 'HEAD') == ('', 'HEAD')
+ headers += res.read()
- # These should not redirect and instead raise an HTTPError
- for code in (300, 304, 305, 306):
- with pytest.raises(HTTPError):
- do_req(code, 'GET')
+ assert expected[0] == data_recv.decode()
+ assert expected[1] == res.headers.get('method')
+ assert expected[2] == ('content-length' in headers.decode().lower())
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_request_cookie_header(self, handler):
# We should accept a Cookie header being passed as in normal headers and handle it appropriately.
with handler() as rh:
@@ -459,16 +456,17 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
rh, Request(
f'http://127.0.0.1:{self.http_port}/headers',
headers={'Cookie': 'test=test'})).read().decode()
- assert 'Cookie: test=test' in res
+ assert 'cookie: test=test' in res.lower()
# Specified Cookie header should be removed on any redirect
res = validate_and_send(
rh, Request(
f'http://127.0.0.1:{self.http_port}/308-to-headers',
- headers={'Cookie': 'test=test'})).read().decode()
- assert 'Cookie: test=test' not in res
+ headers={'Cookie': 'test=test2'})).read().decode()
+ assert 'cookie: test=test2' not in res.lower()
# Specified Cookie header should override global cookiejar for that request
+ # Whether cookies from the cookiejar is applied on the redirect is considered undefined for now
cookiejar = YoutubeDLCookieJar()
cookiejar.set_cookie(http.cookiejar.Cookie(
version=0, name='test', value='ytdlp', port=None, port_specified=False,
@@ -478,23 +476,23 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
with handler(cookiejar=cookiejar) as rh:
data = validate_and_send(
- rh, Request(f'http://127.0.0.1:{self.http_port}/headers', headers={'cookie': 'test=test'})).read()
- assert b'Cookie: test=ytdlp' not in data
- assert b'Cookie: test=test' in data
+ rh, Request(f'http://127.0.0.1:{self.http_port}/headers', headers={'cookie': 'test=test3'})).read()
+ assert b'cookie: test=ytdlp' not in data.lower()
+ assert b'cookie: test=test3' in data.lower()
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_redirect_loop(self, handler):
with handler() as rh:
with pytest.raises(HTTPError, match='redirect loop'):
validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_loop'))
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_incompleteread(self, handler):
with handler(timeout=2) as rh:
- with pytest.raises(IncompleteRead):
+ with pytest.raises(IncompleteRead, match='13 bytes read, 234221 more expected'):
validate_and_send(rh, Request('http://127.0.0.1:%d/incompleteread' % self.http_port)).read()
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_cookies(self, handler):
cookiejar = YoutubeDLCookieJar()
cookiejar.set_cookie(http.cookiejar.Cookie(
@@ -503,47 +501,66 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
with handler(cookiejar=cookiejar) as rh:
data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
- assert b'Cookie: test=ytdlp' in data
+ assert b'cookie: test=ytdlp' in data.lower()
# Per request
with handler() as rh:
data = validate_and_send(
rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={'cookiejar': cookiejar})).read()
- assert b'Cookie: test=ytdlp' in data
+ assert b'cookie: test=ytdlp' in data.lower()
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_headers(self, handler):
with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
# Global Headers
- data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
- assert b'Test1: test' in data
+ data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read().lower()
+ assert b'test1: test' in data
# Per request headers, merged with global
data = validate_and_send(rh, Request(
- f'http://127.0.0.1:{self.http_port}/headers', headers={'test2': 'changed', 'test3': 'test3'})).read()
- assert b'Test1: test' in data
- assert b'Test2: changed' in data
- assert b'Test2: test2' not in data
- assert b'Test3: test3' in data
-
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
- def test_timeout(self, handler):
+ f'http://127.0.0.1:{self.http_port}/headers', headers={'test2': 'changed', 'test3': 'test3'})).read().lower()
+ assert b'test1: test' in data
+ assert b'test2: changed' in data
+ assert b'test2: test2' not in data
+ assert b'test3: test3' in data
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+ def test_read_timeout(self, handler):
with handler() as rh:
# Default timeout is 20 seconds, so this should go through
validate_and_send(
- rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_3'))
+ rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1'))
- with handler(timeout=0.5) as rh:
+ with handler(timeout=0.1) as rh:
with pytest.raises(TransportError):
validate_and_send(
- rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1'))
+ rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_5'))
# Per request timeout, should override handler timeout
validate_and_send(
rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1', extensions={'timeout': 4}))
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+ def test_connect_timeout(self, handler):
+ # nothing should be listening on this port
+ connect_timeout_url = 'http://10.255.255.255'
+ with handler(timeout=0.01) as rh:
+ now = time.time()
+ with pytest.raises(TransportError):
+ validate_and_send(
+ rh, Request(connect_timeout_url))
+ assert 0.01 <= time.time() - now < 20
+
+ with handler() as rh:
+ with pytest.raises(TransportError):
+ # Per request timeout, should override handler timeout
+ now = time.time()
+ validate_and_send(
+ rh, Request(connect_timeout_url, extensions={'timeout': 0.01}))
+ assert 0.01 <= time.time() - now < 20
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_source_address(self, handler):
source_address = f'127.0.0.{random.randint(5, 255)}'
# on some systems these loopback addresses we need for testing may not be available
@@ -554,6 +571,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
rh, Request(f'http://127.0.0.1:{self.http_port}/source_address')).read().decode()
assert source_address == data
+ # Not supported by CurlCFFI
@pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
def test_gzip_trailing_garbage(self, handler):
with handler() as rh:
@@ -571,7 +589,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
assert res.headers.get('Content-Encoding') == 'br'
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_deflate(self, handler):
with handler() as rh:
res = validate_and_send(
@@ -581,7 +599,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
assert res.headers.get('Content-Encoding') == 'deflate'
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_gzip(self, handler):
with handler() as rh:
res = validate_and_send(
@@ -591,7 +609,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
assert res.headers.get('Content-Encoding') == 'gzip'
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_multiple_encodings(self, handler):
with handler() as rh:
for pair in ('gzip,deflate', 'deflate, gzip', 'gzip, gzip', 'deflate, deflate'):
@@ -602,17 +620,18 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
assert res.headers.get('Content-Encoding') == pair
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
+ # Not supported by curl_cffi
@pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
def test_unsupported_encoding(self, handler):
with handler() as rh:
res = validate_and_send(
rh, Request(
f'http://127.0.0.1:{self.http_port}/content-encoding',
- headers={'ytdl-encoding': 'unsupported'}))
+ headers={'ytdl-encoding': 'unsupported', 'Accept-Encoding': '*'}))
assert res.headers.get('Content-Encoding') == 'unsupported'
assert res.read() == b'raw'
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_read(self, handler):
with handler() as rh:
res = validate_and_send(
@@ -620,9 +639,12 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
assert res.readable()
assert res.read(1) == b'H'
assert res.read(3) == b'ost'
+ assert res.read().decode().endswith('\n\n')
+ assert res.read() == b''
class TestHTTPProxy(TestRequestHandlerBase):
+ # Note: this only tests http urls over non-CONNECT proxy
@classmethod
def setup_class(cls):
super().setup_class()
@@ -642,7 +664,7 @@ class TestHTTPProxy(TestRequestHandlerBase):
cls.geo_proxy_thread.daemon = True
cls.geo_proxy_thread.start()
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_http_proxy(self, handler):
http_proxy = f'http://127.0.0.1:{self.proxy_port}'
geo_proxy = f'http://127.0.0.1:{self.geo_port}'
@@ -668,7 +690,7 @@ class TestHTTPProxy(TestRequestHandlerBase):
assert res != f'normal: {real_url}'
assert 'Accept' in res
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_noproxy(self, handler):
with handler(proxies={'proxy': f'http://127.0.0.1:{self.proxy_port}'}) as rh:
# NO_PROXY
@@ -678,7 +700,7 @@ class TestHTTPProxy(TestRequestHandlerBase):
'utf-8')
assert 'Accept' in nop_response
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_allproxy(self, handler):
url = 'http://foo.com/bar'
with handler() as rh:
@@ -686,7 +708,7 @@ class TestHTTPProxy(TestRequestHandlerBase):
'utf-8')
assert response == f'normal: {url}'
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_http_proxy_with_idn(self, handler):
with handler(proxies={
'http': f'http://127.0.0.1:{self.proxy_port}',
@@ -698,7 +720,6 @@ class TestHTTPProxy(TestRequestHandlerBase):
class TestClientCertificate:
-
@classmethod
def setup_class(cls):
certfn = os.path.join(TEST_DIR, 'testcert.pem')
@@ -724,27 +745,27 @@ class TestClientCertificate:
) as rh:
validate_and_send(rh, Request(f'https://127.0.0.1:{self.port}/video.html')).read().decode()
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_certificate_combined_nopass(self, handler):
self._run_test(handler, client_cert={
'client_certificate': os.path.join(self.certdir, 'clientwithkey.crt'),
})
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_certificate_nocombined_nopass(self, handler):
self._run_test(handler, client_cert={
'client_certificate': os.path.join(self.certdir, 'client.crt'),
'client_certificate_key': os.path.join(self.certdir, 'client.key'),
})
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_certificate_combined_pass(self, handler):
self._run_test(handler, client_cert={
'client_certificate': os.path.join(self.certdir, 'clientwithencryptedkey.crt'),
'client_certificate_password': 'foobar',
})
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_certificate_nocombined_pass(self, handler):
self._run_test(handler, client_cert={
'client_certificate': os.path.join(self.certdir, 'client.crt'),
@@ -753,6 +774,18 @@ class TestClientCertificate:
})
+@pytest.mark.parametrize('handler', ['CurlCFFI'], indirect=True)
+class TestHTTPImpersonateRequestHandler(TestRequestHandlerBase):
+ def test_supported_impersonate_targets(self, handler):
+ with handler(headers=std_headers) as rh:
+ # note: this assumes the impersonate request handler supports the impersonate extension
+ for target in rh.supported_targets:
+ res = validate_and_send(rh, Request(
+ f'http://127.0.0.1:{self.http_port}/headers', extensions={'impersonate': target}))
+ assert res.status == 200
+ assert std_headers['user-agent'].lower() not in res.read().decode().lower()
+
+
class TestRequestHandlerMisc:
"""Misc generic tests for request handlers, not related to request or validation testing"""
@pytest.mark.parametrize('handler,logger_name', [
@@ -931,6 +964,172 @@ class TestRequestsRequestHandler(TestRequestHandlerBase):
assert called
+@pytest.mark.parametrize('handler', ['CurlCFFI'], indirect=True)
+class TestCurlCFFIRequestHandler(TestRequestHandlerBase):
+
+ @pytest.mark.parametrize('params,extensions', [
+ ({}, {'impersonate': ImpersonateTarget('chrome')}),
+ ({'impersonate': ImpersonateTarget('chrome', '110')}, {}),
+ ({'impersonate': ImpersonateTarget('chrome', '99')}, {'impersonate': ImpersonateTarget('chrome', '110')}),
+ ])
+ def test_impersonate(self, handler, params, extensions):
+ with handler(headers=std_headers, **params) as rh:
+ res = validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions=extensions)).read().decode()
+ assert 'sec-ch-ua: "Chromium";v="110"' in res
+ # Check that user agent is added over ours
+ assert 'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36' in res
+
+ def test_headers(self, handler):
+ with handler(headers=std_headers) as rh:
+ # Ensure curl-impersonate overrides our standard headers (usually added
+ res = validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={
+ 'impersonate': ImpersonateTarget('safari')}, headers={'x-custom': 'test', 'sec-fetch-mode': 'custom'})).read().decode().lower()
+
+ assert std_headers['user-agent'].lower() not in res
+ assert std_headers['accept-language'].lower() not in res
+ assert std_headers['sec-fetch-mode'].lower() not in res
+ # other than UA, custom headers that differ from std_headers should be kept
+ assert 'sec-fetch-mode: custom' in res
+ assert 'x-custom: test' in res
+ # but when not impersonating don't remove std_headers
+ res = validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/headers', headers={'x-custom': 'test'})).read().decode().lower()
+ # std_headers should be present
+ for k, v in std_headers.items():
+ assert f'{k}: {v}'.lower() in res
+
+ @pytest.mark.parametrize('raised,expected,match', [
+ (lambda: curl_cffi.requests.errors.RequestsError(
+ '', code=curl_cffi.const.CurlECode.PARTIAL_FILE), IncompleteRead, None),
+ (lambda: curl_cffi.requests.errors.RequestsError(
+ '', code=curl_cffi.const.CurlECode.OPERATION_TIMEDOUT), TransportError, None),
+ (lambda: curl_cffi.requests.errors.RequestsError(
+ '', code=curl_cffi.const.CurlECode.RECV_ERROR), TransportError, None),
+ ])
+ def test_response_error_mapping(self, handler, monkeypatch, raised, expected, match):
+ import curl_cffi.requests
+
+ from yt_dlp.networking._curlcffi import CurlCFFIResponseAdapter
+ curl_res = curl_cffi.requests.Response()
+ res = CurlCFFIResponseAdapter(curl_res)
+
+ def mock_read(*args, **kwargs):
+ try:
+ raise raised()
+ except Exception as e:
+ e.response = curl_res
+ raise
+ monkeypatch.setattr(res.fp, 'read', mock_read)
+
+ with pytest.raises(expected, match=match) as exc_info:
+ res.read()
+
+ assert exc_info.type is expected
+
+ @pytest.mark.parametrize('raised,expected,match', [
+ (lambda: curl_cffi.requests.errors.RequestsError(
+ '', code=curl_cffi.const.CurlECode.OPERATION_TIMEDOUT), TransportError, None),
+ (lambda: curl_cffi.requests.errors.RequestsError(
+ '', code=curl_cffi.const.CurlECode.PEER_FAILED_VERIFICATION), CertificateVerifyError, None),
+ (lambda: curl_cffi.requests.errors.RequestsError(
+ '', code=curl_cffi.const.CurlECode.SSL_CONNECT_ERROR), SSLError, None),
+ (lambda: curl_cffi.requests.errors.RequestsError(
+ '', code=curl_cffi.const.CurlECode.TOO_MANY_REDIRECTS), HTTPError, None),
+ (lambda: curl_cffi.requests.errors.RequestsError(
+ '', code=curl_cffi.const.CurlECode.PROXY), ProxyError, None),
+ ])
+ def test_request_error_mapping(self, handler, monkeypatch, raised, expected, match):
+ import curl_cffi.requests
+ curl_res = curl_cffi.requests.Response()
+ curl_res.status_code = 301
+
+ with handler() as rh:
+ original_get_instance = rh._get_instance
+
+ def mock_get_instance(*args, **kwargs):
+ instance = original_get_instance(*args, **kwargs)
+
+ def request(*_, **__):
+ try:
+ raise raised()
+ except Exception as e:
+ e.response = curl_res
+ raise
+ monkeypatch.setattr(instance, 'request', request)
+ return instance
+
+ monkeypatch.setattr(rh, '_get_instance', mock_get_instance)
+
+ with pytest.raises(expected) as exc_info:
+ rh.send(Request('http://fake'))
+
+ assert exc_info.type is expected
+
+ def test_response_reader(self, handler):
+ class FakeResponse:
+ def __init__(self, raise_error=False):
+ self.raise_error = raise_error
+ self.closed = False
+
+ def iter_content(self):
+ yield b'foo'
+ yield b'bar'
+ yield b'z'
+ if self.raise_error:
+ raise Exception('test')
+
+ def close(self):
+ self.closed = True
+
+ from yt_dlp.networking._curlcffi import CurlCFFIResponseReader
+
+ res = CurlCFFIResponseReader(FakeResponse())
+ assert res.readable
+ assert res.bytes_read == 0
+ assert res.read(1) == b'f'
+ assert res.bytes_read == 3
+ assert res._buffer == b'oo'
+
+ assert res.read(2) == b'oo'
+ assert res.bytes_read == 3
+ assert res._buffer == b''
+
+ assert res.read(2) == b'ba'
+ assert res.bytes_read == 6
+ assert res._buffer == b'r'
+
+ assert res.read(3) == b'rz'
+ assert res.bytes_read == 7
+ assert res._buffer == b''
+ assert res.closed
+ assert res._response.closed
+
+ # should handle no size param
+ res2 = CurlCFFIResponseReader(FakeResponse())
+ assert res2.read() == b'foobarz'
+ assert res2.bytes_read == 7
+ assert res2._buffer == b''
+ assert res2.closed
+
+ # should close on an exception
+ res3 = CurlCFFIResponseReader(FakeResponse(raise_error=True))
+ with pytest.raises(Exception, match='test'):
+ res3.read()
+ assert res3._buffer == b''
+ assert res3.bytes_read == 7
+ assert res3.closed
+
+ # buffer should be cleared on close
+ res4 = CurlCFFIResponseReader(FakeResponse())
+ res4.read(2)
+ assert res4._buffer == b'o'
+ res4.close()
+ assert res4.closed
+ assert res4._buffer == b''
+
+
def run_validation(handler, error, req, **handler_kwargs):
with handler(**handler_kwargs) as rh:
if error:
@@ -975,6 +1174,10 @@ class TestRequestHandlerValidation:
('ws', False, {}),
('wss', False, {}),
]),
+ ('CurlCFFI', [
+ ('http', False, {}),
+ ('https', False, {}),
+ ]),
(NoCheckRH, [('http', False, {})]),
(ValidationRH, [('http', UnsupportedRequest, {})])
]
@@ -998,6 +1201,14 @@ class TestRequestHandlerValidation:
('socks5', False),
('socks5h', False),
]),
+ ('CurlCFFI', 'http', [
+ ('http', False),
+ ('https', False),
+ ('socks4', False),
+ ('socks4a', False),
+ ('socks5', False),
+ ('socks5h', False),
+ ]),
(NoCheckRH, 'http', [('http', False)]),
(HTTPSupportedRH, 'http', [('http', UnsupportedRequest)]),
('Websockets', 'ws', [('http', UnsupportedRequest)]),
@@ -1015,6 +1226,10 @@ class TestRequestHandlerValidation:
('all', False),
('unrelated', False),
]),
+ ('CurlCFFI', [
+ ('all', False),
+ ('unrelated', False),
+ ]),
(NoCheckRH, [('all', False)]),
(HTTPSupportedRH, [('all', UnsupportedRequest)]),
(HTTPSupportedRH, [('no', UnsupportedRequest)]),
@@ -1036,6 +1251,19 @@ class TestRequestHandlerValidation:
({'timeout': 'notatimeout'}, AssertionError),
({'unsupported': 'value'}, UnsupportedRequest),
]),
+ ('CurlCFFI', 'http', [
+ ({'cookiejar': 'notacookiejar'}, AssertionError),
+ ({'cookiejar': YoutubeDLCookieJar()}, False),
+ ({'timeout': 1}, False),
+ ({'timeout': 'notatimeout'}, AssertionError),
+ ({'unsupported': 'value'}, UnsupportedRequest),
+ ({'impersonate': ImpersonateTarget('badtarget', None, None, None)}, UnsupportedRequest),
+ ({'impersonate': 123}, AssertionError),
+ ({'impersonate': ImpersonateTarget('chrome', None, None, None)}, False),
+ ({'impersonate': ImpersonateTarget(None, None, None, None)}, False),
+ ({'impersonate': ImpersonateTarget()}, False),
+ ({'impersonate': 'chrome'}, AssertionError)
+ ]),
(NoCheckRH, 'http', [
({'cookiejar': 'notacookiejar'}, False),
({'somerandom': 'test'}, False), # but any extension is allowed through
@@ -1055,7 +1283,7 @@ class TestRequestHandlerValidation:
def test_url_scheme(self, handler, scheme, fail, handler_kwargs):
run_validation(handler, fail, Request(f'{scheme}://'), **(handler_kwargs or {}))
- @pytest.mark.parametrize('handler,fail', [('Urllib', False), ('Requests', False)], indirect=['handler'])
+ @pytest.mark.parametrize('handler,fail', [('Urllib', False), ('Requests', False), ('CurlCFFI', False)], indirect=['handler'])
def test_no_proxy(self, handler, fail):
run_validation(handler, fail, Request('http://', proxies={'no': '127.0.0.1,github.com'}))
run_validation(handler, fail, Request('http://'), proxies={'no': '127.0.0.1,github.com'})
@@ -1078,13 +1306,13 @@ class TestRequestHandlerValidation:
run_validation(handler, fail, Request(f'{req_scheme}://', proxies={req_scheme: f'{scheme}://example.com'}))
run_validation(handler, fail, Request(f'{req_scheme}://'), proxies={req_scheme: f'{scheme}://example.com'})
- @pytest.mark.parametrize('handler', ['Urllib', HTTPSupportedRH, 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', HTTPSupportedRH, 'Requests', 'CurlCFFI'], indirect=True)
def test_empty_proxy(self, handler):
run_validation(handler, False, Request('http://', proxies={'http': None}))
run_validation(handler, False, Request('http://'), proxies={'http': None})
@pytest.mark.parametrize('proxy_url', ['//example.com', 'example.com', '127.0.0.1', '/a/b/c'])
- @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
def test_invalid_proxy_url(self, handler, proxy_url):
run_validation(handler, UnsupportedRequest, Request('http://', proxies={'http': proxy_url}))
@@ -1113,6 +1341,10 @@ class FakeResponse(Response):
class FakeRH(RequestHandler):
+ def __init__(self, *args, **params):
+ self.params = params
+ super().__init__(*args, **params)
+
def _validate(self, request):
return
@@ -1271,15 +1503,10 @@ class TestYoutubeDLNetworking:
('', {'all': '__noproxy__'}),
(None, {'http': 'http://127.0.0.1:8081', 'https': 'http://127.0.0.1:8081'}) # env, set https
])
- def test_proxy(self, proxy, expected):
- old_http_proxy = os.environ.get('HTTP_PROXY')
- try:
- os.environ['HTTP_PROXY'] = 'http://127.0.0.1:8081' # ensure that provided proxies override env
- with FakeYDL({'proxy': proxy}) as ydl:
- assert ydl.proxies == expected
- finally:
- if old_http_proxy:
- os.environ['HTTP_PROXY'] = old_http_proxy
+ def test_proxy(self, proxy, expected, monkeypatch):
+ monkeypatch.setenv('HTTP_PROXY', 'http://127.0.0.1:8081')
+ with FakeYDL({'proxy': proxy}) as ydl:
+ assert ydl.proxies == expected
def test_compat_request(self):
with FakeRHYDL() as ydl:
@@ -1331,6 +1558,95 @@ class TestYoutubeDLNetworking:
with pytest.raises(SSLError, match='testerror'):
ydl.urlopen('ssl://testerror')
+ def test_unsupported_impersonate_target(self):
+ class FakeImpersonationRHYDL(FakeYDL):
+ def __init__(self, *args, **kwargs):
+ class HTTPRH(RequestHandler):
+ def _send(self, request: Request):
+ pass
+ _SUPPORTED_URL_SCHEMES = ('http',)
+ _SUPPORTED_PROXY_SCHEMES = None
+
+ super().__init__(*args, **kwargs)
+ self._request_director = self.build_request_director([HTTPRH])
+
+ with FakeImpersonationRHYDL() as ydl:
+ with pytest.raises(
+ RequestError,
+ match=r'Impersonate target "test" is not available'
+ ):
+ ydl.urlopen(Request('http://', extensions={'impersonate': ImpersonateTarget('test', None, None, None)}))
+
+ def test_unsupported_impersonate_extension(self):
+ class FakeHTTPRHYDL(FakeYDL):
+ def __init__(self, *args, **kwargs):
+ class IRH(ImpersonateRequestHandler):
+ def _send(self, request: Request):
+ pass
+
+ _SUPPORTED_URL_SCHEMES = ('http',)
+ _SUPPORTED_IMPERSONATE_TARGET_MAP = {ImpersonateTarget('abc',): 'test'}
+ _SUPPORTED_PROXY_SCHEMES = None
+
+ super().__init__(*args, **kwargs)
+ self._request_director = self.build_request_director([IRH])
+
+ with FakeHTTPRHYDL() as ydl:
+ with pytest.raises(
+ RequestError,
+ match=r'Impersonate target "test" is not available'
+ ):
+ ydl.urlopen(Request('http://', extensions={'impersonate': ImpersonateTarget('test', None, None, None)}))
+
+ def test_raise_impersonate_error(self):
+ with pytest.raises(
+ YoutubeDLError,
+ match=r'Impersonate target "test" is not available'
+ ):
+ FakeYDL({'impersonate': ImpersonateTarget('test', None, None, None)})
+
+ def test_pass_impersonate_param(self, monkeypatch):
+
+ class IRH(ImpersonateRequestHandler):
+ def _send(self, request: Request):
+ pass
+
+ _SUPPORTED_URL_SCHEMES = ('http',)
+ _SUPPORTED_IMPERSONATE_TARGET_MAP = {ImpersonateTarget('abc'): 'test'}
+
+ # Bypass the check on initialize
+ brh = FakeYDL.build_request_director
+ monkeypatch.setattr(FakeYDL, 'build_request_director', lambda cls, handlers, preferences=None: brh(cls, handlers=[IRH]))
+
+ with FakeYDL({
+ 'impersonate': ImpersonateTarget('abc', None, None, None)
+ }) as ydl:
+ rh = self.build_handler(ydl, IRH)
+ assert rh.impersonate == ImpersonateTarget('abc', None, None, None)
+
+ def test_get_impersonate_targets(self):
+ handlers = []
+ for target_client in ('abc', 'xyz', 'asd'):
+ class TestRH(ImpersonateRequestHandler):
+ def _send(self, request: Request):
+ pass
+ _SUPPORTED_URL_SCHEMES = ('http',)
+ _SUPPORTED_IMPERSONATE_TARGET_MAP = {ImpersonateTarget(target_client,): 'test'}
+ RH_KEY = target_client
+ RH_NAME = target_client
+ handlers.append(TestRH)
+
+ with FakeYDL() as ydl:
+ ydl._request_director = ydl.build_request_director(handlers)
+ assert set(ydl._get_available_impersonate_targets()) == {
+ (ImpersonateTarget('xyz'), 'xyz'),
+ (ImpersonateTarget('abc'), 'abc'),
+ (ImpersonateTarget('asd'), 'asd')
+ }
+ assert ydl._impersonate_target_available(ImpersonateTarget('abc'))
+ assert ydl._impersonate_target_available(ImpersonateTarget())
+ assert not ydl._impersonate_target_available(ImpersonateTarget('zxy'))
+
@pytest.mark.parametrize('proxy_key,proxy_url,expected', [
('http', '__noproxy__', None),
('no', '127.0.0.1,foo.bar', '127.0.0.1,foo.bar'),
@@ -1341,23 +1657,17 @@ class TestYoutubeDLNetworking:
('http', 'socks4://example.com', 'socks4://example.com'),
('unrelated', '/bad/proxy', '/bad/proxy'), # clean_proxies should ignore bad proxies
])
- def test_clean_proxy(self, proxy_key, proxy_url, expected):
+ def test_clean_proxy(self, proxy_key, proxy_url, expected, monkeypatch):
# proxies should be cleaned in urlopen()
with FakeRHYDL() as ydl:
req = ydl.urlopen(Request('test://', proxies={proxy_key: proxy_url})).request
assert req.proxies[proxy_key] == expected
# and should also be cleaned when building the handler
- env_key = f'{proxy_key.upper()}_PROXY'
- old_env_proxy = os.environ.get(env_key)
- try:
- os.environ[env_key] = proxy_url # ensure that provided proxies override env
- with FakeYDL() as ydl:
- rh = self.build_handler(ydl)
- assert rh.proxies[proxy_key] == expected
- finally:
- if old_env_proxy:
- os.environ[env_key] = old_env_proxy
+ monkeypatch.setenv(f'{proxy_key.upper()}_PROXY', proxy_url)
+ with FakeYDL() as ydl:
+ rh = self.build_handler(ydl)
+ assert rh.proxies[proxy_key] == expected
def test_clean_proxy_header(self):
with FakeRHYDL() as ydl:
@@ -1629,3 +1939,71 @@ class TestResponse:
assert res.geturl() == res.url
assert res.info() is res.headers
assert res.getheader('test') == res.get_header('test')
+
+
+class TestImpersonateTarget:
+ @pytest.mark.parametrize('target_str,expected', [
+ ('abc', ImpersonateTarget('abc', None, None, None)),
+ ('abc-120_esr', ImpersonateTarget('abc', '120_esr', None, None)),
+ ('abc-120:xyz', ImpersonateTarget('abc', '120', 'xyz', None)),
+ ('abc-120:xyz-5.6', ImpersonateTarget('abc', '120', 'xyz', '5.6')),
+ ('abc:xyz', ImpersonateTarget('abc', None, 'xyz', None)),
+ ('abc:', ImpersonateTarget('abc', None, None, None)),
+ ('abc-120:', ImpersonateTarget('abc', '120', None, None)),
+ (':xyz', ImpersonateTarget(None, None, 'xyz', None)),
+ (':xyz-6.5', ImpersonateTarget(None, None, 'xyz', '6.5')),
+ (':', ImpersonateTarget(None, None, None, None)),
+ ('', ImpersonateTarget(None, None, None, None)),
+ ])
+ def test_target_from_str(self, target_str, expected):
+ assert ImpersonateTarget.from_str(target_str) == expected
+
+ @pytest.mark.parametrize('target_str', [
+ '-120', ':-12.0', '-12:-12', '-:-',
+ '::', 'a-c-d:', 'a-c-d:e-f-g', 'a:b:'
+ ])
+ def test_target_from_invalid_str(self, target_str):
+ with pytest.raises(ValueError):
+ ImpersonateTarget.from_str(target_str)
+
+ @pytest.mark.parametrize('target,expected', [
+ (ImpersonateTarget('abc', None, None, None), 'abc'),
+ (ImpersonateTarget('abc', '120', None, None), 'abc-120'),
+ (ImpersonateTarget('abc', '120', 'xyz', None), 'abc-120:xyz'),
+ (ImpersonateTarget('abc', '120', 'xyz', '5'), 'abc-120:xyz-5'),
+ (ImpersonateTarget('abc', None, 'xyz', None), 'abc:xyz'),
+ (ImpersonateTarget('abc', '120', None, None), 'abc-120'),
+ (ImpersonateTarget('abc', '120', 'xyz', None), 'abc-120:xyz'),
+ (ImpersonateTarget('abc', None, 'xyz'), 'abc:xyz'),
+ (ImpersonateTarget(None, None, 'xyz', '6.5'), ':xyz-6.5'),
+ (ImpersonateTarget('abc', ), 'abc'),
+ (ImpersonateTarget(None, None, None, None), ''),
+ ])
+ def test_str(self, target, expected):
+ assert str(target) == expected
+
+ @pytest.mark.parametrize('args', [
+ ('abc', None, None, '5'),
+ ('abc', '120', None, '5'),
+ (None, '120', None, None),
+ (None, '120', None, '5'),
+ (None, None, None, '5'),
+ (None, '120', 'xyz', '5'),
+ ])
+ def test_invalid_impersonate_target(self, args):
+ with pytest.raises(ValueError):
+ ImpersonateTarget(*args)
+
+ @pytest.mark.parametrize('target1,target2,is_in,is_eq', [
+ (ImpersonateTarget('abc', None, None, None), ImpersonateTarget('abc', None, None, None), True, True),
+ (ImpersonateTarget('abc', None, None, None), ImpersonateTarget('abc', '120', None, None), True, False),
+ (ImpersonateTarget('abc', None, 'xyz', 'test'), ImpersonateTarget('abc', '120', 'xyz', None), True, False),
+ (ImpersonateTarget('abc', '121', 'xyz', 'test'), ImpersonateTarget('abc', '120', 'xyz', 'test'), False, False),
+ (ImpersonateTarget('abc'), ImpersonateTarget('abc', '120', 'xyz', 'test'), True, False),
+ (ImpersonateTarget('abc', '120', 'xyz', 'test'), ImpersonateTarget('abc'), True, False),
+ (ImpersonateTarget(), ImpersonateTarget('abc', '120', 'xyz'), True, False),
+ (ImpersonateTarget(), ImpersonateTarget(), True, True),
+ ])
+ def test_impersonate_target_in(self, target1, target2, is_in, is_eq):
+ assert (target1 in target2) is is_in
+ assert (target1 == target2) is is_eq
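
The bulk of the test_networking.py changes wire the new curl_cffi ('CurlCFFI') request handler and the 'impersonate' extension into the existing parametrized tests, and add TestImpersonateTarget for the target string grammar (client[-version][:os[-os_version]], e.g. 'abc-120:xyz-5.6'). A short sketch built only from the APIs exercised above (ImpersonateTarget, Request extensions, YoutubeDL.urlopen); the URL is a placeholder, and the request only succeeds where an impersonation-capable handler such as curl_cffi is installed:

    from yt_dlp import YoutubeDL
    from yt_dlp.networking import Request
    from yt_dlp.networking.impersonate import ImpersonateTarget

    target = ImpersonateTarget.from_str('chrome-110')   # client 'chrome', version '110'
    assert target in ImpersonateTarget('chrome')        # a bare client matches any version

    with YoutubeDL() as ydl:
        # Raises a RequestError if no installed handler supports this target
        res = ydl.urlopen(Request('http://example.com', extensions={'impersonate': target}))
        print(res.status)
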
diff --git a/test/test_socks.py b/test/test_socks.py
index cb22b61..43d612d 100644
--- a/test/test_socks.py
+++ b/test/test_socks.py
@@ -286,8 +286,14 @@ def ctx(request):
return CTX_MAP[request.param]()
+@pytest.mark.parametrize(
+ 'handler,ctx', [
+ ('Urllib', 'http'),
+ ('Requests', 'http'),
+ ('Websockets', 'ws'),
+ ('CurlCFFI', 'http')
+ ], indirect=True)
class TestSocks4Proxy:
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks4_no_auth(self, handler, ctx):
with handler() as rh:
with ctx.socks_server(Socks4ProxyHandler) as server_address:
@@ -295,7 +301,6 @@ class TestSocks4Proxy:
rh, proxies={'all': f'socks4://{server_address}'})
assert response['version'] == 4
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks4_auth(self, handler, ctx):
with handler() as rh:
with ctx.socks_server(Socks4ProxyHandler, user_id='user') as server_address:
@@ -305,7 +310,6 @@ class TestSocks4Proxy:
rh, proxies={'all': f'socks4://user:@{server_address}'})
assert response['version'] == 4
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks4a_ipv4_target(self, handler, ctx):
with ctx.socks_server(Socks4ProxyHandler) as server_address:
with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
@@ -313,7 +317,6 @@ class TestSocks4Proxy:
assert response['version'] == 4
assert (response['ipv4_address'] == '127.0.0.1') != (response['domain_address'] == '127.0.0.1')
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks4a_domain_target(self, handler, ctx):
with ctx.socks_server(Socks4ProxyHandler) as server_address:
with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
@@ -322,7 +325,6 @@ class TestSocks4Proxy:
assert response['ipv4_address'] is None
assert response['domain_address'] == 'localhost'
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_ipv4_client_source_address(self, handler, ctx):
with ctx.socks_server(Socks4ProxyHandler) as server_address:
source_address = f'127.0.0.{random.randint(5, 255)}'
@@ -333,7 +335,6 @@ class TestSocks4Proxy:
assert response['client_address'][0] == source_address
assert response['version'] == 4
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
@pytest.mark.parametrize('reply_code', [
Socks4CD.REQUEST_REJECTED_OR_FAILED,
Socks4CD.REQUEST_REJECTED_CANNOT_CONNECT_TO_IDENTD,
@@ -345,7 +346,6 @@ class TestSocks4Proxy:
with pytest.raises(ProxyError):
ctx.socks_info_request(rh)
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_ipv6_socks4_proxy(self, handler, ctx):
with ctx.socks_server(Socks4ProxyHandler, bind_ip='::1') as server_address:
with handler(proxies={'all': f'socks4://{server_address}'}) as rh:
@@ -354,7 +354,6 @@ class TestSocks4Proxy:
assert response['ipv4_address'] == '127.0.0.1'
assert response['version'] == 4
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_timeout(self, handler, ctx):
with ctx.socks_server(Socks4ProxyHandler, sleep=2) as server_address:
with handler(proxies={'all': f'socks4://{server_address}'}, timeout=0.5) as rh:
@@ -362,9 +361,15 @@ class TestSocks4Proxy:
ctx.socks_info_request(rh)
+@pytest.mark.parametrize(
+ 'handler,ctx', [
+ ('Urllib', 'http'),
+ ('Requests', 'http'),
+ ('Websockets', 'ws'),
+ ('CurlCFFI', 'http')
+ ], indirect=True)
class TestSocks5Proxy:
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks5_no_auth(self, handler, ctx):
with ctx.socks_server(Socks5ProxyHandler) as server_address:
with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -372,7 +377,6 @@ class TestSocks5Proxy:
assert response['auth_methods'] == [0x0]
assert response['version'] == 5
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks5_user_pass(self, handler, ctx):
with ctx.socks_server(Socks5ProxyHandler, auth=('test', 'testpass')) as server_address:
with handler() as rh:
@@ -385,7 +389,6 @@ class TestSocks5Proxy:
assert response['auth_methods'] == [Socks5Auth.AUTH_NONE, Socks5Auth.AUTH_USER_PASS]
assert response['version'] == 5
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks5_ipv4_target(self, handler, ctx):
with ctx.socks_server(Socks5ProxyHandler) as server_address:
with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -393,7 +396,6 @@ class TestSocks5Proxy:
assert response['ipv4_address'] == '127.0.0.1'
assert response['version'] == 5
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks5_domain_target(self, handler, ctx):
with ctx.socks_server(Socks5ProxyHandler) as server_address:
with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -401,7 +403,6 @@ class TestSocks5Proxy:
assert (response['ipv4_address'] == '127.0.0.1') != (response['ipv6_address'] == '::1')
assert response['version'] == 5
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks5h_domain_target(self, handler, ctx):
with ctx.socks_server(Socks5ProxyHandler) as server_address:
with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:
@@ -410,7 +411,6 @@ class TestSocks5Proxy:
assert response['domain_address'] == 'localhost'
assert response['version'] == 5
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks5h_ip_target(self, handler, ctx):
with ctx.socks_server(Socks5ProxyHandler) as server_address:
with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:
@@ -419,7 +419,6 @@ class TestSocks5Proxy:
assert response['domain_address'] is None
assert response['version'] == 5
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_socks5_ipv6_destination(self, handler, ctx):
with ctx.socks_server(Socks5ProxyHandler) as server_address:
with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -427,7 +426,6 @@ class TestSocks5Proxy:
assert response['ipv6_address'] == '::1'
assert response['version'] == 5
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_ipv6_socks5_proxy(self, handler, ctx):
with ctx.socks_server(Socks5ProxyHandler, bind_ip='::1') as server_address:
with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -438,7 +436,6 @@ class TestSocks5Proxy:
# XXX: is there any feasible way of testing IPv6 source addresses?
# Same would go for non-proxy source_address test...
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
def test_ipv4_client_source_address(self, handler, ctx):
with ctx.socks_server(Socks5ProxyHandler) as server_address:
source_address = f'127.0.0.{random.randint(5, 255)}'
@@ -448,7 +445,6 @@ class TestSocks5Proxy:
assert response['client_address'][0] == source_address
assert response['version'] == 5
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
@pytest.mark.parametrize('reply_code', [
Socks5Reply.GENERAL_FAILURE,
Socks5Reply.CONNECTION_NOT_ALLOWED,
@@ -465,7 +461,6 @@ class TestSocks5Proxy:
with pytest.raises(ProxyError):
ctx.socks_info_request(rh)
- @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Websockets', 'ws')], indirect=True)
def test_timeout(self, handler, ctx):
with ctx.socks_server(Socks5ProxyHandler, sleep=2) as server_address:
with handler(proxies={'all': f'socks5://{server_address}'}, timeout=1) as rh:
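
In test_socks.py the per-method @pytest.mark.parametrize decorators are hoisted to a single class-level decorator (now also covering 'CurlCFFI'), which every test in TestSocks4Proxy and TestSocks5Proxy inherits. A small self-contained sketch of that pytest pattern; the fixture here is illustrative, whereas the real suite resolves 'handler' to a request handler class in conftest.py:

    import pytest

    @pytest.fixture
    def handler(request):
        # indirect=True routes each parametrized value through this fixture
        return request.param

    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'Websockets', 'CurlCFFI'], indirect=True)
    class TestAllHandlers:
        def test_name(self, handler):
            assert handler in ('Urllib', 'Requests', 'Websockets', 'CurlCFFI')

        def test_type(self, handler):
            assert isinstance(handler, str)
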
diff --git a/test/test_traversal.py b/test/test_traversal.py
new file mode 100644
index 0000000..9b2a27b
--- /dev/null
+++ b/test/test_traversal.py
@@ -0,0 +1,444 @@
+import http.cookies
+import re
+import xml.etree.ElementTree
+
+import pytest
+
+from yt_dlp.utils import dict_get, int_or_none, str_or_none
+from yt_dlp.utils.traversal import traverse_obj
+
+_TEST_DATA = {
+ 100: 100,
+ 1.2: 1.2,
+ 'str': 'str',
+ 'None': None,
+ '...': ...,
+ 'urls': [
+ {'index': 0, 'url': 'https://www.example.com/0'},
+ {'index': 1, 'url': 'https://www.example.com/1'},
+ ],
+ 'data': (
+ {'index': 2},
+ {'index': 3},
+ ),
+ 'dict': {},
+}
+
+
+class TestTraversal:
+ def test_traversal_base(self):
+ assert traverse_obj(_TEST_DATA, ('str',)) == 'str', \
+ 'allow tuple path'
+ assert traverse_obj(_TEST_DATA, ['str']) == 'str', \
+ 'allow list path'
+ assert traverse_obj(_TEST_DATA, (value for value in ("str",))) == 'str', \
+ 'allow iterable path'
+ assert traverse_obj(_TEST_DATA, 'str') == 'str', \
+ 'single items should be treated as a path'
+ assert traverse_obj(_TEST_DATA, 100) == 100, \
+ 'allow int path'
+ assert traverse_obj(_TEST_DATA, 1.2) == 1.2, \
+ 'allow float path'
+ assert traverse_obj(_TEST_DATA, None) == _TEST_DATA, \
+ '`None` should not perform any modification'
+
+ def test_traversal_ellipsis(self):
+ assert traverse_obj(_TEST_DATA, ...) == [x for x in _TEST_DATA.values() if x not in (None, {})], \
+ '`...` should give all non discarded values'
+ assert traverse_obj(_TEST_DATA, ('urls', 0, ...)) == list(_TEST_DATA['urls'][0].values()), \
+ '`...` selection for dicts should select all values'
+ assert traverse_obj(_TEST_DATA, (..., ..., 'url')) == ['https://www.example.com/0', 'https://www.example.com/1'], \
+ 'nested `...` queries should work'
+ assert traverse_obj(_TEST_DATA, (..., ..., 'index')) == list(range(4)), \
+ '`...` query result should be flattened'
+ assert traverse_obj(iter(range(4)), ...) == list(range(4)), \
+ '`...` should accept iterables'
+
+ def test_traversal_function(self):
+ filter_func = lambda x, y: x == 'urls' and isinstance(y, list)
+ assert traverse_obj(_TEST_DATA, filter_func) == [_TEST_DATA['urls']], \
+ 'function as query key should perform a filter based on (key, value)'
+ assert traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)) == ['str'], \
+ 'exceptions in the query function should be catched'
+ assert traverse_obj(iter(range(4)), lambda _, x: x % 2 == 0) == [0, 2], \
+ 'function key should accept iterables'
+ # Wrong function signature should raise (debug mode)
+ with pytest.raises(Exception):
+ traverse_obj(_TEST_DATA, lambda a: ...)
+ with pytest.raises(Exception):
+ traverse_obj(_TEST_DATA, lambda a, b, c: ...)
+
+ def test_traversal_set(self):
+ # transformation/type, like `expected_type`
+ assert traverse_obj(_TEST_DATA, (..., {str.upper}, )) == ['STR'], \
+ 'Function in set should be a transformation'
+ assert traverse_obj(_TEST_DATA, (..., {str})) == ['str'], \
+ 'Type in set should be a type filter'
+ assert traverse_obj(_TEST_DATA, (..., {str, int})) == [100, 'str'], \
+ 'Multiple types in set should be a type filter'
+ assert traverse_obj(_TEST_DATA, {dict}) == _TEST_DATA, \
+ 'A single set should be wrapped into a path'
+ assert traverse_obj(_TEST_DATA, (..., {str.upper})) == ['STR'], \
+ 'Transformation function should not raise'
+ expected = [x for x in map(str_or_none, _TEST_DATA.values()) if x is not None]
+ assert traverse_obj(_TEST_DATA, (..., {str_or_none})) == expected, \
+ 'Function in set should be a transformation'
+ assert traverse_obj(_TEST_DATA, ('fail', {lambda _: 'const'})) == 'const', \
+ 'Function in set should always be called'
+ # Sets with length < 1 or > 1 not including only types should raise
+ with pytest.raises(Exception):
+ traverse_obj(_TEST_DATA, set())
+ with pytest.raises(Exception):
+ traverse_obj(_TEST_DATA, {str.upper, str})
+
+ def test_traversal_slice(self):
+ _SLICE_DATA = [0, 1, 2, 3, 4]
+
+ assert traverse_obj(_TEST_DATA, ('dict', slice(1))) is None, \
+ 'slice on a dictionary should not throw'
+ assert traverse_obj(_SLICE_DATA, slice(1)) == _SLICE_DATA[:1], \
+ 'slice key should apply slice to sequence'
+ assert traverse_obj(_SLICE_DATA, slice(1, 2)) == _SLICE_DATA[1:2], \
+ 'slice key should apply slice to sequence'
+ assert traverse_obj(_SLICE_DATA, slice(1, 4, 2)) == _SLICE_DATA[1:4:2], \
+ 'slice key should apply slice to sequence'
+
+ def test_traversal_alternatives(self):
+ assert traverse_obj(_TEST_DATA, 'fail', 'str') == 'str', \
+ 'multiple `paths` should be treated as alternative paths'
+ assert traverse_obj(_TEST_DATA, 'str', 100) == 'str', \
+ 'alternatives should exit early'
+ assert traverse_obj(_TEST_DATA, 'fail', 'fail') is None, \
+ 'alternatives should return `default` if exhausted'
+ assert traverse_obj(_TEST_DATA, (..., 'fail'), 100) == 100, \
+ 'alternatives should track their own branching return'
+ assert traverse_obj(_TEST_DATA, ('dict', ...), ('data', ...)) == list(_TEST_DATA['data']), \
+ 'alternatives on empty objects should search further'
+
+ def test_traversal_branching_nesting(self):
+ assert traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')) == ['https://www.example.com/0'], \
+ 'tuple as key should be treated as branches'
+ assert traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')) == ['https://www.example.com/0'], \
+ 'list as key should be treated as branches'
+ assert traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))) == ['https://www.example.com/0'], \
+ 'double nesting in path should be treated as paths'
+ assert traverse_obj(['0', [1, 2]], [(0, 1), 0]) == [1], \
+ 'do not fail early on branching'
+ expected = ['https://www.example.com/0', 'https://www.example.com/1']
+ assert traverse_obj(_TEST_DATA, ('urls', ((0, ('fail', 'url')), (1, 'url')))) == expected, \
+ 'triple nesting in path should be treated as branches'
+ assert traverse_obj(_TEST_DATA, ('urls', ('fail', (..., 'url')))) == expected, \
+ 'ellipsis as branch path start gets flattened'
+
+ def test_traversal_dict(self):
+ assert traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}) == {0: 100, 1: 1.2}, \
+ 'dict key should result in a dict with the same keys'
+ expected = {0: 'https://www.example.com/0'}
+ assert traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}) == expected, \
+ 'dict key should allow paths'
+ expected = {0: ['https://www.example.com/0']}
+ assert traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}) == expected, \
+ 'tuple in dict path should be treated as branches'
+ assert traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}) == expected, \
+ 'double nesting in dict path should be treated as paths'
+ expected = {0: ['https://www.example.com/1', 'https://www.example.com/0']}
+ assert traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}) == expected, \
+ 'triple nesting in dict path should be treated as branches'
+ assert traverse_obj(_TEST_DATA, {0: 'fail'}) == {}, \
+ 'remove `None` values when top level dict key fails'
+ assert traverse_obj(_TEST_DATA, {0: 'fail'}, default=...) == {0: ...}, \
+ 'use `default` if key fails and `default` is given'
+ assert traverse_obj(_TEST_DATA, {0: 'dict'}) == {}, \
+ 'remove empty values when dict key resolves to an empty value'
+ assert traverse_obj(_TEST_DATA, {0: 'dict'}, default=...) == {0: ...}, \
+ 'use `default` when dict key resolves to an empty value and `default` is given'
+ assert traverse_obj(_TEST_DATA, {0: {0: 'fail'}}) == {}, \
+ 'remove empty values when nested dict key fails'
+ assert traverse_obj(None, {0: 'fail'}) == {}, \
+ 'default to dict if pruned'
+ assert traverse_obj(None, {0: 'fail'}, default=...) == {0: ...}, \
+ 'default to dict if pruned and default is given'
+ assert traverse_obj(_TEST_DATA, {0: {0: 'fail'}}, default=...) == {0: {0: ...}}, \
+ 'use nested `default` when nested dict key fails and `default` is given'
+ assert traverse_obj(_TEST_DATA, {0: ('dict', ...)}) == {}, \
+ 'remove key if branch in dict key not successful'
+
+ def test_traversal_default(self):
+ _DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}
+
+ assert traverse_obj(_DEFAULT_DATA, 'fail') is None, \
+ 'default value should be `None`'
+ assert traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=...) == ..., \
+ 'chained fails should result in default'
+ assert traverse_obj(_DEFAULT_DATA, 'None', 'int') == 0, \
+ 'should not short-circuit on `None`'
+ assert traverse_obj(_DEFAULT_DATA, 'fail', default=1) == 1, \
+ 'invalid dict key should result in `default`'
+ assert traverse_obj(_DEFAULT_DATA, 'None', default=1) == 1, \
+ '`None` is a deliberate sentinel and should become `default`'
+ assert traverse_obj(_DEFAULT_DATA, ('list', 10)) is None, \
+ '`IndexError` should result in `default`'
+ assert traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=1) == 1, \
+ 'if branched but not successful return `default` if defined, not `[]`'
+ assert traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=None) is None, \
+ 'if branched but not successful return `default` even if `default` is `None`'
+ assert traverse_obj(_DEFAULT_DATA, (..., 'fail')) == [], \
+ 'if branched but not successful return `[]`, not `default`'
+ assert traverse_obj(_DEFAULT_DATA, ('list', ...)) == [], \
+ 'if branched but object is empty return `[]`, not `default`'
+ assert traverse_obj(None, ...) == [], \
+ 'if branched but object is `None` return `[]`, not `default`'
+ assert traverse_obj({0: None}, (0, ...)) == [], \
+ 'if branched but state is `None` return `[]`, not `default`'
+
+ @pytest.mark.parametrize('path', [
+ ('fail', ...),
+ (..., 'fail'),
+ 100 * ('fail',) + (...,),
+ (...,) + 100 * ('fail',),
+ ])
+ def test_traversal_branching(self, path):
+ assert traverse_obj({}, path) == [], \
+ 'if branched but state is `None`, return `[]` (not `default`)'
+ assert traverse_obj({}, 'fail', path) == [], \
+ 'if branching in last alternative and previous did not match, return `[]` (not `default`)'
+ assert traverse_obj({0: 'x'}, 0, path) == 'x', \
+ 'if branching in last alternative and previous did match, return single value'
+ assert traverse_obj({0: 'x'}, path, 0) == 'x', \
+ 'if branching in first alternative and non-branching path does match, return single value'
+ assert traverse_obj({}, path, 'fail') is None, \
+ 'if branching in first alternative and non-branching path does not match, return `default`'
+
+ def test_traversal_expected_type(self):
+ _EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}
+
+ assert traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str) == 'str', \
+ 'accept matching `expected_type` type'
+ assert traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int) is None, \
+ 'reject non matching `expected_type` type'
+ assert traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)) == '0', \
+ 'transform type using type function'
+ assert traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=lambda _: 1 / 0) is None, \
+ 'wrap expected_type function in try_call'
+ assert traverse_obj(_EXPECTED_TYPE_DATA, ..., expected_type=str) == ['str'], \
+ 'eliminate items that expected_type fails on'
+ assert traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}, expected_type=int) == {0: 100}, \
+ 'type as expected_type should filter dict values'
+ assert traverse_obj(_TEST_DATA, {0: 100, 1: 1.2, 2: 'None'}, expected_type=str_or_none) == {0: '100', 1: '1.2'}, \
+ 'function as expected_type should transform dict values'
+ assert traverse_obj(_TEST_DATA, ({0: 1.2}, 0, {int_or_none}), expected_type=int) == 1, \
+ 'expected_type should not filter non final dict values'
+ assert traverse_obj(_TEST_DATA, {0: {0: 100, 1: 'str'}}, expected_type=int) == {0: {0: 100}}, \
+ 'expected_type should transform deep dict values'
+ assert traverse_obj(_TEST_DATA, [({0: '...'}, {0: '...'})], expected_type=type(...)) == [{0: ...}, {0: ...}], \
+ 'expected_type should transform branched dict values'
+ assert traverse_obj({1: {3: 4}}, [(1, 2), 3], expected_type=int) == [4], \
+ 'expected_type regression for type matching in tuple branching'
+ assert traverse_obj(_TEST_DATA, ['data', ...], expected_type=int) == [], \
+ 'expected_type regression for type matching in dict result'
+
+ def test_traversal_get_all(self):
+ _GET_ALL_DATA = {'key': [0, 1, 2]}
+
+ assert traverse_obj(_GET_ALL_DATA, ('key', ...), get_all=False) == 0, \
+ 'if not `get_all`, return only first matching value'
+ assert traverse_obj(_GET_ALL_DATA, ..., get_all=False) == [0, 1, 2], \
+ 'do not overflatten if not `get_all`'
+
+ def test_traversal_casesense(self):
+ _CASESENSE_DATA = {
+ 'KeY': 'value0',
+ 0: {
+ 'KeY': 'value1',
+ 0: {'KeY': 'value2'},
+ },
+ }
+
+ assert traverse_obj(_CASESENSE_DATA, 'key') is None, \
+ 'dict keys should be case sensitive unless `casesense`'
+ assert traverse_obj(_CASESENSE_DATA, 'keY', casesense=False) == 'value0', \
+ 'allow non matching key case if `casesense`'
+ assert traverse_obj(_CASESENSE_DATA, [0, ('keY',)], casesense=False) == ['value1'], \
+ 'allow non matching key case in branch if `casesense`'
+ assert traverse_obj(_CASESENSE_DATA, [0, ([0, 'keY'],)], casesense=False) == ['value2'], \
+ 'allow non matching key case in branch path if `casesense`'
+
+ def test_traversal_traverse_string(self):
+ _TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}
+
+ assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)) is None, \
+ 'do not traverse into string if not `traverse_string`'
+ assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0), traverse_string=True) == 's', \
+ 'traverse into string if `traverse_string`'
+ assert traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1), traverse_string=True) == '.', \
+ 'traverse into converted data if `traverse_string`'
+ assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', ...), traverse_string=True) == 'str', \
+ '`...` should result in string (same value) if `traverse_string`'
+ assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', slice(0, None, 2)), traverse_string=True) == 'sr', \
+ '`slice` should result in string if `traverse_string`'
+ assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda i, v: i or v == "s"), traverse_string=True) == 'str', \
+ 'function should result in string if `traverse_string`'
+ assert traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)), traverse_string=True) == ['s', 'r'], \
+ 'branching should result in list if `traverse_string`'
+ assert traverse_obj({}, (0, ...), traverse_string=True) == [], \
+ 'branching should result in list if `traverse_string`'
+ assert traverse_obj({}, (0, lambda x, y: True), traverse_string=True) == [], \
+ 'branching should result in list if `traverse_string`'
+ assert traverse_obj({}, (0, slice(1)), traverse_string=True) == [], \
+ 'branching should result in list if `traverse_string`'
+
+ def test_traversal_re(self):
+ mobj = re.fullmatch(r'0(12)(?P<group>3)(4)?', '0123')
+ assert traverse_obj(mobj, ...) == [x for x in mobj.groups() if x is not None], \
+ '`...` on a `re.Match` should give its `groups()`'
+ assert traverse_obj(mobj, lambda k, _: k in (0, 2)) == ['0123', '3'], \
+ 'function on a `re.Match` should give groupno, value starting at 0'
+ assert traverse_obj(mobj, 'group') == '3', \
+ 'str key on a `re.Match` should give group with that name'
+ assert traverse_obj(mobj, 2) == '3', \
+ 'int key on a `re.Match` should give the group with that index'
+ assert traverse_obj(mobj, 'gRoUp', casesense=False) == '3', \
+ 'str key on a `re.Match` should respect casesense'
+ assert traverse_obj(mobj, 'fail') is None, \
+ 'failing str key on a `re.Match` should return `default`'
+ assert traverse_obj(mobj, 'gRoUpS', casesense=False) is None, \
+ 'failing str key on a `re.Match` should return `default`'
+ assert traverse_obj(mobj, 8) is None, \
+ 'failing int key on a `re.Match` should return `default`'
+ assert traverse_obj(mobj, lambda k, _: k in (0, 'group')) == ['0123', '3'], \
+ 'function on a `re.Match` should give group name as well'
+
+ def test_traversal_xml_etree(self):
+ etree = xml.etree.ElementTree.fromstring('''<?xml version="1.0"?>
+ <data>
+ <country name="Liechtenstein">
+ <rank>1</rank>
+ <year>2008</year>
+ <gdppc>141100</gdppc>
+ <neighbor name="Austria" direction="E"/>
+ <neighbor name="Switzerland" direction="W"/>
+ </country>
+ <country name="Singapore">
+ <rank>4</rank>
+ <year>2011</year>
+ <gdppc>59900</gdppc>
+ <neighbor name="Malaysia" direction="N"/>
+ </country>
+ <country name="Panama">
+ <rank>68</rank>
+ <year>2011</year>
+ <gdppc>13600</gdppc>
+ <neighbor name="Costa Rica" direction="W"/>
+ <neighbor name="Colombia" direction="E"/>
+ </country>
+ </data>''')
+ assert traverse_obj(etree, '') == etree, \
+ 'empty str key should return the element itself'
+ assert traverse_obj(etree, 'country') == list(etree), \
+ 'str key should yield all children with that tag name'
+ assert traverse_obj(etree, ...) == list(etree), \
+ '`...` as key should return all children'
+ assert traverse_obj(etree, lambda _, x: x[0].text == '4') == [etree[1]], \
+ 'function as key should get element as value'
+ assert traverse_obj(etree, lambda i, _: i == 1) == [etree[1]], \
+ 'function as key should get index as key'
+ assert traverse_obj(etree, 0) == etree[0], \
+ 'int key should return the nth child'
+ expected = ['Austria', 'Switzerland', 'Malaysia', 'Costa Rica', 'Colombia']
+ assert traverse_obj(etree, './/neighbor/@name') == expected, \
+ '`@<attribute>` at end of path should give that attribute'
+ assert traverse_obj(etree, '//neighbor/@fail') == [None, None, None, None, None], \
+ '`@<nonexistent>` at end of path should give `None`'
+ assert traverse_obj(etree, ('//neighbor/@', 2)) == {'name': 'Malaysia', 'direction': 'N'}, \
+ '`@` should give the full attribute dict'
+ assert traverse_obj(etree, '//year/text()') == ['2008', '2011', '2011'], \
+ '`text()` at end of path should give the inner text'
+ assert traverse_obj(etree, '//*[@direction]/@direction') == ['E', 'W', 'N', 'W', 'E'], \
+ 'full Python xpath features should be supported'
+ assert traverse_obj(etree, (0, '@name')) == 'Liechtenstein', \
+ 'special transformations should act on current element'
+ assert traverse_obj(etree, ('country', 0, ..., 'text()', {int_or_none})) == [1, 2008, 141100], \
+ 'special transformations should act on current element'
+
+ def test_traversal_unbranching(self):
+ assert traverse_obj(_TEST_DATA, [(100, 1.2), all]) == [100, 1.2], \
+ '`all` should give all results as list'
+ assert traverse_obj(_TEST_DATA, [(100, 1.2), any]) == 100, \
+ '`any` should give the first result'
+ assert traverse_obj(_TEST_DATA, [100, all]) == [100], \
+ '`all` should give list if non branching'
+ assert traverse_obj(_TEST_DATA, [100, any]) == 100, \
+ '`any` should give single item if non branching'
+ assert traverse_obj(_TEST_DATA, [('dict', 'None', 100), all]) == [100], \
+ '`all` should filter `None` and empty dict'
+ assert traverse_obj(_TEST_DATA, [('dict', 'None', 100), any]) == 100, \
+ '`any` should filter `None` and empty dict'
+ assert traverse_obj(_TEST_DATA, [{
+ 'all': [('dict', 'None', 100, 1.2), all],
+ 'any': [('dict', 'None', 100, 1.2), any],
+ }]) == {'all': [100, 1.2], 'any': 100}, \
+ '`all`/`any` should apply to each dict path separately'
+ assert traverse_obj(_TEST_DATA, [{
+ 'all': [('dict', 'None', 100, 1.2), all],
+ 'any': [('dict', 'None', 100, 1.2), any],
+ }], get_all=False) == {'all': [100, 1.2], 'any': 100}, \
+ '`all`/`any` should apply to dict regardless of `get_all`'
+ assert traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), all, {float}]) is None, \
+ '`all` should reset branching status'
+ assert traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), any, {float}]) is None, \
+ '`any` should reset branching status'
+ assert traverse_obj(_TEST_DATA, [('dict', 'None', 100, 1.2), all, ..., {float}]) == [1.2], \
+ '`all` should allow further branching'
+ assert traverse_obj(_TEST_DATA, [('dict', 'None', 'urls', 'data'), any, ..., 'index']) == [0, 1], \
+ '`any` should allow further branching'
+
+ def test_traversal_morsel(self):
+ values = {
+ 'expires': 'a',
+ 'path': 'b',
+ 'comment': 'c',
+ 'domain': 'd',
+ 'max-age': 'e',
+ 'secure': 'f',
+ 'httponly': 'g',
+ 'version': 'h',
+ 'samesite': 'i',
+ }
+ morsel = http.cookies.Morsel()
+ morsel.set('item_key', 'item_value', 'coded_value')
+ morsel.update(values)
+ values['key'] = 'item_key'
+ values['value'] = 'item_value'
+
+ for key, value in values.items():
+ assert traverse_obj(morsel, key) == value, \
+ 'Morsel should provide access to all values'
+ assert traverse_obj(morsel, ...) == list(values.values()), \
+ '`...` should yield all values'
+ assert traverse_obj(morsel, lambda k, v: True) == list(values.values()), \
+ 'function key should yield all values'
+ assert traverse_obj(morsel, [(None,), any]) == morsel, \
+ 'Morsel should not be implicitly changed to dict on usage'
+
+
+class TestDictGet:
+ def test_dict_get(self):
+ FALSE_VALUES = {
+ 'none': None,
+ 'false': False,
+ 'zero': 0,
+ 'empty_string': '',
+ 'empty_list': [],
+ }
+ d = {**FALSE_VALUES, 'a': 42}
+ assert dict_get(d, 'a') == 42
+ assert dict_get(d, 'b') is None
+ assert dict_get(d, 'b', 42) == 42
+ assert dict_get(d, ('a',)) == 42
+ assert dict_get(d, ('b', 'a')) == 42
+ assert dict_get(d, ('b', 'c', 'a', 'd')) == 42
+ assert dict_get(d, ('b', 'c')) is None
+ assert dict_get(d, ('b', 'c'), 42) == 42
+ for key, false_value in FALSE_VALUES.items():
+ assert dict_get(d, ('b', 'c', key)) is None
+ assert dict_get(d, ('b', 'c', key), skip_false_values=False) == false_value
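A minimal usage sketch of the traversal behaviour asserted above (illustrative only, not part of this diff; it assumes `traverse_obj` is still importable from `yt_dlp.utils`, as it was in the old test_utils.py):

    from yt_dlp.utils import traverse_obj

    info = {
        'formats': [
            {'url': 'https://example.com/a.mp4', 'height': 720},
            {'url': 'https://example.com/b.mp4'},
        ],
    }

    # `...` branches over the list and collects every matching value
    assert traverse_obj(info, ('formats', ..., 'url')) == [
        'https://example.com/a.mp4', 'https://example.com/b.mp4']

    # get_all=False returns only the first match instead of a list
    assert traverse_obj(info, ('formats', ..., 'url'), get_all=False) == 'https://example.com/a.mp4'

    # a dict as key builds a result dict; keys whose paths fail are dropped
    assert traverse_obj(info, {
        'url': ('formats', 0, 'url'),
        'height': ('formats', 1, 'height'),  # missing key, so dropped from the result
    }) == {'url': 'https://example.com/a.mp4'}

    # a callable wrapped in a set is applied as a transformation to the matched value
    assert traverse_obj(info, ('formats', 0, 'url', {str.upper})) == 'HTTPS://EXAMPLE.COM/A.MP4'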
diff --git a/test/test_utils.py b/test/test_utils.py
index a3073f0..ddf0a7c 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -2,7 +2,6 @@
# Allow direct execution
import os
-import re
import sys
import unittest
import warnings
@@ -45,7 +44,6 @@ from yt_dlp.utils import (
determine_ext,
determine_file_encoding,
dfxp2srt,
- dict_get,
encode_base_n,
encode_compat_str,
encodeFilename,
@@ -106,13 +104,11 @@ from yt_dlp.utils import (
sanitize_url,
shell_quote,
smuggle_url,
- str_or_none,
str_to_int,
strip_jsonp,
strip_or_none,
subtitles_filename,
timeconvert,
- traverse_obj,
try_call,
unescapeHTML,
unified_strdate,
@@ -755,28 +751,6 @@ class TestUtil(unittest.TestCase):
self.assertRaises(
ValueError, multipart_encode, {b'field': b'value'}, boundary='value')
- def test_dict_get(self):
- FALSE_VALUES = {
- 'none': None,
- 'false': False,
- 'zero': 0,
- 'empty_string': '',
- 'empty_list': [],
- }
- d = FALSE_VALUES.copy()
- d['a'] = 42
- self.assertEqual(dict_get(d, 'a'), 42)
- self.assertEqual(dict_get(d, 'b'), None)
- self.assertEqual(dict_get(d, 'b', 42), 42)
- self.assertEqual(dict_get(d, ('a', )), 42)
- self.assertEqual(dict_get(d, ('b', 'a', )), 42)
- self.assertEqual(dict_get(d, ('b', 'c', 'a', 'd', )), 42)
- self.assertEqual(dict_get(d, ('b', 'c', )), None)
- self.assertEqual(dict_get(d, ('b', 'c', ), 42), 42)
- for key, false_value in FALSE_VALUES.items():
- self.assertEqual(dict_get(d, ('b', 'c', key, )), None)
- self.assertEqual(dict_get(d, ('b', 'c', key, ), skip_false_values=False), false_value)
-
def test_merge_dicts(self):
self.assertEqual(merge_dicts({'a': 1}, {'b': 2}), {'a': 1, 'b': 2})
self.assertEqual(merge_dicts({'a': 1}, {'a': 2}), {'a': 1})
@@ -2039,359 +2013,6 @@ Line 1
warnings.simplefilter('ignore')
self.assertEqual(variadic('spam', allowed_types=[dict]), 'spam')
- def test_traverse_obj(self):
- _TEST_DATA = {
- 100: 100,
- 1.2: 1.2,
- 'str': 'str',
- 'None': None,
- '...': ...,
- 'urls': [
- {'index': 0, 'url': 'https://www.example.com/0'},
- {'index': 1, 'url': 'https://www.example.com/1'},
- ],
- 'data': (
- {'index': 2},
- {'index': 3},
- ),
- 'dict': {},
- }
-
- # Test base functionality
- self.assertEqual(traverse_obj(_TEST_DATA, ('str',)), 'str',
- msg='allow tuple path')
- self.assertEqual(traverse_obj(_TEST_DATA, ['str']), 'str',
- msg='allow list path')
- self.assertEqual(traverse_obj(_TEST_DATA, (value for value in ("str",))), 'str',
- msg='allow iterable path')
- self.assertEqual(traverse_obj(_TEST_DATA, 'str'), 'str',
- msg='single items should be treated as a path')
- self.assertEqual(traverse_obj(_TEST_DATA, None), _TEST_DATA)
- self.assertEqual(traverse_obj(_TEST_DATA, 100), 100)
- self.assertEqual(traverse_obj(_TEST_DATA, 1.2), 1.2)
-
- # Test Ellipsis behavior
- self.assertCountEqual(traverse_obj(_TEST_DATA, ...),
- (item for item in _TEST_DATA.values() if item not in (None, {})),
- msg='`...` should give all non discarded values')
- self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', 0, ...)), _TEST_DATA['urls'][0].values(),
- msg='`...` selection for dicts should select all values')
- self.assertEqual(traverse_obj(_TEST_DATA, (..., ..., 'url')),
- ['https://www.example.com/0', 'https://www.example.com/1'],
- msg='nested `...` queries should work')
- self.assertCountEqual(traverse_obj(_TEST_DATA, (..., ..., 'index')), range(4),
- msg='`...` query result should be flattened')
- self.assertEqual(traverse_obj(iter(range(4)), ...), list(range(4)),
- msg='`...` should accept iterables')
-
- # Test function as key
- self.assertEqual(traverse_obj(_TEST_DATA, lambda x, y: x == 'urls' and isinstance(y, list)),
- [_TEST_DATA['urls']],
- msg='function as query key should perform a filter based on (key, value)')
- self.assertCountEqual(traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)), {'str'},
- msg='exceptions in the query function should be catched')
- self.assertEqual(traverse_obj(iter(range(4)), lambda _, x: x % 2 == 0), [0, 2],
- msg='function key should accept iterables')
- if __debug__:
- with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
- traverse_obj(_TEST_DATA, lambda a: ...)
- with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
- traverse_obj(_TEST_DATA, lambda a, b, c: ...)
-
- # Test set as key (transformation/type, like `expected_type`)
- self.assertEqual(traverse_obj(_TEST_DATA, (..., {str.upper}, )), ['STR'],
- msg='Function in set should be a transformation')
- self.assertEqual(traverse_obj(_TEST_DATA, (..., {str})), ['str'],
- msg='Type in set should be a type filter')
- self.assertEqual(traverse_obj(_TEST_DATA, {dict}), _TEST_DATA,
- msg='A single set should be wrapped into a path')
- self.assertEqual(traverse_obj(_TEST_DATA, (..., {str.upper})), ['STR'],
- msg='Transformation function should not raise')
- self.assertEqual(traverse_obj(_TEST_DATA, (..., {str_or_none})),
- [item for item in map(str_or_none, _TEST_DATA.values()) if item is not None],
- msg='Function in set should be a transformation')
- self.assertEqual(traverse_obj(_TEST_DATA, ('fail', {lambda _: 'const'})), 'const',
- msg='Function in set should always be called')
- if __debug__:
- with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
- traverse_obj(_TEST_DATA, set())
- with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
- traverse_obj(_TEST_DATA, {str.upper, str})
-
- # Test `slice` as a key
- _SLICE_DATA = [0, 1, 2, 3, 4]
- self.assertEqual(traverse_obj(_TEST_DATA, ('dict', slice(1))), None,
- msg='slice on a dictionary should not throw')
- self.assertEqual(traverse_obj(_SLICE_DATA, slice(1)), _SLICE_DATA[:1],
- msg='slice key should apply slice to sequence')
- self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 2)), _SLICE_DATA[1:2],
- msg='slice key should apply slice to sequence')
- self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 4, 2)), _SLICE_DATA[1:4:2],
- msg='slice key should apply slice to sequence')
-
- # Test alternative paths
- self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'str'), 'str',
- msg='multiple `paths` should be treated as alternative paths')
- self.assertEqual(traverse_obj(_TEST_DATA, 'str', 100), 'str',
- msg='alternatives should exit early')
- self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'fail'), None,
- msg='alternatives should return `default` if exhausted')
- self.assertEqual(traverse_obj(_TEST_DATA, (..., 'fail'), 100), 100,
- msg='alternatives should track their own branching return')
- self.assertEqual(traverse_obj(_TEST_DATA, ('dict', ...), ('data', ...)), list(_TEST_DATA['data']),
- msg='alternatives on empty objects should search further')
-
- # Test branch and path nesting
- self.assertEqual(traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')), ['https://www.example.com/0'],
- msg='tuple as key should be treated as branches')
- self.assertEqual(traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')), ['https://www.example.com/0'],
- msg='list as key should be treated as branches')
- self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))), ['https://www.example.com/0'],
- msg='double nesting in path should be treated as paths')
- self.assertEqual(traverse_obj(['0', [1, 2]], [(0, 1), 0]), [1],
- msg='do not fail early on branching')
- self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', ((1, ('fail', 'url')), (0, 'url')))),
- ['https://www.example.com/0', 'https://www.example.com/1'],
- msg='tripple nesting in path should be treated as branches')
- self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ('fail', (..., 'url')))),
- ['https://www.example.com/0', 'https://www.example.com/1'],
- msg='ellipsis as branch path start gets flattened')
-
- # Test dictionary as key
- self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}), {0: 100, 1: 1.2},
- msg='dict key should result in a dict with the same keys')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}),
- {0: 'https://www.example.com/0'},
- msg='dict key should allow paths')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}),
- {0: ['https://www.example.com/0']},
- msg='tuple in dict path should be treated as branches')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}),
- {0: ['https://www.example.com/0']},
- msg='double nesting in dict path should be treated as paths')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}),
- {0: ['https://www.example.com/1', 'https://www.example.com/0']},
- msg='tripple nesting in dict path should be treated as branches')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}), {},
- msg='remove `None` values when top level dict key fails')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}, default=...), {0: ...},
- msg='use `default` if key fails and `default`')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}), {},
- msg='remove empty values when dict key')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}, default=...), {0: ...},
- msg='use `default` when dict key and `default`')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}), {},
- msg='remove empty values when nested dict key fails')
- self.assertEqual(traverse_obj(None, {0: 'fail'}), {},
- msg='default to dict if pruned')
- self.assertEqual(traverse_obj(None, {0: 'fail'}, default=...), {0: ...},
- msg='default to dict if pruned and default is given')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}, default=...), {0: {0: ...}},
- msg='use nested `default` when nested dict key fails and `default`')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: ('dict', ...)}), {},
- msg='remove key if branch in dict key not successful')
-
- # Testing default parameter behavior
- _DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}
- self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail'), None,
- msg='default value should be `None`')
- self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=...), ...,
- msg='chained fails should result in default')
- self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', 'int'), 0,
- msg='should not short cirquit on `None`')
- self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', default=1), 1,
- msg='invalid dict key should result in `default`')
- self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', default=1), 1,
- msg='`None` is a deliberate sentinel and should become `default`')
- self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', 10)), None,
- msg='`IndexError` should result in `default`')
- self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=1), 1,
- msg='if branched but not successful return `default` if defined, not `[]`')
- self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=None), None,
- msg='if branched but not successful return `default` even if `default` is `None`')
- self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail')), [],
- msg='if branched but not successful return `[]`, not `default`')
- self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', ...)), [],
- msg='if branched but object is empty return `[]`, not `default`')
- self.assertEqual(traverse_obj(None, ...), [],
- msg='if branched but object is `None` return `[]`, not `default`')
- self.assertEqual(traverse_obj({0: None}, (0, ...)), [],
- msg='if branched but state is `None` return `[]`, not `default`')
-
- branching_paths = [
- ('fail', ...),
- (..., 'fail'),
- 100 * ('fail',) + (...,),
- (...,) + 100 * ('fail',),
- ]
- for branching_path in branching_paths:
- self.assertEqual(traverse_obj({}, branching_path), [],
- msg='if branched but state is `None`, return `[]` (not `default`)')
- self.assertEqual(traverse_obj({}, 'fail', branching_path), [],
- msg='if branching in last alternative and previous did not match, return `[]` (not `default`)')
- self.assertEqual(traverse_obj({0: 'x'}, 0, branching_path), 'x',
- msg='if branching in last alternative and previous did match, return single value')
- self.assertEqual(traverse_obj({0: 'x'}, branching_path, 0), 'x',
- msg='if branching in first alternative and non-branching path does match, return single value')
- self.assertEqual(traverse_obj({}, branching_path, 'fail'), None,
- msg='if branching in first alternative and non-branching path does not match, return `default`')
-
- # Testing expected_type behavior
- _EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}
- self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str),
- 'str', msg='accept matching `expected_type` type')
- self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int),
- None, msg='reject non matching `expected_type` type')
- self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)),
- '0', msg='transform type using type function')
- self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=lambda _: 1 / 0),
- None, msg='wrap expected_type fuction in try_call')
- self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, ..., expected_type=str),
- ['str'], msg='eliminate items that expected_type fails on')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}, expected_type=int),
- {0: 100}, msg='type as expected_type should filter dict values')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2, 2: 'None'}, expected_type=str_or_none),
- {0: '100', 1: '1.2'}, msg='function as expected_type should transform dict values')
- self.assertEqual(traverse_obj(_TEST_DATA, ({0: 1.2}, 0, {int_or_none}), expected_type=int),
- 1, msg='expected_type should not filter non final dict values')
- self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 100, 1: 'str'}}, expected_type=int),
- {0: {0: 100}}, msg='expected_type should transform deep dict values')
- self.assertEqual(traverse_obj(_TEST_DATA, [({0: '...'}, {0: '...'})], expected_type=type(...)),
- [{0: ...}, {0: ...}], msg='expected_type should transform branched dict values')
- self.assertEqual(traverse_obj({1: {3: 4}}, [(1, 2), 3], expected_type=int),
- [4], msg='expected_type regression for type matching in tuple branching')
- self.assertEqual(traverse_obj(_TEST_DATA, ['data', ...], expected_type=int),
- [], msg='expected_type regression for type matching in dict result')
-
- # Test get_all behavior
- _GET_ALL_DATA = {'key': [0, 1, 2]}
- self.assertEqual(traverse_obj(_GET_ALL_DATA, ('key', ...), get_all=False), 0,
- msg='if not `get_all`, return only first matching value')
- self.assertEqual(traverse_obj(_GET_ALL_DATA, ..., get_all=False), [0, 1, 2],
- msg='do not overflatten if not `get_all`')
-
- # Test casesense behavior
- _CASESENSE_DATA = {
- 'KeY': 'value0',
- 0: {
- 'KeY': 'value1',
- 0: {'KeY': 'value2'},
- },
- }
- self.assertEqual(traverse_obj(_CASESENSE_DATA, 'key'), None,
- msg='dict keys should be case sensitive unless `casesense`')
- self.assertEqual(traverse_obj(_CASESENSE_DATA, 'keY',
- casesense=False), 'value0',
- msg='allow non matching key case if `casesense`')
- self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ('keY',)),
- casesense=False), ['value1'],
- msg='allow non matching key case in branch if `casesense`')
- self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ((0, 'keY'),)),
- casesense=False), ['value2'],
- msg='allow non matching key case in branch path if `casesense`')
-
- # Test traverse_string behavior
- _TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}
- self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)), None,
- msg='do not traverse into string if not `traverse_string`')
- self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0),
- traverse_string=True), 's',
- msg='traverse into string if `traverse_string`')
- self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1),
- traverse_string=True), '.',
- msg='traverse into converted data if `traverse_string`')
- self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', ...),
- traverse_string=True), 'str',
- msg='`...` should result in string (same value) if `traverse_string`')
- self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', slice(0, None, 2)),
- traverse_string=True), 'sr',
- msg='`slice` should result in string if `traverse_string`')
- self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda i, v: i or v == "s"),
- traverse_string=True), 'str',
- msg='function should result in string if `traverse_string`')
- self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)),
- traverse_string=True), ['s', 'r'],
- msg='branching should result in list if `traverse_string`')
- self.assertEqual(traverse_obj({}, (0, ...), traverse_string=True), [],
- msg='branching should result in list if `traverse_string`')
- self.assertEqual(traverse_obj({}, (0, lambda x, y: True), traverse_string=True), [],
- msg='branching should result in list if `traverse_string`')
- self.assertEqual(traverse_obj({}, (0, slice(1)), traverse_string=True), [],
- msg='branching should result in list if `traverse_string`')
-
- # Test re.Match as input obj
- mobj = re.fullmatch(r'0(12)(?P<group>3)(4)?', '0123')
- self.assertEqual(traverse_obj(mobj, ...), [x for x in mobj.groups() if x is not None],
- msg='`...` on a `re.Match` should give its `groups()`')
- self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 2)), ['0123', '3'],
- msg='function on a `re.Match` should give groupno, value starting at 0')
- self.assertEqual(traverse_obj(mobj, 'group'), '3',
- msg='str key on a `re.Match` should give group with that name')
- self.assertEqual(traverse_obj(mobj, 2), '3',
- msg='int key on a `re.Match` should give group with that name')
- self.assertEqual(traverse_obj(mobj, 'gRoUp', casesense=False), '3',
- msg='str key on a `re.Match` should respect casesense')
- self.assertEqual(traverse_obj(mobj, 'fail'), None,
- msg='failing str key on a `re.Match` should return `default`')
- self.assertEqual(traverse_obj(mobj, 'gRoUpS', casesense=False), None,
- msg='failing str key on a `re.Match` should return `default`')
- self.assertEqual(traverse_obj(mobj, 8), None,
- msg='failing int key on a `re.Match` should return `default`')
- self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 'group')), ['0123', '3'],
- msg='function on a `re.Match` should give group name as well')
-
- # Test xml.etree.ElementTree.Element as input obj
- etree = xml.etree.ElementTree.fromstring('''<?xml version="1.0"?>
- <data>
- <country name="Liechtenstein">
- <rank>1</rank>
- <year>2008</year>
- <gdppc>141100</gdppc>
- <neighbor name="Austria" direction="E"/>
- <neighbor name="Switzerland" direction="W"/>
- </country>
- <country name="Singapore">
- <rank>4</rank>
- <year>2011</year>
- <gdppc>59900</gdppc>
- <neighbor name="Malaysia" direction="N"/>
- </country>
- <country name="Panama">
- <rank>68</rank>
- <year>2011</year>
- <gdppc>13600</gdppc>
- <neighbor name="Costa Rica" direction="W"/>
- <neighbor name="Colombia" direction="E"/>
- </country>
- </data>''')
- self.assertEqual(traverse_obj(etree, ''), etree,
- msg='empty str key should return the element itself')
- self.assertEqual(traverse_obj(etree, 'country'), list(etree),
- msg='str key should lead all children with that tag name')
- self.assertEqual(traverse_obj(etree, ...), list(etree),
- msg='`...` as key should return all children')
- self.assertEqual(traverse_obj(etree, lambda _, x: x[0].text == '4'), [etree[1]],
- msg='function as key should get element as value')
- self.assertEqual(traverse_obj(etree, lambda i, _: i == 1), [etree[1]],
- msg='function as key should get index as key')
- self.assertEqual(traverse_obj(etree, 0), etree[0],
- msg='int key should return the nth child')
- self.assertEqual(traverse_obj(etree, './/neighbor/@name'),
- ['Austria', 'Switzerland', 'Malaysia', 'Costa Rica', 'Colombia'],
- msg='`@<attribute>` at end of path should give that attribute')
- self.assertEqual(traverse_obj(etree, '//neighbor/@fail'), [None, None, None, None, None],
- msg='`@<nonexistant>` at end of path should give `None`')
- self.assertEqual(traverse_obj(etree, ('//neighbor/@', 2)), {'name': 'Malaysia', 'direction': 'N'},
- msg='`@` should give the full attribute dict')
- self.assertEqual(traverse_obj(etree, '//year/text()'), ['2008', '2011', '2011'],
- msg='`text()` at end of path should give the inner text')
- self.assertEqual(traverse_obj(etree, '//*[@direction]/@direction'), ['E', 'W', 'N', 'W', 'E'],
- msg='full Python xpath features should be supported')
- self.assertEqual(traverse_obj(etree, (0, '@name')), 'Liechtenstein',
- msg='special transformations should act on current element')
- self.assertEqual(traverse_obj(etree, ('country', 0, ..., 'text()', {int_or_none})), [1, 2008, 141100],
- msg='special transformations should act on current element')
-
def test_http_header_dict(self):
headers = HTTPHeaderDict()
headers['ytdl-test'] = b'0'
@@ -2448,6 +2069,10 @@ Line 1
# Test escaping
assert run_shell(['echo', 'test"&']) == '"test""&"\n'
+ assert run_shell(['echo', '%CMDCMDLINE:~-1%&']) == '"%CMDCMDLINE:~-1%&"\n'
+ assert run_shell(['echo', 'a\nb']) == '"a"\n"b"\n'
+ assert run_shell(['echo', '"']) == '""""\n'
+ assert run_shell(['echo', '\\']) == '\\\n'
# Test if delayed expansion is disabled
assert run_shell(['echo', '^!']) == '"^!"\n'
assert run_shell('echo "^!"') == '"^!"\n'
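The new run_shell assertions above exercise cmd.exe argument escaping. A simplified sketch of the quoting rule they imply (a hypothetical helper for illustration, not yt-dlp's actual implementation; the embedded-newline case is not covered):

    def cmd_quote(arg):
        # Quote only when needed and double any embedded double quote, so that
        # cmd.exe passes metacharacters such as & and ! through literally
        # (delayed expansion is assumed disabled, as the `^!` assertions check)
        if arg and not any(c in arg for c in ' "&|<>^!%'):
            return arg
        return '"' + arg.replace('"', '""') + '"'

    assert cmd_quote('test"&') == '"test""&"'
    assert cmd_quote('"') == '""""'
    assert cmd_quote('^!') == '"^!"'
    assert cmd_quote('\\') == '\\'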
diff --git a/test/test_websockets.py b/test/test_websockets.py
index 13b3a1e..b294b09 100644
--- a/test/test_websockets.py
+++ b/test/test_websockets.py
@@ -32,8 +32,6 @@ from yt_dlp.networking.exceptions import (
)
from yt_dlp.utils.networking import HTTPHeaderDict
-from test.conftest import validate_and_send
-
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
@@ -66,7 +64,9 @@ def process_request(self, request):
def create_websocket_server(**ws_kwargs):
import websockets.sync.server
- wsd = websockets.sync.server.serve(websocket_handler, '127.0.0.1', 0, process_request=process_request, **ws_kwargs)
+ wsd = websockets.sync.server.serve(
+ websocket_handler, '127.0.0.1', 0,
+ process_request=process_request, open_timeout=2, **ws_kwargs)
ws_port = wsd.socket.getsockname()[1]
ws_server_thread = threading.Thread(target=wsd.serve_forever)
ws_server_thread.daemon = True
@@ -100,6 +100,19 @@ def create_mtls_wss_websocket_server():
return create_websocket_server(ssl_context=sslctx)
+def ws_validate_and_send(rh, req):
+ rh.validate(req)
+ max_tries = 3
+ for i in range(max_tries):
+ try:
+ return rh.send(req)
+ except TransportError as e:
+ if i < (max_tries - 1) and 'connection closed during handshake' in str(e):
+ # websockets server sometimes hangs on new connections
+ continue
+ raise
+
+
@pytest.mark.skipif(not websockets, reason='websockets must be installed to test websocket request handlers')
class TestWebsSocketRequestHandlerConformance:
@classmethod
@@ -119,7 +132,7 @@ class TestWebsSocketRequestHandlerConformance:
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
def test_basic_websockets(self, handler):
with handler() as rh:
- ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws = ws_validate_and_send(rh, Request(self.ws_base_url))
assert 'upgrade' in ws.headers
assert ws.status == 101
ws.send('foo')
@@ -131,7 +144,7 @@ class TestWebsSocketRequestHandlerConformance:
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
def test_send_types(self, handler, msg, opcode):
with handler() as rh:
- ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws = ws_validate_and_send(rh, Request(self.ws_base_url))
ws.send(msg)
assert int(ws.recv()) == opcode
ws.close()
@@ -140,10 +153,10 @@ class TestWebsSocketRequestHandlerConformance:
def test_verify_cert(self, handler):
with handler() as rh:
with pytest.raises(CertificateVerifyError):
- validate_and_send(rh, Request(self.wss_base_url))
+ ws_validate_and_send(rh, Request(self.wss_base_url))
with handler(verify=False) as rh:
- ws = validate_and_send(rh, Request(self.wss_base_url))
+ ws = ws_validate_and_send(rh, Request(self.wss_base_url))
assert ws.status == 101
ws.close()
@@ -151,7 +164,7 @@ class TestWebsSocketRequestHandlerConformance:
def test_ssl_error(self, handler):
with handler(verify=False) as rh:
with pytest.raises(SSLError, match=r'ssl(?:v3|/tls) alert handshake failure') as exc_info:
- validate_and_send(rh, Request(self.bad_wss_host))
+ ws_validate_and_send(rh, Request(self.bad_wss_host))
assert not issubclass(exc_info.type, CertificateVerifyError)
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
@@ -163,7 +176,7 @@ class TestWebsSocketRequestHandlerConformance:
])
def test_percent_encode(self, handler, path, expected):
with handler() as rh:
- ws = validate_and_send(rh, Request(f'{self.ws_base_url}{path}'))
+ ws = ws_validate_and_send(rh, Request(f'{self.ws_base_url}{path}'))
ws.send('path')
assert ws.recv() == expected
assert ws.status == 101
@@ -174,7 +187,7 @@ class TestWebsSocketRequestHandlerConformance:
with handler() as rh:
# This isn't a comprehensive test,
# but it should be enough to check whether the handler is removing dot segments
- ws = validate_and_send(rh, Request(f'{self.ws_base_url}/a/b/./../../test'))
+ ws = ws_validate_and_send(rh, Request(f'{self.ws_base_url}/a/b/./../../test'))
assert ws.status == 101
ws.send('path')
assert ws.recv() == '/test'
@@ -187,7 +200,7 @@ class TestWebsSocketRequestHandlerConformance:
def test_raise_http_error(self, handler, status):
with handler() as rh:
with pytest.raises(HTTPError) as exc_info:
- validate_and_send(rh, Request(f'{self.ws_base_url}/gen_{status}'))
+ ws_validate_and_send(rh, Request(f'{self.ws_base_url}/gen_{status}'))
assert exc_info.value.status == status
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
@@ -198,7 +211,7 @@ class TestWebsSocketRequestHandlerConformance:
def test_timeout(self, handler, params, extensions):
with handler(**params) as rh:
with pytest.raises(TransportError):
- validate_and_send(rh, Request(self.ws_base_url, extensions=extensions))
+ ws_validate_and_send(rh, Request(self.ws_base_url, extensions=extensions))
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
def test_cookies(self, handler):
@@ -210,18 +223,18 @@ class TestWebsSocketRequestHandlerConformance:
comment_url=None, rest={}))
with handler(cookiejar=cookiejar) as rh:
- ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws = ws_validate_and_send(rh, Request(self.ws_base_url))
ws.send('headers')
assert json.loads(ws.recv())['cookie'] == 'test=ytdlp'
ws.close()
with handler() as rh:
- ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws = ws_validate_and_send(rh, Request(self.ws_base_url))
ws.send('headers')
assert 'cookie' not in json.loads(ws.recv())
ws.close()
- ws = validate_and_send(rh, Request(self.ws_base_url, extensions={'cookiejar': cookiejar}))
+ ws = ws_validate_and_send(rh, Request(self.ws_base_url, extensions={'cookiejar': cookiejar}))
ws.send('headers')
assert json.loads(ws.recv())['cookie'] == 'test=ytdlp'
ws.close()
@@ -231,7 +244,7 @@ class TestWebsSocketRequestHandlerConformance:
source_address = f'127.0.0.{random.randint(5, 255)}'
verify_address_availability(source_address)
with handler(source_address=source_address) as rh:
- ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws = ws_validate_and_send(rh, Request(self.ws_base_url))
ws.send('source_address')
assert source_address == ws.recv()
ws.close()
@@ -240,7 +253,7 @@ class TestWebsSocketRequestHandlerConformance:
def test_response_url(self, handler):
with handler() as rh:
url = f'{self.ws_base_url}/something'
- ws = validate_and_send(rh, Request(url))
+ ws = ws_validate_and_send(rh, Request(url))
assert ws.url == url
ws.close()
@@ -248,14 +261,14 @@ class TestWebsSocketRequestHandlerConformance:
def test_request_headers(self, handler):
with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
# Global Headers
- ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws = ws_validate_and_send(rh, Request(self.ws_base_url))
ws.send('headers')
headers = HTTPHeaderDict(json.loads(ws.recv()))
assert headers['test1'] == 'test'
ws.close()
# Per request headers, merged with global
- ws = validate_and_send(rh, Request(
+ ws = ws_validate_and_send(rh, Request(
self.ws_base_url, headers={'test2': 'changed', 'test3': 'test3'}))
ws.send('headers')
headers = HTTPHeaderDict(json.loads(ws.recv()))
@@ -288,7 +301,7 @@ class TestWebsSocketRequestHandlerConformance:
verify=False,
client_cert=client_cert
) as rh:
- validate_and_send(rh, Request(self.mtls_wss_base_url)).close()
+ ws_validate_and_send(rh, Request(self.mtls_wss_base_url)).close()
def create_fake_ws_connection(raised):