author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-15 16:49:24 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-15 16:49:24 +0000
commit     2415e66f889f38503b73e8ebc5f43ca342390e5c (patch)
tree       ac48ab69d1d96bae3d83756134921e0d90593aa5 /test
parent     Initial commit. (diff)
Adding upstream version 2024.03.10. (upstream/2024.03.10)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '')
-rw-r--r--  test/__init__.py  0
-rw-r--r--  test/conftest.py  26
-rw-r--r--  test/helper.py  340
-rw-r--r--  test/parameters.json  49
-rw-r--r--  test/test_InfoExtractor.py  1911
-rw-r--r--  test/test_YoutubeDL.py  1346
-rw-r--r--  test/test_YoutubeDLCookieJar.py  66
-rw-r--r--  test/test_aes.py  152
-rw-r--r--  test/test_age_restriction.py  55
-rw-r--r--  test/test_all_urls.py  122
-rw-r--r--  test/test_cache.py  57
-rw-r--r--  test/test_compat.py  105
-rw-r--r--  test/test_config.py  227
-rw-r--r--  test/test_cookies.py  306
-rwxr-xr-x  test/test_download.py  314
-rw-r--r--  test/test_downloader_external.py  139
-rw-r--r--  test/test_downloader_http.py  106
-rw-r--r--  test/test_execution.py  60
-rw-r--r--  test/test_iqiyi_sdk_interpreter.py  44
-rw-r--r--  test/test_jsinterp.py  380
-rw-r--r--  test/test_netrc.py  28
-rw-r--r--  test/test_networking.py  1631
-rw-r--r--  test/test_networking_utils.py  208
-rw-r--r--  test/test_overwrites.py  54
-rw-r--r--  test/test_plugins.py  73
-rw-r--r--  test/test_post_hooks.py  70
-rw-r--r--  test/test_postprocessors.py  579
-rw-r--r--  test/test_socks.py  477
-rw-r--r--  test/test_subtitles.py  452
-rw-r--r--  test/test_update.py  228
-rw-r--r--  test/test_utils.py  2457
-rw-r--r--  test/test_verbose_output.py  75
-rw-r--r--  test/test_websockets.py  383
-rw-r--r--  test/test_write_annotations.py.disabled  77
-rw-r--r--  test/test_youtube_lists.py  71
-rw-r--r--  test/test_youtube_misc.py  26
-rw-r--r--  test/test_youtube_signature.py  253
-rw-r--r--  test/testcert.pem  52
-rw-r--r--  test/testdata/certificate/ca.crt  10
-rw-r--r--  test/testdata/certificate/ca.key  5
-rw-r--r--  test/testdata/certificate/ca.srl  1
-rw-r--r--  test/testdata/certificate/client.crt  9
-rw-r--r--  test/testdata/certificate/client.csr  7
-rw-r--r--  test/testdata/certificate/client.key  5
-rw-r--r--  test/testdata/certificate/clientencrypted.key  8
-rw-r--r--  test/testdata/certificate/clientwithencryptedkey.crt  17
-rw-r--r--  test/testdata/certificate/clientwithkey.crt  14
-rw-r--r--  test/testdata/certificate/instructions.md  19
-rw-r--r--  test/testdata/cookies/httponly_cookies.txt  6
-rw-r--r--  test/testdata/cookies/malformed_cookies.txt  9
-rw-r--r--  test/testdata/cookies/session_cookies.txt  6
-rw-r--r--  test/testdata/f4m/custom_base_url.f4m  10
-rw-r--r--  test/testdata/ism/ec-3_test.Manifest  1
-rw-r--r--  test/testdata/ism/sintel.Manifest  988
-rw-r--r--  test/testdata/m3u8/bipbop_16x9.m3u8  38
-rw-r--r--  test/testdata/m3u8/img_bipbop_adv_example_fmp4.m3u8  76
-rw-r--r--  test/testdata/mpd/float_duration.mpd  18
-rw-r--r--  test/testdata/mpd/subtitles.mpd  351
-rw-r--r--  test/testdata/mpd/unfragmented.mpd  28
-rw-r--r--  test/testdata/mpd/urls_only.mpd  218
-rw-r--r--  test/testdata/thumbnails/foo %d bar/foo_%d.webp  bin 0 -> 3928 bytes
-rw-r--r--  test/testdata/xspf/foo_xspf.xspf  34
-rw-r--r--  test/testdata/yt_dlp_plugins/extractor/_ignore.py  5
-rw-r--r--  test/testdata/yt_dlp_plugins/extractor/ignore.py  12
-rw-r--r--  test/testdata/yt_dlp_plugins/extractor/normal.py  9
-rw-r--r--  test/testdata/yt_dlp_plugins/postprocessor/normal.py  5
-rw-r--r--  test/testdata/zipped_plugins/yt_dlp_plugins/extractor/zipped.py  5
-rw-r--r--  test/testdata/zipped_plugins/yt_dlp_plugins/postprocessor/zipped.py  5
68 files changed, 14918 insertions, 0 deletions
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/__init__.py
diff --git a/test/conftest.py b/test/conftest.py
new file mode 100644
index 0000000..2fbc269
--- /dev/null
+++ b/test/conftest.py
@@ -0,0 +1,26 @@
+import functools
+import inspect
+
+import pytest
+
+from yt_dlp.networking import RequestHandler
+from yt_dlp.networking.common import _REQUEST_HANDLERS
+from yt_dlp.utils._utils import _YDLLogger as FakeLogger
+
+
+@pytest.fixture
+def handler(request):
+ RH_KEY = request.param
+ if inspect.isclass(RH_KEY) and issubclass(RH_KEY, RequestHandler):
+ handler = RH_KEY
+ elif RH_KEY in _REQUEST_HANDLERS:
+ handler = _REQUEST_HANDLERS[RH_KEY]
+ else:
+ pytest.skip(f'{RH_KEY} request handler is not available')
+
+ return functools.partial(handler, logger=FakeLogger)
+
+
+def validate_and_send(rh, req):
+ rh.validate(req)
+ return rh.send(req)
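The conftest above exposes request handlers to tests via indirect parametrization: the fixture resolves either a RequestHandler subclass or its registered key, skips when the handler is unavailable, and hands back a partial bound to a fake logger. A minimal sketch of how a test module might consume it; the handler names and URL below are illustrative and not part of this commit:

    import pytest

    from test.conftest import validate_and_send
    from yt_dlp.networking import Request


    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)  # illustrative keys
    def test_simple_get(handler):
        # `handler` is functools.partial(<RequestHandler subclass>, logger=FakeLogger);
        # calling it builds a handler instance, used here as a context manager
        with handler() as rh:
            response = validate_and_send(rh, Request('http://127.0.0.1:8080/'))  # assumed local server
            assert response.status == 200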
diff --git a/test/helper.py b/test/helper.py
new file mode 100644
index 0000000..7760fd8
--- /dev/null
+++ b/test/helper.py
@@ -0,0 +1,340 @@
+import errno
+import hashlib
+import json
+import os.path
+import re
+import ssl
+import sys
+import types
+
+import yt_dlp.extractor
+from yt_dlp import YoutubeDL
+from yt_dlp.compat import compat_os_name
+from yt_dlp.utils import preferredencoding, try_call, write_string, find_available_port
+
+if 'pytest' in sys.modules:
+ import pytest
+ is_download_test = pytest.mark.download
+else:
+ def is_download_test(testClass):
+ return testClass
+
+
+def get_params(override=None):
+ PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'parameters.json')
+ LOCAL_PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'local_parameters.json')
+ with open(PARAMETERS_FILE, encoding='utf-8') as pf:
+ parameters = json.load(pf)
+ if os.path.exists(LOCAL_PARAMETERS_FILE):
+ with open(LOCAL_PARAMETERS_FILE, encoding='utf-8') as pf:
+ parameters.update(json.load(pf))
+ if override:
+ parameters.update(override)
+ return parameters
+
+
+def try_rm(filename):
+ """ Remove a file if it exists """
+ try:
+ os.remove(filename)
+ except OSError as ose:
+ if ose.errno != errno.ENOENT:
+ raise
+
+
+def report_warning(message, *args, **kwargs):
+ '''
+ Print the message to stderr; it will be prefixed with 'WARNING:'.
+ If stderr is a tty (and not on Windows), the 'WARNING:' will be colored.
+ '''
+ if sys.stderr.isatty() and compat_os_name != 'nt':
+ _msg_header = '\033[0;33mWARNING:\033[0m'
+ else:
+ _msg_header = 'WARNING:'
+ output = f'{_msg_header} {message}\n'
+ if 'b' in getattr(sys.stderr, 'mode', ''):
+ output = output.encode(preferredencoding())
+ sys.stderr.write(output)
+
+
+class FakeYDL(YoutubeDL):
+ def __init__(self, override=None):
+ # Different instances of the downloader can't share the same dictionary,
+ # since some tests set the "sublang" parameter, which would break the md5 checks.
+ params = get_params(override=override)
+ super().__init__(params, auto_init=False)
+ self.result = []
+
+ def to_screen(self, s, *args, **kwargs):
+ print(s)
+
+ def trouble(self, s, *args, **kwargs):
+ raise Exception(s)
+
+ def download(self, x):
+ self.result.append(x)
+
+ def expect_warning(self, regex):
+ # Silence an expected warning matching a regex
+ old_report_warning = self.report_warning
+
+ def report_warning(self, message, *args, **kwargs):
+ if re.match(regex, message):
+ return
+ old_report_warning(message, *args, **kwargs)
+ self.report_warning = types.MethodType(report_warning, self)
+
+
+def gettestcases(include_onlymatching=False):
+ for ie in yt_dlp.extractor.gen_extractors():
+ yield from ie.get_testcases(include_onlymatching)
+
+
+def getwebpagetestcases():
+ for ie in yt_dlp.extractor.gen_extractors():
+ for tc in ie.get_webpage_testcases():
+ tc.setdefault('add_ie', []).append('Generic')
+ yield tc
+
+
+md5 = lambda s: hashlib.md5(s.encode()).hexdigest()
+
+
+def expect_value(self, got, expected, field):
+ if isinstance(expected, str) and expected.startswith('re:'):
+ match_str = expected[len('re:'):]
+ match_rex = re.compile(match_str)
+
+ self.assertTrue(
+ isinstance(got, str),
+ f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
+ self.assertTrue(
+ match_rex.match(got),
+ f'field {field} (value: {got!r}) should match {match_str!r}')
+ elif isinstance(expected, str) and expected.startswith('startswith:'):
+ start_str = expected[len('startswith:'):]
+ self.assertTrue(
+ isinstance(got, str),
+ f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
+ self.assertTrue(
+ got.startswith(start_str),
+ f'field {field} (value: {got!r}) should start with {start_str!r}')
+ elif isinstance(expected, str) and expected.startswith('contains:'):
+ contains_str = expected[len('contains:'):]
+ self.assertTrue(
+ isinstance(got, str),
+ f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
+ self.assertTrue(
+ contains_str in got,
+ f'field {field} (value: {got!r}) should contain {contains_str!r}')
+ elif isinstance(expected, type):
+ self.assertTrue(
+ isinstance(got, expected),
+ f'Expected type {expected!r} for field {field}, but got value {got!r} of type {type(got)!r}')
+ elif isinstance(expected, dict) and isinstance(got, dict):
+ expect_dict(self, got, expected)
+ elif isinstance(expected, list) and isinstance(got, list):
+ self.assertEqual(
+ len(expected), len(got),
+ 'Expect a list of length %d, but got a list of length %d for field %s' % (
+ len(expected), len(got), field))
+ for index, (item_got, item_expected) in enumerate(zip(got, expected)):
+ type_got = type(item_got)
+ type_expected = type(item_expected)
+ self.assertEqual(
+ type_expected, type_got,
+ 'Type mismatch for list item at index %d for field %s, expected %r, got %r' % (
+ index, field, type_expected, type_got))
+ expect_value(self, item_got, item_expected, field)
+ else:
+ if isinstance(expected, str) and expected.startswith('md5:'):
+ self.assertTrue(
+ isinstance(got, str),
+ f'Expected field {field} to be a unicode object, but got value {got!r} of type {type(got)!r}')
+ got = 'md5:' + md5(got)
+ elif isinstance(expected, str) and re.match(r'^(?:min|max)?count:\d+', expected):
+ self.assertTrue(
+ isinstance(got, (list, dict)),
+ f'Expected field {field} to be a list or a dict, but it is of type {type(got).__name__}')
+ op, _, expected_num = expected.partition(':')
+ expected_num = int(expected_num)
+ if op == 'mincount':
+ assert_func = assertGreaterEqual
+ msg_tmpl = 'Expected %d items in field %s, but only got %d'
+ elif op == 'maxcount':
+ assert_func = assertLessEqual
+ msg_tmpl = 'Expected maximum %d items in field %s, but got %d'
+ elif op == 'count':
+ assert_func = assertEqual
+ msg_tmpl = 'Expected exactly %d items in field %s, but got %d'
+ else:
+ assert False
+ assert_func(
+ self, len(got), expected_num,
+ msg_tmpl % (expected_num, field, len(got)))
+ return
+ self.assertEqual(
+ expected, got,
+ f'Invalid value for field {field}, expected {expected!r}, got {got!r}')
+
+
+def expect_dict(self, got_dict, expected_dict):
+ for info_field, expected in expected_dict.items():
+ got = got_dict.get(info_field)
+ expect_value(self, got, expected, info_field)
+
+
+def sanitize_got_info_dict(got_dict):
+ IGNORED_FIELDS = (
+ *YoutubeDL._format_fields,
+
+ # Lists
+ 'formats', 'thumbnails', 'subtitles', 'automatic_captions', 'comments', 'entries',
+
+ # Auto-generated
+ 'autonumber', 'playlist', 'format_index', 'video_ext', 'audio_ext', 'duration_string', 'epoch', 'n_entries',
+ 'fulltitle', 'extractor', 'extractor_key', 'filename', 'filepath', 'infojson_filename', 'original_url',
+
+ # Only live_status needs to be checked
+ 'is_live', 'was_live',
+ )
+
+ IGNORED_PREFIXES = ('', 'playlist', 'requested', 'webpage')
+
+ def sanitize(key, value):
+ if isinstance(value, str) and len(value) > 100 and key != 'thumbnail':
+ return f'md5:{md5(value)}'
+ elif isinstance(value, list) and len(value) > 10:
+ return f'count:{len(value)}'
+ elif key.endswith('_count') and isinstance(value, int):
+ return int
+ return value
+
+ test_info_dict = {
+ key: sanitize(key, value) for key, value in got_dict.items()
+ if value is not None and key not in IGNORED_FIELDS and (
+ not any(key.startswith(f'{prefix}_') for prefix in IGNORED_PREFIXES)
+ or key == '_old_archive_ids')
+ }
+
+ # display_id may be generated from id
+ if test_info_dict.get('display_id') == test_info_dict.get('id'):
+ test_info_dict.pop('display_id')
+
+ # Remove deprecated fields
+ for old in YoutubeDL._deprecated_multivalue_fields.keys():
+ test_info_dict.pop(old, None)
+
+ # release_year may be generated from release_date
+ if try_call(lambda: test_info_dict['release_year'] == int(test_info_dict['release_date'][:4])):
+ test_info_dict.pop('release_year')
+
+ # Check url for flat entries
+ if got_dict.get('_type', 'video') != 'video' and got_dict.get('url'):
+ test_info_dict['url'] = got_dict['url']
+
+ return test_info_dict
+
+
+def expect_info_dict(self, got_dict, expected_dict):
+ expect_dict(self, got_dict, expected_dict)
+ # Check for the presence of mandatory fields
+ if got_dict.get('_type') not in ('playlist', 'multi_video'):
+ mandatory_fields = ['id', 'title']
+ if expected_dict.get('ext'):
+ mandatory_fields.extend(('url', 'ext'))
+ for key in mandatory_fields:
+ self.assertTrue(got_dict.get(key), 'Missing mandatory field %s' % key)
+ # Check for mandatory fields that are automatically set by YoutubeDL
+ if got_dict.get('_type', 'video') == 'video':
+ for key in ['webpage_url', 'extractor', 'extractor_key']:
+ self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
+
+ test_info_dict = sanitize_got_info_dict(got_dict)
+
+ missing_keys = set(test_info_dict.keys()) - set(expected_dict.keys())
+ if missing_keys:
+ def _repr(v):
+ if isinstance(v, str):
+ return "'%s'" % v.replace('\\', '\\\\').replace("'", "\\'").replace('\n', '\\n')
+ elif isinstance(v, type):
+ return v.__name__
+ else:
+ return repr(v)
+ info_dict_str = ''.join(
+ f' {_repr(k)}: {_repr(v)},\n'
+ for k, v in test_info_dict.items() if k not in missing_keys)
+ if info_dict_str:
+ info_dict_str += '\n'
+ info_dict_str += ''.join(
+ f' {_repr(k)}: {_repr(test_info_dict[k])},\n'
+ for k in missing_keys)
+ info_dict_str = '\n\'info_dict\': {\n' + info_dict_str + '},\n'
+ write_string(info_dict_str.replace('\n', '\n '), out=sys.stderr)
+ self.assertFalse(
+ missing_keys,
+ 'Missing keys in test definition: %s' % (
+ ', '.join(sorted(missing_keys))))
+
+
+def assertRegexpMatches(self, text, regexp, msg=None):
+ if hasattr(self, 'assertRegexp'):
+ return self.assertRegexp(text, regexp, msg)
+ else:
+ m = re.match(regexp, text)
+ if not m:
+ note = 'Regexp didn\'t match: %r not found' % (regexp)
+ if len(text) < 1000:
+ note += ' in %r' % text
+ if msg is None:
+ msg = note
+ else:
+ msg = note + ', ' + msg
+ self.assertTrue(m, msg)
+
+
+def assertGreaterEqual(self, got, expected, msg=None):
+ if not (got >= expected):
+ if msg is None:
+ msg = f'{got!r} not greater than or equal to {expected!r}'
+ self.assertTrue(got >= expected, msg)
+
+
+def assertLessEqual(self, got, expected, msg=None):
+ if not (got <= expected):
+ if msg is None:
+ msg = f'{got!r} not less than or equal to {expected!r}'
+ self.assertTrue(got <= expected, msg)
+
+
+def assertEqual(self, got, expected, msg=None):
+ if not (got == expected):
+ if msg is None:
+ msg = f'{got!r} not equal to {expected!r}'
+ self.assertTrue(got == expected, msg)
+
+
+def expect_warnings(ydl, warnings_re):
+ real_warning = ydl.report_warning
+
+ def _report_warning(w, *args, **kwargs):
+ if not any(re.search(w_re, w) for w_re in warnings_re):
+ real_warning(w, *args, **kwargs)
+
+ ydl.report_warning = _report_warning
+
+
+def http_server_port(httpd):
+ if os.name == 'java' and isinstance(httpd.socket, ssl.SSLSocket):
+ # In Jython SSLSocket is not a subclass of socket.socket
+ sock = httpd.socket.sock
+ else:
+ sock = httpd.socket
+ return sock.getsockname()[1]
+
+
+def verify_address_availability(address):
+ if find_available_port(address) is None:
+ pytest.skip(f'Unable to bind to source address {address} (address may not exist)')
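helper.py supplies the suite's unittest-style assertion vocabulary: expect_value() interprets spec strings such as 're:', 'startswith:', 'contains:', 'md5:' and '(min|max)count:' before falling back to plain equality, and expect_dict()/expect_info_dict() apply it field by field. A minimal self-contained sketch of that spec-string behaviour, with made-up data for illustration:

    import unittest

    from test.helper import expect_dict


    class TestExpectHelpers(unittest.TestCase):
        def test_spec_strings(self):
            got = {'id': 'abc123', 'title': 'Some video title', 'tags': ['a', 'b', 'c']}
            expect_dict(self, got, {
                'id': 're:^[a-z0-9]+$',      # regex must match the value
                'title': 'startswith:Some',  # prefix check
                'tags': 'mincount:2',        # list must have at least 2 items
            })


    if __name__ == '__main__':
        unittest.main()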
diff --git a/test/parameters.json b/test/parameters.json
new file mode 100644
index 0000000..8789ce1
--- /dev/null
+++ b/test/parameters.json
@@ -0,0 +1,49 @@
+{
+ "check_formats": false,
+ "consoletitle": false,
+ "continuedl": true,
+ "forcedescription": false,
+ "forcefilename": false,
+ "forceformat": false,
+ "forcethumbnail": false,
+ "forcetitle": false,
+ "forceurl": false,
+ "force_write_download_archive": false,
+ "format": "b/bv",
+ "ignoreerrors": false,
+ "listformats": null,
+ "logtostderr": false,
+ "matchtitle": null,
+ "max_downloads": null,
+ "overwrites": null,
+ "nopart": false,
+ "noprogress": false,
+ "outtmpl": "%(id)s.%(ext)s",
+ "password": null,
+ "playliststart": 1,
+ "prefer_free_formats": false,
+ "quiet": false,
+ "ratelimit": null,
+ "rejecttitle": null,
+ "retries": 10,
+ "simulate": false,
+ "subtitleslang": null,
+ "subtitlesformat": "best",
+ "test": true,
+ "updatetime": true,
+ "usenetrc": false,
+ "username": null,
+ "verbose": true,
+ "writedescription": false,
+ "writeinfojson": true,
+ "writeannotations": false,
+ "writelink": false,
+ "writeurllink": false,
+ "writewebloclink": false,
+ "writedesktoplink": false,
+ "writesubtitles": false,
+ "allsubtitles": false,
+ "listsubtitles": false,
+ "fixup": "never",
+ "allow_playlist_files": false
+}
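parameters.json holds the baseline YoutubeDL options shared by every test; get_params() in helper.py loads it, merges an optional local_parameters.json on top, and finally applies the per-test override. A short sketch of that layering, with arbitrary override values chosen for illustration:

    from test.helper import get_params

    params = get_params({'format': 'bestaudio/best'})
    assert params['outtmpl'] == '%(id)s.%(ext)s'   # taken from parameters.json
    assert params['format'] == 'bestaudio/best'    # per-test override wins over "b/bv"
    assert params['test'] is True                  # baseline flag kept as-is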
diff --git a/test/test_InfoExtractor.py b/test/test_InfoExtractor.py
new file mode 100644
index 0000000..b7dee49
--- /dev/null
+++ b/test/test_InfoExtractor.py
@@ -0,0 +1,1911 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import http.server
+import threading
+
+from test.helper import FakeYDL, expect_dict, expect_value, http_server_port
+from yt_dlp.compat import compat_etree_fromstring
+from yt_dlp.extractor import YoutubeIE, get_info_extractor
+from yt_dlp.extractor.common import InfoExtractor
+from yt_dlp.utils import (
+ ExtractorError,
+ RegexNotFoundError,
+ encode_data_uri,
+ strip_jsonp,
+)
+
+TEAPOT_RESPONSE_STATUS = 418
+TEAPOT_RESPONSE_BODY = "<h1>418 I'm a teapot</h1>"
+
+
+class InfoExtractorTestRequestHandler(http.server.BaseHTTPRequestHandler):
+ def log_message(self, format, *args):
+ pass
+
+ def do_GET(self):
+ if self.path == '/teapot':
+ self.send_response(TEAPOT_RESPONSE_STATUS)
+ self.send_header('Content-Type', 'text/html; charset=utf-8')
+ self.end_headers()
+ self.wfile.write(TEAPOT_RESPONSE_BODY.encode())
+ else:
+ assert False
+
+
+class DummyIE(InfoExtractor):
+ def _sort_formats(self, formats, field_preference=[]):
+ self._downloader.sort_formats(
+ {'formats': formats, '_format_sort_fields': field_preference})
+
+
+class TestInfoExtractor(unittest.TestCase):
+ def setUp(self):
+ self.ie = DummyIE(FakeYDL())
+
+ def test_ie_key(self):
+ self.assertEqual(get_info_extractor(YoutubeIE.ie_key()), YoutubeIE)
+
+ def test_html_search_regex(self):
+ html = '<p id="foo">Watch this <a href="http://www.youtube.com/watch?v=BaW_jenozKc">video</a></p>'
+ search = lambda re, *args: self.ie._html_search_regex(re, html, *args)
+ self.assertEqual(search(r'<p id="foo">(.+?)</p>', 'foo'), 'Watch this video')
+
+ def test_opengraph(self):
+ ie = self.ie
+ html = '''
+ <meta name="og:title" content='Foo'/>
+ <meta content="Some video's description " name="og:description"/>
+ <meta property='og:image' content='http://domain.com/pic.jpg?key1=val1&amp;key2=val2'/>
+ <meta content='application/x-shockwave-flash' property='og:video:type'>
+ <meta content='Foo' property=og:foobar>
+ <meta name="og:test1" content='foo > < bar'/>
+ <meta name="og:test2" content="foo >//< bar"/>
+ <meta property=og-test3 content='Ill-formatted opengraph'/>
+ <meta property=og:test4 content=unquoted-value/>
+ '''
+ self.assertEqual(ie._og_search_title(html), 'Foo')
+ self.assertEqual(ie._og_search_description(html), 'Some video\'s description ')
+ self.assertEqual(ie._og_search_thumbnail(html), 'http://domain.com/pic.jpg?key1=val1&key2=val2')
+ self.assertEqual(ie._og_search_video_url(html, default=None), None)
+ self.assertEqual(ie._og_search_property('foobar', html), 'Foo')
+ self.assertEqual(ie._og_search_property('test1', html), 'foo > < bar')
+ self.assertEqual(ie._og_search_property('test2', html), 'foo >//< bar')
+ self.assertEqual(ie._og_search_property('test3', html), 'Ill-formatted opengraph')
+ self.assertEqual(ie._og_search_property(('test0', 'test1'), html), 'foo > < bar')
+ self.assertRaises(RegexNotFoundError, ie._og_search_property, 'test0', html, None, fatal=True)
+ self.assertRaises(RegexNotFoundError, ie._og_search_property, ('test0', 'test00'), html, None, fatal=True)
+ self.assertEqual(ie._og_search_property('test4', html), 'unquoted-value')
+
+ def test_html_search_meta(self):
+ ie = self.ie
+ html = '''
+ <meta name="a" content="1" />
+ <meta name='b' content='2'>
+ <meta name="c" content='3'>
+ <meta name=d content='4'>
+ <meta property="e" content='5' >
+ <meta content="6" name="f">
+ '''
+
+ self.assertEqual(ie._html_search_meta('a', html), '1')
+ self.assertEqual(ie._html_search_meta('b', html), '2')
+ self.assertEqual(ie._html_search_meta('c', html), '3')
+ self.assertEqual(ie._html_search_meta('d', html), '4')
+ self.assertEqual(ie._html_search_meta('e', html), '5')
+ self.assertEqual(ie._html_search_meta('f', html), '6')
+ self.assertEqual(ie._html_search_meta(('a', 'b', 'c'), html), '1')
+ self.assertEqual(ie._html_search_meta(('c', 'b', 'a'), html), '3')
+ self.assertEqual(ie._html_search_meta(('z', 'x', 'c'), html), '3')
+ self.assertRaises(RegexNotFoundError, ie._html_search_meta, 'z', html, None, fatal=True)
+ self.assertRaises(RegexNotFoundError, ie._html_search_meta, ('z', 'x'), html, None, fatal=True)
+
+ def test_search_json_ld_realworld(self):
+ _TESTS = [
+ # https://github.com/ytdl-org/youtube-dl/issues/23306
+ (
+ r'''<script type="application/ld+json">
+{
+"@context": "http://schema.org/",
+"@type": "VideoObject",
+"name": "1 On 1 With Kleio",
+"url": "https://www.eporner.com/hd-porn/xN49A1cT3eB/1-On-1-With-Kleio/",
+"duration": "PT0H12M23S",
+"thumbnailUrl": ["https://static-eu-cdn.eporner.com/thumbs/static4/7/78/780/780814/9_360.jpg", "https://imggen.eporner.com/780814/1920/1080/9.jpg"],
+"contentUrl": "https://gvideo.eporner.com/xN49A1cT3eB/xN49A1cT3eB.mp4",
+"embedUrl": "https://www.eporner.com/embed/xN49A1cT3eB/1-On-1-With-Kleio/",
+"image": "https://static-eu-cdn.eporner.com/thumbs/static4/7/78/780/780814/9_360.jpg",
+"width": "1920",
+"height": "1080",
+"encodingFormat": "mp4",
+"bitrate": "6617kbps",
+"isFamilyFriendly": "False",
+"description": "Kleio Valentien",
+"uploadDate": "2015-12-05T21:24:35+01:00",
+"interactionStatistic": {
+"@type": "InteractionCounter",
+"interactionType": { "@type": "http://schema.org/WatchAction" },
+"userInteractionCount": 1120958
+}, "aggregateRating": {
+"@type": "AggregateRating",
+"ratingValue": "88",
+"ratingCount": "630",
+"bestRating": "100",
+"worstRating": "0"
+}, "actor": [{
+"@type": "Person",
+"name": "Kleio Valentien",
+"url": "https://www.eporner.com/pornstar/kleio-valentien/"
+}]}
+ </script>''',
+ {
+ 'title': '1 On 1 With Kleio',
+ 'description': 'Kleio Valentien',
+ 'url': 'https://gvideo.eporner.com/xN49A1cT3eB/xN49A1cT3eB.mp4',
+ 'timestamp': 1449347075,
+ 'duration': 743.0,
+ 'view_count': 1120958,
+ 'width': 1920,
+ 'height': 1080,
+ },
+ {},
+ ),
+ (
+ r'''<script type="application/ld+json">
+ {
+ "@context": "https://schema.org",
+ "@graph": [
+ {
+ "@type": "NewsArticle",
+ "mainEntityOfPage": {
+ "@type": "WebPage",
+ "@id": "https://www.ant1news.gr/Society/article/620286/symmoria-anilikon-dikigoros-thymaton-ithelan-na-toys-apoteleiosoyn"
+ },
+ "headline": "Συμμορία ανηλίκων – δικηγόρος θυμάτων: ήθελαν να τους αποτελειώσουν",
+ "name": "Συμμορία ανηλίκων – δικηγόρος θυμάτων: ήθελαν να τους αποτελειώσουν",
+ "description": "Τα παιδιά δέχθηκαν την επίθεση επειδή αρνήθηκαν να γίνουν μέλη της συμμορίας, ανέφερε ο Γ. Ζαχαρόπουλος.",
+ "image": {
+ "@type": "ImageObject",
+ "url": "https://ant1media.azureedge.net/imgHandler/1100/a635c968-be71-447c-bf9c-80d843ece21e.jpg",
+ "width": 1100,
+ "height": 756 },
+ "datePublished": "2021-11-10T08:50:00+03:00",
+ "dateModified": "2021-11-10T08:52:53+03:00",
+ "author": {
+ "@type": "Person",
+ "@id": "https://www.ant1news.gr/",
+ "name": "Ant1news",
+ "image": "https://www.ant1news.gr/images/logo-e5d7e4b3e714c88e8d2eca96130142f6.png",
+ "url": "https://www.ant1news.gr/"
+ },
+ "publisher": {
+ "@type": "Organization",
+ "@id": "https://www.ant1news.gr#publisher",
+ "name": "Ant1news",
+ "url": "https://www.ant1news.gr",
+ "logo": {
+ "@type": "ImageObject",
+ "url": "https://www.ant1news.gr/images/logo-e5d7e4b3e714c88e8d2eca96130142f6.png",
+ "width": 400,
+ "height": 400 },
+ "sameAs": [
+ "https://www.facebook.com/Ant1news.gr",
+ "https://twitter.com/antennanews",
+ "https://www.youtube.com/channel/UC0smvAbfczoN75dP0Hw4Pzw",
+ "https://www.instagram.com/ant1news/"
+ ]
+ },
+
+ "keywords": "μαχαίρωμα,συμμορία ανηλίκων,ΕΙΔΗΣΕΙΣ,ΕΙΔΗΣΕΙΣ ΣΗΜΕΡΑ,ΝΕΑ,Κοινωνία - Ant1news",
+
+
+ "articleSection": "Κοινωνία"
+ }
+ ]
+ }
+ </script>''',
+ {
+ 'timestamp': 1636523400,
+ 'title': 'md5:91fe569e952e4d146485740ae927662b',
+ },
+ {'expected_type': 'NewsArticle'},
+ ),
+ (
+ r'''<script type="application/ld+json">
+ {"url":"/vrtnu/a-z/het-journaal/2021/het-journaal-het-journaal-19u-20211231/",
+ "name":"Het journaal 19u",
+ "description":"Het journaal 19u van vrijdag 31 december 2021.",
+ "potentialAction":{"url":"https://vrtnu.page.link/pfVy6ihgCAJKgHqe8","@type":"ShareAction"},
+ "mainEntityOfPage":{"@id":"1640092242445","@type":"WebPage"},
+ "publication":[{
+ "startDate":"2021-12-31T19:00:00.000+01:00",
+ "endDate":"2022-01-30T23:55:00.000+01:00",
+ "publishedBy":{"name":"een","@type":"Organization"},
+ "publishedOn":{"url":"https://www.vrt.be/vrtnu/","name":"VRT NU","@type":"BroadcastService"},
+ "@id":"pbs-pub-3a7ec233-da95-4c1e-9b2b-cf5fdfebcbe8",
+ "@type":"BroadcastEvent"
+ }],
+ "video":{
+ "name":"Het journaal - Aflevering 365 (Seizoen 2021)",
+ "description":"Het journaal 19u van vrijdag 31 december 2021. Bekijk aflevering 365 van seizoen 2021 met VRT NU via de site of app.",
+ "thumbnailUrl":"//images.vrt.be/width1280/2021/12/31/80d5ed00-6a64-11ec-b07d-02b7b76bf47f.jpg",
+ "expires":"2022-01-30T23:55:00.000+01:00",
+ "hasPart":[
+ {"name":"Explosie Turnhout","startOffset":70,"@type":"Clip"},
+ {"name":"Jaarwisseling","startOffset":440,"@type":"Clip"},
+ {"name":"Natuurbranden Colorado","startOffset":1179,"@type":"Clip"},
+ {"name":"Klimaatverandering","startOffset":1263,"@type":"Clip"},
+ {"name":"Zacht weer","startOffset":1367,"@type":"Clip"},
+ {"name":"Financiële balans","startOffset":1383,"@type":"Clip"},
+ {"name":"Club Brugge","startOffset":1484,"@type":"Clip"},
+ {"name":"Mentale gezondheid bij topsporters","startOffset":1575,"@type":"Clip"},
+ {"name":"Olympische Winterspelen","startOffset":1728,"@type":"Clip"},
+ {"name":"Sober oudjaar in Nederland","startOffset":1873,"@type":"Clip"}
+ ],
+ "duration":"PT34M39.23S",
+ "uploadDate":"2021-12-31T19:00:00.000+01:00",
+ "@id":"vid-9457d0c6-b8ac-4aba-b5e1-15aa3a3295b5",
+ "@type":"VideoObject"
+ },
+ "genre":["Nieuws en actua"],
+ "episodeNumber":365,
+ "partOfSeries":{"name":"Het journaal","@id":"222831405527","@type":"TVSeries"},
+ "partOfSeason":{"name":"Seizoen 2021","@id":"961809365527","@type":"TVSeason"},
+ "@context":"https://schema.org","@id":"961685295527","@type":"TVEpisode"}</script>
+ ''',
+ {
+ 'chapters': [
+ {"title": "Explosie Turnhout", "start_time": 70, "end_time": 440},
+ {"title": "Jaarwisseling", "start_time": 440, "end_time": 1179},
+ {"title": "Natuurbranden Colorado", "start_time": 1179, "end_time": 1263},
+ {"title": "Klimaatverandering", "start_time": 1263, "end_time": 1367},
+ {"title": "Zacht weer", "start_time": 1367, "end_time": 1383},
+ {"title": "Financiële balans", "start_time": 1383, "end_time": 1484},
+ {"title": "Club Brugge", "start_time": 1484, "end_time": 1575},
+ {"title": "Mentale gezondheid bij topsporters", "start_time": 1575, "end_time": 1728},
+ {"title": "Olympische Winterspelen", "start_time": 1728, "end_time": 1873},
+ {"title": "Sober oudjaar in Nederland", "start_time": 1873, "end_time": 2079.23}
+ ],
+ 'title': 'Het journaal - Aflevering 365 (Seizoen 2021)'
+ }, {}
+ ),
+ (
+ # test multiple thumbnails in a list
+ r'''
+<script type="application/ld+json">
+{"@context":"https://schema.org",
+"@type":"VideoObject",
+"thumbnailUrl":["https://www.rainews.it/cropgd/640x360/dl/img/2021/12/30/1640886376927_GettyImages.jpg"]}
+</script>''',
+ {
+ 'thumbnails': [{'url': 'https://www.rainews.it/cropgd/640x360/dl/img/2021/12/30/1640886376927_GettyImages.jpg'}],
+ },
+ {},
+ ),
+ (
+ # test single thumbnail
+ r'''
+<script type="application/ld+json">
+{"@context":"https://schema.org",
+"@type":"VideoObject",
+"thumbnailUrl":"https://www.rainews.it/cropgd/640x360/dl/img/2021/12/30/1640886376927_GettyImages.jpg"}
+</script>''',
+ {
+ 'thumbnails': [{'url': 'https://www.rainews.it/cropgd/640x360/dl/img/2021/12/30/1640886376927_GettyImages.jpg'}],
+ },
+ {},
+ )
+ ]
+ for html, expected_dict, search_json_ld_kwargs in _TESTS:
+ expect_dict(
+ self,
+ self.ie._search_json_ld(html, None, **search_json_ld_kwargs),
+ expected_dict
+ )
+
+ def test_download_json(self):
+ uri = encode_data_uri(b'{"foo": "blah"}', 'application/json')
+ self.assertEqual(self.ie._download_json(uri, None), {'foo': 'blah'})
+ uri = encode_data_uri(b'callback({"foo": "blah"})', 'application/javascript')
+ self.assertEqual(self.ie._download_json(uri, None, transform_source=strip_jsonp), {'foo': 'blah'})
+ uri = encode_data_uri(b'{"foo": invalid}', 'application/json')
+ self.assertRaises(ExtractorError, self.ie._download_json, uri, None)
+ self.assertEqual(self.ie._download_json(uri, None, fatal=False), None)
+
+ def test_parse_html5_media_entries(self):
+ # inline video tag
+ expect_dict(
+ self,
+ self.ie._parse_html5_media_entries(
+ 'https://127.0.0.1/video.html',
+ r'<html><video src="/vid.mp4" /></html>', None)[0],
+ {
+ 'formats': [{
+ 'url': 'https://127.0.0.1/vid.mp4',
+ }],
+ })
+
+ # from https://www.r18.com/
+ # with kbps in label
+ expect_dict(
+ self,
+ self.ie._parse_html5_media_entries(
+ 'https://www.r18.com/',
+ r'''
+ <video id="samplevideo_amateur" class="js-samplevideo video-js vjs-default-skin vjs-big-play-centered" controls preload="auto" width="400" height="225" poster="//pics.r18.com/digital/amateur/mgmr105/mgmr105jp.jpg">
+ <source id="video_source" src="https://awscc3001.r18.com/litevideo/freepv/m/mgm/mgmr105/mgmr105_sm_w.mp4" type="video/mp4" res="240" label="300kbps">
+ <source id="video_source" src="https://awscc3001.r18.com/litevideo/freepv/m/mgm/mgmr105/mgmr105_dm_w.mp4" type="video/mp4" res="480" label="1000kbps">
+ <source id="video_source" src="https://awscc3001.r18.com/litevideo/freepv/m/mgm/mgmr105/mgmr105_dmb_w.mp4" type="video/mp4" res="740" label="1500kbps">
+ <p>Your browser does not support the video tag.</p>
+ </video>
+ ''', None)[0],
+ {
+ 'formats': [{
+ 'url': 'https://awscc3001.r18.com/litevideo/freepv/m/mgm/mgmr105/mgmr105_sm_w.mp4',
+ 'ext': 'mp4',
+ 'format_id': '300kbps',
+ 'height': 240,
+ 'tbr': 300,
+ }, {
+ 'url': 'https://awscc3001.r18.com/litevideo/freepv/m/mgm/mgmr105/mgmr105_dm_w.mp4',
+ 'ext': 'mp4',
+ 'format_id': '1000kbps',
+ 'height': 480,
+ 'tbr': 1000,
+ }, {
+ 'url': 'https://awscc3001.r18.com/litevideo/freepv/m/mgm/mgmr105/mgmr105_dmb_w.mp4',
+ 'ext': 'mp4',
+ 'format_id': '1500kbps',
+ 'height': 740,
+ 'tbr': 1500,
+ }],
+ 'thumbnail': '//pics.r18.com/digital/amateur/mgmr105/mgmr105jp.jpg'
+ })
+
+ # from https://www.csfd.cz/
+ # with width and height
+ expect_dict(
+ self,
+ self.ie._parse_html5_media_entries(
+ 'https://www.csfd.cz/',
+ r'''
+ <video width="770" height="328" preload="none" controls poster="https://img.csfd.cz/files/images/film/video/preview/163/344/163344118_748d20.png?h360" >
+ <source src="https://video.csfd.cz/files/videos/157/750/157750813/163327358_eac647.mp4" type="video/mp4" width="640" height="360">
+ <source src="https://video.csfd.cz/files/videos/157/750/157750813/163327360_3d2646.mp4" type="video/mp4" width="1280" height="720">
+ <source src="https://video.csfd.cz/files/videos/157/750/157750813/163327356_91f258.mp4" type="video/mp4" width="1920" height="1080">
+ <source src="https://video.csfd.cz/files/videos/157/750/157750813/163327359_962b4a.webm" type="video/webm" width="640" height="360">
+ <source src="https://video.csfd.cz/files/videos/157/750/157750813/163327361_6feee0.webm" type="video/webm" width="1280" height="720">
+ <source src="https://video.csfd.cz/files/videos/157/750/157750813/163327357_8ab472.webm" type="video/webm" width="1920" height="1080">
+ <track src="https://video.csfd.cz/files/subtitles/163/344/163344115_4c388b.srt" type="text/x-srt" kind="subtitles" srclang="cs" label="cs">
+ </video>
+ ''', None)[0],
+ {
+ 'formats': [{
+ 'url': 'https://video.csfd.cz/files/videos/157/750/157750813/163327358_eac647.mp4',
+ 'ext': 'mp4',
+ 'width': 640,
+ 'height': 360,
+ }, {
+ 'url': 'https://video.csfd.cz/files/videos/157/750/157750813/163327360_3d2646.mp4',
+ 'ext': 'mp4',
+ 'width': 1280,
+ 'height': 720,
+ }, {
+ 'url': 'https://video.csfd.cz/files/videos/157/750/157750813/163327356_91f258.mp4',
+ 'ext': 'mp4',
+ 'width': 1920,
+ 'height': 1080,
+ }, {
+ 'url': 'https://video.csfd.cz/files/videos/157/750/157750813/163327359_962b4a.webm',
+ 'ext': 'webm',
+ 'width': 640,
+ 'height': 360,
+ }, {
+ 'url': 'https://video.csfd.cz/files/videos/157/750/157750813/163327361_6feee0.webm',
+ 'ext': 'webm',
+ 'width': 1280,
+ 'height': 720,
+ }, {
+ 'url': 'https://video.csfd.cz/files/videos/157/750/157750813/163327357_8ab472.webm',
+ 'ext': 'webm',
+ 'width': 1920,
+ 'height': 1080,
+ }],
+ 'subtitles': {
+ 'cs': [{'url': 'https://video.csfd.cz/files/subtitles/163/344/163344115_4c388b.srt'}]
+ },
+ 'thumbnail': 'https://img.csfd.cz/files/images/film/video/preview/163/344/163344118_748d20.png?h360'
+ })
+
+ # from https://tamasha.com/v/Kkdjw
+ # with height in label
+ expect_dict(
+ self,
+ self.ie._parse_html5_media_entries(
+ 'https://tamasha.com/v/Kkdjw',
+ r'''
+ <video crossorigin="anonymous">
+ <source src="https://s-v2.tamasha.com/statics/videos_file/19/8f/Kkdjw_198feff8577d0057536e905cce1fb61438dd64e0_n_240.mp4" type="video/mp4" label="AUTO" res="0"/>
+ <source src="https://s-v2.tamasha.com/statics/videos_file/19/8f/Kkdjw_198feff8577d0057536e905cce1fb61438dd64e0_n_240.mp4" type="video/mp4"
+ label="240p" res="240"/>
+ <source src="https://s-v2.tamasha.com/statics/videos_file/20/00/Kkdjw_200041c66f657fc967db464d156eafbc1ed9fe6f_n_144.mp4" type="video/mp4"
+ label="144p" res="144"/>
+ </video>
+ ''', None)[0],
+ {
+ 'formats': [{
+ 'url': 'https://s-v2.tamasha.com/statics/videos_file/19/8f/Kkdjw_198feff8577d0057536e905cce1fb61438dd64e0_n_240.mp4',
+ }, {
+ 'url': 'https://s-v2.tamasha.com/statics/videos_file/19/8f/Kkdjw_198feff8577d0057536e905cce1fb61438dd64e0_n_240.mp4',
+ 'ext': 'mp4',
+ 'format_id': '240p',
+ 'height': 240,
+ }, {
+ 'url': 'https://s-v2.tamasha.com/statics/videos_file/20/00/Kkdjw_200041c66f657fc967db464d156eafbc1ed9fe6f_n_144.mp4',
+ 'ext': 'mp4',
+ 'format_id': '144p',
+ 'height': 144,
+ }]
+ })
+
+ # from https://www.directvnow.com
+ # with data-src
+ expect_dict(
+ self,
+ self.ie._parse_html5_media_entries(
+ 'https://www.directvnow.com',
+ r'''
+ <video id="vid1" class="header--video-masked active" muted playsinline>
+ <source data-src="https://cdn.directv.com/content/dam/dtv/prod/website_directvnow-international/videos/DTVN_hdr_HBO_v3.mp4" type="video/mp4" />
+ </video>
+ ''', None)[0],
+ {
+ 'formats': [{
+ 'ext': 'mp4',
+ 'url': 'https://cdn.directv.com/content/dam/dtv/prod/website_directvnow-international/videos/DTVN_hdr_HBO_v3.mp4',
+ }]
+ })
+
+ # from https://www.directvnow.com
+ # with data-src
+ expect_dict(
+ self,
+ self.ie._parse_html5_media_entries(
+ 'https://www.directvnow.com',
+ r'''
+ <video id="vid1" class="header--video-masked active" muted playsinline>
+ <source data-src="https://cdn.directv.com/content/dam/dtv/prod/website_directvnow-international/videos/DTVN_hdr_HBO_v3.mp4" type="video/mp4" />
+ </video>
+ ''', None)[0],
+ {
+ 'formats': [{
+ 'url': 'https://cdn.directv.com/content/dam/dtv/prod/website_directvnow-international/videos/DTVN_hdr_HBO_v3.mp4',
+ 'ext': 'mp4',
+ }]
+ })
+
+ # from https://www.klarna.com/uk/
+ # with data-video-src
+ expect_dict(
+ self,
+ self.ie._parse_html5_media_entries(
+ 'https://www.directvnow.com',
+ r'''
+ <video loop autoplay muted class="responsive-video block-kl__video video-on-medium">
+ <source src="" data-video-desktop data-video-src="https://www.klarna.com/uk/wp-content/uploads/sites/11/2019/01/KL062_Smooth3_0_DogWalking_5s_920x080_.mp4" type="video/mp4" />
+ </video>
+ ''', None)[0],
+ {
+ 'formats': [{
+ 'url': 'https://www.klarna.com/uk/wp-content/uploads/sites/11/2019/01/KL062_Smooth3_0_DogWalking_5s_920x080_.mp4',
+ 'ext': 'mp4',
+ }],
+ })
+
+ # from https://0000.studio/
+ # with type attribute but without extension in URL
+ expect_dict(
+ self,
+ self.ie._parse_html5_media_entries(
+ 'https://0000.studio',
+ r'''
+ <video src="https://d1ggyt9m8pwf3g.cloudfront.net/protected/ap-northeast-1:1864af40-28d5-492b-b739-b32314b1a527/archive/clip/838db6a7-8973-4cd6-840d-8517e4093c92"
+ controls="controls" type="video/mp4" preload="metadata" autoplay="autoplay" playsinline class="object-contain">
+ </video>
+ ''', None)[0],
+ {
+ 'formats': [{
+ 'url': 'https://d1ggyt9m8pwf3g.cloudfront.net/protected/ap-northeast-1:1864af40-28d5-492b-b739-b32314b1a527/archive/clip/838db6a7-8973-4cd6-840d-8517e4093c92',
+ 'ext': 'mp4',
+ }],
+ })
+
+ def test_extract_jwplayer_data_realworld(self):
+ # from http://www.suffolk.edu/sjc/
+ expect_dict(
+ self,
+ self.ie._extract_jwplayer_data(r'''
+ <script type='text/javascript'>
+ jwplayer('my-video').setup({
+ file: 'rtmp://192.138.214.154/live/sjclive',
+ fallback: 'true',
+ width: '95%',
+ aspectratio: '16:9',
+ primary: 'flash',
+ mediaid:'XEgvuql4'
+ });
+ </script>
+ ''', None, require_title=False),
+ {
+ 'id': 'XEgvuql4',
+ 'formats': [{
+ 'url': 'rtmp://192.138.214.154/live/sjclive',
+ 'ext': 'flv'
+ }]
+ })
+
+ # from https://www.pornoxo.com/videos/7564/striptease-from-sexy-secretary/
+ expect_dict(
+ self,
+ self.ie._extract_jwplayer_data(r'''
+<script type="text/javascript">
+ jwplayer("mediaplayer").setup({
+ 'videoid': "7564",
+ 'width': "100%",
+ 'aspectratio': "16:9",
+ 'stretching': "exactfit",
+ 'autostart': 'false',
+ 'flashplayer': "https://t04.vipstreamservice.com/jwplayer/v5.10/player.swf",
+ 'file': "https://cdn.pornoxo.com/key=MF+oEbaxqTKb50P-w9G3nA,end=1489689259,ip=104.199.146.27/ip=104.199.146.27/speed=6573765/buffer=3.0/2009-12/4b2157147afe5efa93ce1978e0265289c193874e02597.flv",
+ 'image': "https://t03.vipstreamservice.com/thumbs/pxo-full/2009-12/14/a4b2157147afe5efa93ce1978e0265289c193874e02597.flv-full-13.jpg",
+ 'filefallback': "https://cdn.pornoxo.com/key=9ZPsTR5EvPLQrBaak2MUGA,end=1489689259,ip=104.199.146.27/ip=104.199.146.27/speed=6573765/buffer=3.0/2009-12/m_4b2157147afe5efa93ce1978e0265289c193874e02597.mp4",
+ 'logo.hide': true,
+ 'skin': "https://t04.vipstreamservice.com/jwplayer/skin/modieus-blk.zip",
+ 'plugins': "https://t04.vipstreamservice.com/jwplayer/dock/dockableskinnableplugin.swf",
+ 'dockableskinnableplugin.piclink': "/index.php?key=ajax-videothumbsn&vid=7564&data=2009-12--14--4b2157147afe5efa93ce1978e0265289c193874e02597.flv--17370",
+ 'controlbar': 'bottom',
+ 'modes': [
+ {type: 'flash', src: 'https://t04.vipstreamservice.com/jwplayer/v5.10/player.swf'}
+ ],
+ 'provider': 'http'
+ });
+ //noinspection JSAnnotator
+ invideo.setup({
+ adsUrl: "/banner-iframe/?zoneId=32",
+ adsUrl2: "",
+ autostart: false
+ });
+</script>
+ ''', 'dummy', require_title=False),
+ {
+ 'thumbnail': 'https://t03.vipstreamservice.com/thumbs/pxo-full/2009-12/14/a4b2157147afe5efa93ce1978e0265289c193874e02597.flv-full-13.jpg',
+ 'formats': [{
+ 'url': 'https://cdn.pornoxo.com/key=MF+oEbaxqTKb50P-w9G3nA,end=1489689259,ip=104.199.146.27/ip=104.199.146.27/speed=6573765/buffer=3.0/2009-12/4b2157147afe5efa93ce1978e0265289c193874e02597.flv',
+ 'ext': 'flv'
+ }]
+ })
+
+ # from http://www.indiedb.com/games/king-machine/videos
+ expect_dict(
+ self,
+ self.ie._extract_jwplayer_data(r'''
+<script>
+jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/\/www.indiedb.com\/","displaytitle":false,"autostart":false,"repeat":false,"title":"king machine trailer 1","sharing":{"link":"http:\/\/www.indiedb.com\/games\/king-machine\/videos\/king-machine-trailer-1","code":"<iframe width=\"560\" height=\"315\" src=\"http:\/\/www.indiedb.com\/media\/iframe\/1522983\" frameborder=\"0\" allowfullscreen><\/iframe><br><a href=\"http:\/\/www.indiedb.com\/games\/king-machine\/videos\/king-machine-trailer-1\">king machine trailer 1 - Indie DB<\/a>"},"related":{"file":"http:\/\/rss.indiedb.com\/media\/recommended\/1522983\/feed\/rss.xml","dimensions":"160x120","onclick":"link"},"sources":[{"file":"http:\/\/cdn.dbolical.com\/cache\/videos\/games\/1\/50\/49678\/encode_mp4\/king-machine-trailer.mp4","label":"360p SD","default":"true"},{"file":"http:\/\/cdn.dbolical.com\/cache\/videos\/games\/1\/50\/49678\/encode720p_mp4\/king-machine-trailer.mp4","label":"720p HD"}],"image":"http:\/\/media.indiedb.com\/cache\/images\/games\/1\/50\/49678\/thumb_620x2000\/king-machine-trailer.mp4.jpg","advertising":{"client":"vast","tag":"http:\/\/ads.intergi.com\/adrawdata\/3.0\/5205\/4251742\/0\/1013\/ADTECH;cors=yes;width=560;height=315;referring_url=http:\/\/www.indiedb.com\/games\/king-machine\/videos\/king-machine-trailer-1;content_url=http:\/\/www.indiedb.com\/games\/king-machine\/videos\/king-machine-trailer-1;media_id=1522983;title=king+machine+trailer+1;device=__DEVICE__;model=__MODEL__;os=Windows+OS;osversion=__OSVERSION__;ua=__UA__;ip=109.171.17.81;uniqueid=1522983;tags=__TAGS__;number=58cac25928151;time=1489683033"},"width":620,"height":349}).once("play", function(event) {
+ videoAnalytics("play");
+}).once("complete", function(event) {
+ videoAnalytics("completed");
+});
+</script>
+ ''', 'dummy'),
+ {
+ 'title': 'king machine trailer 1',
+ 'thumbnail': 'http://media.indiedb.com/cache/images/games/1/50/49678/thumb_620x2000/king-machine-trailer.mp4.jpg',
+ 'formats': [{
+ 'url': 'http://cdn.dbolical.com/cache/videos/games/1/50/49678/encode_mp4/king-machine-trailer.mp4',
+ 'height': 360,
+ 'ext': 'mp4'
+ }, {
+ 'url': 'http://cdn.dbolical.com/cache/videos/games/1/50/49678/encode720p_mp4/king-machine-trailer.mp4',
+ 'height': 720,
+ 'ext': 'mp4'
+ }]
+ })
+
+ def test_parse_m3u8_formats(self):
+ _TEST_CASES = [
+ (
+ # https://github.com/ytdl-org/youtube-dl/issues/11995
+ # http://teamcoco.com/video/clueless-gamer-super-bowl-for-honor
+ 'img_bipbop_adv_example_fmp4',
+ 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ [{
+ 'format_id': 'aud1-English',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a1/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'language': 'en',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'audio_ext': 'mp4',
+ }, {
+ 'format_id': 'aud2-English',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a2/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'language': 'en',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'audio_ext': 'mp4',
+ }, {
+ 'format_id': 'aud3-English',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a3/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'language': 'en',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'audio_ext': 'mp4',
+ }, {
+ 'format_id': '530',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v2/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 480,
+ 'height': 270,
+ 'vcodec': 'avc1.640015',
+ }, {
+ 'format_id': '561',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v2/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 480,
+ 'height': 270,
+ 'vcodec': 'avc1.640015',
+ }, {
+ 'format_id': '753',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v2/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 480,
+ 'height': 270,
+ 'vcodec': 'avc1.640015',
+ }, {
+ 'format_id': '895',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v3/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 640,
+ 'height': 360,
+ 'vcodec': 'avc1.64001e',
+ }, {
+ 'format_id': '926',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v3/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 640,
+ 'height': 360,
+ 'vcodec': 'avc1.64001e',
+ }, {
+ 'format_id': '1118',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v3/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 640,
+ 'height': 360,
+ 'vcodec': 'avc1.64001e',
+ }, {
+ 'format_id': '1265',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v4/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 768,
+ 'height': 432,
+ 'vcodec': 'avc1.64001e',
+ }, {
+ 'format_id': '1295',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v4/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 768,
+ 'height': 432,
+ 'vcodec': 'avc1.64001e',
+ }, {
+ 'format_id': '1487',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v4/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 768,
+ 'height': 432,
+ 'vcodec': 'avc1.64001e',
+ }, {
+ 'format_id': '2168',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v5/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 960,
+ 'height': 540,
+ 'vcodec': 'avc1.640020',
+ }, {
+ 'format_id': '2198',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v5/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 960,
+ 'height': 540,
+ 'vcodec': 'avc1.640020',
+ }, {
+ 'format_id': '2390',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v5/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 960,
+ 'height': 540,
+ 'vcodec': 'avc1.640020',
+ }, {
+ 'format_id': '3168',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v6/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1280,
+ 'height': 720,
+ 'vcodec': 'avc1.640020',
+ }, {
+ 'format_id': '3199',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v6/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1280,
+ 'height': 720,
+ 'vcodec': 'avc1.640020',
+ }, {
+ 'format_id': '3391',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v6/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1280,
+ 'height': 720,
+ 'vcodec': 'avc1.640020',
+ }, {
+ 'format_id': '4670',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v7/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1920,
+ 'height': 1080,
+ 'vcodec': 'avc1.64002a',
+ }, {
+ 'format_id': '4701',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v7/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1920,
+ 'height': 1080,
+ 'vcodec': 'avc1.64002a',
+ }, {
+ 'format_id': '4893',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v7/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1920,
+ 'height': 1080,
+ 'vcodec': 'avc1.64002a',
+ }, {
+ 'format_id': '6170',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v8/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1920,
+ 'height': 1080,
+ 'vcodec': 'avc1.64002a',
+ }, {
+ 'format_id': '6200',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v8/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1920,
+ 'height': 1080,
+ 'vcodec': 'avc1.64002a',
+ }, {
+ 'format_id': '6392',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v8/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1920,
+ 'height': 1080,
+ 'vcodec': 'avc1.64002a',
+ }, {
+ 'format_id': '7968',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v9/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1920,
+ 'height': 1080,
+ 'vcodec': 'avc1.64002a',
+ }, {
+ 'format_id': '7998',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v9/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1920,
+ 'height': 1080,
+ 'vcodec': 'avc1.64002a',
+ }, {
+ 'format_id': '8190',
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v9/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'width': 1920,
+ 'height': 1080,
+ 'vcodec': 'avc1.64002a',
+ }],
+ {}
+ ),
+ (
+ 'bipbop_16x9',
+ 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8',
+ [{
+ 'format_id': 'bipbop_audio-BipBop Audio 2',
+ 'format_index': None,
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/alternate_audio_aac/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8',
+ 'language': 'eng',
+ 'ext': 'mp4',
+ 'protocol': 'm3u8_native',
+ 'preference': None,
+ 'quality': None,
+ 'vcodec': 'none',
+ 'audio_ext': 'mp4',
+ 'video_ext': 'none',
+ }, {
+ 'format_id': '41',
+ 'format_index': None,
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/gear0/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8',
+ 'tbr': 41.457,
+ 'ext': 'mp4',
+ 'fps': None,
+ 'protocol': 'm3u8_native',
+ 'preference': None,
+ 'quality': None,
+ 'vcodec': 'none',
+ 'acodec': 'mp4a.40.2',
+ 'audio_ext': 'mp4',
+ 'video_ext': 'none',
+ 'abr': 41.457,
+ }, {
+ 'format_id': '263',
+ 'format_index': None,
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/gear1/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8',
+ 'tbr': 263.851,
+ 'ext': 'mp4',
+ 'fps': None,
+ 'protocol': 'm3u8_native',
+ 'preference': None,
+ 'quality': None,
+ 'width': 416,
+ 'height': 234,
+ 'vcodec': 'avc1.4d400d',
+ 'acodec': 'mp4a.40.2',
+ 'video_ext': 'mp4',
+ 'audio_ext': 'none',
+ }, {
+ 'format_id': '577',
+ 'format_index': None,
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/gear2/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8',
+ 'tbr': 577.61,
+ 'ext': 'mp4',
+ 'fps': None,
+ 'protocol': 'm3u8_native',
+ 'preference': None,
+ 'quality': None,
+ 'width': 640,
+ 'height': 360,
+ 'vcodec': 'avc1.4d401e',
+ 'acodec': 'mp4a.40.2',
+ 'video_ext': 'mp4',
+ 'audio_ext': 'none',
+ }, {
+ 'format_id': '915',
+ 'format_index': None,
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/gear3/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8',
+ 'tbr': 915.905,
+ 'ext': 'mp4',
+ 'fps': None,
+ 'protocol': 'm3u8_native',
+ 'preference': None,
+ 'quality': None,
+ 'width': 960,
+ 'height': 540,
+ 'vcodec': 'avc1.4d401f',
+ 'acodec': 'mp4a.40.2',
+ 'video_ext': 'mp4',
+ 'audio_ext': 'none',
+ }, {
+ 'format_id': '1030',
+ 'format_index': None,
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/gear4/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8',
+ 'tbr': 1030.138,
+ 'ext': 'mp4',
+ 'fps': None,
+ 'protocol': 'm3u8_native',
+ 'preference': None,
+ 'quality': None,
+ 'width': 1280,
+ 'height': 720,
+ 'vcodec': 'avc1.4d401f',
+ 'acodec': 'mp4a.40.2',
+ 'video_ext': 'mp4',
+ 'audio_ext': 'none',
+ }, {
+ 'format_id': '1924',
+ 'format_index': None,
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/gear5/prog_index.m3u8',
+ 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8',
+ 'tbr': 1924.009,
+ 'ext': 'mp4',
+ 'fps': None,
+ 'protocol': 'm3u8_native',
+ 'preference': None,
+ 'quality': None,
+ 'width': 1920,
+ 'height': 1080,
+ 'vcodec': 'avc1.4d401f',
+ 'acodec': 'mp4a.40.2',
+ 'video_ext': 'mp4',
+ 'audio_ext': 'none',
+ }],
+ {
+ 'en': [{
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/eng/prog_index.m3u8',
+ 'ext': 'vtt',
+ 'protocol': 'm3u8_native'
+ }, {
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/eng_forced/prog_index.m3u8',
+ 'ext': 'vtt',
+ 'protocol': 'm3u8_native'
+ }],
+ 'fr': [{
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/fra/prog_index.m3u8',
+ 'ext': 'vtt',
+ 'protocol': 'm3u8_native'
+ }, {
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/fra_forced/prog_index.m3u8',
+ 'ext': 'vtt',
+ 'protocol': 'm3u8_native'
+ }],
+ 'es': [{
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/spa/prog_index.m3u8',
+ 'ext': 'vtt',
+ 'protocol': 'm3u8_native'
+ }, {
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/spa_forced/prog_index.m3u8',
+ 'ext': 'vtt',
+ 'protocol': 'm3u8_native'
+ }],
+ 'ja': [{
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/jpn/prog_index.m3u8',
+ 'ext': 'vtt',
+ 'protocol': 'm3u8_native'
+ }, {
+ 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/jpn_forced/prog_index.m3u8',
+ 'ext': 'vtt',
+ 'protocol': 'm3u8_native'
+ }],
+ }
+ ),
+ ]
+
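+ # Each case is (fixture name, manifest URL, expected formats, expected subtitles);
+ # the fixture at test/testdata/m3u8/<name>.m3u8 is parsed below and the results
+ # are compared against the expectations above.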
+ for m3u8_file, m3u8_url, expected_formats, expected_subs in _TEST_CASES:
+ with open('./test/testdata/m3u8/%s.m3u8' % m3u8_file, encoding='utf-8') as f:
+ formats, subs = self.ie._parse_m3u8_formats_and_subtitles(
+ f.read(), m3u8_url, ext='mp4')
+ self.ie._sort_formats(formats)
+ expect_value(self, formats, expected_formats, None)
+ expect_value(self, subs, expected_subs, None)
+
+ def test_parse_mpd_formats(self):
+ _TEST_CASES = [
+ (
+ # https://github.com/ytdl-org/youtube-dl/issues/13919
+ # Also tests duplicate representation ids, see
+ # https://github.com/ytdl-org/youtube-dl/issues/15111
+ 'float_duration',
+ 'http://unknown/manifest.mpd', # mpd_url
+ None, # mpd_base_url
+ [{
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'm4a',
+ 'format_id': '318597',
+ 'format_note': 'DASH audio',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'mp4a.40.2',
+ 'vcodec': 'none',
+ 'tbr': 61.587,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': '318597',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'none',
+ 'vcodec': 'avc1.42001f',
+ 'tbr': 318.597,
+ 'width': 340,
+ 'height': 192,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': '638590',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'none',
+ 'vcodec': 'avc1.42001f',
+ 'tbr': 638.59,
+ 'width': 512,
+ 'height': 288,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': '1022565',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'none',
+ 'vcodec': 'avc1.4d001f',
+ 'tbr': 1022.565,
+ 'width': 688,
+ 'height': 384,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': '2046506',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'none',
+ 'vcodec': 'avc1.4d001f',
+ 'tbr': 2046.506,
+ 'width': 1024,
+ 'height': 576,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': '3998017',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'none',
+ 'vcodec': 'avc1.640029',
+ 'tbr': 3998.017,
+ 'width': 1280,
+ 'height': 720,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': '5997485',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'none',
+ 'vcodec': 'avc1.640032',
+ 'tbr': 5997.485,
+ 'width': 1920,
+ 'height': 1080,
+ }],
+ {},
+ ), (
+ # https://github.com/ytdl-org/youtube-dl/pull/14844
+ 'urls_only',
+ 'http://unknown/manifest.mpd', # mpd_url
+ None, # mpd_base_url
+ [{
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': 'h264_aac_144p_m4s',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'mp4a.40.2',
+ 'vcodec': 'avc3.42c01e',
+ 'tbr': 200,
+ 'width': 256,
+ 'height': 144,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': 'h264_aac_240p_m4s',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'mp4a.40.2',
+ 'vcodec': 'avc3.42c01e',
+ 'tbr': 400,
+ 'width': 424,
+ 'height': 240,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': 'h264_aac_360p_m4s',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'mp4a.40.2',
+ 'vcodec': 'avc3.42c01e',
+ 'tbr': 800,
+ 'width': 640,
+ 'height': 360,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': 'h264_aac_480p_m4s',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'mp4a.40.2',
+ 'vcodec': 'avc3.42c01e',
+ 'tbr': 1200,
+ 'width': 856,
+ 'height': 480,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': 'h264_aac_576p_m4s',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'mp4a.40.2',
+ 'vcodec': 'avc3.42c01e',
+ 'tbr': 1600,
+ 'width': 1024,
+ 'height': 576,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': 'h264_aac_720p_m4s',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'mp4a.40.2',
+ 'vcodec': 'avc3.42c01e',
+ 'tbr': 2400,
+ 'width': 1280,
+ 'height': 720,
+ }, {
+ 'manifest_url': 'http://unknown/manifest.mpd',
+ 'ext': 'mp4',
+ 'format_id': 'h264_aac_1080p_m4s',
+ 'format_note': 'DASH video',
+ 'protocol': 'http_dash_segments',
+ 'acodec': 'mp4a.40.2',
+ 'vcodec': 'avc3.42c01e',
+ 'tbr': 4400,
+ 'width': 1920,
+ 'height': 1080,
+ }],
+ {},
+ ), (
+ # https://github.com/ytdl-org/youtube-dl/issues/20346
+ # Media considered unfragmented even though it contains
+ # Initialization tag
+ 'unfragmented',
+ 'https://v.redd.it/hw1x7rcg7zl21/DASHPlaylist.mpd', # mpd_url
+ 'https://v.redd.it/hw1x7rcg7zl21', # mpd_base_url
+ [{
+ 'url': 'https://v.redd.it/hw1x7rcg7zl21/audio',
+ 'manifest_url': 'https://v.redd.it/hw1x7rcg7zl21/DASHPlaylist.mpd',
+ 'ext': 'm4a',
+ 'format_id': 'AUDIO-1',
+ 'format_note': 'DASH audio',
+ 'container': 'm4a_dash',
+ 'acodec': 'mp4a.40.2',
+ 'vcodec': 'none',
+ 'tbr': 129.87,
+ 'asr': 48000,
+
+ }, {
+ 'url': 'https://v.redd.it/hw1x7rcg7zl21/DASH_240',
+ 'manifest_url': 'https://v.redd.it/hw1x7rcg7zl21/DASHPlaylist.mpd',
+ 'ext': 'mp4',
+ 'format_id': 'VIDEO-2',
+ 'format_note': 'DASH video',
+ 'container': 'mp4_dash',
+ 'acodec': 'none',
+ 'vcodec': 'avc1.4d401e',
+ 'tbr': 608.0,
+ 'width': 240,
+ 'height': 240,
+ 'fps': 30,
+ }, {
+ 'url': 'https://v.redd.it/hw1x7rcg7zl21/DASH_360',
+ 'manifest_url': 'https://v.redd.it/hw1x7rcg7zl21/DASHPlaylist.mpd',
+ 'ext': 'mp4',
+ 'format_id': 'VIDEO-1',
+ 'format_note': 'DASH video',
+ 'container': 'mp4_dash',
+ 'acodec': 'none',
+ 'vcodec': 'avc1.4d401e',
+ 'tbr': 804.261,
+ 'width': 360,
+ 'height': 360,
+ 'fps': 30,
+ }],
+ {},
+ ), (
+ 'subtitles',
+ 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/',
+ [{
+ 'format_id': 'audio=128001',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'ext': 'm4a',
+ 'tbr': 128.001,
+ 'asr': 48000,
+ 'format_note': 'DASH audio',
+ 'container': 'm4a_dash',
+ 'vcodec': 'none',
+ 'acodec': 'mp4a.40.2',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'fragment_base_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/dash/',
+ 'protocol': 'http_dash_segments',
+ 'audio_ext': 'm4a',
+ 'video_ext': 'none',
+ 'abr': 128.001,
+ }, {
+ 'format_id': 'video=100000',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'ext': 'mp4',
+ 'width': 336,
+ 'height': 144,
+ 'tbr': 100,
+ 'format_note': 'DASH video',
+ 'container': 'mp4_dash',
+ 'vcodec': 'avc1.4D401F',
+ 'acodec': 'none',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'fragment_base_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/dash/',
+ 'protocol': 'http_dash_segments',
+ 'video_ext': 'mp4',
+ 'audio_ext': 'none',
+ 'vbr': 100,
+ }, {
+ 'format_id': 'video=326000',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'ext': 'mp4',
+ 'width': 562,
+ 'height': 240,
+ 'tbr': 326,
+ 'format_note': 'DASH video',
+ 'container': 'mp4_dash',
+ 'vcodec': 'avc1.4D401F',
+ 'acodec': 'none',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'fragment_base_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/dash/',
+ 'protocol': 'http_dash_segments',
+ 'video_ext': 'mp4',
+ 'audio_ext': 'none',
+ 'vbr': 326,
+ }, {
+ 'format_id': 'video=698000',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'ext': 'mp4',
+ 'width': 844,
+ 'height': 360,
+ 'tbr': 698,
+ 'format_note': 'DASH video',
+ 'container': 'mp4_dash',
+ 'vcodec': 'avc1.4D401F',
+ 'acodec': 'none',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'fragment_base_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/dash/',
+ 'protocol': 'http_dash_segments',
+ 'video_ext': 'mp4',
+ 'audio_ext': 'none',
+ 'vbr': 698,
+ }, {
+ 'format_id': 'video=1493000',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'ext': 'mp4',
+ 'width': 1126,
+ 'height': 480,
+ 'tbr': 1493,
+ 'format_note': 'DASH video',
+ 'container': 'mp4_dash',
+ 'vcodec': 'avc1.4D401F',
+ 'acodec': 'none',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'fragment_base_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/dash/',
+ 'protocol': 'http_dash_segments',
+ 'video_ext': 'mp4',
+ 'audio_ext': 'none',
+ 'vbr': 1493,
+ }, {
+ 'format_id': 'video=4482000',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'ext': 'mp4',
+ 'width': 1688,
+ 'height': 720,
+ 'tbr': 4482,
+ 'format_note': 'DASH video',
+ 'container': 'mp4_dash',
+ 'vcodec': 'avc1.4D401F',
+ 'acodec': 'none',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'fragment_base_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/dash/',
+ 'protocol': 'http_dash_segments',
+ 'video_ext': 'mp4',
+ 'audio_ext': 'none',
+ 'vbr': 4482,
+ }],
+ {
+ 'en': [
+ {
+ 'ext': 'mp4',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
+ 'fragment_base_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/dash/',
+ 'protocol': 'http_dash_segments',
+ }
+ ]
+ },
+ )
+ ]
+
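+ # mpd_base_url is passed alongside mpd_url so that relative media URLs in a
+ # manifest (as in the 'unfragmented' case above) can be resolved.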
+ for mpd_file, mpd_url, mpd_base_url, expected_formats, expected_subtitles in _TEST_CASES:
+ with open('./test/testdata/mpd/%s.mpd' % mpd_file, encoding='utf-8') as f:
+ formats, subtitles = self.ie._parse_mpd_formats_and_subtitles(
+ compat_etree_fromstring(f.read().encode()),
+ mpd_base_url=mpd_base_url, mpd_url=mpd_url)
+ self.ie._sort_formats(formats)
+ expect_value(self, formats, expected_formats, None)
+ expect_value(self, subtitles, expected_subtitles, None)
+
+ def test_parse_ism_formats(self):
+ _TEST_CASES = [
+ (
+ 'sintel',
+ 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ [{
+ 'format_id': 'audio-128',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'ext': 'isma',
+ 'tbr': 128,
+ 'asr': 48000,
+ 'vcodec': 'none',
+ 'acodec': 'AACL',
+ 'protocol': 'ism',
+ 'audio_channels': 2,
+ '_download_params': {
+ 'stream_type': 'audio',
+ 'duration': 8880746666,
+ 'timescale': 10000000,
+ 'width': 0,
+ 'height': 0,
+ 'fourcc': 'AACL',
+ 'codec_private_data': '1190',
+ 'sampling_rate': 48000,
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video-100',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 336,
+ 'height': 144,
+ 'tbr': 100,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 8880746666,
+ 'timescale': 10000000,
+ 'width': 336,
+ 'height': 144,
+ 'fourcc': 'AVC1',
+ 'codec_private_data': '00000001674D401FDA0544EFFC2D002CBC40000003004000000C03C60CA80000000168EF32C8',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video-326',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 562,
+ 'height': 240,
+ 'tbr': 326,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 8880746666,
+ 'timescale': 10000000,
+ 'width': 562,
+ 'height': 240,
+ 'fourcc': 'AVC1',
+ 'codec_private_data': '00000001674D401FDA0241FE23FFC3BC83BA44000003000400000300C03C60CA800000000168EF32C8',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video-698',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 844,
+ 'height': 360,
+ 'tbr': 698,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 8880746666,
+ 'timescale': 10000000,
+ 'width': 844,
+ 'height': 360,
+ 'fourcc': 'AVC1',
+ 'codec_private_data': '00000001674D401FDA0350BFB97FF06AF06AD1000003000100000300300F1832A00000000168EF32C8',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video-1493',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 1126,
+ 'height': 480,
+ 'tbr': 1493,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 8880746666,
+ 'timescale': 10000000,
+ 'width': 1126,
+ 'height': 480,
+ 'fourcc': 'AVC1',
+ 'codec_private_data': '00000001674D401FDA011C3DE6FFF0D890D871000003000100000300300F1832A00000000168EF32C8',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video-4482',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 1688,
+ 'height': 720,
+ 'tbr': 4482,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 8880746666,
+ 'timescale': 10000000,
+ 'width': 1688,
+ 'height': 720,
+ 'fourcc': 'AVC1',
+ 'codec_private_data': '00000001674D401FDA01A816F97FFC1ABC1AB440000003004000000C03C60CA80000000168EF32C8',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }],
+ {
+ 'eng': [
+ {
+ 'ext': 'ismt',
+ 'protocol': 'ism',
+ 'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ 'manifest_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
+ '_download_params': {
+ 'stream_type': 'text',
+ 'duration': 8880746666,
+ 'timescale': 10000000,
+ 'fourcc': 'TTML',
+ 'codec_private_data': ''
+ }
+ }
+ ]
+ },
+ ),
+ (
+ 'ec-3_test',
+ 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ [{
+ 'format_id': 'audio_deu-127',
+ 'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'ext': 'isma',
+ 'tbr': 127,
+ 'asr': 48000,
+ 'vcodec': 'none',
+ 'acodec': 'AACL',
+ 'protocol': 'ism',
+ 'language': 'deu',
+ 'audio_channels': 2,
+ '_download_params': {
+ 'stream_type': 'audio',
+ 'duration': 370000000,
+ 'timescale': 10000000,
+ 'width': 0,
+ 'height': 0,
+ 'fourcc': 'AACL',
+ 'language': 'deu',
+ 'codec_private_data': '1190',
+ 'sampling_rate': 48000,
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'audio_deu_1-224',
+ 'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'ext': 'isma',
+ 'tbr': 224,
+ 'asr': 48000,
+ 'vcodec': 'none',
+ 'acodec': 'EC-3',
+ 'protocol': 'ism',
+ 'language': 'deu',
+ 'audio_channels': 6,
+ '_download_params': {
+ 'stream_type': 'audio',
+ 'duration': 370000000,
+ 'timescale': 10000000,
+ 'width': 0,
+ 'height': 0,
+ 'fourcc': 'EC-3',
+ 'language': 'deu',
+ 'codec_private_data': '00063F000000AF87FBA7022DFB42A4D405CD93843BDD0700200F00',
+ 'sampling_rate': 48000,
+ 'channels': 6,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video_deu-23',
+ 'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 384,
+ 'height': 216,
+ 'tbr': 23,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ 'language': 'deu',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 370000000,
+ 'timescale': 10000000,
+ 'width': 384,
+ 'height': 216,
+ 'fourcc': 'AVC1',
+ 'language': 'deu',
+ 'codec_private_data': '000000016742C00CDB06077E5C05A808080A00000300020000030009C0C02EE0177CC6300F142AE00000000168CA8DC8',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video_deu-403',
+ 'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 400,
+ 'height': 224,
+ 'tbr': 403,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ 'language': 'deu',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 370000000,
+ 'timescale': 10000000,
+ 'width': 400,
+ 'height': 224,
+ 'fourcc': 'AVC1',
+ 'language': 'deu',
+ 'codec_private_data': '00000001674D4014E98323B602D4040405000003000100000300320F1429380000000168EAECF2',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video_deu-680',
+ 'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 640,
+ 'height': 360,
+ 'tbr': 680,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ 'language': 'deu',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 370000000,
+ 'timescale': 10000000,
+ 'width': 640,
+ 'height': 360,
+ 'fourcc': 'AVC1',
+ 'language': 'deu',
+ 'codec_private_data': '00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video_deu-1253',
+ 'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 640,
+ 'height': 360,
+ 'tbr': 1253,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ 'vbr': 1253,
+ 'language': 'deu',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 370000000,
+ 'timescale': 10000000,
+ 'width': 640,
+ 'height': 360,
+ 'fourcc': 'AVC1',
+ 'language': 'deu',
+ 'codec_private_data': '00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video_deu-2121',
+ 'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 768,
+ 'height': 432,
+ 'tbr': 2121,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ 'language': 'deu',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 370000000,
+ 'timescale': 10000000,
+ 'width': 768,
+ 'height': 432,
+ 'fourcc': 'AVC1',
+ 'language': 'deu',
+ 'codec_private_data': '00000001674D401EECA0601BD80B50101014000003000400000300C83C58B6580000000168E93B3C80',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video_deu-3275',
+ 'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 1280,
+ 'height': 720,
+ 'tbr': 3275,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ 'language': 'deu',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 370000000,
+ 'timescale': 10000000,
+ 'width': 1280,
+ 'height': 720,
+ 'fourcc': 'AVC1',
+ 'language': 'deu',
+ 'codec_private_data': '00000001674D4020ECA02802DD80B501010140000003004000000C83C60C65800000000168E93B3C80',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video_deu-5300',
+ 'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 1920,
+ 'height': 1080,
+ 'tbr': 5300,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ 'language': 'deu',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 370000000,
+ 'timescale': 10000000,
+ 'width': 1920,
+ 'height': 1080,
+ 'fourcc': 'AVC1',
+ 'language': 'deu',
+ 'codec_private_data': '00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }, {
+ 'format_id': 'video_deu-8079',
+ 'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+ 'ext': 'ismv',
+ 'width': 1920,
+ 'height': 1080,
+ 'tbr': 8079,
+ 'vcodec': 'AVC1',
+ 'acodec': 'none',
+ 'protocol': 'ism',
+ 'language': 'deu',
+ '_download_params': {
+ 'stream_type': 'video',
+ 'duration': 370000000,
+ 'timescale': 10000000,
+ 'width': 1920,
+ 'height': 1080,
+ 'fourcc': 'AVC1',
+ 'language': 'deu',
+ 'codec_private_data': '00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80',
+ 'channels': 2,
+ 'bits_per_sample': 16,
+ 'nal_unit_length_field': 4
+ },
+ }],
+ {},
+ ),
+ ]
+
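+ # Besides the usual format fields, the ISM parser is expected to attach a
+ # per-format '_download_params' dict (stream type, timescale, fourcc, ...)
+ # which the ISM downloader relies on when writing the output file.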
+ for ism_file, ism_url, expected_formats, expected_subtitles in _TEST_CASES:
+ with open('./test/testdata/ism/%s.Manifest' % ism_file, encoding='utf-8') as f:
+ formats, subtitles = self.ie._parse_ism_formats_and_subtitles(
+ compat_etree_fromstring(f.read().encode()), ism_url=ism_url)
+ self.ie._sort_formats(formats)
+ expect_value(self, formats, expected_formats, None)
+ expect_value(self, subtitles, expected_subtitles, None)
+
+ def test_parse_f4m_formats(self):
+ _TEST_CASES = [
+ (
+ # https://github.com/ytdl-org/youtube-dl/issues/14660
+ 'custom_base_url',
+ 'http://api.new.livestream.com/accounts/6115179/events/6764928/videos/144884262.f4m',
+ [{
+ 'manifest_url': 'http://api.new.livestream.com/accounts/6115179/events/6764928/videos/144884262.f4m',
+ 'ext': 'flv',
+ 'format_id': '2148',
+ 'protocol': 'f4m',
+ 'tbr': 2148,
+ 'width': 1280,
+ 'height': 720,
+ }]
+ ),
+ ]
+
+ for f4m_file, f4m_url, expected_formats in _TEST_CASES:
+ with open('./test/testdata/f4m/%s.f4m' % f4m_file, encoding='utf-8') as f:
+ formats = self.ie._parse_f4m_formats(
+ compat_etree_fromstring(f.read().encode()),
+ f4m_url, None)
+ self.ie._sort_formats(formats)
+ expect_value(self, formats, expected_formats, None)
+
+ def test_parse_xspf(self):
+ _TEST_CASES = [
+ (
+ 'foo_xspf',
+ 'https://example.org/src/foo_xspf.xspf',
+ [{
+ 'id': 'foo_xspf',
+ 'title': 'Pandemonium',
+ 'description': 'Visit http://bigbrother404.bandcamp.com',
+ 'duration': 202.416,
+ 'formats': [{
+ 'manifest_url': 'https://example.org/src/foo_xspf.xspf',
+ 'url': 'https://example.org/src/cd1/track%201.mp3',
+ }],
+ }, {
+ 'id': 'foo_xspf',
+ 'title': 'Final Cartridge (Nichico Twelve Remix)',
+ 'description': 'Visit http://bigbrother404.bandcamp.com',
+ 'duration': 255.857,
+ 'formats': [{
+ 'manifest_url': 'https://example.org/src/foo_xspf.xspf',
+ 'url': 'https://example.org/%E3%83%88%E3%83%A9%E3%83%83%E3%82%AF%E3%80%80%EF%BC%92.mp3',
+ }],
+ }, {
+ 'id': 'foo_xspf',
+ 'title': 'Rebuilding Nightingale',
+ 'description': 'Visit http://bigbrother404.bandcamp.com',
+ 'duration': 287.915,
+ 'formats': [{
+ 'manifest_url': 'https://example.org/src/foo_xspf.xspf',
+ 'url': 'https://example.org/src/track3.mp3',
+ }, {
+ 'manifest_url': 'https://example.org/src/foo_xspf.xspf',
+ 'url': 'https://example.com/track3.mp3',
+ }]
+ }]
+ ),
+ ]
+
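+ # Unlike the manifest tests above, _parse_xspf returns playlist entries
+ # (id, title, duration, formats), so each entry is also checked with expect_dict.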
+ for xspf_file, xspf_url, expected_entries in _TEST_CASES:
+ with open('./test/testdata/xspf/%s.xspf' % xspf_file, encoding='utf-8') as f:
+ entries = self.ie._parse_xspf(
+ compat_etree_fromstring(f.read().encode()),
+ xspf_file, xspf_url=xspf_url, xspf_base_url=xspf_url)
+ expect_value(self, entries, expected_entries, None)
+ for i in range(len(entries)):
+ expect_dict(self, entries[i], expected_entries[i])
+
+ def test_response_with_expected_status_returns_content(self):
+ # Checks for mitigations against the effects of
+ # <https://bugs.python.org/issue15002> that affect Python 3.4.1+, which
+ # manifest as `_download_webpage`, `_download_xml`, `_download_json`,
+ # or the underlying `_download_webpage_handle` returning no content
+ # when a response matches `expected_status`.
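+ # The InfoExtractorTestRequestHandler defined earlier in this file serves
+ # /teapot with TEAPOT_RESPONSE_STATUS and TEAPOT_RESPONSE_BODY, so the request
+ # below succeeds only because that status is listed in expected_status.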
+
+ httpd = http.server.HTTPServer(
+ ('127.0.0.1', 0), InfoExtractorTestRequestHandler)
+ port = http_server_port(httpd)
+ server_thread = threading.Thread(target=httpd.serve_forever)
+ server_thread.daemon = True
+ server_thread.start()
+
+ (content, urlh) = self.ie._download_webpage_handle(
+ 'http://127.0.0.1:%d/teapot' % port, None,
+ expected_status=TEAPOT_RESPONSE_STATUS)
+ self.assertEqual(content, TEAPOT_RESPONSE_BODY)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_YoutubeDL.py b/test/test_YoutubeDL.py
new file mode 100644
index 0000000..6be47af
--- /dev/null
+++ b/test/test_YoutubeDL.py
@@ -0,0 +1,1346 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import copy
+import json
+
+from test.helper import FakeYDL, assertRegexpMatches, try_rm
+from yt_dlp import YoutubeDL
+from yt_dlp.compat import compat_os_name
+from yt_dlp.extractor import YoutubeIE
+from yt_dlp.extractor.common import InfoExtractor
+from yt_dlp.postprocessor.common import PostProcessor
+from yt_dlp.utils import (
+ ExtractorError,
+ LazyList,
+ OnDemandPagedList,
+ int_or_none,
+ match_filter_func,
+)
+from yt_dlp.utils.traversal import traverse_obj
+
+TEST_URL = 'http://localhost/sample.mp4'
+
+
+class YDL(FakeYDL):
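+ # Test double that records every processed info_dict and screen message for
+ # later inspection; dl() is stubbed so that no real download can be triggered.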
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.downloaded_info_dicts = []
+ self.msgs = []
+
+ def process_info(self, info_dict):
+ self.downloaded_info_dicts.append(info_dict.copy())
+
+ def to_screen(self, msg, *args, **kwargs):
+ self.msgs.append(msg)
+
+ def dl(self, *args, **kwargs):
+ assert False, 'Downloader must not be invoked for test_YoutubeDL'
+
+
+def _make_result(formats, **kwargs):
+ res = {
+ 'formats': formats,
+ 'id': 'testid',
+ 'title': 'testttitle',
+ 'extractor': 'testex',
+ 'extractor_key': 'TestEx',
+ 'webpage_url': 'http://example.com/watch?v=shenanigans',
+ }
+ res.update(**kwargs)
+ return res
+
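+# Illustrative example (not part of the test data): _make_result([{'format_id': 'a',
+# 'ext': 'mp4', 'url': TEST_URL}]) yields a minimal info_dict with the fixed
+# testid/TestEx metadata above plus the supplied formats.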
+
+class TestFormatSelection(unittest.TestCase):
+ def test_prefer_free_formats(self):
+ # Same resolution => download webm
+ ydl = YDL()
+ ydl.params['prefer_free_formats'] = True
+ formats = [
+ {'ext': 'webm', 'height': 460, 'url': TEST_URL},
+ {'ext': 'mp4', 'height': 460, 'url': TEST_URL},
+ ]
+ info_dict = _make_result(formats)
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['ext'], 'webm')
+
+ # Different resolution => download best quality (mp4)
+ ydl = YDL()
+ ydl.params['prefer_free_formats'] = True
+ formats = [
+ {'ext': 'webm', 'height': 720, 'url': TEST_URL},
+ {'ext': 'mp4', 'height': 1080, 'url': TEST_URL},
+ ]
+ info_dict['formats'] = formats
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['ext'], 'mp4')
+
+ # No prefer_free_formats => prefer mp4 and webm
+ ydl = YDL()
+ ydl.params['prefer_free_formats'] = False
+ formats = [
+ {'ext': 'webm', 'height': 720, 'url': TEST_URL},
+ {'ext': 'mp4', 'height': 720, 'url': TEST_URL},
+ {'ext': 'flv', 'height': 720, 'url': TEST_URL},
+ ]
+ info_dict['formats'] = formats
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['ext'], 'mp4')
+
+ ydl = YDL()
+ ydl.params['prefer_free_formats'] = False
+ formats = [
+ {'ext': 'flv', 'height': 720, 'url': TEST_URL},
+ {'ext': 'webm', 'height': 720, 'url': TEST_URL},
+ ]
+ info_dict['formats'] = formats
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['ext'], 'webm')
+
+ def test_format_selection(self):
+ formats = [
+ {'format_id': '35', 'ext': 'mp4', 'preference': 0, 'url': TEST_URL},
+ {'format_id': 'example-with-dashes', 'ext': 'webm', 'preference': 1, 'url': TEST_URL},
+ {'format_id': '45', 'ext': 'webm', 'preference': 2, 'url': TEST_URL},
+ {'format_id': '47', 'ext': 'webm', 'preference': 3, 'url': TEST_URL},
+ {'format_id': '2', 'ext': 'flv', 'preference': 4, 'url': TEST_URL},
+ ]
+ info_dict = _make_result(formats)
+
+ def test(inp, *expected, multi=False):
+ ydl = YDL({
+ 'format': inp,
+ 'allow_multiple_video_streams': multi,
+ 'allow_multiple_audio_streams': multi,
+ })
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = map(lambda x: x['format_id'], ydl.downloaded_info_dicts)
+ self.assertEqual(list(downloaded), list(expected))
+
+ test('20/47', '47')
+ test('20/71/worst', '35')
+ test(None, '2')
+ test('webm/mp4', '47')
+ test('3gp/40/mp4', '35')
+ test('example-with-dashes', 'example-with-dashes')
+ test('all', '2', '47', '45', 'example-with-dashes', '35')
+ test('mergeall', '2+47+45+example-with-dashes+35', multi=True)
+ # See: https://github.com/yt-dlp/yt-dlp/pulls/8797
+ test('7_a/worst', '35')
+
+ def test_format_selection_audio(self):
+ formats = [
+ {'format_id': 'audio-low', 'ext': 'webm', 'preference': 1, 'vcodec': 'none', 'url': TEST_URL},
+ {'format_id': 'audio-mid', 'ext': 'webm', 'preference': 2, 'vcodec': 'none', 'url': TEST_URL},
+ {'format_id': 'audio-high', 'ext': 'flv', 'preference': 3, 'vcodec': 'none', 'url': TEST_URL},
+ {'format_id': 'vid', 'ext': 'mp4', 'preference': 4, 'url': TEST_URL},
+ ]
+ info_dict = _make_result(formats)
+
+ ydl = YDL({'format': 'bestaudio'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'audio-high')
+
+ ydl = YDL({'format': 'worstaudio'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'audio-low')
+
+ formats = [
+ {'format_id': 'vid-low', 'ext': 'mp4', 'preference': 1, 'url': TEST_URL},
+ {'format_id': 'vid-high', 'ext': 'mp4', 'preference': 2, 'url': TEST_URL},
+ ]
+ info_dict = _make_result(formats)
+
+ ydl = YDL({'format': 'bestaudio/worstaudio/best'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'vid-high')
+
+ def test_format_selection_audio_exts(self):
+ formats = [
+ {'format_id': 'mp3-64', 'ext': 'mp3', 'abr': 64, 'url': 'http://_', 'vcodec': 'none'},
+ {'format_id': 'ogg-64', 'ext': 'ogg', 'abr': 64, 'url': 'http://_', 'vcodec': 'none'},
+ {'format_id': 'aac-64', 'ext': 'aac', 'abr': 64, 'url': 'http://_', 'vcodec': 'none'},
+ {'format_id': 'mp3-32', 'ext': 'mp3', 'abr': 32, 'url': 'http://_', 'vcodec': 'none'},
+ {'format_id': 'aac-32', 'ext': 'aac', 'abr': 32, 'url': 'http://_', 'vcodec': 'none'},
+ ]
+
+ info_dict = _make_result(formats)
+ ydl = YDL({'format': 'best'})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(copy.deepcopy(info_dict))
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'aac-64')
+
+ ydl = YDL({'format': 'mp3'})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(copy.deepcopy(info_dict))
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'mp3-64')
+
+ ydl = YDL({'prefer_free_formats': True})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(copy.deepcopy(info_dict))
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'ogg-64')
+
+ def test_format_selection_video(self):
+ formats = [
+ {'format_id': 'dash-video-low', 'ext': 'mp4', 'preference': 1, 'acodec': 'none', 'url': TEST_URL},
+ {'format_id': 'dash-video-high', 'ext': 'mp4', 'preference': 2, 'acodec': 'none', 'url': TEST_URL},
+ {'format_id': 'vid', 'ext': 'mp4', 'preference': 3, 'url': TEST_URL},
+ ]
+ info_dict = _make_result(formats)
+
+ ydl = YDL({'format': 'bestvideo'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'dash-video-high')
+
+ ydl = YDL({'format': 'worstvideo'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'dash-video-low')
+
+ ydl = YDL({'format': 'bestvideo[format_id^=dash][format_id$=low]'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'dash-video-low')
+
+ formats = [
+ {'format_id': 'vid-vcodec-dot', 'ext': 'mp4', 'preference': 1, 'vcodec': 'avc1.123456', 'acodec': 'none', 'url': TEST_URL},
+ ]
+ info_dict = _make_result(formats)
+
+ ydl = YDL({'format': 'bestvideo[vcodec=avc1.123456]'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'vid-vcodec-dot')
+
+ def test_format_selection_string_ops(self):
+ formats = [
+ {'format_id': 'abc-cba', 'ext': 'mp4', 'url': TEST_URL},
+ {'format_id': 'zxc-cxz', 'ext': 'webm', 'url': TEST_URL},
+ ]
+ info_dict = _make_result(formats)
+
+ # equals (=)
+ ydl = YDL({'format': '[format_id=abc-cba]'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'abc-cba')
+
+ # does not equal (!=)
+ ydl = YDL({'format': '[format_id!=abc-cba]'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'zxc-cxz')
+
+ ydl = YDL({'format': '[format_id!=abc-cba][format_id!=zxc-cxz]'})
+ self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
+
+ # starts with (^=)
+ ydl = YDL({'format': '[format_id^=abc]'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'abc-cba')
+
+ # does not start with (!^=)
+ ydl = YDL({'format': '[format_id!^=abc]'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'zxc-cxz')
+
+ ydl = YDL({'format': '[format_id!^=abc][format_id!^=zxc]'})
+ self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
+
+ # ends with ($=)
+ ydl = YDL({'format': '[format_id$=cba]'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'abc-cba')
+
+ # does not end with (!$=)
+ ydl = YDL({'format': '[format_id!$=cba]'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'zxc-cxz')
+
+ ydl = YDL({'format': '[format_id!$=cba][format_id!$=cxz]'})
+ self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
+
+ # contains (*=)
+ ydl = YDL({'format': '[format_id*=bc-cb]'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'abc-cba')
+
+ # does not contain (!*=)
+ ydl = YDL({'format': '[format_id!*=bc-cb]'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'zxc-cxz')
+
+ ydl = YDL({'format': '[format_id!*=abc][format_id!*=zxc]'})
+ self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
+
+ ydl = YDL({'format': '[format_id!*=-]'})
+ self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
+
+ def test_youtube_format_selection(self):
+ # FIXME: Rewrite in accordance with the new format sorting options
+ return
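+ # NOTE: the early return above skips every assertion below until the test is
+ # rewritten for the new format sorting options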
+
+ order = [
+ '38', '37', '46', '22', '45', '35', '44', '18', '34', '43', '6', '5', '17', '36', '13',
+ # Apple HTTP Live Streaming
+ '96', '95', '94', '93', '92', '132', '151',
+ # 3D
+ '85', '84', '102', '83', '101', '82', '100',
+ # Dash video
+ '137', '248', '136', '247', '135', '246',
+ '245', '244', '134', '243', '133', '242', '160',
+ # Dash audio
+ '141', '172', '140', '171', '139',
+ ]
+
+ def format_info(f_id):
+ info = YoutubeIE._formats[f_id].copy()
+
+ # XXX: In real cases InfoExtractor._parse_mpd_formats() fills in 'acodec'
+ # and 'vcodec', while in tests such information is incomplete since
+ # commit a6c2c24479e5f4827ceb06f64d855329c0a6f593, so
+ # test_YoutubeDL.test_youtube_format_selection would be broken
+ # without this fix
+ if 'acodec' in info and 'vcodec' not in info:
+ info['vcodec'] = 'none'
+ elif 'vcodec' in info and 'acodec' not in info:
+ info['acodec'] = 'none'
+
+ info['format_id'] = f_id
+ info['url'] = 'url:' + f_id
+ return info
+ formats_order = [format_info(f_id) for f_id in order]
+
+ info_dict = _make_result(list(formats_order), extractor='youtube')
+ ydl = YDL({'format': 'bestvideo+bestaudio'})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], '248+172')
+ self.assertEqual(downloaded['ext'], 'mp4')
+
+ info_dict = _make_result(list(formats_order), extractor='youtube')
+ ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], '38')
+
+ info_dict = _make_result(list(formats_order), extractor='youtube')
+ ydl = YDL({'format': 'bestvideo/best,bestaudio'})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
+ self.assertEqual(downloaded_ids, ['137', '141'])
+
+ info_dict = _make_result(list(formats_order), extractor='youtube')
+ ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
+ self.assertEqual(downloaded_ids, ['137+141', '248+141'])
+
+ info_dict = _make_result(list(formats_order), extractor='youtube')
+ ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
+ self.assertEqual(downloaded_ids, ['136+141', '247+141'])
+
+ info_dict = _make_result(list(formats_order), extractor='youtube')
+ ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
+ self.assertEqual(downloaded_ids, ['248+141'])
+
+ for f1, f2 in zip(formats_order, formats_order[1:]):
+ info_dict = _make_result([f1, f2], extractor='youtube')
+ ydl = YDL({'format': 'best/bestvideo'})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], f1['format_id'])
+
+ info_dict = _make_result([f2, f1], extractor='youtube')
+ ydl = YDL({'format': 'best/bestvideo'})
+ ydl.sort_formats(info_dict)
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], f1['format_id'])
+
+ def test_audio_only_extractor_format_selection(self):
+ # For extractors with incomplete formats (all formats are audio-only or
+ # video-only), best and worst should fall back to the corresponding
+ # best/worst video-only or audio-only formats (as per
+ # https://github.com/ytdl-org/youtube-dl/pull/5556)
+ formats = [
+ {'format_id': 'low', 'ext': 'mp3', 'preference': 1, 'vcodec': 'none', 'url': TEST_URL},
+ {'format_id': 'high', 'ext': 'mp3', 'preference': 2, 'vcodec': 'none', 'url': TEST_URL},
+ ]
+ info_dict = _make_result(formats)
+
+ ydl = YDL({'format': 'best'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'high')
+
+ ydl = YDL({'format': 'worst'})
+ ydl.process_ie_result(info_dict.copy())
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'low')
+
+ def test_format_not_available(self):
+ formats = [
+ {'format_id': 'regular', 'ext': 'mp4', 'height': 360, 'url': TEST_URL},
+ {'format_id': 'video', 'ext': 'mp4', 'height': 720, 'acodec': 'none', 'url': TEST_URL},
+ ]
+ info_dict = _make_result(formats)
+
+ # This must fail since the complete video+audio format does not match the
+ # filter and the extractor does not provide incomplete-only formats
+ # (i.e. only video-only or audio-only ones).
+ ydl = YDL({'format': 'best[height>360]'})
+ self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
+
+ def test_format_selection_issue_10083(self):
+ # See https://github.com/ytdl-org/youtube-dl/issues/10083
+ formats = [
+ {'format_id': 'regular', 'height': 360, 'url': TEST_URL},
+ {'format_id': 'video', 'height': 720, 'acodec': 'none', 'url': TEST_URL},
+ {'format_id': 'audio', 'vcodec': 'none', 'url': TEST_URL},
+ ]
+ info_dict = _make_result(formats)
+
+ ydl = YDL({'format': 'best[height>360]/bestvideo[height>360]+bestaudio'})
+ ydl.process_ie_result(info_dict.copy())
+ self.assertEqual(ydl.downloaded_info_dicts[0]['format_id'], 'video+audio')
+
+ def test_invalid_format_specs(self):
+ def assert_syntax_error(format_spec):
+ self.assertRaises(SyntaxError, YDL, {'format': format_spec})
+
+ assert_syntax_error('bestvideo,,best')
+ assert_syntax_error('+bestaudio')
+ assert_syntax_error('bestvideo+')
+ assert_syntax_error('/')
+ assert_syntax_error('[720<height]')
+
+ def test_format_filtering(self):
+ formats = [
+ {'format_id': 'A', 'filesize': 500, 'width': 1000},
+ {'format_id': 'B', 'filesize': 1000, 'width': 500},
+ {'format_id': 'C', 'filesize': 1000, 'width': 400},
+ {'format_id': 'D', 'filesize': 2000, 'width': 600},
+ {'format_id': 'E', 'filesize': 3000},
+ {'format_id': 'F'},
+ {'format_id': 'G', 'filesize': 1000000},
+ ]
+ for f in formats:
+ f['url'] = 'http://_/'
+ f['ext'] = 'unknown'
+ info_dict = _make_result(formats, _format_sort_fields=('id', ))
+
+ ydl = YDL({'format': 'best[filesize<3000]'})
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'D')
+
+ ydl = YDL({'format': 'best[filesize<=3000]'})
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'E')
+
+ ydl = YDL({'format': 'best[filesize <= ? 3000]'})
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'F')
+
+ ydl = YDL({'format': 'best [filesize = 1000] [width>450]'})
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'B')
+
+ ydl = YDL({'format': 'best [filesize = 1000] [width!=450]'})
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'C')
+
+ ydl = YDL({'format': '[filesize>?1]'})
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'G')
+
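+ # '1M' is parsed as 10**6 bytes while '1MiB' is 2**20, which is why the next
+ # two filters select different formats (E vs G)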
+ ydl = YDL({'format': '[filesize<1M]'})
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'E')
+
+ ydl = YDL({'format': '[filesize<1MiB]'})
+ ydl.process_ie_result(info_dict)
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['format_id'], 'G')
+
+ ydl = YDL({'format': 'all[width>=400][width<=600]'})
+ ydl.process_ie_result(info_dict)
+ downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
+ self.assertEqual(downloaded_ids, ['D', 'C', 'B'])
+
+ ydl = YDL({'format': 'best[height<40]'})
+ try:
+ ydl.process_ie_result(info_dict)
+ except ExtractorError:
+ pass
+ self.assertEqual(ydl.downloaded_info_dicts, [])
+
+ def test_default_format_spec(self):
+ ydl = YDL({'simulate': True})
+ self.assertEqual(ydl._default_format_spec({}), 'bestvideo*+bestaudio/best')
+
+ ydl = YDL({})
+ self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
+
+ ydl = YDL({'simulate': True})
+ self.assertEqual(ydl._default_format_spec({'is_live': True}), 'bestvideo*+bestaudio/best')
+
+ ydl = YDL({'outtmpl': '-'})
+ self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
+
+ ydl = YDL({})
+ self.assertEqual(ydl._default_format_spec({}, download=False), 'bestvideo*+bestaudio/best')
+ self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
+
+
+class TestYoutubeDL(unittest.TestCase):
+ def test_subtitles(self):
+ def s_formats(lang, autocaption=False):
+ return [{
+ 'ext': ext,
+ 'url': f'http://localhost/video.{lang}.{ext}',
+ '_auto': autocaption,
+ } for ext in ['vtt', 'srt', 'ass']]
+ subtitles = {l: s_formats(l) for l in ['en', 'fr', 'es']}
+ auto_captions = {l: s_formats(l, True) for l in ['it', 'pt', 'es']}
+ info_dict = {
+ 'id': 'test',
+ 'title': 'Test',
+ 'url': 'http://localhost/video.mp4',
+ 'subtitles': subtitles,
+ 'automatic_captions': auto_captions,
+ 'extractor': 'TEST',
+ 'webpage_url': 'http://example.com/watch?v=shenanigans',
+ }
+
+ def get_info(params={}):
+ params.setdefault('simulate', True)
+ ydl = YDL(params)
+ ydl.report_warning = lambda *args, **kargs: None
+ return ydl.process_video_result(info_dict, download=False)
+
+ result = get_info()
+ self.assertFalse(result.get('requested_subtitles'))
+ self.assertEqual(result['subtitles'], subtitles)
+ self.assertEqual(result['automatic_captions'], auto_captions)
+
+ result = get_info({'writesubtitles': True})
+ subs = result['requested_subtitles']
+ self.assertTrue(subs)
+ self.assertEqual(set(subs.keys()), {'en'})
+ self.assertTrue(subs['en'].get('data') is None)
+ self.assertEqual(subs['en']['ext'], 'ass')
+
+ result = get_info({'writesubtitles': True, 'subtitlesformat': 'foo/srt'})
+ subs = result['requested_subtitles']
+ self.assertEqual(subs['en']['ext'], 'srt')
+
+ result = get_info({'writesubtitles': True, 'subtitleslangs': ['es', 'fr', 'it']})
+ subs = result['requested_subtitles']
+ self.assertTrue(subs)
+ self.assertEqual(set(subs.keys()), {'es', 'fr'})
+
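+ # subtitleslangs also accepts 'all', '-'-prefixed exclusions and regular
+ # expressions (e.g. 'e.+' below)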
+ result = get_info({'writesubtitles': True, 'subtitleslangs': ['all', '-en']})
+ subs = result['requested_subtitles']
+ self.assertTrue(subs)
+ self.assertEqual(set(subs.keys()), {'es', 'fr'})
+
+ result = get_info({'writesubtitles': True, 'subtitleslangs': ['en', 'fr', '-en']})
+ subs = result['requested_subtitles']
+ self.assertTrue(subs)
+ self.assertEqual(set(subs.keys()), {'fr'})
+
+ result = get_info({'writesubtitles': True, 'subtitleslangs': ['-en', 'en']})
+ subs = result['requested_subtitles']
+ self.assertTrue(subs)
+ self.assertEqual(set(subs.keys()), {'en'})
+
+ result = get_info({'writesubtitles': True, 'subtitleslangs': ['e.+']})
+ subs = result['requested_subtitles']
+ self.assertTrue(subs)
+ self.assertEqual(set(subs.keys()), {'es', 'en'})
+
+ result = get_info({'writesubtitles': True, 'writeautomaticsub': True, 'subtitleslangs': ['es', 'pt']})
+ subs = result['requested_subtitles']
+ self.assertTrue(subs)
+ self.assertEqual(set(subs.keys()), {'es', 'pt'})
+ self.assertFalse(subs['es']['_auto'])
+ self.assertTrue(subs['pt']['_auto'])
+
+ result = get_info({'writeautomaticsub': True, 'subtitleslangs': ['es', 'pt']})
+ subs = result['requested_subtitles']
+ self.assertTrue(subs)
+ self.assertEqual(set(subs.keys()), {'es', 'pt'})
+ self.assertTrue(subs['es']['_auto'])
+ self.assertTrue(subs['pt']['_auto'])
+
+ def test_add_extra_info(self):
+ test_dict = {
+ 'extractor': 'Foo',
+ }
+ extra_info = {
+ 'extractor': 'Bar',
+ 'playlist': 'funny videos',
+ }
+ YDL.add_extra_info(test_dict, extra_info)
+ self.assertEqual(test_dict['extractor'], 'Foo')
+ self.assertEqual(test_dict['playlist'], 'funny videos')
+
+ outtmpl_info = {
+ 'id': '1234',
+ 'ext': 'mp4',
+ 'width': None,
+ 'height': 1080,
+ 'filesize': 1024,
+ 'title1': '$PATH',
+ 'title2': '%PATH%',
+ 'title3': 'foo/bar\\test',
+ 'title4': 'foo "bar" test',
+ 'title5': 'áéí 𝐀',
+ 'timestamp': 1618488000,
+ 'duration': 100000,
+ 'playlist_index': 1,
+ 'playlist_autonumber': 2,
+ '__last_playlist_index': 100,
+ 'n_entries': 10,
+ 'formats': [
+ {'id': 'id 1', 'height': 1080, 'width': 1920},
+ {'id': 'id 2', 'height': 720},
+ {'id': 'id 3'}
+ ]
+ }
+
+ def test_prepare_outtmpl_and_filename(self):
+ def test(tmpl, expected, *, info=None, **params):
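+ # `expected` may be a single value, an (outtmpl, filename) pair or a callable
+ # predicate; a None entry skips the corresponding check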
+ params['outtmpl'] = tmpl
+ ydl = FakeYDL(params)
+ ydl._num_downloads = 1
+ self.assertEqual(ydl.validate_outtmpl(tmpl), None)
+
+ out = ydl.evaluate_outtmpl(tmpl, info or self.outtmpl_info)
+ fname = ydl.prepare_filename(info or self.outtmpl_info)
+
+ if not isinstance(expected, (list, tuple)):
+ expected = (expected, expected)
+ for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected):
+ if callable(expect):
+ self.assertTrue(expect(got), f'Wrong {name} from {tmpl}')
+ elif expect is not None:
+ self.assertEqual(got, expect, f'Wrong {name} from {tmpl}')
+
+ # Side-effects
+ original_infodict = dict(self.outtmpl_info)
+ test('foo.bar', 'foo.bar')
+ original_infodict['epoch'] = self.outtmpl_info.get('epoch')
+ self.assertTrue(isinstance(original_infodict['epoch'], int))
+ test('%(epoch)d', int_or_none)
+ self.assertEqual(original_infodict, self.outtmpl_info)
+
+ # Auto-generated fields
+ test('%(id)s.%(ext)s', '1234.mp4')
+ test('%(duration_string)s', ('27:46:40', '27-46-40'))
+ test('%(resolution)s', '1080p')
+ test('%(playlist_index|)s', '001')
+ test('%(playlist_index&{}!)s', '1!')
+ test('%(playlist_autonumber)s', '02')
+ test('%(autonumber)s', '00001')
+ test('%(autonumber+2)03d', '005', autonumber_start=3)
+ test('%(autonumber)s', '001', autonumber_size=3)
+
+ # Escaping %
+ test('%', '%')
+ test('%%', '%')
+ test('%%%%', '%%')
+ test('%s', '%s')
+ test('%%%s', '%%s')
+ test('%d', '%d')
+ test('%abc%', '%abc%')
+ test('%%(width)06d.%(ext)s', '%(width)06d.mp4')
+ test('%%%(height)s', '%1080')
+ test('%(width)06d.%(ext)s', 'NA.mp4')
+ test('%(width)06d.%%(ext)s', 'NA.%(ext)s')
+ test('%%(width)06d.%(ext)s', '%(width)06d.mp4')
+
+ # ID sanitization
+ test('%(id)s', '_abcd', info={'id': '_abcd'})
+ test('%(some_id)s', '_abcd', info={'some_id': '_abcd'})
+ test('%(formats.0.id)s', '_abcd', info={'formats': [{'id': '_abcd'}]})
+ test('%(id)s', '-abcd', info={'id': '-abcd'})
+ test('%(id)s', '.abcd', info={'id': '.abcd'})
+ test('%(id)s', 'ab__cd', info={'id': 'ab__cd'})
+ test('%(id)s', ('ab:cd', 'ab:cd'), info={'id': 'ab:cd'})
+ test('%(id.0)s', '-', info={'id': '--'})
+
+ # Invalid templates
+ self.assertTrue(isinstance(YoutubeDL.validate_outtmpl('%(title)'), ValueError))
+ test('%(invalid@tmpl|def)s', 'none', outtmpl_na_placeholder='none')
+ test('%(..)s', 'NA')
+ test('%(formats.{id)s', 'NA')
+
+ # Entire info_dict
+ def expect_same_infodict(out):
+ got_dict = json.loads(out)
+ for info_field, expected in self.outtmpl_info.items():
+ self.assertEqual(got_dict.get(info_field), expected, info_field)
+ return True
+
+ test('%()j', (expect_same_infodict, None))
+
+ # NA placeholder
+ NA_TEST_OUTTMPL = '%(uploader_date)s-%(width)d-%(x|def)s-%(id)s.%(ext)s'
+ test(NA_TEST_OUTTMPL, 'NA-NA-def-1234.mp4')
+ test(NA_TEST_OUTTMPL, 'none-none-def-1234.mp4', outtmpl_na_placeholder='none')
+ test(NA_TEST_OUTTMPL, '--def-1234.mp4', outtmpl_na_placeholder='')
+ test('%(non_existent.0)s', 'NA')
+
+ # String formatting
+ FMT_TEST_OUTTMPL = '%%(height)%s.%%(ext)s'
+ test(FMT_TEST_OUTTMPL % 's', '1080.mp4')
+ test(FMT_TEST_OUTTMPL % 'd', '1080.mp4')
+ test(FMT_TEST_OUTTMPL % '6d', '  1080.mp4')
+ test(FMT_TEST_OUTTMPL % '-6d', '1080  .mp4')
+ test(FMT_TEST_OUTTMPL % '06d', '001080.mp4')
+ test(FMT_TEST_OUTTMPL % ' 06d', ' 01080.mp4')
+ test(FMT_TEST_OUTTMPL % '   06d', ' 01080.mp4')
+ test(FMT_TEST_OUTTMPL % '0 6d', ' 01080.mp4')
+ test(FMT_TEST_OUTTMPL % '0   6d', ' 01080.mp4')
+ test(FMT_TEST_OUTTMPL % '   0   6d', ' 01080.mp4')
+
+ # Type casting
+ test('%(id)d', '1234')
+ test('%(height)c', '1')
+ test('%(ext)c', 'm')
+ test('%(id)d %(id)r', "1234 '1234'")
+ test('%(id)r %(height)r', "'1234' 1080")
+ test('%(title5)a %(height)a', (R"'\xe1\xe9\xed \U0001d400' 1080", None))
+ test('%(ext)s-%(ext|def)d', 'mp4-def')
+ test('%(width|0)04d', '0')
+ test('a%(width|b)d', 'ab', outtmpl_na_placeholder='none')
+
+ FORMATS = self.outtmpl_info['formats']
+
+ # Custom type casting
+ test('%(formats.:.id)l', 'id 1, id 2, id 3')
+ test('%(formats.:.id)#l', ('id 1\nid 2\nid 3', 'id 1 id 2 id 3'))
+ test('%(ext)l', 'mp4')
+ test('%(formats.:.id) 18l', '  id 1, id 2, id 3')
+ test('%(formats)j', (json.dumps(FORMATS), None))
+ test('%(formats)#j', (
+ json.dumps(FORMATS, indent=4),
+ json.dumps(FORMATS, indent=4).replace(':', '：').replace('"', '＂').replace('\n', ' ')
+ ))
+ test('%(title5).3B', 'á')
+ test('%(title5)U', 'áéí 𝐀')
+ test('%(title5)#U', 'a\u0301e\u0301i\u0301 𝐀')
+ test('%(title5)+U', 'áéí A')
+ test('%(title5)+#U', 'a\u0301e\u0301i\u0301 A')
+ test('%(height)D', '1k')
+ test('%(filesize)#D', '1Ki')
+ test('%(height)5.2D', ' 1.08k')
+ test('%(title4)#S', 'foo_bar_test')
+ test('%(title4).10S', ('foo "bar" ', 'foo "bar"' + ('#' if compat_os_name == 'nt' else ' ')))
+ if compat_os_name == 'nt':
+ test('%(title4)q', ('"foo ""bar"" test"', None))
+ test('%(formats.:.id)#q', ('"id 1" "id 2" "id 3"', None))
+ test('%(formats.0.id)#q', ('"id 1"', None))
+ else:
+ test('%(title4)q', ('\'foo "bar" test\'', '\'foo "bar" test\''))
+ test('%(formats.:.id)#q', "'id 1' 'id 2' 'id 3'")
+ test('%(formats.0.id)#q', "'id 1'")
+
+ # Internal formatting
+ test('%(timestamp-1000>%H-%M-%S)s', '11-43-20')
+ test('%(title|%)s %(title|%%)s', '% %%')
+ test('%(id+1-height+3)05d', '00158')
+ test('%(width+100)05d', 'NA')
+ test('%(filesize*8)d', '8192')
+ test('%(formats.0) 15s', ('% 15s' % FORMATS[0], None))
+ test('%(formats.0)r', (repr(FORMATS[0]), None))
+ test('%(height.0)03d', '001')
+ test('%(-height.0)04d', '-001')
+ test('%(formats.-1.id)s', FORMATS[-1]['id'])
+ test('%(formats.0.id.-1)d', FORMATS[0]['id'][-1])
+ test('%(formats.3)s', 'NA')
+ test('%(formats.:2:-1)r', repr(FORMATS[:2:-1]))
+ test('%(formats.0.id.-1+id)f', '1235.000000')
+ test('%(formats.0.id.-1+formats.1.id.-1)d', '3')
+ out = json.dumps([{'id': f['id'], 'height.:2': str(f['height'])[:2]}
+ if 'height' in f else {'id': f['id']}
+ for f in FORMATS])
+ test('%(formats.:.{id,height.:2})j', (out, None))
+ test('%(formats.:.{id,height}.id)l', ', '.join(f['id'] for f in FORMATS))
+ test('%(.{id,title})j', ('{"id": "1234"}', '{"id": "1234"}'))
+
+ # Alternates
+ test('%(title,id)s', '1234')
+ test('%(width-100,height+20|def)d', '1100')
+ test('%(width-100,height+width|def)s', 'def')
+ test('%(timestamp-x>%H\\,%M\\,%S,timestamp>%H\\,%M\\,%S)s', '12,00,00')
+
+ # Replacement
+ test('%(id&foo)s.bar', 'foo.bar')
+ test('%(title&foo)s.bar', 'NA.bar')
+ test('%(title&foo|baz)s.bar', 'baz.bar')
+ test('%(x,id&foo|baz)s.bar', 'foo.bar')
+ test('%(x,title&foo|baz)s.bar', 'baz.bar')
+ test('%(id&a\nb|)s', ('a\nb', 'a b'))
+ test('%(id&hi {:>10} {}|)s', 'hi       1234 1234')
+ test(R'%(id&{0} {}|)s', 'NA')
+ test(R'%(id&{0.1}|)s', 'NA')
+ test('%(height&{:,d})S', '1,080')
+
+ # Laziness
+ def gen():
+ yield from range(5)
+ raise self.assertTrue(False, 'LazyList should not be evaluated till here')
+ test('%(key.4)s', '4', info={'key': LazyList(gen())})
+
+ # Empty filename
+ test('%(foo|)s-%(bar|)s.%(ext)s', '-.mp4')
+ # test('%(foo|)s.%(ext)s', ('.mp4', '_.mp4')) # fixme
+ # test('%(foo|)s', ('', '_')) # fixme
+
+ # Environment variable expansion for prepare_filename
+ os.environ['__yt_dlp_var'] = 'expanded'
+ envvar = '%__yt_dlp_var%' if compat_os_name == 'nt' else '$__yt_dlp_var'
+ test(envvar, (envvar, 'expanded'))
+ if compat_os_name == 'nt':
+ test('%s%', ('%s%', '%s%'))
+ os.environ['s'] = 'expanded'
+ test('%s%', ('%s%', 'expanded')) # %s% should be expanded before escaping %s
+ os.environ['(test)s'] = 'expanded'
+ test('%(test)s%', ('NA%', 'expanded')) # Environment should take priority over template
+
+ # Path expansion and escaping
+ test('Hello %(title1)s', 'Hello $PATH')
+ test('Hello %(title2)s', 'Hello %PATH%')
+ test('%(title3)s', ('foo/bar\\test', 'foo⧸bar⧹test'))
+ test('folder/%(title3)s', ('folder/foo/bar\\test', 'folder%sfoo⧸bar⧹test' % os.path.sep))
+
+ def test_format_note(self):
+ ydl = YoutubeDL()
+ self.assertEqual(ydl._format_note({}), '')
+ assertRegexpMatches(self, ydl._format_note({
+ 'vbr': 10,
+ }), r'^\s*10k$')
+ assertRegexpMatches(self, ydl._format_note({
+ 'fps': 30,
+ }), r'^30fps$')
+
+ def test_postprocessors(self):
+ filename = 'post-processor-testfile.mp4'
+ audiofile = filename + '.mp3'
+
+ class SimplePP(PostProcessor):
+ def run(self, info):
+ with open(audiofile, 'w') as f:
+ f.write('EXAMPLE')
+ return [info['filepath']], info
+
+ def run_pp(params, PP):
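+ # Write a dummy media file, then run the given post-processor on it
+ # through a fresh YoutubeDL instance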
+ with open(filename, 'w') as f:
+ f.write('EXAMPLE')
+ ydl = YoutubeDL(params)
+ ydl.add_post_processor(PP())
+ ydl.post_process(filename, {'filepath': filename})
+
+ run_pp({'keepvideo': True}, SimplePP)
+ self.assertTrue(os.path.exists(filename), '%s doesn\'t exist' % filename)
+ self.assertTrue(os.path.exists(audiofile), '%s doesn\'t exist' % audiofile)
+ os.unlink(filename)
+ os.unlink(audiofile)
+
+ run_pp({'keepvideo': False}, SimplePP)
+ self.assertFalse(os.path.exists(filename), '%s exists' % filename)
+ self.assertTrue(os.path.exists(audiofile), '%s doesn\'t exist' % audiofile)
+ os.unlink(audiofile)
+
+ class ModifierPP(PostProcessor):
+ def run(self, info):
+ with open(info['filepath'], 'w') as f:
+ f.write('MODIFIED')
+ return [], info
+
+ run_pp({'keepvideo': False}, ModifierPP)
+ self.assertTrue(os.path.exists(filename), '%s doesn\'t exist' % filename)
+ os.unlink(filename)
+
+ def test_match_filter(self):
+ first = {
+ 'id': '1',
+ 'url': TEST_URL,
+ 'title': 'one',
+ 'extractor': 'TEST',
+ 'duration': 30,
+ 'filesize': 10 * 1024,
+ 'playlist_id': '42',
+ 'uploader': "變態妍字幕版 太妍 тест",
+ 'creator': "тест ' 123 ' тест--",
+ 'webpage_url': 'http://example.com/watch?v=shenanigans',
+ }
+ second = {
+ 'id': '2',
+ 'url': TEST_URL,
+ 'title': 'two',
+ 'extractor': 'TEST',
+ 'duration': 10,
+ 'description': 'foo',
+ 'filesize': 5 * 1024,
+ 'playlist_id': '43',
+ 'uploader': "тест 123",
+ 'webpage_url': 'http://example.com/watch?v=SHENANIGANS',
+ }
+ videos = [first, second]
+
+ def get_videos(filter_=None):
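+ # Run both test videos through process_ie_result with the given match_filter
+ # and return the ids of those accepted for download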
+ ydl = YDL({'match_filter': filter_, 'simulate': True})
+ for v in videos:
+ ydl.process_ie_result(v.copy(), download=True)
+ return [v['id'] for v in ydl.downloaded_info_dicts]
+
+ res = get_videos()
+ self.assertEqual(res, ['1', '2'])
+
+ def f(v, incomplete):
+ if v['id'] == '1':
+ return None
+ else:
+ return 'Video id is not 1'
+ res = get_videos(f)
+ self.assertEqual(res, ['1'])
+
+ f = match_filter_func('duration < 30')
+ res = get_videos(f)
+ self.assertEqual(res, ['2'])
+
+ f = match_filter_func('description = foo')
+ res = get_videos(f)
+ self.assertEqual(res, ['2'])
+
+ f = match_filter_func('description =? foo')
+ res = get_videos(f)
+ self.assertEqual(res, ['1', '2'])
+
+ f = match_filter_func('filesize > 5KiB')
+ res = get_videos(f)
+ self.assertEqual(res, ['1'])
+
+ f = match_filter_func('playlist_id = 42')
+ res = get_videos(f)
+ self.assertEqual(res, ['1'])
+
+ f = match_filter_func('uploader = "變態妍字幕版 太妍 тест"')
+ res = get_videos(f)
+ self.assertEqual(res, ['1'])
+
+ f = match_filter_func('uploader != "變態妍字幕版 太妍 тест"')
+ res = get_videos(f)
+ self.assertEqual(res, ['2'])
+
+ f = match_filter_func('creator = "тест \' 123 \' тест--"')
+ res = get_videos(f)
+ self.assertEqual(res, ['1'])
+
+ f = match_filter_func("creator = 'тест \\' 123 \\' тест--'")
+ res = get_videos(f)
+ self.assertEqual(res, ['1'])
+
+ f = match_filter_func(r"creator = 'тест \' 123 \' тест--' & duration > 30")
+ res = get_videos(f)
+ self.assertEqual(res, [])
+
+ def test_playlist_items_selection(self):
+ INDICES, PAGE_SIZE = list(range(1, 11)), 3
+
+ def entry(i, evaluated):
+ evaluated.append(i)
+ return {
+ 'id': str(i),
+ 'title': str(i),
+ 'url': TEST_URL,
+ }
+
+ def pagedlist_entries(evaluated):
+ def page_func(n):
+ start = PAGE_SIZE * n
+ for i in INDICES[start: start + PAGE_SIZE]:
+ yield entry(i, evaluated)
+ return OnDemandPagedList(page_func, PAGE_SIZE)
+
+ def page_num(i):
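+ # 1-based number of the page that contains the 1-based entry index i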
+ return (i + PAGE_SIZE - 1) // PAGE_SIZE
+
+ def generator_entries(evaluated):
+ for i in INDICES:
+ yield entry(i, evaluated)
+
+ def list_entries(evaluated):
+ return list(generator_entries(evaluated))
+
+ def lazylist_entries(evaluated):
+ return LazyList(generator_entries(evaluated))
+
+ def get_downloaded_info_dicts(params, entries):
+ ydl = YDL(params)
+ ydl.process_ie_result({
+ '_type': 'playlist',
+ 'id': 'test',
+ 'extractor': 'test:playlist',
+ 'extractor_key': 'test:playlist',
+ 'webpage_url': 'http://example.com',
+ 'entries': entries,
+ })
+ return ydl.downloaded_info_dicts
+
+ def test_selection(params, expected_ids, evaluate_all=False):
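+ # Checks both which entries end up downloaded and how many entries each
+ # container type is forced to evaluate, so lazy selection is not silently broken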
+ expected_ids = list(expected_ids)
+ if evaluate_all:
+ generator_eval = pagedlist_eval = INDICES
+ elif not expected_ids:
+ generator_eval = pagedlist_eval = []
+ else:
+ generator_eval = INDICES[0: max(expected_ids)]
+ pagedlist_eval = INDICES[PAGE_SIZE * page_num(min(expected_ids)) - PAGE_SIZE:
+ PAGE_SIZE * page_num(max(expected_ids))]
+
+ for name, func, expected_eval in (
+ ('list', list_entries, INDICES),
+ ('Generator', generator_entries, generator_eval),
+ # ('LazyList', lazylist_entries, generator_eval), # Generator and LazyList follow the exact same code path
+ ('PagedList', pagedlist_entries, pagedlist_eval),
+ ):
+ evaluated = []
+ entries = func(evaluated)
+ results = [(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
+ for v in get_downloaded_info_dicts(params, entries)]
+ self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))), f'Entries of {name} for {params}')
+ self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')
+
+ test_selection({}, INDICES)
+ test_selection({'playlistend': 20}, INDICES, True)
+ test_selection({'playlistend': 2}, INDICES[:2])
+ test_selection({'playliststart': 11}, [], True)
+ test_selection({'playliststart': 2}, INDICES[1:])
+ test_selection({'playlist_items': '2-4'}, INDICES[1:4])
+ test_selection({'playlist_items': '2,4'}, [2, 4])
+ test_selection({'playlist_items': '20'}, [], True)
+ test_selection({'playlist_items': '0'}, [])
+
+ # Tests for https://github.com/ytdl-org/youtube-dl/issues/10591
+ test_selection({'playlist_items': '2-4,3-4,3'}, [2, 3, 4])
+ test_selection({'playlist_items': '4,2'}, [4, 2])
+
+ # Tests for https://github.com/yt-dlp/yt-dlp/issues/720
+ # https://github.com/yt-dlp/yt-dlp/issues/302
+ test_selection({'playlistreverse': True}, INDICES[::-1])
+ test_selection({'playliststart': 2, 'playlistreverse': True}, INDICES[:0:-1])
+ test_selection({'playlist_items': '2,4', 'playlistreverse': True}, [4, 2])
+ test_selection({'playlist_items': '4,2'}, [4, 2])
+
+ # Tests for --playlist-items start:end:step
+ test_selection({'playlist_items': ':'}, INDICES, True)
+ test_selection({'playlist_items': '::1'}, INDICES, True)
+ test_selection({'playlist_items': '::-1'}, INDICES[::-1], True)
+ test_selection({'playlist_items': ':6'}, INDICES[:6])
+ test_selection({'playlist_items': ':-6'}, INDICES[:-5], True)
+ test_selection({'playlist_items': '-1:6:-2'}, INDICES[:4:-2], True)
+ test_selection({'playlist_items': '9:-6:-2'}, INDICES[8:3:-2], True)
+
+ test_selection({'playlist_items': '1:inf:2'}, INDICES[::2], True)
+ test_selection({'playlist_items': '-2:inf'}, INDICES[-2:], True)
+ test_selection({'playlist_items': ':inf:-1'}, [], True)
+ test_selection({'playlist_items': '0-2:2'}, [2])
+ test_selection({'playlist_items': '1-:2'}, INDICES[::2], True)
+ test_selection({'playlist_items': '0--2:2'}, INDICES[1:-1:2], True)
+
+ test_selection({'playlist_items': '10::3'}, [10], True)
+ test_selection({'playlist_items': '-1::3'}, [10], True)
+ test_selection({'playlist_items': '11::3'}, [], True)
+ test_selection({'playlist_items': '-15::2'}, INDICES[1::2], True)
+ test_selection({'playlist_items': '-15::15'}, [], True)
+
+ def test_do_not_override_ie_key_in_url_transparent(self):
+ ydl = YDL()
+
+ class Foo1IE(InfoExtractor):
+ _VALID_URL = r'foo1:'
+
+ def _real_extract(self, url):
+ return {
+ '_type': 'url_transparent',
+ 'url': 'foo2:',
+ 'ie_key': 'Foo2',
+ 'title': 'foo1 title',
+ 'id': 'foo1_id',
+ }
+
+ class Foo2IE(InfoExtractor):
+ _VALID_URL = r'foo2:'
+
+ def _real_extract(self, url):
+ return {
+ '_type': 'url',
+ 'url': 'foo3:',
+ 'ie_key': 'Foo3',
+ }
+
+ class Foo3IE(InfoExtractor):
+ _VALID_URL = r'foo3:'
+
+ def _real_extract(self, url):
+ return _make_result([{'url': TEST_URL}], title='foo3 title')
+
+ ydl.add_info_extractor(Foo1IE(ydl))
+ ydl.add_info_extractor(Foo2IE(ydl))
+ ydl.add_info_extractor(Foo3IE(ydl))
+ ydl.extract_info('foo1:')
+ downloaded = ydl.downloaded_info_dicts[0]
+ self.assertEqual(downloaded['url'], TEST_URL)
+ self.assertEqual(downloaded['title'], 'foo1 title')
+ self.assertEqual(downloaded['id'], 'testid')
+ self.assertEqual(downloaded['extractor'], 'testex')
+ self.assertEqual(downloaded['extractor_key'], 'TestEx')
+
+ # Test case for https://github.com/ytdl-org/youtube-dl/issues/27064
+ def test_ignoreerrors_for_playlist_with_url_transparent_iterable_entries(self):
+
+ class _YDL(YDL):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ def trouble(self, s, tb=None):
+ pass
+
+ ydl = _YDL({
+ 'format': 'extra',
+ 'ignoreerrors': True,
+ })
+
+ class VideoIE(InfoExtractor):
+ _VALID_URL = r'video:(?P<id>\d+)'
+
+ def _real_extract(self, url):
+ video_id = self._match_id(url)
+ formats = [{
+ 'format_id': 'default',
+ 'url': 'url:',
+ }]
+ if video_id == '0':
+ raise ExtractorError('foo')
+ if video_id == '2':
+ formats.append({
+ 'format_id': 'extra',
+ 'url': TEST_URL,
+ })
+ return {
+ 'id': video_id,
+ 'title': 'Video %s' % video_id,
+ 'formats': formats,
+ }
+
+ class PlaylistIE(InfoExtractor):
+ _VALID_URL = r'playlist:'
+
+ def _entries(self):
+ for n in range(3):
+ video_id = str(n)
+ yield {
+ '_type': 'url_transparent',
+ 'ie_key': VideoIE.ie_key(),
+ 'id': video_id,
+ 'url': 'video:%s' % video_id,
+ 'title': 'Video Transparent %s' % video_id,
+ }
+
+ def _real_extract(self, url):
+ return self.playlist_result(self._entries())
+
+ ydl.add_info_extractor(VideoIE(ydl))
+ ydl.add_info_extractor(PlaylistIE(ydl))
+ info = ydl.extract_info('playlist:')
+ entries = info['entries']
+ self.assertEqual(len(entries), 3)
+ self.assertTrue(entries[0] is None)
+ self.assertTrue(entries[1] is None)
+ self.assertEqual(len(ydl.downloaded_info_dicts), 1)
+ downloaded = ydl.downloaded_info_dicts[0]
+ entries[2].pop('requested_downloads', None)
+ self.assertEqual(entries[2], downloaded)
+ self.assertEqual(downloaded['url'], TEST_URL)
+ self.assertEqual(downloaded['title'], 'Video Transparent 2')
+ self.assertEqual(downloaded['id'], '2')
+ self.assertEqual(downloaded['extractor'], 'Video')
+ self.assertEqual(downloaded['extractor_key'], 'Video')
+
+ def test_header_cookies(self):
+ from http.cookiejar import Cookie
+
+ ydl = FakeYDL()
+ ydl.report_warning = lambda *_, **__: None
+
+ def cookie(name, value, version=None, domain='', path='', secure=False, expires=None):
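+ # Build a bare http.cookiejar.Cookie carrying only the attributes
+ # that the assertions below compare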
+ return Cookie(
+ version or 0, name, value, None, False,
+ domain, bool(domain), bool(domain), path, bool(path),
+ secure, expires, False, None, None, rest={})
+
+ _test_url = 'https://yt.dlp/test'
+
+ def test(encoded_cookies, cookies, *, headers=False, round_trip=None, error_re=None):
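+ # Load `encoded_cookies` into a fresh cookiejar, compare its contents with
+ # `cookies`, and (for non-header input) check the re-serialized value against
+ # `round_trip` (or the input itself); `error_re` asserts that loading fails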
+ def _test():
+ ydl.cookiejar.clear()
+ ydl._load_cookies(encoded_cookies, autoscope=headers)
+ if headers:
+ ydl._apply_header_cookies(_test_url)
+ data = {'url': _test_url}
+ ydl._calc_headers(data)
+ self.assertCountEqual(
+ map(vars, ydl.cookiejar), map(vars, cookies),
+ 'Extracted cookiejar.Cookie is not the same')
+ if not headers:
+ self.assertEqual(
+ data.get('cookies'), round_trip or encoded_cookies,
+ 'Cookie is not the same as round trip')
+ ydl.__dict__['_YoutubeDL__header_cookies'] = []
+
+ with self.subTest(msg=encoded_cookies):
+ if not error_re:
+ _test()
+ return
+ with self.assertRaisesRegex(Exception, error_re):
+ _test()
+
+ test('test=value; Domain=.yt.dlp', [cookie('test', 'value', domain='.yt.dlp')])
+ test('test=value', [cookie('test', 'value')], error_re=r'Unscoped cookies are not allowed')
+ test('cookie1=value1; Domain=.yt.dlp; Path=/test; cookie2=value2; Domain=.yt.dlp; Path=/', [
+ cookie('cookie1', 'value1', domain='.yt.dlp', path='/test'),
+ cookie('cookie2', 'value2', domain='.yt.dlp', path='/')])
+ test('test=value; Domain=.yt.dlp; Path=/test; Secure; Expires=9999999999', [
+ cookie('test', 'value', domain='.yt.dlp', path='/test', secure=True, expires=9999999999)])
+ test('test="value; "; path=/test; domain=.yt.dlp', [
+ cookie('test', 'value; ', domain='.yt.dlp', path='/test')],
+ round_trip='test="value\\073 "; Domain=.yt.dlp; Path=/test')
+ test('name=; Domain=.yt.dlp', [cookie('name', '', domain='.yt.dlp')],
+ round_trip='name=""; Domain=.yt.dlp')
+
+ test('test=value', [cookie('test', 'value', domain='.yt.dlp')], headers=True)
+ test('cookie1=value; Domain=.yt.dlp; cookie2=value', [], headers=True, error_re=r'Invalid syntax')
+ ydl.deprecated_feature = ydl.report_error
+ test('test=value', [], headers=True, error_re=r'Passing cookies as a header is a potential security risk')
+
+ def test_infojson_cookies(self):
+ TEST_FILE = 'test_infojson_cookies.info.json'
+ TEST_URL = 'https://example.com/example.mp4'
+ COOKIES = 'a=b; Domain=.example.com; c=d; Domain=.example.com'
+ COOKIE_HEADER = {'Cookie': 'a=b; c=d'}
+
+ ydl = FakeYDL()
+ ydl.process_info = lambda x: ydl._write_info_json('test', x, TEST_FILE)
+
+ def make_info(info_header_cookies=False, fmts_header_cookies=False, cookies_field=False):
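+ # Build an info dict with a single format; cookies can be attached via the
+ # info-level Cookie header, the format-level Cookie header and/or the
+ # format's 'cookies' field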
+ fmt = {'url': TEST_URL}
+ if fmts_header_cookies:
+ fmt['http_headers'] = COOKIE_HEADER
+ if cookies_field:
+ fmt['cookies'] = COOKIES
+ return _make_result([fmt], http_headers=COOKIE_HEADER if info_header_cookies else None)
+
+ def test(initial_info, note):
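+ # Round-trip the info dict through process_ie_result -> info.json -> sanitize_info
+ # -> process_ie_result, verifying each stage moves Cookie headers into the
+ # 'cookies' field and loads them into the cookiejar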
+ result = {}
+ result['processed'] = ydl.process_ie_result(initial_info)
+ self.assertTrue(ydl.cookiejar.get_cookies_for_url(TEST_URL),
+ msg=f'No cookies set in cookiejar after initial process when {note}')
+ ydl.cookiejar.clear()
+ with open(TEST_FILE) as infojson:
+ result['loaded'] = ydl.sanitize_info(json.load(infojson), True)
+ result['final'] = ydl.process_ie_result(result['loaded'].copy(), download=False)
+ self.assertTrue(ydl.cookiejar.get_cookies_for_url(TEST_URL),
+ msg=f'No cookies set in cookiejar after final process when {note}')
+ ydl.cookiejar.clear()
+ for key in ('processed', 'loaded', 'final'):
+ info = result[key]
+ self.assertIsNone(
+ traverse_obj(info, ((None, ('formats', 0)), 'http_headers', 'Cookie'), casesense=False, get_all=False),
+ msg=f'Cookie header not removed in {key} result when {note}')
+ self.assertEqual(
+ traverse_obj(info, ((None, ('formats', 0)), 'cookies'), get_all=False), COOKIES,
+ msg=f'No cookies field found in {key} result when {note}')
+
+ test({'url': TEST_URL, 'http_headers': COOKIE_HEADER, 'id': '1', 'title': 'x'}, 'no formats field')
+ test(make_info(info_header_cookies=True), 'info_dict header cookies')
+ test(make_info(fmts_header_cookies=True), 'format header cookies')
+ test(make_info(info_header_cookies=True, fmts_header_cookies=True), 'info_dict and format header cookies')
+ test(make_info(info_header_cookies=True, fmts_header_cookies=True, cookies_field=True), 'all cookies fields')
+ test(make_info(cookies_field=True), 'cookies format field')
+ test({'url': TEST_URL, 'cookies': COOKIES, 'id': '1', 'title': 'x'}, 'info_dict cookies field only')
+
+ try_rm(TEST_FILE)
+
+ def test_add_headers_cookie(self):
+ def check_for_cookie_header(result):
+ return traverse_obj(result, ((None, ('formats', 0)), 'http_headers', 'Cookie'), casesense=False, get_all=False)
+
+ ydl = FakeYDL({'http_headers': {'Cookie': 'a=b'}})
+ ydl._apply_header_cookies(_make_result([])['webpage_url']) # Scope to input webpage URL: .example.com
+
+ fmt = {'url': 'https://example.com/video.mp4'}
+ result = ydl.process_ie_result(_make_result([fmt]), download=False)
+ self.assertIsNone(check_for_cookie_header(result), msg='http_headers cookies in result info_dict')
+ self.assertEqual(result.get('cookies'), 'a=b; Domain=.example.com', msg='No cookies were set in cookies field')
+ self.assertIn('a=b', ydl.cookiejar.get_cookie_header(fmt['url']), msg='No cookies were set in cookiejar')
+
+ fmt = {'url': 'https://wrong.com/video.mp4'}
+ result = ydl.process_ie_result(_make_result([fmt]), download=False)
+ self.assertIsNone(check_for_cookie_header(result), msg='http_headers cookies for wrong domain')
+ self.assertFalse(result.get('cookies'), msg='Cookies set in cookies field for wrong domain')
+ self.assertFalse(ydl.cookiejar.get_cookie_header(fmt['url']), msg='Cookies set in cookiejar for wrong domain')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_YoutubeDLCookieJar.py b/test/test_YoutubeDLCookieJar.py
new file mode 100644
index 0000000..fdb9bae
--- /dev/null
+++ b/test/test_YoutubeDLCookieJar.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import re
+import tempfile
+
+from yt_dlp.cookies import YoutubeDLCookieJar
+
+
+class TestYoutubeDLCookieJar(unittest.TestCase):
+ def test_keep_session_cookies(self):
+ cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/session_cookies.txt')
+ cookiejar.load()
+ tf = tempfile.NamedTemporaryFile(delete=False)
+ try:
+ cookiejar.save(filename=tf.name)
+ temp = tf.read().decode()
+ self.assertTrue(re.search(
+ r'www\.foobar\.foobar\s+FALSE\s+/\s+TRUE\s+0\s+YoutubeDLExpiresEmpty\s+YoutubeDLExpiresEmptyValue', temp))
+ self.assertTrue(re.search(
+ r'www\.foobar\.foobar\s+FALSE\s+/\s+TRUE\s+0\s+YoutubeDLExpires0\s+YoutubeDLExpires0Value', temp))
+ finally:
+ tf.close()
+ os.remove(tf.name)
+
+ def test_strip_httponly_prefix(self):
+ cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/httponly_cookies.txt')
+ cookiejar.load()
+
+ def assert_cookie_has_value(key):
+ self.assertEqual(cookiejar._cookies['www.foobar.foobar']['/'][key].value, key + '_VALUE')
+
+ assert_cookie_has_value('HTTPONLY_COOKIE')
+ assert_cookie_has_value('JS_ACCESSIBLE_COOKIE')
+
+ def test_malformed_cookies(self):
+ cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/malformed_cookies.txt')
+ cookiejar.load()
+ # Cookies should be empty since all malformed cookie file entries
+ # will be ignored
+ self.assertFalse(cookiejar._cookies)
+
+ def test_get_cookie_header(self):
+ cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/httponly_cookies.txt')
+ cookiejar.load()
+ header = cookiejar.get_cookie_header('https://www.foobar.foobar')
+ self.assertIn('HTTPONLY_COOKIE', header)
+
+ def test_get_cookies_for_url(self):
+ cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/session_cookies.txt')
+ cookiejar.load()
+ cookies = cookiejar.get_cookies_for_url('https://www.foobar.foobar/')
+ self.assertEqual(len(cookies), 2)
+ cookies = cookiejar.get_cookies_for_url('https://foobar.foobar/')
+ self.assertFalse(cookies)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_aes.py b/test/test_aes.py
new file mode 100644
index 0000000..a26abfd
--- /dev/null
+++ b/test/test_aes.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import base64
+
+from yt_dlp.aes import (
+ aes_cbc_decrypt,
+ aes_cbc_decrypt_bytes,
+ aes_cbc_encrypt,
+ aes_ctr_decrypt,
+ aes_ctr_encrypt,
+ aes_decrypt,
+ aes_decrypt_text,
+ aes_ecb_decrypt,
+ aes_ecb_encrypt,
+ aes_encrypt,
+ aes_gcm_decrypt_and_verify,
+ aes_gcm_decrypt_and_verify_bytes,
+ key_expansion,
+ pad_block,
+)
+from yt_dlp.dependencies import Cryptodome
+from yt_dlp.utils import bytes_to_intlist, intlist_to_bytes
+
+# the encrypted data can be generated with 'devscripts/generate_aes_testdata.py'
+
+
+class TestAES(unittest.TestCase):
+ def setUp(self):
+ self.key = self.iv = [0x20, 0x15] + 14 * [0]
+ self.secret_msg = b'Secret message goes here'
+
+ def test_encrypt(self):
+ msg = b'message'
+ key = list(range(16))
+ encrypted = aes_encrypt(bytes_to_intlist(msg), key)
+ decrypted = intlist_to_bytes(aes_decrypt(encrypted, key))
+ self.assertEqual(decrypted, msg)
+
+ def test_cbc_decrypt(self):
+ data = b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\x27\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd'
+ decrypted = intlist_to_bytes(aes_cbc_decrypt(bytes_to_intlist(data), self.key, self.iv))
+ self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
+ if Cryptodome.AES:
+ decrypted = aes_cbc_decrypt_bytes(data, intlist_to_bytes(self.key), intlist_to_bytes(self.iv))
+ self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
+
+ def test_cbc_encrypt(self):
+ data = bytes_to_intlist(self.secret_msg)
+ encrypted = intlist_to_bytes(aes_cbc_encrypt(data, self.key, self.iv))
+ self.assertEqual(
+ encrypted,
+ b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\'\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd')
+
+ def test_ctr_decrypt(self):
+ data = bytes_to_intlist(b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
+ decrypted = intlist_to_bytes(aes_ctr_decrypt(data, self.key, self.iv))
+ self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
+
+ def test_ctr_encrypt(self):
+ data = bytes_to_intlist(self.secret_msg)
+ encrypted = intlist_to_bytes(aes_ctr_encrypt(data, self.key, self.iv))
+ self.assertEqual(
+ encrypted,
+ b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
+
+ def test_gcm_decrypt(self):
+ data = b'\x159Y\xcf5eud\x90\x9c\x85&]\x14\x1d\x0f.\x08\xb4T\xe4/\x17\xbd'
+ authentication_tag = b'\xe8&I\x80rI\x07\x9d}YWuU@:e'
+
+ decrypted = intlist_to_bytes(aes_gcm_decrypt_and_verify(
+ bytes_to_intlist(data), self.key, bytes_to_intlist(authentication_tag), self.iv[:12]))
+ self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
+ if Cryptodome.AES:
+ decrypted = aes_gcm_decrypt_and_verify_bytes(
+ data, intlist_to_bytes(self.key), authentication_tag, intlist_to_bytes(self.iv[:12]))
+ self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
+
+ def test_decrypt_text(self):
+ password = intlist_to_bytes(self.key).decode()
+ encrypted = base64.b64encode(
+ intlist_to_bytes(self.iv[:8])
+ + b'\x17\x15\x93\xab\x8d\x80V\xcdV\xe0\t\xcdo\xc2\xa5\xd8ksM\r\xe27N\xae'
+ ).decode()
+ decrypted = (aes_decrypt_text(encrypted, password, 16))
+ self.assertEqual(decrypted, self.secret_msg)
+
+ password = intlist_to_bytes(self.key).decode()
+ encrypted = base64.b64encode(
+ intlist_to_bytes(self.iv[:8])
+ + b'\x0b\xe6\xa4\xd9z\x0e\xb8\xb9\xd0\xd4i_\x85\x1d\x99\x98_\xe5\x80\xe7.\xbf\xa5\x83'
+ ).decode()
+ decrypted = (aes_decrypt_text(encrypted, password, 32))
+ self.assertEqual(decrypted, self.secret_msg)
+
+ def test_ecb_encrypt(self):
+ data = bytes_to_intlist(self.secret_msg)
+ encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key))
+ self.assertEqual(
+ encrypted,
+ b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
+
+ def test_ecb_decrypt(self):
+ data = bytes_to_intlist(b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
+ decrypted = intlist_to_bytes(aes_ecb_decrypt(data, self.key, self.iv))
+ self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
+
+ def test_key_expansion(self):
+ key = '4f6bdaa39e2f8cb07f5e722d9edef314'
+
+ self.assertEqual(key_expansion(bytes_to_intlist(bytearray.fromhex(key))), [
+ 0x4F, 0x6B, 0xDA, 0xA3, 0x9E, 0x2F, 0x8C, 0xB0, 0x7F, 0x5E, 0x72, 0x2D, 0x9E, 0xDE, 0xF3, 0x14,
+ 0x53, 0x66, 0x20, 0xA8, 0xCD, 0x49, 0xAC, 0x18, 0xB2, 0x17, 0xDE, 0x35, 0x2C, 0xC9, 0x2D, 0x21,
+ 0x8C, 0xBE, 0xDD, 0xD9, 0x41, 0xF7, 0x71, 0xC1, 0xF3, 0xE0, 0xAF, 0xF4, 0xDF, 0x29, 0x82, 0xD5,
+ 0x2D, 0xAD, 0xDE, 0x47, 0x6C, 0x5A, 0xAF, 0x86, 0x9F, 0xBA, 0x00, 0x72, 0x40, 0x93, 0x82, 0xA7,
+ 0xF9, 0xBE, 0x82, 0x4E, 0x95, 0xE4, 0x2D, 0xC8, 0x0A, 0x5E, 0x2D, 0xBA, 0x4A, 0xCD, 0xAF, 0x1D,
+ 0x54, 0xC7, 0x26, 0x98, 0xC1, 0x23, 0x0B, 0x50, 0xCB, 0x7D, 0x26, 0xEA, 0x81, 0xB0, 0x89, 0xF7,
+ 0x93, 0x60, 0x4E, 0x94, 0x52, 0x43, 0x45, 0xC4, 0x99, 0x3E, 0x63, 0x2E, 0x18, 0x8E, 0xEA, 0xD9,
+ 0xCA, 0xE7, 0x7B, 0x39, 0x98, 0xA4, 0x3E, 0xFD, 0x01, 0x9A, 0x5D, 0xD3, 0x19, 0x14, 0xB7, 0x0A,
+ 0xB0, 0x4E, 0x1C, 0xED, 0x28, 0xEA, 0x22, 0x10, 0x29, 0x70, 0x7F, 0xC3, 0x30, 0x64, 0xC8, 0xC9,
+ 0xE8, 0xA6, 0xC1, 0xE9, 0xC0, 0x4C, 0xE3, 0xF9, 0xE9, 0x3C, 0x9C, 0x3A, 0xD9, 0x58, 0x54, 0xF3,
+ 0xB4, 0x86, 0xCC, 0xDC, 0x74, 0xCA, 0x2F, 0x25, 0x9D, 0xF6, 0xB3, 0x1F, 0x44, 0xAE, 0xE7, 0xEC])
+
+ def test_pad_block(self):
+ block = [0x21, 0xA0, 0x43, 0xFF]
+
+ self.assertEqual(pad_block(block, 'pkcs7'),
+ block + [0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C])
+
+ self.assertEqual(pad_block(block, 'iso7816'),
+ block + [0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
+
+ self.assertEqual(pad_block(block, 'whitespace'),
+ block + [0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20])
+
+ self.assertEqual(pad_block(block, 'zero'),
+ block + [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
+
+ block = list(range(16))
+ for mode in ('pkcs7', 'iso7816', 'whitespace', 'zero'):
+ self.assertEqual(pad_block(block, mode), block, mode)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_age_restriction.py b/test/test_age_restriction.py
new file mode 100644
index 0000000..6810759
--- /dev/null
+++ b/test/test_age_restriction.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+from test.helper import is_download_test, try_rm
+from yt_dlp import YoutubeDL
+from yt_dlp.utils import DownloadError
+
+
+def _download_restricted(url, filename, age):
+ """ Returns true if the file has been downloaded """
+
+ params = {
+ 'age_limit': age,
+ 'skip_download': True,
+ 'writeinfojson': True,
+ 'outtmpl': '%(id)s.%(ext)s',
+ }
+ ydl = YoutubeDL(params)
+ ydl.add_default_info_extractors()
+ json_filename = os.path.splitext(filename)[0] + '.info.json'
+ try_rm(json_filename)
+ try:
+ ydl.download([url])
+ except DownloadError:
+ pass
+ else:
+ return os.path.exists(json_filename)
+ finally:
+ try_rm(json_filename)
+
+
+@is_download_test
+class TestAgeRestriction(unittest.TestCase):
+ def _assert_restricted(self, url, filename, age, old_age=None):
+ self.assertTrue(_download_restricted(url, filename, old_age))
+ self.assertFalse(_download_restricted(url, filename, age))
+
+ def test_youtube(self):
+ self._assert_restricted('HtVdAasjOgU', 'HtVdAasjOgU.mp4', 10)
+
+ def test_youporn(self):
+ self._assert_restricted(
+ 'https://www.youporn.com/watch/16715086/sex-ed-in-detention-18-asmr/',
+ '16715086.mp4', 2, old_age=25)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_all_urls.py b/test/test_all_urls.py
new file mode 100644
index 0000000..848c96f
--- /dev/null
+++ b/test/test_all_urls.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import collections
+
+from test.helper import gettestcases
+from yt_dlp.extractor import FacebookIE, YoutubeIE, gen_extractors
+
+
+class TestAllURLsMatching(unittest.TestCase):
+ def setUp(self):
+ self.ies = gen_extractors()
+
+ def matching_ies(self, url):
+ return [ie.IE_NAME for ie in self.ies if ie.suitable(url) and ie.IE_NAME != 'generic']
+
+ def assertMatch(self, url, ie_list):
+ self.assertEqual(self.matching_ies(url), ie_list)
+
+ def test_youtube_playlist_matching(self):
+ assertPlaylist = lambda url: self.assertMatch(url, ['youtube:playlist'])
+ assertTab = lambda url: self.assertMatch(url, ['youtube:tab'])
+ assertPlaylist('ECUl4u3cNGP61MdtwGTqZA0MreSaDybji8')
+ assertPlaylist('UUBABnxM4Ar9ten8Mdjj1j0Q') # 585
+ assertPlaylist('PL63F0C78739B09958')
+ assertTab('https://www.youtube.com/AsapSCIENCE')
+ assertTab('https://www.youtube.com/embedded')
+ assertTab('https://www.youtube.com/playlist?list=UUBABnxM4Ar9ten8Mdjj1j0Q')
+ assertTab('https://www.youtube.com/playlist?list=PLwP_SiAcdui0KVebT0mU9Apz359a4ubsC')
+ assertTab('https://www.youtube.com/watch?v=AV6J6_AeFEQ&playnext=1&list=PL4023E734DA416012') # 668
+ self.assertFalse('youtube:playlist' in self.matching_ies('PLtS2H6bU1M'))
+ # Top tracks
+ assertTab('https://www.youtube.com/playlist?list=MCUS.20142101')
+
+ def test_youtube_matching(self):
+ self.assertTrue(YoutubeIE.suitable('PLtS2H6bU1M'))
+ self.assertFalse(YoutubeIE.suitable('https://www.youtube.com/watch?v=AV6J6_AeFEQ&playnext=1&list=PL4023E734DA416012')) # 668
+ self.assertMatch('http://youtu.be/BaW_jenozKc', ['youtube'])
+ # self.assertMatch('http://www.youtube.com/v/BaW_jenozKc', ['youtube']) # /v/ is no longer valid
+ self.assertMatch('https://youtube.googleapis.com/v/BaW_jenozKc', ['youtube'])
+ self.assertMatch('http://www.cleanvideosearch.com/media/action/yt/watch?videoId=8v_4O44sfjM', ['youtube'])
+
+ def test_youtube_channel_matching(self):
+ assertChannel = lambda url: self.assertMatch(url, ['youtube:tab'])
+ assertChannel('https://www.youtube.com/channel/HCtnHdj3df7iM')
+ assertChannel('https://www.youtube.com/channel/HCtnHdj3df7iM?feature=gb_ch_rec')
+ assertChannel('https://www.youtube.com/channel/HCtnHdj3df7iM/videos')
+
+ def test_youtube_user_matching(self):
+ self.assertMatch('http://www.youtube.com/NASAgovVideo/videos', ['youtube:tab'])
+
+ def test_youtube_feeds(self):
+ self.assertMatch('https://www.youtube.com/feed/library', ['youtube:tab'])
+ self.assertMatch('https://www.youtube.com/feed/history', ['youtube:tab'])
+ self.assertMatch('https://www.youtube.com/feed/watch_later', ['youtube:tab'])
+ self.assertMatch('https://www.youtube.com/feed/subscriptions', ['youtube:tab'])
+
+ def test_youtube_search_matching(self):
+ self.assertMatch('http://www.youtube.com/results?search_query=making+mustard', ['youtube:search_url'])
+ self.assertMatch('https://www.youtube.com/results?baz=bar&search_query=youtube-dl+test+video&filters=video&lclk=video', ['youtube:search_url'])
+
+ def test_facebook_matching(self):
+ self.assertTrue(FacebookIE.suitable('https://www.facebook.com/Shiniknoh#!/photo.php?v=10153317450565268'))
+ self.assertTrue(FacebookIE.suitable('https://www.facebook.com/cindyweather?fref=ts#!/photo.php?v=10152183998945793'))
+
+ def test_no_duplicates(self):
+ ies = gen_extractors()
+ for tc in gettestcases(include_onlymatching=True):
+ url = tc['url']
+ for ie in ies:
+ if type(ie).__name__ in ('GenericIE', tc['name'] + 'IE'):
+ self.assertTrue(ie.suitable(url), f'{type(ie).__name__} should match URL {url!r}')
+ else:
+ self.assertFalse(
+ ie.suitable(url),
+ f'{type(ie).__name__} should not match URL {url!r} . That URL belongs to {tc["name"]}.')
+
+ def test_keywords(self):
+ self.assertMatch(':ytsubs', ['youtube:subscriptions'])
+ self.assertMatch(':ytsubscriptions', ['youtube:subscriptions'])
+ self.assertMatch(':ythistory', ['youtube:history'])
+
+ def test_vimeo_matching(self):
+ self.assertMatch('https://vimeo.com/channels/tributes', ['vimeo:channel'])
+ self.assertMatch('https://vimeo.com/channels/31259', ['vimeo:channel'])
+ self.assertMatch('https://vimeo.com/channels/31259/53576664', ['vimeo'])
+ self.assertMatch('https://vimeo.com/user7108434', ['vimeo:user'])
+ self.assertMatch('https://vimeo.com/user7108434/videos', ['vimeo:user'])
+ self.assertMatch('https://vimeo.com/user21297594/review/75524534/3c257a1b5d', ['vimeo:review'])
+
+ # https://github.com/ytdl-org/youtube-dl/issues/1930
+ def test_soundcloud_not_matching_sets(self):
+ self.assertMatch('http://soundcloud.com/floex/sets/gone-ep', ['soundcloud:set'])
+
+ def test_tumblr(self):
+ self.assertMatch('http://tatianamaslanydaily.tumblr.com/post/54196191430/orphan-black-dvd-extra-behind-the-scenes', ['Tumblr'])
+ self.assertMatch('http://tatianamaslanydaily.tumblr.com/post/54196191430', ['Tumblr'])
+
+ def test_pbs(self):
+ # https://github.com/ytdl-org/youtube-dl/issues/2350
+ self.assertMatch('http://video.pbs.org/viralplayer/2365173446/', ['pbs'])
+ self.assertMatch('http://video.pbs.org/widget/partnerplayer/980042464/', ['pbs'])
+
+ def test_no_duplicated_ie_names(self):
+ name_accu = collections.defaultdict(list)
+ for ie in self.ies:
+ name_accu[ie.IE_NAME.lower()].append(type(ie).__name__)
+ for (ie_name, ie_list) in name_accu.items():
+ self.assertEqual(
+ len(ie_list), 1,
+ f'Multiple extractors with the same IE_NAME "{ie_name}" ({", ".join(ie_list)})')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_cache.py b/test/test_cache.py
new file mode 100644
index 0000000..ce1624b
--- /dev/null
+++ b/test/test_cache.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import shutil
+
+from test.helper import FakeYDL
+from yt_dlp.cache import Cache
+
+
+def _is_empty(d):
+ return not bool(os.listdir(d))
+
+
+def _mkdir(d):
+ if not os.path.exists(d):
+ os.mkdir(d)
+
+
+class TestCache(unittest.TestCase):
+ def setUp(self):
+ TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+ TESTDATA_DIR = os.path.join(TEST_DIR, 'testdata')
+ _mkdir(TESTDATA_DIR)
+ self.test_dir = os.path.join(TESTDATA_DIR, 'cache_test')
+ self.tearDown()
+
+ def tearDown(self):
+ if os.path.exists(self.test_dir):
+ shutil.rmtree(self.test_dir)
+
+ def test_cache(self):
+ ydl = FakeYDL({
+ 'cachedir': self.test_dir,
+ })
+ c = Cache(ydl)
+ obj = {'x': 1, 'y': ['ä', '\\a', True]}
+ self.assertEqual(c.load('test_cache', 'k.'), None)
+ c.store('test_cache', 'k.', obj)
+ self.assertEqual(c.load('test_cache', 'k2'), None)
+ self.assertFalse(_is_empty(self.test_dir))
+ self.assertEqual(c.load('test_cache', 'k.'), obj)
+ self.assertEqual(c.load('test_cache', 'y'), None)
+ self.assertEqual(c.load('test_cache2', 'k.'), None)
+ c.remove()
+ self.assertFalse(os.path.exists(self.test_dir))
+ self.assertEqual(c.load('test_cache', 'k.'), None)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_compat.py b/test/test_compat.py
new file mode 100644
index 0000000..71ca7f9
--- /dev/null
+++ b/test/test_compat.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import struct
+
+from yt_dlp import compat
+from yt_dlp.compat import urllib # isort: split
+from yt_dlp.compat import (
+ compat_etree_fromstring,
+ compat_expanduser,
+ compat_urllib_parse_unquote,
+ compat_urllib_parse_urlencode,
+)
+from yt_dlp.compat.urllib.request import getproxies
+
+
+class TestCompat(unittest.TestCase):
+ def test_compat_passthrough(self):
+ with self.assertWarns(DeprecationWarning):
+ compat.compat_basestring
+
+ with self.assertWarns(DeprecationWarning):
+ compat.WINDOWS_VT_MODE
+
+ self.assertEqual(urllib.request.getproxies, getproxies)
+
+ with self.assertWarns(DeprecationWarning):
+ compat.compat_pycrypto_AES # Must not raise error
+
+ def test_compat_expanduser(self):
+ old_home = os.environ.get('HOME')
+ test_str = R'C:\Documents and Settings\тест\Application Data'
+ try:
+ os.environ['HOME'] = test_str
+ self.assertEqual(compat_expanduser('~'), test_str)
+ finally:
+ os.environ['HOME'] = old_home or ''
+
+ def test_compat_urllib_parse_unquote(self):
+ self.assertEqual(compat_urllib_parse_unquote('abc%20def'), 'abc def')
+ self.assertEqual(compat_urllib_parse_unquote('%7e/abc+def'), '~/abc+def')
+ self.assertEqual(compat_urllib_parse_unquote(''), '')
+ self.assertEqual(compat_urllib_parse_unquote('%'), '%')
+ self.assertEqual(compat_urllib_parse_unquote('%%'), '%%')
+ self.assertEqual(compat_urllib_parse_unquote('%%%'), '%%%')
+ self.assertEqual(compat_urllib_parse_unquote('%2F'), '/')
+ self.assertEqual(compat_urllib_parse_unquote('%2f'), '/')
+ self.assertEqual(compat_urllib_parse_unquote('%E6%B4%A5%E6%B3%A2'), '津波')
+ self.assertEqual(
+ compat_urllib_parse_unquote('''<meta property="og:description" content="%E2%96%81%E2%96%82%E2%96%83%E2%96%84%25%E2%96%85%E2%96%86%E2%96%87%E2%96%88" />
+%<a href="https://ar.wikipedia.org/wiki/%D8%AA%D8%B3%D9%88%D9%86%D8%A7%D9%85%D9%8A">%a'''),
+ '''<meta property="og:description" content="▁▂▃▄%▅▆▇█" />
+%<a href="https://ar.wikipedia.org/wiki/تسونامي">%a''')
+ self.assertEqual(
+ compat_urllib_parse_unquote('''%28%5E%E2%97%A3_%E2%97%A2%5E%29%E3%81%A3%EF%B8%BB%E3%83%87%E2%95%90%E4%B8%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%86%B6%I%Break%25Things%'''),
+ '''(^◣_◢^)っ︻デ═一 ⇀ ⇀ ⇀ ⇀ ⇀ ↶%I%Break%Things%''')
+
+ def test_compat_urllib_parse_unquote_plus(self):
+ self.assertEqual(urllib.parse.unquote_plus('abc%20def'), 'abc def')
+ self.assertEqual(urllib.parse.unquote_plus('%7e/abc+def'), '~/abc def')
+
+ def test_compat_urllib_parse_urlencode(self):
+ self.assertEqual(compat_urllib_parse_urlencode({'abc': 'def'}), 'abc=def')
+ self.assertEqual(compat_urllib_parse_urlencode({'abc': b'def'}), 'abc=def')
+ self.assertEqual(compat_urllib_parse_urlencode({b'abc': 'def'}), 'abc=def')
+ self.assertEqual(compat_urllib_parse_urlencode({b'abc': b'def'}), 'abc=def')
+ self.assertEqual(compat_urllib_parse_urlencode([('abc', 'def')]), 'abc=def')
+ self.assertEqual(compat_urllib_parse_urlencode([('abc', b'def')]), 'abc=def')
+ self.assertEqual(compat_urllib_parse_urlencode([(b'abc', 'def')]), 'abc=def')
+ self.assertEqual(compat_urllib_parse_urlencode([(b'abc', b'def')]), 'abc=def')
+
+ def test_compat_etree_fromstring(self):
+ xml = '''
+ <root foo="bar" spam="中文">
+ <normal>foo</normal>
+ <chinese>中文</chinese>
+ <foo><bar>spam</bar></foo>
+ </root>
+ '''
+ doc = compat_etree_fromstring(xml.encode())
+ self.assertTrue(isinstance(doc.attrib['foo'], str))
+ self.assertTrue(isinstance(doc.attrib['spam'], str))
+ self.assertTrue(isinstance(doc.find('normal').text, str))
+ self.assertTrue(isinstance(doc.find('chinese').text, str))
+ self.assertTrue(isinstance(doc.find('foo/bar').text, str))
+
+ def test_compat_etree_fromstring_doctype(self):
+ xml = '''<?xml version="1.0"?>
+<!DOCTYPE smil PUBLIC "-//W3C//DTD SMIL 2.0//EN" "http://www.w3.org/2001/SMIL20/SMIL20.dtd">
+<smil xmlns="http://www.w3.org/2001/SMIL20/Language"></smil>'''
+ compat_etree_fromstring(xml)
+
+ def test_struct_unpack(self):
+ self.assertEqual(struct.unpack('!B', b'\x00'), (0,))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_config.py b/test/test_config.py
new file mode 100644
index 0000000..a393b65
--- /dev/null
+++ b/test/test_config.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+import unittest.mock
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import contextlib
+import itertools
+from pathlib import Path
+
+from yt_dlp.compat import compat_expanduser
+from yt_dlp.options import create_parser, parseOpts
+from yt_dlp.utils import Config, get_executable_path
+
+ENVIRON_DEFAULTS = {
+ 'HOME': None,
+ 'XDG_CONFIG_HOME': '/_xdg_config_home/',
+ 'USERPROFILE': 'C:/Users/testing/',
+ 'APPDATA': 'C:/Users/testing/AppData/Roaming/',
+ 'HOMEDRIVE': 'C:/',
+ 'HOMEPATH': 'Users/testing/',
+}
+
+
+@contextlib.contextmanager
+def set_environ(**kwargs):
+ saved_environ = os.environ.copy()
+
+ for name, value in {**ENVIRON_DEFAULTS, **kwargs}.items():
+ if value is None:
+ os.environ.pop(name, None)
+ else:
+ os.environ[name] = value
+
+ yield
+
+ os.environ.clear()
+ os.environ.update(saved_environ)
+
+
+def _generate_expected_groups():
+ xdg_config_home = os.getenv('XDG_CONFIG_HOME') or compat_expanduser('~/.config')
+ appdata_dir = os.getenv('appdata')
+ home_dir = compat_expanduser('~')
+ return {
+ 'Portable': [
+ Path(get_executable_path(), 'yt-dlp.conf'),
+ ],
+ 'Home': [
+ Path('yt-dlp.conf'),
+ ],
+ 'User': [
+ Path(xdg_config_home, 'yt-dlp.conf'),
+ Path(xdg_config_home, 'yt-dlp', 'config'),
+ Path(xdg_config_home, 'yt-dlp', 'config.txt'),
+ *((
+ Path(appdata_dir, 'yt-dlp.conf'),
+ Path(appdata_dir, 'yt-dlp', 'config'),
+ Path(appdata_dir, 'yt-dlp', 'config.txt'),
+ ) if appdata_dir else ()),
+ Path(home_dir, 'yt-dlp.conf'),
+ Path(home_dir, 'yt-dlp.conf.txt'),
+ Path(home_dir, '.yt-dlp', 'config'),
+ Path(home_dir, '.yt-dlp', 'config.txt'),
+ ],
+ 'System': [
+ Path('/etc/yt-dlp.conf'),
+ Path('/etc/yt-dlp/config'),
+ Path('/etc/yt-dlp/config.txt'),
+ ]
+ }
+
+
+class TestConfig(unittest.TestCase):
+ maxDiff = None
+
+ @set_environ()
+ def test_config__ENVIRON_DEFAULTS_sanity(self):
+ expected = make_expected()
+ self.assertCountEqual(
+ set(expected), expected,
+ 'ENVIRON_DEFAULTS produces non-unique names')
+
+ def test_config_all_environ_values(self):
+ for name, value in ENVIRON_DEFAULTS.items():
+ for new_value in (None, '', '.', value or '/some/dir'):
+ with set_environ(**{name: new_value}):
+ self._simple_grouping_test()
+
+ def test_config_default_expected_locations(self):
+ files, _ = self._simple_config_test()
+ self.assertEqual(
+ files, make_expected(),
+ 'Not all expected locations have been checked')
+
+ def test_config_default_grouping(self):
+ self._simple_grouping_test()
+
+ def _simple_grouping_test(self):
+ expected_groups = make_expected_groups()
+ for name, group in expected_groups.items():
+ for index, existing_path in enumerate(group):
+ result, opts = self._simple_config_test(existing_path)
+ expected = expected_from_expected_groups(expected_groups, existing_path)
+ self.assertEqual(
+ result, expected,
+ f'The checked locations do not match the expected ({name}, {index})')
+ self.assertEqual(
+ opts.outtmpl['default'], '1',
+ f'The used result value was incorrect ({name}, {index})')
+
+ def _simple_config_test(self, *stop_paths):
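+ # Mock Config.read_file to record every config path that is probed;
+ # paths listed in `stop_paths` pretend to exist and supply '-o <count>'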
+ encountered = 0
+ paths = []
+
+ def read_file(filename, default=[]):
+ nonlocal encountered
+ path = Path(filename)
+ paths.append(path)
+ if path in stop_paths:
+ encountered += 1
+ return ['-o', f'{encountered}']
+
+ with ConfigMock(read_file):
+ _, opts, _ = parseOpts([], False)
+
+ return paths, opts
+
+ @set_environ()
+ def test_config_early_exit_commandline(self):
+ self._early_exit_test(0, '--ignore-config')
+
+ @set_environ()
+ def test_config_early_exit_files(self):
+ for index, _ in enumerate(make_expected(), 1):
+ self._early_exit_test(index)
+
+ def _early_exit_test(self, allowed_reads, *args):
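+ # The `allowed_reads`-th config read returns --ignore-config (0 means it comes
+ # from the command line); any read beyond that means later configs were not skipped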
+ reads = 0
+
+ def read_file(filename, default=[]):
+ nonlocal reads
+ reads += 1
+
+ if reads > allowed_reads:
+ self.fail('The remaining config was not ignored')
+ elif reads == allowed_reads:
+ return ['--ignore-config']
+
+ with ConfigMock(read_file):
+ parseOpts(args, False)
+
+ @set_environ()
+ def test_config_override_commandline(self):
+ self._override_test(0, '-o', 'pass')
+
+ @set_environ()
+ def test_config_override_files(self):
+ for index, _ in enumerate(make_expected(), 1):
+ self._override_test(index)
+
+ def _override_test(self, start_index, *args):
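+ # The `start_index`-th config read sets '-o pass' (0 means it is passed on the
+ # command line); all later reads set '-o fail', so earlier groups must win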
+ index = 0
+
+ def read_file(filename, default=[]):
+ nonlocal index
+ index += 1
+
+ if index > start_index:
+ return ['-o', 'fail']
+ elif index == start_index:
+ return ['-o', 'pass']
+
+ with ConfigMock(read_file):
+ _, opts, _ = parseOpts(args, False)
+
+ self.assertEqual(
+ opts.outtmpl['default'], 'pass',
+ 'The earlier group did not override the later ones')
+
+
+@contextlib.contextmanager
+def ConfigMock(read_file=None):
+ with unittest.mock.patch('yt_dlp.options.Config') as mock:
+ mock.return_value = Config(create_parser())
+ if read_file is not None:
+ mock.read_file = read_file
+
+ yield mock
+
+
+def make_expected(*filepaths):
+ return expected_from_expected_groups(_generate_expected_groups(), *filepaths)
+
+
+def make_expected_groups(*filepaths):
+ return _filter_expected_groups(_generate_expected_groups(), filepaths)
+
+
+def expected_from_expected_groups(expected_groups, *filepaths):
+ return list(itertools.chain.from_iterable(
+ _filter_expected_groups(expected_groups, filepaths).values()))
+
+
+def _filter_expected_groups(expected, filepaths):
+ if not filepaths:
+ return expected
+
+ result = {}
+ for group, paths in expected.items():
+ new_paths = []
+ for path in paths:
+ new_paths.append(path)
+ if path in filepaths:
+ break
+
+ result[group] = new_paths
+
+ return result
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_cookies.py b/test/test_cookies.py
new file mode 100644
index 0000000..5282ef6
--- /dev/null
+++ b/test/test_cookies.py
@@ -0,0 +1,306 @@
+import unittest
+from datetime import datetime, timezone
+
+from yt_dlp import cookies
+from yt_dlp.cookies import (
+ LenientSimpleCookie,
+ LinuxChromeCookieDecryptor,
+ MacChromeCookieDecryptor,
+ WindowsChromeCookieDecryptor,
+ _get_linux_desktop_environment,
+ _LinuxDesktopEnvironment,
+ parse_safari_cookies,
+ pbkdf2_sha1,
+)
+
+
+class Logger:
+ def debug(self, message, *args, **kwargs):
+ print(f'[verbose] {message}')
+
+ def info(self, message, *args, **kwargs):
+ print(message)
+
+ def warning(self, message, *args, **kwargs):
+ self.error(message)
+
+ def error(self, message, *args, **kwargs):
+ raise Exception(message)
+
+
+class MonkeyPatch:
+ def __init__(self, module, temporary_values):
+ self._module = module
+ self._temporary_values = temporary_values
+ self._backup_values = {}
+
+ def __enter__(self):
+ for name, temp_value in self._temporary_values.items():
+ self._backup_values[name] = getattr(self._module, name)
+ setattr(self._module, name, temp_value)
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ for name, backup_value in self._backup_values.items():
+ setattr(self._module, name, backup_value)
+
+
+class TestCookies(unittest.TestCase):
+ def test_get_desktop_environment(self):
+ """ based on https://chromium.googlesource.com/chromium/src/+/refs/heads/main/base/nix/xdg_util_unittest.cc """
+ test_cases = [
+ ({}, _LinuxDesktopEnvironment.OTHER),
+ ({'DESKTOP_SESSION': 'my_custom_de'}, _LinuxDesktopEnvironment.OTHER),
+ ({'XDG_CURRENT_DESKTOP': 'my_custom_de'}, _LinuxDesktopEnvironment.OTHER),
+
+ ({'DESKTOP_SESSION': 'gnome'}, _LinuxDesktopEnvironment.GNOME),
+ ({'DESKTOP_SESSION': 'mate'}, _LinuxDesktopEnvironment.GNOME),
+ ({'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
+ ({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
+ ({'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
+
+ ({'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
+ ({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE3),
+ ({'KDE_FULL_SESSION': 1, 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
+
+ ({'XDG_CURRENT_DESKTOP': 'X-Cinnamon'}, _LinuxDesktopEnvironment.CINNAMON),
+ ({'XDG_CURRENT_DESKTOP': 'Deepin'}, _LinuxDesktopEnvironment.DEEPIN),
+ ({'XDG_CURRENT_DESKTOP': 'GNOME'}, _LinuxDesktopEnvironment.GNOME),
+ ({'XDG_CURRENT_DESKTOP': 'GNOME:GNOME-Classic'}, _LinuxDesktopEnvironment.GNOME),
+ ({'XDG_CURRENT_DESKTOP': 'GNOME : GNOME-Classic'}, _LinuxDesktopEnvironment.GNOME),
+
+ ({'XDG_CURRENT_DESKTOP': 'Unity', 'DESKTOP_SESSION': 'gnome-fallback'}, _LinuxDesktopEnvironment.GNOME),
+ ({'XDG_CURRENT_DESKTOP': 'KDE', 'KDE_SESSION_VERSION': '5'}, _LinuxDesktopEnvironment.KDE5),
+ ({'XDG_CURRENT_DESKTOP': 'KDE', 'KDE_SESSION_VERSION': '6'}, _LinuxDesktopEnvironment.KDE6),
+ ({'XDG_CURRENT_DESKTOP': 'KDE'}, _LinuxDesktopEnvironment.KDE4),
+ ({'XDG_CURRENT_DESKTOP': 'Pantheon'}, _LinuxDesktopEnvironment.PANTHEON),
+ ({'XDG_CURRENT_DESKTOP': 'UKUI'}, _LinuxDesktopEnvironment.UKUI),
+ ({'XDG_CURRENT_DESKTOP': 'Unity'}, _LinuxDesktopEnvironment.UNITY),
+ ({'XDG_CURRENT_DESKTOP': 'Unity:Unity7'}, _LinuxDesktopEnvironment.UNITY),
+ ({'XDG_CURRENT_DESKTOP': 'Unity:Unity8'}, _LinuxDesktopEnvironment.UNITY),
+ ]
+
+ for env, expected_desktop_environment in test_cases:
+ self.assertEqual(_get_linux_desktop_environment(env, Logger()), expected_desktop_environment)
+
+ def test_chrome_cookie_decryptor_linux_derive_key(self):
+ key = LinuxChromeCookieDecryptor.derive_key(b'abc')
+ self.assertEqual(key, b'7\xa1\xec\xd4m\xfcA\xc7\xb19Z\xd0\x19\xdcM\x17')
+
+ def test_chrome_cookie_decryptor_mac_derive_key(self):
+ key = MacChromeCookieDecryptor.derive_key(b'abc')
+ self.assertEqual(key, b'Y\xe2\xc0\xd0P\xf6\xf4\xe1l\xc1\x8cQ\xcb|\xcdY')
+
+ def test_chrome_cookie_decryptor_linux_v10(self):
+ with MonkeyPatch(cookies, {'_get_linux_keyring_password': lambda *args, **kwargs: b''}):
+ encrypted_value = b'v10\xccW%\xcd\xe6\xe6\x9fM" \xa7\xb0\xca\xe4\x07\xd6'
+ value = 'USD'
+ decryptor = LinuxChromeCookieDecryptor('Chrome', Logger())
+ self.assertEqual(decryptor.decrypt(encrypted_value), value)
+
+ def test_chrome_cookie_decryptor_linux_v11(self):
+ with MonkeyPatch(cookies, {'_get_linux_keyring_password': lambda *args, **kwargs: b''}):
+ encrypted_value = b'v11#\x81\x10>`w\x8f)\xc0\xb2\xc1\r\xf4\x1al\xdd\x93\xfd\xf8\xf8N\xf2\xa9\x83\xf1\xe9o\x0elVQd'
+ value = 'tz=Europe.London'
+ decryptor = LinuxChromeCookieDecryptor('Chrome', Logger())
+ self.assertEqual(decryptor.decrypt(encrypted_value), value)
+
+ def test_chrome_cookie_decryptor_windows_v10(self):
+ with MonkeyPatch(cookies, {
+ '_get_windows_v10_key': lambda *args, **kwargs: b'Y\xef\xad\xad\xeerp\xf0Y\xe6\x9b\x12\xc2<z\x16]\n\xbb\xb8\xcb\xd7\x9bA\xc3\x14e\x99{\xd6\xf4&'
+ }):
+ encrypted_value = b'v10T\xb8\xf3\xb8\x01\xa7TtcV\xfc\x88\xb8\xb8\xef\x05\xb5\xfd\x18\xc90\x009\xab\xb1\x893\x85)\x87\xe1\xa9-\xa3\xad='
+ value = '32101439'
+ decryptor = WindowsChromeCookieDecryptor('', Logger())
+ self.assertEqual(decryptor.decrypt(encrypted_value), value)
+
+ def test_chrome_cookie_decryptor_mac_v10(self):
+ with MonkeyPatch(cookies, {'_get_mac_keyring_password': lambda *args, **kwargs: b'6eIDUdtKAacvlHwBVwvg/Q=='}):
+ encrypted_value = b'v10\xb3\xbe\xad\xa1[\x9fC\xa1\x98\xe0\x9a\x01\xd9\xcf\xbfc'
+ value = '2021-06-01-22'
+ decryptor = MacChromeCookieDecryptor('', Logger())
+ self.assertEqual(decryptor.decrypt(encrypted_value), value)
+
+ def test_safari_cookie_parsing(self):
+ cookies = \
+ b'cook\x00\x00\x00\x01\x00\x00\x00i\x00\x00\x01\x00\x01\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00Y' \
+ b'\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x008\x00\x00\x00B\x00\x00\x00F\x00\x00\x00H' \
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x03\xa5>\xc3A\x00\x00\x80\xc3\x07:\xc3A' \
+ b'localhost\x00foo\x00/\x00test%20%3Bcookie\x00\x00\x00\x054\x07\x17 \x05\x00\x00\x00Kbplist00\xd1\x01' \
+ b'\x02_\x10\x18NSHTTPCookieAcceptPolicy\x10\x02\x08\x0b&\x00\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00' \
+ b'\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00('
+
+ jar = parse_safari_cookies(cookies)
+ self.assertEqual(len(jar), 1)
+ cookie = list(jar)[0]
+ self.assertEqual(cookie.domain, 'localhost')
+ self.assertEqual(cookie.port, None)
+ self.assertEqual(cookie.path, '/')
+ self.assertEqual(cookie.name, 'foo')
+ self.assertEqual(cookie.value, 'test%20%3Bcookie')
+ self.assertFalse(cookie.secure)
+ expected_expiration = datetime(2021, 6, 18, 21, 39, 19, tzinfo=timezone.utc)
+ self.assertEqual(cookie.expires, int(expected_expiration.timestamp()))
+
+ def test_pbkdf2_sha1(self):
+ key = pbkdf2_sha1(b'peanuts', b' ' * 16, 1, 16)
+ self.assertEqual(key, b'g\xe1\x8e\x0fQ\x1c\x9b\xf3\xc9`!\xaa\x90\xd9\xd34')
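+        # pbkdf2_sha1 should match the stdlib implementation, i.e.
+        # hashlib.pbkdf2_hmac('sha1', b'peanuts', b' ' * 16, 1, 16) yields the same key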
+
+
+class TestLenientSimpleCookie(unittest.TestCase):
+ def _run_tests(self, *cases):
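+        # Each case is (message, raw_cookie, expected); an expected value is either a
+        # plain string or a (value, attributes) pair, where attributes holds the
+        # non-empty morsel attributes (e.g. {'path': '/acme', 'secure': True})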
+ for message, raw_cookie, expected in cases:
+ cookie = LenientSimpleCookie(raw_cookie)
+
+ with self.subTest(message, expected=expected):
+ self.assertEqual(cookie.keys(), expected.keys(), message)
+
+ for key, expected_value in expected.items():
+ morsel = cookie[key]
+ if isinstance(expected_value, tuple):
+ expected_value, expected_attributes = expected_value
+ else:
+ expected_attributes = {}
+
+ attributes = {
+ key: value
+ for key, value in dict(morsel).items()
+ if value != ""
+ }
+ self.assertEqual(attributes, expected_attributes, message)
+
+ self.assertEqual(morsel.value, expected_value, message)
+
+ def test_parsing(self):
+ self._run_tests(
+ # Copied from https://github.com/python/cpython/blob/v3.10.7/Lib/test/test_http_cookies.py
+ (
+ "Test basic cookie",
+ "chips=ahoy; vienna=finger",
+ {"chips": "ahoy", "vienna": "finger"},
+ ),
+ (
+ "Test quoted cookie",
+ 'keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"',
+ {"keebler": 'E=mc2; L="Loves"; fudge=\012;'},
+ ),
+ (
+ "Allow '=' in an unquoted value",
+ "keebler=E=mc2",
+ {"keebler": "E=mc2"},
+ ),
+ (
+ "Allow cookies with ':' in their name",
+ "key:term=value:term",
+ {"key:term": "value:term"},
+ ),
+ (
+ "Allow '[' and ']' in cookie values",
+ "a=b; c=[; d=r; f=h",
+ {"a": "b", "c": "[", "d": "r", "f": "h"},
+ ),
+ (
+ "Test basic cookie attributes",
+ 'Customer="WILE_E_COYOTE"; Version=1; Path=/acme',
+ {"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})},
+ ),
+ (
+ "Test flag only cookie attributes",
+ 'Customer="WILE_E_COYOTE"; HttpOnly; Secure',
+ {"Customer": ("WILE_E_COYOTE", {"httponly": True, "secure": True})},
+ ),
+ (
+ "Test flag only attribute with values",
+ "eggs=scrambled; httponly=foo; secure=bar; Path=/bacon",
+ {"eggs": ("scrambled", {"httponly": "foo", "secure": "bar", "path": "/bacon"})},
+ ),
+ (
+ "Test special case for 'expires' attribute, 4 digit year",
+ 'Customer="W"; expires=Wed, 01 Jan 2010 00:00:00 GMT',
+ {"Customer": ("W", {"expires": "Wed, 01 Jan 2010 00:00:00 GMT"})},
+ ),
+ (
+ "Test special case for 'expires' attribute, 2 digit year",
+ 'Customer="W"; expires=Wed, 01 Jan 98 00:00:00 GMT',
+ {"Customer": ("W", {"expires": "Wed, 01 Jan 98 00:00:00 GMT"})},
+ ),
+ (
+ "Test extra spaces in keys and values",
+ "eggs = scrambled ; secure ; path = bar ; foo=foo ",
+ {"eggs": ("scrambled", {"secure": True, "path": "bar"}), "foo": "foo"},
+ ),
+ (
+ "Test quoted attributes",
+ 'Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"',
+ {"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})}
+ ),
+ # Our own tests that CPython passes
+ (
+ "Allow ';' in quoted value",
+ 'chips="a;hoy"; vienna=finger',
+ {"chips": "a;hoy", "vienna": "finger"},
+ ),
+ (
+ "Keep only the last set value",
+ "a=c; a=b",
+ {"a": "b"},
+ ),
+ )
+
+ def test_lenient_parsing(self):
+ self._run_tests(
+ (
+ "Ignore and try to skip invalid cookies",
+ 'chips={"ahoy;": 1}; vienna="finger;"',
+ {"vienna": "finger;"},
+ ),
+ (
+ "Ignore cookies without a name",
+ "a=b; unnamed; c=d",
+ {"a": "b", "c": "d"},
+ ),
+ (
+ "Ignore '\"' cookie without name",
+ 'a=b; "; c=d',
+ {"a": "b", "c": "d"},
+ ),
+ (
+ "Skip all space separated values",
+ "x a=b c=d x; e=f",
+ {"a": "b", "c": "d", "e": "f"},
+ ),
+ (
+ "Skip all space separated values",
+ 'x a=b; data={"complex": "json", "with": "key=value"}; x c=d x',
+ {"a": "b", "c": "d"},
+ ),
+ (
+ "Expect quote mending",
+ 'a=b; invalid="; c=d',
+ {"a": "b", "c": "d"},
+ ),
+ (
+ "Reset morsel after invalid to not capture attributes",
+ "a=b; invalid; Version=1; c=d",
+ {"a": "b", "c": "d"},
+ ),
+ (
+ "Reset morsel after invalid to not capture attributes",
+ "a=b; $invalid; $Version=1; c=d",
+ {"a": "b", "c": "d"},
+ ),
+ (
+ "Continue after non-flag attribute without value",
+ "a=b; path; Version=1; c=d",
+ {"a": "b", "c": "d"},
+ ),
+ (
+ "Allow cookie attributes with `$` prefix",
+ 'Customer="WILE_E_COYOTE"; $Version=1; $Secure; $Path=/acme',
+ {"Customer": ("WILE_E_COYOTE", {"version": "1", "secure": True, "path": "/acme"})},
+ ),
+ (
+ "Invalid Morsel keys should not result in an error",
+ "Key=Value; [Invalid]=Value; Another=Value",
+ {"Key": "Value", "Another": "Value"},
+ ),
+ )
diff --git a/test/test_download.py b/test/test_download.py
new file mode 100755
index 0000000..2530792
--- /dev/null
+++ b/test/test_download.py
@@ -0,0 +1,314 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import collections
+import hashlib
+import json
+
+from test.helper import (
+ assertGreaterEqual,
+ expect_info_dict,
+ expect_warnings,
+ get_params,
+ gettestcases,
+ getwebpagetestcases,
+ is_download_test,
+ report_warning,
+ try_rm,
+)
+
+import yt_dlp.YoutubeDL # isort: split
+from yt_dlp.extractor import get_info_extractor
+from yt_dlp.networking.exceptions import HTTPError, TransportError
+from yt_dlp.utils import (
+ DownloadError,
+ ExtractorError,
+ UnavailableVideoError,
+ YoutubeDLError,
+ format_bytes,
+ join_nonempty,
+)
+
+RETRIES = 3
+
+
+class YoutubeDL(yt_dlp.YoutubeDL):
+ def __init__(self, *args, **kwargs):
+ self.to_stderr = self.to_screen
+ self.processed_info_dicts = []
+ super().__init__(*args, **kwargs)
+
+ def report_warning(self, message, *args, **kwargs):
+ # Don't accept warnings during tests
+ raise ExtractorError(message)
+
+ def process_info(self, info_dict):
+ self.processed_info_dicts.append(info_dict.copy())
+ return super().process_info(info_dict)
+
+
+def _file_md5(fn):
+ with open(fn, 'rb') as f:
+ return hashlib.md5(f.read()).hexdigest()
+
+
+normal_test_cases = gettestcases()
+webpage_test_cases = getwebpagetestcases()
+tests_counter = collections.defaultdict(collections.Counter)
+
+
+@is_download_test
+class TestDownload(unittest.TestCase):
+ # Parallel testing in nosetests. See
+ # http://nose.readthedocs.org/en/latest/doc_tests/test_multiprocess/multiprocess.html
+ _multiprocess_shared_ = True
+
+ maxDiff = None
+
+ COMPLETED_TESTS = {}
+
+ def __str__(self):
+ """Identify each test with the `add_ie` attribute, if available."""
+ cls, add_ie = type(self), getattr(self, self._testMethodName).add_ie
+ return f'{self._testMethodName} ({cls.__module__}.{cls.__name__}){f" [{add_ie}]" if add_ie else ""}:'
+
+
+# Dynamically generate tests
+
+def generator(test_case, tname):
+ def test_template(self):
+ if self.COMPLETED_TESTS.get(tname):
+ return
+ self.COMPLETED_TESTS[tname] = True
+ ie = yt_dlp.extractor.get_info_extractor(test_case['name'])()
+ other_ies = [get_info_extractor(ie_key)() for ie_key in test_case.get('add_ie', [])]
+ is_playlist = any(k.startswith('playlist') for k in test_case)
+ test_cases = test_case.get(
+ 'playlist', [] if is_playlist else [test_case])
+
+ def print_skipping(reason):
+ print('Skipping %s: %s' % (test_case['name'], reason))
+ self.skipTest(reason)
+
+ if not ie.working():
+ print_skipping('IE marked as not _WORKING')
+
+ for tc in test_cases:
+ if tc.get('expected_exception'):
+ continue
+ info_dict = tc.get('info_dict', {})
+ params = tc.get('params', {})
+ if not info_dict.get('id'):
+ raise Exception(f'Test {tname} definition incorrect - "id" key is not present')
+ elif not info_dict.get('ext') and info_dict.get('_type', 'video') == 'video':
+ if params.get('skip_download') and params.get('ignore_no_formats_error'):
+ continue
+ raise Exception(f'Test {tname} definition incorrect - "ext" key must be present to define the output file')
+
+ if 'skip' in test_case:
+ print_skipping(test_case['skip'])
+
+ for other_ie in other_ies:
+ if not other_ie.working():
+                print_skipping('test depends on %sIE, marked as not _WORKING' % other_ie.ie_key())
+
+ params = get_params(test_case.get('params', {}))
+ params['outtmpl'] = tname + '_' + params['outtmpl']
+ if is_playlist and 'playlist' not in test_case:
+ params.setdefault('extract_flat', 'in_playlist')
+ params.setdefault('playlistend', test_case.get(
+ 'playlist_mincount', test_case.get('playlist_count', -2) + 1))
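+            # i.e. fetch up to 'playlist_mincount' entries when a minimum is given,
+            # 'playlist_count' + 1 when an exact count is given (one extra so an
+            # over-long playlist still fails), and -1 (effectively no limit) otherwise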
+ params.setdefault('skip_download', True)
+
+ ydl = YoutubeDL(params, auto_init=False)
+ ydl.add_default_info_extractors()
+ finished_hook_called = set()
+
+ def _hook(status):
+ if status['status'] == 'finished':
+ finished_hook_called.add(status['filename'])
+ ydl.add_progress_hook(_hook)
+ expect_warnings(ydl, test_case.get('expected_warnings', []))
+
+ def get_tc_filename(tc):
+ return ydl.prepare_filename(dict(tc.get('info_dict', {})))
+
+ res_dict = None
+
+ def match_exception(err):
+ expected_exception = test_case.get('expected_exception')
+ if not expected_exception:
+ return False
+ if err.__class__.__name__ == expected_exception:
+ return True
+ for exc in err.exc_info:
+ if exc.__class__.__name__ == expected_exception:
+ return True
+ return False
+
+ def try_rm_tcs_files(tcs=None):
+ if tcs is None:
+ tcs = test_cases
+ for tc in tcs:
+ tc_filename = get_tc_filename(tc)
+ try_rm(tc_filename)
+ try_rm(tc_filename + '.part')
+ try_rm(os.path.splitext(tc_filename)[0] + '.info.json')
+ try_rm_tcs_files()
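+        # Network-related failures are retried up to RETRIES times; other errors either
+        # match 'expected_exception' (in which case the test passes) or are re-raised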
+ try:
+ try_num = 1
+ while True:
+ try:
+ # We're not using .download here since that is just a shim
+ # for outside error handling, and returns the exit code
+ # instead of the result dict.
+ res_dict = ydl.extract_info(
+ test_case['url'],
+ force_generic_extractor=params.get('force_generic_extractor', False))
+ except (DownloadError, ExtractorError) as err:
+ # Check if the exception is not a network related one
+ if not isinstance(err.exc_info[1], (TransportError, UnavailableVideoError)) or (isinstance(err.exc_info[1], HTTPError) and err.exc_info[1].status == 503):
+ if match_exception(err):
+ return
+ err.msg = f'{getattr(err, "msg", err)} ({tname})'
+ raise
+
+ if try_num == RETRIES:
+ report_warning('%s failed due to network errors, skipping...' % tname)
+ return
+
+ print(f'Retrying: {try_num} failed tries\n\n##########\n\n')
+
+ try_num += 1
+ except YoutubeDLError as err:
+ if match_exception(err):
+ return
+ raise
+ else:
+ break
+
+ if is_playlist:
+ self.assertTrue(res_dict['_type'] in ['playlist', 'multi_video'])
+ self.assertTrue('entries' in res_dict)
+ expect_info_dict(self, res_dict, test_case.get('info_dict', {}))
+
+ if 'playlist_mincount' in test_case:
+ assertGreaterEqual(
+ self,
+ len(res_dict['entries']),
+ test_case['playlist_mincount'],
+                'Expected at least %d entries in playlist %s, but got only %d' % (
+ test_case['playlist_mincount'], test_case['url'],
+ len(res_dict['entries'])))
+ if 'playlist_count' in test_case:
+ self.assertEqual(
+ len(res_dict['entries']),
+ test_case['playlist_count'],
+ 'Expected %d entries in playlist %s, but got %d.' % (
+ test_case['playlist_count'],
+ test_case['url'],
+ len(res_dict['entries']),
+ ))
+ if 'playlist_duration_sum' in test_case:
+ got_duration = sum(e['duration'] for e in res_dict['entries'])
+ self.assertEqual(
+ test_case['playlist_duration_sum'], got_duration)
+
+ # Generalize both playlists and single videos to unified format for
+ # simplicity
+ if 'entries' not in res_dict:
+ res_dict['entries'] = [res_dict]
+
+ for tc_num, tc in enumerate(test_cases):
+ tc_res_dict = res_dict['entries'][tc_num]
+ # First, check test cases' data against extracted data alone
+ expect_info_dict(self, tc_res_dict, tc.get('info_dict', {}))
+ if tc_res_dict.get('_type', 'video') != 'video':
+ continue
+ # Now, check downloaded file consistency
+ tc_filename = get_tc_filename(tc)
+ if not test_case.get('params', {}).get('skip_download', False):
+ self.assertTrue(os.path.exists(tc_filename), msg='Missing file ' + tc_filename)
+ self.assertTrue(tc_filename in finished_hook_called)
+ expected_minsize = tc.get('file_minsize', 10000)
+ if expected_minsize is not None:
+ if params.get('test'):
+ expected_minsize = max(expected_minsize, 10000)
+ got_fsize = os.path.getsize(tc_filename)
+ assertGreaterEqual(
+ self, got_fsize, expected_minsize,
+ 'Expected %s to be at least %s, but it\'s only %s ' %
+ (tc_filename, format_bytes(expected_minsize),
+ format_bytes(got_fsize)))
+ if 'md5' in tc:
+ md5_for_file = _file_md5(tc_filename)
+ self.assertEqual(tc['md5'], md5_for_file)
+ # Finally, check test cases' data again but this time against
+ # extracted data from info JSON file written during processing
+ info_json_fn = os.path.splitext(tc_filename)[0] + '.info.json'
+ self.assertTrue(
+ os.path.exists(info_json_fn),
+ 'Missing info file %s' % info_json_fn)
+ with open(info_json_fn, encoding='utf-8') as infof:
+ info_dict = json.load(infof)
+ expect_info_dict(self, info_dict, tc.get('info_dict', {}))
+ finally:
+ try_rm_tcs_files()
+ if is_playlist and res_dict is not None and res_dict.get('entries'):
+ # Remove all other files that may have been extracted if the
+ # extractor returns full results even with extract_flat
+ res_tcs = [{'info_dict': e} for e in res_dict['entries']]
+ try_rm_tcs_files(res_tcs)
+ ydl.close()
+ return test_template
+
+
+# And add them to TestDownload
+def inject_tests(test_cases, label=''):
+ for test_case in test_cases:
+ name = test_case['name']
+ tname = join_nonempty('test', name, label, tests_counter[name][label], delim='_')
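+        # join_nonempty drops empty/zero parts, so the first test for an extractor is
+        # named e.g. 'test_Youtube', duplicates become 'test_Youtube_1', 'test_Youtube_2',
+        # and so on, with the 'webpage' label inserted before the counter where applicable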
+ tests_counter[name][label] += 1
+
+ test_method = generator(test_case, tname)
+ test_method.__name__ = tname
+ test_method.add_ie = ','.join(test_case.get('add_ie', []))
+ setattr(TestDownload, test_method.__name__, test_method)
+
+
+inject_tests(normal_test_cases)
+
+# TODO: disable redirection to the IE to ensure we are actually testing the webpage extraction
+inject_tests(webpage_test_cases, 'webpage')
+
+
+def batch_generator(name):
+ def test_template(self):
+ for label, num_tests in tests_counter[name].items():
+ for i in range(num_tests):
+ test_name = join_nonempty('test', name, label, i, delim='_')
+ try:
+ getattr(self, test_name)()
+ except unittest.SkipTest:
+ print(f'Skipped {test_name}')
+
+ return test_template
+
+
+for name in tests_counter:
+ test_method = batch_generator(name)
+ test_method.__name__ = f'test_{name}_all'
+ test_method.add_ie = ''
+ setattr(TestDownload, test_method.__name__, test_method)
+del test_method
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_downloader_external.py b/test/test_downloader_external.py
new file mode 100644
index 0000000..62f7d45
--- /dev/null
+++ b/test/test_downloader_external.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import http.cookiejar
+
+from test.helper import FakeYDL
+from yt_dlp.downloader.external import (
+ Aria2cFD,
+ AxelFD,
+ CurlFD,
+ FFmpegFD,
+ HttpieFD,
+ WgetFD,
+)
+
+TEST_COOKIE = {
+ 'version': 0,
+ 'name': 'test',
+ 'value': 'ytdlp',
+ 'port': None,
+ 'port_specified': False,
+ 'domain': '.example.com',
+ 'domain_specified': True,
+ 'domain_initial_dot': False,
+ 'path': '/',
+ 'path_specified': True,
+ 'secure': False,
+ 'expires': None,
+ 'discard': False,
+ 'comment': None,
+ 'comment_url': None,
+ 'rest': {},
+}
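+# TEST_COOKIE mirrors the keyword arguments of http.cookiejar.Cookie, so the tests below
+# can splat it straight into the constructor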
+
+TEST_INFO = {'url': 'http://www.example.com/'}
+
+
+class TestHttpieFD(unittest.TestCase):
+ def test_make_cmd(self):
+ with FakeYDL() as ydl:
+ downloader = HttpieFD(ydl, {})
+ self.assertEqual(
+ downloader._make_cmd('test', TEST_INFO),
+ ['http', '--download', '--output', 'test', 'http://www.example.com/'])
+
+ # Test cookie header is added
+ ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
+ self.assertEqual(
+ downloader._make_cmd('test', TEST_INFO),
+ ['http', '--download', '--output', 'test', 'http://www.example.com/', 'Cookie:test=ytdlp'])
+
+
+class TestAxelFD(unittest.TestCase):
+ def test_make_cmd(self):
+ with FakeYDL() as ydl:
+ downloader = AxelFD(ydl, {})
+ self.assertEqual(
+ downloader._make_cmd('test', TEST_INFO),
+ ['axel', '-o', 'test', '--', 'http://www.example.com/'])
+
+ # Test cookie header is added
+ ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
+ self.assertEqual(
+ downloader._make_cmd('test', TEST_INFO),
+ ['axel', '-o', 'test', '-H', 'Cookie: test=ytdlp', '--max-redirect=0', '--', 'http://www.example.com/'])
+
+
+class TestWgetFD(unittest.TestCase):
+ def test_make_cmd(self):
+ with FakeYDL() as ydl:
+ downloader = WgetFD(ydl, {})
+ self.assertNotIn('--load-cookies', downloader._make_cmd('test', TEST_INFO))
+ # Test cookiejar tempfile arg is added
+ ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
+ self.assertIn('--load-cookies', downloader._make_cmd('test', TEST_INFO))
+
+
+class TestCurlFD(unittest.TestCase):
+ def test_make_cmd(self):
+ with FakeYDL() as ydl:
+ downloader = CurlFD(ydl, {})
+ self.assertNotIn('--cookie', downloader._make_cmd('test', TEST_INFO))
+ # Test cookie header is added
+ ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
+ self.assertIn('--cookie', downloader._make_cmd('test', TEST_INFO))
+ self.assertIn('test=ytdlp', downloader._make_cmd('test', TEST_INFO))
+
+
+class TestAria2cFD(unittest.TestCase):
+ def test_make_cmd(self):
+ with FakeYDL() as ydl:
+ downloader = Aria2cFD(ydl, {})
+ downloader._make_cmd('test', TEST_INFO)
+ self.assertFalse(hasattr(downloader, '_cookies_tempfile'))
+
+ # Test cookiejar tempfile arg is added
+ ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
+ cmd = downloader._make_cmd('test', TEST_INFO)
+ self.assertIn(f'--load-cookies={downloader._cookies_tempfile}', cmd)
+
+
+@unittest.skipUnless(FFmpegFD.available(), 'ffmpeg not found')
+class TestFFmpegFD(unittest.TestCase):
+ _args = []
+
+ def _test_cmd(self, args):
+ self._args = args
+
+ def test_make_cmd(self):
+ with FakeYDL() as ydl:
+ downloader = FFmpegFD(ydl, {})
+ downloader._debug_cmd = self._test_cmd
+
+ downloader._call_downloader('test', {**TEST_INFO, 'ext': 'mp4'})
+ self.assertEqual(self._args, [
+ 'ffmpeg', '-y', '-hide_banner', '-i', 'http://www.example.com/',
+ '-c', 'copy', '-f', 'mp4', 'file:test'])
+
+ # Test cookies arg is added
+ ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
+ downloader._call_downloader('test', {**TEST_INFO, 'ext': 'mp4'})
+ self.assertEqual(self._args, [
+ 'ffmpeg', '-y', '-hide_banner', '-cookies', 'test=ytdlp; path=/; domain=.example.com;\r\n',
+ '-i', 'http://www.example.com/', '-c', 'copy', '-f', 'mp4', 'file:test'])
+
+ # Test with non-url input (ffmpeg reads from stdin '-' for websockets)
+ downloader._call_downloader('test', {'url': 'x', 'ext': 'mp4'})
+ self.assertEqual(self._args, [
+ 'ffmpeg', '-y', '-hide_banner', '-i', 'x', '-c', 'copy', '-f', 'mp4', 'file:test'])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_downloader_http.py b/test/test_downloader_http.py
new file mode 100644
index 0000000..099ec2f
--- /dev/null
+++ b/test/test_downloader_http.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import http.server
+import re
+import threading
+
+from test.helper import http_server_port, try_rm
+from yt_dlp import YoutubeDL
+from yt_dlp.downloader.http import HttpFD
+from yt_dlp.utils import encodeFilename
+from yt_dlp.utils._utils import _YDLLogger as FakeLogger
+
+TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+TEST_SIZE = 10 * 1024
+
+
+class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
+ def log_message(self, format, *args):
+ pass
+
+ def send_content_range(self, total=None):
+ range_header = self.headers.get('Range')
+ start = end = None
+ if range_header:
+ mobj = re.search(r'^bytes=(\d+)-(\d+)', range_header)
+ if mobj:
+ start = int(mobj.group(1))
+ end = int(mobj.group(2))
+ valid_range = start is not None and end is not None
+ if valid_range:
+ content_range = 'bytes %d-%d' % (start, end)
+ if total:
+ content_range += '/%d' % total
+ self.send_header('Content-Range', content_range)
+ return (end - start + 1) if valid_range else total
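+        # e.g. 'Range: bytes=0-999' results in 'Content-Range: bytes 0-999/10240' and a
+        # return value of 1000; without a usable Range header the full size is returned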
+
+ def serve(self, range=True, content_length=True):
+ self.send_response(200)
+ self.send_header('Content-Type', 'video/mp4')
+ size = TEST_SIZE
+ if range:
+ size = self.send_content_range(TEST_SIZE)
+ if content_length:
+ self.send_header('Content-Length', size)
+ self.end_headers()
+ self.wfile.write(b'#' * size)
+
+ def do_GET(self):
+ if self.path == '/regular':
+ self.serve()
+ elif self.path == '/no-content-length':
+ self.serve(content_length=False)
+ elif self.path == '/no-range':
+ self.serve(range=False)
+ elif self.path == '/no-range-no-content-length':
+ self.serve(range=False, content_length=False)
+ else:
+ assert False
+
+
+class TestHttpFD(unittest.TestCase):
+ def setUp(self):
+ self.httpd = http.server.HTTPServer(
+ ('127.0.0.1', 0), HTTPTestRequestHandler)
+ self.port = http_server_port(self.httpd)
+ self.server_thread = threading.Thread(target=self.httpd.serve_forever)
+ self.server_thread.daemon = True
+ self.server_thread.start()
+
+ def download(self, params, ep):
+ params['logger'] = FakeLogger()
+ ydl = YoutubeDL(params)
+ downloader = HttpFD(ydl, params)
+ filename = 'testfile.mp4'
+ try_rm(encodeFilename(filename))
+ self.assertTrue(downloader.real_download(filename, {
+ 'url': 'http://127.0.0.1:%d/%s' % (self.port, ep),
+ }), ep)
+ self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE, ep)
+ try_rm(encodeFilename(filename))
+
+ def download_all(self, params):
+ for ep in ('regular', 'no-content-length', 'no-range', 'no-range-no-content-length'):
+ self.download(params, ep)
+
+ def test_regular(self):
+ self.download_all({})
+
+ def test_chunked(self):
+ self.download_all({
+ 'http_chunk_size': 1000,
+ })
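+        # http_chunk_size should make HttpFD split the transfer into sequential ranged
+        # requests (1000 bytes each here), which also exercises the Range handling of
+        # the test server above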
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_execution.py b/test/test_execution.py
new file mode 100644
index 0000000..c6ee9cf
--- /dev/null
+++ b/test/test_execution.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import contextlib
+import subprocess
+
+from yt_dlp.utils import Popen
+
+rootDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+LAZY_EXTRACTORS = 'yt_dlp/extractor/lazy_extractors.py'
+
+
+class TestExecution(unittest.TestCase):
+ def run_yt_dlp(self, exe=(sys.executable, 'yt_dlp/__main__.py'), opts=('--version', )):
+ stdout, stderr, returncode = Popen.run(
+ [*exe, '--ignore-config', *opts], cwd=rootDir, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ print(stderr, file=sys.stderr)
+ self.assertEqual(returncode, 0)
+ return stdout.strip(), stderr.strip()
+
+ def test_main_exec(self):
+ self.run_yt_dlp()
+
+ def test_import(self):
+ self.run_yt_dlp(exe=(sys.executable, '-c', 'import yt_dlp'))
+
+ def test_module_exec(self):
+ self.run_yt_dlp(exe=(sys.executable, '-m', 'yt_dlp'))
+
+ def test_cmdline_umlauts(self):
+ _, stderr = self.run_yt_dlp(opts=('ä', '--version'))
+ self.assertFalse(stderr)
+
+ def test_lazy_extractors(self):
+ try:
+ subprocess.check_call([sys.executable, 'devscripts/make_lazy_extractors.py', LAZY_EXTRACTORS],
+ cwd=rootDir, stdout=subprocess.DEVNULL)
+ self.assertTrue(os.path.exists(LAZY_EXTRACTORS))
+
+ _, stderr = self.run_yt_dlp(opts=('-s', 'test:'))
+ # `MIN_RECOMMENDED` emits a deprecated feature warning for deprecated Python versions
+ if stderr and stderr.startswith('Deprecated Feature: Support for Python'):
+ stderr = ''
+ self.assertFalse(stderr)
+
+ subprocess.check_call([sys.executable, 'test/test_all_urls.py'], cwd=rootDir, stdout=subprocess.DEVNULL)
+ finally:
+ with contextlib.suppress(OSError):
+ os.remove(LAZY_EXTRACTORS)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_iqiyi_sdk_interpreter.py b/test/test_iqiyi_sdk_interpreter.py
new file mode 100644
index 0000000..47c632a
--- /dev/null
+++ b/test/test_iqiyi_sdk_interpreter.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+from test.helper import FakeYDL, is_download_test
+from yt_dlp.extractor import IqiyiIE
+
+
+class WarningLogger:
+ def __init__(self):
+ self.messages = []
+
+ def warning(self, msg):
+ self.messages.append(msg)
+
+ def debug(self, msg):
+ pass
+
+ def error(self, msg):
+ pass
+
+
+@is_download_test
+class TestIqiyiSDKInterpreter(unittest.TestCase):
+ def test_iqiyi_sdk_interpreter(self):
+ '''
+ Test the functionality of IqiyiSDKInterpreter by trying to log in
+
+        If `sign` is incorrect, the /validate call throws an HTTP 556 error
+ '''
+ logger = WarningLogger()
+ ie = IqiyiIE(FakeYDL({'logger': logger}))
+ ie._perform_login('foo', 'bar')
+ self.assertTrue('unable to log in:' in logger.messages[0])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_jsinterp.py b/test/test_jsinterp.py
new file mode 100644
index 0000000..86928a6
--- /dev/null
+++ b/test/test_jsinterp.py
@@ -0,0 +1,380 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import math
+
+from yt_dlp.jsinterp import JS_Undefined, JSInterpreter
+
+
+class NaN:
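+    # float('nan') never compares equal to anything, including itself, so tests that
+    # expect NaN pass this sentinel and _test() checks the result with math.isnan()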
+ pass
+
+
+class TestJSInterpreter(unittest.TestCase):
+ def _test(self, jsi_or_code, expected, func='f', args=()):
+ if isinstance(jsi_or_code, str):
+ jsi_or_code = JSInterpreter(jsi_or_code)
+ got = jsi_or_code.call_function(func, *args)
+ if expected is NaN:
+ self.assertTrue(math.isnan(got), f'{got} is not NaN')
+ else:
+ self.assertEqual(got, expected)
+
+ def test_basic(self):
+ jsi = JSInterpreter('function f(){;}')
+ self.assertEqual(repr(jsi.extract_function('f')), 'F<f>')
+ self._test(jsi, None)
+
+ self._test('function f(){return 42;}', 42)
+ self._test('function f(){42}', None)
+ self._test('var f = function(){return 42;}', 42)
+
+ def test_add(self):
+ self._test('function f(){return 42 + 7;}', 49)
+ self._test('function f(){return 42 + undefined;}', NaN)
+ self._test('function f(){return 42 + null;}', 42)
+
+ def test_sub(self):
+ self._test('function f(){return 42 - 7;}', 35)
+ self._test('function f(){return 42 - undefined;}', NaN)
+ self._test('function f(){return 42 - null;}', 42)
+
+ def test_mul(self):
+ self._test('function f(){return 42 * 7;}', 294)
+ self._test('function f(){return 42 * undefined;}', NaN)
+ self._test('function f(){return 42 * null;}', 0)
+
+ def test_div(self):
+ jsi = JSInterpreter('function f(a, b){return a / b;}')
+ self._test(jsi, NaN, args=(0, 0))
+ self._test(jsi, NaN, args=(JS_Undefined, 1))
+ self._test(jsi, float('inf'), args=(2, 0))
+ self._test(jsi, 0, args=(0, 3))
+
+ def test_mod(self):
+ self._test('function f(){return 42 % 7;}', 0)
+ self._test('function f(){return 42 % 0;}', NaN)
+ self._test('function f(){return 42 % undefined;}', NaN)
+
+ def test_exp(self):
+ self._test('function f(){return 42 ** 2;}', 1764)
+ self._test('function f(){return 42 ** undefined;}', NaN)
+ self._test('function f(){return 42 ** null;}', 1)
+ self._test('function f(){return undefined ** 42;}', NaN)
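+        # (in JS arithmetic, null coerces to 0 and undefined to NaN, which is what the
+        # undefined/null cases in these operator tests assert)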
+
+ def test_calc(self):
+ self._test('function f(a){return 2*a+1;}', 7, args=[3])
+
+ def test_empty_return(self):
+ self._test('function f(){return; y()}', None)
+
+ def test_morespace(self):
+ self._test('function f (a) { return 2 * a + 1 ; }', 7, args=[3])
+ self._test('function f () { x = 2 ; return x; }', 2)
+
+ def test_strange_chars(self):
+ self._test('function $_xY1 ($_axY1) { var $_axY2 = $_axY1 + 1; return $_axY2; }',
+ 21, args=[20], func='$_xY1')
+
+ def test_operators(self):
+ self._test('function f(){return 1 << 5;}', 32)
+ self._test('function f(){return 2 ** 5}', 32)
+ self._test('function f(){return 19 & 21;}', 17)
+ self._test('function f(){return 11 >> 2;}', 2)
+ self._test('function f(){return []? 2+3: 4;}', 5)
+ self._test('function f(){return 1 == 2}', False)
+ self._test('function f(){return 0 && 1 || 2;}', 2)
+ self._test('function f(){return 0 ?? 42;}', 0)
+ self._test('function f(){return "life, the universe and everything" < 42;}', False)
+
+ def test_array_access(self):
+ self._test('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2.0] = 7; return x;}', [5, 2, 7])
+
+ def test_parens(self):
+ self._test('function f(){return (1) + (2) * ((( (( (((((3)))))) )) ));}', 7)
+ self._test('function f(){return (1 + 2) * 3;}', 9)
+
+ def test_quotes(self):
+ self._test(R'function f(){return "a\"\\("}', R'a"\(')
+
+ def test_assignments(self):
+ self._test('function f(){var x = 20; x = 30 + 1; return x;}', 31)
+ self._test('function f(){var x = 20; x += 30 + 1; return x;}', 51)
+ self._test('function f(){var x = 20; x -= 30 + 1; return x;}', -11)
+
+ @unittest.skip('Not implemented')
+ def test_comments(self):
+ self._test('''
+ function f() {
+ var x = /* 1 + */ 2;
+ var y = /* 30
+ * 40 */ 50;
+ return x + y;
+ }
+ ''', 52)
+
+ self._test('''
+ function f() {
+ var x = "/*";
+ var y = 1 /* comment */ + 2;
+ return y;
+ }
+ ''', 3)
+
+ def test_precedence(self):
+ self._test('''
+ function f() {
+ var a = [10, 20, 30, 40, 50];
+ var b = 6;
+ a[0]=a[b%a.length];
+ return a;
+ }
+ ''', [20, 20, 30, 40, 50])
+
+ def test_builtins(self):
+ self._test('function f() { return NaN }', NaN)
+
+ def test_date(self):
+ self._test('function f() { return new Date("Wednesday 31 December 1969 18:01:26 MDT") - 0; }', 86000)
+
+ jsi = JSInterpreter('function f(dt) { return new Date(dt) - 0; }')
+ self._test(jsi, 86000, args=['Wednesday 31 December 1969 18:01:26 MDT'])
+ self._test(jsi, 86000, args=['12/31/1969 18:01:26 MDT']) # m/d/y
+ self._test(jsi, 0, args=['1 January 1970 00:00:00 UTC'])
+
+ def test_call(self):
+ jsi = JSInterpreter('''
+ function x() { return 2; }
+ function y(a) { return x() + (a?a:0); }
+ function z() { return y(3); }
+ ''')
+ self._test(jsi, 5, func='z')
+ self._test(jsi, 2, func='y')
+
+ def test_if(self):
+ self._test('''
+ function f() {
+ let a = 9;
+ if (0==0) {a++}
+ return a
+ }
+ ''', 10)
+
+ self._test('''
+ function f() {
+ if (0==0) {return 10}
+ }
+ ''', 10)
+
+ self._test('''
+ function f() {
+ if (0!=0) {return 1}
+ else {return 10}
+ }
+ ''', 10)
+
+ """ # Unsupported
+ self._test('''
+ function f() {
+ if (0!=0) {return 1}
+ else if (1==0) {return 2}
+ else {return 10}
+ }
+ ''', 10)
+ """
+
+ def test_for_loop(self):
+ self._test('function f() { a=0; for (i=0; i-10; i++) {a++} return a }', 10)
+
+ def test_switch(self):
+ jsi = JSInterpreter('''
+ function f(x) { switch(x){
+ case 1:x+=1;
+ case 2:x+=2;
+ case 3:x+=3;break;
+ case 4:x+=4;
+ default:x=0;
+ } return x }
+ ''')
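+        # Execution falls through until 'break': x=1 runs cases 1-3 (1+1+2+3 == 7),
+        # x=3 runs only case 3 (6), and x=5 matches no case so 'default' sets it to 0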
+ self._test(jsi, 7, args=[1])
+ self._test(jsi, 6, args=[3])
+ self._test(jsi, 0, args=[5])
+
+ def test_switch_default(self):
+ jsi = JSInterpreter('''
+ function f(x) { switch(x){
+ case 2: x+=2;
+ default: x-=1;
+ case 5:
+ case 6: x+=6;
+ case 0: break;
+ case 1: x+=1;
+ } return x }
+ ''')
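+        # 'default' only runs when no case matches, but fall-through still applies:
+        # x=1 hits case 1 only (2); x=5 falls from case 5 through case 6 to the break
+        # (11); x=9 takes default, then cases 5 and 6, then the break (14)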
+ self._test(jsi, 2, args=[1])
+ self._test(jsi, 11, args=[5])
+ self._test(jsi, 14, args=[9])
+
+ def test_try(self):
+ self._test('function f() { try{return 10} catch(e){return 5} }', 10)
+
+ def test_catch(self):
+ self._test('function f() { try{throw 10} catch(e){return 5} }', 5)
+
+ def test_finally(self):
+ self._test('function f() { try{throw 10} finally {return 42} }', 42)
+ self._test('function f() { try{throw 10} catch(e){return 5} finally {return 42} }', 42)
+
+ def test_nested_try(self):
+ self._test('''
+ function f() {try {
+ try{throw 10} finally {throw 42}
+ } catch(e){return 5} }
+ ''', 5)
+
+ def test_for_loop_continue(self):
+ self._test('function f() { a=0; for (i=0; i-10; i++) { continue; a++ } return a }', 0)
+
+ def test_for_loop_break(self):
+ self._test('function f() { a=0; for (i=0; i-10; i++) { break; a++ } return a }', 0)
+
+ def test_for_loop_try(self):
+ self._test('''
+ function f() {
+ for (i=0; i-10; i++) { try { if (i == 5) throw i} catch {return 10} finally {break} };
+ return 42 }
+ ''', 42)
+
+ def test_literal_list(self):
+ self._test('function f() { return [1, 2, "asdf", [5, 6, 7]][3] }', [5, 6, 7])
+
+ def test_comma(self):
+ self._test('function f() { a=5; a -= 1, a+=3; return a }', 7)
+ self._test('function f() { a=5; return (a -= 1, a+=3, a); }', 7)
+ self._test('function f() { return (l=[0,1,2,3], function(a, b){return a+b})((l[1], l[2]), l[3]) }', 5)
+
+ def test_void(self):
+ self._test('function f() { return void 42; }', None)
+
+ def test_return_function(self):
+ jsi = JSInterpreter('''
+ function f() { return [1, function(){return 1}][1] }
+ ''')
+ self.assertEqual(jsi.call_function('f')([]), 1)
+
+ def test_null(self):
+ self._test('function f() { return null; }', None)
+ self._test('function f() { return [null > 0, null < 0, null == 0, null === 0]; }',
+ [False, False, False, False])
+ self._test('function f() { return [null >= 0, null <= 0]; }', [True, True])
+
+ def test_undefined(self):
+ self._test('function f() { return undefined === undefined; }', True)
+ self._test('function f() { return undefined; }', JS_Undefined)
+ self._test('function f() {return undefined ?? 42; }', 42)
+ self._test('function f() { let v; return v; }', JS_Undefined)
+ self._test('function f() { let v; return v**0; }', 1)
+ self._test('function f() { let v; return [v>42, v<=42, v&&42, 42&&v]; }',
+ [False, False, JS_Undefined, JS_Undefined])
+
+ self._test('''
+ function f() { return [
+ undefined === undefined,
+ undefined == undefined,
+ undefined == null,
+ undefined < undefined,
+ undefined > undefined,
+ undefined === 0,
+ undefined == 0,
+ undefined < 0,
+ undefined > 0,
+ undefined >= 0,
+ undefined <= 0,
+ undefined > null,
+ undefined < null,
+ undefined === null
+ ]; }
+ ''', list(map(bool, (1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))))
+
+ jsi = JSInterpreter('''
+ function f() { let v; return [42+v, v+42, v**42, 42**v, 0**v]; }
+ ''')
+ for y in jsi.call_function('f'):
+ self.assertTrue(math.isnan(y))
+
+ def test_object(self):
+ self._test('function f() { return {}; }', {})
+ self._test('function f() { let a = {m1: 42, m2: 0 }; return [a["m1"], a.m2]; }', [42, 0])
+ self._test('function f() { let a; return a?.qq; }', JS_Undefined)
+ self._test('function f() { let a = {m1: 42, m2: 0 }; return a?.qq; }', JS_Undefined)
+
+ def test_regex(self):
+ self._test('function f() { let a=/,,[/,913,/](,)}/; }', None)
+ self._test('function f() { let a=/,,[/,913,/](,)}/; return a; }', R'/,,[/,913,/](,)}/0')
+
+ R''' # We are not compiling regex
+ jsi = JSInterpreter('function f() { let a=/,,[/,913,/](,)}/; return a; }')
+ self.assertIsInstance(jsi.call_function('f'), re.Pattern)
+
+ jsi = JSInterpreter('function f() { let a=/,,[/,913,/](,)}/i; return a; }')
+ self.assertEqual(jsi.call_function('f').flags & re.I, re.I)
+
+ jsi = JSInterpreter(R'function f() { let a=/,][}",],()}(\[)/; return a; }')
+ self.assertEqual(jsi.call_function('f').pattern, r',][}",],()}(\[)')
+
+ jsi = JSInterpreter(R'function f() { let a=[/[)\\]/]; return a[0]; }')
+ self.assertEqual(jsi.call_function('f').pattern, r'[)\\]')
+ '''
+
+ @unittest.skip('Not implemented')
+ def test_replace(self):
+ self._test('function f() { let a="data-name".replace("data-", ""); return a }',
+ 'name')
+ self._test('function f() { let a="data-name".replace(new RegExp("^.+-"), ""); return a; }',
+ 'name')
+ self._test('function f() { let a="data-name".replace(/^.+-/, ""); return a; }',
+ 'name')
+ self._test('function f() { let a="data-name".replace(/a/g, "o"); return a; }',
+ 'doto-nome')
+ self._test('function f() { let a="data-name".replaceAll("a", "o"); return a; }',
+ 'doto-nome')
+
+ def test_char_code_at(self):
+ jsi = JSInterpreter('function f(i){return "test".charCodeAt(i)}')
+ self._test(jsi, 116, args=[0])
+ self._test(jsi, 101, args=[1])
+ self._test(jsi, 115, args=[2])
+ self._test(jsi, 116, args=[3])
+ self._test(jsi, None, args=[4])
+ self._test(jsi, 116, args=['not_a_number'])
+
+ def test_bitwise_operators_overflow(self):
+ self._test('function f(){return -524999584 << 5}', 379882496)
+ self._test('function f(){return 1236566549 << 5}', 915423904)
+
+ def test_bitwise_operators_typecast(self):
+ self._test('function f(){return null << 5}', 0)
+ self._test('function f(){return undefined >> 5}', 0)
+ self._test('function f(){return 42 << NaN}', 42)
+
+ def test_negative(self):
+ self._test('function f(){return 2 * -2.0 ;}', -4)
+ self._test('function f(){return 2 - - -2 ;}', 0)
+ self._test('function f(){return 2 - - - -2 ;}', 4)
+ self._test('function f(){return 2 - + + - -2;}', 0)
+ self._test('function f(){return 2 + - + - -2;}', 0)
+
+ @unittest.skip('Not implemented')
+ def test_packed(self):
+ jsi = JSInterpreter('''function f(p,a,c,k,e,d){while(c--)if(k[c])p=p.replace(new RegExp('\\b'+c.toString(a)+'\\b','g'),k[c]);return p}''')
+ self.assertEqual(jsi.call_function('f', '''h 7=g("1j");7.7h({7g:[{33:"w://7f-7e-7d-7c.v.7b/7a/79/78/77/76.74?t=73&s=2s&e=72&f=2t&71=70.0.0.1&6z=6y&6x=6w"}],6v:"w://32.v.u/6u.31",16:"r%",15:"r%",6t:"6s",6r:"",6q:"l",6p:"l",6o:"6n",6m:\'6l\',6k:"6j",9:[{33:"/2u?b=6i&n=50&6h=w://32.v.u/6g.31",6f:"6e"}],1y:{6d:1,6c:\'#6b\',6a:\'#69\',68:"67",66:30,65:r,},"64":{63:"%62 2m%m%61%5z%5y%5x.u%5w%5v%5u.2y%22 2k%m%1o%22 5t%m%1o%22 5s%m%1o%22 2j%m%5r%22 16%m%5q%22 15%m%5p%22 5o%2z%5n%5m%2z",5l:"w://v.u/d/1k/5k.2y",5j:[]},\'5i\':{"5h":"5g"},5f:"5e",5d:"w://v.u",5c:{},5b:l,1x:[0.25,0.50,0.75,1,1.25,1.5,2]});h 1m,1n,5a;h 59=0,58=0;h 7=g("1j");h 2x=0,57=0,56=0;$.55({54:{\'53-52\':\'2i-51\'}});7.j(\'4z\',6(x){c(5>0&&x.1l>=5&&1n!=1){1n=1;$(\'q.4y\').4x(\'4w\')}});7.j(\'13\',6(x){2x=x.1l});7.j(\'2g\',6(x){2w(x)});7.j(\'4v\',6(){$(\'q.2v\').4u()});6 2w(x){$(\'q.2v\').4t();c(1m)19;1m=1;17=0;c(4s.4r===l){17=1}$.4q(\'/2u?b=4p&2l=1k&4o=2t-4n-4m-2s-4l&4k=&4j=&4i=&17=\'+17,6(2r){$(\'#4h\').4g(2r)});$(\'.3-8-4f-4e:4d("4c")\').2h(6(e){2q();g().4b(0);g().4a(l)});6 2q(){h $14=$("<q />").2p({1l:"49",16:"r%",15:"r%",48:0,2n:0,2o:47,46:"45(10%, 10%, 10%, 0.4)","44-43":"42"});$("<41 />").2p({16:"60%",15:"60%",2o:40,"3z-2n":"3y"}).3x({\'2m\':\'/?b=3w&2l=1k\',\'2k\':\'0\',\'2j\':\'2i\'}).2f($14);$14.2h(6(){$(3v).3u();g().2g()});$14.2f($(\'#1j\'))}g().13(0);}6 3t(){h 9=7.1b(2e);2d.2c(9);c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==2e){2d.2c(\'!!=\'+i);7.1p(i)}}}}7.j(\'3s\',6(){g().1h("/2a/3r.29","3q 10 28",6(){g().13(g().27()+10)},"2b");$("q[26=2b]").23().21(\'.3-20-1z\');g().1h("/2a/3p.29","3o 10 28",6(){h 12=g().27()-10;c(12<0)12=0;g().13(12)},"24");$("q[26=24]").23().21(\'.3-20-1z\');});6 1i(){}7.j(\'3n\',6(){1i()});7.j(\'3m\',6(){1i()});7.j("k",6(y){h 9=7.1b();c(9.n<2)19;$(\'.3-8-3l-3k\').3j(6(){$(\'#3-8-a-k\').1e(\'3-8-a-z\');$(\'.3-a-k\').p(\'o-1f\',\'11\')});7.1h("/3i/3h.3g","3f 3e",6(){$(\'.3-1w\').3d(\'3-8-1v\');$(\'.3-8-1y, .3-8-1x\').p(\'o-1g\',\'11\');c($(\'.3-1w\').3c(\'3-8-1v\')){$(\'.3-a-k\').p(\'o-1g\',\'l\');$(\'.3-a-k\').p(\'o-1f\',\'l\');$(\'.3-8-a\').1e(\'3-8-a-z\');$(\'.3-8-a:1u\').3b(\'3-8-a-z\')}3a{$(\'.3-a-k\').p(\'o-1g\',\'11\');$(\'.3-a-k\').p(\'o-1f\',\'11\');$(\'.3-8-a:1u\').1e(\'3-8-a-z\')}},"39");7.j("38",6(y){1d.37(\'1c\',y.9[y.36].1a)});c(1d.1t(\'1c\')){35("1s(1d.1t(\'1c\'));",34)}});h 18;6 1s(1q){h 
9=7.1b();c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==1q){c(i==18){19}18=i;7.1p(i)}}}}',36,270,'|||jw|||function|player|settings|tracks|submenu||if||||jwplayer|var||on|audioTracks|true|3D|length|aria|attr|div|100|||sx|filemoon|https||event|active||false|tt|seek|dd|height|width|adb|current_audio|return|name|getAudioTracks|default_audio|localStorage|removeClass|expanded|checked|addButton|callMeMaybe|vplayer|0fxcyc2ajhp1|position|vvplay|vvad|220|setCurrentAudioTrack|audio_name|for|audio_set|getItem|last|open|controls|playbackRates|captions|rewind|icon|insertAfter||detach|ff00||button|getPosition|sec|png|player8|ff11|log|console|track_name|appendTo|play|click|no|scrolling|frameborder|file_code|src|top|zIndex|css|showCCform|data|1662367683|383371|dl|video_ad|doPlay|prevt|mp4|3E||jpg|thumbs|file|300|setTimeout|currentTrack|setItem|audioTrackChanged|dualSound|else|addClass|hasClass|toggleClass|Track|Audio|svg|dualy|images|mousedown|buttons|topbar|playAttemptFailed|beforePlay|Rewind|fr|Forward|ff|ready|set_audio_track|remove|this|upload_srt|prop|50px|margin|1000001|iframe|center|align|text|rgba|background|1000000|left|absolute|pause|setCurrentCaptions|Upload|contains|item|content|html|fviews|referer|prem|embed|3e57249ef633e0d03bf76ceb8d8a4b65|216|83|hash|view|get|TokenZir|window|hide|show|complete|slow|fadeIn|video_ad_fadein|time||cache|Cache|Content|headers|ajaxSetup|v2done|tott|vastdone2|vastdone1|vvbefore|playbackRateControls|cast|aboutlink|FileMoon|abouttext|UHD|1870|qualityLabels|sites|GNOME_POWER|link|2Fiframe|3C|allowfullscreen|22360|22640|22no|marginheight|marginwidth|2FGNOME_POWER|2F0fxcyc2ajhp1|2Fe|2Ffilemoon|2F|3A||22https|3Ciframe|code|sharing|fontOpacity|backgroundOpacity|Tahoma|fontFamily|303030|backgroundColor|FFFFFF|color|userFontScale|thumbnails|kind|0fxcyc2ajhp10000|url|get_slides|start|startparam|none|preload|html5|primary|hlshtml|androidhls|duration|uniform|stretching|0fxcyc2ajhp1_xt|image|2048|sp|6871|asn|127|srv|43200|_g3XlBcu2lmD9oDexD2NLWSmah2Nu3XcDrl93m9PwXY|m3u8||master|0fxcyc2ajhp1_x|00076|01|hls2|to|s01|delivery|storage|moon|sources|setup'''.split('|')))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_netrc.py b/test/test_netrc.py
new file mode 100644
index 0000000..dc708d9
--- /dev/null
+++ b/test/test_netrc.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+from yt_dlp.extractor import gen_extractor_classes
+from yt_dlp.extractor.common import InfoExtractor
+
+NO_LOGIN = InfoExtractor._perform_login
+
+
+class TestNetRc(unittest.TestCase):
+ def test_netrc_present(self):
+ for ie in gen_extractor_classes():
+ if ie._perform_login is NO_LOGIN:
+ continue
+ self.assertTrue(
+ ie._NETRC_MACHINE,
+ 'Extractor %s supports login, but is missing a _NETRC_MACHINE property' % ie.IE_NAME)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_networking.py b/test/test_networking.py
new file mode 100644
index 0000000..628f1f1
--- /dev/null
+++ b/test/test_networking.py
@@ -0,0 +1,1631 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+
+import pytest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import gzip
+import http.client
+import http.cookiejar
+import http.server
+import io
+import logging
+import pathlib
+import random
+import ssl
+import tempfile
+import threading
+import time
+import urllib.error
+import urllib.request
+import warnings
+import zlib
+from email.message import Message
+from http.cookiejar import CookieJar
+
+from test.helper import FakeYDL, http_server_port, verify_address_availability
+from yt_dlp.cookies import YoutubeDLCookieJar
+from yt_dlp.dependencies import brotli, requests, urllib3
+from yt_dlp.networking import (
+ HEADRequest,
+ PUTRequest,
+ Request,
+ RequestDirector,
+ RequestHandler,
+ Response,
+)
+from yt_dlp.networking._urllib import UrllibRH
+from yt_dlp.networking.exceptions import (
+ CertificateVerifyError,
+ HTTPError,
+ IncompleteRead,
+ NoSupportingHandlers,
+ ProxyError,
+ RequestError,
+ SSLError,
+ TransportError,
+ UnsupportedRequest,
+)
+from yt_dlp.utils._utils import _YDLLogger as FakeLogger
+from yt_dlp.utils.networking import HTTPHeaderDict
+
+from test.conftest import validate_and_send
+
+TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+def _build_proxy_handler(name):
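+    # Returns a handler class that answers every GET with '<name>: <path>', so proxy
+    # tests can tell which proxy actually served a request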
+ class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
+ proxy_name = name
+
+ def log_message(self, format, *args):
+ pass
+
+ def do_GET(self):
+ self.send_response(200)
+ self.send_header('Content-Type', 'text/plain; charset=utf-8')
+ self.end_headers()
+ self.wfile.write(f'{self.proxy_name}: {self.path}'.encode())
+ return HTTPTestRequestHandler
+
+
+class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
+ protocol_version = 'HTTP/1.1'
+
+ def log_message(self, format, *args):
+ pass
+
+ def _headers(self):
+ payload = str(self.headers).encode()
+ self.send_response(200)
+ self.send_header('Content-Type', 'application/json')
+ self.send_header('Content-Length', str(len(payload)))
+ self.end_headers()
+ self.wfile.write(payload)
+
+ def _redirect(self):
+ self.send_response(int(self.path[len('/redirect_'):]))
+ self.send_header('Location', '/method')
+ self.send_header('Content-Length', '0')
+ self.end_headers()
+
+ def _method(self, method, payload=None):
+ self.send_response(200)
+ self.send_header('Content-Length', str(len(payload or '')))
+ self.send_header('Method', method)
+ self.end_headers()
+ if payload:
+ self.wfile.write(payload)
+
+ def _status(self, status):
+ payload = f'<html>{status} NOT FOUND</html>'.encode()
+ self.send_response(int(status))
+ self.send_header('Content-Type', 'text/html; charset=utf-8')
+ self.send_header('Content-Length', str(len(payload)))
+ self.end_headers()
+ self.wfile.write(payload)
+
+ def _read_data(self):
+ if 'Content-Length' in self.headers:
+ return self.rfile.read(int(self.headers['Content-Length']))
+
+ def do_POST(self):
+ data = self._read_data() + str(self.headers).encode()
+ if self.path.startswith('/redirect_'):
+ self._redirect()
+ elif self.path.startswith('/method'):
+ self._method('POST', data)
+ elif self.path.startswith('/headers'):
+ self._headers()
+ else:
+ self._status(404)
+
+ def do_HEAD(self):
+ if self.path.startswith('/redirect_'):
+ self._redirect()
+ elif self.path.startswith('/method'):
+ self._method('HEAD')
+ else:
+ self._status(404)
+
+ def do_PUT(self):
+ data = self._read_data() + str(self.headers).encode()
+ if self.path.startswith('/redirect_'):
+ self._redirect()
+ elif self.path.startswith('/method'):
+ self._method('PUT', data)
+ else:
+ self._status(404)
+
+ def do_GET(self):
+ if self.path == '/video.html':
+ payload = b'<html><video src="/vid.mp4" /></html>'
+ self.send_response(200)
+ self.send_header('Content-Type', 'text/html; charset=utf-8')
+ self.send_header('Content-Length', str(len(payload)))
+ self.end_headers()
+ self.wfile.write(payload)
+ elif self.path == '/vid.mp4':
+ payload = b'\x00\x00\x00\x00\x20\x66\x74[video]'
+ self.send_response(200)
+ self.send_header('Content-Type', 'video/mp4')
+ self.send_header('Content-Length', str(len(payload)))
+ self.end_headers()
+ self.wfile.write(payload)
+ elif self.path == '/%E4%B8%AD%E6%96%87.html':
+ payload = b'<html><video src="/vid.mp4" /></html>'
+ self.send_response(200)
+ self.send_header('Content-Type', 'text/html; charset=utf-8')
+ self.send_header('Content-Length', str(len(payload)))
+ self.end_headers()
+ self.wfile.write(payload)
+ elif self.path == '/%c7%9f':
+ payload = b'<html><video src="/vid.mp4" /></html>'
+ self.send_response(200)
+ self.send_header('Content-Type', 'text/html; charset=utf-8')
+ self.send_header('Content-Length', str(len(payload)))
+ self.end_headers()
+ self.wfile.write(payload)
+ elif self.path.startswith('/redirect_loop'):
+ self.send_response(301)
+ self.send_header('Location', self.path)
+ self.send_header('Content-Length', '0')
+ self.end_headers()
+ elif self.path == '/redirect_dotsegments':
+ self.send_response(301)
+            # redirect to /headers via a path containing dot segments
+ self.send_header('Location', '/a/b/./../../headers')
+ self.send_header('Content-Length', '0')
+ self.end_headers()
+ elif self.path == '/redirect_dotsegments_absolute':
+ self.send_response(301)
+            # redirect to /headers via an absolute URL containing dot segments
+ self.send_header('Location', f'http://127.0.0.1:{http_server_port(self.server)}/a/b/./../../headers')
+ self.send_header('Content-Length', '0')
+ self.end_headers()
+ elif self.path.startswith('/redirect_'):
+ self._redirect()
+ elif self.path.startswith('/method'):
+ self._method('GET', str(self.headers).encode())
+ elif self.path.startswith('/headers'):
+ self._headers()
+ elif self.path.startswith('/308-to-headers'):
+ self.send_response(308)
+ self.send_header('Location', '/headers')
+ self.send_header('Content-Length', '0')
+ self.end_headers()
+ elif self.path == '/trailing_garbage':
+ payload = b'<html><video src="/vid.mp4" /></html>'
+ self.send_response(200)
+ self.send_header('Content-Type', 'text/html; charset=utf-8')
+ self.send_header('Content-Encoding', 'gzip')
+ buf = io.BytesIO()
+ with gzip.GzipFile(fileobj=buf, mode='wb') as f:
+ f.write(payload)
+ compressed = buf.getvalue() + b'trailing garbage'
+ self.send_header('Content-Length', str(len(compressed)))
+ self.end_headers()
+ self.wfile.write(compressed)
+ elif self.path == '/302-non-ascii-redirect':
+ new_url = f'http://127.0.0.1:{http_server_port(self.server)}/中文.html'
+ self.send_response(301)
+ self.send_header('Location', new_url)
+ self.send_header('Content-Length', '0')
+ self.end_headers()
+ elif self.path == '/content-encoding':
+ encodings = self.headers.get('ytdl-encoding', '')
+ payload = b'<html><video src="/vid.mp4" /></html>'
+ for encoding in filter(None, (e.strip() for e in encodings.split(','))):
+ if encoding == 'br' and brotli:
+ payload = brotli.compress(payload)
+ elif encoding == 'gzip':
+ buf = io.BytesIO()
+ with gzip.GzipFile(fileobj=buf, mode='wb') as f:
+ f.write(payload)
+ payload = buf.getvalue()
+ elif encoding == 'deflate':
+ payload = zlib.compress(payload)
+ elif encoding == 'unsupported':
+ payload = b'raw'
+ break
+ else:
+ self._status(415)
+ return
+ self.send_response(200)
+ self.send_header('Content-Encoding', encodings)
+ self.send_header('Content-Length', str(len(payload)))
+ self.end_headers()
+ self.wfile.write(payload)
+ elif self.path.startswith('/gen_'):
+ payload = b'<html></html>'
+ self.send_response(int(self.path[len('/gen_'):]))
+ self.send_header('Content-Type', 'text/html; charset=utf-8')
+ self.send_header('Content-Length', str(len(payload)))
+ self.end_headers()
+ self.wfile.write(payload)
+ elif self.path.startswith('/incompleteread'):
+ payload = b'<html></html>'
+ self.send_response(200)
+ self.send_header('Content-Type', 'text/html; charset=utf-8')
+ self.send_header('Content-Length', '234234')
+ self.end_headers()
+ self.wfile.write(payload)
+ self.finish()
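+            # Content-Length deliberately overstates the payload, so clients reading
+            # the body see the connection close early and raise IncompleteRead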
+ elif self.path.startswith('/timeout_'):
+ time.sleep(int(self.path[len('/timeout_'):]))
+ self._headers()
+ elif self.path == '/source_address':
+ payload = str(self.client_address[0]).encode()
+ self.send_response(200)
+ self.send_header('Content-Type', 'text/html; charset=utf-8')
+ self.send_header('Content-Length', str(len(payload)))
+ self.end_headers()
+ self.wfile.write(payload)
+ self.finish()
+ else:
+ self._status(404)
+
+ def send_header(self, keyword, value):
+ """
+        Forcibly allow the HTTP server to send non-percent-encoded, non-ASCII characters in headers.
+        This is against what is defined in RFC 3986; however, we need to test that we support it,
+        since some sites incorrectly do this.
+ """
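+        # (the stock BaseHTTPRequestHandler encodes header lines as latin-1, which would
+        # reject such values; buffering the UTF-8-encoded line here sidesteps that)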
+ if keyword.lower() == 'connection':
+ return super().send_header(keyword, value)
+
+ if not hasattr(self, '_headers_buffer'):
+ self._headers_buffer = []
+
+ self._headers_buffer.append(f'{keyword}: {value}\r\n'.encode())
+
+
+class TestRequestHandlerBase:
+ @classmethod
+ def setup_class(cls):
+ cls.http_httpd = http.server.ThreadingHTTPServer(
+ ('127.0.0.1', 0), HTTPTestRequestHandler)
+ cls.http_port = http_server_port(cls.http_httpd)
+ cls.http_server_thread = threading.Thread(target=cls.http_httpd.serve_forever)
+ # FIXME: we should probably stop the http server thread after each test
+ # See: https://github.com/yt-dlp/yt-dlp/pull/7094#discussion_r1199746041
+ cls.http_server_thread.daemon = True
+ cls.http_server_thread.start()
+
+ # HTTPS server
+ certfn = os.path.join(TEST_DIR, 'testcert.pem')
+ cls.https_httpd = http.server.ThreadingHTTPServer(
+ ('127.0.0.1', 0), HTTPTestRequestHandler)
+ sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+ sslctx.load_cert_chain(certfn, None)
+ cls.https_httpd.socket = sslctx.wrap_socket(cls.https_httpd.socket, server_side=True)
+ cls.https_port = http_server_port(cls.https_httpd)
+ cls.https_server_thread = threading.Thread(target=cls.https_httpd.serve_forever)
+ cls.https_server_thread.daemon = True
+ cls.https_server_thread.start()
+
+
+class TestHTTPRequestHandler(TestRequestHandlerBase):
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_verify_cert(self, handler):
+ with handler() as rh:
+ with pytest.raises(CertificateVerifyError):
+ validate_and_send(rh, Request(f'https://127.0.0.1:{self.https_port}/headers'))
+
+ with handler(verify=False) as rh:
+ r = validate_and_send(rh, Request(f'https://127.0.0.1:{self.https_port}/headers'))
+ assert r.status == 200
+ r.close()
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_ssl_error(self, handler):
+ # HTTPS server with too old TLS version
+ # XXX: is there a better way to test this than to create a new server?
+ https_httpd = http.server.ThreadingHTTPServer(
+ ('127.0.0.1', 0), HTTPTestRequestHandler)
+ sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+ https_httpd.socket = sslctx.wrap_socket(https_httpd.socket, server_side=True)
+ https_port = http_server_port(https_httpd)
+ https_server_thread = threading.Thread(target=https_httpd.serve_forever)
+ https_server_thread.daemon = True
+ https_server_thread.start()
+
+ with handler(verify=False) as rh:
+ with pytest.raises(SSLError, match=r'ssl(?:v3|/tls) alert handshake failure') as exc_info:
+ validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
+ assert not issubclass(exc_info.type, CertificateVerifyError)
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_percent_encode(self, handler):
+ with handler() as rh:
+ # Unicode characters should be encoded with uppercase percent-encoding
+ res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/中文.html'))
+ assert res.status == 200
+ res.close()
+ # don't normalize existing percent encodings
+ res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/%c7%9f'))
+ assert res.status == 200
+ res.close()
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.parametrize('path', [
+ '/a/b/./../../headers',
+ '/redirect_dotsegments',
+ # https://github.com/yt-dlp/yt-dlp/issues/9020
+ '/redirect_dotsegments_absolute',
+ ])
+ def test_remove_dot_segments(self, handler, path):
+ with handler(verbose=True) as rh:
+ # This isn't a comprehensive test,
+ # but it should be enough to check whether the handler is removing dot segments in required scenarios
+ res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}{path}'))
+ assert res.status == 200
+ assert res.url == f'http://127.0.0.1:{self.http_port}/headers'
+ res.close()
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_unicode_path_redirection(self, handler):
+ with handler() as rh:
+ r = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/302-non-ascii-redirect'))
+ assert r.url == f'http://127.0.0.1:{self.http_port}/%E4%B8%AD%E6%96%87.html'
+ r.close()
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_raise_http_error(self, handler):
+ with handler() as rh:
+ for bad_status in (400, 500, 599, 302):
+ with pytest.raises(HTTPError):
+ validate_and_send(rh, Request('http://127.0.0.1:%d/gen_%d' % (self.http_port, bad_status)))
+
+ # Should not raise an error
+ validate_and_send(rh, Request('http://127.0.0.1:%d/gen_200' % self.http_port)).close()
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_response_url(self, handler):
+ with handler() as rh:
+ # Response url should be that of the last url in redirect chain
+ res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_301'))
+ assert res.url == f'http://127.0.0.1:{self.http_port}/method'
+ res.close()
+ res2 = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
+ assert res2.url == f'http://127.0.0.1:{self.http_port}/gen_200'
+ res2.close()
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_redirect(self, handler):
+ with handler() as rh:
+ def do_req(redirect_status, method, assert_no_content=False):
+ data = b'testdata' if method in ('POST', 'PUT') else None
+ res = validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_{redirect_status}', method=method, data=data))
+
+ headers = b''
+ data_sent = b''
+ if data is not None:
+ data_sent += res.read(len(data))
+ if data_sent != data:
+ headers += data_sent
+ data_sent = b''
+
+ headers += res.read()
+
+ if assert_no_content or data is None:
+ assert b'Content-Type' not in headers
+ assert b'Content-Length' not in headers
+ else:
+ assert b'Content-Type' in headers
+ assert b'Content-Length' in headers
+
+ return data_sent.decode(), res.headers.get('method', '')
+
+ # A 303 must use either GET or HEAD for the subsequent request
+ assert do_req(303, 'POST', True) == ('', 'GET')
+ assert do_req(303, 'HEAD') == ('', 'HEAD')
+
+ assert do_req(303, 'PUT', True) == ('', 'GET')
+
+ # 301 and 302 turn only a POST into a GET; other methods are preserved
+ assert do_req(301, 'POST', True) == ('', 'GET')
+ assert do_req(301, 'HEAD') == ('', 'HEAD')
+ assert do_req(302, 'POST', True) == ('', 'GET')
+ assert do_req(302, 'HEAD') == ('', 'HEAD')
+
+ assert do_req(301, 'PUT') == ('testdata', 'PUT')
+ assert do_req(302, 'PUT') == ('testdata', 'PUT')
+
+ # 307 and 308 should not change method
+ for m in ('POST', 'PUT'):
+ assert do_req(307, m) == ('testdata', m)
+ assert do_req(308, m) == ('testdata', m)
+
+ assert do_req(307, 'HEAD') == ('', 'HEAD')
+ assert do_req(308, 'HEAD') == ('', 'HEAD')
+
+ # These should not redirect and instead raise an HTTPError
+ for code in (300, 304, 305, 306):
+ with pytest.raises(HTTPError):
+ do_req(code, 'GET')
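+
+ # Illustrative cross-reference: the same mapping is exercised at the helper level by
+ # test_get_redirect_method in test_networking_utils.py (yt_dlp.networking._helper.get_redirect_method),
+ # e.g. ('POST', 303) -> 'GET' and ('PUT', 307) -> 'PUT'; whether each handler uses that
+ # helper internally is an implementation detail not asserted here.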
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_request_cookie_header(self, handler):
+ # A Cookie header passed along with the normal headers should be accepted and handled appropriately.
+ with handler() as rh:
+ # Specified Cookie header should be used
+ res = validate_and_send(
+ rh, Request(
+ f'http://127.0.0.1:{self.http_port}/headers',
+ headers={'Cookie': 'test=test'})).read().decode()
+ assert 'Cookie: test=test' in res
+
+ # Specified Cookie header should be removed on any redirect
+ res = validate_and_send(
+ rh, Request(
+ f'http://127.0.0.1:{self.http_port}/308-to-headers',
+ headers={'Cookie': 'test=test'})).read().decode()
+ assert 'Cookie: test=test' not in res
+
+ # Specified Cookie header should override global cookiejar for that request
+ cookiejar = YoutubeDLCookieJar()
+ cookiejar.set_cookie(http.cookiejar.Cookie(
+ version=0, name='test', value='ytdlp', port=None, port_specified=False,
+ domain='127.0.0.1', domain_specified=True, domain_initial_dot=False, path='/',
+ path_specified=True, secure=False, expires=None, discard=False, comment=None,
+ comment_url=None, rest={}))
+
+ with handler(cookiejar=cookiejar) as rh:
+ data = validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/headers', headers={'cookie': 'test=test'})).read()
+ assert b'Cookie: test=ytdlp' not in data
+ assert b'Cookie: test=test' in data
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_redirect_loop(self, handler):
+ with handler() as rh:
+ with pytest.raises(HTTPError, match='redirect loop'):
+ validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_loop'))
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_incompleteread(self, handler):
+ with handler(timeout=2) as rh:
+ with pytest.raises(IncompleteRead):
+ validate_and_send(rh, Request('http://127.0.0.1:%d/incompleteread' % self.http_port)).read()
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_cookies(self, handler):
+ cookiejar = YoutubeDLCookieJar()
+ cookiejar.set_cookie(http.cookiejar.Cookie(
+ 0, 'test', 'ytdlp', None, False, '127.0.0.1', True,
+ False, '/headers', True, False, None, False, None, None, {}))
+
+ with handler(cookiejar=cookiejar) as rh:
+ data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
+ assert b'Cookie: test=ytdlp' in data
+
+ # Per request
+ with handler() as rh:
+ data = validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={'cookiejar': cookiejar})).read()
+ assert b'Cookie: test=ytdlp' in data
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_headers(self, handler):
+
+ with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
+ # Global Headers
+ data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
+ assert b'Test1: test' in data
+
+ # Per request headers, merged with global
+ data = validate_and_send(rh, Request(
+ f'http://127.0.0.1:{self.http_port}/headers', headers={'test2': 'changed', 'test3': 'test3'})).read()
+ assert b'Test1: test' in data
+ assert b'Test2: changed' in data
+ assert b'Test2: test2' not in data
+ assert b'Test3: test3' in data
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_timeout(self, handler):
+ with handler() as rh:
+ # Default timeout is 20 seconds, so this should go through
+ validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_3'))
+
+ with handler(timeout=0.5) as rh:
+ with pytest.raises(TransportError):
+ validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1'))
+
+ # Per request timeout, should override handler timeout
+ validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1', extensions={'timeout': 4}))
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_source_address(self, handler):
+ source_address = f'127.0.0.{random.randint(5, 255)}'
+ # the loopback addresses needed for this test may not be available on some systems
+ # see: https://github.com/yt-dlp/yt-dlp/issues/8890
+ verify_address_availability(source_address)
+ with handler(source_address=source_address) as rh:
+ data = validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/source_address')).read().decode()
+ assert source_address == data
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_gzip_trailing_garbage(self, handler):
+ with handler() as rh:
+ data = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage')).read().decode()
+ assert data == '<html><video src="/vid.mp4" /></html>'
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ @pytest.mark.skipif(not brotli, reason='brotli support is not installed')
+ def test_brotli(self, handler):
+ with handler() as rh:
+ res = validate_and_send(
+ rh, Request(
+ f'http://127.0.0.1:{self.http_port}/content-encoding',
+ headers={'ytdl-encoding': 'br'}))
+ assert res.headers.get('Content-Encoding') == 'br'
+ assert res.read() == b'<html><video src="/vid.mp4" /></html>'
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_deflate(self, handler):
+ with handler() as rh:
+ res = validate_and_send(
+ rh, Request(
+ f'http://127.0.0.1:{self.http_port}/content-encoding',
+ headers={'ytdl-encoding': 'deflate'}))
+ assert res.headers.get('Content-Encoding') == 'deflate'
+ assert res.read() == b'<html><video src="/vid.mp4" /></html>'
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_gzip(self, handler):
+ with handler() as rh:
+ res = validate_and_send(
+ rh, Request(
+ f'http://127.0.0.1:{self.http_port}/content-encoding',
+ headers={'ytdl-encoding': 'gzip'}))
+ assert res.headers.get('Content-Encoding') == 'gzip'
+ assert res.read() == b'<html><video src="/vid.mp4" /></html>'
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_multiple_encodings(self, handler):
+ with handler() as rh:
+ for pair in ('gzip,deflate', 'deflate, gzip', 'gzip, gzip', 'deflate, deflate'):
+ res = validate_and_send(
+ rh, Request(
+ f'http://127.0.0.1:{self.http_port}/content-encoding',
+ headers={'ytdl-encoding': pair}))
+ assert res.headers.get('Content-Encoding') == pair
+ assert res.read() == b'<html><video src="/vid.mp4" /></html>'
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_unsupported_encoding(self, handler):
+ with handler() as rh:
+ res = validate_and_send(
+ rh, Request(
+ f'http://127.0.0.1:{self.http_port}/content-encoding',
+ headers={'ytdl-encoding': 'unsupported'}))
+ assert res.headers.get('Content-Encoding') == 'unsupported'
+ assert res.read() == b'raw'
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_read(self, handler):
+ with handler() as rh:
+ res = validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/headers'))
+ assert res.readable()
+ assert res.read(1) == b'H'
+ assert res.read(3) == b'ost'
+
+
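+# Note: the proxy handlers used below (built via _build_proxy_handler) do not proxy
+# for real; they respond with '<name>: <requested url>', which is what the
+# assertions in TestHTTPProxy check against.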
+class TestHTTPProxy(TestRequestHandlerBase):
+ @classmethod
+ def setup_class(cls):
+ super().setup_class()
+ # HTTP Proxy server
+ cls.proxy = http.server.ThreadingHTTPServer(
+ ('127.0.0.1', 0), _build_proxy_handler('normal'))
+ cls.proxy_port = http_server_port(cls.proxy)
+ cls.proxy_thread = threading.Thread(target=cls.proxy.serve_forever)
+ cls.proxy_thread.daemon = True
+ cls.proxy_thread.start()
+
+ # Geo proxy server
+ cls.geo_proxy = http.server.ThreadingHTTPServer(
+ ('127.0.0.1', 0), _build_proxy_handler('geo'))
+ cls.geo_port = http_server_port(cls.geo_proxy)
+ cls.geo_proxy_thread = threading.Thread(target=cls.geo_proxy.serve_forever)
+ cls.geo_proxy_thread.daemon = True
+ cls.geo_proxy_thread.start()
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_http_proxy(self, handler):
+ http_proxy = f'http://127.0.0.1:{self.proxy_port}'
+ geo_proxy = f'http://127.0.0.1:{self.geo_port}'
+
+ # Test global http proxy
+ # Test per request http proxy
+ # Test per request http proxy disables proxy
+ url = 'http://foo.com/bar'
+
+ # Global HTTP proxy
+ with handler(proxies={'http': http_proxy}) as rh:
+ res = validate_and_send(rh, Request(url)).read().decode()
+ assert res == f'normal: {url}'
+
+ # Per request proxy overrides global
+ res = validate_and_send(rh, Request(url, proxies={'http': geo_proxy})).read().decode()
+ assert res == f'geo: {url}'
+
+ # and setting to None disables all proxies for that request
+ real_url = f'http://127.0.0.1:{self.http_port}/headers'
+ res = validate_and_send(
+ rh, Request(real_url, proxies={'http': None})).read().decode()
+ assert res != f'normal: {real_url}'
+ assert 'Accept' in res
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_noproxy(self, handler):
+ with handler(proxies={'proxy': f'http://127.0.0.1:{self.proxy_port}'}) as rh:
+ # NO_PROXY
+ for no_proxy in (f'127.0.0.1:{self.http_port}', '127.0.0.1', 'localhost'):
+ nop_response = validate_and_send(
+ rh, Request(f'http://127.0.0.1:{self.http_port}/headers', proxies={'no': no_proxy})).read().decode(
+ 'utf-8')
+ assert 'Accept' in nop_response
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_allproxy(self, handler):
+ url = 'http://foo.com/bar'
+ with handler() as rh:
+ response = validate_and_send(rh, Request(url, proxies={'all': f'http://127.0.0.1:{self.proxy_port}'})).read().decode(
+ 'utf-8')
+ assert response == f'normal: {url}'
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_http_proxy_with_idn(self, handler):
+ with handler(proxies={
+ 'http': f'http://127.0.0.1:{self.proxy_port}',
+ }) as rh:
+ url = 'http://中文.tw/'
+ response = rh.send(Request(url)).read().decode()
+ # b'xn--fiq228c' is '中文'.encode('idna')
+ assert response == 'normal: http://xn--fiq228c.tw/'
+
+
+class TestClientCertificate:
+
+ @classmethod
+ def setup_class(cls):
+ certfn = os.path.join(TEST_DIR, 'testcert.pem')
+ cls.certdir = os.path.join(TEST_DIR, 'testdata', 'certificate')
+ cacertfn = os.path.join(cls.certdir, 'ca.crt')
+ cls.httpd = http.server.ThreadingHTTPServer(('127.0.0.1', 0), HTTPTestRequestHandler)
+ sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+ sslctx.verify_mode = ssl.CERT_REQUIRED
+ sslctx.load_verify_locations(cafile=cacertfn)
+ sslctx.load_cert_chain(certfn, None)
+ cls.httpd.socket = sslctx.wrap_socket(cls.httpd.socket, server_side=True)
+ cls.port = http_server_port(cls.httpd)
+ cls.server_thread = threading.Thread(target=cls.httpd.serve_forever)
+ cls.server_thread.daemon = True
+ cls.server_thread.start()
+
+ def _run_test(self, handler, **handler_kwargs):
+ with handler(
+ # Disable client-side verification of the self-signed testcert.pem;
+ # this test exercises a server-side check, so it is unaffected
+ verify=False,
+ **handler_kwargs,
+ ) as rh:
+ validate_and_send(rh, Request(f'https://127.0.0.1:{self.port}/video.html')).read().decode()
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_certificate_combined_nopass(self, handler):
+ self._run_test(handler, client_cert={
+ 'client_certificate': os.path.join(self.certdir, 'clientwithkey.crt'),
+ })
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_certificate_nocombined_nopass(self, handler):
+ self._run_test(handler, client_cert={
+ 'client_certificate': os.path.join(self.certdir, 'client.crt'),
+ 'client_certificate_key': os.path.join(self.certdir, 'client.key'),
+ })
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_certificate_combined_pass(self, handler):
+ self._run_test(handler, client_cert={
+ 'client_certificate': os.path.join(self.certdir, 'clientwithencryptedkey.crt'),
+ 'client_certificate_password': 'foobar',
+ })
+
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_certificate_nocombined_pass(self, handler):
+ self._run_test(handler, client_cert={
+ 'client_certificate': os.path.join(self.certdir, 'client.crt'),
+ 'client_certificate_key': os.path.join(self.certdir, 'clientencrypted.key'),
+ 'client_certificate_password': 'foobar',
+ })
+
+
+class TestRequestHandlerMisc:
+ """Misc generic tests for request handlers, not related to request or validation testing"""
+ @pytest.mark.parametrize('handler,logger_name', [
+ ('Requests', 'urllib3'),
+ ('Websockets', 'websockets.client'),
+ ('Websockets', 'websockets.server')
+ ], indirect=['handler'])
+ def test_remove_logging_handler(self, handler, logger_name):
+ # Ensure any logging handlers, which may contain a YoutubeDL instance,
+ # are removed when we close the request handler
+ # See: https://github.com/yt-dlp/yt-dlp/issues/8922
+ logging_handlers = logging.getLogger(logger_name).handlers
+ before_count = len(logging_handlers)
+ rh = handler()
+ assert len(logging_handlers) == before_count + 1
+ rh.close()
+ assert len(logging_handlers) == before_count
+
+
+class TestUrllibRequestHandler(TestRequestHandlerBase):
+ @pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
+ def test_file_urls(self, handler):
+ # See https://github.com/ytdl-org/youtube-dl/issues/8227
+ tf = tempfile.NamedTemporaryFile(delete=False)
+ tf.write(b'foobar')
+ tf.close()
+ req = Request(pathlib.Path(tf.name).as_uri())
+ with handler() as rh:
+ with pytest.raises(UnsupportedRequest):
+ rh.validate(req)
+
+ # Test that urllib never loaded FileHandler
+ with pytest.raises(TransportError):
+ rh.send(req)
+
+ with handler(enable_file_urls=True) as rh:
+ res = validate_and_send(rh, req)
+ assert res.read() == b'foobar'
+ res.close()
+
+ os.unlink(tf.name)
+
+ @pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
+ def test_http_error_returns_content(self, handler):
+ # urllib HTTPError will try to close the underlying response if the reference to the HTTPError object is lost
+ def get_response():
+ with handler() as rh:
+ # request a URL that returns a 404 with an HTML body
+ try:
+ validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_404'))
+ except HTTPError as e:
+ return e.response
+
+ assert get_response().read() == b'<html></html>'
+
+ @pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
+ def test_verify_cert_error_text(self, handler):
+ # Check the output of the error message
+ with handler() as rh:
+ with pytest.raises(
+ CertificateVerifyError,
+ match=r'\[SSL: CERTIFICATE_VERIFY_FAILED\] certificate verify failed: self.signed certificate'
+ ):
+ validate_and_send(rh, Request(f'https://127.0.0.1:{self.https_port}/headers'))
+
+ @pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
+ @pytest.mark.parametrize('req,match,version_check', [
+ # https://github.com/python/cpython/blob/987b712b4aeeece336eed24fcc87a950a756c3e2/Lib/http/client.py#L1256
+ # bpo-39603: Check implemented in 3.7.9+, 3.8.5+
+ (
+ Request('http://127.0.0.1', method='GET\n'),
+ 'method can\'t contain control characters',
+ lambda v: v < (3, 7, 9) or (3, 8, 0) <= v < (3, 8, 5)
+ ),
+ # https://github.com/python/cpython/blob/987b712b4aeeece336eed24fcc87a950a756c3e2/Lib/http/client.py#L1265
+ # bpo-38576: Check implemented in 3.7.8+, 3.8.3+
+ (
+ Request('http://127.0.0. 1', method='GET'),
+ 'URL can\'t contain control characters',
+ lambda v: v < (3, 7, 8) or (3, 8, 0) <= v < (3, 8, 3)
+ ),
+ # https://github.com/python/cpython/blob/987b712b4aeeece336eed24fcc87a950a756c3e2/Lib/http/client.py#L1288C31-L1288C50
+ (Request('http://127.0.0.1', headers={'foo\n': 'bar'}), 'Invalid header name', None),
+ ])
+ def test_httplib_validation_errors(self, handler, req, match, version_check):
+ if version_check and version_check(sys.version_info):
+ pytest.skip(f'Python {sys.version} version does not have the required validation for this test.')
+
+ with handler() as rh:
+ with pytest.raises(RequestError, match=match) as exc_info:
+ validate_and_send(rh, req)
+ assert not isinstance(exc_info.value, TransportError)
+
+
+@pytest.mark.parametrize('handler', ['Requests'], indirect=True)
+class TestRequestsRequestHandler(TestRequestHandlerBase):
+ @pytest.mark.parametrize('raised,expected', [
+ (lambda: requests.exceptions.ConnectTimeout(), TransportError),
+ (lambda: requests.exceptions.ReadTimeout(), TransportError),
+ (lambda: requests.exceptions.Timeout(), TransportError),
+ (lambda: requests.exceptions.ConnectionError(), TransportError),
+ (lambda: requests.exceptions.ProxyError(), ProxyError),
+ (lambda: requests.exceptions.SSLError('12[CERTIFICATE_VERIFY_FAILED]34'), CertificateVerifyError),
+ (lambda: requests.exceptions.SSLError(), SSLError),
+ (lambda: requests.exceptions.InvalidURL(), RequestError),
+ (lambda: requests.exceptions.InvalidHeader(), RequestError),
+ # catch-all: https://github.com/psf/requests/blob/main/src/requests/adapters.py#L535
+ (lambda: urllib3.exceptions.HTTPError(), TransportError),
+ (lambda: requests.exceptions.RequestException(), RequestError)
+ # (lambda: requests.exceptions.TooManyRedirects(), HTTPError) - Needs a response object
+ ])
+ def test_request_error_mapping(self, handler, monkeypatch, raised, expected):
+ with handler() as rh:
+ def mock_get_instance(*args, **kwargs):
+ class MockSession:
+ def request(self, *args, **kwargs):
+ raise raised()
+ return MockSession()
+
+ monkeypatch.setattr(rh, '_get_instance', mock_get_instance)
+
+ with pytest.raises(expected) as exc_info:
+ rh.send(Request('http://fake'))
+
+ assert exc_info.type is expected
+
+ @pytest.mark.parametrize('raised,expected,match', [
+ (lambda: urllib3.exceptions.SSLError(), SSLError, None),
+ (lambda: urllib3.exceptions.TimeoutError(), TransportError, None),
+ (lambda: urllib3.exceptions.ReadTimeoutError(None, None, None), TransportError, None),
+ (lambda: urllib3.exceptions.ProtocolError(), TransportError, None),
+ (lambda: urllib3.exceptions.DecodeError(), TransportError, None),
+ (lambda: urllib3.exceptions.HTTPError(), TransportError, None), # catch-all
+ (
+ lambda: urllib3.exceptions.ProtocolError('error', http.client.IncompleteRead(partial=b'abc', expected=4)),
+ IncompleteRead,
+ '3 bytes read, 4 more expected'
+ ),
+ (
+ lambda: urllib3.exceptions.ProtocolError('error', urllib3.exceptions.IncompleteRead(partial=3, expected=5)),
+ IncompleteRead,
+ '3 bytes read, 5 more expected'
+ ),
+ ])
+ def test_response_error_mapping(self, handler, monkeypatch, raised, expected, match):
+ from requests.models import Response as RequestsResponse
+ from urllib3.response import HTTPResponse as Urllib3Response
+
+ from yt_dlp.networking._requests import RequestsResponseAdapter
+ requests_res = RequestsResponse()
+ requests_res.raw = Urllib3Response(body=b'', status=200)
+ res = RequestsResponseAdapter(requests_res)
+
+ def mock_read(*args, **kwargs):
+ raise raised()
+ monkeypatch.setattr(res.fp, 'read', mock_read)
+
+ with pytest.raises(expected, match=match) as exc_info:
+ res.read()
+
+ assert exc_info.type is expected
+
+ def test_close(self, handler, monkeypatch):
+ rh = handler()
+ session = rh._get_instance(cookiejar=rh.cookiejar)
+ called = False
+ original_close = session.close
+
+ def mock_close(*args, **kwargs):
+ nonlocal called
+ called = True
+ return original_close(*args, **kwargs)
+
+ monkeypatch.setattr(session, 'close', mock_close)
+ rh.close()
+ assert called
+
+
+def run_validation(handler, error, req, **handler_kwargs):
+ with handler(**handler_kwargs) as rh:
+ if error:
+ with pytest.raises(error):
+ rh.validate(req)
+ else:
+ rh.validate(req)
+
+
+class TestRequestHandlerValidation:
+
+ class ValidationRH(RequestHandler):
+ def _send(self, request):
+ raise RequestError('test')
+
+ class NoCheckRH(ValidationRH):
+ _SUPPORTED_FEATURES = None
+ _SUPPORTED_PROXY_SCHEMES = None
+ _SUPPORTED_URL_SCHEMES = None
+
+ def _check_extensions(self, extensions):
+ extensions.clear()
+
+ class HTTPSupportedRH(ValidationRH):
+ _SUPPORTED_URL_SCHEMES = ('http',)
+
+ URL_SCHEME_TESTS = [
+ # scheme, expected to fail, handler kwargs
+ ('Urllib', [
+ ('http', False, {}),
+ ('https', False, {}),
+ ('data', False, {}),
+ ('ftp', False, {}),
+ ('file', UnsupportedRequest, {}),
+ ('file', False, {'enable_file_urls': True}),
+ ]),
+ ('Requests', [
+ ('http', False, {}),
+ ('https', False, {}),
+ ]),
+ ('Websockets', [
+ ('ws', False, {}),
+ ('wss', False, {}),
+ ]),
+ (NoCheckRH, [('http', False, {})]),
+ (ValidationRH, [('http', UnsupportedRequest, {})])
+ ]
+
+ PROXY_SCHEME_TESTS = [
+ # scheme, expected to fail
+ ('Urllib', 'http', [
+ ('http', False),
+ ('https', UnsupportedRequest),
+ ('socks4', False),
+ ('socks4a', False),
+ ('socks5', False),
+ ('socks5h', False),
+ ('socks', UnsupportedRequest),
+ ]),
+ ('Requests', 'http', [
+ ('http', False),
+ ('https', False),
+ ('socks4', False),
+ ('socks4a', False),
+ ('socks5', False),
+ ('socks5h', False),
+ ]),
+ (NoCheckRH, 'http', [('http', False)]),
+ (HTTPSupportedRH, 'http', [('http', UnsupportedRequest)]),
+ ('Websockets', 'ws', [('http', UnsupportedRequest)]),
+ ]
+
+ PROXY_KEY_TESTS = [
+ # key, expected to fail
+ ('Urllib', [
+ ('all', False),
+ ('unrelated', False),
+ ]),
+ ('Requests', [
+ ('all', False),
+ ('unrelated', False),
+ ]),
+ (NoCheckRH, [('all', False)]),
+ (HTTPSupportedRH, [('all', UnsupportedRequest)]),
+ (HTTPSupportedRH, [('no', UnsupportedRequest)]),
+ ]
+
+ EXTENSION_TESTS = [
+ ('Urllib', 'http', [
+ ({'cookiejar': 'notacookiejar'}, AssertionError),
+ ({'cookiejar': YoutubeDLCookieJar()}, False),
+ ({'cookiejar': CookieJar()}, AssertionError),
+ ({'timeout': 1}, False),
+ ({'timeout': 'notatimeout'}, AssertionError),
+ ({'unsupported': 'value'}, UnsupportedRequest),
+ ]),
+ ('Requests', 'http', [
+ ({'cookiejar': 'notacookiejar'}, AssertionError),
+ ({'cookiejar': YoutubeDLCookieJar()}, False),
+ ({'timeout': 1}, False),
+ ({'timeout': 'notatimeout'}, AssertionError),
+ ({'unsupported': 'value'}, UnsupportedRequest),
+ ]),
+ (NoCheckRH, 'http', [
+ ({'cookiejar': 'notacookiejar'}, False),
+ ({'somerandom': 'test'}, False), # but any extension is allowed through
+ ]),
+ ('Websockets', 'ws', [
+ ({'cookiejar': YoutubeDLCookieJar()}, False),
+ ({'timeout': 2}, False),
+ ]),
+ ]
+
+ @pytest.mark.parametrize('handler,scheme,fail,handler_kwargs', [
+ (handler_tests[0], scheme, fail, handler_kwargs)
+ for handler_tests in URL_SCHEME_TESTS
+ for scheme, fail, handler_kwargs in handler_tests[1]
+ ], indirect=['handler'])
+ def test_url_scheme(self, handler, scheme, fail, handler_kwargs):
+ run_validation(handler, fail, Request(f'{scheme}://'), **(handler_kwargs or {}))
+
+ @pytest.mark.parametrize('handler,fail', [('Urllib', False), ('Requests', False)], indirect=['handler'])
+ def test_no_proxy(self, handler, fail):
+ run_validation(handler, fail, Request('http://', proxies={'no': '127.0.0.1,github.com'}))
+ run_validation(handler, fail, Request('http://'), proxies={'no': '127.0.0.1,github.com'})
+
+ @pytest.mark.parametrize('handler,proxy_key,fail', [
+ (handler_tests[0], proxy_key, fail)
+ for handler_tests in PROXY_KEY_TESTS
+ for proxy_key, fail in handler_tests[1]
+ ], indirect=['handler'])
+ def test_proxy_key(self, handler, proxy_key, fail):
+ run_validation(handler, fail, Request('http://', proxies={proxy_key: 'http://example.com'}))
+ run_validation(handler, fail, Request('http://'), proxies={proxy_key: 'http://example.com'})
+
+ @pytest.mark.parametrize('handler,req_scheme,scheme,fail', [
+ (handler_tests[0], handler_tests[1], scheme, fail)
+ for handler_tests in PROXY_SCHEME_TESTS
+ for scheme, fail in handler_tests[2]
+ ], indirect=['handler'])
+ def test_proxy_scheme(self, handler, req_scheme, scheme, fail):
+ run_validation(handler, fail, Request(f'{req_scheme}://', proxies={req_scheme: f'{scheme}://example.com'}))
+ run_validation(handler, fail, Request(f'{req_scheme}://'), proxies={req_scheme: f'{scheme}://example.com'})
+
+ @pytest.mark.parametrize('handler', ['Urllib', HTTPSupportedRH, 'Requests'], indirect=True)
+ def test_empty_proxy(self, handler):
+ run_validation(handler, False, Request('http://', proxies={'http': None}))
+ run_validation(handler, False, Request('http://'), proxies={'http': None})
+
+ @pytest.mark.parametrize('proxy_url', ['//example.com', 'example.com', '127.0.0.1', '/a/b/c'])
+ @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+ def test_invalid_proxy_url(self, handler, proxy_url):
+ run_validation(handler, UnsupportedRequest, Request('http://', proxies={'http': proxy_url}))
+
+ @pytest.mark.parametrize('handler,scheme,extensions,fail', [
+ (handler_tests[0], handler_tests[1], extensions, fail)
+ for handler_tests in EXTENSION_TESTS
+ for extensions, fail in handler_tests[2]
+ ], indirect=['handler'])
+ def test_extension(self, handler, scheme, extensions, fail):
+ run_validation(
+ handler, fail, Request(f'{scheme}://', extensions=extensions))
+
+ def test_invalid_request_type(self):
+ rh = self.ValidationRH(logger=FakeLogger())
+ for method in (rh.validate, rh.send):
+ with pytest.raises(TypeError, match='Expected an instance of Request'):
+ method('not a request')
+
+
+class FakeResponse(Response):
+ def __init__(self, request):
+ # XXX: we could make request part of standard response interface
+ self.request = request
+ super().__init__(fp=io.BytesIO(b''), headers={}, url=request.url)
+
+
+class FakeRH(RequestHandler):
+
+ def _validate(self, request):
+ return
+
+ def _send(self, request: Request):
+ if request.url.startswith('ssl://'):
+ raise SSLError(request.url[len('ssl://'):])
+ return FakeResponse(request)
+
+
+class FakeRHYDL(FakeYDL):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._request_director = self.build_request_director([FakeRH])
+
+
+class AllUnsupportedRHYDL(FakeYDL):
+
+ def __init__(self, *args, **kwargs):
+
+ class UnsupportedRH(RequestHandler):
+ def _send(self, request: Request):
+ pass
+
+ _SUPPORTED_FEATURES = ()
+ _SUPPORTED_PROXY_SCHEMES = ()
+ _SUPPORTED_URL_SCHEMES = ()
+
+ super().__init__(*args, **kwargs)
+ self._request_director = self.build_request_director([UnsupportedRH])
+
+
+class TestRequestDirector:
+
+ def test_handler_operations(self):
+ director = RequestDirector(logger=FakeLogger())
+ handler = FakeRH(logger=FakeLogger())
+ director.add_handler(handler)
+ assert director.handlers.get(FakeRH.RH_KEY) is handler
+
+ # Adding a handler with the same RH_KEY should overwrite the existing one
+ handler2 = FakeRH(logger=FakeLogger())
+ director.add_handler(handler2)
+ assert director.handlers.get(FakeRH.RH_KEY) is not handler
+ assert director.handlers.get(FakeRH.RH_KEY) is handler2
+ assert len(director.handlers) == 1
+
+ class AnotherFakeRH(FakeRH):
+ pass
+ director.add_handler(AnotherFakeRH(logger=FakeLogger()))
+ assert len(director.handlers) == 2
+ assert director.handlers.get(AnotherFakeRH.RH_KEY).RH_KEY == AnotherFakeRH.RH_KEY
+
+ director.handlers.pop(FakeRH.RH_KEY, None)
+ assert director.handlers.get(FakeRH.RH_KEY) is None
+ assert len(director.handlers) == 1
+
+ # RequestErrors should pass through
+ with pytest.raises(SSLError):
+ director.send(Request('ssl://something'))
+
+ def test_send(self):
+ director = RequestDirector(logger=FakeLogger())
+ with pytest.raises(RequestError):
+ director.send(Request('any://'))
+ director.add_handler(FakeRH(logger=FakeLogger()))
+ assert isinstance(director.send(Request('http://')), FakeResponse)
+
+ def test_unsupported_handlers(self):
+ class SupportedRH(RequestHandler):
+ _SUPPORTED_URL_SCHEMES = ['http']
+
+ def _send(self, request: Request):
+ return Response(fp=io.BytesIO(b'supported'), headers={}, url=request.url)
+
+ director = RequestDirector(logger=FakeLogger())
+ director.add_handler(SupportedRH(logger=FakeLogger()))
+ director.add_handler(FakeRH(logger=FakeLogger()))
+
+ # First should take preference
+ assert director.send(Request('http://')).read() == b'supported'
+ assert director.send(Request('any://')).read() == b''
+
+ director.handlers.pop(FakeRH.RH_KEY)
+ with pytest.raises(NoSupportingHandlers):
+ director.send(Request('any://'))
+
+ def test_unexpected_error(self):
+ director = RequestDirector(logger=FakeLogger())
+
+ class UnexpectedRH(FakeRH):
+ def _send(self, request: Request):
+ raise TypeError('something')
+
+ director.add_handler(UnexpectedRH(logger=FakeLogger))
+ with pytest.raises(NoSupportingHandlers, match=r'1 unexpected error'):
+ director.send(Request('any://'))
+
+ director.handlers.clear()
+ assert len(director.handlers) == 0
+
+ # Should not be fatal
+ director.add_handler(FakeRH(logger=FakeLogger()))
+ director.add_handler(UnexpectedRH(logger=FakeLogger))
+ assert director.send(Request('any://'))
+
+ def test_preference(self):
+ director = RequestDirector(logger=FakeLogger())
+ director.add_handler(FakeRH(logger=FakeLogger()))
+
+ class SomeRH(RequestHandler):
+ _SUPPORTED_URL_SCHEMES = ['http']
+
+ def _send(self, request: Request):
+ return Response(fp=io.BytesIO(b'supported'), headers={}, url=request.url)
+
+ def some_preference(rh, request):
+ return (0 if not isinstance(rh, SomeRH)
+ else 100 if 'prefer' in request.headers
+ else -1)
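+ # (higher preference values win: SomeRH is chosen only when the
+ # 'prefer' header is present, as the assertions below show)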
+
+ director.add_handler(SomeRH(logger=FakeLogger()))
+ director.preferences.add(some_preference)
+
+ assert director.send(Request('http://')).read() == b''
+ assert director.send(Request('http://', headers={'prefer': '1'})).read() == b'supported'
+
+ def test_close(self, monkeypatch):
+ director = RequestDirector(logger=FakeLogger())
+ director.add_handler(FakeRH(logger=FakeLogger()))
+ called = False
+
+ def mock_close(*args, **kwargs):
+ nonlocal called
+ called = True
+
+ monkeypatch.setattr(director.handlers[FakeRH.RH_KEY], 'close', mock_close)
+ director.close()
+ assert called
+
+
+# XXX: do we want to move this to test_YoutubeDL.py?
+class TestYoutubeDLNetworking:
+
+ @staticmethod
+ def build_handler(ydl, handler: RequestHandler = FakeRH):
+ return ydl.build_request_director([handler]).handlers.get(handler.RH_KEY)
+
+ def test_compat_opener(self):
+ with FakeYDL() as ydl:
+ with warnings.catch_warnings():
+ warnings.simplefilter('ignore', category=DeprecationWarning)
+ assert isinstance(ydl._opener, urllib.request.OpenerDirector)
+
+ @pytest.mark.parametrize('proxy,expected', [
+ ('http://127.0.0.1:8080', {'all': 'http://127.0.0.1:8080'}),
+ ('', {'all': '__noproxy__'}),
+ (None, {'http': 'http://127.0.0.1:8081', 'https': 'http://127.0.0.1:8081'}) # from the environment; note it is also applied to https
+ ])
+ def test_proxy(self, proxy, expected):
+ old_http_proxy = os.environ.get('HTTP_PROXY')
+ try:
+ os.environ['HTTP_PROXY'] = 'http://127.0.0.1:8081' # ensure that provided proxies override env
+ with FakeYDL({'proxy': proxy}) as ydl:
+ assert ydl.proxies == expected
+ finally:
+ if old_http_proxy:
+ os.environ['HTTP_PROXY'] = old_http_proxy
+
+ def test_compat_request(self):
+ with FakeRHYDL() as ydl:
+ assert ydl.urlopen('test://')
+ urllib_req = urllib.request.Request('http://foo.bar', data=b'test', method='PUT', headers={'X-Test': '1'})
+ urllib_req.add_unredirected_header('Cookie', 'bob=bob')
+ urllib_req.timeout = 2
+ with warnings.catch_warnings():
+ warnings.simplefilter('ignore', category=DeprecationWarning)
+ req = ydl.urlopen(urllib_req).request
+ assert req.url == urllib_req.get_full_url()
+ assert req.data == urllib_req.data
+ assert req.method == urllib_req.get_method()
+ assert 'X-Test' in req.headers
+ assert 'Cookie' in req.headers
+ assert req.extensions.get('timeout') == 2
+
+ with pytest.raises(AssertionError):
+ ydl.urlopen(None)
+
+ def test_extract_basic_auth(self):
+ with FakeRHYDL() as ydl:
+ res = ydl.urlopen(Request('http://user:pass@foo.bar'))
+ assert res.request.headers['Authorization'] == 'Basic dXNlcjpwYXNz'
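+ # Worked example: base64.b64encode(b'user:pass') == b'dXNlcjpwYXNz'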
+
+ def test_sanitize_url(self):
+ with FakeRHYDL() as ydl:
+ res = ydl.urlopen(Request('httpss://foo.bar'))
+ assert res.request.url == 'https://foo.bar'
+
+ def test_file_urls_error(self):
+ # use urllib handler
+ with FakeYDL() as ydl:
+ with pytest.raises(RequestError, match=r'file:// URLs are disabled by default'):
+ ydl.urlopen('file://')
+
+ @pytest.mark.parametrize('scheme', (['ws', 'wss']))
+ def test_websocket_unavailable_error(self, scheme):
+ with AllUnsupportedRHYDL() as ydl:
+ with pytest.raises(RequestError, match=r'This request requires WebSocket support'):
+ ydl.urlopen(f'{scheme}://')
+
+ def test_legacy_server_connect_error(self):
+ with FakeRHYDL() as ydl:
+ for error in ('UNSAFE_LEGACY_RENEGOTIATION_DISABLED', 'SSLV3_ALERT_HANDSHAKE_FAILURE'):
+ with pytest.raises(RequestError, match=r'Try using --legacy-server-connect'):
+ ydl.urlopen(f'ssl://{error}')
+
+ with pytest.raises(SSLError, match='testerror'):
+ ydl.urlopen('ssl://testerror')
+
+ @pytest.mark.parametrize('proxy_key,proxy_url,expected', [
+ ('http', '__noproxy__', None),
+ ('no', '127.0.0.1,foo.bar', '127.0.0.1,foo.bar'),
+ ('https', 'example.com', 'http://example.com'),
+ ('https', '//example.com', 'http://example.com'),
+ ('https', 'socks5://example.com', 'socks5h://example.com'),
+ ('http', 'socks://example.com', 'socks4://example.com'),
+ ('http', 'socks4://example.com', 'socks4://example.com'),
+ ('unrelated', '/bad/proxy', '/bad/proxy'), # clean_proxies should ignore bad proxies
+ ])
+ def test_clean_proxy(self, proxy_key, proxy_url, expected):
+ # proxies should be cleaned in urlopen()
+ with FakeRHYDL() as ydl:
+ req = ydl.urlopen(Request('test://', proxies={proxy_key: proxy_url})).request
+ assert req.proxies[proxy_key] == expected
+
+ # and should also be cleaned when building the handler
+ env_key = f'{proxy_key.upper()}_PROXY'
+ old_env_proxy = os.environ.get(env_key)
+ try:
+ os.environ[env_key] = proxy_url # ensure the proxy is sourced from the environment
+ with FakeYDL() as ydl:
+ rh = self.build_handler(ydl)
+ assert rh.proxies[proxy_key] == expected
+ finally:
+ if old_env_proxy:
+ os.environ[env_key] = old_env_proxy
+
+ def test_clean_proxy_header(self):
+ with FakeRHYDL() as ydl:
+ req = ydl.urlopen(Request('test://', headers={'ytdl-request-proxy': '//foo.bar'})).request
+ assert 'ytdl-request-proxy' not in req.headers
+ assert req.proxies == {'all': 'http://foo.bar'}
+
+ with FakeYDL({'http_headers': {'ytdl-request-proxy': '//foo.bar'}}) as ydl:
+ rh = self.build_handler(ydl)
+ assert 'ytdl-request-proxy' not in rh.headers
+ assert rh.proxies == {'all': 'http://foo.bar'}
+
+ def test_clean_header(self):
+ with FakeRHYDL() as ydl:
+ res = ydl.urlopen(Request('test://', headers={'Youtubedl-no-compression': True}))
+ assert 'Youtubedl-no-compression' not in res.request.headers
+ assert res.request.headers.get('Accept-Encoding') == 'identity'
+
+ with FakeYDL({'http_headers': {'Youtubedl-no-compression': True}}) as ydl:
+ rh = self.build_handler(ydl)
+ assert 'Youtubedl-no-compression' not in rh.headers
+ assert rh.headers.get('Accept-Encoding') == 'identity'
+
+ with FakeYDL({'http_headers': {'Ytdl-socks-proxy': 'socks://localhost:1080'}}) as ydl:
+ rh = self.build_handler(ydl)
+ assert 'Ytdl-socks-proxy' not in rh.headers
+
+ def test_build_handler_params(self):
+ with FakeYDL({
+ 'http_headers': {'test': 'testtest'},
+ 'socket_timeout': 2,
+ 'proxy': 'http://127.0.0.1:8080',
+ 'source_address': '127.0.0.45',
+ 'debug_printtraffic': True,
+ 'compat_opts': ['no-certifi'],
+ 'nocheckcertificate': True,
+ 'legacyserverconnect': True,
+ }) as ydl:
+ rh = self.build_handler(ydl)
+ assert rh.headers.get('test') == 'testtest'
+ assert 'Accept' in rh.headers # ensure std_headers are still there
+ assert rh.timeout == 2
+ assert rh.proxies.get('all') == 'http://127.0.0.1:8080'
+ assert rh.source_address == '127.0.0.45'
+ assert rh.verbose is True
+ assert rh.prefer_system_certs is True
+ assert rh.verify is False
+ assert rh.legacy_ssl_support is True
+
+ @pytest.mark.parametrize('ydl_params', [
+ {'client_certificate': 'fakecert.crt'},
+ {'client_certificate': 'fakecert.crt', 'client_certificate_key': 'fakekey.key'},
+ {'client_certificate': 'fakecert.crt', 'client_certificate_key': 'fakekey.key', 'client_certificate_password': 'foobar'},
+ {'client_certificate_key': 'fakekey.key', 'client_certificate_password': 'foobar'},
+ ])
+ def test_client_certificate(self, ydl_params):
+ with FakeYDL(ydl_params) as ydl:
+ rh = self.build_handler(ydl)
+ assert rh._client_cert == ydl_params # XXX: Too bound to implementation
+
+ def test_urllib_file_urls(self):
+ with FakeYDL({'enable_file_urls': False}) as ydl:
+ rh = self.build_handler(ydl, UrllibRH)
+ assert rh.enable_file_urls is False
+
+ with FakeYDL({'enable_file_urls': True}) as ydl:
+ rh = self.build_handler(ydl, UrllibRH)
+ assert rh.enable_file_urls is True
+
+ def test_compat_opt_prefer_urllib(self):
+ # This assumes urllib only has a preference when this compat opt is given
+ with FakeYDL({'compat_opts': ['prefer-legacy-http-handler']}) as ydl:
+ director = ydl.build_request_director([UrllibRH])
+ assert len(director.preferences) == 1
+ assert director.preferences.pop()(UrllibRH, None)
+
+
+class TestRequest:
+
+ def test_query(self):
+ req = Request('http://example.com?q=something', query={'v': 'xyz'})
+ assert req.url == 'http://example.com?q=something&v=xyz'
+
+ req.update(query={'v': '123'})
+ assert req.url == 'http://example.com?q=something&v=123'
+ req.update(url='http://example.com', query={'v': 'xyz'})
+ assert req.url == 'http://example.com?v=xyz'
+
+ def test_method(self):
+ req = Request('http://example.com')
+ assert req.method == 'GET'
+ req.data = b'test'
+ assert req.method == 'POST'
+ req.data = None
+ assert req.method == 'GET'
+ req.data = b'test2'
+ req.method = 'PUT'
+ assert req.method == 'PUT'
+ req.data = None
+ assert req.method == 'PUT'
+ with pytest.raises(TypeError):
+ req.method = 1
+
+ def test_request_helpers(self):
+ assert HEADRequest('http://example.com').method == 'HEAD'
+ assert PUTRequest('http://example.com').method == 'PUT'
+
+ def test_headers(self):
+ req = Request('http://example.com', headers={'tesT': 'test'})
+ assert req.headers == HTTPHeaderDict({'test': 'test'})
+ req.update(headers={'teSt2': 'test2'})
+ assert req.headers == HTTPHeaderDict({'test': 'test', 'test2': 'test2'})
+
+ req.headers = new_headers = HTTPHeaderDict({'test': 'test'})
+ assert req.headers == HTTPHeaderDict({'test': 'test'})
+ assert req.headers is new_headers
+
+ # assigning a plain dict converts it to a case-insensitive HTTPHeaderDict
+ req.headers = new_headers = {'test2': 'test2'}
+ assert isinstance(req.headers, HTTPHeaderDict)
+ assert req.headers is not new_headers
+
+ with pytest.raises(TypeError):
+ req.headers = None
+
+ def test_data_type(self):
+ req = Request('http://example.com')
+ assert req.data is None
+ # test bytes is allowed
+ req.data = b'test'
+ assert req.data == b'test'
+ # test iterable of bytes is allowed
+ i = [b'test', b'test2']
+ req.data = i
+ assert req.data == i
+
+ # test file-like object is allowed
+ f = io.BytesIO(b'test')
+ req.data = f
+ assert req.data == f
+
+ # common mistake: test str not allowed
+ with pytest.raises(TypeError):
+ req.data = 'test'
+ assert req.data != 'test'
+
+ # common mistake: test dict is not allowed
+ with pytest.raises(TypeError):
+ req.data = {'test': 'test'}
+ assert req.data != {'test': 'test'}
+
+ def test_content_length_header(self):
+ req = Request('http://example.com', headers={'Content-Length': '0'}, data=b'')
+ assert req.headers.get('Content-Length') == '0'
+
+ req.data = b'test'
+ assert 'Content-Length' not in req.headers
+
+ req = Request('http://example.com', headers={'Content-Length': '10'})
+ assert 'Content-Length' not in req.headers
+
+ def test_content_type_header(self):
+ req = Request('http://example.com', headers={'Content-Type': 'test'}, data=b'test')
+ assert req.headers.get('Content-Type') == 'test'
+ req.data = b'test2'
+ assert req.headers.get('Content-Type') == 'test'
+ req.data = None
+ assert 'Content-Type' not in req.headers
+ req.data = b'test3'
+ assert req.headers.get('Content-Type') == 'application/x-www-form-urlencoded'
+
+ def test_update_req(self):
+ req = Request('http://example.com')
+ assert req.data is None
+ assert req.method == 'GET'
+ assert 'Content-Type' not in req.headers
+ # Test that zero-byte payloads will be sent
+ req.update(data=b'')
+ assert req.data == b''
+ assert req.method == 'POST'
+ assert req.headers.get('Content-Type') == 'application/x-www-form-urlencoded'
+
+ def test_proxies(self):
+ req = Request(url='http://example.com', proxies={'http': 'http://127.0.0.1:8080'})
+ assert req.proxies == {'http': 'http://127.0.0.1:8080'}
+
+ def test_extensions(self):
+ req = Request(url='http://example.com', extensions={'timeout': 2})
+ assert req.extensions == {'timeout': 2}
+
+ def test_copy(self):
+ req = Request(
+ url='http://example.com',
+ extensions={'cookiejar': CookieJar()},
+ headers={'Accept-Encoding': 'br'},
+ proxies={'http': 'http://127.0.0.1'},
+ data=[b'123']
+ )
+ req_copy = req.copy()
+ assert req_copy is not req
+ assert req_copy.url == req.url
+ assert req_copy.headers == req.headers
+ assert req_copy.headers is not req.headers
+ assert req_copy.proxies == req.proxies
+ assert req_copy.proxies is not req.proxies
+
+ # Data is not copied
+ assert req_copy.data == req.data
+ assert req_copy.data is req.data
+
+ # Shallow copy extensions
+ assert req_copy.extensions is not req.extensions
+ assert req_copy.extensions['cookiejar'] == req.extensions['cookiejar']
+
+ # Subclasses are copied by default
+ class AnotherRequest(Request):
+ pass
+
+ req = AnotherRequest(url='http://127.0.0.1')
+ assert isinstance(req.copy(), AnotherRequest)
+
+ def test_url(self):
+ req = Request(url='https://фtest.example.com/ some spaceв?ä=c',)
+ assert req.url == 'https://xn--test-z6d.example.com/%20some%20space%D0%B2?%C3%A4=c'
+
+ assert Request(url='//example.com').url == 'http://example.com'
+
+ with pytest.raises(TypeError):
+ Request(url='https://').url = None
+
+
+class TestResponse:
+
+ @pytest.mark.parametrize('reason,status,expected', [
+ ('custom', 200, 'custom'),
+ (None, 404, 'Not Found'), # fallback status
+ ('', 403, 'Forbidden'),
+ (None, 999, None)
+ ])
+ def test_reason(self, reason, status, expected):
+ res = Response(io.BytesIO(b''), url='test://', headers={}, status=status, reason=reason)
+ assert res.reason == expected
+
+ def test_headers(self):
+ headers = Message()
+ headers.add_header('Test', 'test')
+ headers.add_header('Test', 'test2')
+ headers.add_header('content-encoding', 'br')
+ res = Response(io.BytesIO(b''), headers=headers, url='test://')
+ assert res.headers.get_all('test') == ['test', 'test2']
+ assert 'Content-Encoding' in res.headers
+
+ def test_get_header(self):
+ headers = Message()
+ headers.add_header('Set-Cookie', 'cookie1')
+ headers.add_header('Set-cookie', 'cookie2')
+ headers.add_header('Test', 'test')
+ headers.add_header('Test', 'test2')
+ res = Response(io.BytesIO(b''), headers=headers, url='test://')
+ assert res.get_header('test') == 'test, test2'
+ assert res.get_header('set-Cookie') == 'cookie1'
+ assert res.get_header('notexist', 'default') == 'default'
+
+ def test_compat(self):
+ res = Response(io.BytesIO(b''), url='test://', status=404, headers={'test': 'test'})
+ with warnings.catch_warnings():
+ warnings.simplefilter('ignore', category=DeprecationWarning)
+ assert res.code == res.getcode() == res.status
+ assert res.geturl() == res.url
+ assert res.info() is res.headers
+ assert res.getheader('test') == res.get_header('test')
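+
+
+def _example_director_usage():
+ """Illustrative sketch (hypothetical helper, not collected by pytest): minimal
+ direct use of the RequestDirector/RequestHandler API exercised in the tests above."""
+ director = RequestDirector(logger=FakeLogger())
+ director.add_handler(FakeRH(logger=FakeLogger()))
+ # FakeRH returns an empty FakeResponse, so the read yields b''
+ return director.send(Request('http://example.com')).read()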
diff --git a/test/test_networking_utils.py b/test/test_networking_utils.py
new file mode 100644
index 0000000..b7b7143
--- /dev/null
+++ b/test/test_networking_utils.py
@@ -0,0 +1,208 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+
+import pytest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import io
+import random
+import ssl
+
+from yt_dlp.cookies import YoutubeDLCookieJar
+from yt_dlp.dependencies import certifi
+from yt_dlp.networking import Response
+from yt_dlp.networking._helper import (
+ InstanceStoreMixin,
+ add_accept_encoding_header,
+ get_redirect_method,
+ make_socks_proxy_opts,
+ select_proxy,
+ ssl_load_certs,
+)
+from yt_dlp.networking.exceptions import (
+ HTTPError,
+ IncompleteRead,
+)
+from yt_dlp.socks import ProxyType
+from yt_dlp.utils.networking import HTTPHeaderDict
+
+TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class TestNetworkingUtils:
+
+ def test_select_proxy(self):
+ proxies = {
+ 'all': 'socks5://example.com',
+ 'http': 'http://example.com:1080',
+ 'no': 'bypass.example.com,yt-dl.org'
+ }
+
+ assert select_proxy('https://example.com', proxies) == proxies['all']
+ assert select_proxy('http://example.com', proxies) == proxies['http']
+ assert select_proxy('http://bypass.example.com', proxies) is None
+ assert select_proxy('https://yt-dl.org', proxies) is None
+
+ @pytest.mark.parametrize('socks_proxy,expected', [
+ ('socks5h://example.com', {
+ 'proxytype': ProxyType.SOCKS5,
+ 'addr': 'example.com',
+ 'port': 1080,
+ 'rdns': True,
+ 'username': None,
+ 'password': None
+ }),
+ ('socks5://user:@example.com:5555', {
+ 'proxytype': ProxyType.SOCKS5,
+ 'addr': 'example.com',
+ 'port': 5555,
+ 'rdns': False,
+ 'username': 'user',
+ 'password': ''
+ }),
+ ('socks4://u%40ser:pa%20ss@127.0.0.1:1080', {
+ 'proxytype': ProxyType.SOCKS4,
+ 'addr': '127.0.0.1',
+ 'port': 1080,
+ 'rdns': False,
+ 'username': 'u@ser',
+ 'password': 'pa ss'
+ }),
+ ('socks4a://:pa%20ss@127.0.0.1', {
+ 'proxytype': ProxyType.SOCKS4A,
+ 'addr': '127.0.0.1',
+ 'port': 1080,
+ 'rdns': True,
+ 'username': '',
+ 'password': 'pa ss'
+ })
+ ])
+ def test_make_socks_proxy_opts(self, socks_proxy, expected):
+ assert make_socks_proxy_opts(socks_proxy) == expected
+
+ def test_make_socks_proxy_unknown(self):
+ with pytest.raises(ValueError, match='Unknown SOCKS proxy version: socks'):
+ make_socks_proxy_opts('socks://127.0.0.1')
+
+ @pytest.mark.skipif(not certifi, reason='certifi is not installed')
+ def test_load_certifi(self):
+ context_certifi = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ context_certifi.load_verify_locations(cafile=certifi.where())
+ context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ ssl_load_certs(context, use_certifi=True)
+ assert context.get_ca_certs() == context_certifi.get_ca_certs()
+
+ context_default = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ context_default.load_default_certs()
+ context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ ssl_load_certs(context, use_certifi=False)
+ assert context.get_ca_certs() == context_default.get_ca_certs()
+
+ if context_default.get_ca_certs() == context_certifi.get_ca_certs():
+ pytest.skip('System uses certifi as default. The test is not valid')
+
+ @pytest.mark.parametrize('method,status,expected', [
+ ('GET', 303, 'GET'),
+ ('HEAD', 303, 'HEAD'),
+ ('PUT', 303, 'GET'),
+ ('POST', 301, 'GET'),
+ ('HEAD', 301, 'HEAD'),
+ ('POST', 302, 'GET'),
+ ('HEAD', 302, 'HEAD'),
+ ('PUT', 302, 'PUT'),
+ ('POST', 308, 'POST'),
+ ('POST', 307, 'POST'),
+ ('HEAD', 308, 'HEAD'),
+ ('HEAD', 307, 'HEAD'),
+ ])
+ def test_get_redirect_method(self, method, status, expected):
+ assert get_redirect_method(method, status) == expected
+
+ @pytest.mark.parametrize('headers,supported_encodings,expected', [
+ ({'Accept-Encoding': 'br'}, ['gzip', 'br'], {'Accept-Encoding': 'br'}),
+ ({}, ['gzip', 'br'], {'Accept-Encoding': 'gzip, br'}),
+ ({'Content-type': 'application/json'}, [], {'Content-type': 'application/json', 'Accept-Encoding': 'identity'}),
+ ])
+ def test_add_accept_encoding_header(self, headers, supported_encodings, expected):
+ headers = HTTPHeaderDict(headers)
+ add_accept_encoding_header(headers, supported_encodings)
+ assert headers == HTTPHeaderDict(expected)
+
+
+class TestInstanceStoreMixin:
+
+ class FakeInstanceStoreMixin(InstanceStoreMixin):
+ def _create_instance(self, **kwargs):
+ return random.randint(0, 1000000)
+
+ def _close_instance(self, instance):
+ pass
+
+ def test_mixin(self):
+ mixin = self.FakeInstanceStoreMixin()
+ assert mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}}) == mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}})
+
+ assert mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'e', 4}}) != mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}})
+
+ assert mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}}) != mixin._get_instance(d={'a': 1, 'b': 2, 'g': {'d', 4}})
+
+ assert mixin._get_instance(d={'a': 1}, e=[1, 2, 3]) == mixin._get_instance(d={'a': 1}, e=[1, 2, 3])
+
+ assert mixin._get_instance(d={'a': 1}, e=[1, 2, 3]) != mixin._get_instance(d={'a': 1}, e=[1, 2, 3, 4])
+
+ cookiejar = YoutubeDLCookieJar()
+ assert mixin._get_instance(b=[1, 2], c=cookiejar) == mixin._get_instance(b=[1, 2], c=cookiejar)
+
+ assert mixin._get_instance(b=[1, 2], c=cookiejar) != mixin._get_instance(b=[1, 2], c=YoutubeDLCookieJar())
+
+ # Different order
+ assert mixin._get_instance(c=cookiejar, b=[1, 2]) == mixin._get_instance(b=[1, 2], c=cookiejar)
+
+ m = mixin._get_instance(t=1234)
+ assert mixin._get_instance(t=1234) == m
+ mixin._clear_instances()
+ assert mixin._get_instance(t=1234) != m
+
+
+class TestNetworkingExceptions:
+
+ @staticmethod
+ def create_response(status):
+ return Response(fp=io.BytesIO(b'test'), url='http://example.com', headers={'tesT': 'test'}, status=status)
+
+ def test_http_error(self):
+
+ response = self.create_response(403)
+ error = HTTPError(response)
+
+ assert error.status == 403
+ assert str(error) == error.msg == 'HTTP Error 403: Forbidden'
+ assert error.reason == response.reason
+ assert error.response is response
+
+ data = error.response.read()
+ assert data == b'test'
+ assert repr(error) == '<HTTPError 403: Forbidden>'
+
+ def test_redirect_http_error(self):
+ response = self.create_response(301)
+ error = HTTPError(response, redirect_loop=True)
+ assert str(error) == error.msg == 'HTTP Error 301: Moved Permanently (redirect loop detected)'
+ assert error.reason == 'Moved Permanently'
+
+ def test_incomplete_read_error(self):
+ error = IncompleteRead(4, 3, cause='test')
+ assert isinstance(error, IncompleteRead)
+ assert repr(error) == '<IncompleteRead: 4 bytes read, 3 more expected>'
+ assert str(error) == error.msg == '4 bytes read, 3 more expected'
+ assert error.partial == 4
+ assert error.expected == 3
+ assert error.cause == 'test'
+
+ error = IncompleteRead(3)
+ assert repr(error) == '<IncompleteRead: 3 bytes read>'
+ assert str(error) == '3 bytes read'
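+
+
+def _example_select_proxy():
+ """Illustrative sketch (hypothetical helper, not collected by pytest): how
+ select_proxy resolves the 'all' and 'no' keys tested above."""
+ proxies = {'all': 'socks5://example.com', 'no': 'bypass.example.com'}
+ assert select_proxy('http://bypass.example.com', proxies) is None # bypassed via 'no'
+ assert select_proxy('https://example.com', proxies) == 'socks5://example.com'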
diff --git a/test/test_overwrites.py b/test/test_overwrites.py
new file mode 100644
index 0000000..6954c07
--- /dev/null
+++ b/test/test_overwrites.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import subprocess
+
+from test.helper import is_download_test, try_rm
+
+root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+download_file = os.path.join(root_dir, 'test.webm')
+
+
+@is_download_test
+class TestOverwrites(unittest.TestCase):
+ def setUp(self):
+ # create an empty file
+ open(download_file, 'a').close()
+
+ def test_default_overwrites(self):
+ outp = subprocess.Popen(
+ [
+ sys.executable, 'yt_dlp/__main__.py',
+ '-o', 'test.webm',
+ 'https://www.youtube.com/watch?v=jNQXAC9IVRw'
+ ], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ sout, serr = outp.communicate()
+ self.assertTrue(b'has already been downloaded' in sout)
+ # if the file has no content, it has not been redownloaded
+ self.assertTrue(os.path.getsize(download_file) < 1)
+
+ def test_yes_overwrites(self):
+ outp = subprocess.Popen(
+ [
+ sys.executable, 'yt_dlp/__main__.py', '--yes-overwrites',
+ '-o', 'test.webm',
+ 'https://www.youtube.com/watch?v=jNQXAC9IVRw'
+ ], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ sout, serr = outp.communicate()
+ self.assertTrue(b'has already been downloaded' not in sout)
+ # if the file has no content, it has not been redownloaded
+ self.assertTrue(os.path.getsize(download_file) > 1)
+
+ def tearDown(self):
+ try_rm(os.path.join(root_dir, 'test.webm'))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_plugins.py b/test/test_plugins.py
new file mode 100644
index 0000000..6cde579
--- /dev/null
+++ b/test/test_plugins.py
@@ -0,0 +1,73 @@
+import importlib
+import os
+import shutil
+import sys
+import unittest
+from pathlib import Path
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+TEST_DATA_DIR = Path(os.path.dirname(os.path.abspath(__file__)), 'testdata')
+sys.path.append(str(TEST_DATA_DIR))
+importlib.invalidate_caches()
+
+from yt_dlp.plugins import PACKAGE_NAME, directories, load_plugins
+
+
+class TestPlugins(unittest.TestCase):
+
+ TEST_PLUGIN_DIR = TEST_DATA_DIR / PACKAGE_NAME
+
+ def test_directories_containing_plugins(self):
+ self.assertIn(self.TEST_PLUGIN_DIR, map(Path, directories()))
+
+ def test_extractor_classes(self):
+ for module_name in tuple(sys.modules):
+ if module_name.startswith(f'{PACKAGE_NAME}.extractor'):
+ del sys.modules[module_name]
+ plugins_ie = load_plugins('extractor', 'IE')
+
+ self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
+ self.assertIn('NormalPluginIE', plugins_ie.keys())
+
+ # don't load modules with underscore prefix
+ self.assertFalse(
+ f'{PACKAGE_NAME}.extractor._ignore' in sys.modules.keys(),
+ 'loaded module beginning with underscore')
+ self.assertNotIn('IgnorePluginIE', plugins_ie.keys())
+
+ # Don't load extractors with underscore prefix
+ self.assertNotIn('_IgnoreUnderscorePluginIE', plugins_ie.keys())
+
+ # Don't load extractors not specified in __all__ (if supplied)
+ self.assertNotIn('IgnoreNotInAllPluginIE', plugins_ie.keys())
+ self.assertIn('InAllPluginIE', plugins_ie.keys())
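+
+ # Illustrative sketch (assumed layout, not taken from the testdata files): a minimal
+ # extractor plugin lives at <PACKAGE_NAME>/extractor/<name>.py and defines e.g.
+ # `class NormalPluginIE(InfoExtractor): ...`; load_plugins('extractor', 'IE') then
+ # exposes it under its class name, as asserted above.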
+
+ def test_postprocessor_classes(self):
+ plugins_pp = load_plugins('postprocessor', 'PP')
+ self.assertIn('NormalPluginPP', plugins_pp.keys())
+
+ def test_importing_zipped_module(self):
+ zip_path = TEST_DATA_DIR / 'zipped_plugins.zip'
+ shutil.make_archive(str(zip_path)[:-4], 'zip', str(zip_path)[:-4])
+ sys.path.append(str(zip_path)) # add zip to search paths
+ importlib.invalidate_caches() # reset the import caches
+
+ try:
+ for plugin_type in ('extractor', 'postprocessor'):
+ package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
+ self.assertIn(zip_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))
+
+ plugins_ie = load_plugins('extractor', 'IE')
+ self.assertIn('ZippedPluginIE', plugins_ie.keys())
+
+ plugins_pp = load_plugins('postprocessor', 'PP')
+ self.assertIn('ZippedPluginPP', plugins_pp.keys())
+
+ finally:
+ sys.path.remove(str(zip_path))
+ os.remove(zip_path)
+ importlib.invalidate_caches() # reset the import caches
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_post_hooks.py b/test/test_post_hooks.py
new file mode 100644
index 0000000..3778d17
--- /dev/null
+++ b/test/test_post_hooks.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+from test.helper import get_params, is_download_test, try_rm
+import yt_dlp.YoutubeDL # isort: split
+from yt_dlp.utils import DownloadError
+
+
+class YoutubeDL(yt_dlp.YoutubeDL):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.to_stderr = self.to_screen
+
+
+TEST_ID = 'gr51aVj-mLg'
+EXPECTED_NAME = 'gr51aVj-mLg'
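+# The test output template names downloads after the video id (see
+# test/parameters.json), so each hook should receive a basename equal to TEST_ID.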
+
+
+@is_download_test
+class TestPostHooks(unittest.TestCase):
+ def setUp(self):
+ self.stored_name_1 = None
+ self.stored_name_2 = None
+ self.params = get_params({
+ 'skip_download': False,
+ 'writeinfojson': False,
+ 'quiet': True,
+ 'verbose': False,
+ 'cachedir': False,
+ })
+ self.files = []
+
+ def test_post_hooks(self):
+ self.params['post_hooks'] = [self.hook_one, self.hook_two]
+ ydl = YoutubeDL(self.params)
+ ydl.download([TEST_ID])
+ self.assertEqual(self.stored_name_1, EXPECTED_NAME, 'Not the expected name from hook 1')
+ self.assertEqual(self.stored_name_2, EXPECTED_NAME, 'Not the expected name from hook 2')
+
+ def test_post_hook_exception(self):
+ self.params['post_hooks'] = [self.hook_three]
+ ydl = YoutubeDL(self.params)
+ self.assertRaises(DownloadError, ydl.download, [TEST_ID])
+
+ def hook_one(self, filename):
+ self.stored_name_1, _ = os.path.splitext(os.path.basename(filename))
+ self.files.append(filename)
+
+ def hook_two(self, filename):
+ self.stored_name_2, _ = os.path.splitext(os.path.basename(filename))
+ self.files.append(filename)
+
+ def hook_three(self, filename):
+ self.files.append(filename)
+ raise Exception('Test exception for \'%s\'' % filename)
+
+ def tearDown(self):
+ for f in self.files:
+ try_rm(f)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_postprocessors.py b/test/test_postprocessors.py
new file mode 100644
index 0000000..52e5587
--- /dev/null
+++ b/test/test_postprocessors.py
@@ -0,0 +1,579 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+from yt_dlp import YoutubeDL
+from yt_dlp.compat import compat_shlex_quote
+from yt_dlp.postprocessor import (
+ ExecPP,
+ FFmpegThumbnailsConvertorPP,
+ MetadataFromFieldPP,
+ MetadataParserPP,
+ ModifyChaptersPP,
+ SponsorBlockPP,
+)
+
+
+class TestMetadataFromField(unittest.TestCase):
+
+ def test_format_to_regex(self):
+ self.assertEqual(
+ MetadataParserPP.format_to_regex('%(title)s - %(artist)s'),
+ r'(?P<title>.+)\ \-\ (?P<artist>.+)')
+ self.assertEqual(MetadataParserPP.format_to_regex(r'(?P<x>.+)'), r'(?P<x>.+)')
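+        # For illustration, applying the first pattern to 'Some Title - Some Artist'
+        # yields the named groups title='Some Title' and artist='Some Artist'.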
+
+ def test_field_to_template(self):
+ self.assertEqual(MetadataParserPP.field_to_template('title'), '%(title)s')
+ self.assertEqual(MetadataParserPP.field_to_template('1'), '1')
+ self.assertEqual(MetadataParserPP.field_to_template('foo bar'), 'foo bar')
+ self.assertEqual(MetadataParserPP.field_to_template(' literal'), ' literal')
+
+ def test_metadatafromfield(self):
+ self.assertEqual(
+ MetadataFromFieldPP.to_action('%(title)s \\: %(artist)s:%(title)s : %(artist)s'),
+ (MetadataParserPP.Actions.INTERPRET, '%(title)s : %(artist)s', '%(title)s : %(artist)s'))
+
+
+class TestConvertThumbnail(unittest.TestCase):
+ def test_escaping(self):
+ pp = FFmpegThumbnailsConvertorPP()
+ if not pp.available:
+ print('Skipping: ffmpeg not found')
+ return
+
+ file = 'test/testdata/thumbnails/foo %d bar/foo_%d.{}'
+ tests = (('webp', 'png'), ('png', 'jpg'))
+
+ for inp, out in tests:
+ out_file = file.format(out)
+ if os.path.exists(out_file):
+ os.remove(out_file)
+ pp.convert_thumbnail(file.format(inp), out)
+ assert os.path.exists(out_file)
+
+ for _, out in tests:
+ os.remove(file.format(out))
+
+
+class TestExec(unittest.TestCase):
+ def test_parse_cmd(self):
+ pp = ExecPP(YoutubeDL(), '')
+ info = {'filepath': 'file name'}
+ cmd = 'echo %s' % compat_shlex_quote(info['filepath'])
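+        # compat_shlex_quote('file name') is "'file name'" on POSIX, so every
+        # template below expands to the same shell-safe command.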
+
+ self.assertEqual(pp.parse_cmd('echo', info), cmd)
+ self.assertEqual(pp.parse_cmd('echo {}', info), cmd)
+ self.assertEqual(pp.parse_cmd('echo %(filepath)q', info), cmd)
+
+
+class TestModifyChaptersPP(unittest.TestCase):
+ def setUp(self):
+ self._pp = ModifyChaptersPP(YoutubeDL())
+
+ @staticmethod
+ def _sponsor_chapter(start, end, cat, remove=False, title=None):
+ if title is None:
+ title = SponsorBlockPP.CATEGORIES[cat]
+ return {
+ 'start_time': start,
+ 'end_time': end,
+ '_categories': [(cat, start, end, title)],
+ **({'remove': True} if remove else {}),
+ }
+
+ @staticmethod
+ def _chapter(start, end, title=None, remove=False):
+ c = {'start_time': start, 'end_time': end}
+ if title is not None:
+ c['title'] = title
+ if remove:
+ c['remove'] = True
+ return c
+
+ def _chapters(self, ends, titles):
+ self.assertEqual(len(ends), len(titles))
+ start = 0
+ chapters = []
+ for e, t in zip(ends, titles):
+ chapters.append(self._chapter(start, e, t))
+ start = e
+ return chapters
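+    # e.g. _chapters([10, 20], ['c1', 'c2']) ->
+    #   [{'start_time': 0, 'end_time': 10, 'title': 'c1'},
+    #    {'start_time': 10, 'end_time': 20, 'title': 'c2'}]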
+
+ def _remove_marked_arrange_sponsors_test_impl(
+ self, chapters, expected_chapters, expected_removed):
+ actual_chapters, actual_removed = (
+ self._pp._remove_marked_arrange_sponsors(chapters))
+ for c in actual_removed:
+ c.pop('title', None)
+ c.pop('_categories', None)
+ actual_chapters = [{
+ 'start_time': c['start_time'],
+ 'end_time': c['end_time'],
+ 'title': c['title'],
+ } for c in actual_chapters]
+ self.assertSequenceEqual(expected_chapters, actual_chapters)
+ self.assertSequenceEqual(expected_removed, actual_removed)
+
+ def test_remove_marked_arrange_sponsors_CanGetThroughUnaltered(self):
+ chapters = self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, chapters, [])
+
+ def test_remove_marked_arrange_sponsors_ChapterWithSponsors(self):
+ chapters = self._chapters([70], ['c']) + [
+ self._sponsor_chapter(10, 20, 'sponsor'),
+ self._sponsor_chapter(30, 40, 'preview'),
+ self._sponsor_chapter(50, 60, 'filler')]
+ expected = self._chapters(
+ [10, 20, 30, 40, 50, 60, 70],
+ ['c', '[SponsorBlock]: Sponsor', 'c', '[SponsorBlock]: Preview/Recap',
+ 'c', '[SponsorBlock]: Filler Tangent', 'c'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_SponsorBlockChapters(self):
+ chapters = self._chapters([70], ['c']) + [
+ self._sponsor_chapter(10, 20, 'chapter', title='sb c1'),
+ self._sponsor_chapter(15, 16, 'chapter', title='sb c2'),
+ self._sponsor_chapter(30, 40, 'preview'),
+ self._sponsor_chapter(50, 60, 'filler')]
+ expected = self._chapters(
+ [10, 15, 16, 20, 30, 40, 50, 60, 70],
+ ['c', '[SponsorBlock]: sb c1', '[SponsorBlock]: sb c1, sb c2', '[SponsorBlock]: sb c1',
+ 'c', '[SponsorBlock]: Preview/Recap',
+ 'c', '[SponsorBlock]: Filler Tangent', 'c'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_UniqueNamesForOverlappingSponsors(self):
+ chapters = self._chapters([120], ['c']) + [
+ self._sponsor_chapter(10, 45, 'sponsor'), self._sponsor_chapter(20, 40, 'selfpromo'),
+ self._sponsor_chapter(50, 70, 'sponsor'), self._sponsor_chapter(60, 85, 'selfpromo'),
+ self._sponsor_chapter(90, 120, 'selfpromo'), self._sponsor_chapter(100, 110, 'sponsor')]
+ expected = self._chapters(
+ [10, 20, 40, 45, 50, 60, 70, 85, 90, 100, 110, 120],
+ ['c', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Sponsor, Unpaid/Self Promotion',
+ '[SponsorBlock]: Sponsor',
+ 'c', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Sponsor, Unpaid/Self Promotion',
+ '[SponsorBlock]: Unpaid/Self Promotion',
+ 'c', '[SponsorBlock]: Unpaid/Self Promotion', '[SponsorBlock]: Unpaid/Self Promotion, Sponsor',
+ '[SponsorBlock]: Unpaid/Self Promotion'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_ChapterWithCuts(self):
+ cuts = [self._chapter(10, 20, remove=True),
+ self._sponsor_chapter(30, 40, 'sponsor', remove=True),
+ self._chapter(50, 60, remove=True)]
+ chapters = self._chapters([70], ['c']) + cuts
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([40], ['c']), cuts)
+
+ def test_remove_marked_arrange_sponsors_ChapterWithSponsorsAndCuts(self):
+ chapters = self._chapters([70], ['c']) + [
+ self._sponsor_chapter(10, 20, 'sponsor'),
+ self._sponsor_chapter(30, 40, 'selfpromo', remove=True),
+ self._sponsor_chapter(50, 60, 'interaction')]
+ expected = self._chapters([10, 20, 40, 50, 60],
+ ['c', '[SponsorBlock]: Sponsor', 'c',
+ '[SponsorBlock]: Interaction Reminder', 'c'])
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, expected, [self._chapter(30, 40, remove=True)])
+
+ def test_remove_marked_arrange_sponsors_ChapterWithSponsorCutInTheMiddle(self):
+ cuts = [self._sponsor_chapter(20, 30, 'selfpromo', remove=True),
+ self._chapter(40, 50, remove=True)]
+ chapters = self._chapters([70], ['c']) + [self._sponsor_chapter(10, 60, 'sponsor')] + cuts
+ expected = self._chapters(
+ [10, 40, 50], ['c', '[SponsorBlock]: Sponsor', 'c'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
+
+ def test_remove_marked_arrange_sponsors_ChapterWithCutHidingSponsor(self):
+ cuts = [self._sponsor_chapter(20, 50, 'selfpromo', remove=True)]
+ chapters = self._chapters([60], ['c']) + [
+ self._sponsor_chapter(10, 20, 'intro'),
+ self._sponsor_chapter(30, 40, 'sponsor'),
+ self._sponsor_chapter(50, 60, 'outro'),
+ ] + cuts
+ expected = self._chapters(
+ [10, 20, 30], ['c', '[SponsorBlock]: Intermission/Intro Animation', '[SponsorBlock]: Endcards/Credits'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
+
+ def test_remove_marked_arrange_sponsors_ChapterWithAdjacentSponsors(self):
+ chapters = self._chapters([70], ['c']) + [
+ self._sponsor_chapter(10, 20, 'sponsor'),
+ self._sponsor_chapter(20, 30, 'selfpromo'),
+ self._sponsor_chapter(30, 40, 'interaction')]
+ expected = self._chapters(
+ [10, 20, 30, 40, 70],
+ ['c', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Unpaid/Self Promotion',
+ '[SponsorBlock]: Interaction Reminder', 'c'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_ChapterWithAdjacentCuts(self):
+ chapters = self._chapters([70], ['c']) + [
+ self._sponsor_chapter(10, 20, 'sponsor'),
+ self._sponsor_chapter(20, 30, 'interaction', remove=True),
+ self._chapter(30, 40, remove=True),
+ self._sponsor_chapter(40, 50, 'selfpromo', remove=True),
+ self._sponsor_chapter(50, 60, 'interaction')]
+ expected = self._chapters([10, 20, 30, 40],
+ ['c', '[SponsorBlock]: Sponsor',
+ '[SponsorBlock]: Interaction Reminder', 'c'])
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, expected, [self._chapter(20, 50, remove=True)])
+
+ def test_remove_marked_arrange_sponsors_ChapterWithOverlappingSponsors(self):
+ chapters = self._chapters([70], ['c']) + [
+ self._sponsor_chapter(10, 30, 'sponsor'),
+ self._sponsor_chapter(20, 50, 'selfpromo'),
+ self._sponsor_chapter(40, 60, 'interaction')]
+ expected = self._chapters(
+ [10, 20, 30, 40, 50, 60, 70],
+ ['c', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Sponsor, Unpaid/Self Promotion',
+ '[SponsorBlock]: Unpaid/Self Promotion', '[SponsorBlock]: Unpaid/Self Promotion, Interaction Reminder',
+ '[SponsorBlock]: Interaction Reminder', 'c'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_ChapterWithOverlappingCuts(self):
+ chapters = self._chapters([70], ['c']) + [
+ self._sponsor_chapter(10, 30, 'sponsor', remove=True),
+ self._sponsor_chapter(20, 50, 'selfpromo', remove=True),
+ self._sponsor_chapter(40, 60, 'interaction', remove=True)]
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([20], ['c']), [self._chapter(10, 60, remove=True)])
+
+ def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingSponsors(self):
+ chapters = self._chapters([170], ['c']) + [
+ self._sponsor_chapter(0, 30, 'intro'),
+ self._sponsor_chapter(20, 50, 'sponsor'),
+ self._sponsor_chapter(40, 60, 'selfpromo'),
+ self._sponsor_chapter(70, 90, 'sponsor'),
+ self._sponsor_chapter(80, 100, 'sponsor'),
+ self._sponsor_chapter(90, 110, 'sponsor'),
+ self._sponsor_chapter(120, 140, 'selfpromo'),
+ self._sponsor_chapter(130, 160, 'interaction'),
+ self._sponsor_chapter(150, 170, 'outro')]
+ expected = self._chapters(
+ [20, 30, 40, 50, 60, 70, 110, 120, 130, 140, 150, 160, 170],
+ ['[SponsorBlock]: Intermission/Intro Animation', '[SponsorBlock]: Intermission/Intro Animation, Sponsor', '[SponsorBlock]: Sponsor',
+ '[SponsorBlock]: Sponsor, Unpaid/Self Promotion', '[SponsorBlock]: Unpaid/Self Promotion', 'c',
+ '[SponsorBlock]: Sponsor', 'c', '[SponsorBlock]: Unpaid/Self Promotion',
+ '[SponsorBlock]: Unpaid/Self Promotion, Interaction Reminder',
+ '[SponsorBlock]: Interaction Reminder',
+ '[SponsorBlock]: Interaction Reminder, Endcards/Credits', '[SponsorBlock]: Endcards/Credits'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingCuts(self):
+ chapters = self._chapters([170], ['c']) + [
+ self._chapter(0, 30, remove=True),
+ self._sponsor_chapter(20, 50, 'sponsor', remove=True),
+ self._chapter(40, 60, remove=True),
+ self._sponsor_chapter(70, 90, 'sponsor', remove=True),
+ self._chapter(80, 100, remove=True),
+ self._chapter(90, 110, remove=True),
+ self._sponsor_chapter(120, 140, 'sponsor', remove=True),
+ self._sponsor_chapter(130, 160, 'selfpromo', remove=True),
+ self._chapter(150, 170, remove=True)]
+ expected_cuts = [self._chapter(0, 60, remove=True),
+ self._chapter(70, 110, remove=True),
+ self._chapter(120, 170, remove=True)]
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([20], ['c']), expected_cuts)
+
+ def test_remove_marked_arrange_sponsors_OverlappingSponsorsDifferentTitlesAfterCut(self):
+ chapters = self._chapters([60], ['c']) + [
+ self._sponsor_chapter(10, 60, 'sponsor'),
+ self._sponsor_chapter(10, 40, 'intro'),
+ self._sponsor_chapter(30, 50, 'interaction'),
+ self._sponsor_chapter(30, 50, 'selfpromo', remove=True),
+ self._sponsor_chapter(40, 50, 'interaction'),
+ self._sponsor_chapter(50, 60, 'outro')]
+ expected = self._chapters(
+ [10, 30, 40], ['c', '[SponsorBlock]: Sponsor, Intermission/Intro Animation', '[SponsorBlock]: Sponsor, Endcards/Credits'])
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, expected, [self._chapter(30, 50, remove=True)])
+
+ def test_remove_marked_arrange_sponsors_SponsorsNoLongerOverlapAfterCut(self):
+ chapters = self._chapters([70], ['c']) + [
+ self._sponsor_chapter(10, 30, 'sponsor'),
+ self._sponsor_chapter(20, 50, 'interaction'),
+ self._sponsor_chapter(30, 50, 'selfpromo', remove=True),
+ self._sponsor_chapter(40, 60, 'sponsor'),
+ self._sponsor_chapter(50, 60, 'interaction')]
+ expected = self._chapters(
+ [10, 20, 40, 50], ['c', '[SponsorBlock]: Sponsor',
+ '[SponsorBlock]: Sponsor, Interaction Reminder', 'c'])
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, expected, [self._chapter(30, 50, remove=True)])
+
+ def test_remove_marked_arrange_sponsors_SponsorsStillOverlapAfterCut(self):
+ chapters = self._chapters([70], ['c']) + [
+ self._sponsor_chapter(10, 60, 'sponsor'),
+ self._sponsor_chapter(20, 60, 'interaction'),
+ self._sponsor_chapter(30, 50, 'selfpromo', remove=True)]
+ expected = self._chapters(
+ [10, 20, 40, 50], ['c', '[SponsorBlock]: Sponsor',
+ '[SponsorBlock]: Sponsor, Interaction Reminder', 'c'])
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, expected, [self._chapter(30, 50, remove=True)])
+
+ def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingSponsorsAndCuts(self):
+ chapters = self._chapters([200], ['c']) + [
+ self._sponsor_chapter(10, 40, 'sponsor'),
+ self._sponsor_chapter(10, 30, 'intro'),
+ self._chapter(20, 30, remove=True),
+ self._sponsor_chapter(30, 40, 'selfpromo'),
+ self._sponsor_chapter(50, 70, 'sponsor'),
+ self._sponsor_chapter(60, 80, 'interaction'),
+ self._chapter(70, 80, remove=True),
+ self._sponsor_chapter(70, 90, 'sponsor'),
+ self._sponsor_chapter(80, 100, 'interaction'),
+ self._sponsor_chapter(120, 170, 'selfpromo'),
+ self._sponsor_chapter(130, 180, 'outro'),
+ self._chapter(140, 150, remove=True),
+ self._chapter(150, 160, remove=True)]
+ expected = self._chapters(
+ [10, 20, 30, 40, 50, 70, 80, 100, 110, 130, 140, 160],
+ ['c', '[SponsorBlock]: Sponsor, Intermission/Intro Animation', '[SponsorBlock]: Sponsor, Unpaid/Self Promotion',
+ 'c', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Sponsor, Interaction Reminder',
+ '[SponsorBlock]: Interaction Reminder', 'c', '[SponsorBlock]: Unpaid/Self Promotion',
+ '[SponsorBlock]: Unpaid/Self Promotion, Endcards/Credits', '[SponsorBlock]: Endcards/Credits', 'c'])
+ expected_cuts = [self._chapter(20, 30, remove=True),
+ self._chapter(70, 80, remove=True),
+ self._chapter(140, 160, remove=True)]
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, expected_cuts)
+
+ def test_remove_marked_arrange_sponsors_SponsorOverlapsMultipleChapters(self):
+ chapters = (self._chapters([20, 40, 60, 80, 100], ['c1', 'c2', 'c3', 'c4', 'c5'])
+ + [self._sponsor_chapter(10, 90, 'sponsor')])
+ expected = self._chapters([10, 90, 100], ['c1', '[SponsorBlock]: Sponsor', 'c5'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_CutOverlapsMultipleChapters(self):
+ cuts = [self._chapter(10, 90, remove=True)]
+ chapters = self._chapters([20, 40, 60, 80, 100], ['c1', 'c2', 'c3', 'c4', 'c5']) + cuts
+ expected = self._chapters([10, 20], ['c1', 'c5'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
+
+ def test_remove_marked_arrange_sponsors_SponsorsWithinSomeChaptersAndOverlappingOthers(self):
+ chapters = (self._chapters([10, 40, 60, 80], ['c1', 'c2', 'c3', 'c4'])
+ + [self._sponsor_chapter(20, 30, 'sponsor'),
+ self._sponsor_chapter(50, 70, 'selfpromo')])
+ expected = self._chapters([10, 20, 30, 40, 50, 70, 80],
+ ['c1', 'c2', '[SponsorBlock]: Sponsor', 'c2', 'c3',
+ '[SponsorBlock]: Unpaid/Self Promotion', 'c4'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_CutsWithinSomeChaptersAndOverlappingOthers(self):
+ cuts = [self._chapter(20, 30, remove=True), self._chapter(50, 70, remove=True)]
+ chapters = self._chapters([10, 40, 60, 80], ['c1', 'c2', 'c3', 'c4']) + cuts
+ expected = self._chapters([10, 30, 40, 50], ['c1', 'c2', 'c3', 'c4'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
+
+ def test_remove_marked_arrange_sponsors_ChaptersAfterLastSponsor(self):
+ chapters = (self._chapters([20, 40, 50, 60], ['c1', 'c2', 'c3', 'c4'])
+ + [self._sponsor_chapter(10, 30, 'music_offtopic')])
+ expected = self._chapters(
+ [10, 30, 40, 50, 60],
+ ['c1', '[SponsorBlock]: Non-Music Section', 'c2', 'c3', 'c4'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_ChaptersAfterLastCut(self):
+ cuts = [self._chapter(10, 30, remove=True)]
+ chapters = self._chapters([20, 40, 50, 60], ['c1', 'c2', 'c3', 'c4']) + cuts
+ expected = self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
+
+ def test_remove_marked_arrange_sponsors_SponsorStartsAtChapterStart(self):
+ chapters = (self._chapters([10, 20, 40], ['c1', 'c2', 'c3'])
+ + [self._sponsor_chapter(20, 30, 'sponsor')])
+ expected = self._chapters([10, 20, 30, 40], ['c1', 'c2', '[SponsorBlock]: Sponsor', 'c3'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_CutStartsAtChapterStart(self):
+ cuts = [self._chapter(20, 30, remove=True)]
+ chapters = self._chapters([10, 20, 40], ['c1', 'c2', 'c3']) + cuts
+ expected = self._chapters([10, 20, 30], ['c1', 'c2', 'c3'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
+
+ def test_remove_marked_arrange_sponsors_SponsorEndsAtChapterEnd(self):
+ chapters = (self._chapters([10, 30, 40], ['c1', 'c2', 'c3'])
+ + [self._sponsor_chapter(20, 30, 'sponsor')])
+ expected = self._chapters([10, 20, 30, 40], ['c1', 'c2', '[SponsorBlock]: Sponsor', 'c3'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_CutEndsAtChapterEnd(self):
+ cuts = [self._chapter(20, 30, remove=True)]
+ chapters = self._chapters([10, 30, 40], ['c1', 'c2', 'c3']) + cuts
+ expected = self._chapters([10, 20, 30], ['c1', 'c2', 'c3'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
+
+ def test_remove_marked_arrange_sponsors_SponsorCoincidesWithChapters(self):
+ chapters = (self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4'])
+ + [self._sponsor_chapter(10, 30, 'sponsor')])
+ expected = self._chapters([10, 30, 40], ['c1', '[SponsorBlock]: Sponsor', 'c4'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_CutCoincidesWithChapters(self):
+ cuts = [self._chapter(10, 30, remove=True)]
+ chapters = self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4']) + cuts
+ expected = self._chapters([10, 20], ['c1', 'c4'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
+
+ def test_remove_marked_arrange_sponsors_SponsorsAtVideoBoundaries(self):
+ chapters = (self._chapters([20, 40, 60], ['c1', 'c2', 'c3'])
+ + [self._sponsor_chapter(0, 10, 'intro'), self._sponsor_chapter(50, 60, 'outro')])
+ expected = self._chapters(
+ [10, 20, 40, 50, 60], ['[SponsorBlock]: Intermission/Intro Animation', 'c1', 'c2', 'c3', '[SponsorBlock]: Endcards/Credits'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_CutsAtVideoBoundaries(self):
+ cuts = [self._chapter(0, 10, remove=True), self._chapter(50, 60, remove=True)]
+ chapters = self._chapters([20, 40, 60], ['c1', 'c2', 'c3']) + cuts
+ expected = self._chapters([10, 30, 40], ['c1', 'c2', 'c3'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
+
+ def test_remove_marked_arrange_sponsors_SponsorsOverlapChaptersAtVideoBoundaries(self):
+ chapters = (self._chapters([10, 40, 50], ['c1', 'c2', 'c3'])
+ + [self._sponsor_chapter(0, 20, 'intro'), self._sponsor_chapter(30, 50, 'outro')])
+ expected = self._chapters(
+ [20, 30, 50], ['[SponsorBlock]: Intermission/Intro Animation', 'c2', '[SponsorBlock]: Endcards/Credits'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_CutsOverlapChaptersAtVideoBoundaries(self):
+ cuts = [self._chapter(0, 20, remove=True), self._chapter(30, 50, remove=True)]
+ chapters = self._chapters([10, 40, 50], ['c1', 'c2', 'c3']) + cuts
+ expected = self._chapters([10], ['c2'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
+
+ def test_remove_marked_arrange_sponsors_EverythingSponsored(self):
+ chapters = (self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4'])
+ + [self._sponsor_chapter(0, 20, 'intro'), self._sponsor_chapter(20, 40, 'outro')])
+ expected = self._chapters([20, 40], ['[SponsorBlock]: Intermission/Intro Animation', '[SponsorBlock]: Endcards/Credits'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
+ def test_remove_marked_arrange_sponsors_EverythingCut(self):
+ cuts = [self._chapter(0, 20, remove=True), self._chapter(20, 40, remove=True)]
+ chapters = self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4']) + cuts
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, [], [self._chapter(0, 40, remove=True)])
+
+ def test_remove_marked_arrange_sponsors_TinyChaptersInTheOriginalArePreserved(self):
+ chapters = self._chapters([0.1, 0.2, 0.3, 0.4], ['c1', 'c2', 'c3', 'c4'])
+ self._remove_marked_arrange_sponsors_test_impl(chapters, chapters, [])
+
+ def test_remove_marked_arrange_sponsors_TinySponsorsAreIgnored(self):
+ chapters = [self._sponsor_chapter(0, 0.1, 'intro'), self._chapter(0.1, 0.2, 'c1'),
+ self._sponsor_chapter(0.2, 0.3, 'sponsor'), self._chapter(0.3, 0.4, 'c2'),
+ self._sponsor_chapter(0.4, 0.5, 'outro')]
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([0.3, 0.5], ['c1', 'c2']), [])
+
+ def test_remove_marked_arrange_sponsors_TinyChaptersResultingFromCutsAreIgnored(self):
+ cuts = [self._chapter(1.5, 2.5, remove=True)]
+ chapters = self._chapters([2, 3, 3.5], ['c1', 'c2', 'c3']) + cuts
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([2, 2.5], ['c1', 'c3']), cuts)
+
+ def test_remove_marked_arrange_sponsors_SingleTinyChapterIsPreserved(self):
+ cuts = [self._chapter(0.5, 2, remove=True)]
+ chapters = self._chapters([2], ['c']) + cuts
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([0.5], ['c']), cuts)
+
+ def test_remove_marked_arrange_sponsors_TinyChapterAtTheStartPrependedToTheNext(self):
+ cuts = [self._chapter(0.5, 2, remove=True)]
+ chapters = self._chapters([2, 4], ['c1', 'c2']) + cuts
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([2.5], ['c2']), cuts)
+
+ def test_remove_marked_arrange_sponsors_TinyChaptersResultingFromSponsorOverlapAreIgnored(self):
+ chapters = self._chapters([1, 3, 4], ['c1', 'c2', 'c3']) + [
+ self._sponsor_chapter(1.5, 2.5, 'sponsor')]
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([1.5, 2.5, 4], ['c1', '[SponsorBlock]: Sponsor', 'c3']), [])
+
+ def test_remove_marked_arrange_sponsors_TinySponsorsOverlapsAreIgnored(self):
+ chapters = self._chapters([2, 3, 5], ['c1', 'c2', 'c3']) + [
+ self._sponsor_chapter(1, 3, 'sponsor'),
+ self._sponsor_chapter(2.5, 4, 'selfpromo')
+ ]
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([1, 3, 4, 5], [
+ 'c1', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Unpaid/Self Promotion', 'c3']), [])
+
+ def test_remove_marked_arrange_sponsors_TinySponsorsPrependedToTheNextSponsor(self):
+ chapters = self._chapters([4], ['c']) + [
+ self._sponsor_chapter(1.5, 2, 'sponsor'),
+ self._sponsor_chapter(2, 4, 'selfpromo')
+ ]
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([1.5, 4], ['c', '[SponsorBlock]: Unpaid/Self Promotion']), [])
+
+ def test_remove_marked_arrange_sponsors_SmallestSponsorInTheOverlapGetsNamed(self):
+ self._pp._sponsorblock_chapter_title = '[SponsorBlock]: %(name)s'
+ chapters = self._chapters([10], ['c']) + [
+ self._sponsor_chapter(2, 8, 'sponsor'),
+ self._sponsor_chapter(4, 6, 'selfpromo')
+ ]
+ self._remove_marked_arrange_sponsors_test_impl(
+ chapters, self._chapters([2, 4, 6, 8, 10], [
+ 'c', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Unpaid/Self Promotion',
+ '[SponsorBlock]: Sponsor', 'c'
+ ]), [])
+
+ def test_make_concat_opts_CommonCase(self):
+ sponsor_chapters = [self._chapter(1, 2, 's1'), self._chapter(10, 20, 's2')]
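+        # with a 30s video, everything outside the sponsor ranges [1, 2] and
+        # [10, 20] must be kept: [0, 1), [2, 10) and [20, 30)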
+ expected = '''ffconcat version 1.0
+file 'file:test'
+outpoint 1.000000
+file 'file:test'
+inpoint 2.000000
+outpoint 10.000000
+file 'file:test'
+inpoint 20.000000
+'''
+ opts = self._pp._make_concat_opts(sponsor_chapters, 30)
+ self.assertEqual(expected, ''.join(self._pp._concat_spec(['test'] * len(opts), opts)))
+
+ def test_make_concat_opts_NoZeroDurationChunkAtVideoStart(self):
+ sponsor_chapters = [self._chapter(0, 1, 's1'), self._chapter(10, 20, 's2')]
+ expected = '''ffconcat version 1.0
+file 'file:test'
+inpoint 1.000000
+outpoint 10.000000
+file 'file:test'
+inpoint 20.000000
+'''
+ opts = self._pp._make_concat_opts(sponsor_chapters, 30)
+ self.assertEqual(expected, ''.join(self._pp._concat_spec(['test'] * len(opts), opts)))
+
+ def test_make_concat_opts_NoZeroDurationChunkAtVideoEnd(self):
+ sponsor_chapters = [self._chapter(1, 2, 's1'), self._chapter(10, 20, 's2')]
+ expected = '''ffconcat version 1.0
+file 'file:test'
+outpoint 1.000000
+file 'file:test'
+inpoint 2.000000
+outpoint 10.000000
+'''
+ opts = self._pp._make_concat_opts(sponsor_chapters, 20)
+ self.assertEqual(expected, ''.join(self._pp._concat_spec(['test'] * len(opts), opts)))
+
+ def test_quote_for_concat_RunsOfQuotes(self):
+ self.assertEqual(
+ r"'special '\'' '\'\''characters'\'\'\''galore'",
+ self._pp._quote_for_ffmpeg("special ' ''characters'''galore"))
+
+ def test_quote_for_concat_QuotesAtStart(self):
+ self.assertEqual(
+ r"\'\'\''special '\'' characters '\'' galore'",
+ self._pp._quote_for_ffmpeg("'''special ' characters ' galore"))
+
+ def test_quote_for_concat_QuotesAtEnd(self):
+ self.assertEqual(
+ r"'special '\'' characters '\'' galore'\'\'\'",
+ self._pp._quote_for_ffmpeg("special ' characters ' galore'''"))
diff --git a/test/test_socks.py b/test/test_socks.py
new file mode 100644
index 0000000..cb22b61
--- /dev/null
+++ b/test/test_socks.py
@@ -0,0 +1,477 @@
+#!/usr/bin/env python3
+# Allow direct execution
+import os
+import sys
+import threading
+import unittest
+
+import pytest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import abc
+import contextlib
+import enum
+import functools
+import http.server
+import json
+import random
+import socket
+import struct
+import time
+from socketserver import (
+ BaseRequestHandler,
+ StreamRequestHandler,
+ ThreadingTCPServer,
+)
+
+from test.helper import http_server_port, verify_address_availability
+from yt_dlp.networking import Request
+from yt_dlp.networking.exceptions import ProxyError, TransportError
+from yt_dlp.socks import (
+ SOCKS4_REPLY_VERSION,
+ SOCKS4_VERSION,
+ SOCKS5_USER_AUTH_SUCCESS,
+ SOCKS5_USER_AUTH_VERSION,
+ SOCKS5_VERSION,
+ Socks5AddressType,
+ Socks5Auth,
+)
+
+SOCKS5_USER_AUTH_FAILURE = 0x1
+
+
+class Socks4CD(enum.IntEnum):
+ REQUEST_GRANTED = 90
+ REQUEST_REJECTED_OR_FAILED = 91
+ REQUEST_REJECTED_CANNOT_CONNECT_TO_IDENTD = 92
+ REQUEST_REJECTED_DIFFERENT_USERID = 93
+
+
+class Socks5Reply(enum.IntEnum):
+ SUCCEEDED = 0x0
+ GENERAL_FAILURE = 0x1
+ CONNECTION_NOT_ALLOWED = 0x2
+ NETWORK_UNREACHABLE = 0x3
+ HOST_UNREACHABLE = 0x4
+ CONNECTION_REFUSED = 0x5
+ TTL_EXPIRED = 0x6
+ COMMAND_NOT_SUPPORTED = 0x7
+ ADDRESS_TYPE_NOT_SUPPORTED = 0x8
+
+
+class SocksTestRequestHandler(BaseRequestHandler):
+
+ def __init__(self, *args, socks_info=None, **kwargs):
+ self.socks_info = socks_info
+ super().__init__(*args, **kwargs)
+
+
+class SocksProxyHandler(BaseRequestHandler):
+ def __init__(self, request_handler_class, socks_server_kwargs, *args, **kwargs):
+ self.socks_kwargs = socks_server_kwargs or {}
+ self.request_handler_class = request_handler_class
+ super().__init__(*args, **kwargs)
+
+
+class Socks5ProxyHandler(StreamRequestHandler, SocksProxyHandler):
+
+ # SOCKS5 protocol https://tools.ietf.org/html/rfc1928
+ # SOCKS5 username/password authentication https://tools.ietf.org/html/rfc1929
+
+ def handle(self):
+ sleep = self.socks_kwargs.get('sleep')
+ if sleep:
+ time.sleep(sleep)
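+        # client greeting: VER (0x05), NMETHODS, followed by NMETHODS method ids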
+ version, nmethods = self.connection.recv(2)
+ assert version == SOCKS5_VERSION
+ methods = list(self.connection.recv(nmethods))
+
+ auth = self.socks_kwargs.get('auth')
+
+ if auth is not None and Socks5Auth.AUTH_USER_PASS not in methods:
+ self.connection.sendall(struct.pack('!BB', SOCKS5_VERSION, Socks5Auth.AUTH_NO_ACCEPTABLE))
+ self.server.close_request(self.request)
+ return
+
+ elif Socks5Auth.AUTH_USER_PASS in methods:
+            self.connection.sendall(struct.pack('!BB', SOCKS5_VERSION, Socks5Auth.AUTH_USER_PASS))
+
+ _, user_len = struct.unpack('!BB', self.connection.recv(2))
+ username = self.connection.recv(user_len).decode()
+ pass_len = ord(self.connection.recv(1))
+ password = self.connection.recv(pass_len).decode()
+
+ if username == auth[0] and password == auth[1]:
+ self.connection.sendall(struct.pack('!BB', SOCKS5_USER_AUTH_VERSION, SOCKS5_USER_AUTH_SUCCESS))
+ else:
+ self.connection.sendall(struct.pack('!BB', SOCKS5_USER_AUTH_VERSION, SOCKS5_USER_AUTH_FAILURE))
+ self.server.close_request(self.request)
+ return
+
+ elif Socks5Auth.AUTH_NONE in methods:
+ self.connection.sendall(struct.pack('!BB', SOCKS5_VERSION, Socks5Auth.AUTH_NONE))
+ else:
+ self.connection.sendall(struct.pack('!BB', SOCKS5_VERSION, Socks5Auth.AUTH_NO_ACCEPTABLE))
+ self.server.close_request(self.request)
+ return
+
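+        # connection request: VER, CMD, RSV, ATYP, then DST.ADDR and DST.PORT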
+ version, command, _, address_type = struct.unpack('!BBBB', self.connection.recv(4))
+ socks_info = {
+ 'version': version,
+ 'auth_methods': methods,
+ 'command': command,
+ 'client_address': self.client_address,
+ 'ipv4_address': None,
+ 'domain_address': None,
+ 'ipv6_address': None,
+ }
+ if address_type == Socks5AddressType.ATYP_IPV4:
+ socks_info['ipv4_address'] = socket.inet_ntoa(self.connection.recv(4))
+ elif address_type == Socks5AddressType.ATYP_DOMAINNAME:
+ socks_info['domain_address'] = self.connection.recv(ord(self.connection.recv(1))).decode()
+ elif address_type == Socks5AddressType.ATYP_IPV6:
+ socks_info['ipv6_address'] = socket.inet_ntop(socket.AF_INET6, self.connection.recv(16))
+        else:
+            self.server.close_request(self.request)
+            return
+
+ socks_info['port'] = struct.unpack('!H', self.connection.recv(2))[0]
+
+ # dummy response, the returned IP is just a placeholder
+ self.connection.sendall(struct.pack(
+ '!BBBBIH', SOCKS5_VERSION, self.socks_kwargs.get('reply', Socks5Reply.SUCCEEDED), 0x0, 0x1, 0x7f000001, 40000))
+
+ self.request_handler_class(self.request, self.client_address, self.server, socks_info=socks_info)
+
+
+class Socks4ProxyHandler(StreamRequestHandler, SocksProxyHandler):
+
+ # SOCKS4 protocol http://www.openssh.com/txt/socks4.protocol
+ # SOCKS4A protocol http://www.openssh.com/txt/socks4a.protocol
+
+ def _read_until_null(self):
+ return b''.join(iter(functools.partial(self.connection.recv, 1), b'\x00'))
+
+ def handle(self):
+ sleep = self.socks_kwargs.get('sleep')
+ if sleep:
+ time.sleep(sleep)
+ socks_info = {
+ 'version': SOCKS4_VERSION,
+ 'command': None,
+ 'client_address': self.client_address,
+ 'ipv4_address': None,
+ 'port': None,
+ 'domain_address': None,
+ }
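+        # request: VN, CD, DSTPORT, DSTIP, followed by a null-terminated userid;
+        # a DSTIP of 0.0.0.x (x != 0) is the SOCKS4a marker for 'hostname follows'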
+ version, command, dest_port, dest_ip = struct.unpack('!BBHI', self.connection.recv(8))
+ socks_info['port'] = dest_port
+ socks_info['command'] = command
+ if version != SOCKS4_VERSION:
+ self.server.close_request(self.request)
+ return
+ use_remote_dns = False
+ if 0x0 < dest_ip <= 0xFF:
+ use_remote_dns = True
+ else:
+            socks_info['ipv4_address'] = socket.inet_ntoa(struct.pack('!I', dest_ip))
+
+ user_id = self._read_until_null().decode()
+ if user_id != (self.socks_kwargs.get('user_id') or ''):
+ self.connection.sendall(struct.pack(
+ '!BBHI', SOCKS4_REPLY_VERSION, Socks4CD.REQUEST_REJECTED_DIFFERENT_USERID, 0x00, 0x00000000))
+ self.server.close_request(self.request)
+ return
+
+ if use_remote_dns:
+ socks_info['domain_address'] = self._read_until_null().decode()
+
+ # dummy response, the returned IP is just a placeholder
+ self.connection.sendall(
+ struct.pack(
+ '!BBHI', SOCKS4_REPLY_VERSION,
+ self.socks_kwargs.get('cd_reply', Socks4CD.REQUEST_GRANTED), 40000, 0x7f000001))
+
+ self.request_handler_class(self.request, self.client_address, self.server, socks_info=socks_info)
+
+
+class IPv6ThreadingTCPServer(ThreadingTCPServer):
+ address_family = socket.AF_INET6
+
+
+class SocksHTTPTestRequestHandler(http.server.BaseHTTPRequestHandler, SocksTestRequestHandler):
+ def do_GET(self):
+ if self.path == '/socks_info':
+ payload = json.dumps(self.socks_info.copy())
+ self.send_response(200)
+ self.send_header('Content-Type', 'application/json; charset=utf-8')
+ self.send_header('Content-Length', str(len(payload)))
+ self.end_headers()
+ self.wfile.write(payload.encode())
+
+
+class SocksWebSocketTestRequestHandler(SocksTestRequestHandler):
+ def handle(self):
+ import websockets.sync.server
+ protocol = websockets.ServerProtocol()
+ connection = websockets.sync.server.ServerConnection(socket=self.request, protocol=protocol, close_timeout=0)
+ connection.handshake()
+ connection.send(json.dumps(self.socks_info))
+ connection.close()
+
+
+@contextlib.contextmanager
+def socks_server(socks_server_class, request_handler, bind_ip=None, **socks_server_kwargs):
+ server = server_thread = None
+ try:
+ bind_address = bind_ip or '127.0.0.1'
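+        # a '.' in the bind address implies IPv4; anything else (e.g. '::1') is IPv6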
+ server_type = ThreadingTCPServer if '.' in bind_address else IPv6ThreadingTCPServer
+ server = server_type(
+ (bind_address, 0), functools.partial(socks_server_class, request_handler, socks_server_kwargs))
+ server_port = http_server_port(server)
+ server_thread = threading.Thread(target=server.serve_forever)
+ server_thread.daemon = True
+ server_thread.start()
+ if '.' not in bind_address:
+ yield f'[{bind_address}]:{server_port}'
+ else:
+ yield f'{bind_address}:{server_port}'
+ finally:
+ server.shutdown()
+ server.server_close()
+ server_thread.join(2.0)
+
+
+class SocksProxyTestContext(abc.ABC):
+ REQUEST_HANDLER_CLASS = None
+
+ def socks_server(self, server_class, *args, **kwargs):
+ return socks_server(server_class, self.REQUEST_HANDLER_CLASS, *args, **kwargs)
+
+ @abc.abstractmethod
+ def socks_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs) -> dict:
+ """return a dict of socks_info"""
+
+
+class HTTPSocksTestProxyContext(SocksProxyTestContext):
+ REQUEST_HANDLER_CLASS = SocksHTTPTestRequestHandler
+
+ def socks_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs):
+ request = Request(f'http://{target_domain or "127.0.0.1"}:{target_port or "40000"}/socks_info', **req_kwargs)
+ handler.validate(request)
+ return json.loads(handler.send(request).read().decode())
+
+
+class WebSocketSocksTestProxyContext(SocksProxyTestContext):
+ REQUEST_HANDLER_CLASS = SocksWebSocketTestRequestHandler
+
+ def socks_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs):
+ request = Request(f'ws://{target_domain or "127.0.0.1"}:{target_port or "40000"}', **req_kwargs)
+ handler.validate(request)
+ ws = handler.send(request)
+ ws.send('socks_info')
+ socks_info = ws.recv()
+ ws.close()
+ return json.loads(socks_info)
+
+
+CTX_MAP = {
+ 'http': HTTPSocksTestProxyContext,
+ 'ws': WebSocketSocksTestProxyContext,
+}
+
+
+@pytest.fixture(scope='module')
+def ctx(request):
+ return CTX_MAP[request.param]()
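+# 'ctx' is parametrized indirectly via CTX_MAP above; the 'handler' fixture,
+# which selects the request handler implementation, comes from test/conftest.py.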
+
+
+class TestSocks4Proxy:
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks4_no_auth(self, handler, ctx):
+ with handler() as rh:
+ with ctx.socks_server(Socks4ProxyHandler) as server_address:
+ response = ctx.socks_info_request(
+ rh, proxies={'all': f'socks4://{server_address}'})
+ assert response['version'] == 4
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks4_auth(self, handler, ctx):
+ with handler() as rh:
+ with ctx.socks_server(Socks4ProxyHandler, user_id='user') as server_address:
+ with pytest.raises(ProxyError):
+ ctx.socks_info_request(rh, proxies={'all': f'socks4://{server_address}'})
+ response = ctx.socks_info_request(
+ rh, proxies={'all': f'socks4://user:@{server_address}'})
+ assert response['version'] == 4
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks4a_ipv4_target(self, handler, ctx):
+ with ctx.socks_server(Socks4ProxyHandler) as server_address:
+ with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
+ response = ctx.socks_info_request(rh, target_domain='127.0.0.1')
+ assert response['version'] == 4
+ assert (response['ipv4_address'] == '127.0.0.1') != (response['domain_address'] == '127.0.0.1')
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks4a_domain_target(self, handler, ctx):
+ with ctx.socks_server(Socks4ProxyHandler) as server_address:
+ with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
+ response = ctx.socks_info_request(rh, target_domain='localhost')
+ assert response['version'] == 4
+ assert response['ipv4_address'] is None
+ assert response['domain_address'] == 'localhost'
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_ipv4_client_source_address(self, handler, ctx):
+ with ctx.socks_server(Socks4ProxyHandler) as server_address:
+ source_address = f'127.0.0.{random.randint(5, 255)}'
+ verify_address_availability(source_address)
+ with handler(proxies={'all': f'socks4://{server_address}'},
+ source_address=source_address) as rh:
+ response = ctx.socks_info_request(rh)
+ assert response['client_address'][0] == source_address
+ assert response['version'] == 4
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ @pytest.mark.parametrize('reply_code', [
+ Socks4CD.REQUEST_REJECTED_OR_FAILED,
+ Socks4CD.REQUEST_REJECTED_CANNOT_CONNECT_TO_IDENTD,
+ Socks4CD.REQUEST_REJECTED_DIFFERENT_USERID,
+ ])
+ def test_socks4_errors(self, handler, ctx, reply_code):
+ with ctx.socks_server(Socks4ProxyHandler, cd_reply=reply_code) as server_address:
+ with handler(proxies={'all': f'socks4://{server_address}'}) as rh:
+ with pytest.raises(ProxyError):
+ ctx.socks_info_request(rh)
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_ipv6_socks4_proxy(self, handler, ctx):
+ with ctx.socks_server(Socks4ProxyHandler, bind_ip='::1') as server_address:
+ with handler(proxies={'all': f'socks4://{server_address}'}) as rh:
+ response = ctx.socks_info_request(rh, target_domain='127.0.0.1')
+ assert response['client_address'][0] == '::1'
+ assert response['ipv4_address'] == '127.0.0.1'
+ assert response['version'] == 4
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_timeout(self, handler, ctx):
+ with ctx.socks_server(Socks4ProxyHandler, sleep=2) as server_address:
+ with handler(proxies={'all': f'socks4://{server_address}'}, timeout=0.5) as rh:
+ with pytest.raises(TransportError):
+ ctx.socks_info_request(rh)
+
+
+class TestSocks5Proxy:
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks5_no_auth(self, handler, ctx):
+ with ctx.socks_server(Socks5ProxyHandler) as server_address:
+ with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
+ response = ctx.socks_info_request(rh)
+ assert response['auth_methods'] == [0x0]
+ assert response['version'] == 5
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks5_user_pass(self, handler, ctx):
+ with ctx.socks_server(Socks5ProxyHandler, auth=('test', 'testpass')) as server_address:
+ with handler() as rh:
+ with pytest.raises(ProxyError):
+ ctx.socks_info_request(rh, proxies={'all': f'socks5://{server_address}'})
+
+ response = ctx.socks_info_request(
+ rh, proxies={'all': f'socks5://test:testpass@{server_address}'})
+
+ assert response['auth_methods'] == [Socks5Auth.AUTH_NONE, Socks5Auth.AUTH_USER_PASS]
+ assert response['version'] == 5
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks5_ipv4_target(self, handler, ctx):
+ with ctx.socks_server(Socks5ProxyHandler) as server_address:
+ with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
+ response = ctx.socks_info_request(rh, target_domain='127.0.0.1')
+ assert response['ipv4_address'] == '127.0.0.1'
+ assert response['version'] == 5
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks5_domain_target(self, handler, ctx):
+ with ctx.socks_server(Socks5ProxyHandler) as server_address:
+ with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
+ response = ctx.socks_info_request(rh, target_domain='localhost')
+ assert (response['ipv4_address'] == '127.0.0.1') != (response['ipv6_address'] == '::1')
+ assert response['version'] == 5
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks5h_domain_target(self, handler, ctx):
+ with ctx.socks_server(Socks5ProxyHandler) as server_address:
+ with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:
+ response = ctx.socks_info_request(rh, target_domain='localhost')
+ assert response['ipv4_address'] is None
+ assert response['domain_address'] == 'localhost'
+ assert response['version'] == 5
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks5h_ip_target(self, handler, ctx):
+ with ctx.socks_server(Socks5ProxyHandler) as server_address:
+ with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:
+ response = ctx.socks_info_request(rh, target_domain='127.0.0.1')
+ assert response['ipv4_address'] == '127.0.0.1'
+ assert response['domain_address'] is None
+ assert response['version'] == 5
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_socks5_ipv6_destination(self, handler, ctx):
+ with ctx.socks_server(Socks5ProxyHandler) as server_address:
+ with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
+ response = ctx.socks_info_request(rh, target_domain='[::1]')
+ assert response['ipv6_address'] == '::1'
+ assert response['version'] == 5
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_ipv6_socks5_proxy(self, handler, ctx):
+ with ctx.socks_server(Socks5ProxyHandler, bind_ip='::1') as server_address:
+ with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
+ response = ctx.socks_info_request(rh, target_domain='127.0.0.1')
+ assert response['client_address'][0] == '::1'
+ assert response['ipv4_address'] == '127.0.0.1'
+ assert response['version'] == 5
+
+ # XXX: is there any feasible way of testing IPv6 source addresses?
+ # Same would go for non-proxy source_address test...
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_ipv4_client_source_address(self, handler, ctx):
+ with ctx.socks_server(Socks5ProxyHandler) as server_address:
+ source_address = f'127.0.0.{random.randint(5, 255)}'
+ verify_address_availability(source_address)
+ with handler(proxies={'all': f'socks5://{server_address}'}, source_address=source_address) as rh:
+ response = ctx.socks_info_request(rh)
+ assert response['client_address'][0] == source_address
+ assert response['version'] == 5
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
+ @pytest.mark.parametrize('reply_code', [
+ Socks5Reply.GENERAL_FAILURE,
+ Socks5Reply.CONNECTION_NOT_ALLOWED,
+ Socks5Reply.NETWORK_UNREACHABLE,
+ Socks5Reply.HOST_UNREACHABLE,
+ Socks5Reply.CONNECTION_REFUSED,
+ Socks5Reply.TTL_EXPIRED,
+ Socks5Reply.COMMAND_NOT_SUPPORTED,
+ Socks5Reply.ADDRESS_TYPE_NOT_SUPPORTED,
+ ])
+ def test_socks5_errors(self, handler, ctx, reply_code):
+ with ctx.socks_server(Socks5ProxyHandler, reply=reply_code) as server_address:
+ with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
+ with pytest.raises(ProxyError):
+ ctx.socks_info_request(rh)
+
+ @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Websockets', 'ws')], indirect=True)
+ def test_timeout(self, handler, ctx):
+ with ctx.socks_server(Socks5ProxyHandler, sleep=2) as server_address:
+ with handler(proxies={'all': f'socks5://{server_address}'}, timeout=1) as rh:
+ with pytest.raises(TransportError):
+ ctx.socks_info_request(rh)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_subtitles.py b/test/test_subtitles.py
new file mode 100644
index 0000000..5736289
--- /dev/null
+++ b/test/test_subtitles.py
@@ -0,0 +1,452 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+from test.helper import FakeYDL, is_download_test, md5
+from yt_dlp.extractor import (
+ NPOIE,
+ NRKTVIE,
+ PBSIE,
+ CeskaTelevizeIE,
+ ComedyCentralIE,
+ DailymotionIE,
+ DemocracynowIE,
+ LyndaIE,
+ RaiPlayIE,
+ RTVEALaCartaIE,
+ TedTalkIE,
+ ThePlatformFeedIE,
+ ThePlatformIE,
+ VikiIE,
+ VimeoIE,
+ WallaIE,
+ YoutubeIE,
+)
+
+
+@is_download_test
+class BaseTestSubtitles(unittest.TestCase):
+ url = None
+ IE = None
+
+ def setUp(self):
+ self.DL = FakeYDL()
+ self.ie = self.IE()
+ self.DL.add_info_extractor(self.ie)
+ if not self.IE.working():
+ print('Skipping: %s marked as not _WORKING' % self.IE.ie_key())
+ self.skipTest('IE marked as not _WORKING')
+
+ def getInfoDict(self):
+ info_dict = self.DL.extract_info(self.url, download=False)
+ return info_dict
+
+ def getSubtitles(self):
+ info_dict = self.getInfoDict()
+ subtitles = info_dict['requested_subtitles']
+ if not subtitles:
+ return subtitles
+ for sub_info in subtitles.values():
+ if sub_info.get('data') is None:
+ uf = self.DL.urlopen(sub_info['url'])
+ sub_info['data'] = uf.read().decode()
+        return {lang: sub_info['data'] for lang, sub_info in subtitles.items()}
+
+
+@is_download_test
+class TestYoutubeSubtitles(BaseTestSubtitles):
+ # Available subtitles for QRS8MkLhQmM:
+ # Language formats
+ # ru vtt, ttml, srv3, srv2, srv1, json3
+ # fr vtt, ttml, srv3, srv2, srv1, json3
+ # en vtt, ttml, srv3, srv2, srv1, json3
+ # nl vtt, ttml, srv3, srv2, srv1, json3
+ # de vtt, ttml, srv3, srv2, srv1, json3
+ # ko vtt, ttml, srv3, srv2, srv1, json3
+ # it vtt, ttml, srv3, srv2, srv1, json3
+ # zh-Hant vtt, ttml, srv3, srv2, srv1, json3
+ # hi vtt, ttml, srv3, srv2, srv1, json3
+ # pt-BR vtt, ttml, srv3, srv2, srv1, json3
+ # es-MX vtt, ttml, srv3, srv2, srv1, json3
+ # ja vtt, ttml, srv3, srv2, srv1, json3
+ # pl vtt, ttml, srv3, srv2, srv1, json3
+ url = 'QRS8MkLhQmM'
+ IE = YoutubeIE
+
+ def test_youtube_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(len(subtitles.keys()), 13)
+ self.assertEqual(md5(subtitles['en']), 'ae1bd34126571a77aabd4d276b28044d')
+ self.assertEqual(md5(subtitles['it']), '0e0b667ba68411d88fd1c5f4f4eab2f9')
+ for lang in ['fr', 'de']:
+ self.assertTrue(subtitles.get(lang) is not None, 'Subtitles for \'%s\' not extracted' % lang)
+
+ def _test_subtitles_format(self, fmt, md5_hash, lang='en'):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['subtitlesformat'] = fmt
+ subtitles = self.getSubtitles()
+ self.assertEqual(md5(subtitles[lang]), md5_hash)
+
+ def test_youtube_subtitles_ttml_format(self):
+ self._test_subtitles_format('ttml', 'c97ddf1217390906fa9fbd34901f3da2')
+
+ def test_youtube_subtitles_vtt_format(self):
+ self._test_subtitles_format('vtt', 'ae1bd34126571a77aabd4d276b28044d')
+
+ def test_youtube_subtitles_json3_format(self):
+ self._test_subtitles_format('json3', '688dd1ce0981683867e7fe6fde2a224b')
+
+ def _test_automatic_captions(self, url, lang):
+ self.url = url
+ self.DL.params['writeautomaticsub'] = True
+ self.DL.params['subtitleslangs'] = [lang]
+ subtitles = self.getSubtitles()
+ self.assertTrue(subtitles[lang] is not None)
+
+ def test_youtube_automatic_captions(self):
+ # Available automatic captions for 8YoUxe5ncPo:
+ # Language formats (all in vtt, ttml, srv3, srv2, srv1, json3)
+ # gu, zh-Hans, zh-Hant, gd, ga, gl, lb, la, lo, tt, tr,
+ # lv, lt, tk, th, tg, te, fil, haw, yi, ceb, yo, de, da,
+ # el, eo, en, eu, et, es, ru, rw, ro, bn, be, bg, uk, jv,
+ # bs, ja, or, xh, co, ca, cy, cs, ps, pt, pa, vi, pl, hy,
+ # hr, ht, hu, hmn, hi, ha, mg, uz, ml, mn, mi, mk, ur,
+ # mt, ms, mr, ug, ta, my, af, sw, is, am,
+ # *it*, iw, sv, ar,
+ # su, zu, az, id, ig, nl, no, ne, ny, fr, ku, fy, fa, fi,
+ # ka, kk, sr, sq, ko, kn, km, st, sk, si, so, sn, sm, sl,
+ # ky, sd
+ # ...
+ self._test_automatic_captions('8YoUxe5ncPo', 'it')
+
+ @unittest.skip('Video unavailable')
+ def test_youtube_translated_subtitles(self):
+ # This video has a subtitles track, which can be translated (#4555)
+ self._test_automatic_captions('Ky9eprVWzlI', 'it')
+
+ def test_youtube_nosubtitles(self):
+ self.DL.expect_warning('video doesn\'t have subtitles')
+ # Available automatic captions for 8YoUxe5ncPo:
+ # ...
+ # 8YoUxe5ncPo has no subtitles
+ self.url = '8YoUxe5ncPo'
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertFalse(subtitles)
+
+
+@is_download_test
+class TestDailymotionSubtitles(BaseTestSubtitles):
+ url = 'http://www.dailymotion.com/video/xczg00'
+ IE = DailymotionIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertTrue(len(subtitles.keys()) >= 6)
+ self.assertEqual(md5(subtitles['en']), '976553874490cba125086bbfea3ff76f')
+ self.assertEqual(md5(subtitles['fr']), '594564ec7d588942e384e920e5341792')
+ for lang in ['es', 'fr', 'de']:
+ self.assertTrue(subtitles.get(lang) is not None, 'Subtitles for \'%s\' not extracted' % lang)
+
+ def test_nosubtitles(self):
+ self.DL.expect_warning('video doesn\'t have subtitles')
+ self.url = 'http://www.dailymotion.com/video/x12u166_le-zapping-tele-star-du-08-aout-2013_tv'
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertFalse(subtitles)
+
+
+@is_download_test
+@unittest.skip('IE broken')
+class TestTedSubtitles(BaseTestSubtitles):
+ url = 'http://www.ted.com/talks/dan_dennett_on_our_consciousness.html'
+ IE = TedTalkIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertTrue(len(subtitles.keys()) >= 28)
+ self.assertEqual(md5(subtitles['en']), '4262c1665ff928a2dada178f62cb8d14')
+ self.assertEqual(md5(subtitles['fr']), '66a63f7f42c97a50f8c0e90bc7797bb5')
+ for lang in ['es', 'fr', 'de']:
+ self.assertTrue(subtitles.get(lang) is not None, 'Subtitles for \'%s\' not extracted' % lang)
+
+
+@is_download_test
+class TestVimeoSubtitles(BaseTestSubtitles):
+ url = 'http://vimeo.com/76979871'
+ IE = VimeoIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'de', 'en', 'es', 'fr'})
+ self.assertEqual(md5(subtitles['en']), '386cbc9320b94e25cb364b97935e5dd1')
+ self.assertEqual(md5(subtitles['fr']), 'c9b69eef35bc6641c0d4da8a04f9dfac')
+
+ def test_nosubtitles(self):
+ self.DL.expect_warning('video doesn\'t have subtitles')
+ self.url = 'http://vimeo.com/68093876'
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertFalse(subtitles)
+
+
+@is_download_test
+@unittest.skip('IE broken')
+class TestWallaSubtitles(BaseTestSubtitles):
+ url = 'http://vod.walla.co.il/movie/2705958/the-yes-men'
+ IE = WallaIE
+
+ def test_allsubtitles(self):
+ self.DL.expect_warning('Automatic Captions not supported by this server')
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'heb'})
+ self.assertEqual(md5(subtitles['heb']), 'e758c5d7cb982f6bef14f377ec7a3920')
+
+ def test_nosubtitles(self):
+ self.DL.expect_warning('video doesn\'t have subtitles')
+ self.url = 'http://vod.walla.co.il/movie/2642630/one-direction-all-for-one'
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertFalse(subtitles)
+
+
+@is_download_test
+@unittest.skip('IE broken')
+class TestCeskaTelevizeSubtitles(BaseTestSubtitles):
+ url = 'http://www.ceskatelevize.cz/ivysilani/10600540290-u6-uzasny-svet-techniky'
+ IE = CeskaTelevizeIE
+
+ def test_allsubtitles(self):
+ self.DL.expect_warning('Automatic Captions not supported by this server')
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'cs'})
+ self.assertTrue(len(subtitles['cs']) > 20000)
+
+ def test_nosubtitles(self):
+ self.DL.expect_warning('video doesn\'t have subtitles')
+ self.url = 'http://www.ceskatelevize.cz/ivysilani/ivysilani/10441294653-hyde-park-civilizace/214411058091220'
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertFalse(subtitles)
+
+
+@is_download_test
+@unittest.skip('IE broken')
+class TestLyndaSubtitles(BaseTestSubtitles):
+ url = 'http://www.lynda.com/Bootstrap-tutorials/Using-exercise-files/110885/114408-4.html'
+ IE = LyndaIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'en'})
+ self.assertEqual(md5(subtitles['en']), '09bbe67222259bed60deaa26997d73a7')
+
+
+@is_download_test
+@unittest.skip('IE broken')
+class TestNPOSubtitles(BaseTestSubtitles):
+ url = 'http://www.npo.nl/nos-journaal/28-08-2014/POW_00722860'
+ IE = NPOIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'nl'})
+ self.assertEqual(md5(subtitles['nl']), 'fc6435027572b63fb4ab143abd5ad3f4')
+
+
+@is_download_test
+@unittest.skip('IE broken')
+class TestMTVSubtitles(BaseTestSubtitles):
+ url = 'http://www.cc.com/video-clips/p63lk0/adam-devine-s-house-party-chasing-white-swans'
+ IE = ComedyCentralIE
+
+ def getInfoDict(self):
+ return super().getInfoDict()['entries'][0]
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'en'})
+ self.assertEqual(md5(subtitles['en']), '78206b8d8a0cfa9da64dc026eea48961')
+
+
+@is_download_test
+class TestNRKSubtitles(BaseTestSubtitles):
+ url = 'http://tv.nrk.no/serie/ikke-gjoer-dette-hjemme/DMPV73000411/sesong-2/episode-1'
+ IE = NRKTVIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'nb-ttv'})
+ self.assertEqual(md5(subtitles['nb-ttv']), '67e06ff02d0deaf975e68f6cb8f6a149')
+
+
+@is_download_test
+class TestRaiPlaySubtitles(BaseTestSubtitles):
+ IE = RaiPlayIE
+
+ def test_subtitles_key(self):
+ self.url = 'http://www.raiplay.it/video/2014/04/Report-del-07042014-cb27157f-9dd0-4aee-b788-b1f67643a391.html'
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'it'})
+ self.assertEqual(md5(subtitles['it']), 'b1d90a98755126b61e667567a1f6680a')
+
+ def test_subtitles_array_key(self):
+ self.url = 'https://www.raiplay.it/video/2020/12/Report---04-01-2021-2e90f1de-8eee-4de4-ac0e-78d21db5b600.html'
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'it'})
+ self.assertEqual(md5(subtitles['it']), '4b3264186fbb103508abe5311cfcb9cd')
+
+
+@is_download_test
+@unittest.skip('IE broken - DRM only')
+class TestVikiSubtitles(BaseTestSubtitles):
+ url = 'http://www.viki.com/videos/1060846v-punch-episode-18'
+ IE = VikiIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'en'})
+ self.assertEqual(md5(subtitles['en']), '53cb083a5914b2d84ef1ab67b880d18a')
+
+
+@is_download_test
+class TestThePlatformSubtitles(BaseTestSubtitles):
+ # from http://www.3playmedia.com/services-features/tools/integrations/theplatform/
+ # (see http://theplatform.com/about/partners/type/subtitles-closed-captioning/)
+ url = 'theplatform:JFUjUE1_ehvq'
+ IE = ThePlatformIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'en'})
+ self.assertEqual(md5(subtitles['en']), '97e7670cbae3c4d26ae8bcc7fdd78d4b')
+
+
+@is_download_test
+@unittest.skip('IE broken')
+class TestThePlatformFeedSubtitles(BaseTestSubtitles):
+ url = 'http://feed.theplatform.com/f/7wvmTC/msnbc_video-p-test?form=json&pretty=true&range=-40&byGuid=n_hardball_5biden_140207'
+ IE = ThePlatformFeedIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'en'})
+ self.assertEqual(md5(subtitles['en']), '48649a22e82b2da21c9a67a395eedade')
+
+
+@is_download_test
+class TestRtveSubtitles(BaseTestSubtitles):
+ url = 'http://www.rtve.es/alacarta/videos/los-misterios-de-laura/misterios-laura-capitulo-32-misterio-del-numero-17-2-parte/2428621/'
+ IE = RTVEALaCartaIE
+
+ def test_allsubtitles(self):
+ self.skipTest('Only available from Spain')
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'es'})
+ self.assertEqual(md5(subtitles['es']), '69e70cae2d40574fb7316f31d6eb7fca')
+
+
+@is_download_test
+class TestDemocracynowSubtitles(BaseTestSubtitles):
+ url = 'http://www.democracynow.org/shows/2015/7/3'
+ IE = DemocracynowIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'en'})
+ self.assertEqual(md5(subtitles['en']), 'a3cc4c0b5eadd74d9974f1c1f5101045')
+
+ def test_subtitles_in_page(self):
+ self.url = 'http://www.democracynow.org/2015/7/3/this_flag_comes_down_today_bree'
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'en'})
+ self.assertEqual(md5(subtitles['en']), 'a3cc4c0b5eadd74d9974f1c1f5101045')
+
+
+@is_download_test
+class TestPBSSubtitles(BaseTestSubtitles):
+ url = 'https://www.pbs.org/video/how-fantasy-reflects-our-world-picecq/'
+ IE = PBSIE
+
+ def test_allsubtitles(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['allsubtitles'] = True
+ subtitles = self.getSubtitles()
+ self.assertEqual(set(subtitles.keys()), {'en'})
+
+ def test_subtitles_dfxp_format(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['subtitlesformat'] = 'dfxp'
+ subtitles = self.getSubtitles()
+ self.assertIn(md5(subtitles['en']), ['643b034254cdc3768ff1e750b6b5873b'])
+
+ def test_subtitles_vtt_format(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['subtitlesformat'] = 'vtt'
+ subtitles = self.getSubtitles()
+ self.assertIn(
+ md5(subtitles['en']), ['937a05711555b165d4c55a9667017045', 'f49ea998d6824d94959c8152a368ff73'])
+
+ def test_subtitles_srt_format(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['subtitlesformat'] = 'srt'
+ subtitles = self.getSubtitles()
+ self.assertIn(md5(subtitles['en']), ['2082c21b43759d9bf172931b2f2ca371'])
+
+ def test_subtitles_sami_format(self):
+ self.DL.params['writesubtitles'] = True
+ self.DL.params['subtitlesformat'] = 'sami'
+ subtitles = self.getSubtitles()
+ self.assertIn(md5(subtitles['en']), ['4256b16ac7da6a6780fafd04294e85cd'])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_update.py b/test/test_update.py
new file mode 100644
index 0000000..bc13956
--- /dev/null
+++ b/test/test_update.py
@@ -0,0 +1,228 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+from test.helper import FakeYDL, report_warning
+from yt_dlp.update import UpdateInfo, Updater
+
+
+# XXX: Keep in sync with yt_dlp.update.UPDATE_SOURCES
+TEST_UPDATE_SOURCES = {
+ 'stable': 'yt-dlp/yt-dlp',
+ 'nightly': 'yt-dlp/yt-dlp-nightly-builds',
+ 'master': 'yt-dlp/yt-dlp-master-builds',
+}
+
+TEST_API_DATA = {
+ 'yt-dlp/yt-dlp/latest': {
+ 'tag_name': '2023.12.31',
+ 'target_commitish': 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
+ 'name': 'yt-dlp 2023.12.31',
+ 'body': 'BODY',
+ },
+ 'yt-dlp/yt-dlp-nightly-builds/latest': {
+ 'tag_name': '2023.12.31.123456',
+ 'target_commitish': 'master',
+ 'name': 'yt-dlp nightly 2023.12.31.123456',
+ 'body': 'Generated from: https://github.com/yt-dlp/yt-dlp/commit/cccccccccccccccccccccccccccccccccccccccc',
+ },
+ 'yt-dlp/yt-dlp-master-builds/latest': {
+ 'tag_name': '2023.12.31.987654',
+ 'target_commitish': 'master',
+ 'name': 'yt-dlp master 2023.12.31.987654',
+ 'body': 'Generated from: https://github.com/yt-dlp/yt-dlp/commit/dddddddddddddddddddddddddddddddddddddddd',
+ },
+ 'yt-dlp/yt-dlp/tags/testing': {
+ 'tag_name': 'testing',
+ 'target_commitish': '9999999999999999999999999999999999999999',
+ 'name': 'testing',
+ 'body': 'BODY',
+ },
+ 'fork/yt-dlp/latest': {
+ 'tag_name': '2050.12.31',
+ 'target_commitish': 'eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee',
+ 'name': '2050.12.31',
+ 'body': 'BODY',
+ },
+ 'fork/yt-dlp/tags/pr0000': {
+ 'tag_name': 'pr0000',
+ 'target_commitish': 'ffffffffffffffffffffffffffffffffffffffff',
+ 'name': 'pr1234 2023.11.11.000000',
+ 'body': 'BODY',
+ },
+ 'fork/yt-dlp/tags/pr1234': {
+ 'tag_name': 'pr1234',
+ 'target_commitish': '0000000000000000000000000000000000000000',
+ 'name': 'pr1234 2023.12.31.555555',
+ 'body': 'BODY',
+ },
+ 'fork/yt-dlp/tags/pr9999': {
+ 'tag_name': 'pr9999',
+ 'target_commitish': '1111111111111111111111111111111111111111',
+ 'name': 'pr9999',
+ 'body': 'BODY',
+ },
+ 'fork/yt-dlp-satellite/tags/pr987': {
+ 'tag_name': 'pr987',
+ 'target_commitish': 'master',
+ 'name': 'pr987',
+ 'body': 'Generated from: https://github.com/yt-dlp/yt-dlp/commit/2222222222222222222222222222222222222222',
+ },
+}
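+# (The keys above mirror the release-API paths that FakeUpdater._call_api below
+# looks up: '<owner>/<repo>/latest' for the latest release and
+# '<owner>/<repo>/tags/<tag>' for a specific tag.)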
+
+TEST_LOCKFILE_COMMENT = '# This file is used for regulating self-update'
+
+TEST_LOCKFILE_V1 = r'''%s
+lock 2022.08.18.36 .+ Python 3\.6
+lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
+lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
+''' % TEST_LOCKFILE_COMMENT
+
+TEST_LOCKFILE_V2_TMPL = r'''%s
+lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
+lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
+lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
+lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
+lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
+lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
+lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
+'''
+
+TEST_LOCKFILE_V2 = TEST_LOCKFILE_V2_TMPL % TEST_LOCKFILE_COMMENT
+
+TEST_LOCKFILE_ACTUAL = TEST_LOCKFILE_V2_TMPL % TEST_LOCKFILE_V1.rstrip('\n')
+
+TEST_LOCKFILE_FORK = r'''%s# Test if a fork blocks updates to non-numeric tags
+lockV2 fork/yt-dlp pr0000 .+ Python 3.6
+lockV2 fork/yt-dlp pr1234 (?!win_x86_exe).+ Python 3\.7
+lockV2 fork/yt-dlp pr1234 win_x86_exe .+ Windows-(?:Vista|2008Server)
+lockV2 fork/yt-dlp pr9999 .+ Python 3.11
+''' % TEST_LOCKFILE_ACTUAL
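+
+# Illustrative sketch (not part of the upstream suite): a V1 'lock <version> <regex>'
+# line above caps updates for any build whose identifier matches the regex, while
+# 'lockV2' lines additionally name the repository they apply to. The real parsing
+# lives in yt_dlp.update.Updater._process_update_spec; the helper below (its name
+# is ours) only demonstrates the idea with plain `re`.
+def _example_lock_match(lock_line, identifier):
+ """Return the locked version if `identifier` matches the lock line's regex."""
+ import re
+ _, version, pattern = lock_line.split(' ', 2)
+ return version if re.match(pattern, identifier) else None
+
+
+assert _example_lock_match(
+ r'lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7', 'zip Python 3.7.1') == '2023.11.16'
+assert _example_lock_match(
+ r'lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7', 'win_x86_exe Python 3.7.9') is None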
+
+
+class FakeUpdater(Updater):
+ current_version = '2022.01.01'
+ current_commit = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
+
+ _channel = 'stable'
+ _origin = 'yt-dlp/yt-dlp'
+ _update_sources = TEST_UPDATE_SOURCES
+
+ def _download_update_spec(self, *args, **kwargs):
+ return TEST_LOCKFILE_ACTUAL
+
+ def _call_api(self, tag):
+ tag = f'tags/{tag}' if tag != 'latest' else tag
+ return TEST_API_DATA[f'{self.requested_repo}/{tag}']
+
+ def _report_error(self, msg, *args, **kwargs):
+ report_warning(msg)
+
+
+class TestUpdate(unittest.TestCase):
+ maxDiff = None
+
+ def test_update_spec(self):
+ ydl = FakeYDL()
+ updater = FakeUpdater(ydl, 'stable')
+
+ def test(lockfile, identifier, input_tag, expect_tag, exact=False, repo='yt-dlp/yt-dlp'):
+ updater._identifier = identifier
+ updater._exact = exact
+ updater.requested_repo = repo
+ result = updater._process_update_spec(lockfile, input_tag)
+ self.assertEqual(
+ result, expect_tag,
+ f'{identifier!r} requesting {repo}@{input_tag} (exact={exact}) '
+ f'returned {result!r} instead of {expect_tag!r}')
+
+ for lockfile in (TEST_LOCKFILE_V1, TEST_LOCKFILE_V2, TEST_LOCKFILE_ACTUAL, TEST_LOCKFILE_FORK):
+ # Normal operation
+ test(lockfile, 'zip Python 3.12.0', '2023.12.31', '2023.12.31')
+ test(lockfile, 'zip stable Python 3.12.0', '2023.12.31', '2023.12.31', exact=True)
+ # Python 3.6 --update should update only to its lock
+ test(lockfile, 'zip Python 3.6.0', '2023.11.16', '2022.08.18.36')
+ # --update-to an exact version later than the lock should return None
+ test(lockfile, 'zip stable Python 3.6.0', '2023.11.16', None, exact=True)
+ # Python 3.7 should be able to update to its lock
+ test(lockfile, 'zip Python 3.7.0', '2023.11.16', '2023.11.16')
+ test(lockfile, 'zip stable Python 3.7.1', '2023.11.16', '2023.11.16', exact=True)
+ # Non-win_x86_exe builds on py3.7 must be locked
+ test(lockfile, 'zip Python 3.7.1', '2023.12.31', '2023.11.16')
+ test(lockfile, 'zip stable Python 3.7.1', '2023.12.31', None, exact=True)
+ test( # Windows Vista w/ win_x86_exe must be locked
+ lockfile, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
+ '2023.12.31', '2023.11.16')
+ test( # Windows 2008Server w/ win_x86_exe must be locked
+ lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-2008Server',
+ '2023.12.31', None, exact=True)
+ test( # Windows 7 w/ win_x86_exe py3.7 build should be able to update beyond lock
+ lockfile, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
+ '2023.12.31', '2023.12.31')
+ test( # Windows 8.1 w/ '2008Server' in platform string should be able to update beyond lock
+ lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-post2008Server-6.2.9200',
+ '2023.12.31', '2023.12.31', exact=True)
+
+ # Forks can block updates to non-numeric tags rather than lock
+ test(TEST_LOCKFILE_FORK, 'zip Python 3.6.3', 'pr0000', None, repo='fork/yt-dlp')
+ test(TEST_LOCKFILE_FORK, 'zip stable Python 3.7.4', 'pr0000', 'pr0000', repo='fork/yt-dlp')
+ test(TEST_LOCKFILE_FORK, 'zip stable Python 3.7.4', 'pr1234', None, repo='fork/yt-dlp')
+ test(TEST_LOCKFILE_FORK, 'zip Python 3.8.1', 'pr1234', 'pr1234', repo='fork/yt-dlp', exact=True)
+ test(
+ TEST_LOCKFILE_FORK, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
+ 'pr1234', None, repo='fork/yt-dlp')
+ test(
+ TEST_LOCKFILE_FORK, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
+ '2023.12.31', '2023.12.31', repo='fork/yt-dlp')
+ test(TEST_LOCKFILE_FORK, 'zip Python 3.11.2', 'pr9999', None, repo='fork/yt-dlp', exact=True)
+ test(TEST_LOCKFILE_FORK, 'zip stable Python 3.12.0', 'pr9999', 'pr9999', repo='fork/yt-dlp')
+
+ def test_query_update(self):
+ ydl = FakeYDL()
+
+ def test(target, expected, current_version=None, current_commit=None, identifier=None):
+ updater = FakeUpdater(ydl, target)
+ if current_version:
+ updater.current_version = current_version
+ if current_commit:
+ updater.current_commit = current_commit
+ updater._identifier = identifier or 'zip'
+ update_info = updater.query_update(_output=True)
+ self.assertDictEqual(
+ update_info.__dict__ if update_info else {}, expected.__dict__ if expected else {})
+
+ test('yt-dlp/yt-dlp@latest', UpdateInfo(
+ '2023.12.31', version='2023.12.31', requested_version='2023.12.31', commit='b' * 40))
+ test('yt-dlp/yt-dlp-nightly-builds@latest', UpdateInfo(
+ '2023.12.31.123456', version='2023.12.31.123456', requested_version='2023.12.31.123456', commit='c' * 40))
+ test('yt-dlp/yt-dlp-master-builds@latest', UpdateInfo(
+ '2023.12.31.987654', version='2023.12.31.987654', requested_version='2023.12.31.987654', commit='d' * 40))
+ test('fork/yt-dlp@latest', UpdateInfo(
+ '2050.12.31', version='2050.12.31', requested_version='2050.12.31', commit='e' * 40))
+ test('fork/yt-dlp@pr0000', UpdateInfo(
+ 'pr0000', version='2023.11.11.000000', requested_version='2023.11.11.000000', commit='f' * 40))
+ test('fork/yt-dlp@pr1234', UpdateInfo(
+ 'pr1234', version='2023.12.31.555555', requested_version='2023.12.31.555555', commit='0' * 40))
+ test('fork/yt-dlp@pr9999', UpdateInfo(
+ 'pr9999', version=None, requested_version=None, commit='1' * 40))
+ test('fork/yt-dlp-satellite@pr987', UpdateInfo(
+ 'pr987', version=None, requested_version=None, commit='2' * 40))
+ test('yt-dlp/yt-dlp', None, current_version='2024.01.01')
+ test('stable', UpdateInfo(
+ '2023.12.31', version='2023.12.31', requested_version='2023.12.31', commit='b' * 40))
+ test('nightly', UpdateInfo(
+ '2023.12.31.123456', version='2023.12.31.123456', requested_version='2023.12.31.123456', commit='c' * 40))
+ test('master', UpdateInfo(
+ '2023.12.31.987654', version='2023.12.31.987654', requested_version='2023.12.31.987654', commit='d' * 40))
+ test('testing', None, current_commit='9' * 40)
+ test('testing', UpdateInfo('testing', commit='9' * 40))
+
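+# Usage note (illustrative): this module can be run on its own, e.g.
+# `python -m unittest test.test_update -v` from the repository root, or via
+# `pytest test/test_update.py`.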
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_utils.py b/test/test_utils.py
new file mode 100644
index 0000000..a3073f0
--- /dev/null
+++ b/test/test_utils.py
@@ -0,0 +1,2457 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import re
+import sys
+import unittest
+import warnings
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import contextlib
+import io
+import itertools
+import json
+import subprocess
+import xml.etree.ElementTree
+
+from yt_dlp.compat import (
+ compat_etree_fromstring,
+ compat_HTMLParseError,
+ compat_os_name,
+)
+from yt_dlp.utils import (
+ Config,
+ DateRange,
+ ExtractorError,
+ InAdvancePagedList,
+ LazyList,
+ OnDemandPagedList,
+ Popen,
+ age_restricted,
+ args_to_str,
+ base_url,
+ caesar,
+ clean_html,
+ clean_podcast_url,
+ cli_bool_option,
+ cli_option,
+ cli_valueless_option,
+ date_from_str,
+ datetime_from_str,
+ detect_exe_version,
+ determine_ext,
+ determine_file_encoding,
+ dfxp2srt,
+ dict_get,
+ encode_base_n,
+ encode_compat_str,
+ encodeFilename,
+ expand_path,
+ extract_attributes,
+ extract_basic_auth,
+ find_xpath_attr,
+ fix_xml_ampersands,
+ float_or_none,
+ format_bytes,
+ get_compatible_ext,
+ get_element_by_attribute,
+ get_element_by_class,
+ get_element_html_by_attribute,
+ get_element_html_by_class,
+ get_element_text_and_html_by_tag,
+ get_elements_by_attribute,
+ get_elements_by_class,
+ get_elements_html_by_attribute,
+ get_elements_html_by_class,
+ get_elements_text_and_html_by_attribute,
+ int_or_none,
+ intlist_to_bytes,
+ iri_to_uri,
+ is_html,
+ js_to_json,
+ limit_length,
+ locked_file,
+ lowercase_escape,
+ match_str,
+ merge_dicts,
+ mimetype2ext,
+ month_by_name,
+ multipart_encode,
+ ohdave_rsa_encrypt,
+ orderedSet,
+ parse_age_limit,
+ parse_bitrate,
+ parse_codecs,
+ parse_count,
+ parse_dfxp_time_expr,
+ parse_duration,
+ parse_filesize,
+ parse_iso8601,
+ parse_qs,
+ parse_resolution,
+ pkcs1pad,
+ prepend_extension,
+ read_batch_urls,
+ remove_end,
+ remove_quotes,
+ remove_start,
+ render_table,
+ replace_extension,
+ rot47,
+ sanitize_filename,
+ sanitize_path,
+ sanitize_url,
+ shell_quote,
+ smuggle_url,
+ str_or_none,
+ str_to_int,
+ strip_jsonp,
+ strip_or_none,
+ subtitles_filename,
+ timeconvert,
+ traverse_obj,
+ try_call,
+ unescapeHTML,
+ unified_strdate,
+ unified_timestamp,
+ unsmuggle_url,
+ update_url_query,
+ uppercase_escape,
+ url_basename,
+ url_or_none,
+ urlencode_postdata,
+ urljoin,
+ urshift,
+ variadic,
+ version_tuple,
+ xpath_attr,
+ xpath_element,
+ xpath_text,
+ xpath_with_ns,
+)
+from yt_dlp.utils.networking import (
+ HTTPHeaderDict,
+ escape_rfc3986,
+ normalize_url,
+ remove_dot_segments,
+)
+
+
+class TestUtil(unittest.TestCase):
+ def test_timeconvert(self):
+ self.assertTrue(timeconvert('') is None)
+ self.assertTrue(timeconvert('bougrg') is None)
+
+ def test_sanitize_filename(self):
+ self.assertEqual(sanitize_filename(''), '')
+ self.assertEqual(sanitize_filename('abc'), 'abc')
+ self.assertEqual(sanitize_filename('abc_d-e'), 'abc_d-e')
+
+ self.assertEqual(sanitize_filename('123'), '123')
+
+ self.assertEqual('abc⧸de', sanitize_filename('abc/de'))
+ self.assertFalse('/' in sanitize_filename('abc/de///'))
+
+ self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', is_id=False))
+ self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', is_id=False))
+ self.assertEqual('yes no', sanitize_filename('yes? no', is_id=False))
+ self.assertEqual('this - that', sanitize_filename('this: that', is_id=False))
+
+ self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
+ aumlaut = 'ä'
+ self.assertEqual(sanitize_filename(aumlaut), aumlaut)
+ tests = '\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0446\u0430'
+ self.assertEqual(sanitize_filename(tests), tests)
+
+ self.assertEqual(
+ sanitize_filename('New World record at 0:12:34'),
+ 'New World record at 0_12_34')
+
+ self.assertEqual(sanitize_filename('--gasdgf'), '--gasdgf')
+ self.assertEqual(sanitize_filename('--gasdgf', is_id=True), '--gasdgf')
+ self.assertEqual(sanitize_filename('--gasdgf', is_id=False), '_-gasdgf')
+ self.assertEqual(sanitize_filename('.gasdgf'), '.gasdgf')
+ self.assertEqual(sanitize_filename('.gasdgf', is_id=True), '.gasdgf')
+ self.assertEqual(sanitize_filename('.gasdgf', is_id=False), 'gasdgf')
+
+ forbidden = '"\0\\/'
+ for fc in forbidden:
+ for fbc in forbidden:
+ self.assertTrue(fbc not in sanitize_filename(fc))
+
+ def test_sanitize_filename_restricted(self):
+ self.assertEqual(sanitize_filename('abc', restricted=True), 'abc')
+ self.assertEqual(sanitize_filename('abc_d-e', restricted=True), 'abc_d-e')
+
+ self.assertEqual(sanitize_filename('123', restricted=True), '123')
+
+ self.assertEqual('abc_de', sanitize_filename('abc/de', restricted=True))
+ self.assertFalse('/' in sanitize_filename('abc/de///', restricted=True))
+
+ self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', restricted=True))
+ self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', restricted=True))
+ self.assertEqual('yes_no', sanitize_filename('yes? no', restricted=True))
+ self.assertEqual('this_-_that', sanitize_filename('this: that', restricted=True))
+
+ tests = 'aäb\u4e2d\u56fd\u7684c'
+ self.assertEqual(sanitize_filename(tests, restricted=True), 'aab_c')
+ self.assertTrue(sanitize_filename('\xf6', restricted=True) != '') # No empty filename
+
+ forbidden = '"\0\\/&!: \'\t\n()[]{}$;`^,#'
+ for fc in forbidden:
+ for fbc in forbidden:
+ self.assertTrue(fbc not in sanitize_filename(fc, restricted=True))
+
+ # Handle a common case more neatly
+ self.assertEqual(sanitize_filename('\u5927\u58f0\u5e26 - Song', restricted=True), 'Song')
+ self.assertEqual(sanitize_filename('\u603b\u7edf: Speech', restricted=True), 'Speech')
+ # .. but make sure the file name is never empty
+ self.assertTrue(sanitize_filename('-', restricted=True) != '')
+ self.assertTrue(sanitize_filename(':', restricted=True) != '')
+
+ self.assertEqual(sanitize_filename(
+ 'ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ', restricted=True),
+ 'AAAAAAAECEEEEIIIIDNOOOOOOOOEUUUUUYTHssaaaaaaaeceeeeiiiionooooooooeuuuuuythy')
+
+ def test_sanitize_ids(self):
+ self.assertEqual(sanitize_filename('_n_cd26wFpw', is_id=True), '_n_cd26wFpw')
+ self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
+ self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')
+
+ def test_sanitize_path(self):
+ if sys.platform != 'win32':
+ self.skipTest('Only relevant on Windows')
+
+ self.assertEqual(sanitize_path('abc'), 'abc')
+ self.assertEqual(sanitize_path('abc/def'), 'abc\\def')
+ self.assertEqual(sanitize_path('abc\\def'), 'abc\\def')
+ self.assertEqual(sanitize_path('abc|def'), 'abc#def')
+ self.assertEqual(sanitize_path('<>:"|?*'), '#######')
+ self.assertEqual(sanitize_path('C:/abc/def'), 'C:\\abc\\def')
+ self.assertEqual(sanitize_path('C?:/abc/def'), 'C##\\abc\\def')
+
+ self.assertEqual(sanitize_path('\\\\?\\UNC\\ComputerName\\abc'), '\\\\?\\UNC\\ComputerName\\abc')
+ self.assertEqual(sanitize_path('\\\\?\\UNC/ComputerName/abc'), '\\\\?\\UNC\\ComputerName\\abc')
+
+ self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
+ self.assertEqual(sanitize_path('\\\\?\\C:/abc'), '\\\\?\\C:\\abc')
+ self.assertEqual(sanitize_path('\\\\?\\C:\\ab?c\\de:f'), '\\\\?\\C:\\ab#c\\de#f')
+ self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
+
+ self.assertEqual(
+ sanitize_path('youtube/%(uploader)s/%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s'),
+ 'youtube\\%(uploader)s\\%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s')
+
+ self.assertEqual(
+ sanitize_path('youtube/TheWreckingYard ./00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part'),
+ 'youtube\\TheWreckingYard #\\00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part')
+ self.assertEqual(sanitize_path('abc/def...'), 'abc\\def..#')
+ self.assertEqual(sanitize_path('abc.../def'), 'abc..#\\def')
+ self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
+
+ self.assertEqual(sanitize_path('../abc'), '..\\abc')
+ self.assertEqual(sanitize_path('../../abc'), '..\\..\\abc')
+ self.assertEqual(sanitize_path('./abc'), 'abc')
+ self.assertEqual(sanitize_path('./../abc'), '..\\abc')
+
+ def test_sanitize_url(self):
+ self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')
+ self.assertEqual(sanitize_url('httpss://foo.bar'), 'https://foo.bar')
+ self.assertEqual(sanitize_url('rmtps://foo.bar'), 'rtmps://foo.bar')
+ self.assertEqual(sanitize_url('https://foo.bar'), 'https://foo.bar')
+ self.assertEqual(sanitize_url('foo bar'), 'foo bar')
+
+ def test_expand_path(self):
+ def env(var):
+ return f'%{var}%' if sys.platform == 'win32' else f'${var}'
+
+ os.environ['yt_dlp_EXPATH_PATH'] = 'expanded'
+ self.assertEqual(expand_path(env('yt_dlp_EXPATH_PATH')), 'expanded')
+
+ old_home = os.environ.get('HOME')
+ test_str = R'C:\Documents and Settings\тест\Application Data'
+ try:
+ os.environ['HOME'] = test_str
+ self.assertEqual(expand_path(env('HOME')), os.getenv('HOME'))
+ self.assertEqual(expand_path('~'), os.getenv('HOME'))
+ self.assertEqual(
+ expand_path('~/%s' % env('yt_dlp_EXPATH_PATH')),
+ '%s/expanded' % os.getenv('HOME'))
+ finally:
+ os.environ['HOME'] = old_home or ''
+
+ def test_prepend_extension(self):
+ self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
+ self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')
+ self.assertEqual(prepend_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
+ self.assertEqual(prepend_extension('abc', 'temp'), 'abc.temp')
+ self.assertEqual(prepend_extension('.abc', 'temp'), '.abc.temp')
+ self.assertEqual(prepend_extension('.abc.ext', 'temp'), '.abc.temp.ext')
+
+ def test_replace_extension(self):
+ self.assertEqual(replace_extension('abc.ext', 'temp'), 'abc.temp')
+ self.assertEqual(replace_extension('abc.ext', 'temp', 'ext'), 'abc.temp')
+ self.assertEqual(replace_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
+ self.assertEqual(replace_extension('abc', 'temp'), 'abc.temp')
+ self.assertEqual(replace_extension('.abc', 'temp'), '.abc.temp')
+ self.assertEqual(replace_extension('.abc.ext', 'temp'), '.abc.temp')
+
+ def test_subtitles_filename(self):
+ self.assertEqual(subtitles_filename('abc.ext', 'en', 'vtt'), 'abc.en.vtt')
+ self.assertEqual(subtitles_filename('abc.ext', 'en', 'vtt', 'ext'), 'abc.en.vtt')
+ self.assertEqual(subtitles_filename('abc.unexpected_ext', 'en', 'vtt', 'ext'), 'abc.unexpected_ext.en.vtt')
+
+ def test_remove_start(self):
+ self.assertEqual(remove_start(None, 'A - '), None)
+ self.assertEqual(remove_start('A - B', 'A - '), 'B')
+ self.assertEqual(remove_start('B - A', 'A - '), 'B - A')
+
+ def test_remove_end(self):
+ self.assertEqual(remove_end(None, ' - B'), None)
+ self.assertEqual(remove_end('A - B', ' - B'), 'A')
+ self.assertEqual(remove_end('B - A', ' - B'), 'B - A')
+
+ def test_remove_quotes(self):
+ self.assertEqual(remove_quotes(None), None)
+ self.assertEqual(remove_quotes('"'), '"')
+ self.assertEqual(remove_quotes("'"), "'")
+ self.assertEqual(remove_quotes(';'), ';')
+ self.assertEqual(remove_quotes('";'), '";')
+ self.assertEqual(remove_quotes('""'), '')
+ self.assertEqual(remove_quotes('";"'), ';')
+
+ def test_ordered_set(self):
+ self.assertEqual(orderedSet([1, 1, 2, 3, 4, 4, 5, 6, 7, 3, 5]), [1, 2, 3, 4, 5, 6, 7])
+ self.assertEqual(orderedSet([]), [])
+ self.assertEqual(orderedSet([1]), [1])
+ # keep the list ordered
+ self.assertEqual(orderedSet([135, 1, 1, 1]), [135, 1])
+
+ def test_unescape_html(self):
+ self.assertEqual(unescapeHTML('%20;'), '%20;')
+ self.assertEqual(unescapeHTML('&#x2F;'), '/')
+ self.assertEqual(unescapeHTML('&#47;'), '/')
+ self.assertEqual(unescapeHTML('&eacute;'), 'é')
+ self.assertEqual(unescapeHTML('&#2013266066;'), '&#2013266066;')
+ self.assertEqual(unescapeHTML('&a&quot;'), '&a"')
+ # HTML5 entities
+ self.assertEqual(unescapeHTML('&period;&apos;'), '.\'')
+
+ def test_date_from_str(self):
+ self.assertEqual(date_from_str('yesterday'), date_from_str('now-1day'))
+ self.assertEqual(date_from_str('now+7day'), date_from_str('now+1week'))
+ self.assertEqual(date_from_str('now+14day'), date_from_str('now+2week'))
+ self.assertEqual(date_from_str('20200229+365day'), date_from_str('20200229+1year'))
+ self.assertEqual(date_from_str('20210131+28day'), date_from_str('20210131+1month'))
+
+ def test_datetime_from_str(self):
+ self.assertEqual(datetime_from_str('yesterday', precision='day'), datetime_from_str('now-1day', precision='auto'))
+ self.assertEqual(datetime_from_str('now+7day', precision='day'), datetime_from_str('now+1week', precision='auto'))
+ self.assertEqual(datetime_from_str('now+14day', precision='day'), datetime_from_str('now+2week', precision='auto'))
+ self.assertEqual(datetime_from_str('20200229+365day', precision='day'), datetime_from_str('20200229+1year', precision='auto'))
+ self.assertEqual(datetime_from_str('20210131+28day', precision='day'), datetime_from_str('20210131+1month', precision='auto'))
+ self.assertEqual(datetime_from_str('20210131+59day', precision='day'), datetime_from_str('20210131+2month', precision='auto'))
+ self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
+ self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))
+
+ def test_daterange(self):
+ _20century = DateRange('19000101', '20000101')
+ self.assertFalse('17890714' in _20century)
+ _ac = DateRange('00010101')
+ self.assertTrue('19690721' in _ac)
+ _first_millennium = DateRange(end='10000101')
+ self.assertTrue('07110427' in _first_millennium)
+
+ def test_unified_dates(self):
+ self.assertEqual(unified_strdate('December 21, 2010'), '20101221')
+ self.assertEqual(unified_strdate('8/7/2009'), '20090708')
+ self.assertEqual(unified_strdate('Dec 14, 2012'), '20121214')
+ self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
+ self.assertEqual(unified_strdate('1968 12 10'), '19681210')
+ self.assertEqual(unified_strdate('1968-12-10'), '19681210')
+ self.assertEqual(unified_strdate('31-07-2022 20:00'), '20220731')
+ self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')
+ self.assertEqual(
+ unified_strdate('11/26/2014 11:30:00 AM PST', day_first=False),
+ '20141126')
+ self.assertEqual(
+ unified_strdate('2/2/2015 6:47:40 PM', day_first=False),
+ '20150202')
+ self.assertEqual(unified_strdate('Feb 14th 2016 5:45PM'), '20160214')
+ self.assertEqual(unified_strdate('25-09-2014'), '20140925')
+ self.assertEqual(unified_strdate('27.02.2016 17:30'), '20160227')
+ self.assertEqual(unified_strdate('UNKNOWN DATE FORMAT'), None)
+ self.assertEqual(unified_strdate('Feb 7, 2016 at 6:35 pm'), '20160207')
+ self.assertEqual(unified_strdate('July 15th, 2013'), '20130715')
+ self.assertEqual(unified_strdate('September 1st, 2013'), '20130901')
+ self.assertEqual(unified_strdate('Sep 2nd, 2013'), '20130902')
+ self.assertEqual(unified_strdate('November 3rd, 2019'), '20191103')
+ self.assertEqual(unified_strdate('October 23rd, 2005'), '20051023')
+
+ def test_unified_timestamps(self):
+ self.assertEqual(unified_timestamp('December 21, 2010'), 1292889600)
+ self.assertEqual(unified_timestamp('8/7/2009'), 1247011200)
+ self.assertEqual(unified_timestamp('Dec 14, 2012'), 1355443200)
+ self.assertEqual(unified_timestamp('2012/10/11 01:56:38 +0000'), 1349920598)
+ self.assertEqual(unified_timestamp('1968 12 10'), -33436800)
+ self.assertEqual(unified_timestamp('1968-12-10'), -33436800)
+ self.assertEqual(unified_timestamp('28/01/2014 21:00:00 +0100'), 1390939200)
+ self.assertEqual(
+ unified_timestamp('11/26/2014 11:30:00 AM PST', day_first=False),
+ 1417001400)
+ self.assertEqual(
+ unified_timestamp('2/2/2015 6:47:40 PM', day_first=False),
+ 1422902860)
+ self.assertEqual(unified_timestamp('Feb 14th 2016 5:45PM'), 1455471900)
+ self.assertEqual(unified_timestamp('25-09-2014'), 1411603200)
+ self.assertEqual(unified_timestamp('27.02.2016 17:30'), 1456594200)
+ self.assertEqual(unified_timestamp('UNKNOWN DATE FORMAT'), None)
+ self.assertEqual(unified_timestamp('May 16, 2016 11:15 PM'), 1463440500)
+ self.assertEqual(unified_timestamp('Feb 7, 2016 at 6:35 pm'), 1454870100)
+ self.assertEqual(unified_timestamp('2017-03-30T17:52:41Q'), 1490896361)
+ self.assertEqual(unified_timestamp('Sep 11, 2013 | 5:49 AM'), 1378878540)
+ self.assertEqual(unified_timestamp('December 15, 2017 at 7:49 am'), 1513324140)
+ self.assertEqual(unified_timestamp('2018-03-14T08:32:43.1493874+00:00'), 1521016363)
+
+ self.assertEqual(unified_timestamp('December 31 1969 20:00:01 EDT'), 1)
+ self.assertEqual(unified_timestamp('Wednesday 31 December 1969 18:01:26 MDT'), 86)
+ self.assertEqual(unified_timestamp('12/31/1969 20:01:18 EDT', False), 78)
+
+ def test_determine_ext(self):
+ self.assertEqual(determine_ext('http://example.com/foo/bar.mp4/?download'), 'mp4')
+ self.assertEqual(determine_ext('http://example.com/foo/bar/?download', None), None)
+ self.assertEqual(determine_ext('http://example.com/foo/bar.nonext/?download', None), None)
+ self.assertEqual(determine_ext('http://example.com/foo/bar/mp4?download', None), None)
+ self.assertEqual(determine_ext('http://example.com/foo/bar.m3u8//?download'), 'm3u8')
+ self.assertEqual(determine_ext('foobar', None), None)
+
+ def test_find_xpath_attr(self):
+ testxml = '''<root>
+ <node/>
+ <node x="a"/>
+ <node x="a" y="c" />
+ <node x="b" y="d" />
+ <node x="" />
+ </root>'''
+ doc = compat_etree_fromstring(testxml)
+
+ self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n'), None)
+ self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n', 'v'), None)
+ self.assertEqual(find_xpath_attr(doc, './/node', 'n'), None)
+ self.assertEqual(find_xpath_attr(doc, './/node', 'n', 'v'), None)
+ self.assertEqual(find_xpath_attr(doc, './/node', 'x'), doc[1])
+ self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'a'), doc[1])
+ self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'b'), doc[3])
+ self.assertEqual(find_xpath_attr(doc, './/node', 'y'), doc[2])
+ self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'c'), doc[2])
+ self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'd'), doc[3])
+ self.assertEqual(find_xpath_attr(doc, './/node', 'x', ''), doc[4])
+
+ def test_xpath_with_ns(self):
+ testxml = '''<root xmlns:media="http://example.com/">
+ <media:song>
+ <media:author>The Author</media:author>
+ <url>http://server.com/download.mp3</url>
+ </media:song>
+ </root>'''
+ doc = compat_etree_fromstring(testxml)
+ find = lambda p: doc.find(xpath_with_ns(p, {'media': 'http://example.com/'}))
+ self.assertTrue(find('media:song') is not None)
+ self.assertEqual(find('media:song/media:author').text, 'The Author')
+ self.assertEqual(find('media:song/url').text, 'http://server.com/download.mp3')
+
+ def test_xpath_element(self):
+ doc = xml.etree.ElementTree.Element('root')
+ div = xml.etree.ElementTree.SubElement(doc, 'div')
+ p = xml.etree.ElementTree.SubElement(div, 'p')
+ p.text = 'Foo'
+ self.assertEqual(xpath_element(doc, 'div/p'), p)
+ self.assertEqual(xpath_element(doc, ['div/p']), p)
+ self.assertEqual(xpath_element(doc, ['div/bar', 'div/p']), p)
+ self.assertEqual(xpath_element(doc, 'div/bar', default='default'), 'default')
+ self.assertEqual(xpath_element(doc, ['div/bar'], default='default'), 'default')
+ self.assertTrue(xpath_element(doc, 'div/bar') is None)
+ self.assertTrue(xpath_element(doc, ['div/bar']) is None)
+ self.assertTrue(xpath_element(doc, ['div/bar'], 'div/baz') is None)
+ self.assertRaises(ExtractorError, xpath_element, doc, 'div/bar', fatal=True)
+ self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar'], fatal=True)
+ self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar', 'div/baz'], fatal=True)
+
+ def test_xpath_text(self):
+ testxml = '''<root>
+ <div>
+ <p>Foo</p>
+ </div>
+ </root>'''
+ doc = compat_etree_fromstring(testxml)
+ self.assertEqual(xpath_text(doc, 'div/p'), 'Foo')
+ self.assertEqual(xpath_text(doc, 'div/bar', default='default'), 'default')
+ self.assertTrue(xpath_text(doc, 'div/bar') is None)
+ self.assertRaises(ExtractorError, xpath_text, doc, 'div/bar', fatal=True)
+
+ def test_xpath_attr(self):
+ testxml = '''<root>
+ <div>
+ <p x="a">Foo</p>
+ </div>
+ </root>'''
+ doc = compat_etree_fromstring(testxml)
+ self.assertEqual(xpath_attr(doc, 'div/p', 'x'), 'a')
+ self.assertEqual(xpath_attr(doc, 'div/bar', 'x'), None)
+ self.assertEqual(xpath_attr(doc, 'div/p', 'y'), None)
+ self.assertEqual(xpath_attr(doc, 'div/bar', 'x', default='default'), 'default')
+ self.assertEqual(xpath_attr(doc, 'div/p', 'y', default='default'), 'default')
+ self.assertRaises(ExtractorError, xpath_attr, doc, 'div/bar', 'x', fatal=True)
+ self.assertRaises(ExtractorError, xpath_attr, doc, 'div/p', 'y', fatal=True)
+
+ def test_smuggle_url(self):
+ data = {"ö": "ö", "abc": [3]}
+ url = 'https://foo.bar/baz?x=y#a'
+ smug_url = smuggle_url(url, data)
+ unsmug_url, unsmug_data = unsmuggle_url(smug_url)
+ self.assertEqual(url, unsmug_url)
+ self.assertEqual(data, unsmug_data)
+
+ res_url, res_data = unsmuggle_url(url)
+ self.assertEqual(res_url, url)
+ self.assertEqual(res_data, None)
+
+ smug_url = smuggle_url(url, {'a': 'b'})
+ smug_smug_url = smuggle_url(smug_url, {'c': 'd'})
+ res_url, res_data = unsmuggle_url(smug_smug_url)
+ self.assertEqual(res_url, url)
+ self.assertEqual(res_data, {'a': 'b', 'c': 'd'})
+
+ def test_shell_quote(self):
+ args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')]
+ self.assertEqual(
+ shell_quote(args),
+ """ffmpeg -i 'ñ€ß'"'"'.mp4'""" if compat_os_name != 'nt' else '''ffmpeg -i "ñ€ß'.mp4"''')
+
+ def test_float_or_none(self):
+ self.assertEqual(float_or_none('42.42'), 42.42)
+ self.assertEqual(float_or_none('42'), 42.0)
+ self.assertEqual(float_or_none(''), None)
+ self.assertEqual(float_or_none(None), None)
+ self.assertEqual(float_or_none([]), None)
+ self.assertEqual(float_or_none(set()), None)
+
+ def test_int_or_none(self):
+ self.assertEqual(int_or_none('42'), 42)
+ self.assertEqual(int_or_none(''), None)
+ self.assertEqual(int_or_none(None), None)
+ self.assertEqual(int_or_none([]), None)
+ self.assertEqual(int_or_none(set()), None)
+
+ def test_str_to_int(self):
+ self.assertEqual(str_to_int('123,456'), 123456)
+ self.assertEqual(str_to_int('123.456'), 123456)
+ self.assertEqual(str_to_int(523), 523)
+ self.assertEqual(str_to_int('noninteger'), None)
+ self.assertEqual(str_to_int([]), None)
+
+ def test_url_basename(self):
+ self.assertEqual(url_basename('http://foo.de/'), '')
+ self.assertEqual(url_basename('http://foo.de/bar/baz'), 'baz')
+ self.assertEqual(url_basename('http://foo.de/bar/baz?x=y'), 'baz')
+ self.assertEqual(url_basename('http://foo.de/bar/baz#x=y'), 'baz')
+ self.assertEqual(url_basename('http://foo.de/bar/baz/'), 'baz')
+ self.assertEqual(
+ url_basename('http://media.w3.org/2010/05/sintel/trailer.mp4'),
+ 'trailer.mp4')
+
+ def test_base_url(self):
+ self.assertEqual(base_url('http://foo.de/'), 'http://foo.de/')
+ self.assertEqual(base_url('http://foo.de/bar'), 'http://foo.de/')
+ self.assertEqual(base_url('http://foo.de/bar/'), 'http://foo.de/bar/')
+ self.assertEqual(base_url('http://foo.de/bar/baz'), 'http://foo.de/bar/')
+ self.assertEqual(base_url('http://foo.de/bar/baz?x=z/x/c'), 'http://foo.de/bar/')
+ self.assertEqual(base_url('http://foo.de/bar/baz&x=z&w=y/x/c'), 'http://foo.de/bar/baz&x=z&w=y/x/')
+
+ def test_urljoin(self):
+ self.assertEqual(urljoin('http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin(b'http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin('http://foo.de/', b'/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin(b'http://foo.de/', b'/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin('//foo.de/', '/a/b/c.txt'), '//foo.de/a/b/c.txt')
+ self.assertEqual(urljoin('http://foo.de/', 'a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin('http://foo.de', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin('http://foo.de', 'a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin('http://foo.de/', 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin('http://foo.de/', '//foo.de/a/b/c.txt'), '//foo.de/a/b/c.txt')
+ self.assertEqual(urljoin(None, 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin(None, '//foo.de/a/b/c.txt'), '//foo.de/a/b/c.txt')
+ self.assertEqual(urljoin('', 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin(['foobar'], 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
+ self.assertEqual(urljoin('http://foo.de/', None), None)
+ self.assertEqual(urljoin('http://foo.de/', ''), None)
+ self.assertEqual(urljoin('http://foo.de/', ['foobar']), None)
+ self.assertEqual(urljoin('http://foo.de/a/b/c.txt', '.././../d.txt'), 'http://foo.de/d.txt')
+ self.assertEqual(urljoin('http://foo.de/a/b/c.txt', 'rtmp://foo.de'), 'rtmp://foo.de')
+ self.assertEqual(urljoin(None, 'rtmp://foo.de'), 'rtmp://foo.de')
+
+ def test_url_or_none(self):
+ self.assertEqual(url_or_none(None), None)
+ self.assertEqual(url_or_none(''), None)
+ self.assertEqual(url_or_none('foo'), None)
+ self.assertEqual(url_or_none('http://foo.de'), 'http://foo.de')
+ self.assertEqual(url_or_none('https://foo.de'), 'https://foo.de')
+ self.assertEqual(url_or_none('http$://foo.de'), None)
+ self.assertEqual(url_or_none('http://foo.de'), 'http://foo.de')
+ self.assertEqual(url_or_none('//foo.de'), '//foo.de')
+ self.assertEqual(url_or_none('s3://foo.de'), None)
+ self.assertEqual(url_or_none('rtmpte://foo.de'), 'rtmpte://foo.de')
+ self.assertEqual(url_or_none('mms://foo.de'), 'mms://foo.de')
+ self.assertEqual(url_or_none('rtspu://foo.de'), 'rtspu://foo.de')
+ self.assertEqual(url_or_none('ftps://foo.de'), 'ftps://foo.de')
+
+ def test_parse_age_limit(self):
+ self.assertEqual(parse_age_limit(None), None)
+ self.assertEqual(parse_age_limit(False), None)
+ self.assertEqual(parse_age_limit('invalid'), None)
+ self.assertEqual(parse_age_limit(0), 0)
+ self.assertEqual(parse_age_limit(18), 18)
+ self.assertEqual(parse_age_limit(21), 21)
+ self.assertEqual(parse_age_limit(22), None)
+ self.assertEqual(parse_age_limit('18'), 18)
+ self.assertEqual(parse_age_limit('18+'), 18)
+ self.assertEqual(parse_age_limit('PG-13'), 13)
+ self.assertEqual(parse_age_limit('TV-14'), 14)
+ self.assertEqual(parse_age_limit('TV-MA'), 17)
+ self.assertEqual(parse_age_limit('TV14'), 14)
+ self.assertEqual(parse_age_limit('TV_G'), 0)
+
+ def test_parse_duration(self):
+ self.assertEqual(parse_duration(None), None)
+ self.assertEqual(parse_duration(False), None)
+ self.assertEqual(parse_duration('invalid'), None)
+ self.assertEqual(parse_duration('1'), 1)
+ self.assertEqual(parse_duration('1337:12'), 80232)
+ self.assertEqual(parse_duration('9:12:43'), 33163)
+ self.assertEqual(parse_duration('12:00'), 720)
+ self.assertEqual(parse_duration('00:01:01'), 61)
+ self.assertEqual(parse_duration('x:y'), None)
+ self.assertEqual(parse_duration('3h11m53s'), 11513)
+ self.assertEqual(parse_duration('3h 11m 53s'), 11513)
+ self.assertEqual(parse_duration('3 hours 11 minutes 53 seconds'), 11513)
+ self.assertEqual(parse_duration('3 hours 11 mins 53 secs'), 11513)
+ self.assertEqual(parse_duration('3 hours, 11 minutes, 53 seconds'), 11513)
+ self.assertEqual(parse_duration('3 hours, 11 mins, 53 secs'), 11513)
+ self.assertEqual(parse_duration('62m45s'), 3765)
+ self.assertEqual(parse_duration('6m59s'), 419)
+ self.assertEqual(parse_duration('49s'), 49)
+ self.assertEqual(parse_duration('0h0m0s'), 0)
+ self.assertEqual(parse_duration('0m0s'), 0)
+ self.assertEqual(parse_duration('0s'), 0)
+ self.assertEqual(parse_duration('01:02:03.05'), 3723.05)
+ self.assertEqual(parse_duration('T30M38S'), 1838)
+ self.assertEqual(parse_duration('5 s'), 5)
+ self.assertEqual(parse_duration('3 min'), 180)
+ self.assertEqual(parse_duration('2.5 hours'), 9000)
+ self.assertEqual(parse_duration('02:03:04'), 7384)
+ self.assertEqual(parse_duration('01:02:03:04'), 93784)
+ self.assertEqual(parse_duration('1 hour 3 minutes'), 3780)
+ self.assertEqual(parse_duration('87 Min.'), 5220)
+ self.assertEqual(parse_duration('PT1H0.040S'), 3600.04)
+ self.assertEqual(parse_duration('PT00H03M30SZ'), 210)
+ self.assertEqual(parse_duration('P0Y0M0DT0H4M20.880S'), 260.88)
+ self.assertEqual(parse_duration('01:02:03:050'), 3723.05)
+ self.assertEqual(parse_duration('103:050'), 103.05)
+ self.assertEqual(parse_duration('1HR 3MIN'), 3780)
+ self.assertEqual(parse_duration('2hrs 3mins'), 7380)
+
+ def test_fix_xml_ampersands(self):
+ self.assertEqual(
+ fix_xml_ampersands('"&x=y&z=a'), '"&amp;x=y&amp;z=a')
+ self.assertEqual(
+ fix_xml_ampersands('"&amp;x=y&wrong;&z=a'),
+ '"&amp;x=y&amp;wrong;&amp;z=a')
+ self.assertEqual(
+ fix_xml_ampersands('&amp;&apos;&gt;&lt;&quot;'),
+ '&amp;&apos;&gt;&lt;&quot;')
+ self.assertEqual(
+ fix_xml_ampersands('&#1234;&#x1abC;'), '&#1234;&#x1abC;')
+ self.assertEqual(fix_xml_ampersands('&#&#'), '&amp;#&amp;#')
+
+ def test_paged_list(self):
+ def testPL(size, pagesize, sliceargs, expected):
+ def get_page(pagenum):
+ firstid = pagenum * pagesize
+ upto = min(size, pagenum * pagesize + pagesize)
+ yield from range(firstid, upto)
+
+ pl = OnDemandPagedList(get_page, pagesize)
+ got = pl.getslice(*sliceargs)
+ self.assertEqual(got, expected)
+
+ iapl = InAdvancePagedList(get_page, size // pagesize + 1, pagesize)
+ got = iapl.getslice(*sliceargs)
+ self.assertEqual(got, expected)
+
+ testPL(5, 2, (), [0, 1, 2, 3, 4])
+ testPL(5, 2, (1,), [1, 2, 3, 4])
+ testPL(5, 2, (2,), [2, 3, 4])
+ testPL(5, 2, (4,), [4])
+ testPL(5, 2, (0, 3), [0, 1, 2])
+ testPL(5, 2, (1, 4), [1, 2, 3])
+ testPL(5, 2, (2, 99), [2, 3, 4])
+ testPL(5, 2, (20, 99), [])
+
+ def test_read_batch_urls(self):
+ f = io.StringIO('''\xef\xbb\xbf foo
+ bar\r
+ baz
+ # More after this line\r
+ ; or after this
+ bam''')
+ self.assertEqual(read_batch_urls(f), ['foo', 'bar', 'baz', 'bam'])
+
+ def test_urlencode_postdata(self):
+ data = urlencode_postdata({'username': 'foo@bar.com', 'password': '1234'})
+ self.assertTrue(isinstance(data, bytes))
+
+ def test_update_url_query(self):
+ self.assertEqual(parse_qs(update_url_query(
+ 'http://example.com/path', {'quality': ['HD'], 'format': ['mp4']})),
+ parse_qs('http://example.com/path?quality=HD&format=mp4'))
+ self.assertEqual(parse_qs(update_url_query(
+ 'http://example.com/path', {'system': ['LINUX', 'WINDOWS']})),
+ parse_qs('http://example.com/path?system=LINUX&system=WINDOWS'))
+ self.assertEqual(parse_qs(update_url_query(
+ 'http://example.com/path', {'fields': 'id,formats,subtitles'})),
+ parse_qs('http://example.com/path?fields=id,formats,subtitles'))
+ self.assertEqual(parse_qs(update_url_query(
+ 'http://example.com/path', {'fields': ('id,formats,subtitles', 'thumbnails')})),
+ parse_qs('http://example.com/path?fields=id,formats,subtitles&fields=thumbnails'))
+ self.assertEqual(parse_qs(update_url_query(
+ 'http://example.com/path?manifest=f4m', {'manifest': []})),
+ parse_qs('http://example.com/path'))
+ self.assertEqual(parse_qs(update_url_query(
+ 'http://example.com/path?system=LINUX&system=WINDOWS', {'system': 'LINUX'})),
+ parse_qs('http://example.com/path?system=LINUX'))
+ self.assertEqual(parse_qs(update_url_query(
+ 'http://example.com/path', {'fields': b'id,formats,subtitles'})),
+ parse_qs('http://example.com/path?fields=id,formats,subtitles'))
+ self.assertEqual(parse_qs(update_url_query(
+ 'http://example.com/path', {'width': 1080, 'height': 720})),
+ parse_qs('http://example.com/path?width=1080&height=720'))
+ self.assertEqual(parse_qs(update_url_query(
+ 'http://example.com/path', {'bitrate': 5020.43})),
+ parse_qs('http://example.com/path?bitrate=5020.43'))
+ self.assertEqual(parse_qs(update_url_query(
+ 'http://example.com/path', {'test': '第二行тест'})),
+ parse_qs('http://example.com/path?test=%E7%AC%AC%E4%BA%8C%E8%A1%8C%D1%82%D0%B5%D1%81%D1%82'))
+
+ def test_multipart_encode(self):
+ self.assertEqual(
+ multipart_encode({b'field': b'value'}, boundary='AAAAAA')[0],
+ b'--AAAAAA\r\nContent-Disposition: form-data; name="field"\r\n\r\nvalue\r\n--AAAAAA--\r\n')
+ self.assertEqual(
+ multipart_encode({'欄位'.encode(): '值'.encode()}, boundary='AAAAAA')[0],
+ b'--AAAAAA\r\nContent-Disposition: form-data; name="\xe6\xac\x84\xe4\xbd\x8d"\r\n\r\n\xe5\x80\xbc\r\n--AAAAAA--\r\n')
+ self.assertRaises(
+ ValueError, multipart_encode, {b'field': b'value'}, boundary='value')
+
+ def test_dict_get(self):
+ FALSE_VALUES = {
+ 'none': None,
+ 'false': False,
+ 'zero': 0,
+ 'empty_string': '',
+ 'empty_list': [],
+ }
+ d = FALSE_VALUES.copy()
+ d['a'] = 42
+ self.assertEqual(dict_get(d, 'a'), 42)
+ self.assertEqual(dict_get(d, 'b'), None)
+ self.assertEqual(dict_get(d, 'b', 42), 42)
+ self.assertEqual(dict_get(d, ('a', )), 42)
+ self.assertEqual(dict_get(d, ('b', 'a', )), 42)
+ self.assertEqual(dict_get(d, ('b', 'c', 'a', 'd', )), 42)
+ self.assertEqual(dict_get(d, ('b', 'c', )), None)
+ self.assertEqual(dict_get(d, ('b', 'c', ), 42), 42)
+ for key, false_value in FALSE_VALUES.items():
+ self.assertEqual(dict_get(d, ('b', 'c', key, )), None)
+ self.assertEqual(dict_get(d, ('b', 'c', key, ), skip_false_values=False), false_value)
+
+ def test_merge_dicts(self):
+ self.assertEqual(merge_dicts({'a': 1}, {'b': 2}), {'a': 1, 'b': 2})
+ self.assertEqual(merge_dicts({'a': 1}, {'a': 2}), {'a': 1})
+ self.assertEqual(merge_dicts({'a': 1}, {'a': None}), {'a': 1})
+ self.assertEqual(merge_dicts({'a': 1}, {'a': ''}), {'a': 1})
+ self.assertEqual(merge_dicts({'a': 1}, {}), {'a': 1})
+ self.assertEqual(merge_dicts({'a': None}, {'a': 1}), {'a': 1})
+ self.assertEqual(merge_dicts({'a': ''}, {'a': 1}), {'a': ''})
+ self.assertEqual(merge_dicts({'a': ''}, {'a': 'abc'}), {'a': 'abc'})
+ self.assertEqual(merge_dicts({'a': None}, {'a': ''}, {'a': 'abc'}), {'a': 'abc'})
+
+ def test_encode_compat_str(self):
+ self.assertEqual(encode_compat_str(b'\xd1\x82\xd0\xb5\xd1\x81\xd1\x82', 'utf-8'), 'тест')
+ self.assertEqual(encode_compat_str('тест', 'utf-8'), 'тест')
+
+ def test_parse_iso8601(self):
+ self.assertEqual(parse_iso8601('2014-03-23T23:04:26+0100'), 1395612266)
+ self.assertEqual(parse_iso8601('2014-03-23T22:04:26+0000'), 1395612266)
+ self.assertEqual(parse_iso8601('2014-03-23T22:04:26Z'), 1395612266)
+ self.assertEqual(parse_iso8601('2014-03-23T22:04:26.1234Z'), 1395612266)
+ self.assertEqual(parse_iso8601('2015-09-29T08:27:31.727'), 1443515251)
+ self.assertEqual(parse_iso8601('2015-09-29T08-27-31.727'), None)
+
+ def test_strip_jsonp(self):
+ stripped = strip_jsonp('cb ([ {"id":"532cb",\n\n\n"x":\n3}\n]\n);')
+ d = json.loads(stripped)
+ self.assertEqual(d, [{"id": "532cb", "x": 3}])
+
+ stripped = strip_jsonp('parseMetadata({"STATUS":"OK"})\n\n\n//epc')
+ d = json.loads(stripped)
+ self.assertEqual(d, {'STATUS': 'OK'})
+
+ stripped = strip_jsonp('ps.embedHandler({"status": "success"});')
+ d = json.loads(stripped)
+ self.assertEqual(d, {'status': 'success'})
+
+ stripped = strip_jsonp('window.cb && window.cb({"status": "success"});')
+ d = json.loads(stripped)
+ self.assertEqual(d, {'status': 'success'})
+
+ stripped = strip_jsonp('window.cb && cb({"status": "success"});')
+ d = json.loads(stripped)
+ self.assertEqual(d, {'status': 'success'})
+
+ stripped = strip_jsonp('({"status": "success"});')
+ d = json.loads(stripped)
+ self.assertEqual(d, {'status': 'success'})
+
+ def test_strip_or_none(self):
+ self.assertEqual(strip_or_none(' abc'), 'abc')
+ self.assertEqual(strip_or_none('abc '), 'abc')
+ self.assertEqual(strip_or_none(' abc '), 'abc')
+ self.assertEqual(strip_or_none('\tabc\t'), 'abc')
+ self.assertEqual(strip_or_none('\n\tabc\n\t'), 'abc')
+ self.assertEqual(strip_or_none('abc'), 'abc')
+ self.assertEqual(strip_or_none(''), '')
+ self.assertEqual(strip_or_none(None), None)
+ self.assertEqual(strip_or_none(42), None)
+ self.assertEqual(strip_or_none([]), None)
+
+ def test_uppercase_escape(self):
+ self.assertEqual(uppercase_escape('aä'), 'aä')
+ self.assertEqual(uppercase_escape('\\U0001d550'), '𝕐')
+
+ def test_lowercase_escape(self):
+ self.assertEqual(lowercase_escape('aä'), 'aä')
+ self.assertEqual(lowercase_escape('\\u0026'), '&')
+
+ def test_limit_length(self):
+ self.assertEqual(limit_length(None, 12), None)
+ self.assertEqual(limit_length('foo', 12), 'foo')
+ self.assertTrue(
+ limit_length('foo bar baz asd', 12).startswith('foo bar'))
+ self.assertTrue('...' in limit_length('foo bar baz asd', 12))
+
+ def test_mimetype2ext(self):
+ self.assertEqual(mimetype2ext(None), None)
+ self.assertEqual(mimetype2ext('video/x-flv'), 'flv')
+ self.assertEqual(mimetype2ext('application/x-mpegURL'), 'm3u8')
+ self.assertEqual(mimetype2ext('text/vtt'), 'vtt')
+ self.assertEqual(mimetype2ext('text/vtt;charset=utf-8'), 'vtt')
+ self.assertEqual(mimetype2ext('text/html; charset=utf-8'), 'html')
+ self.assertEqual(mimetype2ext('audio/x-wav'), 'wav')
+ self.assertEqual(mimetype2ext('audio/x-wav;codec=pcm'), 'wav')
+
+ def test_month_by_name(self):
+ self.assertEqual(month_by_name(None), None)
+ self.assertEqual(month_by_name('December', 'en'), 12)
+ self.assertEqual(month_by_name('décembre', 'fr'), 12)
+ self.assertEqual(month_by_name('December'), 12)
+ self.assertEqual(month_by_name('décembre'), None)
+ self.assertEqual(month_by_name('Unknown', 'unknown'), None)
+
+ def test_parse_codecs(self):
+ self.assertEqual(parse_codecs(''), {})
+ self.assertEqual(parse_codecs('avc1.77.30, mp4a.40.2'), {
+ 'vcodec': 'avc1.77.30',
+ 'acodec': 'mp4a.40.2',
+ 'dynamic_range': None,
+ })
+ self.assertEqual(parse_codecs('mp4a.40.2'), {
+ 'vcodec': 'none',
+ 'acodec': 'mp4a.40.2',
+ 'dynamic_range': None,
+ })
+ self.assertEqual(parse_codecs('mp4a.40.5,avc1.42001e'), {
+ 'vcodec': 'avc1.42001e',
+ 'acodec': 'mp4a.40.5',
+ 'dynamic_range': None,
+ })
+ self.assertEqual(parse_codecs('avc3.640028'), {
+ 'vcodec': 'avc3.640028',
+ 'acodec': 'none',
+ 'dynamic_range': None,
+ })
+ self.assertEqual(parse_codecs(', h264,,newcodec,aac'), {
+ 'vcodec': 'h264',
+ 'acodec': 'aac',
+ 'dynamic_range': None,
+ })
+ self.assertEqual(parse_codecs('av01.0.05M.08'), {
+ 'vcodec': 'av01.0.05M.08',
+ 'acodec': 'none',
+ 'dynamic_range': None,
+ })
+ self.assertEqual(parse_codecs('vp9.2'), {
+ 'vcodec': 'vp9.2',
+ 'acodec': 'none',
+ 'dynamic_range': 'HDR10',
+ })
+ self.assertEqual(parse_codecs('av01.0.12M.10.0.110.09.16.09.0'), {
+ 'vcodec': 'av01.0.12M.10.0.110.09.16.09.0',
+ 'acodec': 'none',
+ 'dynamic_range': 'HDR10',
+ })
+ self.assertEqual(parse_codecs('dvhe'), {
+ 'vcodec': 'dvhe',
+ 'acodec': 'none',
+ 'dynamic_range': 'DV',
+ })
+ self.assertEqual(parse_codecs('theora, vorbis'), {
+ 'vcodec': 'theora',
+ 'acodec': 'vorbis',
+ 'dynamic_range': None,
+ })
+ self.assertEqual(parse_codecs('unknownvcodec, unknownacodec'), {
+ 'vcodec': 'unknownvcodec',
+ 'acodec': 'unknownacodec',
+ })
+ self.assertEqual(parse_codecs('unknown'), {})
+
+ def test_escape_rfc3986(self):
+ reserved = "!*'();:@&=+$,/?#[]"
+ unreserved = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~'
+ self.assertEqual(escape_rfc3986(reserved), reserved)
+ self.assertEqual(escape_rfc3986(unreserved), unreserved)
+ self.assertEqual(escape_rfc3986('тест'), '%D1%82%D0%B5%D1%81%D1%82')
+ self.assertEqual(escape_rfc3986('%D1%82%D0%B5%D1%81%D1%82'), '%D1%82%D0%B5%D1%81%D1%82')
+ self.assertEqual(escape_rfc3986('foo bar'), 'foo%20bar')
+ self.assertEqual(escape_rfc3986('foo%20bar'), 'foo%20bar')
+
+ def test_normalize_url(self):
+ self.assertEqual(
+ normalize_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'),
+ 'http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavre%CC%81_FD.mp4'
+ )
+ self.assertEqual(
+ normalize_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'),
+ 'http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erkl%C3%A4rt/Das-Erste/Video?documentId=22673108&bcastId=5290'
+ )
+ self.assertEqual(
+ normalize_url('http://тест.рф/фрагмент'),
+ 'http://xn--e1aybc.xn--p1ai/%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82'
+ )
+ self.assertEqual(
+ normalize_url('http://тест.рф/абв?абв=абв#абв'),
+ 'http://xn--e1aybc.xn--p1ai/%D0%B0%D0%B1%D0%B2?%D0%B0%D0%B1%D0%B2=%D0%B0%D0%B1%D0%B2#%D0%B0%D0%B1%D0%B2'
+ )
+ self.assertEqual(normalize_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
+
+ self.assertEqual(normalize_url('http://www.example.com/../a/b/../c/./d.html'), 'http://www.example.com/a/c/d.html')
+
+ def test_remove_dot_segments(self):
+ self.assertEqual(remove_dot_segments('/a/b/c/./../../g'), '/a/g')
+ self.assertEqual(remove_dot_segments('mid/content=5/../6'), 'mid/6')
+ self.assertEqual(remove_dot_segments('/ad/../cd'), '/cd')
+ self.assertEqual(remove_dot_segments('/ad/../cd/'), '/cd/')
+ self.assertEqual(remove_dot_segments('/..'), '/')
+ self.assertEqual(remove_dot_segments('/./'), '/')
+ self.assertEqual(remove_dot_segments('/./a'), '/a')
+ self.assertEqual(remove_dot_segments('/abc/./.././d/././e/.././f/./../../ghi'), '/ghi')
+ self.assertEqual(remove_dot_segments('/'), '/')
+ self.assertEqual(remove_dot_segments('/t'), '/t')
+ self.assertEqual(remove_dot_segments('t'), 't')
+ self.assertEqual(remove_dot_segments(''), '')
+ self.assertEqual(remove_dot_segments('/../a/b/c'), '/a/b/c')
+ self.assertEqual(remove_dot_segments('../a'), 'a')
+ self.assertEqual(remove_dot_segments('./a'), 'a')
+ self.assertEqual(remove_dot_segments('.'), '')
+ self.assertEqual(remove_dot_segments('////'), '////')
+
+ def test_js_to_json_vars_strings(self):
+ self.assertDictEqual(
+ json.loads(js_to_json(
+ '''{
+ 'null': a,
+ 'nullStr': b,
+ 'true': c,
+ 'trueStr': d,
+ 'false': e,
+ 'falseStr': f,
+ 'unresolvedVar': g,
+ }''',
+ {
+ 'a': 'null',
+ 'b': '"null"',
+ 'c': 'true',
+ 'd': '"true"',
+ 'e': 'false',
+ 'f': '"false"',
+ 'g': 'var',
+ }
+ )),
+ {
+ 'null': None,
+ 'nullStr': 'null',
+ 'true': True,
+ 'trueStr': 'true',
+ 'false': False,
+ 'falseStr': 'false',
+ 'unresolvedVar': 'var'
+ }
+ )
+
+ self.assertDictEqual(
+ json.loads(js_to_json(
+ '''{
+ 'int': a,
+ 'intStr': b,
+ 'float': c,
+ 'floatStr': d,
+ }''',
+ {
+ 'a': '123',
+ 'b': '"123"',
+ 'c': '1.23',
+ 'd': '"1.23"',
+ }
+ )),
+ {
+ 'int': 123,
+ 'intStr': '123',
+ 'float': 1.23,
+ 'floatStr': '1.23',
+ }
+ )
+
+ self.assertDictEqual(
+ json.loads(js_to_json(
+ '''{
+ 'object': a,
+ 'objectStr': b,
+ 'array': c,
+ 'arrayStr': d,
+ }''',
+ {
+ 'a': '{}',
+ 'b': '"{}"',
+ 'c': '[]',
+ 'd': '"[]"',
+ }
+ )),
+ {
+ 'object': {},
+ 'objectStr': '{}',
+ 'array': [],
+ 'arrayStr': '[]',
+ }
+ )
+
+ def test_js_to_json_realworld(self):
+ inp = '''{
+ 'clip':{'provider':'pseudo'}
+ }'''
+ self.assertEqual(js_to_json(inp), '''{
+ "clip":{"provider":"pseudo"}
+ }''')
+ json.loads(js_to_json(inp))
+
+ inp = '''{
+ 'playlist':[{'controls':{'all':null}}]
+ }'''
+ self.assertEqual(js_to_json(inp), '''{
+ "playlist":[{"controls":{"all":null}}]
+ }''')
+
+ inp = '''"The CW\\'s \\'Crazy Ex-Girlfriend\\'"'''
+ self.assertEqual(js_to_json(inp), '''"The CW's 'Crazy Ex-Girlfriend'"''')
+
+ inp = '"SAND Number: SAND 2013-7800P\\nPresenter: Tom Russo\\nHabanero Software Training - Xyce Software\\nXyce, Sandia\\u0027s"'
+ json_code = js_to_json(inp)
+ self.assertEqual(json.loads(json_code), json.loads(inp))
+
+ inp = '''{
+ 0:{src:'skipped', type: 'application/dash+xml'},
+ 1:{src:'skipped', type: 'application/vnd.apple.mpegURL'},
+ }'''
+ self.assertEqual(js_to_json(inp), '''{
+ "0":{"src":"skipped", "type": "application/dash+xml"},
+ "1":{"src":"skipped", "type": "application/vnd.apple.mpegURL"}
+ }''')
+
+ inp = '''{"foo":101}'''
+ self.assertEqual(js_to_json(inp), '''{"foo":101}''')
+
+ inp = '''{"duration": "00:01:07"}'''
+ self.assertEqual(js_to_json(inp), '''{"duration": "00:01:07"}''')
+
+ inp = '''{segments: [{"offset":-3.885780586188048e-16,"duration":39.75000000000001}]}'''
+ self.assertEqual(js_to_json(inp), '''{"segments": [{"offset":-3.885780586188048e-16,"duration":39.75000000000001}]}''')
+
+ def test_js_to_json_edgecases(self):
+ on = js_to_json("{abc_def:'1\\'\\\\2\\\\\\'3\"4'}")
+ self.assertEqual(json.loads(on), {"abc_def": "1'\\2\\'3\"4"})
+
+ on = js_to_json('{"abc": true}')
+ self.assertEqual(json.loads(on), {'abc': True})
+
+ # Ignore JavaScript code as well
+ on = js_to_json('''{
+ "x": 1,
+ y: "a",
+ z: some.code
+ }''')
+ d = json.loads(on)
+ self.assertEqual(d['x'], 1)
+ self.assertEqual(d['y'], 'a')
+
+ # Just drop the ! prefix for now, even though this results in a wrong value
+ on = js_to_json('''{
+ a: !0,
+ b: !1,
+ c: !!0,
+ d: !!42.42,
+ e: !!![],
+ f: !"abc",
+ g: !"",
+ !42: 42
+ }''')
+ self.assertEqual(json.loads(on), {
+ 'a': 0,
+ 'b': 1,
+ 'c': 0,
+ 'd': 42.42,
+ 'e': [],
+ 'f': "abc",
+ 'g': "",
+ '42': 42
+ })
+
+ on = js_to_json('["abc", "def",]')
+ self.assertEqual(json.loads(on), ['abc', 'def'])
+
+ on = js_to_json('[/*comment\n*/"abc"/*comment\n*/,/*comment\n*/"def",/*comment\n*/]')
+ self.assertEqual(json.loads(on), ['abc', 'def'])
+
+ on = js_to_json('[//comment\n"abc" //comment\n,//comment\n"def",//comment\n]')
+ self.assertEqual(json.loads(on), ['abc', 'def'])
+
+ on = js_to_json('{"abc": "def",}')
+ self.assertEqual(json.loads(on), {'abc': 'def'})
+
+ on = js_to_json('{/*comment\n*/"abc"/*comment\n*/:/*comment\n*/"def"/*comment\n*/,/*comment\n*/}')
+ self.assertEqual(json.loads(on), {'abc': 'def'})
+
+ on = js_to_json('{ 0: /* " \n */ ",]" , }')
+ self.assertEqual(json.loads(on), {'0': ',]'})
+
+ on = js_to_json('{ /*comment\n*/0/*comment\n*/: /* " \n */ ",]" , }')
+ self.assertEqual(json.loads(on), {'0': ',]'})
+
+ on = js_to_json('{ 0: // comment\n1 }')
+ self.assertEqual(json.loads(on), {'0': 1})
+
+ on = js_to_json(r'["<p>x<\/p>"]')
+ self.assertEqual(json.loads(on), ['<p>x</p>'])
+
+ on = js_to_json(r'["\xaa"]')
+ self.assertEqual(json.loads(on), ['\u00aa'])
+
+ on = js_to_json("['a\\\nb']")
+ self.assertEqual(json.loads(on), ['ab'])
+
+ on = js_to_json("/*comment\n*/[/*comment\n*/'a\\\nb'/*comment\n*/]/*comment\n*/")
+ self.assertEqual(json.loads(on), ['ab'])
+
+ on = js_to_json('{0xff:0xff}')
+ self.assertEqual(json.loads(on), {'255': 255})
+
+ on = js_to_json('{/*comment\n*/0xff/*comment\n*/:/*comment\n*/0xff/*comment\n*/}')
+ self.assertEqual(json.loads(on), {'255': 255})
+
+ on = js_to_json('{077:077}')
+ self.assertEqual(json.loads(on), {'63': 63})
+
+ on = js_to_json('{/*comment\n*/077/*comment\n*/:/*comment\n*/077/*comment\n*/}')
+ self.assertEqual(json.loads(on), {'63': 63})
+
+ on = js_to_json('{42:42}')
+ self.assertEqual(json.loads(on), {'42': 42})
+
+ on = js_to_json('{/*comment\n*/42/*comment\n*/:/*comment\n*/42/*comment\n*/}')
+ self.assertEqual(json.loads(on), {'42': 42})
+
+ on = js_to_json('{42:4.2e1}')
+ self.assertEqual(json.loads(on), {'42': 42.0})
+
+ on = js_to_json('{ "0x40": "0x40" }')
+ self.assertEqual(json.loads(on), {'0x40': '0x40'})
+
+ on = js_to_json('{ "040": "040" }')
+ self.assertEqual(json.loads(on), {'040': '040'})
+
+ on = js_to_json('[1,//{},\n2]')
+ self.assertEqual(json.loads(on), [1, 2])
+
+ on = js_to_json(R'"\^\$\#"')
+ self.assertEqual(json.loads(on), R'^$#', msg='Unnecessary escapes should be stripped')
+
+ on = js_to_json('\'"\\""\'')
+ self.assertEqual(json.loads(on), '"""', msg='Unnecessary quote escape should be escaped')
+
+ on = js_to_json('[new Date("spam"), \'("eggs")\']')
+ self.assertEqual(json.loads(on), ['spam', '("eggs")'], msg='Date regex should match a single string')
+
+ def test_js_to_json_malformed(self):
+ self.assertEqual(js_to_json('42a1'), '42"a1"')
+ self.assertEqual(js_to_json('42a-1'), '42"a"-1')
+
+ def test_js_to_json_template_literal(self):
+ self.assertEqual(js_to_json('`Hello ${name}`', {'name': '"world"'}), '"Hello world"')
+ self.assertEqual(js_to_json('`${name}${name}`', {'name': '"X"'}), '"XX"')
+ self.assertEqual(js_to_json('`${name}${name}`', {'name': '5'}), '"55"')
+ self.assertEqual(js_to_json('`${name}"${name}"`', {'name': '5'}), '"5\\"5\\""')
+ self.assertEqual(js_to_json('`${name}`', {}), '"name"')
+
+ def test_js_to_json_common_constructors(self):
+ self.assertEqual(json.loads(js_to_json('new Map([["a", 5]])')), {'a': 5})
+ self.assertEqual(json.loads(js_to_json('Array(5, 10)')), [5, 10])
+ self.assertEqual(json.loads(js_to_json('new Array(15,5)')), [15, 5])
+ self.assertEqual(json.loads(js_to_json('new Map([Array(5, 10),new Array(15,5)])')), {'5': 10, '15': 5})
+ self.assertEqual(json.loads(js_to_json('new Date("123")')), "123")
+ self.assertEqual(json.loads(js_to_json('new Date(\'2023-10-19\')')), "2023-10-19")
+
+ def test_extract_attributes(self):
+ self.assertEqual(extract_attributes('<e x="y">'), {'x': 'y'})
+ self.assertEqual(extract_attributes("<e x='y'>"), {'x': 'y'})
+ self.assertEqual(extract_attributes('<e x=y>'), {'x': 'y'})
+ self.assertEqual(extract_attributes('<e x="a \'b\' c">'), {'x': "a 'b' c"})
+ self.assertEqual(extract_attributes('<e x=\'a "b" c\'>'), {'x': 'a "b" c'})
+ self.assertEqual(extract_attributes('<e x="&#121;">'), {'x': 'y'})
+ self.assertEqual(extract_attributes('<e x="&#x79;">'), {'x': 'y'})
+ self.assertEqual(extract_attributes('<e x="&amp;">'), {'x': '&'}) # XML
+ self.assertEqual(extract_attributes('<e x="&quot;">'), {'x': '"'})
+ self.assertEqual(extract_attributes('<e x="&pound;">'), {'x': '£'}) # HTML 3.2
+ self.assertEqual(extract_attributes('<e x="&lambda;">'), {'x': 'λ'}) # HTML 4.0
+ self.assertEqual(extract_attributes('<e x="&foo">'), {'x': '&foo'})
+ self.assertEqual(extract_attributes('<e x="\'">'), {'x': "'"})
+ self.assertEqual(extract_attributes('<e x=\'"\'>'), {'x': '"'})
+ self.assertEqual(extract_attributes('<e x >'), {'x': None})
+ self.assertEqual(extract_attributes('<e x=y a>'), {'x': 'y', 'a': None})
+ self.assertEqual(extract_attributes('<e x= y>'), {'x': 'y'})
+ self.assertEqual(extract_attributes('<e x=1 y=2 x=3>'), {'y': '2', 'x': '3'})
+ self.assertEqual(extract_attributes('<e \nx=\ny\n>'), {'x': 'y'})
+ self.assertEqual(extract_attributes('<e \nx=\n"y"\n>'), {'x': 'y'})
+ self.assertEqual(extract_attributes("<e \nx=\n'y'\n>"), {'x': 'y'})
+ self.assertEqual(extract_attributes('<e \nx="\ny\n">'), {'x': '\ny\n'})
+ self.assertEqual(extract_attributes('<e CAPS=x>'), {'caps': 'x'}) # Names lowercased
+ self.assertEqual(extract_attributes('<e x=1 X=2>'), {'x': '2'})
+ self.assertEqual(extract_attributes('<e X=1 x=2>'), {'x': '2'})
+ self.assertEqual(extract_attributes('<e _:funny-name1=1>'), {'_:funny-name1': '1'})
+ self.assertEqual(extract_attributes('<e x="Fáilte 世界 \U0001f600">'), {'x': 'Fáilte 世界 \U0001f600'})
+ self.assertEqual(extract_attributes('<e x="décompose&#769;">'), {'x': 'décompose\u0301'})
+ # "Narrow" Python builds don't support unicode code points outside BMP.
+ try:
+ chr(0x10000)
+ supports_outside_bmp = True
+ except ValueError:
+ supports_outside_bmp = False
+ if supports_outside_bmp:
+ self.assertEqual(extract_attributes('<e x="Smile &#128512;!">'), {'x': 'Smile \U0001f600!'})
+ # Malformed HTML should not break attributes extraction on older Python
+ self.assertEqual(extract_attributes('<mal"formed/>'), {})
+
+ def test_clean_html(self):
+ self.assertEqual(clean_html('a:\nb'), 'a: b')
+ self.assertEqual(clean_html('a:\n "b"'), 'a: "b"')
+ self.assertEqual(clean_html('a<br>\xa0b'), 'a\nb')
+
+ def test_intlist_to_bytes(self):
+ self.assertEqual(
+ intlist_to_bytes([0, 1, 127, 128, 255]),
+ b'\x00\x01\x7f\x80\xff')
+
+ def test_args_to_str(self):
+ self.assertEqual(
+ args_to_str(['foo', 'ba/r', '-baz', '2 be', '']),
+ 'foo ba/r -baz \'2 be\' \'\'' if compat_os_name != 'nt' else 'foo ba/r -baz "2 be" ""'
+ )
+
+ def test_parse_filesize(self):
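+ # Binary prefixes (KiB, MiB, ...) scale by powers of 1024, while decimal ones (KB, GB, Tb, megabytes) scale by powers of 1000; ',' is accepted as a decimal separator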
+ self.assertEqual(parse_filesize(None), None)
+ self.assertEqual(parse_filesize(''), None)
+ self.assertEqual(parse_filesize('91 B'), 91)
+ self.assertEqual(parse_filesize('foobar'), None)
+ self.assertEqual(parse_filesize('2 MiB'), 2097152)
+ self.assertEqual(parse_filesize('5 GB'), 5000000000)
+ self.assertEqual(parse_filesize('1.2Tb'), 1200000000000)
+ self.assertEqual(parse_filesize('1.2tb'), 1200000000000)
+ self.assertEqual(parse_filesize('1,24 KB'), 1240)
+ self.assertEqual(parse_filesize('1,24 kb'), 1240)
+ self.assertEqual(parse_filesize('8.5 megabytes'), 8500000)
+
+ def test_parse_count(self):
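+ # Suffixes scale by 1000 per letter ('k' = thousand, 'kk'/'M' = million); both '.' and ',' work as decimal separators, and trailing words like 'views' are ignored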
+ self.assertEqual(parse_count(None), None)
+ self.assertEqual(parse_count(''), None)
+ self.assertEqual(parse_count('0'), 0)
+ self.assertEqual(parse_count('1000'), 1000)
+ self.assertEqual(parse_count('1.000'), 1000)
+ self.assertEqual(parse_count('1.1k'), 1100)
+ self.assertEqual(parse_count('1.1 k'), 1100)
+ self.assertEqual(parse_count('1,1 k'), 1100)
+ self.assertEqual(parse_count('1.1kk'), 1100000)
+ self.assertEqual(parse_count('1.1kk '), 1100000)
+ self.assertEqual(parse_count('1,1kk'), 1100000)
+ self.assertEqual(parse_count('100 views'), 100)
+ self.assertEqual(parse_count('1,100 views'), 1100)
+ self.assertEqual(parse_count('1.1kk views'), 1100000)
+ self.assertEqual(parse_count('10M views'), 10000000)
+ self.assertEqual(parse_count('has 10M views'), 10000000)
+
+ def test_parse_resolution(self):
+ self.assertEqual(parse_resolution(None), {})
+ self.assertEqual(parse_resolution(''), {})
+ self.assertEqual(parse_resolution(' 1920x1080'), {'width': 1920, 'height': 1080})
+ self.assertEqual(parse_resolution('1920×1080 '), {'width': 1920, 'height': 1080})
+ self.assertEqual(parse_resolution('1920 x 1080'), {'width': 1920, 'height': 1080})
+ self.assertEqual(parse_resolution('720p'), {'height': 720})
+ self.assertEqual(parse_resolution('4k'), {'height': 2160})
+ self.assertEqual(parse_resolution('8K'), {'height': 4320})
+ self.assertEqual(parse_resolution('pre_1920x1080_post'), {'width': 1920, 'height': 1080})
+ self.assertEqual(parse_resolution('ep1x2'), {})
+ self.assertEqual(parse_resolution('1920, 1080'), {'width': 1920, 'height': 1080})
+
+ def test_parse_bitrate(self):
+ self.assertEqual(parse_bitrate(None), None)
+ self.assertEqual(parse_bitrate(''), None)
+ self.assertEqual(parse_bitrate('300kbps'), 300)
+ self.assertEqual(parse_bitrate('1500kbps'), 1500)
+ self.assertEqual(parse_bitrate('300 kbps'), 300)
+
+ def test_version_tuple(self):
+ self.assertEqual(version_tuple('1'), (1,))
+ self.assertEqual(version_tuple('10.23.344'), (10, 23, 344))
+ self.assertEqual(version_tuple('10.1-6'), (10, 1, 6)) # avconv style
+
+ def test_detect_exe_version(self):
+ self.assertEqual(detect_exe_version('''ffmpeg version 1.2.1
+built on May 27 2013 08:37:26 with gcc 4.7 (Debian 4.7.3-4)
+configuration: --prefix=/usr --extra-'''), '1.2.1')
+ self.assertEqual(detect_exe_version('''ffmpeg version N-63176-g1fb4685
+built on May 15 2014 22:09:06 with gcc 4.8.2 (GCC)'''), 'N-63176-g1fb4685')
+ self.assertEqual(detect_exe_version('''X server found. dri2 connection failed!
+Trying to open render node...
+Success at /dev/dri/renderD128.
+ffmpeg version 2.4.4 Copyright (c) 2000-2014 the FFmpeg ...'''), '2.4.4')
+
+ def test_age_restricted(self):
+ self.assertFalse(age_restricted(None, 10)) # unrestricted content
+ self.assertFalse(age_restricted(1, None)) # unrestricted policy
+ self.assertFalse(age_restricted(8, 10))
+ self.assertTrue(age_restricted(18, 14))
+ self.assertFalse(age_restricted(18, 18))
+
+ def test_is_html(self):
+ self.assertFalse(is_html(b'\x49\x44\x43<html'))
+ self.assertTrue(is_html(b'<!DOCTYPE foo>\xaaa'))
+ self.assertTrue(is_html( # UTF-8 with BOM
+ b'\xef\xbb\xbf<!DOCTYPE foo>\xaaa'))
+ self.assertTrue(is_html( # UTF-16-LE
+ b'\xff\xfe<\x00h\x00t\x00m\x00l\x00>\x00\xe4\x00'
+ ))
+ self.assertTrue(is_html( # UTF-16-BE
+ b'\xfe\xff\x00<\x00h\x00t\x00m\x00l\x00>\x00\xe4'
+ ))
+ self.assertTrue(is_html( # UTF-32-BE
+ b'\x00\x00\xFE\xFF\x00\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4'))
+ self.assertTrue(is_html( # UTF-32-LE
+ b'\xFF\xFE\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4\x00\x00\x00'))
+
+ def test_render_table(self):
+ self.assertEqual(
+ render_table(
+ ['a', 'empty', 'bcd'],
+ [[123, '', 4], [9999, '', 51]]),
+ 'a empty bcd\n'
+ '123 4\n'
+ '9999 51')
+
+ self.assertEqual(
+ render_table(
+ ['a', 'empty', 'bcd'],
+ [[123, '', 4], [9999, '', 51]],
+ hide_empty=True),
+ 'a bcd\n'
+ '123 4\n'
+ '9999 51')
+
+ self.assertEqual(
+ render_table(
+ ['\ta', 'bcd'],
+ [['1\t23', 4], ['\t9999', 51]]),
+ ' a bcd\n'
+ '1 23 4\n'
+ '9999 51')
+
+ self.assertEqual(
+ render_table(
+ ['a', 'bcd'],
+ [[123, 4], [9999, 51]],
+ delim='-'),
+ 'a bcd\n'
+ '--------\n'
+ '123 4\n'
+ '9999 51')
+
+ self.assertEqual(
+ render_table(
+ ['a', 'bcd'],
+ [[123, 4], [9999, 51]],
+ delim='-', extra_gap=2),
+ 'a bcd\n'
+ '----------\n'
+ '123 4\n'
+ '9999 51')
+
+ def test_match_str(self):
+ # Unary
+ self.assertFalse(match_str('xy', {'x': 1200}))
+ self.assertTrue(match_str('!xy', {'x': 1200}))
+ self.assertTrue(match_str('x', {'x': 1200}))
+ self.assertFalse(match_str('!x', {'x': 1200}))
+ self.assertTrue(match_str('x', {'x': 0}))
+ self.assertTrue(match_str('is_live', {'is_live': True}))
+ self.assertFalse(match_str('is_live', {'is_live': False}))
+ self.assertFalse(match_str('is_live', {'is_live': None}))
+ self.assertFalse(match_str('is_live', {}))
+ self.assertFalse(match_str('!is_live', {'is_live': True}))
+ self.assertTrue(match_str('!is_live', {'is_live': False}))
+ self.assertTrue(match_str('!is_live', {'is_live': None}))
+ self.assertTrue(match_str('!is_live', {}))
+ self.assertTrue(match_str('title', {'title': 'abc'}))
+ self.assertTrue(match_str('title', {'title': ''}))
+ self.assertFalse(match_str('!title', {'title': 'abc'}))
+ self.assertFalse(match_str('!title', {'title': ''}))
+
+ # Numeric
+ self.assertFalse(match_str('x>0', {'x': 0}))
+ self.assertFalse(match_str('x>0', {}))
+ self.assertTrue(match_str('x>?0', {}))
+ self.assertTrue(match_str('x>1K', {'x': 1200}))
+ self.assertFalse(match_str('x>2K', {'x': 1200}))
+ self.assertTrue(match_str('x>=1200 & x < 1300', {'x': 1200}))
+ self.assertFalse(match_str('x>=1100 & x < 1200', {'x': 1200}))
+ self.assertTrue(match_str('x > 1:0:0', {'x': 3700}))
+
+ # String
+ self.assertFalse(match_str('y=a212', {'y': 'foobar42'}))
+ self.assertTrue(match_str('y=foobar42', {'y': 'foobar42'}))
+ self.assertFalse(match_str('y!=foobar42', {'y': 'foobar42'}))
+ self.assertTrue(match_str('y!=foobar2', {'y': 'foobar42'}))
+ self.assertTrue(match_str('y^=foo', {'y': 'foobar42'}))
+ self.assertFalse(match_str('y!^=foo', {'y': 'foobar42'}))
+ self.assertFalse(match_str('y^=bar', {'y': 'foobar42'}))
+ self.assertTrue(match_str('y!^=bar', {'y': 'foobar42'}))
+ self.assertRaises(ValueError, match_str, 'x^=42', {'x': 42})
+ self.assertTrue(match_str('y*=bar', {'y': 'foobar42'}))
+ self.assertFalse(match_str('y!*=bar', {'y': 'foobar42'}))
+ self.assertFalse(match_str('y*=baz', {'y': 'foobar42'}))
+ self.assertTrue(match_str('y!*=baz', {'y': 'foobar42'}))
+ self.assertTrue(match_str('y$=42', {'y': 'foobar42'}))
+ self.assertFalse(match_str('y$=43', {'y': 'foobar42'}))
+
+ # And
+ self.assertFalse(match_str(
+ 'like_count > 100 & dislike_count <? 50 & description',
+ {'like_count': 90, 'description': 'foo'}))
+ self.assertTrue(match_str(
+ 'like_count > 100 & dislike_count <? 50 & description',
+ {'like_count': 190, 'description': 'foo'}))
+ self.assertFalse(match_str(
+ 'like_count > 100 & dislike_count <? 50 & description',
+ {'like_count': 190, 'dislike_count': 60, 'description': 'foo'}))
+ self.assertFalse(match_str(
+ 'like_count > 100 & dislike_count <? 50 & description',
+ {'like_count': 190, 'dislike_count': 10}))
+
+ # Regex
+ self.assertTrue(match_str(r'x~=\bbar', {'x': 'foo bar'}))
+ self.assertFalse(match_str(r'x~=\bbar.+', {'x': 'foo bar'}))
+ self.assertFalse(match_str(r'x~=^FOO', {'x': 'foo bar'}))
+ self.assertTrue(match_str(r'x~=(?i)^FOO', {'x': 'foo bar'}))
+
+ # Quotes
+ self.assertTrue(match_str(r'x^="foo"', {'x': 'foo "bar"'}))
+ self.assertFalse(match_str(r'x^="foo "', {'x': 'foo "bar"'}))
+ self.assertFalse(match_str(r'x$="bar"', {'x': 'foo "bar"'}))
+ self.assertTrue(match_str(r'x$=" \"bar\""', {'x': 'foo "bar"'}))
+
+ # Escaping &
+ self.assertFalse(match_str(r'x=foo & bar', {'x': 'foo & bar'}))
+ self.assertTrue(match_str(r'x=foo \& bar', {'x': 'foo & bar'}))
+ self.assertTrue(match_str(r'x=foo \& bar & x^=foo', {'x': 'foo & bar'}))
+ self.assertTrue(match_str(r'x="foo \& bar" & x^=foo', {'x': 'foo & bar'}))
+
+ # Example from docs
+ self.assertTrue(match_str(
+ r"!is_live & like_count>?100 & description~='(?i)\bcats \& dogs\b'",
+ {'description': 'Raining Cats & Dogs'}))
+
+ # Incomplete
+ self.assertFalse(match_str('id!=foo', {'id': 'foo'}, True))
+ self.assertTrue(match_str('x', {'id': 'foo'}, True))
+ self.assertTrue(match_str('!x', {'id': 'foo'}, True))
+ self.assertFalse(match_str('x', {'id': 'foo'}, False))
+
+ def test_parse_dfxp_time_expr(self):
+ self.assertEqual(parse_dfxp_time_expr(None), None)
+ self.assertEqual(parse_dfxp_time_expr(''), None)
+ self.assertEqual(parse_dfxp_time_expr('0.1'), 0.1)
+ self.assertEqual(parse_dfxp_time_expr('0.1s'), 0.1)
+ self.assertEqual(parse_dfxp_time_expr('00:00:01'), 1.0)
+ self.assertEqual(parse_dfxp_time_expr('00:00:01.100'), 1.1)
+ self.assertEqual(parse_dfxp_time_expr('00:00:01:100'), 1.1)
+
+ def test_dfxp2srt(self):
+ dfxp_data = '''<?xml version="1.0" encoding="UTF-8"?>
+ <tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
+ <body>
+ <div xml:lang="en">
+ <p begin="0" end="1">The following line contains Chinese characters and special symbols</p>
+ <p begin="1" end="2">第二行<br/>♪♪</p>
+ <p begin="2" dur="1"><span>Third<br/>Line</span></p>
+ <p begin="3" end="-1">Lines with invalid timestamps are ignored</p>
+ <p begin="-1" end="-1">Ignore, two</p>
+ <p begin="3" dur="-1">Ignored, three</p>
+ </div>
+ </body>
+ </tt>'''.encode()
+ srt_data = '''1
+00:00:00,000 --> 00:00:01,000
+The following line contains Chinese characters and special symbols
+
+2
+00:00:01,000 --> 00:00:02,000
+第二行
+♪♪
+
+3
+00:00:02,000 --> 00:00:03,000
+Third
+Line
+
+'''
+ self.assertEqual(dfxp2srt(dfxp_data), srt_data)
+
+ dfxp_data_no_default_namespace = b'''<?xml version="1.0" encoding="UTF-8"?>
+ <tt xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
+ <body>
+ <div xml:lang="en">
+ <p begin="0" end="1">The first line</p>
+ </div>
+ </body>
+ </tt>'''
+ srt_data = '''1
+00:00:00,000 --> 00:00:01,000
+The first line
+
+'''
+ self.assertEqual(dfxp2srt(dfxp_data_no_default_namespace), srt_data)
+
+ dfxp_data_with_style = b'''<?xml version="1.0" encoding="utf-8"?>
+<tt xmlns="http://www.w3.org/2006/10/ttaf1" xmlns:ttp="http://www.w3.org/2006/10/ttaf1#parameter" ttp:timeBase="media" xmlns:tts="http://www.w3.org/2006/10/ttaf1#style" xml:lang="en" xmlns:ttm="http://www.w3.org/2006/10/ttaf1#metadata">
+ <head>
+ <styling>
+ <style id="s2" style="s0" tts:color="cyan" tts:fontWeight="bold" />
+ <style id="s1" style="s0" tts:color="yellow" tts:fontStyle="italic" />
+ <style id="s3" style="s0" tts:color="lime" tts:textDecoration="underline" />
+ <style id="s0" tts:backgroundColor="black" tts:fontStyle="normal" tts:fontSize="16" tts:fontFamily="sansSerif" tts:color="white" />
+ </styling>
+ </head>
+ <body tts:textAlign="center" style="s0">
+ <div>
+ <p begin="00:00:02.08" id="p0" end="00:00:05.84">default style<span tts:color="red">custom style</span></p>
+ <p style="s2" begin="00:00:02.08" id="p0" end="00:00:05.84"><span tts:color="lime">part 1<br /></span><span tts:color="cyan">part 2</span></p>
+ <p style="s3" begin="00:00:05.84" id="p1" end="00:00:09.56">line 3<br />part 3</p>
+ <p style="s1" tts:textDecoration="underline" begin="00:00:09.56" id="p2" end="00:00:12.36"><span style="s2" tts:color="lime">inner<br /> </span>style</p>
+ </div>
+ </body>
+</tt>'''
+ srt_data = '''1
+00:00:02,080 --> 00:00:05,840
+<font color="white" face="sansSerif" size="16">default style<font color="red">custom style</font></font>
+
+2
+00:00:02,080 --> 00:00:05,840
+<b><font color="cyan" face="sansSerif" size="16"><font color="lime">part 1
+</font>part 2</font></b>
+
+3
+00:00:05,840 --> 00:00:09,560
+<u><font color="lime">line 3
+part 3</font></u>
+
+4
+00:00:09,560 --> 00:00:12,360
+<i><u><font color="yellow"><font color="lime">inner
+ </font>style</font></u></i>
+
+'''
+ self.assertEqual(dfxp2srt(dfxp_data_with_style), srt_data)
+
+ dfxp_data_non_utf8 = '''<?xml version="1.0" encoding="UTF-16"?>
+ <tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
+ <body>
+ <div xml:lang="en">
+ <p begin="0" end="1">Line 1</p>
+ <p begin="1" end="2">第二行</p>
+ </div>
+ </body>
+ </tt>'''.encode('utf-16')
+ srt_data = '''1
+00:00:00,000 --> 00:00:01,000
+Line 1
+
+2
+00:00:01,000 --> 00:00:02,000
+第二行
+
+'''
+ self.assertEqual(dfxp2srt(dfxp_data_non_utf8), srt_data)
+
+ def test_cli_option(self):
+ self.assertEqual(cli_option({'proxy': '127.0.0.1:3128'}, '--proxy', 'proxy'), ['--proxy', '127.0.0.1:3128'])
+ self.assertEqual(cli_option({'proxy': None}, '--proxy', 'proxy'), [])
+ self.assertEqual(cli_option({}, '--proxy', 'proxy'), [])
+ self.assertEqual(cli_option({'retries': 10}, '--retries', 'retries'), ['--retries', '10'])
+
+ def test_cli_valueless_option(self):
+ self.assertEqual(cli_valueless_option(
+ {'downloader': 'external'}, '--external-downloader', 'downloader', 'external'), ['--external-downloader'])
+ self.assertEqual(cli_valueless_option(
+ {'downloader': 'internal'}, '--external-downloader', 'downloader', 'external'), [])
+ self.assertEqual(cli_valueless_option(
+ {'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'), ['--no-check-certificate'])
+ self.assertEqual(cli_valueless_option(
+ {'nocheckcertificate': False}, '--no-check-certificate', 'nocheckcertificate'), [])
+ self.assertEqual(cli_valueless_option(
+ {'checkcertificate': True}, '--no-check-certificate', 'checkcertificate', False), [])
+ self.assertEqual(cli_valueless_option(
+ {'checkcertificate': False}, '--no-check-certificate', 'checkcertificate', False), ['--no-check-certificate'])
+
+ def test_cli_bool_option(self):
+ self.assertEqual(
+ cli_bool_option(
+ {'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'),
+ ['--no-check-certificate', 'true'])
+ self.assertEqual(
+ cli_bool_option(
+ {'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate', separator='='),
+ ['--no-check-certificate=true'])
+ self.assertEqual(
+ cli_bool_option(
+ {'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
+ ['--check-certificate', 'false'])
+ self.assertEqual(
+ cli_bool_option(
+ {'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
+ ['--check-certificate=false'])
+ self.assertEqual(
+ cli_bool_option(
+ {'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
+ ['--check-certificate', 'true'])
+ self.assertEqual(
+ cli_bool_option(
+ {'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
+ ['--check-certificate=true'])
+ self.assertEqual(
+ cli_bool_option(
+ {}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
+ [])
+
+ def test_ohdave_rsa_encrypt(self):
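+ # ohdave_rsa_encrypt() applies textbook RSA (data ** e mod N) and returns the result as a lowercase hex string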
+ N = 0xab86b6371b5318aaa1d3c9e612a9f1264f372323c8c0f19875b5fc3b3fd3afcc1e5bec527aa94bfa85bffc157e4245aebda05389a5357b75115ac94f074aefcd
+ e = 65537
+
+ self.assertEqual(
+ ohdave_rsa_encrypt(b'aa111222', e, N),
+ '726664bd9a23fd0c70f9f1b84aab5e3905ce1e45a584e9cbcf9bcc7510338fc1986d6c599ff990d923aa43c51c0d9013cd572e13bc58f4ae48f2ed8c0b0ba881')
+
+ def test_pkcs1pad(self):
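+ # PKCS#1 v1.5 type 2 padding: [0, 2, <non-zero random bytes>, 0, <data>]; the target length must leave at least 11 bytes of overhead, hence the ValueError below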
+ data = [1, 2, 3]
+ padded_data = pkcs1pad(data, 32)
+ self.assertEqual(padded_data[:2], [0, 2])
+ self.assertEqual(padded_data[28:], [0, 1, 2, 3])
+
+ self.assertRaises(ValueError, pkcs1pad, data, 8)
+
+ def test_encode_base_n(self):
+ self.assertEqual(encode_base_n(0, 30), '0')
+ self.assertEqual(encode_base_n(80, 30), '2k')
+
+ custom_table = '9876543210ZYXWVUTSRQPONMLKJIHGFEDCBA'
+ self.assertEqual(encode_base_n(0, 30, custom_table), '9')
+ self.assertEqual(encode_base_n(80, 30, custom_table), '7P')
+
+ self.assertRaises(ValueError, encode_base_n, 0, 70)
+ self.assertRaises(ValueError, encode_base_n, 0, 60, custom_table)
+
+ def test_caesar(self):
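+ # caesar() shifts only characters that appear in the supplied alphabet; anything else is passed through unchanged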
+ self.assertEqual(caesar('ace', 'abcdef', 2), 'cea')
+ self.assertEqual(caesar('cea', 'abcdef', -2), 'ace')
+ self.assertEqual(caesar('ace', 'abcdef', -2), 'eac')
+ self.assertEqual(caesar('eac', 'abcdef', 2), 'ace')
+ self.assertEqual(caesar('ace', 'abcdef', 0), 'ace')
+ self.assertEqual(caesar('xyz', 'abcdef', 2), 'xyz')
+ self.assertEqual(caesar('abc', 'acegik', 2), 'ebg')
+ self.assertEqual(caesar('ebg', 'acegik', -2), 'abc')
+
+ def test_rot47(self):
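+ # ROT47 rotates the printable ASCII range (33-126) by 47 positions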
+ self.assertEqual(rot47('yt-dlp'), r'JE\5=A')
+ self.assertEqual(rot47('YT-DLP'), r'*%\s{!')
+
+ def test_urshift(self):
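+ # urshift() emulates JavaScript's unsigned right shift (>>>) on 32-bit integers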
+ self.assertEqual(urshift(3, 1), 1)
+ self.assertEqual(urshift(-3, 1), 2147483646)
+
+ GET_ELEMENT_BY_CLASS_TEST_STRING = '''
+ <span class="foo bar">nice</span>
+ '''
+
+ def test_get_element_by_class(self):
+ html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
+
+ self.assertEqual(get_element_by_class('foo', html), 'nice')
+ self.assertEqual(get_element_by_class('no-such-class', html), None)
+
+ def test_get_element_html_by_class(self):
+ html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
+
+ self.assertEqual(get_element_html_by_class('foo', html), html.strip())
+ self.assertEqual(get_element_by_class('no-such-class', html), None)
+
+ GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING = '''
+ <div itemprop="author" itemscope>foo</div>
+ '''
+
+ def test_get_element_by_attribute(self):
+ html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
+
+ self.assertEqual(get_element_by_attribute('class', 'foo bar', html), 'nice')
+ self.assertEqual(get_element_by_attribute('class', 'foo', html), None)
+ self.assertEqual(get_element_by_attribute('class', 'no-such-foo', html), None)
+
+ html = self.GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING
+
+ self.assertEqual(get_element_by_attribute('itemprop', 'author', html), 'foo')
+
+ def test_get_element_html_by_attribute(self):
+ html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
+
+ self.assertEqual(get_element_html_by_attribute('class', 'foo bar', html), html.strip())
+ self.assertEqual(get_element_html_by_attribute('class', 'foo', html), None)
+ self.assertEqual(get_element_html_by_attribute('class', 'no-such-foo', html), None)
+
+ html = self.GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING
+
+ self.assertEqual(get_element_html_by_attribute('itemprop', 'author', html), html.strip())
+
+ GET_ELEMENTS_BY_CLASS_TEST_STRING = '''
+ <span class="foo bar">nice</span><span class="foo bar">also nice</span>
+ '''
+ GET_ELEMENTS_BY_CLASS_RES = ['<span class="foo bar">nice</span>', '<span class="foo bar">also nice</span>']
+
+ def test_get_elements_by_class(self):
+ html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
+
+ self.assertEqual(get_elements_by_class('foo', html), ['nice', 'also nice'])
+ self.assertEqual(get_elements_by_class('no-such-class', html), [])
+
+ def test_get_elements_html_by_class(self):
+ html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
+
+ self.assertEqual(get_elements_html_by_class('foo', html), self.GET_ELEMENTS_BY_CLASS_RES)
+ self.assertEqual(get_elements_html_by_class('no-such-class', html), [])
+
+ def test_get_elements_by_attribute(self):
+ html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
+
+ self.assertEqual(get_elements_by_attribute('class', 'foo bar', html), ['nice', 'also nice'])
+ self.assertEqual(get_elements_by_attribute('class', 'foo', html), [])
+ self.assertEqual(get_elements_by_attribute('class', 'no-such-foo', html), [])
+
+ def test_get_elements_html_by_attribute(self):
+ html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
+
+ self.assertEqual(get_elements_html_by_attribute('class', 'foo bar', html), self.GET_ELEMENTS_BY_CLASS_RES)
+ self.assertEqual(get_elements_html_by_attribute('class', 'foo', html), [])
+ self.assertEqual(get_elements_html_by_attribute('class', 'no-such-foo', html), [])
+
+ def test_get_elements_text_and_html_by_attribute(self):
+ html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
+
+ self.assertEqual(
+ list(get_elements_text_and_html_by_attribute('class', 'foo bar', html)),
+ list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES)))
+ self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
+ self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])
+
+ self.assertEqual(list(get_elements_text_and_html_by_attribute(
+ 'class', 'foo', '<a class="foo">nice</a><span class="foo">nice</span>', tag='a')), [('nice', '<a class="foo">nice</a>')])
+
+ GET_ELEMENT_BY_TAG_TEST_STRING = '''
+ random text lorem ipsum</p>
+ <div>
+ this should be returned
+ <span>this should also be returned</span>
+ <div>
+ this should also be returned
+ </div>
+ closing tag above should not trick, so this should also be returned
+ </div>
+ but this text should not be returned
+ '''
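+ # The slice indices below cut the outer <div>...</div> and the inner <span>...</span> (with and without the enclosing tags) out of the test string above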
+ GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML = GET_ELEMENT_BY_TAG_TEST_STRING.strip()[32:276]
+ GET_ELEMENT_BY_TAG_RES_OUTERDIV_TEXT = GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML[5:-6]
+ GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML = GET_ELEMENT_BY_TAG_TEST_STRING.strip()[78:119]
+ GET_ELEMENT_BY_TAG_RES_INNERSPAN_TEXT = GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML[6:-7]
+
+ def test_get_element_text_and_html_by_tag(self):
+ html = self.GET_ELEMENT_BY_TAG_TEST_STRING
+
+ self.assertEqual(
+ get_element_text_and_html_by_tag('div', html),
+ (self.GET_ELEMENT_BY_TAG_RES_OUTERDIV_TEXT, self.GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML))
+ self.assertEqual(
+ get_element_text_and_html_by_tag('span', html),
+ (self.GET_ELEMENT_BY_TAG_RES_INNERSPAN_TEXT, self.GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML))
+ self.assertRaises(compat_HTMLParseError, get_element_text_and_html_by_tag, 'article', html)
+
+ def test_iri_to_uri(self):
+ self.assertEqual(
+ iri_to_uri('https://www.google.com/search?q=foo&ie=utf-8&oe=utf-8&client=firefox-b'),
+ 'https://www.google.com/search?q=foo&ie=utf-8&oe=utf-8&client=firefox-b') # Same
+ self.assertEqual(
+ iri_to_uri('https://www.google.com/search?q=Käsesoßenrührlöffel'), # German for cheese sauce stirring spoon
+ 'https://www.google.com/search?q=K%C3%A4seso%C3%9Fenr%C3%BChrl%C3%B6ffel')
+ self.assertEqual(
+ iri_to_uri('https://www.google.com/search?q=lt<+gt>+eq%3D+amp%26+percent%25+hash%23+colon%3A+tilde~#trash=?&garbage=#'),
+ 'https://www.google.com/search?q=lt%3C+gt%3E+eq%3D+amp%26+percent%25+hash%23+colon%3A+tilde~#trash=?&garbage=#')
+ self.assertEqual(
+ iri_to_uri('http://правозащита38.рф/category/news/'),
+ 'http://xn--38-6kcaak9aj5chl4a3g.xn--p1ai/category/news/')
+ self.assertEqual(
+ iri_to_uri('http://www.правозащита38.рф/category/news/'),
+ 'http://www.xn--38-6kcaak9aj5chl4a3g.xn--p1ai/category/news/')
+ self.assertEqual(
+ iri_to_uri('https://i❤.ws/emojidomain/👍👏🤝💪'),
+ 'https://xn--i-7iq.ws/emojidomain/%F0%9F%91%8D%F0%9F%91%8F%F0%9F%A4%9D%F0%9F%92%AA')
+ self.assertEqual(
+ iri_to_uri('http://日本語.jp/'),
+ 'http://xn--wgv71a119e.jp/')
+ self.assertEqual(
+ iri_to_uri('http://导航.中国/'),
+ 'http://xn--fet810g.xn--fiqs8s/')
+
+ def test_clean_podcast_url(self):
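+ # clean_podcast_url() strips known tracking/measurement prefixes (podtrac, chtbl, pdst.fm, mgln.ai, ...) to recover the direct media URL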
+ self.assertEqual(clean_podcast_url('https://www.podtrac.com/pts/redirect.mp3/chtbl.com/track/5899E/traffic.megaphone.fm/HSW7835899191.mp3'), 'https://traffic.megaphone.fm/HSW7835899191.mp3')
+ self.assertEqual(clean_podcast_url('https://play.podtrac.com/npr-344098539/edge1.pod.npr.org/anon.npr-podcasts/podcast/npr/waitwait/2020/10/20201003_waitwait_wwdtmpodcast201003-015621a5-f035-4eca-a9a1-7c118d90bc3c.mp3'), 'https://edge1.pod.npr.org/anon.npr-podcasts/podcast/npr/waitwait/2020/10/20201003_waitwait_wwdtmpodcast201003-015621a5-f035-4eca-a9a1-7c118d90bc3c.mp3')
+ self.assertEqual(clean_podcast_url('https://pdst.fm/e/2.gum.fm/chtbl.com/track/chrt.fm/track/34D33/pscrb.fm/rss/p/traffic.megaphone.fm/ITLLC7765286967.mp3?updated=1687282661'), 'https://traffic.megaphone.fm/ITLLC7765286967.mp3?updated=1687282661')
+ self.assertEqual(clean_podcast_url('https://pdst.fm/e/https://mgln.ai/e/441/www.buzzsprout.com/1121972/13019085-ep-252-the-deep-life-stack.mp3'), 'https://www.buzzsprout.com/1121972/13019085-ep-252-the-deep-life-stack.mp3')
+
+ def test_LazyList(self):
+ it = list(range(10))
+
+ self.assertEqual(list(LazyList(it)), it)
+ self.assertEqual(LazyList(it).exhaust(), it)
+ self.assertEqual(LazyList(it)[5], it[5])
+
+ self.assertEqual(LazyList(it)[5:], it[5:])
+ self.assertEqual(LazyList(it)[:5], it[:5])
+ self.assertEqual(LazyList(it)[::2], it[::2])
+ self.assertEqual(LazyList(it)[1::2], it[1::2])
+ self.assertEqual(LazyList(it)[5::-1], it[5::-1])
+ self.assertEqual(LazyList(it)[6:2:-2], it[6:2:-2])
+ self.assertEqual(LazyList(it)[::-1], it[::-1])
+
+ self.assertTrue(LazyList(it))
+ self.assertFalse(LazyList(range(0)))
+ self.assertEqual(len(LazyList(it)), len(it))
+ self.assertEqual(repr(LazyList(it)), repr(it))
+ self.assertEqual(str(LazyList(it)), str(it))
+
+ self.assertEqual(list(LazyList(it, reverse=True)), it[::-1])
+ self.assertEqual(list(reversed(LazyList(it))[::-1]), it)
+ self.assertEqual(list(reversed(LazyList(it))[1:3:7]), it[::-1][1:3:7])
+
+ def test_LazyList_laziness(self):
+
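+ # The helper checks both the returned value and that ll._cache only holds the items consumed from the underlying iterable so far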
+ def test(ll, idx, val, cache):
+ self.assertEqual(ll[idx], val)
+ self.assertEqual(ll._cache, list(cache))
+
+ ll = LazyList(range(10))
+ test(ll, 0, 0, range(1))
+ test(ll, 5, 5, range(6))
+ test(ll, -3, 7, range(10))
+
+ ll = LazyList(range(10), reverse=True)
+ test(ll, -1, 0, range(1))
+ test(ll, 3, 6, range(10))
+
+ ll = LazyList(itertools.count())
+ test(ll, 10, 10, range(11))
+ ll = reversed(ll)
+ test(ll, -15, 14, range(15))
+
+ def test_format_bytes(self):
+ self.assertEqual(format_bytes(0), '0.00B')
+ self.assertEqual(format_bytes(1000), '1000.00B')
+ self.assertEqual(format_bytes(1024), '1.00KiB')
+ self.assertEqual(format_bytes(1024**2), '1.00MiB')
+ self.assertEqual(format_bytes(1024**3), '1.00GiB')
+ self.assertEqual(format_bytes(1024**4), '1.00TiB')
+ self.assertEqual(format_bytes(1024**5), '1.00PiB')
+ self.assertEqual(format_bytes(1024**6), '1.00EiB')
+ self.assertEqual(format_bytes(1024**7), '1.00ZiB')
+ self.assertEqual(format_bytes(1024**8), '1.00YiB')
+ self.assertEqual(format_bytes(1024**9), '1024.00YiB')
+
+ def test_hide_login_info(self):
+ self.assertEqual(Config.hide_login_info(['-u', 'foo', '-p', 'bar']),
+ ['-u', 'PRIVATE', '-p', 'PRIVATE'])
+ self.assertEqual(Config.hide_login_info(['-u']), ['-u'])
+ self.assertEqual(Config.hide_login_info(['-u', 'foo', '-u', 'bar']),
+ ['-u', 'PRIVATE', '-u', 'PRIVATE'])
+ self.assertEqual(Config.hide_login_info(['--username=foo']),
+ ['--username=PRIVATE'])
+
+ def test_locked_file(self):
+ TEXT = 'test_locked_file\n'
+ FILE = 'test_locked_file.ytdl'
+ MODES = 'war' # Order is important
+
+ try:
+ for lock_mode in MODES:
+ with locked_file(FILE, lock_mode, False) as f:
+ if lock_mode == 'r':
+ self.assertEqual(f.read(), TEXT * 2, 'Wrong file content')
+ else:
+ f.write(TEXT)
+ for test_mode in MODES:
+ testing_write = test_mode != 'r'
+ try:
+ with locked_file(FILE, test_mode, False):
+ pass
+ except (BlockingIOError, PermissionError):
+ if not testing_write: # FIXME
+ print(f'Known issue: Exclusive lock ({lock_mode}) blocks read access ({test_mode})')
+ continue
+ self.assertTrue(testing_write, f'{test_mode} is blocked by {lock_mode}')
+ else:
+ self.assertFalse(testing_write, f'{test_mode} is not blocked by {lock_mode}')
+ finally:
+ with contextlib.suppress(OSError):
+ os.remove(FILE)
+
+ def test_determine_file_encoding(self):
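+ # determine_file_encoding() returns (encoding, number of BOM bytes to skip), detecting both byte-order marks and '# coding: ...' declarations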
+ self.assertEqual(determine_file_encoding(b''), (None, 0))
+ self.assertEqual(determine_file_encoding(b'--verbose -x --audio-format mkv\n'), (None, 0))
+
+ self.assertEqual(determine_file_encoding(b'\xef\xbb\xbf'), ('utf-8', 3))
+ self.assertEqual(determine_file_encoding(b'\x00\x00\xfe\xff'), ('utf-32-be', 4))
+ self.assertEqual(determine_file_encoding(b'\xff\xfe'), ('utf-16-le', 2))
+
+ self.assertEqual(determine_file_encoding(b'\xff\xfe# coding: utf-8\n--verbose'), ('utf-16-le', 2))
+
+ self.assertEqual(determine_file_encoding(b'# coding: utf-8\n--verbose'), ('utf-8', 0))
+ self.assertEqual(determine_file_encoding(b'# coding: someencodinghere-12345\n--verbose'), ('someencodinghere-12345', 0))
+
+ self.assertEqual(determine_file_encoding(b'#coding:utf-8\n--verbose'), ('utf-8', 0))
+ self.assertEqual(determine_file_encoding(b'# coding: utf-8 \r\n--verbose'), ('utf-8', 0))
+
+ self.assertEqual(determine_file_encoding('# coding: utf-32-be'.encode('utf-32-be')), ('utf-32-be', 0))
+ self.assertEqual(determine_file_encoding('# coding: utf-16-le'.encode('utf-16-le')), ('utf-16-le', 0))
+
+ def test_get_compatible_ext(self):
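+ # get_compatible_ext() picks a single container able to hold all the given video/audio codecs and extensions, falling back to mkv when nothing more specific fits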
+ self.assertEqual(get_compatible_ext(
+ vcodecs=[None], acodecs=[None, None], vexts=['mp4'], aexts=['m4a', 'm4a']), 'mkv')
+ self.assertEqual(get_compatible_ext(
+ vcodecs=[None], acodecs=[None], vexts=['flv'], aexts=['flv']), 'flv')
+
+ self.assertEqual(get_compatible_ext(
+ vcodecs=[None], acodecs=[None], vexts=['mp4'], aexts=['m4a']), 'mp4')
+ self.assertEqual(get_compatible_ext(
+ vcodecs=[None], acodecs=[None], vexts=['mp4'], aexts=['webm']), 'mkv')
+ self.assertEqual(get_compatible_ext(
+ vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['m4a']), 'mkv')
+ self.assertEqual(get_compatible_ext(
+ vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['webm']), 'webm')
+ self.assertEqual(get_compatible_ext(
+ vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['weba']), 'webm')
+
+ self.assertEqual(get_compatible_ext(
+ vcodecs=['h264'], acodecs=['mp4a'], vexts=['mov'], aexts=['m4a']), 'mp4')
+ self.assertEqual(get_compatible_ext(
+ vcodecs=['av01.0.12M.08'], acodecs=['opus'], vexts=['mp4'], aexts=['webm']), 'webm')
+
+ self.assertEqual(get_compatible_ext(
+ vcodecs=['vp9'], acodecs=['opus'], vexts=['webm'], aexts=['webm'], preferences=['flv', 'mp4']), 'mp4')
+ self.assertEqual(get_compatible_ext(
+ vcodecs=['av1'], acodecs=['mp4a'], vexts=['webm'], aexts=['m4a'], preferences=('webm', 'mkv')), 'mkv')
+
+ def test_try_call(self):
+ def total(*x, **kwargs):
+ return sum(x) + sum(kwargs.values())
+
+ self.assertEqual(try_call(None), None,
+ msg='not a fn should give None')
+ self.assertEqual(try_call(lambda: 1), 1,
+ msg='int fn with no expected_type should give int')
+ self.assertEqual(try_call(lambda: 1, expected_type=int), 1,
+ msg='int fn with expected_type int should give int')
+ self.assertEqual(try_call(lambda: 1, expected_type=dict), None,
+ msg='int fn with wrong expected_type should give None')
+ self.assertEqual(try_call(total, args=(0, 1, 0, ), expected_type=int), 1,
+ msg='fn should accept arglist')
+ self.assertEqual(try_call(total, kwargs={'a': 0, 'b': 1, 'c': 0}, expected_type=int), 1,
+ msg='fn should accept kwargs')
+ self.assertEqual(try_call(lambda: 1, expected_type=dict), None,
+ msg='int fn with wrong expected_type should give None')
+ self.assertEqual(try_call(lambda x: {}, total, args=(42, ), expected_type=int), 42,
+ msg='expect first int result with expected_type int')
+
+ def test_variadic(self):
+ self.assertEqual(variadic(None), (None, ))
+ self.assertEqual(variadic('spam'), ('spam', ))
+ self.assertEqual(variadic('spam', allowed_types=dict), 'spam')
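+ # Passing allowed_types as a list rather than a type/tuple is deprecated, hence the warning filter below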
+ with warnings.catch_warnings():
+ warnings.simplefilter('ignore')
+ self.assertEqual(variadic('spam', allowed_types=[dict]), 'spam')
+
+ def test_traverse_obj(self):
+ _TEST_DATA = {
+ 100: 100,
+ 1.2: 1.2,
+ 'str': 'str',
+ 'None': None,
+ '...': ...,
+ 'urls': [
+ {'index': 0, 'url': 'https://www.example.com/0'},
+ {'index': 1, 'url': 'https://www.example.com/1'},
+ ],
+ 'data': (
+ {'index': 2},
+ {'index': 3},
+ ),
+ 'dict': {},
+ }
+
+ # Test base functionality
+ self.assertEqual(traverse_obj(_TEST_DATA, ('str',)), 'str',
+ msg='allow tuple path')
+ self.assertEqual(traverse_obj(_TEST_DATA, ['str']), 'str',
+ msg='allow list path')
+ self.assertEqual(traverse_obj(_TEST_DATA, (value for value in ("str",))), 'str',
+ msg='allow iterable path')
+ self.assertEqual(traverse_obj(_TEST_DATA, 'str'), 'str',
+ msg='single items should be treated as a path')
+ self.assertEqual(traverse_obj(_TEST_DATA, None), _TEST_DATA)
+ self.assertEqual(traverse_obj(_TEST_DATA, 100), 100)
+ self.assertEqual(traverse_obj(_TEST_DATA, 1.2), 1.2)
+
+ # Test Ellipsis behavior
+ self.assertCountEqual(traverse_obj(_TEST_DATA, ...),
+ (item for item in _TEST_DATA.values() if item not in (None, {})),
+ msg='`...` should give all non-discarded values')
+ self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', 0, ...)), _TEST_DATA['urls'][0].values(),
+ msg='`...` selection for dicts should select all values')
+ self.assertEqual(traverse_obj(_TEST_DATA, (..., ..., 'url')),
+ ['https://www.example.com/0', 'https://www.example.com/1'],
+ msg='nested `...` queries should work')
+ self.assertCountEqual(traverse_obj(_TEST_DATA, (..., ..., 'index')), range(4),
+ msg='`...` query result should be flattened')
+ self.assertEqual(traverse_obj(iter(range(4)), ...), list(range(4)),
+ msg='`...` should accept iterables')
+
+ # Test function as key
+ self.assertEqual(traverse_obj(_TEST_DATA, lambda x, y: x == 'urls' and isinstance(y, list)),
+ [_TEST_DATA['urls']],
+ msg='function as query key should perform a filter based on (key, value)')
+ self.assertCountEqual(traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)), {'str'},
+ msg='exceptions in the query function should be caught')
+ self.assertEqual(traverse_obj(iter(range(4)), lambda _, x: x % 2 == 0), [0, 2],
+ msg='function key should accept iterables')
+ if __debug__:
+ with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
+ traverse_obj(_TEST_DATA, lambda a: ...)
+ with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
+ traverse_obj(_TEST_DATA, lambda a, b, c: ...)
+
+ # Test set as key (transformation/type, like `expected_type`)
+ self.assertEqual(traverse_obj(_TEST_DATA, (..., {str.upper}, )), ['STR'],
+ msg='Function in set should be a transformation')
+ self.assertEqual(traverse_obj(_TEST_DATA, (..., {str})), ['str'],
+ msg='Type in set should be a type filter')
+ self.assertEqual(traverse_obj(_TEST_DATA, {dict}), _TEST_DATA,
+ msg='A single set should be wrapped into a path')
+ self.assertEqual(traverse_obj(_TEST_DATA, (..., {str.upper})), ['STR'],
+ msg='Transformation function should not raise')
+ self.assertEqual(traverse_obj(_TEST_DATA, (..., {str_or_none})),
+ [item for item in map(str_or_none, _TEST_DATA.values()) if item is not None],
+ msg='Function in set should be a transformation')
+ self.assertEqual(traverse_obj(_TEST_DATA, ('fail', {lambda _: 'const'})), 'const',
+ msg='Function in set should always be called')
+ if __debug__:
+ with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
+ traverse_obj(_TEST_DATA, set())
+ with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
+ traverse_obj(_TEST_DATA, {str.upper, str})
+
+ # Test `slice` as a key
+ _SLICE_DATA = [0, 1, 2, 3, 4]
+ self.assertEqual(traverse_obj(_TEST_DATA, ('dict', slice(1))), None,
+ msg='slice on a dictionary should not throw')
+ self.assertEqual(traverse_obj(_SLICE_DATA, slice(1)), _SLICE_DATA[:1],
+ msg='slice key should apply slice to sequence')
+ self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 2)), _SLICE_DATA[1:2],
+ msg='slice key should apply slice to sequence')
+ self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 4, 2)), _SLICE_DATA[1:4:2],
+ msg='slice key should apply slice to sequence')
+
+ # Test alternative paths
+ self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'str'), 'str',
+ msg='multiple `paths` should be treated as alternative paths')
+ self.assertEqual(traverse_obj(_TEST_DATA, 'str', 100), 'str',
+ msg='alternatives should exit early')
+ self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'fail'), None,
+ msg='alternatives should return `default` if exhausted')
+ self.assertEqual(traverse_obj(_TEST_DATA, (..., 'fail'), 100), 100,
+ msg='alternatives should track their own branching return')
+ self.assertEqual(traverse_obj(_TEST_DATA, ('dict', ...), ('data', ...)), list(_TEST_DATA['data']),
+ msg='alternatives on empty objects should search further')
+
+ # Test branch and path nesting
+ self.assertEqual(traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')), ['https://www.example.com/0'],
+ msg='tuple as key should be treated as branches')
+ self.assertEqual(traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')), ['https://www.example.com/0'],
+ msg='list as key should be treated as branches')
+ self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))), ['https://www.example.com/0'],
+ msg='double nesting in path should be treated as paths')
+ self.assertEqual(traverse_obj(['0', [1, 2]], [(0, 1), 0]), [1],
+ msg='do not fail early on branching')
+ self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', ((1, ('fail', 'url')), (0, 'url')))),
+ ['https://www.example.com/0', 'https://www.example.com/1'],
+ msg='triple nesting in path should be treated as branches')
+ self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ('fail', (..., 'url')))),
+ ['https://www.example.com/0', 'https://www.example.com/1'],
+ msg='ellipsis as branch path start gets flattened')
+
+ # Test dictionary as key
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}), {0: 100, 1: 1.2},
+ msg='dict key should result in a dict with the same keys')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}),
+ {0: 'https://www.example.com/0'},
+ msg='dict key should allow paths')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}),
+ {0: ['https://www.example.com/0']},
+ msg='tuple in dict path should be treated as branches')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}),
+ {0: ['https://www.example.com/0']},
+ msg='double nesting in dict path should be treated as paths')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}),
+ {0: ['https://www.example.com/1', 'https://www.example.com/0']},
+ msg='triple nesting in dict path should be treated as branches')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}), {},
+ msg='remove `None` values when top level dict key fails')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}, default=...), {0: ...},
+ msg='use `default` if key fails and `default`')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}), {},
+ msg='remove empty values when dict key')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}, default=...), {0: ...},
+ msg='use `default` when dict key and `default`')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}), {},
+ msg='remove empty values when nested dict key fails')
+ self.assertEqual(traverse_obj(None, {0: 'fail'}), {},
+ msg='default to dict if pruned')
+ self.assertEqual(traverse_obj(None, {0: 'fail'}, default=...), {0: ...},
+ msg='default to dict if pruned and default is given')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}, default=...), {0: {0: ...}},
+ msg='use nested `default` when nested dict key fails and `default`')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: ('dict', ...)}), {},
+ msg='remove key if branch in dict key not successful')
+
+ # Testing default parameter behavior
+ _DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}
+ self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail'), None,
+ msg='default value should be `None`')
+ self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=...), ...,
+ msg='chained fails should result in default')
+ self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', 'int'), 0,
+ msg='should not short-circuit on `None`')
+ self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', default=1), 1,
+ msg='invalid dict key should result in `default`')
+ self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', default=1), 1,
+ msg='`None` is a deliberate sentinel and should become `default`')
+ self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', 10)), None,
+ msg='`IndexError` should result in `default`')
+ self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=1), 1,
+ msg='if branched but not successful return `default` if defined, not `[]`')
+ self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=None), None,
+ msg='if branched but not successful return `default` even if `default` is `None`')
+ self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail')), [],
+ msg='if branched but not successful return `[]`, not `default`')
+ self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', ...)), [],
+ msg='if branched but object is empty return `[]`, not `default`')
+ self.assertEqual(traverse_obj(None, ...), [],
+ msg='if branched but object is `None` return `[]`, not `default`')
+ self.assertEqual(traverse_obj({0: None}, (0, ...)), [],
+ msg='if branched but state is `None` return `[]`, not `default`')
+
+ branching_paths = [
+ ('fail', ...),
+ (..., 'fail'),
+ 100 * ('fail',) + (...,),
+ (...,) + 100 * ('fail',),
+ ]
+ for branching_path in branching_paths:
+ self.assertEqual(traverse_obj({}, branching_path), [],
+ msg='if branched but not successful, return `[]` (not `default`)')
+ self.assertEqual(traverse_obj({}, 'fail', branching_path), [],
+ msg='if branching in last alternative and previous did not match, return `[]` (not `default`)')
+ self.assertEqual(traverse_obj({0: 'x'}, 0, branching_path), 'x',
+ msg='if branching in last alternative and previous did match, return single value')
+ self.assertEqual(traverse_obj({0: 'x'}, branching_path, 0), 'x',
+ msg='if branching in first alternative and non-branching path does match, return single value')
+ self.assertEqual(traverse_obj({}, branching_path, 'fail'), None,
+ msg='if branching in first alternative and non-branching path does not match, return `default`')
+
+ # Testing expected_type behavior
+ _EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}
+ self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str),
+ 'str', msg='accept matching `expected_type` type')
+ self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int),
+ None, msg='reject non matching `expected_type` type')
+ self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)),
+ '0', msg='transform type using type function')
+ self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=lambda _: 1 / 0),
+ None, msg='wrap expected_type function in try_call')
+ self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, ..., expected_type=str),
+ ['str'], msg='eliminate items that expected_type fails on')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}, expected_type=int),
+ {0: 100}, msg='type as expected_type should filter dict values')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2, 2: 'None'}, expected_type=str_or_none),
+ {0: '100', 1: '1.2'}, msg='function as expected_type should transform dict values')
+ self.assertEqual(traverse_obj(_TEST_DATA, ({0: 1.2}, 0, {int_or_none}), expected_type=int),
+ 1, msg='expected_type should not filter non final dict values')
+ self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 100, 1: 'str'}}, expected_type=int),
+ {0: {0: 100}}, msg='expected_type should transform deep dict values')
+ self.assertEqual(traverse_obj(_TEST_DATA, [({0: '...'}, {0: '...'})], expected_type=type(...)),
+ [{0: ...}, {0: ...}], msg='expected_type should transform branched dict values')
+ self.assertEqual(traverse_obj({1: {3: 4}}, [(1, 2), 3], expected_type=int),
+ [4], msg='expected_type regression for type matching in tuple branching')
+ self.assertEqual(traverse_obj(_TEST_DATA, ['data', ...], expected_type=int),
+ [], msg='expected_type regression for type matching in dict result')
+
+ # Test get_all behavior
+ _GET_ALL_DATA = {'key': [0, 1, 2]}
+ self.assertEqual(traverse_obj(_GET_ALL_DATA, ('key', ...), get_all=False), 0,
+ msg='if not `get_all`, return only first matching value')
+ self.assertEqual(traverse_obj(_GET_ALL_DATA, ..., get_all=False), [0, 1, 2],
+ msg='do not overflatten if not `get_all`')
+
+ # Test casesense behavior
+ _CASESENSE_DATA = {
+ 'KeY': 'value0',
+ 0: {
+ 'KeY': 'value1',
+ 0: {'KeY': 'value2'},
+ },
+ }
+ self.assertEqual(traverse_obj(_CASESENSE_DATA, 'key'), None,
+ msg='dict keys should be case-sensitive unless `casesense` is disabled')
+ self.assertEqual(traverse_obj(_CASESENSE_DATA, 'keY',
+ casesense=False), 'value0',
+ msg='allow non matching key case if `casesense`')
+ self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ('keY',)),
+ casesense=False), ['value1'],
+ msg='allow non matching key case in branch if `casesense`')
+ self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ((0, 'keY'),)),
+ casesense=False), ['value2'],
+ msg='allow non matching key case in branch path if `casesense`')
+
+ # Test traverse_string behavior
+ _TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}
+ self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)), None,
+ msg='do not traverse into string if not `traverse_string`')
+ self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0),
+ traverse_string=True), 's',
+ msg='traverse into string if `traverse_string`')
+ self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1),
+ traverse_string=True), '.',
+ msg='traverse into converted data if `traverse_string`')
+ self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', ...),
+ traverse_string=True), 'str',
+ msg='`...` should result in string (same value) if `traverse_string`')
+ self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', slice(0, None, 2)),
+ traverse_string=True), 'sr',
+ msg='`slice` should result in string if `traverse_string`')
+ self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda i, v: i or v == "s"),
+ traverse_string=True), 'str',
+ msg='function should result in string if `traverse_string`')
+ self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)),
+ traverse_string=True), ['s', 'r'],
+ msg='branching should result in list if `traverse_string`')
+ self.assertEqual(traverse_obj({}, (0, ...), traverse_string=True), [],
+ msg='branching should result in list if `traverse_string`')
+ self.assertEqual(traverse_obj({}, (0, lambda x, y: True), traverse_string=True), [],
+ msg='branching should result in list if `traverse_string`')
+ self.assertEqual(traverse_obj({}, (0, slice(1)), traverse_string=True), [],
+ msg='branching should result in list if `traverse_string`')
+
+ # Test re.Match as input obj
+ mobj = re.fullmatch(r'0(12)(?P<group>3)(4)?', '0123')
+ self.assertEqual(traverse_obj(mobj, ...), [x for x in mobj.groups() if x is not None],
+ msg='`...` on a `re.Match` should give its `groups()`')
+ self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 2)), ['0123', '3'],
+                         msg='function on a `re.Match` should be passed (group number, value), starting at group 0')
+ self.assertEqual(traverse_obj(mobj, 'group'), '3',
+ msg='str key on a `re.Match` should give group with that name')
+ self.assertEqual(traverse_obj(mobj, 2), '3',
+ msg='int key on a `re.Match` should give group with that name')
+ self.assertEqual(traverse_obj(mobj, 'gRoUp', casesense=False), '3',
+ msg='str key on a `re.Match` should respect casesense')
+ self.assertEqual(traverse_obj(mobj, 'fail'), None,
+ msg='failing str key on a `re.Match` should return `default`')
+ self.assertEqual(traverse_obj(mobj, 'gRoUpS', casesense=False), None,
+ msg='failing str key on a `re.Match` should return `default`')
+ self.assertEqual(traverse_obj(mobj, 8), None,
+ msg='failing int key on a `re.Match` should return `default`')
+ self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 'group')), ['0123', '3'],
+ msg='function on a `re.Match` should give group name as well')
+
+ # Test xml.etree.ElementTree.Element as input obj
+ etree = xml.etree.ElementTree.fromstring('''<?xml version="1.0"?>
+ <data>
+ <country name="Liechtenstein">
+ <rank>1</rank>
+ <year>2008</year>
+ <gdppc>141100</gdppc>
+ <neighbor name="Austria" direction="E"/>
+ <neighbor name="Switzerland" direction="W"/>
+ </country>
+ <country name="Singapore">
+ <rank>4</rank>
+ <year>2011</year>
+ <gdppc>59900</gdppc>
+ <neighbor name="Malaysia" direction="N"/>
+ </country>
+ <country name="Panama">
+ <rank>68</rank>
+ <year>2011</year>
+ <gdppc>13600</gdppc>
+ <neighbor name="Costa Rica" direction="W"/>
+ <neighbor name="Colombia" direction="E"/>
+ </country>
+ </data>''')
+ self.assertEqual(traverse_obj(etree, ''), etree,
+ msg='empty str key should return the element itself')
+ self.assertEqual(traverse_obj(etree, 'country'), list(etree),
+                         msg='str key should yield all children with that tag name')
+ self.assertEqual(traverse_obj(etree, ...), list(etree),
+ msg='`...` as key should return all children')
+ self.assertEqual(traverse_obj(etree, lambda _, x: x[0].text == '4'), [etree[1]],
+ msg='function as key should get element as value')
+ self.assertEqual(traverse_obj(etree, lambda i, _: i == 1), [etree[1]],
+ msg='function as key should get index as key')
+ self.assertEqual(traverse_obj(etree, 0), etree[0],
+ msg='int key should return the nth child')
+ self.assertEqual(traverse_obj(etree, './/neighbor/@name'),
+ ['Austria', 'Switzerland', 'Malaysia', 'Costa Rica', 'Colombia'],
+ msg='`@<attribute>` at end of path should give that attribute')
+ self.assertEqual(traverse_obj(etree, '//neighbor/@fail'), [None, None, None, None, None],
+                         msg='`@<nonexistent>` at end of path should give `None`')
+ self.assertEqual(traverse_obj(etree, ('//neighbor/@', 2)), {'name': 'Malaysia', 'direction': 'N'},
+ msg='`@` should give the full attribute dict')
+ self.assertEqual(traverse_obj(etree, '//year/text()'), ['2008', '2011', '2011'],
+ msg='`text()` at end of path should give the inner text')
+ self.assertEqual(traverse_obj(etree, '//*[@direction]/@direction'), ['E', 'W', 'N', 'W', 'E'],
+ msg='full Python xpath features should be supported')
+ self.assertEqual(traverse_obj(etree, (0, '@name')), 'Liechtenstein',
+ msg='special transformations should act on current element')
+ self.assertEqual(traverse_obj(etree, ('country', 0, ..., 'text()', {int_or_none})), [1, 2008, 141100],
+ msg='special transformations should act on current element')
+
+ def test_http_header_dict(self):
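+        # HTTPHeaderDict normalizes header names to Title-Case and coerces values to
+        # str, so lookups and deletions are case-insensitive while iteration yields
+        # the canonical form (exercised by the assertions below).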
+ headers = HTTPHeaderDict()
+ headers['ytdl-test'] = b'0'
+ self.assertEqual(list(headers.items()), [('Ytdl-Test', '0')])
+ headers['ytdl-test'] = 1
+ self.assertEqual(list(headers.items()), [('Ytdl-Test', '1')])
+ headers['Ytdl-test'] = '2'
+ self.assertEqual(list(headers.items()), [('Ytdl-Test', '2')])
+ self.assertTrue('ytDl-Test' in headers)
+ self.assertEqual(str(headers), str(dict(headers)))
+ self.assertEqual(repr(headers), str(dict(headers)))
+
+ headers.update({'X-dlp': 'data'})
+ self.assertEqual(set(headers.items()), {('Ytdl-Test', '2'), ('X-Dlp', 'data')})
+ self.assertEqual(dict(headers), {'Ytdl-Test': '2', 'X-Dlp': 'data'})
+ self.assertEqual(len(headers), 2)
+ self.assertEqual(headers.copy(), headers)
+ headers2 = HTTPHeaderDict({'X-dlp': 'data3'}, **headers, **{'X-dlp': 'data2'})
+ self.assertEqual(set(headers2.items()), {('Ytdl-Test', '2'), ('X-Dlp', 'data2')})
+ self.assertEqual(len(headers2), 2)
+ headers2.clear()
+ self.assertEqual(len(headers2), 0)
+
+ # ensure we prefer latter headers
+ headers3 = HTTPHeaderDict({'Ytdl-TeSt': 1}, {'Ytdl-test': 2})
+ self.assertEqual(set(headers3.items()), {('Ytdl-Test', '2')})
+ del headers3['ytdl-tesT']
+ self.assertEqual(dict(headers3), {})
+
+ headers4 = HTTPHeaderDict({'ytdl-test': 'data;'})
+ self.assertEqual(set(headers4.items()), {('Ytdl-Test', 'data;')})
+
+ # common mistake: strip whitespace from values
+ # https://github.com/yt-dlp/yt-dlp/issues/8729
+ headers5 = HTTPHeaderDict({'ytdl-test': ' data; '})
+ self.assertEqual(set(headers5.items()), {('Ytdl-Test', 'data;')})
+
+ def test_extract_basic_auth(self):
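+        # The second return value is a ready-to-use Authorization header: 'Basic '
+        # followed by base64 of the URL's userinfo (e.g. base64('user:pass') is
+        # 'dXNlcjpwYXNz'); URLs without an '@' are returned unchanged with None.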
+ assert extract_basic_auth('http://:foo.bar') == ('http://:foo.bar', None)
+ assert extract_basic_auth('http://foo.bar') == ('http://foo.bar', None)
+ assert extract_basic_auth('http://@foo.bar') == ('http://foo.bar', 'Basic Og==')
+ assert extract_basic_auth('http://:pass@foo.bar') == ('http://foo.bar', 'Basic OnBhc3M=')
+ assert extract_basic_auth('http://user:@foo.bar') == ('http://foo.bar', 'Basic dXNlcjo=')
+ assert extract_basic_auth('http://user:pass@foo.bar') == ('http://foo.bar', 'Basic dXNlcjpwYXNz')
+
+ @unittest.skipUnless(compat_os_name == 'nt', 'Only relevant on Windows')
+ def test_Popen_windows_escaping(self):
+ def run_shell(args):
+ stdout, stderr, error = Popen.run(
+ args, text=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ assert not stderr
+ assert not error
+ return stdout
+
+ # Test escaping
+ assert run_shell(['echo', 'test"&']) == '"test""&"\n'
+ # Test if delayed expansion is disabled
+ assert run_shell(['echo', '^!']) == '"^!"\n'
+ assert run_shell('echo "^!"') == '"^!"\n'
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_verbose_output.py b/test/test_verbose_output.py
new file mode 100644
index 0000000..21ce10a
--- /dev/null
+++ b/test/test_verbose_output.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import subprocess
+
+rootDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+
+class TestVerboseOutput(unittest.TestCase):
+ def test_private_info_arg(self):
+ outp = subprocess.Popen(
+ [
+ sys.executable, 'yt_dlp/__main__.py',
+ '-v', '--ignore-config',
+ '--username', 'johnsmith@gmail.com',
+ '--password', 'my_secret_password',
+ ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ sout, serr = outp.communicate()
+ self.assertTrue(b'--username' in serr)
+ self.assertTrue(b'johnsmith' not in serr)
+ self.assertTrue(b'--password' in serr)
+ self.assertTrue(b'my_secret_password' not in serr)
+
+ def test_private_info_shortarg(self):
+ outp = subprocess.Popen(
+ [
+ sys.executable, 'yt_dlp/__main__.py',
+ '-v', '--ignore-config',
+ '-u', 'johnsmith@gmail.com',
+ '-p', 'my_secret_password',
+ ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ sout, serr = outp.communicate()
+ self.assertTrue(b'-u' in serr)
+ self.assertTrue(b'johnsmith' not in serr)
+ self.assertTrue(b'-p' in serr)
+ self.assertTrue(b'my_secret_password' not in serr)
+
+ def test_private_info_eq(self):
+ outp = subprocess.Popen(
+ [
+ sys.executable, 'yt_dlp/__main__.py',
+ '-v', '--ignore-config',
+ '--username=johnsmith@gmail.com',
+ '--password=my_secret_password',
+ ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ sout, serr = outp.communicate()
+ self.assertTrue(b'--username' in serr)
+ self.assertTrue(b'johnsmith' not in serr)
+ self.assertTrue(b'--password' in serr)
+ self.assertTrue(b'my_secret_password' not in serr)
+
+ def test_private_info_shortarg_eq(self):
+ outp = subprocess.Popen(
+ [
+ sys.executable, 'yt_dlp/__main__.py',
+ '-v', '--ignore-config',
+ '-u=johnsmith@gmail.com',
+ '-p=my_secret_password',
+ ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ sout, serr = outp.communicate()
+ self.assertTrue(b'-u' in serr)
+ self.assertTrue(b'johnsmith' not in serr)
+ self.assertTrue(b'-p' in serr)
+ self.assertTrue(b'my_secret_password' not in serr)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_websockets.py b/test/test_websockets.py
new file mode 100644
index 0000000..13b3a1e
--- /dev/null
+++ b/test/test_websockets.py
@@ -0,0 +1,383 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+
+import pytest
+
+from test.helper import verify_address_availability
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import http.client
+import http.cookiejar
+import http.server
+import json
+import random
+import ssl
+import threading
+
+from yt_dlp import socks
+from yt_dlp.cookies import YoutubeDLCookieJar
+from yt_dlp.dependencies import websockets
+from yt_dlp.networking import Request
+from yt_dlp.networking.exceptions import (
+ CertificateVerifyError,
+ HTTPError,
+ ProxyError,
+ RequestError,
+ SSLError,
+ TransportError,
+)
+from yt_dlp.utils.networking import HTTPHeaderDict
+
+from test.conftest import validate_and_send
+
+TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+def websocket_handler(websocket):
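+    # Minimal echo server used by the tests below: a few trigger messages ('headers',
+    # 'path', 'source_address', 'str', b'bytes') return diagnostic data about the
+    # connection; any other message is echoed back verbatim.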
+ for message in websocket:
+ if isinstance(message, bytes):
+ if message == b'bytes':
+ return websocket.send('2')
+ elif isinstance(message, str):
+ if message == 'headers':
+ return websocket.send(json.dumps(dict(websocket.request.headers)))
+ elif message == 'path':
+ return websocket.send(websocket.request.path)
+ elif message == 'source_address':
+ return websocket.send(websocket.remote_address[0])
+ elif message == 'str':
+ return websocket.send('1')
+ return websocket.send(message)
+
+
+def process_request(self, request):
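+    # Requests to '/gen_<code>' short-circuit the websocket handshake with that HTTP
+    # status; only status 300 gets a Location header, everything else is rejected
+    # outright so the HTTP error tests can observe the raw status code.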
+ if request.path.startswith('/gen_'):
+ status = http.HTTPStatus(int(request.path[5:]))
+ if 300 <= status.value <= 300:
+ return websockets.http11.Response(
+ status.value, status.phrase, websockets.datastructures.Headers([('Location', '/')]), b'')
+ return self.protocol.reject(status.value, status.phrase)
+ return self.protocol.accept(request)
+
+
+def create_websocket_server(**ws_kwargs):
+ import websockets.sync.server
+ wsd = websockets.sync.server.serve(websocket_handler, '127.0.0.1', 0, process_request=process_request, **ws_kwargs)
+ ws_port = wsd.socket.getsockname()[1]
+ ws_server_thread = threading.Thread(target=wsd.serve_forever)
+ ws_server_thread.daemon = True
+ ws_server_thread.start()
+ return ws_server_thread, ws_port
+
+
+def create_ws_websocket_server():
+ return create_websocket_server()
+
+
+def create_wss_websocket_server():
+ certfn = os.path.join(TEST_DIR, 'testcert.pem')
+ sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+ sslctx.load_cert_chain(certfn, None)
+ return create_websocket_server(ssl_context=sslctx)
+
+
+MTLS_CERT_DIR = os.path.join(TEST_DIR, 'testdata', 'certificate')
+
+
+def create_mtls_wss_websocket_server():
+ certfn = os.path.join(TEST_DIR, 'testcert.pem')
+ cacertfn = os.path.join(MTLS_CERT_DIR, 'ca.crt')
+
+ sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+ sslctx.verify_mode = ssl.CERT_REQUIRED
+ sslctx.load_verify_locations(cafile=cacertfn)
+ sslctx.load_cert_chain(certfn, None)
+
+ return create_websocket_server(ssl_context=sslctx)
+
+
+@pytest.mark.skipif(not websockets, reason='websockets must be installed to test websocket request handlers')
+class TestWebsSocketRequestHandlerConformance:
+ @classmethod
+ def setup_class(cls):
+ cls.ws_thread, cls.ws_port = create_ws_websocket_server()
+ cls.ws_base_url = f'ws://127.0.0.1:{cls.ws_port}'
+
+ cls.wss_thread, cls.wss_port = create_wss_websocket_server()
+ cls.wss_base_url = f'wss://127.0.0.1:{cls.wss_port}'
+
+ cls.bad_wss_thread, cls.bad_wss_port = create_websocket_server(ssl_context=ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER))
+ cls.bad_wss_host = f'wss://127.0.0.1:{cls.bad_wss_port}'
+
+ cls.mtls_wss_thread, cls.mtls_wss_port = create_mtls_wss_websocket_server()
+ cls.mtls_wss_base_url = f'wss://127.0.0.1:{cls.mtls_wss_port}'
+
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ def test_basic_websockets(self, handler):
+ with handler() as rh:
+ ws = validate_and_send(rh, Request(self.ws_base_url))
+ assert 'upgrade' in ws.headers
+ assert ws.status == 101
+ ws.send('foo')
+ assert ws.recv() == 'foo'
+ ws.close()
+
+ # https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
+ @pytest.mark.parametrize('msg,opcode', [('str', 1), (b'bytes', 2)])
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ def test_send_types(self, handler, msg, opcode):
+ with handler() as rh:
+ ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws.send(msg)
+ assert int(ws.recv()) == opcode
+ ws.close()
+
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ def test_verify_cert(self, handler):
+ with handler() as rh:
+ with pytest.raises(CertificateVerifyError):
+ validate_and_send(rh, Request(self.wss_base_url))
+
+ with handler(verify=False) as rh:
+ ws = validate_and_send(rh, Request(self.wss_base_url))
+ assert ws.status == 101
+ ws.close()
+
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ def test_ssl_error(self, handler):
+ with handler(verify=False) as rh:
+ with pytest.raises(SSLError, match=r'ssl(?:v3|/tls) alert handshake failure') as exc_info:
+ validate_and_send(rh, Request(self.bad_wss_host))
+ assert not issubclass(exc_info.type, CertificateVerifyError)
+
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ @pytest.mark.parametrize('path,expected', [
+ # Unicode characters should be encoded with uppercase percent-encoding
+ ('/中文', '/%E4%B8%AD%E6%96%87'),
+ # don't normalize existing percent encodings
+ ('/%c7%9f', '/%c7%9f'),
+ ])
+ def test_percent_encode(self, handler, path, expected):
+ with handler() as rh:
+ ws = validate_and_send(rh, Request(f'{self.ws_base_url}{path}'))
+ ws.send('path')
+ assert ws.recv() == expected
+ assert ws.status == 101
+ ws.close()
+
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ def test_remove_dot_segments(self, handler):
+ with handler() as rh:
+ # This isn't a comprehensive test,
+ # but it should be enough to check whether the handler is removing dot segments
+ ws = validate_and_send(rh, Request(f'{self.ws_base_url}/a/b/./../../test'))
+ assert ws.status == 101
+ ws.send('path')
+ assert ws.recv() == '/test'
+ ws.close()
+
+ # We are restricted to known HTTP status codes in http.HTTPStatus
+ # Redirects are not supported for websockets
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ @pytest.mark.parametrize('status', (200, 204, 301, 302, 303, 400, 500, 511))
+ def test_raise_http_error(self, handler, status):
+ with handler() as rh:
+ with pytest.raises(HTTPError) as exc_info:
+ validate_and_send(rh, Request(f'{self.ws_base_url}/gen_{status}'))
+ assert exc_info.value.status == status
+
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ @pytest.mark.parametrize('params,extensions', [
+ ({'timeout': sys.float_info.min}, {}),
+ ({}, {'timeout': sys.float_info.min}),
+ ])
+ def test_timeout(self, handler, params, extensions):
+ with handler(**params) as rh:
+ with pytest.raises(TransportError):
+ validate_and_send(rh, Request(self.ws_base_url, extensions=extensions))
+
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ def test_cookies(self, handler):
+ cookiejar = YoutubeDLCookieJar()
+ cookiejar.set_cookie(http.cookiejar.Cookie(
+ version=0, name='test', value='ytdlp', port=None, port_specified=False,
+ domain='127.0.0.1', domain_specified=True, domain_initial_dot=False, path='/',
+ path_specified=True, secure=False, expires=None, discard=False, comment=None,
+ comment_url=None, rest={}))
+
+ with handler(cookiejar=cookiejar) as rh:
+ ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws.send('headers')
+ assert json.loads(ws.recv())['cookie'] == 'test=ytdlp'
+ ws.close()
+
+ with handler() as rh:
+ ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws.send('headers')
+ assert 'cookie' not in json.loads(ws.recv())
+ ws.close()
+
+ ws = validate_and_send(rh, Request(self.ws_base_url, extensions={'cookiejar': cookiejar}))
+ ws.send('headers')
+ assert json.loads(ws.recv())['cookie'] == 'test=ytdlp'
+ ws.close()
+
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ def test_source_address(self, handler):
+ source_address = f'127.0.0.{random.randint(5, 255)}'
+ verify_address_availability(source_address)
+ with handler(source_address=source_address) as rh:
+ ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws.send('source_address')
+ assert source_address == ws.recv()
+ ws.close()
+
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ def test_response_url(self, handler):
+ with handler() as rh:
+ url = f'{self.ws_base_url}/something'
+ ws = validate_and_send(rh, Request(url))
+ assert ws.url == url
+ ws.close()
+
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ def test_request_headers(self, handler):
+ with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
+ # Global Headers
+ ws = validate_and_send(rh, Request(self.ws_base_url))
+ ws.send('headers')
+ headers = HTTPHeaderDict(json.loads(ws.recv()))
+ assert headers['test1'] == 'test'
+ ws.close()
+
+ # Per request headers, merged with global
+ ws = validate_and_send(rh, Request(
+ self.ws_base_url, headers={'test2': 'changed', 'test3': 'test3'}))
+ ws.send('headers')
+ headers = HTTPHeaderDict(json.loads(ws.recv()))
+ assert headers['test1'] == 'test'
+ assert headers['test2'] == 'changed'
+ assert headers['test3'] == 'test3'
+ ws.close()
+
+ @pytest.mark.parametrize('client_cert', (
+ {'client_certificate': os.path.join(MTLS_CERT_DIR, 'clientwithkey.crt')},
+ {
+ 'client_certificate': os.path.join(MTLS_CERT_DIR, 'client.crt'),
+ 'client_certificate_key': os.path.join(MTLS_CERT_DIR, 'client.key'),
+ },
+ {
+ 'client_certificate': os.path.join(MTLS_CERT_DIR, 'clientwithencryptedkey.crt'),
+ 'client_certificate_password': 'foobar',
+ },
+ {
+ 'client_certificate': os.path.join(MTLS_CERT_DIR, 'client.crt'),
+ 'client_certificate_key': os.path.join(MTLS_CERT_DIR, 'clientencrypted.key'),
+ 'client_certificate_password': 'foobar',
+ }
+ ))
+ @pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+ def test_mtls(self, handler, client_cert):
+ with handler(
+            # Disable client-side validation of the unacceptable self-signed testcert.pem
+            # The check under test happens on the server side, so it is unaffected
+ verify=False,
+ client_cert=client_cert
+ ) as rh:
+ validate_and_send(rh, Request(self.mtls_wss_base_url)).close()
+
+
+def create_fake_ws_connection(raised):
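+    # Builds a stub ClientConnection whose send() and recv() raise the exception
+    # produced by `raised`, letting the error-mapping tests run without a server.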
+ import websockets.sync.client
+
+ class FakeWsConnection(websockets.sync.client.ClientConnection):
+ def __init__(self, *args, **kwargs):
+ class FakeResponse:
+ body = b''
+ headers = {}
+ status_code = 101
+ reason_phrase = 'test'
+
+ self.response = FakeResponse()
+
+ def send(self, *args, **kwargs):
+ raise raised()
+
+ def recv(self, *args, **kwargs):
+ raise raised()
+
+ def close(self, *args, **kwargs):
+ return
+
+ return FakeWsConnection()
+
+
+@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
+class TestWebsocketsRequestHandler:
+ @pytest.mark.parametrize('raised,expected', [
+ # https://websockets.readthedocs.io/en/stable/reference/exceptions.html
+ (lambda: websockets.exceptions.InvalidURI(msg='test', uri='test://'), RequestError),
+ # Requires a response object. Should be covered by HTTP error tests.
+ # (lambda: websockets.exceptions.InvalidStatus(), TransportError),
+ (lambda: websockets.exceptions.InvalidHandshake(), TransportError),
+ # These are subclasses of InvalidHandshake
+ (lambda: websockets.exceptions.InvalidHeader(name='test'), TransportError),
+ (lambda: websockets.exceptions.NegotiationError(), TransportError),
+ # Catch-all
+ (lambda: websockets.exceptions.WebSocketException(), TransportError),
+ (lambda: TimeoutError(), TransportError),
+ # These may be raised by our create_connection implementation, which should also be caught
+ (lambda: OSError(), TransportError),
+ (lambda: ssl.SSLError(), SSLError),
+ (lambda: ssl.SSLCertVerificationError(), CertificateVerifyError),
+ (lambda: socks.ProxyError(), ProxyError),
+ ])
+ def test_request_error_mapping(self, handler, monkeypatch, raised, expected):
+ import websockets.sync.client
+
+ import yt_dlp.networking._websockets
+ with handler() as rh:
+ def fake_connect(*args, **kwargs):
+ raise raised()
+ monkeypatch.setattr(yt_dlp.networking._websockets, 'create_connection', lambda *args, **kwargs: None)
+ monkeypatch.setattr(websockets.sync.client, 'connect', fake_connect)
+ with pytest.raises(expected) as exc_info:
+ rh.send(Request('ws://fake-url'))
+ assert exc_info.type is expected
+
+ @pytest.mark.parametrize('raised,expected,match', [
+ # https://websockets.readthedocs.io/en/stable/reference/sync/client.html#websockets.sync.client.ClientConnection.send
+ (lambda: websockets.exceptions.ConnectionClosed(None, None), TransportError, None),
+ (lambda: RuntimeError(), TransportError, None),
+ (lambda: TimeoutError(), TransportError, None),
+ (lambda: TypeError(), RequestError, None),
+ (lambda: socks.ProxyError(), ProxyError, None),
+ # Catch-all
+ (lambda: websockets.exceptions.WebSocketException(), TransportError, None),
+ ])
+ def test_ws_send_error_mapping(self, handler, monkeypatch, raised, expected, match):
+ from yt_dlp.networking._websockets import WebsocketsResponseAdapter
+ ws = WebsocketsResponseAdapter(create_fake_ws_connection(raised), url='ws://fake-url')
+ with pytest.raises(expected, match=match) as exc_info:
+ ws.send('test')
+ assert exc_info.type is expected
+
+ @pytest.mark.parametrize('raised,expected,match', [
+ # https://websockets.readthedocs.io/en/stable/reference/sync/client.html#websockets.sync.client.ClientConnection.recv
+ (lambda: websockets.exceptions.ConnectionClosed(None, None), TransportError, None),
+ (lambda: RuntimeError(), TransportError, None),
+ (lambda: TimeoutError(), TransportError, None),
+ (lambda: socks.ProxyError(), ProxyError, None),
+ # Catch-all
+ (lambda: websockets.exceptions.WebSocketException(), TransportError, None),
+ ])
+ def test_ws_recv_error_mapping(self, handler, monkeypatch, raised, expected, match):
+ from yt_dlp.networking._websockets import WebsocketsResponseAdapter
+ ws = WebsocketsResponseAdapter(create_fake_ws_connection(raised), url='ws://fake-url')
+ with pytest.raises(expected, match=match) as exc_info:
+ ws.recv()
+ assert exc_info.type is expected
diff --git a/test/test_write_annotations.py.disabled b/test/test_write_annotations.py.disabled
new file mode 100644
index 0000000..c7cf199
--- /dev/null
+++ b/test/test_write_annotations.py.disabled
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import xml.etree.ElementTree
+
+import yt_dlp.extractor
+import yt_dlp.YoutubeDL
+from test.helper import get_params, is_download_test, try_rm
+
+
+class YoutubeDL(yt_dlp.YoutubeDL):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.to_stderr = self.to_screen
+
+
+params = get_params({
+ 'writeannotations': True,
+ 'skip_download': True,
+ 'writeinfojson': False,
+ 'format': 'flv',
+})
+
+
+TEST_ID = 'gr51aVj-mLg'
+ANNOTATIONS_FILE = TEST_ID + '.annotations.xml'
+EXPECTED_ANNOTATIONS = ['Speech bubble', 'Note', 'Title', 'Spotlight', 'Label']
+
+
+@is_download_test
+class TestAnnotations(unittest.TestCase):
+ def setUp(self):
+ # Clear old files
+ self.tearDown()
+
+ def test_info_json(self):
+ expected = list(EXPECTED_ANNOTATIONS) # Two annotations could have the same text.
+ ie = yt_dlp.extractor.YoutubeIE()
+ ydl = YoutubeDL(params)
+ ydl.add_info_extractor(ie)
+ ydl.download([TEST_ID])
+ self.assertTrue(os.path.exists(ANNOTATIONS_FILE))
+ annoxml = None
+ with open(ANNOTATIONS_FILE, encoding='utf-8') as annof:
+ annoxml = xml.etree.ElementTree.parse(annof)
+ self.assertTrue(annoxml is not None, 'Failed to parse annotations XML')
+ root = annoxml.getroot()
+ self.assertEqual(root.tag, 'document')
+ annotationsTag = root.find('annotations')
+ self.assertEqual(annotationsTag.tag, 'annotations')
+ annotations = annotationsTag.findall('annotation')
+
+ # Not all the annotations have TEXT children and the annotations are returned unsorted.
+ for a in annotations:
+ self.assertEqual(a.tag, 'annotation')
+ if a.get('type') == 'text':
+ textTag = a.find('TEXT')
+ text = textTag.text
+ self.assertTrue(text in expected) # assertIn only added in python 2.7
+ # remove the first occurrence, there could be more than one annotation with the same text
+ expected.remove(text)
+ # We should have seen (and removed) all the expected annotation texts.
+ self.assertEqual(len(expected), 0, 'Not all expected annotations were found.')
+
+ def tearDown(self):
+ try_rm(ANNOTATIONS_FILE)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_youtube_lists.py b/test/test_youtube_lists.py
new file mode 100644
index 0000000..b3f323e
--- /dev/null
+++ b/test/test_youtube_lists.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+from test.helper import FakeYDL, is_download_test
+from yt_dlp.extractor import YoutubeIE, YoutubeTabIE
+from yt_dlp.utils import ExtractorError
+
+
+@is_download_test
+class TestYoutubeLists(unittest.TestCase):
+ def assertIsPlaylist(self, info):
+ """Make sure the info has '_type' set to 'playlist'"""
+ self.assertEqual(info['_type'], 'playlist')
+
+ def test_youtube_playlist_noplaylist(self):
+ dl = FakeYDL()
+ dl.params['noplaylist'] = True
+ ie = YoutubeTabIE(dl)
+ result = ie.extract('https://www.youtube.com/watch?v=OmJ-4B-mS-Y&list=PLydZ2Hrp_gPRJViZjLFKaBMgCQOYEEkyp&index=2')
+ self.assertEqual(result['_type'], 'url')
+ self.assertEqual(result['ie_key'], YoutubeIE.ie_key())
+ self.assertEqual(YoutubeIE.extract_id(result['url']), 'OmJ-4B-mS-Y')
+
+ def test_youtube_mix(self):
+ dl = FakeYDL()
+ ie = YoutubeTabIE(dl)
+ result = ie.extract('https://www.youtube.com/watch?v=tyITL_exICo&list=RDCLAK5uy_kLWIr9gv1XLlPbaDS965-Db4TrBoUTxQ8')
+ entries = list(result['entries'])
+ self.assertTrue(len(entries) >= 50)
+ original_video = entries[0]
+ self.assertEqual(original_video['id'], 'tyITL_exICo')
+
+ def test_youtube_flat_playlist_extraction(self):
+ dl = FakeYDL()
+ dl.params['extract_flat'] = True
+ ie = YoutubeTabIE(dl)
+ result = ie.extract('https://www.youtube.com/playlist?list=PL4lCao7KL_QFVb7Iudeipvc2BCavECqzc')
+ self.assertIsPlaylist(result)
+ entries = list(result['entries'])
+ self.assertTrue(len(entries) == 1)
+ video = entries[0]
+ self.assertEqual(video['_type'], 'url')
+ self.assertEqual(video['ie_key'], 'Youtube')
+ self.assertEqual(video['id'], 'BaW_jenozKc')
+ self.assertEqual(video['url'], 'https://www.youtube.com/watch?v=BaW_jenozKc')
+ self.assertEqual(video['title'], 'youtube-dl test video "\'/\\ä↭𝕐')
+ self.assertEqual(video['duration'], 10)
+ self.assertEqual(video['uploader'], 'Philipp Hagemeister')
+
+ def test_youtube_channel_no_uploads(self):
+ dl = FakeYDL()
+ dl.params['extract_flat'] = True
+ ie = YoutubeTabIE(dl)
+ # no uploads
+ with self.assertRaisesRegex(ExtractorError, r'no uploads'):
+ ie.extract('https://www.youtube.com/channel/UC2yXPzFejc422buOIzn_0CA')
+
+ # no uploads and no UCID given
+ with self.assertRaisesRegex(ExtractorError, r'no uploads'):
+ ie.extract('https://www.youtube.com/news')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_youtube_misc.py b/test/test_youtube_misc.py
new file mode 100644
index 0000000..81be5d3
--- /dev/null
+++ b/test/test_youtube_misc.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+from yt_dlp.extractor import YoutubeIE
+
+
+class TestYoutubeMisc(unittest.TestCase):
+ def test_youtube_extract(self):
+ assertExtractId = lambda url, id: self.assertEqual(YoutubeIE.extract_id(url), id)
+ assertExtractId('http://www.youtube.com/watch?&v=BaW_jenozKc', 'BaW_jenozKc')
+ assertExtractId('https://www.youtube.com/watch?&v=BaW_jenozKc', 'BaW_jenozKc')
+ assertExtractId('https://www.youtube.com/watch?feature=player_embedded&v=BaW_jenozKc', 'BaW_jenozKc')
+ assertExtractId('https://www.youtube.com/watch_popup?v=BaW_jenozKc', 'BaW_jenozKc')
+ assertExtractId('http://www.youtube.com/watch?v=BaW_jenozKcsharePLED17F32AD9753930', 'BaW_jenozKc')
+ assertExtractId('BaW_jenozKc', 'BaW_jenozKc')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/test_youtube_signature.py b/test/test_youtube_signature.py
new file mode 100644
index 0000000..c559284
--- /dev/null
+++ b/test/test_youtube_signature.py
@@ -0,0 +1,253 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import contextlib
+import re
+import string
+import urllib.request
+
+from test.helper import FakeYDL, is_download_test
+from yt_dlp.extractor import YoutubeIE
+from yt_dlp.jsinterp import JSInterpreter
+
+_SIG_TESTS = [
+ (
+ 'https://s.ytimg.com/yts/jsbin/html5player-vflHOr_nV.js',
+ 86,
+ '>=<;:/.-[+*)(\'&%$#"!ZYX0VUTSRQPONMLKJIHGFEDCBA\\yxwvutsrqponmlkjihgfedcba987654321',
+ ),
+ (
+ 'https://s.ytimg.com/yts/jsbin/html5player-vfldJ8xgI.js',
+ 85,
+ '3456789a0cdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRS[UVWXYZ!"#$%&\'()*+,-./:;<=>?@',
+ ),
+ (
+ 'https://s.ytimg.com/yts/jsbin/html5player-vfle-mVwz.js',
+ 90,
+ ']\\[@?>=<;:/.-,+*)(\'&%$#"hZYXWVUTSRQPONMLKJIHGFEDCBAzyxwvutsrqponmlkjiagfedcb39876',
+ ),
+ (
+ 'https://s.ytimg.com/yts/jsbin/html5player-en_US-vfl0Cbn9e.js',
+ 84,
+ 'O1I3456789abcde0ghijklmnopqrstuvwxyzABCDEFGHfJKLMN2PQRSTUVW@YZ!"#$%&\'()*+,-./:;<=',
+ ),
+ (
+ 'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflXGBaUN.js',
+ '2ACFC7A61CA478CD21425E5A57EBD73DDC78E22A.2094302436B2D377D14A3BBA23022D023B8BC25AA',
+ 'A52CB8B320D22032ABB3A41D773D2B6342034902.A22E87CDD37DBE75A5E52412DC874AC16A7CFCA2',
+ ),
+ (
+ 'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflBb0OQx.js',
+ 84,
+ '123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQ0STUVWXYZ!"#$%&\'()*+,@./:;<=>'
+ ),
+ (
+ 'https://s.ytimg.com/yts/jsbin/html5player-en_US-vfl9FYC6l.js',
+ 83,
+ '123456789abcdefghijklmnopqr0tuvwxyzABCDETGHIJKLMNOPQRS>UVWXYZ!"#$%&\'()*+,-./:;<=F'
+ ),
+ (
+ 'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflCGk6yw/html5player.js',
+ '4646B5181C6C3020DF1D9C7FCFEA.AD80ABF70C39BD369CCCAE780AFBB98FA6B6CB42766249D9488C288',
+ '82C8849D94266724DC6B6AF89BBFA087EACCD963.B93C07FBA084ACAEFCF7C9D1FD0203C6C1815B6B'
+ ),
+ (
+ 'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflKjOTVq/html5player.js',
+ '312AA52209E3623129A412D56A40F11CB0AF14AE.3EE09501CB14E3BCDC3B2AE808BF3F1D14E7FBF12',
+ '112AA5220913623229A412D56A40F11CB0AF14AE.3EE0950FCB14EEBCDC3B2AE808BF331D14E7FBF3',
+ ),
+ (
+ 'https://www.youtube.com/s/player/6ed0d907/player_ias.vflset/en_US/base.js',
+ '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
+ 'AOq0QJ8wRAIgXmPlOPSBkkUs1bYFYlJCfe29xx8j7v1pDL2QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0',
+ ),
+]
+
+_NSIG_TESTS = [
+ (
+ 'https://www.youtube.com/s/player/7862ca1f/player_ias.vflset/en_US/base.js',
+ 'X_LCxVDjAavgE5t', 'yxJ1dM6iz5ogUg',
+ ),
+ (
+ 'https://www.youtube.com/s/player/9216d1f7/player_ias.vflset/en_US/base.js',
+ 'SLp9F5bwjAdhE9F-', 'gWnb9IK2DJ8Q1w',
+ ),
+ (
+ 'https://www.youtube.com/s/player/f8cb7a3b/player_ias.vflset/en_US/base.js',
+ 'oBo2h5euWy6osrUt', 'ivXHpm7qJjJN',
+ ),
+ (
+ 'https://www.youtube.com/s/player/2dfe380c/player_ias.vflset/en_US/base.js',
+ 'oBo2h5euWy6osrUt', '3DIBbn3qdQ',
+ ),
+ (
+ 'https://www.youtube.com/s/player/f1ca6900/player_ias.vflset/en_US/base.js',
+ 'cu3wyu6LQn2hse', 'jvxetvmlI9AN9Q',
+ ),
+ (
+ 'https://www.youtube.com/s/player/8040e515/player_ias.vflset/en_US/base.js',
+ 'wvOFaY-yjgDuIEg5', 'HkfBFDHmgw4rsw',
+ ),
+ (
+ 'https://www.youtube.com/s/player/e06dea74/player_ias.vflset/en_US/base.js',
+ 'AiuodmaDDYw8d3y4bf', 'ankd8eza2T6Qmw',
+ ),
+ (
+ 'https://www.youtube.com/s/player/5dd88d1d/player-plasma-ias-phone-en_US.vflset/base.js',
+ 'kSxKFLeqzv_ZyHSAt', 'n8gS8oRlHOxPFA',
+ ),
+ (
+ 'https://www.youtube.com/s/player/324f67b9/player_ias.vflset/en_US/base.js',
+ 'xdftNy7dh9QGnhW', '22qLGxrmX8F1rA',
+ ),
+ (
+ 'https://www.youtube.com/s/player/4c3f79c5/player_ias.vflset/en_US/base.js',
+ 'TDCstCG66tEAO5pR9o', 'dbxNtZ14c-yWyw',
+ ),
+ (
+ 'https://www.youtube.com/s/player/c81bbb4a/player_ias.vflset/en_US/base.js',
+ 'gre3EcLurNY2vqp94', 'Z9DfGxWP115WTg',
+ ),
+ (
+ 'https://www.youtube.com/s/player/1f7d5369/player_ias.vflset/en_US/base.js',
+ 'batNX7sYqIJdkJ', 'IhOkL_zxbkOZBw',
+ ),
+ (
+ 'https://www.youtube.com/s/player/009f1d77/player_ias.vflset/en_US/base.js',
+ '5dwFHw8aFWQUQtffRq', 'audescmLUzI3jw',
+ ),
+ (
+ 'https://www.youtube.com/s/player/dc0c6770/player_ias.vflset/en_US/base.js',
+ '5EHDMgYLV6HPGk_Mu-kk', 'n9lUJLHbxUI0GQ',
+ ),
+ (
+ 'https://www.youtube.com/s/player/113ca41c/player_ias.vflset/en_US/base.js',
+ 'cgYl-tlYkhjT7A', 'hI7BBr2zUgcmMg',
+ ),
+ (
+ 'https://www.youtube.com/s/player/c57c113c/player_ias.vflset/en_US/base.js',
+ 'M92UUMHa8PdvPd3wyM', '3hPqLJsiNZx7yA',
+ ),
+ (
+ 'https://www.youtube.com/s/player/5a3b6271/player_ias.vflset/en_US/base.js',
+ 'B2j7f_UPT4rfje85Lu_e', 'm5DmNymaGQ5RdQ',
+ ),
+ (
+ 'https://www.youtube.com/s/player/7a062b77/player_ias.vflset/en_US/base.js',
+ 'NRcE3y3mVtm_cV-W', 'VbsCYUATvqlt5w',
+ ),
+ (
+ 'https://www.youtube.com/s/player/dac945fd/player_ias.vflset/en_US/base.js',
+ 'o8BkRxXhuYsBCWi6RplPdP', '3Lx32v_hmzTm6A',
+ ),
+ (
+ 'https://www.youtube.com/s/player/6f20102c/player_ias.vflset/en_US/base.js',
+ 'lE8DhoDmKqnmJJ', 'pJTTX6XyJP2BYw',
+ ),
+ (
+ 'https://www.youtube.com/s/player/cfa9e7cb/player_ias.vflset/en_US/base.js',
+ 'aCi3iElgd2kq0bxVbQ', 'QX1y8jGb2IbZ0w',
+ ),
+ (
+ 'https://www.youtube.com/s/player/8c7583ff/player_ias.vflset/en_US/base.js',
+ '1wWCVpRR96eAmMI87L', 'KSkWAVv1ZQxC3A',
+ ),
+ (
+ 'https://www.youtube.com/s/player/b7910ca8/player_ias.vflset/en_US/base.js',
+ '_hXMCwMt9qE310D', 'LoZMgkkofRMCZQ',
+ ),
+]
+
+
+@is_download_test
+class TestPlayerInfo(unittest.TestCase):
+ def test_youtube_extract_player_info(self):
+ PLAYER_URLS = (
+ ('https://www.youtube.com/s/player/4c3f79c5/player_ias.vflset/en_US/base.js', '4c3f79c5'),
+ ('https://www.youtube.com/s/player/64dddad9/player_ias.vflset/en_US/base.js', '64dddad9'),
+ ('https://www.youtube.com/s/player/64dddad9/player_ias.vflset/fr_FR/base.js', '64dddad9'),
+ ('https://www.youtube.com/s/player/64dddad9/player-plasma-ias-phone-en_US.vflset/base.js', '64dddad9'),
+ ('https://www.youtube.com/s/player/64dddad9/player-plasma-ias-phone-de_DE.vflset/base.js', '64dddad9'),
+ ('https://www.youtube.com/s/player/64dddad9/player-plasma-ias-tablet-en_US.vflset/base.js', '64dddad9'),
+ # obsolete
+ ('https://www.youtube.com/yts/jsbin/player_ias-vfle4-e03/en_US/base.js', 'vfle4-e03'),
+ ('https://www.youtube.com/yts/jsbin/player_ias-vfl49f_g4/en_US/base.js', 'vfl49f_g4'),
+ ('https://www.youtube.com/yts/jsbin/player_ias-vflCPQUIL/en_US/base.js', 'vflCPQUIL'),
+ ('https://www.youtube.com/yts/jsbin/player-vflzQZbt7/en_US/base.js', 'vflzQZbt7'),
+ ('https://www.youtube.com/yts/jsbin/player-en_US-vflaxXRn1/base.js', 'vflaxXRn1'),
+ ('https://s.ytimg.com/yts/jsbin/html5player-en_US-vflXGBaUN.js', 'vflXGBaUN'),
+ ('https://s.ytimg.com/yts/jsbin/html5player-en_US-vflKjOTVq/html5player.js', 'vflKjOTVq'),
+ )
+ for player_url, expected_player_id in PLAYER_URLS:
+ player_id = YoutubeIE._extract_player_info(player_url)
+ self.assertEqual(player_id, expected_player_id)
+
+
+@is_download_test
+class TestSignature(unittest.TestCase):
+ def setUp(self):
+ TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+ self.TESTDATA_DIR = os.path.join(TEST_DIR, 'testdata/sigs')
+ if not os.path.exists(self.TESTDATA_DIR):
+ os.mkdir(self.TESTDATA_DIR)
+
+ def tearDown(self):
+ with contextlib.suppress(OSError):
+ for f in os.listdir(self.TESTDATA_DIR):
+                os.remove(os.path.join(self.TESTDATA_DIR, f))  # os.listdir yields bare names; join with the cache dir
+
+
+def t_factory(name, sig_func, url_pattern):
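+    # Returns a helper that attaches one TestSignature method per (player URL, input,
+    # expected) tuple; the player JS is downloaded into TESTDATA_DIR on first use and
+    # cached for subsequent runs.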
+ def make_tfunc(url, sig_input, expected_sig):
+ m = url_pattern.match(url)
+ assert m, '%r should follow URL format' % url
+ test_id = m.group('id')
+
+ def test_func(self):
+ basename = f'player-{name}-{test_id}.js'
+ fn = os.path.join(self.TESTDATA_DIR, basename)
+
+ if not os.path.exists(fn):
+ urllib.request.urlretrieve(url, fn)
+ with open(fn, encoding='utf-8') as testf:
+ jscode = testf.read()
+ self.assertEqual(sig_func(jscode, sig_input), expected_sig)
+
+ test_func.__name__ = f'test_{name}_js_{test_id}'
+ setattr(TestSignature, test_func.__name__, test_func)
+ return make_tfunc
+
+
+def signature(jscode, sig_input):
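+    # An integer sig_input stands for "the first N characters of string.printable";
+    # a string input is passed to the extracted signature function as-is.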
+ func = YoutubeIE(FakeYDL())._parse_sig_js(jscode)
+ src_sig = (
+ str(string.printable[:sig_input])
+ if isinstance(sig_input, int) else sig_input)
+ return func(src_sig)
+
+
+def n_sig(jscode, sig_input):
+ funcname = YoutubeIE(FakeYDL())._extract_n_function_name(jscode)
+ return JSInterpreter(jscode).call_function(funcname, sig_input)
+
+
+make_sig_test = t_factory(
+ 'signature', signature, re.compile(r'.*(?:-|/player/)(?P<id>[a-zA-Z0-9_-]+)(?:/.+\.js|(?:/watch_as3|/html5player)?\.[a-z]+)$'))
+for test_spec in _SIG_TESTS:
+ make_sig_test(*test_spec)
+
+make_nsig_test = t_factory(
+ 'nsig', n_sig, re.compile(r'.+/player/(?P<id>[a-zA-Z0-9_-]+)/.+.js$'))
+for test_spec in _NSIG_TESTS:
+ make_nsig_test(*test_spec)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/testcert.pem b/test/testcert.pem
new file mode 100644
index 0000000..b3e0f00
--- /dev/null
+++ b/test/testcert.pem
@@ -0,0 +1,52 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDMF0bAzaHAdIyB
+HRmnIp4vv40lGqEePmWqicCl0QZ0wsb5dNysSxSa7330M2QeQopGfdaUYF1uTcNp
+Qx6ECgBSfg+RrOBI7r/u4F+sKX8MUXVaf/5QoBUrGNGSn/pp7HMGOuQqO6BVg4+h
+A1ySSwUG8mZItLRry1ISyErmW8b9xlqfd97uLME/5tX+sMelRFjUbAx8A4CK58Ev
+mMguHVTlXzx5RMdYcf1VScYcjlV/qA45uzP8zwI5aigfcmUD+tbGuQRhKxUhmw0J
+aobtOR6+JSOAULW5gYa/egE4dWLwbyM6b6eFbdnjlQzEA1EW7ChMPAW/Mo83KyiP
+tKMCSQulAgMBAAECggEALCfBDAexPjU5DNoh6bIorUXxIJzxTNzNHCdvgbCGiA54
+BBKPh8s6qwazpnjT6WQWDIg/O5zZufqjE4wM9x4+0Zoqfib742ucJO9wY4way6x4
+Clt0xzbLPabB+MoZ4H7ip+9n2+dImhe7pGdYyOHoNYeOL57BBi1YFW42Hj6u/8pd
+63YCXisto3Rz1YvRQVjwsrS+cRKZlzAFQRviL30jav7Wh1aWEfcXxjj4zhm8pJdk
+ITGtq6howz57M0NtX6hZnfe8ywzTnDFIGKIMA2cYHuYJcBh9bc4tCGubTvTKK9UE
+8fM+f6UbfGqfpKCq1mcgs0XMoFDSzKS9+mSJn0+5JQKBgQD+OCKaeH3Yzw5zGnlw
+XuQfMJGNcgNr+ImjmvzUAC2fAZUJLAcQueE5kzMv5Fmd+EFE2CEX1Vit3tg0SXvA
+G+bq609doILHMA03JHnV1npO/YNIhG3AAtJlKYGxQNfWH9mflYj9mEui8ZFxG52o
+zWhHYuifOjjZszUR+/eio6NPzwKBgQDNhUBTrT8LIX4SE/EFUiTlYmWIvOMgXYvN
+8Cm3IRNQ/yyphZaXEU0eJzfX5uCDfSVOgd6YM/2pRah+t+1Hvey4H8e0GVTu5wMP
+gkkqwKPGIR1YOmlw6ippqwvoJD7LuYrm6Q4D6e1PvkjwCq6lEndrOPmPrrXNd0JJ
+XO60y3U2SwKBgQDLkyZarryQXxcCI6Q10Tc6pskYDMIit095PUbTeiUOXNT9GE28
+Hi32ziLCakk9kCysNasii81MxtQ54tJ/f5iGbNMMddnkKl2a19Hc5LjjAm4cJzg/
+98KGEhvyVqvAo5bBDZ06/rcrD+lZOzUglQS5jcIcqCIYa0LHWQ/wJLxFzwKBgFcZ
+1SRhdSmDfUmuF+S4ZpistflYjC3IV5rk4NkS9HvMWaJS0nqdw4A3AMzItXgkjq4S
+DkOVLTkTI5Do5HAWRv/VwC5M2hkR4NMu1VGAKSisGiKtRsirBWSZMEenLNHshbjN
+Jrpz5rZ4H7NT46ZkCCZyFBpX4gb9NyOedjA7Via3AoGARF8RxbYjnEGGFuhnbrJB
+FTPR0vaL4faY3lOgRZ8jOG9V2c9Hzi/y8a8TU4C11jnJSDqYCXBTd5XN28npYxtD
+pjRsCwy6ze+yvYXPO7C978eMG3YRyj366NXUxnXN59ibwe/lxi2OD9z8J1LEdF6z
+VJua1Wn8HKxnXMI61DhTCSo=
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIEEzCCAvugAwIBAgIJAK1haYi6gmSKMA0GCSqGSIb3DQEBCwUAMIGeMQswCQYD
+VQQGEwJERTEMMAoGA1UECAwDTlJXMRQwEgYDVQQHDAtEdWVzc2VsZG9yZjEbMBkG
+A1UECgwSeW91dHViZS1kbCBwcm9qZWN0MRkwFwYDVQQLDBB5b3V0dWJlLWRsIHRl
+c3RzMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEHBoaWhhZ0Bw
+aGloYWcuZGUwIBcNMTUwMTMwMDExNTA4WhgPMjExNTAxMDYwMTE1MDhaMIGeMQsw
+CQYDVQQGEwJERTEMMAoGA1UECAwDTlJXMRQwEgYDVQQHDAtEdWVzc2VsZG9yZjEb
+MBkGA1UECgwSeW91dHViZS1kbCBwcm9qZWN0MRkwFwYDVQQLDBB5b3V0dWJlLWRs
+IHRlc3RzMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEHBoaWhh
+Z0BwaGloYWcuZGUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDMF0bA
+zaHAdIyBHRmnIp4vv40lGqEePmWqicCl0QZ0wsb5dNysSxSa7330M2QeQopGfdaU
+YF1uTcNpQx6ECgBSfg+RrOBI7r/u4F+sKX8MUXVaf/5QoBUrGNGSn/pp7HMGOuQq
+O6BVg4+hA1ySSwUG8mZItLRry1ISyErmW8b9xlqfd97uLME/5tX+sMelRFjUbAx8
+A4CK58EvmMguHVTlXzx5RMdYcf1VScYcjlV/qA45uzP8zwI5aigfcmUD+tbGuQRh
+KxUhmw0JaobtOR6+JSOAULW5gYa/egE4dWLwbyM6b6eFbdnjlQzEA1EW7ChMPAW/
+Mo83KyiPtKMCSQulAgMBAAGjUDBOMB0GA1UdDgQWBBTBUZoqhQkzHQ6xNgZfFxOd
+ZEVt8TAfBgNVHSMEGDAWgBTBUZoqhQkzHQ6xNgZfFxOdZEVt8TAMBgNVHRMEBTAD
+AQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCUOCl3T/J9B08Z+ijfOJAtkbUaEHuVZb4x
+5EpZSy2ZbkLvtsftMFieHVNXn9dDswQc5qjYStCC4o60LKw4M6Y63FRsAZ/DNaqb
+PY3jyCyuugZ8/sNf50vHYkAcF7SQYqOQFQX4TQsNUk2xMJIt7H0ErQFmkf/u3dg6
+cy89zkT462IwxzSG7NNhIlRkL9o5qg+Y1mF9eZA1B0rcL6hO24PPTHOd90HDChBu
+SZ6XMi/LzYQSTf0Vg2R+uMIVlzSlkdcZ6sqVnnqeLL8dFyIa4e9sj/D4ZCYP8Mqe
+Z73H5/NNhmwCHRqVUTgm307xblQaWGhwAiDkaRvRW2aJQ0qGEdZK
+-----END CERTIFICATE-----
diff --git a/test/testdata/certificate/ca.crt b/test/testdata/certificate/ca.crt
new file mode 100644
index 0000000..ddf7be7
--- /dev/null
+++ b/test/testdata/certificate/ca.crt
@@ -0,0 +1,10 @@
+-----BEGIN CERTIFICATE-----
+MIIBfDCCASOgAwIBAgIUUgngoxFpuWft8gjj3uEFoqJyoJowCgYIKoZIzj0EAwIw
+FDESMBAGA1UEAwwJeXRkbHB0ZXN0MB4XDTIyMDQxNTAzMDEwMVoXDTM4MTAxNTAz
+MDEwMVowFDESMBAGA1UEAwwJeXRkbHB0ZXN0MFkwEwYHKoZIzj0CAQYIKoZIzj0D
+AQcDQgAEcTaKMtIn2/1kgid1zXFpLm87FMT5PP3/bltKVVH3DLO//0kUslCHYxFU
+KpcCfVt9aueRyUFi1TNkkkEZ9D6fbqNTMFEwHQYDVR0OBBYEFBdY2rVNLFGM6r1F
+iuamNDaiq0QoMB8GA1UdIwQYMBaAFBdY2rVNLFGM6r1FiuamNDaiq0QoMA8GA1Ud
+EwEB/wQFMAMBAf8wCgYIKoZIzj0EAwIDRwAwRAIgXJg2jio1kow2g/iP54Qq+iI2
+m4EAvZiY0Im/Ni3PHawCIC6KCl6QcHANbeq8ckOXNGusjl6OWhvEM3uPBPhqskq1
+-----END CERTIFICATE-----
diff --git a/test/testdata/certificate/ca.key b/test/testdata/certificate/ca.key
new file mode 100644
index 0000000..38920d5
--- /dev/null
+++ b/test/testdata/certificate/ca.key
@@ -0,0 +1,5 @@
+-----BEGIN EC PRIVATE KEY-----
+MHcCAQEEIG2L1bHdl3PnaLiJ7Zm8aAGCj4GiVbSbXQcrJAdL+yqOoAoGCCqGSM49
+AwEHoUQDQgAEcTaKMtIn2/1kgid1zXFpLm87FMT5PP3/bltKVVH3DLO//0kUslCH
+YxFUKpcCfVt9aueRyUFi1TNkkkEZ9D6fbg==
+-----END EC PRIVATE KEY-----
diff --git a/test/testdata/certificate/ca.srl b/test/testdata/certificate/ca.srl
new file mode 100644
index 0000000..de2d1ea
--- /dev/null
+++ b/test/testdata/certificate/ca.srl
@@ -0,0 +1 @@
+4A260C33C4D34612646E6321E1E767DF1A95EF0B
diff --git a/test/testdata/certificate/client.crt b/test/testdata/certificate/client.crt
new file mode 100644
index 0000000..874622f
--- /dev/null
+++ b/test/testdata/certificate/client.crt
@@ -0,0 +1,9 @@
+-----BEGIN CERTIFICATE-----
+MIIBIzCBygIUSiYMM8TTRhJkbmMh4edn3xqV7wswCgYIKoZIzj0EAwIwFDESMBAG
+A1UEAwwJeXRkbHB0ZXN0MB4XDTIyMDQxNTAzMDEyN1oXDTM4MTAxNTAzMDEyN1ow
+FTETMBEGA1UEAwwKeXRkbHB0ZXN0MjBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IA
+BKREKVDWfLKZknzYg+BUkmTn43f2pl/LNSyKPtXo/UV7hhp6JXIq3ZuZ7rubyuMS
+XNuH+2Cl9msSpJB2LhJs5kcwCgYIKoZIzj0EAwIDSAAwRQIhAMRr46vO25/5nUhD
+aHp4L67AeSvrjvSFHfubyD3Kr5dwAiA8EfOgVxc8Qh6ozTcbXO/WnBfS48ZFRSQY
+D0dB8M1kJw==
+-----END CERTIFICATE-----
diff --git a/test/testdata/certificate/client.csr b/test/testdata/certificate/client.csr
new file mode 100644
index 0000000..2d5d7a5
--- /dev/null
+++ b/test/testdata/certificate/client.csr
@@ -0,0 +1,7 @@
+-----BEGIN CERTIFICATE REQUEST-----
+MIHQMHcCAQAwFTETMBEGA1UEAwwKeXRkbHB0ZXN0MjBZMBMGByqGSM49AgEGCCqG
+SM49AwEHA0IABKREKVDWfLKZknzYg+BUkmTn43f2pl/LNSyKPtXo/UV7hhp6JXIq
+3ZuZ7rubyuMSXNuH+2Cl9msSpJB2LhJs5kegADAKBggqhkjOPQQDAgNJADBGAiEA
+1LZ72mtPmVxhGtdMvpZ0fyA68H2RC5IMHpLq18T55UcCIQDKpkXXVTvAzS0JioCq
+6kiYq8Oxx6ZMoI+11k75/Kip1g==
+-----END CERTIFICATE REQUEST-----
diff --git a/test/testdata/certificate/client.key b/test/testdata/certificate/client.key
new file mode 100644
index 0000000..e47389b
--- /dev/null
+++ b/test/testdata/certificate/client.key
@@ -0,0 +1,5 @@
+-----BEGIN EC PRIVATE KEY-----
+MHcCAQEEIAW6h9hwT0Aha+JBukgmHnrKRPoqPNWYA86ic0UaKHs8oAoGCCqGSM49
+AwEHoUQDQgAEpEQpUNZ8spmSfNiD4FSSZOfjd/amX8s1LIo+1ej9RXuGGnolcird
+m5nuu5vK4xJc24f7YKX2axKkkHYuEmzmRw==
+-----END EC PRIVATE KEY-----
diff --git a/test/testdata/certificate/clientencrypted.key b/test/testdata/certificate/clientencrypted.key
new file mode 100644
index 0000000..0baee37
--- /dev/null
+++ b/test/testdata/certificate/clientencrypted.key
@@ -0,0 +1,8 @@
+-----BEGIN EC PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: AES-256-CBC,4B39160146F15544922E553E08299A35
+
+96A7/iBkIfTVb8r2812ued2pS49FfVY4Ppz/45OGF0uFayMtMl8/GuEBCamuhFXS
+rnOOpco96TTeeKZHqR45wnf4tgHM8IjoQ6H0EX3lVF19OHnArAgrGYtohWUGSyGn
+IgLJFdUewIjdI7XApTJprQFE5E2tETXFA95mCz88u1c=
+-----END EC PRIVATE KEY-----
diff --git a/test/testdata/certificate/clientwithencryptedkey.crt b/test/testdata/certificate/clientwithencryptedkey.crt
new file mode 100644
index 0000000..f357e4c
--- /dev/null
+++ b/test/testdata/certificate/clientwithencryptedkey.crt
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE-----
+MIIBIzCBygIUSiYMM8TTRhJkbmMh4edn3xqV7wswCgYIKoZIzj0EAwIwFDESMBAG
+A1UEAwwJeXRkbHB0ZXN0MB4XDTIyMDQxNTAzMDEyN1oXDTM4MTAxNTAzMDEyN1ow
+FTETMBEGA1UEAwwKeXRkbHB0ZXN0MjBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IA
+BKREKVDWfLKZknzYg+BUkmTn43f2pl/LNSyKPtXo/UV7hhp6JXIq3ZuZ7rubyuMS
+XNuH+2Cl9msSpJB2LhJs5kcwCgYIKoZIzj0EAwIDSAAwRQIhAMRr46vO25/5nUhD
+aHp4L67AeSvrjvSFHfubyD3Kr5dwAiA8EfOgVxc8Qh6ozTcbXO/WnBfS48ZFRSQY
+D0dB8M1kJw==
+-----END CERTIFICATE-----
+-----BEGIN EC PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: AES-256-CBC,4B39160146F15544922E553E08299A35
+
+96A7/iBkIfTVb8r2812ued2pS49FfVY4Ppz/45OGF0uFayMtMl8/GuEBCamuhFXS
+rnOOpco96TTeeKZHqR45wnf4tgHM8IjoQ6H0EX3lVF19OHnArAgrGYtohWUGSyGn
+IgLJFdUewIjdI7XApTJprQFE5E2tETXFA95mCz88u1c=
+-----END EC PRIVATE KEY-----
diff --git a/test/testdata/certificate/clientwithkey.crt b/test/testdata/certificate/clientwithkey.crt
new file mode 100644
index 0000000..942f6e2
--- /dev/null
+++ b/test/testdata/certificate/clientwithkey.crt
@@ -0,0 +1,14 @@
+-----BEGIN CERTIFICATE-----
+MIIBIzCBygIUSiYMM8TTRhJkbmMh4edn3xqV7wswCgYIKoZIzj0EAwIwFDESMBAG
+A1UEAwwJeXRkbHB0ZXN0MB4XDTIyMDQxNTAzMDEyN1oXDTM4MTAxNTAzMDEyN1ow
+FTETMBEGA1UEAwwKeXRkbHB0ZXN0MjBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IA
+BKREKVDWfLKZknzYg+BUkmTn43f2pl/LNSyKPtXo/UV7hhp6JXIq3ZuZ7rubyuMS
+XNuH+2Cl9msSpJB2LhJs5kcwCgYIKoZIzj0EAwIDSAAwRQIhAMRr46vO25/5nUhD
+aHp4L67AeSvrjvSFHfubyD3Kr5dwAiA8EfOgVxc8Qh6ozTcbXO/WnBfS48ZFRSQY
+D0dB8M1kJw==
+-----END CERTIFICATE-----
+-----BEGIN EC PRIVATE KEY-----
+MHcCAQEEIAW6h9hwT0Aha+JBukgmHnrKRPoqPNWYA86ic0UaKHs8oAoGCCqGSM49
+AwEHoUQDQgAEpEQpUNZ8spmSfNiD4FSSZOfjd/amX8s1LIo+1ej9RXuGGnolcird
+m5nuu5vK4xJc24f7YKX2axKkkHYuEmzmRw==
+-----END EC PRIVATE KEY-----
diff --git a/test/testdata/certificate/instructions.md b/test/testdata/certificate/instructions.md
new file mode 100644
index 0000000..b0e3fbd
--- /dev/null
+++ b/test/testdata/certificate/instructions.md
@@ -0,0 +1,19 @@
+# Generate certificates for client cert tests
+
+## CA
+```sh
+openssl ecparam -name prime256v1 -genkey -noout -out ca.key
+openssl req -new -x509 -sha256 -days 6027 -key ca.key -out ca.crt -subj "/CN=ytdlptest"
+```
+
+## Client
+```sh
+openssl ecparam -name prime256v1 -genkey -noout -out client.key
+openssl ec -in client.key -out clientencrypted.key -passout pass:foobar -aes256
+openssl req -new -sha256 -key client.key -out client.csr -subj "/CN=ytdlptest2"
+openssl x509 -req -in client.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out client.crt -days 6027 -sha256
+cp client.crt clientwithkey.crt
+cp client.crt clientwithencryptedkey.crt
+cat client.key >> clientwithkey.crt
+cat clientencrypted.key >> clientwithencryptedkey.crt
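+
+# Optional sanity check (not part of the original instructions):
+# confirm the client certificate chains back to the CA
+openssl verify -CAfile ca.crt client.crt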
+``` \ No newline at end of file
diff --git a/test/testdata/cookies/httponly_cookies.txt b/test/testdata/cookies/httponly_cookies.txt
new file mode 100644
index 0000000..c46541d
--- /dev/null
+++ b/test/testdata/cookies/httponly_cookies.txt
@@ -0,0 +1,6 @@
+# Netscape HTTP Cookie File
+# http://curl.haxx.se/rfc/cookie_spec.html
+# This is a generated file! Do not edit.
+
+#HttpOnly_www.foobar.foobar FALSE / TRUE 2147483647 HTTPONLY_COOKIE HTTPONLY_COOKIE_VALUE
+www.foobar.foobar FALSE / TRUE 2147483647 JS_ACCESSIBLE_COOKIE JS_ACCESSIBLE_COOKIE_VALUE
diff --git a/test/testdata/cookies/malformed_cookies.txt b/test/testdata/cookies/malformed_cookies.txt
new file mode 100644
index 0000000..17bc403
--- /dev/null
+++ b/test/testdata/cookies/malformed_cookies.txt
@@ -0,0 +1,9 @@
+# Netscape HTTP Cookie File
+# http://curl.haxx.se/rfc/cookie_spec.html
+# This is a generated file! Do not edit.
+
+# Cookie file entry with invalid number of fields - 6 instead of 7
+www.foobar.foobar FALSE / FALSE 0 COOKIE
+
+# Cookie file entry with invalid expires at
+www.foobar.foobar FALSE / FALSE 1.7976931348623157e+308 COOKIE VALUE
diff --git a/test/testdata/cookies/session_cookies.txt b/test/testdata/cookies/session_cookies.txt
new file mode 100644
index 0000000..f6996f0
--- /dev/null
+++ b/test/testdata/cookies/session_cookies.txt
@@ -0,0 +1,6 @@
+# Netscape HTTP Cookie File
+# http://curl.haxx.se/rfc/cookie_spec.html
+# This is a generated file! Do not edit.
+
+www.foobar.foobar FALSE / TRUE YoutubeDLExpiresEmpty YoutubeDLExpiresEmptyValue
+www.foobar.foobar FALSE / TRUE 0 YoutubeDLExpires0 YoutubeDLExpires0Value
diff --git a/test/testdata/f4m/custom_base_url.f4m b/test/testdata/f4m/custom_base_url.f4m
new file mode 100644
index 0000000..74e1539
--- /dev/null
+++ b/test/testdata/f4m/custom_base_url.f4m
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<manifest xmlns="http://ns.adobe.com/f4m/1.0">
+ <streamType>recorded</streamType>
+ <baseURL>http://vod.livestream.com/events/0000000000673980/</baseURL>
+ <duration>269.293</duration>
+ <bootstrapInfo profile="named" id="bootstrap_1">AAAAm2Fic3QAAAAAAAAAAQAAAAPoAAAAAAAEG+0AAAAAAAAAAAAAAAAAAQAAABlhc3J0AAAAAAAAAAABAAAAAQAAAC4BAAAAVmFmcnQAAAAAAAAD6AAAAAAEAAAAAQAAAAAAAAAAAAAXcAAAAC0AAAAAAAQHQAAAE5UAAAAuAAAAAAAEGtUAAAEYAAAAAAAAAAAAAAAAAAAAAAA=</bootstrapInfo>
+ <media url="b90f532f-b0f6-4f4e-8289-706d490b2fd8_2292" bootstrapInfoId="bootstrap_1" bitrate="2148" width="1280" height="720" videoCodec="avc1.4d401f" audioCodec="mp4a.40.2">
+ <metadata>AgAKb25NZXRhRGF0YQgAAAAIAAhkdXJhdGlvbgBAcNSwIMSbpgAFd2lkdGgAQJQAAAAAAAAABmhlaWdodABAhoAAAAAAAAAJZnJhbWVyYXRlAEA4/7DoLwW3AA12aWRlb2RhdGFyYXRlAECe1DLgjcobAAx2aWRlb2NvZGVjaWQAQBwAAAAAAAAADWF1ZGlvZGF0YXJhdGUAQGSimlvaPKQADGF1ZGlvY29kZWNpZABAJAAAAAAAAAAACQ==</metadata>
+ </media>
+</manifest>
diff --git a/test/testdata/ism/ec-3_test.Manifest b/test/testdata/ism/ec-3_test.Manifest
new file mode 100644
index 0000000..45f95de
--- /dev/null
+++ b/test/testdata/ism/ec-3_test.Manifest
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="utf-8"?><!--Transformed by VSMT using XSL stylesheet for rule Identity--><!-- Created with Unified Streaming Platform (version=1.10.12-18737) --><SmoothStreamingMedia MajorVersion="2" MinorVersion="0" TimeScale="10000000" Duration="370000000"><StreamIndex Type="audio" QualityLevels="1" TimeScale="10000000" Language="deu" Name="audio_deu" Chunks="19" Url="QualityLevels({bitrate})/Fragments(audio_deu={start time})?noStreamProfile=1"><QualityLevel Index="0" Bitrate="127802" CodecPrivateData="1190" SamplingRate="48000" Channels="2" BitsPerSample="16" PacketSize="4" AudioTag="255" FourCC="AACL" /><c t="0" d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="7253333" /></StreamIndex><StreamIndex Type="audio" QualityLevels="1" TimeScale="10000000" Language="deu" Name="audio_deu_1" Chunks="19" Url="QualityLevels({bitrate})/Fragments(audio_deu_1={start time})?noStreamProfile=1"><QualityLevel Index="0" Bitrate="224000" CodecPrivateData="00063F000000AF87FBA7022DFB42A4D405CD93843BDD0700200F00" FourCCData="0700200F00" SamplingRate="48000" Channels="6" BitsPerSample="16" PacketSize="896" AudioTag="65534" FourCC="EC-3" /><c t="0" d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="8320000" /></StreamIndex><StreamIndex Type="video" QualityLevels="8" TimeScale="10000000" Language="deu" Name="video_deu" Chunks="19" Url="QualityLevels({bitrate})/Fragments(video_deu={start time})?noStreamProfile=1" MaxWidth="1920" MaxHeight="1080" DisplayWidth="1920" DisplayHeight="1080"><QualityLevel Index="0" Bitrate="23909" CodecPrivateData="000000016742C00CDB06077E5C05A808080A00000300020000030009C0C02EE0177CC6300F142AE00000000168CA8DC8" MaxWidth="384" MaxHeight="216" FourCC="AVC1" /><QualityLevel Index="1" Bitrate="403188" CodecPrivateData="00000001674D4014E98323B602D4040405000003000100000300320F1429380000000168EAECF2" MaxWidth="400" MaxHeight="224" FourCC="AVC1" /><QualityLevel Index="2" Bitrate="680365" CodecPrivateData="00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2" MaxWidth="640" MaxHeight="360" FourCC="AVC1" /><QualityLevel Index="3" Bitrate="1253465" CodecPrivateData="00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2" MaxWidth="640" MaxHeight="360" FourCC="AVC1" /><QualityLevel Index="4" Bitrate="2121558" CodecPrivateData="00000001674D401EECA0601BD80B50101014000003000400000300C83C58B6580000000168E93B3C80" MaxWidth="768" MaxHeight="432" FourCC="AVC1" /><QualityLevel Index="5" Bitrate="3275545" CodecPrivateData="00000001674D4020ECA02802DD80B501010140000003004000000C83C60C65800000000168E93B3C80" MaxWidth="1280" MaxHeight="720" FourCC="AVC1" /><QualityLevel Index="6" Bitrate="5300196" CodecPrivateData="00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80" MaxWidth="1920" MaxHeight="1080" FourCC="AVC1" /><QualityLevel Index="7" Bitrate="8079312" CodecPrivateData="00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80" 
MaxWidth="1920" MaxHeight="1080" FourCC="AVC1" /><c t="0" d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="10000000" /></StreamIndex></SmoothStreamingMedia> \ No newline at end of file
diff --git a/test/testdata/ism/sintel.Manifest b/test/testdata/ism/sintel.Manifest
new file mode 100644
index 0000000..2ff8c24
--- /dev/null
+++ b/test/testdata/ism/sintel.Manifest
@@ -0,0 +1,988 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Created with Unified Streaming Platform (version=1.10.18-20255) -->
+<SmoothStreamingMedia
+ MajorVersion="2"
+ MinorVersion="0"
+ TimeScale="10000000"
+ Duration="8880746666">
+ <StreamIndex
+ Type="audio"
+ QualityLevels="1"
+ TimeScale="10000000"
+ Name="audio"
+ Chunks="445"
+ Url="QualityLevels({bitrate})/Fragments(audio={start time})">
+ <QualityLevel
+ Index="0"
+ Bitrate="128001"
+ CodecPrivateData="1190"
+ SamplingRate="48000"
+ Channels="2"
+ BitsPerSample="16"
+ PacketSize="4"
+ AudioTag="255"
+ FourCC="AACL" />
+ <c t="0" d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="20053333" />
+ <c d="20053333" />
+ <c d="20053334" />
+ <c d="19840000" />
+ <c d="746666" />
+ </StreamIndex>
+ <StreamIndex
+ Type="text"
+ QualityLevels="1"
+ TimeScale="10000000"
+ Language="eng"
+ Subtype="CAPT"
+ Name="textstream_eng"
+ Chunks="11"
+ Url="QualityLevels({bitrate})/Fragments(textstream_eng={start time})">
+ <QualityLevel
+ Index="0"
+ Bitrate="1000"
+ CodecPrivateData=""
+ FourCC="TTML" />
+ <c t="0" d="600000000" />
+ <c d="600000000" />
+ <c d="600000000" />
+ <c d="600000000" />
+ <c d="600000000" />
+ <c d="600000000" />
+ <c d="600000000" />
+ <c d="600000000" />
+ <c d="600000000" />
+ <c d="600000000" />
+ <c d="240000000" />
+ </StreamIndex>
+ <StreamIndex
+ Type="video"
+ QualityLevels="5"
+ TimeScale="10000000"
+ Name="video"
+ Chunks="444"
+ Url="QualityLevels({bitrate})/Fragments(video={start time})"
+ MaxWidth="1688"
+ MaxHeight="720"
+ DisplayWidth="1689"
+ DisplayHeight="720">
+ <QualityLevel
+ Index="0"
+ Bitrate="100000"
+ CodecPrivateData="00000001674D401FDA0544EFFC2D002CBC40000003004000000C03C60CA80000000168EF32C8"
+ MaxWidth="336"
+ MaxHeight="144"
+ FourCC="AVC1" />
+ <QualityLevel
+ Index="1"
+ Bitrate="326000"
+ CodecPrivateData="00000001674D401FDA0241FE23FFC3BC83BA44000003000400000300C03C60CA800000000168EF32C8"
+ MaxWidth="562"
+ MaxHeight="240"
+ FourCC="AVC1" />
+ <QualityLevel
+ Index="2"
+ Bitrate="698000"
+ CodecPrivateData="00000001674D401FDA0350BFB97FF06AF06AD1000003000100000300300F1832A00000000168EF32C8"
+ MaxWidth="844"
+ MaxHeight="360"
+ FourCC="AVC1" />
+ <QualityLevel
+ Index="3"
+ Bitrate="1493000"
+ CodecPrivateData="00000001674D401FDA011C3DE6FFF0D890D871000003000100000300300F1832A00000000168EF32C8"
+ MaxWidth="1126"
+ MaxHeight="480"
+ FourCC="AVC1" />
+ <QualityLevel
+ Index="4"
+ Bitrate="4482000"
+ CodecPrivateData="00000001674D401FDA01A816F97FFC1ABC1AB440000003004000000C03C60CA80000000168EF32C8"
+ MaxWidth="1688"
+ MaxHeight="720"
+ FourCC="AVC1" />
+ <c t="0" d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ <c d="20000000" />
+ </StreamIndex>
+</SmoothStreamingMedia>
diff --git a/test/testdata/m3u8/bipbop_16x9.m3u8 b/test/testdata/m3u8/bipbop_16x9.m3u8
new file mode 100644
index 0000000..1ce87dd
--- /dev/null
+++ b/test/testdata/m3u8/bipbop_16x9.m3u8
@@ -0,0 +1,38 @@
+#EXTM3U
+
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="bipbop_audio",LANGUAGE="eng",NAME="BipBop Audio 1",AUTOSELECT=YES,DEFAULT=YES
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="bipbop_audio",LANGUAGE="eng",NAME="BipBop Audio 2",AUTOSELECT=NO,DEFAULT=NO,URI="alternate_audio_aac/prog_index.m3u8"
+
+
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="subs",NAME="English",DEFAULT=YES,AUTOSELECT=YES,FORCED=NO,LANGUAGE="en",CHARACTERISTICS="public.accessibility.transcribes-spoken-dialog, public.accessibility.describes-music-and-sound",URI="subtitles/eng/prog_index.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="subs",NAME="English (Forced)",DEFAULT=NO,AUTOSELECT=NO,FORCED=YES,LANGUAGE="en",URI="subtitles/eng_forced/prog_index.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="subs",NAME="Français",DEFAULT=NO,AUTOSELECT=YES,FORCED=NO,LANGUAGE="fr",CHARACTERISTICS="public.accessibility.transcribes-spoken-dialog, public.accessibility.describes-music-and-sound",URI="subtitles/fra/prog_index.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="subs",NAME="Français (Forced)",DEFAULT=NO,AUTOSELECT=NO,FORCED=YES,LANGUAGE="fr",URI="subtitles/fra_forced/prog_index.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="subs",NAME="Español",DEFAULT=NO,AUTOSELECT=YES,FORCED=NO,LANGUAGE="es",CHARACTERISTICS="public.accessibility.transcribes-spoken-dialog, public.accessibility.describes-music-and-sound",URI="subtitles/spa/prog_index.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="subs",NAME="Español (Forced)",DEFAULT=NO,AUTOSELECT=NO,FORCED=YES,LANGUAGE="es",URI="subtitles/spa_forced/prog_index.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="subs",NAME="日本語",DEFAULT=NO,AUTOSELECT=YES,FORCED=NO,LANGUAGE="ja",CHARACTERISTICS="public.accessibility.transcribes-spoken-dialog, public.accessibility.describes-music-and-sound",URI="subtitles/jpn/prog_index.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="subs",NAME="日本語 (Forced)",DEFAULT=NO,AUTOSELECT=NO,FORCED=YES,LANGUAGE="ja",URI="subtitles/jpn_forced/prog_index.m3u8"
+
+
+#EXT-X-STREAM-INF:BANDWIDTH=263851,CODECS="mp4a.40.2, avc1.4d400d",RESOLUTION=416x234,AUDIO="bipbop_audio",SUBTITLES="subs"
+gear1/prog_index.m3u8
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=28451,CODECS="avc1.4d400d",URI="gear1/iframe_index.m3u8"
+
+#EXT-X-STREAM-INF:BANDWIDTH=577610,CODECS="mp4a.40.2, avc1.4d401e",RESOLUTION=640x360,AUDIO="bipbop_audio",SUBTITLES="subs"
+gear2/prog_index.m3u8
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=181534,CODECS="avc1.4d401e",URI="gear2/iframe_index.m3u8"
+
+#EXT-X-STREAM-INF:BANDWIDTH=915905,CODECS="mp4a.40.2, avc1.4d401f",RESOLUTION=960x540,AUDIO="bipbop_audio",SUBTITLES="subs"
+gear3/prog_index.m3u8
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=297056,CODECS="avc1.4d401f",URI="gear3/iframe_index.m3u8"
+
+#EXT-X-STREAM-INF:BANDWIDTH=1030138,CODECS="mp4a.40.2, avc1.4d401f",RESOLUTION=1280x720,AUDIO="bipbop_audio",SUBTITLES="subs"
+gear4/prog_index.m3u8
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=339492,CODECS="avc1.4d401f",URI="gear4/iframe_index.m3u8"
+
+#EXT-X-STREAM-INF:BANDWIDTH=1924009,CODECS="mp4a.40.2, avc1.4d401f",RESOLUTION=1920x1080,AUDIO="bipbop_audio",SUBTITLES="subs"
+gear5/prog_index.m3u8
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=669554,CODECS="avc1.4d401f",URI="gear5/iframe_index.m3u8"
+
+#EXT-X-STREAM-INF:BANDWIDTH=41457,CODECS="mp4a.40.2",AUDIO="bipbop_audio",SUBTITLES="subs"
+gear0/prog_index.m3u8
diff --git a/test/testdata/m3u8/img_bipbop_adv_example_fmp4.m3u8 b/test/testdata/m3u8/img_bipbop_adv_example_fmp4.m3u8
new file mode 100644
index 0000000..620ce04
--- /dev/null
+++ b/test/testdata/m3u8/img_bipbop_adv_example_fmp4.m3u8
@@ -0,0 +1,76 @@
+#EXTM3U
+#EXT-X-VERSION:6
+#EXT-X-INDEPENDENT-SEGMENTS
+
+
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=2168183,BANDWIDTH=2177116,CODECS="avc1.640020,mp4a.40.2",RESOLUTION=960x540,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud1",SUBTITLES="sub1"
+v5/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=7968416,BANDWIDTH=8001098,CODECS="avc1.64002a,mp4a.40.2",RESOLUTION=1920x1080,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud1",SUBTITLES="sub1"
+v9/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=6170000,BANDWIDTH=6312875,CODECS="avc1.64002a,mp4a.40.2",RESOLUTION=1920x1080,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud1",SUBTITLES="sub1"
+v8/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=4670769,BANDWIDTH=4943747,CODECS="avc1.64002a,mp4a.40.2",RESOLUTION=1920x1080,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud1",SUBTITLES="sub1"
+v7/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=3168702,BANDWIDTH=3216424,CODECS="avc1.640020,mp4a.40.2",RESOLUTION=1280x720,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud1",SUBTITLES="sub1"
+v6/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=1265132,BANDWIDTH=1268994,CODECS="avc1.64001e,mp4a.40.2",RESOLUTION=768x432,FRAME-RATE=30.000,CLOSED-CAPTIONS="cc1",AUDIO="aud1",SUBTITLES="sub1"
+v4/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=895755,BANDWIDTH=902298,CODECS="avc1.64001e,mp4a.40.2",RESOLUTION=640x360,FRAME-RATE=30.000,CLOSED-CAPTIONS="cc1",AUDIO="aud1",SUBTITLES="sub1"
+v3/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=530721,BANDWIDTH=541052,CODECS="avc1.640015,mp4a.40.2",RESOLUTION=480x270,FRAME-RATE=30.000,CLOSED-CAPTIONS="cc1",AUDIO="aud1",SUBTITLES="sub1"
+v2/prog_index.m3u8
+
+
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=2390686,BANDWIDTH=2399619,CODECS="avc1.640020,ac-3",RESOLUTION=960x540,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud2",SUBTITLES="sub1"
+v5/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=8190919,BANDWIDTH=8223601,CODECS="avc1.64002a,ac-3",RESOLUTION=1920x1080,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud2",SUBTITLES="sub1"
+v9/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=6392503,BANDWIDTH=6535378,CODECS="avc1.64002a,ac-3",RESOLUTION=1920x1080,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud2",SUBTITLES="sub1"
+v8/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=4893272,BANDWIDTH=5166250,CODECS="avc1.64002a,ac-3",RESOLUTION=1920x1080,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud2",SUBTITLES="sub1"
+v7/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=3391205,BANDWIDTH=3438927,CODECS="avc1.640020,ac-3",RESOLUTION=1280x720,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud2",SUBTITLES="sub1"
+v6/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=1487635,BANDWIDTH=1491497,CODECS="avc1.64001e,ac-3",RESOLUTION=768x432,FRAME-RATE=30.000,CLOSED-CAPTIONS="cc1",AUDIO="aud2",SUBTITLES="sub1"
+v4/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=1118258,BANDWIDTH=1124801,CODECS="avc1.64001e,ac-3",RESOLUTION=640x360,FRAME-RATE=30.000,CLOSED-CAPTIONS="cc1",AUDIO="aud2",SUBTITLES="sub1"
+v3/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=753224,BANDWIDTH=763555,CODECS="avc1.640015,ac-3",RESOLUTION=480x270,FRAME-RATE=30.000,CLOSED-CAPTIONS="cc1",AUDIO="aud2",SUBTITLES="sub1"
+v2/prog_index.m3u8
+
+
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=2198686,BANDWIDTH=2207619,CODECS="avc1.640020,ec-3",RESOLUTION=960x540,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud3",SUBTITLES="sub1"
+v5/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=7998919,BANDWIDTH=8031601,CODECS="avc1.64002a,ec-3",RESOLUTION=1920x1080,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud3",SUBTITLES="sub1"
+v9/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=6200503,BANDWIDTH=6343378,CODECS="avc1.64002a,ec-3",RESOLUTION=1920x1080,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud3",SUBTITLES="sub1"
+v8/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=4701272,BANDWIDTH=4974250,CODECS="avc1.64002a,ec-3",RESOLUTION=1920x1080,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud3",SUBTITLES="sub1"
+v7/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=3199205,BANDWIDTH=3246927,CODECS="avc1.640020,ec-3",RESOLUTION=1280x720,FRAME-RATE=60.000,CLOSED-CAPTIONS="cc1",AUDIO="aud3",SUBTITLES="sub1"
+v6/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=1295635,BANDWIDTH=1299497,CODECS="avc1.64001e,ec-3",RESOLUTION=768x432,FRAME-RATE=30.000,CLOSED-CAPTIONS="cc1",AUDIO="aud3",SUBTITLES="sub1"
+v4/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=926258,BANDWIDTH=932801,CODECS="avc1.64001e,ec-3",RESOLUTION=640x360,FRAME-RATE=30.000,CLOSED-CAPTIONS="cc1",AUDIO="aud3",SUBTITLES="sub1"
+v3/prog_index.m3u8
+#EXT-X-STREAM-INF:AVERAGE-BANDWIDTH=561224,BANDWIDTH=571555,CODECS="avc1.640015,ec-3",RESOLUTION=480x270,FRAME-RATE=30.000,CLOSED-CAPTIONS="cc1",AUDIO="aud3",SUBTITLES="sub1"
+v2/prog_index.m3u8
+
+
+#EXT-X-I-FRAME-STREAM-INF:AVERAGE-BANDWIDTH=183689,BANDWIDTH=187492,CODECS="avc1.64002a",RESOLUTION=1920x1080,URI="v7/iframe_index.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:AVERAGE-BANDWIDTH=132672,BANDWIDTH=136398,CODECS="avc1.640020",RESOLUTION=1280x720,URI="v6/iframe_index.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:AVERAGE-BANDWIDTH=97767,BANDWIDTH=101378,CODECS="avc1.640020",RESOLUTION=960x540,URI="v5/iframe_index.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:AVERAGE-BANDWIDTH=75722,BANDWIDTH=77818,CODECS="avc1.64001e",RESOLUTION=768x432,URI="v4/iframe_index.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:AVERAGE-BANDWIDTH=63522,BANDWIDTH=65091,CODECS="avc1.64001e",RESOLUTION=640x360,URI="v3/iframe_index.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:AVERAGE-BANDWIDTH=39678,BANDWIDTH=40282,CODECS="avc1.640015",RESOLUTION=480x270,URI="v2/iframe_index.m3u8"
+
+
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="aud1",LANGUAGE="en",NAME="English",AUTOSELECT=YES,DEFAULT=YES,CHANNELS="2",URI="a1/prog_index.m3u8"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="aud2",LANGUAGE="en",NAME="English",AUTOSELECT=YES,DEFAULT=YES,CHANNELS="6",URI="a2/prog_index.m3u8"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="aud3",LANGUAGE="en",NAME="English",AUTOSELECT=YES,DEFAULT=YES,CHANNELS="6",URI="a3/prog_index.m3u8"
+
+
+#EXT-X-MEDIA:TYPE=CLOSED-CAPTIONS,GROUP-ID="cc1",LANGUAGE="en",NAME="English",AUTOSELECT=YES,DEFAULT=YES,INSTREAM-ID="CC1"
+
+
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="sub1",LANGUAGE="en",NAME="English",AUTOSELECT=YES,DEFAULT=YES,FORCED=NO,URI="s1/en/prog_index.m3u8"
diff --git a/test/testdata/mpd/float_duration.mpd b/test/testdata/mpd/float_duration.mpd
new file mode 100644
index 0000000..8dc1d2d
--- /dev/null
+++ b/test/testdata/mpd/float_duration.mpd
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<MPD xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="urn:mpeg:dash:schema:mpd:2011" type="static" minBufferTime="PT2S" profiles="urn:mpeg:dash:profile:isoff-on-demand:2011" mediaPresentationDuration="PT6014S">
+ <Period bitstreamSwitching="true">
+ <AdaptationSet mimeType="audio/mp4" codecs="mp4a.40.2" startWithSAP="1" segmentAlignment="true">
+ <SegmentTemplate timescale="1000000" presentationTimeOffset="0" initialization="ai_$RepresentationID$.mp4d" media="a_$RepresentationID$_$Number$.mp4d" duration="2000000.0" startNumber="0"></SegmentTemplate>
+ <Representation id="318597" bandwidth="61587"></Representation>
+ </AdaptationSet>
+ <AdaptationSet mimeType="video/mp4" startWithSAP="1" segmentAlignment="true">
+ <SegmentTemplate timescale="1000000" presentationTimeOffset="0" initialization="vi_$RepresentationID$.mp4d" media="v_$RepresentationID$_$Number$.mp4d" duration="2000000.0" startNumber="0"></SegmentTemplate>
+ <Representation id="318597" codecs="avc1.42001f" width="340" height="192" bandwidth="318597"></Representation>
+ <Representation id="638590" codecs="avc1.42001f" width="512" height="288" bandwidth="638590"></Representation>
+ <Representation id="1022565" codecs="avc1.4d001f" width="688" height="384" bandwidth="1022565"></Representation>
+ <Representation id="2046506" codecs="avc1.4d001f" width="1024" height="576" bandwidth="2046506"></Representation>
+ <Representation id="3998017" codecs="avc1.640029" width="1280" height="720" bandwidth="3998017"></Representation>
+ <Representation id="5997485" codecs="avc1.640032" width="1920" height="1080" bandwidth="5997485"></Representation>
+ </AdaptationSet>
+ </Period>
+</MPD> \ No newline at end of file
diff --git a/test/testdata/mpd/subtitles.mpd b/test/testdata/mpd/subtitles.mpd
new file mode 100644
index 0000000..6f948ad
--- /dev/null
+++ b/test/testdata/mpd/subtitles.mpd
@@ -0,0 +1,351 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Created with Unified Streaming Platform (version=1.10.18-20255) -->
+<MPD
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns="urn:mpeg:dash:schema:mpd:2011"
+ xsi:schemaLocation="urn:mpeg:dash:schema:mpd:2011 http://standards.iso.org/ittf/PubliclyAvailableStandards/MPEG-DASH_schema_files/DASH-MPD.xsd"
+ type="static"
+ mediaPresentationDuration="PT14M48S"
+ maxSegmentDuration="PT1M"
+ minBufferTime="PT10S"
+ profiles="urn:mpeg:dash:profile:isoff-live:2011">
+ <Period
+ id="1"
+ duration="PT14M48S">
+ <BaseURL>dash/</BaseURL>
+ <AdaptationSet
+ id="1"
+ group="1"
+ contentType="audio"
+ segmentAlignment="true"
+ audioSamplingRate="48000"
+ mimeType="audio/mp4"
+ codecs="mp4a.40.2"
+ startWithSAP="1">
+ <AudioChannelConfiguration
+ schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011"
+ value="2" />
+ <Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
+ <SegmentTemplate
+ timescale="48000"
+ initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
+ media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
+ <SegmentTimeline>
+ <S t="0" d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="96256" r="2" />
+ <S d="95232" />
+ <S d="3584" />
+ </SegmentTimeline>
+ </SegmentTemplate>
+ <Representation
+ id="audio=128001"
+ bandwidth="128001">
+ </Representation>
+ </AdaptationSet>
+ <AdaptationSet
+ id="2"
+ group="3"
+ contentType="text"
+ lang="en"
+ mimeType="application/mp4"
+ codecs="stpp"
+ startWithSAP="1">
+ <Role schemeIdUri="urn:mpeg:dash:role:2011" value="subtitle" />
+ <SegmentTemplate
+ timescale="1000"
+ initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
+ media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
+ <SegmentTimeline>
+ <S t="0" d="60000" r="9" />
+ <S d="24000" />
+ </SegmentTimeline>
+ </SegmentTemplate>
+ <Representation
+ id="textstream_eng=1000"
+ bandwidth="1000">
+ </Representation>
+ </AdaptationSet>
+ <AdaptationSet
+ id="3"
+ group="2"
+ contentType="video"
+ par="960:409"
+ minBandwidth="100000"
+ maxBandwidth="4482000"
+ maxWidth="1689"
+ maxHeight="720"
+ segmentAlignment="true"
+ mimeType="video/mp4"
+ codecs="avc1.4D401F"
+ startWithSAP="1">
+ <Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
+ <SegmentTemplate
+ timescale="12288"
+ initialization="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$.dash"
+ media="3144-kZT4LWMQw6Rh7Kpd-$RepresentationID$-$Time$.dash">
+ <SegmentTimeline>
+ <S t="0" d="24576" r="443" />
+ </SegmentTimeline>
+ </SegmentTemplate>
+ <Representation
+ id="video=100000"
+ bandwidth="100000"
+ width="336"
+ height="144"
+ sar="2880:2863"
+ scanType="progressive">
+ </Representation>
+ <Representation
+ id="video=326000"
+ bandwidth="326000"
+ width="562"
+ height="240"
+ sar="115200:114929"
+ scanType="progressive">
+ </Representation>
+ <Representation
+ id="video=698000"
+ bandwidth="698000"
+ width="844"
+ height="360"
+ sar="86400:86299"
+ scanType="progressive">
+ </Representation>
+ <Representation
+ id="video=1493000"
+ bandwidth="1493000"
+ width="1126"
+ height="480"
+ sar="230400:230267"
+ scanType="progressive">
+ </Representation>
+ <Representation
+ id="video=4482000"
+ bandwidth="4482000"
+ width="1688"
+ height="720"
+ sar="86400:86299"
+ scanType="progressive">
+ </Representation>
+ </AdaptationSet>
+ </Period>
+</MPD>
diff --git a/test/testdata/mpd/unfragmented.mpd b/test/testdata/mpd/unfragmented.mpd
new file mode 100644
index 0000000..5a3720b
--- /dev/null
+++ b/test/testdata/mpd/unfragmented.mpd
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<MPD mediaPresentationDuration="PT54.915S" minBufferTime="PT1.500S" profiles="urn:mpeg:dash:profile:isoff-on-demand:2011" type="static" xmlns="urn:mpeg:dash:schema:mpd:2011">
+ <Period duration="PT54.915S">
+ <AdaptationSet segmentAlignment="true" subsegmentAlignment="true" subsegmentStartsWithSAP="1">
+ <Representation bandwidth="804261" codecs="avc1.4d401e" frameRate="30" height="360" id="VIDEO-1" mimeType="video/mp4" startWithSAP="1" width="360">
+ <BaseURL>DASH_360</BaseURL>
+ <SegmentBase indexRange="915-1114" indexRangeExact="true">
+ <Initialization range="0-914"/>
+ </SegmentBase>
+ </Representation>
+ <Representation bandwidth="608000" codecs="avc1.4d401e" frameRate="30" height="240" id="VIDEO-2" mimeType="video/mp4" startWithSAP="1" width="240">
+ <BaseURL>DASH_240</BaseURL>
+ <SegmentBase indexRange="913-1112" indexRangeExact="true">
+ <Initialization range="0-912"/>
+ </SegmentBase>
+ </Representation>
+ </AdaptationSet>
+ <AdaptationSet>
+ <Representation audioSamplingRate="48000" bandwidth="129870" codecs="mp4a.40.2" id="AUDIO-1" mimeType="audio/mp4" startWithSAP="1">
+ <AudioChannelConfiguration schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011" value="2"/>
+ <BaseURL>audio</BaseURL>
+ <SegmentBase indexRange="832-1007" indexRangeExact="true">
+ <Initialization range="0-831"/>
+ </SegmentBase>
+ </Representation>
+ </AdaptationSet>
+ </Period>
+</MPD>
diff --git a/test/testdata/mpd/urls_only.mpd b/test/testdata/mpd/urls_only.mpd
new file mode 100644
index 0000000..2b9d595
--- /dev/null
+++ b/test/testdata/mpd/urls_only.mpd
@@ -0,0 +1,218 @@
+<?xml version="1.0" ?>
+<MPD maxSegmentDuration="PT0H0M10.000S" mediaPresentationDuration="PT0H4M1.728S" minBufferTime="PT1.500S" profiles="urn:mpeg:dash:profile:isoff-main:2011" type="static" xmlns="urn:mpeg:dash:schema:mpd:2011">
+ <Period duration="PT0H4M1.728S">
+ <AdaptationSet bitstreamSwitching="true" lang="und" maxHeight="1080" maxWidth="1920" par="16:9" segmentAlignment="true">
+ <ContentComponent contentType="video" id="1"/>
+ <Representation audioSamplingRate="44100" bandwidth="200000" codecs="avc3.42c01e,mp4a.40.2" frameRate="25" height="144" id="h264_aac_144p_m4s" mimeType="video/mp4" sar="1:1" startWithSAP="1" width="256">
+ <SegmentList duration="10000" timescale="1000">
+ <Initialization sourceURL="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/init/432f65a0.mp4"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/0/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/1/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/2/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/3/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/4/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/5/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/6/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/7/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/8/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/9/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/10/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/11/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/12/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/13/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/14/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/15/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/16/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/17/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/18/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/19/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/20/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/21/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/22/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/23/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_144p_m4s/24/432f65a0.m4s"/>
+ </SegmentList>
+ </Representation>
+ <Representation audioSamplingRate="44100" bandwidth="400000" codecs="avc3.42c01e,mp4a.40.2" frameRate="25" height="240" id="h264_aac_240p_m4s" mimeType="video/mp4" sar="160:159" startWithSAP="1" width="424">
+ <SegmentList duration="10000" timescale="1000">
+ <Initialization sourceURL="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/init/432f65a0.mp4"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/0/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/1/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/2/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/3/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/4/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/5/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/6/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/7/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/8/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/9/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/10/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/11/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/12/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/13/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/14/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/15/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/16/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/17/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/18/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/19/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/20/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/21/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/22/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/23/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_240p_m4s/24/432f65a0.m4s"/>
+ </SegmentList>
+ </Representation>
+ <Representation audioSamplingRate="44100" bandwidth="800000" codecs="avc3.42c01e,mp4a.40.2" frameRate="25" height="360" id="h264_aac_360p_m4s" mimeType="video/mp4" sar="1:1" startWithSAP="1" width="640">
+ <SegmentList duration="10000" timescale="1000">
+ <Initialization sourceURL="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/init/432f65a0.mp4"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/0/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/1/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/2/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/3/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/4/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/5/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/6/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/7/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/8/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/9/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/10/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/11/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/12/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/13/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/14/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/15/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/16/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/17/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/18/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/19/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/20/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/21/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/22/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/23/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_360p_m4s/24/432f65a0.m4s"/>
+ </SegmentList>
+ </Representation>
+ <Representation audioSamplingRate="44100" bandwidth="1200000" codecs="avc3.42c01e,mp4a.40.2" frameRate="25" height="480" id="h264_aac_480p_m4s" mimeType="video/mp4" sar="320:321" startWithSAP="1" width="856">
+ <SegmentList duration="10000" timescale="1000">
+ <Initialization sourceURL="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/init/432f65a0.mp4"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/0/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/1/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/2/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/3/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/4/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/5/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/6/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/7/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/8/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/9/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/10/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/11/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/12/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/13/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/14/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/15/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/16/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/17/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/18/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/19/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/20/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/21/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/22/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/23/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_480p_m4s/24/432f65a0.m4s"/>
+ </SegmentList>
+ </Representation>
+ <Representation audioSamplingRate="44100" bandwidth="1600000" codecs="avc3.42c01e,mp4a.40.2" frameRate="25" height="576" id="h264_aac_576p_m4s" mimeType="video/mp4" sar="1:1" startWithSAP="1" width="1024">
+ <SegmentList duration="10000" timescale="1000">
+ <Initialization sourceURL="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/init/432f65a0.mp4"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/0/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/1/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/2/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/3/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/4/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/5/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/6/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/7/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/8/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/9/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/10/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/11/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/12/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/13/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/14/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/15/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/16/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/17/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/18/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/19/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/20/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/21/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/22/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/23/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_576p_m4s/24/432f65a0.m4s"/>
+ </SegmentList>
+ </Representation>
+ <Representation audioSamplingRate="44100" bandwidth="2400000" codecs="avc3.42c01e,mp4a.40.2" frameRate="25" height="720" id="h264_aac_720p_m4s" mimeType="video/mp4" sar="1:1" startWithSAP="1" width="1280">
+ <SegmentList duration="10000" timescale="1000">
+ <Initialization sourceURL="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/init/432f65a0.mp4"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/0/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/1/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/2/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/3/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/4/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/5/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/6/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/7/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/8/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/9/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/10/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/11/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/12/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/13/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/14/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/15/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/16/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/17/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/18/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/19/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/20/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/21/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/22/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/23/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_720p_m4s/24/432f65a0.m4s"/>
+ </SegmentList>
+ </Representation>
+ <Representation audioSamplingRate="44100" bandwidth="4400000" codecs="avc3.42c01e,mp4a.40.2" frameRate="25" height="1080" id="h264_aac_1080p_m4s" mimeType="video/mp4" sar="1:1" startWithSAP="1" width="1920">
+ <SegmentList duration="10000" timescale="1000">
+ <Initialization sourceURL="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/init/432f65a0.mp4"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/0/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/1/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/2/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/3/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/4/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/5/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/6/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/7/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/8/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/9/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/10/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/11/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/12/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/13/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/14/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/15/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/16/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/17/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/18/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/19/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/20/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/21/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/22/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/23/432f65a0.m4s"/>
+ <SegmentURL media="../vd_5999c902ea707c67d8e267a9_1503250723/h264_aac_1080p_m4s/24/432f65a0.m4s"/>
+ </SegmentList>
+ </Representation>
+ </AdaptationSet>
+ </Period>
+</MPD>
diff --git a/test/testdata/thumbnails/foo %d bar/foo_%d.webp b/test/testdata/thumbnails/foo %d bar/foo_%d.webp
new file mode 100644
index 0000000..d64d083
--- /dev/null
+++ b/test/testdata/thumbnails/foo %d bar/foo_%d.webp
Binary files differ
diff --git a/test/testdata/xspf/foo_xspf.xspf b/test/testdata/xspf/foo_xspf.xspf
new file mode 100644
index 0000000..b7f0086
--- /dev/null
+++ b/test/testdata/xspf/foo_xspf.xspf
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<playlist version="1" xmlns="http://xspf.org/ns/0/">
+ <date>2018-03-09T18:01:43Z</date>
+ <trackList>
+ <track>
+ <location>cd1/track%201.mp3</location>
+ <title>Pandemonium</title>
+ <creator>Foilverb</creator>
+ <annotation>Visit http://bigbrother404.bandcamp.com</annotation>
+ <album>Pandemonium EP</album>
+ <trackNum>1</trackNum>
+ <duration>202416</duration>
+ </track>
+ <track>
+ <location>../%E3%83%88%E3%83%A9%E3%83%83%E3%82%AF%E3%80%80%EF%BC%92.mp3</location>
+ <title>Final Cartridge (Nichico Twelve Remix)</title>
+ <annotation>Visit http://bigbrother404.bandcamp.com</annotation>
+ <creator>Foilverb</creator>
+ <album>Pandemonium EP</album>
+ <trackNum>2</trackNum>
+ <duration>255857</duration>
+ </track>
+ <track>
+ <location>track3.mp3</location>
+ <location>https://example.com/track3.mp3</location>
+ <title>Rebuilding Nightingale</title>
+ <annotation>Visit http://bigbrother404.bandcamp.com</annotation>
+ <creator>Foilverb</creator>
+ <album>Pandemonium EP</album>
+ <trackNum>3</trackNum>
+ <duration>287915</duration>
+ </track>
+ </trackList>
+</playlist>
diff --git a/test/testdata/yt_dlp_plugins/extractor/_ignore.py b/test/testdata/yt_dlp_plugins/extractor/_ignore.py
new file mode 100644
index 0000000..57faf75
--- /dev/null
+++ b/test/testdata/yt_dlp_plugins/extractor/_ignore.py
@@ -0,0 +1,5 @@
+from yt_dlp.extractor.common import InfoExtractor
+
+
+class IgnorePluginIE(InfoExtractor):
+ pass
diff --git a/test/testdata/yt_dlp_plugins/extractor/ignore.py b/test/testdata/yt_dlp_plugins/extractor/ignore.py
new file mode 100644
index 0000000..816a16a
--- /dev/null
+++ b/test/testdata/yt_dlp_plugins/extractor/ignore.py
@@ -0,0 +1,12 @@
+from yt_dlp.extractor.common import InfoExtractor
+
+
+class IgnoreNotInAllPluginIE(InfoExtractor):
+ pass
+
+
+class InAllPluginIE(InfoExtractor):
+ pass
+
+
+__all__ = ['InAllPluginIE']
diff --git a/test/testdata/yt_dlp_plugins/extractor/normal.py b/test/testdata/yt_dlp_plugins/extractor/normal.py
new file mode 100644
index 0000000..b09009b
--- /dev/null
+++ b/test/testdata/yt_dlp_plugins/extractor/normal.py
@@ -0,0 +1,9 @@
+from yt_dlp.extractor.common import InfoExtractor
+
+
+class NormalPluginIE(InfoExtractor):
+ pass
+
+
+class _IgnoreUnderscorePluginIE(InfoExtractor):
+ pass
diff --git a/test/testdata/yt_dlp_plugins/postprocessor/normal.py b/test/testdata/yt_dlp_plugins/postprocessor/normal.py
new file mode 100644
index 0000000..315b85a
--- /dev/null
+++ b/test/testdata/yt_dlp_plugins/postprocessor/normal.py
@@ -0,0 +1,5 @@
+from yt_dlp.postprocessor.common import PostProcessor
+
+
+class NormalPluginPP(PostProcessor):
+ pass
diff --git a/test/testdata/zipped_plugins/yt_dlp_plugins/extractor/zipped.py b/test/testdata/zipped_plugins/yt_dlp_plugins/extractor/zipped.py
new file mode 100644
index 0000000..01542e0
--- /dev/null
+++ b/test/testdata/zipped_plugins/yt_dlp_plugins/extractor/zipped.py
@@ -0,0 +1,5 @@
+from yt_dlp.extractor.common import InfoExtractor
+
+
+class ZippedPluginIE(InfoExtractor):
+ pass
diff --git a/test/testdata/zipped_plugins/yt_dlp_plugins/postprocessor/zipped.py b/test/testdata/zipped_plugins/yt_dlp_plugins/postprocessor/zipped.py
new file mode 100644
index 0000000..223822b
--- /dev/null
+++ b/test/testdata/zipped_plugins/yt_dlp_plugins/postprocessor/zipped.py
@@ -0,0 +1,5 @@
+from yt_dlp.postprocessor.common import PostProcessor
+
+
+class ZippedPluginPP(PostProcessor):
+ pass