summaryrefslogtreecommitdiffstats
path: root/testing/web-platform/tests/webcodecs
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 01:47:29 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 01:47:29 +0000
commit0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d (patch)
treea31f07c9bcca9d56ce61e9a1ffd30ef350d513aa /testing/web-platform/tests/webcodecs
parentInitial commit. (diff)
downloadfirefox-esr-0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d.tar.xz
firefox-esr-0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d.zip
Adding upstream version 115.8.0esr.upstream/115.8.0esr
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/web-platform/tests/webcodecs')
-rw-r--r--testing/web-platform/tests/webcodecs/META.yml1
-rw-r--r--testing/web-platform/tests/webcodecs/README.md150
-rw-r--r--testing/web-platform/tests/webcodecs/audio-data-serialization.any.js93
-rw-r--r--testing/web-platform/tests/webcodecs/audio-data.any.js357
-rw-r--r--testing/web-platform/tests/webcodecs/audio-data.crossOriginIsolated.https.any.js44
-rw-r--r--testing/web-platform/tests/webcodecs/audio-data.crossOriginIsolated.https.any.js.headers2
-rw-r--r--testing/web-platform/tests/webcodecs/audio-decoder.crossOriginIsolated.https.any.js71
-rw-r--r--testing/web-platform/tests/webcodecs/audio-decoder.crossOriginIsolated.https.any.js.headers2
-rw-r--r--testing/web-platform/tests/webcodecs/audio-decoder.https.any.js73
-rw-r--r--testing/web-platform/tests/webcodecs/audio-encoder-codec-specific.https.any.js98
-rw-r--r--testing/web-platform/tests/webcodecs/audio-encoder-config.https.any.js237
-rw-r--r--testing/web-platform/tests/webcodecs/audio-encoder.https.any.js572
-rw-r--r--testing/web-platform/tests/webcodecs/audioDecoder-codec-specific.https.any.js371
-rw-r--r--testing/web-platform/tests/webcodecs/av1.mp4bin0 -> 4019 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/chunk-serialization.any.js80
-rw-r--r--testing/web-platform/tests/webcodecs/encoded-audio-chunk.any.js45
-rw-r--r--testing/web-platform/tests/webcodecs/encoded-audio-chunk.crossOriginIsolated.https.any.js29
-rw-r--r--testing/web-platform/tests/webcodecs/encoded-audio-chunk.crossOriginIsolated.https.any.js.headers2
-rw-r--r--testing/web-platform/tests/webcodecs/encoded-video-chunk.any.js45
-rw-r--r--testing/web-platform/tests/webcodecs/encoded-video-chunk.crossOriginIsolated.https.any.js29
-rw-r--r--testing/web-platform/tests/webcodecs/encoded-video-chunk.crossOriginIsolated.https.any.js.headers2
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors-flip.avifbin0 -> 2528 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors-flip.gifbin0 -> 2701 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors-full-range-bt2020-pq-444-10bpc.avifbin0 -> 383 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.avifbin0 -> 375 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.jpgbin0 -> 1006 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.webpbin0 -> 456 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors-limited-range-422-8bpc.avifbin0 -> 380 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors-limited-range-444-8bpc.avifbin0 -> 372 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors.avifbin0 -> 375 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors.gifbin0 -> 1376 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors.jpgbin0 -> 1242 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors.mp4bin0 -> 709 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors.pngbin0 -> 1442 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/four-colors.webpbin0 -> 78 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/full-cycle-test.https.any.js136
-rw-r--r--testing/web-platform/tests/webcodecs/h264.annexbbin0 -> 8940 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/h264.mp4bin0 -> 9821 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/idlharness.https.any.js61
-rw-r--r--testing/web-platform/tests/webcodecs/image-decoder-disconnect-readable-stream-crash.https.html12
-rw-r--r--testing/web-platform/tests/webcodecs/image-decoder-image-orientation-none.https.html88
-rw-r--r--testing/web-platform/tests/webcodecs/image-decoder-utils.js206
-rw-r--r--testing/web-platform/tests/webcodecs/image-decoder.crossOriginIsolated.https.any.js27
-rw-r--r--testing/web-platform/tests/webcodecs/image-decoder.crossOriginIsolated.https.any.js.headers2
-rw-r--r--testing/web-platform/tests/webcodecs/image-decoder.https.any.js502
-rw-r--r--testing/web-platform/tests/webcodecs/pattern.pngbin0 -> 39650 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/per-frame-qp-encoding.https.any.js134
-rw-r--r--testing/web-platform/tests/webcodecs/reconfiguring-encoder.https.any.js121
-rw-r--r--testing/web-platform/tests/webcodecs/sfx-aac.mp4bin0 -> 2867 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/sfx-alaw.wavbin0 -> 10332 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/sfx-mulaw.wavbin0 -> 10332 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/sfx-opus.oggbin0 -> 3244 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/sfx.adtsbin0 -> 2078 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/sfx.mp3bin0 -> 3213 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/temporal-svc-encoding.https.any.js105
-rw-r--r--testing/web-platform/tests/webcodecs/utils.js235
-rw-r--r--testing/web-platform/tests/webcodecs/video-decoder.crossOriginIsolated.https.any.js68
-rw-r--r--testing/web-platform/tests/webcodecs/video-decoder.crossOriginIsolated.https.any.js.headers2
-rw-r--r--testing/web-platform/tests/webcodecs/video-decoder.https.any.js64
-rw-r--r--testing/web-platform/tests/webcodecs/video-encoder-config.https.any.js159
-rw-r--r--testing/web-platform/tests/webcodecs/video-encoder-utils.js103
-rw-r--r--testing/web-platform/tests/webcodecs/video-encoder.https.any.js320
-rw-r--r--testing/web-platform/tests/webcodecs/video-frame-serialization.any.js139
-rw-r--r--testing/web-platform/tests/webcodecs/videoColorSpace.any.js47
-rw-r--r--testing/web-platform/tests/webcodecs/videoDecoder-codec-specific.https.any.js555
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-alpha.any.js50
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-canvasImageSource.html142
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-construction.any.js757
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginIsolated.https.any.js11
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginIsolated.https.any.js.headers3
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginSource.sub.html198
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-construction.window.js21
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-copyTo.any.js322
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-copyTo.crossOriginIsolated.https.any.js18
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-copyTo.crossOriginIsolated.https.any.js.headers2
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-createImageBitmap.any.js28
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-createImageBitmap.https.any.js84
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-drawImage.any.js104
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.helper.html23
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.https.html234
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.serviceworker.js30
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-texImage.any.js141
-rw-r--r--testing/web-platform/tests/webcodecs/videoFrame-utils.js118
-rw-r--r--testing/web-platform/tests/webcodecs/vp8.webmbin0 -> 12230 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/vp9.mp4bin0 -> 6159 bytes
-rw-r--r--testing/web-platform/tests/webcodecs/webgl-test-utils.js321
86 files changed, 7996 insertions, 0 deletions
diff --git a/testing/web-platform/tests/webcodecs/META.yml b/testing/web-platform/tests/webcodecs/META.yml
new file mode 100644
index 0000000000..b5b838a4cf
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/META.yml
@@ -0,0 +1 @@
+spec: https://w3c.github.io/webcodecs/
diff --git a/testing/web-platform/tests/webcodecs/README.md b/testing/web-platform/tests/webcodecs/README.md
new file mode 100644
index 0000000000..accbcd438b
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/README.md
@@ -0,0 +1,150 @@
+# WebCodecs Test Files
+
+[TOC]
+
+## Instructions
+
+To add, update or remove a test file, please update the list below.
+
+Please provide a full reference and the steps used to generate the test file so that
+anyone can regenerate or update the file in the future.
+
+## Notes
+* When updating the sample offsets and descriptions for tests using mp4 files, it's easiest to use [mp4box.js](https://gpac.github.io/mp4box.js/test/filereader.html).
+ * Sample offsets can be copied from the "Sample View" tab after unchecking all but offset and size. Use a multi-line edit mode and clang-format to quickly format entries.
+ * Description entries can be found under moov.trak.mdia.minf.stbl.stsd in box view.
+ * avc1.avcC has an offset, size in the same view. Add 8 to offset and subtract 8 from the size to get the values the tests want.
+ * If you use ffprobe -show_packets to get sample offsets, you may need to add 4 to each `pos` value. You can tell if you need to by whether or not tests pass.
+
+## List of Test Files
+
+### four-colors.png
+Generated using MSPaint like a true professional.
+
+### four-colors.avif
+Lossless encoding must be used to ensure colors are perfect.
+```
+avifenc -l four-colors.png -o four-colors.avif
+```
+
+### four-colors.webp
+Lossless encoding must be used to ensure colors are perfect.
+```
+ffmpeg -i four-colors.png -lossless 1 -y four-colors.webp
+```
+
+### four-colors-limited-range-420-8bpc.webp
+```
+ffmpeg -i four-colors.png -pix_fmt yuv420p four-colors-limited-range-420-8bpc.webp
+```
+
+### four-colors.gif
+High quality encoding must be used to ensure colors are perfect.
+```
+cp four-colors.png four-colors2.png
+gifski -o four-colors.gif four-colors*.png
+```
+
+### four-colors-flip.gif
+High quality encoding must be used to ensure colors are perfect.
+```
+ffmpeg -i four-colors.png -vf "rotate=PI" four-colors2.png
+gifski -o four-colors-flip.gif four-colors*.png
+```
+
+### four-colors-flip.avif
+```
+ffmpeg -i four-colors-flip.gif -vcodec libaom-av1 -crf 16 four-colors-flip.mp4
+mp4box -add-image ref:primary:tk=1:samp=1 -ab avis -ab avif -ab miaf -brand avis four-colors-flip.mp4 -out four-colors-flip.avif
+mp4box -edits 1=r four-colors-flip.avif
+```
+
+### four-colors-limited-range-(420|422|444)-8bpc.avif
+```
+avifenc -r l -d 8 -y 420 -s 0 four-colors.png four-colors-limited-range-420-8bpc.avif
+avifenc -r l -d 8 -y 422 -s 0 four-colors.png four-colors-limited-range-422-8bpc.avif
+avifenc -r l -d 8 -y 444 -s 0 four-colors.png four-colors-limited-range-444-8bpc.avif
+```
+
+### four-colors-full-range-bt2020-pq-444-10bpc.avif
+```
+avifenc -r f -d 10 -y 444 -s 0 --nclx 9/16/9 four-colors.png four-colors-full-range-bt2020-pq-444-10bpc.avif
+```
+
+### four-colors.jpg
+Used [Squoosh.app](https://squoosh.app/) with MozJPEG compression and RGB
+channels. exiftool was then used to add an orientation marker.
+```
+exiftool -Orientation=1 -n four-colors.jpg
+```
+
+### four-colors-limited-range-420-8bpc.jpg
+Used [Squoosh.app](https://squoosh.app/) with MozJPEG compression and YUV
+channels. exiftool was then used to add an orientation marker.
+```
+exiftool -Orientation=1 -n four-colors-limited-range-420-8bpc.jpg
+```
+
+### four-colors.mp4
+Used a [custom tool](https://storage.googleapis.com/dalecurtis/avif2mp4.html) to convert four-colors.avif into a .mp4 file.
+
+### h264.mp4
+```
+ffmpeg -f lavfi -i testsrc=rate=10:n=1 -t 1 -pix_fmt yuv420p -vcodec h264 -tune zerolatency h264.mp4
+```
+
+### h264.annexb
+```
+ffmpeg -i h264.mp4 -codec copy -bsf:v h264_mp4toannexb -f h264 h264.annexb
+```
+
+### sfx.adts
+```
+sox -n -r 48000 sfx.wav synth 1 sine 480
+ffmpeg -i sfx.wav -frames:a 10 -acodec aac -b:a 96K sfx.adts
+```
+
+### sfx-alaw.wav
+```
+sox -n -r 48000 sfx.wav synth 1 sine 480
+ffmpeg -i sfx.wav -frames:a 10 -acodec pcm_alaw sfx-alaw.wav
+```
+
+### sfx.mp3
+```
+sox -n -r 48000 sfx.wav synth 1 sine 480
+ffmpeg -i sfx.wav -frames:a 10 -acodec libmp3lame -b:a 96K sfx.mp3
+```
+
+### sfx-aac.mp4
+```
+sox -n -r 48000 sfx.wav synth 1 sine 480
+ffmpeg -i sfx.wav -frames:a 10 -acodec aac -b:a 96K sfx-aac.mp4
+```
+
+### sfx-mulaw.wav
+```
+sox -n -r 48000 sfx.wav synth 1 sine 480
+ffmpeg -i sfx.wav -frames:a 10 -acodec pcm_mulaw sfx-mulaw.wav
+```
+
+### sfx-opus.ogg
+```
+sox -n -r 48000 sfx.wav synth 1 sine 480
+ffmpeg -i sfx.wav -frames:a 10 -acodec libopus -b:a 96K sfx-opus.ogg
+```
+
+### av1.mp4
+```
+ffmpeg -f lavfi -i testsrc=rate=10:n=1 -t 1 -pix_fmt yuv420p -vcodec libaom-av1 av1.mp4
+```
+
+### vp8.webm
+```
+ffmpeg -f lavfi -i testsrc=rate=10:n=1 -t 1 -pix_fmt yuv420p -vcodec vp8 vp8.webm
+```
+
+### vp9.mp4
+```
+ffmpeg -f lavfi -i testsrc=rate=10:n=1 -t 1 -pix_fmt yuv420p -vcodec vp9 vp9.mp4
+```
diff --git a/testing/web-platform/tests/webcodecs/audio-data-serialization.any.js b/testing/web-platform/tests/webcodecs/audio-data-serialization.any.js
new file mode 100644
index 0000000000..280934cd05
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audio-data-serialization.any.js
@@ -0,0 +1,93 @@
+// META: global=window
+// META: script=/common/media.js
+// META: script=/webcodecs/utils.js
+
+var defaultInit = {
+ timestamp: 1234,
+ channels: 2,
+ sampleRate: 8000,
+ frames: 100,
+}
+
+function createDefaultAudioData() {
+ return make_audio_data(defaultInit.timestamp,
+ defaultInit.channels,
+ defaultInit.sampleRate,
+ defaultInit.frames);
+}
+
+async_test(t => {
+ let originalData = createDefaultAudioData();
+
+ let channel = new MessageChannel();
+ let localPort = channel.port1;
+ let externalPort = channel.port2;
+
+ externalPort.onmessage = t.step_func((e) => {
+ let newData = e.data;
+
+ // We should have a valid deserialized buffer.
+ assert_equals(newData.numberOfFrames, defaultInit.frames, 'numberOfFrames');
+ assert_equals(
+ newData.numberOfChannels, defaultInit.channels, 'numberOfChannels');
+ assert_equals(newData.sampleRate, defaultInit.sampleRate, 'sampleRate');
+
+ const originalData_copyDest = new Float32Array(defaultInit.frames);
+ const newData_copyDest = new Float32Array(defaultInit.frames);
+
+ for (var channel = 0; channel < defaultInit.channels; channel++) {
+ originalData.copyTo(originalData_copyDest, { planeIndex: channel});
+ newData.copyTo(newData_copyDest, { planeIndex: channel});
+
+ for (var i = 0; i < newData_copyDest.length; i+=10) {
+ assert_equals(newData_copyDest[i], originalData_copyDest[i],
+ "data (ch=" + channel + ", i=" + i + ")");
+ }
+ }
+
+ newData.close();
+ externalPort.postMessage("Done");
+ })
+
+ localPort.onmessage = t.step_func_done((e) => {
+ assert_equals(originalData.numberOfFrames, defaultInit.frames);
+ originalData.close();
+ })
+
+ localPort.postMessage(originalData);
+
+}, 'Verify closing AudioData does not propagate accross contexts.');
+
+async_test(t => {
+ let data = createDefaultAudioData();
+
+ let channel = new MessageChannel();
+ let localPort = channel.port1;
+
+ localPort.onmessage = t.unreached_func();
+
+ data.close();
+
+ assert_throws_dom("DataCloneError", () => {
+ localPort.postMessage(data);
+ });
+
+ t.done();
+}, 'Verify posting closed AudioData throws.');
+
+async_test(t => {
+ let localData = createDefaultAudioData();
+
+ let channel = new MessageChannel();
+ let localPort = channel.port1;
+ let externalPort = channel.port2;
+
+ externalPort.onmessage = t.step_func_done((e) => {
+ let externalData = e.data;
+ assert_equals(externalData.numberOfFrames, defaultInit.frames);
+ externalData.close();
+ })
+
+ localPort.postMessage(localData, [localData]);
+ assert_not_equals(localData.numberOfFrames, defaultInit.frames);
+}, 'Verify transferring audio data closes them.'); \ No newline at end of file
diff --git a/testing/web-platform/tests/webcodecs/audio-data.any.js b/testing/web-platform/tests/webcodecs/audio-data.any.js
new file mode 100644
index 0000000000..4c2d96ab80
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audio-data.any.js
@@ -0,0 +1,357 @@
+// META: global=window,dedicatedworker
+// META: script=/common/media.js
+// META: script=/webcodecs/utils.js
+
+var defaultInit =
+ {
+ timestamp: 1234,
+ channels: 2,
+ sampleRate: 8000,
+ frames: 100,
+ }
+
+function
+createDefaultAudioData() {
+ return make_audio_data(
+ defaultInit.timestamp, defaultInit.channels, defaultInit.sampleRate,
+ defaultInit.frames);
+}
+
+test(t => {
+ let local_data = new Float32Array(defaultInit.channels * defaultInit.frames);
+
+ let audio_data_init = {
+ timestamp: defaultInit.timestamp,
+ data: local_data,
+ numberOfFrames: defaultInit.frames,
+ numberOfChannels: defaultInit.channels,
+ sampleRate: defaultInit.sampleRate,
+ format: 'f32-planar',
+ }
+
+ let data = new AudioData(audio_data_init);
+
+ assert_equals(data.timestamp, defaultInit.timestamp, 'timestamp');
+ assert_equals(data.numberOfFrames, defaultInit.frames, 'frames');
+ assert_equals(data.numberOfChannels, defaultInit.channels, 'channels');
+ assert_equals(data.sampleRate, defaultInit.sampleRate, 'sampleRate');
+ assert_equals(
+ data.duration, defaultInit.frames / defaultInit.sampleRate * 1_000_000,
+ 'duration');
+ assert_equals(data.format, 'f32-planar', 'format');
+
+ // Create an Int16 array of the right length.
+ let small_data = new Int16Array(defaultInit.channels * defaultInit.frames);
+
+ let wrong_format_init = {...audio_data_init};
+ wrong_format_init.data = small_data;
+
+ // Creating `f32-planar` AudioData from Int16 from should throw.
+ assert_throws_js(TypeError, () => {
+ let data = new AudioData(wrong_format_init);
+ }, `AudioDataInit.data needs to be big enough`);
+
+ var members = [
+ 'timestamp',
+ 'data',
+ 'numberOfFrames',
+ 'numberOfChannels',
+ 'sampleRate',
+ 'format',
+ ];
+
+ for (const member of members) {
+ let incomplete_init = {...audio_data_init};
+ delete incomplete_init[member];
+
+ assert_throws_js(
+ TypeError, () => {let data = new AudioData(incomplete_init)},
+ 'AudioData requires \'' + member + '\'');
+ }
+
+ let invalid_init = {...audio_data_init};
+ invalid_init.numberOfFrames = 0
+
+ assert_throws_js(
+ TypeError, () => {let data = new AudioData(invalid_init)},
+ 'AudioData requires numberOfFrames > 0');
+
+ invalid_init = {...audio_data_init};
+ invalid_init.numberOfChannels = 0
+
+ assert_throws_js(
+ TypeError, () => {let data = new AudioData(invalid_init)},
+ 'AudioData requires numberOfChannels > 0');
+
+}, 'Verify AudioData constructors');
+
+test(t => {
+ let data = createDefaultAudioData();
+
+ let clone = data.clone();
+
+ // Verify the parameters match.
+ assert_equals(data.timestamp, clone.timestamp, 'timestamp');
+ assert_equals(data.numberOfFrames, clone.numberOfFrames, 'frames');
+ assert_equals(data.numberOfChannels, clone.numberOfChannels, 'channels');
+ assert_equals(data.sampleRate, clone.sampleRate, 'sampleRate');
+ assert_equals(data.format, clone.format, 'format');
+
+ const data_copyDest = new Float32Array(defaultInit.frames);
+ const clone_copyDest = new Float32Array(defaultInit.frames);
+
+ // Verify the data matches.
+ for (var channel = 0; channel < defaultInit.channels; channel++) {
+ data.copyTo(data_copyDest, {planeIndex: channel});
+ clone.copyTo(clone_copyDest, {planeIndex: channel});
+
+ assert_array_equals(
+ data_copyDest, clone_copyDest, 'Cloned data ch=' + channel);
+ }
+
+ // Verify closing the original data doesn't close the clone.
+ data.close();
+ assert_equals(data.numberOfFrames, 0, 'data.buffer (closed)');
+ assert_not_equals(clone.numberOfFrames, 0, 'clone.buffer (not closed)');
+
+ clone.close();
+ assert_equals(clone.numberOfFrames, 0, 'clone.buffer (closed)');
+
+ // Verify closing a closed AudioData does not throw.
+ data.close();
+}, 'Verify closing and cloning AudioData');
+
+test(t => {
+ let data = make_audio_data(
+ -10, defaultInit.channels, defaultInit.sampleRate, defaultInit.frames);
+ assert_equals(data.timestamp, -10, 'timestamp');
+ data.close();
+}, 'Test we can construct AudioData with a negative timestamp.');
+
+
+// Each test vector represents two channels of data in the following arbitrary
+// layout: <min, zero, max, min, max / 2, min / 2, zero, max, zero, zero>.
+const testVectorFrames = 5;
+const testVectorChannels = 2;
+const testVectorInterleavedResult =
+ [[-1.0, 1.0, 0.5, 0.0, 0.0], [0.0, -1.0, -0.5, 1.0, 0.0]];
+const testVectorPlanarResult =
+ [[-1.0, 0.0, 1.0, -1.0, 0.5], [-0.5, 0.0, 1.0, 0.0, 0.0]];
+
+test(t => {
+ const INT8_MIN = (-0x7f - 1);
+ const INT8_MAX = 0x7f;
+ const UINT8_MAX = 0xff;
+
+ const testVectorUint8 = [
+ 0, -INT8_MIN, UINT8_MAX, 0, INT8_MAX / 2 + 128, INT8_MIN / 2 + 128,
+ -INT8_MIN, UINT8_MAX, -INT8_MIN, -INT8_MIN
+ ];
+
+ let data = new AudioData({
+ timestamp: defaultInit.timestamp,
+ data: new Uint8Array(testVectorUint8),
+ numberOfFrames: testVectorFrames,
+ numberOfChannels: testVectorChannels,
+ sampleRate: defaultInit.sampleRate,
+ format: 'u8'
+ });
+
+ const epsilon = 1.0 / (UINT8_MAX - 1);
+
+ let dest = new Float32Array(data.numberOfFrames);
+ data.copyTo(dest, {planeIndex: 0, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorInterleavedResult[0], epsilon, 'interleaved channel 0');
+ data.copyTo(dest, {planeIndex: 1, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorInterleavedResult[1], epsilon, 'interleaved channel 1');
+
+ data = new AudioData({
+ timestamp: defaultInit.timestamp,
+ data: new Uint8Array(testVectorUint8),
+ numberOfFrames: testVectorFrames,
+ numberOfChannels: testVectorChannels,
+ sampleRate: defaultInit.sampleRate,
+ format: 'u8-planar'
+ });
+
+ data.copyTo(dest, {planeIndex: 0, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorPlanarResult[0], epsilon, 'planar channel 0');
+ data.copyTo(dest, {planeIndex: 1, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorPlanarResult[1], epsilon, 'planar channel 1');
+}, 'Test conversion of uint8 data to float32');
+
+test(t => {
+ const INT16_MIN = (-0x7fff - 1);
+ const INT16_MAX = 0x7fff;
+ const testVectorInt16 = [
+ INT16_MIN, 0, INT16_MAX, INT16_MIN, INT16_MAX / 2, INT16_MIN / 2, 0,
+ INT16_MAX, 0, 0
+ ];
+
+ let data = new AudioData({
+ timestamp: defaultInit.timestamp,
+ data: new Int16Array(testVectorInt16),
+ numberOfFrames: testVectorFrames,
+ numberOfChannels: testVectorChannels,
+ sampleRate: defaultInit.sampleRate,
+ format: 's16'
+ });
+
+ const epsilon = 1.0 / (INT16_MAX + 1);
+
+ let dest = new Float32Array(data.numberOfFrames);
+ data.copyTo(dest, {planeIndex: 0, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorInterleavedResult[0], epsilon, 'interleaved channel 0');
+ data.copyTo(dest, {planeIndex: 1, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorInterleavedResult[1], epsilon, 'interleaved channel 1');
+
+ data = new AudioData({
+ timestamp: defaultInit.timestamp,
+ data: new Int16Array(testVectorInt16),
+ numberOfFrames: testVectorFrames,
+ numberOfChannels: testVectorChannels,
+ sampleRate: defaultInit.sampleRate,
+ format: 's16-planar'
+ });
+
+ data.copyTo(dest, {planeIndex: 0, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorPlanarResult[0], epsilon, 'planar channel 0');
+ data.copyTo(dest, {planeIndex: 1, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorPlanarResult[1], epsilon, 'planar channel 1');
+}, 'Test conversion of int16 data to float32');
+
+test(t => {
+ const INT32_MIN = (-0x7fffffff - 1);
+ const INT32_MAX = 0x7fffffff;
+ const testVectorInt32 = [
+ INT32_MIN, 0, INT32_MAX, INT32_MIN, INT32_MAX / 2, INT32_MIN / 2, 0,
+ INT32_MAX, 0, 0
+ ];
+
+ let data = new AudioData({
+ timestamp: defaultInit.timestamp,
+ data: new Int32Array(testVectorInt32),
+ numberOfFrames: testVectorFrames,
+ numberOfChannels: testVectorChannels,
+ sampleRate: defaultInit.sampleRate,
+ format: 's32'
+ });
+
+ const epsilon = 1.0 / INT32_MAX;
+
+ let dest = new Float32Array(data.numberOfFrames);
+ data.copyTo(dest, {planeIndex: 0, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorInterleavedResult[0], epsilon, 'interleaved channel 0');
+ data.copyTo(dest, {planeIndex: 1, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorInterleavedResult[1], epsilon, 'interleaved channel 1');
+
+ data = new AudioData({
+ timestamp: defaultInit.timestamp,
+ data: new Int32Array(testVectorInt32),
+ numberOfFrames: testVectorFrames,
+ numberOfChannels: testVectorChannels,
+ sampleRate: defaultInit.sampleRate,
+ format: 's32-planar'
+ });
+
+ data.copyTo(dest, {planeIndex: 0, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorPlanarResult[0], epsilon, 'planar channel 0');
+ data.copyTo(dest, {planeIndex: 1, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorPlanarResult[1], epsilon, 'planar channel 1');
+}, 'Test conversion of int32 data to float32');
+
+test(t => {
+ const testVectorFloat32 =
+ [-1.0, 0.0, 1.0, -1.0, 0.5, -0.5, 0.0, 1.0, 0.0, 0.0];
+
+ let data = new AudioData({
+ timestamp: defaultInit.timestamp,
+ data: new Float32Array(testVectorFloat32),
+ numberOfFrames: testVectorFrames,
+ numberOfChannels: testVectorChannels,
+ sampleRate: defaultInit.sampleRate,
+ format: 'f32'
+ });
+
+ const epsilon = 0;
+
+ let dest = new Float32Array(data.numberOfFrames);
+ data.copyTo(dest, {planeIndex: 0, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorInterleavedResult[0], epsilon, 'interleaved channel 0');
+ data.copyTo(dest, {planeIndex: 1, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorInterleavedResult[1], epsilon, 'interleaved channel 1');
+
+ data = new AudioData({
+ timestamp: defaultInit.timestamp,
+ data: new Float32Array(testVectorFloat32),
+ numberOfFrames: testVectorFrames,
+ numberOfChannels: testVectorChannels,
+ sampleRate: defaultInit.sampleRate,
+ format: 'f32-planar'
+ });
+
+ data.copyTo(dest, {planeIndex: 0, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorPlanarResult[0], epsilon, 'planar channel 0');
+ data.copyTo(dest, {planeIndex: 1, format: 'f32-planar'});
+ assert_array_approx_equals(
+ dest, testVectorPlanarResult[1], epsilon, 'planar channel 1');
+}, 'Test conversion of float32 data to float32');
+
+test(t => {
+ const testVectorFloat32 =
+ [-1.0, 0.0, 1.0, -1.0, 0.5, -0.5, 0.0, 1.0, 0.0, 0.0];
+
+ let data = new AudioData({
+ timestamp: defaultInit.timestamp,
+ data: new Float32Array(testVectorFloat32),
+ numberOfFrames: testVectorFrames,
+ numberOfChannels: testVectorChannels,
+ sampleRate: defaultInit.sampleRate,
+ format: 'f32'
+ });
+
+ const epsilon = 0;
+
+ // Call copyTo() without specifying a format, for interleaved data.
+ let dest = new Float32Array(data.numberOfFrames * testVectorChannels);
+ data.copyTo(dest, {planeIndex: 0});
+ assert_array_approx_equals(
+ dest, testVectorFloat32, epsilon, 'interleaved data');
+
+ assert_throws_js(RangeError, () => {
+ data.copyTo(dest, {planeIndex: 1});
+ }, 'Interleaved AudioData cannot copy out planeIndex > 0');
+
+ data = new AudioData({
+ timestamp: defaultInit.timestamp,
+ data: new Float32Array(testVectorFloat32),
+ numberOfFrames: testVectorFrames,
+ numberOfChannels: testVectorChannels,
+ sampleRate: defaultInit.sampleRate,
+ format: 'f32-planar'
+ });
+
+ // Call copyTo() without specifying a format, for planar data.
+ dest = new Float32Array(data.numberOfFrames);
+ data.copyTo(dest, {planeIndex: 0});
+ assert_array_approx_equals(
+ dest, testVectorPlanarResult[0], epsilon, 'planar channel 0');
+ data.copyTo(dest, {planeIndex: 1});
+ assert_array_approx_equals(
+ dest, testVectorPlanarResult[1], epsilon, 'planar channel 1');
+}, 'Test copying out planar and interleaved data');
diff --git a/testing/web-platform/tests/webcodecs/audio-data.crossOriginIsolated.https.any.js b/testing/web-platform/tests/webcodecs/audio-data.crossOriginIsolated.https.any.js
new file mode 100644
index 0000000000..a5cb478670
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audio-data.crossOriginIsolated.https.any.js
@@ -0,0 +1,44 @@
+// META: global=window
+// META: script=/common/media.js
+// META: script=/webcodecs/utils.js
+
+var defaultInit = {
+ timestamp: 1234,
+ channels: 2,
+ sampleRate: 8000,
+ frames: 1,
+};
+
+function testAudioData(useView) {
+ let localData =
+ new SharedArrayBuffer(defaultInit.channels * defaultInit.frames * 4);
+ let view = new Float32Array(localData);
+ view[0] = -1.0;
+ view[1] = 1.0;
+
+ let audio_data_init = {
+ timestamp: defaultInit.timestamp,
+ data: useView ? view : localData,
+ numberOfFrames: defaultInit.frames,
+ numberOfChannels: defaultInit.channels,
+ sampleRate: defaultInit.sampleRate,
+ format: 'f32-planar',
+ }
+
+ let data = new AudioData(audio_data_init);
+
+ let copyDest = new SharedArrayBuffer(data.allocationSize({planeIndex: 0}));
+ let destView = new Float32Array(copyDest);
+ data.copyTo(useView ? destView : copyDest, {planeIndex: 0});
+ assert_equals(destView[0], -1.0, 'copyDest[0]');
+ data.copyTo(useView ? destView : copyDest, {planeIndex: 1});
+ assert_equals(destView[0], 1.0, 'copyDest[1]');
+}
+
+test(t => {
+ testAudioData(/*useView=*/ false);
+}, 'Test construction and copyTo() using a SharedArrayBuffer');
+
+test(t => {
+ testAudioData(/*useView=*/ true);
+}, 'Test construction and copyTo() using a Uint8Array(SharedArrayBuffer)');
diff --git a/testing/web-platform/tests/webcodecs/audio-data.crossOriginIsolated.https.any.js.headers b/testing/web-platform/tests/webcodecs/audio-data.crossOriginIsolated.https.any.js.headers
new file mode 100644
index 0000000000..5f8621ef83
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audio-data.crossOriginIsolated.https.any.js.headers
@@ -0,0 +1,2 @@
+Cross-Origin-Embedder-Policy: require-corp
+Cross-Origin-Opener-Policy: same-origin
diff --git a/testing/web-platform/tests/webcodecs/audio-decoder.crossOriginIsolated.https.any.js b/testing/web-platform/tests/webcodecs/audio-decoder.crossOriginIsolated.https.any.js
new file mode 100644
index 0000000000..17009e0118
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audio-decoder.crossOriginIsolated.https.any.js
@@ -0,0 +1,71 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+const testData = {
+ src: 'sfx-aac.mp4',
+ config: {
+ codec: 'mp4a.40.2',
+ sampleRate: 48000,
+ numberOfChannels: 1,
+ description: {offset: 2552, size: 5},
+ }
+};
+
+// Create a view of an ArrayBuffer.
+function view(buffer, {offset, size}) {
+ return new Uint8Array(buffer, offset, size);
+}
+
+function testSharedArrayBufferDescription(t, useView) {
+ const data = testData;
+
+ // Don't run test if the codec is not supported.
+ assert_equals("function", typeof AudioDecoder.isConfigSupported);
+ let supported = false;
+ return AudioDecoder
+ .isConfigSupported({
+ codec: data.config.codec,
+ sampleRate: data.config.sampleRate,
+ numberOfChannels: data.config.numberOfChannels
+ })
+ .catch(_ => {
+ assert_implements_optional(false, data.config.codec + ' unsupported');
+ })
+ .then(support => {
+ supported = support.supported;
+ assert_implements_optional(
+ supported, data.config.codec + ' unsupported');
+ return fetch(data.src);
+ })
+ .then(response => {
+ return response.arrayBuffer();
+ })
+ .then(buf => {
+ config = {...data.config};
+ if (data.config.description) {
+ let desc = new SharedArrayBuffer(data.config.description.size);
+ let descView = new Uint8Array(desc);
+ descView.set(view(buf, data.config.description));
+ config.description = useView ? descView : desc;
+ }
+
+ // Support was verified above, so the description shouldn't change
+ // that.
+ return AudioDecoder.isConfigSupported(config);
+ })
+ .then(support => {
+ assert_true(support.supported);
+
+ const decoder = new AudioDecoder(getDefaultCodecInit(t));
+ decoder.configure(config);
+ assert_equals(decoder.state, 'configured', 'state');
+ });
+}
+
+promise_test(t => {
+ return testSharedArrayBufferDescription(t, /*useView=*/ false);
+}, 'Test isConfigSupported() and configure() using a SharedArrayBuffer');
+
+promise_test(t => {
+ return testSharedArrayBufferDescription(t, /*useView=*/ true);
+}, 'Test isConfigSupported() and configure() using a Uint8Array(SharedArrayBuffer)');
diff --git a/testing/web-platform/tests/webcodecs/audio-decoder.crossOriginIsolated.https.any.js.headers b/testing/web-platform/tests/webcodecs/audio-decoder.crossOriginIsolated.https.any.js.headers
new file mode 100644
index 0000000000..5f8621ef83
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audio-decoder.crossOriginIsolated.https.any.js.headers
@@ -0,0 +1,2 @@
+Cross-Origin-Embedder-Policy: require-corp
+Cross-Origin-Opener-Policy: same-origin
diff --git a/testing/web-platform/tests/webcodecs/audio-decoder.https.any.js b/testing/web-platform/tests/webcodecs/audio-decoder.https.any.js
new file mode 100644
index 0000000000..4374e904a3
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audio-decoder.https.any.js
@@ -0,0 +1,73 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+const invalidConfigs = [
+ {
+ comment: 'Empty codec',
+ config: {codec: ''},
+ },
+ {
+ comment: 'Unrecognized codec',
+ config: {codec: 'bogus'},
+ },
+ {
+ comment: 'Video codec',
+ config: {codec: 'vp8'},
+ },
+ {
+ comment: 'Ambiguous codec',
+ config: {codec: 'vp9'},
+ },
+ {
+ comment: 'Codec with MIME type',
+ config: {codec: 'audio/webm; codecs="opus"'},
+ },
+];
+
+invalidConfigs.forEach(entry => {
+ promise_test(
+ t => {
+ return promise_rejects_js(
+ t, TypeError, AudioDecoder.isConfigSupported(entry.config));
+ },
+ 'Test that AudioDecoder.isConfigSupported() rejects invalid config:' +
+ entry.comment);
+});
+
+invalidConfigs.forEach(entry => {
+ async_test(
+ t => {
+ let codec = new AudioDecoder(getDefaultCodecInit(t));
+ assert_throws_js(TypeError, () => {
+ codec.configure(entry.config);
+ });
+ t.done();
+ },
+ 'Test that AudioDecoder.configure() rejects invalid config:' +
+ entry.comment);
+});
+
+function getFakeChunk() {
+ return new EncodedAudioChunk(
+ {type: 'key', timestamp: 0, data: Uint8Array.of(0)});
+}
+
+promise_test(t => {
+ // AudioDecoderInit lacks required fields.
+ assert_throws_js(TypeError, () => {
+ new AudioDecoder({});
+ });
+
+ // AudioDecoderInit has required fields.
+ let decoder = new AudioDecoder(getDefaultCodecInit(t));
+
+ assert_equals(decoder.state, 'unconfigured');
+ decoder.close();
+
+ return endAfterEventLoopTurn();
+}, 'Test AudioDecoder construction');
+
+promise_test(t => {
+ let decoder = new AudioDecoder(getDefaultCodecInit(t));
+ return testUnconfiguredCodec(t, decoder, getFakeChunk());
+}, 'Verify unconfigured AudioDecoder operations');
diff --git a/testing/web-platform/tests/webcodecs/audio-encoder-codec-specific.https.any.js b/testing/web-platform/tests/webcodecs/audio-encoder-codec-specific.https.any.js
new file mode 100644
index 0000000000..28c75d2a60
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audio-encoder-codec-specific.https.any.js
@@ -0,0 +1,98 @@
+// META: global=window
+// META: script=/webcodecs/utils.js
+
+function make_silent_audio_data(timestamp, channels, sampleRate, frames) {
+ let data = new Float32Array(frames*channels);
+
+ return new AudioData({
+ timestamp: timestamp,
+ data: data,
+ numberOfChannels: channels,
+ numberOfFrames: frames,
+ sampleRate: sampleRate,
+ format: "f32-planar",
+ });
+}
+
+// The Opus DTX flag (discontinuous transmission) reduces the encoding bitrate
+// for silence. This test ensures the DTX flag is working properly by encoding
+// almost 10s of silence and comparing the bitrate with and without the flag.
+promise_test(async t => {
+ let sample_rate = 48000;
+ let total_duration_s = 10;
+ let data_count = 100;
+ let normal_outputs = [];
+ let dtx_outputs = [];
+
+ let normal_encoder = new AudioEncoder({
+ error: e => {
+ assert_unreached('error: ' + e);
+ },
+ output: chunk => {
+ normal_outputs.push(chunk);
+ }
+ });
+
+ let dtx_encoder = new AudioEncoder({
+ error: e => {
+ assert_unreached('error: ' + e);
+ },
+ output: chunk => {
+ dtx_outputs.push(chunk);
+ }
+ });
+
+ let config = {
+ codec: 'opus',
+ sampleRate: sample_rate,
+ numberOfChannels: 2,
+ bitrate: 256000, // 256kbit
+ };
+
+ let normal_config = {...config, opus: {usedtx: false}};
+ let dtx_config = {...config, opus: {usedtx: true}};
+
+ let normal_config_support = await AudioEncoder.isConfigSupported(normal_config);
+ assert_implements_optional(normal_config_support.supported, "Opus not supported");
+
+ let dtx_config_support = await AudioEncoder.isConfigSupported(dtx_config);
+ assert_implements_optional(dtx_config_support.supported, "Opus DTX not supported");
+
+ // Configure one encoder with and one without the DTX flag
+ normal_encoder.configure(normal_config);
+ dtx_encoder.configure(dtx_config);
+
+ let timestamp_us = 0;
+ let data_duration_s = total_duration_s / data_count;
+ let data_length = data_duration_s * config.sampleRate;
+ for (let i = 0; i < data_count; i++) {
+ let data;
+
+ if (i == 0 || i == (data_count - 1)) {
+ // Send real data for the first and last 100ms.
+ data = make_audio_data(
+ timestamp_us, config.numberOfChannels, config.sampleRate,
+ data_length);
+
+ } else {
+ // Send silence for the rest of the 10s.
+ data = make_silent_audio_data(
+ timestamp_us, config.numberOfChannels, config.sampleRate,
+ data_length);
+ }
+
+ normal_encoder.encode(data);
+ dtx_encoder.encode(data);
+ data.close();
+
+ timestamp_us += data_duration_s * 1_000_000;
+ }
+
+ await Promise.all([normal_encoder.flush(), dtx_encoder.flush()])
+
+ normal_encoder.close();
+ dtx_encoder.close();
+
+ // We expect a significant reduction in the number of packets, over ~10s of silence.
+ assert_less_than(dtx_outputs.length, (normal_outputs.length / 2));
+}, 'Test the Opus DTX flag works.');
diff --git a/testing/web-platform/tests/webcodecs/audio-encoder-config.https.any.js b/testing/web-platform/tests/webcodecs/audio-encoder-config.https.any.js
new file mode 100644
index 0000000000..52ff3dc16e
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audio-encoder-config.https.any.js
@@ -0,0 +1,237 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+const invalidConfigs = [
+ {
+ comment: 'Emtpy codec',
+ config: {codec: ''},
+ },
+ {
+ comment: 'Unrecognized codec',
+ config: {codec: 'bogus'},
+ },
+ {
+ comment: 'Sample rate is too small',
+ config: {
+ codec: 'opus',
+ sampleRate: 100,
+ numberOfChannels: 2,
+ },
+ },
+ {
+ comment: 'Sample rate is too large',
+ config: {
+ codec: 'opus',
+ sampleRate: 1e6,
+ numberOfChannels: 2,
+ },
+ },
+ {
+ comment: 'Too few channels',
+ config: {
+ codec: 'opus',
+ sampleRate: 8000,
+ numberOfChannels: 0,
+ },
+ },
+ {
+ comment: 'Way too many channels',
+ config: {
+ codec: 'opus',
+ sampleRate: 8000,
+ numberOfChannels: 100,
+ bitrate: 128000
+ },
+ },
+ {
+ comment: 'Bit rate too big',
+ config:
+ {codec: 'opus', sampleRate: 8000, numberOfChannels: 2, bitrate: 6e9},
+ },
+ {
+ comment: 'Opus complexity too big',
+ config: {
+ codec: 'opus',
+ sampleRate: 8000,
+ numberOfChannels: 2,
+ opus: {
+ complexity: 11,
+ },
+ },
+ },
+ {
+ comment: 'Opus packetlossperc too big',
+ config: {
+ codec: 'opus',
+ sampleRate: 8000,
+ numberOfChannels: 2,
+ opus: {
+ packetlossperc: 101,
+ },
+ },
+ },
+ {
+ comment: 'Opus frame duration too small',
+ config: {
+ codec: 'opus',
+ sampleRate: 8000,
+ numberOfChannels: 2,
+ opus: {
+ frameDuration: 0,
+ },
+ },
+ },
+ {
+ comment: 'Opus frame duration too big',
+ config: {
+ codec: 'opus',
+ sampleRate: 8000,
+ numberOfChannels: 2,
+ opus: {
+ frameDuration: 122500,
+ },
+ },
+ },
+ {
+ comment: 'Invalid Opus frameDuration',
+ config: {
+ codec: 'opus',
+ sampleRate: 8000,
+ numberOfChannels: 2,
+ opus: {
+ frameDuration: 2501,
+ },
+ },
+ },
+];
+
+invalidConfigs.forEach(entry => {
+ promise_test(t => {
+ return promise_rejects_js(t, TypeError, AudioEncoder.isConfigSupported(entry.config));
+ }, 'Test that AudioEncoder.isConfigSupported() rejects invalid config:' + entry.comment);
+});
+
+const validButUnsupportedConfigs = [
+ {
+ comment: 'Too many channels',
+ config: {
+ codec: 'opus',
+ sampleRate: 48000,
+ numberOfChannels: 30,
+ },
+ },
+ {
+ comment: 'Bitrate is too low',
+ config: {
+ codec: 'opus',
+ sampleRate: 48000,
+ numberOfChannels: 2,
+ bitrate: 1
+ },
+ }
+];
+
+validButUnsupportedConfigs.forEach(entry => {
+ promise_test(async t => {
+ let support = await AudioEncoder.isConfigSupported(entry.config);
+ assert_false(support.supported);
+
+ let config = support.config;
+ assert_equals(config.codec, entry.config.codec);
+ assert_equals(config.sampleRate, entry.config.sampleRate);
+ assert_equals(config.numberOfChannels, entry.config.numberOfChannels);
+
+ }, "Test that AudioEncoder.isConfigSupported() doesn't support config:" + entry.comment);
+});
+
+const validConfigs = [
+ {
+ codec: 'opus',
+ sampleRate: 8000,
+ numberOfChannels: 1,
+ },
+ {
+ codec: 'opus',
+ sampleRate: 48000,
+ numberOfChannels: 2,
+ },
+ {
+ codec: 'opus',
+ sampleRate: 48000,
+ numberOfChannels: 2,
+ bitrate: 128000,
+ bogus: 123
+ },
+ {
+ codec: 'opus',
+ sampleRate: 48000,
+ numberOfChannels: 2,
+ opus: {
+ complexity: 5,
+ frameDuration: 20000,
+ packetlossperc: 10,
+ useinbandfec: true,
+ },
+ },
+ {
+ codec: 'opus',
+ sampleRate: 48000,
+ numberOfChannels: 2,
+ opus: {
+ format: 'opus',
+ complexity: 10,
+ frameDuration: 60000,
+ packetlossperc: 20, // Irrelevant without useinbandfec, but still valid.
+ usedtx: true,
+ bogus: 456,
+ },
+ },
+ {
+ codec: 'opus',
+ sampleRate: 48000,
+ numberOfChannels: 2,
+ opus: {}, // Use default values.
+ },
+];
+
+validConfigs.forEach(config => {
+ promise_test(async t => {
+ let support = await AudioEncoder.isConfigSupported(config);
+ assert_true(support.supported);
+
+ let new_config = support.config;
+ assert_equals(new_config.codec, config.codec);
+ assert_equals(new_config.sampleRate, config.sampleRate);
+ assert_equals(new_config.numberOfChannels, config.numberOfChannels);
+ if (config.bitrate)
+ assert_equals(new_config.bitrate, config.bitrate);
+
+ if (config.opus) {
+ let opus_config = config.opus;
+ let new_opus_config = new_config.opus;
+
+ assert_equals(new_opus_config.format, opus_config.format ?? 'opus');
+ assert_equals(
+ new_opus_config.frameDuration, opus_config.frameDuration ?? 20000);
+ assert_equals(
+ new_opus_config.packetlossperc, opus_config.packetlossperc ?? 0);
+ assert_equals(
+ new_opus_config.useinbandfec, opus_config.useinbandfec ?? false);
+ assert_equals(new_opus_config.usedtx, opus_config.usedtx ?? false);
+ assert_false(new_opus_config.hasOwnProperty('bogus'));
+
+ if (opus_config.complexity) {
+ assert_equals(new_opus_config.complexity, opus_config.complexity);
+ } else {
+ // Default complexity is 5 for mobile/ARM platforms, and 9 otherwise.
+ assert_true(
+ new_opus_config.complexity == 5 || new_opus_config.complexity == 9);
+ }
+
+ } else {
+ assert_false(new_config.hasOwnProperty('opus'));
+ }
+
+ assert_false(new_config.hasOwnProperty('bogus'));
+ }, "AudioEncoder.isConfigSupported() supports:" + JSON.stringify(config));
+});
diff --git a/testing/web-platform/tests/webcodecs/audio-encoder.https.any.js b/testing/web-platform/tests/webcodecs/audio-encoder.https.any.js
new file mode 100644
index 0000000000..7db9148ed5
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audio-encoder.https.any.js
@@ -0,0 +1,572 @@
+// META: global=window
+// META: script=/webcodecs/utils.js
+
+// Merge all audio buffers into a new big one with all the data.
+function join_audio_data(audio_data_array) {
+ assert_greater_than_equal(audio_data_array.length, 0);
+ let total_frames = 0;
+ let base_buffer = audio_data_array[0];
+ for (const data of audio_data_array) {
+ assert_not_equals(data, null);
+ assert_equals(data.sampleRate, base_buffer.sampleRate);
+ assert_equals(data.numberOfChannels, base_buffer.numberOfChannels);
+ assert_equals(data.format, base_buffer.format);
+ total_frames += data.numberOfFrames;
+ }
+
+ assert_true(base_buffer.format == 'f32' || base_buffer.format == 'f32-planar');
+
+ if (base_buffer.format == 'f32')
+ return join_interleaved_data(audio_data_array, total_frames);
+
+  // The format is 'f32-planar' (a.k.a. FLTP).
+ return join_planar_data(audio_data_array, total_frames);
+}
+
+function join_interleaved_data(audio_data_array, total_frames) {
+ let base_data = audio_data_array[0];
+ let channels = base_data.numberOfChannels;
+ let total_samples = total_frames * channels;
+
+ let result = new Float32Array(total_samples);
+
+ let copy_dest = new Float32Array(base_data.numberOfFrames * channels);
+
+ // Copy all the interleaved data.
+ let position = 0;
+ for (const data of audio_data_array) {
+ let samples = data.numberOfFrames * channels;
+ if (copy_dest.length < samples)
+ copy_dest = new Float32Array(samples);
+
+ data.copyTo(copy_dest, {planeIndex: 0});
+ result.set(copy_dest, position);
+ position += samples;
+ }
+
+ assert_equals(position, total_samples);
+
+ return result;
+}
+
+function join_planar_data(audio_data_array, total_frames) {
+ let base_frames = audio_data_array[0].numberOfFrames;
+ let channels = audio_data_array[0].numberOfChannels;
+ let result = new Float32Array(total_frames*channels);
+ let copyDest = new Float32Array(base_frames);
+
+ // Merge all samples and lay them out according to the FLTP memory layout.
+ let position = 0;
+ for (let ch = 0; ch < channels; ch++) {
+ for (const data of audio_data_array) {
+ data.copyTo(copyDest, { planeIndex: ch});
+ result.set(copyDest, position);
+ position += data.numberOfFrames;
+ }
+ }
+ assert_equals(position, total_frames * channels);
+
+ return result;
+}
+
+promise_test(async t => {
+ let sample_rate = 48000;
+ let total_duration_s = 1;
+ let data_count = 10;
+ let outputs = [];
+ let init = {
+ error: e => {
+ assert_unreached("error: " + e);
+ },
+ output: chunk => {
+ outputs.push(chunk);
+ }
+ };
+
+ let encoder = new AudioEncoder(init);
+
+ assert_equals(encoder.state, "unconfigured");
+ let config = {
+ codec: 'opus',
+ sampleRate: sample_rate,
+ numberOfChannels: 2,
+ bitrate: 256000 //256kbit
+ };
+
+ encoder.configure(config);
+
+ let timestamp_us = 0;
+ let data_duration_s = total_duration_s / data_count;
+ let data_length = data_duration_s * config.sampleRate;
+ for (let i = 0; i < data_count; i++) {
+ let data = make_audio_data(timestamp_us, config.numberOfChannels,
+ config.sampleRate, data_length);
+ encoder.encode(data);
+ data.close();
+ timestamp_us += data_duration_s * 1_000_000;
+ }
+ await encoder.flush();
+ encoder.close();
+ assert_greater_than_equal(outputs.length, data_count);
+ assert_equals(outputs[0].timestamp, 0, "first chunk timestamp");
+ let total_encoded_duration = 0
+ for (chunk of outputs) {
+ assert_greater_than(chunk.byteLength, 0);
+ assert_greater_than_equal(timestamp_us, chunk.timestamp);
+ assert_greater_than(chunk.duration, 0);
+ total_encoded_duration += chunk.duration;
+ }
+
+ // The total duration might be padded with silence.
+ assert_greater_than_equal(
+ total_encoded_duration, total_duration_s * 1_000_000);
+}, 'Simple audio encoding');
+
+promise_test(async t => {
+ let sample_rate = 48000;
+ let total_duration_s = 1;
+ let data_count = 10;
+ let outputs = [];
+ let init = {
+ error: e => {
+ assert_unreached('error: ' + e);
+ },
+ output: chunk => {
+ outputs.push(chunk);
+ }
+ };
+
+ let encoder = new AudioEncoder(init);
+
+ assert_equals(encoder.state, 'unconfigured');
+ let config = {
+ codec: 'opus',
+ sampleRate: sample_rate,
+ numberOfChannels: 2,
+ bitrate: 256000 // 256kbit
+ };
+
+ encoder.configure(config);
+
+ let timestamp_us = -10000;
+ let data = make_audio_data(
+ timestamp_us, config.numberOfChannels, config.sampleRate, 10000);
+ encoder.encode(data);
+ data.close();
+ await encoder.flush();
+ encoder.close();
+ assert_greater_than_equal(outputs.length, 1);
+ assert_equals(outputs[0].timestamp, -10000, 'first chunk timestamp');
+ for (chunk of outputs) {
+ assert_greater_than(chunk.byteLength, 0);
+ assert_greater_than_equal(chunk.timestamp, timestamp_us);
+ }
+}, 'Encode audio with negative timestamp');
+
+async function checkEncodingError(config, good_data, bad_data) {
+ let error = null;
+ let outputs = 0;
+ let init = {
+ error: e => {
+ error = e;
+ },
+ output: chunk => {
+ outputs++;
+ }
+ };
+ let encoder = new AudioEncoder(init);
+
+
+ let support = await AudioEncoder.isConfigSupported(config);
+ assert_true(support.supported)
+ config = support.config;
+
+ encoder.configure(config);
+ for (let data of good_data) {
+ encoder.encode(data);
+ data.close();
+ }
+ await encoder.flush();
+
+ let txt_config = "sampleRate: " + config.sampleRate
+ + " numberOfChannels: " + config.numberOfChannels;
+ assert_equals(error, null, txt_config);
+ assert_greater_than(outputs, 0);
+ encoder.encode(bad_data);
+ await encoder.flush().catch(() => {});
+ assert_not_equals(error, null, txt_config);
+}
+
+function channelNumberVariationTests() {
+ let sample_rate = 48000;
+ for (let channels = 1; channels <= 2; channels++) {
+ let config = {
+ codec: 'opus',
+ sampleRate: sample_rate,
+ numberOfChannels: channels,
+ bitrate: 128000
+ };
+
+ let ts = 0;
+ let length = sample_rate / 10;
+ let data1 = make_audio_data(ts, channels, sample_rate, length);
+
+ ts += Math.floor(data1.duration / 1000000);
+ let data2 = make_audio_data(ts, channels, sample_rate, length);
+ ts += Math.floor(data2.duration / 1000000);
+
+ let bad_data = make_audio_data(ts, channels + 1, sample_rate, length);
+ promise_test(async t =>
+ checkEncodingError(config, [data1, data2], bad_data),
+ "Channel number variation: " + channels);
+ }
+}
+channelNumberVariationTests();
+
+function sampleRateVariationTests() {
+ let channels = 1
+ for (let sample_rate = 3000; sample_rate < 96000; sample_rate += 10000) {
+ let config = {
+ codec: 'opus',
+ sampleRate: sample_rate,
+ numberOfChannels: channels,
+ bitrate: 128000
+ };
+
+ let ts = 0;
+ let length = sample_rate / 10;
+ let data1 = make_audio_data(ts, channels, sample_rate, length);
+
+ ts += Math.floor(data1.duration / 1000000);
+ let data2 = make_audio_data(ts, channels, sample_rate, length);
+ ts += Math.floor(data2.duration / 1000000);
+
+ let bad_data = make_audio_data(ts, channels, sample_rate + 333, length);
+ promise_test(async t =>
+ checkEncodingError(config, [data1, data2], bad_data),
+ "Sample rate variation: " + sample_rate);
+ }
+}
+sampleRateVariationTests();
+
+promise_test(async t => {
+ let sample_rate = 48000;
+ let total_duration_s = 1;
+ let data_count = 10;
+ let input_data = [];
+ let output_data = [];
+
+ let decoder_init = {
+ error: t.unreached_func("Decode error"),
+ output: data => {
+ output_data.push(data);
+ }
+ };
+ let decoder = new AudioDecoder(decoder_init);
+
+ let encoder_init = {
+ error: t.unreached_func("Encoder error"),
+ output: (chunk, metadata) => {
+ let config = metadata.decoderConfig;
+ if (config)
+ decoder.configure(config);
+ decoder.decode(chunk);
+ }
+ };
+ let encoder = new AudioEncoder(encoder_init);
+
+ let config = {
+ codec: 'opus',
+ sampleRate: sample_rate,
+ numberOfChannels: 2,
+ bitrate: 256000, //256kbit
+ };
+ encoder.configure(config);
+
+ let timestamp_us = 0;
+ const data_duration_s = total_duration_s / data_count;
+ const data_length = data_duration_s * config.sampleRate;
+ for (let i = 0; i < data_count; i++) {
+ let data = make_audio_data(timestamp_us, config.numberOfChannels,
+ config.sampleRate, data_length);
+ input_data.push(data);
+ encoder.encode(data);
+ timestamp_us += data_duration_s * 1_000_000;
+ }
+ await encoder.flush();
+ encoder.close();
+ await decoder.flush();
+ decoder.close();
+
+
+ let total_input = join_audio_data(input_data);
+ let frames_per_plane = total_input.length / config.numberOfChannels;
+
+ let total_output = join_audio_data(output_data);
+
+ let base_input = input_data[0];
+ let base_output = output_data[0];
+
+ // TODO: Convert formats to simplify conversions, once
+ // https://github.com/w3c/webcodecs/issues/232 is resolved.
+ assert_equals(base_input.format, "f32-planar");
+ assert_equals(base_output.format, "f32");
+
+ assert_equals(base_output.numberOfChannels, config.numberOfChannels);
+ assert_equals(base_output.sampleRate, sample_rate);
+
+  // Output can be slightly longer than the input due to padding
+ assert_greater_than_equal(total_output.length, total_input.length);
+
+ // Compare waveform before and after encoding
+ for (let channel = 0; channel < base_input.numberOfChannels; channel++) {
+
+ let plane_start = channel * frames_per_plane;
+ let input_plane = total_input.slice(
+ plane_start, plane_start + frames_per_plane);
+
+ for (let i = 0; i < base_input.numberOfFrames; i += 10) {
+ // Instead of de-interleaving the data, directly look into |total_output|
+ // for the sample we are interested in.
+ let ouput_index = i * base_input.numberOfChannels + channel;
+
+ // Checking only every 10th sample to save test time in slow
+ // configurations like MSAN etc.
+ assert_approx_equals(
+ input_plane[i], total_output[ouput_index], 0.5,
+ 'Difference between input and output is too large.' +
+ ' index: ' + i + ' channel: ' + channel +
+ ' input: ' + input_plane[i] +
+ ' output: ' + total_output[ouput_index]);
+ }
+ }
+
+}, 'Encoding and decoding');
+
+promise_test(async t => {
+ let output_count = 0;
+ let encoder_config = {
+ codec: 'opus',
+ sampleRate: 24000,
+ numberOfChannels: 1,
+ bitrate: 96000
+ };
+ let decoder_config = null;
+
+ let init = {
+ error: t.unreached_func("Encoder error"),
+ output: (chunk, metadata) => {
+ let config = metadata.decoderConfig;
+ // Only the first invocation of the output callback is supposed to have
+ // a |config| in it.
+ output_count++;
+ if (output_count == 1) {
+ assert_equals(typeof config, "object");
+ decoder_config = config;
+ } else {
+ assert_equals(config, undefined);
+ }
+ }
+ };
+
+ let encoder = new AudioEncoder(init);
+ encoder.configure(encoder_config);
+
+ let large_data = make_audio_data(0, encoder_config.numberOfChannels,
+ encoder_config.sampleRate, encoder_config.sampleRate);
+ encoder.encode(large_data);
+ await encoder.flush();
+
+ // Large data produced more than one output, and we've got decoder_config
+ assert_greater_than(output_count, 1);
+ assert_not_equals(decoder_config, null);
+ assert_equals(decoder_config.codec, encoder_config.codec);
+ assert_equals(decoder_config.sampleRate, encoder_config.sampleRate);
+ assert_equals(decoder_config.numberOfChannels, encoder_config.numberOfChannels);
+
+ // Check that description start with 'Opus'
+ let extra_data = new Uint8Array(decoder_config.description);
+ assert_equals(extra_data[0], 0x4f);
+ assert_equals(extra_data[1], 0x70);
+ assert_equals(extra_data[2], 0x75);
+ assert_equals(extra_data[3], 0x73);
+
+ decoder_config = null;
+ output_count = 0;
+ encoder_config.bitrate = 256000;
+ encoder.configure(encoder_config);
+ encoder.encode(large_data);
+ await encoder.flush();
+
+ // After reconfiguring encoder should produce decoder config again
+ assert_greater_than(output_count, 1);
+ assert_not_equals(decoder_config, null);
+ assert_not_equals(decoder_config.description, null);
+ encoder.close();
+}, "Emit decoder config and extra data.");
+
+promise_test(async t => {
+ let sample_rate = 48000;
+ let total_duration_s = 1;
+ let data_count = 100;
+ let init = getDefaultCodecInit(t);
+ init.output = (chunk, metadata) => {}
+
+ let encoder = new AudioEncoder(init);
+
+ // No encodes yet.
+ assert_equals(encoder.encodeQueueSize, 0);
+
+ let config = {
+ codec: 'opus',
+ sampleRate: sample_rate,
+ numberOfChannels: 2,
+ bitrate: 256000 //256kbit
+ };
+ encoder.configure(config);
+
+ // Still no encodes.
+ assert_equals(encoder.encodeQueueSize, 0);
+
+ let datas = [];
+ let timestamp_us = 0;
+ let data_duration_s = total_duration_s / data_count;
+ let data_length = data_duration_s * config.sampleRate;
+ for (let i = 0; i < data_count; i++) {
+ let data = make_audio_data(timestamp_us, config.numberOfChannels,
+ config.sampleRate, data_length);
+ datas.push(data);
+ timestamp_us += data_duration_s * 1_000_000;
+ }
+
+ let lastDequeueSize = Infinity;
+ encoder.ondequeue = () => {
+ assert_greater_than(lastDequeueSize, 0, "Dequeue event after queue empty");
+ assert_greater_than(lastDequeueSize, encoder.encodeQueueSize,
+ "Dequeue event without decreased queue size");
+ lastDequeueSize = encoder.encodeQueueSize;
+ };
+
+ for (let data of datas)
+ encoder.encode(data);
+
+ assert_greater_than_equal(encoder.encodeQueueSize, 0);
+ assert_less_than_equal(encoder.encodeQueueSize, data_count);
+
+ await encoder.flush();
+ // We can guarantee that all encodes are processed after a flush.
+ assert_equals(encoder.encodeQueueSize, 0);
+ // Last dequeue event should fire when the queue is empty.
+ assert_equals(lastDequeueSize, 0);
+
+ // Reset this to Infinity to track the decline of queue size for this next
+ // batch of encodes.
+ lastDequeueSize = Infinity;
+
+ for (let data of datas) {
+ encoder.encode(data);
+ data.close();
+ }
+
+ assert_greater_than_equal(encoder.encodeQueueSize, 0);
+ encoder.reset();
+ assert_equals(encoder.encodeQueueSize, 0);
+}, 'encodeQueueSize test');
+
+const testOpusEncoderConfigs = [
+ {
+ comment: 'Empty Opus config',
+ opus: {},
+ },
+ {
+ comment: 'Opus with frameDuration',
+ opus: {frameDuration: 2500},
+ },
+ {
+ comment: 'Opus with complexity',
+ opus: {complexity: 10},
+ },
+ {
+ comment: 'Opus with useinbandfec',
+ opus: {
+ packetlossperc: 15,
+ useinbandfec: true,
+ },
+ },
+ {
+ comment: 'Opus with usedtx',
+ opus: {usedtx: true},
+ },
+ {
+ comment: 'Opus mixed parameters',
+ opus: {
+ frameDuration: 40000,
+ complexity: 0,
+ packetlossperc: 10,
+ useinbandfec: true,
+ usedtx: true,
+ },
+ }
+];
+
+testOpusEncoderConfigs.forEach(entry => {
+ promise_test(async t => {
+ let sample_rate = 48000;
+ let total_duration_s = 0.5;
+ let data_count = 10;
+ let outputs = [];
+ let init = {
+ error: e => {
+ assert_unreached('error: ' + e);
+ },
+ output: chunk => {
+ outputs.push(chunk);
+ }
+ };
+
+ let encoder = new AudioEncoder(init);
+
+ assert_equals(encoder.state, 'unconfigured');
+ let config = {
+ codec: 'opus',
+ sampleRate: sample_rate,
+ numberOfChannels: 2,
+ bitrate: 256000, // 256kbit
+ opus: entry.opus,
+ };
+
+ encoder.configure(config);
+
+ let timestamp_us = 0;
+ let data_duration_s = total_duration_s / data_count;
+ let data_length = data_duration_s * config.sampleRate;
+ for (let i = 0; i < data_count; i++) {
+ let data = make_audio_data(
+ timestamp_us, config.numberOfChannels, config.sampleRate,
+ data_length);
+ encoder.encode(data);
+ data.close();
+ timestamp_us += data_duration_s * 1_000_000;
+ }
+
+ // Encoders might output an extra buffer of silent padding.
+ let padding_us = data_duration_s * 1_000_000;
+
+ await encoder.flush();
+ encoder.close();
+ assert_greater_than_equal(outputs.length, data_count);
+ assert_equals(outputs[0].timestamp, 0, 'first chunk timestamp');
+ let total_encoded_duration = 0
+ for (chunk of outputs) {
+ assert_greater_than(chunk.byteLength, 0, 'chunk byteLength');
+ assert_greater_than_equal(
+ timestamp_us + padding_us, chunk.timestamp, 'chunk timestamp');
+ assert_greater_than(chunk.duration, 0, 'chunk duration');
+ total_encoded_duration += chunk.duration;
+ }
+
+ // The total duration might be padded with silence.
+ assert_greater_than_equal(
+ total_encoded_duration, total_duration_s * 1_000_000);
+ }, 'Test encoding Opus with additional parameters: ' + entry.comment);
+}) \ No newline at end of file
diff --git a/testing/web-platform/tests/webcodecs/audioDecoder-codec-specific.https.any.js b/testing/web-platform/tests/webcodecs/audioDecoder-codec-specific.https.any.js
new file mode 100644
index 0000000000..92513be087
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/audioDecoder-codec-specific.https.any.js
@@ -0,0 +1,371 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+// META: variant=?adts_aac
+// META: variant=?mp4_aac
+// META: variant=?mp3
+// META: variant=?opus
+// META: variant=?pcm_alaw
+// META: variant=?pcm_mulaw
+
+const ADTS_AAC_DATA = {
+ src: 'sfx.adts',
+ config: {
+ codec: 'mp4a.40.2',
+ sampleRate: 48000,
+ numberOfChannels: 1,
+ },
+ chunks: [
+ {offset: 0, size: 248}, {offset: 248, size: 280}, {offset: 528, size: 258},
+ {offset: 786, size: 125}, {offset: 911, size: 230},
+ {offset: 1141, size: 148}, {offset: 1289, size: 224},
+ {offset: 1513, size: 166}, {offset: 1679, size: 216},
+ {offset: 1895, size: 183}
+ ],
+ duration: 24000
+};
+
+const MP3_DATA = {
+ src: 'sfx.mp3',
+ config: {
+ codec: 'mp3',
+ sampleRate: 48000,
+ numberOfChannels: 1,
+ },
+ chunks: [
+ {offset: 333, size: 288}, {offset: 621, size: 288},
+ {offset: 909, size: 288}, {offset: 1197, size: 288},
+ {offset: 1485, size: 288}, {offset: 1773, size: 288},
+ {offset: 2061, size: 288}, {offset: 2349, size: 288},
+ {offset: 2637, size: 288}, {offset: 2925, size: 288}
+ ],
+ duration: 24000
+};
+
+const MP4_AAC_DATA = {
+ src: 'sfx-aac.mp4',
+ config: {
+ codec: 'mp4a.40.2',
+ sampleRate: 48000,
+ numberOfChannels: 1,
+ description: {offset: 2552, size: 5},
+ },
+ chunks: [
+ {offset: 44, size: 241},
+ {offset: 285, size: 273},
+ {offset: 558, size: 251},
+ {offset: 809, size: 118},
+ {offset: 927, size: 223},
+ {offset: 1150, size: 141},
+ {offset: 1291, size: 217},
+ {offset: 1508, size: 159},
+ {offset: 1667, size: 209},
+ {offset: 1876, size: 176},
+ ],
+ duration: 21333
+};
+
+const OPUS_DATA = {
+ src: 'sfx-opus.ogg',
+ config: {
+ codec: 'opus',
+ sampleRate: 48000,
+ numberOfChannels: 1,
+ description: {offset: 28, size: 19},
+ },
+ chunks: [
+ {offset: 185, size: 450}, {offset: 635, size: 268},
+ {offset: 903, size: 285}, {offset: 1188, size: 296},
+ {offset: 1484, size: 287}, {offset: 1771, size: 308},
+ {offset: 2079, size: 289}, {offset: 2368, size: 286},
+ {offset: 2654, size: 296}, {offset: 2950, size: 294}
+ ],
+ duration: 20000
+};
+
+const PCM_ALAW_DATA = {
+ src: 'sfx-alaw.wav',
+ config: {
+ codec: 'alaw',
+ sampleRate: 48000,
+ numberOfChannels: 1,
+ },
+ // Any arbitrary grouping should work.
+ chunks: [
+ {offset: 0, size: 2048}, {offset: 2048, size: 2048},
+ {offset: 4096, size: 2048}, {offset: 6144, size: 2048},
+ {offset: 8192, size: 2048}, {offset: 10240, size: 92}
+ ],
+ duration: 35555
+};
+
+const PCM_MULAW_DATA = {
+ src: 'sfx-mulaw.wav',
+ config: {
+ codec: 'ulaw',
+ sampleRate: 48000,
+ numberOfChannels: 1,
+ },
+
+ // Any arbitrary grouping should work.
+ chunks: [
+ {offset: 0, size: 2048}, {offset: 2048, size: 2048},
+ {offset: 4096, size: 2048}, {offset: 6144, size: 2048},
+ {offset: 8192, size: 2048}, {offset: 10240, size: 92}
+ ],
+ duration: 35555
+};
+
+// Allows mutating `callbacks` after constructing the AudioDecoder, wraps calls
+// in t.step().
+function createAudioDecoder(t, callbacks) {
+ return new AudioDecoder({
+ output(frame) {
+ if (callbacks && callbacks.output) {
+ t.step(() => callbacks.output(frame));
+ } else {
+ t.unreached_func('unexpected output()');
+ }
+ },
+ error(e) {
+ if (callbacks && callbacks.error) {
+ t.step(() => callbacks.error(e));
+ } else {
+ t.unreached_func('unexpected error()');
+ }
+ }
+ });
+}
+
+// Create a view of an ArrayBuffer.
+function view(buffer, {offset, size}) {
+ return new Uint8Array(buffer, offset, size);
+}
+
+let CONFIG = null;
+let CHUNK_DATA = null;
+let CHUNKS = null;
+promise_setup(async () => {
+ const data = {
+ '?adts_aac': ADTS_AAC_DATA,
+ '?mp3': MP3_DATA,
+ '?mp4_aac': MP4_AAC_DATA,
+ '?opus': OPUS_DATA,
+ '?pcm_alaw': PCM_ALAW_DATA,
+ '?pcm_mulaw': PCM_MULAW_DATA,
+ }[location.search];
+
+ // Don't run any tests if the codec is not supported.
+ assert_equals("function", typeof AudioDecoder.isConfigSupported);
+ let supported = false;
+ try {
+ const support = await AudioDecoder.isConfigSupported({
+ codec: data.config.codec,
+ sampleRate: data.config.sampleRate,
+ numberOfChannels: data.config.numberOfChannels
+ });
+ supported = support.supported;
+ } catch (e) {
+ }
+ assert_implements_optional(supported, data.config.codec + ' unsupported');
+
+ // Fetch the media data and prepare buffers.
+ const response = await fetch(data.src);
+ const buf = await response.arrayBuffer();
+
+ CONFIG = {...data.config};
+ if (data.config.description) {
+ CONFIG.description = view(buf, data.config.description);
+ }
+
+ CHUNK_DATA = data.chunks.map((chunk, i) => view(buf, chunk));
+
+ CHUNKS = CHUNK_DATA.map((encodedData, i) => new EncodedAudioChunk({
+ type: 'key',
+ timestamp: i * data.duration,
+ duration: data.duration,
+ data: encodedData
+ }));
+});
+
+promise_test(t => {
+ return AudioDecoder.isConfigSupported(CONFIG);
+}, 'Test isConfigSupported()');
+
+promise_test(t => {
+ // Define a valid config that includes a hypothetical 'futureConfigFeature',
+ // which is not yet recognized by the User Agent.
+ const validConfig = {
+ ...CONFIG,
+ futureConfigFeature: 'foo',
+ };
+
+ // The UA will evaluate validConfig as being "valid", ignoring the
+ // `futureConfigFeature` it doesn't recognize.
+ return AudioDecoder.isConfigSupported(validConfig).then((decoderSupport) => {
+ // AudioDecoderSupport must contain the following properties.
+ assert_true(decoderSupport.hasOwnProperty('supported'));
+ assert_true(decoderSupport.hasOwnProperty('config'));
+
+ // AudioDecoderSupport.config must not contain unrecognized properties.
+ assert_false(decoderSupport.config.hasOwnProperty('futureConfigFeature'));
+
+ // AudioDecoderSupport.config must contain the recognized properties.
+ assert_equals(decoderSupport.config.codec, validConfig.codec);
+ assert_equals(decoderSupport.config.sampleRate, validConfig.sampleRate);
+ assert_equals(
+ decoderSupport.config.numberOfChannels, validConfig.numberOfChannels);
+
+ if (validConfig.description) {
+ // The description must be copied.
+ assert_false(
+ decoderSupport.config.description === validConfig.description,
+ 'description is unique');
+ assert_array_equals(
+ new Uint8Array(decoderSupport.config.description, 0),
+ new Uint8Array(validConfig.description, 0), 'description');
+ } else {
+ assert_false(
+ decoderSupport.config.hasOwnProperty('description'), 'description');
+ }
+ });
+}, 'Test that AudioDecoder.isConfigSupported() returns a parsed configuration');
+
+promise_test(async t => {
+ const decoder = createAudioDecoder(t);
+ decoder.configure(CONFIG);
+ assert_equals(decoder.state, 'configured', 'state');
+}, 'Test configure()');
+
+promise_test(t => {
+ const decoder = createAudioDecoder(t);
+ return testClosedCodec(t, decoder, CONFIG, CHUNKS[0]);
+}, 'Verify closed AudioDecoder operations');
+
+promise_test(async t => {
+ const callbacks = {};
+ const decoder = createAudioDecoder(t, callbacks);
+
+ let outputs = 0;
+ callbacks.output = frame => {
+ outputs++;
+ frame.close();
+ };
+
+ decoder.configure(CONFIG);
+ CHUNKS.forEach(chunk => {
+ decoder.decode(chunk);
+ });
+
+ await decoder.flush();
+ assert_equals(outputs, CHUNKS.length, 'outputs');
+}, 'Test decoding');
+
+promise_test(async t => {
+ const callbacks = {};
+ const decoder = createAudioDecoder(t, callbacks);
+
+ let outputs = 0;
+ callbacks.output = frame => {
+ outputs++;
+ frame.close();
+ };
+
+ decoder.configure(CONFIG);
+ decoder.decode(new EncodedAudioChunk(
+ {type: 'key', timestamp: -42, data: CHUNK_DATA[0]}));
+
+ await decoder.flush();
+ assert_equals(outputs, 1, 'outputs');
+}, 'Test decoding a with negative timestamp');
+
+promise_test(async t => {
+ const callbacks = {};
+ const decoder = createAudioDecoder(t, callbacks);
+
+ let outputs = 0;
+ callbacks.output = frame => {
+ outputs++;
+ frame.close();
+ };
+
+ decoder.configure(CONFIG);
+ decoder.decode(CHUNKS[0]);
+
+ await decoder.flush();
+ assert_equals(outputs, 1, 'outputs');
+
+ decoder.decode(CHUNKS[0]);
+ await decoder.flush();
+ assert_equals(outputs, 2, 'outputs');
+}, 'Test decoding after flush');
+
+promise_test(async t => {
+ const callbacks = {};
+ const decoder = createAudioDecoder(t, callbacks);
+
+ decoder.configure(CONFIG);
+ decoder.decode(CHUNKS[0]);
+ decoder.decode(CHUNKS[1]);
+ const flushDone = decoder.flush();
+
+ // Wait for the first output, then reset.
+ let outputs = 0;
+ await new Promise(resolve => {
+ callbacks.output = frame => {
+ outputs++;
+ assert_equals(outputs, 1, 'outputs');
+ decoder.reset();
+ frame.close();
+ resolve();
+ };
+ });
+
+ // Flush should have been synchronously rejected.
+ await promise_rejects_dom(t, 'AbortError', flushDone);
+
+ assert_equals(outputs, 1, 'outputs');
+}, 'Test reset during flush');
+
+promise_test(async t => {
+ const callbacks = {};
+ const decoder = createAudioDecoder(t, callbacks);
+
+ // No decodes yet.
+ assert_equals(decoder.decodeQueueSize, 0);
+
+ decoder.configure(CONFIG);
+
+ // Still no decodes.
+ assert_equals(decoder.decodeQueueSize, 0);
+
+ let lastDequeueSize = Infinity;
+ decoder.ondequeue = () => {
+ assert_greater_than(lastDequeueSize, 0, "Dequeue event after queue empty");
+ assert_greater_than(lastDequeueSize, decoder.decodeQueueSize,
+ "Dequeue event without decreased queue size");
+ lastDequeueSize = decoder.decodeQueueSize;
+ };
+
+ for (let chunk of CHUNKS)
+ decoder.decode(chunk);
+
+ assert_greater_than_equal(decoder.decodeQueueSize, 0);
+ assert_less_than_equal(decoder.decodeQueueSize, CHUNKS.length);
+
+ await decoder.flush();
+ // We can guarantee that all decodes are processed after a flush.
+ assert_equals(decoder.decodeQueueSize, 0);
+ // Last dequeue event should fire when the queue is empty.
+ assert_equals(lastDequeueSize, 0);
+
+ // Reset this to Infinity to track the decline of queue size for this next
+ // batch of decodes.
+ lastDequeueSize = Infinity;
+
+ for (let chunk of CHUNKS)
+ decoder.decode(chunk);
+
+ assert_greater_than_equal(decoder.decodeQueueSize, 0);
+ decoder.reset();
+ assert_equals(decoder.decodeQueueSize, 0);
+}, 'AudioDecoder decodeQueueSize test');
diff --git a/testing/web-platform/tests/webcodecs/av1.mp4 b/testing/web-platform/tests/webcodecs/av1.mp4
new file mode 100644
index 0000000000..8d2a7acdb8
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/av1.mp4
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/chunk-serialization.any.js b/testing/web-platform/tests/webcodecs/chunk-serialization.any.js
new file mode 100644
index 0000000000..821a71170d
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/chunk-serialization.any.js
@@ -0,0 +1,80 @@
+// META: global=window
+// META: script=/common/media.js
+// META: script=/webcodecs/utils.js
+
+var defaultAudioInit = {
+ type: 'key',
+ timestamp: 1234,
+ duration: 9876,
+ data: new Uint8Array([5, 6, 7, 8])
+};
+
+var defaultVideoInit = {
+ type: 'key',
+ timestamp: 1234,
+ duration: 5678,
+ data: new Uint8Array([9, 10, 11, 12])
+};
+
+function createDefaultChunk(type, init) {
+ return type == 'audio' ? new EncodedAudioChunk(init) :
+ new EncodedVideoChunk(init);
+}
+
+function runTest(t, type) {
+ let defaultInit = type == 'audio' ? defaultAudioInit : defaultVideoInit;
+ let originalData = createDefaultChunk(type, defaultInit);
+
+ let channel = new MessageChannel();
+ let localPort = channel.port1;
+ let externalPort = channel.port2;
+
+ externalPort.onmessage = t.step_func((e) => {
+ let newData = e.data;
+
+ // We should have a valid deserialized buffer.
+ assert_equals(newData.type, defaultInit.type, 'type');
+ assert_equals(newData.duration, defaultInit.duration, 'duration');
+ assert_equals(newData.timestamp, defaultInit.timestamp, 'timestamp');
+ assert_equals(
+ newData.byteLength, defaultInit.data.byteLength, 'byteLength');
+
+ const originalData_copyDest = new Uint8Array(defaultInit.data);
+ const newData_copyDest = new Uint8Array(defaultInit.data);
+
+ originalData.copyTo(originalData_copyDest);
+ newData.copyTo(newData_copyDest);
+
+ for (var i = 0; i < newData_copyDest.length; ++i) {
+ assert_equals(
+ newData_copyDest[i], originalData_copyDest[i], `data (i=${i})`);
+ }
+
+ externalPort.postMessage('Done');
+ })
+
+ localPort.onmessage = t.step_func_done((e) => {
+ assert_equals(originalData.type, defaultInit.type, 'type');
+ assert_equals(originalData.duration, defaultInit.duration, 'duration');
+ assert_equals(originalData.timestamp, defaultInit.timestamp, 'timestamp');
+ assert_equals(
+ originalData.byteLength, defaultInit.data.byteLength, 'byteLength');
+ })
+
+ localPort.postMessage(originalData);
+}
+
+async_test(t => {
+ runTest(t, 'audio');
+}, 'Verify EncodedAudioChunk is serializable.');
+
+
+async_test(t => {
+ runTest(t, 'video');
+}, 'Verify EncodedVideoChunk is serializable.');
+
+test(() => {
+ const chunk = createDefaultChunk("video", defaultVideoInit);
+ if (window.history)
+ assert_throws_dom("DataCloneError", () => history.pushState({ chunk }, null));
+}, "Verify EncodedVideoChunk cannot be stored");
diff --git a/testing/web-platform/tests/webcodecs/encoded-audio-chunk.any.js b/testing/web-platform/tests/webcodecs/encoded-audio-chunk.any.js
new file mode 100644
index 0000000000..1ada120e4d
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/encoded-audio-chunk.any.js
@@ -0,0 +1,45 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+test(t => {
+ let chunk = new EncodedAudioChunk({type: 'key',
+ timestamp: 10,
+ duration: 123,
+ data: new Uint8Array([0x0A, 0x0B, 0x0C])});
+ assert_equals(chunk.type, 'key', 'type');
+ assert_equals(chunk.timestamp, 10, 'timestamp');
+ assert_equals(chunk.duration, 123, 'duration');
+ assert_equals(chunk.byteLength, 3, 'byteLength');
+ let copyDest = new Uint8Array(3);
+ chunk.copyTo(copyDest);
+ assert_equals(copyDest[0], 0x0A, 'copyDest[0]');
+ assert_equals(copyDest[1], 0x0B, 'copyDest[1]');
+ assert_equals(copyDest[2], 0x0C, 'copyDest[2]');
+
+ // Make another chunk with different values for good measure.
+ chunk = new EncodedAudioChunk({type: 'delta',
+ timestamp: 100,
+ data: new Uint8Array([0x00, 0x01])});
+ assert_equals(chunk.type, 'delta', 'type');
+ assert_equals(chunk.timestamp, 100, 'timestamp');
+ assert_equals(chunk.duration, null, 'missing duration');
+ assert_equals(chunk.byteLength, 2, 'byteLength');
+ copyDest = new Uint8Array(2);
+ chunk.copyTo(copyDest);
+ assert_equals(copyDest[0], 0x00, 'copyDest[0]');
+ assert_equals(copyDest[1], 0x01, 'copyDest[1]');
+}, 'Test we can construct an EncodedAudioChunk.');
+
+test(t => {
+ let chunk = new EncodedAudioChunk({type: 'delta',
+ timestamp: 100,
+ data: new Uint8Array([0x00, 0x01, 0x02])});
+ assert_throws_js(
+ TypeError,
+ () => chunk.copyTo(new Uint8Array(2)), 'destination is not large enough');
+
+ const detached = makeDetachedArrayBuffer();
+ assert_throws_js(
+ TypeError,
+ () => chunk.copyTo(detached), 'destination is detached');
+}, 'Test copyTo() exception if destination invalid');
diff --git a/testing/web-platform/tests/webcodecs/encoded-audio-chunk.crossOriginIsolated.https.any.js b/testing/web-platform/tests/webcodecs/encoded-audio-chunk.crossOriginIsolated.https.any.js
new file mode 100644
index 0000000000..7063d85887
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/encoded-audio-chunk.crossOriginIsolated.https.any.js
@@ -0,0 +1,29 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+function testSharedArrayBufferEncodedAudioChunk(useView) {
+ let data = new SharedArrayBuffer(3);
+ let view = new Uint8Array(data);
+ view[0] = 0x0A;
+ view[1] = 0x0B;
+ view[2] = 0x0C;
+
+ let chunk = new EncodedAudioChunk(
+ {type: 'key', timestamp: 10, duration: 123, data: useView ? view : data});
+ assert_equals(chunk.byteLength, 3, 'byteLength');
+
+ let copyDest = new SharedArrayBuffer(3);
+ let destView = new Uint8Array(copyDest);
+ chunk.copyTo(useView ? destView : copyDest);
+ assert_equals(destView[0], 0x0A, 'copyDest[0]');
+ assert_equals(destView[1], 0x0B, 'copyDest[1]');
+ assert_equals(destView[2], 0x0C, 'copyDest[2]');
+}
+
+test(t => {
+ testSharedArrayBufferEncodedAudioChunk(/*useView=*/ false);
+}, 'Test construction and copyTo() using a SharedArrayBuffer');
+
+test(t => {
+ testSharedArrayBufferEncodedAudioChunk(/*useView=*/ true);
+}, 'Test construction and copyTo() using a Uint8Array(SharedArrayBuffer)');
diff --git a/testing/web-platform/tests/webcodecs/encoded-audio-chunk.crossOriginIsolated.https.any.js.headers b/testing/web-platform/tests/webcodecs/encoded-audio-chunk.crossOriginIsolated.https.any.js.headers
new file mode 100644
index 0000000000..5f8621ef83
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/encoded-audio-chunk.crossOriginIsolated.https.any.js.headers
@@ -0,0 +1,2 @@
+Cross-Origin-Embedder-Policy: require-corp
+Cross-Origin-Opener-Policy: same-origin
diff --git a/testing/web-platform/tests/webcodecs/encoded-video-chunk.any.js b/testing/web-platform/tests/webcodecs/encoded-video-chunk.any.js
new file mode 100644
index 0000000000..0cf0d8de44
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/encoded-video-chunk.any.js
@@ -0,0 +1,45 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+test(t => {
+ let chunk = new EncodedVideoChunk({type: 'key',
+ timestamp: 10,
+ duration: 300,
+ data: new Uint8Array([0x0A, 0x0B, 0x0C])});
+ assert_equals(chunk.type, 'key', 'type');
+ assert_equals(chunk.timestamp, 10, 'timestamp');
+ assert_equals(chunk.duration, 300, 'duration');
+ assert_equals(chunk.byteLength, 3, 'byteLength');
+ let copyDest = new Uint8Array(3);
+ chunk.copyTo(copyDest);
+ assert_equals(copyDest[0], 0x0A, 'copyDest[0]');
+ assert_equals(copyDest[1], 0x0B, 'copyDest[1]');
+ assert_equals(copyDest[2], 0x0C, 'copyDest[2]');
+
+ // Make another chunk with different values for good measure.
+ chunk = new EncodedVideoChunk({type: 'delta',
+ timestamp: 100,
+ data: new Uint8Array([0x00, 0x01])});
+ assert_equals(chunk.type, 'delta', 'type');
+ assert_equals(chunk.timestamp, 100, 'timestamp');
+ assert_equals(chunk.duration, null, 'duration');
+ assert_equals(chunk.byteLength, 2, 'byteLength');
+ copyDest = new Uint8Array(2);
+ chunk.copyTo(copyDest);
+ assert_equals(copyDest[0], 0x00, 'copyDest[0]');
+ assert_equals(copyDest[1], 0x01, 'copyDest[1]');
+}, 'Test we can construct an EncodedVideoChunk.');
+
+test(t => {
+ let chunk = new EncodedVideoChunk({type: 'delta',
+ timestamp: 100,
+ data: new Uint8Array([0x00, 0x01, 0x02])});
+ assert_throws_js(
+ TypeError,
+ () => chunk.copyTo(new Uint8Array(2)), 'destination is not large enough');
+
+ const detached = makeDetachedArrayBuffer();
+ assert_throws_js(
+ TypeError,
+ () => chunk.copyTo(detached), 'destination is detached');
+}, 'Test copyTo() exception if destiation invalid'); \ No newline at end of file
diff --git a/testing/web-platform/tests/webcodecs/encoded-video-chunk.crossOriginIsolated.https.any.js b/testing/web-platform/tests/webcodecs/encoded-video-chunk.crossOriginIsolated.https.any.js
new file mode 100644
index 0000000000..7f414fec1f
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/encoded-video-chunk.crossOriginIsolated.https.any.js
@@ -0,0 +1,29 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+function testSharedArrayBufferEncodedVideoChunk(useView) {
+ let data = new SharedArrayBuffer(3);
+ let view = new Uint8Array(data);
+ view[0] = 0x0A;
+ view[1] = 0x0B;
+ view[2] = 0x0C;
+
+ let chunk = new EncodedVideoChunk(
+ {type: 'key', timestamp: 10, duration: 123, data: useView ? view : data});
+ assert_equals(chunk.byteLength, 3, 'byteLength');
+
+ let copyDest = new SharedArrayBuffer(3);
+ let destView = new Uint8Array(copyDest);
+ chunk.copyTo(useView ? destView : copyDest);
+ assert_equals(destView[0], 0x0A, 'copyDest[0]');
+ assert_equals(destView[1], 0x0B, 'copyDest[1]');
+ assert_equals(destView[2], 0x0C, 'copyDest[2]');
+}
+
+test(t => {
+ testSharedArrayBufferEncodedVideoChunk(/*useView=*/ false);
+}, 'Test construction and copyTo() using a SharedArrayBuffer');
+
+test(t => {
+ testSharedArrayBufferEncodedVideoChunk(/*useView=*/ true);
+}, 'Test construction and copyTo() using a Uint8Array(SharedArrayBuffer)');
diff --git a/testing/web-platform/tests/webcodecs/encoded-video-chunk.crossOriginIsolated.https.any.js.headers b/testing/web-platform/tests/webcodecs/encoded-video-chunk.crossOriginIsolated.https.any.js.headers
new file mode 100644
index 0000000000..5f8621ef83
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/encoded-video-chunk.crossOriginIsolated.https.any.js.headers
@@ -0,0 +1,2 @@
+Cross-Origin-Embedder-Policy: require-corp
+Cross-Origin-Opener-Policy: same-origin
diff --git a/testing/web-platform/tests/webcodecs/four-colors-flip.avif b/testing/web-platform/tests/webcodecs/four-colors-flip.avif
new file mode 100644
index 0000000000..eb08106160
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors-flip.avif
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors-flip.gif b/testing/web-platform/tests/webcodecs/four-colors-flip.gif
new file mode 100644
index 0000000000..ff7b69a0e4
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors-flip.gif
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors-full-range-bt2020-pq-444-10bpc.avif b/testing/web-platform/tests/webcodecs/four-colors-full-range-bt2020-pq-444-10bpc.avif
new file mode 100644
index 0000000000..512a2b855e
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors-full-range-bt2020-pq-444-10bpc.avif
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.avif b/testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.avif
new file mode 100644
index 0000000000..925477b04c
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.avif
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.jpg b/testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.jpg
new file mode 100644
index 0000000000..9ce1f1abbe
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.jpg
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.webp b/testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.webp
new file mode 100644
index 0000000000..8086d0140a
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors-limited-range-420-8bpc.webp
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors-limited-range-422-8bpc.avif b/testing/web-platform/tests/webcodecs/four-colors-limited-range-422-8bpc.avif
new file mode 100644
index 0000000000..e348bade31
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors-limited-range-422-8bpc.avif
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors-limited-range-444-8bpc.avif b/testing/web-platform/tests/webcodecs/four-colors-limited-range-444-8bpc.avif
new file mode 100644
index 0000000000..300cd1ca97
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors-limited-range-444-8bpc.avif
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors.avif b/testing/web-platform/tests/webcodecs/four-colors.avif
new file mode 100644
index 0000000000..38ed02e69d
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors.avif
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors.gif b/testing/web-platform/tests/webcodecs/four-colors.gif
new file mode 100644
index 0000000000..d189e98900
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors.gif
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors.jpg b/testing/web-platform/tests/webcodecs/four-colors.jpg
new file mode 100644
index 0000000000..f888e8e844
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors.jpg
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors.mp4 b/testing/web-platform/tests/webcodecs/four-colors.mp4
new file mode 100644
index 0000000000..95a7df6411
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors.mp4
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors.png b/testing/web-platform/tests/webcodecs/four-colors.png
new file mode 100644
index 0000000000..2a8b31c426
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors.png
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/four-colors.webp b/testing/web-platform/tests/webcodecs/four-colors.webp
new file mode 100644
index 0000000000..f7dd40bee9
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/four-colors.webp
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/full-cycle-test.https.any.js b/testing/web-platform/tests/webcodecs/full-cycle-test.https.any.js
new file mode 100644
index 0000000000..7428f60748
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/full-cycle-test.https.any.js
@@ -0,0 +1,136 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/video-encoder-utils.js
+// META: variant=?av1
+// META: variant=?vp8
+// META: variant=?vp9_p0
+// META: variant=?vp9_p2
+// META: variant=?h264_avc
+// META: variant=?h264_annexb
+
+var ENCODER_CONFIG = null;
+promise_setup(async () => {
+ const config = {
+ // FIXME: H.264 has embedded color space information too.
+ '?av1': {codec: 'av01.0.04M.08', hasEmbeddedColorSpace: true},
+ '?vp8': {codec: 'vp8', hasEmbeddedColorSpace: false},
+ '?vp9_p0': {codec: 'vp09.00.10.08', hasEmbeddedColorSpace: true},
+ '?vp9_p2': {codec: 'vp09.02.10.10', hasEmbeddedColorSpace: true},
+ '?h264_avc': {
+ codec: 'avc1.42001E',
+ avc: {format: 'avc'},
+ hasEmbeddedColorSpace: true
+ },
+ '?h264_annexb': {
+ codec: 'avc1.42001E',
+ avc: {format: 'annexb'},
+ hasEmbeddedColorSpace: true
+ }
+ }[location.search];
+ config.hardwareAcceleration = 'prefer-software';
+ config.width = 320;
+ config.height = 200;
+ config.bitrate = 1000000;
+ config.bitrateMode = "constant";
+ config.framerate = 30;
+ ENCODER_CONFIG = config;
+});
+
+async function runFullCycleTest(t, options) {
+ let encoder_config = { ...ENCODER_CONFIG };
+ let encoder_color_space = {};
+ const w = encoder_config.width;
+ const h = encoder_config.height;
+ let next_ts = 0
+ let frames_to_encode = 16;
+ let frames_encoded = 0;
+ let frames_decoded = 0;
+
+ await checkEncoderSupport(t, encoder_config);
+ let decoder = new VideoDecoder({
+ output(frame) {
+ assert_equals(frame.visibleRect.width, w, "visibleRect.width");
+ assert_equals(frame.visibleRect.height, h, "visibleRect.height");
+ assert_equals(frame.timestamp, next_ts++, "decode timestamp");
+
+ // The encoder is allowed to change the color space to satisfy the
+ // encoder when readback is needed to send the frame for encoding, but
+ // the decoder shouldn't change it after the fact.
+ assert_equals(
+ frame.colorSpace.primaries, encoder_color_space.primaries,
+ 'colorSpace.primaries');
+ assert_equals(
+ frame.colorSpace.transfer, encoder_color_space.transfer,
+ 'colorSpace.transfer');
+ assert_equals(
+ frame.colorSpace.matrix, encoder_color_space.matrix,
+ 'colorSpace.matrix');
+ assert_equals(
+ frame.colorSpace.fullRange, encoder_color_space.fullRange,
+ 'colorSpace.fullRange');
+
+ frames_decoded++;
+ assert_true(validateBlackDots(frame, frame.timestamp),
+ "frame doesn't match. ts: " + frame.timestamp);
+ frame.close();
+ },
+ error(e) {
+ assert_unreached(e.message);
+ }
+ });
+
+ let next_encode_ts = 0;
+ const encoder_init = {
+ output(chunk, metadata) {
+ let config = metadata.decoderConfig;
+ if (config) {
+ config.hardwareAcceleration = encoder_config.hardwareAcceleration;
+ encoder_color_space = config.colorSpace;
+
+ // Removes the color space provided by the encoder so that color space
+ // information in the underlying bitstream is exposed during decode.
+ if (options.stripDecoderConfigColorSpace)
+ config.colorSpace = {};
+
+ decoder.configure(config);
+ }
+ decoder.decode(chunk);
+ frames_encoded++;
+ assert_equals(chunk.timestamp, next_encode_ts++, "encode timestamp");
+ },
+ error(e) {
+ assert_unreached(e.message);
+ }
+ };
+
+ let encoder = new VideoEncoder(encoder_init);
+ encoder.configure(encoder_config);
+
+ for (let i = 0; i < frames_to_encode; i++) {
+ let frame = createDottedFrame(w, h, i);
+
+ // Frames should have a valid color space when created from canvas.
+ assert_not_equals(frame.colorSpace.primaries, null, 'colorSpace.primaries');
+ assert_not_equals(frame.colorSpace.transfer, null, 'colorSpace.transfer');
+ assert_not_equals(frame.colorSpace.matrix, null, 'colorSpace.matrix');
+ assert_not_equals(frame.colorSpace.fullRange, null, 'colorSpace.fullRange');
+
+ let keyframe = (i % 5 == 0);
+ encoder.encode(frame, { keyFrame: keyframe });
+ frame.close();
+ }
+ await encoder.flush();
+ await decoder.flush();
+ encoder.close();
+ decoder.close();
+ assert_equals(frames_encoded, frames_to_encode, "frames_encoded");
+ assert_equals(frames_decoded, frames_to_encode, "frames_decoded");
+}
+
+promise_test(async t => {
+ return runFullCycleTest(t, {});
+}, 'Encoding and decoding cycle');
+
+promise_test(async t => {
+ if (ENCODER_CONFIG.hasEmbeddedColorSpace)
+ return runFullCycleTest(t, {stripDecoderConfigColorSpace: true});
+}, 'Encoding and decoding cycle w/ stripped color space');
diff --git a/testing/web-platform/tests/webcodecs/h264.annexb b/testing/web-platform/tests/webcodecs/h264.annexb
new file mode 100644
index 0000000000..60c3b8cdec
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/h264.annexb
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/h264.mp4 b/testing/web-platform/tests/webcodecs/h264.mp4
new file mode 100644
index 0000000000..e0d6a6bedc
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/h264.mp4
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/idlharness.https.any.js b/testing/web-platform/tests/webcodecs/idlharness.https.any.js
new file mode 100644
index 0000000000..f1ed92a159
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/idlharness.https.any.js
@@ -0,0 +1,61 @@
+// META: global=window,dedicatedworker
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+// META: script=./utils.js
+// META: timeout=long
+
+'use strict';
+
+var defaultCodecInit = {
+ output: function() {
+ assert_unreached("unexpected output");
+ },
+ error: function() {
+ assert_unreached("unexpected error");
+ },
+}
+
+var defaultAudioChunkInit = {
+ type: 'key',
+ timestamp: 1234,
+ duration: 9876,
+ data: new Uint8Array([5, 6, 7, 8])
+};
+
+var defaultVideoChunkInit = {
+ type: 'key',
+ timestamp: 1234,
+ duration: 5678,
+ data: new Uint8Array([9, 10, 11, 12])
+};
+
+idl_test(['webcodecs'], ['dom', 'html', 'webidl'], async idlArray => {
+ self.imageBody =
+ await fetch('four-colors.png').then(response => response.arrayBuffer());
+
+ let decoder = new ImageDecoder({data: self.imageBody, type: 'image/png'});
+ await decoder.tracks.ready;
+ self.imageTracks = decoder.tracks.selectedTrack;
+
+ idlArray.add_objects({
+ AudioDecoder: [`new AudioDecoder(defaultCodecInit)`],
+ VideoDecoder: [`new VideoDecoder(defaultCodecInit)`],
+ AudioEncoder: [`new AudioEncoder(defaultCodecInit)`],
+ VideoEncoder: [`new VideoEncoder(defaultCodecInit)`],
+ EncodedAudioChunk: [`new EncodedAudioChunk(defaultAudioChunkInit)`],
+ EncodedVideoChunk: [`new EncodedVideoChunk(defaultVideoChunkInit)`],
+ AudioData: [`make_audio_data(1234, 2, 8000, 100)`],
+ VideoFrame: [
+ `new VideoFrame(makeImageBitmap(32, 16), {timestamp: 100, duration: 33})`
+ ],
+ VideoColorSpace: [
+ `new VideoColorSpace()`,
+ `new VideoColorSpace({primaries: 'bt709', transfer: 'bt709', matrix: 'bt709', fullRange: true})`,
+ ],
+ ImageDecoder:
+ [`new ImageDecoder({data: self.imageBody, type: 'image/png'})`],
+ ImageTrackList:
+ [`new ImageDecoder({data: self.imageBody, type: 'image/png'}).tracks`],
+ ImageTrack: [`self.imageTracks`],
+ });
+});
diff --git a/testing/web-platform/tests/webcodecs/image-decoder-disconnect-readable-stream-crash.https.html b/testing/web-platform/tests/webcodecs/image-decoder-disconnect-readable-stream-crash.https.html
new file mode 100644
index 0000000000..d04c3e7019
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/image-decoder-disconnect-readable-stream-crash.https.html
@@ -0,0 +1,12 @@
+<title>Test ImageDecoder destruction w/ ReadableStream doesn't crash.</title>
+<body>
+<script>
+let iframe = document.createElement('iframe');
+document.body.appendChild(iframe);
+let decoder = new iframe.contentWindow.ImageDecoder({
+ data: new Blob(['blob']).stream(),
+ type: 'image/jpeg',
+});
+document.querySelector('body').remove();
+</script>
+</body>
diff --git a/testing/web-platform/tests/webcodecs/image-decoder-image-orientation-none.https.html b/testing/web-platform/tests/webcodecs/image-decoder-image-orientation-none.https.html
new file mode 100644
index 0000000000..2e555dbe21
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/image-decoder-image-orientation-none.https.html
@@ -0,0 +1,88 @@
+<!DOCTYPE html>
+<title>Test ImageDecoder outputs to a image-orientation: none canvas.</title>
+<canvas style="image-orientation: none"></canvas>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/webcodecs/image-decoder-utils.js"></script>
+<script>
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 1, document.querySelector('canvas'));
+}, 'Test JPEG w/ EXIF orientation top-left on canvas w/o orientation');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 2, document.querySelector('canvas'));
+}, 'Test JPEG w/ EXIF orientation top-right on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 3, document.querySelector('canvas'));
+}, 'Test JPEG w/ EXIF orientation bottom-right on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 4, document.querySelector('canvas'));
+}, 'Test JPEG w/ EXIF orientation bottom-left on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 5, document.querySelector('canvas'));
+}, 'Test JPEG w/ EXIF orientation left-top on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 6, document.querySelector('canvas'));
+}, 'Test JPEG w/ EXIF orientation right-top on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 7, document.querySelector('canvas'));
+}, 'Test JPEG w/ EXIF orientation right-bottom on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 8, document.querySelector('canvas'));
+}, 'Test JPEG w/ EXIF orientation left-bottom on canvas w/o orientation.');
+
+// YUV tests
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 1, document.querySelector('canvas'), /*useYuv=*/true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation top-left on canvas w/o orientation');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 2, document.querySelector('canvas'), /*useYuv=*/true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation top-right on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 3, document.querySelector('canvas'), /*useYuv=*/true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation bottom-right on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 4, document.querySelector('canvas'), /*useYuv=*/true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation bottom-left on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 5, document.querySelector('canvas'), /*useYuv=*/true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation left-top on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 6, document.querySelector('canvas'), /*useYuv=*/true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation right-top on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 7, document.querySelector('canvas'), /*useYuv=*/true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation right-bottom on canvas w/o orientation.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(
+ 8, document.querySelector('canvas'), /*useYuv=*/true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation left-bottom on canvas w/o orientation.');
+</script>
diff --git a/testing/web-platform/tests/webcodecs/image-decoder-utils.js b/testing/web-platform/tests/webcodecs/image-decoder-utils.js
new file mode 100644
index 0000000000..eccab9b09a
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/image-decoder-utils.js
@@ -0,0 +1,206 @@
+const kYellow = 0xFFFF00FF;
+const kRed = 0xFF0000FF;
+const kBlue = 0x0000FFFF;
+const kGreen = 0x00FF00FF;
+
// Maps a packed 0xRRGGBBAA color to a human-readable name for test
// failure messages; unknown values are rendered as a hex string so
// mismatches stay debuggable.
function getColorName(color) {
  if (color === kYellow)
    return "Yellow";
  if (color === kRed)
    return "Red";
  if (color === kBlue)
    return "Blue";
  if (color === kGreen)
    return "Green";
  return "#" + color.toString(16);
}
+
// Packs the first RGBA pixel of |pixelArray| (an ImageData-like object)
// into a single unsigned 32-bit integer, 0xRRGGBBAA. When |roundForYuv|
// is set, channel values within 3 of 0xFF/0x00 are first snapped to the
// extreme; note this clamping writes back into pixelArray.data.
function toUInt32(pixelArray, roundForYuv) {
  const channels = pixelArray.data;

  if (roundForYuv) {
    // YUV to RGB conversion introduces some loss, so provide some leeway.
    const kTolerance = 3;
    for (let i = 0; i < channels.length; ++i) {
      if (channels[i] >= 0xFF - kTolerance)
        channels[i] = 0xFF;
      if (channels[i] <= kTolerance)
        channels[i] = 0x00;
    }
  }

  const [r, g, b, a] = channels;
  return ((r << 24) + (g << 16) + (b << 8) + a) >>> 0;
}
+
// Mirrors a 2D matrix along the y-axis (reverses each row) and returns
// the result as a new matrix.
//
// Fix: the previous implementation called row.reverse() directly, which
// mutates the caller's rows in place even though a new outer array is
// returned; copy each row before reversing so the input is untouched.
function flipMatrix(m) {
  return m.map(row => row.slice().reverse());
}
+
// Rotates a 2D matrix 90 degrees clockwise |count| times and returns
// the result as a new matrix (the input rows are not modified; when
// count == 0 the input is returned unchanged).
function rotateMatrix(m, count) {
  let result = m;
  for (let step = 0; step < count; ++step) {
    const prev = result;
    result = prev[0].map((_, col) => prev.map(row => row[col]).reverse());
  }
  return result;
}
+
// Decodes |buffer| (a BufferSource or ReadableStream) as |mimeType| and
// asserts the result is the standard 320x240 four-colors test image
// (yellow / red / blue / green corners).
//
// |options| may carry:
//   preferAnimation - forwarded to ImageDecoder; also asserts the
//                     selected track matches and that >1 track exists.
//   yuvFormat       - expected VideoFrame pixel format.
//   tolerance       - per-channel tolerance for corner color checks
//                     (defaults to 0; written back into |options|).
async function testFourColorsDecodeBuffer(buffer, mimeType, options = {}) {
  const decoder = new ImageDecoder(
      {data: buffer, type: mimeType, preferAnimation: options.preferAnimation});
  const result = await decoder.decode();

  assert_equals(result.image.displayWidth, 320);
  assert_equals(result.image.displayHeight, 240);
  if (options.preferAnimation !== undefined) {
    assert_greater_than(decoder.tracks.length, 1);
    assert_equals(
        options.preferAnimation, decoder.tracks.selectedTrack.animated);
  }
  if (options.yuvFormat !== undefined)
    assert_equals(result.image.format, options.yuvFormat);
  if (options.tolerance === undefined)
    options.tolerance = 0;

  // Draw to a canvas so corner pixels can be sampled back out.
  const canvas = new OffscreenCanvas(
      result.image.displayWidth, result.image.displayHeight);
  const ctx = canvas.getContext('2d');
  ctx.drawImage(result.image, 0, 0);

  const topLeft = ctx.getImageData(0, 0, 1, 1);
  const topRight = ctx.getImageData(result.image.displayWidth - 1, 0, 1, 1);
  const bottomLeft = ctx.getImageData(0, result.image.displayHeight - 1, 1, 1);
  const bottomRight = ctx.getImageData(
      result.image.displayWidth - 1, result.image.displayHeight - 1, 1, 1);

  assert_array_approx_equals(
      topLeft.data, [0xFF, 0xFF, 0x00, 0xFF], options.tolerance,
      'top left corner is yellow');
  assert_array_approx_equals(
      topRight.data, [0xFF, 0x00, 0x00, 0xFF], options.tolerance,
      'top right corner is red');
  assert_array_approx_equals(
      bottomLeft.data, [0x00, 0x00, 0xFF, 0xFF], options.tolerance,
      'bottom left corner is blue');
  assert_array_approx_equals(
      bottomRight.data, [0x00, 0xFF, 0x00, 0xFF], options.tolerance,
      'bottom right corner is green');
}
+
// Decodes a four-colors JPEG whose EXIF orientation byte has been
// patched to |orientation| (1-8, per the EXIF spec) and verifies the
// decoded image honors -- or, when |canvas| is styled with
// `image-orientation: none`, ignores -- that orientation.
//
// |canvas| is optional; when omitted an OffscreenCanvas is used and
// orientation is always respected. |useYuv| selects a 4:2:0 source file
// so the YUV decoding path is exercised.
function testFourColorDecodeWithExifOrientation(orientation, canvas, useYuv) {
  return ImageDecoder.isTypeSupported('image/jpeg').then(support => {
    assert_implements_optional(
        support, 'Optional codec image/jpeg not supported.');
    const testFile =
        useYuv ? 'four-colors-limited-range-420-8bpc.jpg' : 'four-colors.jpg';
    return fetch(testFile)
        .then(response => {
          return response.arrayBuffer();
        })
        .then(buffer => {
          // Patch the EXIF orientation byte directly inside the encoded
          // JPEG; its offset differs between the two test files.
          let u8buffer = new Uint8Array(buffer);
          u8buffer[useYuv ? 0x31 : 0x1F] =
              orientation;  // Location derived via diff.
          let decoder = new ImageDecoder({data: u8buffer, type: 'image/jpeg'});
          return decoder.decode();
        })
        .then(result => {
          // A canvas styled with `image-orientation: none` must ignore
          // the EXIF orientation when the frame is drawn.
          let respectOrientation = true;
          if (canvas)
            respectOrientation = canvas.style.imageOrientation != 'none';

          // Orientations 5-8 include a 90/270 degree rotation, which
          // swaps the reported display dimensions.
          let expectedWidth = 320;
          let expectedHeight = 240;
          if (orientation > 4 && respectOrientation)
            [expectedWidth, expectedHeight] = [expectedHeight, expectedWidth];

          if (respectOrientation) {
            assert_equals(result.image.displayWidth, expectedWidth);
            assert_equals(result.image.displayHeight, expectedHeight);
          } else if (orientation > 4) {
            assert_equals(result.image.displayHeight, expectedWidth);
            assert_equals(result.image.displayWidth, expectedHeight);
          }

          if (!canvas) {
            canvas = new OffscreenCanvas(
                result.image.displayWidth, result.image.displayHeight);
          } else {
            canvas.width = expectedWidth;
            canvas.height = expectedHeight;
          }

          let ctx = canvas.getContext('2d');
          ctx.drawImage(result.image, 0, 0);

          // Expected corner colors for orientation 1 (top-left); the
          // matrix is transformed below to match the patched
          // orientation.
          let matrix = [
            [kYellow, kRed],
            [kBlue, kGreen],
          ];
          if (respectOrientation) {
            switch (orientation) {
              case 1:  // kOriginTopLeft, default
                break;
              case 2:  // kOriginTopRight, mirror along y-axis
                matrix = flipMatrix(matrix);
                break;
              case 3:  // kOriginBottomRight, 180 degree rotation
                matrix = rotateMatrix(matrix, 2);
                break;
              case 4:  // kOriginBottomLeft, mirror along the x-axis
                matrix = flipMatrix(rotateMatrix(matrix, 2));
                break;
              case 5:  // kOriginLeftTop, mirror along x-axis + 270 degree CW
                       // rotation
                matrix = flipMatrix(rotateMatrix(matrix, 1));
                break;
              case 6:  // kOriginRightTop, 90 degree CW rotation
                matrix = rotateMatrix(matrix, 1);
                break;
              case 7:  // kOriginRightBottom, mirror along x-axis + 90 degree CW
                       // rotation
                matrix = flipMatrix(rotateMatrix(matrix, 3));
                break;
              case 8:  // kOriginLeftBottom, 270 degree CW rotation
                matrix = rotateMatrix(matrix, 3);
                break;
              default:
                assert_between_inclusive(
                    orientation, 1, 8, 'unknown image orientation');
                break;
            };
          }

          verifyFourColorsImage(
              expectedWidth, expectedHeight, ctx, matrix, useYuv);
        });
  });
}
+
// Samples the four corner pixels of |ctx| (a 2d context holding a
// |width| x |height| image) and asserts they match the color layout in
// |matrix|, given as [[topLeft, topRight], [bottomLeft, bottomRight]].
// When |matrix| is omitted the canonical four-colors layout is assumed.
// |isYuv| enables tolerance for YUV -> RGB conversion loss in toUInt32.
function verifyFourColorsImage(width, height, ctx, matrix, isYuv) {
  const layout = matrix || [
    [kYellow, kRed],
    [kBlue, kGreen],
  ];

  const corners = [
    {x: 0, y: 0, expected: layout[0][0], label: 'top left corner'},
    {x: width - 1, y: 0, expected: layout[0][1], label: 'top right corner'},
    {x: 0, y: height - 1, expected: layout[1][0], label: 'bottom left corner'},
    {
      x: width - 1,
      y: height - 1,
      expected: layout[1][1],
      label: 'bottom right corner'
    },
  ];

  for (const {x, y, expected, label} of corners) {
    const actual = toUInt32(ctx.getImageData(x, y, 1, 1), isYuv);
    assert_equals(getColorName(actual), getColorName(expected), label);
  }
}
diff --git a/testing/web-platform/tests/webcodecs/image-decoder.crossOriginIsolated.https.any.js b/testing/web-platform/tests/webcodecs/image-decoder.crossOriginIsolated.https.any.js
new file mode 100644
index 0000000000..f10cf7a067
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/image-decoder.crossOriginIsolated.https.any.js
@@ -0,0 +1,27 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/image-decoder-utils.js
+
// Exercises ImageDecoder with PNG bytes backed by a SharedArrayBuffer,
// either passed directly (useView == false) or wrapped in a Uint8Array
// view (useView == true). Skips when image/png is unsupported.
async function testSharedArrayBuffer(useView) {
  const mimeType = 'image/png';
  const support = await ImageDecoder.isTypeSupported(mimeType);
  assert_implements_optional(
      support, 'Optional codec ' + mimeType + ' not supported.');

  const response = await fetch('four-colors.png');
  const buffer = await response.arrayBuffer();

  // Copy the fetched bytes into shared memory.
  const data = new SharedArrayBuffer(buffer.byteLength);
  const view = new Uint8Array(data);
  view.set(new Uint8Array(buffer));

  return testFourColorsDecodeBuffer(useView ? view : data, mimeType);
}
+
+promise_test(t => {
+ return testSharedArrayBuffer(/*useView=*/ false);
+}, 'Test ImageDecoder decoding with a SharedArrayBuffer source');
+
+promise_test(t => {
+ return testSharedArrayBuffer(/*useView=*/ true);
+}, 'Test ImageDecoder decoding with a Uint8Array(SharedArrayBuffer) source');
diff --git a/testing/web-platform/tests/webcodecs/image-decoder.crossOriginIsolated.https.any.js.headers b/testing/web-platform/tests/webcodecs/image-decoder.crossOriginIsolated.https.any.js.headers
new file mode 100644
index 0000000000..5f8621ef83
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/image-decoder.crossOriginIsolated.https.any.js.headers
@@ -0,0 +1,2 @@
+Cross-Origin-Embedder-Policy: require-corp
+Cross-Origin-Opener-Policy: same-origin
diff --git a/testing/web-platform/tests/webcodecs/image-decoder.https.any.js b/testing/web-platform/tests/webcodecs/image-decoder.https.any.js
new file mode 100644
index 0000000000..78eea763aa
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/image-decoder.https.any.js
@@ -0,0 +1,502 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/image-decoder-utils.js
+
// Fetches |filename| and verifies it decodes as the standard 320x240
// four-colors image. The response's ReadableStream body is handed to
// the decoder directly to exercise streaming decode. See
// testFourColorsDecodeBuffer() for the supported |options|.
// Skips (precondition-failed) when |mimeType| is unsupported.
//
// Fix: dropped the unused `var decoder = null;` local.
function testFourColorsDecode(filename, mimeType, options = {}) {
  return ImageDecoder.isTypeSupported(mimeType).then(support => {
    assert_implements_optional(
        support, 'Optional codec ' + mimeType + ' not supported.');
    return fetch(filename).then(response => {
      return testFourColorsDecodeBuffer(response.body, mimeType, options);
    });
  });
}
+
// Note: Requiring all data to do YUV decoding is a Chromium limitation, other
// implementations may support YUV decode with partial ReadableStream data.
//
// Fetches |filename| fully into an ArrayBuffer before decoding (see note
// above) and verifies the four-colors image; pass options.yuvFormat to
// assert the decoded frame's pixel format (e.g. 'I420').
//
// Fix: dropped the unused `var decoder = null;` local.
function testFourColorsYuvDecode(filename, mimeType, options = {}) {
  return ImageDecoder.isTypeSupported(mimeType).then(support => {
    assert_implements_optional(
        support, 'Optional codec ' + mimeType + ' not supported.');
    return fetch(filename).then(response => {
      return response.arrayBuffer().then(buffer => {
        return testFourColorsDecodeBuffer(buffer, mimeType, options);
      });
    });
  });
}
+
+promise_test(t => {
+ return testFourColorsDecode('four-colors.jpg', 'image/jpeg');
+}, 'Test JPEG image decoding.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(1);
+}, 'Test JPEG w/ EXIF orientation top-left.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(2);
+}, 'Test JPEG w/ EXIF orientation top-right.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(3);
+}, 'Test JPEG w/ EXIF orientation bottom-right.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(4);
+}, 'Test JPEG w/ EXIF orientation bottom-left.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(5);
+}, 'Test JPEG w/ EXIF orientation left-top.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(6);
+}, 'Test JPEG w/ EXIF orientation right-top.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(7);
+}, 'Test JPEG w/ EXIF orientation right-bottom.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(8);
+}, 'Test JPEG w/ EXIF orientation left-bottom.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(1, null, /*useYuv=*/ true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation top-left.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(2, null, /*useYuv=*/ true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation top-right.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(3, null, /*useYuv=*/ true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation bottom-right.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(4, null, /*useYuv=*/ true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation bottom-left.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(5, null, /*useYuv=*/ true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation left-top.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(6, null, /*useYuv=*/ true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation right-top.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(7, null, /*useYuv=*/ true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation right-bottom.');
+
+promise_test(t => {
+ return testFourColorDecodeWithExifOrientation(8, null, /*useYuv=*/ true);
+}, 'Test 4:2:0 JPEG w/ EXIF orientation left-bottom.');
+
+
+promise_test(t => {
+ return testFourColorsDecode('four-colors.png', 'image/png');
+}, 'Test PNG image decoding.');
+
+promise_test(t => {
+ return testFourColorsDecode('four-colors.avif', 'image/avif');
+}, 'Test AVIF image decoding.');
+
+promise_test(t => {
+ return testFourColorsDecode(
+ 'four-colors-full-range-bt2020-pq-444-10bpc.avif', 'image/avif',
+ { tolerance: 3 });
+}, 'Test high bit depth HDR AVIF image decoding.');
+
+promise_test(t => {
+ return testFourColorsDecode(
+ 'four-colors-flip.avif', 'image/avif', {preferAnimation: false});
+}, 'Test multi-track AVIF image decoding w/ preferAnimation=false.');
+
+promise_test(t => {
+ return testFourColorsDecode(
+ 'four-colors-flip.avif', 'image/avif', {preferAnimation: true});
+}, 'Test multi-track AVIF image decoding w/ preferAnimation=true.');
+
+promise_test(t => {
+ return testFourColorsDecode('four-colors.webp', 'image/webp');
+}, 'Test WEBP image decoding.');
+
+promise_test(t => {
+ return testFourColorsDecode('four-colors.gif', 'image/gif');
+}, 'Test GIF image decoding.');
+
+promise_test(t => {
+ return testFourColorsYuvDecode(
+ 'four-colors-limited-range-420-8bpc.jpg', 'image/jpeg',
+ {yuvFormat: 'I420', tolerance: 3});
+}, 'Test JPEG image YUV 4:2:0 decoding.');
+
+promise_test(t => {
+ return testFourColorsYuvDecode(
+ 'four-colors-limited-range-420-8bpc.avif', 'image/avif',
+ {yuvFormat: 'I420', tolerance: 3});
+}, 'Test AVIF image YUV 4:2:0 decoding.');
+
+promise_test(t => {
+ return testFourColorsYuvDecode(
+ 'four-colors-limited-range-422-8bpc.avif', 'image/avif',
+ {yuvFormat: 'I422', tolerance: 3});
+}, 'Test AVIF image YUV 4:2:2 decoding.');
+
+promise_test(t => {
+ return testFourColorsYuvDecode(
+ 'four-colors-limited-range-444-8bpc.avif', 'image/avif',
+ {yuvFormat: 'I444', tolerance: 3});
+}, 'Test AVIF image YUV 4:4:4 decoding.');
+
+promise_test(t => {
+ return testFourColorsYuvDecode(
+ 'four-colors-limited-range-420-8bpc.webp', 'image/webp',
+ {yuvFormat: 'I420', tolerance: 3});
+}, 'Test WEBP image YUV 4:2:0 decoding.');
+
+promise_test(t => {
+ return fetch('four-colors.png').then(response => {
+ let decoder = new ImageDecoder({data: response.body, type: 'junk/type'});
+ return promise_rejects_dom(t, 'NotSupportedError', decoder.decode());
+ });
+}, 'Test invalid mime type rejects decode() requests');
+
+promise_test(t => {
+ return fetch('four-colors.png').then(response => {
+ let decoder = new ImageDecoder({data: response.body, type: 'junk/type'});
+ return promise_rejects_dom(t, 'NotSupportedError', decoder.tracks.ready);
+ });
+}, 'Test invalid mime type rejects decodeMetadata() requests');
+
+promise_test(t => {
+ return ImageDecoder.isTypeSupported('image/png').then(support => {
+ assert_implements_optional(
+ support, 'Optional codec image/png not supported.');
+ return fetch('four-colors.png')
+ .then(response => {
+ return response.arrayBuffer();
+ })
+ .then(buffer => {
+ let decoder = new ImageDecoder({data: buffer, type: 'image/png'});
+ return promise_rejects_js(
+ t, RangeError, decoder.decode({frameIndex: 1}));
+ });
+ });
+}, 'Test out of range index returns RangeError');
+
+promise_test(t => {
+ var decoder;
+ var p1;
+ return ImageDecoder.isTypeSupported('image/png').then(support => {
+ assert_implements_optional(
+ support, 'Optional codec image/png not supported.');
+ return fetch('four-colors.png')
+ .then(response => {
+ return response.arrayBuffer();
+ })
+ .then(buffer => {
+ decoder =
+ new ImageDecoder({data: buffer.slice(0, 100), type: 'image/png'});
+ return decoder.tracks.ready;
+ })
+ .then(_ => {
+ // Queue two decodes to ensure index verification and decoding are
+ // properly ordered.
+ p1 = decoder.decode({frameIndex: 0});
+ return promise_rejects_js(
+ t, RangeError, decoder.decode({frameIndex: 1}));
+ })
+ .then(_ => {
+ return promise_rejects_js(t, RangeError, p1);
+ })
+ });
+}, 'Test partial decoding without a frame results in an error');
+
+promise_test(t => {
+ var decoder;
+ var p1;
+ return ImageDecoder.isTypeSupported('image/png').then(support => {
+ assert_implements_optional(
+ support, 'Optional codec image/png not supported.');
+ return fetch('four-colors.png')
+ .then(response => {
+ return response.arrayBuffer();
+ })
+ .then(buffer => {
+ decoder =
+ new ImageDecoder({data: buffer.slice(0, 100), type: 'image/png'});
+ return decoder.completed;
+ })
+ });
+}, 'Test completed property on fully buffered decode');
+
+promise_test(t => {
+ var decoder = null;
+
+ return ImageDecoder.isTypeSupported('image/png').then(support => {
+ assert_implements_optional(
+ support, 'Optional codec image/png not supported.');
+ return fetch('four-colors.png')
+ .then(response => {
+ decoder = new ImageDecoder({data: response.body, type: 'image/png'});
+ return decoder.tracks.ready;
+ })
+ .then(_ => {
+ decoder.tracks.selectedTrack.selected = false;
+ assert_equals(decoder.tracks.selectedIndex, -1);
+ assert_equals(decoder.tracks.selectedTrack, null);
+ return decoder.tracks.ready;
+ })
+ .then(_ => {
+ return promise_rejects_dom(t, 'InvalidStateError', decoder.decode());
+ })
+ .then(_ => {
+ decoder.tracks[0].selected = true;
+ assert_equals(decoder.tracks.selectedIndex, 0);
+ assert_not_equals(decoder.tracks.selected, null);
+ return decoder.decode();
+ })
+ .then(result => {
+ assert_equals(result.image.displayWidth, 320);
+ assert_equals(result.image.displayHeight, 240);
+ });
+ });
+}, 'Test decode, decodeMetadata after no track selected.');
+
+promise_test(t => {
+ var decoder = null;
+
+ return ImageDecoder.isTypeSupported('image/avif').then(support => {
+ assert_implements_optional(
+ support, 'Optional codec image/avif not supported.');
+ return fetch('four-colors-flip.avif')
+ .then(response => {
+ decoder = new ImageDecoder({
+ data: response.body,
+ type: 'image/avif',
+ preferAnimation: false
+ });
+ return decoder.tracks.ready;
+ })
+ .then(_ => {
+ assert_equals(decoder.tracks.length, 2);
+ assert_false(decoder.tracks[decoder.tracks.selectedIndex].animated)
+ assert_false(decoder.tracks.selectedTrack.animated);
+ assert_equals(decoder.tracks.selectedTrack.frameCount, 1);
+ assert_equals(decoder.tracks.selectedTrack.repetitionCount, 0);
+ return decoder.decode();
+ })
+ .then(result => {
+ assert_equals(result.image.displayWidth, 320);
+ assert_equals(result.image.displayHeight, 240);
+ assert_equals(result.image.timestamp, 0);
+
+ // Swap to the the other track.
+ let newIndex = (decoder.tracks.selectedIndex + 1) % 2;
+ decoder.tracks[newIndex].selected = true;
+ return decoder.decode()
+ })
+ .then(result => {
+ assert_equals(result.image.displayWidth, 320);
+ assert_equals(result.image.displayHeight, 240);
+ assert_equals(result.image.timestamp, 0);
+ assert_equals(result.image.duration, 10000);
+
+ assert_equals(decoder.tracks.length, 2);
+ assert_true(decoder.tracks[decoder.tracks.selectedIndex].animated)
+ assert_true(decoder.tracks.selectedTrack.animated);
+ assert_equals(decoder.tracks.selectedTrack.frameCount, 7);
+ assert_equals(decoder.tracks.selectedTrack.repetitionCount, Infinity);
+ return decoder.decode({frameIndex: 1});
+ })
+ .then(result => {
+ assert_equals(result.image.timestamp, 10000);
+ assert_equals(result.image.duration, 10000);
+ });
+ });
+}, 'Test track selection in multi track image.');
+
// A ReadableStream underlying source that serves a GIF that can keep
// growing: it enqueues four-colors-flip.gif with its trailer stripped,
// then appends the stream's two animation frames alternately on demand.
class InfiniteGifSource {
  // Fetches the base GIF, strips its trailer byte so more frames can be
  // appended, and patches the loop-count byte to |repetitionCount|.
  async load(repetitionCount) {
    const response = await fetch('four-colors-flip.gif');
    const buffer = await response.arrayBuffer();

    // Strip GIF trailer (0x3B) so we can continue to append frames.
    this.baseImage = new Uint8Array(buffer.slice(0, buffer.byteLength - 1));
    this.baseImage[0x23] = repetitionCount;
    this.counter = 0;
  }

  // ReadableStream source hook: remembers the controller and enqueues
  // the trailerless base image.
  start(controller) {
    this.controller = controller;
    this.controller.enqueue(this.baseImage);
  }

  // Re-appends the GIF trailer and ends the stream.
  close() {
    this.controller.enqueue(new Uint8Array([0x3B]));
    this.controller.close();
  }

  // Appends one more animation frame, alternating between the two frame
  // payloads located at fixed offsets within the base image.
  addFrame() {
    const FRAME1_START = 0x26;
    const FRAME2_START = 0x553;

    const useFirstFrame = this.counter++ % 2 == 0;
    this.controller.enqueue(
        useFirstFrame ? this.baseImage.slice(FRAME1_START, FRAME2_START) :
                        this.baseImage.slice(FRAME2_START));
  }
}
+
+promise_test(async t => {
+ let support = await ImageDecoder.isTypeSupported('image/gif');
+ assert_implements_optional(
+ support, 'Optional codec image/gif not supported.');
+
+ let source = new InfiniteGifSource();
+ await source.load(5);
+
+ let stream = new ReadableStream(source, {type: 'bytes'});
+ let decoder = new ImageDecoder({data: stream, type: 'image/gif'});
+ return decoder.tracks.ready
+ .then(_ => {
+ assert_equals(decoder.tracks.selectedTrack.frameCount, 2);
+ assert_equals(decoder.tracks.selectedTrack.repetitionCount, 5);
+
+ source.addFrame();
+ return decoder.decode({frameIndex: 2});
+ })
+ .then(result => {
+ assert_equals(decoder.tracks.selectedTrack.frameCount, 3);
+ assert_equals(result.image.displayWidth, 320);
+ assert_equals(result.image.displayHeight, 240);
+
+ // Note: The stream has an alternating duration of 30ms, 40ms per frame.
+ assert_equals(result.image.timestamp, 70000, "timestamp frame 2");
+ assert_equals(result.image.duration, 30000, "duration frame 2");
+ source.addFrame();
+ return decoder.decode({frameIndex: 3});
+ })
+ .then(result => {
+ assert_equals(decoder.tracks.selectedTrack.frameCount, 4);
+ assert_equals(result.image.displayWidth, 320);
+ assert_equals(result.image.displayHeight, 240);
+ assert_equals(result.image.timestamp, 100000, "timestamp frame 3");
+ assert_equals(result.image.duration, 40000, "duration frame 3");
+
+ // Decode frame not yet available then reset before it comes in.
+ let p = decoder.decode({frameIndex: 5});
+ decoder.reset();
+ return promise_rejects_dom(t, 'AbortError', p);
+ })
+ .then(_ => {
+ // Ensure we can still decode earlier frames.
+ assert_equals(decoder.tracks.selectedTrack.frameCount, 4);
+ return decoder.decode({frameIndex: 3});
+ })
+ .then(result => {
+ assert_equals(decoder.tracks.selectedTrack.frameCount, 4);
+ assert_equals(result.image.displayWidth, 320);
+ assert_equals(result.image.displayHeight, 240);
+ assert_equals(result.image.timestamp, 100000, "timestamp frame 3");
+ assert_equals(result.image.duration, 40000, "duration frame 3");
+
+ // Decode frame not yet available then close before it comes in.
+ let p = decoder.decode({frameIndex: 5});
+ let tracks = decoder.tracks;
+ let track = decoder.tracks.selectedTrack;
+ decoder.close();
+
+ assert_equals(decoder.type, '');
+ assert_equals(decoder.tracks.length, 0);
+ assert_equals(tracks.length, 0);
+ track.selected = true; // Should do nothing.
+
+ // Previous decode should be aborted.
+ return promise_rejects_dom(t, 'AbortError', p);
+ })
+ .then(_ => {
+ // Ensure feeding the source after closing doesn't crash.
+ assert_throws_js(TypeError, () => {
+ source.addFrame();
+ });
+ });
+}, 'Test ReadableStream of gif');
+
+promise_test(async t => {
+ let support = await ImageDecoder.isTypeSupported('image/gif');
+ assert_implements_optional(
+ support, 'Optional codec image/gif not supported.');
+
+ let source = new InfiniteGifSource();
+ await source.load(5);
+
+ let stream = new ReadableStream(source, {type: 'bytes'});
+ let decoder = new ImageDecoder({data: stream, type: 'image/gif'});
+ return decoder.tracks.ready.then(_ => {
+ assert_equals(decoder.tracks.selectedTrack.frameCount, 2);
+ assert_equals(decoder.tracks.selectedTrack.repetitionCount, 5);
+
+ decoder.decode({frameIndex: 2}).then(t.unreached_func());
+ decoder.decode({frameIndex: 1}).then(t.unreached_func());
+ return decoder.tracks.ready;
+ });
+}, 'Test that decode requests are serialized.');
+
+promise_test(async t => {
+ let support = await ImageDecoder.isTypeSupported('image/gif');
+ assert_implements_optional(
+ support, 'Optional codec image/gif not supported.');
+
+ let source = new InfiniteGifSource();
+ await source.load(5);
+
+ let stream = new ReadableStream(source, {type: 'bytes'});
+ let decoder = new ImageDecoder({data: stream, type: 'image/gif'});
+ return decoder.tracks.ready.then(_ => {
+ assert_equals(decoder.tracks.selectedTrack.frameCount, 2);
+ assert_equals(decoder.tracks.selectedTrack.repetitionCount, 5);
+
+ // Decode frame not yet available then change tracks before it comes in.
+ let p = decoder.decode({frameIndex: 5});
+ decoder.tracks.selectedTrack.selected = false;
+ return promise_rejects_dom(t, 'AbortError', p);
+ });
+}, 'Test ReadableStream aborts promises on track change');
+
+promise_test(async t => {
+ let support = await ImageDecoder.isTypeSupported('image/gif');
+ assert_implements_optional(
+ support, 'Optional codec image/gif not supported.');
+
+ let source = new InfiniteGifSource();
+ await source.load(5);
+
+ let stream = new ReadableStream(source, {type: 'bytes'});
+ let decoder = new ImageDecoder({data: stream, type: 'image/gif'});
+ return decoder.tracks.ready.then(_ => {
+ let p = decoder.completed;
+ decoder.close();
+ return promise_rejects_dom(t, 'AbortError', p);
+ });
+}, 'Test ReadableStream aborts completed on close');
+
+promise_test(async t => {
+ let support = await ImageDecoder.isTypeSupported('image/gif');
+ assert_implements_optional(
+ support, 'Optional codec image/gif not supported.');
+
+ let source = new InfiniteGifSource();
+ await source.load(5);
+
+ let stream = new ReadableStream(source, {type: 'bytes'});
+ let decoder = new ImageDecoder({data: stream, type: 'image/gif'});
+ return decoder.tracks.ready.then(_ => {
+ source.close();
+ return decoder.completed;
+ });
+}, 'Test ReadableStream resolves completed');
diff --git a/testing/web-platform/tests/webcodecs/pattern.png b/testing/web-platform/tests/webcodecs/pattern.png
new file mode 100644
index 0000000000..85676f29ff
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/pattern.png
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/per-frame-qp-encoding.https.any.js b/testing/web-platform/tests/webcodecs/per-frame-qp-encoding.https.any.js
new file mode 100644
index 0000000000..3207fa8356
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/per-frame-qp-encoding.https.any.js
@@ -0,0 +1,134 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/video-encoder-utils.js
+// META: variant=?av1
+// META: variant=?vp9_p0
+// META: variant=?vp9_p2
+
+// Builds the VideoEncoderConfig for the codec variant selected via the query
+// string (see the META: variant lines above). All variants use
+// bitrateMode 'quantizer' so a per-frame QP can be supplied at encode() time.
+function get_config() {
+  const config = {
+    '?av1': {codec: 'av01.0.04M.08'},
+    '?vp8': {codec: 'vp8'},
+    '?vp9_p0': {codec: 'vp09.00.10.08'},
+    '?vp9_p2': {codec: 'vp09.02.10.10'},
+    '?h264': {codec: 'avc1.42001E', avc: {format: 'annexb'}}
+  }[location.search];
+  config.width = 320;
+  config.height = 200;
+  config.bitrate = 1000000;
+  config.bitrateMode = 'quantizer';
+  config.framerate = 30;
+  return config;
+}
+
+function get_qp_range() {
+ switch (location.search) {
+ case '?av1':
+ return {min: 1, max: 63};
+ case '?vp9_p0':
+ return {min: 1, max: 63};
+ case '?vp9_p2':
+ return {min: 1, max: 63};
+ }
+ return null;
+}
+
+function set_qp(options, value) {
+ switch (location.search) {
+ case '?av1':
+ options.av1 = {quantizer: value};
+ return;
+ case '?vp9_p0':
+ options.vp9 = {quantizer: value};
+ return;
+ case '?vp9_p2':
+ options.vp9 = {quantizer: value};
+ return;
+ }
+}
+
+async function per_frame_qp_test(t, encoder_config, qp_range, validate_result) {
+ const w = encoder_config.width;
+ const h = encoder_config.height;
+ await checkEncoderSupport(t, encoder_config);
+
+ let frames_to_encode = 24;
+ let frames_decoded = 0;
+ let frames_encoded = 0;
+ let chunks = [];
+ let corrupted_frames = [];
+
+ const encoder_init = {
+ output(chunk, metadata) {
+ frames_encoded++;
+ chunks.push(chunk);
+ },
+ error(e) {
+ assert_unreached(e.message);
+ }
+ };
+
+ let encoder = new VideoEncoder(encoder_init);
+ encoder.configure(encoder_config);
+
+ let qp = qp_range.min;
+ for (let i = 0; i < frames_to_encode; i++) {
+ let frame = createDottedFrame(w, h, i);
+ if (qp < qp_range.max) {
+ qp++;
+ } else {
+ qp = qp_range.min;
+ }
+ let encode_options = {keyFrame: false};
+ set_qp(encode_options, qp);
+ encoder.encode(frame, encode_options);
+ frame.close();
+ }
+ await encoder.flush();
+
+ let decoder = new VideoDecoder({
+ output(frame) {
+ frames_decoded++;
+ // Check that we have intended number of dots and no more.
+ // Completely black frame shouldn't pass the test.
+ if (validate_result && !validateBlackDots(frame, frame.timestamp) ||
+ validateBlackDots(frame, frame.timestamp + 1)) {
+ corrupted_frames.push(frame.timestamp)
+ }
+ frame.close();
+ },
+ error(e) {
+ assert_unreached(e.message);
+ }
+ });
+
+ let decoder_config = {
+ codec: encoder_config.codec,
+ codedWidth: w,
+ codedHeight: h,
+ };
+ decoder.configure(decoder_config);
+
+ for (let chunk of chunks) {
+ decoder.decode(chunk);
+ }
+ await decoder.flush();
+
+ encoder.close();
+ decoder.close();
+ assert_equals(frames_encoded, frames_to_encode);
+ assert_equals(chunks.length, frames_to_encode);
+ assert_equals(frames_decoded, frames_to_encode);
+ assert_equals(
+ corrupted_frames.length, 0, `corrupted_frames: ${corrupted_frames}`);
+}
+
+promise_test(async t => {
+  let config = get_config();
+  let range = get_qp_range();
+  // No content validation: extreme QP values may destroy the dot pattern.
+  return per_frame_qp_test(t, config, range, false);
+}, 'Frame QP encoding, full range');
+
+promise_test(async t => {
+  let config = get_config();
+  // Low QP keeps quality high enough to validate decoded frame content.
+  return per_frame_qp_test(t, config, {min: 1, max: 20}, true);
+}, 'Frame QP encoding, good range with validation');
diff --git a/testing/web-platform/tests/webcodecs/reconfiguring-encoder.https.any.js b/testing/web-platform/tests/webcodecs/reconfiguring-encoder.https.any.js
new file mode 100644
index 0000000000..bc7e9b74fb
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/reconfiguring-encoder.https.any.js
@@ -0,0 +1,121 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/video-encoder-utils.js
+// META: variant=?av1
+// META: variant=?vp8
+// META: variant=?vp9_p0
+// META: variant=?vp9_p2
+// META: variant=?h264_avc
+// META: variant=?h264_annexb
+
+// Encoder config shared by the test below; completed in promise_setup() once
+// the variant-specific codec string is resolved from the query string.
+var ENCODER_CONFIG = null;
+promise_setup(async () => {
+  const config = {
+    '?av1': {codec: 'av01.0.04M.08'},
+    '?vp8': {codec: 'vp8'},
+    '?vp9_p0': {codec: 'vp09.00.10.08'},
+    '?vp9_p2': {codec: 'vp09.02.10.10'},
+    '?h264_avc': {codec: 'avc1.42001F', avc: {format: 'avc'}},
+    '?h264_annexb': {codec: 'avc1.42001F', avc: {format: 'annexb'}}
+  }[location.search];
+  // Software encoding keeps behavior deterministic across test machines.
+  config.hardwareAcceleration = 'prefer-software';
+  config.bitrateMode = "constant";
+  config.scalabilityMode = "L1T2";
+  config.framerate = 30;
+  ENCODER_CONFIG = config;
+});
+
+promise_test(async t => {
+  let original_w = 800;
+  let original_h = 600;
+  let original_bitrate = 3_000_000;
+
+  let new_w = 640;
+  let new_h = 480;
+  let new_bitrate = 2_000_000;
+
+  let next_ts = 0
+  let reconf_ts = 0;
+  let frames_to_encode = 16;
+  let before_reconf_frames = 0;
+  let after_reconf_frames = 0;
+
+  // Chunks with timestamp >= |reconf_ts| belong to the reconfigured run and
+  // must carry the new dimensions in their decoder config (when present).
+  let process_video_chunk = function (chunk, metadata) {
+    let config = metadata.decoderConfig;
+    var data = new Uint8Array(chunk.data);
+    assert_greater_than_equal(data.length, 0);
+    let after_reconf = (reconf_ts != 0) && (chunk.timestamp >= reconf_ts);
+    if (after_reconf) {
+      after_reconf_frames++;
+      if (config) {
+        assert_equals(config.codedWidth, new_w);
+        assert_equals(config.codedHeight, new_h);
+      }
+    } else {
+      before_reconf_frames++;
+      if (config) {
+        assert_equals(config.codedWidth, original_w);
+        assert_equals(config.codedHeight, original_h);
+      }
+    }
+  };
+
+  const init = {
+    output: (chunk, md) => {
+      try {
+        process_video_chunk(chunk, md);
+      } catch (e) {
+        // Surface assertion failures raised inside the output callback.
+        assert_unreached(e.message);
+      }
+    },
+    error: (e) => {
+      assert_unreached(e.message);
+    },
+  };
+  const params = {
+    ...ENCODER_CONFIG,
+    width: original_w,
+    height: original_h,
+    bitrate: original_bitrate,
+  };
+  await checkEncoderSupport(t, params);
+
+  let encoder = new VideoEncoder(init);
+  encoder.configure(params);
+
+  // Remove this flush after crbug.com/1275789 is fixed
+  await encoder.flush();
+
+  // Encode |frames_to_encode| frames with original settings
+  for (let i = 0; i < frames_to_encode; i++) {
+    var frame = createFrame(original_w, original_h, next_ts++);
+    encoder.encode(frame, {});
+    frame.close();
+  }
+
+  params.width = new_w;
+  params.height = new_h;
+  params.bitrate = new_bitrate;
+
+  // Reconfigure encoder and run |frames_to_encode| frames with new settings
+  encoder.configure(params);
+  reconf_ts = next_ts;
+
+  for (let i = 0; i < frames_to_encode; i++) {
+    var frame = createFrame(new_w, new_h, next_ts++);
+    encoder.encode(frame, {});
+    frame.close();
+  }
+
+  await encoder.flush();
+
+  // Configure back to original config
+  params.width = original_w;
+  params.height = original_h;
+  params.bitrate = original_bitrate;
+  encoder.configure(params);
+  await encoder.flush();
+
+  encoder.close();
+  assert_equals(before_reconf_frames, frames_to_encode);
+  assert_equals(after_reconf_frames, frames_to_encode);
+}, "Reconfiguring encoder");
diff --git a/testing/web-platform/tests/webcodecs/sfx-aac.mp4 b/testing/web-platform/tests/webcodecs/sfx-aac.mp4
new file mode 100644
index 0000000000..c7b3417d9c
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/sfx-aac.mp4
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/sfx-alaw.wav b/testing/web-platform/tests/webcodecs/sfx-alaw.wav
new file mode 100644
index 0000000000..da9a22759c
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/sfx-alaw.wav
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/sfx-mulaw.wav b/testing/web-platform/tests/webcodecs/sfx-mulaw.wav
new file mode 100644
index 0000000000..ba9d6bdf1b
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/sfx-mulaw.wav
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/sfx-opus.ogg b/testing/web-platform/tests/webcodecs/sfx-opus.ogg
new file mode 100644
index 0000000000..01a9b862ce
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/sfx-opus.ogg
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/sfx.adts b/testing/web-platform/tests/webcodecs/sfx.adts
new file mode 100644
index 0000000000..80f9c8c91f
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/sfx.adts
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/sfx.mp3 b/testing/web-platform/tests/webcodecs/sfx.mp3
new file mode 100644
index 0000000000..d260017446
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/sfx.mp3
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/temporal-svc-encoding.https.any.js b/testing/web-platform/tests/webcodecs/temporal-svc-encoding.https.any.js
new file mode 100644
index 0000000000..7cf7225e5d
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/temporal-svc-encoding.https.any.js
@@ -0,0 +1,105 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/video-encoder-utils.js
+// META: variant=?av1
+// META: variant=?vp8
+// META: variant=?vp9
+// META: variant=?h264
+
+// Encoder config shared by the SVC tests below; completed in promise_setup()
+// once the variant-specific codec string is resolved from the query string.
+var ENCODER_CONFIG = null;
+promise_setup(async () => {
+  const config = {
+    '?av1': {codec: 'av01.0.04M.08'},
+    '?vp8': {codec: 'vp8'},
+    '?vp9': {codec: 'vp09.00.10.08'},
+    '?h264': {codec: 'avc1.42001E', avc: {format: 'annexb'}}
+  }[location.search];
+  // Software encoding keeps SVC layer output deterministic across machines.
+  config.hardwareAcceleration = 'prefer-software';
+  config.width = 320;
+  config.height = 200;
+  config.bitrate = 1000000;
+  config.bitrateMode = "constant";
+  config.framerate = 30;
+  ENCODER_CONFIG = config;
+});
+
+// Encodes with temporal scalability mode L1T|layers|, keeps only base-layer
+// chunks, and verifies that they alone decode to uncorrupted frames.
+// |base_layer_decimator| - expected ratio of total frames to base-layer
+//     frames (2 for L1T2, 4 for L1T3).
+async function svc_test(t, layers, base_layer_decimator) {
+  let encoder_config = { ...ENCODER_CONFIG };
+  encoder_config.scalabilityMode = "L1T" + layers;
+  const w = encoder_config.width;
+  const h = encoder_config.height;
+  await checkEncoderSupport(t, encoder_config);
+
+  let frames_to_encode = 24;
+  let frames_decoded = 0;
+  let frames_encoded = 0;
+  let chunks = [];
+  let corrupted_frames = [];
+
+  const encoder_init = {
+    output(chunk, metadata) {
+      frames_encoded++;
+
+      // Filter out all frames, but base layer.
+      assert_own_property(metadata, "svc");
+      assert_own_property(metadata.svc, "temporalLayerId");
+      assert_less_than(metadata.svc.temporalLayerId, layers);
+      if (metadata.svc.temporalLayerId == 0)
+        chunks.push(chunk);
+    },
+    error(e) {
+      assert_unreached(e.message);
+    }
+  };
+
+  let encoder = new VideoEncoder(encoder_init);
+  encoder.configure(encoder_config);
+
+  // Frame |i| carries |i| dots so corruption is detectable after decode.
+  for (let i = 0; i < frames_to_encode; i++) {
+    let frame = createDottedFrame(w, h, i);
+    encoder.encode(frame, { keyFrame: false });
+    frame.close();
+  }
+  await encoder.flush();
+
+  let decoder = new VideoDecoder({
+    output(frame) {
+      frames_decoded++;
+      // Check that we have intended number of dots and no more.
+      // Completely black frame shouldn't pass the test.
+      if(!validateBlackDots(frame, frame.timestamp) ||
+        validateBlackDots(frame, frame.timestamp + 1)) {
+        corrupted_frames.push(frame.timestamp)
+      }
+      frame.close();
+    },
+    error(e) {
+      assert_unreached(e.message);
+    }
+  });
+
+  let decoder_config = {
+    codec: encoder_config.codec,
+    hardwareAcceleration: encoder_config.hardwareAcceleration,
+    codedWidth: w,
+    codedHeight: h,
+  };
+  decoder.configure(decoder_config);
+
+  for (let chunk of chunks) {
+    decoder.decode(chunk);
+  }
+  await decoder.flush();
+
+  encoder.close();
+  decoder.close();
+  assert_equals(frames_encoded, frames_to_encode);
+
+  // Only base-layer chunks were kept, so expect 1/|base_layer_decimator|.
+  let base_layer_frames = frames_to_encode / base_layer_decimator;
+  assert_equals(chunks.length, base_layer_frames);
+  assert_equals(frames_decoded, base_layer_frames);
+  assert_equals(corrupted_frames.length, 0,
+    `corrupted_frames: ${corrupted_frames}`);
+}
+
+promise_test(async t => { return svc_test(t, 2, 2) }, "SVC L1T2");
+promise_test(async t => { return svc_test(t, 3, 4) }, "SVC L1T3");
diff --git a/testing/web-platform/tests/webcodecs/utils.js b/testing/web-platform/tests/webcodecs/utils.js
new file mode 100644
index 0000000000..bbb53e70e0
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/utils.js
@@ -0,0 +1,235 @@
+// Builds an AudioData with |frames| samples per channel in f32-planar layout;
+// each channel carries a sine tone at a distinct frequency (100 + 50*channel).
+function make_audio_data(timestamp, channels, sampleRate, frames) {
+  let data = new Float32Array(frames*channels);
+
+  // This generates samples in a planar format.
+  for (var channel = 0; channel < channels; channel++) {
+    let hz = 100 + channel * 50; // sound frequency
+    let base_index = channel * frames;
+    for (var i = 0; i < frames; i++) {
+      let t = (i / sampleRate) * hz * (Math.PI * 2);
+      data[base_index + i] = Math.sin(t);
+    }
+  }
+
+  return new AudioData({
+    timestamp: timestamp,
+    data: data,
+    numberOfChannels: channels,
+    numberOfFrames: frames,
+    sampleRate: sampleRate,
+    format: "f32-planar",
+  });
+}
+
+// Returns an OffscreenCanvas of the given size filled with a solid color.
+function makeOffscreenCanvas(width, height, options) {
+  let canvas = new OffscreenCanvas(width, height);
+  let ctx = canvas.getContext('2d', options);
+  ctx.fillStyle = 'rgba(50, 100, 150, 255)';
+  ctx.fillRect(0, 0, width, height);
+  return canvas;
+}
+
+// Returns an ImageBitmap of the given size filled with a solid color.
+function makeImageBitmap(width, height) {
+  return makeOffscreenCanvas(width, height).transferToImageBitmap();
+}
+
+// Gives a chance to pending output and error callbacks to complete before
+// resolving.
+function endAfterEventLoopTurn() {
+  return new Promise(resolve => step_timeout(resolve, 0));
+}
+
+// Returns a codec initialization with callbacks that expected to not be called.
+// Any invocation of output/error fails the given testharness.js |test|.
+function getDefaultCodecInit(test) {
+  return {
+    output: test.unreached_func("unexpected output"),
+    error: test.unreached_func("unexpected error"),
+  }
+}
+
+// Checks that codec can be configured, reset, reconfigured, and that incomplete
+// or invalid configs throw errors immediately.
+function testConfigurations(codec, validCondig, invalidCodecs) {
+ assert_equals(codec.state, "unconfigured");
+
+ const requiredConfigPairs = validCondig;
+ let incrementalConfig = {};
+
+ for (let key in requiredConfigPairs) {
+ // Configure should fail while required keys are missing.
+ assert_throws_js(TypeError, () => { codec.configure(incrementalConfig); });
+ incrementalConfig[key] = requiredConfigPairs[key];
+ assert_equals(codec.state, "unconfigured");
+ }
+
+ // Configure should pass once incrementalConfig meets all requirements.
+ codec.configure(incrementalConfig);
+ assert_equals(codec.state, "configured");
+
+ // We should be able to reconfigure the codec.
+ codec.configure(incrementalConfig);
+ assert_equals(codec.state, "configured");
+
+ let config = incrementalConfig;
+
+ invalidCodecs.forEach(badCodec => {
+ // Invalid codecs should fail.
+ config.codec = badCodec;
+ assert_throws_js(TypeError, () => { codec.configure(config); }, badCodec);
+ })
+
+ // The failed configures should not affect the current config.
+ assert_equals(codec.state, "configured");
+
+ // Test we can configure after a reset.
+ codec.reset()
+ assert_equals(codec.state, "unconfigured");
+
+ codec.configure(validCondig);
+ assert_equals(codec.state, "configured");
+}
+
+// Performs an encode or decode with the provided input, depending on whether
+// the passed codec is an encoder or a decoder. Either way an
+// InvalidStateError is expected, since the codec is not configured.
+function encodeOrDecodeShouldThrow(codec, input) {
+  // We are testing encode/decode on codecs in invalid states.
+  assert_not_equals(codec.state, "configured");
+
+  if (codec.decode) {
+    assert_throws_dom("InvalidStateError",
+                      () => codec.decode(input),
+                      "decode");
+  } else if (codec.encode) {
+    // Encoders consume frames, so clone it to be safe.
+    assert_throws_dom("InvalidStateError",
+                      () => codec.encode(input.clone()),
+                      "encode");
+
+  } else {
+    assert_unreached("Codec should have encode or decode function");
+  }
+}
+
+// Makes sure that we cannot close, configure, reset, flush, decode or encode a
+// closed codec. Returns a promise asserting that flush() rejects.
+function testClosedCodec(test, codec, validconfig, codecInput) {
+  assert_equals(codec.state, "unconfigured");
+
+  codec.close();
+  assert_equals(codec.state, "closed");
+
+  assert_throws_dom("InvalidStateError",
+                    () => codec.configure(validconfig),
+                    "configure");
+  assert_throws_dom("InvalidStateError",
+                    () => codec.reset(),
+                    "reset");
+  assert_throws_dom("InvalidStateError",
+                    () => codec.close(),
+                    "close");
+
+  encodeOrDecodeShouldThrow(codec, codecInput);
+
+  return promise_rejects_dom(test, 'InvalidStateError', codec.flush(), 'flush');
+}
+
+// Makes sure we cannot flush, encode or decode with an unconfigured coded, and
+// that reset is a valid no-op. Returns a promise asserting flush() rejects.
+function testUnconfiguredCodec(test, codec, codecInput) {
+  assert_equals(codec.state, "unconfigured");
+
+  // Configure() and Close() are valid operations that would transition us into
+  // a different state.
+
+  // Resetting an unconfigured encoder is a no-op.
+  codec.reset();
+  assert_equals(codec.state, "unconfigured");
+
+  encodeOrDecodeShouldThrow(codec, codecInput);
+
+  return promise_rejects_dom(test, 'InvalidStateError', codec.flush(), 'flush');
+}
+
+// Reference values generated by:
+// https://fiddle.skia.org/c/f100d4d5f085a9e09896aabcbc463868
+
+// Expected RGBA readback of the solid fill in sRGB, Display-P3 and Rec.2020.
+const kSRGBPixel = [50, 100, 150, 255];
+const kP3Pixel = [62, 99, 146, 255];
+const kRec2020Pixel = [87, 106, 151, 255];
+
+// Canvas-context vs. ImageData settings use different key names for the
+// same color space ('pixelFormat' vs. 'storageFormat').
+const kCanvasOptionsP3Uint8 = {
+  colorSpace: 'display-p3',
+  pixelFormat: 'uint8'
+};
+
+const kImageSettingOptionsP3Uint8 = {
+  colorSpace: 'display-p3',
+  storageFormat: 'uint8'
+};
+
+const kCanvasOptionsRec2020Uint8 = {
+  colorSpace: 'rec2020',
+  pixelFormat: 'uint8'
+};
+
+const kImageSettingOptionsRec2020Uint8 = {
+  colorSpace: 'rec2020',
+  storageFormat: 'uint8'
+};
+
+// Samples the canvas (at most kMaxPixelToCheck pixels, evenly strided) and
+// compares each RGBA component against |expected_pixel| via |assert_compares|.
+function testCanvas(ctx, width, height, expected_pixel, imageSetting, assert_compares) {
+  // The dup getImageData is to workaround crbug.com/1100233
+  let imageData = ctx.getImageData(0, 0, width, height, imageSetting);
+  let colorData = ctx.getImageData(0, 0, width, height, imageSetting).data;
+  const kMaxPixelToCheck = 128 * 96;
+  // Stride (in pixels) chosen so at most kMaxPixelToCheck pixels are checked.
+  let step = width * height / kMaxPixelToCheck;
+  step = Math.round(step);
+  step = (step < 1) ? 1 : step;
+  for (let i = 0; i < 4 * width * height; i += (4 * step)) {
+    assert_compares(colorData[i], expected_pixel[0]);
+    assert_compares(colorData[i + 1], expected_pixel[1]);
+    assert_compares(colorData[i + 2], expected_pixel[2]);
+    assert_compares(colorData[i + 3], expected_pixel[3]);
+  }
+}
+
+// Returns a Uint8Array whose underlying buffer has been detached by
+// transferring it through a MessageChannel.
+function makeDetachedArrayBuffer() {
+  const buffer = new ArrayBuffer(10);
+  const view = new Uint8Array(buffer);
+  new MessageChannel().port1.postMessage(buffer, [buffer]);
+  return view;
+}
+
+// Heuristic check for VideoFrame.close(): closed frames report null
+// format/rects and zeroed dimensions.
+function isFrameClosed(frame) {
+  return frame.format == null && frame.codedWidth == 0 &&
+      frame.codedHeight == 0 && frame.displayWidth == 0 &&
+      frame.displayHeight == 0 && frame.codedRect == null &&
+      frame.visibleRect == null;
+}
+
+// Round-trips a solid-color canvas through ImageBitmap -> VideoFrame ->
+// ImageBitmap and checks the pixels survive within a small tolerance.
+function testImageBitmapToAndFromVideoFrame(
+    width, height, expectedPixel, canvasOptions, imageBitmapOptions,
+    imageSetting) {
+  let canvas = new OffscreenCanvas(width, height);
+  let ctx = canvas.getContext('2d', canvasOptions);
+  ctx.fillStyle = 'rgb(50, 100, 150)';
+  ctx.fillRect(0, 0, width, height);
+  // Source pixels must match exactly before the round trip.
+  testCanvas(ctx, width, height, expectedPixel, imageSetting, assert_equals);
+
+  return createImageBitmap(canvas, imageBitmapOptions)
+      .then((fromImageBitmap) => {
+        let videoFrame = new VideoFrame(fromImageBitmap, {timestamp: 0});
+        return createImageBitmap(videoFrame, imageBitmapOptions);
+      })
+      .then((toImageBitmap) => {
+        let myCanvas = new OffscreenCanvas(width, height);
+        let myCtx = myCanvas.getContext('2d', canvasOptions);
+        myCtx.drawImage(toImageBitmap, 0, 0);
+        let tolerance = 2;
+        testCanvas(
+            myCtx, width, height, expectedPixel, imageSetting,
+            (actual, expected) => {
+              assert_approx_equals(actual, expected, tolerance);
+            });
+      });
+}
diff --git a/testing/web-platform/tests/webcodecs/video-decoder.crossOriginIsolated.https.any.js b/testing/web-platform/tests/webcodecs/video-decoder.crossOriginIsolated.https.any.js
new file mode 100644
index 0000000000..3232844a31
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/video-decoder.crossOriginIsolated.https.any.js
@@ -0,0 +1,68 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+// H.264 test clip plus its decoder config. |description| locates the codec
+// description bytes inside the file (presumably the avcC record — confirm
+// against h264.mp4 if the file changes).
+const testData = {
+  src: 'h264.mp4',
+  config: {
+    codec: 'avc1.64000b',
+    description: {offset: 9490, size: 45},
+    codedWidth: 320,
+    codedHeight: 240,
+    displayAspectWidth: 320,
+    displayAspectHeight: 240,
+  }
+};
+
+// Create a view of an ArrayBuffer.
+function view(buffer, {offset, size}) {
+  return new Uint8Array(buffer, offset, size);
+}
+
+function testSharedArrayBufferDescription(t, useView) {
+ const data = testData;
+
+ // Don't run test if the codec is not supported.
+ assert_equals("function", typeof VideoDecoder.isConfigSupported);
+ let supported = false;
+ return VideoDecoder.isConfigSupported({codec: data.config.codec})
+ .catch(_ => {
+ assert_implements_optional(false, data.config.codec + ' unsupported');
+ })
+ .then(support => {
+ supported = support.supported;
+ assert_implements_optional(
+ supported, data.config.codec + ' unsupported');
+ return fetch(data.src);
+ })
+ .then(response => {
+ return response.arrayBuffer();
+ })
+ .then(buf => {
+ config = {...data.config};
+ if (data.config.description) {
+ let desc = new SharedArrayBuffer(data.config.description.size);
+ let descView = new Uint8Array(desc);
+ descView.set(view(buf, data.config.description));
+ config.description = useView ? descView : desc;
+ }
+
+ // Support was verified above, so the description shouldn't change
+ // that.
+ return VideoDecoder.isConfigSupported(config);
+ })
+ .then(support => {
+ assert_true(support.supported);
+
+ const decoder = new VideoDecoder(getDefaultCodecInit(t));
+ decoder.configure(config);
+ assert_equals(decoder.state, 'configured', 'state');
+ });
+}
+
+// Exercise both ways of passing a shared description to the decoder.
+promise_test(t => {
+  return testSharedArrayBufferDescription(t, /*useView=*/ false);
+}, 'Test isConfigSupported() and configure() using a SharedArrayBuffer');
+
+promise_test(t => {
+  return testSharedArrayBufferDescription(t, /*useView=*/ true);
+}, 'Test isConfigSupported() and configure() using a Uint8Array(SharedArrayBuffer)');
diff --git a/testing/web-platform/tests/webcodecs/video-decoder.crossOriginIsolated.https.any.js.headers b/testing/web-platform/tests/webcodecs/video-decoder.crossOriginIsolated.https.any.js.headers
new file mode 100644
index 0000000000..5f8621ef83
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/video-decoder.crossOriginIsolated.https.any.js.headers
@@ -0,0 +1,2 @@
+Cross-Origin-Embedder-Policy: require-corp
+Cross-Origin-Opener-Policy: same-origin
diff --git a/testing/web-platform/tests/webcodecs/video-decoder.https.any.js b/testing/web-platform/tests/webcodecs/video-decoder.https.any.js
new file mode 100644
index 0000000000..565134271d
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/video-decoder.https.any.js
@@ -0,0 +1,64 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+// Configs that both isConfigSupported() and configure() must reject with a
+// TypeError.
+const invalidConfigs = [
+  {
+    comment: 'Empty codec',
+    config: {codec: ''},
+  },
+  {
+    comment: 'Unrecognized codec',
+    config: {codec: 'bogus'},
+  },
+  {
+    comment: 'Audio codec',
+    config: {codec: 'vorbis'},
+  },
+  {
+    comment: 'Ambiguous codec',
+    config: {codec: 'vp9'},
+  },
+  {
+    comment: 'Codec with MIME type',
+    config: {codec: 'video/webm; codecs="vp8"'},
+  },
+]; // invalidConfigs
+
+// isConfigSupported() rejects (asynchronously) on invalid configs.
+invalidConfigs.forEach(entry => {
+  promise_test(
+      t => {
+        return promise_rejects_js(
+            t, TypeError, VideoDecoder.isConfigSupported(entry.config));
+      },
+      'Test that VideoDecoder.isConfigSupported() rejects invalid config:' +
+          entry.comment);
+});
+
+// configure() throws (synchronously) on invalid configs.
+invalidConfigs.forEach(entry => {
+  async_test(
+      t => {
+        let codec = new VideoDecoder(getDefaultCodecInit(t));
+        assert_throws_js(TypeError, () => {
+          codec.configure(entry.config);
+        });
+        t.done();
+      },
+      'Test that VideoDecoder.configure() rejects invalid config:' +
+          entry.comment);
+});
+
+promise_test(t => {
+  // VideoDecoderInit lacks required fields.
+  assert_throws_js(TypeError, () => {
+    new VideoDecoder({});
+  });
+
+  // VideoDecoderInit has required fields.
+  let decoder = new VideoDecoder(getDefaultCodecInit(t));
+
+  assert_equals(decoder.state, 'unconfigured');
+
+  decoder.close();
+
+  return endAfterEventLoopTurn();
+}, 'Test VideoDecoder construction');
diff --git a/testing/web-platform/tests/webcodecs/video-encoder-config.https.any.js b/testing/web-platform/tests/webcodecs/video-encoder-config.https.any.js
new file mode 100644
index 0000000000..fe0c59c002
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/video-encoder-config.https.any.js
@@ -0,0 +1,159 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+// Configs that VideoEncoder.isConfigSupported() must reject with a TypeError.
+// NOTE(review): the 'Emtpy codec' typo below is inside a test-name string;
+// correcting it would change the test ID, so it is left as-is.
+const invalidConfigs = [
+  {
+    comment: 'Emtpy codec',
+    config: {
+      codec: '',
+      width: 640,
+      height: 480,
+    },
+  },
+  {
+    comment: 'Unrecognized codec',
+    config: {
+      codec: 'bogus',
+      width: 640,
+      height: 480,
+    },
+  },
+  {
+    comment: 'Width is too large',
+    config: {
+      codec: 'vp8',
+      width: 1000000,
+      height: 480,
+    },
+  },
+  {
+    comment: 'Height is too large',
+    config: {
+      codec: 'vp8',
+      width: 640,
+      height: 1000000,
+    },
+  },
+  {
+    comment: 'Invalid scalability mode',
+    config: {codec: 'vp8', width: 640, height: 480, scalabilityMode: 'ABC'}
+  }
+];
+
+invalidConfigs.forEach(entry => {
+  promise_test(t => {
+    return promise_rejects_js(t, TypeError, VideoEncoder.isConfigSupported(entry.config));
+  }, 'Test that VideoEncoder.isConfigSupported() rejects invalid config:' + entry.comment);
+});
+
+
+// Well-formed configs that no implementation is expected to support;
+// isConfigSupported() must resolve with supported == false (not reject).
+const validButUnsupportedConfigs = [
+  {
+    comment: 'Too strenuous accelerated encoding parameters',
+    config: {
+      codec: "vp8",
+      hardwareAcceleration: "prefer-hardware",
+      width: 7000,
+      height: 7000,
+      bitrate: 1,
+      framerate: 240,
+    }
+  },
+  {
+    comment: 'Odd sized frames for H264',
+    config: {
+      codec: "avc1.42001E",
+      width: 641,
+      height: 480,
+      bitrate: 1000000,
+      framerate: 24,
+    }
+  },
+];
+
+validButUnsupportedConfigs.forEach(entry => {
+  let config = entry.config;
+  promise_test(async t => {
+    let support = await VideoEncoder.isConfigSupported(config);
+    assert_false(support.supported);
+
+    // The echoed config must still reflect the queried values.
+    let new_config = support.config;
+    assert_equals(new_config.codec, config.codec);
+    assert_equals(new_config.width, config.width);
+    assert_equals(new_config.height, config.height);
+    if (config.bitrate)
+      assert_equals(new_config.bitrate, config.bitrate);
+    if (config.framerate)
+      assert_equals(new_config.framerate, config.framerate);
+  }, "VideoEncoder.isConfigSupported() doesn't support config:" + entry.comment);
+});
+
+// Fully supported configs. Each includes an unknown key
+// (futureConfigFeature) that the echoed config must drop; the vp8 entry also
+// carries an |avc| member that must be dropped for non-AVC codecs (asserted
+// below).
+const validConfigs = [
+  {
+    codec: 'avc1.42001E',
+    hardwareAcceleration: 'no-preference',
+    width: 640,
+    height: 480,
+    bitrate: 5000000,
+    framerate: 24,
+    avc: {format: 'annexb'},
+    futureConfigFeature: 'foo',
+  },
+  {
+    codec: 'vp8',
+    hardwareAcceleration: 'no-preference',
+    width: 800,
+    height: 600,
+    bitrate: 7000000,
+    bitrateMode: 'variable',
+    framerate: 60,
+    scalabilityMode: 'L1T2',
+    futureConfigFeature: 'foo',
+    latencyMode: 'quality',
+    avc: {format: 'annexb'}
+  },
+  {
+    codec: 'vp09.00.10.08',
+    hardwareAcceleration: 'no-preference',
+    width: 1280,
+    height: 720,
+    bitrate: 7000000,
+    bitrateMode: 'constant',
+    framerate: 25,
+    futureConfigFeature: 'foo',
+    latencyMode: 'realtime',
+    alpha: 'discard'
+  }
+];
+
+validConfigs.forEach(config => {
+  promise_test(async t => {
+    let support = await VideoEncoder.isConfigSupported(config);
+    assert_implements_optional(support.supported);
+
+    // Unknown keys are dropped; recognized keys are echoed back unchanged.
+    let new_config = support.config;
+    assert_false(new_config.hasOwnProperty('futureConfigFeature'));
+    assert_equals(new_config.codec, config.codec);
+    assert_equals(new_config.width, config.width);
+    assert_equals(new_config.height, config.height);
+    if (config.bitrate)
+      assert_equals(new_config.bitrate, config.bitrate);
+    if (config.framerate)
+      assert_equals(new_config.framerate, config.framerate);
+    if (config.bitrateMode)
+      assert_equals(new_config.bitrateMode, config.bitrateMode);
+    if (config.latencyMode)
+      assert_equals(new_config.latencyMode, config.latencyMode);
+    if (config.alpha)
+      assert_equals(new_config.alpha, config.alpha);
+    if (config.codec.startsWith('avc')) {
+      if (config.avc) {
+        assert_equals(new_config.avc.format, config.avc.format);
+      }
+    } else {
+      // The |avc| member must be dropped for non-AVC codecs.
+      assert_equals(new_config.avc, undefined);
+    }
+  }, "VideoEncoder.isConfigSupported() supports:" + JSON.stringify(config));
+});
+
+
diff --git a/testing/web-platform/tests/webcodecs/video-encoder-utils.js b/testing/web-platform/tests/webcodecs/video-encoder-utils.js
new file mode 100644
index 0000000000..7201e054d8
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/video-encoder-utils.js
@@ -0,0 +1,103 @@
+// Skips the test (via assert_implements_optional) unless |config| is
+// supported by VideoEncoder on this platform.
+async function checkEncoderSupport(test, config) {
+  assert_equals("function", typeof VideoEncoder.isConfigSupported);
+  let supported = false;
+  try {
+    const support = await VideoEncoder.isConfigSupported(config);
+    supported = support.supported;
+  } catch (e) {}  // Deliberate: a rejection just means "unsupported".
+
+  assert_implements_optional(supported, 'Unsupported config: ' +
+                             JSON.stringify(config));
+}
+
+// Paints a four-quadrant pattern (yellow/red/blue/green) with |text| drawn in
+// white at the center; used as easily recognizable encoder input.
+function fourColorsFrame(ctx, width, height, text) {
+  const kYellow = "#FFFF00";
+  const kRed = "#FF0000";
+  const kBlue = "#0000FF";
+  const kGreen = "#00FF00";
+
+  ctx.fillStyle = kYellow;
+  ctx.fillRect(0, 0, width / 2, height / 2);
+
+  ctx.fillStyle = kRed;
+  ctx.fillRect(width / 2, 0, width / 2, height / 2);
+
+  ctx.fillStyle = kBlue;
+  ctx.fillRect(0, height / 2, width / 2, height / 2);
+
+  ctx.fillStyle = kGreen;
+  ctx.fillRect(width / 2, height / 2, width / 2, height / 2);
+
+  ctx.fillStyle = 'white';
+  ctx.font = (height / 10) + 'px sans-serif';
+  ctx.fillText(text, width / 2, height / 2);
+}
+
+// Paints |count| black dots on the |ctx|, so their presence can be validated
+// later. This is an analog of the most basic bar code.
+// Dot positions are a pure function of the index, so validateBlackDots() can
+// recompute them.
+function putBlackDots(ctx, width, height, count) {
+  ctx.fillStyle = 'black';
+  const dot_size = 10;
+  const step = dot_size * 3;
+
+  for (let i = 1; i <= count; i++) {
+    let x = i * step;
+    let y = step * (x / width + 1);  // shift rows down as x wraps around
+    x %= width;
+    ctx.fillRect(x, y, dot_size, dot_size);
+  }
+}
+
+// Validates that frame has |count| black dots in predefined places.
+// Recomputes the dot positions used by putBlackDots() and samples a 2x2 patch
+// near each one; returns false if any patch is entirely brighter than the
+// tolerance (i.e. the dot is missing).
+function validateBlackDots(frame, count) {
+  const width = frame.displayWidth;
+  const height = frame.displayHeight;
+  let cnv = new OffscreenCanvas(width, height);
+  var ctx = cnv.getContext('2d');
+  ctx.drawImage(frame, 0, 0);
+  const dot_size = 10;
+  const step = dot_size * 3;
+
+  for (let i = 1; i <= count; i++) {
+    // Sample near the dot center (dot_size / 2 into the dot).
+    let x = i * step + dot_size / 2;
+    let y = step * (x / width + 1) + dot_size / 2;
+    x %= width;
+
+    // Back off one pixel so the 2x2 read stays inside the dot.
+    if (x)
+      x = x -1;
+    if (y)
+      y = y -1;
+
+    let rgba = ctx.getImageData(x, y, 2, 2).data;
+    const tolerance = 40;
+    // Only fail when all four sampled pixels are bright, to allow for codec
+    // blur at the dot edges.
+    if ((rgba[0] > tolerance || rgba[1] > tolerance || rgba[2] > tolerance)
+      && (rgba[4] > tolerance || rgba[5] > tolerance || rgba[6] > tolerance)
+      && (rgba[8] > tolerance || rgba[9] > tolerance || rgba[10] > tolerance)
+      && (rgba[12] > tolerance || rgba[13] > tolerance || rgba[14] > tolerance)) {
+      // The dot is too bright to be a black dot.
+      return false;
+    }
+  }
+  return true;
+}
+
+// Builds a VideoFrame showing the four-color pattern with the timestamp
+// rendered as text.
+function createFrame(width, height, ts = 0) {
+  let duration = 33333; // 30fps
+  let text = ts.toString();
+  let cnv = new OffscreenCanvas(width, height);
+  var ctx = cnv.getContext('2d');
+  fourColorsFrame(ctx, width, height, text);
+  return new VideoFrame(cnv, { timestamp: ts, duration });
+}
+
+function createDottedFrame(width, height, dots, ts) {
+ if (ts === undefined)
+ ts = dots;
+ let duration = 33333; // 30fps
+ let text = ts.toString();
+ let cnv = new OffscreenCanvas(width, height);
+ var ctx = cnv.getContext('2d');
+ fourColorsFrame(ctx, width, height, text);
+ putBlackDots(ctx, width, height, dots);
+ return new VideoFrame(cnv, { timestamp: ts, duration });
+}
diff --git a/testing/web-platform/tests/webcodecs/video-encoder.https.any.js b/testing/web-platform/tests/webcodecs/video-encoder.https.any.js
new file mode 100644
index 0000000000..229ae32edd
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/video-encoder.https.any.js
@@ -0,0 +1,320 @@
+// META: global=window,dedicatedworker
+// META: script=/common/media.js
+// META: script=/webcodecs/utils.js
+// META: script=/webcodecs/video-encoder-utils.js
+
+const defaultConfig = {
+ codec: 'vp8',
+ width: 640,
+ height: 480
+};
+
+promise_test(t => {
+ // VideoEncoderInit lacks required fields.
+ assert_throws_js(TypeError, () => { new VideoEncoder({}); });
+
+ // VideoEncoderInit has required fields.
+ let encoder = new VideoEncoder(getDefaultCodecInit(t));
+
+ assert_equals(encoder.state, "unconfigured");
+
+ encoder.close();
+
+ return endAfterEventLoopTurn();
+}, 'Test VideoEncoder construction');
+
+promise_test(t => {
+ let encoder = new VideoEncoder(getDefaultCodecInit(t));
+
+ let badCodecsList = [
+ '', // Empty codec
+    'bogus',                   // Non-existent codec
+ 'vorbis', // Audio codec
+ 'vp9', // Ambiguous codec
+ 'video/webm; codecs="vp9"' // Codec with mime type
+ ]
+
+ testConfigurations(encoder, defaultConfig, badCodecsList);
+
+ return endAfterEventLoopTurn();
+}, 'Test VideoEncoder.configure()');
+
+promise_test(async t => {
+ let output_chunks = [];
+ let codecInit = getDefaultCodecInit(t);
+ let decoderConfig = null;
+ let encoderConfig = {
+ codec: 'vp8',
+ width: 640,
+ height: 480,
+ displayWidth: 800,
+ displayHeight: 600,
+ };
+
+ codecInit.output = (chunk, metadata) => {
+ assert_not_equals(metadata, null);
+ if (metadata.decoderConfig)
+ decoderConfig = metadata.decoderConfig;
+ output_chunks.push(chunk);
+ }
+
+ let encoder = new VideoEncoder(codecInit);
+ encoder.configure(encoderConfig);
+
+ let frame1 = createFrame(640, 480, 0);
+ let frame2 = createFrame(640, 480, 33333);
+ t.add_cleanup(() => {
+ frame1.close();
+ frame2.close();
+ });
+
+ encoder.encode(frame1);
+ encoder.encode(frame2);
+
+ await encoder.flush();
+
+ // Decoder config should be given with the first chunk
+ assert_not_equals(decoderConfig, null);
+ assert_equals(decoderConfig.codec, encoderConfig.codec);
+ assert_greater_than_equal(decoderConfig.codedHeight, encoderConfig.height);
+ assert_greater_than_equal(decoderConfig.codedWidth, encoderConfig.width);
+ assert_equals(decoderConfig.displayAspectHeight, encoderConfig.displayHeight);
+ assert_equals(decoderConfig.displayAspectWidth, encoderConfig.displayWidth);
+ assert_not_equals(decoderConfig.colorSpace.primaries, null);
+ assert_not_equals(decoderConfig.colorSpace.transfer, null);
+ assert_not_equals(decoderConfig.colorSpace.matrix, null);
+ assert_not_equals(decoderConfig.colorSpace.fullRange, null);
+
+ assert_equals(output_chunks.length, 2);
+ assert_equals(output_chunks[0].timestamp, frame1.timestamp);
+ assert_equals(output_chunks[0].duration, frame1.duration);
+ assert_equals(output_chunks[1].timestamp, frame2.timestamp);
+ assert_equals(output_chunks[1].duration, frame2.duration);
+}, 'Test successful configure(), encode(), and flush()');
+
+promise_test(async t => {
+ let codecInit = getDefaultCodecInit(t);
+ let encoderConfig = {
+ codec: 'vp8',
+ width: 320,
+ height: 200
+ };
+
+ codecInit.output = (chunk, metadata) => {}
+
+ let encoder = new VideoEncoder(codecInit);
+
+ // No encodes yet.
+ assert_equals(encoder.encodeQueueSize, 0);
+
+ encoder.configure(encoderConfig);
+
+ // Still no encodes.
+ assert_equals(encoder.encodeQueueSize, 0);
+
+ const frames_count = 100;
+ let frames = [];
+ for (let i = 0; i < frames_count; i++) {
+ let frame = createFrame(320, 200, i * 16000);
+ frames.push(frame);
+ }
+
+ let lastDequeueSize = Infinity;
+ encoder.ondequeue = () => {
+ assert_greater_than(lastDequeueSize, 0, "Dequeue event after queue empty");
+ assert_greater_than(lastDequeueSize, encoder.encodeQueueSize,
+ "Dequeue event without decreased queue size");
+ lastDequeueSize = encoder.encodeQueueSize;
+ };
+
+ for (let frame of frames)
+ encoder.encode(frame);
+
+ assert_greater_than_equal(encoder.encodeQueueSize, 0);
+ assert_less_than_equal(encoder.encodeQueueSize, frames_count);
+
+ await encoder.flush();
+ // We can guarantee that all encodes are processed after a flush.
+ assert_equals(encoder.encodeQueueSize, 0);
+ // Last dequeue event should fire when the queue is empty.
+ assert_equals(lastDequeueSize, 0);
+
+ // Reset this to Infinity to track the decline of queue size for this next
+ // batch of encodes.
+ lastDequeueSize = Infinity;
+
+ for (let frame of frames) {
+ encoder.encode(frame);
+ frame.close();
+ }
+
+ assert_greater_than_equal(encoder.encodeQueueSize, 0);
+ encoder.reset();
+ assert_equals(encoder.encodeQueueSize, 0);
+}, 'encodeQueueSize test');
+
+
+promise_test(async t => {
+ let timestamp = 0;
+ let callbacks_before_reset = 0;
+ let callbacks_after_reset = 0;
+ const timestamp_step = 40000;
+ const expected_callbacks_before_reset = 3;
+ let codecInit = getDefaultCodecInit(t);
+ let original = createFrame(320, 200, 0);
+ let encoder = null;
+ let reset_completed = false;
+ codecInit.output = (chunk, metadata) => {
+ if (chunk.timestamp % 2 == 0) {
+ // pre-reset frames have even timestamp
+ callbacks_before_reset++;
+ if (callbacks_before_reset == expected_callbacks_before_reset) {
+ encoder.reset();
+ reset_completed = true;
+ }
+ } else {
+ // after-reset frames have odd timestamp
+ callbacks_after_reset++;
+ }
+ }
+
+ encoder = new VideoEncoder(codecInit);
+ encoder.configure(defaultConfig);
+ await encoder.flush();
+
+ // Send 10x frames to the encoder, call reset() on it after x outputs,
+ // and make sure no more chunks are emitted afterwards.
+ let encodes_before_reset = expected_callbacks_before_reset * 10;
+ for (let i = 0; i < encodes_before_reset; i++) {
+ let frame = new VideoFrame(original, { timestamp: timestamp });
+ timestamp += timestamp_step;
+ encoder.encode(frame);
+ frame.close();
+ }
+
+ await t.step_wait(() => reset_completed,
+ "Reset() should be called by output callback", 10000, 1);
+
+ assert_equals(callbacks_before_reset, expected_callbacks_before_reset);
+ assert_true(reset_completed);
+ assert_equals(encoder.encodeQueueSize, 0);
+
+ let newConfig = { ...defaultConfig };
+ newConfig.width = 800;
+ newConfig.height = 600;
+ encoder.configure(newConfig);
+
+ const frames_after_reset = 5;
+ for (let i = 0; i < frames_after_reset; i++) {
+ let frame = createFrame(800, 600, timestamp + 1);
+ timestamp += timestamp_step;
+ encoder.encode(frame);
+ frame.close();
+ }
+ await encoder.flush();
+
+ assert_equals(callbacks_after_reset, frames_after_reset,
+ "not all after-reset() outputs have been emitted");
+ assert_equals(callbacks_before_reset, expected_callbacks_before_reset,
+    "pre-reset() outputs were emitted after reset() and flush()");
+ assert_equals(encoder.encodeQueueSize, 0);
+}, 'Test successful reset() and re-configure()');
+
+promise_test(async t => {
+ let output_chunks = [];
+ let codecInit = getDefaultCodecInit(t);
+ codecInit.output = chunk => output_chunks.push(chunk);
+
+ let encoder = new VideoEncoder(codecInit);
+
+ // No encodes yet.
+ assert_equals(encoder.encodeQueueSize, 0);
+
+ let config = defaultConfig;
+
+ encoder.configure(config);
+
+ let frame1 = createFrame(640, 480, 0);
+ let frame2 = createFrame(640, 480, 33333);
+
+ encoder.encode(frame1);
+ encoder.configure(config);
+
+ encoder.encode(frame2);
+
+ await encoder.flush();
+
+ // We can guarantee that all encodes are processed after a flush.
+ assert_equals(encoder.encodeQueueSize, 0, "queue size after encode");
+
+ assert_equals(output_chunks.length, 2, "number of chunks");
+ assert_equals(output_chunks[0].timestamp, frame1.timestamp);
+ assert_equals(output_chunks[1].timestamp, frame2.timestamp);
+
+ output_chunks = [];
+
+ let frame3 = createFrame(640, 480, 66666);
+ let frame4 = createFrame(640, 480, 100000);
+
+ encoder.encode(frame3);
+
+ // Verify that a failed call to configure does not change the encoder's state.
+ let badConfig = { ...defaultConfig };
+ badConfig.codec = 'bogus';
+ assert_throws_js(TypeError, () => encoder.configure(badConfig));
+
+ encoder.encode(frame4);
+
+ await encoder.flush();
+
+ assert_equals(output_chunks[0].timestamp, frame3.timestamp);
+ assert_equals(output_chunks[1].timestamp, frame4.timestamp);
+}, 'Test successful encode() after re-configure().');
+
+promise_test(async t => {
+ let encoder = new VideoEncoder(getDefaultCodecInit(t));
+
+ let frame = createFrame(640, 480, 0);
+
+ return testClosedCodec(t, encoder, defaultConfig, frame);
+}, 'Verify closed VideoEncoder operations');
+
+promise_test(async t => {
+ let encoder = new VideoEncoder(getDefaultCodecInit(t));
+
+ let frame = createFrame(640, 480, 0);
+
+ return testUnconfiguredCodec(t, encoder, frame);
+}, 'Verify unconfigured VideoEncoder operations');
+
+promise_test(async t => {
+ let encoder = new VideoEncoder(getDefaultCodecInit(t));
+
+ let frame = createFrame(640, 480, 0);
+ frame.close();
+
+ encoder.configure(defaultConfig);
+
+ assert_throws_js(TypeError, () => {
+ encoder.encode(frame);
+ });
+}, 'Verify encoding closed frames throws.');
+
+promise_test(async t => {
+ let output_chunks = [];
+ let codecInit = getDefaultCodecInit(t);
+ codecInit.output = chunk => output_chunks.push(chunk);
+
+ let encoder = new VideoEncoder(codecInit);
+ let config = defaultConfig;
+ encoder.configure(config);
+
+ let frame = createFrame(640, 480, -10000);
+ encoder.encode(frame);
+ frame.close();
+ await encoder.flush();
+ encoder.close();
+ assert_equals(output_chunks.length, 1);
+ assert_equals(output_chunks[0].timestamp, -10000, "first chunk timestamp");
+ assert_greater_than(output_chunks[0].byteLength, 0);
+}, 'Encode video with negative timestamp');
diff --git a/testing/web-platform/tests/webcodecs/video-frame-serialization.any.js b/testing/web-platform/tests/webcodecs/video-frame-serialization.any.js
new file mode 100644
index 0000000000..4968c43cda
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/video-frame-serialization.any.js
@@ -0,0 +1,139 @@
+// META: global=window,dedicatedworker
+// META: script=/common/media.js
+// META: script=/webcodecs/utils.js
+
+var defaultInit = {
+ timestamp : 100,
+ duration : 33,
+}
+
+function createDefaultVideoFrame() {
+ let image = makeImageBitmap(32,16);
+
+ return new VideoFrame(image, defaultInit);
+}
+
+test(t => {
+ let frame = createDefaultVideoFrame();
+
+ let clone = frame.clone();
+
+ assert_equals(frame.timestamp, clone.timestamp);
+ assert_equals(frame.duration, clone.duration);
+ assert_equals(frame.visibleRect.left, clone.visibleRect.left);
+ assert_equals(frame.visibleRect.top, clone.visibleRect.top);
+ assert_equals(frame.visibleRect.width, clone.visibleRect.width);
+ assert_equals(frame.visibleRect.height, clone.visibleRect.height);
+
+ frame.close();
+ assert_true(isFrameClosed(frame));
+ clone.close();
+ assert_true(isFrameClosed(clone));
+}, 'Test we can clone a VideoFrame.');
+
+test(t => {
+ let frame = createDefaultVideoFrame();
+
+ let copy = frame;
+ let clone = frame.clone();
+
+ frame.close();
+
+ assert_equals(copy.timestamp, defaultInit.timestamp);
+ assert_equals(copy.duration, defaultInit.duration);
+ assert_true(isFrameClosed(copy));
+ assert_equals(clone.timestamp, defaultInit.timestamp);
+ assert_false(isFrameClosed(clone));
+
+ clone.close();
+}, 'Verify closing a frame doesn\'t affect its clones.');
+
+test(t => {
+ let frame = createDefaultVideoFrame();
+
+ frame.close();
+
+ assert_throws_dom("InvalidStateError", () => {
+ let clone = frame.clone();
+ });
+}, 'Verify cloning a closed frame throws.');
+
+async_test(t => {
+ let localFrame = createDefaultVideoFrame();
+
+ let channel = new MessageChannel();
+ let localPort = channel.port1;
+ let externalPort = channel.port2;
+
+ externalPort.onmessage = t.step_func((e) => {
+ let externalFrame = e.data;
+ externalFrame.close();
+ externalPort.postMessage("Done");
+ })
+
+ localPort.onmessage = t.step_func_done((e) => {
+ assert_equals(localFrame.timestamp, defaultInit.timestamp);
+ localFrame.close();
+ })
+
+ localPort.postMessage(localFrame);
+}, 'Verify closing frames does not propagate across contexts.');
+
+async_test(t => {
+ let localFrame = createDefaultVideoFrame();
+
+ let channel = new MessageChannel();
+ let localPort = channel.port1;
+ let externalPort = channel.port2;
+
+ externalPort.onmessage = t.step_func_done((e) => {
+ let externalFrame = e.data;
+ assert_equals(externalFrame.timestamp, defaultInit.timestamp);
+ externalFrame.close();
+ })
+
+ localPort.postMessage(localFrame, [localFrame]);
+ assert_true(isFrameClosed(localFrame));
+}, 'Verify transferring frames closes them.');
+
+async_test(t => {
+ let localFrame = createDefaultVideoFrame();
+
+ let channel = new MessageChannel();
+ let localPort = channel.port1;
+
+ localPort.onmessage = t.unreached_func();
+
+ localFrame.close();
+
+ assert_throws_dom("DataCloneError", () => {
+ localPort.postMessage(localFrame);
+ });
+
+ t.done();
+}, 'Verify posting closed frames throws.');
+
+promise_test(async t => {
+ const open = indexedDB.open('VideoFrameTestDB', 1);
+ open.onerror = t.unreached_func('open should succeed');
+ open.onupgradeneeded = (event) => {
+ let db = event.target.result;
+ db.createObjectStore('MyVideoFrames', { keyPath: 'id' });
+ };
+ let db = await new Promise((resolve) => {
+ open.onsuccess = (e) => {
+ resolve(e.target.result);
+ };
+ });
+ t.add_cleanup(() => {
+ db.close();
+ indexedDB.deleteDatabase(db.name);
+ });
+
+ let transaction = db.transaction(['MyVideoFrames'], 'readwrite');
+ const store = transaction.objectStore('MyVideoFrames');
+ let frame = createDefaultVideoFrame();
+ assert_throws_dom("DataCloneError", () => {
+ store.add(frame);
+ });
+}, 'Verify storing a frame throws.');
diff --git a/testing/web-platform/tests/webcodecs/videoColorSpace.any.js b/testing/web-platform/tests/webcodecs/videoColorSpace.any.js
new file mode 100644
index 0000000000..3af828a5bd
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoColorSpace.any.js
@@ -0,0 +1,47 @@
+// META: global=window,dedicatedworker
+
+const VIDEO_COLOR_SPACE_SETS = {
+ primaries: ['bt709', 'bt470bg', 'smpte170m', 'bt2020', 'smpte432'],
+ transfer: ['bt709', 'smpte170m', 'iec61966-2-1', 'linear', 'pq', 'hlg'],
+ matrix: ['rgb', 'bt709', 'bt470bg', 'smpte170m', 'bt2020-ncl'],
+ fullRange: [true, false],
+};
+
+function generateAllCombinations() {
+ const keys = Object.keys(VIDEO_COLOR_SPACE_SETS);
+ let colorSpaces = [];
+ generateAllCombinationsHelper(keys, 0, {}, colorSpaces);
+ return colorSpaces;
+}
+
+function generateAllCombinationsHelper(keys, keyIndex, colorSpace, results) {
+ if (keyIndex >= keys.length) {
+ // Push the copied object since the colorSpace will be reused.
+ results.push(Object.assign({}, colorSpace));
+ return;
+ }
+
+ const prop = keys[keyIndex];
+ // case 1: Skip this property.
+ generateAllCombinationsHelper(keys, keyIndex + 1, colorSpace, results);
+ // case 2: Set this property with a valid value.
+ for (const val of VIDEO_COLOR_SPACE_SETS[prop]) {
+ colorSpace[prop] = val;
+ generateAllCombinationsHelper(keys, keyIndex + 1, colorSpace, results);
+ delete colorSpace[prop];
+ }
+}
+
+test(t => {
+ let colorSpaces = generateAllCombinations();
+ for (const colorSpace of colorSpaces) {
+ let vcs = new VideoColorSpace(colorSpace);
+ let json = vcs.toJSON();
+ for (const k of Object.keys(json)) {
+ assert_equals(
+ json[k],
+ colorSpace.hasOwnProperty(k) ? colorSpace[k] : null
+ );
+ }
+ }
+}, 'Test VideoColorSpace toJSON() works.');
diff --git a/testing/web-platform/tests/webcodecs/videoDecoder-codec-specific.https.any.js b/testing/web-platform/tests/webcodecs/videoDecoder-codec-specific.https.any.js
new file mode 100644
index 0000000000..4e0843646b
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoDecoder-codec-specific.https.any.js
@@ -0,0 +1,555 @@
+// META: global=window,dedicatedworker
+// META: variant=?av1
+// META: variant=?vp8
+// META: variant=?vp9
+// META: variant=?h264_avc
+// META: variant=?h264_annexb
+
+const AV1_DATA = {
+ src: 'av1.mp4',
+ config: {
+ codec: 'av01.0.04M.08',
+ codedWidth: 320,
+ codedHeight: 240,
+ visibleRect: {x: 0, y: 0, width: 320, height: 240},
+ displayWidth: 320,
+ displayHeight: 240,
+ },
+ chunks: [
+ {offset: 48, size: 1938}, {offset: 1986, size: 848},
+ {offset: 2834, size: 3}, {offset: 2837, size: 47}, {offset: 2884, size: 3},
+ {offset: 2887, size: 116}, {offset: 3003, size: 3},
+ {offset: 3006, size: 51}, {offset: 3057, size: 25},
+ {offset: 3082, size: 105}
+ ]
+};
+
+const VP8_DATA = {
+ src: 'vp8.webm',
+ config: {
+ codec: 'vp8',
+ codedWidth: 320,
+ codedHeight: 240,
+ visibleRect: {x: 0, y: 0, width: 320, height: 240},
+ displayWidth: 320,
+ displayHeight: 240,
+ },
+ chunks: [
+ {offset: 522, size: 4826}, {offset: 5355, size: 394},
+ {offset: 5756, size: 621}, {offset: 6384, size: 424},
+ {offset: 6815, size: 532}, {offset: 7354, size: 655},
+ {offset: 8016, size: 670}, {offset: 8693, size: 2413},
+ {offset: 11113, size: 402}, {offset: 11522, size: 686}
+ ]
+};
+
+const VP9_DATA = {
+ src: 'vp9.mp4',
+ // TODO(sandersd): Verify that the file is actually level 1.
+ config: {
+ codec: 'vp09.00.10.08',
+ codedWidth: 320,
+ codedHeight: 240,
+ displayAspectWidth: 320,
+ displayAspectHeight: 240,
+ },
+ chunks: [
+ {offset: 44, size: 3315}, {offset: 3359, size: 203},
+ {offset: 3562, size: 245}, {offset: 3807, size: 172},
+ {offset: 3979, size: 312}, {offset: 4291, size: 170},
+ {offset: 4461, size: 195}, {offset: 4656, size: 181},
+ {offset: 4837, size: 356}, {offset: 5193, size: 159}
+ ]
+};
+
+const H264_AVC_DATA = {
+ src: 'h264.mp4',
+ config: {
+ codec: 'avc1.64000b',
+ description: {offset: 9490, size: 45},
+ codedWidth: 320,
+ codedHeight: 240,
+ displayAspectWidth: 320,
+ displayAspectHeight: 240,
+ },
+ chunks: [
+ {offset: 48, size: 4140}, {offset: 4188, size: 604},
+ {offset: 4792, size: 475}, {offset: 5267, size: 561},
+ {offset: 5828, size: 587}, {offset: 6415, size: 519},
+ {offset: 6934, size: 532}, {offset: 7466, size: 523},
+ {offset: 7989, size: 454}, {offset: 8443, size: 528}
+ ]
+};
+
+const H264_ANNEXB_DATA = {
+ src: 'h264.annexb',
+ config: {
+ codec: 'avc1.64000b',
+ codedWidth: 320,
+ codedHeight: 240,
+ displayAspectWidth: 320,
+ displayAspectHeight: 240,
+ },
+ chunks: [
+ {offset: 0, size: 4175}, {offset: 4175, size: 602},
+ {offset: 4777, size: 473}, {offset: 5250, size: 559},
+ {offset: 5809, size: 585}, {offset: 6394, size: 517},
+ {offset: 6911, size: 530}, {offset: 7441, size: 521},
+ {offset: 7962, size: 452}, {offset: 8414, size: 526}
+ ]
+};
+
+// Allows mutating `callbacks` after constructing the VideoDecoder, wraps calls
+// in t.step().
+function createVideoDecoder(t, callbacks) {
+ return new VideoDecoder({
+ output(frame) {
+ if (callbacks && callbacks.output) {
+ t.step(() => callbacks.output(frame));
+ } else {
+ t.unreached_func('unexpected output()');
+ }
+ },
+ error(e) {
+ if (callbacks && callbacks.error) {
+ t.step(() => callbacks.error(e));
+ } else {
+ t.unreached_func('unexpected error()');
+ }
+ }
+ });
+}
+
+function createCorruptChunk(index) {
+ let bad_data = CHUNK_DATA[index];
+ for (var i = 0; i < bad_data.byteLength; i += 4)
+ bad_data[i] = 0xFF;
+ return new EncodedVideoChunk(
+ {type: 'delta', timestamp: index, data: bad_data});
+}
+
+// Create a view of an ArrayBuffer.
+function view(buffer, {offset, size}) {
+ return new Uint8Array(buffer, offset, size);
+}
+
+async function checkImplements() {
+ // Don't run any tests if the codec is not supported.
+ assert_equals("function", typeof VideoDecoder.isConfigSupported);
+ let supported = false;
+ try {
+ // TODO(sandersd): To properly support H.264 in AVC format, this should
+ // include the `description`. For now this test assumes that H.264 Annex B
+ // support is the same as H.264 AVC support.
+ const support =
+ await VideoDecoder.isConfigSupported({codec: CONFIG.codec});
+ supported = support.supported;
+ } catch (e) {
+ }
+ assert_implements_optional(supported, CONFIG.codec + ' unsupported');
+}
+
+let CONFIG = null;
+let CHUNK_DATA = null;
+let CHUNKS = null;
+promise_setup(async () => {
+ const data = {
+ '?av1': AV1_DATA,
+ '?vp8': VP8_DATA,
+ '?vp9': VP9_DATA,
+ '?h264_avc': H264_AVC_DATA,
+ '?h264_annexb': H264_ANNEXB_DATA
+ }[location.search];
+
+ // Fetch the media data and prepare buffers.
+ const response = await fetch(data.src);
+ const buf = await response.arrayBuffer();
+
+ CONFIG = {...data.config};
+ if (data.config.description) {
+ CONFIG.description = view(buf, data.config.description);
+ }
+
+ CHUNK_DATA = data.chunks.map((chunk, i) => view(buf, chunk));
+
+ CHUNKS = CHUNK_DATA.map(
+ (data, i) => new EncodedVideoChunk(
+ {type: i == 0 ? 'key' : 'delta', timestamp: i, duration: 1, data}));
+});
+
+promise_test(async t => {
+ await checkImplements();
+ const support = await VideoDecoder.isConfigSupported(CONFIG);
+ assert_true(support.supported, 'supported');
+}, 'Test isConfigSupported()');
+
+promise_test(async t => {
+ await checkImplements();
+ // TODO(sandersd): Create a 1080p `description` for H.264 in AVC format.
+ // This version is testing only the H.264 Annex B path.
+ const config = {
+ codec: CONFIG.codec,
+ codedWidth: 1920,
+ codedHeight: 1088,
+ displayAspectWidth: 1920,
+ displayAspectHeight: 1080,
+ };
+
+ const support = await VideoDecoder.isConfigSupported(config);
+ assert_true(support.supported, 'supported');
+}, 'Test isConfigSupported() with 1080p crop');
+
+promise_test(async t => {
+ await checkImplements();
+ // Define a valid config that includes a hypothetical `futureConfigFeature`,
+ // which is not yet recognized by the User Agent.
+ const config = {
+ ...CONFIG,
+ colorSpace: {primaries: 'bt709'},
+ futureConfigFeature: 'foo',
+ };
+
+ // The UA will evaluate validConfig as being "valid", ignoring the
+ // `futureConfigFeature` it doesn't recognize.
+ const support = await VideoDecoder.isConfigSupported(config);
+ assert_true(support.supported, 'supported');
+ assert_equals(support.config.codec, config.codec, 'codec');
+ assert_equals(support.config.codedWidth, config.codedWidth, 'codedWidth');
+ assert_equals(support.config.codedHeight, config.codedHeight, 'codedHeight');
+ assert_equals(support.config.displayAspectWidth, config.displayAspectWidth, 'displayAspectWidth');
+ assert_equals(support.config.displayAspectHeight, config.displayAspectHeight, 'displayAspectHeight');
+ assert_equals(support.config.colorSpace.primaries, config.colorSpace.primaries, 'color primaries');
+ assert_equals(support.config.colorSpace.transfer, null, 'color transfer');
+ assert_equals(support.config.colorSpace.matrix, null, 'color matrix');
+ assert_equals(support.config.colorSpace.fullRange, null, 'color range');
+ assert_false(support.config.hasOwnProperty('futureConfigFeature'), 'futureConfigFeature');
+
+ if (config.description) {
+ // The description must be copied.
+ assert_false(
+ support.config.description === config.description,
+ 'description is unique');
+ assert_array_equals(
+ new Uint8Array(support.config.description, 0),
+ new Uint8Array(config.description, 0), 'description');
+ } else {
+ assert_false(support.config.hasOwnProperty('description'), 'description');
+ }
+}, 'Test that isConfigSupported() returns a parsed configuration');
+
+promise_test(async t => {
+ await checkImplements();
+ async function test(t, config, description) {
+ await promise_rejects_js(
+ t, TypeError, VideoDecoder.isConfigSupported(config), description);
+
+ const decoder = createVideoDecoder(t);
+ assert_throws_js(TypeError, () => decoder.configure(config), description);
+ assert_equals(decoder.state, 'unconfigured', 'state');
+ }
+
+ await test(t, {...CONFIG, codedWidth: 0}, 'invalid codedWidth');
+ await test(t, {...CONFIG, displayAspectWidth: 0}, 'invalid displayAspectWidth');
+}, 'Test invalid configs');
+
+promise_test(async t => {
+ await checkImplements();
+ const decoder = createVideoDecoder(t);
+ decoder.configure(CONFIG);
+ assert_equals(decoder.state, 'configured', 'state');
+}, 'Test configure()');
+
+promise_test(async t => {
+ await checkImplements();
+ const callbacks = {};
+ const decoder = createVideoDecoder(t, callbacks);
+ decoder.configure(CONFIG);
+ decoder.decode(CHUNKS[0]);
+
+ let outputs = 0;
+ callbacks.output = frame => {
+ outputs++;
+ assert_equals(frame.timestamp, CHUNKS[0].timestamp, 'timestamp');
+ assert_equals(frame.duration, CHUNKS[0].duration, 'duration');
+ frame.close();
+ };
+
+ await decoder.flush();
+ assert_equals(outputs, 1, 'outputs');
+}, 'Decode a key frame');
+
+promise_test(async t => {
+ await checkImplements();
+ const callbacks = {};
+ const decoder = createVideoDecoder(t, callbacks);
+ decoder.configure(CONFIG);
+
+ // Ensure type value is verified.
+ assert_equals(CHUNKS[1].type, 'delta');
+ assert_throws_dom('DataError', () => decoder.decode(CHUNKS[1], 'decode'));
+}, 'Decode a non key frame first fails');
+
+promise_test(async t => {
+ await checkImplements();
+ const callbacks = {};
+ const decoder = createVideoDecoder(t, callbacks);
+ decoder.configure(CONFIG);
+ for (let i = 0; i < 16; i++) {
+ decoder.decode(new EncodedVideoChunk(
+ {type: 'key', timestamp: 0, data: CHUNK_DATA[0]}));
+ }
+ assert_greater_than(decoder.decodeQueueSize, 0);
+
+ // Wait for the first output, then reset the decoder.
+ let outputs = 0;
+ await new Promise(resolve => {
+ callbacks.output = frame => {
+ outputs++;
+ assert_equals(outputs, 1, 'outputs');
+ assert_equals(frame.timestamp, 0, 'timestamp');
+ frame.close();
+ decoder.reset();
+ assert_equals(decoder.decodeQueueSize, 0, 'decodeQueueSize');
+ resolve();
+ };
+ });
+
+ decoder.configure(CONFIG);
+ for (let i = 0; i < 4; i++) {
+ decoder.decode(new EncodedVideoChunk(
+ {type: 'key', timestamp: 1, data: CHUNK_DATA[0]}));
+ }
+
+ // Expect future outputs to come from after the reset.
+ callbacks.output = frame => {
+ outputs++;
+ assert_equals(frame.timestamp, 1, 'timestamp');
+ frame.close();
+ };
+
+ await decoder.flush();
+ assert_equals(outputs, 5);
+ assert_equals(decoder.decodeQueueSize, 0);
+}, 'Verify reset() suppresses outputs');
+
+promise_test(async t => {
+ await checkImplements();
+ const decoder = createVideoDecoder(t);
+ assert_equals(decoder.state, 'unconfigured');
+
+ decoder.reset();
+ assert_equals(decoder.state, 'unconfigured');
+ assert_throws_dom(
+ 'InvalidStateError', () => decoder.decode(CHUNKS[0]), 'decode');
+ await promise_rejects_dom(t, 'InvalidStateError', decoder.flush(), 'flush');
+}, 'Test unconfigured VideoDecoder operations');
+
+promise_test(async t => {
+ await checkImplements();
+ const decoder = createVideoDecoder(t);
+ decoder.close();
+ assert_equals(decoder.state, 'closed');
+ assert_throws_dom(
+ 'InvalidStateError', () => decoder.configure(CONFIG), 'configure');
+ assert_throws_dom('InvalidStateError', () => decoder.reset(), 'reset');
+ assert_throws_dom('InvalidStateError', () => decoder.close(), 'close');
+ assert_throws_dom(
+ 'InvalidStateError', () => decoder.decode(CHUNKS[0]), 'decode');
+ await promise_rejects_dom(t, 'InvalidStateError', decoder.flush(), 'flush');
+}, 'Test closed VideoDecoder operations');
+
+promise_test(async t => {
+ await checkImplements();
+ const callbacks = {};
+
+ let errors = 0;
+ callbacks.error = e => errors++;
+ callbacks.output = frame => { frame.close(); };
+
+ const decoder = createVideoDecoder(t, callbacks);
+ decoder.configure(CONFIG);
+ decoder.decode(CHUNKS[0]); // Decode keyframe first.
+ decoder.decode(new EncodedVideoChunk(
+ {type: 'key', timestamp: 1, data: new ArrayBuffer(0)}));
+
+ await promise_rejects_dom(t, 'AbortError', decoder.flush());
+
+ assert_equals(errors, 1, 'errors');
+ assert_equals(decoder.state, 'closed', 'state');
+}, 'Decode empty frame');
+
+
+promise_test(async t => {
+ await checkImplements();
+ const callbacks = {};
+
+ let errors = 0;
+ callbacks.error = e => errors++;
+
+ let outputs = 0;
+ callbacks.output = frame => {
+ outputs++;
+ frame.close();
+ };
+
+ const decoder = createVideoDecoder(t, callbacks);
+ decoder.configure(CONFIG);
+ decoder.decode(CHUNKS[0]); // Decode keyframe first.
+ decoder.decode(createCorruptChunk(2));
+
+ await promise_rejects_dom(t, 'AbortError', decoder.flush());
+
+ assert_less_than_equal(outputs, 1);
+ assert_equals(errors, 1, 'errors');
+ assert_equals(decoder.state, 'closed', 'state');
+}, 'Decode corrupt frame');
+
+promise_test(async t => {
+ await checkImplements();
+ const decoder = createVideoDecoder(t);
+
+ decoder.configure(CONFIG);
+ decoder.decode(CHUNKS[0]); // Decode keyframe first.
+ decoder.decode(createCorruptChunk(1));
+
+ let flushDone = decoder.flush();
+ decoder.close();
+
+ // Flush should have been synchronously rejected, with no output() or error()
+ // callbacks.
+ await promise_rejects_dom(t, 'AbortError', flushDone);
+}, 'Close while decoding corrupt frame');
+
+promise_test(async t => {
+ await checkImplements();
+ const callbacks = {};
+ const decoder = createVideoDecoder(t, callbacks);
+
+ decoder.configure(CONFIG);
+ decoder.decode(CHUNKS[0]);
+
+ let outputs = 0;
+ callbacks.output = frame => {
+ outputs++;
+ frame.close();
+ };
+
+ await decoder.flush();
+ assert_equals(outputs, 1, 'outputs');
+
+ decoder.decode(CHUNKS[0]);
+ await decoder.flush();
+ assert_equals(outputs, 2, 'outputs');
+}, 'Test decoding after flush');
+
+promise_test(async t => {
+ await checkImplements();
+ const callbacks = {};
+ const decoder = createVideoDecoder(t, callbacks);
+
+ decoder.configure(CONFIG);
+ decoder.decode(new EncodedVideoChunk(
+ {type: 'key', timestamp: -42, data: CHUNK_DATA[0]}));
+
+ let outputs = 0;
+ callbacks.output = frame => {
+ outputs++;
+ assert_equals(frame.timestamp, -42, 'timestamp');
+ frame.close();
+ };
+
+ await decoder.flush();
+ assert_equals(outputs, 1, 'outputs');
+}, 'Test decoding with a negative timestamp');
+
+promise_test(async t => {
+ await checkImplements();
+ const callbacks = {};
+ const decoder = createVideoDecoder(t, callbacks);
+
+ decoder.configure(CONFIG);
+ decoder.decode(CHUNKS[0]);
+ decoder.decode(CHUNKS[1]);
+ const flushDone = decoder.flush();
+
+ // Wait for the first output, then reset.
+ let outputs = 0;
+ await new Promise(resolve => {
+ callbacks.output = frame => {
+ outputs++;
+ assert_equals(outputs, 1, 'outputs');
+ decoder.reset();
+ frame.close();
+ resolve();
+ };
+ });
+
+ // Flush should have been synchronously rejected.
+ await promise_rejects_dom(t, 'AbortError', flushDone);
+
+ assert_equals(outputs, 1, 'outputs');
+}, 'Test reset during flush');
+
+promise_test(async t => {
+ await checkImplements();
+ const callbacks = {};
+ const decoder = createVideoDecoder(t, callbacks);
+
+ decoder.configure({...CONFIG, optimizeForLatency: true});
+ decoder.decode(CHUNKS[0]);
+
+ // The frame should be output without flushing.
+ await new Promise(resolve => {
+ callbacks.output = frame => {
+ frame.close();
+ resolve();
+ };
+ });
+}, 'Test low-latency decoding');
+
+
+promise_test(async t => {
+ await checkImplements();
+ const callbacks = {};
+ callbacks.output = frame => { frame.close(); };
+ const decoder = createVideoDecoder(t, callbacks);
+
+ // No decodes yet.
+ assert_equals(decoder.decodeQueueSize, 0);
+
+ decoder.configure(CONFIG);
+
+ // Still no decodes.
+ assert_equals(decoder.decodeQueueSize, 0);
+
+ let lastDequeueSize = Infinity;
+ decoder.ondequeue = () => {
+ assert_greater_than(lastDequeueSize, 0, "Dequeue event after queue empty");
+ assert_greater_than(lastDequeueSize, decoder.decodeQueueSize,
+ "Dequeue event without decreased queue size");
+ lastDequeueSize = decoder.decodeQueueSize;
+ };
+
+ for (let chunk of CHUNKS)
+ decoder.decode(chunk);
+
+ assert_greater_than_equal(decoder.decodeQueueSize, 0);
+ assert_less_than_equal(decoder.decodeQueueSize, CHUNKS.length);
+
+ await decoder.flush();
+ // We can guarantee that all decodes are processed after a flush.
+ assert_equals(decoder.decodeQueueSize, 0);
+ // Last dequeue event should fire when the queue is empty.
+ assert_equals(lastDequeueSize, 0);
+
+ // Reset this to Infinity to track the decline of queue size for this next
+ // batch of decodes.
+ lastDequeueSize = Infinity;
+
+ for (let chunk of CHUNKS)
+ decoder.decode(chunk);
+
+ assert_greater_than_equal(decoder.decodeQueueSize, 0);
+ decoder.reset();
+ assert_equals(decoder.decodeQueueSize, 0);
+}, 'VideoDecoder decodeQueueSize test');
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-alpha.any.js b/testing/web-platform/tests/webcodecs/videoFrame-alpha.any.js
new file mode 100644
index 0000000000..f4c4dfa737
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-alpha.any.js
@@ -0,0 +1,50 @@
+// META: global=window,dedicatedworker
+
+function makeRGBACanvas() {
+ let canvas = new OffscreenCanvas(32, 32, {alpha: true});
+ let ctx = canvas.getContext('2d');
+
+ // Opaque red quadrant.
+ ctx.fillStyle = 'rgba(255, 0, 0, 255)';
+ ctx.fillRect(0, 0, 16, 16);
+
+ // Opaque green quadrant.
+ ctx.fillStyle = 'rgba(0, 255, 0, 255)';
+ ctx.fillRect(16, 0, 16, 16);
+
+ // Opaque blue quadrant.
+ ctx.fillStyle = 'rgba(0, 0, 255, 255)';
+ ctx.fillRect(0, 16, 16, 16);
+
+ // Remaining quadrant should be transparent black.
+ return canvas;
+}
+
+function getPixel(ctx, x, y) {
+ let data = ctx.getImageData(x, y, 1, 1).data;
+ return data[0] * 2 ** 24 + data[1] * 2 ** 16 + data[2] * 2 ** 8 + data[3];
+}
+
+function verifyPicture(picture) {
+ let canvas = new OffscreenCanvas(32, 32, {alpha: true});
+ let ctx = canvas.getContext('2d');
+ ctx.drawImage(picture, 0, 0);
+ assert_equals(getPixel(ctx, 8, 8), 0xFF0000FF);
+ assert_equals(getPixel(ctx, 24, 8), 0x00FF00FF);
+ assert_equals(getPixel(ctx, 8, 24), 0x0000FFFF);
+ assert_equals(getPixel(ctx, 24, 24), 0x00000000);
+}
+
+promise_test(async () => {
+ let src = makeRGBACanvas();
+ let frame = new VideoFrame(src, {alpha: 'keep', timestamp: 0});
+ verifyPicture(frame);
+ verifyPicture(await createImageBitmap(frame));
+}, 'OffscreenCanvas source preserves alpha');
+
+promise_test(async () => {
+ let src = makeRGBACanvas().transferToImageBitmap();
+ let frame = new VideoFrame(src, {alpha: 'keep', timestamp: 0});
+ verifyPicture(frame);
+ verifyPicture(await createImageBitmap(frame));
+}, 'ImageBitmap source preserves alpha');
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-canvasImageSource.html b/testing/web-platform/tests/webcodecs/videoFrame-canvasImageSource.html
new file mode 100644
index 0000000000..397404be4e
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-canvasImageSource.html
@@ -0,0 +1,142 @@
+<title>Test VideoFrame creation from CanvasImageSource.</title>
+<style>
+button {
+ display: inline-block;
+ min-height: 100px; min-width: 100px;
+ background: no-repeat 5% center url(four-colors.png);
+}
+</style>
+<video preload="auto"></video>
+<img src="four-colors.png"/>
+<canvas id=""></canvas>
+<svg width="320" height="240" xmlns="http://www.w3.org/2000/svg">
+<image href="four-colors.png" height="320" width="240"/>
+</svg>
+<button></button>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/webcodecs/image-decoder-utils.js"></script>
+<script>
+async_test(t => {
+ let video = document.querySelector('video');
+ video.onerror = t.unreached_func();
+ video.requestVideoFrameCallback(t.step_func(_ => {
+ let frame = new VideoFrame(video);
+ assert_true(!!frame);
+ assert_equals(frame.displayWidth, video.videoWidth);
+ assert_equals(frame.displayHeight, video.videoHeight);
+
+ let canvas = new OffscreenCanvas(frame.displayWidth, frame.displayHeight);
+ let ctx = canvas.getContext('2d');
+ ctx.drawImage(video, 0, 0);
+ verifyFourColorsImage(video.videoWidth, video.videoHeight, ctx);
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
+ ctx.drawImage(frame, 0, 0);
+ verifyFourColorsImage(frame.displayWidth, frame.displayHeight, ctx);
+
+ let frame_copy = new VideoFrame(frame, {duration: 1234});
+ assert_equals(frame.timestamp, frame_copy.timestamp);
+ assert_equals(frame_copy.duration, 1234);
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
+ ctx.drawImage(frame_copy, 0, 0);
+ verifyFourColorsImage(frame_copy.displayWidth, frame_copy.displayHeight,
+ ctx);
+ frame_copy.close();
+
+ frame_copy = new VideoFrame(frame, {timestamp: 1234, duration: 456});
+ assert_equals(frame_copy.timestamp, 1234);
+ assert_equals(frame_copy.duration, 456);
+ frame_copy.close();
+
+ frame_copy = new VideoFrame(frame);
+ assert_equals(frame.format, frame_copy.format);
+ assert_equals(frame.timestamp, frame_copy.timestamp);
+ assert_equals(frame.codedWidth, frame_copy.codedWidth);
+ assert_equals(frame.codedHeight, frame_copy.codedHeight);
+ assert_equals(frame.displayWidth, frame_copy.displayWidth);
+ assert_equals(frame.displayHeight, frame_copy.displayHeight);
+ assert_equals(frame.duration, frame_copy.duration);
+ frame_copy.close();
+
+ frame.close();
+ t.done();
+ }));
+ video.src = 'four-colors.mp4';
+}, '<video> and VideoFrame constructed VideoFrame');
+
+test(t => {
+ let button = document.querySelector('button');
+ let bgImage = button.computedStyleMap().get('background-image');
+ assert_throws_dom('SecurityError', _ => { new VideoFrame(bgImage, {timestamp: 0}); },
+ 'CSSImageValues are currently always tainted');
+}, 'CSSImageValue constructed VideoFrame');
+
+test(t => {
+ let frame = new VideoFrame(document.querySelector('img'), {timestamp: 0});
+ let canvas = new OffscreenCanvas(frame.displayWidth, frame.displayHeight);
+ let ctx = canvas.getContext('2d');
+ ctx.drawImage(frame, 0, 0);
+ verifyFourColorsImage(frame.displayWidth, frame.displayHeight, ctx);
+ frame.close();
+}, 'Image element constructed VideoFrame');
+
+test(t => {
+ let frame = new VideoFrame(document.querySelector('image'), {timestamp: 0});
+ let canvas = new OffscreenCanvas(frame.displayWidth, frame.displayHeight);
+ let ctx = canvas.getContext('2d');
+ ctx.drawImage(frame, 0, 0);
+ verifyFourColorsImage(frame.displayWidth, frame.displayHeight, ctx);
+ frame.close();
+}, 'SVGImageElement constructed VideoFrame');
+
+function drawFourColors(canvas) {
+ let ctx = canvas.getContext('2d');
+ ctx.fillStyle = '#FFFF00'; // yellow
+ ctx.fillRect(0, 0, canvas.width / 2, canvas.height / 2);
+ ctx.fillStyle = '#FF0000'; // red
+ ctx.fillRect(canvas.width / 2, 0, canvas.width / 2, canvas.height / 2);
+ ctx.fillStyle = '#0000FF'; // blue
+ ctx.fillRect(0, canvas.height / 2, canvas.width / 2, canvas.height / 2);
+ ctx.fillStyle = '#00FF00'; // green
+ ctx.fillRect(canvas.width / 2, canvas.height / 2, canvas.width / 2,
+ canvas.height / 2);
+}
+
+test(t => {
+ let canvas = document.querySelector('canvas');
+ canvas.width = 320;
+ canvas.height = 240;
+
+ // Draw and verify four colors image.
+ drawFourColors(canvas);
+ let ctx = canvas.getContext('2d');
+ verifyFourColorsImage(canvas.width, canvas.height, ctx);
+
+ let frame = new VideoFrame(canvas, {timestamp: 0});
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
+ ctx.drawImage(frame, 0, 0);
+ verifyFourColorsImage(canvas.width, canvas.height, ctx);
+ frame.close();
+}, 'Canvas element constructed VideoFrame');
+
+test(t => {
+ let canvas = document.querySelector('canvas');
+ canvas.width = 320;
+ canvas.height = 240;
+
+ // Draw and verify four colors image.
+ drawFourColors(canvas);
+ let ctx = canvas.getContext('2d');
+ verifyFourColorsImage(canvas.width, canvas.height, ctx);
+
+ // Set a different timestamp to try and ensure the same frame isn't reused.
+ let frame = new VideoFrame(canvas, {timestamp: 0});
+ let frame_copy = new VideoFrame(frame, {timestamp: 1});
+ frame.close();
+
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
+ ctx.drawImage(frame_copy, 0, 0);
+ verifyFourColorsImage(canvas.width, canvas.height, ctx);
+ frame_copy.close();
+}, 'Copy of canvas element constructed VideoFrame');
+</script>
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-construction.any.js b/testing/web-platform/tests/webcodecs/videoFrame-construction.any.js
new file mode 100644
index 0000000000..d6374c11d1
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-construction.any.js
@@ -0,0 +1,757 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+// META: script=/webcodecs/videoFrame-utils.js
+
+test(t => {
+ let image = makeImageBitmap(32, 16);
+ let frame = new VideoFrame(image, {timestamp: 10});
+
+ assert_equals(frame.timestamp, 10, 'timestamp');
+ assert_equals(frame.duration, null, 'duration');
+ assert_equals(frame.visibleRect.width, 32, 'visibleRect.width');
+ assert_equals(frame.visibleRect.height, 16, 'visibleRect.height');
+ assert_equals(frame.displayWidth, 32, 'displayWidth');
+ assert_equals(frame.displayHeight, 16, 'displayHeight');
+
+ frame.close();
+}, 'Test we can construct a VideoFrame.');
+
+test(t => {
+ let image = makeImageBitmap(32, 16);
+ let frame = new VideoFrame(image, {timestamp: 10, duration: 15});
+ frame.close();
+
+ assert_equals(frame.format, null, 'format')
+ assert_equals(frame.timestamp, 10, 'timestamp');
+ assert_equals(frame.duration, 15, 'duration');
+ assert_equals(frame.codedWidth, 0, 'codedWidth');
+ assert_equals(frame.codedHeight, 0, 'codedHeight');
+ assert_equals(frame.visibleRect, null, 'visibleRect');
+ assert_equals(frame.displayWidth, 0, 'displayWidth');
+ assert_equals(frame.displayHeight, 0, 'displayHeight');
+ assert_equals(frame.colorSpace.primaries, null, 'colorSpace.primaries');
+ assert_equals(frame.colorSpace.transfer, null, 'colorSpace.transfer');
+ assert_equals(frame.colorSpace.matrix, null, 'colorSpace.matrix');
+ assert_equals(frame.colorSpace.fullRange, null, 'colorSpace.fullRange');
+ assert_true(isFrameClosed(frame));
+
+ assert_throws_dom('InvalidStateError', () => frame.clone());
+}, 'Test closed VideoFrame.');
+
+test(t => {
+ let image = makeImageBitmap(32, 16);
+ let frame = new VideoFrame(image, {timestamp: -10});
+ assert_equals(frame.timestamp, -10, 'timestamp');
+ frame.close();
+}, 'Test we can construct a VideoFrame with a negative timestamp.');
+
+promise_test(async t => {
+ verifyTimestampRequiredToConstructFrame(makeImageBitmap(1, 1));
+}, 'Test that timestamp is required when constructing VideoFrame from ImageBitmap');
+
+promise_test(async t => {
+ verifyTimestampRequiredToConstructFrame(makeOffscreenCanvas(16, 16));
+}, 'Test that timestamp is required when constructing VideoFrame from OffscreenCanvas');
+
+promise_test(async t => {
+ let init = {
+ format: 'I420',
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2
+ };
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ ]);
+ let i420Frame = new VideoFrame(data, init);
+ let validFrame = new VideoFrame(i420Frame);
+ validFrame.close();
+}, 'Test that timestamp is NOT required when constructing VideoFrame from another VideoFrame');
+
+test(t => {
+ let image = makeImageBitmap(1, 1);
+ let frame = new VideoFrame(image, {timestamp: 10});
+
+ assert_equals(frame.visibleRect.width, 1, 'visibleRect.width');
+ assert_equals(frame.visibleRect.height, 1, 'visibleRect.height');
+ assert_equals(frame.displayWidth, 1, 'displayWidth');
+ assert_equals(frame.displayHeight, 1, 'displayHeight');
+
+ frame.close();
+}, 'Test we can construct an odd-sized VideoFrame.');
+
+test(t => {
+ // Test only valid for Window contexts.
+ if (!('document' in self))
+ return;
+
+ let video = document.createElement('video');
+
+ assert_throws_dom('InvalidStateError', () => {
+ let frame = new VideoFrame(video, {timestamp: 10});
+ })
+}, 'Test constructing w/ unusable image argument throws: HAVE_NOTHING <video>.');
+
+promise_test(async t => {
+ // Test only valid for Window contexts.
+ if (!('document' in self))
+ return;
+
+ let video = document.createElement('video');
+ video.src = 'av1.mp4';
+ video.autoplay = true;
+ video.controls = false;
+ video.muted = false;
+ document.body.appendChild(video);
+
+ const loadVideo = new Promise((resolve) => {
+ video.onloadeddata = () => resolve();
+ });
+ await loadVideo;
+
+ let frame = new VideoFrame(video, {timestamp: 10});
+ assert_equals(frame.codedWidth, 320, 'codedWidth');
+ assert_equals(frame.codedHeight, 240, 'codedHeight');
+ assert_equals(frame.timestamp, 10, 'timestamp');
+ frame.close();
+}, 'Test we can construct a VideoFrame from a <video>.');
+
+test(t => {
+ let canvas = new OffscreenCanvas(0, 0);
+
+ assert_throws_dom('InvalidStateError', () => {
+ let frame = new VideoFrame(canvas, {timestamp: 10});
+ })
+}, 'Test constructing w/ unusable image argument throws: emtpy Canvas.');
+
+test(t => {
+ let image = makeImageBitmap(32, 16);
+ image.close();
+
+ assert_throws_dom('InvalidStateError', () => {
+ let frame = new VideoFrame(image, {timestamp: 10});
+ })
+}, 'Test constructing w/ unusable image argument throws: closed ImageBitmap.');
+
+test(t => {
+ let image = makeImageBitmap(32, 16);
+ let frame = new VideoFrame(image, {timestamp: 10});
+ frame.close();
+
+ assert_throws_dom('InvalidStateError', () => {
+ let newFrame = new VideoFrame(frame);
+ })
+}, 'Test constructing w/ unusable image argument throws: closed VideoFrame.');
+
+test(t => {
+ let init = {
+ format: 'I420',
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2
+ };
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ ]);
+ let i420Frame = new VideoFrame(data, init);
+ let image = makeImageBitmap(32, 16);
+
+
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(
+ image,
+ {timestamp: 10, visibleRect: {x: -1, y: 0, width: 10, height: 10}}),
+ 'negative visibleRect x');
+
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(
+ image,
+ {timestamp: 10, visibleRect: {x: 0, y: 0, width: -10, height: 10}}),
+ 'negative visibleRect width');
+
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(
+ image,
+ {timestamp: 10, visibleRect: {x: 0, y: 0, width: 10, height: 0}}),
+ 'zero visibleRect height');
+
+ assert_throws_js(
+ TypeError, () => new VideoFrame(image, {
+ timestamp: 10,
+ visibleRect: {x: 0, y: Infinity, width: 10, height: 10}
+ }),
+ 'non finite visibleRect y');
+
+ assert_throws_js(
+ TypeError, () => new VideoFrame(image, {
+ timestamp: 10,
+ visibleRect: {x: 0, y: 0, width: 10, height: Infinity}
+ }),
+ 'non finite visibleRect height');
+
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(
+ image,
+ {timestamp: 10, visibleRect: {x: 0, y: 0, width: 33, height: 17}}),
+ 'visibleRect area exceeds coded size');
+
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(
+ image,
+ {timestamp: 10, visibleRect: {x: 2, y: 2, width: 32, height: 16}}),
+ 'visibleRect outside coded size');
+
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(image, {timestamp: 10, displayHeight: 10}),
+ 'displayHeight provided without displayWidth');
+
+ assert_throws_js(
+ TypeError, () => new VideoFrame(image, {timestamp: 10, displayWidth: 10}),
+ 'displayWidth provided without displayHeight');
+
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(
+ image, {timestamp: 10, displayWidth: 0, displayHeight: 10}),
+ 'displayWidth is zero');
+
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(
+ image, {timestamp: 10, displayWidth: 10, displayHeight: 0}),
+ 'displayHeight is zero');
+
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(
+ i420Frame, {visibleRect: {x: 1, y: 0, width: 2, height: 2}}),
+ 'visibleRect x is not sample aligned');
+
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(
+ i420Frame, {visibleRect: {x: 0, y: 1, width: 2, height: 2}}),
+ 'visibleRect y is not sample aligned');
+
+}, 'Test invalid CanvasImageSource constructed VideoFrames');
+
+test(t => {
+ let init = {
+ format: 'I420',
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2
+ };
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ ]);
+ let origFrame = new VideoFrame(data, init);
+
+ let cropLeftHalf = new VideoFrame(
+ origFrame, {visibleRect: {x: 0, y: 0, width: 2, height: 2}});
+ assert_equals(cropLeftHalf.codedWidth, origFrame.codedWidth);
+ assert_equals(cropLeftHalf.codedHeight, origFrame.codedHeight);
+ assert_equals(cropLeftHalf.visibleRect.x, 0);
+ assert_equals(cropLeftHalf.visibleRect.y, 0);
+ assert_equals(cropLeftHalf.visibleRect.width, 2);
+ assert_equals(cropLeftHalf.visibleRect.height, 2);
+ assert_equals(cropLeftHalf.displayWidth, 2);
+ assert_equals(cropLeftHalf.displayHeight, 2);
+}, 'Test visibleRect metadata override where source display size = visible size');
+
+test(t => {
+ let init = {
+ format: 'I420',
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ displayWidth: 8,
+ displayHeight: 2
+ };
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ ]);
+ let anamorphicFrame = new VideoFrame(data, init);
+
+ let cropRightFrame = new VideoFrame(
+ anamorphicFrame, {visibleRect: {x: 2, y: 0, width: 2, height: 2}});
+ assert_equals(cropRightFrame.codedWidth, anamorphicFrame.codedWidth);
+ assert_equals(cropRightFrame.codedHeight, anamorphicFrame.codedHeight);
+ assert_equals(cropRightFrame.visibleRect.x, 2);
+ assert_equals(cropRightFrame.visibleRect.y, 0);
+ assert_equals(cropRightFrame.visibleRect.width, 2);
+ assert_equals(cropRightFrame.visibleRect.height, 2);
+ assert_equals(cropRightFrame.displayWidth, 4, 'cropRightFrame.displayWidth');
+ assert_equals(cropRightFrame.displayHeight, 2, 'cropRightFrame.displayHeight');
+}, 'Test visibleRect metadata override where source display width = 2 * visible width (anamorphic)');
+
+test(t => {
+ let init = {
+ format: 'I420',
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ displayWidth: 8,
+ displayHeight: 4
+ };
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ ]);
+ let scaledFrame = new VideoFrame(data, init);
+
+ let cropRightFrame = new VideoFrame(
+ scaledFrame, {visibleRect: {x: 2, y: 0, width: 2, height: 2}});
+ assert_equals(cropRightFrame.codedWidth, scaledFrame.codedWidth);
+ assert_equals(cropRightFrame.codedHeight, scaledFrame.codedHeight);
+ assert_equals(cropRightFrame.visibleRect.x, 2);
+ assert_equals(cropRightFrame.visibleRect.y, 0);
+ assert_equals(cropRightFrame.visibleRect.width, 2);
+ assert_equals(cropRightFrame.visibleRect.height, 2);
+ assert_equals(cropRightFrame.displayWidth, 4, 'cropRightFrame.displayWidth');
+ assert_equals(cropRightFrame.displayHeight, 4, 'cropRightFrame.displayHeight');
+}, 'Test visibleRect metadata override where source display size = 2 * visible size for both width and height');
+
+test(t => {
+ let image = makeImageBitmap(32, 16);
+
+ let scaledFrame = new VideoFrame(image, {
+ visibleRect: {x: 0, y: 0, width: 2, height: 2},
+ displayWidth: 10,
+ displayHeight: 20,
+ timestamp: 0
+ });
+ assert_equals(scaledFrame.codedWidth, 32);
+ assert_equals(scaledFrame.codedHeight, 16);
+ assert_equals(scaledFrame.visibleRect.x, 0);
+ assert_equals(scaledFrame.visibleRect.y, 0);
+ assert_equals(scaledFrame.visibleRect.width, 2);
+ assert_equals(scaledFrame.visibleRect.height, 2);
+ assert_equals(scaledFrame.displayWidth, 10, 'scaledFrame.displayWidth');
+ assert_equals(scaledFrame.displayHeight, 20, 'scaledFrame.displayHeight');
+}, 'Test visibleRect + display size metadata override');
+
+test(t => {
+ let image = makeImageBitmap(32, 16);
+
+ let scaledFrame = new VideoFrame(image,
+ {
+ displayWidth: 10, displayHeight: 20,
+ timestamp: 0
+ });
+ assert_equals(scaledFrame.codedWidth, 32);
+ assert_equals(scaledFrame.codedHeight, 16);
+ assert_equals(scaledFrame.visibleRect.x, 0);
+ assert_equals(scaledFrame.visibleRect.y, 0);
+ assert_equals(scaledFrame.visibleRect.width, 32);
+ assert_equals(scaledFrame.visibleRect.height, 16);
+ assert_equals(scaledFrame.displayWidth, 10, 'scaledFrame.displayWidth');
+ assert_equals(scaledFrame.displayHeight, 20, 'scaledFrame.displayHeight');
+}, 'Test display size metadata override');
+
+test(t => {
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(
+ new Uint8Array(1),
+ {format: 'ABCD', timestamp: 1234, codedWidth: 4, codedHeight: 2}),
+ 'invalid pixel format');
+
+ assert_throws_js(
+ TypeError,
+ () =>
+ new VideoFrame(new Uint32Array(1), {format: 'RGBA', timestamp: 1234}),
+ 'missing coded size');
+
+ function constructFrame(init) {
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ ]);
+ return new VideoFrame(data, {...init, format: 'I420'});
+ }
+
+ assert_throws_js(
+ TypeError, () => constructFrame({
+ timestamp: 1234,
+ codedWidth: Math.pow(2, 32) - 1,
+ codedHeight: Math.pow(2, 32) - 1,
+ }),
+ 'invalid coded size');
+ assert_throws_js(
+ TypeError,
+ () => constructFrame({timestamp: 1234, codedWidth: 4, codedHeight: 0}),
+ 'invalid coded height');
+ assert_throws_js(
+ TypeError,
+ () => constructFrame({timestamp: 1234, codedWidth: 4, codedHeight: 1}),
+ 'odd coded height');
+ assert_throws_js(
+ TypeError,
+ () => constructFrame({timestamp: 1234, codedWidth: 0, codedHeight: 4}),
+ 'invalid coded width');
+ assert_throws_js(
+ TypeError,
+ () => constructFrame({timestamp: 1234, codedWidth: 3, codedHeight: 2}),
+ 'odd coded width');
+ assert_throws_js(
+ TypeError, () => constructFrame({
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ visibleRect: {x: 100, y: 100, width: 1, height: 1}
+ }),
+ 'invalid visible left/right');
+ assert_throws_js(
+ TypeError, () => constructFrame({
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ visibleRect: {x: 0, y: 0, width: 0, height: 2}
+ }),
+ 'invalid visible width');
+ assert_throws_js(
+ TypeError, () => constructFrame({
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ visibleRect: {x: 0, y: 0, width: 2, height: 0}
+ }),
+ 'invalid visible height');
+ assert_throws_js(
+ TypeError, () => constructFrame({
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ visibleRect: {x: 0, y: 0, width: -100, height: -100}
+ }),
+ 'invalid negative visible size');
+ assert_throws_js(
+ TypeError, () => constructFrame({
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ displayWidth: Math.pow(2, 32),
+ }),
+ 'invalid display width');
+ assert_throws_js(
+ TypeError, () => constructFrame({
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ displayWidth: Math.pow(2, 32) - 1,
+ displayHeight: Math.pow(2, 32)
+ }),
+ 'invalid display height');
+}, 'Test invalid buffer constructed VideoFrames');
+
+test(t => {
+ testBufferConstructedI420Frame('Uint8Array(ArrayBuffer)');
+}, 'Test Uint8Array(ArrayBuffer) constructed I420 VideoFrame');
+
+test(t => {
+ testBufferConstructedI420Frame('ArrayBuffer');
+}, 'Test ArrayBuffer constructed I420 VideoFrame');
+
+test(t => {
+ let fmt = 'I420';
+ let vfInit = {
+ format: fmt,
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ colorSpace: {
+ primaries: 'smpte170m',
+ matrix: 'bt470bg',
+ },
+ };
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ ]);
+ let frame = new VideoFrame(data, vfInit);
+ assert_equals(frame.colorSpace.primaries, 'smpte170m', 'color primaries');
+ assert_true(frame.colorSpace.transfer == null, 'color transfer');
+ assert_equals(frame.colorSpace.matrix, 'bt470bg', 'color matrix');
+ assert_true(frame.colorSpace.fullRange == null, 'color range');
+}, 'Test planar constructed I420 VideoFrame with colorSpace');
+
+test(t => {
+ let fmt = 'I420';
+ let vfInit = {
+ format: fmt,
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ colorSpace: {
+ primaries: null,
+ transfer: null,
+ matrix: null,
+ fullRange: null,
+ },
+ };
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ ]);
+ let frame = new VideoFrame(data, vfInit);
+ assert_true(frame.colorSpace.primaries == null, 'color primaries');
+ assert_true(frame.colorSpace.transfer == null, 'color transfer');
+ assert_true(frame.colorSpace.matrix == null, 'color matrix');
+ assert_true(frame.colorSpace.fullRange == null, 'color range');
+}, 'Test planar constructed I420 VideoFrame with null colorSpace values');
+
+test(t => {
+ let fmt = 'I420A';
+ let vfInit = {format: fmt, timestamp: 1234, codedWidth: 4, codedHeight: 2};
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ 8, 7, 6, 5, 4, 3, 2, 1, // a
+ ]);
+ let frame = new VideoFrame(data, vfInit);
+ assert_equals(frame.format, fmt, 'plane format');
+ assert_equals(frame.colorSpace.primaries, 'bt709', 'color primaries');
+ assert_equals(frame.colorSpace.transfer, 'bt709', 'color transfer');
+ assert_equals(frame.colorSpace.matrix, 'bt709', 'color matrix');
+ assert_false(frame.colorSpace.fullRange, 'color range');
+ frame.close();
+
+ // Most constraints are tested as part of I420 above.
+
+ let y = {offset: 0, stride: 4};
+ let u = {offset: 8, stride: 2};
+ let v = {offset: 10, stride: 2};
+ let a = {offset: 12, stride: 4};
+
+ assert_throws_js(TypeError, () => {
+ let a = {offset: 12, stride: 1};
+ let frame = new VideoFrame(data, {...vfInit, layout: [y, u, v, a]});
+ }, 'a stride too small');
+ assert_throws_js(TypeError, () => {
+ let frame = new VideoFrame(data.slice(0, 12), vfInit);
+ }, 'data too small');
+}, 'Test buffer constructed I420+Alpha VideoFrame');
+
+test(t => {
+ let fmt = 'NV12';
+ let vfInit = {format: fmt, timestamp: 1234, codedWidth: 4, codedHeight: 2};
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, 3, 4, // uv
+ ]);
+ let frame = new VideoFrame(data, vfInit);
+ assert_equals(frame.format, fmt, 'plane format');
+ assert_equals(frame.colorSpace.primaries, 'bt709', 'color primaries');
+ assert_equals(frame.colorSpace.transfer, 'bt709', 'color transfer');
+ assert_equals(frame.colorSpace.matrix, 'bt709', 'color matrix');
+ assert_false(frame.colorSpace.fullRange, 'color range');
+ frame.close();
+
+ let y = {offset: 0, stride: 4};
+ let uv = {offset: 8, stride: 4};
+
+ assert_throws_js(TypeError, () => {
+ let y = {offset: 0, stride: 1};
+ let frame = new VideoFrame(data, {...vfInit, layout: [y, uv]});
+ }, 'y stride too small');
+ assert_throws_js(TypeError, () => {
+ let uv = {offset: 8, stride: 1};
+ let frame = new VideoFrame(data, {...vfInit, layout: [y, uv]});
+ }, 'uv stride too small');
+ assert_throws_js(TypeError, () => {
+ let frame = new VideoFrame(data.slice(0, 8), vfInit);
+ }, 'data too small');
+}, 'Test buffer constructed NV12 VideoFrame');
+
+test(t => {
+ let vfInit = {timestamp: 1234, codedWidth: 4, codedHeight: 2};
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ ]);
+ let frame = new VideoFrame(data, {...vfInit, format: 'RGBA'});
+ assert_equals(frame.format, 'RGBA', 'plane format');
+ assert_equals(frame.colorSpace.primaries, 'bt709', 'color primaries');
+ assert_equals(frame.colorSpace.transfer, 'iec61966-2-1', 'color transfer');
+ assert_equals(frame.colorSpace.matrix, 'rgb', 'color matrix');
+ assert_true(frame.colorSpace.fullRange, 'color range');
+ frame.close();
+
+ frame = new VideoFrame(data, {...vfInit, format: 'RGBX'});
+ assert_equals(frame.format, 'RGBX', 'plane format');
+ assert_equals(frame.colorSpace.primaries, 'bt709', 'color primaries');
+ assert_equals(frame.colorSpace.transfer, 'iec61966-2-1', 'color transfer');
+ assert_equals(frame.colorSpace.matrix, 'rgb', 'color matrix');
+ assert_true(frame.colorSpace.fullRange, 'color range');
+ frame.close();
+
+ frame = new VideoFrame(data, {...vfInit, format: 'BGRA'});
+ assert_equals(frame.format, 'BGRA', 'plane format');
+ assert_equals(frame.colorSpace.primaries, 'bt709', 'color primaries');
+ assert_equals(frame.colorSpace.transfer, 'iec61966-2-1', 'color transfer');
+ assert_equals(frame.colorSpace.matrix, 'rgb', 'color matrix');
+ assert_true(frame.colorSpace.fullRange, 'color range');
+ frame.close();
+
+ frame = new VideoFrame(data, {...vfInit, format: 'BGRX'});
+ assert_equals(frame.format, 'BGRX', 'plane format');
+ assert_equals(frame.colorSpace.primaries, 'bt709', 'color primaries');
+ assert_equals(frame.colorSpace.transfer, 'iec61966-2-1', 'color transfer');
+ assert_equals(frame.colorSpace.matrix, 'rgb', 'color matrix');
+ assert_true(frame.colorSpace.fullRange, 'color range');
+ frame.close();
+}, 'Test buffer constructed RGB VideoFrames');
+
+test(t => {
+ let image = makeImageBitmap(32, 16);
+ let frame = new VideoFrame(image, {timestamp: 0});
+ assert_true(!!frame);
+
+ frame_copy = new VideoFrame(frame);
+ assert_equals(frame.format, frame_copy.format);
+ assert_equals(frame.timestamp, frame_copy.timestamp);
+ assert_equals(frame.codedWidth, frame_copy.codedWidth);
+ assert_equals(frame.codedHeight, frame_copy.codedHeight);
+ assert_equals(frame.displayWidth, frame_copy.displayWidth);
+ assert_equals(frame.displayHeight, frame_copy.displayHeight);
+ assert_equals(frame.duration, frame_copy.duration);
+ frame_copy.close();
+
+ frame_copy = new VideoFrame(frame, {duration: 1234});
+ assert_equals(frame.timestamp, frame_copy.timestamp);
+ assert_equals(frame_copy.duration, 1234);
+ frame_copy.close();
+
+ frame_copy = new VideoFrame(frame, {timestamp: 1234, duration: 456});
+ assert_equals(frame_copy.timestamp, 1234);
+ assert_equals(frame_copy.duration, 456);
+ frame_copy.close();
+
+ frame.close();
+}, 'Test VideoFrame constructed VideoFrame');
+
+test(t => {
+ let canvas = makeOffscreenCanvas(16, 16);
+ let frame = new VideoFrame(canvas, {timestamp: 0});
+ assert_equals(frame.displayWidth, 16);
+ assert_equals(frame.displayHeight, 16);
+ frame.close();
+}, 'Test we can construct a VideoFrame from an offscreen canvas.');
+
+test(t => {
+ let fmt = 'I420';
+ let vfInit = {
+ format: fmt,
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2,
+ visibleRect: {x: 0, y: 0, width: 1, height: 1},
+ };
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ 8, 7, 6, 5, 4, 3, 2, 1, // a
+ ]);
+ let frame = new VideoFrame(data, vfInit);
+ assert_equals(frame.format, fmt, 'format');
+ assert_equals(frame.visibleRect.x, 0, 'visibleRect.x');
+ assert_equals(frame.visibleRect.y, 0, 'visibleRect.y');
+ assert_equals(frame.visibleRect.width, 1, 'visibleRect.width');
+ assert_equals(frame.visibleRect.height, 1, 'visibleRect.height');
+ frame.close();
+}, 'Test I420 VideoFrame with odd visible size');
+
+test(t => {
+ let fmt = 'I420A';
+ let vfInit = {format: fmt, timestamp: 1234, codedWidth: 4, codedHeight: 2};
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ 8, 7, 6, 5, 4, 3, 2, 1, // a
+ ]);
+ let frame = new VideoFrame(data, vfInit);
+ assert_equals(frame.format, fmt, 'plane format');
+
+ let alpha_frame_copy = new VideoFrame(frame, {alpha: 'keep'});
+ assert_equals(alpha_frame_copy.format, 'I420A', 'plane format');
+
+ let opaque_frame_copy = new VideoFrame(frame, {alpha: 'discard'});
+ assert_equals(opaque_frame_copy.format, 'I420', 'plane format');
+
+ frame.close();
+ alpha_frame_copy.close();
+ opaque_frame_copy.close();
+}, 'Test I420A VideoFrame and alpha={keep,discard}');
+
+test(t => {
+ let vfInit = {timestamp: 1234, codedWidth: 4, codedHeight: 2};
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
+ ]);
+ let frame = new VideoFrame(data, {...vfInit, format: 'RGBA'});
+ assert_equals(frame.format, 'RGBA', 'plane format');
+
+ let alpha_frame_copy = new VideoFrame(frame, {alpha: 'keep'});
+ assert_equals(alpha_frame_copy.format, 'RGBA', 'plane format');
+
+ let opaque_frame_copy = new VideoFrame(frame, {alpha: 'discard'});
+ assert_equals(opaque_frame_copy.format, 'RGBX', 'plane format');
+
+ alpha_frame_copy.close();
+ opaque_frame_copy.close();
+ frame.close();
+
+ frame = new VideoFrame(data, {...vfInit, format: 'BGRA'});
+ assert_equals(frame.format, 'BGRA', 'plane format');
+
+ alpha_frame_copy = new VideoFrame(frame, {alpha: 'keep'});
+ assert_equals(alpha_frame_copy.format, 'BGRA', 'plane format');
+
+ opaque_frame_copy = new VideoFrame(frame, {alpha: 'discard'});
+ assert_equals(opaque_frame_copy.format, 'BGRX', 'plane format');
+
+ alpha_frame_copy.close();
+ opaque_frame_copy.close();
+ frame.close();
+}, 'Test RGBA, BGRA VideoFrames with alpha={keep,discard}');
+
+test(t => {
+ let canvas = makeOffscreenCanvas(16, 16, {alpha: true});
+ let frame = new VideoFrame(canvas, {timestamp: 0});
+ assert_true(
+ frame.format == 'RGBA' || frame.format == 'BGRA' ||
+ frame.format == 'I420A',
+ 'plane format should have alpha: ' + frame.format);
+ frame.close();
+
+ frame = new VideoFrame(canvas, {alpha: 'discard', timestamp: 0});
+ assert_true(
+ frame.format == 'RGBX' || frame.format == 'BGRX' ||
+ frame.format == 'I420',
+ 'plane format should not have alpha: ' + frame.format);
+ frame.close();
+}, 'Test a VideoFrame constructed from canvas can drop the alpha channel.');
+
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginIsolated.https.any.js b/testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginIsolated.https.any.js
new file mode 100644
index 0000000000..f5af5c0296
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginIsolated.https.any.js
@@ -0,0 +1,11 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+// META: script=/webcodecs/videoFrame-utils.js
+
+test(t => {
+ testBufferConstructedI420Frame('SharedArrayBuffer');
+}, 'Test SharedArrayBuffer constructed I420 VideoFrame');
+
+test(t => {
+ testBufferConstructedI420Frame('Uint8Array(SharedArrayBuffer)');
+}, 'Test Uint8Array(SharedArrayBuffer) constructed I420 VideoFrame');
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginIsolated.https.any.js.headers b/testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginIsolated.https.any.js.headers
new file mode 100644
index 0000000000..985da71a2b
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginIsolated.https.any.js.headers
@@ -0,0 +1,3 @@
+Cross-Origin-Embedder-Policy: require-corp
+Cross-Origin-Opener-Policy: same-origin
+
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginSource.sub.html b/testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginSource.sub.html
new file mode 100644
index 0000000000..62f2bd934f
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-construction.crossOriginSource.sub.html
@@ -0,0 +1,198 @@
+
+<!DOCTYPE html>
+<html>
+<head>
+ <script src='/resources/testharness.js'></script>
+ <script src='/resources/testharnessreport.js'></script>
+ <script src="/common/get-host-info.sub.js"></script>
+</head>
+<body>
+<script>
+const SAMEORIGIN_BASE = get_host_info().HTTP_ORIGIN;
+const CROSSORIGIN_BASE = get_host_info().HTTP_NOTSAMESITE_ORIGIN;
+
+const TESTS = [
+ // HTMLImageElement
+ {
+ title: 'Test creating a VideoFrame with a same-origin HTMLImageElement',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const image = new Image();
+ image.onload = () => resolve(image);
+ image.onerror = reject;
+ image.src = SAMEORIGIN_BASE + '/webcodecs/four-colors.jpg';
+ });
+ },
+ should_throw: false,
+ },
+ {
+ title: 'Test creating a VideoFrame with a cross-origin HTMLImageElement',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const image = new Image();
+ image.onload = () => resolve(image);
+ image.onerror = reject;
+ image.src = CROSSORIGIN_BASE + '/webcodecs/four-colors.jpg';
+ });
+ },
+ should_throw: true,
+ },
+ {
+ title: 'Test creating a VideoFrame with a CORS enabled cross-origin HTMLImageElement without setting crossorigin',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const image = new Image();
+ image.onload = () => resolve(image);
+ image.onerror = reject;
+ image.src = CROSSORIGIN_BASE + '/webcodecs/four-colors.jpg?pipe=header(Access-Control-Allow-Origin,*)';
+ });
+ },
+ should_throw: true,
+ },
+ {
+ title: 'Test creating a VideoFrame with a CORS enabled cross-origin HTMLImageElement with crossorigin="anonymous"',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const image = new Image();
+ image.onload = () => resolve(image);
+ image.onerror = reject;
+ image.crossOrigin = 'anonymous';
+ image.src = CROSSORIGIN_BASE + '/webcodecs/four-colors.jpg?pipe=header(Access-Control-Allow-Origin,*)';
+ });
+ },
+ should_throw: false,
+ },
+ // SVGImageElement
+ {
+ title: 'Test creating a VideoFrame with a same-origin SVGImageElement',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const image = document.createElementNS('http://www.w3.org/2000/svg','image');
+ image.onload = () => resolve(image);
+ image.onerror = reject;
+ image.setAttribute('href', SAMEORIGIN_BASE + '/webcodecs/four-colors.jpg');
+ });
+ },
+ should_throw: false,
+ },
+ {
+ title: 'Test creating a VideoFrame with a cross-origin SVGImageElement',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const image = document.createElementNS('http://www.w3.org/2000/svg','image');
+ image.onload = () => resolve(image);
+ image.onerror = reject;
+ image.setAttribute('href', CROSSORIGIN_BASE + '/webcodecs/four-colors.jpg');
+ });
+ },
+ should_throw: true,
+ },
+ {
+ title: 'Test creating a VideoFrame with a CORS enabled cross-origin SVGImageElement without setting crossorigin',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const image = document.createElementNS('http://www.w3.org/2000/svg','image');
+ image.onload = () => resolve(image);
+ image.onerror = reject;
+ image.setAttribute('href', CROSSORIGIN_BASE + '/webcodecs/four-colors.jpg?pipe=header(Access-Control-Allow-Origin,*)');
+ });
+ },
+ should_throw: true,
+ },
+ {
+ title: 'Test creating a VideoFrame with a CORS enabled cross-origin SVGImageElement with crossorigin="anonymous"',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const image = document.createElementNS('http://www.w3.org/2000/svg','image');
+ image.onload = () => resolve(image);
+ image.onerror = reject;
+ image.crossOrigin = 'anonymous';
+ image.setAttribute('href', CROSSORIGIN_BASE + '/webcodecs/four-colors.jpg?pipe=header(Access-Control-Allow-Origin,*)');
+ });
+ },
+ should_throw: () => {
+ // SVGImageElement.crossOrigin is not standardized.
+ const image = document.createElementNS('http://www.w3.org/2000/svg','image');
+ return !('crossOrigin' in image);
+ },
+ },
+ // HTMLVideoElement
+ {
+ title: 'Test creating a VideoFrame with a same-origin HTMLVideoElement',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const video = document.createElement('video');
+ on_frame_available(video, () => resolve(video));
+ video.onerror = reject;
+ video.src = SAMEORIGIN_BASE + '/webcodecs/av1.mp4';
+ });
+ },
+ should_throw: false,
+ },
+ {
+ title: 'Test creating a VideoFrame with a cross-origin HTMLVideoElement',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const video = document.createElement('video');
+ on_frame_available(video, () => resolve(video));
+ video.onerror = reject;
+ video.src = CROSSORIGIN_BASE + '/webcodecs/av1.mp4';
+ });
+ },
+ should_throw: true,
+ },
+ {
+ title: 'Test creating a VideoFrame with a CORS enabled cross-origin HTMLVideoElement without setting crossorigin',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const video = document.createElement('video');
+ on_frame_available(video, () => resolve(video));
+ video.onerror = reject;
+ video.src = CROSSORIGIN_BASE + '/webcodecs/av1.mp4?pipe=header(Access-Control-Allow-Origin,*)';
+ });
+ },
+ should_throw: true,
+ },
+ {
+ title: 'Test creating a VideoFrame with a CORS enabled cross-origin HTMLVideoElement with crossorigin="anonymous"',
+ factory: () => {
+ return new Promise((resolve, reject) => {
+ const video = document.createElement('video');
+ on_frame_available(video, () => resolve(video));
+ video.onerror = reject;
+ video.crossOrigin = 'anonymous';
+ video.src = CROSSORIGIN_BASE + '/webcodecs/av1.mp4?pipe=header(Access-Control-Allow-Origin,*)';
+ });
+ },
+ should_throw: false,
+ },
+];
+
+TESTS.forEach(test => run_test(test));
+
+function run_test(test) {
+ promise_test(async t => {
+ const source = await test.factory();
+ if (test.should_throw) {
+ assert_throws_dom('SecurityError', () => { create_frame(source); });
+ } else {
+ create_frame(source);
+ }
+ }, test.title);
+}
+
+function create_frame(img) {
+ let frame = new VideoFrame(img, {timestamp: 0});
+ frame.close();
+}
+
+function on_frame_available(video, callback) {
+ if ('requestVideoFrameCallback' in video)
+ video.requestVideoFrameCallback(callback);
+ else
+ video.onloadeddata = callback;
+}
+
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-construction.window.js b/testing/web-platform/tests/webcodecs/videoFrame-construction.window.js
new file mode 100644
index 0000000000..02f8421552
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-construction.window.js
@@ -0,0 +1,21 @@
+// META: script=/webcodecs/videoFrame-utils.js
+
+promise_test(async t => {
+ let imgElement = document.createElement('img');
+ let loadPromise = new Promise(r => imgElement.onload = r);
+ imgElement.src = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==';
+ await loadPromise;
+ verifyTimestampRequiredToConstructFrame(imgElement);
+}, 'Test that timestamp is required when constructing VideoFrame from HTMLImageElement');
+
+promise_test(async t => {
+ let svgImageElement = document.createElementNS('http://www.w3.org/2000/svg','image');
+ let loadPromise = new Promise(r => svgImageElement.onload = r);
+ svgImageElement.setAttributeNS('http://www.w3.org/1999/xlink','href','data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==');
+ await loadPromise;
+ verifyTimestampRequiredToConstructFrame(svgImageElement);
+}, 'Test that timestamp is required when constructing VideoFrame from SVGImageElement');
+
+promise_test(async t => {
+ verifyTimestampRequiredToConstructFrame(document.createElement('canvas'))
+}, 'Test that timestamp is required when constructing VideoFrame from HTMLCanvasElement');
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-copyTo.any.js b/testing/web-platform/tests/webcodecs/videoFrame-copyTo.any.js
new file mode 100644
index 0000000000..79ba3c6304
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-copyTo.any.js
@@ -0,0 +1,322 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/videoFrame-utils.js
+
+function makeRGBA_2x2() {
+ const data = new Uint8Array([
+ 1,2,3,4, 5,6,7,8,
+ 9,10,11,12, 13,14,15,16,
+ ]);
+ const init = {
+ format: 'RGBA',
+ timestamp: 0,
+ codedWidth: 2,
+ codedHeight: 2,
+ };
+ return new VideoFrame(data, init);
+}
+
+const NV12_DATA = new Uint8Array([
+ 1, 2, 3, 4, // y
+ 5, 6, 7, 8,
+ 9, 10, 11, 12 // uv
+ ]);
+
+function makeNV12_4x2() {
+ const init = {
+ format: 'NV12',
+ timestamp: 0,
+ codedWidth: 4,
+ codedHeight: 2,
+ };
+ return new VideoFrame(NV12_DATA, init);
+}
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ frame.close();
+
+ assert_throws_dom('InvalidStateError', () => frame.allocationSize(), 'allocationSize()');
+
+ let data = new Uint8Array(12);
+ await promise_rejects_dom(t, 'InvalidStateError', frame.copyTo(data), 'copyTo()');
+}, 'Test closed frame.');
+
+promise_test(async t => {
+ const destination = new ArrayBuffer(I420_DATA.length);
+ await testI420_4x2_copyTo(destination);
+}, 'Test copying I420 frame to a non-shared ArrayBuffer');
+
+promise_test(async t => {
+ const destination = new Uint8Array(I420_DATA.length);
+ await testI420_4x2_copyTo(destination);
+}, 'Test copying I420 frame to a non-shared ArrayBufferView');
+
+promise_test(async t => {
+ const frame = makeRGBA_2x2();
+ const expectedLayout = [
+ {offset: 0, stride: 8},
+ ];
+ const expectedData = new Uint8Array([
+ 1,2,3,4, 5,6,7,8,
+ 9,10,11,12, 13,14,15,16,
+ ]);
+ assert_equals(frame.allocationSize(), expectedData.length, 'allocationSize()');
+ const data = new Uint8Array(expectedData.length);
+ const layout = await frame.copyTo(data);
+ assert_layout_equals(layout, expectedLayout);
+ assert_buffer_equals(data, expectedData);
+}, 'Test RGBA frame.');
+
+promise_test(async t => {
+ const frame = makeNV12_4x2();
+ const expectedLayout = [
+ {offset: 0, stride: 4},
+ {offset: 8, stride: 4},
+ ];
+ const expectedData = new Uint8Array([
+ 1,2,3,4,
+ 5,6,7,8,
+ 9,10,11,12
+ ]);
+ assert_equals(frame.allocationSize(), expectedData.length, 'allocationSize()');
+ const data = new Uint8Array(expectedData.length);
+ const layout = await frame.copyTo(data);
+ assert_layout_equals(layout, expectedLayout);
+ assert_buffer_equals(data, expectedData);
+}, 'Test NV12 frame.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const data = new Uint8Array(11);
+ await promise_rejects_js(t, TypeError, frame.copyTo(data));
+}, 'Test undersized buffer.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const options = {
+ layout: [{offset: 0, stride: 4}],
+ };
+ assert_throws_js(TypeError, () => frame.allocationSize(options));
+ const data = new Uint8Array(12);
+ await promise_rejects_js(t, TypeError, frame.copyTo(data, options));
+}, 'Test incorrect plane count.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const options = {
+ layout: [
+ {offset: 4, stride: 4},
+ {offset: 0, stride: 2},
+ {offset: 2, stride: 2},
+ ],
+ };
+ const expectedData = new Uint8Array([
+ 9, 10, // u
+ 11, 12, // v
+ 1, 2, 3, 4, // y
+ 5, 6, 7, 8,
+ ]);
+ assert_equals(frame.allocationSize(options), expectedData.length, 'allocationSize()');
+ const data = new Uint8Array(expectedData.length);
+ const layout = await frame.copyTo(data, options);
+ assert_layout_equals(layout, options.layout);
+ assert_buffer_equals(data, expectedData);
+}, 'Test I420 stride and offset work.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const options = {
+ layout: [
+ {offset: 9, stride: 5},
+ {offset: 1, stride: 3},
+ {offset: 5, stride: 3},
+ ],
+ };
+ const expectedData = new Uint8Array([
+ 0,
+ 9, 10, 0, // u
+ 0,
+ 11, 12, 0, // v
+ 0,
+ 1, 2, 3, 4, 0, // y
+ 5, 6, 7, 8, 0,
+ ]);
+ assert_equals(frame.allocationSize(options), expectedData.length, 'allocationSize()');
+ const data = new Uint8Array(expectedData.length);
+ const layout = await frame.copyTo(data, options);
+ assert_layout_equals(layout, options.layout);
+ assert_buffer_equals(data, expectedData);
+}, 'Test I420 stride and offset with padding.');
+
+promise_test(async t => {
+ const init = {
+ format: 'I420A',
+ timestamp: 0,
+ codedWidth: 4,
+ codedHeight: 2,
+ };
+ const buf = new Uint8Array([
+ 1, 2, 3, 4, // y
+ 5, 6, 7, 8,
+ 9, 10, // u
+ 11, 12, // v
+ 13, 14, 15, 16, // a
+ 17, 18, 19, 20,
+ ]);
+ const frame = new VideoFrame(buf, init);
+ const options = {
+ layout: [
+ {offset: 12, stride: 4},
+ {offset: 8, stride: 2},
+ {offset: 10, stride: 2},
+ {offset: 0, stride: 4},
+ ],
+ };
+ const expectedData = new Uint8Array([
+ 13, 14, 15, 16, // a
+ 17, 18, 19, 20,
+ 9, 10, // u
+ 11, 12, // v
+ 1, 2, 3, 4, // y
+ 5, 6, 7, 8,
+ ]);
+ assert_equals(frame.allocationSize(options), expectedData.length, 'allocationSize()');
+ const data = new Uint8Array(expectedData.length);
+ const layout = await frame.copyTo(data, options);
+ assert_layout_equals(layout, options.layout);
+ assert_buffer_equals(data, expectedData);
+}, 'Test I420A stride and offset work.');
+
+promise_test(async t => {
+ const init = {
+ format: 'NV12',
+ timestamp: 0,
+ codedWidth: 4,
+ codedHeight: 2,
+ };
+ const buf = new Uint8Array([
+ 1, 2, 3, 4, // y
+ 5, 6, 7, 8,
+ 9, 10, 11, 12 // uv
+ ]);
+ const frame = new VideoFrame(buf, init);
+ const options = {
+ layout: [
+ {offset: 4, stride: 4},
+ {offset: 0, stride: 4},
+ ],
+ };
+ const expectedData = new Uint8Array([
+ 9, 10, 11, 12, // uv
+ 1, 2, 3, 4, // y
+ 5, 6, 7, 8
+ ]);
+ assert_equals(frame.allocationSize(options), expectedData.length, 'allocationSize()');
+ const data = new Uint8Array(expectedData.length);
+ const layout = await frame.copyTo(data, options);
+ assert_layout_equals(layout, options.layout);
+ assert_buffer_equals(data, expectedData);
+}, 'Test NV12 stride and offset work.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const options = {
+ layout: [
+ {offset: 0, stride: 1},
+ {offset: 8, stride: 2},
+ {offset: 10, stride: 2},
+ ],
+ };
+ assert_throws_js(TypeError, () => frame.allocationSize(options));
+ const data = new Uint8Array(12);
+ await promise_rejects_js(t, TypeError, frame.copyTo(data, options));
+}, 'Test invalid stride.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const options = {
+ layout: [
+ {offset: 0, stride: 4},
+ {offset: 8, stride: 2},
+ {offset: 2 ** 32 - 2, stride: 2},
+ ],
+ };
+ assert_throws_js(TypeError, () => frame.allocationSize(options));
+ const data = new Uint8Array(12);
+ await promise_rejects_js(t, TypeError, frame.copyTo(data, options));
+}, 'Test address overflow.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const options = {
+ rect: frame.codedRect,
+ };
+ const expectedLayout = [
+ {offset: 0, stride: 4},
+ {offset: 8, stride: 2},
+ {offset: 10, stride: 2},
+ ];
+ const expectedData = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 9, 10, // u
+ 11, 12 // v
+ ]);
+ assert_equals(frame.allocationSize(options), expectedData.length, 'allocationSize()');
+ const data = new Uint8Array(expectedData.length);
+ const layout = await frame.copyTo(data, options);
+ assert_layout_equals(layout, expectedLayout);
+ assert_buffer_equals(data, expectedData);
+}, 'Test codedRect.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const options = {
+ rect: {x: 0, y: 0, width: 4, height: 0},
+ };
+ assert_throws_js(TypeError, () => frame.allocationSize(options));
+ const data = new Uint8Array(12);
+ await promise_rejects_js(t, TypeError, frame.copyTo(data, options));
+}, 'Test empty rect.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const options = {
+ rect: {x: 0, y: 0, width: 4, height: 1},
+ };
+ assert_throws_js(TypeError, () => frame.allocationSize(options));
+ const data = new Uint8Array(12);
+ await promise_rejects_js(t, TypeError, frame.copyTo(data, options));
+}, 'Test unaligned rect.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const options = {
+ rect: {x: 2, y: 0, width: 2, height: 2},
+ };
+ const expectedLayout = [
+ {offset: 0, stride: 2},
+ {offset: 4, stride: 1},
+ {offset: 5, stride: 1},
+ ];
+ const expectedData = new Uint8Array([
+ 3, 4, // y
+ 7, 8,
+ 10, // u
+ 12 // v
+ ]);
+ assert_equals(frame.allocationSize(options), expectedData.length, 'allocationSize()');
+ const data = new Uint8Array(expectedData.length);
+ const layout = await frame.copyTo(data, options);
+ assert_layout_equals(layout, expectedLayout);
+ assert_buffer_equals(data, expectedData);
+}, 'Test left crop.');
+
+promise_test(async t => {
+ const frame = makeI420_4x2();
+ const options = {
+ rect: {x: 0, y: 0, width: 4, height: 4},
+ };
+ assert_throws_js(TypeError, () => frame.allocationSize(options));
+ const data = new Uint8Array(12);
+ await promise_rejects_js(t, TypeError, frame.copyTo(data, options));
+}, 'Test invalid rect.');
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-copyTo.crossOriginIsolated.https.any.js b/testing/web-platform/tests/webcodecs/videoFrame-copyTo.crossOriginIsolated.https.any.js
new file mode 100644
index 0000000000..bde3bfae03
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-copyTo.crossOriginIsolated.https.any.js
@@ -0,0 +1,18 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/videoFrame-utils.js
+
+promise_test(async t => {
+  // *.headers file should ensure we serve COOP and COEP headers.
+ assert_true(self.crossOriginIsolated,
+ "Cross origin isolation is required to construct SharedArrayBuffer");
+ const destination = new SharedArrayBuffer(I420_DATA.length);
+ await testI420_4x2_copyTo(destination);
+}, 'Test copying I420 frame to SharedArrayBuffer.');
+
+promise_test(async t => {
+  // *.headers file should ensure we serve COOP and COEP headers.
+ assert_true(self.crossOriginIsolated,
+ "Cross origin isolation is required to construct SharedArrayBuffer");
+ const destination = new Uint8Array(new SharedArrayBuffer(I420_DATA.length));
+ await testI420_4x2_copyTo(destination);
+}, 'Test copying I420 frame to shared ArrayBufferView.');
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-copyTo.crossOriginIsolated.https.any.js.headers b/testing/web-platform/tests/webcodecs/videoFrame-copyTo.crossOriginIsolated.https.any.js.headers
new file mode 100644
index 0000000000..5f8621ef83
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-copyTo.crossOriginIsolated.https.any.js.headers
@@ -0,0 +1,2 @@
+Cross-Origin-Embedder-Policy: require-corp
+Cross-Origin-Opener-Policy: same-origin
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-createImageBitmap.any.js b/testing/web-platform/tests/webcodecs/videoFrame-createImageBitmap.any.js
new file mode 100644
index 0000000000..8369713623
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-createImageBitmap.any.js
@@ -0,0 +1,28 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+promise_test(() => {
+ return testImageBitmapToAndFromVideoFrame(48, 36, kSRGBPixel);
+}, 'ImageBitmap<->VideoFrame with canvas(48x36 srgb uint8).');
+
+promise_test(() => {
+ return testImageBitmapToAndFromVideoFrame(480, 360, kSRGBPixel);
+}, 'ImageBitmap<->VideoFrame with canvas(480x360 srgb uint8).');
+
+promise_test(async () => {
+ const width = 128;
+ const height = 128;
+ let vfInit = {format: 'RGBA', timestamp: 0,
+ codedWidth: width, codedHeight: height,
+ displayWidth: width / 2, displayHeight: height / 2};
+ let data = new Uint32Array(vfInit.codedWidth * vfInit.codedHeight);
+ data.fill(0xFF966432); // 'rgb(50, 100, 150)';
+ let frame = new VideoFrame(data, vfInit);
+
+ let bitmap = await createImageBitmap(frame);
+ frame.close();
+
+ assert_equals(bitmap.width, width / 2);
+ assert_equals(bitmap.height, height / 2);
+ bitmap.close();
+}, 'createImageBitmap uses frame display size');
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-createImageBitmap.https.any.js b/testing/web-platform/tests/webcodecs/videoFrame-createImageBitmap.https.any.js
new file mode 100644
index 0000000000..8bcff0e5e6
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-createImageBitmap.https.any.js
@@ -0,0 +1,84 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+function testCreateImageBitmapFromVideoFrameVP9Decoder() {
+ // Prefers hardware decoders by setting video size as large as 720p.
+ const width = 1280;
+ const height = 720;
+
+ let canvas = new OffscreenCanvas(width, height);
+ let ctx = canvas.getContext('2d');
+ ctx.fillStyle = 'rgb(50, 100, 150)';
+ ctx.fillRect(0, 0, width, height);
+
+ return createImageBitmap(canvas).then((fromImageBitmap) => {
+ let videoFrame = new VideoFrame(fromImageBitmap, {
+ timestamp: 0
+ });
+ return new Promise((resolve, reject) => {
+ let processVideoFrame = (frame) => {
+ createImageBitmap(frame).then((toImageBitmap) => {
+ let myCanvas = new OffscreenCanvas(width, height);
+ let myCtx = myCanvas.getContext('2d');
+ myCtx.drawImage(toImageBitmap, 0, 0);
+ let tolerance = 10;
+ try {
+ testCanvas(myCtx, width, height, kSRGBPixel, null,
+ (actual, expected) => {
+ assert_approx_equals(actual, expected, tolerance);
+ }
+ );
+ } catch (error) {
+ reject(error);
+ }
+ resolve('Done.');
+ });
+ };
+
+ const decoderInit = {
+ output: processVideoFrame,
+ error: (e) => {
+ reject(e);
+ }
+ };
+
+ const encodedVideoConfig = {
+ codec: "vp09.00.10.08",
+ };
+
+ let decoder = new VideoDecoder(decoderInit);
+ decoder.configure(encodedVideoConfig);
+
+ let processVideoChunk = (chunk) => {
+ decoder.decode(chunk);
+ decoder.flush();
+ };
+
+ const encoderInit = {
+ output: processVideoChunk,
+ error: (e) => {
+ reject(e);
+ }
+ };
+
+ const videoEncoderConfig = {
+ codec: "vp09.00.10.08",
+ width: width,
+ height: height,
+ bitrate: 10e6,
+ framerate: 30
+ };
+
+ let encoder = new VideoEncoder(encoderInit);
+ encoder.configure(videoEncoderConfig);
+ encoder.encode(videoFrame, {
+ keyFrame: true
+ });
+ encoder.flush();
+ });
+ });
+}
+
+promise_test(() => {
+ return testCreateImageBitmapFromVideoFrameVP9Decoder();
+}, 'Create ImageBitmap for a VideoFrame from VP9 decoder.');
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-drawImage.any.js b/testing/web-platform/tests/webcodecs/videoFrame-drawImage.any.js
new file mode 100644
index 0000000000..9830181c4f
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-drawImage.any.js
@@ -0,0 +1,104 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+
+function testDrawImageFromVideoFrame(
+ width, height, expectedPixel, canvasOptions, imageSetting) {
+ let vfInit =
+ {format: 'RGBA', timestamp: 0, codedWidth: width, codedHeight: height};
+ let data = new Uint32Array(vfInit.codedWidth * vfInit.codedHeight);
+ data.fill(0xFF966432); // 'rgb(50, 100, 150)';
+ let frame = new VideoFrame(data, vfInit);
+ let canvas = new OffscreenCanvas(width, height);
+ let ctx = canvas.getContext('2d', canvasOptions);
+ ctx.drawImage(frame, 0, 0);
+ testCanvas(ctx, width, height, expectedPixel, imageSetting, assert_equals);
+ frame.close();
+}
+
+test(_ => {
+ return testDrawImageFromVideoFrame(48, 36, kSRGBPixel);
+}, 'drawImage(VideoFrame) with canvas(48x36 srgb uint8).');
+
+test(_ => {
+ return testDrawImageFromVideoFrame(480, 360, kSRGBPixel);
+}, 'drawImage(VideoFrame) with canvas(480x360 srgb uint8).');
+
+test(_ => {
+ return testDrawImageFromVideoFrame(
+ 48, 36, kP3Pixel, kCanvasOptionsP3Uint8, {colorSpaceConversion: 'none'},
+ kImageSettingOptionsP3Uint8);
+}, 'drawImage(VideoFrame) with canvas(48x36 display-p3 uint8).');
+
+test(_ => {
+ return testDrawImageFromVideoFrame(
+ 480, 360, kP3Pixel, kCanvasOptionsP3Uint8, {colorSpaceConversion: 'none'},
+ kImageSettingOptionsP3Uint8);
+}, 'drawImage(VideoFrame) with canvas(480x360 display-p3 uint8).');
+
+test(_ => {
+ return testDrawImageFromVideoFrame(
+ 48, 36, kRec2020Pixel, kCanvasOptionsRec2020Uint8,
+ {colorSpaceConversion: 'none'}, kImageSettingOptionsRec2020Uint8);
+}, 'drawImage(VideoFrame) with canvas(48x36 rec2020 uint8).');
+
+test(_ => {
+ let width = 128;
+ let height = 128;
+ let vfInit =
+ {format: 'RGBA', timestamp: 0, codedWidth: width, codedHeight: height};
+ let data = new Uint32Array(vfInit.codedWidth * vfInit.codedHeight);
+ data.fill(0xFF966432); // 'rgb(50, 100, 150)';
+ let frame = new VideoFrame(data, vfInit);
+ let canvas = new OffscreenCanvas(width, height);
+ let ctx = canvas.getContext('2d');
+
+ frame.close();
+ assert_throws_dom('InvalidStateError', _ => {
+ ctx.drawImage(frame, 0, 0);
+ }, 'drawImage with a closed VideoFrame should throw InvalidStateError.');
+}, 'drawImage on a closed VideoFrame throws InvalidStateError.');
+
+
+test(_ => {
+ let canvas = new OffscreenCanvas(128, 128);
+ let ctx = canvas.getContext('2d');
+
+ let init = {alpha: 'discard', timestamp: 33090};
+ let frame = new VideoFrame(canvas, {timestamp: 0});
+ let frame2 = new VideoFrame(frame, init);
+ let frame3 = new VideoFrame(frame2, init);
+
+ ctx.drawImage(frame3, 0, 0);
+ frame.close();
+ frame2.close();
+ frame3.close();
+}, 'drawImage of nested frame works properly');
+
+test(_ => {
+ const width = 128;
+ const height = 128;
+ let vfInit = {format: 'RGBA', timestamp: 0,
+ codedWidth: width, codedHeight: height,
+ displayWidth: width / 2, displayHeight: height / 2};
+ let data = new Uint32Array(vfInit.codedWidth * vfInit.codedHeight);
+ data.fill(0xFF966432); // 'rgb(50, 100, 150)';
+ let frame = new VideoFrame(data, vfInit);
+ let canvas = new OffscreenCanvas(width, height);
+ let ctx = canvas.getContext('2d');
+ ctx.fillStyle = "#FFFFFF";
+ ctx.fillRect(0, 0, width, height);
+ ctx.drawImage(frame, 0, 0);
+ frame.close();
+
+ function peekPixel(ctx, x, y) {
+ return ctx.getImageData(x, y, 1, 1).data;
+ }
+
+ assert_array_equals(peekPixel(ctx, 0, 0), [50, 100, 150, 255]);
+ assert_array_equals(peekPixel(ctx, width / 2 - 1, height / 2 - 1),
+ [50, 100, 150, 255]);
+ assert_array_equals(peekPixel(ctx, width / 2 + 1, height / 2 + 1),
+ [255, 255, 255, 255]);
+ assert_array_equals(peekPixel(ctx, width - 1, height - 1),
+ [255, 255, 255, 255]);
+}, 'drawImage with display size != visible size'); \ No newline at end of file
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.helper.html b/testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.helper.html
new file mode 100644
index 0000000000..8e751632a1
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.helper.html
@@ -0,0 +1,23 @@
+<!DOCTYPE html>
+<html>
+<body>
+<p id='location'></p>
+<div id='log'></div>
+<script>
+ document.querySelector('#location').innerHTML = window.origin;
+ let received = new Map();
+ window.onmessage = (e) => {
+ let msg = e.data + ' (from ' + e.origin + ')';
+ document.querySelector('#log').innerHTML += '<p>' + msg + '<p>';
+ if (e.data.hasOwnProperty('id')) {
+ e.source.postMessage(
+ received.get(e.data.id) ? 'RECEIVED' : 'NOT_RECEIVED', '*');
+ return;
+ }
+ if (e.data.toString() == '[object VideoFrame]') {
+ received.set(e.data.timestamp, e.data);
+ }
+ };
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.https.html b/testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.https.html
new file mode 100644
index 0000000000..8fe7cf44cc
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.https.html
@@ -0,0 +1,234 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <script src='/resources/testharness.js'></script>
+ <script src='/resources/testharnessreport.js'></script>
+ <script src='/common/get-host-info.sub.js'></script>
+ <script src='/webcodecs/utils.js'></script>
+ <script id='workerCode' type='javascript/worker'>
+ self.onmessage = (e) => {
+ let frame = e.data.frame;
+ if (e.data.transfer) {
+ postMessage(frame, [frame]);
+ } else {
+ postMessage(frame);
+ }
+ };
+ </script>
+ <script id='sharedWorkerCode' type='javascript/worker'>
+ const data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16,
+ ]);
+ let received = new Map();
+ self.onconnect = function (event) {
+ const port = event.ports[0];
+ port.onmessage = function (e) {
+ if (e.data == 'create-frame') {
+ let frameOrError = null;
+ try {
+ frameOrError = new VideoFrame(data, {
+ timestamp: 0,
+ codedWidth: 2,
+ codedHeight: 2,
+ format: 'RGBA',
+ });
+ } catch (error) {
+ frameOrError = error
+ }
+ port.postMessage(frameOrError);
+ return;
+ }
+ if (e.data.hasOwnProperty('id')) {
+ port.postMessage(
+ received.get(e.data.id) ? 'RECEIVED' : 'NOT_RECEIVED');
+ return;
+ }
+ if (e.data.toString() == '[object VideoFrame]') {
+ received.set(e.data.timestamp, e.data);
+ }
+ };
+ };
+ </script>
+</head>
+<body>
+<script>
+const HELPER = '/webcodecs/videoFrame-serialization.crossAgentCluster.helper.html';
+const SAMEORIGIN_BASE = get_host_info().HTTPS_ORIGIN;
+const CROSSORIGIN_BASE = get_host_info().HTTPS_NOTSAMESITE_ORIGIN;
+const SAMEORIGIN_HELPER = SAMEORIGIN_BASE + HELPER;
+const CROSSORIGIN_HELPER = CROSSORIGIN_BASE + HELPER;
+
+promise_test(async () => {
+ const target = (await appendIframe(SAMEORIGIN_HELPER)).contentWindow;
+ let frame = createVideoFrame(10);
+ assert_true(await canSerializeVideoFrame(target, frame));
+}, 'Verify frames can be passed within the same agent clusters');
+
+promise_test(async () => {
+ const target = (await appendIframe(CROSSORIGIN_HELPER)).contentWindow;
+ let frame = createVideoFrame(20);
+ assert_false(await canSerializeVideoFrame(target, frame));
+}, 'Verify frames cannot be passed accross the different agent clusters');
+
+promise_test(async () => {
+ const blob = new Blob([document.querySelector('#workerCode').textContent], {
+ type: 'text/javascript',
+ });
+ const worker = new Worker(window.URL.createObjectURL(blob));
+ let frame = createVideoFrame(30);
+ worker.postMessage({frame: frame, transfer: false});
+ const received = await new Promise(resolve => worker.onmessage = e => {
+ resolve(e.data);
+ });
+ assert_equals(received.toString(), '[object VideoFrame]');
+ assert_equals(received.timestamp, 30);
+}, 'Verify frames can be passed back and forth between main and worker');
+
+promise_test(async () => {
+ const blob = new Blob([document.querySelector('#sharedWorkerCode').textContent], {
+ type: 'text/javascript',
+ });
+ const worker = new SharedWorker(window.URL.createObjectURL(blob));
+ let frame = createVideoFrame(40);
+ worker.port.postMessage(frame);
+ worker.port.postMessage({'id': 40});
+ const received = await new Promise(resolve => worker.port.onmessage = e => {
+ resolve(e.data);
+ });
+ assert_equals(received, 'NOT_RECEIVED');
+}, 'Verify frames cannot be passed to sharedworker');
+
+promise_test(async () => {
+ navigator.serviceWorker.register('videoFrame-serialization.crossAgentCluster.serviceworker.js');
+ navigator.serviceWorker.ready.then((registration) => {
+ let frame = createVideoFrame(50);
+ registration.active.postMessage(frame);
+ registration.active.postMessage({'id': 50});
+ });
+ const received = await new Promise(resolve => navigator.serviceWorker.onmessage = (e) => {
+ resolve(e.data);
+ });
+ assert_equals(received, 'NOT_RECEIVED');
+}, 'Verify frames cannot be passed to serviceworker');
+
+promise_test(async () => {
+ const target = (await appendIframe(SAMEORIGIN_HELPER)).contentWindow;
+ let frame = createVideoFrame(60);
+ assert_true(await canTransferVideoFrame(target, frame));
+ assert_true(isFrameClosed(frame));
+}, 'Verify frames can be transferred within the same agent clusters');
+
+promise_test(async () => {
+ const target = (await appendIframe(CROSSORIGIN_HELPER)).contentWindow;
+ let frame = createVideoFrame(70);
+ assert_false(await canTransferVideoFrame(target, frame));
+}, 'Verify frames cannot be transferred accross the different agent clusters');
+
+promise_test(async () => {
+ const blob = new Blob([document.querySelector('#workerCode').textContent], {
+ type: 'text/javascript',
+ });
+ const worker = new Worker(window.URL.createObjectURL(blob));
+ let frame = createVideoFrame(80);
+ worker.postMessage({frame: frame, transfer: true}, [frame]);
+ const received = await new Promise(resolve => worker.onmessage = e => {
+ resolve(e.data);
+ });
+ assert_equals(received.toString(), '[object VideoFrame]');
+ assert_equals(received.timestamp, 80);
+}, 'Verify frames can be transferred back and forth between main and worker');
+
+promise_test(async () => {
+ const blob = new Blob([document.querySelector('#sharedWorkerCode').textContent], {
+ type: 'text/javascript',
+ });
+ const worker = new SharedWorker(window.URL.createObjectURL(blob));
+ let frame = createVideoFrame(90);
+ worker.port.postMessage(frame, [frame]);
+ worker.port.postMessage({'id': 90});
+ const received = await new Promise(resolve => worker.port.onmessage = e => {
+ resolve(e.data);
+ });
+ assert_equals(received, 'NOT_RECEIVED');
+}, 'Verify frames cannot be transferred to a sharedworker');
+
+promise_test(async () => {
+ navigator.serviceWorker.register('videoFrame-serialization.crossAgentCluster.serviceworker.js');
+ navigator.serviceWorker.ready.then((registration) => {
+ let frame = createVideoFrame(100);
+ registration.active.postMessage(frame, [frame]);
+ registration.active.postMessage({'id': 100});
+ });
+ const received = await new Promise(resolve => navigator.serviceWorker.onmessage = (e) => {
+ resolve(e.data);
+ });
+ assert_equals(received, 'NOT_RECEIVED');
+}, 'Verify frames cannot be transferred to serviceworker');
+
+promise_test(async () => {
+ const blob = new Blob([document.querySelector('#sharedWorkerCode').textContent], {
+ type: 'text/javascript',
+ });
+ const worker = new SharedWorker(window.URL.createObjectURL(blob));
+ worker.port.postMessage('create-frame');
+ const received = await new Promise(resolve => worker.port.onmessage = e => {
+ resolve(e.data);
+ });
+ assert_true(received instanceof ReferenceError);
+}, 'Verify frames is unavailable in sharedworker');
+
+promise_test(async () => {
+ navigator.serviceWorker.register('videoFrame-serialization.crossAgentCluster.serviceworker.js');
+ let registration = await navigator.serviceWorker.ready;
+ registration.active.postMessage('create-frame');
+ const received = await new Promise(resolve => navigator.serviceWorker.onmessage = (e) => {
+ resolve(e.data);
+ });
+ assert_true(received instanceof ReferenceError);
+}, 'Verify frames is unavailable in serviceworker');
+
+function appendIframe(src) {
+ const frame = document.createElement('iframe');
+ document.body.appendChild(frame);
+ frame.src = src;
+ return new Promise(resolve => frame.onload = () => resolve(frame));
+};
+
+function createVideoFrame(ts) {
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16,
+ ]);
+ return new VideoFrame(data, {
+ timestamp: ts,
+ codedWidth: 2,
+ codedHeight: 2,
+ format: 'RGBA',
+ });
+}
+
+function canSerializeVideoFrame(target, vf) {
+ return canPostVideoFrame(target, vf, false);
+};
+
+function canTransferVideoFrame(target, vf) {
+ return canPostVideoFrame(target, vf, true);
+};
+
+function canPostVideoFrame(target, vf, transfer) {
+ if (transfer) {
+ target.postMessage(vf, '*', [vf]);
+ assert_true(isFrameClosed(vf));
+ } else {
+ target.postMessage(vf, '*');
+ }
+ // vf.timestamp doesn't change after vf is closed, so it's fine to use it.
+ target.postMessage({'id': vf.timestamp}, '*');
+ return new Promise(resolve => window.onmessage = e => {
+ resolve(e.data == 'RECEIVED');
+ });
+};
+</script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.serviceworker.js b/testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.serviceworker.js
new file mode 100644
index 0000000000..9b963610be
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-serialization.crossAgentCluster.serviceworker.js
@@ -0,0 +1,30 @@
+const data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16,
+]);
+let received = new Map();
+self.onmessage = (e) => {
+ if (e.data == 'create-frame') {
+ let frameOrError = null;
+ try {
+ frameOrError = new VideoFrame(data, {
+ timestamp: 0,
+ codedWidth: 2,
+ codedHeight: 2,
+ format: 'RGBA',
+ });
+ } catch (error) {
+ frameOrError = error
+ }
+ e.source.postMessage(frameOrError);
+ return;
+ }
+ if (e.data.hasOwnProperty('id')) {
+ e.source.postMessage(
+ received.get(e.data.id) ? 'RECEIVED' : 'NOT_RECEIVED');
+ return;
+ }
+ if (e.data.toString() == '[object VideoFrame]') {
+ received.set(e.data.timestamp, e.data);
+ }
+};
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-texImage.any.js b/testing/web-platform/tests/webcodecs/videoFrame-texImage.any.js
new file mode 100644
index 0000000000..2eab6c8cde
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-texImage.any.js
@@ -0,0 +1,141 @@
+// META: global=window,dedicatedworker
+// META: script=/webcodecs/utils.js
+// META: script=/webcodecs/webgl-test-utils.js
+
+function testGLCanvas(gl, width, height, expectedPixel, assertCompares) {
+ var colorData =
+ new Uint8Array(gl.drawingBufferWidth * gl.drawingBufferHeight * 4);
+ gl.readPixels(
+ 0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight, gl.RGBA,
+ gl.UNSIGNED_BYTE, colorData);
+ assertCompares(gl.getError(), gl.NO_ERROR);
+
+ const kMaxPixelToCheck = 128 * 96;
+ let step = width * height / kMaxPixelToCheck;
+ step = Math.round(step);
+ step = (step < 1) ? 1 : step;
+ for (let i = 0; i < 4 * width * height; i += (4 * step)) {
+ assertCompares(colorData[i], expectedPixel[0]);
+ assertCompares(colorData[i + 1], expectedPixel[1]);
+ assertCompares(colorData[i + 2], expectedPixel[2]);
+ assertCompares(colorData[i + 3], expectedPixel[3]);
+ }
+}
+
+function testTexImage2DFromVideoFrame(
+ width, height, useTexSubImage2D, expectedPixel) {
+ let vfInit =
+ {format: 'RGBA', timestamp: 0, codedWidth: width, codedHeight: height};
+ let argbData = new Uint32Array(vfInit.codedWidth * vfInit.codedHeight);
+ argbData.fill(0xFF966432); // 'rgb(50, 100, 150)';
+ let frame = new VideoFrame(argbData, vfInit);
+
+ let canvas;
+ if (self.HTMLCanvasElement) {
+ canvas = document.createElement("canvas");
+ canvas.width = width;
+ canvas.height = height;
+ } else
+ canvas = new OffscreenCanvas(width, height);
+ let gl = canvas.getContext('webgl');
+
+ let program = WebGLTestUtils.setupTexturedQuad(gl);
+ gl.clearColor(0, 0, 0, 1);
+ gl.clearDepth(1);
+ gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
+ gl.colorMask(1, 1, 1, 0); // Disable any writes to the alpha channel.
+ let textureLoc = gl.getUniformLocation(program, 'tex');
+
+ let texture = gl.createTexture();
+
+ // Bind the texture to texture unit 0.
+ gl.bindTexture(gl.TEXTURE_2D, texture);
+
+ // Set up texture parameters.
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+
+ // Set up pixel store parameters.
+ gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
+ gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
+
+ // Upload the videoElement into the texture
+ if (useTexSubImage2D) {
+ // Initialize the texture to black first
+ gl.texImage2D(
+ gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE,
+ null);
+ gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, gl.RGBA, gl.UNSIGNED_BYTE, frame);
+ } else {
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, frame);
+ }
+
+ frame.close();
+
+ assert_equals(gl.getError(), gl.NO_ERROR);
+
+ // Point the uniform sampler to texture unit 0
+ gl.uniform1i(textureLoc, 0);
+
+ // Draw the triangles
+ WebGLTestUtils.drawQuad(gl, [0, 0, 0, 255]);
+
+ // Wait for drawing to complete.
+ gl.finish();
+
+ testGLCanvas(gl, width, height, expectedPixel, assert_equals);
+}
+
+function testTexImageWithClosedVideoFrame(useTexSubImage2D) {
+ let width = 128;
+ let height = 128;
+ let vfInit =
+ {format: 'RGBA', timestamp: 0, codedWidth: width, codedHeight: height};
+ let argbData = new Uint32Array(vfInit.codedWidth * vfInit.codedHeight);
+ argbData.fill(0xFF966432); // 'rgb(50, 100, 150)';
+ let frame = new VideoFrame(argbData, vfInit);
+
+ let canvas;
+ if (self.HTMLCanvasElement) {
+ canvas = document.createElement("canvas");
+ canvas.width = width;
+ canvas.height = height;
+ } else
+ canvas = new OffscreenCanvas(width, height);
+ let gl = canvas.getContext('webgl');
+
+ frame.close();
+ if (useTexSubImage2D) {
+ gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, gl.RGBA, gl.UNSIGNED_BYTE, frame);
+ } else {
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, frame);
+ }
+
+ assert_equals(gl.getError(), gl.INVALID_OPERATION);
+}
+
+test(_ => {
+ testTexImage2DFromVideoFrame(48, 36, false, kSRGBPixel);
+}, 'texImage2D with 48x36 srgb VideoFrame.');
+
+test(_ => {
+ testTexImage2DFromVideoFrame(48, 36, true, kSRGBPixel);
+}, 'texSubImage2D with 48x36 srgb VideoFrame.');
+
+test(_ => {
+ testTexImage2DFromVideoFrame(480, 360, false, kSRGBPixel);
+}, 'texImage2D with 480x360 srgb VideoFrame.');
+
+test(_ => {
+ testTexImage2DFromVideoFrame(480, 360, true, kSRGBPixel);
+}, 'texSubImage2D with 480x360 srgb VideoFrame.');
+
+test(_ => {
+ testTexImageWithClosedVideoFrame(false);
+}, 'texImage2D with a closed VideoFrame.');
+
+test(_ => {
+ testTexImageWithClosedVideoFrame(true);
+}, 'texSubImage2D with a closed VideoFrame.');
diff --git a/testing/web-platform/tests/webcodecs/videoFrame-utils.js b/testing/web-platform/tests/webcodecs/videoFrame-utils.js
new file mode 100644
index 0000000000..a4c761306c
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/videoFrame-utils.js
@@ -0,0 +1,118 @@
+const I420_DATA = new Uint8Array([
+ 1, 2, 3, 4, // y
+ 5, 6, 7, 8,
+ 9, 10, // u
+ 11, 12, // v
+ ]);
+
+function makeI420_4x2() {
+ const init = {
+ format: 'I420',
+ timestamp: 0,
+ codedWidth: 4,
+ codedHeight: 2,
+ };
+ return new VideoFrame(I420_DATA, init);
+}
+
+function testBufferConstructedI420Frame(bufferType) {
+ let fmt = 'I420';
+ let vfInit = {format: fmt, timestamp: 1234, codedWidth: 4, codedHeight: 2};
+
+ let buffer;
+ if (bufferType == 'SharedArrayBuffer' ||
+ bufferType == 'Uint8Array(SharedArrayBuffer)') {
+ buffer = new SharedArrayBuffer(I420_DATA.length);
+ } else {
+ assert_true(bufferType == 'ArrayBuffer' ||
+ bufferType == 'Uint8Array(ArrayBuffer)');
+ buffer = new ArrayBuffer(I420_DATA.length);
+ }
+ let bufferView = new Uint8Array(buffer);
+ bufferView.set(I420_DATA);
+ let data = bufferType.startsWith('Uint8Array') ? bufferView : buffer;
+
+ let frame = new VideoFrame(data, vfInit);
+ assert_equals(frame.format, fmt, 'plane format');
+ assert_equals(frame.colorSpace.primaries, 'bt709', 'color primaries');
+ assert_equals(frame.colorSpace.transfer, 'bt709', 'color transfer');
+ assert_equals(frame.colorSpace.matrix, 'bt709', 'color matrix');
+ assert_false(frame.colorSpace.fullRange, 'color range');
+ frame.close();
+
+ let y = {offset: 0, stride: 4};
+ let u = {offset: 8, stride: 2};
+ let v = {offset: 10, stride: 2};
+
+ assert_throws_js(TypeError, () => {
+ let y = {offset: 0, stride: 1};
+ let frame = new VideoFrame(data, {...vfInit, layout: [y, u, v]});
+ }, 'y stride too small');
+ assert_throws_js(TypeError, () => {
+ let u = {offset: 8, stride: 1};
+ let frame = new VideoFrame(data, {...vfInit, layout: [y, u, v]});
+ }, 'u stride too small');
+ assert_throws_js(TypeError, () => {
+ let v = {offset: 10, stride: 1};
+ let frame = new VideoFrame(data, {...vfInit, layout: [y, u, v]});
+ }, 'v stride too small');
+ assert_throws_js(TypeError, () => {
+ let frame = new VideoFrame(data.slice(0, 8), vfInit);
+ }, 'data too small');
+}
+
+function assert_buffer_equals(actual, expected) {
+ assert_true(expected instanceof Uint8Array, 'actual instanceof Uint8Array');
+
+ if (actual instanceof ArrayBuffer ||
+ (typeof(SharedArrayBuffer) != 'undefined' &&
+ actual instanceof SharedArrayBuffer)) {
+ actual = new Uint8Array(actual);
+ } else {
+ assert_true(actual instanceof Uint8Array,
+ 'expected instanceof Uint8Array, ArrayBuffer, or SharedArrayBuffer');
+ }
+
+ assert_equals(actual.length, expected.length, 'buffer length');
+ for (let i = 0; i < actual.length; i++) {
+ if (actual[i] != expected[i]) {
+ assert_equals(actual[i], expected[i], 'buffer contents at index ' + i);
+ }
+ }
+}
+
+function assert_layout_equals(actual, expected) {
+ assert_equals(actual.length, expected.length, 'layout planes');
+ for (let i = 0; i < actual.length; i++) {
+ assert_object_equals(actual[i], expected[i], 'plane ' + i + ' layout');
+ }
+}
+
+async function testI420_4x2_copyTo(destination) {
+ const frame = makeI420_4x2();
+ const expectedLayout = [
+ {offset: 0, stride: 4},
+ {offset: 8, stride: 2},
+ {offset: 10, stride: 2},
+ ];
+ const expectedData = new Uint8Array([
+ 1, 2, 3, 4, // y
+ 5, 6, 7, 8,
+ 9, 10, // u
+ 11, 12 // v
+ ]);
+
+ assert_equals(frame.allocationSize(), expectedData.length, 'allocationSize()');
+ const layout = await frame.copyTo(destination);
+ assert_layout_equals(layout, expectedLayout);
+ assert_buffer_equals(destination, expectedData);
+}
+
+function verifyTimestampRequiredToConstructFrame(imageSource) {
+ assert_throws_js(
+ TypeError,
+ () => new VideoFrame(imageSource),
+ 'timestamp required to construct VideoFrame from this source');
+ let validFrame = new VideoFrame(imageSource, {timestamp: 0});
+ validFrame.close();
+}
diff --git a/testing/web-platform/tests/webcodecs/vp8.webm b/testing/web-platform/tests/webcodecs/vp8.webm
new file mode 100644
index 0000000000..14d970e301
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/vp8.webm
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/vp9.mp4 b/testing/web-platform/tests/webcodecs/vp9.mp4
new file mode 100644
index 0000000000..7553e5cae9
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/vp9.mp4
Binary files differ
diff --git a/testing/web-platform/tests/webcodecs/webgl-test-utils.js b/testing/web-platform/tests/webcodecs/webgl-test-utils.js
new file mode 100644
index 0000000000..f623a6c986
--- /dev/null
+++ b/testing/web-platform/tests/webcodecs/webgl-test-utils.js
@@ -0,0 +1,321 @@
+// Copyright 2011 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+WebGLTestUtils = (function() {
+ /**
+ * Converts a WebGL enum to a string
+ * @param {!WebGLContext} gl The WebGLContext to use.
+ * @param {number} value The enum value.
+ * @return {string} The enum as a string.
+ */
+ var glEnumToString = function(gl, value) {
+ for (var p in gl) {
+ if (gl[p] == value) {
+ return p;
+ }
+ }
+ return '0x' + value.toString(16);
+ };
+
+ var lastError = '';
+
+ /**
+ * Returns the last compiler/linker error.
+ * @return {string} The last compiler/linker error.
+ */
+ var getLastError = function() {
+ return lastError;
+ };
+
+ // clang-format off
+
+ /**
+ * A vertex shader for a single texture.
+ * @type {string}
+ */
+ var simpleTextureVertexShader = [
+ 'attribute vec4 vPosition;', //
+ 'attribute vec2 texCoord0;',
+ 'varying vec2 texCoord;',
+ 'void main() {',
+ ' gl_Position = vPosition;',
+ ' texCoord = texCoord0;',
+ '}'
+ ].join('\n');
+
+ /**
+ * A fragment shader for a single texture.
+ * @type {string}
+ */
+ var simpleTextureFragmentShader = [
+ 'precision mediump float;',
+ 'uniform sampler2D tex;',
+ 'varying vec2 texCoord;',
+ 'void main() {',
+ ' gl_FragData[0] = texture2D(tex, texCoord);',
+ '}'
+ ].join('\n');
+
+ // clang-format on
+
+ /**
+ * Creates a simple texture vertex shader.
+ * @param {!WebGLContext} gl The WebGLContext to use.
+ * @return {!WebGLShader}
+ */
+ var setupSimpleTextureVertexShader = function(gl) {
+ return loadShader(gl, simpleTextureVertexShader, gl.VERTEX_SHADER);
+ };
+
+ /**
+ * Creates a simple texture fragment shader.
+ * @param {!WebGLContext} gl The WebGLContext to use.
+ * @return {!WebGLShader}
+ */
+ var setupSimpleTextureFragmentShader = function(gl) {
+ return loadShader(gl, simpleTextureFragmentShader, gl.FRAGMENT_SHADER);
+ };
+
+ /**
+ * Creates a program, attaches shaders, binds attrib locations, links the
+ * program and calls useProgram.
+ * @param {!Array.<!WebGLShader>} shaders The shaders to attach .
+ * @param {!Array.<string>} opt_attribs The attribs names.
+ * @param {!Array.<number>} opt_locations The locations for the attribs.
+ */
+ var setupProgram = function(gl, shaders, opt_attribs, opt_locations) {
+ var realShaders = [];
+ var program = gl.createProgram();
+ for (var ii = 0; ii < shaders.length; ++ii) {
+ var shader = shaders[ii];
+ if (typeof shader == 'string') {
+ var element = document.getElementById(shader);
+ if (element) {
+ shader = loadShaderFromScript(gl, shader);
+ } else {
+ shader = loadShader(
+ gl, shader, ii ? gl.FRAGMENT_SHADER : gl.VERTEX_SHADER);
+ }
+ }
+ gl.attachShader(program, shader);
+ }
+ if (opt_attribs) {
+ for (var ii = 0; ii < opt_attribs.length; ++ii) {
+ gl.bindAttribLocation(
+ program, opt_locations ? opt_locations[ii] : ii, opt_attribs[ii]);
+ }
+ }
+ gl.linkProgram(program);
+
+ // Check the link status
+ var linked = gl.getProgramParameter(program, gl.LINK_STATUS);
+ if (!linked) {
+ gl.deleteProgram(program);
+ return null;
+ }
+
+ gl.useProgram(program);
+ return program;
+ };
+
+ /**
+ * Creates a simple texture program.
+ * @param {!WebGLContext} gl The WebGLContext to use.
+ * @param {number} opt_positionLocation The attrib location for position.
+ * @param {number} opt_texcoordLocation The attrib location for texture
+ * coords.
+ * @return {WebGLProgram}
+ */
+ var setupSimpleTextureProgram = function(
+ gl, opt_positionLocation, opt_texcoordLocation) {
+ opt_positionLocation = opt_positionLocation || 0;
+ opt_texcoordLocation = opt_texcoordLocation || 1;
+ var vs = setupSimpleTextureVertexShader(gl);
+ var fs = setupSimpleTextureFragmentShader(gl);
+ if (!vs || !fs) {
+ return null;
+ }
+ var program = setupProgram(
+ gl, [vs, fs], ['vPosition', 'texCoord0'],
+ [opt_positionLocation, opt_texcoordLocation]);
+ if (!program) {
+ gl.deleteShader(fs);
+ gl.deleteShader(vs);
+ }
+ gl.useProgram(program);
+ return program;
+ };
+
+ /**
+ * Creates buffers for a textured unit quad and attaches them to vertex
+ * attribs.
+ * @param {!WebGLContext} gl The WebGLContext to use.
+ * @param {number} opt_positionLocation The attrib location for position.
+ * @param {number} opt_texcoordLocation The attrib location for texture
+ * coords.
+ * @return {!Array.<WebGLBuffer>} The buffer objects that were
+ * created.
+ */
+ var setupUnitQuad = function(gl, opt_positionLocation, opt_texcoordLocation) {
+ opt_positionLocation = opt_positionLocation || 0;
+ opt_texcoordLocation = opt_texcoordLocation || 1;
+ var objects = [];
+
+ var vertexObject = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, vertexObject);
+ gl.bufferData(
+ gl.ARRAY_BUFFER, new Float32Array([
+ 1.0, 1.0, 0.0, -1.0, 1.0, 0.0, -1.0, -1.0, 0.0, 1.0, 1.0, 0.0, -1.0,
+ -1.0, 0.0, 1.0, -1.0, 0.0
+ ]),
+ gl.STATIC_DRAW);
+ gl.enableVertexAttribArray(opt_positionLocation);
+ gl.vertexAttribPointer(opt_positionLocation, 3, gl.FLOAT, false, 0, 0);
+ objects.push(vertexObject);
+
+ var vertexObject = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, vertexObject);
+ gl.bufferData(
+ gl.ARRAY_BUFFER,
+ new Float32Array(
+ [1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0]),
+ gl.STATIC_DRAW);
+ gl.enableVertexAttribArray(opt_texcoordLocation);
+ gl.vertexAttribPointer(opt_texcoordLocation, 2, gl.FLOAT, false, 0, 0);
+ objects.push(vertexObject);
+ return objects;
+ };
+
+ /**
+ * Creates a program and buffers for rendering a textured quad.
+ * @param {!WebGLContext} gl The WebGLContext to use.
+ * @param {number} opt_positionLocation The attrib location for position.
+ * @param {number} opt_texcoordLocation The attrib location for texture
+ * coords.
+ * @return {!WebGLProgram}
+ */
+ var setupTexturedQuad = function(
+ gl, opt_positionLocation, opt_texcoordLocation) {
+ var program = setupSimpleTextureProgram(
+ gl, opt_positionLocation, opt_texcoordLocation);
+ setupUnitQuad(gl, opt_positionLocation, opt_texcoordLocation);
+ return program;
+ };
+
+ /**
+ * Draws a previously setup quad.
+ * @param {!WebGLContext} gl The WebGLContext to use.
+ * @param {!Array.<number>} opt_color The color to fill clear with before
+ * drawing. A 4 element array where each element is in the range 0 to
+ * 255. Default [255, 255, 255, 255]
+ */
+ var drawQuad = function(gl, opt_color) {
+ opt_color = opt_color || [255, 255, 255, 255];
+ gl.clearColor(
+ opt_color[0] / 255, opt_color[1] / 255, opt_color[2] / 255,
+ opt_color[3] / 255);
+ gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
+ gl.drawArrays(gl.TRIANGLES, 0, 6);
+ };
+
+ /**
+ * Links a WebGL program, throws if there are errors.
+ * @param {!WebGLContext} gl The WebGLContext to use.
+ * @param {!WebGLProgram} program The WebGLProgram to link.
+ * @param {function(string): void) opt_errorCallback callback for errors.
+ */
+ var linkProgram = function(gl, program, opt_errorCallback) {
+ // Link the program
+ gl.linkProgram(program);
+
+ // Check the link status
+ var linked = gl.getProgramParameter(program, gl.LINK_STATUS);
+ if (!linked) {
+ // something went wrong with the link
+ gl.deleteProgram(program);
+ return false;
+ }
+
+ return true;
+ };
+
+ /**
+ * Loads a shader.
+ * @param {!WebGLContext} gl The WebGLContext to use.
+ * @param {string} shaderSource The shader source.
+ * @param {number} shaderType The type of shader.
+ * @param {function(string): void) opt_errorCallback callback for errors.
+ * @return {!WebGLShader} The created shader.
+ */
+ var loadShader =
+ function(gl, shaderSource, shaderType, opt_errorCallback) {
+ var errFn = opt_errorCallback || (_ => {});
+ // Create the shader object
+ var shader = gl.createShader(shaderType);
+ if (shader == null) {
+ errFn('*** Error: unable to create shader \'' + shaderSource + '\'');
+ return null;
+ }
+
+ // Load the shader source
+ gl.shaderSource(shader, shaderSource);
+ var err = gl.getError();
+ if (err != gl.NO_ERROR) {
+ errFn(
+ '*** Error loading shader \'' + shader +
+ '\':' + glEnumToString(gl, err));
+ return null;
+ }
+
+ // Compile the shader
+ gl.compileShader(shader);
+
+ // Check the compile status
+ var compiled = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
+ if (!compiled) {
+ // Something went wrong during compilation; get the error
+ lastError = gl.getShaderInfoLog(shader);
+ errFn('*** Error compiling shader \'' + shader + '\':' + lastError);
+ gl.deleteShader(shader);
+ return null;
+ }
+
+ return shader;
+ }
+
+ /**
+ * Loads shaders from source, creates a program, attaches the shaders and
+ * links.
+ * @param {!WebGLContext} gl The WebGLContext to use.
+ * @param {string} vertexShader The vertex shader.
+ * @param {string} fragmentShader The fragment shader.
+ * @param {function(string): void) opt_errorCallback callback for errors.
+ * @return {!WebGLProgram} The created program.
+ */
+ var loadProgram = function(
+ gl, vertexShader, fragmentShader, opt_errorCallback) {
+ var program = gl.createProgram();
+ gl.attachShader(
+ program,
+ loadShader(gl, vertexShader, gl.VERTEX_SHADER, opt_errorCallback));
+ gl.attachShader(
+ program,
+ loadShader(gl, fragmentShader, gl.FRAGMENT_SHADER, opt_errorCallback));
+ return linkProgram(gl, program, opt_errorCallback) ? program : null;
+ };
+
+ return {
+ drawQuad: drawQuad,
+ getLastError: getLastError,
+ glEnumToString: glEnumToString,
+ loadProgram: loadProgram,
+ loadShader: loadShader,
+ setupProgram: setupProgram,
+ setupSimpleTextureFragmentShader: setupSimpleTextureFragmentShader,
+ setupSimpleTextureProgram: setupSimpleTextureProgram,
+ setupSimpleTextureVertexShader: setupSimpleTextureVertexShader,
+ setupTexturedQuad: setupTexturedQuad,
+ setupUnitQuad: setupUnitQuad,
+ };
+}());