From 43a97878ce14b72f0981164f87f2e35e14151312 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Sun, 7 Apr 2024 11:22:09 +0200
Subject: Adding upstream version 110.0.1.

Signed-off-by: Daniel Baumann
---
 .../compression-stream.tentative.any.js           | 91 ++++++++++++++++++++++
 1 file changed, 91 insertions(+)
 create mode 100644 testing/web-platform/tests/compression/compression-stream.tentative.any.js

diff --git a/testing/web-platform/tests/compression/compression-stream.tentative.any.js b/testing/web-platform/tests/compression/compression-stream.tentative.any.js
new file mode 100644
index 0000000000..8c02a6d699
--- /dev/null
+++ b/testing/web-platform/tests/compression/compression-stream.tentative.any.js
@@ -0,0 +1,91 @@
+// META: global=window,worker
+// META: script=third_party/pako/pako_inflate.min.js
+// META: timeout=long
+
+'use strict';
+
+const SMALL_FILE = "/media/foo.vtt";
+const LARGE_FILE = "/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm";
+
+async function compressArrayBuffer(input, format) {
+  const cs = new CompressionStream(format);
+  const writer = cs.writable.getWriter();
+  writer.write(input);
+  const closePromise = writer.close();
+  const out = [];
+  const reader = cs.readable.getReader();
+  let totalSize = 0;
+  while (true) {
+    const { value, done } = await reader.read();
+    if (done)
+      break;
+    out.push(value);
+    totalSize += value.byteLength;
+  }
+  await closePromise;
+  const concatenated = new Uint8Array(totalSize);
+  let offset = 0;
+  for (const array of out) {
+    concatenated.set(array, offset);
+    offset += array.byteLength;
+  }
+  return concatenated;
+}
+
+test(() => {
+  assert_throws_js(TypeError, () => {
+    const transformer = new CompressionStream("nonvalid");
+  }, "non supported format should throw");
+}, "CompressionStream constructor should throw on invalid format");
+
+promise_test(async () => {
+  const buffer = new ArrayBuffer(0);
+  const bufferView = new Uint8Array(buffer);
+  const compressedData = await compressArrayBuffer(bufferView, "deflate");
+  // decompress with pako, and check that we got the same result as our original string
+  assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "deflated empty data should be reinflated back to its origin");
+
+promise_test(async () => {
+  const response = await fetch(SMALL_FILE)
+  const buffer = await response.arrayBuffer();
+  const bufferView = new Uint8Array(buffer);
+  const compressedData = await compressArrayBuffer(bufferView, "deflate");
+  // decompress with pako, and check that we got the same result as our original string
+  assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "deflated small amount data should be reinflated back to its origin");
+
+promise_test(async () => {
+  const response = await fetch(LARGE_FILE)
+  const buffer = await response.arrayBuffer();
+  const bufferView = new Uint8Array(buffer);
+  const compressedData = await compressArrayBuffer(bufferView, "deflate");
+  // decompress with pako, and check that we got the same result as our original string
+  assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "deflated large amount data should be reinflated back to its origin");
+
+promise_test(async () => {
+  const buffer = new ArrayBuffer(0);
+  const bufferView = new Uint8Array(buffer);
+  const compressedData = await compressArrayBuffer(bufferView, "gzip");
+  // decompress with pako, and check that we got the same result as our original string
+  assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "gzipped empty data should be reinflated back to its origin");
+
+promise_test(async () => {
+  const response = await fetch(SMALL_FILE)
+  const buffer = await response.arrayBuffer();
+  const bufferView = new Uint8Array(buffer);
+  const compressedData = await compressArrayBuffer(bufferView, "gzip");
+  // decompress with pako, and check that we got the same result as our original string
+  assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "gzipped small amount data should be reinflated back to its origin");
+
+promise_test(async () => {
+  const response = await fetch(LARGE_FILE)
+  const buffer = await response.arrayBuffer();
+  const bufferView = new Uint8Array(buffer);
+  const compressedData = await compressArrayBuffer(bufferView, "gzip");
+  // decompress with pako, and check that we got the same result as our original string
+  assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "gzipped large amount data should be reinflated back to its origin");
-- 
cgit v1.2.3