summaryrefslogtreecommitdiffstats
path: root/testing/web-platform/tests/compression/compression-stream.tentative.any.js
diff options
context:
space:
mode:
Diffstat (limited to 'testing/web-platform/tests/compression/compression-stream.tentative.any.js')
-rw-r--r--testing/web-platform/tests/compression/compression-stream.tentative.any.js91
1 file changed, 91 insertions, 0 deletions
diff --git a/testing/web-platform/tests/compression/compression-stream.tentative.any.js b/testing/web-platform/tests/compression/compression-stream.tentative.any.js
new file mode 100644
index 0000000000..8c02a6d699
--- /dev/null
+++ b/testing/web-platform/tests/compression/compression-stream.tentative.any.js
@@ -0,0 +1,91 @@
+// META: global=window,worker
+// META: script=third_party/pako/pako_inflate.min.js
+// META: timeout=long
+
+'use strict';
+
// Resources served by the WPT server, used as compression payloads:
// a small text file and a larger media file (to exercise multi-chunk output).
const SMALL_FILE = "/media/foo.vtt";
const LARGE_FILE = "/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm";
+
// Pipe `input` (a BufferSource, here always a Uint8Array) through a
// CompressionStream of the given `format` ("deflate" or "gzip") and
// return a Promise resolving to a single Uint8Array holding all the
// compressed bytes.
async function compressArrayBuffer(input, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  // Keep the write/close promises so a rejection is observed rather than
  // surfacing as an unhandled rejection; they settle once the readable
  // side has been drained by the loop below.
  const writePromise = writer.write(input);
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await writePromise;
  await closePromise;
  // Concatenate the collected chunks into one contiguous buffer.
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}
+
// The CompressionStream constructor must reject unknown formats with a
// TypeError at construction time.
test(() => {
  assert_throws_js(TypeError, () => {
    new CompressionStream("nonvalid");
  }, "non supported format should throw");
}, "CompressionStream constructor should throw on invalid format");
+
// Deflate round-trip of a zero-length payload.
promise_test(async () => {
  const original = new Uint8Array(new ArrayBuffer(0));
  const compressed = await compressArrayBuffer(original, "deflate");
  // Inflating with pako must reproduce the (empty) input exactly.
  assert_array_equals(original, pako.inflate(compressed));
}, "deflated empty data should be reinflated back to its origin");
+
// Deflate round-trip of a small fetched resource.
promise_test(async () => {
  const response = await fetch(SMALL_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "deflate");
  // Decompress with pako and check we recovered the original bytes.
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "deflated small amount data should be reinflated back to its origin");
+
// Deflate round-trip of a large fetched resource (multi-chunk output).
promise_test(async () => {
  const response = await fetch(LARGE_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "deflate");
  // Decompress with pako and check we recovered the original bytes.
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "deflated large amount data should be reinflated back to its origin");
+
// Gzip round-trip of a zero-length payload.
promise_test(async () => {
  const original = new Uint8Array(new ArrayBuffer(0));
  const compressed = await compressArrayBuffer(original, "gzip");
  // Inflating with pako must reproduce the (empty) input exactly.
  assert_array_equals(original, pako.inflate(compressed));
}, "gzipped empty data should be reinflated back to its origin");
+
// Gzip round-trip of a small fetched resource.
promise_test(async () => {
  const response = await fetch(SMALL_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "gzip");
  // Decompress with pako and check we recovered the original bytes.
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "gzipped small amount data should be reinflated back to its origin");
+
// Gzip round-trip of a large fetched resource (multi-chunk output).
promise_test(async () => {
  const response = await fetch(LARGE_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "gzip");
  // Decompress with pako and check we recovered the original bytes.
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "gzipped large amount data should be reinflated back to its origin");