summaryrefslogtreecommitdiffstats
path: root/testing/web-platform/tests/compression
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
commit26a029d407be480d791972afb5975cf62c9360a6 (patch)
treef435a8308119effd964b339f76abb83a57c29483 /testing/web-platform/tests/compression
parentInitial commit. (diff)
downloadfirefox-26a029d407be480d791972afb5975cf62c9360a6.tar.xz
firefox-26a029d407be480d791972afb5975cf62c9360a6.zip
Adding upstream version 124.0.1.upstream/124.0.1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/web-platform/tests/compression')
-rw-r--r--testing/web-platform/tests/compression/META.yml3
-rw-r--r--testing/web-platform/tests/compression/compression-bad-chunks.tentative.any.js74
-rw-r--r--testing/web-platform/tests/compression/compression-constructor-error.tentative.any.js15
-rw-r--r--testing/web-platform/tests/compression/compression-including-empty-chunk.tentative.any.js63
-rw-r--r--testing/web-platform/tests/compression/compression-large-flush-output.any.js41
-rw-r--r--testing/web-platform/tests/compression/compression-multiple-chunks.tentative.any.js67
-rw-r--r--testing/web-platform/tests/compression/compression-output-length.tentative.any.js64
-rw-r--r--testing/web-platform/tests/compression/compression-stream.tentative.any.js91
-rw-r--r--testing/web-platform/tests/compression/compression-with-detach.tentative.window.js55
-rw-r--r--testing/web-platform/tests/compression/decompression-bad-chunks.tentative.any.js85
-rw-r--r--testing/web-platform/tests/compression/decompression-buffersource.tentative.any.js192
-rw-r--r--testing/web-platform/tests/compression/decompression-constructor-error.tentative.any.js15
-rw-r--r--testing/web-platform/tests/compression/decompression-correct-input.tentative.any.js39
-rw-r--r--testing/web-platform/tests/compression/decompression-corrupt-input.tentative.any.js318
-rw-r--r--testing/web-platform/tests/compression/decompression-empty-input.tentative.any.js43
-rw-r--r--testing/web-platform/tests/compression/decompression-split-chunk.tentative.any.js53
-rw-r--r--testing/web-platform/tests/compression/decompression-uint8array-output.tentative.any.js30
-rw-r--r--testing/web-platform/tests/compression/decompression-with-detach.tentative.window.js41
-rw-r--r--testing/web-platform/tests/compression/idlharness-shadowrealm.window.js2
-rw-r--r--testing/web-platform/tests/compression/idlharness.https.any.js17
-rw-r--r--testing/web-platform/tests/compression/resources/concatenate-stream.js25
-rw-r--r--testing/web-platform/tests/compression/third_party/pako/LICENSE21
-rw-r--r--testing/web-platform/tests/compression/third_party/pako/README2
-rw-r--r--testing/web-platform/tests/compression/third_party/pako/pako_inflate.min.js1
24 files changed, 1357 insertions, 0 deletions
diff --git a/testing/web-platform/tests/compression/META.yml b/testing/web-platform/tests/compression/META.yml
new file mode 100644
index 0000000000..0afbe29a53
--- /dev/null
+++ b/testing/web-platform/tests/compression/META.yml
@@ -0,0 +1,3 @@
+spec: https://wicg.github.io/compression/
+suggested_reviewers:
+ - ricea
diff --git a/testing/web-platform/tests/compression/compression-bad-chunks.tentative.any.js b/testing/web-platform/tests/compression/compression-bad-chunks.tentative.any.js
new file mode 100644
index 0000000000..2d0b568473
--- /dev/null
+++ b/testing/web-platform/tests/compression/compression-bad-chunks.tentative.any.js
@@ -0,0 +1,74 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+const badChunks = [
+ {
+ name: 'undefined',
+ value: undefined
+ },
+ {
+ name: 'null',
+ value: null
+ },
+ {
+ name: 'numeric',
+ value: 3.14
+ },
+ {
+ name: 'object, not BufferSource',
+ value: {}
+ },
+ {
+ name: 'array',
+ value: [65]
+ },
+ {
+ name: 'SharedArrayBuffer',
+ // Use a getter to postpone construction so that all tests don't fail where
+ // SharedArrayBuffer is not yet implemented.
+ get value() {
+ // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
+ return new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer;
+ }
+ },
+ {
+ name: 'shared Uint8Array',
+ get value() {
+ // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
+ return new Uint8Array(new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer)
+ }
+ },
+];
+
+for (const chunk of badChunks) {
+ promise_test(async t => {
+ const cs = new CompressionStream('gzip');
+ const reader = cs.readable.getReader();
+ const writer = cs.writable.getWriter();
+ const writePromise = writer.write(chunk.value);
+ const readPromise = reader.read();
+ await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
+ await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
+ }, `chunk of type ${chunk.name} should error the stream for gzip`);
+
+ promise_test(async t => {
+ const cs = new CompressionStream('deflate');
+ const reader = cs.readable.getReader();
+ const writer = cs.writable.getWriter();
+ const writePromise = writer.write(chunk.value);
+ const readPromise = reader.read();
+ await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
+ await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
+ }, `chunk of type ${chunk.name} should error the stream for deflate`);
+
+ promise_test(async t => {
+ const cs = new CompressionStream('deflate-raw');
+ const reader = cs.readable.getReader();
+ const writer = cs.writable.getWriter();
+ const writePromise = writer.write(chunk.value);
+ const readPromise = reader.read();
+ await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
+ await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
+ }, `chunk of type ${chunk.name} should error the stream for deflate-raw`);
+}
diff --git a/testing/web-platform/tests/compression/compression-constructor-error.tentative.any.js b/testing/web-platform/tests/compression/compression-constructor-error.tentative.any.js
new file mode 100644
index 0000000000..b39ab93bd0
--- /dev/null
+++ b/testing/web-platform/tests/compression/compression-constructor-error.tentative.any.js
@@ -0,0 +1,15 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+test(t => {
+ assert_throws_js(TypeError, () => new CompressionStream('a'), 'constructor should throw');
+}, '"a" should cause the constructor to throw');
+
+test(t => {
+ assert_throws_js(TypeError, () => new CompressionStream(), 'constructor should throw');
+}, 'no input should cause the constructor to throw');
+
+test(t => {
+ assert_throws_js(Error, () => new CompressionStream({ toString() { throw Error(); } }), 'constructor should throw');
+}, 'non-string input should cause the constructor to throw');
diff --git a/testing/web-platform/tests/compression/compression-including-empty-chunk.tentative.any.js b/testing/web-platform/tests/compression/compression-including-empty-chunk.tentative.any.js
new file mode 100644
index 0000000000..a7fd1ceb24
--- /dev/null
+++ b/testing/web-platform/tests/compression/compression-including-empty-chunk.tentative.any.js
@@ -0,0 +1,63 @@
+// META: global=window,worker,shadowrealm
+// META: script=third_party/pako/pako_inflate.min.js
+// META: timeout=long
+
+'use strict';
+
+// This test asserts that compressing '' doesn't affect the compressed data.
+// Example: compressing ['Hello', '', 'Hello'] results in 'HelloHello'
+
+async function compressChunkList(chunkList, format) {
+ const cs = new CompressionStream(format);
+ const writer = cs.writable.getWriter();
+ for (const chunk of chunkList) {
+ const chunkByte = new TextEncoder().encode(chunk);
+ writer.write(chunkByte);
+ }
+ const closePromise = writer.close();
+ const out = [];
+ const reader = cs.readable.getReader();
+ let totalSize = 0;
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done)
+ break;
+ out.push(value);
+ totalSize += value.byteLength;
+ }
+ await closePromise;
+ const concatenated = new Uint8Array(totalSize);
+ let offset = 0;
+ for (const array of out) {
+ concatenated.set(array, offset);
+ offset += array.byteLength;
+ }
+ return concatenated;
+}
+
+const chunkLists = [
+ ['', 'Hello', 'Hello'],
+ ['Hello', '', 'Hello'],
+ ['Hello', 'Hello', '']
+];
+const expectedValue = new TextEncoder().encode('HelloHello');
+
+for (const chunkList of chunkLists) {
+ promise_test(async t => {
+ const compressedData = await compressChunkList(chunkList, 'deflate');
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+ }, `the result of compressing [${chunkList}] with deflate should be 'HelloHello'`);
+
+ promise_test(async t => {
+ const compressedData = await compressChunkList(chunkList, 'gzip');
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+ }, `the result of compressing [${chunkList}] with gzip should be 'HelloHello'`);
+
+ promise_test(async t => {
+ const compressedData = await compressChunkList(chunkList, 'deflate-raw');
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
+ }, `the result of compressing [${chunkList}] with deflate-raw should be 'HelloHello'`);
+}
diff --git a/testing/web-platform/tests/compression/compression-large-flush-output.any.js b/testing/web-platform/tests/compression/compression-large-flush-output.any.js
new file mode 100644
index 0000000000..6afcb4d528
--- /dev/null
+++ b/testing/web-platform/tests/compression/compression-large-flush-output.any.js
@@ -0,0 +1,41 @@
+// META: global=window,worker,shadowrealm
+// META: script=third_party/pako/pako_inflate.min.js
+// META: script=resources/concatenate-stream.js
+// META: timeout=long
+
+'use strict';
+
+// This test verifies that a large flush output will not truncate the
+// final results.
+
+async function compressData(chunk, format) {
+ const cs = new CompressionStream(format);
+ const writer = cs.writable.getWriter();
+ writer.write(chunk);
+ writer.close();
+ return await concatenateStream(cs.readable);
+}
+
+// JSON-encoded array of 10 thousands numbers ("[0,1,2,...]"). This produces 48_891 bytes of data.
+const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10_000 }, (_, i) => i)));
+const data = fullData.subarray(0, 35_579);
+const expectedValue = data;
+
+promise_test(async t => {
+ const compressedData = await compressData(data, 'deflate');
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+}, `deflate compression with large flush output`);
+
+promise_test(async t => {
+ const compressedData = await compressData(data, 'gzip');
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+}, `gzip compression with large flush output`);
+
+promise_test(async t => {
+ const compressedData = await compressData(data, 'deflate-raw');
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
+}, `deflate-raw compression with large flush output`);
+
diff --git a/testing/web-platform/tests/compression/compression-multiple-chunks.tentative.any.js b/testing/web-platform/tests/compression/compression-multiple-chunks.tentative.any.js
new file mode 100644
index 0000000000..28a90e5ca5
--- /dev/null
+++ b/testing/web-platform/tests/compression/compression-multiple-chunks.tentative.any.js
@@ -0,0 +1,67 @@
+// META: global=window,worker,shadowrealm
+// META: script=third_party/pako/pako_inflate.min.js
+// META: timeout=long
+
+'use strict';
+
+// This test asserts that compressing multiple chunks should work.
+
+// Example: ('Hello', 3) => TextEncoder().encode('HelloHelloHello')
+function makeExpectedChunk(input, numberOfChunks) {
+ const expectedChunk = input.repeat(numberOfChunks);
+ return new TextEncoder().encode(expectedChunk);
+}
+
+// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', 'Hello']
+async function compressMultipleChunks(input, numberOfChunks, format) {
+ const cs = new CompressionStream(format);
+ const writer = cs.writable.getWriter();
+ const chunk = new TextEncoder().encode(input);
+ for (let i = 0; i < numberOfChunks; ++i) {
+ writer.write(chunk);
+ }
+ const closePromise = writer.close();
+ const out = [];
+ const reader = cs.readable.getReader();
+ let totalSize = 0;
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done)
+ break;
+ out.push(value);
+ totalSize += value.byteLength;
+ }
+ await closePromise;
+ const concatenated = new Uint8Array(totalSize);
+ let offset = 0;
+ for (const array of out) {
+ concatenated.set(array, offset);
+ offset += array.byteLength;
+ }
+ return concatenated;
+}
+
+const hello = 'Hello';
+
+for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
+ promise_test(async t => {
+ const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate');
+ const expectedValue = makeExpectedChunk(hello, numberOfChunks);
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+ }, `compressing ${numberOfChunks} chunks with deflate should work`);
+
+ promise_test(async t => {
+ const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'gzip');
+ const expectedValue = makeExpectedChunk(hello, numberOfChunks);
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
+ }, `compressing ${numberOfChunks} chunks with gzip should work`);
+
+ promise_test(async t => {
+ const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate-raw');
+ const expectedValue = makeExpectedChunk(hello, numberOfChunks);
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
+ }, `compressing ${numberOfChunks} chunks with deflate-raw should work`);
+}
diff --git a/testing/web-platform/tests/compression/compression-output-length.tentative.any.js b/testing/web-platform/tests/compression/compression-output-length.tentative.any.js
new file mode 100644
index 0000000000..7aa1373450
--- /dev/null
+++ b/testing/web-platform/tests/compression/compression-output-length.tentative.any.js
@@ -0,0 +1,64 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+// This test asserts that compressed data length is shorter than the original
+// data length. If the input is extremely small, the compressed data may be
+// larger than the original data.
+
+const LARGE_FILE = '/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm';
+
+async function compressArrayBuffer(input, format) {
+ const cs = new CompressionStream(format);
+ const writer = cs.writable.getWriter();
+ writer.write(input);
+ const closePromise = writer.close();
+ const out = [];
+ const reader = cs.readable.getReader();
+ let totalSize = 0;
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done)
+ break;
+ out.push(value);
+ totalSize += value.byteLength;
+ }
+ await closePromise;
+ const concatenated = new Uint8Array(totalSize);
+ let offset = 0;
+ for (const array of out) {
+ concatenated.set(array, offset);
+ offset += array.byteLength;
+ }
+ return concatenated;
+}
+
+promise_test(async () => {
+ const response = await fetch(LARGE_FILE);
+ const buffer = await response.arrayBuffer();
+ const bufferView = new Uint8Array(buffer);
+ const originalLength = bufferView.length;
+ const compressedData = await compressArrayBuffer(bufferView, 'deflate');
+ const compressedLength = compressedData.length;
+ assert_less_than(compressedLength, originalLength, 'output should be smaller');
+}, 'the length of deflated data should be shorter than that of the original data');
+
+promise_test(async () => {
+ const response = await fetch(LARGE_FILE);
+ const buffer = await response.arrayBuffer();
+ const bufferView = new Uint8Array(buffer);
+ const originalLength = bufferView.length;
+ const compressedData = await compressArrayBuffer(bufferView, 'gzip');
+ const compressedLength = compressedData.length;
+ assert_less_than(compressedLength, originalLength, 'output should be smaller');
+}, 'the length of gzipped data should be shorter than that of the original data');
+
+promise_test(async () => {
+ const response = await fetch(LARGE_FILE);
+ const buffer = await response.arrayBuffer();
+ const bufferView = new Uint8Array(buffer);
+ const originalLength = bufferView.length;
+ const compressedData = await compressArrayBuffer(bufferView, 'deflate-raw');
+ const compressedLength = compressedData.length;
+ assert_less_than(compressedLength, originalLength, 'output should be smaller');
+}, 'the length of deflated (with -raw) data should be shorter than that of the original data');
diff --git a/testing/web-platform/tests/compression/compression-stream.tentative.any.js b/testing/web-platform/tests/compression/compression-stream.tentative.any.js
new file mode 100644
index 0000000000..a7ea0cb908
--- /dev/null
+++ b/testing/web-platform/tests/compression/compression-stream.tentative.any.js
@@ -0,0 +1,91 @@
+// META: global=window,worker,shadowrealm
+// META: script=third_party/pako/pako_inflate.min.js
+// META: timeout=long
+
+'use strict';
+
+const SMALL_FILE = "/media/foo.vtt";
+const LARGE_FILE = "/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm";
+
+async function compressArrayBuffer(input, format) {
+ const cs = new CompressionStream(format);
+ const writer = cs.writable.getWriter();
+ writer.write(input);
+ const closePromise = writer.close();
+ const out = [];
+ const reader = cs.readable.getReader();
+ let totalSize = 0;
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done)
+ break;
+ out.push(value);
+ totalSize += value.byteLength;
+ }
+ await closePromise;
+ const concatenated = new Uint8Array(totalSize);
+ let offset = 0;
+ for (const array of out) {
+ concatenated.set(array, offset);
+ offset += array.byteLength;
+ }
+ return concatenated;
+}
+
+test(() => {
+ assert_throws_js(TypeError, () => {
+ const transformer = new CompressionStream("nonvalid");
+ }, "non supported format should throw");
+}, "CompressionStream constructor should throw on invalid format");
+
+promise_test(async () => {
+ const buffer = new ArrayBuffer(0);
+ const bufferView = new Uint8Array(buffer);
+ const compressedData = await compressArrayBuffer(bufferView, "deflate");
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "deflated empty data should be reinflated back to its origin");
+
+promise_test(async () => {
+ const response = await fetch(SMALL_FILE)
+ const buffer = await response.arrayBuffer();
+ const bufferView = new Uint8Array(buffer);
+ const compressedData = await compressArrayBuffer(bufferView, "deflate");
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "deflated small amount data should be reinflated back to its origin");
+
+promise_test(async () => {
+ const response = await fetch(LARGE_FILE)
+ const buffer = await response.arrayBuffer();
+ const bufferView = new Uint8Array(buffer);
+ const compressedData = await compressArrayBuffer(bufferView, "deflate");
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "deflated large amount data should be reinflated back to its origin");
+
+promise_test(async () => {
+ const buffer = new ArrayBuffer(0);
+ const bufferView = new Uint8Array(buffer);
+ const compressedData = await compressArrayBuffer(bufferView, "gzip");
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "gzipped empty data should be reinflated back to its origin");
+
+promise_test(async () => {
+ const response = await fetch(SMALL_FILE)
+ const buffer = await response.arrayBuffer();
+ const bufferView = new Uint8Array(buffer);
+ const compressedData = await compressArrayBuffer(bufferView, "gzip");
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "gzipped small amount data should be reinflated back to its origin");
+
+promise_test(async () => {
+ const response = await fetch(LARGE_FILE)
+ const buffer = await response.arrayBuffer();
+ const bufferView = new Uint8Array(buffer);
+ const compressedData = await compressArrayBuffer(bufferView, "gzip");
+ // decompress with pako, and check that we got the same result as our original string
+ assert_array_equals(bufferView, pako.inflate(compressedData));
+}, "gzipped large amount data should be reinflated back to its origin");
diff --git a/testing/web-platform/tests/compression/compression-with-detach.tentative.window.js b/testing/web-platform/tests/compression/compression-with-detach.tentative.window.js
new file mode 100644
index 0000000000..465feaa47d
--- /dev/null
+++ b/testing/web-platform/tests/compression/compression-with-detach.tentative.window.js
@@ -0,0 +1,55 @@
+// META: global=window,worker,shadowrealm
+// META: script=resources/concatenate-stream.js
+
+'use strict';
+
+const kInputLength = 500000;
+
+function createLargeRandomInput() {
+ const buffer = new ArrayBuffer(kInputLength);
+ // The getRandomValues API will only let us get 65536 bytes at a time, so call
+ // it multiple times.
+ const kChunkSize = 65536;
+ for (let offset = 0; offset < kInputLength; offset += kChunkSize) {
+ const length =
+ offset + kChunkSize > kInputLength ? kInputLength - offset : kChunkSize;
+ const view = new Uint8Array(buffer, offset, length);
+ crypto.getRandomValues(view);
+ }
+ return new Uint8Array(buffer);
+}
+
+function decompress(view) {
+ const ds = new DecompressionStream('deflate');
+ const writer = ds.writable.getWriter();
+ writer.write(view);
+ writer.close();
+ return concatenateStream(ds.readable);
+}
+
+promise_test(async () => {
+ const input = createLargeRandomInput();
+ const inputCopy = input.slice(0, input.byteLength);
+ const cs = new CompressionStream('deflate');
+ const writer = cs.writable.getWriter();
+ writer.write(input);
+ writer.close();
+ // Object.prototype.then will be looked up synchronously when the promise
+ // returned by read() is resolved.
+ Object.defineProperty(Object.prototype, 'then', {
+ get() {
+ // Cause input to become detached and unreferenced.
+ try {
+ postMessage(undefined, 'nowhere', [input.buffer]);
+ } catch (e) {
+ // It's already detached.
+ }
+ }
+ });
+ const output = await concatenateStream(cs.readable);
+ // Perform the comparison as strings since this is reasonably fast even when
+ // JITted JavaScript is running under an emulator.
+ assert_equals(
+ inputCopy.toString(), (await decompress(output)).toString(),
+ 'decompressing the output should return the input');
+}, 'data should be correctly compressed even if input is detached partway');
diff --git a/testing/web-platform/tests/compression/decompression-bad-chunks.tentative.any.js b/testing/web-platform/tests/compression/decompression-bad-chunks.tentative.any.js
new file mode 100644
index 0000000000..f450b0c4cb
--- /dev/null
+++ b/testing/web-platform/tests/compression/decompression-bad-chunks.tentative.any.js
@@ -0,0 +1,85 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+const badChunks = [
+ {
+ name: 'undefined',
+ value: undefined
+ },
+ {
+ name: 'null',
+ value: null
+ },
+ {
+ name: 'numeric',
+ value: 3.14
+ },
+ {
+ name: 'object, not BufferSource',
+ value: {}
+ },
+ {
+ name: 'array',
+ value: [65]
+ },
+ {
+ name: 'SharedArrayBuffer',
+ // Use a getter to postpone construction so that all tests don't fail where
+ // SharedArrayBuffer is not yet implemented.
+ get value() {
+ // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
+ return new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer;
+ }
+ },
+ {
+ name: 'shared Uint8Array',
+ get value() {
+ // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
+ return new Uint8Array(new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer)
+ }
+ },
+ {
+ name: 'invalid deflate bytes',
+ value: new Uint8Array([0, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36])
+ },
+ {
+ name: 'invalid gzip bytes',
+ value: new Uint8Array([0, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0])
+ },
+];
+
+// Test Case Design
+// We need to wait until after we close the writable stream to check if the decoded stream is valid.
+// We can end up in a state where all reads/writes are valid, but upon closing the writable stream an error is detected.
+// (Example: A zlib encoded chunk w/o the checksum).
+
+async function decompress(chunk, format, t)
+{
+ const ds = new DecompressionStream(format);
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+
+ writer.write(chunk.value).then(() => {}, () => {});
+ reader.read().then(() => {}, () => {});
+
+ await promise_rejects_js(t, TypeError, writer.close(), 'writer.close() should reject');
+ await promise_rejects_js(t, TypeError, writer.closed, 'write.closed should reject');
+
+ await promise_rejects_js(t, TypeError, reader.read(), 'reader.read() should reject');
+ await promise_rejects_js(t, TypeError, reader.closed, 'read.closed should reject');
+}
+
+for (const chunk of badChunks) {
+ promise_test(async t => {
+ await decompress(chunk, 'gzip', t);
+ }, `chunk of type ${chunk.name} should error the stream for gzip`);
+
+ promise_test(async t => {
+ await decompress(chunk, 'deflate', t);
+ }, `chunk of type ${chunk.name} should error the stream for deflate`);
+
+ promise_test(async t => {
+ await decompress(chunk, 'deflate-raw', t);
+ }, `chunk of type ${chunk.name} should error the stream for deflate-raw`);
+}
diff --git a/testing/web-platform/tests/compression/decompression-buffersource.tentative.any.js b/testing/web-platform/tests/compression/decompression-buffersource.tentative.any.js
new file mode 100644
index 0000000000..e81fc56677
--- /dev/null
+++ b/testing/web-platform/tests/compression/decompression-buffersource.tentative.any.js
@@ -0,0 +1,192 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+const compressedBytesWithDeflate = [120, 156, 75, 52, 48, 52, 50, 54, 49, 53, 3, 0, 8, 136, 1, 199];
+const compressedBytesWithGzip = [31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 52, 48, 52, 2, 0, 216, 252, 63, 136, 4, 0, 0, 0];
+const compressedBytesWithDeflateRaw = [
+ 0x00, 0x06, 0x00, 0xf9, 0xff, 0x41, 0x42, 0x43,
+ 0x44, 0x45, 0x46, 0x01, 0x00, 0x00, 0xff, 0xff,
+];
+// These chunk values below were chosen to make the length of the compressed
+// output be a multiple of 8 bytes.
+const deflateExpectedChunkValue = new TextEncoder().encode('a0123456');
+const gzipExpectedChunkValue = new TextEncoder().encode('a012');
+const deflateRawExpectedChunkValue = new TextEncoder().encode('ABCDEF');
+
+const bufferSourceChunksForDeflate = [
+ {
+ name: 'ArrayBuffer',
+ value: new Uint8Array(compressedBytesWithDeflate).buffer
+ },
+ {
+ name: 'Int8Array',
+ value: new Int8Array(new Uint8Array(compressedBytesWithDeflate).buffer)
+ },
+ {
+ name: 'Uint8Array',
+ value: new Uint8Array(new Uint8Array(compressedBytesWithDeflate).buffer)
+ },
+ {
+ name: 'Uint8ClampedArray',
+ value: new Uint8ClampedArray(new Uint8Array(compressedBytesWithDeflate).buffer)
+ },
+ {
+ name: 'Int16Array',
+ value: new Int16Array(new Uint8Array(compressedBytesWithDeflate).buffer)
+ },
+ {
+ name: 'Uint16Array',
+ value: new Uint16Array(new Uint8Array(compressedBytesWithDeflate).buffer)
+ },
+ {
+ name: 'Int32Array',
+ value: new Int32Array(new Uint8Array(compressedBytesWithDeflate).buffer)
+ },
+ {
+ name: 'Uint32Array',
+ value: new Uint32Array(new Uint8Array(compressedBytesWithDeflate).buffer)
+ },
+ {
+ name: 'Float32Array',
+ value: new Float32Array(new Uint8Array(compressedBytesWithDeflate).buffer)
+ },
+ {
+ name: 'Float64Array',
+ value: new Float64Array(new Uint8Array(compressedBytesWithDeflate).buffer)
+ },
+ {
+ name: 'DataView',
+ value: new DataView(new Uint8Array(compressedBytesWithDeflate).buffer)
+ },
+];
+
+const bufferSourceChunksForGzip = [
+ {
+ name: 'ArrayBuffer',
+ value: new Uint8Array(compressedBytesWithGzip).buffer
+ },
+ {
+ name: 'Int8Array',
+ value: new Int8Array(new Uint8Array(compressedBytesWithGzip).buffer)
+ },
+ {
+ name: 'Uint8Array',
+ value: new Uint8Array(new Uint8Array(compressedBytesWithGzip).buffer)
+ },
+ {
+    name: 'Uint8ClampedArray',
+ value: new Uint8ClampedArray(new Uint8Array(compressedBytesWithGzip).buffer)
+ },
+ {
+ name: 'Int16Array',
+ value: new Int16Array(new Uint8Array(compressedBytesWithGzip).buffer)
+ },
+ {
+ name: 'Uint16Array',
+ value: new Uint16Array(new Uint8Array(compressedBytesWithGzip).buffer)
+ },
+ {
+ name: 'Int32Array',
+ value: new Int32Array(new Uint8Array(compressedBytesWithGzip).buffer)
+ },
+ {
+ name: 'Uint32Array',
+ value: new Uint32Array(new Uint8Array(compressedBytesWithGzip).buffer)
+ },
+ {
+ name: 'Float32Array',
+ value: new Float32Array(new Uint8Array(compressedBytesWithGzip).buffer)
+ },
+ {
+ name: 'Float64Array',
+ value: new Float64Array(new Uint8Array(compressedBytesWithGzip).buffer)
+ },
+ {
+ name: 'DataView',
+ value: new DataView(new Uint8Array(compressedBytesWithGzip).buffer)
+ },
+];
+
+const bufferSourceChunksForDeflateRaw = [
+ {
+ name: 'ArrayBuffer',
+ value: new Uint8Array(compressedBytesWithDeflateRaw).buffer
+ },
+ {
+ name: 'Int8Array',
+ value: new Int8Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+ },
+ {
+ name: 'Uint8Array',
+ value: new Uint8Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+ },
+ {
+ name: 'Uint8ClampedArray',
+ value: new Uint8ClampedArray(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+ },
+ {
+ name: 'Int16Array',
+ value: new Int16Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+ },
+ {
+ name: 'Uint16Array',
+ value: new Uint16Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+ },
+ {
+ name: 'Int32Array',
+ value: new Int32Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+ },
+ {
+ name: 'Uint32Array',
+ value: new Uint32Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+ },
+ {
+ name: 'Float32Array',
+ value: new Float32Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+ },
+ {
+ name: 'Float64Array',
+ value: new Float64Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+ },
+ {
+ name: 'DataView',
+ value: new DataView(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+ },
+];
+
+for (const chunk of bufferSourceChunksForDeflate) {
+ promise_test(async t => {
+ const ds = new DecompressionStream('deflate');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(chunk.value);
+ writer.close();
+ const { value } = await reader.read();
+ assert_array_equals(Array.from(value), deflateExpectedChunkValue, 'value should match');
+ }, `chunk of type ${chunk.name} should work for deflate`);
+}
+
+for (const chunk of bufferSourceChunksForGzip) {
+ promise_test(async t => {
+ const ds = new DecompressionStream('gzip');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(chunk.value);
+ writer.close();
+ const { value } = await reader.read();
+ assert_array_equals(Array.from(value), gzipExpectedChunkValue, 'value should match');
+ }, `chunk of type ${chunk.name} should work for gzip`);
+}
+
+for (const chunk of bufferSourceChunksForDeflateRaw) {
+ promise_test(async t => {
+ const ds = new DecompressionStream('deflate-raw');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(chunk.value);
+ writer.close();
+ const { value } = await reader.read();
+ assert_array_equals(Array.from(value), deflateRawExpectedChunkValue, 'value should match');
+ }, `chunk of type ${chunk.name} should work for deflate-raw`);
+}
diff --git a/testing/web-platform/tests/compression/decompression-constructor-error.tentative.any.js b/testing/web-platform/tests/compression/decompression-constructor-error.tentative.any.js
new file mode 100644
index 0000000000..0270ba7353
--- /dev/null
+++ b/testing/web-platform/tests/compression/decompression-constructor-error.tentative.any.js
@@ -0,0 +1,15 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+test(t => {
+ assert_throws_js(TypeError, () => new DecompressionStream('a'), 'constructor should throw');
+}, '"a" should cause the constructor to throw');
+
+test(t => {
+ assert_throws_js(TypeError, () => new DecompressionStream(), 'constructor should throw');
+}, 'no input should cause the constructor to throw');
+
+test(t => {
+ assert_throws_js(Error, () => new DecompressionStream({ toString() { throw Error(); } }), 'constructor should throw');
+}, 'non-string input should cause the constructor to throw');
diff --git a/testing/web-platform/tests/compression/decompression-correct-input.tentative.any.js b/testing/web-platform/tests/compression/decompression-correct-input.tentative.any.js
new file mode 100644
index 0000000000..90519445e3
--- /dev/null
+++ b/testing/web-platform/tests/compression/decompression-correct-input.tentative.any.js
@@ -0,0 +1,39 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+const deflateChunkValue = new Uint8Array([120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36]);
+const gzipChunkValue = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0]);
+const deflateRawChunkValue = new Uint8Array([
+ 0x4b, 0xad, 0x28, 0x48, 0x4d, 0x2e, 0x49, 0x4d, 0x51, 0xc8,
+ 0x2f, 0x2d, 0x29, 0x28, 0x2d, 0x01, 0x00,
+]);
+const trueChunkValue = new TextEncoder().encode('expected output');
+
+promise_test(async t => {
+ const ds = new DecompressionStream('deflate');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(deflateChunkValue);
+ const { done, value } = await reader.read();
+ assert_array_equals(Array.from(value), trueChunkValue, "value should match");
+}, 'decompressing deflated input should work');
+
+
+promise_test(async t => {
+ const ds = new DecompressionStream('gzip');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(gzipChunkValue);
+ const { done, value } = await reader.read();
+ assert_array_equals(Array.from(value), trueChunkValue, "value should match");
+}, 'decompressing gzip input should work');
+
+promise_test(async t => {
+ const ds = new DecompressionStream('deflate-raw');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(deflateRawChunkValue);
+ const { done, value } = await reader.read();
+ assert_array_equals(Array.from(value), trueChunkValue, "value should match");
+}, 'decompressing deflated (with -raw) input should work');
diff --git a/testing/web-platform/tests/compression/decompression-corrupt-input.tentative.any.js b/testing/web-platform/tests/compression/decompression-corrupt-input.tentative.any.js
new file mode 100644
index 0000000000..fc18197dfb
--- /dev/null
+++ b/testing/web-platform/tests/compression/decompression-corrupt-input.tentative.any.js
@@ -0,0 +1,318 @@
+// META: global=window,worker,shadowrealm
+
+// This test checks that DecompressionStream behaves according to the standard
+// when the input is corrupted. To avoid a combinatorial explosion in the
+// number of tests, we only mutate one field at a time, and we only test
+// "interesting" values.
+
+'use strict';
+
+// The many different cases are summarised in this data structure.
+const expectations = [
+ {
+ format: 'deflate',
+
+ // Decompresses to 'expected output'.
+ baseInput: [120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41,
+ 40, 45, 1, 0, 48, 173, 6, 36],
+
+ // See RFC1950 for the definition of the various fields used by deflate:
+ // https://tools.ietf.org/html/rfc1950.
+ fields: [
+ {
+ // The function of this field. This matches the name used in the RFC.
+ name: 'CMF',
+
+ // The offset of the field in bytes from the start of the input.
+ offset: 0,
+
+ // The length of the field in bytes.
+ length: 1,
+
+ cases: [
+ {
+ // The value to set the field to. If the field contains multiple
+ // bytes, all the bytes will be set to this value.
+ value: 0,
+
+ // The expected result. 'success' means the input is decoded
+ // successfully. 'error' means that the stream will be errored.
+ result: 'error'
+ }
+ ]
+ },
+ {
+ name: 'FLG',
+ offset: 1,
+ length: 1,
+
+ // FLG contains a 4-bit checksum (FCHECK) which is calculated in such a
+ // way that there are 4 valid values for this field.
+ cases: [
+ {
+ value: 218,
+ result: 'success'
+ },
+ {
+ value: 1,
+ result: 'success'
+ },
+ {
+ value: 94,
+ result: 'success'
+ },
+ {
+ // The remaining 252 values cause an error.
+ value: 157,
+ result: 'error'
+ }
+ ]
+ },
+ {
+ name: 'DATA',
+ // In general, changing any bit of the data will trigger a checksum
+ // error. Only the last byte does anything else.
+ offset: 18,
+ length: 1,
+ cases: [
+ {
+ value: 4,
+ result: 'success'
+ },
+ {
+ value: 5,
+ result: 'error'
+ }
+ ]
+ },
+ {
+ name: 'ADLER',
+ offset: -4,
+ length: 4,
+ cases: [
+ {
+ value: 255,
+ result: 'error'
+ }
+ ]
+ }
+ ]
+ },
+ {
+ format: 'gzip',
+
+ // Decompresses to 'expected output'.
+ baseInput: [31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73,
+ 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0,
+ 0, 0],
+
+ // See RFC1952 for the definition of the various fields used by gzip:
+ // https://tools.ietf.org/html/rfc1952.
+ fields: [
+ {
+ name: 'ID',
+ offset: 0,
+ length: 2,
+ cases: [
+ {
+ value: 255,
+ result: 'error'
+ }
+ ]
+ },
+ {
+ name: 'CM',
+ offset: 2,
+ length: 1,
+ cases: [
+ {
+ value: 0,
+ result: 'error'
+ }
+ ]
+ },
+ {
+ name: 'FLG',
+ offset: 3,
+ length: 1,
+ cases: [
+ {
+ value: 1, // FTEXT
+ result: 'success'
+ },
+ {
+ value: 2, // FHCRC
+ result: 'error'
+ }
+ ]
+ },
+ {
+ name: 'MTIME',
+ offset: 4,
+ length: 4,
+ cases: [
+ {
+ // Any value is valid for this field.
+ value: 255,
+ result: 'success'
+ }
+ ]
+ },
+ {
+ name: 'XFL',
+ offset: 8,
+ length: 1,
+ cases: [
+ {
+ // Any value is accepted.
+ value: 255,
+ result: 'success'
+ }
+ ]
+ },
+ {
+ name: 'OS',
+ offset: 9,
+ length: 1,
+ cases: [
+ {
+ // Any value is accepted.
+ value: 128,
+ result: 'success'
+ }
+ ]
+ },
+ {
+ name: 'DATA',
+
+ // The last byte of the data is the most interesting.
+ offset: 26,
+ length: 1,
+ cases: [
+ {
+ value: 3,
+ result: 'error'
+ },
+ {
+ value: 4,
+ result: 'success'
+ }
+ ]
+ },
+ {
+ name: 'CRC',
+ offset: -8,
+ length: 4,
+ cases: [
+ {
+ // Any change will error the stream.
+ value: 0,
+ result: 'error'
+ }
+ ]
+ },
+ {
+ name: 'ISIZE',
+ offset: -4,
+ length: 4,
+ cases: [
+ {
+ // A mismatch will error the stream.
+ value: 1,
+ result: 'error'
+ }
+ ]
+ }
+ ]
+ }
+];
+
+async function tryDecompress(input, format) {
+ const ds = new DecompressionStream(format);
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ writer.write(input).catch(() => {});
+ writer.close().catch(() => {});
+ let out = [];
+ while (true) {
+ try {
+ const { value, done } = await reader.read();
+ if (done) {
+ break;
+ }
+ out = out.concat(Array.from(value));
+ } catch (e) {
+ if (e instanceof TypeError) {
+ return { result: 'error' };
+ } else {
+ return { result: e.name };
+ }
+ }
+ }
+ const expectedOutput = 'expected output';
+ if (out.length !== expectedOutput.length) {
+ return { result: 'corrupt' };
+ }
+ for (let i = 0; i < out.length; ++i) {
+ if (out[i] !== expectedOutput.charCodeAt(i)) {
+ return { result: 'corrupt' };
+ }
+ }
+ return { result: 'success' };
+}
+
+function corruptInput(input, offset, length, value) {
+ const output = new Uint8Array(input);
+ if (offset < 0) {
+ offset += input.length;
+ }
+ for (let i = offset; i < offset + length; ++i) {
+ output[i] = value;
+ }
+ return output;
+}
+
+for (const { format, baseInput, fields } of expectations) {
+ promise_test(async () => {
+ const { result } = await tryDecompress(new Uint8Array(baseInput), format);
+ assert_equals(result, 'success', 'decompression should succeed');
+ }, `the unchanged input for '${format}' should decompress successfully`);
+
+ promise_test(async () => {
+ const truncatedInput = new Uint8Array(baseInput.slice(0, -1));
+ const { result } = await tryDecompress(truncatedInput, format);
+ assert_equals(result, 'error', 'decompression should fail');
+ }, `truncating the input for '${format}' should give an error`);
+
+ promise_test(async () => {
+ const extendedInput = new Uint8Array(baseInput.concat([0]));
+ const { result } = await tryDecompress(extendedInput, format);
+ assert_equals(result, 'error', 'decompression should fail');
+ }, `trailing junk for '${format}' should give an error`);
+
+ for (const { name, offset, length, cases } of fields) {
+ for (const { value, result } of cases) {
+ promise_test(async () => {
+ const corruptedInput = corruptInput(baseInput, offset, length, value);
+ const { result: actual } =
+ await tryDecompress(corruptedInput, format);
+ assert_equals(actual, result, 'result should match');
+ }, `format '${format}' field ${name} should be ${result} for ${value}`);
+ }
+ }
+}
+
+promise_test(async () => {
+ // Data generated in Python:
+ // ```py
+ // h = b"thequickbrownfoxjumped\x00"
+ // words = h.split()
+ // zdict = b''.join(words)
+ // co = zlib.compressobj(zdict=zdict)
+ // cd = co.compress(h) + co.flush()
+ // ```
+ const { result } = await tryDecompress(new Uint8Array([
+ 0x78, 0xbb, 0x74, 0xee, 0x09, 0x59, 0x2b, 0xc1, 0x2e, 0x0c, 0x00, 0x74, 0xee, 0x09, 0x59
+ ]), 'deflate');
+ assert_equals(result, 'error', 'Data compressed with a dictionary should throw TypeError');
+}, 'the deflate input compressed with dictionary should give an error');
diff --git a/testing/web-platform/tests/compression/decompression-empty-input.tentative.any.js b/testing/web-platform/tests/compression/decompression-empty-input.tentative.any.js
new file mode 100644
index 0000000000..201db8ec0b
--- /dev/null
+++ b/testing/web-platform/tests/compression/decompression-empty-input.tentative.any.js
@@ -0,0 +1,43 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+const gzipEmptyValue = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
+const deflateEmptyValue = new Uint8Array([120, 156, 3, 0, 0, 0, 0, 1]);
+const deflateRawEmptyValue = new Uint8Array([1, 0, 0, 255, 255]);
+
+promise_test(async t => {
+ const ds = new DecompressionStream('gzip');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(gzipEmptyValue);
+ writer.close();
+ const { value, done } = await reader.read();
+ assert_true(done, "read() should set done");
+ assert_equals(value, undefined, "value should be undefined");
+ await writePromise;
+}, 'decompressing gzip empty input should work');
+
+promise_test(async t => {
+ const ds = new DecompressionStream('deflate');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(deflateEmptyValue);
+ writer.close();
+ const { value, done } = await reader.read();
+ assert_true(done, "read() should set done");
+ assert_equals(value, undefined, "value should be undefined");
+ await writePromise;
+}, 'decompressing deflate empty input should work');
+
+promise_test(async t => {
+ const ds = new DecompressionStream('deflate-raw');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(deflateRawEmptyValue);
+ writer.close();
+ const { value, done } = await reader.read();
+ assert_true(done, "read() should set done");
+ assert_equals(value, undefined, "value should be undefined");
+ await writePromise;
+}, 'decompressing deflate-raw empty input should work');
diff --git a/testing/web-platform/tests/compression/decompression-split-chunk.tentative.any.js b/testing/web-platform/tests/compression/decompression-split-chunk.tentative.any.js
new file mode 100644
index 0000000000..eb12c2a236
--- /dev/null
+++ b/testing/web-platform/tests/compression/decompression-split-chunk.tentative.any.js
@@ -0,0 +1,53 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+const compressedBytesWithDeflate = new Uint8Array([120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36]);
+const compressedBytesWithGzip = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0]);
+const compressedBytesWithDeflateRaw = new Uint8Array([
+ 0x4b, 0xad, 0x28, 0x48, 0x4d, 0x2e, 0x49, 0x4d, 0x51, 0xc8,
+ 0x2f, 0x2d, 0x29, 0x28, 0x2d, 0x01, 0x00,
+]);
+const expectedChunkValue = new TextEncoder().encode('expected output');
+
+async function decompressArrayBuffer(input, format, chunkSize) {
+ const ds = new DecompressionStream(format);
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ for (let beginning = 0; beginning < input.length; beginning += chunkSize) {
+ writer.write(input.slice(beginning, beginning + chunkSize));
+ }
+ writer.close();
+ const out = [];
+ let totalSize = 0;
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done) break;
+ out.push(value);
+ totalSize += value.byteLength;
+ }
+ const concatenated = new Uint8Array(totalSize);
+ let offset = 0;
+ for (const array of out) {
+ concatenated.set(array, offset);
+ offset += array.byteLength;
+ }
+ return concatenated;
+}
+
+for (let chunkSize = 1; chunkSize < 16; ++chunkSize) {
+ promise_test(async t => {
+ const decompressedData = await decompressArrayBuffer(compressedBytesWithDeflate, 'deflate', chunkSize);
+ assert_array_equals(decompressedData, expectedChunkValue, "value should match");
+ }, `decompressing splitted chunk into pieces of size ${chunkSize} should work in deflate`);
+
+ promise_test(async t => {
+ const decompressedData = await decompressArrayBuffer(compressedBytesWithGzip, 'gzip', chunkSize);
+ assert_array_equals(decompressedData, expectedChunkValue, "value should match");
+ }, `decompressing splitted chunk into pieces of size ${chunkSize} should work in gzip`);
+
+ promise_test(async t => {
+ const decompressedData = await decompressArrayBuffer(compressedBytesWithDeflateRaw, 'deflate-raw', chunkSize);
+ assert_array_equals(decompressedData, expectedChunkValue, "value should match");
+ }, `decompressing splitted chunk into pieces of size ${chunkSize} should work in deflate-raw`);
+}
diff --git a/testing/web-platform/tests/compression/decompression-uint8array-output.tentative.any.js b/testing/web-platform/tests/compression/decompression-uint8array-output.tentative.any.js
new file mode 100644
index 0000000000..0c45a0aaa7
--- /dev/null
+++ b/testing/web-platform/tests/compression/decompression-uint8array-output.tentative.any.js
@@ -0,0 +1,30 @@
+// META: global=window,worker,shadowrealm
+// META: timeout=long
+//
+// This test isn't actually slow usually, but sometimes it takes >10 seconds on
+// Firefox with service worker for no obvious reason.
+
+'use strict';
+
+const deflateChunkValue = new Uint8Array([120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36]);
+const gzipChunkValue = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0]);
+
+promise_test(async t => {
+ const ds = new DecompressionStream('deflate');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(deflateChunkValue);
+ const { value } = await reader.read();
+ assert_equals(value.constructor, Uint8Array, "type should match");
+ await writePromise;
+}, 'decompressing deflated output should give Uint8Array chunks');
+
+promise_test(async t => {
+ const ds = new DecompressionStream('gzip');
+ const reader = ds.readable.getReader();
+ const writer = ds.writable.getWriter();
+ const writePromise = writer.write(gzipChunkValue);
+ const { value } = await reader.read();
+ assert_equals(value.constructor, Uint8Array, "type should match");
+ await writePromise;
+}, 'decompressing gzip output should give Uint8Array chunks');
diff --git a/testing/web-platform/tests/compression/decompression-with-detach.tentative.window.js b/testing/web-platform/tests/compression/decompression-with-detach.tentative.window.js
new file mode 100644
index 0000000000..1ff9c26983
--- /dev/null
+++ b/testing/web-platform/tests/compression/decompression-with-detach.tentative.window.js
@@ -0,0 +1,41 @@
+// META: global=window,worker,shadowrealm
+// META: script=resources/concatenate-stream.js
+
+'use strict';
+
+const kInputLength = 1000000;
+
+async function createLargeCompressedInput() {
+ const cs = new CompressionStream('deflate');
+ // The input has to be large enough that it won't fit in a single chunk when
+ // decompressed.
+ const writer = cs.writable.getWriter();
+ writer.write(new Uint8Array(kInputLength));
+ writer.close();
+ return concatenateStream(cs.readable);
+}
+
+promise_test(async () => {
+ const input = await createLargeCompressedInput();
+ const ds = new DecompressionStream('deflate');
+ const writer = ds.writable.getWriter();
+ writer.write(input);
+ writer.close();
+ // Object.prototype.then will be looked up synchronously when the promise
+ // returned by read() is resolved.
+ Object.defineProperty(Object.prototype, 'then', {
+ get() {
+ // Cause input to become detached and unreferenced.
+ try {
+ postMessage(undefined, 'nowhere', [input.buffer]);
+ } catch (e) {
+ // It's already detached.
+ }
+ }
+ });
+ const output = await concatenateStream(ds.readable);
+ // If output successfully decompressed and gave the right length, we can be
+ // reasonably confident that no data corruption happened.
+ assert_equals(
+ output.byteLength, kInputLength, 'output should be the right length');
+}, 'data should be correctly decompressed even if input is detached partway');
diff --git a/testing/web-platform/tests/compression/idlharness-shadowrealm.window.js b/testing/web-platform/tests/compression/idlharness-shadowrealm.window.js
new file mode 100644
index 0000000000..2fdc807ee0
--- /dev/null
+++ b/testing/web-platform/tests/compression/idlharness-shadowrealm.window.js
@@ -0,0 +1,2 @@
+// META: script=/resources/idlharness-shadowrealm.js
+idl_test_shadowrealm(["compression"], ["streams"]);
diff --git a/testing/web-platform/tests/compression/idlharness.https.any.js b/testing/web-platform/tests/compression/idlharness.https.any.js
new file mode 100644
index 0000000000..8d96cf523c
--- /dev/null
+++ b/testing/web-platform/tests/compression/idlharness.https.any.js
@@ -0,0 +1,17 @@
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+
+'use strict';
+
+// https://wicg.github.io/compression/
+
+idl_test(
+ ['compression'],
+ ['streams'],
+ idl_array => {
+ idl_array.add_objects({
+ CompressionStream: ['new CompressionStream("deflate")'],
+ DecompressionStream: ['new DecompressionStream("deflate")'],
+ });
+ }
+);
diff --git a/testing/web-platform/tests/compression/resources/concatenate-stream.js b/testing/web-platform/tests/compression/resources/concatenate-stream.js
new file mode 100644
index 0000000000..a35bb1416e
--- /dev/null
+++ b/testing/web-platform/tests/compression/resources/concatenate-stream.js
@@ -0,0 +1,25 @@
+'use strict';
+
+// Read all the chunks from a stream that returns BufferSource objects and
+// concatenate them into a single Uint8Array.
+async function concatenateStream(readableStream) {
+ const reader = readableStream.getReader();
+ let totalSize = 0;
+ const buffers = [];
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done) {
+ break;
+ }
+ buffers.push(value);
+ totalSize += value.byteLength;
+ }
+ reader.releaseLock();
+ const concatenated = new Uint8Array(totalSize);
+ let offset = 0;
+ for (const buffer of buffers) {
+ concatenated.set(buffer, offset);
+ offset += buffer.byteLength;
+ }
+ return concatenated;
+}
diff --git a/testing/web-platform/tests/compression/third_party/pako/LICENSE b/testing/web-platform/tests/compression/third_party/pako/LICENSE
new file mode 100644
index 0000000000..a934ef8db4
--- /dev/null
+++ b/testing/web-platform/tests/compression/third_party/pako/LICENSE
@@ -0,0 +1,21 @@
+(The MIT License)
+
+Copyright (C) 2014-2017 by Vitaly Puzrin and Andrei Tuputcyn
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/testing/web-platform/tests/compression/third_party/pako/README b/testing/web-platform/tests/compression/third_party/pako/README
new file mode 100644
index 0000000000..96028388eb
--- /dev/null
+++ b/testing/web-platform/tests/compression/third_party/pako/README
@@ -0,0 +1,2 @@
+original repository:
+https://github.com/nodeca/pako
diff --git a/testing/web-platform/tests/compression/third_party/pako/pako_inflate.min.js b/testing/web-platform/tests/compression/third_party/pako/pako_inflate.min.js
new file mode 100644
index 0000000000..a191a78a89
--- /dev/null
+++ b/testing/web-platform/tests/compression/third_party/pako/pako_inflate.min.js
@@ -0,0 +1 @@
+!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{("undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this).pako=e()}}(function(){return function r(o,s,f){function l(t,e){if(!s[t]){if(!o[t]){var i="function"==typeof require&&require;if(!e&&i)return i(t,!0);if(d)return d(t,!0);var n=new Error("Cannot find module '"+t+"'");throw n.code="MODULE_NOT_FOUND",n}var a=s[t]={exports:{}};o[t][0].call(a.exports,function(e){return l(o[t][1][e]||e)},a,a.exports,r,o,s,f)}return s[t].exports}for(var d="function"==typeof require&&require,e=0;e<f.length;e++)l(f[e]);return l}({1:[function(e,t,i){"use strict";var n="undefined"!=typeof Uint8Array&&"undefined"!=typeof Uint16Array&&"undefined"!=typeof Int32Array;i.assign=function(e){for(var t,i,n=Array.prototype.slice.call(arguments,1);n.length;){var a=n.shift();if(a){if("object"!=typeof a)throw new TypeError(a+"must be non-object");for(var r in a)t=a,i=r,Object.prototype.hasOwnProperty.call(t,i)&&(e[r]=a[r])}}return e},i.shrinkBuf=function(e,t){return e.length===t?e:e.subarray?e.subarray(0,t):(e.length=t,e)};var a={arraySet:function(e,t,i,n,a){if(t.subarray&&e.subarray)e.set(t.subarray(i,i+n),a);else for(var r=0;r<n;r++)e[a+r]=t[i+r]},flattenChunks:function(e){var t,i,n,a,r,o;for(t=n=0,i=e.length;t<i;t++)n+=e[t].length;for(o=new Uint8Array(n),t=a=0,i=e.length;t<i;t++)r=e[t],o.set(r,a),a+=r.length;return o}},r={arraySet:function(e,t,i,n,a){for(var r=0;r<n;r++)e[a+r]=t[i+r]},flattenChunks:function(e){return[].concat.apply([],e)}};i.setTyped=function(e){e?(i.Buf8=Uint8Array,i.Buf16=Uint16Array,i.Buf32=Int32Array,i.assign(i,a)):(i.Buf8=Array,i.Buf16=Array,i.Buf32=Array,i.assign(i,r))},i.setTyped(n)},{}],2:[function(e,t,i){"use strict";var f=e("./common"),a=!0,r=!0;try{String.fromCharCode.apply(null,[0])}catch(e){a=!1}try{String.fromCharCode.apply(null,new Uint8Array(1))}catch(e){r=!1}for(var 
l=new f.Buf8(256),n=0;n<256;n++)l[n]=252<=n?6:248<=n?5:240<=n?4:224<=n?3:192<=n?2:1;function d(e,t){if(t<65534&&(e.subarray&&r||!e.subarray&&a))return String.fromCharCode.apply(null,f.shrinkBuf(e,t));for(var i="",n=0;n<t;n++)i+=String.fromCharCode(e[n]);return i}l[254]=l[254]=1,i.string2buf=function(e){var t,i,n,a,r,o=e.length,s=0;for(a=0;a<o;a++)55296==(64512&(i=e.charCodeAt(a)))&&a+1<o&&56320==(64512&(n=e.charCodeAt(a+1)))&&(i=65536+(i-55296<<10)+(n-56320),a++),s+=i<128?1:i<2048?2:i<65536?3:4;for(t=new f.Buf8(s),a=r=0;r<s;a++)55296==(64512&(i=e.charCodeAt(a)))&&a+1<o&&56320==(64512&(n=e.charCodeAt(a+1)))&&(i=65536+(i-55296<<10)+(n-56320),a++),i<128?t[r++]=i:(i<2048?t[r++]=192|i>>>6:(i<65536?t[r++]=224|i>>>12:(t[r++]=240|i>>>18,t[r++]=128|i>>>12&63),t[r++]=128|i>>>6&63),t[r++]=128|63&i);return t},i.buf2binstring=function(e){return d(e,e.length)},i.binstring2buf=function(e){for(var t=new f.Buf8(e.length),i=0,n=t.length;i<n;i++)t[i]=e.charCodeAt(i);return t},i.buf2string=function(e,t){var i,n,a,r,o=t||e.length,s=new Array(2*o);for(i=n=0;i<o;)if((a=e[i++])<128)s[n++]=a;else if(4<(r=l[a]))s[n++]=65533,i+=r-1;else{for(a&=2===r?31:3===r?15:7;1<r&&i<o;)a=a<<6|63&e[i++],r--;1<r?s[n++]=65533:a<65536?s[n++]=a:(a-=65536,s[n++]=55296|a>>10&1023,s[n++]=56320|1023&a)}return d(s,n)},i.utf8border=function(e,t){var i;for((t=t||e.length)>e.length&&(t=e.length),i=t-1;0<=i&&128==(192&e[i]);)i--;return i<0?t:0===i?t:i+l[e[i]]>t?i:t}},{"./common":1}],3:[function(e,t,i){"use strict";t.exports=function(e,t,i,n){for(var a=65535&e|0,r=e>>>16&65535|0,o=0;0!==i;){for(i-=o=2e3<i?2e3:i;r=r+(a=a+t[n++]|0)|0,--o;);a%=65521,r%=65521}return a|r<<16|0}},{}],4:[function(e,t,i){"use 
strict";t.exports={Z_NO_FLUSH:0,Z_PARTIAL_FLUSH:1,Z_SYNC_FLUSH:2,Z_FULL_FLUSH:3,Z_FINISH:4,Z_BLOCK:5,Z_TREES:6,Z_OK:0,Z_STREAM_END:1,Z_NEED_DICT:2,Z_ERRNO:-1,Z_STREAM_ERROR:-2,Z_DATA_ERROR:-3,Z_BUF_ERROR:-5,Z_NO_COMPRESSION:0,Z_BEST_SPEED:1,Z_BEST_COMPRESSION:9,Z_DEFAULT_COMPRESSION:-1,Z_FILTERED:1,Z_HUFFMAN_ONLY:2,Z_RLE:3,Z_FIXED:4,Z_DEFAULT_STRATEGY:0,Z_BINARY:0,Z_TEXT:1,Z_UNKNOWN:2,Z_DEFLATED:8}},{}],5:[function(e,t,i){"use strict";var s=function(){for(var e,t=[],i=0;i<256;i++){e=i;for(var n=0;n<8;n++)e=1&e?3988292384^e>>>1:e>>>1;t[i]=e}return t}();t.exports=function(e,t,i,n){var a=s,r=n+i;e^=-1;for(var o=n;o<r;o++)e=e>>>8^a[255&(e^t[o])];return-1^e}},{}],6:[function(e,t,i){"use strict";t.exports=function(){this.text=0,this.time=0,this.xflags=0,this.os=0,this.extra=null,this.extra_len=0,this.name="",this.comment="",this.hcrc=0,this.done=!1}},{}],7:[function(e,t,i){"use strict";t.exports=function(e,t){var i,n,a,r,o,s,f,l,d,c,u,h,b,m,w,k,_,g,v,p,x,y,S,E,Z;i=e.state,n=e.next_in,E=e.input,a=n+(e.avail_in-5),r=e.next_out,Z=e.output,o=r-(t-e.avail_out),s=r+(e.avail_out-257),f=i.dmax,l=i.wsize,d=i.whave,c=i.wnext,u=i.window,h=i.hold,b=i.bits,m=i.lencode,w=i.distcode,k=(1<<i.lenbits)-1,_=(1<<i.distbits)-1;e:do{b<15&&(h+=E[n++]<<b,b+=8,h+=E[n++]<<b,b+=8),g=m[h&k];t:for(;;){if(h>>>=v=g>>>24,b-=v,0===(v=g>>>16&255))Z[r++]=65535&g;else{if(!(16&v)){if(0==(64&v)){g=m[(65535&g)+(h&(1<<v)-1)];continue t}if(32&v){i.mode=12;break e}e.msg="invalid literal/length code",i.mode=30;break e}p=65535&g,(v&=15)&&(b<v&&(h+=E[n++]<<b,b+=8),p+=h&(1<<v)-1,h>>>=v,b-=v),b<15&&(h+=E[n++]<<b,b+=8,h+=E[n++]<<b,b+=8),g=w[h&_];i:for(;;){if(h>>>=v=g>>>24,b-=v,!(16&(v=g>>>16&255))){if(0==(64&v)){g=w[(65535&g)+(h&(1<<v)-1)];continue i}e.msg="invalid distance code",i.mode=30;break e}if(x=65535&g,b<(v&=15)&&(h+=E[n++]<<b,(b+=8)<v&&(h+=E[n++]<<b,b+=8)),f<(x+=h&(1<<v)-1)){e.msg="invalid distance too far back",i.mode=30;break e}if(h>>>=v,b-=v,(v=r-o)<x){if(d<(v=x-v)&&i.sane){e.msg="invalid distance too far 
back",i.mode=30;break e}if(S=u,(y=0)===c){if(y+=l-v,v<p){for(p-=v;Z[r++]=u[y++],--v;);y=r-x,S=Z}}else if(c<v){if(y+=l+c-v,(v-=c)<p){for(p-=v;Z[r++]=u[y++],--v;);if(y=0,c<p){for(p-=v=c;Z[r++]=u[y++],--v;);y=r-x,S=Z}}}else if(y+=c-v,v<p){for(p-=v;Z[r++]=u[y++],--v;);y=r-x,S=Z}for(;2<p;)Z[r++]=S[y++],Z[r++]=S[y++],Z[r++]=S[y++],p-=3;p&&(Z[r++]=S[y++],1<p&&(Z[r++]=S[y++]))}else{for(y=r-x;Z[r++]=Z[y++],Z[r++]=Z[y++],Z[r++]=Z[y++],2<(p-=3););p&&(Z[r++]=Z[y++],1<p&&(Z[r++]=Z[y++]))}break}}break}}while(n<a&&r<s);n-=p=b>>3,h&=(1<<(b-=p<<3))-1,e.next_in=n,e.next_out=r,e.avail_in=n<a?a-n+5:5-(n-a),e.avail_out=r<s?s-r+257:257-(r-s),i.hold=h,i.bits=b}},{}],8:[function(e,t,i){"use strict";var z=e("../utils/common"),R=e("./adler32"),N=e("./crc32"),O=e("./inffast"),C=e("./inftrees"),I=1,D=2,T=0,U=-2,F=1,n=852,a=592;function L(e){return(e>>>24&255)+(e>>>8&65280)+((65280&e)<<8)+((255&e)<<24)}function r(){this.mode=0,this.last=!1,this.wrap=0,this.havedict=!1,this.flags=0,this.dmax=0,this.check=0,this.total=0,this.head=null,this.wbits=0,this.wsize=0,this.whave=0,this.wnext=0,this.window=null,this.hold=0,this.bits=0,this.length=0,this.offset=0,this.extra=0,this.lencode=null,this.distcode=null,this.lenbits=0,this.distbits=0,this.ncode=0,this.nlen=0,this.ndist=0,this.have=0,this.next=null,this.lens=new z.Buf16(320),this.work=new z.Buf16(288),this.lendyn=null,this.distdyn=null,this.sane=0,this.back=0,this.was=0}function o(e){var t;return e&&e.state?(t=e.state,e.total_in=e.total_out=t.total=0,e.msg="",t.wrap&&(e.adler=1&t.wrap),t.mode=F,t.last=0,t.havedict=0,t.dmax=32768,t.head=null,t.hold=0,t.bits=0,t.lencode=t.lendyn=new z.Buf32(n),t.distcode=t.distdyn=new z.Buf32(a),t.sane=1,t.back=-1,T):U}function s(e){var t;return e&&e.state?((t=e.state).wsize=0,t.whave=0,t.wnext=0,o(e)):U}function f(e,t){var i,n;return e&&e.state?(n=e.state,t<0?(i=0,t=-t):(i=1+(t>>4),t<48&&(t&=15)),t&&(t<8||15<t)?U:(null!==n.window&&n.wbits!==t&&(n.window=null),n.wrap=i,n.wbits=t,s(e))):U}function l(e,t){var 
i,n;return e?(n=new r,(e.state=n).window=null,(i=f(e,t))!==T&&(e.state=null),i):U}var d,c,u=!0;function H(e){if(u){var t;for(d=new z.Buf32(512),c=new z.Buf32(32),t=0;t<144;)e.lens[t++]=8;for(;t<256;)e.lens[t++]=9;for(;t<280;)e.lens[t++]=7;for(;t<288;)e.lens[t++]=8;for(C(I,e.lens,0,288,d,0,e.work,{bits:9}),t=0;t<32;)e.lens[t++]=5;C(D,e.lens,0,32,c,0,e.work,{bits:5}),u=!1}e.lencode=d,e.lenbits=9,e.distcode=c,e.distbits=5}function j(e,t,i,n){var a,r=e.state;return null===r.window&&(r.wsize=1<<r.wbits,r.wnext=0,r.whave=0,r.window=new z.Buf8(r.wsize)),n>=r.wsize?(z.arraySet(r.window,t,i-r.wsize,r.wsize,0),r.wnext=0,r.whave=r.wsize):(n<(a=r.wsize-r.wnext)&&(a=n),z.arraySet(r.window,t,i-n,a,r.wnext),(n-=a)?(z.arraySet(r.window,t,i-n,n,0),r.wnext=n,r.whave=r.wsize):(r.wnext+=a,r.wnext===r.wsize&&(r.wnext=0),r.whave<r.wsize&&(r.whave+=a))),0}i.inflateReset=s,i.inflateReset2=f,i.inflateResetKeep=o,i.inflateInit=function(e){return l(e,15)},i.inflateInit2=l,i.inflate=function(e,t){var i,n,a,r,o,s,f,l,d,c,u,h,b,m,w,k,_,g,v,p,x,y,S,E,Z=0,B=new z.Buf8(4),A=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15];if(!e||!e.state||!e.output||!e.input&&0!==e.avail_in)return U;12===(i=e.state).mode&&(i.mode=13),o=e.next_out,a=e.output,f=e.avail_out,r=e.next_in,n=e.input,s=e.avail_in,l=i.hold,d=i.bits,c=s,u=f,y=T;e:for(;;)switch(i.mode){case F:if(0===i.wrap){i.mode=13;break}for(;d<16;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}if(2&i.wrap&&35615===l){B[i.check=0]=255&l,B[1]=l>>>8&255,i.check=N(i.check,B,2,0),d=l=0,i.mode=2;break}if(i.flags=0,i.head&&(i.head.done=!1),!(1&i.wrap)||(((255&l)<<8)+(l>>8))%31){e.msg="incorrect header check",i.mode=30;break}if(8!=(15&l)){e.msg="unknown compression method",i.mode=30;break}if(d-=4,x=8+(15&(l>>>=4)),0===i.wbits)i.wbits=x;else if(x>i.wbits){e.msg="invalid window size",i.mode=30;break}i.dmax=1<<x,e.adler=i.check=1,i.mode=512&l?10:12,d=l=0;break;case 2:for(;d<16;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}if(i.flags=l,8!=(255&i.flags)){e.msg="unknown 
compression method",i.mode=30;break}if(57344&i.flags){e.msg="unknown header flags set",i.mode=30;break}i.head&&(i.head.text=l>>8&1),512&i.flags&&(B[0]=255&l,B[1]=l>>>8&255,i.check=N(i.check,B,2,0)),d=l=0,i.mode=3;case 3:for(;d<32;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}i.head&&(i.head.time=l),512&i.flags&&(B[0]=255&l,B[1]=l>>>8&255,B[2]=l>>>16&255,B[3]=l>>>24&255,i.check=N(i.check,B,4,0)),d=l=0,i.mode=4;case 4:for(;d<16;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}i.head&&(i.head.xflags=255&l,i.head.os=l>>8),512&i.flags&&(B[0]=255&l,B[1]=l>>>8&255,i.check=N(i.check,B,2,0)),d=l=0,i.mode=5;case 5:if(1024&i.flags){for(;d<16;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}i.length=l,i.head&&(i.head.extra_len=l),512&i.flags&&(B[0]=255&l,B[1]=l>>>8&255,i.check=N(i.check,B,2,0)),d=l=0}else i.head&&(i.head.extra=null);i.mode=6;case 6:if(1024&i.flags&&(s<(h=i.length)&&(h=s),h&&(i.head&&(x=i.head.extra_len-i.length,i.head.extra||(i.head.extra=new Array(i.head.extra_len)),z.arraySet(i.head.extra,n,r,h,x)),512&i.flags&&(i.check=N(i.check,n,h,r)),s-=h,r+=h,i.length-=h),i.length))break e;i.length=0,i.mode=7;case 7:if(2048&i.flags){if(0===s)break e;for(h=0;x=n[r+h++],i.head&&x&&i.length<65536&&(i.head.name+=String.fromCharCode(x)),x&&h<s;);if(512&i.flags&&(i.check=N(i.check,n,h,r)),s-=h,r+=h,x)break e}else i.head&&(i.head.name=null);i.length=0,i.mode=8;case 8:if(4096&i.flags){if(0===s)break e;for(h=0;x=n[r+h++],i.head&&x&&i.length<65536&&(i.head.comment+=String.fromCharCode(x)),x&&h<s;);if(512&i.flags&&(i.check=N(i.check,n,h,r)),s-=h,r+=h,x)break e}else i.head&&(i.head.comment=null);i.mode=9;case 9:if(512&i.flags){for(;d<16;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}if(l!==(65535&i.check)){e.msg="header crc mismatch",i.mode=30;break}d=l=0}i.head&&(i.head.hcrc=i.flags>>9&1,i.head.done=!0),e.adler=i.check=0,i.mode=12;break;case 10:for(;d<32;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}e.adler=i.check=L(l),d=l=0,i.mode=11;case 11:if(0===i.havedict)return 
e.next_out=o,e.avail_out=f,e.next_in=r,e.avail_in=s,i.hold=l,i.bits=d,2;e.adler=i.check=1,i.mode=12;case 12:if(5===t||6===t)break e;case 13:if(i.last){l>>>=7&d,d-=7&d,i.mode=27;break}for(;d<3;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}switch(i.last=1&l,d-=1,3&(l>>>=1)){case 0:i.mode=14;break;case 1:if(H(i),i.mode=20,6!==t)break;l>>>=2,d-=2;break e;case 2:i.mode=17;break;case 3:e.msg="invalid block type",i.mode=30}l>>>=2,d-=2;break;case 14:for(l>>>=7&d,d-=7&d;d<32;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}if((65535&l)!=(l>>>16^65535)){e.msg="invalid stored block lengths",i.mode=30;break}if(i.length=65535&l,d=l=0,i.mode=15,6===t)break e;case 15:i.mode=16;case 16:if(h=i.length){if(s<h&&(h=s),f<h&&(h=f),0===h)break e;z.arraySet(a,n,r,h,o),s-=h,r+=h,f-=h,o+=h,i.length-=h;break}i.mode=12;break;case 17:for(;d<14;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}if(i.nlen=257+(31&l),l>>>=5,d-=5,i.ndist=1+(31&l),l>>>=5,d-=5,i.ncode=4+(15&l),l>>>=4,d-=4,286<i.nlen||30<i.ndist){e.msg="too many length or distance symbols",i.mode=30;break}i.have=0,i.mode=18;case 18:for(;i.have<i.ncode;){for(;d<3;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}i.lens[A[i.have++]]=7&l,l>>>=3,d-=3}for(;i.have<19;)i.lens[A[i.have++]]=0;if(i.lencode=i.lendyn,i.lenbits=7,S={bits:i.lenbits},y=C(0,i.lens,0,19,i.lencode,0,i.work,S),i.lenbits=S.bits,y){e.msg="invalid code lengths set",i.mode=30;break}i.have=0,i.mode=19;case 19:for(;i.have<i.nlen+i.ndist;){for(;k=(Z=i.lencode[l&(1<<i.lenbits)-1])>>>16&255,_=65535&Z,!((w=Z>>>24)<=d);){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}if(_<16)l>>>=w,d-=w,i.lens[i.have++]=_;else{if(16===_){for(E=w+2;d<E;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}if(l>>>=w,d-=w,0===i.have){e.msg="invalid bit length repeat",i.mode=30;break}x=i.lens[i.have-1],h=3+(3&l),l>>>=2,d-=2}else if(17===_){for(E=w+3;d<E;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}d-=w,x=0,h=3+(7&(l>>>=w)),l>>>=3,d-=3}else{for(E=w+7;d<E;){if(0===s)break 
e;s--,l+=n[r++]<<d,d+=8}d-=w,x=0,h=11+(127&(l>>>=w)),l>>>=7,d-=7}if(i.have+h>i.nlen+i.ndist){e.msg="invalid bit length repeat",i.mode=30;break}for(;h--;)i.lens[i.have++]=x}}if(30===i.mode)break;if(0===i.lens[256]){e.msg="invalid code -- missing end-of-block",i.mode=30;break}if(i.lenbits=9,S={bits:i.lenbits},y=C(I,i.lens,0,i.nlen,i.lencode,0,i.work,S),i.lenbits=S.bits,y){e.msg="invalid literal/lengths set",i.mode=30;break}if(i.distbits=6,i.distcode=i.distdyn,S={bits:i.distbits},y=C(D,i.lens,i.nlen,i.ndist,i.distcode,0,i.work,S),i.distbits=S.bits,y){e.msg="invalid distances set",i.mode=30;break}if(i.mode=20,6===t)break e;case 20:i.mode=21;case 21:if(6<=s&&258<=f){e.next_out=o,e.avail_out=f,e.next_in=r,e.avail_in=s,i.hold=l,i.bits=d,O(e,u),o=e.next_out,a=e.output,f=e.avail_out,r=e.next_in,n=e.input,s=e.avail_in,l=i.hold,d=i.bits,12===i.mode&&(i.back=-1);break}for(i.back=0;k=(Z=i.lencode[l&(1<<i.lenbits)-1])>>>16&255,_=65535&Z,!((w=Z>>>24)<=d);){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}if(k&&0==(240&k)){for(g=w,v=k,p=_;k=(Z=i.lencode[p+((l&(1<<g+v)-1)>>g)])>>>16&255,_=65535&Z,!(g+(w=Z>>>24)<=d);){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}l>>>=g,d-=g,i.back+=g}if(l>>>=w,d-=w,i.back+=w,i.length=_,0===k){i.mode=26;break}if(32&k){i.back=-1,i.mode=12;break}if(64&k){e.msg="invalid literal/length code",i.mode=30;break}i.extra=15&k,i.mode=22;case 22:if(i.extra){for(E=i.extra;d<E;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}i.length+=l&(1<<i.extra)-1,l>>>=i.extra,d-=i.extra,i.back+=i.extra}i.was=i.length,i.mode=23;case 23:for(;k=(Z=i.distcode[l&(1<<i.distbits)-1])>>>16&255,_=65535&Z,!((w=Z>>>24)<=d);){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}if(0==(240&k)){for(g=w,v=k,p=_;k=(Z=i.distcode[p+((l&(1<<g+v)-1)>>g)])>>>16&255,_=65535&Z,!(g+(w=Z>>>24)<=d);){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}l>>>=g,d-=g,i.back+=g}if(l>>>=w,d-=w,i.back+=w,64&k){e.msg="invalid distance code",i.mode=30;break}i.offset=_,i.extra=15&k,i.mode=24;case 24:if(i.extra){for(E=i.extra;d<E;){if(0===s)break 
e;s--,l+=n[r++]<<d,d+=8}i.offset+=l&(1<<i.extra)-1,l>>>=i.extra,d-=i.extra,i.back+=i.extra}if(i.offset>i.dmax){e.msg="invalid distance too far back",i.mode=30;break}i.mode=25;case 25:if(0===f)break e;if(h=u-f,i.offset>h){if((h=i.offset-h)>i.whave&&i.sane){e.msg="invalid distance too far back",i.mode=30;break}h>i.wnext?(h-=i.wnext,b=i.wsize-h):b=i.wnext-h,h>i.length&&(h=i.length),m=i.window}else m=a,b=o-i.offset,h=i.length;for(f<h&&(h=f),f-=h,i.length-=h;a[o++]=m[b++],--h;);0===i.length&&(i.mode=21);break;case 26:if(0===f)break e;a[o++]=i.length,f--,i.mode=21;break;case 27:if(i.wrap){for(;d<32;){if(0===s)break e;s--,l|=n[r++]<<d,d+=8}if(u-=f,e.total_out+=u,i.total+=u,u&&(e.adler=i.check=i.flags?N(i.check,a,u,o-u):R(i.check,a,u,o-u)),u=f,(i.flags?l:L(l))!==i.check){e.msg="incorrect data check",i.mode=30;break}d=l=0}i.mode=28;case 28:if(i.wrap&&i.flags){for(;d<32;){if(0===s)break e;s--,l+=n[r++]<<d,d+=8}if(l!==(4294967295&i.total)){e.msg="incorrect length check",i.mode=30;break}d=l=0}i.mode=29;case 29:y=1;break e;case 30:y=-3;break e;case 31:return-4;case 32:default:return U}return e.next_out=o,e.avail_out=f,e.next_in=r,e.avail_in=s,i.hold=l,i.bits=d,(i.wsize||u!==e.avail_out&&i.mode<30&&(i.mode<27||4!==t))&&j(e,e.output,e.next_out,u-e.avail_out)?(i.mode=31,-4):(c-=e.avail_in,u-=e.avail_out,e.total_in+=c,e.total_out+=u,i.total+=u,i.wrap&&u&&(e.adler=i.check=i.flags?N(i.check,a,u,e.next_out-u):R(i.check,a,u,e.next_out-u)),e.data_type=i.bits+(i.last?64:0)+(12===i.mode?128:0)+(20===i.mode||15===i.mode?256:0),(0===c&&0===u||4===t)&&y===T&&(y=-5),y)},i.inflateEnd=function(e){if(!e||!e.state)return U;var t=e.state;return t.window&&(t.window=null),e.state=null,T},i.inflateGetHeader=function(e,t){var i;return e&&e.state?0==(2&(i=e.state).wrap)?U:((i.head=t).done=!1,T):U},i.inflateSetDictionary=function(e,t){var i,n=t.length;return 
e&&e.state?0!==(i=e.state).wrap&&11!==i.mode?U:11===i.mode&&R(1,t,n,0)!==i.check?-3:j(e,t,n,n)?(i.mode=31,-4):(i.havedict=1,T):U},i.inflateInfo="pako inflate (from Nodeca project)"},{"../utils/common":1,"./adler32":3,"./crc32":5,"./inffast":7,"./inftrees":9}],9:[function(e,t,i){"use strict";var I=e("../utils/common"),D=[3,4,5,6,7,8,9,10,11,13,15,17,19,23,27,31,35,43,51,59,67,83,99,115,131,163,195,227,258,0,0],T=[16,16,16,16,16,16,16,16,17,17,17,17,18,18,18,18,19,19,19,19,20,20,20,20,21,21,21,21,16,72,78],U=[1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577,0,0],F=[16,16,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,64,64];t.exports=function(e,t,i,n,a,r,o,s){var f,l,d,c,u,h,b,m,w,k=s.bits,_=0,g=0,v=0,p=0,x=0,y=0,S=0,E=0,Z=0,B=0,A=null,z=0,R=new I.Buf16(16),N=new I.Buf16(16),O=null,C=0;for(_=0;_<=15;_++)R[_]=0;for(g=0;g<n;g++)R[t[i+g]]++;for(x=k,p=15;1<=p&&0===R[p];p--);if(p<x&&(x=p),0===p)return a[r++]=20971520,a[r++]=20971520,s.bits=1,0;for(v=1;v<p&&0===R[v];v++);for(x<v&&(x=v),_=E=1;_<=15;_++)if(E<<=1,(E-=R[_])<0)return-1;if(0<E&&(0===e||1!==p))return-1;for(N[1]=0,_=1;_<15;_++)N[_+1]=N[_]+R[_];for(g=0;g<n;g++)0!==t[i+g]&&(o[N[t[i+g]]++]=g);if(0===e?(A=O=o,h=19):1===e?(A=D,z-=257,O=T,C-=257,h=256):(A=U,O=F,h=-1),_=v,u=r,S=g=B=0,d=-1,c=(Z=1<<(y=x))-1,1===e&&852<Z||2===e&&592<Z)return 1;for(;;){for(b=_-S,o[g]<h?(m=0,w=o[g]):o[g]>h?(m=O[C+o[g]],w=A[z+o[g]]):(m=96,w=0),f=1<<_-S,v=l=1<<y;a[u+(B>>S)+(l-=f)]=b<<24|m<<16|w|0,0!==l;);for(f=1<<_-1;B&f;)f>>=1;if(0!==f?(B&=f-1,B+=f):B=0,g++,0==--R[_]){if(_===p)break;_=t[i+o[g]]}if(x<_&&(B&c)!==d){for(0===S&&(S=x),u+=v,E=1<<(y=_-S);y+S<p&&!((E-=R[y+S])<=0);)y++,E<<=1;if(Z+=1<<y,1===e&&852<Z||2===e&&592<Z)return 1;a[d=B&c]=x<<24|y<<16|u-r|0}}return 0!==B&&(a[u+B]=_-S<<24|64<<16|0),s.bits=x,0}},{"../utils/common":1}],10:[function(e,t,i){"use strict";t.exports={2:"need dictionary",1:"stream end",0:"","-1":"file error","-2":"stream 
error","-3":"data error","-4":"insufficient memory","-5":"buffer error","-6":"incompatible version"}},{}],11:[function(e,t,i){"use strict";t.exports=function(){this.input=null,this.next_in=0,this.avail_in=0,this.total_in=0,this.output=null,this.next_out=0,this.avail_out=0,this.total_out=0,this.msg="",this.state=null,this.data_type=2,this.adler=0}},{}],"/lib/inflate.js":[function(e,t,i){"use strict";var c=e("./zlib/inflate"),u=e("./utils/common"),h=e("./utils/strings"),b=e("./zlib/constants"),n=e("./zlib/messages"),a=e("./zlib/zstream"),r=e("./zlib/gzheader"),m=Object.prototype.toString;function o(e){if(!(this instanceof o))return new o(e);this.options=u.assign({chunkSize:16384,windowBits:0,to:""},e||{});var t=this.options;t.raw&&0<=t.windowBits&&t.windowBits<16&&(t.windowBits=-t.windowBits,0===t.windowBits&&(t.windowBits=-15)),!(0<=t.windowBits&&t.windowBits<16)||e&&e.windowBits||(t.windowBits+=32),15<t.windowBits&&t.windowBits<48&&0==(15&t.windowBits)&&(t.windowBits|=15),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new a,this.strm.avail_out=0;var i=c.inflateInit2(this.strm,t.windowBits);if(i!==b.Z_OK)throw new Error(n[i]);if(this.header=new r,c.inflateGetHeader(this.strm,this.header),t.dictionary&&("string"==typeof t.dictionary?t.dictionary=h.string2buf(t.dictionary):"[object ArrayBuffer]"===m.call(t.dictionary)&&(t.dictionary=new Uint8Array(t.dictionary)),t.raw&&(i=c.inflateSetDictionary(this.strm,t.dictionary))!==b.Z_OK))throw new Error(n[i])}function s(e,t){var i=new o(t);if(i.push(e,!0),i.err)throw i.msg||n[i.err];return i.result}o.prototype.push=function(e,t){var i,n,a,r,o,s=this.strm,f=this.options.chunkSize,l=this.options.dictionary,d=!1;if(this.ended)return!1;n=t===~~t?t:!0===t?b.Z_FINISH:b.Z_NO_FLUSH,"string"==typeof e?s.input=h.binstring2buf(e):"[object ArrayBuffer]"===m.call(e)?s.input=new Uint8Array(e):s.input=e,s.next_in=0,s.avail_in=s.input.length;do{if(0===s.avail_out&&(s.output=new 
u.Buf8(f),s.next_out=0,s.avail_out=f),(i=c.inflate(s,b.Z_NO_FLUSH))===b.Z_NEED_DICT&&l&&(i=c.inflateSetDictionary(this.strm,l)),i===b.Z_BUF_ERROR&&!0===d&&(i=b.Z_OK,d=!1),i!==b.Z_STREAM_END&&i!==b.Z_OK)return this.onEnd(i),!(this.ended=!0);s.next_out&&(0!==s.avail_out&&i!==b.Z_STREAM_END&&(0!==s.avail_in||n!==b.Z_FINISH&&n!==b.Z_SYNC_FLUSH)||("string"===this.options.to?(a=h.utf8border(s.output,s.next_out),r=s.next_out-a,o=h.buf2string(s.output,a),s.next_out=r,s.avail_out=f-r,r&&u.arraySet(s.output,s.output,a,r,0),this.onData(o)):this.onData(u.shrinkBuf(s.output,s.next_out)))),0===s.avail_in&&0===s.avail_out&&(d=!0)}while((0<s.avail_in||0===s.avail_out)&&i!==b.Z_STREAM_END);return i===b.Z_STREAM_END&&(n=b.Z_FINISH),n===b.Z_FINISH?(i=c.inflateEnd(this.strm),this.onEnd(i),this.ended=!0,i===b.Z_OK):n!==b.Z_SYNC_FLUSH||(this.onEnd(b.Z_OK),!(s.avail_out=0))},o.prototype.onData=function(e){this.chunks.push(e)},o.prototype.onEnd=function(e){e===b.Z_OK&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=u.flattenChunks(this.chunks)),this.chunks=[],this.err=e,this.msg=this.strm.msg},i.Inflate=o,i.inflate=s,i.inflateRaw=function(e,t){return(t=t||{}).raw=!0,s(e,t)},i.ungzip=s},{"./utils/common":1,"./utils/strings":2,"./zlib/constants":4,"./zlib/gzheader":6,"./zlib/inflate":8,"./zlib/messages":10,"./zlib/zstream":11}]},{},[])("/lib/inflate.js")});