author    | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-19 00:47:55 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-19 00:47:55 +0000
commit    | 26a029d407be480d791972afb5975cf62c9360a6 (patch)
tree      | f435a8308119effd964b339f76abb83a57c29483 /testing/web-platform/tests/fetch/content-encoding/gzip
parent    | Initial commit. (diff)
download  | firefox-26a029d407be480d791972afb5975cf62c9360a6.tar.xz, firefox-26a029d407be480d791972afb5975cf62c9360a6.zip
Adding upstream version 124.0.1. (upstream/124.0.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/web-platform/tests/fetch/content-encoding/gzip')
10 files changed, 103 insertions, 0 deletions
diff --git a/testing/web-platform/tests/fetch/content-encoding/gzip/bad-gzip-body.any.js b/testing/web-platform/tests/fetch/content-encoding/gzip/bad-gzip-body.any.js
new file mode 100644
index 0000000000..17bc1261a3
--- /dev/null
+++ b/testing/web-platform/tests/fetch/content-encoding/gzip/bad-gzip-body.any.js
@@ -0,0 +1,22 @@
+// META: global=window,worker
+
+promise_test((test) => {
+  return fetch("resources/bad-gzip-body.py").then(res => {
+    assert_equals(res.status, 200);
+  });
+}, "Fetching a resource with bad gzip content should still resolve");
+
+[
+  "arrayBuffer",
+  "blob",
+  "formData",
+  "json",
+  "text"
+].forEach(method => {
+  promise_test(t => {
+    return fetch("resources/bad-gzip-body.py").then(res => {
+      assert_equals(res.status, 200);
+      return promise_rejects_js(t, TypeError, res[method]());
+    });
+  }, "Consuming the body of a resource with bad gzip content with " + method + "() should reject");
+});
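The rejection pattern above works because a gzip decoder fails as soon as it is fed the literal bytes "not actually gzip" that the resources/bad-gzip-body.py handler (further down in this diff) returns under Content-Encoding: gzip. As a rough standalone sketch of the same failure, one could pipe the bogus payload through the standard DecompressionStream API (which the test itself does not use; this is only an illustration, runnable in a modern browser or Node 18+):

```js
// Hedged sketch, not part of the commit: a gzip decoder rejects the bogus
// payload, mirroring why res.text()/res.json()/... reject in bad-gzip-body.any.js.
const bogus = new Blob(["not actually gzip"]).stream()
  .pipeThrough(new DecompressionStream("gzip"));
new Response(bogus).text().then(
  () => console.log("unexpectedly decoded"),
  (err) => console.log("decode failed:", err.name));
```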
diff --git a/testing/web-platform/tests/fetch/content-encoding/gzip/big-gzip-body.https.any.js b/testing/web-platform/tests/fetch/content-encoding/gzip/big-gzip-body.https.any.js
new file mode 100644
index 0000000000..b5d62c9804
--- /dev/null
+++ b/testing/web-platform/tests/fetch/content-encoding/gzip/big-gzip-body.https.any.js
@@ -0,0 +1,55 @@
+// META: global=window,worker
+
+const EXPECTED_SIZE = 27000000;
+const EXPECTED_SHA256 = [
+    74, 100, 37, 243, 147, 61, 116, 60, 241, 221, 126,
+    18, 24, 71, 204, 28, 50, 62, 201, 130, 152, 225,
+    217, 183, 10, 201, 143, 214, 102, 155, 212, 248,
+  ];
+
+promise_test(async () => {
+  const response = await fetch('resources/big.text.gz');
+  assert_true(response.ok);
+  const arrayBuffer = await response.arrayBuffer();
+  assert_equals(arrayBuffer.byteLength, EXPECTED_SIZE,
+                'uncompressed size should match');
+  const sha256 = await crypto.subtle.digest('SHA-256', arrayBuffer);
+  assert_array_equals(new Uint8Array(sha256), EXPECTED_SHA256,
+                      'digest should match');
+}, 'large gzip data should be decompressed successfully');
+
+promise_test(async () => {
+  const response = await fetch('resources/big.text.gz');
+  assert_true(response.ok);
+  const reader = response.body.getReader({mode: 'byob'});
+  let offset = 0;
+  // Pre-allocate space for the output. The response body will be read
+  // chunk-by-chunk into this array.
+  let ab = new ArrayBuffer(EXPECTED_SIZE);
+  while (offset < EXPECTED_SIZE) {
+    // To stress the data pipe, we want to use a different size read each
+    // time. Unfortunately, JavaScript doesn't have a seeded random number
+    // generator, so this creates the possibility of making this test flaky if
+    // it doesn't work for some edge cases.
+    let size = Math.floor(Math.random() * 65535 + 1);
+    if (size + offset > EXPECTED_SIZE) {
+      size = EXPECTED_SIZE - offset;
+    }
+    const u8 = new Uint8Array(ab, offset, size);
+    const { value, done } = await reader.read(u8);
+    ab = value.buffer;
+    // Check that we got our original array back.
+    assert_equals(ab.byteLength, EXPECTED_SIZE,
+                  'backing array should be the same size');
+    assert_equals(offset, value.byteOffset, 'offset should match');
+    assert_less_than_equal(value.byteLength, size,
+                           'we should not have got more than we asked for');
+    offset = value.byteOffset + value.byteLength;
+    if (done) break;
+  }
+  assert_equals(offset, EXPECTED_SIZE,
+                'we should have read the whole thing');
+  const sha256 = await crypto.subtle.digest('SHA-256', new Uint8Array(ab));
+  assert_array_equals(new Uint8Array(sha256), EXPECTED_SHA256,
+                      'digest should match');
+}, 'large gzip data should be decompressed successfully with byte stream');
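The comment inside the BYOB loop above notes that JavaScript has no built-in seeded random number generator, so the varying read sizes are not reproducible across runs. If deterministic chunk sizes were ever wanted for debugging, a tiny userland PRNG such as mulberry32 would suffice; this is an illustrative sketch only, not something the test uses:

```js
// Illustrative only: a seeded PRNG (mulberry32) would make the read sizes
// repeatable across runs, unlike Math.random() in the test above.
function mulberry32(seed) {
  return function () {
    seed |= 0;
    seed = (seed + 0x6D2B79F5) | 0;
    let t = Math.imul(seed ^ (seed >>> 15), 1 | seed);
    t = (t + Math.imul(t ^ (t >>> 7), 61 | t)) ^ t;
    return ((t ^ (t >>> 14)) >>> 0) / 4294967296;
  };
}
const rand = mulberry32(42);               // fixed seed => same sequence every run
let size = Math.floor(rand() * 65535 + 1); // deterministic stand-in for the random read size
console.log(size);
```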
diff --git a/testing/web-platform/tests/fetch/content-encoding/gzip/gzip-body.any.js b/testing/web-platform/tests/fetch/content-encoding/gzip/gzip-body.any.js
new file mode 100644
index 0000000000..37758b7d91
--- /dev/null
+++ b/testing/web-platform/tests/fetch/content-encoding/gzip/gzip-body.any.js
@@ -0,0 +1,16 @@
+// META: global=window,worker
+
+const expectedDecompressedSize = 10500;
+[
+  "text",
+  "octetstream"
+].forEach(contentType => {
+  promise_test(async t => {
+    let response = await fetch(`resources/foo.${contentType}.gz`);
+    assert_true(response.ok);
+    let arrayBuffer = await response.arrayBuffer()
+    let u8 = new Uint8Array(arrayBuffer);
+    assert_equals(u8.length, expectedDecompressedSize);
+  }, `fetched gzip data with content type ${contentType} should be decompressed.`);
+});
+
diff --git a/testing/web-platform/tests/fetch/content-encoding/gzip/resources/bad-gzip-body.py b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/bad-gzip-body.py
new file mode 100644
index 0000000000..a79b94ed04
--- /dev/null
+++ b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/bad-gzip-body.py
@@ -0,0 +1,3 @@
+def main(request, response):
+    headers = [(b"Content-Encoding", b"gzip")]
+    return headers, b"not actually gzip"
diff --git a/testing/web-platform/tests/fetch/content-encoding/gzip/resources/big.text.gz b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/big.text.gz
new file mode 100644
index 0000000000..13441bc399
--- /dev/null
+++ b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/big.text.gz
Binary files differ
diff --git a/testing/web-platform/tests/fetch/content-encoding/gzip/resources/big.text.gz.headers b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/big.text.gz.headers
new file mode 100644
index 0000000000..55d2345c23
--- /dev/null
+++ b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/big.text.gz.headers
@@ -0,0 +1,3 @@
+Content-type: text/plain
+Content-Encoding: gzip
+Cache-Control: no-store
diff --git a/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.octetstream.gz b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.octetstream.gz
new file mode 100644
index 0000000000..f3df4cb89b
--- /dev/null
+++ b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.octetstream.gz
Binary files differ
diff --git a/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.octetstream.gz.headers b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.octetstream.gz.headers
new file mode 100644
index 0000000000..27d4f401f1
--- /dev/null
+++ b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.octetstream.gz.headers
@@ -0,0 +1,2 @@
+Content-type: application/octet-stream
+Content-Encoding: gzip
diff --git a/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.text.gz b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.text.gz
new file mode 100644
index 0000000000..05a5cce07b
--- /dev/null
+++ b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.text.gz
Binary files differ
diff --git a/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.text.gz.headers b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.text.gz.headers
new file mode 100644
index 0000000000..7def3ddc14
--- /dev/null
+++ b/testing/web-platform/tests/fetch/content-encoding/gzip/resources/foo.text.gz.headers
@@ -0,0 +1,2 @@
+Content-type: text/plain
+Content-Encoding: gzip
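The .gz fixtures above are binary and opaque in this diff; only their expected properties appear in the tests (EXPECTED_SIZE and EXPECTED_SHA256 for big.text.gz, expectedDecompressedSize for foo.*.gz), and the sidecar .headers files are what cause the test server to deliver them with Content-Encoding: gzip. A Node sketch along the following lines could regenerate a comparable fixture and recompute such constants; the payload is a placeholder, since the real file contents are not part of this diff:

```js
// Hypothetical fixture helper, not part of the commit. The payload is a
// stand-in; the real foo.text.gz / big.text.gz contents are not in this diff.
const fs = require("node:fs");
const zlib = require("node:zlib");
const crypto = require("node:crypto");

const payload = Buffer.alloc(10500, "a");              // e.g. expectedDecompressedSize bytes of 'a'
fs.writeFileSync("foo.text.gz", zlib.gzipSync(payload));

// Values a test would assert for this placeholder payload:
console.log("size:", payload.length);
console.log("sha256 bytes:", [...crypto.createHash("sha256").update(payload).digest()]);
```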