Diffstat (limited to 'testing/web-platform/tests/fetch/compression-dictionary')
11 files changed, 492 insertions, 0 deletions
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/dictionary-clear-site-data.tentative.https.html b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-clear-site-data.tentative.https.html
new file mode 100644
index 0000000000..b583834831
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-clear-site-data.tentative.https.html
@@ -0,0 +1,54 @@
+<!DOCTYPE html>
+<head>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="./resources/compression-dictionary-util.js"></script>
+</head>
+<body>
+<script>
+
+compression_dictionary_promise_test(async (t) => {
+  const dict = await (await fetch(kRegisterDictionaryPath)).text();
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+  // Clear site data.
+  assert_equals(await clearSiteData(/*directive=*/'cache'), 'OK');
+  // Check that `available-dictionary` header is no longer available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {max_retry: 0}),
+      '"available-dictionary" header is not available');
+}, 'Clear-Site-Data with "cache" directive must unregister dictionary');
+
+compression_dictionary_promise_test(async (t) => {
+  const dict = await (await fetch(kRegisterDictionaryPath)).text();
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+  // Clear site data.
+  assert_equals(await clearSiteData(/*directive=*/'cookies'), 'OK');
+  // Check that `available-dictionary` header is no longer available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {max_retry: 0}),
+      '"available-dictionary" header is not available');
+}, 'Clear-Site-Data with "cookies" directive must unregister dictionary');
+
+compression_dictionary_promise_test(async (t) => {
+  const dict = await (await fetch(kRegisterDictionaryPath)).text();
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+  // Clear site data.
+  assert_equals(await clearSiteData(/*directive=*/'storage'), 'OK');
+  // Check that `available-dictionary` header is still available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {max_retry: 0}),
+      kDefaultDictionaryHashBase64);
+}, 'Clear-Site-Data with "storage" directive must not unregister dictionary');
+
+</script>
+</body>
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/dictionary-decompression.tentative.https.html b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-decompression.tentative.https.html
new file mode 100644
index 0000000000..cd20625816
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-decompression.tentative.https.html
@@ -0,0 +1,57 @@
+<!DOCTYPE html>
+<head>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/common/get-host-info.sub.js"></script>
+<script src="./resources/compression-dictionary-util.js"></script>
+</head>
+<body>
+<script>
+
+compression_dictionary_promise_test(async (t) => {
+  const dict = await (await fetch(kRegisterDictionaryPath)).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+
+  // Check if the data compressed using Brotli with the dictionary can be
+  // decompressed.
+  const data_url = `${kCompressedDataPath}?content_encoding=br-d`;
+  assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
+}, 'Decompression using Brotli with the dictionary works as expected');
+
+compression_dictionary_promise_test(async (t) => {
+  const dict = await (await fetch(kRegisterDictionaryPath)).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+
+  // Check if the data compressed using Zstandard with the dictionary can be
+  // decompressed.
+  const data_url = `${kCompressedDataPath}?content_encoding=zstd-d`;
+  assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
+}, 'Decompression using Zstandard with the dictionary works as expected');
+
+compression_dictionary_promise_test(async (t) => {
+  const dict =
+      await (await fetch(getRemoteHostUrl(kRegisterDictionaryPath))).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {check_remote: true}),
+      kDefaultDictionaryHashBase64);
+
+  // Check if the data compressed using Brotli with the dictionary can be
+  // decompressed.
+  const data_url =
+      getRemoteHostUrl(`${kCompressedDataPath}?content_encoding=br-d`);
+  assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
+}, 'Decompression of a cross origin resource works as expected');
+
+</script>
+</body>
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-element.tentative.https.html b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-element.tentative.https.html
new file mode 100644
index 0000000000..71a9b1c050
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-element.tentative.https.html
@@ -0,0 +1,70 @@
+<!DOCTYPE html>
+<head>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/common/get-host-info.sub.js"></script>
+<script src="/common/utils.js"></script>
+<script src="./resources/compression-dictionary-util.js"></script>
+</head>
+<body>
+<script>
+
+function addLinkRelDictionaryElement(url, crossOrigin) {
+  const link = document.createElement('link');
+  link.rel = 'dictionary';
+  link.href = url;
+  if (crossOrigin) {
+    link.crossOrigin = crossOrigin;
+  }
+  document.head.appendChild(link);
+}
+
+test(t => {
+  assert_true(document.createElement('link').relList.supports('dictionary'));
+}, "Browser supports link element with dictionary rel.");
+
+compression_dictionary_promise_test(async (t) => {
+  const dict_token = token();
+  const url = `${kRegisterDictionaryPath}?save_header=${dict_token}`;
+  addLinkRelDictionaryElement(url);
+  // Wait for a while to ensure that the dictionary is fetched.
+  await new Promise(resolve => window.requestIdleCallback(resolve));
+  const headers = await waitUntilPreviousRequestHeaders(t, dict_token);
+  assert_true(headers !== undefined, 'Headers should be available');
+  assert_equals(headers['sec-fetch-mode'], 'cors');
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+  // Check if the data compressed using Brotli with the dictionary can be
+  // decompressed.
+  const data_url = `${kCompressedDataPath}?content_encoding=br-d`;
+  assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
+}, 'Fetch same origin dictionary using link element');
+
+compression_dictionary_promise_test(async (t) => {
+  const dict_token = token();
+  const url =
+      getRemoteHostUrl(`${kRegisterDictionaryPath}?save_header=${dict_token}`);
+  addLinkRelDictionaryElement(url, 'anonymous');
+  // Wait for a while to ensure that the dictionary is fetched.
+  await new Promise(resolve => window.requestIdleCallback(resolve));
+  const headers = await waitUntilPreviousRequestHeaders(
+      t, dict_token, /*check_remote=*/ true);
+  assert_true(headers !== undefined, 'Headers should be available');
+  assert_equals(headers['sec-fetch-mode'], 'cors');
+
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {check_remote: true}),
+      kDefaultDictionaryHashBase64);
+  // Check if the data compressed using Brotli with the dictionary can be
+  // decompressed.
+  const data_url =
+      getRemoteHostUrl(`${kCompressedDataPath}?content_encoding=br-d`);
+  assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
+}, 'Fetch cross origin dictionary using link element');
+
+</script>
+</body>
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-header.tentative.https.html b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-header.tentative.https.html
new file mode 100644
index 0000000000..a3ffd8ba74
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-header.tentative.https.html
@@ -0,0 +1,51 @@
+<!DOCTYPE html>
+<head>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/common/get-host-info.sub.js"></script>
+<script src="/common/utils.js"></script>
+<script src="./resources/compression-dictionary-util.js"></script>
+</head>
+<body>
+<script>
+
+async function addIframeWithLinkRelDictionaryHeader(dict_url) {
+  return new Promise((resolve) => {
+    const base_page_url = './resources/empty.html';
+    const page_url =
+        base_page_url + `?pipe=header(link,<${dict_url}>; rel="dictionary")`;
+    const iframe = document.createElement('iframe');
+    iframe.src = page_url;
+    iframe.addEventListener('load', () => {
+      resolve(iframe);
+    });
+    document.body.appendChild(iframe);
+  })
+}
+
+compression_dictionary_promise_test(async (t) => {
+  const dict_token = token();
+  const url = new URL(
+      `${kRegisterDictionaryPath}?save_header=${dict_token}`, location.href);
+  const iframe = await addIframeWithLinkRelDictionaryHeader(url.href);
+  t.add_cleanup(() => {
+    iframe.remove();
+  });
+  // Wait for a while to ensure that the dictionary is fetched.
+  await new Promise(resolve => window.requestIdleCallback(resolve));
+  const headers = await waitUntilPreviousRequestHeaders(t, dict_token);
+  assert_true(headers !== undefined, 'Headers should be available');
+  assert_equals(headers['sec-fetch-mode'], 'cors');
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+  // Check if the data compressed using Brotli with the dictionary can be
+  // decompressed.
+  const data_url = `${kCompressedDataPath}?content_encoding=br-d`;
+  assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
+}, 'Fetch same origin dictionary using link header');
+
+</script>
+</body>
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/dictionary-registration.tentative.https.html b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-registration.tentative.https.html
new file mode 100644
index 0000000000..7921b12946
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-registration.tentative.https.html
@@ -0,0 +1,60 @@
+<!DOCTYPE html>
+<head>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="./resources/compression-dictionary-util.js"></script>
+</head>
+<body>
+<script>
+
+compression_dictionary_promise_test(async (t) => {
+  const dict = await (await fetch(kRegisterDictionaryPath)).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+}, 'Simple dictionary registration and unregistration');
+
+compression_dictionary_promise_test(async (t) => {
+  const dict = await (await fetch(`${kRegisterDictionaryPath}?id=test`)).text();
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+  assert_equals((await checkHeaders())['dictionary-id'], '"test"');
+}, 'Dictionary registration with dictionary ID');
+
+compression_dictionary_promise_test(async (t) => {
+  // Registers a first dictionary.
+  const dictionary_path1 = `${kRegisterDictionaryPath}?id=id1`;
+  const dict1 = await (await fetch(dictionary_path1)).text();
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+  // Check the `dictionary-id` header.
+  assert_equals((await checkHeaders())['dictionary-id'], '"id1"');
+
+  // Registers a second dictionary.
+  const kAlternativeDictionaryContent =
+      'This is an alternative test dictionary.';
+  const dictionary_path2 =
+      `${kRegisterDictionaryPath}?content=${kAlternativeDictionaryContent}&id=id2`;
+  const expected_dictionary_header =
+      await calculateDictionaryHash(kAlternativeDictionaryContent);
+  const dict2 = await (await fetch(dictionary_path2)).text();
+  assert_equals(dict2, kAlternativeDictionaryContent);
+  // Wait until `available-dictionary` header is available.
+  // Note: Passing `expected_header` to ignore the old dictionary.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(
+          t, {expected_header: expected_dictionary_header}),
+      expected_dictionary_header);
+  // Check the `dictionary-id` header.
+  assert_equals((await checkHeaders())['dictionary-id'], '"id2"');
+}, 'New dictionary registration overrides the existing one');
+
+</script>
+</body>
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/clear-site-data.py b/testing/web-platform/tests/fetch/compression-dictionary/resources/clear-site-data.py
new file mode 100644
index 0000000000..0db51bf797
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/clear-site-data.py
@@ -0,0 +1,4 @@
+def main(request, response):
+    directive = request.GET.first(b"directive")
+    response.headers.set(b"Clear-Site-Data", b"\"" + directive + b"\"")
+    return b"OK"
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/compressed-data.py b/testing/web-platform/tests/fetch/compression-dictionary/resources/compressed-data.py
new file mode 100644
index 0000000000..4be4b55564
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/compressed-data.py
@@ -0,0 +1,28 @@
+def main(request, response):
+    response.headers.set(b"Access-Control-Allow-Origin", b"*")
+    response.headers.set(b"Content-Type", b"text/plain")
+    response.headers.set(
+        b"Content-Dictionary",
+        b":U5abz16WDg7b8KS93msLPpOB4Vbef1uRzoORYkJw9BY=:")
+
+    # `br_d_data` and `zstd_d_data` are generated using the following commands:
+    #
+    # $ echo "This is a test dictionary." > /tmp/dict
+    # $ echo -n "This is compressed test data using a test dictionary" \
+    #     > /tmp/data
+    # $ brotli -o /tmp/out.brd -D /tmp/dict /tmp/data
+    # $ xxd -p /tmp/out.brd | tr -d '\n' | sed 's/\(..\)/\\x\1/g'
+    br_d_data = b"\xa1\x98\x01\x80\x62\xa4\x4c\x1d\xdf\x12\x84\x8c\xae\xc2\xca\x60\x22\x07\x6e\x81\x05\x14\xc9\xb7\xc3\x44\x8e\xbc\x16\xe0\x15\x0e\xec\xc1\xee\x34\x33\x3e\x0d"
+    # $ zstd -o /tmp/out.zstdd -D /tmp/dict /tmp/data
+    # $ xxd -p /tmp/out.zstdd | tr -d '\n' | sed 's/\(..\)/\\x\1/g'
+    zstd_d_data = b"\x28\xb5\x2f\xfd\x24\x34\xf5\x00\x00\x98\x63\x6f\x6d\x70\x72\x65\x73\x73\x65\x64\x61\x74\x61\x20\x75\x73\x69\x6e\x67\x03\x00\x59\xf9\x73\x54\x46\x27\x26\x10\x9e\x99\xf2\xbc"
+
+    if b'content_encoding' in request.GET:
+        content_encoding = request.GET.first(b"content_encoding")
+        response.headers.set(b"Content-Encoding", content_encoding)
+        if content_encoding == b"br-d":
+            # Send the pre-compressed file
+            response.content = br_d_data
+        if content_encoding == b"zstd-d":
+            # Send the pre-compressed file
+            response.content = zstd_d_data
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/compression-dictionary-util.js b/testing/web-platform/tests/fetch/compression-dictionary/resources/compression-dictionary-util.js
new file mode 100644
index 0000000000..46d95041d8
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/compression-dictionary-util.js
@@ -0,0 +1,120 @@
+
+const kDefaultDictionaryContent = 'This is a test dictionary.\n';
+const kDefaultDictionaryHashBase64 =
+    ':U5abz16WDg7b8KS93msLPpOB4Vbef1uRzoORYkJw9BY=:';
+const kRegisterDictionaryPath = './resources/register-dictionary.py';
+const kCompressedDataPath = './resources/compressed-data.py';
+const kExpectedCompressedData =
+    `This is compressed test data using a test dictionary`;
+const kCheckAvailableDictionaryHeaderMaxRetry = 5;
+const kCheckAvailableDictionaryHeaderRetryTimeout = 100;
+const kCheckPreviousRequestHeadersMaxRetry = 5;
+const kCheckPreviousRequestHeadersRetryTimeout = 250;
+
+// Gets the remote URL corresponding to `relative_path`.
+function getRemoteHostUrl(relative_path) {
+  const remote_origin = new URL(get_host_info().HTTPS_REMOTE_ORIGIN);
+  let result = new URL(relative_path, location.href);
+  result.protocol = remote_origin.protocol;
+  result.hostname = remote_origin.hostname;
+  result.port = remote_origin.port;
+  return result.href;
+}
+
+// Calculates the Structured Field Byte Sequence containing the SHA-256 hash of
+// the contents of the dictionary text.
+async function calculateDictionaryHash(dictionary_text) {
+  const encoded = (new TextEncoder()).encode(dictionary_text);
+  const digest = await crypto.subtle.digest('SHA-256', encoded);
+  return ':' + btoa(String.fromCharCode(...new Uint8Array(digest))) + ':';
+}
+
+// Checks the HTTP request headers which are sent to the server.
+async function checkHeaders(check_remote = false) {
+  let url = './resources/echo-headers.py';
+  if (check_remote) {
+    url = getRemoteHostUrl(url);
+  }
+  return await (await fetch(url)).json();
+}
+
+// Checks the "available-dictionary" header in the HTTP request headers.
+async function checkAvailableDictionaryHeader(check_remote = false) {
+  return (await checkHeaders(check_remote))['available-dictionary'];
+}
+
+// Waits until the "available-dictionary" header is available in the HTTP
+// request headers, and returns the header. If the header is not available
+// after the specified number of retries, returns an error message. If
+// `expected_header` is specified, this method waits until the header is
+// available and matches the `expected_header`.
+async function waitUntilAvailableDictionaryHeader(test, {
+  max_retry = kCheckAvailableDictionaryHeaderMaxRetry,
+  expected_header = undefined,
+  check_remote = false
+}) {
+  for (let retry_count = 0; retry_count <= max_retry; retry_count++) {
+    const header = await checkAvailableDictionaryHeader(check_remote);
+    if (header) {
+      if (expected_header === undefined || header == expected_header) {
+        return header;
+      }
+    }
+    await new Promise(
+        (resolve) => test.step_timeout(
+            resolve, kCheckAvailableDictionaryHeaderRetryTimeout));
+  }
+  return '"available-dictionary" header is not available';
+}
+
+// Checks the HTTP request headers which were sent to the server with `token`
+// to register a dictionary.
+async function checkPreviousRequestHeaders(token, check_remote = false) {
+  let url = `./resources/register-dictionary.py?get_previous_header=${token}`;
+  if (check_remote) {
+    url = getRemoteHostUrl(url);
+  }
+  return await (await fetch(url)).json();
+}
+
+// Waits until the HTTP request headers which were sent to the server with
+// `token` to register a dictionary are available, and returns the headers. If
+// the headers are not available after the specified number of retries, returns
+// `undefined`.
+async function waitUntilPreviousRequestHeaders(
+    test, token, check_remote = false) {
+  for (let retry_count = 0; retry_count <= kCheckPreviousRequestHeadersMaxRetry;
+       retry_count++) {
+    const header =
+        (await checkPreviousRequestHeaders(token, check_remote))['headers'];
+    if (header) {
+      return header;
+    }
+    await new Promise(
+        (resolve) => test.step_timeout(
+            resolve, kCheckPreviousRequestHeadersRetryTimeout));
+  }
+  return undefined;
+}
+
+// Clears the site data for the specified directive by sending a request to
+// `./resources/clear-site-data.py`, which returns a `Clear-Site-Data` response
+// header.
+// Note: When 'cache' or 'cookies' is specified as `directive`, registered
+// compression dictionaries should also be cleared.
+async function clearSiteData(directive = 'cache') {
+  return await (await fetch(
+      `./resources/clear-site-data.py?directive=${directive}`))
+      .text();
+}
+
+// A utility test method that adds the `clearSiteData()` method to the
+// testharness cleanup function. This is intended to ensure that registered
+// dictionaries are cleared in tests and that registered dictionaries do not
+// interfere with subsequent tests.
+function compression_dictionary_promise_test(func, name, properties) {
+  promise_test(async (test) => {
+    test.add_cleanup(clearSiteData);
+    await func(test);
+  }, name, properties);
+}
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/echo-headers.py b/testing/web-platform/tests/fetch/compression-dictionary/resources/echo-headers.py
new file mode 100644
index 0000000000..aabd99eb10
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/echo-headers.py
@@ -0,0 +1,10 @@
+import json
+
+def main(request, response):
+    response.headers.set(b"Access-Control-Allow-Origin", b"*")
+    headers = {}
+    for header in request.headers:
+        key = header.decode('utf-8')
+        value = request.headers.get(header).decode('utf-8')
+        headers[key] = value
+    return json.dumps(headers)
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/empty.html b/testing/web-platform/tests/fetch/compression-dictionary/resources/empty.html
new file mode 100644
index 0000000000..0e76edd65b
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/empty.html
@@ -0,0 +1 @@
+<!DOCTYPE html>
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/register-dictionary.py b/testing/web-platform/tests/fetch/compression-dictionary/resources/register-dictionary.py
new file mode 100644
index 0000000000..0bd57225ef
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/register-dictionary.py
@@ -0,0 +1,37 @@
+import json
+
+def main(request, response):
+    response.headers.set(b"Access-Control-Allow-Origin", b"*")
+    match = b"/fetch/compression-dictionary/resources/*"
+    content = b"This is a test dictionary.\n"
+    if b"match" in request.GET:
+        match = request.GET.first(b"match")
+    if b"content" in request.GET:
+        content = request.GET.first(b"content")
+
+    token = request.GET.first(b"save_header", None)
+    if token is not None:
+        headers = {}
+        for header in request.headers:
+            key = header.decode('utf-8')
+            value = request.headers.get(header).decode('utf-8')
+            headers[key] = value
+        with request.server.stash.lock:
+            request.server.stash.put(token, json.dumps(headers))
+
+    previous_token = request.GET.first(b"get_previous_header", None)
+    if previous_token is not None:
+        result = {}
+        with request.server.stash.lock:
+            store = request.server.stash.take(previous_token)
+            if store is not None:
+                headers = json.loads(store)
+                result["headers"] = headers
+        return json.dumps(result)
+
+    options = b"match=\"" + match + b"\""
+    if b"id" in request.GET:
+        options += b", id=\"" + request.GET.first(b"id") + b"\""
+    response.headers.set(b"Use-As-Dictionary", options)
+    response.headers.set(b"Cache-Control", b"max-age=3600")
+    return content
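
For reference, the hash string that recurs throughout these files, kDefaultDictionaryHashBase64 in compression-dictionary-util.js, the Content-Dictionary response header in resources/compressed-data.py, and the available-dictionary request header the tests poll for, is the same value: a Structured Field Byte Sequence wrapping the base64-encoded SHA-256 digest of the dictionary bytes, which is what calculateDictionaryHash() computes with WebCrypto. Below is a minimal standalone Python sketch of that computation; it assumes only the standard library, and the helper name dictionary_hash is illustrative rather than anything used by the tests above.

import base64
import hashlib

def dictionary_hash(dictionary_bytes):
    # Structured Field Byte Sequence: ':' + base64(SHA-256(dictionary)) + ':'
    digest = hashlib.sha256(dictionary_bytes).digest()
    return ":" + base64.b64encode(digest).decode("ascii") + ":"

# The default dictionary served by resources/register-dictionary.py; this
# should print the value of kDefaultDictionaryHashBase64:
# :U5abz16WDg7b8KS93msLPpOB4Vbef1uRzoORYkJw9BY=:
print(dictionary_hash(b"This is a test dictionary.\n"))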