author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-05-15 03:34:50 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-05-15 03:34:50 +0000
commit     def92d1b8e9d373e2f6f27c366d578d97d8960c6 (patch)
tree       2ef34b9ad8bb9a9220e05d60352558b15f513894 /testing/web-platform/tests/fetch
parent     Adding debian version 125.0.3-1. (diff)
Merging upstream version 126.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/web-platform/tests/fetch')
49 files changed, 1011 insertions, 176 deletions
diff --git a/testing/web-platform/tests/fetch/api/credentials/authentication-redirection.any.js b/testing/web-platform/tests/fetch/api/credentials/authentication-redirection.any.js index 16656b5435..5a15507437 100644 --- a/testing/web-platform/tests/fetch/api/credentials/authentication-redirection.any.js +++ b/testing/web-platform/tests/fetch/api/credentials/authentication-redirection.any.js @@ -24,6 +24,6 @@ promise_test(async test => { }, "getAuthorizationHeaderValue - same origin redirection"); promise_test(async (test) => { - const result = await getAuthorizationHeaderValue(get_host_info().HTTPS_REMOTE_ORIGIN + "/fetch/api/resources/redirect.py?allow_headers=Authorization&location=" + encodeURIComponent(get_host_info().HTTPS_ORIGIN + "/fetch/api/resources/dump-authorization-header.py")); + const result = await getAuthorizationHeaderValue(get_host_info().HTTPS_REMOTE_ORIGIN + "/fetch/api/resources/redirect.py?allow_headers=Authorization&location=" + encodeURIComponent(get_host_info().HTTPS_ORIGIN + "/fetch/api/resources/dump-authorization-header.py?strip_auth_header=true")); assert_equals(result, "none"); }, "getAuthorizationHeaderValue - cross origin redirection"); diff --git a/testing/web-platform/tests/fetch/api/request/destination/resources/dummy_video.ogv b/testing/web-platform/tests/fetch/api/request/destination/resources/dummy_video.ogv Binary files differdeleted file mode 100644 index de99616ece..0000000000 --- a/testing/web-platform/tests/fetch/api/request/destination/resources/dummy_video.ogv +++ /dev/null diff --git a/testing/web-platform/tests/fetch/api/request/request-bad-port.any.js b/testing/web-platform/tests/fetch/api/request/request-bad-port.any.js index b0684d4be0..5c29823eaa 100644 --- a/testing/web-platform/tests/fetch/api/request/request-bad-port.any.js +++ b/testing/web-platform/tests/fetch/api/request/request-bad-port.any.js @@ -72,6 +72,7 @@ var BLOCKED_PORTS_LIST = [ 2049, // nfs 3659, // apple-sasl 4045, // lockd + 4190, // sieve 5060, // sip 5061, // sips 6000, // x11 @@ -81,6 +82,7 @@ var BLOCKED_PORTS_LIST = [ 6667, // irc (default) 6668, // irc (alternate) 6669, // irc (alternate) + 6679, // osaut 6697, // irc+tls 10080, // amanda ]; diff --git a/testing/web-platform/tests/fetch/api/resources/dump-authorization-header.py b/testing/web-platform/tests/fetch/api/resources/dump-authorization-header.py index a651aeb4e8..0d82809f59 100644 --- a/testing/web-platform/tests/fetch/api/resources/dump-authorization-header.py +++ b/testing/web-platform/tests/fetch/api/resources/dump-authorization-header.py @@ -2,6 +2,11 @@ def main(request, response): headers = [(b"Content-Type", "text/html"), (b"Cache-Control", b"no-cache")] + if (request.GET.first(b"strip_auth_header", False) and request.method == "OPTIONS" and + b"authorization" in request.headers.get(b"Access-Control-Request-Headers", b"").lower()): + # Auth header should not be sent for preflight after cross-origin redirect. 
+ return 500, headers, "fail" + if b"Origin" in request.headers: headers.append((b"Access-Control-Allow-Origin", request.headers.get(b"Origin", b""))) headers.append((b"Access-Control-Allow-Credentials", b"true")) diff --git a/testing/web-platform/tests/fetch/compression-dictionary/dictionary-clear-site-data.tentative.https.html b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-clear-site-data.tentative.https.html new file mode 100644 index 0000000000..b583834831 --- /dev/null +++ b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-clear-site-data.tentative.https.html @@ -0,0 +1,54 @@ +<!DOCTYPE html> +<head> +<meta charset="utf-8"> +<script src="/resources/testharness.js"></script> +<script src="/resources/testharnessreport.js"></script> +<script src="./resources/compression-dictionary-util.js"></script> +</head> +<body> +<script> + +compression_dictionary_promise_test(async (t) => { + const dict = await (await fetch(kRegisterDictionaryPath)).text(); + // Wait until `available-dictionary` header is available. + assert_equals( + await waitUntilAvailableDictionaryHeader(t, {}), + kDefaultDictionaryHashBase64); + // Clear site data. + assert_equals(await clearSiteData(/*directive=*/'cache'), 'OK'); + // Check if `available-dictionary` header is not available. + assert_equals( + await waitUntilAvailableDictionaryHeader(t, {max_retry: 0}), + '"available-dictionary" header is not available'); +}, 'Clear-Site-Data with "cache" directive must unregister dictionary'); + +compression_dictionary_promise_test(async (t) => { + const dict = await (await fetch(kRegisterDictionaryPath)).text(); + // Wait until `available-dictionary` header is available. + assert_equals( + await waitUntilAvailableDictionaryHeader(t, {}), + kDefaultDictionaryHashBase64); + // Clear site data. + assert_equals(await clearSiteData(/*directive=*/'cookies'), 'OK'); + // Check if `available-dictionary` header is not available. + assert_equals( + await waitUntilAvailableDictionaryHeader(t, {max_retry: 0}), + '"available-dictionary" header is not available'); +}, 'Clear-Site-Data with "cookies" directive must unregister dictionary'); + +compression_dictionary_promise_test(async (t) => { + const dict = await (await fetch(kRegisterDictionaryPath)).text(); + // Wait until `available-dictionary` header is available. + assert_equals( + await waitUntilAvailableDictionaryHeader(t, {}), + kDefaultDictionaryHashBase64); + // Clear site data. + assert_equals(await clearSiteData(/*directive=*/'storage'), 'OK'); + // Check if `available-dictionary` header is not available. 
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {max_retry: 0}),
+      kDefaultDictionaryHashBase64);
+}, 'Clear-Site-Data with "storage" directive must not unregister dictionary');
+
+</script>
+</body>
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/dictionary-decompression.tentative.https.html b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-decompression.tentative.https.html
new file mode 100644
index 0000000000..cd20625816
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-decompression.tentative.https.html
@@ -0,0 +1,57 @@
+<!DOCTYPE html>
+<head>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/common/get-host-info.sub.js"></script>
+<script src="./resources/compression-dictionary-util.js"></script>
+</head>
+<body>
+<script>
+
+compression_dictionary_promise_test(async (t) => {
+  const dict = await (await fetch(kRegisterDictionaryPath)).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+
+  // Check if the data compressed using Brotli with the dictionary can be
+  // decompressed.
+  const data_url = `${kCompressedDataPath}?content_encoding=br-d`;
+  assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
+}, 'Decompression using Brotli with the dictionary works as expected');
+
+compression_dictionary_promise_test(async (t) => {
+  const dict = await (await fetch(kRegisterDictionaryPath)).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+
+  // Check if the data compressed using Zstandard with the dictionary can be
+  // decompressed.
+  const data_url = `${kCompressedDataPath}?content_encoding=zstd-d`;
+  assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
+}, 'Decompression using Zstandard with the dictionary works as expected');
+
+compression_dictionary_promise_test(async (t) => {
+  const dict =
+      await (await fetch(getRemoteHostUrl(kRegisterDictionaryPath))).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {check_remote: true}),
+      kDefaultDictionaryHashBase64);
+
+  // Check if the data compressed using Brotli with the dictionary can be
+  // decompressed.
+  const data_url =
+      getRemoteHostUrl(`${kCompressedDataPath}?content_encoding=br-d`);
+  assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
+}, 'Decompression of a cross origin resource works as expected');
+
+</script>
+</body>
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-element.tentative.https.html b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-element.tentative.https.html
new file mode 100644
index 0000000000..71a9b1c050
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-element.tentative.https.html
@@ -0,0 +1,70 @@
+<!DOCTYPE html>
+<head>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/common/get-host-info.sub.js"></script>
+<script src="/common/utils.js"></script>
+<script src="./resources/compression-dictionary-util.js"></script>
+</head>
+<body>
+<script>
+
+function addLinkRelDictionaryElement(url, crossOrigin) {
+  const link = document.createElement('link');
+  link.rel = 'dictionary';
+  link.href = url;
+  if (crossOrigin) {
+    link.crossOrigin = crossOrigin;
+  }
+  document.head.appendChild(link);
+}
+
+test(t => {
+  assert_true(document.createElement('link').relList.supports('dictionary'));
+}, "Browser supports link element with dictionary rel.");
+
+compression_dictionary_promise_test(async (t) => {
+  const dict_token = token();
+  const url = `${kRegisterDictionaryPath}?save_header=${dict_token}`;
+  addLinkRelDictionaryElement(url);
+  // Wait for a while to ensure that the dictionary is fetched.
+  await new Promise(resolve => window.requestIdleCallback(resolve));
+  const headers = await waitUntilPreviousRequestHeaders(t, dict_token);
+  assert_true(headers !== undefined, 'Headers should be available');
+  assert_equals(headers['sec-fetch-mode'], 'cors');
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+  // Check if the data compressed using Brotli with the dictionary can be
+  // decompressed.
+  const data_url = `${kCompressedDataPath}?content_encoding=br-d`;
+  assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
+}, 'Fetch same origin dictionary using link element');
+
+compression_dictionary_promise_test(async (t) => {
+  const dict_token = token();
+  const url =
+      getRemoteHostUrl(`${kRegisterDictionaryPath}?save_header=${dict_token}`);
+  addLinkRelDictionaryElement(url, 'anonymous');
+  // Wait for a while to ensure that the dictionary is fetched.
+  await new Promise(resolve => window.requestIdleCallback(resolve));
+  const headers = await waitUntilPreviousRequestHeaders(
+      t, dict_token, /*check_remote=*/ true);
+  assert_true(headers !== undefined, 'Headers should be available');
+  assert_equals(headers['sec-fetch-mode'], 'cors');
+
+  // Wait until `available-dictionary` header is available.
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {check_remote: true}),
+      kDefaultDictionaryHashBase64);
+  // Check if the data compressed using Brotli with the dictionary can be
+  // decompressed.
+ const data_url = + getRemoteHostUrl(`${kCompressedDataPath}?content_encoding=br-d`); + assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData); +}, 'Fetch cross origin dictionary using link element'); + +</script> +</body> diff --git a/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-header.tentative.https.html b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-header.tentative.https.html new file mode 100644 index 0000000000..a3ffd8ba74 --- /dev/null +++ b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-fetch-with-link-header.tentative.https.html @@ -0,0 +1,51 @@ +<!DOCTYPE html> +<head> +<meta charset="utf-8"> +<script src="/resources/testharness.js"></script> +<script src="/resources/testharnessreport.js"></script> +<script src="/common/get-host-info.sub.js"></script> +<script src="/common/utils.js"></script> +<script src="./resources/compression-dictionary-util.js"></script> +</head> +<body> +<script> + +async function addIframeWithLinkRelDictionaryHeader(dict_url) { + return new Promise((resolve) => { + const base_page_url = './resources/empty.html'; + const page_url = + base_page_url + `?pipe=header(link,<${dict_url}>; rel="dictionary")`; + const iframe = document.createElement('iframe'); + iframe.src = page_url; + iframe.addEventListener('load', () => { + resolve(iframe); + }); + document.body.appendChild(iframe); + }) +} + +compression_dictionary_promise_test(async (t) => { + const dict_token = token(); + const url = new URL( + `${kRegisterDictionaryPath}?save_header=${dict_token}`, location.href); + const iframe = await addIframeWithLinkRelDictionaryHeader(url.href); + t.add_cleanup(() => { + iframe.remove(); + }); + // Wait for a while to ensure that the dictionary is fetched. + await new Promise(resolve => window.requestIdleCallback(resolve)); + const headers = await waitUntilPreviousRequestHeaders(t, dict_token); + assert_true(headers !== undefined, 'Headers should be available'); + assert_equals(headers['sec-fetch-mode'], 'cors'); + // Wait until `available-dictionary` header is available. + assert_equals( + await waitUntilAvailableDictionaryHeader(t, {}), + kDefaultDictionaryHashBase64); + // Check if the data compressed using Brotli with the dictionary can be + // decompressed. + const data_url = `${kCompressedDataPath}?content_encoding=br-d`; + assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData); +}, 'Fetch same origin dictionary using link header'); + +</script> +</body> diff --git a/testing/web-platform/tests/fetch/compression-dictionary/dictionary-registration.tentative.https.html b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-registration.tentative.https.html new file mode 100644 index 0000000000..7921b12946 --- /dev/null +++ b/testing/web-platform/tests/fetch/compression-dictionary/dictionary-registration.tentative.https.html @@ -0,0 +1,60 @@ +<!DOCTYPE html> +<head> +<meta charset="utf-8"> +<script src="/resources/testharness.js"></script> +<script src="/resources/testharnessreport.js"></script> +<script src="./resources/compression-dictionary-util.js"></script> +</head> +<body> +<script> + +compression_dictionary_promise_test(async (t) => { + const dict = await (await fetch(kRegisterDictionaryPath)).text(); + assert_equals(dict, kDefaultDictionaryContent); + // Wait until `available-dictionary` header is available. 
+ assert_equals( + await waitUntilAvailableDictionaryHeader(t, {}), + kDefaultDictionaryHashBase64); +}, 'Simple dictionary registration and unregistration'); + +compression_dictionary_promise_test(async (t) => { + const dict = await (await fetch(`${kRegisterDictionaryPath}?id=test`)).text(); + // Wait until `available-dictionary` header is available. + assert_equals( + await waitUntilAvailableDictionaryHeader(t, {}), + kDefaultDictionaryHashBase64); + assert_equals((await checkHeaders())['dictionary-id'], '"test"'); +}, 'Dictionary registration with dictionary ID'); + +compression_dictionary_promise_test(async (t) => { + // Registers a first dictionary. + const dictionary_path1 = `${kRegisterDictionaryPath}?id=id1`; + const dict1 = await (await fetch(dictionary_path1)).text(); + // Wait until `available-dictionary` header is available. + assert_equals( + await waitUntilAvailableDictionaryHeader(t, {}), + kDefaultDictionaryHashBase64); + // Check the `dictionary-id` header. + assert_equals((await checkHeaders())['dictionary-id'], '"id1"'); + + // Registers a second dictionary. + const kAlternativeDictionaryContent = + 'This is an alternative test dictionary.'; + const dictionary_path2 = + `${kRegisterDictionaryPath}?content=${kAlternativeDictionaryContent}&id=id2`; + const expected_dictionary_header = + await calculateDictionaryHash(kAlternativeDictionaryContent); + const dict2 = await (await fetch(dictionary_path2)).text(); + assert_equals(dict2, kAlternativeDictionaryContent); + // Wait until `available-dictionary` header is available. + // Note: Passing `expected_header` to ignore the old dictionary. + assert_equals( + await waitUntilAvailableDictionaryHeader( + t, {expected_header: expected_dictionary_header}), + expected_dictionary_header); + // Check the `dictionary-id` header. + assert_equals((await checkHeaders())['dictionary-id'], '"id2"'); +}, 'New dictionary registration overrides the existing one'); + +</script> +</body> diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/clear-site-data.py b/testing/web-platform/tests/fetch/compression-dictionary/resources/clear-site-data.py new file mode 100644 index 0000000000..0db51bf797 --- /dev/null +++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/clear-site-data.py @@ -0,0 +1,4 @@ +def main(request, response): + directive = request.GET.first(b"directive") + response.headers.set(b"Clear-Site-Data", b"\"" + directive + b"\"") + return b"OK" diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/compressed-data.py b/testing/web-platform/tests/fetch/compression-dictionary/resources/compressed-data.py new file mode 100644 index 0000000000..4be4b55564 --- /dev/null +++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/compressed-data.py @@ -0,0 +1,28 @@ +def main(request, response): + response.headers.set(b"Access-Control-Allow-Origin", b"*") + response.headers.set(b"Content-Type", b"text/plain") + response.headers.set( + b"Content-Dictionary", + b":U5abz16WDg7b8KS93msLPpOB4Vbef1uRzoORYkJw9BY=:") + + # `br_d_data` and `zstd_d_data` are generated using the following commands: + # + # $ echo "This is a test dictionary." 
> /tmp/dict
+    # $ echo -n "This is compressed test data using a test dictionary" \
+    #     > /tmp/data
+    # $ brotli -o /tmp/out.brd -D /tmp/dict /tmp/data
+    # $ xxd -p /tmp/out.brd | tr -d '\n' | sed 's/\(..\)/\\x\1/g'
+    br_d_data = b"\xa1\x98\x01\x80\x62\xa4\x4c\x1d\xdf\x12\x84\x8c\xae\xc2\xca\x60\x22\x07\x6e\x81\x05\x14\xc9\xb7\xc3\x44\x8e\xbc\x16\xe0\x15\x0e\xec\xc1\xee\x34\x33\x3e\x0d"
+    # $ zstd -o /tmp/out.zstdd -D /tmp/dict /tmp/data
+    # $ xxd -p /tmp/out.zstdd | tr -d '\n' | sed 's/\(..\)/\\x\1/g'
+    zstd_d_data = b"\x28\xb5\x2f\xfd\x24\x34\xf5\x00\x00\x98\x63\x6f\x6d\x70\x72\x65\x73\x73\x65\x64\x61\x74\x61\x20\x75\x73\x69\x6e\x67\x03\x00\x59\xf9\x73\x54\x46\x27\x26\x10\x9e\x99\xf2\xbc"
+
+    if b'content_encoding' in request.GET:
+        content_encoding = request.GET.first(b"content_encoding")
+        response.headers.set(b"Content-Encoding", content_encoding)
+        if content_encoding == b"br-d":
+            # Send the pre-compressed file
+            response.content = br_d_data
+        if content_encoding == b"zstd-d":
+            # Send the pre-compressed file
+            response.content = zstd_d_data
diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/compression-dictionary-util.js b/testing/web-platform/tests/fetch/compression-dictionary/resources/compression-dictionary-util.js
new file mode 100644
index 0000000000..46d95041d8
--- /dev/null
+++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/compression-dictionary-util.js
@@ -0,0 +1,120 @@
+
+const kDefaultDictionaryContent = 'This is a test dictionary.\n';
+const kDefaultDictionaryHashBase64 =
+    ':U5abz16WDg7b8KS93msLPpOB4Vbef1uRzoORYkJw9BY=:';
+const kRegisterDictionaryPath = './resources/register-dictionary.py';
+const kCompressedDataPath = './resources/compressed-data.py';
+const kExpectedCompressedData =
+    `This is compressed test data using a test dictionary`;
+const kCheckAvailableDictionaryHeaderMaxRetry = 5;
+const kCheckAvailableDictionaryHeaderRetryTimeout = 100;
+const kCheckPreviousRequestHeadersMaxRetry = 5;
+const kCheckPreviousRequestHeadersRetryTimeout = 250;
+
+// Gets the remote URL corresponding to `relative_path`.
+function getRemoteHostUrl(relative_path) {
+  const remote_origin = new URL(get_host_info().HTTPS_REMOTE_ORIGIN);
+  let result = new URL(relative_path, location.href);
+  result.protocol = remote_origin.protocol;
+  result.hostname = remote_origin.hostname;
+  result.port = remote_origin.port;
+  return result.href;
+}
+
+// Calculates the Structured Field Byte Sequence containing the SHA-256 hash of
+// the contents of the dictionary text.
+async function calculateDictionaryHash(dictionary_text) {
+  const encoded = (new TextEncoder()).encode(dictionary_text);
+  const digest = await crypto.subtle.digest('SHA-256', encoded);
+  return ':' + btoa(String.fromCharCode(...new Uint8Array(digest))) + ':';
+}
+
+// Checks the HTTP request headers which are sent to the server.
+async function checkHeaders(check_remote = false) {
+  let url = './resources/echo-headers.py';
+  if (check_remote) {
+    url = getRemoteHostUrl(url);
+  }
+  return await (await fetch(url)).json();
+}
+
+// Checks the "available-dictionary" header in the HTTP request headers.
+async function checkAvailableDictionaryHeader(check_remote = false) {
+  return (await checkHeaders(check_remote))['available-dictionary'];
+}
+
+// Waits until the "available-dictionary" header is available in the HTTP
+// request headers, and returns the header. If the header is not available
+// after the specified number of retries, returns an error message. If
+// `expected_header` is specified, this method waits until the header is
+// available and matches `expected_header`.
+async function waitUntilAvailableDictionaryHeader(test, {
+  max_retry = kCheckAvailableDictionaryHeaderMaxRetry,
+  expected_header = undefined,
+  check_remote = false
+}) {
+  for (let retry_count = 0; retry_count <= max_retry; retry_count++) {
+    const header = await checkAvailableDictionaryHeader(check_remote);
+    if (header) {
+      if (expected_header === undefined || header == expected_header) {
+        return header;
+      }
+    }
+    await new Promise(
+        (resolve) => test.step_timeout(
+            resolve, kCheckAvailableDictionaryHeaderRetryTimeout));
+  }
+  return '"available-dictionary" header is not available';
+}
+
+// Checks the HTTP request headers which were sent to the server with `token`
+// to register a dictionary.
+async function checkPreviousRequestHeaders(token, check_remote = false) {
+  let url = `./resources/register-dictionary.py?get_previous_header=${token}`;
+  if (check_remote) {
+    url = getRemoteHostUrl(url);
+  }
+  return await (await fetch(url)).json();
+}
+
+// Waits until the HTTP request headers which were sent to the server with
+// `token` to register a dictionary are available, and returns them. If the
+// headers are not available after the specified number of retries, returns
+// `undefined`.
+async function waitUntilPreviousRequestHeaders(
+    test, token, check_remote = false) {
+  for (let retry_count = 0; retry_count <= kCheckPreviousRequestHeadersMaxRetry;
+       retry_count++) {
+    const header =
+        (await checkPreviousRequestHeaders(token, check_remote))['headers'];
+    if (header) {
+      return header;
+    }
+    await new Promise(
+        (resolve) => test.step_timeout(
+            resolve, kCheckPreviousRequestHeadersRetryTimeout));
+  }
+  return undefined;
+}
+
+// Clears the site data for the specified directive by sending a request to
+// `./resources/clear-site-data.py`, which returns a `Clear-Site-Data` response
+// header.
+// Note: When 'cache' or 'cookies' is specified as `directive`, registered
+// compression dictionaries should also be cleared.
+async function clearSiteData(directive = 'cache') {
+  return await (await fetch(
+      `./resources/clear-site-data.py?directive=${directive}`))
+      .text();
+}
+
+// A utility test function that adds `clearSiteData()` to the testharness
+// cleanup function. This is intended to ensure that registered dictionaries
+// are cleared in tests and that registered dictionaries do not interfere
+// with subsequent tests.
+function compression_dictionary_promise_test(func, name, properties) { + promise_test(async (test) => { + test.add_cleanup(clearSiteData); + await func(test); + }, name, properties); +} diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/echo-headers.py b/testing/web-platform/tests/fetch/compression-dictionary/resources/echo-headers.py new file mode 100644 index 0000000000..aabd99eb10 --- /dev/null +++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/echo-headers.py @@ -0,0 +1,10 @@ +import json + +def main(request, response): + response.headers.set(b"Access-Control-Allow-Origin", b"*") + headers = {} + for header in request.headers: + key = header.decode('utf-8') + value = request.headers.get(header).decode('utf-8') + headers[key] = value + return json.dumps(headers) diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/empty.html b/testing/web-platform/tests/fetch/compression-dictionary/resources/empty.html new file mode 100644 index 0000000000..0e76edd65b --- /dev/null +++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/empty.html @@ -0,0 +1 @@ +<!DOCTYPE html> diff --git a/testing/web-platform/tests/fetch/compression-dictionary/resources/register-dictionary.py b/testing/web-platform/tests/fetch/compression-dictionary/resources/register-dictionary.py new file mode 100644 index 0000000000..0bd57225ef --- /dev/null +++ b/testing/web-platform/tests/fetch/compression-dictionary/resources/register-dictionary.py @@ -0,0 +1,37 @@ +import json + +def main(request, response): + response.headers.set(b"Access-Control-Allow-Origin", b"*") + match = b"/fetch/compression-dictionary/resources/*" + content = b"This is a test dictionary.\n" + if b"match" in request.GET: + match = request.GET.first(b"match") + if b"content" in request.GET: + content = request.GET.first(b"content") + + token = request.GET.first(b"save_header", None) + if token is not None: + headers = {} + for header in request.headers: + key = header.decode('utf-8') + value = request.headers.get(header).decode('utf-8') + headers[key] = value + with request.server.stash.lock: + request.server.stash.put(token, json.dumps(headers)) + + previous_token = request.GET.first(b"get_previous_header", None) + if previous_token is not None: + result = {} + with request.server.stash.lock: + store = request.server.stash.take(previous_token) + if store is not None: + headers = json.loads(store) + result["headers"] = headers + return json.dumps(result) + + options = b"match=\"" + match + b"\"" + if b"id" in request.GET: + options += b", id=\"" + request.GET.first(b"id") + b"\"" + response.headers.set(b"Use-As-Dictionary", options) + response.headers.set(b"Cache-Control", b"max-age=3600") + return content diff --git a/testing/web-platform/tests/fetch/content-encoding/br/bad-br-body.https.any.js b/testing/web-platform/tests/fetch/content-encoding/br/bad-br-body.https.any.js new file mode 100644 index 0000000000..43ea90a336 --- /dev/null +++ b/testing/web-platform/tests/fetch/content-encoding/br/bad-br-body.https.any.js @@ -0,0 +1,12 @@ +// META: global=window + +[ + "arrayBuffer", +].forEach(method => { + promise_test(t => { + return fetch("resources/bad-br-body.py").then(res => { + assert_equals(res.status, 200); + return promise_rejects_js(t, TypeError, res[method]()); + }); + }, "Consuming the body of a resource with bad br content with " + method + "() should reject"); +}); diff --git 
a/testing/web-platform/tests/fetch/content-encoding/br/big-br-body.https.any.js b/testing/web-platform/tests/fetch/content-encoding/br/big-br-body.https.any.js new file mode 100644 index 0000000000..1427dd7302 --- /dev/null +++ b/testing/web-platform/tests/fetch/content-encoding/br/big-br-body.https.any.js @@ -0,0 +1,55 @@ +// META: global=window,worker + +const EXPECTED_SIZE = 27000000; +const EXPECTED_SHA256 = [ + 74, 100, 37, 243, 147, 61, 116, 60, 241, 221, 126, + 18, 24, 71, 204, 28, 50, 62, 201, 130, 152, 225, + 217, 183, 10, 201, 143, 214, 102, 155, 212, 248, + ]; + +promise_test(async () => { + const response = await fetch('resources/big.text.br'); + assert_true(response.ok); + const arrayBuffer = await response.arrayBuffer(); + assert_equals(arrayBuffer.byteLength, EXPECTED_SIZE, + 'uncompressed size should match'); + const sha256 = await crypto.subtle.digest('SHA-256', arrayBuffer); + assert_array_equals(new Uint8Array(sha256), EXPECTED_SHA256, + 'digest should match'); +}, 'large br data should be decompressed successfully'); + +promise_test(async () => { + const response = await fetch('resources/big.text.br'); + assert_true(response.ok); + const reader = response.body.getReader({mode: 'byob'}); + let offset = 0; + // Pre-allocate space for the output. The response body will be read + // chunk-by-chunk into this array. + let ab = new ArrayBuffer(EXPECTED_SIZE); + while (offset < EXPECTED_SIZE) { + // To stress the data pipe, we want to use a different size read each + // time. Unfortunately, JavaScript doesn't have a seeded random number + // generator, so this creates the possibility of making this test flaky if + // it doesn't work for some edge cases. + let size = Math.floor(Math.random() * 65535 + 1); + if (size + offset > EXPECTED_SIZE) { + size = EXPECTED_SIZE - offset; + } + const u8 = new Uint8Array(ab, offset, size); + const { value, done } = await reader.read(u8); + ab = value.buffer; + // Check that we got our original array back. 
+ assert_equals(ab.byteLength, EXPECTED_SIZE, + 'backing array should be the same size'); + assert_equals(offset, value.byteOffset, 'offset should match'); + assert_less_than_equal(value.byteLength, size, + 'we should not have got more than we asked for'); + offset = value.byteOffset + value.byteLength; + if (done) break; + } + assert_equals(offset, EXPECTED_SIZE, + 'we should have read the whole thing'); + const sha256 = await crypto.subtle.digest('SHA-256', new Uint8Array(ab)); + assert_array_equals(new Uint8Array(sha256), EXPECTED_SHA256, + 'digest should match'); +}, 'large br data should be decompressed successfully with byte stream'); diff --git a/testing/web-platform/tests/fetch/content-encoding/br/br-body.https.any.js b/testing/web-platform/tests/fetch/content-encoding/br/br-body.https.any.js new file mode 100644 index 0000000000..2c2dbb5d29 --- /dev/null +++ b/testing/web-platform/tests/fetch/content-encoding/br/br-body.https.any.js @@ -0,0 +1,15 @@ +// META: global=window,worker + +const expectedDecompressedSize = 10500; +[ + "text", + "octetstream" +].forEach(contentType => { + promise_test(async t => { + let response = await fetch(`resources/foo.${contentType}.br`); + assert_true(response.ok); + let arrayBuffer = await response.arrayBuffer() + let u8 = new Uint8Array(arrayBuffer); + assert_equals(u8.length, expectedDecompressedSize); + }, `fetched br data with content type ${contentType} should be decompressed.`); +}); diff --git a/testing/web-platform/tests/fetch/content-encoding/br/resources/bad-br-body.py b/testing/web-platform/tests/fetch/content-encoding/br/resources/bad-br-body.py new file mode 100644 index 0000000000..0710e7ffde --- /dev/null +++ b/testing/web-platform/tests/fetch/content-encoding/br/resources/bad-br-body.py @@ -0,0 +1,3 @@ +def main(request, response): + headers = [(b"Content-Encoding", b"br")] + return headers, b"not actually br" diff --git a/testing/web-platform/tests/fetch/content-encoding/br/resources/big.text.br b/testing/web-platform/tests/fetch/content-encoding/br/resources/big.text.br Binary files differnew file mode 100644 index 0000000000..b3a530d757 --- /dev/null +++ b/testing/web-platform/tests/fetch/content-encoding/br/resources/big.text.br diff --git a/testing/web-platform/tests/fetch/content-encoding/br/resources/big.text.br.headers b/testing/web-platform/tests/fetch/content-encoding/br/resources/big.text.br.headers new file mode 100644 index 0000000000..aba00bd5d4 --- /dev/null +++ b/testing/web-platform/tests/fetch/content-encoding/br/resources/big.text.br.headers @@ -0,0 +1,3 @@ +Content-type: text/plain +Content-Encoding: br +Cache-Control: no-store diff --git a/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.octetstream.br b/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.octetstream.br Binary files differnew file mode 100644 index 0000000000..30cb2f7095 --- /dev/null +++ b/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.octetstream.br diff --git a/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.octetstream.br.headers b/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.octetstream.br.headers new file mode 100644 index 0000000000..c0c19bc82a --- /dev/null +++ b/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.octetstream.br.headers @@ -0,0 +1,2 @@ +Content-type: application/octet-stream +Content-Encoding: br diff --git a/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.text.br 
b/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.text.br Binary files differnew file mode 100644 index 0000000000..30cb2f7095 --- /dev/null +++ b/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.text.br diff --git a/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.text.br.headers b/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.text.br.headers new file mode 100644 index 0000000000..8c03b823e0 --- /dev/null +++ b/testing/web-platform/tests/fetch/content-encoding/br/resources/foo.text.br.headers @@ -0,0 +1,2 @@ +Content-type: text/plain +Content-Encoding: br diff --git a/testing/web-platform/tests/fetch/fetch-later/activate-after.tentative.https.window.js b/testing/web-platform/tests/fetch/fetch-later/activate-after.tentative.https.window.js index 18b368066b..e62da0508a 100644 --- a/testing/web-platform/tests/fetch/fetch-later/activate-after.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/activate-after.tentative.https.window.js @@ -3,7 +3,7 @@ // META: script=/common/utils.js // META: script=/html/browsers/browsing-the-web/remote-context-helper/resources/remote-context-helper.js // META: script=/html/browsers/browsing-the-web/back-forward-cache/resources/rc-helper.js -// META: script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js 'use strict'; diff --git a/testing/web-platform/tests/fetch/fetch-later/iframe.tentative.https.window.js b/testing/web-platform/tests/fetch/fetch-later/iframe.tentative.https.window.js index 1e9fed1117..305272af41 100644 --- a/testing/web-platform/tests/fetch/fetch-later/iframe.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/iframe.tentative.https.window.js @@ -1,6 +1,6 @@ // META: script=/common/utils.js // META: script=/common/get-host-info.sub.js -// META: script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js 'use strict'; diff --git a/testing/web-platform/tests/fetch/fetch-later/new-window.tentative.https.window.js b/testing/web-platform/tests/fetch/fetch-later/new-window.tentative.https.window.js index 93705418f2..27922f4626 100644 --- a/testing/web-platform/tests/fetch/fetch-later/new-window.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/new-window.tentative.https.window.js @@ -1,6 +1,6 @@ // META: script=/common/utils.js // META: script=/common/get-host-info.sub.js -// META: script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js 'use strict'; diff --git a/testing/web-platform/tests/fetch/fetch-later/policies/csp-allowed.tentative.https.window.js b/testing/web-platform/tests/fetch/fetch-later/policies/csp-allowed.tentative.https.window.js index 60730e0242..32a3e106a0 100644 --- a/testing/web-platform/tests/fetch/fetch-later/policies/csp-allowed.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/policies/csp-allowed.tentative.https.window.js @@ -1,7 +1,7 @@ // META: title=FetchLater: allowed by CSP // META: script=/common/utils.js // META: script=/common/get-host-info.sub.js -// META: script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js 'use strict'; const { diff --git a/testing/web-platform/tests/fetch/fetch-later/policies/csp-blocked.tentative.https.window.js 
b/testing/web-platform/tests/fetch/fetch-later/policies/csp-blocked.tentative.https.window.js index b32ddaecfc..ca9d881e8c 100644 --- a/testing/web-platform/tests/fetch/fetch-later/policies/csp-blocked.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/policies/csp-blocked.tentative.https.window.js @@ -1,7 +1,7 @@ // META: title=FetchLater: blocked by CSP // META: script=/common/utils.js // META: script=/common/get-host-info.sub.js -// META: script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js 'use strict'; const { diff --git a/testing/web-platform/tests/fetch/fetch-later/policies/csp-redirect-to-blocked.tentative.https.window.js b/testing/web-platform/tests/fetch/fetch-later/policies/csp-redirect-to-blocked.tentative.https.window.js index 3c18727156..584f476b45 100644 --- a/testing/web-platform/tests/fetch/fetch-later/policies/csp-redirect-to-blocked.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/policies/csp-redirect-to-blocked.tentative.https.window.js @@ -1,7 +1,7 @@ // META: title=FetchLater: redirect blocked by CSP // META: script=/common/utils.js // META: script=/common/get-host-info.sub.js -// META: script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js // META: timeout=long 'use strict'; diff --git a/testing/web-platform/tests/fetch/fetch-later/quota.tentative.https.window.js b/testing/web-platform/tests/fetch/fetch-later/quota.tentative.https.window.js index 1b5b85563d..9d0ae4287d 100644 --- a/testing/web-platform/tests/fetch/fetch-later/quota.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/quota.tentative.https.window.js @@ -1,6 +1,6 @@ // META: script=/common/get-host-info.sub.js // META: script=/common/utils.js -// META: script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js 'use strict'; diff --git a/testing/web-platform/tests/fetch/fetch-later/resources/fetch-later-helper.js b/testing/web-platform/tests/fetch/fetch-later/resources/fetch-later-helper.js new file mode 100644 index 0000000000..566b3e0a1a --- /dev/null +++ b/testing/web-platform/tests/fetch/fetch-later/resources/fetch-later-helper.js @@ -0,0 +1,206 @@ +'use strict'; + +const ROOT_NAME = 'fetch/fetch-later'; + +function parallelPromiseTest(func, description) { + async_test((t) => { + Promise.resolve(func(t)).then(() => t.done()).catch(t.step_func((e) => { + throw e; + })); + }, description); +} + +/** @enum {string} */ +const BeaconDataType = { + String: 'String', + ArrayBuffer: 'ArrayBuffer', + FormData: 'FormData', + URLSearchParams: 'URLSearchParams', + Blob: 'Blob', + File: 'File', +}; + +/** @enum {string} */ +const BeaconDataTypeToSkipCharset = { + String: '', + ArrayBuffer: '', + FormData: '\n\r', // CRLF characters will be normalized by FormData + URLSearchParams: ';,/?:@&=+$', // reserved URI characters + Blob: '', + File: '', +}; + +const BEACON_PAYLOAD_KEY = 'payload'; + +// Creates beacon data of the given `dataType` from `data`. +// @param {string} data - A string representation of the beacon data. Note that +// it cannot contain UTF-16 surrogates for all `BeaconDataType` except BLOB. +// @param {BeaconDataType} dataType - must be one of `BeaconDataType`. +// @param {string} contentType - Request Content-Type. 
+function makeBeaconData(data, dataType, contentType) { + switch (dataType) { + case BeaconDataType.String: + return data; + case BeaconDataType.ArrayBuffer: + return new TextEncoder().encode(data).buffer; + case BeaconDataType.FormData: + const formData = new FormData(); + if (data.length > 0) { + formData.append(BEACON_PAYLOAD_KEY, data); + } + return formData; + case BeaconDataType.URLSearchParams: + if (data.length > 0) { + return new URLSearchParams(`${BEACON_PAYLOAD_KEY}=${data}`); + } + return new URLSearchParams(); + case BeaconDataType.Blob: { + const options = {type: contentType || undefined}; + return new Blob([data], options); + } + case BeaconDataType.File: { + const options = {type: contentType || 'text/plain'}; + return new File([data], 'file.txt', options); + } + default: + throw Error(`Unsupported beacon dataType: ${dataType}`); + } +} + +// Create a string of `end`-`begin` characters, with characters starting from +// UTF-16 code unit `begin` to `end`-1. +function generateSequentialData(begin, end, skip) { + const codeUnits = Array(end - begin).fill().map((el, i) => i + begin); + if (skip) { + return String.fromCharCode( + ...codeUnits.filter(c => !skip.includes(String.fromCharCode(c)))); + } + return String.fromCharCode(...codeUnits); +} + +function generatePayload(size) { + if (size == 0) { + return ''; + } + const prefix = String(size) + ':'; + if (size < prefix.length) { + return Array(size).fill('*').join(''); + } + if (size == prefix.length) { + return prefix; + } + + return prefix + Array(size - prefix.length).fill('*').join(''); +} + +function generateSetBeaconURL(uuid, options) { + const host = (options && options.host) || ''; + let url = `${host}/${ROOT_NAME}/resources/set_beacon.py?uuid=${uuid}`; + if (options) { + if (options.expectOrigin !== undefined) { + url = `${url}&expectOrigin=${options.expectOrigin}`; + } + if (options.expectPreflight !== undefined) { + url = `${url}&expectPreflight=${options.expectPreflight}`; + } + if (options.expectCredentials !== undefined) { + url = `${url}&expectCredentials=${options.expectCredentials}`; + } + + if (options.useRedirectHandler) { + const redirect = `${host}/common/redirect.py` + + `?location=${encodeURIComponent(url)}`; + url = redirect; + } + } + return url; +} + +async function poll(asyncFunc, expected) { + const maxRetries = 30; + const waitInterval = 100; // milliseconds. + const delay = ms => new Promise(res => setTimeout(res, ms)); + + let result = {data: []}; + for (let i = 0; i < maxRetries; i++) { + result = await asyncFunc(); + if (!expected(result)) { + await delay(waitInterval); + continue; + } + return result; + } + return result; +} + +// Waits until the `options.count` number of beacon data available from the +// server. Defaults to 1. +// If `options.data` is set, it will be used to compare with the data from the +// response. +async function expectBeacon(uuid, options) { + const expectedCount = + (options && options.count !== undefined) ? options.count : 1; + + const res = await poll( + async () => { + const res = await fetch( + `/${ROOT_NAME}/resources/get_beacon.py?uuid=${uuid}`, + {cache: 'no-store'}); + return await res.json(); + }, + (res) => { + if (expectedCount == 0) { + // If expecting no beacon, we should try to wait as long as possible. + // So always returning false here until `poll()` decides to terminate + // itself. 
+ return false; + } + return res.data.length == expectedCount; + }); + if (!options || !options.data) { + assert_equals( + res.data.length, expectedCount, + 'Number of sent beacons does not match expected count:'); + return; + } + + if (expectedCount == 0) { + assert_equals( + res.data.length, 0, + 'Number of sent beacons does not match expected count:'); + return; + } + + const decoder = options && options.percentDecoded ? (s) => { + // application/x-www-form-urlencoded serializer encodes space as '+' + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent + s = s.replace(/\+/g, '%20'); + return decodeURIComponent(s); + } : (s) => s; + + assert_equals( + res.data.length, options.data.length, + `The size of beacon data ${ + res.data.length} from server does not match expected value ${ + options.data.length}.`); + for (let i = 0; i < options.data.length; i++) { + assert_equals( + decoder(res.data[i]), options.data[i], + 'The beacon data does not match expected value.'); + } +} + +function generateHTML(script) { + return `<!DOCTYPE html><body><script>${script}</script></body>`; +} + +// Loads `script` into an iframe and appends it to the current document. +// Returns the loaded iframe element. +async function loadScriptAsIframe(script) { + const iframe = document.createElement('iframe'); + iframe.srcdoc = generateHTML(script); + const iframeLoaded = new Promise(resolve => iframe.onload = resolve); + document.body.appendChild(iframe); + await iframeLoaded; + return iframe; +} diff --git a/testing/web-platform/tests/fetch/fetch-later/resources/get_beacon.py b/testing/web-platform/tests/fetch/fetch-later/resources/get_beacon.py new file mode 100644 index 0000000000..32cb9a9ba3 --- /dev/null +++ b/testing/web-platform/tests/fetch/fetch-later/resources/get_beacon.py @@ -0,0 +1,30 @@ +"""An HTTP request handler for WPT that handles /get_beacon.py requests.""" + +import json + +_BEACON_ID_KEY = b"uuid" +_BEACON_DATA_PATH = "beacon_data" + + +def main(request, response): + """Retrieves the beacon data keyed by the given uuid from server storage. + + The response content is a JSON string in one of the following formats: + - "{'data': ['abc', null, '123',...]}" + - "{'data': []}" indicates that no data has been set for this uuid. + """ + if _BEACON_ID_KEY not in request.GET: + response.status = 400 + return "Must provide a UUID to store beacon data" + uuid = request.GET.first(_BEACON_ID_KEY) + + with request.server.stash.lock: + body = {'data': []} + data = request.server.stash.take(key=uuid, path=_BEACON_DATA_PATH) + if data: + body['data'] = data + # The stash is read-once/write-once, so it has to be put back after + # reading if `data` is not None. 
+ request.server.stash.put( + key=uuid, value=data, path=_BEACON_DATA_PATH) + return [(b'Content-Type', b'text/plain')], json.dumps(body) diff --git a/testing/web-platform/tests/fetch/fetch-later/resources/set_beacon.py b/testing/web-platform/tests/fetch/fetch-later/resources/set_beacon.py new file mode 100644 index 0000000000..1c71f23e57 --- /dev/null +++ b/testing/web-platform/tests/fetch/fetch-later/resources/set_beacon.py @@ -0,0 +1,83 @@ +"""An HTTP request handler for WPT that handles /set_beacon.py requests.""" + +_BEACON_ID_KEY = b"uuid" +_BEACON_DATA_PATH = "beacon_data" +_BEACON_FORM_PAYLOAD_KEY = b"payload" +_BEACON_BODY_PAYLOAD_KEY = "payload=" +_BEACON_EXPECT_ORIGIN_KEY = b"expectOrigin" +_BEACON_EXPECT_PREFLIGHT_KEY = b"expectPreflight" +_BEACON_EXPECT_CREDS_KEY = b"expectCredentials" + + +def main(request, response): + """Stores the given beacon's data keyed by uuid in the server. + + For GET request, this handler assumes no data. + For POST request, this handler extracts data from request body: + - Content-Type=multipart/form-data: data keyed by 'payload'. + - the entire request body. + + Multiple data can be added for the same uuid. + + The data is stored as UTF-8 format. + """ + if _BEACON_ID_KEY not in request.GET: + response.status = 400 + return "Must provide a UUID to store beacon data" + uuid = request.GET.first(_BEACON_ID_KEY) + + expected_origin = request.GET.get(_BEACON_EXPECT_ORIGIN_KEY) + if b"origin" in request.headers: + origin = request.headers.get(b"origin") + if expected_origin: + assert origin == expected_origin, f"expected {expected_origin}, got {origin}" + response.headers.set(b"Access-Control-Allow-Origin", origin) + else: + assert expected_origin is None, f"expected None, got {expected_origin}" + + # Handles preflight request first. + if request.method == u"OPTIONS": + assert request.GET.get( + _BEACON_EXPECT_PREFLIGHT_KEY) == b"true", "Preflight not expected." + + # preflight must not have cookies. 
+ assert b"Cookie" not in request.headers + + requested_headers = request.headers.get( + b"Access-Control-Request-Headers") + assert b"content-type" in requested_headers, f"expected content-type, got {requested_headers}" + response.headers.set(b"Access-Control-Allow-Headers", b"content-type") + + requested_method = request.headers.get(b"Access-Control-Request-Method") + assert requested_method == b"POST", f"expected POST, got {requested_method}" + response.headers.set(b"Access-Control-Allow-Methods", b"POST") + + return response + + expect_creds = request.GET.get(_BEACON_EXPECT_CREDS_KEY) == b"true" + if expect_creds: + assert b"Cookie" in request.headers + else: + assert b"Cookie" not in request.headers + + data = None + if request.method == u"POST": + if b"multipart/form-data" in request.headers.get(b"Content-Type", b""): + if _BEACON_FORM_PAYLOAD_KEY in request.POST: + data = request.POST.first(_BEACON_FORM_PAYLOAD_KEY).decode( + 'utf-8') + elif request.body: + data = request.body.decode('utf-8') + if data.startswith(_BEACON_BODY_PAYLOAD_KEY): + data = data.split(_BEACON_BODY_PAYLOAD_KEY)[1] + + with request.server.stash.lock: + saved_data = request.server.stash.take(key=uuid, path=_BEACON_DATA_PATH) + if not saved_data: + saved_data = [data] + else: + saved_data.append(data) + request.server.stash.put( + key=uuid, value=saved_data, path=_BEACON_DATA_PATH) + + response.status = 200 diff --git a/testing/web-platform/tests/fetch/fetch-later/send-on-deactivate.tentative.https.window.js b/testing/web-platform/tests/fetch/fetch-later/send-on-deactivate.tentative.https.window.js index d91c73580a..3bcf07483a 100644 --- a/testing/web-platform/tests/fetch/fetch-later/send-on-deactivate.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/send-on-deactivate.tentative.https.window.js @@ -3,7 +3,7 @@ // META: script=/common/utils.js // META: script=/html/browsers/browsing-the-web/remote-context-helper/resources/remote-context-helper.js // META: script=/html/browsers/browsing-the-web/back-forward-cache/resources/rc-helper.js -// META: script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js 'use strict'; diff --git a/testing/web-platform/tests/fetch/fetch-later/send-on-discard/not-send-after-abort.tentative.https.window.js b/testing/web-platform/tests/fetch/fetch-later/send-on-discard/not-send-after-abort.tentative.https.window.js index ff8d9520e0..6ddafd7813 100644 --- a/testing/web-platform/tests/fetch/fetch-later/send-on-discard/not-send-after-abort.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/send-on-discard/not-send-after-abort.tentative.https.window.js @@ -1,5 +1,5 @@ // META: script=/common/utils.js -// META: script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js 'use strict'; diff --git a/testing/web-platform/tests/fetch/fetch-later/send-on-discard/send-multiple-with-activate-after.tentative.https.window.js b/testing/web-platform/tests/fetch/fetch-later/send-on-discard/send-multiple-with-activate-after.tentative.https.window.js index 11e85b31a7..0bbe94c39f 100644 --- a/testing/web-platform/tests/fetch/fetch-later/send-on-discard/send-multiple-with-activate-after.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/send-on-discard/send-multiple-with-activate-after.tentative.https.window.js @@ -1,5 +1,5 @@ // META: script=/common/utils.js -// META: 
script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js // META: timeout=long 'use strict'; diff --git a/testing/web-platform/tests/fetch/fetch-later/send-on-discard/send-multiple.tentative.https.window.js b/testing/web-platform/tests/fetch/fetch-later/send-on-discard/send-multiple.tentative.https.window.js index df34ec9ac0..05bb2dc114 100644 --- a/testing/web-platform/tests/fetch/fetch-later/send-on-discard/send-multiple.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/fetch-later/send-on-discard/send-multiple.tentative.https.window.js @@ -1,5 +1,5 @@ // META: script=/common/utils.js -// META: script=/pending-beacon/resources/pending_beacon-helper.js +// META: script=/fetch/fetch-later/resources/fetch-later-helper.js // META: timeout=long 'use strict'; diff --git a/testing/web-platform/tests/fetch/private-network-access/anchor.tentative.https.window.js b/testing/web-platform/tests/fetch/private-network-access/anchor.tentative.https.window.js index f5473868b7..4e860ad381 100644 --- a/testing/web-platform/tests/fetch/private-network-access/anchor.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/private-network-access/anchor.tentative.https.window.js @@ -149,44 +149,6 @@ subsetTestByKey("from-public", promise_test_parallel, t => anchorTest(t, { expected: NavigationTestResult.SUCCESS, }), "public to public: no preflight required."); -subsetTestByKey( - 'from-public', promise_test_parallel, - t => anchorTest(t, { - source: {server: Server.HTTPS_PUBLIC}, - target: { - server: Server.HTTPS_PUBLIC, - behavior: { - redirect: preflightUrl({ - server: Server.HTTPS_PRIVATE, - behavior: { - preflight: PreflightBehavior.noCorsHeader(token()), - } - }), - } - }, - expected: NavigationTestResult.FAILURE, - }), - 'public to public redirected to private: missing CORS headers.'); - -subsetTestByKey( - 'from-public', promise_test_parallel, - t => anchorTest(t, { - source: {server: Server.HTTPS_PUBLIC}, - target: { - server: Server.HTTPS_PUBLIC, - behavior: { - redirect: preflightUrl({ - server: Server.HTTPS_PRIVATE, - behavior: { - preflight: PreflightBehavior.navigation(token()), - } - }), - } - }, - expected: NavigationTestResult.SUCCESS, - }), - 'public to public to private: success.'); - // The following tests verify that `CSP: treat-as-public-address` makes // documents behave as if they had been served from a public IP address. 
diff --git a/testing/web-platform/tests/fetch/private-network-access/resources/support.sub.js b/testing/web-platform/tests/fetch/private-network-access/resources/support.sub.js index 1cb432b787..7d133b0288 100644 --- a/testing/web-platform/tests/fetch/private-network-access/resources/support.sub.js +++ b/testing/web-platform/tests/fetch/private-network-access/resources/support.sub.js @@ -480,13 +480,6 @@ const NavigationTestResult = { }; async function windowOpenTest(t, { source, target, expected }) { - if (target.behavior && target.behavior.redirect) { - target.behavior.redirect.searchParams.set('file', 'openee.html'); - target.behavior.redirect.searchParams.set( - 'file-if-no-preflight-received', - 'no-preflight-received.html', - ); - } const targetUrl = preflightUrl(target); targetUrl.searchParams.set("file", "openee.html"); targetUrl.searchParams.set( @@ -514,13 +507,6 @@ async function windowOpenTest(t, { source, target, expected }) { } async function windowOpenExistingTest(t, { source, target, expected }) { - if (target.behavior && target.behavior.redirect) { - target.behavior.redirect.searchParams.set('file', 'openee.html'); - target.behavior.redirect.searchParams.set( - 'file-if-no-preflight-received', - 'no-preflight-received.html', - ); - } const targetUrl = preflightUrl(target); targetUrl.searchParams.set("file", "openee.html"); targetUrl.searchParams.set( @@ -549,13 +535,6 @@ async function windowOpenExistingTest(t, { source, target, expected }) { } async function anchorTest(t, { source, target, expected }) { - if (target.behavior && target.behavior.redirect) { - target.behavior.redirect.searchParams.set('file', 'openee.html'); - target.behavior.redirect.searchParams.set( - 'file-if-no-preflight-received', - 'no-preflight-received.html', - ); - } const targetUrl = preflightUrl(target); targetUrl.searchParams.set("file", "openee.html"); targetUrl.searchParams.set( diff --git a/testing/web-platform/tests/fetch/private-network-access/window-open-existing.tentative.https.window.js b/testing/web-platform/tests/fetch/private-network-access/window-open-existing.tentative.https.window.js index 565a2117a8..6a2a624fc8 100644 --- a/testing/web-platform/tests/fetch/private-network-access/window-open-existing.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/private-network-access/window-open-existing.tentative.https.window.js @@ -167,44 +167,6 @@ subsetTestByKey( }), 'public to public: no preflight required.'); -subsetTestByKey( - 'from-public', promise_test_parallel, - t => windowOpenExistingTest(t, { - source: {server: Server.HTTPS_PUBLIC}, - target: { - server: Server.HTTPS_PUBLIC, - behavior: { - redirect: preflightUrl({ - server: Server.HTTPS_PRIVATE, - behavior: { - preflight: PreflightBehavior.noCorsHeader(token()), - } - }), - } - }, - expected: NavigationTestResult.FAILURE, - }), - 'public to public redirected to private: missing CORS headers.'); - -subsetTestByKey( - 'from-public', promise_test_parallel, - t => windowOpenExistingTest(t, { - source: {server: Server.HTTPS_PUBLIC}, - target: { - server: Server.HTTPS_PUBLIC, - behavior: { - redirect: preflightUrl({ - server: Server.HTTPS_PRIVATE, - behavior: { - preflight: PreflightBehavior.navigation(token()), - } - }), - } - }, - expected: NavigationTestResult.SUCCESS, - }), - 'public to public to private: success.'); - // The following tests verify that `CSP: treat-as-public-address` makes // documents behave as if they had been served from a public IP address. 
diff --git a/testing/web-platform/tests/fetch/private-network-access/window-open.tentative.https.window.js b/testing/web-platform/tests/fetch/private-network-access/window-open.tentative.https.window.js index 42d70af4e4..6793d1f3b4 100644 --- a/testing/web-platform/tests/fetch/private-network-access/window-open.tentative.https.window.js +++ b/testing/web-platform/tests/fetch/private-network-access/window-open.tentative.https.window.js @@ -149,44 +149,6 @@ subsetTestByKey("from-public", promise_test_parallel, t => windowOpenTest(t, { expected: NavigationTestResult.SUCCESS, }), "public to public: no preflight required."); -subsetTestByKey( - 'from-public', promise_test_parallel, - t => windowOpenTest(t, { - source: {server: Server.HTTPS_PUBLIC}, - target: { - server: Server.HTTPS_PUBLIC, - behavior: { - redirect: preflightUrl({ - server: Server.HTTPS_PRIVATE, - behavior: { - preflight: PreflightBehavior.noCorsHeader(token()), - } - }), - } - }, - expected: NavigationTestResult.FAILURE, - }), - 'public to public redirected to private: missing CORS headers.'); - -subsetTestByKey( - 'from-public', promise_test_parallel, - t => windowOpenTest(t, { - source: {server: Server.HTTPS_PUBLIC}, - target: { - server: Server.HTTPS_PUBLIC, - behavior: { - redirect: preflightUrl({ - server: Server.HTTPS_PRIVATE, - behavior: { - preflight: PreflightBehavior.navigation(token()), - } - }), - } - }, - expected: NavigationTestResult.SUCCESS, - }), - 'public to public to private: success.'); - // The following tests verify that `CSP: treat-as-public-address` makes // documents behave as if they had been served from a public IP address. diff --git a/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation-allowed-apis.html b/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation-allowed-apis.html deleted file mode 100644 index 66456a8876..0000000000 --- a/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation-allowed-apis.html +++ /dev/null @@ -1,26 +0,0 @@ -<!DOCTYPE html> -<script src="/resources/testharness.js"></script> -<script src="/resources/testharnessreport.js"></script> -<body> -<script> - const blank = 'about:blank'; - const dangling_url = 'resources/empty.html?\n<'; - const api_calls = [ - `window.open(\`${dangling_url}\`,'_self')`, - `location.replace(\`${dangling_url}\`)`, - ]; - - api_calls.forEach(call => { - async_test(t => { - const iframe = - document.body.appendChild(document.createElement('iframe')); - t.step(() => { - iframe.contentWindow.eval(call) - t.step_timeout(()=>{ - assert_false(iframe.contentWindow.location.href.endsWith(blank)); - t.done(); - }, 500); - }); - }, `Does not block ${call}`); - }); -</script> diff --git a/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation-allowed-apis.tentative.https.html b/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation-allowed-apis.tentative.https.html new file mode 100644 index 0000000000..428decfc58 --- /dev/null +++ b/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation-allowed-apis.tentative.https.html @@ -0,0 +1,80 @@ +<!DOCTYPE html> +<meta name="timeout" content="long"> +<script src="/resources/testharness.js"></script> +<script src="/resources/testharnessreport.js"></script> +<body> +<script> + const blank = 'about:blank'; + const dangling_url = 'resources/empty.html?\n<'; + const navigation_api_calls = [ + `window.open(\`${dangling_url}\`,'_self')`, + 
`location.replace(\`${dangling_url}\`)`, + ]; + + function get_requests(worker, expected) { + return new Promise(resolve => { + navigator.serviceWorker.addEventListener('message', function onMsg(evt) { + if (evt.data.size >= expected) { + navigator.serviceWorker.removeEventListener('message', onMsg); + resolve(evt.data); + } else { + worker.postMessage(""); + } + }); + worker.postMessage(""); + }); + } + + navigation_api_calls.forEach(call => { + async_test(t => { + const iframe = + document.body.appendChild(document.createElement('iframe')); + t.step(() => { + iframe.contentWindow.eval(call); + t.step_timeout(() => { + assert_false(iframe.contentWindow.location.href.endsWith(blank)); + t.done(); + }, 500); + }); + }, `Does not block ${call}`); + }); + + const dangling_resource = "404?type=text/javascript&\n<" + const api_calls = [ + [`const xhr = new XMLHttpRequest(); + xhr.open("GET", \`${"xhr" + dangling_resource}\`); + xhr.send(null);`, "xhr"], + [`new EventSource(\`${"EventSource" + dangling_resource}\`)`,"EventSource"], + [`fetch(\`${"fetch" + dangling_resource}\`).catch(()=>{})`, "fetch"], + [`new Worker(\`${"Worker" + dangling_resource}\`)`, "Worker"], + [`let text = \`try{importScripts(\\\`${location.href + "/../importScripts" + dangling_resource}\\\`)}catch(e){}\`; + let blob = new Blob([text], {type : 'text/javascript'}); + let url = URL.createObjectURL(blob); + new Worker(url)`, "importScripts"], + + ]; + + navigator.serviceWorker.register('service-worker.js'); + const iframe = document.createElement('iframe'); + iframe.src = "resources/empty.html"; + document.body.appendChild(iframe); + api_calls.forEach(call => { + promise_test(t => { + return new Promise(resolve => { + navigator.serviceWorker.ready.then(t.step_func(registration => { + iframe.contentWindow.eval(call[0]); + get_requests(registration.active, 0).then(t.step_func(requests => { + resolve(assert_true(requests.has(call[1] + dangling_resource))); + })); + })); + }); + }, `Does not block ${call[1]}`); + }); + + async_test(t => { + let url = new URL(location.origin + "/" + dangling_url); + // Newlines are removed by the URL parser. 
+ assert_true(url.href.endsWith(encodeURI(dangling_url.replace("\n","")))); + t.done(); + }, `Does not block new URL()`); +</script> diff --git a/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation-data-url.sub.html b/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation-data-url.tentative.sub.html index f27735daa1..f27735daa1 100644 --- a/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation-data-url.sub.html +++ b/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation-data-url.tentative.sub.html diff --git a/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation.html b/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation.tentative.html index 61a931608b..61a931608b 100644 --- a/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation.html +++ b/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation.tentative.html diff --git a/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation.https.html b/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation.tentative.https.html index 3f038cbb7b..3f038cbb7b 100644 --- a/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation.https.html +++ b/testing/web-platform/tests/fetch/security/dangling-markup/dangling-markup-mitigation.tentative.https.html diff --git a/testing/web-platform/tests/fetch/security/dangling-markup/service-worker.js b/testing/web-platform/tests/fetch/security/dangling-markup/service-worker.js index 837e216a01..99d5456a87 100644 --- a/testing/web-platform/tests/fetch/security/dangling-markup/service-worker.js +++ b/testing/web-platform/tests/fetch/security/dangling-markup/service-worker.js @@ -16,18 +16,24 @@ addEventListener('fetch', evt => { const url = new URL(evt.request.url); const path = url.pathname; const search = url.search || "?"; + const params = new URLSearchParams(search); + const type = params.get('type'); if (path.includes('404')) { const dir = path.split('/'); const request = dir[dir.length-1] + search; if (!requests.has(request)) { requests.add(request); } - evt.respondWith(new Response("")); + evt.respondWith(new Response("", { + headers: { + "Content-Type": type || "text/plain" + } + })); } else if (path.endsWith('resources.html')) { - const html = (new URLSearchParams(search)).get('html'); + const html = params.get('html') || ""; evt.respondWith(new Response(html, { headers: { - "Content-Type": "text/html" + "Content-Type": type || "text/html" } })); } |
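The service-worker.js change above records each "404" request and now honours a `type` query parameter when setting the Content-Type of its synthetic responses, which the new allowed-apis test relies on for its Worker and importScripts cases. That test's get_requests helper expects the worker to answer a postMessage with the set of recorded requests; the corresponding message handler lies outside this hunk, so the following is an assumed sketch of the reporting side, not code from the patch.

// Assumed sketch only; the actual 'message' handler in service-worker.js is
// not shown in this hunk. `requests` is the Set of recorded request paths
// maintained by the fetch handler above.
addEventListener('message', evt => {
  // Reply to the querying page with the current snapshot of recorded
  // requests; Set survives structured cloning, so the page can call
  // evt.data.size and evt.data.has(...) as the new test does.
  evt.source.postMessage(requests);
});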