path: root/testing/web-platform/tests/encoding/streams
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 09:22:09 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 09:22:09 +0000
commit     43a97878ce14b72f0981164f87f2e35e14151312 (patch)
tree       620249daf56c0258faa40cbdcf9cfba06de2a846 /testing/web-platform/tests/encoding/streams
parent     Initial commit. (diff)
Adding upstream version 110.0.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/web-platform/tests/encoding/streams')
-rw-r--r--  testing/web-platform/tests/encoding/streams/backpressure.any.js | 60
-rw-r--r--  testing/web-platform/tests/encoding/streams/decode-attributes.any.js | 71
-rw-r--r--  testing/web-platform/tests/encoding/streams/decode-bad-chunks.any.js | 38
-rw-r--r--  testing/web-platform/tests/encoding/streams/decode-ignore-bom.any.js | 38
-rw-r--r--  testing/web-platform/tests/encoding/streams/decode-incomplete-input.any.js | 24
-rw-r--r--  testing/web-platform/tests/encoding/streams/decode-non-utf8.any.js | 83
-rw-r--r--  testing/web-platform/tests/encoding/streams/decode-split-character.any.js | 50
-rw-r--r--  testing/web-platform/tests/encoding/streams/decode-utf8.any.js | 76
-rw-r--r--  testing/web-platform/tests/encoding/streams/decode-utf8.any.js.headers | 2
-rw-r--r--  testing/web-platform/tests/encoding/streams/encode-bad-chunks.any.js | 63
-rw-r--r--  testing/web-platform/tests/encoding/streams/encode-utf8.any.js | 144
-rw-r--r--  testing/web-platform/tests/encoding/streams/invalid-realm.window.js | 37
-rw-r--r--  testing/web-platform/tests/encoding/streams/readable-writable-properties.any.js | 22
-rw-r--r--  testing/web-platform/tests/encoding/streams/realms.window.js | 304
-rw-r--r--  testing/web-platform/tests/encoding/streams/resources/readable-stream-from-array.js | 12
-rw-r--r--  testing/web-platform/tests/encoding/streams/resources/readable-stream-to-array.js | 11
16 files changed, 1035 insertions, 0 deletions
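
All of the files below target the WHATWG TextEncoderStream and TextDecoderStream transform streams. As a quick orientation, a minimal round trip through both streams looks roughly like the following sketch (illustrative only, not taken from the patch; the roundTrip name is made up):

// Round-trip a string: TextEncoderStream emits UTF-8 Uint8Array chunks,
// TextDecoderStream turns BufferSource chunks back into strings.
async function roundTrip(text) {
  const source = new ReadableStream({
    start(controller) {
      controller.enqueue(text);
      controller.close();
    }
  });
  const decoded = source
      .pipeThrough(new TextEncoderStream())   // string -> Uint8Array chunks
      .pipeThrough(new TextDecoderStream());  // Uint8Array chunks -> strings
  const reader = decoded.getReader();
  let result = '';
  while (true) {
    const {value, done} = await reader.read();
    if (done) {
      return result;  // same text back, assuming the input had no lone surrogates
    }
    result += value;
  }
}

The tests then probe individual aspects of this pipeline: attribute reflection, BOM handling, chunk splitting, backpressure, bad chunks, and realm association.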
diff --git a/testing/web-platform/tests/encoding/streams/backpressure.any.js b/testing/web-platform/tests/encoding/streams/backpressure.any.js
new file mode 100644
index 0000000000..22f7f0aa2b
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/backpressure.any.js
@@ -0,0 +1,60 @@
+// META: global=window,worker
+
+'use strict';
+
+const classes = [
+ {
+ name: 'TextDecoderStream',
+ input: new Uint8Array([65])
+ },
+ {
+ name: 'TextEncoderStream',
+ input: 'A'
+ }
+];
+
+const microtasksRun = () => new Promise(resolve => step_timeout(resolve, 0));
+
+for (const streamClass of classes) {
+ promise_test(async () => {
+ const stream = new self[streamClass.name]();
+ const writer = stream.writable.getWriter();
+ const reader = stream.readable.getReader();
+ const events = [];
+ await microtasksRun();
+ const writePromise = writer.write(streamClass.input);
+ writePromise.then(() => events.push('write'));
+ await microtasksRun();
+ events.push('paused');
+ await reader.read();
+ events.push('read');
+ await writePromise;
+ assert_array_equals(events, ['paused', 'read', 'write'],
+ 'write should happen after read');
+ }, 'write() should not complete until read relieves backpressure for ' +
+ `${streamClass.name}`);
+
+ promise_test(async () => {
+ const stream = new self[streamClass.name]();
+ const writer = stream.writable.getWriter();
+ const reader = stream.readable.getReader();
+ const events = [];
+ await microtasksRun();
+ const readPromise1 = reader.read();
+ readPromise1.then(() => events.push('read1'));
+ const writePromise1 = writer.write(streamClass.input);
+ const writePromise2 = writer.write(streamClass.input);
+ writePromise1.then(() => events.push('write1'));
+ writePromise2.then(() => events.push('write2'));
+ await microtasksRun();
+ events.push('paused');
+ const readPromise2 = reader.read();
+ readPromise2.then(() => events.push('read2'));
+ await Promise.all([writePromise1, writePromise2,
+ readPromise1, readPromise2]);
+ assert_array_equals(events, ['read1', 'write1', 'paused', 'read2',
+ 'write2'],
+ 'writes should not happen before read2');
+ }, 'additional writes should wait for backpressure to be relieved for ' +
+ `class ${streamClass.name}`);
+}
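
The behaviour backpressure.any.js asserts can also be seen without the test harness: with the default queuing strategies, a write() promise stays pending until a read() pulls the chunk. A minimal sketch (illustrative only; demoBackpressure is a made-up name):

async function demoBackpressure() {
  const stream = new TextEncoderStream();
  const writer = stream.writable.getWriter();
  const reader = stream.readable.getReader();

  // Nothing is reading yet, so this promise does not settle immediately.
  const writePromise = writer.write('A');

  // Reading a chunk relieves backpressure and lets the write complete.
  const {value} = await reader.read();  // Uint8Array([65])
  await writePromise;
  return value;
}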
diff --git a/testing/web-platform/tests/encoding/streams/decode-attributes.any.js b/testing/web-platform/tests/encoding/streams/decode-attributes.any.js
new file mode 100644
index 0000000000..0843cc665f
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/decode-attributes.any.js
@@ -0,0 +1,71 @@
+// META: global=window,worker
+
+'use strict';
+
+// Verify that constructor arguments are correctly reflected in the attributes.
+
+// Mapping of the first argument to TextDecoderStream to the expected value of
+// the encoding attribute. We assume that if this subset works correctly, the
+// rest probably work too.
+const labelToName = {
+ 'unicode-1-1-utf-8': 'utf-8',
+ 'iso-8859-2': 'iso-8859-2',
+ 'ascii': 'windows-1252',
+ 'utf-16': 'utf-16le'
+};
+
+for (const label of Object.keys(labelToName)) {
+ test(() => {
+ const stream = new TextDecoderStream(label);
+ assert_equals(stream.encoding, labelToName[label], 'encoding should match');
+ }, `encoding attribute should have correct value for '${label}'`);
+}
+
+for (const falseValue of [false, 0, '', undefined, null]) {
+ test(() => {
+ const stream = new TextDecoderStream('utf-8', { fatal: falseValue });
+ assert_false(stream.fatal, 'fatal should be false');
+ }, `setting fatal to '${falseValue}' should set the attribute to false`);
+
+ test(() => {
+ const stream = new TextDecoderStream('utf-8', { ignoreBOM: falseValue });
+ assert_false(stream.ignoreBOM, 'ignoreBOM should be false');
+ }, `setting ignoreBOM to '${falseValue}' should set the attribute to false`);
+}
+
+for (const trueValue of [true, 1, {}, [], 'yes']) {
+ test(() => {
+ const stream = new TextDecoderStream('utf-8', { fatal: trueValue });
+ assert_true(stream.fatal, 'fatal should be true');
+ }, `setting fatal to '${trueValue}' should set the attribute to true`);
+
+ test(() => {
+ const stream = new TextDecoderStream('utf-8', { ignoreBOM: trueValue });
+ assert_true(stream.ignoreBOM, 'ignoreBOM should be true');
+ }, `setting ignoreBOM to '${trueValue}' should set the attribute to true`);
+}
+
+test(() => {
+ assert_throws_js(RangeError, () => new TextDecoderStream(''),
+ 'the constructor should throw');
+}, 'constructing with an invalid encoding should throw');
+
+test(() => {
+ assert_throws_js(TypeError, () => new TextDecoderStream({
+ toString() { return {}; }
+ }), 'the constructor should throw');
+}, 'constructing with a non-stringifiable encoding should throw');
+
+test(() => {
+ assert_throws_js(Error,
+ () => new TextDecoderStream('utf-8', {
+ get fatal() { throw new Error(); }
+ }), 'the constructor should throw');
+}, 'a throwing fatal member should cause the constructor to throw');
+
+test(() => {
+ assert_throws_js(Error,
+ () => new TextDecoderStream('utf-8', {
+ get ignoreBOM() { throw new Error(); }
+ }), 'the constructor should throw');
+}, 'a throwing ignoreBOM member should cause the constructor to throw');
diff --git a/testing/web-platform/tests/encoding/streams/decode-bad-chunks.any.js b/testing/web-platform/tests/encoding/streams/decode-bad-chunks.any.js
new file mode 100644
index 0000000000..b7b2dd5845
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/decode-bad-chunks.any.js
@@ -0,0 +1,38 @@
+// META: global=window,worker
+
+'use strict';
+
+const badChunks = [
+ {
+ name: 'undefined',
+ value: undefined
+ },
+ {
+ name: 'null',
+ value: null
+ },
+ {
+ name: 'numeric',
+ value: 3.14
+ },
+ {
+ name: 'object, not BufferSource',
+ value: {}
+ },
+ {
+ name: 'array',
+ value: [65]
+ }
+];
+
+for (const chunk of badChunks) {
+ promise_test(async t => {
+ const tds = new TextDecoderStream();
+ const reader = tds.readable.getReader();
+ const writer = tds.writable.getWriter();
+ const writePromise = writer.write(chunk.value);
+ const readPromise = reader.read();
+ await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
+ await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
+ }, `chunk of type ${chunk.name} should error the stream`);
+}
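
For comparison outside the harness, a chunk that is not a BufferSource rejects the write() and errors the readable side with the same TypeError, roughly like this (illustrative sketch; demoBadChunk is a made-up name):

async function demoBadChunk() {
  const tds = new TextDecoderStream();
  const writer = tds.writable.getWriter();
  const reader = tds.readable.getReader();
  const writePromise = writer.write({});  // not an ArrayBuffer or ArrayBufferView
  const readPromise = reader.read();      // relieves backpressure so the chunk is processed
  await writePromise.catch(e => console.log('write:', e.name));  // "write: TypeError"
  await readPromise.catch(e => console.log('read:', e.name));    // "read: TypeError"
}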
diff --git a/testing/web-platform/tests/encoding/streams/decode-ignore-bom.any.js b/testing/web-platform/tests/encoding/streams/decode-ignore-bom.any.js
new file mode 100644
index 0000000000..92f89c8015
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/decode-ignore-bom.any.js
@@ -0,0 +1,38 @@
+// META: global=window,worker
+// META: script=resources/readable-stream-from-array.js
+// META: script=resources/readable-stream-to-array.js
+
+const cases = [
+ {encoding: 'utf-8', bytes: [0xEF, 0xBB, 0xBF, 0x61, 0x62, 0x63]},
+ {encoding: 'utf-16le', bytes: [0xFF, 0xFE, 0x61, 0x00, 0x62, 0x00, 0x63, 0x00]},
+ {encoding: 'utf-16be', bytes: [0xFE, 0xFF, 0x00, 0x61, 0x00, 0x62, 0x00, 0x63]}
+];
+const BOM = '\uFEFF';
+
+// |inputChunks| is an array of chunks, each represented by an array of
+// integers. |ignoreBOM| is true or false. The result value is the output of the
+// pipe, concatenated into a single string.
+async function pipeAndAssemble(inputChunks, encoding, ignoreBOM) {
+ const chunksAsUint8 = inputChunks.map(values => new Uint8Array(values));
+ const readable = readableStreamFromArray(chunksAsUint8);
+ const outputArray = await readableStreamToArray(readable.pipeThrough(
+ new TextDecoderStream(encoding, {ignoreBOM})));
+ return outputArray.join('');
+}
+
+for (const testCase of cases) {
+ for (let splitPoint = 0; splitPoint < 4; ++splitPoint) {
+ promise_test(async () => {
+ const inputChunks = [testCase.bytes.slice(0, splitPoint),
+ testCase.bytes.slice(splitPoint)];
+ const withIgnoreBOM =
+ await pipeAndAssemble(inputChunks, testCase.encoding, true);
+ assert_equals(withIgnoreBOM, BOM + 'abc', 'BOM should be preserved');
+
+ const withoutIgnoreBOM =
+ await pipeAndAssemble(inputChunks, testCase.encoding, false);
+ assert_equals(withoutIgnoreBOM, 'abc', 'BOM should be stripped');
+ }, `ignoreBOM should work for encoding ${testCase.encoding}, split at ` +
+ `character ${splitPoint}`);
+ }
+}
diff --git a/testing/web-platform/tests/encoding/streams/decode-incomplete-input.any.js b/testing/web-platform/tests/encoding/streams/decode-incomplete-input.any.js
new file mode 100644
index 0000000000..3add74336d
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/decode-incomplete-input.any.js
@@ -0,0 +1,24 @@
+// META: global=window,worker
+// META: script=resources/readable-stream-from-array.js
+// META: script=resources/readable-stream-to-array.js
+
+'use strict';
+
+const inputBytes = [229];
+
+promise_test(async () => {
+ const input = readableStreamFromArray([new Uint8Array(inputBytes)]);
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_array_equals(array, ['\uFFFD'], 'array should have one element');
+}, 'incomplete input with error mode "replacement" should end with a ' +
+ 'replacement character');
+
+promise_test(async t => {
+ const input = readableStreamFromArray([new Uint8Array(inputBytes)]);
+ const output = input.pipeThrough(new TextDecoderStream(
+ 'utf-8', {fatal: true}));
+ const reader = output.getReader();
+ await promise_rejects_js(t, TypeError, reader.read(),
+ 'read should reject');
+}, 'incomplete input with error mode "fatal" should error the stream');
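
These two tests mirror the end-of-stream behaviour of the non-streaming TextDecoder, which the comment in decode-non-utf8.any.js below assumes shares its implementation. For comparison, a non-streaming sketch:

// 0xE5 begins a three-byte UTF-8 sequence that is never completed.
console.log(new TextDecoder().decode(new Uint8Array([229])));
// => '\uFFFD' (error mode "replacement")

new TextDecoder('utf-8', {fatal: true}).decode(new Uint8Array([229]));
// => throws TypeError (error mode "fatal")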
diff --git a/testing/web-platform/tests/encoding/streams/decode-non-utf8.any.js b/testing/web-platform/tests/encoding/streams/decode-non-utf8.any.js
new file mode 100644
index 0000000000..2950a9e58e
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/decode-non-utf8.any.js
@@ -0,0 +1,83 @@
+// META: global=window,worker
+
+'use strict';
+
+// The browser is assumed to use the same implementation as for TextDecoder, so
+// this file doesn't replicate the exhaustive checks it has. It is just a smoke
+// test that non-UTF-8 encodings work at all.
+
+const encodings = [
+ {
+ name: 'UTF-16BE',
+ value: [108, 52],
+ expected: "\u{6c34}",
+ invalid: [0xD8, 0x00]
+ },
+ {
+ name: 'UTF-16LE',
+ value: [52, 108],
+ expected: "\u{6c34}",
+ invalid: [0x00, 0xD8]
+ },
+ {
+ name: 'Shift_JIS',
+ value: [144, 133],
+ expected: "\u{6c34}",
+ invalid: [255]
+ },
+ {
+ name: 'ISO-2022-JP',
+ value: [65, 66, 67, 0x1B, 65, 66, 67],
+ expected: "ABC\u{fffd}ABC",
+ invalid: [0x0E]
+ },
+ {
+ name: 'ISO-8859-14',
+ value: [100, 240, 114],
+ expected: "d\u{0175}r",
+ invalid: undefined // all bytes are treated as valid
+ }
+];
+
+for (const encoding of encodings) {
+ promise_test(async () => {
+ const stream = new TextDecoderStream(encoding.name);
+ const reader = stream.readable.getReader();
+ const writer = stream.writable.getWriter();
+ const writePromise = writer.write(new Uint8Array(encoding.value));
+ const {value, done} = await reader.read();
+ assert_false(done, 'readable should not be closed');
+ assert_equals(value, encoding.expected, 'chunk should match expected');
+ await writePromise;
+ }, `TextDecoderStream should be able to decode ${encoding.name}`);
+
+ if (!encoding.invalid)
+ continue;
+
+ promise_test(async t => {
+ const stream = new TextDecoderStream(encoding.name);
+ const reader = stream.readable.getReader();
+ const writer = stream.writable.getWriter();
+ const writePromise = writer.write(new Uint8Array(encoding.invalid));
+ const closePromise = writer.close();
+ const {value, done} = await reader.read();
+ assert_false(done, 'readable should not be closed');
+ assert_equals(value, '\u{FFFD}', 'output should be replacement character');
+ await Promise.all([writePromise, closePromise]);
+ }, `TextDecoderStream should be able to decode invalid sequences in ` +
+ `${encoding.name}`);
+
+ promise_test(async t => {
+ const stream = new TextDecoderStream(encoding.name, {fatal: true});
+ const reader = stream.readable.getReader();
+ const writer = stream.writable.getWriter();
+ const writePromise = writer.write(new Uint8Array(encoding.invalid));
+ const closePromise = writer.close();
+ await promise_rejects_js(t, TypeError, reader.read(),
+ 'readable should be errored');
+ await promise_rejects_js(t, TypeError,
+ Promise.all([writePromise, closePromise]),
+ 'writable should be errored');
+ }, `TextDecoderStream should be able to reject invalid sequences in ` +
+ `${encoding.name}`);
+}
diff --git a/testing/web-platform/tests/encoding/streams/decode-split-character.any.js b/testing/web-platform/tests/encoding/streams/decode-split-character.any.js
new file mode 100644
index 0000000000..d589694fce
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/decode-split-character.any.js
@@ -0,0 +1,50 @@
+// META: global=window,worker
+// META: script=resources/readable-stream-from-array.js
+// META: script=resources/readable-stream-to-array.js
+
+'use strict';
+
+const inputBytes = [73, 32, 240, 159, 146, 153, 32, 115, 116, 114, 101,
+ 97, 109, 115];
+for (const splitPoint of [2, 3, 4, 5]) {
+ promise_test(async () => {
+ const input = readableStreamFromArray(
+ [new Uint8Array(inputBytes.slice(0, splitPoint)),
+ new Uint8Array(inputBytes.slice(splitPoint))]);
+ const expectedOutput = ['I ', '\u{1F499} streams'];
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_array_equals(array, expectedOutput,
+ 'the split code point should be in the second chunk ' +
+ 'of the output');
+ }, 'a code point split between chunks should not be emitted until all ' +
+ 'bytes are available; split point = ' + splitPoint);
+}
+
+promise_test(async () => {
+ const splitPoint = 6;
+ const input = readableStreamFromArray(
+ [new Uint8Array(inputBytes.slice(0, splitPoint)),
+ new Uint8Array(inputBytes.slice(splitPoint))]);
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_array_equals(array, ['I \u{1F499}', ' streams'],
+ 'the multibyte character should be in the first chunk ' +
+ 'of the output');
+}, 'a code point should be emitted as soon as all bytes are available');
+
+for (let splitPoint = 1; splitPoint < 7; ++splitPoint) {
+ promise_test(async () => {
+ const input = readableStreamFromArray(
+ [new Uint8Array(inputBytes.slice(0, splitPoint)),
+ new Uint8Array([]),
+ new Uint8Array(inputBytes.slice(splitPoint))]);
+ const concatenatedOutput = 'I \u{1F499} streams';
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_equals(array.length, 2, 'two chunks should be output');
+ assert_equals(array[0].concat(array[1]), concatenatedOutput,
+ 'output should be unchanged by the empty chunk');
+ }, 'an empty chunk inside a code point split between chunks should not ' +
+ 'change the output; split point = ' + splitPoint);
+}
diff --git a/testing/web-platform/tests/encoding/streams/decode-utf8.any.js b/testing/web-platform/tests/encoding/streams/decode-utf8.any.js
new file mode 100644
index 0000000000..f6fceb299b
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/decode-utf8.any.js
@@ -0,0 +1,76 @@
+// META: global=window,worker
+// META: script=resources/readable-stream-from-array.js
+// META: script=resources/readable-stream-to-array.js
+// META: script=/common/sab.js
+'use strict';
+
+["ArrayBuffer", "SharedArrayBuffer"].forEach((arrayBufferOrSharedArrayBuffer) => {
+ const inputChunkData = [73, 32, 240, 159, 146, 153, 32, 115, 116, 114, 101, 97, 109, 115];
+
+ const emptyChunk = new Uint8Array(createBuffer(arrayBufferOrSharedArrayBuffer, 0));
+ const inputChunk = new Uint8Array(createBuffer(arrayBufferOrSharedArrayBuffer, inputChunkData.length));
+
+ inputChunk.set(inputChunkData);
+
+ const expectedOutputString = 'I \u{1F499} streams';
+
+ promise_test(async () => {
+ const input = readableStreamFromArray([inputChunk]);
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_array_equals(array, [expectedOutputString],
+ 'the output should be in one chunk');
+ }, 'decoding one UTF-8 chunk should give one output string - ' + arrayBufferOrSharedArrayBuffer);
+
+ promise_test(async () => {
+ const input = readableStreamFromArray([emptyChunk]);
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_array_equals(array, [], 'no chunks should be output');
+ }, 'decoding an empty chunk should give no output chunks - ' + arrayBufferOrSharedArrayBuffer);
+
+ promise_test(async () => {
+ const input = readableStreamFromArray([emptyChunk, inputChunk]);
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_array_equals(array, [expectedOutputString],
+ 'the output should be in one chunk');
+ }, 'an initial empty chunk should be ignored - ' + arrayBufferOrSharedArrayBuffer);
+
+ promise_test(async () => {
+ const input = readableStreamFromArray([inputChunk, emptyChunk]);
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_array_equals(array, [expectedOutputString],
+ 'the output should be in one chunk');
+ }, 'a trailing empty chunk should be ignored - ' + arrayBufferOrSharedArrayBuffer);
+
+ promise_test(async () => {
+ const chunk = new Uint8Array(createBuffer(arrayBufferOrSharedArrayBuffer, 3));
+ chunk.set([0xF0, 0x9F, 0x92]);
+ const input = readableStreamFromArray([chunk]);
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_array_equals(array, ['\uFFFD']);
+ }, 'UTF-8 EOF handling - ' + arrayBufferOrSharedArrayBuffer);
+});
+
+promise_test(async () => {
+ const buffer = new ArrayBuffer(3);
+ const view = new Uint8Array(buffer, 1, 1);
+ view[0] = 65;
+ new MessageChannel().port1.postMessage(buffer, [buffer]);
+ const input = readableStreamFromArray([view]);
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_array_equals(array, [], 'no chunks should be output');
+}, 'decoding a transferred Uint8Array chunk should give no output');
+
+promise_test(async () => {
+ const buffer = new ArrayBuffer(1);
+ new MessageChannel().port1.postMessage(buffer, [buffer]);
+ const input = readableStreamFromArray([buffer]);
+ const output = input.pipeThrough(new TextDecoderStream());
+ const array = await readableStreamToArray(output);
+ assert_array_equals(array, [], 'no chunks should be output');
+}, 'decoding a transferred ArrayBuffer chunk should give no output');
diff --git a/testing/web-platform/tests/encoding/streams/decode-utf8.any.js.headers b/testing/web-platform/tests/encoding/streams/decode-utf8.any.js.headers
new file mode 100644
index 0000000000..4fff9d9fba
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/decode-utf8.any.js.headers
@@ -0,0 +1,2 @@
+Cross-Origin-Opener-Policy: same-origin
+Cross-Origin-Embedder-Policy: require-corp
\ No newline at end of file
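
These headers make decode-utf8.any.js load cross-origin isolated, which is what allows the SharedArrayBuffer cases above to construct shared buffers at all. A brief illustrative check (not from the patch):

// Only pages served with COOP: same-origin and COEP: require-corp are
// cross-origin isolated, and only then is SharedArrayBuffer constructible.
if (self.crossOriginIsolated) {
  const sab = new SharedArrayBuffer(8);
  console.log(sab.byteLength);  // 8
}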
diff --git a/testing/web-platform/tests/encoding/streams/encode-bad-chunks.any.js b/testing/web-platform/tests/encoding/streams/encode-bad-chunks.any.js
new file mode 100644
index 0000000000..4a926a67d2
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/encode-bad-chunks.any.js
@@ -0,0 +1,63 @@
+// META: global=window,worker
+// META: script=resources/readable-stream-from-array.js
+// META: script=resources/readable-stream-to-array.js
+
+'use strict';
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+promise_test(t => {
+ const ts = new TextEncoderStream();
+ const writer = ts.writable.getWriter();
+ const reader = ts.readable.getReader();
+ const writePromise = writer.write({
+ toString() { throw error1; }
+ });
+ const readPromise = reader.read();
+ return Promise.all([
+ promise_rejects_exactly(t, error1, readPromise, 'read should reject with error1'),
+ promise_rejects_exactly(t, error1, writePromise, 'write should reject with error1'),
+ promise_rejects_exactly(t, error1, reader.closed, 'readable should be errored with error1'),
+ promise_rejects_exactly(t, error1, writer.closed, 'writable should be errored with error1'),
+ ]);
+}, 'a chunk that cannot be converted to a string should error the streams');
+
+const oddInputs = [
+ {
+ name: 'undefined',
+ value: undefined,
+ expected: 'undefined'
+ },
+ {
+ name: 'null',
+ value: null,
+ expected: 'null'
+ },
+ {
+ name: 'numeric',
+ value: 3.14,
+ expected: '3.14'
+ },
+ {
+ name: 'object',
+ value: {},
+ expected: '[object Object]'
+ },
+ {
+ name: 'array',
+ value: ['hi'],
+ expected: 'hi'
+ }
+];
+
+for (const input of oddInputs) {
+ promise_test(async () => {
+ const outputReadable = readableStreamFromArray([input.value])
+ .pipeThrough(new TextEncoderStream())
+ .pipeThrough(new TextDecoderStream());
+ const output = await readableStreamToArray(outputReadable);
+ assert_equals(output.length, 1, 'output should contain one chunk');
+ assert_equals(output[0], input.expected, 'output should be correct');
+ }, `input of type ${input.name} should be converted correctly to string`);
+}
diff --git a/testing/web-platform/tests/encoding/streams/encode-utf8.any.js b/testing/web-platform/tests/encoding/streams/encode-utf8.any.js
new file mode 100644
index 0000000000..a5ba8f91ea
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/encode-utf8.any.js
@@ -0,0 +1,144 @@
+// META: global=window,worker
+// META: script=resources/readable-stream-from-array.js
+// META: script=resources/readable-stream-to-array.js
+
+'use strict';
+const inputString = 'I \u{1F499} streams';
+const expectedOutputBytes = [0x49, 0x20, 0xf0, 0x9f, 0x92, 0x99, 0x20, 0x73,
+ 0x74, 0x72, 0x65, 0x61, 0x6d, 0x73];
+// This is a character that must be represented in two code units in a string,
+// i.e. it is not in the Basic Multilingual Plane.
+const astralCharacter = '\u{1F499}'; // BLUE HEART
+const astralCharacterEncoded = [0xf0, 0x9f, 0x92, 0x99];
+const leading = astralCharacter[0];
+const trailing = astralCharacter[1];
+const replacementEncoded = [0xef, 0xbf, 0xbd];
+
+// These tests assume that the implementation correctly classifies leading and
+// trailing surrogates and treats all the code units in each set equivalently.
+
+const testCases = [
+ {
+ input: [inputString],
+ output: [expectedOutputBytes],
+ description: 'encoding one string of UTF-8 should give one complete chunk'
+ },
+ {
+ input: [leading, trailing],
+ output: [astralCharacterEncoded],
+ description: 'a character split between chunks should be correctly encoded'
+ },
+ {
+ input: [leading, trailing + astralCharacter],
+ output: [astralCharacterEncoded.concat(astralCharacterEncoded)],
+ description: 'a character following one split between chunks should be ' +
+ 'correctly encoded'
+ },
+ {
+ input: [leading, trailing + leading, trailing],
+ output: [astralCharacterEncoded, astralCharacterEncoded],
+ description: 'two consecutive astral characters each split down the ' +
+ 'middle should be correctly reassembled'
+ },
+ {
+ input: [leading, trailing + leading + leading, trailing],
+ output: [astralCharacterEncoded.concat(replacementEncoded), astralCharacterEncoded],
+ description: 'two consecutive astral characters each split down the ' +
+ 'middle with an invalid surrogate in the middle should be correctly ' +
+ 'encoded'
+ },
+ {
+ input: [leading],
+ output: [replacementEncoded],
+ description: 'a stream ending in a leading surrogate should emit a ' +
+ 'replacement character as a final chunk'
+ },
+ {
+ input: [leading, astralCharacter],
+ output: [replacementEncoded.concat(astralCharacterEncoded)],
+ description: 'an unmatched surrogate at the end of a chunk followed by ' +
+ 'an astral character in the next chunk should be replaced with ' +
+ 'the replacement character at the start of the next output chunk'
+ },
+ {
+ input: [leading, 'A'],
+ output: [replacementEncoded.concat([65])],
+ description: 'an unmatched surrogate at the end of a chunk followed by ' +
+ 'an ascii character in the next chunk should be replaced with ' +
+ 'the replacement character at the start of the next output chunk'
+ },
+ {
+ input: [leading, leading, trailing],
+ output: [replacementEncoded, astralCharacterEncoded],
+ description: 'an unmatched surrogate at the end of a chunk followed by ' +
+ 'a plane 1 character split into two chunks should result in ' +
+ 'the encoded plane 1 character appearing in the last output chunk'
+ },
+ {
+ input: [leading, leading],
+ output: [replacementEncoded, replacementEncoded],
+ description: 'two leading chunks should result in two replacement ' +
+ 'characters'
+ },
+ {
+ input: [leading + leading, trailing],
+ output: [replacementEncoded, astralCharacterEncoded],
+ description: 'a non-terminal unpaired leading surrogate should ' +
+ 'immediately be replaced'
+ },
+ {
+ input: [trailing, astralCharacter],
+ output: [replacementEncoded, astralCharacterEncoded],
+ description: 'a terminal unpaired trailing surrogate should ' +
+ 'immediately be replaced'
+ },
+ {
+ input: [leading, '', trailing],
+ output: [astralCharacterEncoded],
+ description: 'a leading surrogate chunk should be carried past empty chunks'
+ },
+ {
+ input: [leading, ''],
+ output: [replacementEncoded],
+ description: 'a leading surrogate chunk should error when it is clear ' +
+ 'it didn\'t form a pair'
+ },
+ {
+ input: [''],
+ output: [],
+ description: 'an empty string should result in no output chunk'
+ },
+ {
+ input: ['', inputString],
+ output: [expectedOutputBytes],
+ description: 'a leading empty chunk should be ignored'
+ },
+ {
+ input: [inputString, ''],
+ output: [expectedOutputBytes],
+ description: 'a trailing empty chunk should be ignored'
+ },
+ {
+ input: ['A'],
+ output: [[65]],
+ description: 'a plain ASCII chunk should be converted'
+ },
+ {
+ input: ['\xff'],
+ output: [[195, 191]],
+ description: 'characters in the ISO-8859-1 range should be encoded correctly'
+ },
+];
+
+for (const {input, output, description} of testCases) {
+ promise_test(async () => {
+ const inputStream = readableStreamFromArray(input);
+ const outputStream = inputStream.pipeThrough(new TextEncoderStream());
+ const chunkArray = await readableStreamToArray(outputStream);
+ assert_equals(chunkArray.length, output.length,
+ 'number of chunks should match');
+ for (let i = 0; i < output.length; ++i) {
+ assert_array_equals(chunkArray[i], output[i], `chunk ${i} should match`);
+ }
+ }, description);
+}
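
The surrogate cases above have a simpler non-streaming counterpart: TextEncoder takes a USVString, so a lone surrogate passed in a single call is replaced with U+FFFD immediately, whereas the streaming encoder must hold a trailing lead surrogate in case its pair arrives in the next chunk. For comparison (illustrative sketch):

const encoder = new TextEncoder();
encoder.encode('\uD83D\uDC99');  // paired surrogates (U+1F499) => Uint8Array [0xf0, 0x9f, 0x92, 0x99]
encoder.encode('\uD83D');        // lone lead surrogate         => Uint8Array [0xef, 0xbf, 0xbd]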
diff --git a/testing/web-platform/tests/encoding/streams/invalid-realm.window.js b/testing/web-platform/tests/encoding/streams/invalid-realm.window.js
new file mode 100644
index 0000000000..beaec42641
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/invalid-realm.window.js
@@ -0,0 +1,37 @@
+// Text*Stream should still work even if the realm is detached.
+
+// Adds an iframe to the document and returns it.
+function addIframe() {
+ const iframe = document.createElement('iframe');
+ document.body.appendChild(iframe);
+ return iframe;
+}
+
+promise_test(async t => {
+ const iframe = addIframe();
+ const stream = new iframe.contentWindow.TextDecoderStream();
+ const readPromise = stream.readable.getReader().read();
+ const writer = stream.writable.getWriter();
+ await writer.ready;
+ iframe.remove();
+ return Promise.all([writer.write(new Uint8Array([65])),readPromise]);
+}, 'TextDecoderStream: write in detached realm should succeed');
+
+promise_test(async t => {
+ const iframe = addIframe();
+ const stream = new iframe.contentWindow.TextEncoderStream();
+ const readPromise = stream.readable.getReader().read();
+ const writer = stream.writable.getWriter();
+ await writer.ready;
+ iframe.remove();
+ return Promise.all([writer.write('A'), readPromise]);
+}, 'TextEncoderStream: write in detached realm should succeed');
+
+for (const type of ['TextEncoderStream', 'TextDecoderStream']) {
+ promise_test(async t => {
+ const iframe = addIframe();
+ const stream = new iframe.contentWindow[type]();
+ iframe.remove();
+ return stream.writable.close();
+ }, `${type}: close in detached realm should succeed`);
+}
diff --git a/testing/web-platform/tests/encoding/streams/readable-writable-properties.any.js b/testing/web-platform/tests/encoding/streams/readable-writable-properties.any.js
new file mode 100644
index 0000000000..234649209c
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/readable-writable-properties.any.js
@@ -0,0 +1,22 @@
+// META: global=window,worker
+
+// This just tests that the "readable" and "writable" properties pass the brand
+// checks. All other relevant attributes are covered by the IDL tests.
+
+'use strict';
+
+test(() => {
+ const te = new TextEncoderStream();
+ assert_equals(typeof ReadableStream.prototype.getReader.call(te.readable),
+ 'object', 'readable property must pass brand check');
+ assert_equals(typeof WritableStream.prototype.getWriter.call(te.writable),
+ 'object', 'writable property must pass brand check');
+}, 'TextEncoderStream readable and writable properties must pass brand checks');
+
+test(() => {
+ const td = new TextDecoderStream();
+ assert_equals(typeof ReadableStream.prototype.getReader.call(td.readable),
+ 'object', 'readable property must pass brand check');
+ assert_equals(typeof WritableStream.prototype.getWriter.call(td.writable),
+ 'object', 'writable property must pass brand check');
+}, 'TextDecoderStream readable and writable properties must pass brand checks');
diff --git a/testing/web-platform/tests/encoding/streams/realms.window.js b/testing/web-platform/tests/encoding/streams/realms.window.js
new file mode 100644
index 0000000000..ca9ce21abc
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/realms.window.js
@@ -0,0 +1,304 @@
+'use strict';
+
+// Test that objects created by the TextEncoderStream and TextDecoderStream APIs
+// are created in the correct realm. The tests work by creating an iframe for
+// each realm and then posting JavaScript to them to be evaluated. Inputs and
+// outputs are passed around via global variables in each realm's scope.
+
+// Async setup is required before creating any tests, so require done() to be
+// called.
+setup({explicit_done: true});
+
+function createRealm() {
+ let iframe = document.createElement('iframe');
+ const scriptEndTag = '<' + '/script>';
+ iframe.srcdoc = `<!doctype html>
+<script>
+onmessage = event => {
+ if (event.source !== window.parent) {
+ throw new Error('unexpected message with source ' + event.source);
+ }
+ eval(event.data);
+};
+${scriptEndTag}`;
+ iframe.style.display = 'none';
+ document.body.appendChild(iframe);
+ let realmPromiseResolve;
+ const realmPromise = new Promise(resolve => {
+ realmPromiseResolve = resolve;
+ });
+ iframe.onload = () => {
+ realmPromiseResolve(iframe.contentWindow);
+ };
+ return realmPromise;
+}
+
+async function createRealms() {
+ // All realms are visible on the global object so they can access each other.
+
+ // The realm that the constructor function comes from.
+ window.constructorRealm = await createRealm();
+
+ // The realm in which the constructor object is called.
+ window.constructedRealm = await createRealm();
+
+ // The realm in which reading happens.
+ window.readRealm = await createRealm();
+
+ // The realm in which writing happens.
+ window.writeRealm = await createRealm();
+
+ // The realm that provides the definitions of Readable and Writable methods.
+ window.methodRealm = await createRealm();
+
+ await evalInRealmAndWait(methodRealm, `
+ window.ReadableStreamDefaultReader =
+ new ReadableStream().getReader().constructor;
+ window.WritableStreamDefaultWriter =
+ new WritableStream().getWriter().constructor;
+`);
+ window.readMethod = methodRealm.ReadableStreamDefaultReader.prototype.read;
+ window.writeMethod = methodRealm.WritableStreamDefaultWriter.prototype.write;
+}
+
+// In order for values to be visible between realms, they need to be
+// global. To prevent interference between tests, variable names are generated
+// automatically.
+const id = (() => {
+ let nextId = 0;
+ return () => {
+ return `realmsId${nextId++}`;
+ };
+})();
+
+// Eval string "code" in the content of realm "realm". Evaluation happens
+// asynchronously, meaning it hasn't happened when the function returns.
+function evalInRealm(realm, code) {
+ realm.postMessage(code, window.origin);
+}
+
+// Same as evalInRealm() but returns a Promise which will resolve when the
+// code has actually been evaluated.
+async function evalInRealmAndWait(realm, code) {
+ const resolve = id();
+ const waitOn = new Promise(r => {
+ realm[resolve] = r;
+ });
+ evalInRealm(realm, code);
+ evalInRealm(realm, `${resolve}();`);
+ await waitOn;
+}
+
+// The same as evalInRealmAndWait but returns the result of evaluating "code" as
+// an expression.
+async function evalInRealmAndReturn(realm, code) {
+ const myId = id();
+ await evalInRealmAndWait(realm, `window.${myId} = ${code};`);
+ return realm[myId];
+}
+
+// Constructs an object in constructedRealm and copies it into readRealm and
+// writeRealm. Returns the id that can be used to access the object in those
+// realms. |what| can contain constructor arguments.
+async function constructAndStore(what) {
+ const objId = id();
+ // Call |constructorRealm|'s constructor from inside |constructedRealm|.
+ writeRealm[objId] = await evalInRealmAndReturn(
+ constructedRealm, `new parent.constructorRealm.${what}`);
+ readRealm[objId] = writeRealm[objId];
+ return objId;
+}
+
+// Calls read() on the readable side of the TransformStream stored in
+// readRealm[objId]. Locks the readable side as a side-effect.
+function readInReadRealm(objId) {
+ return evalInRealmAndReturn(readRealm, `
+parent.readMethod.call(window.${objId}.readable.getReader())`);
+}
+
+// Calls write() on the writable side of the TransformStream stored in
+// writeRealm[objId], passing |value|. Locks the writable side as a
+// side-effect.
+function writeInWriteRealm(objId, value) {
+ const valueId = id();
+ writeRealm[valueId] = value;
+ return evalInRealmAndReturn(writeRealm, `
+parent.writeMethod.call(window.${objId}.writable.getWriter(),
+ window.${valueId})`);
+}
+
+window.onload = () => {
+ createRealms().then(() => {
+ runGenericTests('TextEncoderStream');
+ runTextEncoderStreamTests();
+ runGenericTests('TextDecoderStream');
+ runTextDecoderStreamTests();
+ done();
+ });
+};
+
+function runGenericTests(classname) {
+ promise_test(async () => {
+ const obj = await evalInRealmAndReturn(
+ constructedRealm, `new parent.constructorRealm.${classname}()`);
+ assert_equals(obj.constructor, constructorRealm[classname],
+ 'obj should be in constructor realm');
+ }, `a ${classname} object should be associated with the realm the ` +
+ 'constructor came from');
+
+ promise_test(async () => {
+ const objId = await constructAndStore(classname);
+ const readableGetterId = id();
+ readRealm[readableGetterId] = Object.getOwnPropertyDescriptor(
+ methodRealm[classname].prototype, 'readable').get;
+ const writableGetterId = id();
+ writeRealm[writableGetterId] = Object.getOwnPropertyDescriptor(
+ methodRealm[classname].prototype, 'writable').get;
+ const readable = await evalInRealmAndReturn(
+ readRealm, `${readableGetterId}.call(${objId})`);
+ const writable = await evalInRealmAndReturn(
+ writeRealm, `${writableGetterId}.call(${objId})`);
+ assert_equals(readable.constructor, constructorRealm.ReadableStream,
+ 'readable should be in constructor realm');
+ assert_equals(writable.constructor, constructorRealm.WritableStream,
+ 'writable should be in constructor realm');
+ }, `${classname}'s readable and writable attributes should come from the ` +
+ 'same realm as the constructor definition');
+}
+
+function runTextEncoderStreamTests() {
+ promise_test(async () => {
+ const objId = await constructAndStore('TextEncoderStream');
+ const writePromise = writeInWriteRealm(objId, 'A');
+ const result = await readInReadRealm(objId);
+ await writePromise;
+ assert_equals(result.constructor, constructorRealm.Object,
+ 'result should be in constructor realm');
+ assert_equals(result.value.constructor, constructorRealm.Uint8Array,
+ 'chunk should be in constructor realm');
+ }, 'the output chunks when read is called after write should come from the ' +
+ 'same realm as the constructor of TextEncoderStream');
+
+ promise_test(async () => {
+ const objId = await constructAndStore('TextEncoderStream');
+ const chunkPromise = readInReadRealm(objId);
+ writeInWriteRealm(objId, 'A');
+ // Now the read() should resolve.
+ const result = await chunkPromise;
+ assert_equals(result.constructor, constructorRealm.Object,
+ 'result should be in constructor realm');
+ assert_equals(result.value.constructor, constructorRealm.Uint8Array,
+ 'chunk should be in constructor realm');
+ }, 'the output chunks when write is called with a pending read should come ' +
+ 'from the same realm as the constructor of TextEncoderStream');
+
+ // There is not absolute consensus regarding what realm exceptions should be
+ // created in. Implementations may vary. The expectations in exception-related
+ // tests may change in future once consensus is reached.
+ promise_test(async t => {
+ const objId = await constructAndStore('TextEncoderStream');
+ // Read first to relieve backpressure.
+ const readPromise = readInReadRealm(objId);
+
+ await promise_rejects_js(t, constructorRealm.TypeError,
+ writeInWriteRealm(objId, {
+ toString() { return {}; }
+ }),
+ 'write TypeError should come from constructor realm');
+
+ return promise_rejects_js(t, constructorRealm.TypeError, readPromise,
+ 'read TypeError should come from constructor realm');
+ }, 'TypeError for unconvertable chunk should come from constructor realm ' +
+ 'of TextEncoderStream');
+}
+
+function runTextDecoderStreamTests() {
+ promise_test(async () => {
+ const objId = await constructAndStore('TextDecoderStream');
+ const writePromise = writeInWriteRealm(objId, new Uint8Array([65]));
+ const result = await readInReadRealm(objId);
+ await writePromise;
+ assert_equals(result.constructor, constructorRealm.Object,
+ 'result should be in constructor realm');
+ // A string is not an object, so doesn't have an associated realm. Accessing
+ // string properties will create a transient object wrapper belonging to the
+ // current realm. So checking the realm of result.value is not useful.
+ }, 'the result object when read is called after write should come from the ' +
+ 'same realm as the constructor of TextDecoderStream');
+
+ promise_test(async () => {
+ const objId = await constructAndStore('TextDecoderStream');
+ const chunkPromise = readInReadRealm(objId);
+ writeInWriteRealm(objId, new Uint8Array([65]));
+ // Now the read() should resolve.
+ const result = await chunkPromise;
+ assert_equals(result.constructor, constructorRealm.Object,
+ 'result should be in constructor realm');
+ // A string is not an object, so doesn't have an associated realm. Accessing
+ // string properties will create a transient object wrapper belonging to the
+ // current realm. So checking the realm of result.value is not useful.
+ }, 'the result object when write is called with a pending ' +
+ 'read should come from the same realm as the constructor of TextDecoderStream');
+
+ promise_test(async t => {
+ const objId = await constructAndStore('TextDecoderStream');
+ // Read first to relieve backpressure.
+ const readPromise = readInReadRealm(objId);
+ await promise_rejects_js(
+ t, constructorRealm.TypeError,
+ writeInWriteRealm(objId, {}),
+ 'write TypeError should come from constructor realm'
+ );
+
+ return promise_rejects_js(
+ t, constructorRealm.TypeError, readPromise,
+ 'read TypeError should come from constructor realm'
+ );
+ }, 'TypeError for chunk with the wrong type should come from constructor ' +
+ 'realm of TextDecoderStream');
+
+ promise_test(async t => {
+ const objId =
+ await constructAndStore(`TextDecoderStream('utf-8', {fatal: true})`);
+ // Read first to relieve backpressure.
+ const readPromise = readInReadRealm(objId);
+
+ await promise_rejects_js(
+ t, constructorRealm.TypeError,
+ writeInWriteRealm(objId, new Uint8Array([0xff])),
+ 'write TypeError should come from constructor realm'
+ );
+
+ return promise_rejects_js(
+ t, constructorRealm.TypeError, readPromise,
+ 'read TypeError should come from constructor realm'
+ );
+ }, 'TypeError for invalid chunk should come from constructor realm ' +
+ 'of TextDecoderStream');
+
+ promise_test(async t => {
+ const objId =
+ await constructAndStore(`TextDecoderStream('utf-8', {fatal: true})`);
+ // Read first to relieve backpressure.
+ readInReadRealm(objId);
+ // Write an unfinished sequence of bytes.
+ const incompleteBytesId = id();
+ writeRealm[incompleteBytesId] = new Uint8Array([0xf0]);
+
+ return promise_rejects_js(
+ t, constructorRealm.TypeError,
+ // Can't use writeInWriteRealm() here because it doesn't make it possible
+ // to reuse the writer.
+ evalInRealmAndReturn(writeRealm, `
+(() => {
+ const writer = window.${objId}.writable.getWriter();
+ parent.writeMethod.call(writer, window.${incompleteBytesId});
+ return parent.methodRealm.WritableStreamDefaultWriter.prototype
+ .close.call(writer);
+})();
+`),
+ 'close TypeError should come from constructor realm'
+ );
+ }, 'TypeError for incomplete input should come from constructor realm ' +
+ 'of TextDecoderStream');
+}
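
The machinery in realms.window.js boils down to the fact that each iframe is its own realm with its own copies of the global constructors, and an object keeps the prototypes of the realm whose constructor created it. A stripped-down sketch of that idea (illustrative only, without the postMessage plumbing):

const iframe = document.createElement('iframe');
document.body.appendChild(iframe);

// Each realm has a distinct TextEncoderStream constructor...
const OtherTextEncoderStream = iframe.contentWindow.TextEncoderStream;
console.log(OtherTextEncoderStream === TextEncoderStream);  // false

// ...and an instance belongs to the realm of the constructor that made it.
const stream = new OtherTextEncoderStream();
console.log(stream instanceof OtherTextEncoderStream);      // true
console.log(stream instanceof TextEncoderStream);           // false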
diff --git a/testing/web-platform/tests/encoding/streams/resources/readable-stream-from-array.js b/testing/web-platform/tests/encoding/streams/resources/readable-stream-from-array.js
new file mode 100644
index 0000000000..5c12ba8c8b
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/resources/readable-stream-from-array.js
@@ -0,0 +1,12 @@
+'use strict';
+
+function readableStreamFromArray(array) {
+ return new ReadableStream({
+ start(controller) {
+ for (let entry of array) {
+ controller.enqueue(entry);
+ }
+ controller.close();
+ }
+ });
+}
diff --git a/testing/web-platform/tests/encoding/streams/resources/readable-stream-to-array.js b/testing/web-platform/tests/encoding/streams/resources/readable-stream-to-array.js
new file mode 100644
index 0000000000..fda03e2264
--- /dev/null
+++ b/testing/web-platform/tests/encoding/streams/resources/readable-stream-to-array.js
@@ -0,0 +1,11 @@
+'use strict';
+
+function readableStreamToArray(stream) {
+ var array = [];
+ var writable = new WritableStream({
+ write(chunk) {
+ array.push(chunk);
+ }
+ });
+ return stream.pipeTo(writable).then(() => array);
+}
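
Together, the two resource helpers give the pattern used throughout these tests: build a readable stream from fixed chunks, pipe it through the stream under test, and collect the output chunks. A usage sketch (illustrative only):

// Decode two byte chunks into an array of output strings.
const chunks = [new Uint8Array([72, 105]), new Uint8Array([33])];
readableStreamToArray(
    readableStreamFromArray(chunks).pipeThrough(new TextDecoderStream()))
    .then(strings => console.log(strings));  // e.g. ['Hi', '!']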