summaryrefslogtreecommitdiffstats
path: root/testing/web-platform/tests/streams
diff options
context:
space:
mode:
Diffstat (limited to 'testing/web-platform/tests/streams')
-rw-r--r--testing/web-platform/tests/streams/META.yml7
-rw-r--r--testing/web-platform/tests/streams/README.md3
-rw-r--r--testing/web-platform/tests/streams/idlharness-shadowrealm.window.js2
-rw-r--r--testing/web-platform/tests/streams/idlharness.any.js79
-rw-r--r--testing/web-platform/tests/streams/piping/abort.any.js408
-rw-r--r--testing/web-platform/tests/streams/piping/close-propagation-backward.any.js153
-rw-r--r--testing/web-platform/tests/streams/piping/close-propagation-forward.any.js589
-rw-r--r--testing/web-platform/tests/streams/piping/crashtests/cross-piping.html12
-rw-r--r--testing/web-platform/tests/streams/piping/error-propagation-backward.any.js630
-rw-r--r--testing/web-platform/tests/streams/piping/error-propagation-forward.any.js569
-rw-r--r--testing/web-platform/tests/streams/piping/flow-control.any.js297
-rw-r--r--testing/web-platform/tests/streams/piping/general-addition.any.js15
-rw-r--r--testing/web-platform/tests/streams/piping/general.any.js212
-rw-r--r--testing/web-platform/tests/streams/piping/multiple-propagation.any.js227
-rw-r--r--testing/web-platform/tests/streams/piping/pipe-through.any.js331
-rw-r--r--testing/web-platform/tests/streams/piping/then-interception.any.js68
-rw-r--r--testing/web-platform/tests/streams/piping/throwing-options.any.js65
-rw-r--r--testing/web-platform/tests/streams/piping/transform-streams.any.js22
-rw-r--r--testing/web-platform/tests/streams/queuing-strategies-size-function-per-global.window.js14
-rw-r--r--testing/web-platform/tests/streams/queuing-strategies.any.js150
-rw-r--r--testing/web-platform/tests/streams/readable-byte-streams/bad-buffers-and-views.any.js398
-rw-r--r--testing/web-platform/tests/streams/readable-byte-streams/construct-byob-request.any.js53
-rw-r--r--testing/web-platform/tests/streams/readable-byte-streams/enqueue-with-detached-buffer.any.js21
-rw-r--r--testing/web-platform/tests/streams/readable-byte-streams/general.any.js2901
-rw-r--r--testing/web-platform/tests/streams/readable-byte-streams/non-transferable-buffers.any.js70
-rw-r--r--testing/web-platform/tests/streams/readable-byte-streams/read-min.any.js774
-rw-r--r--testing/web-platform/tests/streams/readable-byte-streams/respond-after-enqueue.any.js55
-rw-r--r--testing/web-platform/tests/streams/readable-byte-streams/tee.any.js936
-rw-r--r--testing/web-platform/tests/streams/readable-streams/async-iterator.any.js650
-rw-r--r--testing/web-platform/tests/streams/readable-streams/bad-strategies.any.js198
-rw-r--r--testing/web-platform/tests/streams/readable-streams/bad-underlying-sources.any.js400
-rw-r--r--testing/web-platform/tests/streams/readable-streams/cancel.any.js236
-rw-r--r--testing/web-platform/tests/streams/readable-streams/constructor.any.js17
-rw-r--r--testing/web-platform/tests/streams/readable-streams/count-queuing-strategy-integration.any.js208
-rw-r--r--testing/web-platform/tests/streams/readable-streams/crashtests/empty.js0
-rw-r--r--testing/web-platform/tests/streams/readable-streams/crashtests/strategy-worker-terminate.html10
-rw-r--r--testing/web-platform/tests/streams/readable-streams/crashtests/strategy-worker.js4
-rw-r--r--testing/web-platform/tests/streams/readable-streams/cross-realm-crash.window.js13
-rw-r--r--testing/web-platform/tests/streams/readable-streams/default-reader.any.js539
-rw-r--r--testing/web-platform/tests/streams/readable-streams/floating-point-total-queue-size.any.js116
-rw-r--r--testing/web-platform/tests/streams/readable-streams/from.any.js474
-rw-r--r--testing/web-platform/tests/streams/readable-streams/garbage-collection.any.js71
-rw-r--r--testing/web-platform/tests/streams/readable-streams/general.any.js840
-rw-r--r--testing/web-platform/tests/streams/readable-streams/global.html162
-rw-r--r--testing/web-platform/tests/streams/readable-streams/owning-type-message-port.any.js49
-rw-r--r--testing/web-platform/tests/streams/readable-streams/owning-type-video-frame.any.js128
-rw-r--r--testing/web-platform/tests/streams/readable-streams/owning-type.any.js91
-rw-r--r--testing/web-platform/tests/streams/readable-streams/patched-global.any.js142
-rw-r--r--testing/web-platform/tests/streams/readable-streams/read-task-handling.window.js46
-rw-r--r--testing/web-platform/tests/streams/readable-streams/reentrant-strategies.any.js264
-rw-r--r--testing/web-platform/tests/streams/readable-streams/tee.any.js479
-rw-r--r--testing/web-platform/tests/streams/readable-streams/templated.any.js143
-rw-r--r--testing/web-platform/tests/streams/resources/recording-streams.js131
-rw-r--r--testing/web-platform/tests/streams/resources/rs-test-templates.js721
-rw-r--r--testing/web-platform/tests/streams/resources/rs-utils.js197
-rw-r--r--testing/web-platform/tests/streams/resources/test-utils.js27
-rw-r--r--testing/web-platform/tests/streams/transferable/deserialize-error.window.js39
-rw-r--r--testing/web-platform/tests/streams/transferable/gc-crash.html17
-rw-r--r--testing/web-platform/tests/streams/transferable/readable-stream.html260
-rw-r--r--testing/web-platform/tests/streams/transferable/reason.html132
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/create-wasm-module.js11
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/deserialize-error-frame.html39
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/echo-iframe.html7
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/echo-worker.js2
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/helpers.js132
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/receiving-shared-worker.js11
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/receiving-worker.js7
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/sending-shared-worker.js12
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/sending-worker.js5
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/service-worker-iframe.html39
-rw-r--r--testing/web-platform/tests/streams/transferable/resources/service-worker.js30
-rw-r--r--testing/web-platform/tests/streams/transferable/service-worker.https.html28
-rw-r--r--testing/web-platform/tests/streams/transferable/shared-worker.html25
-rw-r--r--testing/web-platform/tests/streams/transferable/transfer-with-messageport.window.js219
-rw-r--r--testing/web-platform/tests/streams/transferable/transform-stream-members.any.js18
-rw-r--r--testing/web-platform/tests/streams/transferable/transform-stream.html108
-rw-r--r--testing/web-platform/tests/streams/transferable/window.html55
-rw-r--r--testing/web-platform/tests/streams/transferable/worker.html76
-rw-r--r--testing/web-platform/tests/streams/transferable/writable-stream.html146
-rw-r--r--testing/web-platform/tests/streams/transform-streams/backpressure.any.js195
-rw-r--r--testing/web-platform/tests/streams/transform-streams/cancel.any.js115
-rw-r--r--testing/web-platform/tests/streams/transform-streams/errors.any.js360
-rw-r--r--testing/web-platform/tests/streams/transform-streams/flush.any.js146
-rw-r--r--testing/web-platform/tests/streams/transform-streams/general.any.js452
-rw-r--r--testing/web-platform/tests/streams/transform-streams/invalid-realm.tentative.window.js17
-rw-r--r--testing/web-platform/tests/streams/transform-streams/lipfuzz.any.js163
-rw-r--r--testing/web-platform/tests/streams/transform-streams/patched-global.any.js53
-rw-r--r--testing/web-platform/tests/streams/transform-streams/properties.any.js49
-rw-r--r--testing/web-platform/tests/streams/transform-streams/reentrant-strategies.any.js323
-rw-r--r--testing/web-platform/tests/streams/transform-streams/strategies.any.js150
-rw-r--r--testing/web-platform/tests/streams/transform-streams/terminate.any.js100
-rw-r--r--testing/web-platform/tests/streams/writable-streams/aborting.any.js1487
-rw-r--r--testing/web-platform/tests/streams/writable-streams/bad-strategies.any.js95
-rw-r--r--testing/web-platform/tests/streams/writable-streams/bad-underlying-sinks.any.js204
-rw-r--r--testing/web-platform/tests/streams/writable-streams/byte-length-queuing-strategy.any.js28
-rw-r--r--testing/web-platform/tests/streams/writable-streams/close.any.js470
-rw-r--r--testing/web-platform/tests/streams/writable-streams/constructor.any.js155
-rw-r--r--testing/web-platform/tests/streams/writable-streams/count-queuing-strategy.any.js124
-rw-r--r--testing/web-platform/tests/streams/writable-streams/error.any.js64
-rw-r--r--testing/web-platform/tests/streams/writable-streams/floating-point-total-queue-size.any.js87
-rw-r--r--testing/web-platform/tests/streams/writable-streams/general.any.js277
-rw-r--r--testing/web-platform/tests/streams/writable-streams/properties.any.js53
-rw-r--r--testing/web-platform/tests/streams/writable-streams/reentrant-strategy.any.js174
-rw-r--r--testing/web-platform/tests/streams/writable-streams/start.any.js163
-rw-r--r--testing/web-platform/tests/streams/writable-streams/write.any.js284
105 files changed, 22623 insertions, 0 deletions
diff --git a/testing/web-platform/tests/streams/META.yml b/testing/web-platform/tests/streams/META.yml
new file mode 100644
index 0000000000..1259a55cb5
--- /dev/null
+++ b/testing/web-platform/tests/streams/META.yml
@@ -0,0 +1,7 @@
+spec: https://streams.spec.whatwg.org/
+suggested_reviewers:
+ - domenic
+ - yutakahirano
+ - youennf
+ - wanderview
+ - ricea
diff --git a/testing/web-platform/tests/streams/README.md b/testing/web-platform/tests/streams/README.md
new file mode 100644
index 0000000000..9ab6e1284a
--- /dev/null
+++ b/testing/web-platform/tests/streams/README.md
@@ -0,0 +1,3 @@
+# Streams Tests
+
+The work on the streams tests is closely tracked by the specification authors, who maintain a reference implementation intended to match the spec line-by-line while passing all of these tests. See [the whatwg/streams repository for details](https://github.com/whatwg/streams/tree/main/reference-implementation). Some tests may be in that repository while the spec sections they test are still undergoing heavy churn.
diff --git a/testing/web-platform/tests/streams/idlharness-shadowrealm.window.js b/testing/web-platform/tests/streams/idlharness-shadowrealm.window.js
new file mode 100644
index 0000000000..099b2475ca
--- /dev/null
+++ b/testing/web-platform/tests/streams/idlharness-shadowrealm.window.js
@@ -0,0 +1,2 @@
+// META: script=/resources/idlharness-shadowrealm.js
+idl_test_shadowrealm(["streams"], ["dom"]);
diff --git a/testing/web-platform/tests/streams/idlharness.any.js b/testing/web-platform/tests/streams/idlharness.any.js
new file mode 100644
index 0000000000..42a17da58c
--- /dev/null
+++ b/testing/web-platform/tests/streams/idlharness.any.js
@@ -0,0 +1,79 @@
+// META: global=window,worker
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+// META: timeout=long
+
+idl_test(
+ ['streams'],
+ ['dom'], // for AbortSignal
+ async idl_array => {
+ // Empty try/catches ensure that if something isn't implemented (e.g., readable byte streams, or writable streams)
+ // the harness still sets things up correctly. Note that the corresponding interface tests will still fail.
+
+ try {
+ new ReadableStream({
+ start(c) {
+ self.readableStreamDefaultController = c;
+ }
+ });
+ } catch {}
+
+ try {
+ new ReadableStream({
+ start(c) {
+ self.readableByteStreamController = c;
+ },
+ type: 'bytes'
+ });
+ } catch {}
+
+ try {
+ let resolvePullCalledPromise;
+ const pullCalledPromise = new Promise(resolve => {
+ resolvePullCalledPromise = resolve;
+ });
+ const stream = new ReadableStream({
+ pull(c) {
+ self.readableStreamByobRequest = c.byobRequest;
+ resolvePullCalledPromise();
+ },
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+ reader.read(new Uint8Array(1));
+ await pullCalledPromise;
+ } catch {}
+
+ try {
+ new WritableStream({
+ start(c) {
+ self.writableStreamDefaultController = c;
+ }
+ });
+ } catch {}
+
+ try {
+ new TransformStream({
+ start(c) {
+ self.transformStreamDefaultController = c;
+ }
+ });
+ } catch {}
+
+ idl_array.add_objects({
+ ReadableStream: ["new ReadableStream()"],
+ ReadableStreamDefaultReader: ["(new ReadableStream()).getReader()"],
+ ReadableStreamBYOBReader: ["(new ReadableStream({ type: 'bytes' })).getReader({ mode: 'byob' })"],
+ ReadableStreamDefaultController: ["self.readableStreamDefaultController"],
+ ReadableByteStreamController: ["self.readableByteStreamController"],
+ ReadableStreamBYOBRequest: ["self.readableStreamByobRequest"],
+ WritableStream: ["new WritableStream()"],
+ WritableStreamDefaultWriter: ["(new WritableStream()).getWriter()"],
+ WritableStreamDefaultController: ["self.writableStreamDefaultController"],
+ TransformStream: ["new TransformStream()"],
+ TransformStreamDefaultController: ["self.transformStreamDefaultController"],
+ ByteLengthQueuingStrategy: ["new ByteLengthQueuingStrategy({ highWaterMark: 5 })"],
+ CountQueuingStrategy: ["new CountQueuingStrategy({ highWaterMark: 5 })"]
+ });
+ }
+);
diff --git a/testing/web-platform/tests/streams/piping/abort.any.js b/testing/web-platform/tests/streams/piping/abort.any.js
new file mode 100644
index 0000000000..c9929df91c
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/abort.any.js
@@ -0,0 +1,408 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/recording-streams.js
+// META: script=../resources/test-utils.js
+'use strict';
+
+// Tests for the use of pipeTo with AbortSignal.
+// There is some extra complexity to avoid timeouts in environments where abort is not implemented.
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+const error2 = new Error('error2');
+error2.name = 'error2';
+
+const errorOnPull = {
+ pull(controller) {
+ // This will cause the test to error if pipeTo abort is not implemented.
+ controller.error('failed to abort');
+ }
+};
+
+// To stop pull() being called immediately when the stream is created, we need to set highWaterMark to 0.
+const hwm0 = { highWaterMark: 0 };
+
+for (const invalidSignal of [null, 'AbortSignal', true, -1, Object.create(AbortSignal.prototype)]) {
+ promise_test(t => {
+ const rs = recordingReadableStream(errorOnPull, hwm0);
+ const ws = recordingWritableStream();
+ return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { signal: invalidSignal }), 'pipeTo should reject')
+ .then(() => {
+ assert_equals(rs.events.length, 0, 'no ReadableStream methods should have been called');
+ assert_equals(ws.events.length, 0, 'no WritableStream methods should have been called');
+ });
+ }, `a signal argument '${invalidSignal}' should cause pipeTo() to reject`);
+}
+
+promise_test(t => {
+ const rs = recordingReadableStream(errorOnPull, hwm0);
+ const ws = new WritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject')
+ .then(() => Promise.all([
+ rs.getReader().closed,
+ promise_rejects_dom(t, 'AbortError', ws.getWriter().closed, 'writer.closed should reject')
+ ]))
+ .then(() => {
+ assert_equals(rs.events.length, 2, 'cancel should have been called');
+ assert_equals(rs.events[0], 'cancel', 'first event should be cancel');
+ assert_equals(rs.events[1].name, 'AbortError', 'the argument to cancel should be an AbortError');
+ assert_equals(rs.events[1].constructor.name, 'DOMException',
+ 'the argument to cancel should be a DOMException');
+ });
+}, 'an aborted signal should cause the writable stream to reject with an AbortError');
+
+for (const reason of [null, undefined, error1]) {
+ promise_test(async t => {
+ const rs = recordingReadableStream(errorOnPull, hwm0);
+ const ws = new WritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort(reason);
+ const pipeToPromise = rs.pipeTo(ws, { signal });
+ if (reason !== undefined) {
+ await promise_rejects_exactly(t, reason, pipeToPromise, 'pipeTo rejects with abort reason');
+ } else {
+ await promise_rejects_dom(t, 'AbortError', pipeToPromise, 'pipeTo rejects with AbortError');
+ }
+ const error = await pipeToPromise.catch(e => e);
+ await rs.getReader().closed;
+ await promise_rejects_exactly(t, error, ws.getWriter().closed, 'the writable should be errored with the same object');
+ assert_equals(signal.reason, error, 'signal.reason should be error'),
+ assert_equals(rs.events.length, 2, 'cancel should have been called');
+ assert_equals(rs.events[0], 'cancel', 'first event should be cancel');
+ assert_equals(rs.events[1], error, 'the readable should be canceled with the same object');
+ }, `(reason: '${reason}') all the error objects should be the same object`);
+}
+
+promise_test(t => {
+ const rs = recordingReadableStream(errorOnPull, hwm0);
+ const ws = new WritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal, preventCancel: true }), 'pipeTo should reject')
+ .then(() => assert_equals(rs.events.length, 0, 'cancel should not be called'));
+}, 'preventCancel should prevent canceling the readable');
+
+promise_test(t => {
+ const rs = new ReadableStream(errorOnPull, hwm0);
+ const ws = recordingWritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal, preventAbort: true }), 'pipeTo should reject')
+ .then(() => {
+ assert_equals(ws.events.length, 0, 'writable should not have been aborted');
+ return ws.getWriter().ready;
+ });
+}, 'preventAbort should prevent aborting the readable');
+
+promise_test(t => {
+ const rs = recordingReadableStream(errorOnPull, hwm0);
+ const ws = recordingWritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal, preventCancel: true, preventAbort: true }),
+ 'pipeTo should reject')
+ .then(() => {
+ assert_equals(rs.events.length, 0, 'cancel should not be called');
+ assert_equals(ws.events.length, 0, 'writable should not have been aborted');
+ return ws.getWriter().ready;
+ });
+}, 'preventCancel and preventAbort should prevent canceling the readable and aborting the readable');
+
+for (const reason of [null, undefined, error1]) {
+ promise_test(async t => {
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.enqueue('b');
+ controller.close();
+ }
+ });
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ const ws = recordingWritableStream({
+ write() {
+ abortController.abort(reason);
+ }
+ });
+ const pipeToPromise = rs.pipeTo(ws, { signal });
+ if (reason !== undefined) {
+ await promise_rejects_exactly(t, reason, pipeToPromise, 'pipeTo rejects with abort reason');
+ } else {
+ await promise_rejects_dom(t, 'AbortError', pipeToPromise, 'pipeTo rejects with AbortError');
+ }
+ const error = await pipeToPromise.catch(e => e);
+ assert_equals(signal.reason, error, 'signal.reason should be error');
+ assert_equals(ws.events.length, 4, 'only chunk "a" should have been written');
+ assert_array_equals(ws.events.slice(0, 3), ['write', 'a', 'abort'], 'events should match');
+ assert_equals(ws.events[3], error, 'abort reason should be error');
+ }, `(reason: '${reason}') abort should prevent further reads`);
+}
+
+for (const reason of [null, undefined, error1]) {
+ promise_test(async t => {
+ let readController;
+ const rs = new ReadableStream({
+ start(c) {
+ readController = c;
+ c.enqueue('a');
+ c.enqueue('b');
+ }
+ });
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ let resolveWrite;
+ const writePromise = new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ const ws = recordingWritableStream({
+ write() {
+ return writePromise;
+ }
+ }, new CountQueuingStrategy({ highWaterMark: Infinity }));
+ const pipeToPromise = rs.pipeTo(ws, { signal });
+ await delay(0);
+ await abortController.abort(reason);
+ await readController.close(); // Make sure the test terminates when signal is not implemented.
+ await resolveWrite();
+ if (reason !== undefined) {
+ await promise_rejects_exactly(t, reason, pipeToPromise, 'pipeTo rejects with abort reason');
+ } else {
+ await promise_rejects_dom(t, 'AbortError', pipeToPromise, 'pipeTo rejects with AbortError');
+ }
+ const error = await pipeToPromise.catch(e => e);
+ assert_equals(signal.reason, error, 'signal.reason should be error');
+ assert_equals(ws.events.length, 6, 'chunks "a" and "b" should have been written');
+ assert_array_equals(ws.events.slice(0, 5), ['write', 'a', 'write', 'b', 'abort'], 'events should match');
+ assert_equals(ws.events[5], error, 'abort reason should be error');
+ }, `(reason: '${reason}') all pending writes should complete on abort`);
+}
+
+promise_test(t => {
+ const rs = new ReadableStream({
+ pull(controller) {
+ controller.error('failed to abort');
+ },
+ cancel() {
+ return Promise.reject(error1);
+ }
+ }, hwm0);
+ const ws = new WritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { signal }), 'pipeTo should reject');
+}, 'a rejection from underlyingSource.cancel() should be returned by pipeTo()');
+
+promise_test(t => {
+ const rs = new ReadableStream(errorOnPull, hwm0);
+ const ws = new WritableStream({
+ abort() {
+ return Promise.reject(error1);
+ }
+ });
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { signal }), 'pipeTo should reject');
+}, 'a rejection from underlyingSink.abort() should be returned by pipeTo()');
+
+promise_test(t => {
+ const events = [];
+ const rs = new ReadableStream({
+ pull(controller) {
+ controller.error('failed to abort');
+ },
+ cancel() {
+ events.push('cancel');
+ return Promise.reject(error1);
+ }
+ }, hwm0);
+ const ws = new WritableStream({
+ abort() {
+ events.push('abort');
+ return Promise.reject(error2);
+ }
+ });
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ return promise_rejects_exactly(t, error2, rs.pipeTo(ws, { signal }), 'pipeTo should reject')
+ .then(() => assert_array_equals(events, ['abort', 'cancel'], 'abort() should be called before cancel()'));
+}, 'a rejection from underlyingSink.abort() should be preferred to one from underlyingSource.cancel()');
+
+promise_test(t => {
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+ const ws = new WritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject');
+}, 'abort signal takes priority over closed readable');
+
+promise_test(t => {
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.error(error1);
+ }
+ });
+ const ws = new WritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject');
+}, 'abort signal takes priority over errored readable');
+
+promise_test(t => {
+ const rs = new ReadableStream({
+ pull(controller) {
+ controller.error('failed to abort');
+ }
+ }, hwm0);
+ const ws = new WritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ const writer = ws.getWriter();
+ return writer.close().then(() => {
+ writer.releaseLock();
+ return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject');
+ });
+}, 'abort signal takes priority over closed writable');
+
+promise_test(t => {
+ const rs = new ReadableStream({
+ pull(controller) {
+ controller.error('failed to abort');
+ }
+ }, hwm0);
+ const ws = new WritableStream({
+ start(controller) {
+ controller.error(error1);
+ }
+ });
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ abortController.abort();
+ return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject');
+}, 'abort signal takes priority over errored writable');
+
+promise_test(() => {
+ let readController;
+ const rs = new ReadableStream({
+ start(c) {
+ readController = c;
+ }
+ });
+ const ws = new WritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ const pipeToPromise = rs.pipeTo(ws, { signal, preventClose: true });
+ readController.close();
+ return Promise.resolve().then(() => {
+ abortController.abort();
+ return pipeToPromise;
+ }).then(() => ws.getWriter().write('this should succeed'));
+}, 'abort should do nothing after the readable is closed');
+
+promise_test(t => {
+ let readController;
+ const rs = new ReadableStream({
+ start(c) {
+ readController = c;
+ }
+ });
+ const ws = new WritableStream();
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ const pipeToPromise = rs.pipeTo(ws, { signal, preventAbort: true });
+ readController.error(error1);
+ return Promise.resolve().then(() => {
+ abortController.abort();
+ return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject');
+ }).then(() => ws.getWriter().write('this should succeed'));
+}, 'abort should do nothing after the readable is errored');
+
+promise_test(t => {
+ let readController;
+ const rs = new ReadableStream({
+ start(c) {
+ readController = c;
+ }
+ });
+ let resolveWrite;
+ const writePromise = new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ const ws = new WritableStream({
+ write() {
+ readController.error(error1);
+ return writePromise;
+ }
+ });
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ const pipeToPromise = rs.pipeTo(ws, { signal, preventAbort: true });
+ readController.enqueue('a');
+ return delay(0).then(() => {
+ abortController.abort();
+ resolveWrite();
+ return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject');
+ }).then(() => ws.getWriter().write('this should succeed'));
+}, 'abort should do nothing after the readable is errored, even with pending writes');
+
+promise_test(t => {
+ const rs = recordingReadableStream({
+ pull(controller) {
+ return delay(0).then(() => controller.close());
+ }
+ });
+ let writeController;
+ const ws = new WritableStream({
+ start(c) {
+ writeController = c;
+ }
+ });
+ const abortController = new AbortController();
+ const signal = abortController.signal;
+ const pipeToPromise = rs.pipeTo(ws, { signal, preventCancel: true });
+ return Promise.resolve().then(() => {
+ writeController.error(error1);
+ return Promise.resolve();
+ }).then(() => {
+ abortController.abort();
+ return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject');
+ }).then(() => {
+ assert_array_equals(rs.events, ['pull'], 'cancel should not have been called');
+ });
+}, 'abort should do nothing after the writable is errored');
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ pull(c) {
+ c.enqueue(new Uint8Array([]));
+ },
+ type: "bytes",
+ });
+ const ws = new WritableStream();
+ const [first, second] = rs.tee();
+
+ let aborted = false;
+ first.pipeTo(ws, { signal: AbortSignal.abort() }).catch(() => {
+ aborted = true;
+ });
+ await delay(0);
+ assert_true(!aborted, "pipeTo should not resolve yet");
+ await second.cancel();
+ await delay(0);
+ assert_true(aborted, "pipeTo should be aborted now");
+}, "pipeTo on a teed readable byte stream should only be aborted when both branches are aborted");
diff --git a/testing/web-platform/tests/streams/piping/close-propagation-backward.any.js b/testing/web-platform/tests/streams/piping/close-propagation-backward.any.js
new file mode 100644
index 0000000000..25bd475ed1
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/close-propagation-backward.any.js
@@ -0,0 +1,153 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = new Error('error1!');
+error1.name = 'error1';
+
+promise_test(() => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+ writer.close();
+ writer.releaseLock();
+
+ return rs.pipeTo(ws).then(
+ () => assert_unreached('the promise must not fulfill'),
+ err => {
+ assert_equals(err.name, 'TypeError', 'the promise must reject with a TypeError');
+
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ ws.getWriter().closed
+ ]);
+ }
+ );
+
+}, 'Closing must be propagated backward: starts closed; preventCancel omitted; fulfilled cancel promise');
+
+promise_test(t => {
+
+ // Our recording streams do not deal well with errors generated by the system, so give them some help
+ let recordedError;
+ const rs = recordingReadableStream({
+ cancel(cancelErr) {
+ recordedError = cancelErr;
+ throw error1;
+ }
+ });
+
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+ writer.close();
+ writer.releaseLock();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => {
+ assert_equals(recordedError.name, 'TypeError', 'the cancel reason must be a TypeError');
+
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', recordedError]);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ ws.getWriter().closed
+ ]);
+ });
+
+}, 'Closing must be propagated backward: starts closed; preventCancel omitted; rejected cancel promise');
+
+for (const falsy of [undefined, null, false, +0, -0, NaN, '']) {
+ const stringVersion = Object.is(falsy, -0) ? '-0' : String(falsy);
+
+ promise_test(() => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+ writer.close();
+ writer.releaseLock();
+
+ return rs.pipeTo(ws, { preventCancel: falsy }).then(
+ () => assert_unreached('the promise must not fulfill'),
+ err => {
+ assert_equals(err.name, 'TypeError', 'the promise must reject with a TypeError');
+
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ ws.getWriter().closed
+ ]);
+ }
+ );
+
+ }, `Closing must be propagated backward: starts closed; preventCancel = ${stringVersion} (falsy); fulfilled cancel ` +
+ `promise`);
+}
+
+for (const truthy of [true, 'a', 1, Symbol(), { }]) {
+ promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+ writer.close();
+ writer.releaseLock();
+
+ return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { preventCancel: truthy })).then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return ws.getWriter().closed;
+ });
+
+ }, `Closing must be propagated backward: starts closed; preventCancel = ${String(truthy)} (truthy)`);
+}
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+ writer.close();
+ writer.releaseLock();
+
+ return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { preventCancel: true, preventAbort: true }))
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return ws.getWriter().closed;
+ });
+
+}, 'Closing must be propagated backward: starts closed; preventCancel = true, preventAbort = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+ writer.close();
+ writer.releaseLock();
+
+ return promise_rejects_js(t, TypeError,
+ rs.pipeTo(ws, { preventCancel: true, preventAbort: true, preventClose: true }))
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return ws.getWriter().closed;
+ });
+
+}, 'Closing must be propagated backward: starts closed; preventCancel = true, preventAbort = true, preventClose ' +
+ '= true');
diff --git a/testing/web-platform/tests/streams/piping/close-propagation-forward.any.js b/testing/web-platform/tests/streams/piping/close-propagation-forward.any.js
new file mode 100644
index 0000000000..0ec94f80ab
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/close-propagation-forward.any.js
@@ -0,0 +1,589 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = new Error('error1!');
+error1.name = 'error1';
+
+promise_test(() => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ return rs.pipeTo(ws).then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ ws.getWriter().closed
+ ]);
+ });
+
+}, 'Closing must be propagated forward: starts closed; preventClose omitted; fulfilled close promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+
+ const ws = recordingWritableStream({
+ close() {
+ throw error1;
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ promise_rejects_exactly(t, error1, ws.getWriter().closed)
+ ]);
+ });
+
+}, 'Closing must be propagated forward: starts closed; preventClose omitted; rejected close promise');
+
+for (const falsy of [undefined, null, false, +0, -0, NaN, '']) {
+ const stringVersion = Object.is(falsy, -0) ? '-0' : String(falsy);
+
+ promise_test(() => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ return rs.pipeTo(ws, { preventClose: falsy }).then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ ws.getWriter().closed
+ ]);
+ });
+
+ }, `Closing must be propagated forward: starts closed; preventClose = ${stringVersion} (falsy); fulfilled close ` +
+ `promise`);
+}
+
+for (const truthy of [true, 'a', 1, Symbol(), { }]) {
+ promise_test(() => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ return rs.pipeTo(ws, { preventClose: truthy }).then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, []);
+
+ return rs.getReader().closed;
+ });
+
+ }, `Closing must be propagated forward: starts closed; preventClose = ${String(truthy)} (truthy)`);
+}
+
+promise_test(() => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ return rs.pipeTo(ws, { preventClose: true, preventAbort: true }).then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, []);
+
+ return rs.getReader().closed;
+ });
+
+}, 'Closing must be propagated forward: starts closed; preventClose = true, preventAbort = true');
+
+promise_test(() => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ return rs.pipeTo(ws, { preventClose: true, preventAbort: true, preventCancel: true }).then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, []);
+
+ return rs.getReader().closed;
+ });
+
+}, 'Closing must be propagated forward: starts closed; preventClose = true, preventAbort = true, preventCancel = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = rs.pipeTo(ws);
+
+ t.step_timeout(() => rs.controller.close());
+
+ return pipePromise.then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ ws.getWriter().closed
+ ]);
+ });
+
+}, 'Closing must be propagated forward: becomes closed asynchronously; preventClose omitted; fulfilled close promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ close() {
+ throw error1;
+ }
+ });
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => rs.controller.close());
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ promise_rejects_exactly(t, error1, ws.getWriter().closed)
+ ]);
+ });
+
+}, 'Closing must be propagated forward: becomes closed asynchronously; preventClose omitted; rejected close promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = rs.pipeTo(ws, { preventClose: true });
+
+ t.step_timeout(() => rs.controller.close());
+
+ return pipePromise.then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, []);
+
+ return rs.getReader().closed;
+ });
+
+}, 'Closing must be propagated forward: becomes closed asynchronously; preventClose = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = rs.pipeTo(ws);
+
+ t.step_timeout(() => rs.controller.close());
+
+ return pipePromise.then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ ws.getWriter().closed
+ ]);
+ });
+
+}, 'Closing must be propagated forward: becomes closed asynchronously; dest never desires chunks; ' +
+ 'preventClose omitted; fulfilled close promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ close() {
+ throw error1;
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => rs.controller.close());
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ promise_rejects_exactly(t, error1, ws.getWriter().closed)
+ ]);
+ });
+
+}, 'Closing must be propagated forward: becomes closed asynchronously; dest never desires chunks; ' +
+ 'preventClose omitted; rejected close promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = rs.pipeTo(ws, { preventClose: true });
+
+ t.step_timeout(() => rs.controller.close());
+
+ return pipePromise.then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, []);
+
+ return rs.getReader().closed;
+ });
+
+}, 'Closing must be propagated forward: becomes closed asynchronously; dest never desires chunks; ' +
+ 'preventClose = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = rs.pipeTo(ws);
+
+ t.step_timeout(() => {
+ rs.controller.enqueue('Hello');
+ t.step_timeout(() => rs.controller.close());
+ }, 10);
+
+ return pipePromise.then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'Hello', 'close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ ws.getWriter().closed
+ ]);
+ });
+
+}, 'Closing must be propagated forward: becomes closed after one chunk; preventClose omitted; fulfilled close promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ close() {
+ throw error1;
+ }
+ });
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => {
+ rs.controller.enqueue('Hello');
+ t.step_timeout(() => rs.controller.close());
+ }, 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'Hello', 'close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ promise_rejects_exactly(t, error1, ws.getWriter().closed)
+ ]);
+ });
+
+}, 'Closing must be propagated forward: becomes closed after one chunk; preventClose omitted; rejected close promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = rs.pipeTo(ws, { preventClose: true });
+
+ t.step_timeout(() => {
+ rs.controller.enqueue('Hello');
+ t.step_timeout(() => rs.controller.close());
+ }, 10);
+
+ return pipePromise.then(value => {
+ assert_equals(value, undefined, 'the promise must fulfill with undefined');
+ })
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+
+ return rs.getReader().closed;
+ });
+
+}, 'Closing must be propagated forward: becomes closed after one chunk; preventClose = true');
+
+promise_test(() => {
+
+ const rs = recordingReadableStream();
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ });
+
+ let pipeComplete = false;
+ const pipePromise = rs.pipeTo(ws).then(() => {
+ pipeComplete = true;
+ });
+
+ rs.controller.enqueue('a');
+ rs.controller.close();
+
+ // Flush async events and verify that no shutdown occurs.
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ws.events, ['write', 'a']); // no 'close'
+ assert_equals(pipeComplete, false, 'the pipe must not be complete');
+
+ resolveWritePromise();
+
+ return pipePromise.then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'close']);
+ });
+ });
+
+}, 'Closing must be propagated forward: shutdown must not occur until the final write completes');
+
+promise_test(() => {
+
+ const rs = recordingReadableStream();
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ });
+
+ let pipeComplete = false;
+ const pipePromise = rs.pipeTo(ws, { preventClose: true }).then(() => {
+ pipeComplete = true;
+ });
+
+ rs.controller.enqueue('a');
+ rs.controller.close();
+
+ // Flush async events and verify that no shutdown occurs.
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ws.events, ['write', 'a'],
+ 'the chunk must have been written, but close must not have happened');
+ assert_equals(pipeComplete, false, 'the pipe must not be complete');
+
+ resolveWritePromise();
+
+ return pipePromise;
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a'],
+ 'the chunk must have been written, but close must not have happened');
+ });
+
+}, 'Closing must be propagated forward: shutdown must not occur until the final write completes; preventClose = true');
+
+promise_test(() => {
+
+ const rs = recordingReadableStream();
+
+ let resolveWriteCalled;
+ const writeCalledPromise = new Promise(resolve => {
+ resolveWriteCalled = resolve;
+ });
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ resolveWriteCalled();
+
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 2 }));
+
+ let pipeComplete = false;
+ const pipePromise = rs.pipeTo(ws).then(() => {
+ pipeComplete = true;
+ });
+
+ rs.controller.enqueue('a');
+ rs.controller.enqueue('b');
+
+ return writeCalledPromise.then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a'],
+ 'the first chunk must have been written, but close must not have happened yet');
+ assert_false(pipeComplete, 'the pipe should not complete while the first write is pending');
+
+ rs.controller.close();
+ resolveWritePromise();
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b'],
+ 'the second chunk must have been written, but close must not have happened yet');
+ assert_false(pipeComplete, 'the pipe should not complete while the second write is pending');
+
+ resolveWritePromise();
+ return pipePromise;
+ }).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'close'],
+ 'all chunks must have been written and close must have happened');
+ });
+
+}, 'Closing must be propagated forward: shutdown must not occur until the final write completes; becomes closed after first write');
+
+promise_test(() => {
+
+ const rs = recordingReadableStream();
+
+ let resolveWriteCalled;
+ const writeCalledPromise = new Promise(resolve => {
+ resolveWriteCalled = resolve;
+ });
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ resolveWriteCalled();
+
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 2 }));
+
+ let pipeComplete = false;
+ const pipePromise = rs.pipeTo(ws, { preventClose: true }).then(() => {
+ pipeComplete = true;
+ });
+
+ rs.controller.enqueue('a');
+ rs.controller.enqueue('b');
+
+ return writeCalledPromise.then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a'],
+ 'the first chunk must have been written, but close must not have happened');
+ assert_false(pipeComplete, 'the pipe should not complete while the first write is pending');
+
+ rs.controller.close();
+ resolveWritePromise();
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b'],
+ 'the second chunk must have been written, but close must not have happened');
+ assert_false(pipeComplete, 'the pipe should not complete while the second write is pending');
+
+ resolveWritePromise();
+ return pipePromise;
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b'],
+ 'all chunks must have been written, but close must not have happened');
+ });
+
+}, 'Closing must be propagated forward: shutdown must not occur until the final write completes; becomes closed after first write; preventClose = true');
+
+
+promise_test(t => {
+ const rs = recordingReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ c.close();
+ }
+ });
+ let rejectWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ return new Promise((resolve, reject) => {
+ rejectWritePromise = reject;
+ });
+ }
+ }, { highWaterMark: 3 });
+ const pipeToPromise = rs.pipeTo(ws);
+ return delay(0).then(() => {
+ rejectWritePromise(error1);
+ return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject');
+ }).then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, ['write', 'a']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ promise_rejects_exactly(t, error1, ws.getWriter().closed, 'ws should be errored')
+ ]);
+ });
+}, 'Closing must be propagated forward: erroring the writable while flushing pending writes should error pipeTo');
diff --git a/testing/web-platform/tests/streams/piping/crashtests/cross-piping.html b/testing/web-platform/tests/streams/piping/crashtests/cross-piping.html
new file mode 100644
index 0000000000..712d5ecebe
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/crashtests/cross-piping.html
@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<script type="module">
+ let a = new ReadableStream();
+ let b = self.open()
+ let f = new b.WritableStream();
+ a.pipeThrough(
+ { "readable": a, "writable": f },
+ { "signal": AbortSignal.abort() }
+ )
+ await new Promise(setTimeout);
+ structuredClone(undefined, { "transfer": [f] })
+</script>
diff --git a/testing/web-platform/tests/streams/piping/error-propagation-backward.any.js b/testing/web-platform/tests/streams/piping/error-propagation-backward.any.js
new file mode 100644
index 0000000000..f786469d6c
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/error-propagation-backward.any.js
@@ -0,0 +1,630 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = new Error('error1!');
+error1.name = 'error1';
+
+const error2 = new Error('error2!');
+error2.name = 'error2';
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ start() {
+ return Promise.reject(error1);
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error')
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated backward: starts errored; preventCancel omitted; fulfilled cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ write() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error')
+ .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error'))
+ .then(() => {
+ writer.releaseLock();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the write error')
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+ });
+ });
+
+}, 'Errors must be propagated backward: becomes errored before piping due to write; preventCancel omitted; ' +
+ 'fulfilled cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ cancel() {
+ throw error2;
+ }
+ });
+
+ const ws = recordingWritableStream({
+ write() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error')
+ .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error'))
+ .then(() => {
+ writer.releaseLock();
+
+ return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error')
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+ });
+ });
+
+}, 'Errors must be propagated backward: becomes errored before piping due to write; preventCancel omitted; rejected ' +
+ 'cancel promise');
+
+for (const falsy of [undefined, null, false, +0, -0, NaN, '']) {
+ const stringVersion = Object.is(falsy, -0) ? '-0' : String(falsy);
+
+ promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ write() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error')
+ .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error'))
+ .then(() => {
+ writer.releaseLock();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: falsy }),
+ 'pipeTo must reject with the write error')
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+ });
+ });
+
+ }, `Errors must be propagated backward: becomes errored before piping due to write; preventCancel = ` +
+ `${stringVersion} (falsy); fulfilled cancel promise`);
+}
+
+for (const truthy of [true, 'a', 1, Symbol(), { }]) {
+ promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ write() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error')
+ .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error'))
+ .then(() => {
+ writer.releaseLock();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: truthy }),
+ 'pipeTo must reject with the write error')
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+ });
+ });
+
+ }, `Errors must be propagated backward: becomes errored before piping due to write; preventCancel = ` +
+ `${String(truthy)} (truthy)`);
+}
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ write() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error')
+ .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error'))
+ .then(() => {
+ writer.releaseLock();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true, preventAbort: true }),
+ 'pipeTo must reject with the write error')
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+ });
+ });
+
+}, 'Errors must be propagated backward: becomes errored before piping due to write, preventCancel = true; ' +
+ 'preventAbort = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ write() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error')
+ .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error'))
+ .then(() => {
+ writer.releaseLock();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true, preventAbort: true, preventClose: true }),
+ 'pipeTo must reject with the write error')
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+ });
+ });
+
+}, 'Errors must be propagated backward: becomes errored before piping due to write; preventCancel = true, ' +
+ 'preventAbort = true, preventClose = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.enqueue('Hello');
+ }
+ });
+
+ const ws = recordingWritableStream({
+ write() {
+ throw error1;
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+ });
+
+}, 'Errors must be propagated backward: becomes errored during piping due to write; preventCancel omitted; fulfilled ' +
+ 'cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.enqueue('Hello');
+ },
+ cancel() {
+ throw error2;
+ }
+ });
+
+ const ws = recordingWritableStream({
+ write() {
+ throw error1;
+ }
+ });
+
+ return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error').then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+ });
+
+}, 'Errors must be propagated backward: becomes errored during piping due to write; preventCancel omitted; rejected ' +
+ 'cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.enqueue('Hello');
+ }
+ });
+
+ const ws = recordingWritableStream({
+ write() {
+ throw error1;
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), 'pipeTo must reject with the same error')
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+ });
+
+}, 'Errors must be propagated backward: becomes errored during piping due to write; preventCancel = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.enqueue('b');
+ controller.enqueue('c');
+ }
+ });
+
+ const ws = recordingWritableStream({
+ write() {
+ if (ws.events.length > 2) {
+ return delay(0).then(() => {
+ throw error1;
+ });
+ }
+ return undefined;
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b']);
+ });
+
+}, 'Errors must be propagated backward: becomes errored during piping due to write, but async; preventCancel = ' +
+ 'false; fulfilled cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.enqueue('b');
+ controller.enqueue('c');
+ },
+ cancel() {
+ throw error2;
+ }
+ });
+
+ const ws = recordingWritableStream({
+ write() {
+ if (ws.events.length > 2) {
+ return delay(0).then(() => {
+ throw error1;
+ });
+ }
+ return undefined;
+ }
+ });
+
+ return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error').then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b']);
+ });
+
+}, 'Errors must be propagated backward: becomes errored during piping due to write, but async; preventCancel = ' +
+ 'false; rejected cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.enqueue('b');
+ controller.enqueue('c');
+ }
+ });
+
+ const ws = recordingWritableStream({
+ write() {
+ if (ws.events.length > 2) {
+ return delay(0).then(() => {
+ throw error1;
+ });
+ }
+ return undefined;
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), 'pipeTo must reject with the same error')
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b']);
+ });
+
+}, 'Errors must be propagated backward: becomes errored during piping due to write, but async; preventCancel = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => ws.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated backward: becomes errored after piping; preventCancel omitted; fulfilled cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ cancel() {
+ throw error2;
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error');
+
+ t.step_timeout(() => ws.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated backward: becomes errored after piping; preventCancel omitted; rejected cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }),
+ 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => ws.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated backward: becomes errored after piping; preventCancel = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.enqueue('b');
+ controller.enqueue('c');
+ controller.close();
+ }
+ });
+
+ const ws = recordingWritableStream({
+ write(chunk) {
+ if (chunk === 'c') {
+ return Promise.reject(error1);
+ }
+ return undefined;
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'write', 'c']);
+ });
+
+}, 'Errors must be propagated backward: becomes errored after piping due to last write; source is closed; ' +
+ 'preventCancel omitted (but cancel is never called)');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.enqueue('b');
+ controller.enqueue('c');
+ controller.close();
+ }
+ });
+
+ const ws = recordingWritableStream({
+ write(chunk) {
+ if (chunk === 'c') {
+ return Promise.reject(error1);
+ }
+ return undefined;
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), 'pipeTo must reject with the same error')
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'write', 'c']);
+ });
+
+}, 'Errors must be propagated backward: becomes errored after piping due to last write; source is closed; ' +
+ 'preventCancel = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => ws.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated backward: becomes errored after piping; dest never desires chunks; preventCancel = ' +
+ 'false; fulfilled cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ cancel() {
+ throw error2;
+ }
+ });
+
+ const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error');
+
+ t.step_timeout(() => ws.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated backward: becomes errored after piping; dest never desires chunks; preventCancel = ' +
+ 'false; rejected cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }),
+ 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => ws.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated backward: becomes errored after piping; dest never desires chunks; preventCancel = ' +
+ 'true');
+
+promise_test(() => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ ws.abort(error1);
+
+ return rs.pipeTo(ws).then(
+ () => assert_unreached('the promise must not fulfill'),
+ err => {
+ assert_equals(err, error1, 'the promise must reject with error1');
+
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]);
+ assert_array_equals(ws.events, ['abort', error1]);
+ }
+ );
+
+}, 'Errors must be propagated backward: becomes errored before piping via abort; preventCancel omitted; fulfilled ' +
+ 'cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ cancel() {
+ throw error2;
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ ws.abort(error1);
+
+ return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error')
+ .then(() => {
+ return ws.getWriter().closed.then(
+ () => assert_unreached('the promise must not fulfill'),
+ err => {
+ assert_equals(err, error1, 'the promise must reject with error1');
+
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]);
+ assert_array_equals(ws.events, ['abort', error1]);
+ }
+ );
+ });
+
+}, 'Errors must be propagated backward: becomes errored before piping via abort; preventCancel omitted; rejected ' +
+ 'cancel promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ ws.abort(error1);
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true })).then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+
+}, 'Errors must be propagated backward: becomes errored before piping via abort; preventCancel = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ let resolveWriteCalled;
+ const writeCalledPromise = new Promise(resolve => {
+ resolveWriteCalled = resolve;
+ });
+
+ const ws = recordingWritableStream({
+ write() {
+ resolveWriteCalled();
+ return flushAsyncEvents();
+ }
+ });
+
+ const pipePromise = rs.pipeTo(ws);
+
+ rs.controller.enqueue('a');
+
+ return writeCalledPromise.then(() => {
+ ws.controller.error(error1);
+
+ return promise_rejects_exactly(t, error1, pipePromise);
+ }).then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]);
+ assert_array_equals(ws.events, ['write', 'a']);
+ });
+
+}, 'Errors must be propagated backward: erroring via the controller errors once pending write completes');
diff --git a/testing/web-platform/tests/streams/piping/error-propagation-forward.any.js b/testing/web-platform/tests/streams/piping/error-propagation-forward.any.js
new file mode 100644
index 0000000000..e9260f9ea2
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/error-propagation-forward.any.js
@@ -0,0 +1,569 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = new Error('error1!');
+error1.name = 'error1';
+
+const error2 = new Error('error2!');
+error2.name = 'error2';
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error')
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+
+}, 'Errors must be propagated forward: starts errored; preventAbort = false; fulfilled abort promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const ws = recordingWritableStream({
+ abort() {
+ throw error2;
+ }
+ });
+
+ return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error')
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+
+}, 'Errors must be propagated forward: starts errored; preventAbort = false; rejected abort promise');
+
+for (const falsy of [undefined, null, false, +0, -0, NaN, '']) {
+ const stringVersion = Object.is(falsy, -0) ? '-0' : String(falsy);
+
+ promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: falsy }), 'pipeTo must reject with the same error')
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+
+ }, `Errors must be propagated forward: starts errored; preventAbort = ${stringVersion} (falsy); fulfilled abort ` +
+ `promise`);
+}
+
+for (const truthy of [true, 'a', 1, Symbol(), { }]) {
+ promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: truthy }),
+ 'pipeTo must reject with the same error')
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, []);
+ });
+
+ }, `Errors must be propagated forward: starts errored; preventAbort = ${String(truthy)} (truthy)`);
+}
+
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true, preventCancel: true }),
+ 'pipeTo must reject with the same error')
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated forward: starts errored; preventAbort = true, preventCancel = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start() {
+ return Promise.reject(error1);
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true, preventCancel: true, preventClose: true }),
+ 'pipeTo must reject with the same error')
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated forward: starts errored; preventAbort = true, preventCancel = true, preventClose = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => rs.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+
+}, 'Errors must be propagated forward: becomes errored while empty; preventAbort = false; fulfilled abort promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ abort() {
+ throw error2;
+ }
+ });
+
+ const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error');
+
+ t.step_timeout(() => rs.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+
+}, 'Errors must be propagated forward: becomes errored while empty; preventAbort = false; rejected abort promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }),
+ 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => rs.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated forward: becomes errored while empty; preventAbort = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => rs.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+
+}, 'Errors must be propagated forward: becomes errored while empty; dest never desires chunks; ' +
+ 'preventAbort = false; fulfilled abort promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ abort() {
+ throw error2;
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error');
+
+ t.step_timeout(() => rs.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+
+}, 'Errors must be propagated forward: becomes errored while empty; dest never desires chunks; ' +
+ 'preventAbort = false; rejected abort promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }),
+ 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => rs.controller.error(error1), 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated forward: becomes errored while empty; dest never desires chunks; ' +
+ 'preventAbort = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => {
+ rs.controller.enqueue('Hello');
+ t.step_timeout(() => rs.controller.error(error1), 10);
+ }, 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'Hello', 'abort', error1]);
+ });
+
+}, 'Errors must be propagated forward: becomes errored after one chunk; preventAbort = false; fulfilled abort promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ abort() {
+ throw error2;
+ }
+ });
+
+ const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error');
+
+ t.step_timeout(() => {
+ rs.controller.enqueue('Hello');
+ t.step_timeout(() => rs.controller.error(error1), 10);
+ }, 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'Hello', 'abort', error1]);
+ });
+
+}, 'Errors must be propagated forward: becomes errored after one chunk; preventAbort = false; rejected abort promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }),
+ 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => {
+ rs.controller.enqueue('Hello');
+ t.step_timeout(() => rs.controller.error(error1), 10);
+ }, 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'Hello']);
+ });
+
+}, 'Errors must be propagated forward: becomes errored after one chunk; preventAbort = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => {
+ rs.controller.enqueue('Hello');
+ t.step_timeout(() => rs.controller.error(error1), 10);
+ }, 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+
+}, 'Errors must be propagated forward: becomes errored after one chunk; dest never desires chunks; ' +
+ 'preventAbort = false; fulfilled abort promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream({
+ abort() {
+ throw error2;
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error');
+
+ t.step_timeout(() => {
+ rs.controller.enqueue('Hello');
+ t.step_timeout(() => rs.controller.error(error1), 10);
+ }, 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+
+}, 'Errors must be propagated forward: becomes errored after one chunk; dest never desires chunks; ' +
+ 'preventAbort = false; rejected abort promise');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }),
+ 'pipeTo must reject with the same error');
+
+ t.step_timeout(() => {
+ rs.controller.enqueue('Hello');
+ t.step_timeout(() => rs.controller.error(error1), 10);
+ }, 10);
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, []);
+ });
+
+}, 'Errors must be propagated forward: becomes errored after one chunk; dest never desires chunks; ' +
+ 'preventAbort = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ let resolveWriteCalled;
+ const writeCalledPromise = new Promise(resolve => {
+ resolveWriteCalled = resolve;
+ });
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ resolveWriteCalled();
+
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ });
+
+ let pipeComplete = false;
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws)).then(() => {
+ pipeComplete = true;
+ });
+
+ rs.controller.enqueue('a');
+
+ return writeCalledPromise.then(() => {
+ rs.controller.error(error1);
+
+ // Flush async events and verify that no shutdown occurs.
+ return flushAsyncEvents();
+ }).then(() => {
+ assert_array_equals(ws.events, ['write', 'a']); // no 'abort'
+ assert_equals(pipeComplete, false, 'the pipe must not be complete');
+
+ resolveWritePromise();
+
+ return pipePromise.then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'abort', error1]);
+ });
+ });
+
+}, 'Errors must be propagated forward: shutdown must not occur until the final write completes');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ let resolveWriteCalled;
+ const writeCalledPromise = new Promise(resolve => {
+ resolveWriteCalled = resolve;
+ });
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ resolveWriteCalled();
+
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ });
+
+ let pipeComplete = false;
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true })).then(() => {
+ pipeComplete = true;
+ });
+
+ rs.controller.enqueue('a');
+
+ return writeCalledPromise.then(() => {
+ rs.controller.error(error1);
+
+ // Flush async events and verify that no shutdown occurs.
+ return flushAsyncEvents();
+ }).then(() => {
+ assert_array_equals(ws.events, ['write', 'a']); // no 'abort'
+ assert_equals(pipeComplete, false, 'the pipe must not be complete');
+
+ resolveWritePromise();
+ return pipePromise;
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a']); // no 'abort'
+ });
+
+}, 'Errors must be propagated forward: shutdown must not occur until the final write completes; preventAbort = true');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ let resolveWriteCalled;
+ const writeCalledPromise = new Promise(resolve => {
+ resolveWriteCalled = resolve;
+ });
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ resolveWriteCalled();
+
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 2 }));
+
+ let pipeComplete = false;
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws)).then(() => {
+ pipeComplete = true;
+ });
+
+ rs.controller.enqueue('a');
+ rs.controller.enqueue('b');
+
+ return writeCalledPromise.then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a'],
+ 'the first chunk must have been written, but abort must not have happened yet');
+ assert_false(pipeComplete, 'the pipe should not complete while the first write is pending');
+
+ rs.controller.error(error1);
+ resolveWritePromise();
+ return flushAsyncEvents();
+ }).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b'],
+ 'the second chunk must have been written, but abort must not have happened yet');
+ assert_false(pipeComplete, 'the pipe should not complete while the second write is pending');
+
+ resolveWritePromise();
+ return pipePromise;
+ }).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'abort', error1],
+ 'all chunks must have been written and abort must have happened');
+ });
+
+}, 'Errors must be propagated forward: shutdown must not occur until the final write completes; becomes errored after first write');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream();
+
+ let resolveWriteCalled;
+ const writeCalledPromise = new Promise(resolve => {
+ resolveWriteCalled = resolve;
+ });
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ resolveWriteCalled();
+
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 2 }));
+
+ let pipeComplete = false;
+ const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true })).then(() => {
+ pipeComplete = true;
+ });
+
+ rs.controller.enqueue('a');
+ rs.controller.enqueue('b');
+
+ return writeCalledPromise.then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a'],
+ 'the first chunk must have been written, but abort must not have happened');
+ assert_false(pipeComplete, 'the pipe should not complete while the first write is pending');
+
+ rs.controller.error(error1);
+ resolveWritePromise();
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b'],
+ 'the second chunk must have been written, but abort must not have happened');
+ assert_false(pipeComplete, 'the pipe should not complete while the second write is pending');
+
+ resolveWritePromise();
+ return pipePromise;
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b'],
+ 'all chunks must have been written, but abort must not have happened');
+ });
+
+}, 'Errors must be propagated forward: shutdown must not occur until the final write completes; becomes errored after first write; preventAbort = true');
diff --git a/testing/web-platform/tests/streams/piping/flow-control.any.js b/testing/web-platform/tests/streams/piping/flow-control.any.js
new file mode 100644
index 0000000000..e2318da375
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/flow-control.any.js
@@ -0,0 +1,297 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/rs-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = new Error('error1!');
+error1.name = 'error1';
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.enqueue('b');
+ controller.close();
+ }
+ });
+
+ const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const pipePromise = rs.pipeTo(ws, { preventCancel: true });
+
+ // Wait and make sure it doesn't do any reading.
+ return flushAsyncEvents().then(() => {
+ ws.controller.error(error1);
+ })
+ .then(() => promise_rejects_exactly(t, error1, pipePromise, 'pipeTo must reject with the same error'))
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, []);
+ })
+ .then(() => readableStreamToArray(rs))
+ .then(chunksNotPreviouslyRead => {
+ assert_array_equals(chunksNotPreviouslyRead, ['a', 'b']);
+ });
+
+}, 'Piping from a non-empty ReadableStream into a WritableStream that does not desire chunks');
+
+promise_test(() => {
+
+ const rs = recordingReadableStream({
+ start(controller) {
+ controller.enqueue('b');
+ controller.close();
+ }
+ });
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ if (!resolveWritePromise) {
+ // first write
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ return undefined;
+ }
+ });
+
+ const writer = ws.getWriter();
+ const firstWritePromise = writer.write('a');
+ assert_equals(writer.desiredSize, 0, 'after writing the writer\'s desiredSize must be 0');
+ writer.releaseLock();
+
+ // firstWritePromise won't settle until we call resolveWritePromise.
+
+ const pipePromise = rs.pipeTo(ws);
+
+ return flushAsyncEvents().then(() => resolveWritePromise())
+ .then(() => Promise.all([firstWritePromise, pipePromise]))
+ .then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'close']);
+ });
+
+}, 'Piping from a non-empty ReadableStream into a WritableStream that does not desire chunks, but then does');
+
+promise_test(() => {
+
+ const rs = recordingReadableStream();
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ if (!resolveWritePromise) {
+ // first write
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ return undefined;
+ }
+ });
+
+ const writer = ws.getWriter();
+ writer.write('a');
+
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ws.events, ['write', 'a']);
+ assert_equals(writer.desiredSize, 0, 'after writing the writer\'s desiredSize must be 0');
+ writer.releaseLock();
+
+ const pipePromise = rs.pipeTo(ws);
+
+ rs.controller.enqueue('b');
+ resolveWritePromise();
+ rs.controller.close();
+
+ return pipePromise.then(() => {
+ assert_array_equals(rs.eventsWithoutPulls, []);
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'close']);
+ });
+ });
+
+}, 'Piping from an empty ReadableStream into a WritableStream that does not desire chunks, but then the readable ' +
+ 'stream becomes non-empty and the writable stream starts desiring chunks');
+
+promise_test(() => {
+ const unreadChunks = ['b', 'c', 'd'];
+
+ const rs = recordingReadableStream({
+ pull(controller) {
+ controller.enqueue(unreadChunks.shift());
+ if (unreadChunks.length === 0) {
+ controller.close();
+ }
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ let resolveWritePromise;
+ const ws = recordingWritableStream({
+ write() {
+ if (!resolveWritePromise) {
+ // first write
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ }
+ return undefined;
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 3 }));
+
+ const writer = ws.getWriter();
+ const firstWritePromise = writer.write('a');
+ assert_equals(writer.desiredSize, 2, 'after writing the writer\'s desiredSize must be 2');
+ writer.releaseLock();
+
+ // firstWritePromise won't settle until we call resolveWritePromise.
+
+ const pipePromise = rs.pipeTo(ws);
+
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ws.events, ['write', 'a']);
+ assert_equals(unreadChunks.length, 1, 'chunks should continue to be enqueued until the HWM is reached');
+ }).then(() => resolveWritePromise())
+ .then(() => Promise.all([firstWritePromise, pipePromise]))
+ .then(() => {
+ assert_array_equals(rs.events, ['pull', 'pull', 'pull']);
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b','write', 'c','write', 'd', 'close']);
+ });
+
+}, 'Piping from a ReadableStream to a WritableStream that desires more chunks before finishing with previous ones');
+
+class StepTracker {
+ constructor() {
+ this.waiters = [];
+ this.wakers = [];
+ }
+
+ // Returns promise which resolves when step `n` is reached. Also schedules step n + 1 to happen shortly after the
+ // promise is resolved.
+ waitThenAdvance(n) {
+ if (this.waiters[n] === undefined) {
+ this.waiters[n] = new Promise(resolve => {
+ this.wakers[n] = resolve;
+ });
+ this.waiters[n]
+ .then(() => flushAsyncEvents())
+ .then(() => {
+ if (this.wakers[n + 1] !== undefined) {
+ this.wakers[n + 1]();
+ }
+ });
+ }
+ if (n == 0) {
+ this.wakers[0]();
+ }
+ return this.waiters[n];
+ }
+}
+
+promise_test(() => {
+ const steps = new StepTracker();
+ const desiredSizes = [];
+ const rs = recordingReadableStream({
+ start(controller) {
+ steps.waitThenAdvance(1).then(() => enqueue('a'));
+ steps.waitThenAdvance(3).then(() => enqueue('b'));
+ steps.waitThenAdvance(5).then(() => enqueue('c'));
+ steps.waitThenAdvance(7).then(() => enqueue('d'));
+ steps.waitThenAdvance(11).then(() => controller.close());
+
+ function enqueue(chunk) {
+ controller.enqueue(chunk);
+ desiredSizes.push(controller.desiredSize);
+ }
+ }
+ });
+
+ const chunksFinishedWriting = [];
+ const writableStartPromise = Promise.resolve();
+ let writeCalled = false;
+ const ws = recordingWritableStream({
+ start() {
+ return writableStartPromise;
+ },
+ write(chunk) {
+ const waitForStep = writeCalled ? 12 : 9;
+ writeCalled = true;
+ return steps.waitThenAdvance(waitForStep).then(() => {
+ chunksFinishedWriting.push(chunk);
+ });
+ }
+ });
+
+ return writableStartPromise.then(() => {
+ const pipePromise = rs.pipeTo(ws);
+ steps.waitThenAdvance(0);
+
+ return Promise.all([
+ steps.waitThenAdvance(2).then(() => {
+ assert_array_equals(chunksFinishedWriting, [], 'at step 2, zero chunks must have finished writing');
+ assert_array_equals(ws.events, ['write', 'a'], 'at step 2, one chunk must have been written');
+
+ // When 'a' (the very first chunk) was enqueued, it was immediately used to fulfill the outstanding read request
+ // promise, leaving the queue empty.
+ assert_array_equals(desiredSizes, [1],
+ 'at step 2, the desiredSize at the last enqueue (step 1) must have been 1');
+ assert_equals(rs.controller.desiredSize, 1, 'at step 2, the current desiredSize must be 1');
+ }),
+
+ steps.waitThenAdvance(4).then(() => {
+ assert_array_equals(chunksFinishedWriting, [], 'at step 4, zero chunks must have finished writing');
+ assert_array_equals(ws.events, ['write', 'a'], 'at step 4, one chunk must have been written');
+
+ // When 'b' was enqueued at step 3, the queue was also empty, since immediately after enqueuing 'a' at
+ // step 1, it was dequeued in order to fulfill the read() call that was made at step 0. Thus the queue
+ // had size 1 (thus desiredSize of 0).
+ assert_array_equals(desiredSizes, [1, 0],
+ 'at step 4, the desiredSize at the last enqueue (step 3) must have been 0');
+ assert_equals(rs.controller.desiredSize, 0, 'at step 4, the current desiredSize must be 0');
+ }),
+
+ steps.waitThenAdvance(6).then(() => {
+ assert_array_equals(chunksFinishedWriting, [], 'at step 6, zero chunks must have finished writing');
+ assert_array_equals(ws.events, ['write', 'a'], 'at step 6, one chunk must have been written');
+
+ // When 'c' was enqueued at step 5, the queue was not empty; it had 'b' in it, since 'b' will not be read until
+ // the first write completes at step 9. Thus, the queue size is 2 after enqueuing 'c', giving a desiredSize of
+ // -1.
+ assert_array_equals(desiredSizes, [1, 0, -1],
+ 'at step 6, the desiredSize at the last enqueue (step 5) must have been -1');
+ assert_equals(rs.controller.desiredSize, -1, 'at step 6, the current desiredSize must be -1');
+ }),
+
+ steps.waitThenAdvance(8).then(() => {
+ assert_array_equals(chunksFinishedWriting, [], 'at step 8, zero chunks must have finished writing');
+ assert_array_equals(ws.events, ['write', 'a'], 'at step 8, one chunk must have been written');
+
+ // When 'd' was enqueued at step 7, the situation is the same as before, leading to a queue containing 'b', 'c',
+ // and 'd'.
+ assert_array_equals(desiredSizes, [1, 0, -1, -2],
+ 'at step 8, the desiredSize at the last enqueue (step 7) must have been -2');
+ assert_equals(rs.controller.desiredSize, -2, 'at step 8, the current desiredSize must be -2');
+ }),
+
+ steps.waitThenAdvance(10).then(() => {
+ assert_array_equals(chunksFinishedWriting, ['a'], 'at step 10, one chunk must have finished writing');
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b'],
+ 'at step 10, two chunks must have been written');
+
+ assert_equals(rs.controller.desiredSize, -1, 'at step 10, the current desiredSize must be -1');
+ }),
+
+ pipePromise.then(() => {
+ assert_array_equals(desiredSizes, [1, 0, -1, -2], 'backpressure must have been exerted at the source');
+ assert_array_equals(chunksFinishedWriting, ['a', 'b', 'c', 'd'], 'all chunks finished writing');
+
+ assert_array_equals(rs.eventsWithoutPulls, [], 'nothing unexpected should happen to the ReadableStream');
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'write', 'c', 'write', 'd', 'close'],
+ 'all chunks were written (and the WritableStream closed)');
+ })
+ ]);
+ });
+}, 'Piping to a WritableStream that does not consume the writes fast enough exerts backpressure on the ReadableStream');
diff --git a/testing/web-platform/tests/streams/piping/general-addition.any.js b/testing/web-platform/tests/streams/piping/general-addition.any.js
new file mode 100644
index 0000000000..cf4aa9bea6
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/general-addition.any.js
@@ -0,0 +1,15 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+promise_test(async t => {
+ /** @type {ReadableStreamDefaultController} */
+ var con;
+ let synchronous = false;
+ new ReadableStream({ start(c) { con = c }}, { highWaterMark: 0 }).pipeTo(
+ new WritableStream({ write() { synchronous = true; } })
+ )
+ // wait until start algorithm finishes
+ await Promise.resolve();
+ con.enqueue();
+ assert_false(synchronous, 'write algorithm must not run synchronously');
+}, "enqueue() must not synchronously call write algorithm");
diff --git a/testing/web-platform/tests/streams/piping/general.any.js b/testing/web-platform/tests/streams/piping/general.any.js
new file mode 100644
index 0000000000..f051d8102c
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/general.any.js
@@ -0,0 +1,212 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/recording-streams.js
+'use strict';
+
+test(() => {
+
+ const rs = new ReadableStream();
+ const ws = new WritableStream();
+
+ assert_false(rs.locked, 'sanity check: the ReadableStream must not start locked');
+ assert_false(ws.locked, 'sanity check: the WritableStream must not start locked');
+
+ rs.pipeTo(ws);
+
+ assert_true(rs.locked, 'the ReadableStream must become locked');
+ assert_true(ws.locked, 'the WritableStream must become locked');
+
+}, 'Piping must lock both the ReadableStream and WritableStream');
+
+promise_test(() => {
+
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+ const ws = new WritableStream();
+
+ return rs.pipeTo(ws).then(() => {
+ assert_false(rs.locked, 'the ReadableStream must become unlocked');
+ assert_false(ws.locked, 'the WritableStream must become unlocked');
+ });
+
+}, 'Piping finishing must unlock both the ReadableStream and WritableStream');
+
+promise_test(t => {
+
+ const fakeRS = Object.create(ReadableStream.prototype);
+ const ws = new WritableStream();
+
+ return promise_rejects_js(t, TypeError, ReadableStream.prototype.pipeTo.apply(fakeRS, [ws]),
+ 'pipeTo should reject with a TypeError');
+
+}, 'pipeTo must check the brand of its ReadableStream this value');
+
+promise_test(t => {
+
+ const rs = new ReadableStream();
+ const fakeWS = Object.create(WritableStream.prototype);
+
+ return promise_rejects_js(t, TypeError, ReadableStream.prototype.pipeTo.apply(rs, [fakeWS]),
+ 'pipeTo should reject with a TypeError');
+
+}, 'pipeTo must check the brand of its WritableStream argument');
+
+promise_test(t => {
+
+ const rs = new ReadableStream();
+ const ws = new WritableStream();
+
+ rs.getReader();
+
+ assert_true(rs.locked, 'sanity check: the ReadableStream starts locked');
+ assert_false(ws.locked, 'sanity check: the WritableStream does not start locked');
+
+ return promise_rejects_js(t, TypeError, rs.pipeTo(ws)).then(() => {
+ assert_false(ws.locked, 'the WritableStream must still be unlocked');
+ });
+
+}, 'pipeTo must fail if the ReadableStream is locked, and not lock the WritableStream');
+
+promise_test(t => {
+
+ const rs = new ReadableStream();
+ const ws = new WritableStream();
+
+ ws.getWriter();
+
+ assert_false(rs.locked, 'sanity check: the ReadableStream does not start locked');
+ assert_true(ws.locked, 'sanity check: the WritableStream starts locked');
+
+ return promise_rejects_js(t, TypeError, rs.pipeTo(ws)).then(() => {
+ assert_false(rs.locked, 'the ReadableStream must still be unlocked');
+ });
+
+}, 'pipeTo must fail if the WritableStream is locked, and not lock the ReadableStream');
+
+promise_test(() => {
+
+ const CHUNKS = 10;
+
+ const rs = new ReadableStream({
+ start(c) {
+ for (let i = 0; i < CHUNKS; ++i) {
+ c.enqueue(i);
+ }
+ c.close();
+ }
+ });
+
+ const written = [];
+ const ws = new WritableStream({
+ write(chunk) {
+ written.push(chunk);
+ },
+ close() {
+ written.push('closed');
+ }
+ }, new CountQueuingStrategy({ highWaterMark: CHUNKS }));
+
+ return rs.pipeTo(ws).then(() => {
+ const targetValues = [];
+ for (let i = 0; i < CHUNKS; ++i) {
+ targetValues.push(i);
+ }
+ targetValues.push('closed');
+
+ assert_array_equals(written, targetValues, 'the correct values must be written');
+
+ // Ensure both readable and writable are closed by the time the pipe finishes.
+ return Promise.all([
+ rs.getReader().closed,
+ ws.getWriter().closed
+ ]);
+ });
+
+ // NOTE: no requirement on *when* the pipe finishes; that is left to implementations.
+
+}, 'Piping from a ReadableStream from which lots of chunks are synchronously readable');
+
+promise_test(t => {
+
+ let controller;
+ const rs = recordingReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const ws = recordingWritableStream();
+
+ const pipePromise = rs.pipeTo(ws).then(() => {
+ assert_array_equals(ws.events, ['write', 'Hello', 'close']);
+ });
+
+ t.step_timeout(() => {
+ controller.enqueue('Hello');
+ t.step_timeout(() => controller.close(), 10);
+ }, 10);
+
+ return pipePromise;
+
+}, 'Piping from a ReadableStream for which a chunk becomes asynchronously readable after the pipeTo');
+
+for (const preventAbort of [true, false]) {
+ promise_test(() => {
+
+ const rs = new ReadableStream({
+ pull() {
+ return Promise.reject(undefined);
+ }
+ });
+
+ return rs.pipeTo(new WritableStream(), { preventAbort }).then(
+ () => assert_unreached('pipeTo promise should be rejected'),
+ value => assert_equals(value, undefined, 'rejection value should be undefined'));
+
+ }, `an undefined rejection from pull should cause pipeTo() to reject when preventAbort is ${preventAbort}`);
+}
+
+for (const preventCancel of [true, false]) {
+ promise_test(() => {
+
+ const rs = new ReadableStream({
+ pull(controller) {
+ controller.enqueue(0);
+ }
+ });
+
+ const ws = new WritableStream({
+ write() {
+ return Promise.reject(undefined);
+ }
+ });
+
+ return rs.pipeTo(ws, { preventCancel }).then(
+ () => assert_unreached('pipeTo promise should be rejected'),
+ value => assert_equals(value, undefined, 'rejection value should be undefined'));
+
+ }, `an undefined rejection from write should cause pipeTo() to reject when preventCancel is ${preventCancel}`);
+}
+
+promise_test(t => {
+ const rs = new ReadableStream();
+ const ws = new WritableStream();
+ return promise_rejects_js(t, TypeError, rs.pipeTo(ws, {
+ get preventAbort() {
+ ws.getWriter();
+ }
+ }), 'pipeTo should reject');
+}, 'pipeTo() should reject if an option getter grabs a writer');
+
+promise_test(t => {
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+ const ws = new WritableStream();
+
+ return rs.pipeTo(ws, null);
+}, 'pipeTo() promise should resolve if null is passed');
diff --git a/testing/web-platform/tests/streams/piping/multiple-propagation.any.js b/testing/web-platform/tests/streams/piping/multiple-propagation.any.js
new file mode 100644
index 0000000000..9be828a232
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/multiple-propagation.any.js
@@ -0,0 +1,227 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = new Error('error1!');
+error1.name = 'error1';
+
+const error2 = new Error('error2!');
+error2.name = 'error2';
+
+function createErroredWritableStream(t) {
+ return Promise.resolve().then(() => {
+ const ws = recordingWritableStream({
+ start(c) {
+ c.error(error2);
+ }
+ });
+
+ const writer = ws.getWriter();
+ return promise_rejects_exactly(t, error2, writer.closed, 'the writable stream must be errored with error2')
+ .then(() => {
+ writer.releaseLock();
+ assert_array_equals(ws.events, []);
+ return ws;
+ });
+ });
+}
+
+promise_test(t => {
+ const rs = recordingReadableStream({
+ start(c) {
+ c.error(error1);
+ }
+ });
+ const ws = recordingWritableStream({
+ start(c) {
+ c.error(error2);
+ }
+ });
+
+ // Trying to abort a stream that is erroring will give the writable's error
+ return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the writable stream\'s error').then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, []);
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'),
+ promise_rejects_exactly(t, error2, ws.getWriter().closed, 'the writable stream must be errored with error2')
+ ]);
+ });
+
+}, 'Piping from an errored readable stream to an erroring writable stream');
+
+promise_test(t => {
+ const rs = recordingReadableStream({
+ start(c) {
+ c.error(error1);
+ }
+ });
+
+ return createErroredWritableStream(t)
+ .then(ws => promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the readable stream\'s error'))
+ .then(() => {
+ assert_array_equals(rs.events, []);
+
+ return promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1');
+ });
+}, 'Piping from an errored readable stream to an errored writable stream');
+
+promise_test(t => {
+ const rs = recordingReadableStream({
+ start(c) {
+ c.error(error1);
+ }
+ });
+ const ws = recordingWritableStream({
+ start(c) {
+ c.error(error2);
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }),
+ 'pipeTo must reject with the readable stream\'s error')
+ .then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, []);
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'),
+ promise_rejects_exactly(t, error2, ws.getWriter().closed, 'the writable stream must be errored with error2')
+ ]);
+ });
+
+}, 'Piping from an errored readable stream to an erroring writable stream; preventAbort = true');
+
+promise_test(t => {
+ const rs = recordingReadableStream({
+ start(c) {
+ c.error(error1);
+ }
+ });
+ return createErroredWritableStream(t)
+ .then(ws => promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }),
+ 'pipeTo must reject with the readable stream\'s error'))
+ .then(() => {
+ assert_array_equals(rs.events, []);
+
+ return promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1');
+ });
+
+}, 'Piping from an errored readable stream to an errored writable stream; preventAbort = true');
+
+promise_test(t => {
+ const rs = recordingReadableStream({
+ start(c) {
+ c.error(error1);
+ }
+ });
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+ const closePromise = writer.close();
+ writer.releaseLock();
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the readable stream\'s error').then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, ['abort', error1]);
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'),
+ promise_rejects_exactly(t, error1, ws.getWriter().closed,
+ 'closed must reject with error1'),
+ promise_rejects_exactly(t, error1, closePromise,
+ 'close() must reject with error1')
+ ]);
+ });
+
+}, 'Piping from an errored readable stream to a closing writable stream');
+
+promise_test(t => {
+ const rs = recordingReadableStream({
+ start(c) {
+ c.error(error1);
+ }
+ });
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+ const closePromise = writer.close();
+ writer.releaseLock();
+
+ return flushAsyncEvents().then(() => {
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the readable stream\'s error').then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'),
+ ws.getWriter().closed,
+ closePromise
+ ]);
+ });
+ });
+
+}, 'Piping from an errored readable stream to a closed writable stream');
+
+promise_test(t => {
+ const rs = recordingReadableStream({
+ start(c) {
+ c.close();
+ }
+ });
+ const ws = recordingWritableStream({
+ start(c) {
+ c.error(error1);
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the writable stream\'s error').then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, []);
+
+ return Promise.all([
+ rs.getReader().closed,
+ promise_rejects_exactly(t, error1, ws.getWriter().closed, 'the writable stream must be errored with error1')
+ ]);
+ });
+
+}, 'Piping from a closed readable stream to an erroring writable stream');
+
+promise_test(t => {
+ const rs = recordingReadableStream({
+ start(c) {
+ c.close();
+ }
+ });
+ return createErroredWritableStream(t)
+ .then(ws => promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the writable stream\'s error'))
+ .then(() => {
+ assert_array_equals(rs.events, []);
+
+ return rs.getReader().closed;
+ });
+
+}, 'Piping from a closed readable stream to an errored writable stream');
+
+promise_test(() => {
+ const rs = recordingReadableStream({
+ start(c) {
+ c.close();
+ }
+ });
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+ writer.close();
+ writer.releaseLock();
+
+ return rs.pipeTo(ws).then(() => {
+ assert_array_equals(rs.events, []);
+ assert_array_equals(ws.events, ['close']);
+
+ return Promise.all([
+ rs.getReader().closed,
+ ws.getWriter().closed
+ ]);
+ });
+
+}, 'Piping from a closed readable stream to a closed writable stream');
diff --git a/testing/web-platform/tests/streams/piping/pipe-through.any.js b/testing/web-platform/tests/streams/piping/pipe-through.any.js
new file mode 100644
index 0000000000..339cee1999
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/pipe-through.any.js
@@ -0,0 +1,331 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/rs-utils.js
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+function duckTypedPassThroughTransform() {
+ let enqueueInReadable;
+ let closeReadable;
+
+ return {
+ writable: new WritableStream({
+ write(chunk) {
+ enqueueInReadable(chunk);
+ },
+
+ close() {
+ closeReadable();
+ }
+ }),
+
+ readable: new ReadableStream({
+ start(c) {
+ enqueueInReadable = c.enqueue.bind(c);
+ closeReadable = c.close.bind(c);
+ }
+ })
+ };
+}
+
+function uninterestingReadableWritablePair() {
+ return { writable: new WritableStream(), readable: new ReadableStream() };
+}
+
+promise_test(() => {
+ const readableEnd = sequentialReadableStream(5).pipeThrough(duckTypedPassThroughTransform());
+
+ return readableStreamToArray(readableEnd).then(chunks =>
+ assert_array_equals(chunks, [1, 2, 3, 4, 5]), 'chunks should match');
+}, 'Piping through a duck-typed pass-through transform stream should work');
+
+promise_test(() => {
+ const transform = {
+ writable: new WritableStream({
+ start(c) {
+ c.error(new Error('this rejection should not be reported as unhandled'));
+ }
+ }),
+ readable: new ReadableStream()
+ };
+
+ sequentialReadableStream(5).pipeThrough(transform);
+
+ // The test harness should complain about unhandled rejections by then.
+ return flushAsyncEvents();
+
+}, 'Piping through a transform errored on the writable end does not cause an unhandled promise rejection');
+
+test(() => {
+ let calledPipeTo = false;
+ class BadReadableStream extends ReadableStream {
+ pipeTo() {
+ calledPipeTo = true;
+ }
+ }
+
+ const brs = new BadReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+ const readable = new ReadableStream();
+ const writable = new WritableStream();
+ const result = brs.pipeThrough({ readable, writable });
+
+ assert_false(calledPipeTo, 'the overridden pipeTo should not have been called');
+ assert_equals(result, readable, 'return value should be the passed readable property');
+}, 'pipeThrough should not call pipeTo on this');
+
+test(t => {
+ let calledFakePipeTo = false;
+ const realPipeTo = ReadableStream.prototype.pipeTo;
+ t.add_cleanup(() => {
+ ReadableStream.prototype.pipeTo = realPipeTo;
+ });
+ ReadableStream.prototype.pipeTo = () => {
+ calledFakePipeTo = true;
+ };
+ const rs = new ReadableStream();
+ const readable = new ReadableStream();
+ const writable = new WritableStream();
+ const result = rs.pipeThrough({ readable, writable });
+
+ assert_false(calledFakePipeTo, 'the monkey-patched pipeTo should not have been called');
+ assert_equals(result, readable, 'return value should be the passed readable property');
+
+}, 'pipeThrough should not call pipeTo on the ReadableStream prototype');
+
+const badReadables = [null, undefined, 0, NaN, true, 'ReadableStream', Object.create(ReadableStream.prototype)];
+for (const readable of badReadables) {
+ test(() => {
+ assert_throws_js(TypeError,
+ ReadableStream.prototype.pipeThrough.bind(readable, uninterestingReadableWritablePair()),
+ 'pipeThrough should throw');
+ }, `pipeThrough should brand-check this and not allow '${readable}'`);
+
+ test(() => {
+ const rs = new ReadableStream();
+ let writableGetterCalled = false;
+ assert_throws_js(
+ TypeError,
+ () => rs.pipeThrough({
+ get writable() {
+ writableGetterCalled = true;
+ return new WritableStream();
+ },
+ readable
+ }),
+ 'pipeThrough should brand-check readable'
+ );
+ assert_false(writableGetterCalled, 'writable should not have been accessed');
+ }, `pipeThrough should brand-check readable and not allow '${readable}'`);
+}
+
+const badWritables = [null, undefined, 0, NaN, true, 'WritableStream', Object.create(WritableStream.prototype)];
+for (const writable of badWritables) {
+ test(() => {
+ const rs = new ReadableStream({
+ start(c) {
+ c.close();
+ }
+ });
+ let readableGetterCalled = false;
+ assert_throws_js(TypeError, () => rs.pipeThrough({
+ get readable() {
+ readableGetterCalled = true;
+ return new ReadableStream();
+ },
+ writable
+ }),
+ 'pipeThrough should brand-check writable');
+ assert_true(readableGetterCalled, 'readable should have been accessed');
+ }, `pipeThrough should brand-check writable and not allow '${writable}'`);
+}
+
+test(t => {
+ const error = new Error();
+ error.name = 'custom';
+
+ const rs = new ReadableStream({
+ pull: t.unreached_func('pull should not be called')
+ }, { highWaterMark: 0 });
+
+ const throwingWritable = {
+ readable: rs,
+ get writable() {
+ throw error;
+ }
+ };
+ assert_throws_exactly(error,
+ () => ReadableStream.prototype.pipeThrough.call(rs, throwingWritable, {}),
+ 'pipeThrough should rethrow the error thrown by the writable getter');
+
+ const throwingReadable = {
+ get readable() {
+ throw error;
+ },
+ writable: {}
+ };
+ assert_throws_exactly(error,
+ () => ReadableStream.prototype.pipeThrough.call(rs, throwingReadable, {}),
+ 'pipeThrough should rethrow the error thrown by the readable getter');
+
+}, 'pipeThrough should rethrow errors from accessing readable or writable');
+
+const badSignals = [null, 0, NaN, true, 'AbortSignal', Object.create(AbortSignal.prototype)];
+for (const signal of badSignals) {
+ test(() => {
+ const rs = new ReadableStream();
+ assert_throws_js(TypeError, () => rs.pipeThrough(uninterestingReadableWritablePair(), { signal }),
+ 'pipeThrough should throw');
+ }, `invalid values of signal should throw; specifically '${signal}'`);
+}
+
+test(() => {
+ const rs = new ReadableStream();
+ const controller = new AbortController();
+ const signal = controller.signal;
+ rs.pipeThrough(uninterestingReadableWritablePair(), { signal });
+}, 'pipeThrough should accept a real AbortSignal');
+
+test(() => {
+ const rs = new ReadableStream();
+ rs.getReader();
+ assert_throws_js(TypeError, () => rs.pipeThrough(uninterestingReadableWritablePair()),
+ 'pipeThrough should throw');
+}, 'pipeThrough should throw if this is locked');
+
+test(() => {
+ const rs = new ReadableStream();
+ const writable = new WritableStream();
+ const readable = new ReadableStream();
+ writable.getWriter();
+ assert_throws_js(TypeError, () => rs.pipeThrough({writable, readable}),
+ 'pipeThrough should throw');
+}, 'pipeThrough should throw if writable is locked');
+
+test(() => {
+ const rs = new ReadableStream();
+ const writable = new WritableStream();
+ const readable = new ReadableStream();
+ readable.getReader();
+ assert_equals(rs.pipeThrough({ writable, readable }), readable,
+ 'pipeThrough should not throw');
+}, 'pipeThrough should not care if readable is locked');
+
+promise_test(() => {
+ const rs = recordingReadableStream();
+ const writable = new WritableStream({
+ start(controller) {
+ controller.error();
+ }
+ });
+ const readable = new ReadableStream();
+ rs.pipeThrough({ writable, readable }, { preventCancel: true });
+ return flushAsyncEvents(0).then(() => {
+ assert_array_equals(rs.events, ['pull'], 'cancel should not have been called');
+ });
+}, 'preventCancel should work');
+
+promise_test(() => {
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.close();
+ }
+ });
+ const writable = recordingWritableStream();
+ const readable = new ReadableStream();
+ rs.pipeThrough({ writable, readable }, { preventClose: true });
+ return flushAsyncEvents(0).then(() => {
+ assert_array_equals(writable.events, [], 'writable should not be closed');
+ });
+}, 'preventClose should work');
+
+promise_test(() => {
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.error();
+ }
+ });
+ const writable = recordingWritableStream();
+ const readable = new ReadableStream();
+ rs.pipeThrough({ writable, readable }, { preventAbort: true });
+ return flushAsyncEvents(0).then(() => {
+ assert_array_equals(writable.events, [], 'writable should not be aborted');
+ });
+}, 'preventAbort should work');
+
+test(() => {
+ const rs = new ReadableStream();
+ const readable = new ReadableStream();
+ const writable = new WritableStream();
+ assert_throws_js(TypeError, () => rs.pipeThrough({readable, writable}, {
+ get preventAbort() {
+ writable.getWriter();
+ }
+ }), 'pipeThrough should throw');
+}, 'pipeThrough() should throw if an option getter grabs a writer');
+
+test(() => {
+ const rs = new ReadableStream();
+ const readable = new ReadableStream();
+ const writable = new WritableStream();
+ rs.pipeThrough({readable, writable}, null);
+}, 'pipeThrough() should not throw if option is null');
+
+test(() => {
+ const rs = new ReadableStream();
+ const readable = new ReadableStream();
+ const writable = new WritableStream();
+ rs.pipeThrough({readable, writable}, {signal:undefined});
+}, 'pipeThrough() should not throw if signal is undefined');
+
+function tryPipeThrough(pair, options)
+{
+ const rs = new ReadableStream();
+ if (!pair)
+ pair = {readable:new ReadableStream(), writable:new WritableStream()};
+ try {
+ rs.pipeThrough(pair, options)
+ } catch (e) {
+ return e;
+ }
+}
+
+test(() => {
+ let result = tryPipeThrough({
+ get readable() {
+ return new ReadableStream();
+ },
+ get writable() {
+ throw "writable threw";
+ }
+ }, { });
+ assert_equals(result, "writable threw");
+
+ result = tryPipeThrough({
+ get readable() {
+ throw "readable threw";
+ },
+ get writable() {
+ throw "writable threw";
+ }
+ }, { });
+ assert_equals(result, "readable threw");
+
+ result = tryPipeThrough({
+ get readable() {
+ throw "readable threw";
+ },
+ get writable() {
+ throw "writable threw";
+ }
+ }, {
+ get preventAbort() {
+ throw "preventAbort threw";
+ }
+ });
+ assert_equals(result, "readable threw");
+
+}, 'pipeThrough() should throw if readable/writable getters throw');
diff --git a/testing/web-platform/tests/streams/piping/then-interception.any.js b/testing/web-platform/tests/streams/piping/then-interception.any.js
new file mode 100644
index 0000000000..fc48c36831
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/then-interception.any.js
@@ -0,0 +1,68 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+function interceptThen() {
+ const intercepted = [];
+ let callCount = 0;
+ Object.prototype.then = function(resolver) {
+ if (!this.done) {
+ intercepted.push(this.value);
+ }
+ const retval = Object.create(null);
+ retval.done = ++callCount === 3;
+ retval.value = callCount;
+ resolver(retval);
+ if (retval.done) {
+ delete Object.prototype.then;
+ }
+ }
+ return intercepted;
+}
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.close();
+ }
+ });
+ const ws = recordingWritableStream();
+
+ const intercepted = interceptThen();
+ t.add_cleanup(() => {
+ delete Object.prototype.then;
+ });
+
+ await rs.pipeTo(ws);
+ delete Object.prototype.then;
+
+
+ assert_array_equals(intercepted, [], 'nothing should have been intercepted');
+ assert_array_equals(ws.events, ['write', 'a', 'close'], 'written chunk should be "a"');
+}, 'piping should not be observable');
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.close();
+ }
+ });
+ const ws = recordingWritableStream();
+
+ const [ branch1, branch2 ] = rs.tee();
+
+ const intercepted = interceptThen();
+ t.add_cleanup(() => {
+ delete Object.prototype.then;
+ });
+
+ await branch1.pipeTo(ws);
+ delete Object.prototype.then;
+ branch2.cancel();
+
+ assert_array_equals(intercepted, [], 'nothing should have been intercepted');
+ assert_array_equals(ws.events, ['write', 'a', 'close'], 'written chunk should be "a"');
+}, 'tee should not be observable');
diff --git a/testing/web-platform/tests/streams/piping/throwing-options.any.js b/testing/web-platform/tests/streams/piping/throwing-options.any.js
new file mode 100644
index 0000000000..186f8ded19
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/throwing-options.any.js
@@ -0,0 +1,65 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+class ThrowingOptions {
+ constructor(whatShouldThrow) {
+ this.whatShouldThrow = whatShouldThrow;
+ this.touched = [];
+ }
+
+ get preventClose() {
+ this.maybeThrow('preventClose');
+ return false;
+ }
+
+ get preventAbort() {
+ this.maybeThrow('preventAbort');
+ return false;
+ }
+
+ get preventCancel() {
+ this.maybeThrow('preventCancel');
+ return false;
+ }
+
+ get signal() {
+ this.maybeThrow('signal');
+ return undefined;
+ }
+
+ maybeThrow(forWhat) {
+ this.touched.push(forWhat);
+ if (this.whatShouldThrow === forWhat) {
+ throw new Error(this.whatShouldThrow);
+ }
+ }
+}
+
+const checkOrder = ['preventAbort', 'preventCancel', 'preventClose', 'signal'];
+
+for (let i = 0; i < checkOrder.length; ++i) {
+ const whatShouldThrow = checkOrder[i];
+ const whatShouldBeTouched = checkOrder.slice(0, i + 1);
+
+ promise_test(t => {
+ const options = new ThrowingOptions(whatShouldThrow);
+ return promise_rejects_js(
+ t, Error,
+ new ReadableStream().pipeTo(new WritableStream(), options),
+ 'pipeTo should reject')
+ .then(() => assert_array_equals(
+ options.touched, whatShouldBeTouched,
+ 'options should be touched in the right order'));
+ }, `pipeTo should stop after getting ${whatShouldThrow} throws`);
+
+ test(() => {
+ const options = new ThrowingOptions(whatShouldThrow);
+ assert_throws_js(
+ Error,
+ () => new ReadableStream().pipeThrough(new TransformStream(), options),
+ 'pipeThrough should throw');
+ assert_array_equals(
+ options.touched, whatShouldBeTouched,
+ 'options should be touched in the right order');
+ }, `pipeThrough should stop after getting ${whatShouldThrow} throws`);
+}
diff --git a/testing/web-platform/tests/streams/piping/transform-streams.any.js b/testing/web-platform/tests/streams/piping/transform-streams.any.js
new file mode 100644
index 0000000000..e079bb637c
--- /dev/null
+++ b/testing/web-platform/tests/streams/piping/transform-streams.any.js
@@ -0,0 +1,22 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+promise_test(() => {
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ c.enqueue('c');
+ c.close();
+ }
+ });
+
+ const ts = new TransformStream();
+
+ const ws = new WritableStream();
+
+ return rs.pipeThrough(ts).pipeTo(ws).then(() => {
+ const writer = ws.getWriter();
+ return writer.closed;
+ });
+}, 'Piping through an identity transform stream should close the destination when the source closes');
diff --git a/testing/web-platform/tests/streams/queuing-strategies-size-function-per-global.window.js b/testing/web-platform/tests/streams/queuing-strategies-size-function-per-global.window.js
new file mode 100644
index 0000000000..0f869f13b3
--- /dev/null
+++ b/testing/web-platform/tests/streams/queuing-strategies-size-function-per-global.window.js
@@ -0,0 +1,14 @@
+const iframe = document.createElement('iframe');
+document.body.appendChild(iframe);
+
+for (const type of ['CountQueuingStrategy', 'ByteLengthQueuingStrategy']) {
+ test(() => {
+ const myQs = new window[type]({ highWaterMark: 1 });
+ const yourQs = new iframe.contentWindow[type]({ highWaterMark: 1 });
+ assert_not_equals(myQs.size, yourQs.size,
+ 'size should not be the same object');
+ }, `${type} size should be different for objects in different realms`);
+}
+
+// Cleanup the document to avoid messing up the result page.
+iframe.remove();
diff --git a/testing/web-platform/tests/streams/queuing-strategies.any.js b/testing/web-platform/tests/streams/queuing-strategies.any.js
new file mode 100644
index 0000000000..9efc4570cf
--- /dev/null
+++ b/testing/web-platform/tests/streams/queuing-strategies.any.js
@@ -0,0 +1,150 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+const highWaterMarkConversions = new Map([
+ [-Infinity, -Infinity],
+ [-5, -5],
+ [false, 0],
+ [true, 1],
+ [NaN, NaN],
+ ['foo', NaN],
+ ['0', 0],
+ [{}, NaN],
+ [() => {}, NaN]
+]);
+
+for (const QueuingStrategy of [CountQueuingStrategy, ByteLengthQueuingStrategy]) {
+ test(() => {
+ new QueuingStrategy({ highWaterMark: 4 });
+ }, `${QueuingStrategy.name}: Can construct a with a valid high water mark`);
+
+ test(() => {
+ const highWaterMark = 1;
+ const highWaterMarkObjectGetter = {
+ get highWaterMark() { return highWaterMark; }
+ };
+ const error = new Error('wow!');
+ const highWaterMarkObjectGetterThrowing = {
+ get highWaterMark() { throw error; }
+ };
+
+ assert_throws_js(TypeError, () => new QueuingStrategy(), 'construction fails with undefined');
+ assert_throws_js(TypeError, () => new QueuingStrategy(null), 'construction fails with null');
+ assert_throws_js(TypeError, () => new QueuingStrategy(true), 'construction fails with true');
+ assert_throws_js(TypeError, () => new QueuingStrategy(5), 'construction fails with 5');
+ assert_throws_js(TypeError, () => new QueuingStrategy({}), 'construction fails with {}');
+ assert_throws_exactly(error, () => new QueuingStrategy(highWaterMarkObjectGetterThrowing),
+ 'construction fails with an object with a throwing highWaterMark getter');
+
+ assert_equals((new QueuingStrategy(highWaterMarkObjectGetter)).highWaterMark, highWaterMark);
+ }, `${QueuingStrategy.name}: Constructor behaves as expected with strange arguments`);
+
+ test(() => {
+ for (const [input, output] of highWaterMarkConversions.entries()) {
+ const strategy = new QueuingStrategy({ highWaterMark: input });
+ assert_equals(strategy.highWaterMark, output, `${input} gets set correctly`);
+ }
+ }, `${QueuingStrategy.name}: highWaterMark constructor values are converted per the unrestricted double rules`);
+
+ test(() => {
+ const size1 = (new QueuingStrategy({ highWaterMark: 5 })).size;
+ const size2 = (new QueuingStrategy({ highWaterMark: 10 })).size;
+
+ assert_equals(size1, size2);
+ }, `${QueuingStrategy.name}: size is the same function across all instances`);
+
+ test(() => {
+ const size = (new QueuingStrategy({ highWaterMark: 5 })).size;
+ assert_equals(size.name, 'size');
+ }, `${QueuingStrategy.name}: size should have the right name`);
+
+ test(() => {
+ class SubClass extends QueuingStrategy {
+ size() {
+ return 2;
+ }
+
+ subClassMethod() {
+ return true;
+ }
+ }
+
+ const sc = new SubClass({ highWaterMark: 77 });
+ assert_equals(sc.constructor.name, 'SubClass', 'constructor.name should be correct');
+ assert_equals(sc.highWaterMark, 77, 'highWaterMark should come from the parent class');
+ assert_equals(sc.size(), 2, 'size() on the subclass should override the parent');
+ assert_true(sc.subClassMethod(), 'subClassMethod() should work');
+ }, `${QueuingStrategy.name}: subclassing should work correctly`);
+
+ test(() => {
+ const size = new QueuingStrategy({ highWaterMark: 5 }).size;
+ assert_false('prototype' in size);
+ }, `${QueuingStrategy.name}: size should not have a prototype property`);
+}
+
+test(() => {
+ const size = new CountQueuingStrategy({ highWaterMark: 5 }).size;
+ assert_throws_js(TypeError, () => new size());
+}, `CountQueuingStrategy: size should not be a constructor`);
+
+test(() => {
+ const size = new ByteLengthQueuingStrategy({ highWaterMark: 5 }).size;
+ assert_throws_js(TypeError, () => new size({ byteLength: 1024 }));
+}, `ByteLengthQueuingStrategy: size should not be a constructor`);
+
+test(() => {
+ const size = (new CountQueuingStrategy({ highWaterMark: 5 })).size;
+ assert_equals(size.length, 0);
+}, 'CountQueuingStrategy: size should have the right length');
+
+test(() => {
+ const size = (new ByteLengthQueuingStrategy({ highWaterMark: 5 })).size;
+ assert_equals(size.length, 1);
+}, 'ByteLengthQueuingStrategy: size should have the right length');
+
+test(() => {
+ const size = 1024;
+ const chunk = { byteLength: size };
+ const chunkGetter = {
+ get byteLength() { return size; }
+ };
+ const error = new Error('wow!');
+ const chunkGetterThrowing = {
+ get byteLength() { throw error; }
+ };
+
+ const sizeFunction = (new CountQueuingStrategy({ highWaterMark: 5 })).size;
+
+ assert_equals(sizeFunction(), 1, 'size returns 1 with undefined');
+ assert_equals(sizeFunction(null), 1, 'size returns 1 with null');
+ assert_equals(sizeFunction('potato'), 1, 'size returns 1 with non-object type');
+ assert_equals(sizeFunction({}), 1, 'size returns 1 with empty object');
+ assert_equals(sizeFunction(chunk), 1, 'size returns 1 with a chunk');
+ assert_equals(sizeFunction(chunkGetter), 1, 'size returns 1 with chunk getter');
+ assert_equals(sizeFunction(chunkGetterThrowing), 1,
+ 'size returns 1 with chunk getter that throws');
+}, 'CountQueuingStrategy: size behaves as expected with strange arguments');
+
+test(() => {
+ const size = 1024;
+ const chunk = { byteLength: size };
+ const chunkGetter = {
+ get byteLength() { return size; }
+ };
+ const error = new Error('wow!');
+ const chunkGetterThrowing = {
+ get byteLength() { throw error; }
+ };
+
+ const sizeFunction = (new ByteLengthQueuingStrategy({ highWaterMark: 5 })).size;
+
+ assert_throws_js(TypeError, () => sizeFunction(), 'size fails with undefined');
+ assert_throws_js(TypeError, () => sizeFunction(null), 'size fails with null');
+ assert_equals(sizeFunction('potato'), undefined, 'size succeeds with undefined with a random non-object type');
+ assert_equals(sizeFunction({}), undefined, 'size succeeds with undefined with an object without hwm property');
+ assert_equals(sizeFunction(chunk), size, 'size succeeds with the right amount with an object with a hwm');
+ assert_equals(sizeFunction(chunkGetter), size,
+ 'size succeeds with the right amount with an object with a hwm getter');
+ assert_throws_exactly(error, () => sizeFunction(chunkGetterThrowing),
+ 'size fails with the error thrown by the getter');
+}, 'ByteLengthQueuingStrategy: size behaves as expected with strange arguments');
diff --git a/testing/web-platform/tests/streams/readable-byte-streams/bad-buffers-and-views.any.js b/testing/web-platform/tests/streams/readable-byte-streams/bad-buffers-and-views.any.js
new file mode 100644
index 0000000000..afcc61e680
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-byte-streams/bad-buffers-and-views.any.js
@@ -0,0 +1,398 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+promise_test(() => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.close();
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+ const view = new Uint8Array([1, 2, 3]);
+ return reader.read(view).then(({ value, done }) => {
+ // Sanity checks
+ assert_true(value instanceof Uint8Array, 'The value read must be a Uint8Array');
+ assert_not_equals(value, view, 'The value read must not be the *same* Uint8Array');
+ assert_array_equals(value, [], 'The value read must be an empty Uint8Array, since the stream is closed');
+ assert_true(done, 'done must be true, since the stream is closed');
+
+ // The important assertions
+ assert_not_equals(value.buffer, view.buffer, 'a different ArrayBuffer must underlie the value');
+ assert_equals(view.buffer.byteLength, 0, 'the original buffer must be detached');
+ });
+}, 'ReadableStream with byte source: read()ing from a closed stream still transfers the buffer');
+
+promise_test(() => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(new Uint8Array([1, 2, 3]));
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+ const view = new Uint8Array([4, 5, 6]);
+ return reader.read(view).then(({ value, done }) => {
+ // Sanity checks
+ assert_true(value instanceof Uint8Array, 'The value read must be a Uint8Array');
+ assert_not_equals(value, view, 'The value read must not be the *same* Uint8Array');
+ assert_array_equals(value, [1, 2, 3], 'The value read must be the enqueued Uint8Array, not the original values');
+ assert_false(done, 'done must be false, since the stream is not closed');
+
+ // The important assertions
+ assert_not_equals(value.buffer, view.buffer, 'a different ArrayBuffer must underlie the value');
+ assert_equals(view.buffer.byteLength, 0, 'the original buffer must be detached');
+ });
+}, 'ReadableStream with byte source: read()ing from a stream with queued chunks still transfers the buffer');
+
+test(() => {
+ new ReadableStream({
+ start(c) {
+ const view = new Uint8Array([1, 2, 3]);
+ c.enqueue(view);
+ assert_throws_js(TypeError, () => c.enqueue(view));
+ },
+ type: 'bytes'
+ });
+}, 'ReadableStream with byte source: enqueuing an already-detached buffer throws');
+
+test(() => {
+ new ReadableStream({
+ start(c) {
+ const view = new Uint8Array([]);
+ assert_throws_js(TypeError, () => c.enqueue(view));
+ },
+ type: 'bytes'
+ });
+}, 'ReadableStream with byte source: enqueuing a zero-length buffer throws');
+
+test(() => {
+ new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(new ArrayBuffer(10), 0, 0);
+ assert_throws_js(TypeError, () => c.enqueue(view));
+ },
+ type: 'bytes'
+ });
+}, 'ReadableStream with byte source: enqueuing a zero-length view on a non-zero-length buffer throws');
+
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(new Uint8Array([1, 2, 3]));
+ },
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const view = new Uint8Array([4, 5, 6]);
+ return reader.read(view).then(() => {
+ // view is now detached
+ return promise_rejects_js(t, TypeError, reader.read(view));
+ });
+}, 'ReadableStream with byte source: reading into an already-detached buffer rejects');
+
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(new Uint8Array([1, 2, 3]));
+ },
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const view = new Uint8Array();
+ return promise_rejects_js(t, TypeError, reader.read(view));
+}, 'ReadableStream with byte source: reading into a zero-length buffer rejects');
+
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(new Uint8Array([1, 2, 3]));
+ },
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const view = new Uint8Array(new ArrayBuffer(10), 0, 0);
+ return promise_rejects_js(t, TypeError, reader.read(view));
+}, 'ReadableStream with byte source: reading into a zero-length view on a non-zero-length buffer rejects');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ // Detach it by reading into it
+ reader.read(c.byobRequest.view);
+
+ assert_throws_js(TypeError, () => c.byobRequest.respond(1),
+ 'respond() must throw if the corresponding view has become detached');
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respond() throws if the BYOB request\'s buffer has been detached (in the ' +
+ 'readable state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ c.close();
+
+ // Detach it by reading into it
+ reader.read(c.byobRequest.view);
+
+ assert_throws_js(TypeError, () => c.byobRequest.respond(0),
+ 'respond() must throw if the corresponding view has become detached');
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respond() throws if the BYOB request\'s buffer has been detached (in the ' +
+ 'closed state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ // Detach it by reading into it
+ const view = new Uint8Array([1, 2, 3]);
+ reader.read(view);
+
+ assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has been detached ' +
+ '(in the readable state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ const view = new Uint8Array();
+
+ assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer is zero-length ' +
+ '(in the readable state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ const view = new Uint8Array(c.byobRequest.view.buffer, 0, 0);
+
+ assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view is zero-length on a ' +
+ 'non-zero-length buffer (in the readable state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ const view = c.byobRequest.view.subarray(1, 2);
+
+ assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view has a different offset ' +
+ '(in the readable state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ c.close();
+
+ const view = c.byobRequest.view.subarray(1, 1);
+
+ assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view has a different offset ' +
+ '(in the closed state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ const view = new Uint8Array(new ArrayBuffer(10), 0, 3);
+
+ assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has a ' +
+ 'different length (in the readable state)');
+
+async_test(t => {
+ // Tests https://github.com/nodejs/node/issues/41886
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ const view = new Uint8Array(new ArrayBuffer(11), 0, 3);
+
+ assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes',
+ autoAllocateChunkSize: 10
+ });
+ const reader = stream.getReader();
+
+ reader.read();
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has a ' +
+ 'different length (autoAllocateChunkSize)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ const view = new Uint8Array(c.byobRequest.view.buffer, 0, 4);
+ view[0] = 20;
+ view[1] = 21;
+ view[2] = 22;
+ view[3] = 23;
+
+ assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const buffer = new ArrayBuffer(10);
+ const view = new Uint8Array(buffer, 0, 3);
+ view[0] = 10;
+ view[1] = 11;
+ view[2] = 12;
+ reader.read(view);
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view has a larger length ' +
+ '(in the readable state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ c.close();
+
+ // Detach it by reading into it
+ const view = new Uint8Array([1, 2, 3]);
+ reader.read(view);
+
+ assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has been detached ' +
+ '(in the closed state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ const view = new Uint8Array();
+
+ c.close();
+
+ assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer is zero-length ' +
+ '(in the closed state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ const view = new Uint8Array(c.byobRequest.view.buffer, 0, 1);
+
+ c.close();
+
+ assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view is non-zero-length ' +
+ '(in the closed state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ const view = new Uint8Array(new ArrayBuffer(10), 0, 0);
+
+ c.close();
+
+ assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view));
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has a ' +
+ 'different length (in the closed state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ // Detach it by reading into it
+ reader.read(c.byobRequest.view);
+
+ assert_throws_js(TypeError, () => c.enqueue(new Uint8Array([1])),
+ 'enqueue() must throw if the BYOB request\'s buffer has become detached');
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: enqueue() throws if the BYOB request\'s buffer has been detached (in the ' +
+ 'readable state)');
+
+async_test(t => {
+ const stream = new ReadableStream({
+ pull: t.step_func_done(c => {
+ c.close();
+
+ // Detach it by reading into it
+ reader.read(c.byobRequest.view);
+
+ assert_throws_js(TypeError, () => c.enqueue(new Uint8Array([1])),
+ 'enqueue() must throw if the BYOB request\'s buffer has become detached');
+ }),
+ type: 'bytes'
+ });
+ const reader = stream.getReader({ mode: 'byob' });
+
+ reader.read(new Uint8Array([4, 5, 6]));
+}, 'ReadableStream with byte source: enqueue() throws if the BYOB request\'s buffer has been detached (in the ' +
+ 'closed state)');
diff --git a/testing/web-platform/tests/streams/readable-byte-streams/construct-byob-request.any.js b/testing/web-platform/tests/streams/readable-byte-streams/construct-byob-request.any.js
new file mode 100644
index 0000000000..a26f949ee2
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-byte-streams/construct-byob-request.any.js
@@ -0,0 +1,53 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/rs-utils.js
+'use strict';
+
+// Prior to whatwg/stream#870 it was possible to construct a ReadableStreamBYOBRequest directly. This made it possible
+// to construct requests that were out-of-sync with the state of the ReadableStream. They could then be used to call
+// internal operations, resulting in asserts or bad behaviour. This file contains regression tests for the change.
+
+function getRealByteStreamController() {
+ let controller;
+ new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ type: 'bytes'
+ });
+ return controller;
+}
+
+// Create an object pretending to have prototype |prototype|, of type |type|. |type| is one of "undefined", "null",
+// "fake", or "real". "real" will call the realObjectCreator function to get a real instance of the object.
+function createDummyObject(prototype, type, realObjectCreator) {
+ switch (type) {
+ case 'undefined':
+ return undefined;
+
+ case 'null':
+ return null;
+
+ case 'fake':
+ return Object.create(prototype);
+
+ case 'real':
+ return realObjectCreator();
+ }
+
+ throw new Error('not reached');
+}
+
+const dummyTypes = ['undefined', 'null', 'fake', 'real'];
+
+for (const controllerType of dummyTypes) {
+ const controller = createDummyObject(ReadableByteStreamController.prototype, controllerType,
+ getRealByteStreamController);
+ for (const viewType of dummyTypes) {
+ const view = createDummyObject(Uint8Array.prototype, viewType, () => new Uint8Array(16));
+ test(() => {
+ assert_throws_js(TypeError, () => new ReadableStreamBYOBRequest(controller, view),
+ 'constructor should throw');
+ }, `ReadableStreamBYOBRequest constructor should throw when passed a ${controllerType} ` +
+ `ReadableByteStreamController and a ${viewType} view`);
+ }
+}
diff --git a/testing/web-platform/tests/streams/readable-byte-streams/enqueue-with-detached-buffer.any.js b/testing/web-platform/tests/streams/readable-byte-streams/enqueue-with-detached-buffer.any.js
new file mode 100644
index 0000000000..92bd0a26a0
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-byte-streams/enqueue-with-detached-buffer.any.js
@@ -0,0 +1,21 @@
+// META: global=window,worker,shadowrealm
+
+promise_test(async t => {
+ const error = new Error('cannot proceed');
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.step_func((controller) => {
+ const buffer = controller.byobRequest.view.buffer;
+ // Detach the buffer.
+ structuredClone(buffer, { transfer: [buffer] });
+
+ // Try to enqueue with a new buffer.
+ assert_throws_js(TypeError, () => controller.enqueue(new Uint8Array([42])));
+
+ // If we got here the test passed.
+ controller.error(error);
+ })
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+ await promise_rejects_exactly(t, error, reader.read(new Uint8Array(1)));
+}, 'enqueue after detaching byobRequest.view.buffer should throw');
diff --git a/testing/web-platform/tests/streams/readable-byte-streams/general.any.js b/testing/web-platform/tests/streams/readable-byte-streams/general.any.js
new file mode 100644
index 0000000000..cdce2244c3
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-byte-streams/general.any.js
@@ -0,0 +1,2901 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/rs-utils.js
+// META: script=../resources/test-utils.js
+'use strict';
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+test(() => {
+ assert_throws_js(TypeError, () => new ReadableStream().getReader({ mode: 'byob' }));
+}, 'getReader({mode: "byob"}) throws on non-bytes streams');
+
+
+test(() => {
+ // Constructing ReadableStream with an empty underlying byte source object as parameter shouldn't throw.
+ new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' });
+ // Constructor must perform ToString(type).
+ new ReadableStream({ type: { toString() {return 'bytes';} } })
+ .getReader({ mode: 'byob' });
+ new ReadableStream({ type: { toString: null, valueOf() {return 'bytes';} } })
+ .getReader({ mode: 'byob' });
+}, 'ReadableStream with byte source can be constructed with no errors');
+
+test(() => {
+ const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor;
+ const rs = new ReadableStream({ type: 'bytes' });
+
+ let reader = rs.getReader({ mode: { toString() { return 'byob'; } } });
+ assert_true(reader instanceof ReadableStreamBYOBReader, 'must give a BYOB reader');
+ reader.releaseLock();
+
+ reader = rs.getReader({ mode: { toString: null, valueOf() {return 'byob';} } });
+ assert_true(reader instanceof ReadableStreamBYOBReader, 'must give a BYOB reader');
+ reader.releaseLock();
+
+ reader = rs.getReader({ mode: 'byob', notmode: 'ignored' });
+ assert_true(reader instanceof ReadableStreamBYOBReader, 'must give a BYOB reader');
+}, 'getReader({mode}) must perform ToString()');
+
+promise_test(() => {
+ let startCalled = false;
+ let startCalledBeforePull = false;
+ let desiredSize;
+ let controller;
+
+ let resolveTestPromise;
+ const testPromise = new Promise(resolve => {
+ resolveTestPromise = resolve;
+ });
+
+ new ReadableStream({
+ start(c) {
+ controller = c;
+ startCalled = true;
+ },
+ pull() {
+ startCalledBeforePull = startCalled;
+ desiredSize = controller.desiredSize;
+ resolveTestPromise();
+ },
+ type: 'bytes'
+ }, {
+ highWaterMark: 256
+ });
+
+ return testPromise.then(() => {
+ assert_true(startCalledBeforePull, 'start should be called before pull');
+ assert_equals(desiredSize, 256, 'desiredSize should equal highWaterMark');
+ });
+
+}, 'ReadableStream with byte source: Construct and expect start and pull being called');
+
+promise_test(() => {
+ let pullCount = 0;
+ let checkedNoPull = false;
+
+ let resolveTestPromise;
+ const testPromise = new Promise(resolve => {
+ resolveTestPromise = resolve;
+ });
+ let resolveStartPromise;
+
+ new ReadableStream({
+ start() {
+ return new Promise(resolve => {
+ resolveStartPromise = resolve;
+ });
+ },
+ pull() {
+ if (checkedNoPull) {
+ resolveTestPromise();
+ }
+
+ ++pullCount;
+ },
+ type: 'bytes'
+ }, {
+ highWaterMark: 256
+ });
+
+ Promise.resolve().then(() => {
+ assert_equals(pullCount, 0);
+ checkedNoPull = true;
+ resolveStartPromise();
+ });
+
+ return testPromise;
+
+}, 'ReadableStream with byte source: No automatic pull call if start doesn\'t finish');
+
+test(() => {
+ assert_throws_js(Error, () => new ReadableStream({ start() { throw new Error(); }, type:'bytes' }),
+ 'start() can throw an exception with type: bytes');
+}, 'ReadableStream with byte source: start() throws an exception');
+
+promise_test(t => {
+ new ReadableStream({
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ }, {
+ highWaterMark: 0
+ });
+
+ return Promise.resolve();
+}, 'ReadableStream with byte source: Construct with highWaterMark of 0');
+
+test(() => {
+ new ReadableStream({
+ start(c) {
+ assert_equals(c.desiredSize, 10, 'desiredSize must start at the highWaterMark');
+ c.close();
+ assert_equals(c.desiredSize, 0, 'after closing, desiredSize must be 0');
+ },
+ type: 'bytes'
+ }, {
+ highWaterMark: 10
+ });
+}, 'ReadableStream with byte source: desiredSize when closed');
+
+test(() => {
+ new ReadableStream({
+ start(c) {
+ assert_equals(c.desiredSize, 10, 'desiredSize must start at the highWaterMark');
+ c.error();
+ assert_equals(c.desiredSize, null, 'after erroring, desiredSize must be null');
+ },
+ type: 'bytes'
+ }, {
+ highWaterMark: 10
+ });
+}, 'ReadableStream with byte source: desiredSize when errored');
+
+promise_test(t => {
+ const stream = new ReadableStream({
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader();
+ reader.releaseLock();
+
+ return promise_rejects_js(t, TypeError, reader.closed, 'closed must reject');
+}, 'ReadableStream with byte source: getReader(), then releaseLock()');
+
+promise_test(t => {
+ const stream = new ReadableStream({
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+ reader.releaseLock();
+
+ return promise_rejects_js(t, TypeError, reader.closed, 'closed must reject');
+}, 'ReadableStream with byte source: getReader() with mode set to byob, then releaseLock()');
+
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.close();
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader();
+
+ return reader.closed.then(() => {
+ assert_throws_js(TypeError, () => stream.getReader(), 'getReader() must throw');
+ });
+}, 'ReadableStream with byte source: Test that closing a stream does not release a reader automatically');
+
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.close();
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.closed.then(() => {
+ assert_throws_js(TypeError, () => stream.getReader({ mode: 'byob' }), 'getReader() must throw');
+ });
+}, 'ReadableStream with byte source: Test that closing a stream does not release a BYOB reader automatically');
+
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.error(error1);
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader();
+
+ return promise_rejects_exactly(t, error1, reader.closed, 'closed must reject').then(() => {
+ assert_throws_js(TypeError, () => stream.getReader(), 'getReader() must throw');
+ });
+}, 'ReadableStream with byte source: Test that erroring a stream does not release a reader automatically');
+
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.error(error1);
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return promise_rejects_exactly(t, error1, reader.closed, 'closed must reject').then(() => {
+ assert_throws_js(TypeError, () => stream.getReader({ mode: 'byob' }), 'getReader() must throw');
+ });
+}, 'ReadableStream with byte source: Test that erroring a stream does not release a BYOB reader automatically');
+
+promise_test(async t => {
+ const stream = new ReadableStream({
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader();
+ const read = reader.read();
+ reader.releaseLock();
+ await promise_rejects_js(t, TypeError, read, 'pending read must reject');
+}, 'ReadableStream with byte source: releaseLock() on ReadableStreamDefaultReader must reject pending read()');
+
+promise_test(async t => {
+ const stream = new ReadableStream({
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+ const read = reader.read(new Uint8Array(1));
+ reader.releaseLock();
+ await promise_rejects_js(t, TypeError, read, 'pending read must reject');
+}, 'ReadableStream with byte source: releaseLock() on ReadableStreamBYOBReader must reject pending read()');
+
+promise_test(() => {
+ let pullCount = 0;
+
+ const stream = new ReadableStream({
+ pull() {
+ ++pullCount;
+ },
+ type: 'bytes'
+ }, {
+ highWaterMark: 8
+ });
+
+ stream.getReader();
+
+ assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream');
+
+ return Promise.resolve().then(() => {
+ assert_equals(pullCount, 1, 'pull must be invoked');
+ });
+}, 'ReadableStream with byte source: Automatic pull() after start()');
+
+promise_test(() => {
+ let pullCount = 0;
+
+ const stream = new ReadableStream({
+ pull() {
+ ++pullCount;
+ },
+ type: 'bytes'
+ }, {
+ highWaterMark: 0
+ });
+
+ const reader = stream.getReader();
+ reader.read();
+
+ assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream');
+
+ return Promise.resolve().then(() => {
+ assert_equals(pullCount, 1, 'pull must be invoked');
+ });
+}, 'ReadableStream with byte source: Automatic pull() after start() and read()');
+
+// View buffers are detached after pull() returns, so record the information at the time that pull() was called.
+function extractViewInfo(view) {
+ return {
+ constructor: view.constructor,
+ bufferByteLength: view.buffer.byteLength,
+ byteOffset: view.byteOffset,
+ byteLength: view.byteLength
+ };
+}
+
+promise_test(() => {
+ let pullCount = 0;
+ let controller;
+ const byobRequests = [];
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ const byobRequest = controller.byobRequest;
+ const view = byobRequest.view;
+ byobRequests[pullCount] = {
+ nonNull: byobRequest !== null,
+ viewNonNull: view !== null,
+ viewInfo: extractViewInfo(view)
+ };
+ if (pullCount === 0) {
+ view[0] = 0x01;
+ byobRequest.respond(1);
+ } else if (pullCount === 1) {
+ view[0] = 0x02;
+ view[1] = 0x03;
+ byobRequest.respond(2);
+ }
+
+ ++pullCount;
+ },
+ type: 'bytes',
+ autoAllocateChunkSize: 16
+ }, {
+ highWaterMark: 0
+ });
+
+ const reader = stream.getReader();
+ const p0 = reader.read();
+ const p1 = reader.read();
+
+ assert_equals(pullCount, 0, 'No pull() as start() just finished and is not yet reflected to the state of the stream');
+
+ return Promise.resolve().then(() => {
+ assert_equals(pullCount, 1, 'pull() must have been invoked once');
+ const byobRequest = byobRequests[0];
+ assert_true(byobRequest.nonNull, 'first byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 16, 'first view.buffer.byteLength should be 16');
+ assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 16, 'first view.byteLength should be 16');
+
+ return p0;
+ }).then(result => {
+ assert_equals(pullCount, 2, 'pull() must have been invoked twice');
+ const value = result.value;
+ assert_not_equals(value, undefined, 'first read should have a value');
+ assert_equals(value.constructor, Uint8Array, 'first value should be a Uint8Array');
+ assert_equals(value.buffer.byteLength, 16, 'first value.buffer.byteLength should be 16');
+ assert_equals(value.byteOffset, 0, 'first value.byteOffset should be 0');
+ assert_equals(value.byteLength, 1, 'first value.byteLength should be 1');
+ assert_equals(value[0], 0x01, 'first value[0] should be 0x01');
+ const byobRequest = byobRequests[1];
+ assert_true(byobRequest.nonNull, 'second byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 16, 'second view.buffer.byteLength should be 16');
+ assert_equals(viewInfo.byteOffset, 0, 'second view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 16, 'second view.byteLength should be 16');
+
+ return p1;
+ }).then(result => {
+ assert_equals(pullCount, 2, 'pull() should only be invoked twice');
+ const value = result.value;
+ assert_not_equals(value, undefined, 'second read should have a value');
+ assert_equals(value.constructor, Uint8Array, 'second value should be a Uint8Array');
+ assert_equals(value.buffer.byteLength, 16, 'second value.buffer.byteLength should be 16');
+ assert_equals(value.byteOffset, 0, 'second value.byteOffset should be 0');
+ assert_equals(value.byteLength, 2, 'second value.byteLength should be 2');
+ assert_equals(value[0], 0x02, 'second value[0] should be 0x02');
+ assert_equals(value[1], 0x03, 'second value[1] should be 0x03');
+ });
+}, 'ReadableStream with byte source: autoAllocateChunkSize');
+
+promise_test(() => {
+ let pullCount = 0;
+ let controller;
+ const byobRequests = [];
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ const byobRequest = controller.byobRequest;
+ const view = byobRequest.view;
+ byobRequests[pullCount] = {
+ nonNull: byobRequest !== null,
+ viewNonNull: view !== null,
+ viewInfo: extractViewInfo(view)
+ };
+ if (pullCount === 0) {
+ view[0] = 0x01;
+ byobRequest.respond(1);
+ } else if (pullCount === 1) {
+ view[0] = 0x02;
+ view[1] = 0x03;
+ byobRequest.respond(2);
+ }
+
+ ++pullCount;
+ },
+ type: 'bytes',
+ autoAllocateChunkSize: 16
+ }, {
+ highWaterMark: 0
+ });
+
+ const reader = stream.getReader();
+ return reader.read().then(result => {
+ const value = result.value;
+ assert_not_equals(value, undefined, 'first read should have a value');
+ assert_equals(value.constructor, Uint8Array, 'first value should be a Uint8Array');
+ assert_equals(value.buffer.byteLength, 16, 'first value.buffer.byteLength should be 16');
+ assert_equals(value.byteOffset, 0, 'first value.byteOffset should be 0');
+ assert_equals(value.byteLength, 1, 'first value.byteLength should be 1');
+ assert_equals(value[0], 0x01, 'first value[0] should be 0x01');
+ const byobRequest = byobRequests[0];
+ assert_true(byobRequest.nonNull, 'first byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 16, 'first view.buffer.byteLength should be 16');
+ assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 16, 'first view.byteLength should be 16');
+
+ reader.releaseLock();
+ const byobReader = stream.getReader({ mode: 'byob' });
+ return byobReader.read(new Uint8Array(32));
+ }).then(result => {
+ const value = result.value;
+ assert_not_equals(value, undefined, 'second read should have a value');
+ assert_equals(value.constructor, Uint8Array, 'second value should be a Uint8Array');
+ assert_equals(value.buffer.byteLength, 32, 'second value.buffer.byteLength should be 32');
+ assert_equals(value.byteOffset, 0, 'second value.byteOffset should be 0');
+ assert_equals(value.byteLength, 2, 'second value.byteLength should be 2');
+ assert_equals(value[0], 0x02, 'second value[0] should be 0x02');
+ assert_equals(value[1], 0x03, 'second value[1] should be 0x03');
+ const byobRequest = byobRequests[1];
+ assert_true(byobRequest.nonNull, 'second byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 32, 'second view.buffer.byteLength should be 32');
+ assert_equals(viewInfo.byteOffset, 0, 'second view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 32, 'second view.byteLength should be 32');
+ assert_equals(pullCount, 2, 'pullCount should be 2');
+ });
+}, 'ReadableStream with byte source: Mix of auto allocate and BYOB');
+
+// read(view) right after construction must not trigger pull() synchronously;
+// the first pull() lands on the next microtask.
+promise_test(() => {
+ let pullCount = 0;
+
+ const stream = new ReadableStream({
+ pull() {
+ ++pullCount;
+ },
+ type: 'bytes'
+ }, {
+ highWaterMark: 0
+ });
+
+ const reader = stream.getReader();
+ reader.read(new Uint8Array(8));
+
+ assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream');
+
+ return Promise.resolve().then(() => {
+ assert_equals(pullCount, 1, 'pull must be invoked');
+ });
+}, 'ReadableStream with byte source: Automatic pull() after start() and read(view)');
+
+// start() enqueues past highWaterMark (desiredSize -8), so no pull() happens
+// until read() drains the queue, at which point pull() observes desiredSize 8.
+promise_test(() => {
+ let pullCount = 0;
+
+ let controller;
+ let desiredSizeInStart;
+ let desiredSizeInPull;
+
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(new Uint8Array(16));
+ desiredSizeInStart = c.desiredSize;
+ controller = c;
+ },
+ pull() {
+ ++pullCount;
+
+ if (pullCount === 1) {
+ desiredSizeInPull = controller.desiredSize;
+ }
+ },
+ type: 'bytes'
+ }, {
+ highWaterMark: 8
+ });
+
+ return Promise.resolve().then(() => {
+ assert_equals(pullCount, 0, 'No pull as the queue was filled by start()');
+ assert_equals(desiredSizeInStart, -8, 'desiredSize after enqueue() in start()');
+
+ const reader = stream.getReader();
+
+ const promise = reader.read();
+ assert_equals(pullCount, 1, 'The first pull() should be made on read()');
+ assert_equals(desiredSizeInPull, 8, 'desiredSize in pull()');
+
+ return promise.then(result => {
+ assert_false(result.done, 'result.done');
+
+ const view = result.value;
+ assert_equals(view.constructor, Uint8Array, 'view.constructor');
+ assert_equals(view.buffer.byteLength, 16, 'view.buffer');
+ assert_equals(view.byteOffset, 0, 'view.byteOffset');
+ assert_equals(view.byteLength, 16, 'view.byteLength');
+ });
+ });
+}, 'ReadableStream with byte source: enqueue(), getReader(), then read()');
+
+// A pull-less push source: enqueue() issued after a read() is already pending
+// fulfills that read with the enqueued bytes.
+promise_test(() => {
+ let controller;
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader();
+
+ const promise = reader.read().then(result => {
+ assert_false(result.done);
+
+ const view = result.value;
+ assert_equals(view.constructor, Uint8Array);
+ assert_equals(view.buffer.byteLength, 1);
+ assert_equals(view.byteOffset, 0);
+ assert_equals(view.byteLength, 1);
+ });
+
+ controller.enqueue(new Uint8Array(1));
+
+ return promise;
+}, 'ReadableStream with byte source: Push source that doesn\'t understand pull signal');
+
+// A non-callable pull option must make the ReadableStream constructor throw.
+test(() => {
+ assert_throws_js(TypeError, () => new ReadableStream({
+ pull: 'foo',
+ type: 'bytes'
+ }), 'constructor should throw');
+}, 'ReadableStream with byte source: pull() function is not callable');
+
+// A Uint16Array chunk is surfaced to a default reader as a Uint8Array over the
+// same 32-byte buffer.
+promise_test(() => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(new Uint16Array(16));
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader();
+
+ return reader.read().then(result => {
+ assert_false(result.done);
+
+ const view = result.value;
+ assert_equals(view.constructor, Uint8Array);
+ assert_equals(view.buffer.byteLength, 32);
+ assert_equals(view.byteOffset, 0);
+ assert_equals(view.byteLength, 32);
+ });
+}, 'ReadableStream with byte source: enqueue() with Uint16Array, getReader(), then read()');
+
+// A BYOB read(view) of 8 bytes consumes half of a 16-byte chunk; a following
+// default read() returns the remaining 8 bytes at byteOffset 8 of the original
+// buffer.
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(16);
+ view[0] = 0x01;
+ view[8] = 0x02;
+ c.enqueue(view);
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const byobReader = stream.getReader({ mode: 'byob' });
+
+ return byobReader.read(new Uint8Array(8)).then(result => {
+ assert_false(result.done, 'done');
+
+ const view = result.value;
+ assert_equals(view.constructor, Uint8Array, 'value.constructor');
+ assert_equals(view.buffer.byteLength, 8, 'value.buffer.byteLength');
+ assert_equals(view.byteOffset, 0, 'value.byteOffset');
+ assert_equals(view.byteLength, 8, 'value.byteLength');
+ assert_equals(view[0], 0x01);
+
+ byobReader.releaseLock();
+
+ const reader = stream.getReader();
+
+ return reader.read();
+ }).then(result => {
+ assert_false(result.done, 'done');
+
+ const view = result.value;
+ assert_equals(view.constructor, Uint8Array, 'value.constructor');
+ assert_equals(view.buffer.byteLength, 16, 'value.buffer.byteLength');
+ assert_equals(view.byteOffset, 8, 'value.byteOffset');
+ assert_equals(view.byteLength, 8, 'value.byteLength');
+ assert_equals(view[0], 0x02);
+ });
+}, 'ReadableStream with byte source: enqueue(), read(view) partially, then read()');
+
+// enqueue() then close() after getReader(): the first read() yields the chunk,
+// the second read() resolves done with an undefined value.
+promise_test(t => {
+ let controller;
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader();
+
+ controller.enqueue(new Uint8Array(16));
+ controller.close();
+
+ return reader.read().then(result => {
+ assert_false(result.done, 'done');
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0, 'byteOffset');
+ assert_equals(view.byteLength, 16, 'byteLength');
+
+ return reader.read();
+ }).then(result => {
+ assert_true(result.done, 'done');
+ assert_equals(result.value, undefined, 'value');
+ });
+}, 'ReadableStream with byte source: getReader(), enqueue(), close(), then read()');
+
+// Same as the previous test, but enqueue()/close() happen inside start(),
+// before the reader is acquired.
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(new Uint8Array(16));
+ c.close();
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader();
+
+ return reader.read().then(result => {
+ assert_false(result.done, 'done');
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0, 'byteOffset');
+ assert_equals(view.byteLength, 16, 'byteLength');
+
+ return reader.read();
+ }).then(result => {
+ assert_true(result.done, 'done');
+ assert_equals(result.value, undefined, 'value');
+ });
+}, 'ReadableStream with byte source: enqueue(), close(), getReader(), then read()');
+
+// When pull() answers a default read() via enqueue(), controller.byobRequest
+// observed inside pull() must be null.
+promise_test(() => {
+ let controller;
+ let byobRequest;
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ controller.enqueue(new Uint8Array(16));
+ byobRequest = controller.byobRequest;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader();
+
+ return reader.read().then(result => {
+ assert_false(result.done, 'done');
+ assert_equals(result.value.byteLength, 16, 'byteLength');
+ assert_equals(byobRequest, null, 'byobRequest must be null');
+ });
+}, 'ReadableStream with byte source: Respond to pull() by enqueue()');
+
+// Three reads are queued; an enqueue() outside pull() answers the first one
+// without invoking pull(); the single pull() then enqueues the other two.
+// With highWaterMark 0 the observed desiredSize stays 0 throughout.
+promise_test(() => {
+ let pullCount = 0;
+
+ let controller;
+ let byobRequest;
+ const desiredSizes = [];
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ byobRequest = controller.byobRequest;
+ desiredSizes.push(controller.desiredSize);
+ controller.enqueue(new Uint8Array(1));
+ desiredSizes.push(controller.desiredSize);
+ controller.enqueue(new Uint8Array(1));
+ desiredSizes.push(controller.desiredSize);
+
+ ++pullCount;
+ },
+ type: 'bytes'
+ }, {
+ highWaterMark: 0
+ });
+
+ const reader = stream.getReader();
+
+ const p0 = reader.read();
+ const p1 = reader.read();
+ const p2 = reader.read();
+
+ // Respond to the first pull call.
+ controller.enqueue(new Uint8Array(1));
+
+ assert_equals(pullCount, 0, 'pullCount after the enqueue() outside pull');
+
+ return Promise.all([p0, p1, p2]).then(result => {
+ assert_equals(pullCount, 1, 'pullCount after completion of all read()s');
+
+ assert_equals(result[0].done, false, 'result[0].done');
+ assert_equals(result[0].value.byteLength, 1, 'result[0].value.byteLength');
+ assert_equals(result[1].done, false, 'result[1].done');
+ assert_equals(result[1].value.byteLength, 1, 'result[1].value.byteLength');
+ assert_equals(result[2].done, false, 'result[2].done');
+ assert_equals(result[2].value.byteLength, 1, 'result[2].value.byteLength');
+ assert_equals(byobRequest, null, 'byobRequest should be null');
+ assert_equals(desiredSizes[0], 0, 'desiredSize on pull should be 0');
+ assert_equals(desiredSizes[1], 0, 'desiredSize after 1st enqueue() should be 0');
+ assert_equals(desiredSizes[2], 0, 'desiredSize after 2nd enqueue() should be 0');
+ assert_equals(pullCount, 1, 'pull() should only be called once');
+ });
+}, 'ReadableStream with byte source: Respond to pull() by enqueue() asynchronously');
+
+// With highWaterMark 256, pull() is invoked repeatedly (4 times: three 1-byte
+// enqueues, then close). desiredSize observed at each pull stays 256, as each
+// chunk is handed straight to a pending read.
+promise_test(() => {
+ let pullCount = 0;
+
+ let byobRequest;
+ const desiredSizes = [];
+
+ const stream = new ReadableStream({
+ pull(c) {
+ byobRequest = c.byobRequest;
+ desiredSizes.push(c.desiredSize);
+
+ if (pullCount < 3) {
+ c.enqueue(new Uint8Array(1));
+ } else {
+ c.close();
+ }
+
+ ++pullCount;
+ },
+ type: 'bytes'
+ }, {
+ highWaterMark: 256
+ });
+
+ const reader = stream.getReader();
+
+ const p0 = reader.read();
+ const p1 = reader.read();
+ const p2 = reader.read();
+
+ assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream');
+
+ return Promise.all([p0, p1, p2]).then(result => {
+ assert_equals(pullCount, 4, 'pullCount after completion of all read()s');
+
+ assert_equals(result[0].done, false, 'result[0].done');
+ assert_equals(result[0].value.byteLength, 1, 'result[0].value.byteLength');
+ assert_equals(result[1].done, false, 'result[1].done');
+ assert_equals(result[1].value.byteLength, 1, 'result[1].value.byteLength');
+ assert_equals(result[2].done, false, 'result[2].done');
+ assert_equals(result[2].value.byteLength, 1, 'result[2].value.byteLength');
+ assert_equals(byobRequest, null, 'byobRequest should be null');
+ assert_equals(desiredSizes[0], 256, 'desiredSize on pull should be 256');
+ assert_equals(desiredSizes[1], 256, 'desiredSize after 1st enqueue() should be 256');
+ assert_equals(desiredSizes[2], 256, 'desiredSize after 2nd enqueue() should be 256');
+ assert_equals(desiredSizes[3], 256, 'desiredSize after 3rd enqueue() should be 256');
+ });
+}, 'ReadableStream with byte source: Respond to multiple pull() by separate enqueue()');
+
+// After respond(), controller.byobRequest becomes null and the previous
+// request's view is nulled out.
+promise_test(() => {
+ let controller;
+
+ let pullCount = 0;
+ const byobRequestDefined = [];
+ let byobRequestViewDefined;
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ byobRequestDefined.push(controller.byobRequest !== null);
+ const initialByobRequest = controller.byobRequest;
+
+ const view = controller.byobRequest.view;
+ view[0] = 0x01;
+ controller.byobRequest.respond(1);
+
+ byobRequestDefined.push(controller.byobRequest !== null);
+ byobRequestViewDefined = initialByobRequest.view !== null;
+
+ ++pullCount;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.read(new Uint8Array(1)).then(result => {
+ assert_false(result.done, 'result.done');
+ assert_equals(result.value.byteLength, 1, 'result.value.byteLength');
+ assert_equals(result.value[0], 0x01, 'result.value[0]');
+ assert_equals(pullCount, 1, 'pull() should be called only once');
+ assert_true(byobRequestDefined[0], 'byobRequest must not be null before respond()');
+ assert_false(byobRequestDefined[1], 'byobRequest must be null after respond()');
+ assert_false(byobRequestViewDefined, 'view of initial byobRequest must be null after respond()');
+ });
+}, 'ReadableStream with byte source: read(view), then respond()');
+
+// respondWithNewView() with a view over a transferred ArrayBuffer behaves like
+// respond(): byobRequest and its view are invalidated afterwards.
+promise_test(() => {
+ let controller;
+
+ let pullCount = 0;
+ const byobRequestDefined = [];
+ let byobRequestViewDefined;
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ async pull() {
+ byobRequestDefined.push(controller.byobRequest !== null);
+ const initialByobRequest = controller.byobRequest;
+
+ const transferredView = await transferArrayBufferView(controller.byobRequest.view);
+ transferredView[0] = 0x01;
+ controller.byobRequest.respondWithNewView(transferredView);
+
+ byobRequestDefined.push(controller.byobRequest !== null);
+ byobRequestViewDefined = initialByobRequest.view !== null;
+
+ ++pullCount;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.read(new Uint8Array(1)).then(result => {
+ assert_false(result.done, 'result.done');
+ assert_equals(result.value.byteLength, 1, 'result.value.byteLength');
+ assert_equals(result.value[0], 0x01, 'result.value[0]');
+ assert_equals(pullCount, 1, 'pull() should be called only once');
+ assert_true(byobRequestDefined[0], 'byobRequest must not be null before respondWithNewView()');
+ assert_false(byobRequestDefined[1], 'byobRequest must be null after respondWithNewView()');
+ assert_false(byobRequestViewDefined, 'view of initial byobRequest must be null after respondWithNewView()');
+ });
+}, 'ReadableStream with byte source: read(view), then respondWithNewView() with a transferred ArrayBuffer');
+
+// respond() with bytesWritten larger than the BYOB view (2 > 1) must throw a
+// RangeError; a subsequent respond(1) on the same request still succeeds.
+promise_test(() => {
+ let controller;
+ let byobRequestWasDefined;
+ let incorrectRespondException;
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ byobRequestWasDefined = controller.byobRequest !== null;
+
+ try {
+ controller.byobRequest.respond(2);
+ } catch (e) {
+ incorrectRespondException = e;
+ }
+
+ controller.byobRequest.respond(1);
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.read(new Uint8Array(1)).then(() => {
+ assert_true(byobRequestWasDefined, 'byobRequest should be non-null');
+ assert_not_equals(incorrectRespondException, undefined, 'respond() must throw');
+ assert_equals(incorrectRespondException.name, 'RangeError', 'respond() must throw a RangeError');
+ });
+}, 'ReadableStream with byte source: read(view), then respond() with too big value');
+
+// respond(3) against a 2-element Uint16Array request fulfills 2 bytes (0x0102)
+// and queues the 1-byte remainder (0x03), which is then served to the next
+// read without another pull().
+promise_test(() => {
+ let pullCount = 0;
+
+ let controller;
+ let byobRequest;
+ let viewInfo;
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ ++pullCount;
+
+ byobRequest = controller.byobRequest;
+ const view = byobRequest.view;
+ viewInfo = extractViewInfo(view);
+
+ view[0] = 0x01;
+ view[1] = 0x02;
+ view[2] = 0x03;
+
+ controller.byobRequest.respond(3);
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.read(new Uint16Array(2)).then(result => {
+ assert_equals(pullCount, 1);
+
+ assert_false(result.done, 'done');
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0, 'byteOffset');
+ assert_equals(view.byteLength, 2, 'byteLength');
+
+ const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength);
+ assert_equals(dataView.getUint16(0), 0x0102);
+
+ return reader.read(new Uint8Array(1));
+ }).then(result => {
+ assert_equals(pullCount, 1);
+ assert_not_equals(byobRequest, null, 'byobRequest must not be null');
+ assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 4, 'view.buffer.byteLength should be 4');
+ assert_equals(viewInfo.byteOffset, 0, 'view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 4, 'view.byteLength should be 4');
+
+ assert_false(result.done, 'done');
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0, 'byteOffset');
+ assert_equals(view.byteLength, 1, 'byteLength');
+
+ assert_equals(view[0], 0x03);
+ });
+}, 'ReadableStream with byte source: respond(3) to read(view) with 2 element Uint16Array enqueues the 1 byte ' +
+ 'remainder');
+
+// An exact-size BYOB read is served entirely from the queued chunk; pull() is
+// never called.
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(controller) {
+ const view = new Uint8Array(16);
+ view[15] = 0x01;
+ controller.enqueue(view);
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.read(new Uint8Array(16)).then(result => {
+ assert_false(result.done);
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0);
+ assert_equals(view.byteLength, 16);
+ assert_equals(view[15], 0x01);
+ });
+}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view)');
+
+// reader.cancel(reason) through a default reader invokes the underlying
+// cancel() exactly once with the same reason and fulfills with undefined.
+promise_test(t => {
+ let cancelCount = 0;
+ let reason;
+
+ const passedReason = new TypeError('foo');
+
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(new Uint8Array(16));
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ cancel(r) {
+ if (cancelCount === 0) {
+ reason = r;
+ }
+
+ ++cancelCount;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader();
+
+ return reader.cancel(passedReason).then(result => {
+ assert_equals(result, undefined);
+ assert_equals(cancelCount, 1);
+ assert_equals(reason, passedReason, 'reason should equal the passed reason');
+ });
+}, 'ReadableStream with byte source: enqueue(), getReader(), then cancel() (mode = not BYOB)');
+
+// Same as the previous test but through a BYOB reader.
+promise_test(t => {
+ let cancelCount = 0;
+ let reason;
+
+ const passedReason = new TypeError('foo');
+
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(new Uint8Array(16));
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ cancel(r) {
+ if (cancelCount === 0) {
+ reason = r;
+ }
+
+ ++cancelCount;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.cancel(passedReason).then(result => {
+ assert_equals(result, undefined);
+ assert_equals(cancelCount, 1);
+ assert_equals(reason, passedReason, 'reason should equal the passed reason');
+ });
+}, 'ReadableStream with byte source: enqueue(), getReader(), then cancel() (mode = BYOB)');
+
+// cancel() while a read(view) is pending: the read settles done/undefined, and
+// cancel() fulfills with undefined even though the underlying cancel() returns
+// a non-undefined value ('bar').
+promise_test(t => {
+ let cancelCount = 0;
+ let reason;
+
+ const passedReason = new TypeError('foo');
+
+ let controller;
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ cancel(r) {
+ if (cancelCount === 0) {
+ reason = r;
+ }
+
+ ++cancelCount;
+
+ return 'bar';
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const readPromise = reader.read(new Uint8Array(1)).then(result => {
+ assert_true(result.done, 'result.done');
+ assert_equals(result.value, undefined, 'result.value');
+ });
+
+ const cancelPromise = reader.cancel(passedReason).then(result => {
+ assert_equals(result, undefined, 'cancel() return value should be fulfilled with undefined');
+ assert_equals(cancelCount, 1, 'cancel() should be called only once');
+ assert_equals(reason, passedReason, 'reason should equal the passed reason');
+ });
+
+ return Promise.all([readPromise, cancelPromise]);
+}, 'ReadableStream with byte source: getReader(), read(view), then cancel()');
+
+// cancel() while the BYOB request is partially filled (the 1-byte enqueue()
+// shrank the request view from byteLength 2 to 1): the pending read settles
+// done/undefined and the partially written byte is never delivered.
+promise_test(() => {
+ let pullCount = 0;
+
+ let controller;
+ let byobRequest;
+ const viewInfos = [];
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ byobRequest = controller.byobRequest;
+
+ viewInfos.push(extractViewInfo(controller.byobRequest.view));
+ controller.enqueue(new Uint8Array(1));
+ viewInfos.push(extractViewInfo(controller.byobRequest.view));
+
+ ++pullCount;
+ },
+ type: 'bytes'
+ });
+
+ return Promise.resolve().then(() => {
+ assert_equals(pullCount, 0, 'No pull() as no read(view) yet');
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const promise = reader.read(new Uint16Array(1)).then(result => {
+ assert_true(result.done, 'result.done');
+ assert_equals(result.value, undefined, 'result.value');
+ });
+
+ assert_equals(pullCount, 1, '1 pull() should have been made in response to partial fill by enqueue()');
+ assert_not_equals(byobRequest, null, 'byobRequest should not be null');
+ assert_equals(viewInfos[0].byteLength, 2, 'byteLength before enqueue() should be 2');
+ assert_equals(viewInfos[1].byteLength, 1, 'byteLength after enqueue() should be 1');
+
+ reader.cancel();
+
+ assert_equals(pullCount, 1, 'pull() should only be called once');
+ return promise;
+ });
+}, 'ReadableStream with byte source: cancel() with partially filled pending pull() request');
+
+promise_test(() => {
+ let controller;
+ let pullCalled = false;
+
+ const stream = new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(8);
+ view[7] = 0x01;
+ c.enqueue(view);
+
+ controller = c;
+ },
+ pull() {
+ pullCalled = true;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const buffer = new ArrayBuffer(16);
+
+ return reader.read(new Uint8Array(buffer, 8, 8)).then(result => {
+ assert_false(result.done);
+
+ assert_false(pullCalled, 'pull() must not have been called');
+
+ const view = result.value;
+ assert_equals(view.constructor, Uint8Array);
+ assert_equals(view.buffer.byteLength, 16);
+ assert_equals(view.byteOffset, 8);
+ assert_equals(view.byteLength, 8);
+ assert_equals(view[7], 0x01);
+ });
+}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view) where view.buffer is not fully ' +
+ 'covered by view');
+
+promise_test(() => {
+ let controller;
+ let pullCalled = false;
+
+ const stream = new ReadableStream({
+ start(c) {
+ let view;
+
+ view = new Uint8Array(16);
+ view[15] = 123;
+ c.enqueue(view);
+
+ view = new Uint8Array(8);
+ view[7] = 111;
+ c.enqueue(view);
+
+ controller = c;
+ },
+ pull() {
+ pullCalled = true;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.read(new Uint8Array(24)).then(result => {
+ assert_false(result.done, 'done');
+
+ assert_false(pullCalled, 'pull() must not have been called');
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0, 'byteOffset');
+ assert_equals(view.byteLength, 24, 'byteLength');
+ assert_equals(view[15], 123, 'Contents are set from the first chunk');
+ assert_equals(view[23], 111, 'Contents are set from the second chunk');
+ });
+}, 'ReadableStream with byte source: Multiple enqueue(), getReader(), then read(view)');
+
+// read(view) with a 24-byte view against a single 16-byte chunk returns just
+// the 16 available bytes; pull() is not called.
+promise_test(() => {
+ let pullCalled = false;
+
+ const stream = new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(16);
+ view[15] = 0x01;
+ c.enqueue(view);
+ },
+ pull() {
+ pullCalled = true;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.read(new Uint8Array(24)).then(result => {
+ assert_false(result.done);
+
+ assert_false(pullCalled, 'pull() must not have been called');
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0);
+ assert_equals(view.byteLength, 16);
+ assert_equals(view[15], 0x01);
+ });
+}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view) with a bigger view');
+
+// Two consecutive 8-byte BYOB reads split one queued 16-byte chunk; pull() is
+// never needed.
+promise_test(() => {
+ let pullCalled = false;
+
+ const stream = new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(16);
+ view[7] = 0x01;
+ view[15] = 0x02;
+ c.enqueue(view);
+ },
+ pull() {
+ pullCalled = true;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.read(new Uint8Array(8)).then(result => {
+ assert_false(result.done, 'done');
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0);
+ assert_equals(view.byteLength, 8);
+ assert_equals(view[7], 0x01);
+
+ return reader.read(new Uint8Array(8));
+ }).then(result => {
+ assert_false(result.done, 'done');
+
+ assert_false(pullCalled, 'pull() must not have been called');
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0);
+ assert_equals(view.byteLength, 8);
+ assert_equals(view[7], 0x02);
+ });
+}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view) with smaller views');
+
+// A queued 1-byte chunk partially fills a Uint16Array request; pull() then
+// sees a Uint8Array byobRequest view at byteOffset 1 over the 2-byte buffer
+// and responds with the second byte, completing the read (0xffaa).
+promise_test(() => {
+ let controller;
+ let viewInfo;
+
+ const stream = new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(1);
+ view[0] = 0xff;
+ c.enqueue(view);
+
+ controller = c;
+ },
+ pull() {
+ if (controller.byobRequest === null) {
+ return;
+ }
+
+ const view = controller.byobRequest.view;
+ viewInfo = extractViewInfo(view);
+
+ view[0] = 0xaa;
+ controller.byobRequest.respond(1);
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.read(new Uint16Array(1)).then(result => {
+ assert_false(result.done);
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0);
+ assert_equals(view.byteLength, 2);
+
+ const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength);
+ assert_equals(dataView.getUint16(0), 0xffaa);
+
+ assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 2, 'view.buffer.byteLength should be 2');
+ assert_equals(viewInfo.byteOffset, 1, 'view.byteOffset should be 1');
+ assert_equals(viewInfo.byteLength, 1, 'view.byteLength should be 1');
+ });
+}, 'ReadableStream with byte source: enqueue() 1 byte, getReader(), then read(view) with Uint16Array');
+
+// 3 queued bytes against a 2-element Uint16Array: the first read gets 2 bytes
+// (0x0100); the 1-byte remainder (0x02) plus the byte written in pull() (0x03)
+// fulfill the next Uint16Array(1) read as 0x0203.
+promise_test(() => {
+ let pullCount = 0;
+
+ let controller;
+ let byobRequest;
+ let viewInfo;
+ let desiredSize;
+
+ const stream = new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(3);
+ view[0] = 0x01;
+ view[2] = 0x02;
+ c.enqueue(view);
+
+ controller = c;
+ },
+ pull() {
+ byobRequest = controller.byobRequest;
+
+ const view = controller.byobRequest.view;
+
+ viewInfo = extractViewInfo(view);
+
+ view[0] = 0x03;
+ controller.byobRequest.respond(1);
+
+ desiredSize = controller.desiredSize;
+
+ ++pullCount;
+ },
+ type: 'bytes'
+ });
+
+ // Wait for completion of the start method to be reflected.
+ return Promise.resolve().then(() => {
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const promise = reader.read(new Uint16Array(2)).then(result => {
+ assert_false(result.done, 'done');
+
+ const view = result.value;
+ assert_equals(view.constructor, Uint16Array, 'constructor');
+ assert_equals(view.buffer.byteLength, 4, 'buffer.byteLength');
+ assert_equals(view.byteOffset, 0, 'byteOffset');
+ assert_equals(view.byteLength, 2, 'byteLength');
+
+ const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength);
+ assert_equals(dataView.getUint16(0), 0x0100, 'contents are set');
+
+ const p = reader.read(new Uint16Array(1));
+
+ assert_equals(pullCount, 1);
+
+ return p;
+ }).then(result => {
+ assert_false(result.done, 'done');
+
+ const view = result.value;
+ assert_equals(view.buffer.byteLength, 2, 'buffer.byteLength');
+ assert_equals(view.byteOffset, 0, 'byteOffset');
+ assert_equals(view.byteLength, 2, 'byteLength');
+
+ const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength);
+ assert_equals(dataView.getUint16(0), 0x0203, 'contents are set');
+
+ assert_not_equals(byobRequest, null, 'byobRequest must not be null');
+ assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 2, 'view.buffer.byteLength should be 2');
+ assert_equals(viewInfo.byteOffset, 1, 'view.byteOffset should be 1');
+ assert_equals(viewInfo.byteLength, 1, 'view.byteLength should be 1');
+ assert_equals(desiredSize, 0, 'desiredSize should be zero');
+ });
+
+ assert_equals(pullCount, 0);
+
+ return promise;
+ });
+}, 'ReadableStream with byte source: enqueue() 3 byte, getReader(), then read(view) with 2-element Uint16Array');
+
+// A stream close()-d with 1 leftover byte cannot fulfill a Uint16Array
+// read(view); both the read and reader.closed must reject with TypeError.
+promise_test(t => {
+ const stream = new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(1);
+ view[0] = 0xff;
+ c.enqueue(view);
+ c.close();
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+
+ return promise_rejects_js(t, TypeError, reader.read(new Uint16Array(1)), 'read(view) must fail')
+ .then(() => promise_rejects_js(t, TypeError, reader.closed, 'reader.closed should reject'));
+}, 'ReadableStream with byte source: read(view) with Uint16Array on close()-d stream with 1 byte enqueue()-d must ' +
+ 'fail');
+
+// The 1 queued byte partially fills the pending Uint16Array read; close() must
+// then throw, and the stream ends up errored (read and closed both reject).
+promise_test(t => {
+ let controller;
+
+ const stream = new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(1);
+ view[0] = 0xff;
+ c.enqueue(view);
+
+ controller = c;
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const readPromise = reader.read(new Uint16Array(1));
+
+ assert_throws_js(TypeError, () => controller.close(), 'controller.close() must throw');
+
+ return promise_rejects_js(t, TypeError, readPromise, 'read(view) must fail')
+ .then(() => promise_rejects_js(t, TypeError, reader.closed, 'reader.closed must reject'));
+}, 'ReadableStream with byte source: A stream must be errored if close()-d before fulfilling read(view) with ' +
+ 'Uint16Array');
+
+// Calling close() twice must throw a TypeError, even though a queued chunk
+// keeps the stream from having reached the closed state yet.
+test(() => {
+ let controller;
+
+ new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ type: 'bytes'
+ });
+
+ // Enqueue a chunk so that the stream doesn't get closed. This is to check duplicate close() calls are rejected
+ // even if the stream has not yet entered the closed state.
+ const view = new Uint8Array(1);
+ controller.enqueue(view);
+ controller.close();
+
+ assert_throws_js(TypeError, () => controller.close(), 'controller.close() must throw');
+}, 'ReadableStream with byte source: Throw if close()-ed more than once');
+
+test(() => {
+ let controller;
+
+ new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ type: 'bytes'
+ });
+
+ // Enqueue a chunk so that the stream doesn't get closed. This is to check enqueue() after close() is rejected
+ // even if the stream has not yet entered the closed state.
+ const view = new Uint8Array(1);
+ controller.enqueue(view);
+ controller.close();
+
+ assert_throws_js(TypeError, () => controller.enqueue(view), 'controller.close() must throw');
+}, 'ReadableStream with byte source: Throw on enqueue() after close()');
+
+// pull() fills the whole request via respond(16) and then close(): the first
+// read yields the 16 bytes; the second read resolves done with a zero-length
+// view.
+promise_test(() => {
+ let controller;
+ let byobRequest;
+ let viewInfo;
+
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ byobRequest = controller.byobRequest;
+ const view = controller.byobRequest.view;
+ viewInfo = extractViewInfo(view);
+
+ view[15] = 0x01;
+ controller.byobRequest.respond(16);
+ controller.close();
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ return reader.read(new Uint8Array(16)).then(result => {
+ assert_false(result.done);
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0);
+ assert_equals(view.byteLength, 16);
+ assert_equals(view[15], 0x01);
+
+ return reader.read(new Uint8Array(16));
+ }).then(result => {
+ assert_true(result.done);
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0);
+ assert_equals(view.byteLength, 0);
+
+ assert_not_equals(byobRequest, null, 'byobRequest must not be null');
+ assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 16, 'view.buffer.byteLength should be 16');
+ assert_equals(viewInfo.byteOffset, 0, 'view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 16, 'view.byteLength should be 16');
+ });
+}, 'ReadableStream with byte source: read(view), then respond() and close() in pull()');
+
// Fill a single 4-byte BYOB request (from read(new Uint32Array(1))) with four
// respond(1) calls inside one pull(). extractViewInfo() is a helper defined
// earlier in this file that snapshots a view's constructor/offset/length.
promise_test(() => {
  let pullCount = 0;

  let controller;
  const viewInfos = [];
  const viewInfosAfterRespond = [];

  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
    pull() {
      if (controller.byobRequest === null) {
        return;
      }

      // Four 1-byte responds complete the 4-byte request within this single pull().
      for (let i = 0; i < 4; ++i) {
        const view = controller.byobRequest.view;
        viewInfos.push(extractViewInfo(view));

        view[0] = 0x01;
        controller.byobRequest.respond(1);
        viewInfosAfterRespond.push(extractViewInfo(view));
      }

      ++pullCount;
    },
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });

  return reader.read(new Uint32Array(1)).then(result => {
    assert_false(result.done, 'result.done');

    const view = result.value;
    assert_equals(view.byteOffset, 0, 'result.value.byteOffset');
    assert_equals(view.byteLength, 4, 'result.value.byteLength');
    assert_equals(view[0], 0x01010101, 'result.value[0]');

    assert_equals(pullCount, 1, 'pull() should only be called once');

    for (let i = 0; i < 4; ++i) {
      assert_equals(viewInfos[i].constructor, Uint8Array, 'view.constructor should be Uint8Array');
      assert_equals(viewInfos[i].bufferByteLength, 4, 'view.buffer.byteLength should be 4');

      // Each respond(1) advances the request's view by one byte.
      assert_equals(viewInfos[i].byteOffset, i, 'view.byteOffset should be i');
      assert_equals(viewInfos[i].byteLength, 4 - i, 'view.byteLength should be 4 - i');

      // respond() transfers the request's buffer, detaching previously obtained views.
      assert_equals(viewInfosAfterRespond[i].bufferByteLength, 0, 'view.buffer should be transferred after respond()');
    }
  });
}, 'ReadableStream with byte source: read(view) with Uint32Array, then fill it by multiple respond() calls');

// Same scenario as above, but the 4-byte request is filled by four 1-byte
// enqueue() calls; enqueue() must likewise transfer (detach) the outstanding
// byobRequest view's buffer each time.
promise_test(() => {
  let pullCount = 0;

  let controller;
  const viewInfos = [];
  const viewInfosAfterEnqueue = [];

  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
    pull() {
      if (controller.byobRequest === null) {
        return;
      }

      for (let i = 0; i < 4; ++i) {
        const view = controller.byobRequest.view;
        viewInfos.push(extractViewInfo(view));

        controller.enqueue(new Uint8Array([0x01]));
        viewInfosAfterEnqueue.push(extractViewInfo(view));
      }

      ++pullCount;
    },
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });

  return reader.read(new Uint32Array(1)).then(result => {
    assert_false(result.done, 'result.done');

    const view = result.value;
    assert_equals(view.byteOffset, 0, 'result.value.byteOffset');
    assert_equals(view.byteLength, 4, 'result.value.byteLength');
    assert_equals(view[0], 0x01010101, 'result.value[0]');

    assert_equals(pullCount, 1, 'pull() should only be called once');

    for (let i = 0; i < 4; ++i) {
      assert_equals(viewInfos[i].constructor, Uint8Array, 'view.constructor should be Uint8Array');
      assert_equals(viewInfos[i].bufferByteLength, 4, 'view.buffer.byteLength should be 4');

      assert_equals(viewInfos[i].byteOffset, i, 'view.byteOffset should be i');
      assert_equals(viewInfos[i].byteLength, 4 - i, 'view.byteLength should be 4 - i');

      assert_equals(viewInfosAfterEnqueue[i].bufferByteLength, 0, 'view.buffer should be transferred after enqueue()');
    }
  });
}, 'ReadableStream with byte source: read(view) with Uint32Array, then fill it by multiple enqueue() calls');
+
// Two default-reader read()s queued before any data arrives; the first is
// satisfied synchronously here by enqueue(new Uint8Array(1)) below, the second
// by the enqueue(new Uint8Array(2)) made inside p0's fulfillment handler.
// The pullCount assertions pin the exact scheduling: no pull until the start
// promise settles, and exactly one pull overall.
promise_test(() => {
  let pullCount = 0;

  let controller;
  let byobRequest;

  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
    pull() {
      byobRequest = controller.byobRequest;

      ++pullCount;
    },
    type: 'bytes'
  });

  const reader = stream.getReader();

  const p0 = reader.read().then(result => {
    assert_equals(pullCount, 1);

    controller.enqueue(new Uint8Array(2));

    // Since the queue has data no less than HWM, no more pull.
    assert_equals(pullCount, 1);

    assert_false(result.done);

    const view = result.value;
    assert_equals(view.constructor, Uint8Array);
    assert_equals(view.buffer.byteLength, 1);
    assert_equals(view.byteOffset, 0);
    assert_equals(view.byteLength, 1);
  });

  assert_equals(pullCount, 0, 'No pull should have been made since the startPromise has not yet been handled');

  const p1 = reader.read().then(result => {
    assert_equals(pullCount, 1);

    assert_false(result.done);

    const view = result.value;
    assert_equals(view.constructor, Uint8Array);
    assert_equals(view.buffer.byteLength, 2);
    assert_equals(view.byteOffset, 0);
    assert_equals(view.byteLength, 2);

    // Default reader without autoAllocateChunkSize: pull() sees no BYOB request.
    assert_equals(byobRequest, null, 'byobRequest must be null');
  });

  assert_equals(pullCount, 0, 'No pull should have been made since the startPromise has not yet been handled');

  controller.enqueue(new Uint8Array(1));

  assert_equals(pullCount, 0, 'No pull should have been made since the startPromise has not yet been handled');

  return Promise.all([p0, p1]);
}, 'ReadableStream with byte source: read() twice, then enqueue() twice');
+
// Two pending read(view)s, then close() + respond(0): both reads must settle
// as done with zero-length views backed by their own (transferred-back) buffers.
promise_test(t => {
  let controller;

  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
    pull: t.unreached_func('pull() should not be called'),
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });

  const p0 = reader.read(new Uint8Array(16)).then(result => {
    assert_true(result.done, '1st read: done');

    const view = result.value;
    assert_equals(view.buffer.byteLength, 16, '1st read: buffer.byteLength');
    assert_equals(view.byteOffset, 0, '1st read: byteOffset');
    assert_equals(view.byteLength, 0, '1st read: byteLength');
  });

  const p1 = reader.read(new Uint8Array(32)).then(result => {
    assert_true(result.done, '2nd read: done');

    const view = result.value;
    assert_equals(view.buffer.byteLength, 32, '2nd read: buffer.byteLength');
    assert_equals(view.byteOffset, 0, '2nd read: byteOffset');
    assert_equals(view.byteLength, 0, '2nd read: byteLength');
  });

  controller.close();
  controller.byobRequest.respond(0);

  return Promise.all([p0, p1]);
}, 'ReadableStream with byte source: Multiple read(view), close() and respond()');

// A single 24-byte enqueue() against two pending 16-byte read(view)s: the first
// read takes 16 bytes, the second gets the remaining 8 in a partially-filled view.
promise_test(t => {
  let controller;

  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
    pull: t.unreached_func('pull() should not be called'),
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });

  const p0 = reader.read(new Uint8Array(16)).then(result => {
    assert_false(result.done, '1st read: done');

    const view = result.value;
    assert_equals(view.buffer.byteLength, 16, '1st read: buffer.byteLength');
    assert_equals(view.byteOffset, 0, '1st read: byteOffset');
    assert_equals(view.byteLength, 16, '1st read: byteLength');
  });

  const p1 = reader.read(new Uint8Array(16)).then(result => {
    assert_false(result.done, '2nd read: done');

    const view = result.value;
    assert_equals(view.buffer.byteLength, 16, '2nd read: buffer.byteLength');
    assert_equals(view.byteOffset, 0, '2nd read: byteOffset');
    assert_equals(view.byteLength, 8, '2nd read: byteLength');
  });

  controller.enqueue(new Uint8Array(24));

  return Promise.all([p0, p1]);
}, 'ReadableStream with byte source: Multiple read(view), big enqueue()');

// Pump 1024 enqueued bytes through repeated 7-byte read(view) calls until the
// stream closes; the total byte count must add up exactly.
promise_test(t => {
  let controller;

  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
    pull: t.unreached_func('pull() should not be called'),
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });

  let bytesRead = 0;

  function pump() {
    return reader.read(new Uint8Array(7)).then(result => {
      if (result.done) {
        assert_equals(bytesRead, 1024);
        return undefined;
      }

      bytesRead += result.value.byteLength;

      return pump();
    });
  }
  const promise = pump();

  controller.enqueue(new Uint8Array(512));
  controller.enqueue(new Uint8Array(512));
  controller.close();

  return promise;
}, 'ReadableStream with byte source: Multiple read(view) and multiple enqueue()');
+
// A BYOB reader's read() must reject with a TypeError for any argument that is
// not a genuine ArrayBufferView: undefined, a plain object, or a duck-typed
// object carrying buffer/byteOffset/byteLength properties.
promise_test(async t => {
  let didPull = false;
  const stream = new ReadableStream({
    pull() {
      didPull = true;
    },
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });

  // read() with no view at all: reject before ever pulling from the source.
  await promise_rejects_js(t, TypeError, reader.read(), 'read() must fail');
  assert_false(didPull, 'pull() must not have been called');
}, 'ReadableStream with byte source: read(view) with passing undefined as view must fail');

promise_test(t => {
  const byteStream = new ReadableStream({ type: 'bytes' });
  const byobReader = byteStream.getReader({ mode: 'byob' });

  return promise_rejects_js(t, TypeError, byobReader.read({}), 'read(view) must fail');
}, 'ReadableStream with byte source: read(view) with passing an empty object as view must fail');

promise_test(t => {
  const byteStream = new ReadableStream({ type: 'bytes' });
  const byobReader = byteStream.getReader({ mode: 'byob' });

  // Structurally view-like, but not a real typed array / DataView.
  const fakeView = { buffer: new ArrayBuffer(10), byteOffset: 0, byteLength: 10 };
  return promise_rejects_js(t, TypeError, byobReader.read(fakeView), 'read(view) must fail');
}, 'ReadableStream with byte source: Even read(view) with passing ArrayBufferView like object as view must fail');
+
// read() on a stream errored during start() must reject with the stored error.
// error1 is a shared error object defined earlier in this file.
promise_test(t => {
  const stream = new ReadableStream({
    start(c) {
      c.error(error1);
    },
    pull: t.unreached_func('pull() should not be called'),
    type: 'bytes'
  });

  const reader = stream.getReader();

  return promise_rejects_exactly(t, error1, reader.read(), 'read() must fail');
}, 'ReadableStream with byte source: read() on an errored stream');

// error() called while a default-reader read() is pending must reject it.
promise_test(t => {
  let controller;

  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
    type: 'bytes'
  });

  const reader = stream.getReader();

  const promise = promise_rejects_exactly(t, error1, reader.read(), 'read() must fail');

  controller.error(error1);

  return promise;
}, 'ReadableStream with byte source: read(), then error()');

// As above, but through the BYOB reader's read(view) path.
promise_test(t => {
  const stream = new ReadableStream({
    start(c) {
      c.error(error1);
    },
    pull: t.unreached_func('pull() should not be called'),
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });

  return promise_rejects_exactly(t, error1, reader.read(new Uint8Array(1)), 'read() must fail');
}, 'ReadableStream with byte source: read(view) on an errored stream');

// error() called while a read(view) is pending must reject it.
promise_test(t => {
  let controller;

  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });

  const promise = promise_rejects_exactly(t, error1, reader.read(new Uint8Array(1)), 'read() must fail');

  controller.error(error1);

  return promise;
}, 'ReadableStream with byte source: read(view), then error()');
+
// An exception thrown from pull() (triggered by a default-reader read()) must
// error the stream with that exception; with no BYOB read pending and no
// autoAllocateChunkSize, byobRequest observed inside pull() is null.
promise_test(t => {
  let controller;
  let byobRequest;

  const testError = new TypeError('foo');

  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
    pull() {
      byobRequest = controller.byobRequest;
      throw testError;
    },
    type: 'bytes'
  });

  const reader = stream.getReader();

  const promise = promise_rejects_exactly(t, testError, reader.read(), 'read() must fail');
  return promise_rejects_exactly(t, testError, promise.then(() => reader.closed))
    .then(() => assert_equals(byobRequest, null, 'byobRequest must be null'));
}, 'ReadableStream with byte source: Throwing in pull function must error the stream');

// If pull() errors the stream itself and then throws, the thrown value is
// ignored; the explicit error1 wins for both read() and closed.
promise_test(t => {
  let byobRequest;

  const stream = new ReadableStream({
    pull(controller) {
      byobRequest = controller.byobRequest;
      controller.error(error1);
      throw new TypeError('foo');
    },
    type: 'bytes'
  });

  const reader = stream.getReader();

  return promise_rejects_exactly(t, error1, reader.read(), 'read() must fail')
    .then(() => promise_rejects_exactly(t, error1, reader.closed, 'closed must fail'))
    .then(() => assert_equals(byobRequest, null, 'byobRequest must be null'));
}, 'ReadableStream with byte source: Throwing in pull in response to read() must be ignored if the stream is ' +
   'errored in it');

// Same as the first test but via read(view): here a byobRequest does exist
// inside pull(), because the BYOB read supplies a buffer.
promise_test(t => {
  let byobRequest;

  const testError = new TypeError('foo');

  const stream = new ReadableStream({
    pull(controller) {
      byobRequest = controller.byobRequest;
      throw testError;
    },
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });

  return promise_rejects_exactly(t, testError, reader.read(new Uint8Array(1)), 'read(view) must fail')
    .then(() => promise_rejects_exactly(t, testError, reader.closed, 'reader.closed must reject'))
    .then(() => assert_not_equals(byobRequest, null, 'byobRequest must not be null'));
}, 'ReadableStream with byte source: Throwing in pull in response to read(view) function must error the stream');

// And the error()-then-throw variant via read(view): explicit error1 wins.
promise_test(t => {
  let byobRequest;

  const stream = new ReadableStream({
    pull(controller) {
      byobRequest = controller.byobRequest;
      controller.error(error1);
      throw new TypeError('foo');
    },
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });

  return promise_rejects_exactly(t, error1, reader.read(new Uint8Array(1)), 'read(view) must fail')
    .then(() => promise_rejects_exactly(t, error1, reader.closed, 'closed must fail'))
    .then(() => assert_not_equals(byobRequest, null, 'byobRequest must not be null'));
}, 'ReadableStream with byte source: Throwing in pull in response to read(view) must be ignored if the stream is ' +
   'errored in it');
+
// A byobRequest is single-use: a second respond() on the same request object
// (after the first already settled the read) must throw a TypeError.
promise_test(() => {
  let byobRequest;
  const rs = new ReadableStream({
    pull(controller) {
      byobRequest = controller.byobRequest;
      byobRequest.respond(4);
    },
    type: 'bytes'
  });
  const reader = rs.getReader({ mode: 'byob' });
  const view = new Uint8Array(16);
  return reader.read(view).then(() => {
    assert_throws_js(TypeError, () => byobRequest.respond(4), 'respond() should throw a TypeError');
  });
}, 'calling respond() twice on the same byobRequest should throw');

// Same single-use rule for respondWithNewView().
promise_test(() => {
  let byobRequest;
  const newView = () => new Uint8Array(16);
  const rs = new ReadableStream({
    pull(controller) {
      byobRequest = controller.byobRequest;
      byobRequest.respondWithNewView(newView());
    },
    type: 'bytes'
  });
  const reader = rs.getReader({ mode: 'byob' });
  return reader.read(newView()).then(() => {
    assert_throws_js(TypeError, () => byobRequest.respondWithNewView(newView()),
                     'respondWithNewView() should throw a TypeError');
  });
}, 'calling respondWithNewView() twice on the same byobRequest should throw');

// Even in the closed state (close() then respond(0)), a second respond(0) on
// the used-up request must throw. pull() is held pending so the request stays
// observable until the test responds explicitly.
promise_test(() => {
  let controller;
  let byobRequest;
  let resolvePullCalledPromise;
  const pullCalledPromise = new Promise(resolve => {
    resolvePullCalledPromise = resolve;
  });
  let resolvePull;
  const rs = new ReadableStream({
    start(c) {
      controller = c;
    },
    pull(c) {
      byobRequest = c.byobRequest;
      resolvePullCalledPromise();
      return new Promise(resolve => {
        resolvePull = resolve;
      });
    },
    type: 'bytes'
  });
  const reader = rs.getReader({ mode: 'byob' });
  const readPromise = reader.read(new Uint8Array(16));
  return pullCalledPromise.then(() => {
    controller.close();
    byobRequest.respond(0);
    resolvePull();
    return readPromise.then(() => {
      assert_throws_js(TypeError, () => byobRequest.respond(0), 'respond() should throw');
    });
  });
}, 'calling respond(0) twice on the same byobRequest should throw even when closed');
+
// Cancelling the reader invalidates the outstanding byobRequest: respond()
// afterwards must throw, while the pending read and cancel() both settle.
promise_test(() => {
  let controller;
  let byobRequest;
  let resolvePullCalledPromise;
  const pullCalledPromise = new Promise(resolve => {
    resolvePullCalledPromise = resolve;
  });
  let resolvePull;
  const rs = new ReadableStream({
    start(c) {
      controller = c;
    },
    pull(c) {
      byobRequest = c.byobRequest;
      resolvePullCalledPromise();
      return new Promise(resolve => {
        resolvePull = resolve;
      });
    },
    type: 'bytes'
  });
  const reader = rs.getReader({ mode: 'byob' });
  const readPromise = reader.read(new Uint8Array(16));
  return pullCalledPromise.then(() => {
    const cancelPromise = reader.cancel('meh');
    assert_throws_js(TypeError, () => byobRequest.respond(0), 'respond() should throw');
    resolvePull();
    return Promise.all([readPromise, cancelPromise]);
  });
}, 'calling respond() should throw when canceled');

// The promise returned from pull() resolving does NOT fulfill a pending
// read(view); releasing the lock afterwards must reject that read instead.
// delay() is a timing helper defined elsewhere in the harness.
promise_test(async t => {
  let resolvePullCalledPromise;
  const pullCalledPromise = new Promise(resolve => {
    resolvePullCalledPromise = resolve;
  });
  let resolvePull;
  const rs = new ReadableStream({
    pull() {
      resolvePullCalledPromise();
      return new Promise(resolve => {
        resolvePull = resolve;
      });
    },
    type: 'bytes'
  });
  const reader = rs.getReader({ mode: 'byob' });
  const read = reader.read(new Uint8Array(16));
  await pullCalledPromise;
  resolvePull();
  await delay(0);
  reader.releaseLock();
  await promise_rejects_js(t, TypeError, read, 'pending read should reject');
}, 'pull() resolving should not resolve read()');
+
// With autoAllocateChunkSize, even a default-reader read() creates a
// byobRequest; close() followed by respond(0) on it must settle the read
// cleanly rather than throwing.
promise_test(() => {
  // Tests https://github.com/whatwg/streams/issues/686

  let controller;
  const rs = new ReadableStream({
    autoAllocateChunkSize: 128,
    start(c) {
      controller = c;
    },
    type: 'bytes'
  });

  const readPromise = rs.getReader().read();

  const br = controller.byobRequest;
  controller.close();

  br.respond(0);

  return readPromise;
}, 'ReadableStream with byte source: default reader + autoAllocateChunkSize + byobRequest interaction');
+
// Synchronous constructor-validation checks: autoAllocateChunkSize must be
// positive, ReadableStreamBYOBReader (reachable only via an instance's
// constructor property) validates its stream argument, and byte streams reject
// any queuing strategy that defines a size().
test(() => {
  assert_throws_js(
    TypeError,
    () => new ReadableStream({ autoAllocateChunkSize: 0, type: 'bytes' }),
    'controller cannot be setup with autoAllocateChunkSize = 0');
}, 'ReadableStream with byte source: autoAllocateChunkSize cannot be 0');

test(() => {
  const BYOBReaderCtor =
    new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor;
  new BYOBReaderCtor(new ReadableStream({ type: 'bytes' }));
}, 'ReadableStreamBYOBReader can be constructed directly');

test(() => {
  const BYOBReaderCtor =
    new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor;
  assert_throws_js(TypeError, () => new BYOBReaderCtor({}), 'constructor must throw');
}, 'ReadableStreamBYOBReader constructor requires a ReadableStream argument');

test(() => {
  const BYOBReaderCtor =
    new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor;
  const lockedStream = new ReadableStream({ type: 'bytes' });
  lockedStream.getReader(); // lock the stream first
  assert_throws_js(TypeError, () => new BYOBReaderCtor(lockedStream), 'constructor must throw');
}, 'ReadableStreamBYOBReader constructor requires an unlocked ReadableStream');

test(() => {
  const BYOBReaderCtor =
    new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor;
  const defaultStream = new ReadableStream(); // no type: 'bytes'
  assert_throws_js(TypeError, () => new BYOBReaderCtor(defaultStream), 'constructor must throw');
}, 'ReadableStreamBYOBReader constructor requires a ReadableStream with type "bytes"');

test(() => {
  const makeByteStream = (strategy) => new ReadableStream({ type: 'bytes' }, strategy);

  assert_throws_js(RangeError, () => makeByteStream({
    size() {
      return 1;
    }
  }), 'constructor should throw for size function');

  assert_throws_js(RangeError,
                   () => makeByteStream(new CountQueuingStrategy({ highWaterMark: 1 })),
                   'constructor should throw when strategy is CountQueuingStrategy');

  assert_throws_js(RangeError,
                   () => makeByteStream(new ByteLengthQueuingStrategy({ highWaterMark: 512 })),
                   'constructor should throw when strategy is ByteLengthQueuingStrategy');

  // size() inherited via the prototype chain must be rejected too.
  class HasSizeMethod {
    size() {}
  }

  assert_throws_js(RangeError, () => makeByteStream(new HasSizeMethod()),
                   'constructor should throw when size on the prototype chain');
}, 'ReadableStream constructor should not accept a strategy with a size defined if type is "bytes"');
+
// respondWithNewView() with a view smaller than the BYOB request: the read
// fulfills with just the written prefix, but still backed by the caller's
// (transferred) 3-byte buffer, whose untouched tail bytes survive.
promise_test(async t => {
  const stream = new ReadableStream({
    pull: t.step_func(c => {
      const view = new Uint8Array(c.byobRequest.view.buffer, 0, 1);
      view[0] = 1;

      c.byobRequest.respondWithNewView(view);
    }),
    type: 'bytes'
  });
  const reader = stream.getReader({ mode: 'byob' });

  const result = await reader.read(new Uint8Array([4, 5, 6]));
  assert_false(result.done, 'result.done');

  const view = result.value;
  assert_equals(view.byteOffset, 0, 'result.value.byteOffset');
  assert_equals(view.byteLength, 1, 'result.value.byteLength');
  assert_equals(view[0], 1, 'result.value[0]');
  assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength');
  assert_array_equals([...new Uint8Array(view.buffer)], [1, 5, 6], 'result.value.buffer');
}, 'ReadableStream with byte source: respondWithNewView() with a smaller view');

// respondWithNewView() with a zero-length view after close(): the read settles
// done, returning the original buffer contents untouched.
promise_test(async t => {
  const stream = new ReadableStream({
    pull: t.step_func(c => {
      const view = new Uint8Array(c.byobRequest.view.buffer, 0, 0);

      c.close();

      c.byobRequest.respondWithNewView(view);
    }),
    type: 'bytes'
  });
  const reader = stream.getReader({ mode: 'byob' });

  const result = await reader.read(new Uint8Array([4, 5, 6]));
  assert_true(result.done, 'result.done');

  const view = result.value;
  assert_equals(view.byteOffset, 0, 'result.value.byteOffset');
  assert_equals(view.byteLength, 0, 'result.value.byteLength');
  assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength');
  assert_array_equals([...new Uint8Array(view.buffer)], [4, 5, 6], 'result.value.buffer');
}, 'ReadableStream with byte source: respondWithNewView() with a zero-length view (in the closed state)');
+
// respondWithNewView() must also accept a view over a *transferred* copy of the
// BYOB request's buffer. transferArrayBufferView() is a helper defined
// elsewhere in the harness that transfers a view's buffer and returns an
// equivalent view on the new buffer.
promise_test(async t => {
  let controller;
  let resolvePullCalledPromise;
  const pullCalledPromise = new Promise(resolve => {
    resolvePullCalledPromise = resolve;
  });
  const stream = new ReadableStream({
    start: t.step_func((c) => {
      controller = c;
    }),
    pull: t.step_func(() => {
      resolvePullCalledPromise();
    }),
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });
  const readPromise = reader.read(new Uint8Array([4, 5, 6]));
  await pullCalledPromise;

  // Transfer the original BYOB request's buffer, and respond with a new view on that buffer
  const transferredView = await transferArrayBufferView(controller.byobRequest.view);
  const newView = transferredView.subarray(0, 1);
  newView[0] = 42;

  controller.byobRequest.respondWithNewView(newView);

  const result = await readPromise;
  assert_false(result.done, 'result.done');

  const view = result.value;
  assert_equals(view.byteOffset, 0, 'result.value.byteOffset');
  assert_equals(view.byteLength, 1, 'result.value.byteLength');
  assert_equals(view[0], 42, 'result.value[0]');
  assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength');
  assert_array_equals([...new Uint8Array(view.buffer)], [42, 5, 6], 'result.value.buffer');

}, 'ReadableStream with byte source: respondWithNewView() with a transferred non-zero-length view ' +
   '(in the readable state)');

// Zero-length variant of the transferred-view case, responded after close():
// the read settles done with the (transferred) buffer contents intact.
promise_test(async t => {
  let controller;
  let resolvePullCalledPromise;
  const pullCalledPromise = new Promise(resolve => {
    resolvePullCalledPromise = resolve;
  });
  const stream = new ReadableStream({
    start: t.step_func((c) => {
      controller = c;
    }),
    pull: t.step_func(() => {
      resolvePullCalledPromise();
    }),
    type: 'bytes'
  });

  const reader = stream.getReader({ mode: 'byob' });
  const readPromise = reader.read(new Uint8Array([4, 5, 6]));
  await pullCalledPromise;

  // Transfer the original BYOB request's buffer, and respond with an empty view on that buffer
  const transferredView = await transferArrayBufferView(controller.byobRequest.view);
  const newView = transferredView.subarray(0, 0);

  controller.close();
  controller.byobRequest.respondWithNewView(newView);

  const result = await readPromise;
  assert_true(result.done, 'result.done');

  const view = result.value;
  assert_equals(view.byteOffset, 0, 'result.value.byteOffset');
  assert_equals(view.byteLength, 0, 'result.value.byteLength');
  assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength');
  assert_array_equals([...new Uint8Array(view.buffer)], [4, 5, 6], 'result.value.buffer');

}, 'ReadableStream with byte source: respondWithNewView() with a transferred zero-length view ' +
   '(in the closed state)');
+
// enqueue() while an auto-allocated BYOB request is outstanding must discard
// (invalidate) that request, and a later real read(view) must still work.
// flushAsyncEvents() is a harness helper that lets queued microtasks settle.
promise_test(async t => {
  let controller;
  let pullCount = 0;
  const rs = new ReadableStream({
    type: 'bytes',
    autoAllocateChunkSize: 10,
    start: t.step_func((c) => {
      controller = c;
    }),
    pull: t.step_func(() => {
      ++pullCount;
    })
  });

  await flushAsyncEvents();
  assert_equals(pullCount, 0, 'pull() must not have been invoked yet');

  const reader1 = rs.getReader();
  const read1 = reader1.read();
  assert_equals(pullCount, 1, 'pull() must have been invoked once');
  const byobRequest1 = controller.byobRequest;
  assert_equals(byobRequest1.view.byteLength, 10, 'first byobRequest.view.byteLength');

  // enqueue() must discard the auto-allocated BYOB request
  controller.enqueue(new Uint8Array([1, 2, 3]));
  assert_equals(byobRequest1.view, null, 'first byobRequest must be invalidated after enqueue()');

  const result1 = await read1;
  assert_false(result1.done, 'first result.done');
  const view1 = result1.value;
  assert_equals(view1.byteOffset, 0, 'first result.value.byteOffset');
  assert_equals(view1.byteLength, 3, 'first result.value.byteLength');
  assert_array_equals([...new Uint8Array(view1.buffer)], [1, 2, 3], 'first result.value.buffer');

  reader1.releaseLock();

  // read(view) should work after discarding the auto-allocated BYOB request
  const reader2 = rs.getReader({ mode: 'byob' });
  const read2 = reader2.read(new Uint8Array([4, 5, 6]));
  assert_equals(pullCount, 2, 'pull() must have been invoked twice');
  const byobRequest2 = controller.byobRequest;
  assert_equals(byobRequest2.view.byteOffset, 0, 'second byobRequest.view.byteOffset');
  assert_equals(byobRequest2.view.byteLength, 3, 'second byobRequest.view.byteLength');
  assert_array_equals([...new Uint8Array(byobRequest2.view.buffer)], [4, 5, 6], 'second byobRequest.view.buffer');

  byobRequest2.respond(3);
  assert_equals(byobRequest2.view, null, 'second byobRequest must be invalidated after respond()');

  const result2 = await read2;
  assert_false(result2.done, 'second result.done');
  const view2 = result2.value;
  assert_equals(view2.byteOffset, 0, 'second result.value.byteOffset');
  assert_equals(view2.byteLength, 3, 'second result.value.byteLength');
  assert_array_equals([...new Uint8Array(view2.buffer)], [4, 5, 6], 'second result.value.buffer');

  reader2.releaseLock();
  assert_equals(pullCount, 2, 'pull() must only have been invoked twice');
}, 'ReadableStream with byte source: enqueue() discards auto-allocated BYOB request');
+
// releaseLock() while a read(view) is pending rejects that read but keeps the
// BYOB request alive; a respond() then fulfills the *second* reader's read.
// assert_typed_array_equals() is a typed-array comparison helper defined in the
// harness.
promise_test(async t => {
  let controller;
  const rs = new ReadableStream({
    type: 'bytes',
    start: t.step_func((c) => {
      controller = c;
    })
  });
  await flushAsyncEvents();

  const reader1 = rs.getReader({ mode: 'byob' });
  const read1 = reader1.read(new Uint8Array([1, 2, 3]));
  const byobRequest1 = controller.byobRequest;
  assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
  assert_typed_array_equals(byobRequest1.view, new Uint8Array([1, 2, 3]), 'first byobRequest.view');

  // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
  reader1.releaseLock();
  const reader2 = rs.getReader({ mode: 'byob' });
  const read2 = reader2.read(new Uint8Array([4, 5, 6]));
  assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
  assert_equals(controller.byobRequest, byobRequest1, 'byobRequest should be unchanged');
  assert_array_equals([...new Uint8Array(byobRequest1.view.buffer)], [1, 2, 3], 'byobRequest.view.buffer should be unchanged');
  await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');

  // respond() should fulfill the *second* read() request
  byobRequest1.view[0] = 11;
  byobRequest1.respond(1);
  const byobRequest2 = controller.byobRequest;
  assert_equals(byobRequest2, null, 'byobRequest should be null after respond()');

  const result2 = await read2;
  assert_false(result2.done, 'second result.done');
  assert_typed_array_equals(result2.value, new Uint8Array([11, 5, 6]).subarray(0, 1), 'second result.value');

}, 'ReadableStream with byte source: releaseLock() with pending read(view), read(view) on second reader, respond()');

// Variant where the second reader asks for a Uint16Array(1): the surviving
// request's respond(1) only half-fills it, so a second BYOB request over the
// remaining byte appears, and a second respond(1) completes the 16-bit value.
promise_test(async t => {
  let controller;
  const rs = new ReadableStream({
    type: 'bytes',
    start: t.step_func((c) => {
      controller = c;
    })
  });
  await flushAsyncEvents();

  const reader1 = rs.getReader({ mode: 'byob' });
  const read1 = reader1.read(new Uint8Array([1, 2, 3]));
  const byobRequest1 = controller.byobRequest;
  assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
  assert_typed_array_equals(byobRequest1.view, new Uint8Array([1, 2, 3]), 'first byobRequest.view');

  // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
  reader1.releaseLock();
  const reader2 = rs.getReader({ mode: 'byob' });
  const read2 = reader2.read(new Uint16Array(1));
  assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
  assert_equals(controller.byobRequest, byobRequest1, 'byobRequest should be unchanged');
  assert_array_equals([...new Uint8Array(byobRequest1.view.buffer)], [1, 2, 3], 'byobRequest.view.buffer should be unchanged');
  await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');

  // respond(1) should partially fill the second read(), but not yet fulfill it
  byobRequest1.view[0] = 0x11;
  byobRequest1.respond(1);

  // second BYOB request should use remaining buffer from the second read()
  const byobRequest2 = controller.byobRequest;
  assert_not_equals(byobRequest2, null, 'second byobRequest should exist');
  assert_typed_array_equals(byobRequest2.view, new Uint8Array([0x11, 0]).subarray(1, 2), 'second byobRequest.view');

  // second respond(1) should fill the read request and fulfill it
  byobRequest2.view[0] = 0x22;
  byobRequest2.respond(1);
  const result2 = await read2;
  assert_false(result2.done, 'second result.done');
  const view2 = result2.value;
  assert_equals(view2.byteOffset, 0, 'second result.value.byteOffset');
  assert_equals(view2.byteLength, 2, 'second result.value.byteLength');
  const dataView2 = new DataView(view2.buffer, view2.byteOffset, view2.byteLength);
  assert_equals(dataView2.getUint16(0), 0x1122, 'second result.value[0]');

}, 'ReadableStream with byte source: releaseLock() with pending read(view), read(view) on second reader with ' +
   '1 element Uint16Array, respond(1)');
+
// Variant where the surviving 3-byte request is responded with 3 bytes while
// the second reader only asked for 2: the second read fulfills with 2 bytes and
// the leftover byte stays queued for a third read().
promise_test(async t => {
  let controller;
  const rs = new ReadableStream({
    type: 'bytes',
    start: t.step_func((c) => {
      controller = c;
    })
  });
  await flushAsyncEvents();

  const reader1 = rs.getReader({ mode: 'byob' });
  const read1 = reader1.read(new Uint8Array([1, 2, 3]));
  const byobRequest1 = controller.byobRequest;
  assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
  assert_typed_array_equals(byobRequest1.view, new Uint8Array([1, 2, 3]), 'first byobRequest.view');

  // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
  reader1.releaseLock();
  const reader2 = rs.getReader({ mode: 'byob' });
  const read2 = reader2.read(new Uint8Array([4, 5]));
  assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
  assert_equals(controller.byobRequest, byobRequest1, 'byobRequest should be unchanged');
  assert_array_equals([...new Uint8Array(byobRequest1.view.buffer)], [1, 2, 3], 'byobRequest.view.buffer should be unchanged');
  await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');

  // respond(3) should fulfill the second read(), and put 1 remaining byte in the queue
  byobRequest1.view[0] = 6;
  byobRequest1.view[1] = 7;
  byobRequest1.view[2] = 8;
  byobRequest1.respond(3);
  const byobRequest2 = controller.byobRequest;
  assert_equals(byobRequest2, null, 'byobRequest should be null after respond()');

  const result2 = await read2;
  assert_false(result2.done, 'second result.done');
  assert_typed_array_equals(result2.value, new Uint8Array([6, 7]), 'second result.value');

  // third read() should fulfill with the remaining byte
  const result3 = await reader2.read(new Uint8Array([0, 0, 0]));
  assert_false(result3.done, 'third result.done');
  assert_typed_array_equals(result3.value, new Uint8Array([8, 0, 0]).subarray(0, 1), 'third result.value');

}, 'ReadableStream with byte source: releaseLock() with pending read(view), read(view) on second reader with ' +
   '2 element Uint8Array, respond(3)');

// Same surviving-request scenario, but fulfilled through respondWithNewView()
// rather than respond(): the second reader's read gets the 2-byte sub-view.
promise_test(async t => {
  let controller;
  const rs = new ReadableStream({
    type: 'bytes',
    start: t.step_func((c) => {
      controller = c;
    })
  });
  await flushAsyncEvents();

  const reader1 = rs.getReader({ mode: 'byob' });
  const read1 = reader1.read(new Uint8Array([1, 2, 3]));
  const byobRequest1 = controller.byobRequest;
  assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
  assert_typed_array_equals(byobRequest1.view, new Uint8Array([1, 2, 3]), 'first byobRequest.view');

  // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
  reader1.releaseLock();
  const reader2 = rs.getReader({ mode: 'byob' });
  const read2 = reader2.read(new Uint8Array([4, 5, 6]));
  assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
  await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');

  // respondWithNewView() should fulfill the *second* read() request
  byobRequest1.view[0] = 11;
  byobRequest1.view[1] = 12;
  byobRequest1.respondWithNewView(byobRequest1.view.subarray(0, 2));
  const byobRequest2 = controller.byobRequest;
  assert_equals(byobRequest2, null, 'byobRequest should be null after respondWithNewView()');

  const result2 = await read2;
  assert_false(result2.done, 'second result.done');
  assert_typed_array_equals(result2.value, new Uint8Array([11, 12, 6]).subarray(0, 2), 'second result.value');

}, 'ReadableStream with byte source: releaseLock() with pending read(view), read(view) on second reader, respondWithNewView()');
+
+promise_test(async t => {
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start: t.step_func((c) => {
+ controller = c;
+ })
+ });
+ await flushAsyncEvents();
+
+ const reader1 = rs.getReader({ mode: 'byob' });
+ const read1 = reader1.read(new Uint8Array([1, 2, 3]));
+ const byobRequest1 = controller.byobRequest;
+ assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
+ assert_typed_array_equals(byobRequest1.view, new Uint8Array([1, 2, 3]), 'first byobRequest.view');
+
+ // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
+ reader1.releaseLock();
+ const reader2 = rs.getReader({ mode: 'byob' });
+ const read2 = reader2.read(new Uint8Array([4, 5, 6]));
+ assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
+ await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');
+
+ // enqueue() should fulfill the *second* read() request
+ controller.enqueue(new Uint8Array([11, 12]));
+ const byobRequest2 = controller.byobRequest;
+ assert_equals(byobRequest2, null, 'byobRequest should be null after enqueue()');
+
+ const result2 = await read2;
+ assert_false(result2.done, 'second result.done');
+ assert_typed_array_equals(result2.value, new Uint8Array([11, 12, 6]).subarray(0, 2), 'second result.value');
+
+}, 'ReadableStream with byte source: releaseLock() with pending read(view), read(view) on second reader, enqueue()');
+
+promise_test(async t => {
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start: t.step_func((c) => {
+ controller = c;
+ })
+ });
+ await flushAsyncEvents();
+
+ const reader1 = rs.getReader({ mode: 'byob' });
+ const read1 = reader1.read(new Uint8Array([1, 2, 3]));
+ const byobRequest1 = controller.byobRequest;
+ assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
+ assert_typed_array_equals(byobRequest1.view, new Uint8Array([1, 2, 3]), 'first byobRequest.view');
+
+ // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
+ reader1.releaseLock();
+ const reader2 = rs.getReader({ mode: 'byob' });
+ const read2 = reader2.read(new Uint8Array([4, 5, 6]));
+ assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
+ await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');
+
+ // close() followed by respond(0) should fulfill the second read()
+ controller.close();
+ byobRequest1.respond(0);
+ const byobRequest2 = controller.byobRequest;
+ assert_equals(byobRequest2, null, 'byobRequest should be null after respond()');
+
+ const result2 = await read2;
+ assert_true(result2.done, 'second result.done');
+ assert_typed_array_equals(result2.value, new Uint8Array([4, 5, 6]).subarray(0, 0), 'second result.value');
+}, 'ReadableStream with byte source: releaseLock() with pending read(view), read(view) on second reader, ' +
+ 'close(), respond(0)');
+
+promise_test(async t => {
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ autoAllocateChunkSize: 4,
+ start: t.step_func((c) => {
+ controller = c;
+ })
+ });
+ await flushAsyncEvents();
+
+ const reader1 = rs.getReader();
+ const read1 = reader1.read();
+ const byobRequest1 = controller.byobRequest;
+ assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
+ assert_typed_array_equals(byobRequest1.view, new Uint8Array(4), 'first byobRequest.view');
+
+ // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
+ reader1.releaseLock();
+ const reader2 = rs.getReader();
+ const read2 = reader2.read();
+ assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
+ await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');
+
+ // respond() should fulfill the *second* read() request
+ byobRequest1.view[0] = 11;
+ byobRequest1.respond(1);
+ const byobRequest2 = controller.byobRequest;
+ assert_equals(byobRequest2, null, 'byobRequest should be null after respond()');
+
+ const result2 = await read2;
+ assert_false(result2.done, 'second result.done');
+ assert_typed_array_equals(result2.value, new Uint8Array([11, 0, 0, 0]).subarray(0, 1), 'second result.value');
+
+}, 'ReadableStream with byte source: autoAllocateChunkSize, releaseLock() with pending read(), read() on second reader, respond()');
+
+promise_test(async t => {
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ autoAllocateChunkSize: 4,
+ start: t.step_func((c) => {
+ controller = c;
+ })
+ });
+ await flushAsyncEvents();
+
+ const reader1 = rs.getReader();
+ const read1 = reader1.read();
+ const byobRequest1 = controller.byobRequest;
+ assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
+ assert_typed_array_equals(byobRequest1.view, new Uint8Array(4), 'first byobRequest.view');
+
+ // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
+ reader1.releaseLock();
+ const reader2 = rs.getReader();
+ const read2 = reader2.read();
+ assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
+ await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');
+
+ // enqueue() should fulfill the *second* read() request
+ controller.enqueue(new Uint8Array([11]));
+ const byobRequest2 = controller.byobRequest;
+ assert_equals(byobRequest2, null, 'byobRequest should be null after enqueue()');
+
+ const result2 = await read2;
+ assert_false(result2.done, 'second result.done');
+ assert_typed_array_equals(result2.value, new Uint8Array([11]), 'second result.value');
+
+}, 'ReadableStream with byte source: autoAllocateChunkSize, releaseLock() with pending read(), read() on second reader, enqueue()');
+
+promise_test(async t => {
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ autoAllocateChunkSize: 4,
+ start: t.step_func((c) => {
+ controller = c;
+ })
+ });
+ await flushAsyncEvents();
+
+ const reader1 = rs.getReader();
+ const read1 = reader1.read();
+ const byobRequest1 = controller.byobRequest;
+ assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
+ assert_typed_array_equals(byobRequest1.view, new Uint8Array(4), 'first byobRequest.view');
+
+ // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
+ reader1.releaseLock();
+ const reader2 = rs.getReader({ mode: 'byob' });
+ const read2 = reader2.read(new Uint8Array([4, 5, 6]));
+ assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
+ await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');
+
+ // respond() should fulfill the *second* read() request
+ byobRequest1.view[0] = 11;
+ byobRequest1.respond(1);
+ const byobRequest2 = controller.byobRequest;
+ assert_equals(byobRequest2, null, 'byobRequest should be null after respond()');
+
+ const result2 = await read2;
+ assert_false(result2.done, 'second result.done');
+ assert_typed_array_equals(result2.value, new Uint8Array([11, 5, 6]).subarray(0, 1), 'second result.value');
+
+}, 'ReadableStream with byte source: autoAllocateChunkSize, releaseLock() with pending read(), read(view) on second reader, respond()');
+
+promise_test(async t => {
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ autoAllocateChunkSize: 4,
+ start: t.step_func((c) => {
+ controller = c;
+ })
+ });
+ await flushAsyncEvents();
+
+ const reader1 = rs.getReader();
+ const read1 = reader1.read();
+ const byobRequest1 = controller.byobRequest;
+ assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
+ assert_typed_array_equals(byobRequest1.view, new Uint8Array(4), 'first byobRequest.view');
+
+ // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
+ reader1.releaseLock();
+ const reader2 = rs.getReader({ mode: 'byob' });
+ const read2 = reader2.read(new Uint8Array([4, 5, 6]));
+ assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
+ await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');
+
+ // enqueue() should fulfill the *second* read() request
+ controller.enqueue(new Uint8Array([11]));
+ const byobRequest2 = controller.byobRequest;
+ assert_equals(byobRequest2, null, 'byobRequest should be null after enqueue()');
+
+ const result2 = await read2;
+ assert_false(result2.done, 'second result.done');
+ assert_typed_array_equals(result2.value, new Uint8Array([11, 5, 6]).subarray(0, 1), 'second result.value');
+
+}, 'ReadableStream with byte source: autoAllocateChunkSize, releaseLock() with pending read(), read(view) on second reader, enqueue()');
+
+promise_test(async t => {
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start: t.step_func((c) => {
+ controller = c;
+ })
+ });
+ await flushAsyncEvents();
+
+ const reader1 = rs.getReader({ mode: 'byob' });
+ const read1 = reader1.read(new Uint16Array(1));
+ const byobRequest1 = controller.byobRequest;
+ assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
+ assert_typed_array_equals(byobRequest1.view, new Uint8Array([0, 0]), 'first byobRequest.view');
+
+ // respond(1) should partially fill the first read(), but not yet fulfill it
+ byobRequest1.view[0] = 0x11;
+ byobRequest1.respond(1);
+ const byobRequest2 = controller.byobRequest;
+ assert_not_equals(byobRequest2, null, 'second byobRequest should exist');
+ assert_typed_array_equals(byobRequest2.view, new Uint8Array([0x11, 0]).subarray(1, 2), 'second byobRequest.view');
+
+ // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
+ reader1.releaseLock();
+ const reader2 = rs.getReader({ mode: 'byob' });
+ const read2 = reader2.read(new Uint16Array(1));
+ assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
+ assert_equals(controller.byobRequest, byobRequest2, 'byobRequest should be unchanged');
+ assert_typed_array_equals(byobRequest2.view, new Uint8Array([0x11, 0]).subarray(1, 2), 'byobRequest.view should be unchanged');
+ await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');
+
+ // second respond(1) should fill the read request and fulfill it
+ byobRequest2.view[0] = 0x22;
+ byobRequest2.respond(1);
+ assert_equals(controller.byobRequest, null, 'byobRequest should be invalidated after second respond()');
+
+ const result2 = await read2;
+ assert_false(result2.done, 'second result.done');
+ const view2 = result2.value;
+ assert_equals(view2.byteOffset, 0, 'second result.value.byteOffset');
+ assert_equals(view2.byteLength, 2, 'second result.value.byteLength');
+ const dataView2 = new DataView(view2.buffer, view2.byteOffset, view2.byteLength);
+ assert_equals(dataView2.getUint16(0), 0x1122, 'second result.value[0]');
+
+}, 'ReadableStream with byte source: read(view) with 1 element Uint16Array, respond(1), releaseLock(), read(view) on ' +
+ 'second reader with 1 element Uint16Array, respond(1)');
+
+promise_test(async t => {
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start: t.step_func((c) => {
+ controller = c;
+ })
+ });
+ await flushAsyncEvents();
+
+ const reader1 = rs.getReader({ mode: 'byob' });
+ const read1 = reader1.read(new Uint16Array(1));
+ const byobRequest1 = controller.byobRequest;
+ assert_not_equals(byobRequest1, null, 'first byobRequest should exist');
+ assert_typed_array_equals(byobRequest1.view, new Uint8Array([0, 0]), 'first byobRequest.view');
+
+ // respond(1) should partially fill the first read(), but not yet fulfill it
+ byobRequest1.view[0] = 0x11;
+ byobRequest1.respond(1);
+ const byobRequest2 = controller.byobRequest;
+ assert_not_equals(byobRequest2, null, 'second byobRequest should exist');
+ assert_typed_array_equals(byobRequest2.view, new Uint8Array([0x11, 0]).subarray(1, 2), 'second byobRequest.view');
+
+ // releaseLock() should reject the pending read, but *not* invalidate the BYOB request
+ reader1.releaseLock();
+ const reader2 = rs.getReader();
+ const read2 = reader2.read();
+ assert_not_equals(controller.byobRequest, null, 'byobRequest should not be invalidated after releaseLock()');
+ assert_equals(controller.byobRequest, byobRequest2, 'byobRequest should be unchanged');
+ assert_typed_array_equals(byobRequest2.view, new Uint8Array([0x11, 0]).subarray(1, 2), 'byobRequest.view should be unchanged');
+ await promise_rejects_js(t, TypeError, read1, 'pending read must reject after releaseLock()');
+
+ // enqueue() should fulfill the read request and put remaining byte in the queue
+ controller.enqueue(new Uint8Array([0x22]));
+  assert_equals(controller.byobRequest, null, 'byobRequest should be invalidated after enqueue()');
+
+ const result2 = await read2;
+ assert_false(result2.done, 'second result.done');
+ assert_typed_array_equals(result2.value, new Uint8Array([0x11]), 'second result.value');
+
+ const result3 = await reader2.read();
+ assert_false(result3.done, 'third result.done');
+ assert_typed_array_equals(result3.value, new Uint8Array([0x22]), 'third result.value');
+
+}, 'ReadableStream with byte source: read(view) with 1 element Uint16Array, respond(1), releaseLock(), read() on ' +
+ 'second reader, enqueue()');
+
+promise_test(async t => {
+ // Tests https://github.com/nodejs/node/issues/41886
+ const stream = new ReadableStream({
+ type: 'bytes',
+ autoAllocateChunkSize: 10,
+ pull: t.step_func((c) => {
+ const newView = new Uint8Array(c.byobRequest.view.buffer, 0, 3);
+ newView.set([20, 21, 22]);
+ c.byobRequest.respondWithNewView(newView);
+ })
+ });
+
+ const reader = stream.getReader();
+ const result = await reader.read();
+ assert_false(result.done, 'result.done');
+
+ const view = result.value;
+ assert_equals(view.byteOffset, 0, 'result.value.byteOffset');
+ assert_equals(view.byteLength, 3, 'result.value.byteLength');
+ assert_equals(view.buffer.byteLength, 10, 'result.value.buffer.byteLength');
+ assert_array_equals([...new Uint8Array(view)], [20, 21, 22], 'result.value');
+}, 'ReadableStream with byte source: autoAllocateChunkSize, read(), respondWithNewView()');
diff --git a/testing/web-platform/tests/streams/readable-byte-streams/non-transferable-buffers.any.js b/testing/web-platform/tests/streams/readable-byte-streams/non-transferable-buffers.any.js
new file mode 100644
index 0000000000..4bddaef5d6
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-byte-streams/non-transferable-buffers.any.js
@@ -0,0 +1,70 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = rs.getReader({ mode: 'byob' });
+ const memory = new WebAssembly.Memory({ initial: 1 });
+ const view = new Uint8Array(memory.buffer, 0, 1);
+ await promise_rejects_js(t, TypeError, reader.read(view));
+}, 'ReadableStream with byte source: read() with a non-transferable buffer');
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = rs.getReader({ mode: 'byob' });
+ const memory = new WebAssembly.Memory({ initial: 1 });
+ const view = new Uint8Array(memory.buffer, 0, 1);
+ await promise_rejects_js(t, TypeError, reader.read(view, { min: 1 }));
+}, 'ReadableStream with byte source: fill() with a non-transferable buffer');
+
+test(t => {
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const memory = new WebAssembly.Memory({ initial: 1 });
+ const view = new Uint8Array(memory.buffer, 0, 1);
+ assert_throws_js(TypeError, () => controller.enqueue(view));
+}, 'ReadableStream with byte source: enqueue() with a non-transferable buffer');
+
+promise_test(async t => {
+ let byobRequest;
+ let resolvePullCalledPromise;
+ const pullCalledPromise = new Promise(resolve => {
+ resolvePullCalledPromise = resolve;
+ });
+ const rs = new ReadableStream({
+ pull(controller) {
+ byobRequest = controller.byobRequest;
+ resolvePullCalledPromise();
+ },
+ type: 'bytes'
+ });
+
+ const memory = new WebAssembly.Memory({ initial: 1 });
+ // Make sure the backing buffers of both views have the same length
+ const byobView = new Uint8Array(new ArrayBuffer(memory.buffer.byteLength), 0, 1);
+ const newView = new Uint8Array(memory.buffer, byobView.byteOffset, byobView.byteLength);
+
+ const reader = rs.getReader({ mode: 'byob' });
+ reader.read(byobView).then(
+ t.unreached_func('read() should not resolve'),
+ t.unreached_func('read() should not reject')
+ );
+ await pullCalledPromise;
+
+ assert_throws_js(TypeError, () => byobRequest.respondWithNewView(newView));
+}, 'ReadableStream with byte source: respondWithNewView() with a non-transferable buffer');
diff --git a/testing/web-platform/tests/streams/readable-byte-streams/read-min.any.js b/testing/web-platform/tests/streams/readable-byte-streams/read-min.any.js
new file mode 100644
index 0000000000..4010e3750c
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-byte-streams/read-min.any.js
@@ -0,0 +1,774 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/rs-utils.js
+// META: script=../resources/test-utils.js
+'use strict';
+
+// View buffers are detached after pull() returns, so record the information at the time that pull() was called.
+function extractViewInfo(view) {
+ return {
+ constructor: view.constructor,
+ bufferByteLength: view.buffer.byteLength,
+ byteOffset: view.byteOffset,
+ byteLength: view.byteLength
+ };
+}
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.unreached_func('pull() should not be called'),
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+ await promise_rejects_js(t, TypeError, reader.read(new Uint8Array(1), { min: 0 }));
+}, 'ReadableStream with byte source: read({ min }) rejects if min is 0');
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.unreached_func('pull() should not be called'),
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+ await promise_rejects_js(t, TypeError, reader.read(new Uint8Array(1), { min: -1 }));
+}, 'ReadableStream with byte source: read({ min }) rejects if min is negative');
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.unreached_func('pull() should not be called'),
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+ await promise_rejects_js(t, RangeError, reader.read(new Uint8Array(1), { min: 2 }));
+}, 'ReadableStream with byte source: read({ min }) rejects if min is larger than view\'s length (Uint8Array)');
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.unreached_func('pull() should not be called'),
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+ await promise_rejects_js(t, RangeError, reader.read(new Uint16Array(1), { min: 2 }));
+}, 'ReadableStream with byte source: read({ min }) rejects if min is larger than view\'s length (Uint16Array)');
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.unreached_func('pull() should not be called'),
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+ await promise_rejects_js(t, RangeError, reader.read(new DataView(new ArrayBuffer(1)), { min: 2 }));
+}, 'ReadableStream with byte source: read({ min }) rejects if min is larger than view\'s length (DataView)');
+
+promise_test(async t => {
+ let pullCount = 0;
+ const byobRequests = [];
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.step_func((c) => {
+ const byobRequest = c.byobRequest;
+ const view = byobRequest.view;
+ byobRequests[pullCount] = {
+ nonNull: byobRequest !== null,
+ viewNonNull: view !== null,
+ viewInfo: extractViewInfo(view)
+ };
+ if (pullCount === 0) {
+ view[0] = 0x01;
+ view[1] = 0x02;
+ byobRequest.respond(2);
+ } else if (pullCount === 1) {
+ view[0] = 0x03;
+ byobRequest.respond(1);
+ } else if (pullCount === 2) {
+ view[0] = 0x04;
+ byobRequest.respond(1);
+ }
+ ++pullCount;
+ })
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+ const read1 = reader.read(new Uint8Array(3), { min: 3 });
+ const read2 = reader.read(new Uint8Array(1));
+
+ const result1 = await read1;
+ assert_false(result1.done, 'first result should not be done');
+ assert_typed_array_equals(result1.value, new Uint8Array([0x01, 0x02, 0x03]), 'first result value');
+
+ const result2 = await read2;
+ assert_false(result2.done, 'second result should not be done');
+ assert_typed_array_equals(result2.value, new Uint8Array([0x04]), 'second result value');
+
+ assert_equals(pullCount, 3, 'pull() must have been called 3 times');
+
+ {
+ const byobRequest = byobRequests[0];
+ assert_true(byobRequest.nonNull, 'first byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 3, 'first view.buffer.byteLength should be 3');
+ assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 3, 'first view.byteLength should be 3');
+ }
+
+ {
+ const byobRequest = byobRequests[1];
+ assert_true(byobRequest.nonNull, 'second byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 3, 'second view.buffer.byteLength should be 3');
+ assert_equals(viewInfo.byteOffset, 2, 'second view.byteOffset should be 2');
+ assert_equals(viewInfo.byteLength, 1, 'second view.byteLength should be 1');
+ }
+
+ {
+ const byobRequest = byobRequests[2];
+ assert_true(byobRequest.nonNull, 'third byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'third byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'third view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 1, 'third view.buffer.byteLength should be 1');
+ assert_equals(viewInfo.byteOffset, 0, 'third view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 1, 'third view.byteLength should be 1');
+ }
+
+}, 'ReadableStream with byte source: read({ min }), then read()');
+
+promise_test(async t => {
+ let pullCount = 0;
+ const byobRequests = [];
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.step_func((c) => {
+ const byobRequest = c.byobRequest;
+ const view = byobRequest.view;
+ byobRequests[pullCount] = {
+ nonNull: byobRequest !== null,
+ viewNonNull: view !== null,
+ viewInfo: extractViewInfo(view)
+ };
+ if (pullCount === 0) {
+ view[0] = 0x01;
+ view[1] = 0x02;
+ byobRequest.respond(2);
+ } else if (pullCount === 1) {
+ view[0] = 0x03;
+ byobRequest.respond(1);
+ }
+ ++pullCount;
+ })
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+
+ const result = await reader.read(new DataView(new ArrayBuffer(3)), { min: 3 });
+ assert_false(result.done, 'result should not be done');
+ assert_equals(result.value.constructor, DataView, 'result.value must be a DataView');
+ assert_equals(result.value.byteOffset, 0, 'result.value.byteOffset');
+ assert_equals(result.value.byteLength, 3, 'result.value.byteLength');
+ assert_equals(result.value.buffer.byteLength, 3, 'result.value.buffer.byteLength');
+ assert_array_equals([...new Uint8Array(result.value.buffer)], [0x01, 0x02, 0x03], `result.value.buffer contents`);
+
+ assert_equals(pullCount, 2, 'pull() must have been called 2 times');
+
+ {
+ const byobRequest = byobRequests[0];
+ assert_true(byobRequest.nonNull, 'first byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 3, 'first view.buffer.byteLength should be 3');
+ assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 3, 'first view.byteLength should be 3');
+ }
+
+ {
+ const byobRequest = byobRequests[1];
+ assert_true(byobRequest.nonNull, 'second byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 3, 'second view.buffer.byteLength should be 3');
+ assert_equals(viewInfo.byteOffset, 2, 'second view.byteOffset should be 2');
+ assert_equals(viewInfo.byteLength, 1, 'second view.byteLength should be 1');
+ }
+
+}, 'ReadableStream with byte source: read({ min }) with a DataView');
+
+promise_test(async t => {
+ let pullCount = 0;
+ const byobRequests = [];
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start: t.step_func((c) => {
+ c.enqueue(new Uint8Array([0x01]));
+ }),
+ pull: t.step_func((c) => {
+ const byobRequest = c.byobRequest;
+ const view = byobRequest.view;
+ byobRequests[pullCount] = {
+ nonNull: byobRequest !== null,
+ viewNonNull: view !== null,
+ viewInfo: extractViewInfo(view)
+ };
+ if (pullCount === 0) {
+ view[0] = 0x02;
+ view[1] = 0x03;
+ byobRequest.respond(2);
+ }
+ ++pullCount;
+ })
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+
+ const result = await reader.read(new Uint8Array(3), { min: 3 });
+ assert_false(result.done, 'first result should not be done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01, 0x02, 0x03]), 'first result value');
+
+ assert_equals(pullCount, 1, 'pull() must have only been called once');
+
+ const byobRequest = byobRequests[0];
+ assert_true(byobRequest.nonNull, 'first byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 3, 'first view.buffer.byteLength should be 3');
+ assert_equals(viewInfo.byteOffset, 1, 'first view.byteOffset should be 1');
+ assert_equals(viewInfo.byteLength, 2, 'first view.byteLength should be 2');
+
+}, 'ReadableStream with byte source: enqueue(), then read({ min })');
+
+promise_test(async t => {
+ let pullCount = 0;
+ const byobRequests = [];
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.step_func((c) => {
+ const byobRequest = c.byobRequest;
+ const view = byobRequest.view;
+ byobRequests[pullCount] = {
+ nonNull: byobRequest !== null,
+ viewNonNull: view !== null,
+ viewInfo: extractViewInfo(view)
+ };
+ if (pullCount === 0) {
+ c.enqueue(new Uint8Array([0x01, 0x02]));
+ } else if (pullCount === 1) {
+ c.enqueue(new Uint8Array([0x03]));
+ }
+ ++pullCount;
+ })
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+
+ const result = await reader.read(new Uint8Array(3), { min: 3 });
+ assert_false(result.done, 'first result should not be done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01, 0x02, 0x03]), 'first result value');
+
+ assert_equals(pullCount, 2, 'pull() must have been called 2 times');
+
+ {
+ const byobRequest = byobRequests[0];
+ assert_true(byobRequest.nonNull, 'first byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 3, 'first view.buffer.byteLength should be 3');
+ assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 3, 'first view.byteLength should be 3');
+ }
+
+ {
+ const byobRequest = byobRequests[1];
+ assert_true(byobRequest.nonNull, 'second byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 3, 'second view.buffer.byteLength should be 3');
+ assert_equals(viewInfo.byteOffset, 2, 'second view.byteOffset should be 2');
+ assert_equals(viewInfo.byteLength, 1, 'second view.byteLength should be 1');
+ }
+
+}, 'ReadableStream with byte source: read({ min: 3 }) on a 3-byte Uint8Array, then multiple enqueue() up to 3 bytes');
+
+promise_test(async t => {
+ let pullCount = 0;
+ const byobRequests = [];
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.step_func((c) => {
+ const byobRequest = c.byobRequest;
+ const view = byobRequest.view;
+ byobRequests[pullCount] = {
+ nonNull: byobRequest !== null,
+ viewNonNull: view !== null,
+ viewInfo: extractViewInfo(view)
+ };
+ if (pullCount === 0) {
+ c.enqueue(new Uint8Array([0x01, 0x02]));
+ } else if (pullCount === 1) {
+ c.enqueue(new Uint8Array([0x03]));
+ }
+ ++pullCount;
+ })
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+
+ const result = await reader.read(new Uint8Array(5), { min: 3 });
+ assert_false(result.done, 'first result should not be done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01, 0x02, 0x03, 0, 0]).subarray(0, 3), 'first result value');
+
+ assert_equals(pullCount, 2, 'pull() must have been called 2 times');
+
+ {
+ const byobRequest = byobRequests[0];
+ assert_true(byobRequest.nonNull, 'first byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 5, 'first view.buffer.byteLength should be 5');
+ assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 5, 'first view.byteLength should be 5');
+ }
+
+ {
+ const byobRequest = byobRequests[1];
+ assert_true(byobRequest.nonNull, 'second byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 5, 'second view.buffer.byteLength should be 5');
+ assert_equals(viewInfo.byteOffset, 2, 'second view.byteOffset should be 2');
+ assert_equals(viewInfo.byteLength, 3, 'second view.byteLength should be 3');
+ }
+
+}, 'ReadableStream with byte source: read({ min: 3 }) on a 5-byte Uint8Array, then multiple enqueue() up to 3 bytes');
+
+promise_test(async t => {
+ let pullCount = 0;
+ const byobRequests = [];
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.step_func((c) => {
+ const byobRequest = c.byobRequest;
+ const view = byobRequest.view;
+ byobRequests[pullCount] = {
+ nonNull: byobRequest !== null,
+ viewNonNull: view !== null,
+ viewInfo: extractViewInfo(view)
+ };
+ if (pullCount === 0) {
+ c.enqueue(new Uint8Array([0x01, 0x02]));
+ } else if (pullCount === 1) {
+ c.enqueue(new Uint8Array([0x03, 0x04]));
+ }
+ ++pullCount;
+ })
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+
+ const result = await reader.read(new Uint8Array(5), { min: 3 });
+ assert_false(result.done, 'first result should not be done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01, 0x02, 0x03, 0x04, 0]).subarray(0, 4), 'first result value');
+
+ assert_equals(pullCount, 2, 'pull() must have been called 2 times');
+
+ {
+ const byobRequest = byobRequests[0];
+ assert_true(byobRequest.nonNull, 'first byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 5, 'first view.buffer.byteLength should be 5');
+ assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0');
+ assert_equals(viewInfo.byteLength, 5, 'first view.byteLength should be 5');
+ }
+
+ {
+ const byobRequest = byobRequests[1];
+ assert_true(byobRequest.nonNull, 'second byobRequest must not be null');
+ assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null');
+ const viewInfo = byobRequest.viewInfo;
+ assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array');
+ assert_equals(viewInfo.bufferByteLength, 5, 'second view.buffer.byteLength should be 5');
+ assert_equals(viewInfo.byteOffset, 2, 'second view.byteOffset should be 2');
+ assert_equals(viewInfo.byteLength, 3, 'second view.byteLength should be 3');
+ }
+
+}, 'ReadableStream with byte source: read({ min: 3 }) on a 5-byte Uint8Array, then multiple enqueue() up to 4 bytes');
+
+promise_test(async t => {
+ const stream = new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(16);
+ view[0] = 0x01;
+ view[8] = 0x02;
+ c.enqueue(view);
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const byobReader = stream.getReader({ mode: 'byob' });
+ const result1 = await byobReader.read(new Uint8Array(8), { min: 8 });
+ assert_false(result1.done, 'result1.done');
+
+ const view1 = result1.value;
+ assert_equals(view1.constructor, Uint8Array, 'result1.value.constructor');
+ assert_equals(view1.buffer.byteLength, 8, 'result1.value.buffer.byteLength');
+ assert_equals(view1.byteOffset, 0, 'result1.value.byteOffset');
+ assert_equals(view1.byteLength, 8, 'result1.value.byteLength');
+ assert_equals(view1[0], 0x01, 'result1.value[0]');
+
+ byobReader.releaseLock();
+
+ const reader = stream.getReader();
+ const result2 = await reader.read();
+ assert_false(result2.done, 'result2.done');
+
+ const view2 = result2.value;
+ assert_equals(view2.constructor, Uint8Array, 'result2.value.constructor');
+ assert_equals(view2.buffer.byteLength, 16, 'result2.value.buffer.byteLength');
+ assert_equals(view2.byteOffset, 8, 'result2.value.byteOffset');
+ assert_equals(view2.byteLength, 8, 'result2.value.byteLength');
+ assert_equals(view2[0], 0x02, 'result2.value[0]');
+}, 'ReadableStream with byte source: enqueue(), read({ min }) partially, then read()');
+
+promise_test(async () => {
+ let pullCount = 0;
+ const byobRequestDefined = [];
+ let byobRequestViewDefined;
+
+ const stream = new ReadableStream({
+ async pull(c) {
+ byobRequestDefined.push(c.byobRequest !== null);
+ const initialByobRequest = c.byobRequest;
+
+ const transferredView = await transferArrayBufferView(c.byobRequest.view);
+ transferredView[0] = 0x01;
+ c.byobRequest.respondWithNewView(transferredView);
+
+ byobRequestDefined.push(c.byobRequest !== null);
+ byobRequestViewDefined = initialByobRequest.view !== null;
+
+ ++pullCount;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+ const result = await reader.read(new Uint8Array(1), { min: 1 });
+ assert_false(result.done, 'result.done');
+ assert_equals(result.value.byteLength, 1, 'result.value.byteLength');
+ assert_equals(result.value[0], 0x01, 'result.value[0]');
+ assert_equals(pullCount, 1, 'pull() should be called only once');
+ assert_true(byobRequestDefined[0], 'byobRequest must not be null before respondWithNewView()');
+ assert_false(byobRequestDefined[1], 'byobRequest must be null after respondWithNewView()');
+ assert_false(byobRequestViewDefined, 'view of initial byobRequest must be null after respondWithNewView()');
+}, 'ReadableStream with byte source: read({ min }), then respondWithNewView() with a transferred ArrayBuffer');
+
+promise_test(async t => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.close();
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const result = await reader.read(new Uint8Array([0x01]), { min: 1 });
+ assert_true(result.done, 'result.done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01]).subarray(0, 0), 'result.value');
+
+ await reader.closed;
+}, 'ReadableStream with byte source: read({ min }) on a closed stream');
+
+promise_test(async t => {
+ let pullCount = 0;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.step_func((c) => {
+ if (pullCount === 0) {
+ c.byobRequest.view[0] = 0x01;
+ c.byobRequest.respond(1);
+ } else if (pullCount === 1) {
+ c.close();
+ c.byobRequest.respond(0);
+ }
+ ++pullCount;
+ })
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+
+ const result = await reader.read(new Uint8Array(3), { min: 3 });
+ assert_true(result.done, 'result.done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01, 0, 0]).subarray(0, 1), 'result.value');
+
+ assert_equals(pullCount, 2, 'pull() must have been called 2 times');
+
+ await reader.closed;
+}, 'ReadableStream with byte source: read({ min }) when closed before view is filled');
+
+promise_test(async t => {
+ let pullCount = 0;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.step_func((c) => {
+ if (pullCount === 0) {
+ c.byobRequest.view[0] = 0x01;
+ c.byobRequest.view[1] = 0x02;
+ c.byobRequest.respond(2);
+ } else if (pullCount === 1) {
+ c.byobRequest.view[0] = 0x03;
+ c.byobRequest.respond(1);
+ c.close();
+ }
+ ++pullCount;
+ })
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+
+ const result = await reader.read(new Uint8Array(3), { min: 3 });
+ assert_false(result.done, 'result.done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01, 0x02, 0x03]), 'result.value');
+
+ assert_equals(pullCount, 2, 'pull() must have been called 2 times');
+
+ await reader.closed;
+}, 'ReadableStream with byte source: read({ min }) when closed immediately after view is filled');
+
+promise_test(async t => {
+ const error1 = new Error('error1');
+ const stream = new ReadableStream({
+ start(c) {
+ c.error(error1);
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+ const read = reader.read(new Uint8Array(1), { min: 1 });
+
+ await Promise.all([
+ promise_rejects_exactly(t, error1, read, 'read() must fail'),
+ promise_rejects_exactly(t, error1, reader.closed, 'closed must fail')
+ ]);
+}, 'ReadableStream with byte source: read({ min }) on an errored stream');
+
+promise_test(async t => {
+ const error1 = new Error('error1');
+ let controller;
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+ const read = reader.read(new Uint8Array(1), { min: 1 });
+
+ controller.error(error1);
+
+ await Promise.all([
+ promise_rejects_exactly(t, error1, read, 'read() must fail'),
+ promise_rejects_exactly(t, error1, reader.closed, 'closed must fail')
+ ]);
+}, 'ReadableStream with byte source: read({ min }), then error()');
+
+promise_test(t => {
+ let cancelCount = 0;
+ let reason;
+
+ const passedReason = new TypeError('foo');
+
+ const stream = new ReadableStream({
+ pull: t.unreached_func('pull() should not be called'),
+ cancel(r) {
+ if (cancelCount === 0) {
+ reason = r;
+ }
+
+ ++cancelCount;
+
+ return 'bar';
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const readPromise = reader.read(new Uint8Array(1), { min: 1 }).then(result => {
+ assert_true(result.done, 'result.done');
+ assert_equals(result.value, undefined, 'result.value');
+ });
+
+ const cancelPromise = reader.cancel(passedReason).then(result => {
+ assert_equals(result, undefined, 'cancel() return value should be fulfilled with undefined');
+ assert_equals(cancelCount, 1, 'cancel() should be called only once');
+ assert_equals(reason, passedReason, 'reason should equal the passed reason');
+ });
+
+ return Promise.all([readPromise, cancelPromise]);
+}, 'ReadableStream with byte source: getReader(), read({ min }), then cancel()');
+
+promise_test(async t => {
+ let pullCount = 0;
+ let byobRequest;
+ const viewInfos = [];
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: t.step_func((c) => {
+ byobRequest = c.byobRequest;
+
+ viewInfos.push(extractViewInfo(c.byobRequest.view));
+ c.byobRequest.view[0] = 0x01;
+ c.byobRequest.respond(1);
+ viewInfos.push(extractViewInfo(c.byobRequest.view));
+
+ ++pullCount;
+ })
+ });
+
+ await Promise.resolve();
+ assert_equals(pullCount, 0, 'pull() must not have been called yet');
+
+ const reader = rs.getReader({ mode: 'byob' });
+ const read = reader.read(new Uint8Array(3), { min: 3 });
+ assert_equals(pullCount, 1, 'pull() must have been called once');
+ assert_not_equals(byobRequest, null, 'byobRequest should not be null');
+ assert_equals(viewInfos[0].byteLength, 3, 'byteLength before respond() should be 3');
+ assert_equals(viewInfos[1].byteLength, 2, 'byteLength after respond() should be 2');
+
+ reader.cancel().catch(t.unreached_func('cancel() should not reject'));
+
+ const result = await read;
+ assert_true(result.done, 'result.done');
+ assert_equals(result.value, undefined, 'result.value');
+
+ assert_equals(pullCount, 1, 'pull() must only be called once');
+
+ await reader.closed;
+}, 'ReadableStream with byte source: cancel() with partially filled pending read({ min }) request');
+
+promise_test(async () => {
+ let pullCalled = false;
+
+ const stream = new ReadableStream({
+ start(c) {
+ const view = new Uint8Array(16);
+ view[7] = 0x01;
+ view[15] = 0x02;
+ c.enqueue(view);
+ },
+ pull() {
+ pullCalled = true;
+ },
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ const result1 = await reader.read(new Uint8Array(8), { min: 8 });
+ assert_false(result1.done, 'result1.done');
+
+ const view1 = result1.value;
+ assert_equals(view1.byteOffset, 0, 'result1.value.byteOffset');
+ assert_equals(view1.byteLength, 8, 'result1.value.byteLength');
+ assert_equals(view1[7], 0x01, 'result1.value[7]');
+
+ const result2 = await reader.read(new Uint8Array(8), { min: 8 });
+ assert_false(pullCalled, 'pull() must not have been called');
+ assert_false(result2.done, 'result2.done');
+
+ const view2 = result2.value;
+ assert_equals(view2.byteOffset, 0, 'result2.value.byteOffset');
+ assert_equals(view2.byteLength, 8, 'result2.value.byteLength');
+ assert_equals(view2[7], 0x02, 'result2.value[7]');
+}, 'ReadableStream with byte source: enqueue(), then read({ min }) with smaller views');
+
+promise_test(async t => {
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(new Uint8Array([0xaa, 0xbb, 0xcc]));
+ c.close();
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+
+ await promise_rejects_js(t, TypeError, reader.read(new Uint16Array(2), { min: 2 }), 'read() must fail');
+ await promise_rejects_js(t, TypeError, reader.closed, 'reader.closed should reject');
+}, 'ReadableStream with byte source: 3 byte enqueue(), then close(), then read({ min }) with 2-element Uint16Array must fail');
+
+promise_test(async t => {
+ let controller;
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull: t.unreached_func('pull() should not be called'),
+ type: 'bytes'
+ });
+
+ const reader = stream.getReader({ mode: 'byob' });
+ const readPromise = reader.read(new Uint16Array(2), { min: 2 });
+
+ controller.enqueue(new Uint8Array([0xaa, 0xbb, 0xcc]));
+ assert_throws_js(TypeError, () => controller.close(), 'controller.close() must throw');
+
+ await promise_rejects_js(t, TypeError, readPromise, 'read() must fail');
+ await promise_rejects_js(t, TypeError, reader.closed, 'reader.closed must reject');
+}, 'ReadableStream with byte source: read({ min }) with 2-element Uint16Array, then 3 byte enqueue(), then close() must fail');
+
+promise_test(async t => {
+ let pullCount = 0;
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start: t.step_func((c) => {
+ controller = c;
+ }),
+ pull: t.step_func((c) => {
+ ++pullCount;
+ })
+ });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ await Promise.resolve();
+ assert_equals(pullCount, 0, 'pull() must not have been called yet');
+
+ const read1 = reader1.read(new Uint8Array(3), { min: 3 });
+ const read2 = reader2.read(new Uint8Array(1));
+
+ assert_equals(pullCount, 1, 'pull() must have been called once');
+ const byobRequest1 = controller.byobRequest;
+ assert_equals(byobRequest1.view.byteLength, 3, 'first byobRequest.view.byteLength should be 3');
+ byobRequest1.view[0] = 0x01;
+ byobRequest1.respond(1);
+
+ const result2 = await read2;
+ assert_false(result2.done, 'branch2 first read() should not be done');
+ assert_typed_array_equals(result2.value, new Uint8Array([0x01]), 'branch2 first read() value');
+
+ assert_equals(pullCount, 2, 'pull() must have been called 2 times');
+ const byobRequest2 = controller.byobRequest;
+ assert_equals(byobRequest2.view.byteLength, 2, 'second byobRequest.view.byteLength should be 2');
+ byobRequest2.view[0] = 0x02;
+ byobRequest2.view[1] = 0x03;
+ byobRequest2.respond(2);
+
+ const result1 = await read1;
+ assert_false(result1.done, 'branch1 read() should not be done');
+ assert_typed_array_equals(result1.value, new Uint8Array([0x01, 0x02, 0x03]), 'branch1 read() value');
+
+ const result3 = await reader2.read(new Uint8Array(2));
+ assert_equals(pullCount, 2, 'pull() must only be called 2 times');
+ assert_false(result3.done, 'branch2 second read() should not be done');
+ assert_typed_array_equals(result3.value, new Uint8Array([0x02, 0x03]), 'branch2 second read() value');
+}, 'ReadableStream with byte source: tee() with read({ min }) from branch1 and read() from branch2');
diff --git a/testing/web-platform/tests/streams/readable-byte-streams/respond-after-enqueue.any.js b/testing/web-platform/tests/streams/readable-byte-streams/respond-after-enqueue.any.js
new file mode 100644
index 0000000000..e51efa061a
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-byte-streams/respond-after-enqueue.any.js
@@ -0,0 +1,55 @@
+// META: global=window,worker,shadowrealm
+
+'use strict';
+
+// Repro for Blink bug https://crbug.com/1255762.
+promise_test(async () => {
+ const rs = new ReadableStream({
+ type: 'bytes',
+ autoAllocateChunkSize: 10,
+ pull(controller) {
+ controller.enqueue(new Uint8Array([1, 2, 3]));
+ controller.byobRequest.respond(10);
+ }
+ });
+
+ const reader = rs.getReader();
+ const {value, done} = await reader.read();
+ assert_false(done, 'done should not be true');
+ assert_array_equals(value, [1, 2, 3], 'value should be 3 bytes');
+}, 'byobRequest.respond() after enqueue() should not crash');
+
+promise_test(async () => {
+ const rs = new ReadableStream({
+ type: 'bytes',
+ autoAllocateChunkSize: 10,
+ pull(controller) {
+ const byobRequest = controller.byobRequest;
+ controller.enqueue(new Uint8Array([1, 2, 3]));
+ byobRequest.respond(10);
+ }
+ });
+
+ const reader = rs.getReader();
+ const {value, done} = await reader.read();
+ assert_false(done, 'done should not be true');
+ assert_array_equals(value, [1, 2, 3], 'value should be 3 bytes');
+}, 'byobRequest.respond() with cached byobRequest after enqueue() should not crash');
+
+promise_test(async () => {
+ const rs = new ReadableStream({
+ type: 'bytes',
+ autoAllocateChunkSize: 10,
+ pull(controller) {
+ controller.enqueue(new Uint8Array([1, 2, 3]));
+ controller.byobRequest.respond(2);
+ }
+ });
+
+ const reader = rs.getReader();
+ const [read1, read2] = await Promise.all([reader.read(), reader.read()]);
+ assert_false(read1.done, 'read1.done should not be true');
+ assert_array_equals(read1.value, [1, 2, 3], 'read1.value should be 3 bytes');
+ assert_false(read2.done, 'read2.done should not be true');
+ assert_array_equals(read2.value, [0, 0], 'read2.value should be 2 bytes');
+}, 'byobRequest.respond() after enqueue() with double read should not crash');
diff --git a/testing/web-platform/tests/streams/readable-byte-streams/tee.any.js b/testing/web-platform/tests/streams/readable-byte-streams/tee.any.js
new file mode 100644
index 0000000000..7dd5ba3f3f
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-byte-streams/tee.any.js
@@ -0,0 +1,936 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/rs-utils.js
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+// META: script=../resources/rs-test-templates.js
+'use strict';
+
+test(() => {
+
+ const rs = new ReadableStream({ type: 'bytes' });
+ const result = rs.tee();
+
+ assert_true(Array.isArray(result), 'return value should be an array');
+ assert_equals(result.length, 2, 'array should have length 2');
+ assert_equals(result[0].constructor, ReadableStream, '0th element should be a ReadableStream');
+ assert_equals(result[1].constructor, ReadableStream, '1st element should be a ReadableStream');
+
+}, 'ReadableStream teeing with byte source: rs.tee() returns an array of two ReadableStreams');
+
+promise_test(async t => {
+
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start(c) {
+ c.enqueue(new Uint8Array([0x01]));
+ c.enqueue(new Uint8Array([0x02]));
+ c.close();
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader({ mode: 'byob' });
+ const reader2 = branch2.getReader({ mode: 'byob' });
+
+ reader2.closed.then(t.unreached_func('branch2 should not be closed'));
+
+ {
+ const result = await reader1.read(new Uint8Array(1));
+ assert_equals(result.done, false, 'done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01]), 'value');
+ }
+
+ {
+ const result = await reader1.read(new Uint8Array(1));
+ assert_equals(result.done, false, 'done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x02]), 'value');
+ }
+
+ {
+ const result = await reader1.read(new Uint8Array(1));
+ assert_equals(result.done, true, 'done');
+ assert_typed_array_equals(result.value, new Uint8Array([0]).subarray(0, 0), 'value');
+ }
+
+ {
+ const result = await reader2.read(new Uint8Array(1));
+ assert_equals(result.done, false, 'done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01]), 'value');
+ }
+
+ await reader1.closed;
+
+}, 'ReadableStream teeing with byte source: should be able to read one branch to the end without affecting the other');
+
+promise_test(async () => {
+
+ let pullCount = 0;
+ const enqueuedChunk = new Uint8Array([0x01]);
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull(c) {
+ ++pullCount;
+ if (pullCount === 1) {
+ c.enqueue(enqueuedChunk);
+ }
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader();
+ const reader2 = branch2.getReader();
+
+ const [result1, result2] = await Promise.all([reader1.read(), reader2.read()]);
+ assert_equals(result1.done, false, 'reader1 done');
+ assert_equals(result2.done, false, 'reader2 done');
+
+ const view1 = result1.value;
+ const view2 = result2.value;
+ assert_typed_array_equals(view1, new Uint8Array([0x01]), 'reader1 value');
+ assert_typed_array_equals(view2, new Uint8Array([0x01]), 'reader2 value');
+
+ assert_not_equals(view1.buffer, view2.buffer, 'chunks should have different buffers');
+ assert_not_equals(enqueuedChunk.buffer, view1.buffer, 'enqueued chunk and branch1\'s chunk should have different buffers');
+ assert_not_equals(enqueuedChunk.buffer, view2.buffer, 'enqueued chunk and branch2\'s chunk should have different buffers');
+
+}, 'ReadableStream teeing with byte source: chunks should be cloned for each branch');
+
+promise_test(async () => {
+
+ let pullCount = 0;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull(c) {
+ ++pullCount;
+ if (pullCount === 1) {
+ c.byobRequest.view[0] = 0x01;
+ c.byobRequest.respond(1);
+ }
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader({ mode: 'byob' });
+ const reader2 = branch2.getReader();
+ const buffer = new Uint8Array([42, 42, 42]).buffer;
+
+ {
+ const result = await reader1.read(new Uint8Array(buffer, 0, 1));
+ assert_equals(result.done, false, 'done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01, 42, 42]).subarray(0, 1), 'value');
+ }
+
+ {
+ const result = await reader2.read();
+ assert_equals(result.done, false, 'done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01]), 'value');
+ }
+
+}, 'ReadableStream teeing with byte source: chunks for BYOB requests from branch 1 should be cloned to branch 2');
+
+promise_test(async t => {
+
+ const theError = { name: 'boo!' };
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start(c) {
+ c.enqueue(new Uint8Array([0x01]));
+ c.enqueue(new Uint8Array([0x02]));
+ },
+ pull() {
+ throw theError;
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader({ mode: 'byob' });
+ const reader2 = branch2.getReader({ mode: 'byob' });
+
+ {
+ const result = await reader1.read(new Uint8Array(1));
+ assert_equals(result.done, false, 'first read from branch1 should not be done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x01]), 'first read from branch1');
+ }
+
+ {
+ const result = await reader1.read(new Uint8Array(1));
+ assert_equals(result.done, false, 'second read from branch1 should not be done');
+ assert_typed_array_equals(result.value, new Uint8Array([0x02]), 'second read from branch1');
+ }
+
+ await promise_rejects_exactly(t, theError, reader1.read(new Uint8Array(1)));
+ await promise_rejects_exactly(t, theError, reader2.read(new Uint8Array(1)));
+
+ await Promise.all([
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader2.closed)
+ ]);
+
+}, 'ReadableStream teeing with byte source: errors in the source should propagate to both branches');
+
+promise_test(async () => {
+
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start(c) {
+ c.enqueue(new Uint8Array([0x01]));
+ c.enqueue(new Uint8Array([0x02]));
+ c.close();
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ branch1.cancel();
+
+ const [chunks1, chunks2] = await Promise.all([readableStreamToArray(branch1), readableStreamToArray(branch2)]);
+ assert_array_equals(chunks1, [], 'branch1 should have no chunks');
+ assert_equals(chunks2.length, 2, 'branch2 should have two chunks');
+ assert_typed_array_equals(chunks2[0], new Uint8Array([0x01]), 'first chunk from branch2');
+ assert_typed_array_equals(chunks2[1], new Uint8Array([0x02]), 'second chunk from branch2');
+
+}, 'ReadableStream teeing with byte source: canceling branch1 should not impact branch2');
+
+promise_test(async () => {
+
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start(c) {
+ c.enqueue(new Uint8Array([0x01]));
+ c.enqueue(new Uint8Array([0x02]));
+ c.close();
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ branch2.cancel();
+
+ const [chunks1, chunks2] = await Promise.all([readableStreamToArray(branch1), readableStreamToArray(branch2)]);
+ assert_equals(chunks1.length, 2, 'branch1 should have two chunks');
+ assert_typed_array_equals(chunks1[0], new Uint8Array([0x01]), 'first chunk from branch1');
+ assert_typed_array_equals(chunks1[1], new Uint8Array([0x02]), 'second chunk from branch1');
+ assert_array_equals(chunks2, [], 'branch2 should have no chunks');
+
+}, 'ReadableStream teeing with byte source: canceling branch2 should not impact branch1');
+
+templatedRSTeeCancel('ReadableStream teeing with byte source', (extras) => {
+ return new ReadableStream({ type: 'bytes', ...extras });
+});
+
+promise_test(async () => {
+
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader({ mode: 'byob' });
+ const reader2 = branch2.getReader({ mode: 'byob' });
+
+ const promise = Promise.all([reader1.closed, reader2.closed]);
+
+ controller.close();
+
+ // The branches are created with HWM 0, so we need to read from at least one of them
+ // to observe the stream becoming closed.
+ const read1 = await reader1.read(new Uint8Array(1));
+ assert_equals(read1.done, true, 'first read from branch1 should be done');
+
+ await promise;
+
+}, 'ReadableStream teeing with byte source: closing the original should close the branches');
+
+promise_test(async t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader({ mode: 'byob' });
+ const reader2 = branch2.getReader({ mode: 'byob' });
+
+ const theError = { name: 'boo!' };
+ const promise = Promise.all([
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader2.closed)
+ ]);
+
+ controller.error(theError);
+ await promise;
+
+}, 'ReadableStream teeing with byte source: erroring the original should immediately error the branches');
+
+promise_test(async t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader();
+ const reader2 = branch2.getReader();
+
+ const theError = { name: 'boo!' };
+ const promise = Promise.all([
+ promise_rejects_exactly(t, theError, reader1.read()),
+ promise_rejects_exactly(t, theError, reader2.read())
+ ]);
+
+ controller.error(theError);
+ await promise;
+
+}, 'ReadableStream teeing with byte source: erroring the original should error pending reads from default reader');
+
+promise_test(async t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader({ mode: 'byob' });
+ const reader2 = branch2.getReader({ mode: 'byob' });
+
+ const theError = { name: 'boo!' };
+ const promise = Promise.all([
+ promise_rejects_exactly(t, theError, reader1.read(new Uint8Array(1))),
+ promise_rejects_exactly(t, theError, reader2.read(new Uint8Array(1)))
+ ]);
+
+ controller.error(theError);
+ await promise;
+
+}, 'ReadableStream teeing with byte source: erroring the original should error pending reads from BYOB reader');
+
+promise_test(async () => {
+
+ let controller;
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader({ mode: 'byob' });
+ const reader2 = branch2.getReader({ mode: 'byob' });
+ const cancelPromise = reader2.cancel();
+
+ controller.enqueue(new Uint8Array([0x01]));
+
+ const read1 = await reader1.read(new Uint8Array(1));
+ assert_equals(read1.done, false, 'first read() from branch1 should not be done');
+ assert_typed_array_equals(read1.value, new Uint8Array([0x01]), 'first read() from branch1');
+
+ controller.close();
+
+ const read2 = await reader1.read(new Uint8Array(1));
+ assert_equals(read2.done, true, 'second read() from branch1 should be done');
+
+ await Promise.all([
+ reader1.closed,
+ cancelPromise
+ ]);
+
+}, 'ReadableStream teeing with byte source: canceling branch1 should finish when branch2 reads until end of stream');
+
+promise_test(async t => {
+
+ let controller;
+ const theError = { name: 'boo!' };
+ const rs = new ReadableStream({
+ type: 'bytes',
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader({ mode: 'byob' });
+ const reader2 = branch2.getReader({ mode: 'byob' });
+ const cancelPromise = reader2.cancel();
+
+ controller.error(theError);
+
+ await Promise.all([
+ promise_rejects_exactly(t, theError, reader1.read(new Uint8Array(1))),
+ cancelPromise
+ ]);
+
+}, 'ReadableStream teeing with byte source: canceling branch1 should finish when original stream errors');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+
+ // Create two branches, each with a HWM of 0. This should result in no chunks being pulled.
+ rs.tee();
+
+ await flushAsyncEvents();
+ assert_array_equals(rs.events, [], 'pull should not be called');
+
+}, 'ReadableStream teeing with byte source: should not pull any chunks if no branches are reading');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({
+ type: 'bytes',
+ pull(controller) {
+ controller.enqueue(new Uint8Array([0x01]));
+ }
+ });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+ await Promise.all([
+ reader1.read(new Uint8Array(1)),
+ reader2.read(new Uint8Array(1))
+ ]);
+ assert_array_equals(rs.events, ['pull'], 'pull should be called once');
+
+}, 'ReadableStream teeing with byte source: should only pull enough to fill the emptiest queue');
+
+promise_test(async t => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+ const theError = { name: 'boo!' };
+
+ rs.controller.error(theError);
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ await flushAsyncEvents();
+ assert_array_equals(rs.events, [], 'pull should not be called');
+
+ await Promise.all([
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader2.closed)
+ ]);
+
+}, 'ReadableStream teeing with byte source: should not pull when original is already errored');
+
+for (const branch of [1, 2]) {
+ promise_test(async t => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+ const theError = { name: 'boo!' };
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ await flushAsyncEvents();
+ assert_array_equals(rs.events, [], 'pull should not be called');
+
+ const reader = (branch === 1) ? reader1 : reader2;
+ const read1 = reader.read(new Uint8Array(1));
+
+ await flushAsyncEvents();
+ assert_array_equals(rs.events, ['pull'], 'pull should be called once');
+
+ rs.controller.error(theError);
+
+ await Promise.all([
+ promise_rejects_exactly(t, theError, read1),
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader2.closed)
+ ]);
+
+ await flushAsyncEvents();
+ assert_array_equals(rs.events, ['pull'], 'pull should be called once');
+
+ }, `ReadableStream teeing with byte source: stops pulling when original stream errors while branch ${branch} is reading`);
+}
+
+promise_test(async t => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+ const theError = { name: 'boo!' };
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ await flushAsyncEvents();
+ assert_array_equals(rs.events, [], 'pull should not be called');
+
+ const read1 = reader1.read(new Uint8Array(1));
+ const read2 = reader2.read(new Uint8Array(1));
+
+ await flushAsyncEvents();
+ assert_array_equals(rs.events, ['pull'], 'pull should be called once');
+
+ rs.controller.error(theError);
+
+ await Promise.all([
+ promise_rejects_exactly(t, theError, read1),
+ promise_rejects_exactly(t, theError, read2),
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader2.closed)
+ ]);
+
+ await flushAsyncEvents();
+ assert_array_equals(rs.events, ['pull'], 'pull should be called once');
+
+}, 'ReadableStream teeing with byte source: stops pulling when original stream errors while both branches are reading');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ const read1 = reader1.read(new Uint8Array([0x11]));
+ const read2 = reader2.read(new Uint8Array([0x22]));
+
+ const cancel1 = reader1.cancel();
+ await flushAsyncEvents();
+ const cancel2 = reader2.cancel();
+
+ const result1 = await read1;
+ assert_object_equals(result1, { value: undefined, done: true });
+ const result2 = await read2;
+ assert_object_equals(result2, { value: undefined, done: true });
+
+ await Promise.all([cancel1, cancel2]);
+
+}, 'ReadableStream teeing with byte source: canceling both branches in sequence with delay');
+
+promise_test(async t => {
+
+ const theError = { name: 'boo!' };
+ const rs = new ReadableStream({
+ type: 'bytes',
+ cancel() {
+ throw theError;
+ }
+ });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ const read1 = reader1.read(new Uint8Array([0x11]));
+ const read2 = reader2.read(new Uint8Array([0x22]));
+
+ const cancel1 = reader1.cancel();
+ await flushAsyncEvents();
+ const cancel2 = reader2.cancel();
+
+ const result1 = await read1;
+ assert_object_equals(result1, { value: undefined, done: true });
+ const result2 = await read2;
+ assert_object_equals(result2, { value: undefined, done: true });
+
+ await Promise.all([
+ promise_rejects_exactly(t, theError, cancel1),
+ promise_rejects_exactly(t, theError, cancel2)
+ ]);
+
+}, 'ReadableStream teeing with byte source: failing to cancel when canceling both branches in sequence with delay');
+
+promise_test(async () => {
+
+ let cancelResolve;
+ const cancelCalled = new Promise((resolve) => {
+ cancelResolve = resolve;
+ });
+ const rs = recordingReadableStream({
+ type: 'bytes',
+ cancel() {
+ cancelResolve();
+ }
+ });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ const read1 = reader1.read(new Uint8Array([0x11]));
+ await flushAsyncEvents();
+ const read2 = reader2.read(new Uint8Array([0x22]));
+ await flushAsyncEvents();
+
+ // We are reading into branch1's buffer.
+ const byobRequest1 = rs.controller.byobRequest;
+ assert_not_equals(byobRequest1, null);
+ assert_typed_array_equals(byobRequest1.view, new Uint8Array([0x11]), 'byobRequest1.view');
+
+ // Cancelling branch1 should not affect the BYOB request.
+ const cancel1 = reader1.cancel();
+ const result1 = await read1;
+ assert_equals(result1.done, true);
+ assert_equals(result1.value, undefined);
+ await flushAsyncEvents();
+ const byobRequest2 = rs.controller.byobRequest;
+ assert_typed_array_equals(byobRequest2.view, new Uint8Array([0x11]), 'byobRequest2.view');
+
+  // Cancelling branch2 should invalidate the BYOB request.
+ const cancel2 = reader2.cancel();
+ await cancelCalled;
+ const byobRequest3 = rs.controller.byobRequest;
+ assert_equals(byobRequest3, null);
+ const result2 = await read2;
+ assert_equals(result2.done, true);
+ assert_equals(result2.value, undefined);
+
+ await Promise.all([cancel1, cancel2]);
+
+}, 'ReadableStream teeing with byte source: read from branch1 and branch2, cancel branch1, cancel branch2');
+
+promise_test(async () => {
+
+ let cancelResolve;
+ const cancelCalled = new Promise((resolve) => {
+ cancelResolve = resolve;
+ });
+ const rs = recordingReadableStream({
+ type: 'bytes',
+ cancel() {
+ cancelResolve();
+ }
+ });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ const read1 = reader1.read(new Uint8Array([0x11]));
+ await flushAsyncEvents();
+ const read2 = reader2.read(new Uint8Array([0x22]));
+ await flushAsyncEvents();
+
+ // We are reading into branch1's buffer.
+ const byobRequest1 = rs.controller.byobRequest;
+ assert_not_equals(byobRequest1, null);
+ assert_typed_array_equals(byobRequest1.view, new Uint8Array([0x11]), 'byobRequest1.view');
+
+ // Cancelling branch2 should not affect the BYOB request.
+ const cancel2 = reader2.cancel();
+ const result2 = await read2;
+ assert_equals(result2.done, true);
+ assert_equals(result2.value, undefined);
+ await flushAsyncEvents();
+ const byobRequest2 = rs.controller.byobRequest;
+ assert_typed_array_equals(byobRequest2.view, new Uint8Array([0x11]), 'byobRequest2.view');
+
+ // Cancelling branch1 should invalidate the BYOB request.
+ const cancel1 = reader1.cancel();
+ await cancelCalled;
+ const byobRequest3 = rs.controller.byobRequest;
+ assert_equals(byobRequest3, null);
+ const result1 = await read1;
+ assert_equals(result1.done, true);
+ assert_equals(result1.value, undefined);
+
+ await Promise.all([cancel1, cancel2]);
+
+}, 'ReadableStream teeing with byte source: read from branch1 and branch2, cancel branch2, cancel branch1');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ const read1 = reader1.read(new Uint8Array([0x11]));
+ await flushAsyncEvents();
+ const read2 = reader2.read(new Uint8Array([0x22]));
+ await flushAsyncEvents();
+
+ // We are reading into branch1's buffer.
+ assert_typed_array_equals(rs.controller.byobRequest.view, new Uint8Array([0x11]), 'first byobRequest.view');
+
+ // Cancelling branch2 should not affect the BYOB request.
+ reader2.cancel();
+ const result2 = await read2;
+ assert_equals(result2.done, true);
+ assert_equals(result2.value, undefined);
+ await flushAsyncEvents();
+ assert_typed_array_equals(rs.controller.byobRequest.view, new Uint8Array([0x11]), 'second byobRequest.view');
+
+ // Respond to the BYOB request.
+ rs.controller.byobRequest.view[0] = 0x33;
+ rs.controller.byobRequest.respond(1);
+
+ // branch1 should receive the read chunk.
+ const result1 = await read1;
+ assert_equals(result1.done, false);
+ assert_typed_array_equals(result1.value, new Uint8Array([0x33]), 'first read() from branch1');
+
+}, 'ReadableStream teeing with byte source: read from branch1 and branch2, cancel branch2, enqueue to branch1');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ const read1 = reader1.read(new Uint8Array([0x11]));
+ await flushAsyncEvents();
+ const read2 = reader2.read(new Uint8Array([0x22]));
+ await flushAsyncEvents();
+
+ // We are reading into branch1's buffer.
+ assert_typed_array_equals(rs.controller.byobRequest.view, new Uint8Array([0x11]), 'first byobRequest.view');
+
+ // Cancelling branch1 should not affect the BYOB request.
+ reader1.cancel();
+ const result1 = await read1;
+ assert_equals(result1.done, true);
+ assert_equals(result1.value, undefined);
+ await flushAsyncEvents();
+ assert_typed_array_equals(rs.controller.byobRequest.view, new Uint8Array([0x11]), 'second byobRequest.view');
+
+ // Respond to the BYOB request.
+ rs.controller.byobRequest.view[0] = 0x33;
+ rs.controller.byobRequest.respond(1);
+
+ // branch2 should receive the read chunk.
+ const result2 = await read2;
+ assert_equals(result2.done, false);
+ assert_typed_array_equals(result2.value, new Uint8Array([0x33]), 'first read() from branch2');
+
+}, 'ReadableStream teeing with byte source: read from branch1 and branch2, cancel branch1, respond to branch2');
+
+promise_test(async () => {
+
+ let pullCount = 0;
+ const byobRequestDefined = [];
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull(c) {
+ ++pullCount;
+ byobRequestDefined.push(c.byobRequest !== null);
+ c.enqueue(new Uint8Array([pullCount]));
+ }
+ });
+
+ const [branch1, _] = rs.tee();
+ const reader1 = branch1.getReader({ mode: 'byob' });
+
+ const result1 = await reader1.read(new Uint8Array([0x11]));
+ assert_equals(result1.done, false, 'first read should not be done');
+ assert_typed_array_equals(result1.value, new Uint8Array([0x1]), 'first read');
+ assert_equals(pullCount, 1, 'pull() should be called once');
+ assert_equals(byobRequestDefined[0], true, 'should have created a BYOB request for first read');
+
+ reader1.releaseLock();
+ const reader2 = branch1.getReader();
+
+ const result2 = await reader2.read();
+ assert_equals(result2.done, false, 'second read should not be done');
+ assert_typed_array_equals(result2.value, new Uint8Array([0x2]), 'second read');
+ assert_equals(pullCount, 2, 'pull() should be called twice');
+ assert_equals(byobRequestDefined[1], false, 'should not have created a BYOB request for second read');
+
+}, 'ReadableStream teeing with byte source: pull with BYOB reader, then pull with default reader');
+
+promise_test(async () => {
+
+ let pullCount = 0;
+ const byobRequestDefined = [];
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull(c) {
+ ++pullCount;
+ byobRequestDefined.push(c.byobRequest !== null);
+ c.enqueue(new Uint8Array([pullCount]));
+ }
+ });
+
+ const [branch1, _] = rs.tee();
+ const reader1 = branch1.getReader();
+
+ const result1 = await reader1.read();
+ assert_equals(result1.done, false, 'first read should not be done');
+ assert_typed_array_equals(result1.value, new Uint8Array([0x1]), 'first read');
+ assert_equals(pullCount, 1, 'pull() should be called once');
+ assert_equals(byobRequestDefined[0], false, 'should not have created a BYOB request for first read');
+
+ reader1.releaseLock();
+ const reader2 = branch1.getReader({ mode: 'byob' });
+
+ const result2 = await reader2.read(new Uint8Array([0x22]));
+ assert_equals(result2.done, false, 'second read should not be done');
+ assert_typed_array_equals(result2.value, new Uint8Array([0x2]), 'second read');
+ assert_equals(pullCount, 2, 'pull() should be called twice');
+ assert_equals(byobRequestDefined[1], true, 'should have created a BYOB request for second read');
+
+}, 'ReadableStream teeing with byte source: pull with default reader, then pull with BYOB reader');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({
+ type: 'bytes'
+ });
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+
+ // Wait for each branch's start() promise to resolve.
+ await flushAsyncEvents();
+
+ const read2 = reader2.read(new Uint8Array([0x22]));
+ const read1 = reader1.read(new Uint8Array([0x11]));
+ await flushAsyncEvents();
+
+ // branch2 should provide the BYOB request.
+ const byobRequest = rs.controller.byobRequest;
+ assert_typed_array_equals(byobRequest.view, new Uint8Array([0x22]), 'first BYOB request');
+ byobRequest.view[0] = 0x01;
+ byobRequest.respond(1);
+
+ const result1 = await read1;
+ assert_equals(result1.done, false, 'first read should not be done');
+ assert_typed_array_equals(result1.value, new Uint8Array([0x1]), 'first read');
+
+ const result2 = await read2;
+ assert_equals(result2.done, false, 'second read should not be done');
+ assert_typed_array_equals(result2.value, new Uint8Array([0x1]), 'second read');
+
+}, 'ReadableStream teeing with byte source: read from branch2, then read from branch1');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader();
+ const reader2 = branch2.getReader({ mode: 'byob' });
+ await flushAsyncEvents();
+
+ const read1 = reader1.read();
+ const read2 = reader2.read(new Uint8Array([0x22]));
+ await flushAsyncEvents();
+
+ // There should be no BYOB request.
+ assert_equals(rs.controller.byobRequest, null, 'first BYOB request');
+
+ // Close the stream.
+ rs.controller.close();
+
+ const result1 = await read1;
+ assert_equals(result1.done, true, 'read from branch1 should be done');
+ assert_equals(result1.value, undefined, 'read from branch1');
+
+ // branch2 should get its buffer back.
+ const result2 = await read2;
+ assert_equals(result2.done, true, 'read from branch2 should be done');
+ assert_typed_array_equals(result2.value, new Uint8Array([0x22]).subarray(0, 0), 'read from branch2');
+
+}, 'ReadableStream teeing with byte source: read from branch1 with default reader, then close while branch2 has pending BYOB read');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+ const [branch1, branch2] = rs.tee();
+ const reader1 = branch1.getReader({ mode: 'byob' });
+ const reader2 = branch2.getReader();
+ await flushAsyncEvents();
+
+ const read2 = reader2.read();
+ const read1 = reader1.read(new Uint8Array([0x11]));
+ await flushAsyncEvents();
+
+ // There should be no BYOB request.
+ assert_equals(rs.controller.byobRequest, null, 'first BYOB request');
+
+ // Close the stream.
+ rs.controller.close();
+
+ const result2 = await read2;
+ assert_equals(result2.done, true, 'read from branch2 should be done');
+ assert_equals(result2.value, undefined, 'read from branch2');
+
+ // branch1 should get its buffer back.
+ const result1 = await read1;
+ assert_equals(result1.done, true, 'read from branch1 should be done');
+ assert_typed_array_equals(result1.value, new Uint8Array([0x11]).subarray(0, 0), 'read from branch1');
+
+}, 'ReadableStream teeing with byte source: read from branch2 with default reader, then close while branch1 has pending BYOB read');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+ await flushAsyncEvents();
+
+ const read1 = reader1.read(new Uint8Array([0x11]));
+ const read2 = reader2.read(new Uint8Array([0x22]));
+ await flushAsyncEvents();
+
+ // branch1 should provide the BYOB request.
+ const byobRequest = rs.controller.byobRequest;
+ assert_typed_array_equals(byobRequest.view, new Uint8Array([0x11]), 'first BYOB request');
+
+ // Close the stream.
+ rs.controller.close();
+ byobRequest.respond(0);
+
+ // Both branches should get their buffers back.
+ const result1 = await read1;
+ assert_equals(result1.done, true, 'first read should be done');
+ assert_typed_array_equals(result1.value, new Uint8Array([0x11]).subarray(0, 0), 'first read');
+
+ const result2 = await read2;
+ assert_equals(result2.done, true, 'second read should be done');
+ assert_typed_array_equals(result2.value, new Uint8Array([0x22]).subarray(0, 0), 'second read');
+
+}, 'ReadableStream teeing with byte source: close when both branches have pending BYOB reads');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader());
+ const branch1Reads = [reader1.read(), reader1.read()];
+ const branch2Reads = [reader2.read(), reader2.read()];
+
+ await flushAsyncEvents();
+ rs.controller.enqueue(new Uint8Array([0x11]));
+ rs.controller.close();
+
+ const result1 = await branch1Reads[0];
+ assert_equals(result1.done, false, 'first read() from branch1 should be not done');
+ assert_typed_array_equals(result1.value, new Uint8Array([0x11]), 'first chunk from branch1 should be correct');
+ const result2 = await branch2Reads[0];
+ assert_equals(result2.done, false, 'first read() from branch2 should be not done');
+ assert_typed_array_equals(result2.value, new Uint8Array([0x11]), 'first chunk from branch2 should be correct');
+
+ assert_object_equals(await branch1Reads[1], { value: undefined, done: true }, 'second read() from branch1 should be done');
+ assert_object_equals(await branch2Reads[1], { value: undefined, done: true }, 'second read() from branch2 should be done');
+
+}, 'ReadableStream teeing with byte source: enqueue() and close() while both branches are pulling');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream({ type: 'bytes' });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader({ mode: 'byob' }));
+ const branch1Reads = [reader1.read(new Uint8Array(1)), reader1.read(new Uint8Array(1))];
+ const branch2Reads = [reader2.read(new Uint8Array(1)), reader2.read(new Uint8Array(1))];
+
+ await flushAsyncEvents();
+ rs.controller.byobRequest.view[0] = 0x11;
+ rs.controller.byobRequest.respond(1);
+ rs.controller.close();
+
+ const result1 = await branch1Reads[0];
+ assert_equals(result1.done, false, 'first read() from branch1 should be not done');
+ assert_typed_array_equals(result1.value, new Uint8Array([0x11]), 'first chunk from branch1 should be correct');
+ const result2 = await branch2Reads[0];
+ assert_equals(result2.done, false, 'first read() from branch2 should be not done');
+ assert_typed_array_equals(result2.value, new Uint8Array([0x11]), 'first chunk from branch2 should be correct');
+
+ const result3 = await branch1Reads[1];
+ assert_equals(result3.done, true, 'second read() from branch1 should be done');
+ assert_typed_array_equals(result3.value, new Uint8Array([0]).subarray(0, 0), 'second chunk from branch1 should be correct');
+ const result4 = await branch2Reads[1];
+ assert_equals(result4.done, true, 'second read() from branch2 should be done');
+ assert_typed_array_equals(result4.value, new Uint8Array([0]).subarray(0, 0), 'second chunk from branch2 should be correct');
+
+}, 'ReadableStream teeing with byte source: respond() and close() while both branches are pulling');
diff --git a/testing/web-platform/tests/streams/readable-streams/async-iterator.any.js b/testing/web-platform/tests/streams/readable-streams/async-iterator.any.js
new file mode 100644
index 0000000000..4b674bea84
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/async-iterator.any.js
@@ -0,0 +1,650 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/rs-utils.js
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = new Error('error1');
+
+function assert_iter_result(iterResult, value, done, message) {
+ const prefix = message === undefined ? '' : `${message} `;
+ assert_equals(typeof iterResult, 'object', `${prefix}type is object`);
+ assert_equals(Object.getPrototypeOf(iterResult), Object.prototype, `${prefix}[[Prototype]]`);
+ assert_array_equals(Object.getOwnPropertyNames(iterResult).sort(), ['done', 'value'], `${prefix}property names`);
+ assert_equals(iterResult.value, value, `${prefix}value`);
+ assert_equals(iterResult.done, done, `${prefix}done`);
+}
+
+test(() => {
+ const s = new ReadableStream();
+ const it = s.values();
+ const proto = Object.getPrototypeOf(it);
+
+ const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () {}).prototype);
+ assert_equals(Object.getPrototypeOf(proto), AsyncIteratorPrototype, 'prototype should extend AsyncIteratorPrototype');
+
+ const methods = ['next', 'return'].sort();
+ assert_array_equals(Object.getOwnPropertyNames(proto).sort(), methods, 'should have all the correct methods');
+
+ for (const m of methods) {
+ const propDesc = Object.getOwnPropertyDescriptor(proto, m);
+ assert_true(propDesc.enumerable, 'method should be enumerable');
+ assert_true(propDesc.configurable, 'method should be configurable');
+ assert_true(propDesc.writable, 'method should be writable');
+ assert_equals(typeof it[m], 'function', 'method should be a function');
+ assert_equals(it[m].name, m, 'method should have the correct name');
+ }
+
+ assert_equals(it.next.length, 0, 'next should have no parameters');
+ assert_equals(it.return.length, 1, 'return should have 1 parameter');
+ assert_equals(typeof it.throw, 'undefined', 'throw should not exist');
+}, 'Async iterator instances should have the correct list of properties');
+
+promise_test(async () => {
+ const s = new ReadableStream({
+ start(c) {
+ c.enqueue(1);
+ c.enqueue(2);
+ c.enqueue(3);
+ c.close();
+ }
+ });
+
+ const chunks = [];
+ for await (const chunk of s) {
+ chunks.push(chunk);
+ }
+ assert_array_equals(chunks, [1, 2, 3]);
+}, 'Async-iterating a push source');
+
+promise_test(async () => {
+ let i = 1;
+ const s = new ReadableStream({
+ pull(c) {
+ c.enqueue(i);
+ if (i >= 3) {
+ c.close();
+ }
+ i += 1;
+ }
+ });
+
+ const chunks = [];
+ for await (const chunk of s) {
+ chunks.push(chunk);
+ }
+ assert_array_equals(chunks, [1, 2, 3]);
+}, 'Async-iterating a pull source');
+
+promise_test(async () => {
+ const s = new ReadableStream({
+ start(c) {
+ c.enqueue(undefined);
+ c.enqueue(undefined);
+ c.enqueue(undefined);
+ c.close();
+ }
+ });
+
+ const chunks = [];
+ for await (const chunk of s) {
+ chunks.push(chunk);
+ }
+ assert_array_equals(chunks, [undefined, undefined, undefined]);
+}, 'Async-iterating a push source with undefined values');
+
+promise_test(async () => {
+ let i = 1;
+ const s = new ReadableStream({
+ pull(c) {
+ c.enqueue(undefined);
+ if (i >= 3) {
+ c.close();
+ }
+ i += 1;
+ }
+ });
+
+ const chunks = [];
+ for await (const chunk of s) {
+ chunks.push(chunk);
+ }
+ assert_array_equals(chunks, [undefined, undefined, undefined]);
+}, 'Async-iterating a pull source with undefined values');
+
+promise_test(async () => {
+ let i = 1;
+ const s = recordingReadableStream({
+ pull(c) {
+ c.enqueue(i);
+ if (i >= 3) {
+ c.close();
+ }
+ i += 1;
+ },
+ }, new CountQueuingStrategy({ highWaterMark: 0 }));
+
+ const it = s.values();
+ assert_array_equals(s.events, []);
+
+ const read1 = await it.next();
+ assert_iter_result(read1, 1, false);
+ assert_array_equals(s.events, ['pull']);
+
+ const read2 = await it.next();
+ assert_iter_result(read2, 2, false);
+ assert_array_equals(s.events, ['pull', 'pull']);
+
+ const read3 = await it.next();
+ assert_iter_result(read3, 3, false);
+ assert_array_equals(s.events, ['pull', 'pull', 'pull']);
+
+ const read4 = await it.next();
+ assert_iter_result(read4, undefined, true);
+ assert_array_equals(s.events, ['pull', 'pull', 'pull']);
+}, 'Async-iterating a pull source manually');
+
+promise_test(async () => {
+ const s = new ReadableStream({
+ start(c) {
+ c.error('e');
+ },
+ });
+
+ try {
+ for await (const chunk of s) {}
+ assert_unreached();
+ } catch (e) {
+ assert_equals(e, 'e');
+ }
+}, 'Async-iterating an errored stream throws');
+
+promise_test(async () => {
+ const s = new ReadableStream({
+ start(c) {
+ c.close();
+ }
+ });
+
+ for await (const chunk of s) {
+ assert_unreached();
+ }
+}, 'Async-iterating a closed stream never executes the loop body, but works fine');
+
+promise_test(async () => {
+ const s = new ReadableStream();
+
+ const loop = async () => {
+ for await (const chunk of s) {
+ assert_unreached();
+ }
+ assert_unreached();
+ };
+
+ await Promise.race([
+ loop(),
+ flushAsyncEvents()
+ ]);
+}, 'Async-iterating an empty but not closed/errored stream never executes the loop body and stalls the async function');
+
+promise_test(async () => {
+ const s = new ReadableStream({
+ start(c) {
+ c.enqueue(1);
+ c.enqueue(2);
+ c.enqueue(3);
+ c.close();
+ },
+ });
+
+ const reader = s.getReader();
+ const readResult = await reader.read();
+ assert_iter_result(readResult, 1, false);
+ reader.releaseLock();
+
+ const chunks = [];
+ for await (const chunk of s) {
+ chunks.push(chunk);
+ }
+ assert_array_equals(chunks, [2, 3]);
+}, 'Async-iterating a partially consumed stream');
+
+for (const type of ['throw', 'break', 'return']) {
+ for (const preventCancel of [false, true]) {
+ promise_test(async () => {
+ const s = recordingReadableStream({
+ start(c) {
+ c.enqueue(0);
+ }
+ });
+
+ // use a separate function for the loop body so return does not stop the test
+ const loop = async () => {
+ for await (const c of s.values({ preventCancel })) {
+ if (type === 'throw') {
+ throw new Error();
+ } else if (type === 'break') {
+ break;
+ } else if (type === 'return') {
+ return;
+ }
+ }
+ };
+
+ try {
+ await loop();
+ } catch (e) {}
+
+ if (preventCancel) {
+ assert_array_equals(s.events, ['pull'], `cancel() should not be called`);
+ } else {
+ assert_array_equals(s.events, ['pull', 'cancel', undefined], `cancel() should be called`);
+ }
+ }, `Cancellation behavior when ${type}ing inside loop body; preventCancel = ${preventCancel}`);
+ }
+}
+
+for (const preventCancel of [false, true]) {
+ promise_test(async () => {
+ const s = recordingReadableStream({
+ start(c) {
+ c.enqueue(0);
+ }
+ });
+
+ const it = s.values({ preventCancel });
+ await it.return();
+
+ if (preventCancel) {
+ assert_array_equals(s.events, [], `cancel() should not be called`);
+ } else {
+ assert_array_equals(s.events, ['cancel', undefined], `cancel() should be called`);
+ }
+ }, `Cancellation behavior when manually calling return(); preventCancel = ${preventCancel}`);
+}
+
+promise_test(async t => {
+ let timesPulled = 0;
+ const s = new ReadableStream({
+ pull(c) {
+ if (timesPulled === 0) {
+ c.enqueue(0);
+ ++timesPulled;
+ } else {
+ c.error(error1);
+ }
+ }
+ });
+
+ const it = s[Symbol.asyncIterator]();
+
+ const iterResult1 = await it.next();
+ assert_iter_result(iterResult1, 0, false, '1st next()');
+
+ await promise_rejects_exactly(t, error1, it.next(), '2nd next()');
+}, 'next() rejects if the stream errors');
+
+promise_test(async () => {
+ let timesPulled = 0;
+ const s = new ReadableStream({
+ pull(c) {
+ if (timesPulled === 0) {
+ c.enqueue(0);
+ ++timesPulled;
+ } else {
+ c.error(error1);
+ }
+ }
+ });
+
+ const it = s[Symbol.asyncIterator]();
+
+ const iterResult = await it.return('return value');
+ assert_iter_result(iterResult, 'return value', true);
+}, 'return() does not rejects if the stream has not errored yet');
+
+promise_test(async t => {
+ let timesPulled = 0;
+ const s = new ReadableStream({
+ pull(c) {
+ // Do not error in start() because doing so would prevent acquiring a reader/async iterator.
+ c.error(error1);
+ }
+ });
+
+ const it = s[Symbol.asyncIterator]();
+
+ await flushAsyncEvents();
+ await promise_rejects_exactly(t, error1, it.return('return value'));
+}, 'return() rejects if the stream has errored');
+
+promise_test(async t => {
+ let timesPulled = 0;
+ const s = new ReadableStream({
+ pull(c) {
+ if (timesPulled === 0) {
+ c.enqueue(0);
+ ++timesPulled;
+ } else {
+ c.error(error1);
+ }
+ }
+ });
+
+ const it = s[Symbol.asyncIterator]();
+
+ const iterResult1 = await it.next();
+ assert_iter_result(iterResult1, 0, false, '1st next()');
+
+ await promise_rejects_exactly(t, error1, it.next(), '2nd next()');
+
+ const iterResult3 = await it.next();
+ assert_iter_result(iterResult3, undefined, true, '3rd next()');
+}, 'next() that succeeds; next() that reports an error; next()');
+
+promise_test(async () => {
+ let timesPulled = 0;
+ const s = new ReadableStream({
+ pull(c) {
+ if (timesPulled === 0) {
+ c.enqueue(0);
+ ++timesPulled;
+ } else {
+ c.error(error1);
+ }
+ }
+ });
+
+ const it = s[Symbol.asyncIterator]();
+
+ const iterResults = await Promise.allSettled([it.next(), it.next(), it.next()]);
+
+ assert_equals(iterResults[0].status, 'fulfilled', '1st next() promise status');
+ assert_iter_result(iterResults[0].value, 0, false, '1st next()');
+
+ assert_equals(iterResults[1].status, 'rejected', '2nd next() promise status');
+ assert_equals(iterResults[1].reason, error1, '2nd next() rejection reason');
+
+ assert_equals(iterResults[2].status, 'fulfilled', '3rd next() promise status');
+ assert_iter_result(iterResults[2].value, undefined, true, '3rd next()');
+}, 'next() that succeeds; next() that reports an error(); next() [no awaiting]');
+
+promise_test(async t => {
+ let timesPulled = 0;
+ const s = new ReadableStream({
+ pull(c) {
+ if (timesPulled === 0) {
+ c.enqueue(0);
+ ++timesPulled;
+ } else {
+ c.error(error1);
+ }
+ }
+ });
+
+ const it = s[Symbol.asyncIterator]();
+
+ const iterResult1 = await it.next();
+ assert_iter_result(iterResult1, 0, false, '1st next()');
+
+ await promise_rejects_exactly(t, error1, it.next(), '2nd next()');
+
+ const iterResult3 = await it.return('return value');
+ assert_iter_result(iterResult3, 'return value', true, 'return()');
+}, 'next() that succeeds; next() that reports an error(); return()');
+
+promise_test(async () => {
+ let timesPulled = 0;
+ const s = new ReadableStream({
+ pull(c) {
+ if (timesPulled === 0) {
+ c.enqueue(0);
+ ++timesPulled;
+ } else {
+ c.error(error1);
+ }
+ }
+ });
+
+ const it = s[Symbol.asyncIterator]();
+
+ const iterResults = await Promise.allSettled([it.next(), it.next(), it.return('return value')]);
+
+ assert_equals(iterResults[0].status, 'fulfilled', '1st next() promise status');
+ assert_iter_result(iterResults[0].value, 0, false, '1st next()');
+
+ assert_equals(iterResults[1].status, 'rejected', '2nd next() promise status');
+ assert_equals(iterResults[1].reason, error1, '2nd next() rejection reason');
+
+ assert_equals(iterResults[2].status, 'fulfilled', 'return() promise status');
+ assert_iter_result(iterResults[2].value, 'return value', true, 'return()');
+}, 'next() that succeeds; next() that reports an error(); return() [no awaiting]');
+
+promise_test(async () => {
+ let timesPulled = 0;
+ const s = new ReadableStream({
+ pull(c) {
+ c.enqueue(timesPulled);
+ ++timesPulled;
+ }
+ });
+ const it = s[Symbol.asyncIterator]();
+
+ const iterResult1 = await it.next();
+ assert_iter_result(iterResult1, 0, false, 'next()');
+
+ const iterResult2 = await it.return('return value');
+ assert_iter_result(iterResult2, 'return value', true, 'return()');
+
+ assert_equals(timesPulled, 2);
+}, 'next() that succeeds; return()');
+
+promise_test(async () => {
+ let timesPulled = 0;
+ const s = new ReadableStream({
+ pull(c) {
+ c.enqueue(timesPulled);
+ ++timesPulled;
+ }
+ });
+ const it = s[Symbol.asyncIterator]();
+
+ const iterResults = await Promise.allSettled([it.next(), it.return('return value')]);
+
+ assert_equals(iterResults[0].status, 'fulfilled', 'next() promise status');
+ assert_iter_result(iterResults[0].value, 0, false, 'next()');
+
+ assert_equals(iterResults[1].status, 'fulfilled', 'return() promise status');
+ assert_iter_result(iterResults[1].value, 'return value', true, 'return()');
+
+ assert_equals(timesPulled, 2);
+}, 'next() that succeeds; return() [no awaiting]');
+
+promise_test(async () => {
+ const rs = new ReadableStream();
+ const it = rs.values();
+
+ const iterResult1 = await it.return('return value');
+ assert_iter_result(iterResult1, 'return value', true, 'return()');
+
+ const iterResult2 = await it.next();
+ assert_iter_result(iterResult2, undefined, true, 'next()');
+}, 'return(); next()');
+
+promise_test(async () => {
+ const rs = new ReadableStream();
+ const it = rs.values();
+
+ const iterResults = await Promise.allSettled([it.return('return value'), it.next()]);
+
+ assert_equals(iterResults[0].status, 'fulfilled', 'return() promise status');
+ assert_iter_result(iterResults[0].value, 'return value', true, 'return()');
+
+ assert_equals(iterResults[1].status, 'fulfilled', 'next() promise status');
+ assert_iter_result(iterResults[1].value, undefined, true, 'next()');
+}, 'return(); next() [no awaiting]');
+
+promise_test(async () => {
+ const rs = new ReadableStream();
+ const it = rs.values();
+
+ const iterResult1 = await it.return('return value 1');
+ assert_iter_result(iterResult1, 'return value 1', true, '1st return()');
+
+ const iterResult2 = await it.return('return value 2');
+  assert_iter_result(iterResult2, 'return value 2', true, '2nd return()');
+}, 'return(); return()');
+
+promise_test(async () => {
+ const rs = new ReadableStream();
+ const it = rs.values();
+
+ const iterResults = await Promise.allSettled([it.return('return value 1'), it.return('return value 2')]);
+
+ assert_equals(iterResults[0].status, 'fulfilled', '1st return() promise status');
+ assert_iter_result(iterResults[0].value, 'return value 1', true, '1st return()');
+
+ assert_equals(iterResults[1].status, 'fulfilled', '2nd return() promise status');
+  assert_iter_result(iterResults[1].value, 'return value 2', true, '2nd return()');
+}, 'return(); return() [no awaiting]');
+
+test(() => {
+ const s = new ReadableStream({
+ start(c) {
+ c.enqueue(0);
+ c.close();
+ },
+ });
+ s.values();
+ assert_throws_js(TypeError, () => s.values(), 'values() should throw');
+}, 'values() throws if there\'s already a lock');
+
+promise_test(async () => {
+ const s = new ReadableStream({
+ start(c) {
+ c.enqueue(1);
+ c.enqueue(2);
+ c.enqueue(3);
+ c.close();
+ }
+ });
+
+ const chunks = [];
+ for await (const chunk of s) {
+ chunks.push(chunk);
+ }
+ assert_array_equals(chunks, [1, 2, 3]);
+
+ const reader = s.getReader();
+ await reader.closed;
+}, 'Acquiring a reader after exhaustively async-iterating a stream');
+
+promise_test(async t => {
+ let timesPulled = 0;
+ const s = new ReadableStream({
+ pull(c) {
+ if (timesPulled === 0) {
+ c.enqueue(0);
+ ++timesPulled;
+ } else {
+ c.error(error1);
+ }
+ }
+ });
+
+ const it = s[Symbol.asyncIterator]({ preventCancel: true });
+
+ const iterResult1 = await it.next();
+ assert_iter_result(iterResult1, 0, false, '1st next()');
+
+ await promise_rejects_exactly(t, error1, it.next(), '2nd next()');
+
+ const iterResult2 = await it.return('return value');
+ assert_iter_result(iterResult2, 'return value', true, 'return()');
+
+ // i.e. it should not reject with a generic "this stream is locked" TypeError.
+ const reader = s.getReader();
+ await promise_rejects_exactly(t, error1, reader.closed, 'closed on the new reader should reject with the error');
+}, 'Acquiring a reader after return()ing from a stream that errors');
+
+promise_test(async () => {
+ const s = new ReadableStream({
+ start(c) {
+ c.enqueue(1);
+ c.enqueue(2);
+ c.enqueue(3);
+ c.close();
+ },
+ });
+
+ // read the first two chunks, then cancel
+ const chunks = [];
+ for await (const chunk of s) {
+ chunks.push(chunk);
+ if (chunk >= 2) {
+ break;
+ }
+ }
+ assert_array_equals(chunks, [1, 2]);
+
+ const reader = s.getReader();
+ await reader.closed;
+}, 'Acquiring a reader after partially async-iterating a stream');
+
+promise_test(async () => {
+ const s = new ReadableStream({
+ start(c) {
+ c.enqueue(1);
+ c.enqueue(2);
+ c.enqueue(3);
+ c.close();
+ },
+ });
+
+ // read the first two chunks, then release lock
+ const chunks = [];
+ for await (const chunk of s.values({preventCancel: true})) {
+ chunks.push(chunk);
+ if (chunk >= 2) {
+ break;
+ }
+ }
+ assert_array_equals(chunks, [1, 2]);
+
+ const reader = s.getReader();
+ const readResult = await reader.read();
+ assert_iter_result(readResult, 3, false);
+ await reader.closed;
+}, 'Acquiring a reader and reading the remaining chunks after partially async-iterating a stream with preventCancel = true');
+
+for (const preventCancel of [false, true]) {
+ test(() => {
+ const rs = new ReadableStream();
+ rs.values({ preventCancel }).return();
+ // The test passes if this line doesn't throw.
+ rs.getReader();
+ }, `return() should unlock the stream synchronously when preventCancel = ${preventCancel}`);
+}
+
+promise_test(async () => {
+ const rs = new ReadableStream({
+ async start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ c.enqueue('c');
+ await flushAsyncEvents();
+ // At this point, the async iterator has a read request in the stream's queue for its pending next() promise.
+ // Closing the stream now causes two things to happen *synchronously*:
+ // 1. ReadableStreamClose resolves reader.[[closedPromise]] with undefined.
+ // 2. ReadableStreamClose calls the read request's close steps, which calls ReadableStreamReaderGenericRelease,
+ // which replaces reader.[[closedPromise]] with a rejected promise.
+ c.close();
+ }
+ });
+
+ const chunks = [];
+ for await (const chunk of rs) {
+ chunks.push(chunk);
+ }
+ assert_array_equals(chunks, ['a', 'b', 'c']);
+}, 'close() while next() is pending');
diff --git a/testing/web-platform/tests/streams/readable-streams/bad-strategies.any.js b/testing/web-platform/tests/streams/readable-streams/bad-strategies.any.js
new file mode 100644
index 0000000000..49fa4bdbec
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/bad-strategies.any.js
@@ -0,0 +1,198 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+test(() => {
+
+ const theError = new Error('a unique string');
+
+ assert_throws_exactly(theError, () => {
+ new ReadableStream({}, {
+ get size() {
+ throw theError;
+ },
+ highWaterMark: 5
+ });
+ }, 'construction should re-throw the error');
+
+}, 'Readable stream: throwing strategy.size getter');
+
+promise_test(t => {
+
+ const controllerError = { name: 'controller error' };
+ const thrownError = { name: 'thrown error' };
+
+ let controller;
+ const rs = new ReadableStream(
+ {
+ start(c) {
+ controller = c;
+ }
+ },
+ {
+ size() {
+ controller.error(controllerError);
+ throw thrownError;
+ },
+ highWaterMark: 5
+ }
+ );
+
+ assert_throws_exactly(thrownError, () => controller.enqueue('a'), 'enqueue should re-throw the error');
+
+ return promise_rejects_exactly(t, controllerError, rs.getReader().closed);
+
+}, 'Readable stream: strategy.size errors the stream and then throws');
+
+promise_test(t => {
+
+ const theError = { name: 'my error' };
+
+ let controller;
+ const rs = new ReadableStream(
+ {
+ start(c) {
+ controller = c;
+ }
+ },
+ {
+ size() {
+ controller.error(theError);
+ return Infinity;
+ },
+ highWaterMark: 5
+ }
+ );
+
+ assert_throws_js(RangeError, () => controller.enqueue('a'), 'enqueue should throw a RangeError');
+
+ return promise_rejects_exactly(t, theError, rs.getReader().closed, 'closed should reject with the error');
+
+}, 'Readable stream: strategy.size errors the stream and then returns Infinity');
+
+promise_test(() => {
+
+ const theError = new Error('a unique string');
+ const rs = new ReadableStream(
+ {
+ start(c) {
+ assert_throws_exactly(theError, () => c.enqueue('a'), 'enqueue should throw the error');
+ }
+ },
+ {
+ size() {
+ throw theError;
+ },
+ highWaterMark: 5
+ }
+ );
+
+ return rs.getReader().closed.catch(e => {
+ assert_equals(e, theError, 'closed should reject with the error');
+ });
+
+}, 'Readable stream: throwing strategy.size method');
+
+test(() => {
+
+ const theError = new Error('a unique string');
+
+ assert_throws_exactly(theError, () => {
+ new ReadableStream({}, {
+ size() {
+ return 1;
+ },
+ get highWaterMark() {
+ throw theError;
+ }
+ });
+ }, 'construction should re-throw the error');
+
+}, 'Readable stream: throwing strategy.highWaterMark getter');
+
+test(() => {
+
+ for (const highWaterMark of [-1, -Infinity, NaN, 'foo', {}]) {
+ assert_throws_js(RangeError, () => {
+ new ReadableStream({}, {
+ size() {
+ return 1;
+ },
+ highWaterMark
+ });
+ }, 'construction should throw a RangeError for ' + highWaterMark);
+ }
+
+}, 'Readable stream: invalid strategy.highWaterMark');
+
+promise_test(() => {
+
+ const promises = [];
+ for (const size of [NaN, -Infinity, Infinity, -1]) {
+ let theError;
+ const rs = new ReadableStream(
+ {
+ start(c) {
+ try {
+ c.enqueue('hi');
+ assert_unreached('enqueue didn\'t throw');
+ } catch (error) {
+ assert_equals(error.name, 'RangeError', 'enqueue should throw a RangeError for ' + size);
+ theError = error;
+ }
+ }
+ },
+ {
+ size() {
+ return size;
+ },
+ highWaterMark: 5
+ }
+ );
+
+ promises.push(rs.getReader().closed.then(() => {
+ assert_unreached('closed didn\'t throw');
+ }, e => {
+ assert_equals(e, theError, 'closed should reject with the error for ' + size);
+ }));
+ }
+
+ return Promise.all(promises);
+
+}, 'Readable stream: invalid strategy.size return value');
+
+promise_test(() => {
+
+ const promises = [];
+ for (const size of [NaN, -Infinity, Infinity, -1]) {
+ let theError;
+ const rs = new ReadableStream(
+ {
+ pull(c) {
+ try {
+ c.enqueue('hi');
+ assert_unreached('enqueue didn\'t throw');
+ } catch (error) {
+ assert_equals(error.name, 'RangeError', 'enqueue should throw a RangeError for ' + size);
+ theError = error;
+ }
+ }
+ },
+ {
+ size() {
+ return size;
+ },
+ highWaterMark: 5
+ }
+ );
+
+ promises.push(rs.getReader().closed.then(() => {
+ assert_unreached('closed didn\'t throw');
+ }, e => {
+ assert_equals(e, theError, 'closed should reject with the error for ' + size);
+ }));
+ }
+
+ return Promise.all(promises);
+
+}, 'Readable stream: invalid strategy.size return value when pulling');
+
diff --git a/testing/web-platform/tests/streams/readable-streams/bad-underlying-sources.any.js b/testing/web-platform/tests/streams/readable-streams/bad-underlying-sources.any.js
new file mode 100644
index 0000000000..3d77b923d1
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/bad-underlying-sources.any.js
@@ -0,0 +1,400 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+
+test(() => {
+
+ const theError = new Error('a unique string');
+
+ assert_throws_exactly(theError, () => {
+ new ReadableStream({
+ get start() {
+ throw theError;
+ }
+ });
+ }, 'constructing the stream should re-throw the error');
+
+}, 'Underlying source start: throwing getter');
+
+
+test(() => {
+
+ const theError = new Error('a unique string');
+
+ assert_throws_exactly(theError, () => {
+ new ReadableStream({
+ start() {
+ throw theError;
+ }
+ });
+ }, 'constructing the stream should re-throw the error');
+
+}, 'Underlying source start: throwing method');
+
+
+test(() => {
+
+ const theError = new Error('a unique string');
+ assert_throws_exactly(theError, () => new ReadableStream({
+ get pull() {
+ throw theError;
+ }
+ }), 'constructor should throw');
+
+}, 'Underlying source: throwing pull getter (initial pull)');
+
+
+promise_test(t => {
+
+ const theError = new Error('a unique string');
+ const rs = new ReadableStream({
+ pull() {
+ throw theError;
+ }
+ });
+
+ return promise_rejects_exactly(t, theError, rs.getReader().closed);
+
+}, 'Underlying source: throwing pull method (initial pull)');
+
+
+promise_test(t => {
+
+ const theError = new Error('a unique string');
+
+ let counter = 0;
+ const rs = new ReadableStream({
+ get pull() {
+ ++counter;
+ if (counter === 1) {
+ return c => c.enqueue('a');
+ }
+
+ throw theError;
+ }
+ });
+ const reader = rs.getReader();
+
+ return Promise.all([
+ reader.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'the first chunk read should be correct');
+ }),
+ reader.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'the second chunk read should be correct');
+ assert_equals(counter, 1, 'counter should be 1');
+ })
+ ]);
+
+}, 'Underlying source pull: throwing getter (second pull does not result in a second get)');
+
+promise_test(t => {
+
+ const theError = new Error('a unique string');
+
+ let counter = 0;
+ const rs = new ReadableStream({
+ pull(c) {
+ ++counter;
+ if (counter === 1) {
+ c.enqueue('a');
+ return;
+ }
+
+ throw theError;
+ }
+ });
+ const reader = rs.getReader();
+
+ return Promise.all([
+ reader.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'the chunk read should be correct');
+ }),
+ promise_rejects_exactly(t, theError, reader.closed)
+ ]);
+
+}, 'Underlying source pull: throwing method (second pull)');
+
+test(() => {
+
+ const theError = new Error('a unique string');
+ assert_throws_exactly(theError, () => new ReadableStream({
+ get cancel() {
+ throw theError;
+ }
+ }), 'constructor should throw');
+
+}, 'Underlying source cancel: throwing getter');
+
+promise_test(t => {
+
+ const theError = new Error('a unique string');
+ const rs = new ReadableStream({
+ cancel() {
+ throw theError;
+ }
+ });
+
+ return promise_rejects_exactly(t, theError, rs.cancel());
+
+}, 'Underlying source cancel: throwing method');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ rs.cancel();
+ assert_throws_js(TypeError, () => controller.enqueue('a'), 'Calling enqueue after canceling should throw');
+
+ return rs.getReader().closed;
+
+}, 'Underlying source: calling enqueue on an empty canceled stream should throw');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ controller = c;
+ }
+ });
+
+ rs.cancel();
+ assert_throws_js(TypeError, () => controller.enqueue('c'), 'Calling enqueue after canceling should throw');
+
+ return rs.getReader().closed;
+
+}, 'Underlying source: calling enqueue on a non-empty canceled stream should throw');
+
+promise_test(() => {
+
+ return new ReadableStream({
+ start(c) {
+ c.close();
+ assert_throws_js(TypeError, () => c.enqueue('a'), 'call to enqueue should throw a TypeError');
+ }
+ }).getReader().closed;
+
+}, 'Underlying source: calling enqueue on a closed stream should throw');
+
+promise_test(t => {
+
+ const theError = new Error('boo');
+ const closed = new ReadableStream({
+ start(c) {
+ c.error(theError);
+ assert_throws_js(TypeError, () => c.enqueue('a'), 'call to enqueue should throw the error');
+ }
+ }).getReader().closed;
+
+ return promise_rejects_exactly(t, theError, closed);
+
+}, 'Underlying source: calling enqueue on an errored stream should throw');
+
+promise_test(() => {
+
+ return new ReadableStream({
+ start(c) {
+ c.close();
+ assert_throws_js(TypeError, () => c.close(), 'second call to close should throw a TypeError');
+ }
+ }).getReader().closed;
+
+}, 'Underlying source: calling close twice on an empty stream should throw the second time');
+
+promise_test(() => {
+
+ let startCalled = false;
+ let readCalled = false;
+ const reader = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.close();
+ assert_throws_js(TypeError, () => c.close(), 'second call to close should throw a TypeError');
+ startCalled = true;
+ }
+ }).getReader();
+
+ return Promise.all([
+ reader.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'read() should read the enqueued chunk');
+ readCalled = true;
+ }),
+ reader.closed.then(() => {
+ assert_true(startCalled);
+ assert_true(readCalled);
+ })
+ ]);
+
+}, 'Underlying source: calling close twice on a non-empty stream should throw the second time');
+
+promise_test(() => {
+
+ let controller;
+ let startCalled = false;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ startCalled = true;
+ }
+ });
+
+ rs.cancel();
+ assert_throws_js(TypeError, () => controller.close(), 'Calling close after canceling should throw');
+
+ return rs.getReader().closed.then(() => {
+ assert_true(startCalled);
+ });
+
+}, 'Underlying source: calling close on an empty canceled stream should throw');
+
+promise_test(() => {
+
+ let controller;
+ let startCalled = false;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ c.enqueue('a');
+ startCalled = true;
+ }
+ });
+
+ rs.cancel();
+ assert_throws_js(TypeError, () => controller.close(), 'Calling close after canceling should throw');
+
+ return rs.getReader().closed.then(() => {
+ assert_true(startCalled);
+ });
+
+}, 'Underlying source: calling close on a non-empty canceled stream should throw');
+
+promise_test(() => {
+
+ const theError = new Error('boo');
+ let startCalled = false;
+
+ const closed = new ReadableStream({
+ start(c) {
+ c.error(theError);
+ assert_throws_js(TypeError, () => c.close(), 'call to close should throw a TypeError');
+ startCalled = true;
+ }
+ }).getReader().closed;
+
+ return closed.catch(e => {
+ assert_true(startCalled);
+ assert_equals(e, theError, 'closed should reject with the error');
+ });
+
+}, 'Underlying source: calling close after error should throw');
+
+promise_test(() => {
+
+ const theError = new Error('boo');
+ let startCalled = false;
+
+ const closed = new ReadableStream({
+ start(c) {
+ c.error(theError);
+ c.error();
+ startCalled = true;
+ }
+ }).getReader().closed;
+
+ return closed.catch(e => {
+ assert_true(startCalled);
+ assert_equals(e, theError, 'closed should reject with the error');
+ });
+
+}, 'Underlying source: calling error twice should not throw');
+
+promise_test(() => {
+
+ let startCalled = false;
+
+ const closed = new ReadableStream({
+ start(c) {
+ c.close();
+ c.error();
+ startCalled = true;
+ }
+ }).getReader().closed;
+
+ return closed.then(() => assert_true(startCalled));
+
+}, 'Underlying source: calling error after close should not throw');
+
+promise_test(() => {
+
+ let startCalled = false;
+ const firstError = new Error('1');
+ const secondError = new Error('2');
+
+ const closed = new ReadableStream({
+ start(c) {
+ c.error(firstError);
+ startCalled = true;
+ return Promise.reject(secondError);
+ }
+ }).getReader().closed;
+
+ return closed.catch(e => {
+ assert_true(startCalled);
+ assert_equals(e, firstError, 'closed should reject with the first error');
+ });
+
+}, 'Underlying source: calling error and returning a rejected promise from start should cause the stream to error ' +
+ 'with the first error');
+
+promise_test(() => {
+
+ let startCalled = false;
+ const firstError = new Error('1');
+ const secondError = new Error('2');
+
+ const closed = new ReadableStream({
+ pull(c) {
+ c.error(firstError);
+ startCalled = true;
+ return Promise.reject(secondError);
+ }
+ }).getReader().closed;
+
+ return closed.catch(e => {
+ assert_true(startCalled);
+ assert_equals(e, firstError, 'closed should reject with the first error');
+ });
+
+}, 'Underlying source: calling error and returning a rejected promise from pull should cause the stream to error ' +
+ 'with the first error');
+
+const error1 = { name: 'error1' };
+
+promise_test(t => {
+
+ let pullShouldThrow = false;
+ const rs = new ReadableStream({
+ pull(controller) {
+ if (pullShouldThrow) {
+ throw error1;
+ }
+ controller.enqueue(0);
+ }
+ }, new CountQueuingStrategy({highWaterMark: 1}));
+ const reader = rs.getReader();
+ return Promise.resolve().then(() => {
+ pullShouldThrow = true;
+ return Promise.all([
+ reader.read(),
+ promise_rejects_exactly(t, error1, reader.closed, '.closed promise should reject')
+ ]);
+ });
+
+}, 'read should not error if it dequeues and pull() throws');
diff --git a/testing/web-platform/tests/streams/readable-streams/cancel.any.js b/testing/web-platform/tests/streams/readable-streams/cancel.any.js
new file mode 100644
index 0000000000..9915c1fb63
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/cancel.any.js
@@ -0,0 +1,236 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/rs-utils.js
+'use strict';
+
+promise_test(t => {
+
+ const randomSource = new RandomPushSource();
+
+ let cancellationFinished = false;
+ const rs = new ReadableStream({
+ start(c) {
+ randomSource.ondata = c.enqueue.bind(c);
+ randomSource.onend = c.close.bind(c);
+ randomSource.onerror = c.error.bind(c);
+ },
+
+ pull() {
+ randomSource.readStart();
+ },
+
+ cancel() {
+ randomSource.readStop();
+
+ return new Promise(resolve => {
+ t.step_timeout(() => {
+ cancellationFinished = true;
+ resolve();
+ }, 1);
+ });
+ }
+ });
+
+ const reader = rs.getReader();
+
+ // We call delay multiple times to avoid cancelling too early for the
+ // source to enqueue at least one chunk.
+ const cancel = delay(5).then(() => delay(5)).then(() => delay(5)).then(() => {
+ const cancelPromise = reader.cancel();
+ assert_false(cancellationFinished, 'cancellation in source should happen later');
+ return cancelPromise;
+ });
+
+ return readableStreamToArray(rs, reader).then(chunks => {
+ assert_greater_than(chunks.length, 0, 'at least one chunk should be read');
+ for (let i = 0; i < chunks.length; i++) {
+ assert_equals(chunks[i].length, 128, 'chunk ' + i + ' should have 128 bytes');
+ }
+ return cancel;
+ }).then(() => {
+ assert_true(cancellationFinished, 'it returns a promise that is fulfilled when the cancellation finishes');
+ });
+
+}, 'ReadableStream cancellation: integration test on an infinite stream derived from a random push source');
+
+test(() => {
+
+ let recordedReason;
+ const rs = new ReadableStream({
+ cancel(reason) {
+ recordedReason = reason;
+ }
+ });
+
+ const passedReason = new Error('Sorry, it just wasn\'t meant to be.');
+ rs.cancel(passedReason);
+
+ assert_equals(recordedReason, passedReason,
+ 'the error passed to the underlying source\'s cancel method should equal the one passed to the stream\'s cancel');
+
+}, 'ReadableStream cancellation: cancel(reason) should pass through the given reason to the underlying source');
+
+promise_test(() => {
+
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.close();
+ },
+ cancel() {
+ assert_unreached('underlying source cancel() should not have been called');
+ }
+ });
+
+ const reader = rs.getReader();
+
+ return rs.cancel().then(() => {
+ assert_unreached('cancel() should be rejected');
+ }, e => {
+ assert_equals(e.name, 'TypeError', 'cancel() should be rejected with a TypeError');
+ }).then(() => {
+ return reader.read();
+ }).then(result => {
+ assert_object_equals(result, { value: 'a', done: false }, 'read() should still work after the attempted cancel');
+ return reader.closed;
+ });
+
+}, 'ReadableStream cancellation: cancel() on a locked stream should fail and not call the underlying source cancel');
+
+promise_test(() => {
+
+ let cancelReceived = false;
+ const cancelReason = new Error('I am tired of this stream, I prefer to cancel it');
+ const rs = new ReadableStream({
+ cancel(reason) {
+ cancelReceived = true;
+ assert_equals(reason, cancelReason, 'cancellation reason given to the underlying source should be equal to the one passed');
+ }
+ });
+
+ return rs.cancel(cancelReason).then(() => {
+ assert_true(cancelReceived);
+ });
+
+}, 'ReadableStream cancellation: should fulfill promise when cancel callback went fine');
+
+promise_test(() => {
+
+ const rs = new ReadableStream({
+ cancel() {
+ return 'Hello';
+ }
+ });
+
+ return rs.cancel().then(v => {
+ assert_equals(v, undefined, 'cancel() return value should be fulfilled with undefined');
+ });
+
+}, 'ReadableStream cancellation: returning a value from the underlying source\'s cancel should not affect the fulfillment value of the promise returned by the stream\'s cancel');
+
+promise_test(() => {
+
+ const thrownError = new Error('test');
+ let cancelCalled = false;
+
+ const rs = new ReadableStream({
+ cancel() {
+ cancelCalled = true;
+ throw thrownError;
+ }
+ });
+
+ return rs.cancel('test').then(() => {
+ assert_unreached('cancel should reject');
+ }, e => {
+ assert_true(cancelCalled);
+ assert_equals(e, thrownError);
+ });
+
+}, 'ReadableStream cancellation: should reject promise when cancel callback raises an exception');
+
+promise_test(() => {
+
+ const cancelReason = new Error('test');
+
+ const rs = new ReadableStream({
+ cancel(error) {
+ assert_equals(error, cancelReason);
+ return delay(1);
+ }
+ });
+
+ return rs.cancel(cancelReason);
+
+}, 'ReadableStream cancellation: if the underlying source\'s cancel method returns a promise, the promise returned by the stream\'s cancel should fulfill when that one does (1)');
+
+promise_test(t => {
+
+ let resolveSourceCancelPromise;
+ let sourceCancelPromiseHasFulfilled = false;
+
+ const rs = new ReadableStream({
+ cancel() {
+ const sourceCancelPromise = new Promise(resolve => resolveSourceCancelPromise = resolve);
+
+ sourceCancelPromise.then(() => {
+ sourceCancelPromiseHasFulfilled = true;
+ });
+
+ return sourceCancelPromise;
+ }
+ });
+
+ t.step_timeout(() => resolveSourceCancelPromise('Hello'), 1);
+
+ return rs.cancel().then(value => {
+ assert_true(sourceCancelPromiseHasFulfilled, 'cancel() return value should be fulfilled only after the promise returned by the underlying source\'s cancel');
+ assert_equals(value, undefined, 'cancel() return value should be fulfilled with undefined');
+ });
+
+}, 'ReadableStream cancellation: if the underlying source\'s cancel method returns a promise, the promise returned by the stream\'s cancel should fulfill when that one does (2)');
+
+promise_test(t => {
+
+ let rejectSourceCancelPromise;
+ let sourceCancelPromiseHasRejected = false;
+
+ const rs = new ReadableStream({
+ cancel() {
+ const sourceCancelPromise = new Promise((resolve, reject) => rejectSourceCancelPromise = reject);
+
+ sourceCancelPromise.catch(() => {
+ sourceCancelPromiseHasRejected = true;
+ });
+
+ return sourceCancelPromise;
+ }
+ });
+
+ const errorInCancel = new Error('Sorry, it just wasn\'t meant to be.');
+
+ t.step_timeout(() => rejectSourceCancelPromise(errorInCancel), 1);
+
+ return rs.cancel().then(() => {
+ assert_unreached('cancel() return value should be rejected');
+ }, r => {
+ assert_true(sourceCancelPromiseHasRejected, 'cancel() return value should be rejected only after the promise returned by the underlying source\'s cancel');
+ assert_equals(r, errorInCancel, 'cancel() return value should be rejected with the underlying source\'s rejection reason');
+ });
+
+}, 'ReadableStream cancellation: if the underlying source\'s cancel method returns a promise, the promise returned by the stream\'s cancel should reject when that one does');
+
+promise_test(() => {
+
+ const rs = new ReadableStream({
+ start() {
+ return new Promise(() => {});
+ },
+ pull() {
+ assert_unreached('pull should not have been called');
+ }
+ });
+
+ return Promise.all([rs.cancel(), rs.getReader().closed]);
+
+}, 'ReadableStream cancellation: cancelling before start finishes should prevent pull() from being called');
diff --git a/testing/web-platform/tests/streams/readable-streams/constructor.any.js b/testing/web-platform/tests/streams/readable-streams/constructor.any.js
new file mode 100644
index 0000000000..0b995f0cb1
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/constructor.any.js
@@ -0,0 +1,17 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+const error2 = new Error('error2');
+error2.name = 'error2';
+
+test(() => {
+ const underlyingSource = { get start() { throw error1; } };
+ const queuingStrategy = { highWaterMark: 0, get size() { throw error2; } };
+
+ // underlyingSource is converted in prose in the method body, whereas queuingStrategy is done at the IDL layer.
+ // So the queuingStrategy exception should be encountered first.
+ assert_throws_exactly(error2, () => new ReadableStream(underlyingSource, queuingStrategy));
+}, 'underlyingSource argument should be converted after queuingStrategy argument');
diff --git a/testing/web-platform/tests/streams/readable-streams/count-queuing-strategy-integration.any.js b/testing/web-platform/tests/streams/readable-streams/count-queuing-strategy-integration.any.js
new file mode 100644
index 0000000000..a8c1b91d00
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/count-queuing-strategy-integration.any.js
@@ -0,0 +1,208 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+test(() => {
+
+ new ReadableStream({}, new CountQueuingStrategy({ highWaterMark: 4 }));
+
+}, 'Can construct a readable stream with a valid CountQueuingStrategy');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream(
+ {
+ start(c) {
+ controller = c;
+ }
+ },
+ new CountQueuingStrategy({ highWaterMark: 0 })
+ );
+ const reader = rs.getReader();
+
+ assert_equals(controller.desiredSize, 0, '0 reads, 0 enqueues: desiredSize should be 0');
+ controller.enqueue('a');
+ assert_equals(controller.desiredSize, -1, '0 reads, 1 enqueue: desiredSize should be -1');
+ controller.enqueue('b');
+ assert_equals(controller.desiredSize, -2, '0 reads, 2 enqueues: desiredSize should be -2');
+ controller.enqueue('c');
+ assert_equals(controller.desiredSize, -3, '0 reads, 3 enqueues: desiredSize should be -3');
+ controller.enqueue('d');
+ assert_equals(controller.desiredSize, -4, '0 reads, 4 enqueues: desiredSize should be -4');
+
+ return reader.read()
+ .then(result => {
+ assert_object_equals(result, { value: 'a', done: false },
+ '1st read gives back the 1st chunk enqueued (queue now contains 3 chunks)');
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'b', done: false },
+ '2nd read gives back the 2nd chunk enqueued (queue now contains 2 chunks)');
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'c', done: false },
+ '3rd read gives back the 3rd chunk enqueued (queue now contains 1 chunk)');
+
+ assert_equals(controller.desiredSize, -1, '3 reads, 4 enqueues: desiredSize should be -1');
+ controller.enqueue('e');
+ assert_equals(controller.desiredSize, -2, '3 reads, 5 enqueues: desiredSize should be -2');
+
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'd', done: false },
+ '4th read gives back the 4th chunk enqueued (queue now contains 1 chunks)');
+ return reader.read();
+
+ }).then(result => {
+ assert_object_equals(result, { value: 'e', done: false },
+ '5th read gives back the 5th chunk enqueued (queue now contains 0 chunks)');
+
+ assert_equals(controller.desiredSize, 0, '5 reads, 5 enqueues: desiredSize should be 0');
+ controller.enqueue('f');
+ assert_equals(controller.desiredSize, -1, '5 reads, 6 enqueues: desiredSize should be -1');
+ controller.enqueue('g');
+ assert_equals(controller.desiredSize, -2, '5 reads, 7 enqueues: desiredSize should be -2');
+ });
+
+}, 'Correctly governs a ReadableStreamController\'s desiredSize property (HWM = 0)');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream(
+ {
+ start(c) {
+ controller = c;
+ }
+ },
+ new CountQueuingStrategy({ highWaterMark: 1 })
+ );
+ const reader = rs.getReader();
+
+ assert_equals(controller.desiredSize, 1, '0 reads, 0 enqueues: desiredSize should be 1');
+ controller.enqueue('a');
+ assert_equals(controller.desiredSize, 0, '0 reads, 1 enqueue: desiredSize should be 0');
+ controller.enqueue('b');
+ assert_equals(controller.desiredSize, -1, '0 reads, 2 enqueues: desiredSize should be -1');
+ controller.enqueue('c');
+ assert_equals(controller.desiredSize, -2, '0 reads, 3 enqueues: desiredSize should be -2');
+ controller.enqueue('d');
+ assert_equals(controller.desiredSize, -3, '0 reads, 4 enqueues: desiredSize should be -3');
+
+ return reader.read()
+ .then(result => {
+ assert_object_equals(result, { value: 'a', done: false },
+ '1st read gives back the 1st chunk enqueued (queue now contains 3 chunks)');
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'b', done: false },
+ '2nd read gives back the 2nd chunk enqueued (queue now contains 2 chunks)');
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'c', done: false },
+ '3rd read gives back the 3rd chunk enqueued (queue now contains 1 chunk)');
+
+ assert_equals(controller.desiredSize, 0, '3 reads, 4 enqueues: desiredSize should be 0');
+ controller.enqueue('e');
+ assert_equals(controller.desiredSize, -1, '3 reads, 5 enqueues: desiredSize should be -1');
+
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'd', done: false },
+ '4th read gives back the 4th chunk enqueued (queue now contains 1 chunks)');
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'e', done: false },
+ '5th read gives back the 5th chunk enqueued (queue now contains 0 chunks)');
+
+ assert_equals(controller.desiredSize, 1, '5 reads, 5 enqueues: desiredSize should be 1');
+ controller.enqueue('f');
+ assert_equals(controller.desiredSize, 0, '5 reads, 6 enqueues: desiredSize should be 0');
+ controller.enqueue('g');
+ assert_equals(controller.desiredSize, -1, '5 reads, 7 enqueues: desiredSize should be -1');
+ });
+
+}, 'Correctly governs a ReadableStreamController\'s desiredSize property (HWM = 1)');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream(
+ {
+ start(c) {
+ controller = c;
+ }
+ },
+ new CountQueuingStrategy({ highWaterMark: 4 })
+ );
+ const reader = rs.getReader();
+
+ assert_equals(controller.desiredSize, 4, '0 reads, 0 enqueues: desiredSize should be 4');
+ controller.enqueue('a');
+ assert_equals(controller.desiredSize, 3, '0 reads, 1 enqueue: desiredSize should be 3');
+ controller.enqueue('b');
+ assert_equals(controller.desiredSize, 2, '0 reads, 2 enqueues: desiredSize should be 2');
+ controller.enqueue('c');
+ assert_equals(controller.desiredSize, 1, '0 reads, 3 enqueues: desiredSize should be 1');
+ controller.enqueue('d');
+ assert_equals(controller.desiredSize, 0, '0 reads, 4 enqueues: desiredSize should be 0');
+ controller.enqueue('e');
+ assert_equals(controller.desiredSize, -1, '0 reads, 5 enqueues: desiredSize should be -1');
+ controller.enqueue('f');
+ assert_equals(controller.desiredSize, -2, '0 reads, 6 enqueues: desiredSize should be -2');
+
+
+ return reader.read()
+ .then(result => {
+ assert_object_equals(result, { value: 'a', done: false },
+ '1st read gives back the 1st chunk enqueued (queue now contains 5 chunks)');
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'b', done: false },
+ '2nd read gives back the 2nd chunk enqueued (queue now contains 4 chunks)');
+
+ assert_equals(controller.desiredSize, 0, '2 reads, 6 enqueues: desiredSize should be 0');
+ controller.enqueue('g');
+ assert_equals(controller.desiredSize, -1, '2 reads, 7 enqueues: desiredSize should be -1');
+
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'c', done: false },
+ '3rd read gives back the 3rd chunk enqueued (queue now contains 4 chunks)');
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'd', done: false },
+ '4th read gives back the 4th chunk enqueued (queue now contains 3 chunks)');
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'e', done: false },
+ '5th read gives back the 5th chunk enqueued (queue now contains 2 chunks)');
+ return reader.read();
+ })
+ .then(result => {
+ assert_object_equals(result, { value: 'f', done: false },
+ '6th read gives back the 6th chunk enqueued (queue now contains 0 chunks)');
+
+ assert_equals(controller.desiredSize, 3, '6 reads, 7 enqueues: desiredSize should be 3');
+ controller.enqueue('h');
+ assert_equals(controller.desiredSize, 2, '6 reads, 8 enqueues: desiredSize should be 2');
+ controller.enqueue('i');
+ assert_equals(controller.desiredSize, 1, '6 reads, 9 enqueues: desiredSize should be 1');
+ controller.enqueue('j');
+ assert_equals(controller.desiredSize, 0, '6 reads, 10 enqueues: desiredSize should be 0');
+ controller.enqueue('k');
+ assert_equals(controller.desiredSize, -1, '6 reads, 11 enqueues: desiredSize should be -1');
+ });
+
+}, 'Correctly governs a ReadableStreamController\'s desiredSize property (HWM = 4)');
diff --git a/testing/web-platform/tests/streams/readable-streams/crashtests/empty.js b/testing/web-platform/tests/streams/readable-streams/crashtests/empty.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/crashtests/empty.js
diff --git a/testing/web-platform/tests/streams/readable-streams/crashtests/strategy-worker-terminate.html b/testing/web-platform/tests/streams/readable-streams/crashtests/strategy-worker-terminate.html
new file mode 100644
index 0000000000..a75c3c66b6
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/crashtests/strategy-worker-terminate.html
@@ -0,0 +1,10 @@
+<!DOCTYPE html>
+<html class="test-wait">
+<meta charset="utf-8">
+<script>
+ var c = new Worker("/streams/readable-streams/crashtests/strategy-worker.js");
+ c.onmessage = () => {
+ c.terminate();
+ document.documentElement.classList.remove("test-wait");
+ }
+</script>
diff --git a/testing/web-platform/tests/streams/readable-streams/crashtests/strategy-worker.js b/testing/web-platform/tests/streams/readable-streams/crashtests/strategy-worker.js
new file mode 100644
index 0000000000..dd0ab03b55
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/crashtests/strategy-worker.js
@@ -0,0 +1,4 @@
+var b = new CountQueuingStrategy({ highWaterMark: 3 });
+
+importScripts("empty.js");
+postMessage("done");
diff --git a/testing/web-platform/tests/streams/readable-streams/cross-realm-crash.window.js b/testing/web-platform/tests/streams/readable-streams/cross-realm-crash.window.js
new file mode 100644
index 0000000000..5fc7ce37a5
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/cross-realm-crash.window.js
@@ -0,0 +1,13 @@
+// This is a repro for a crash bug that existed in Blink. See
+// https://crbug.com/1290014. If there's no crash then the test passed.
+
+test(t => {
+ const iframeTag = document.createElement('iframe');
+ document.body.appendChild(iframeTag);
+
+ const readableStream = new ReadableStream();
+ const reader = new iframeTag.contentWindow.ReadableStreamDefaultReader(readableStream);
+ iframeTag.remove();
+ reader.cancel();
+ reader.read();
+}, 'should not crash on reading from stream cancelled in destroyed realm');
diff --git a/testing/web-platform/tests/streams/readable-streams/default-reader.any.js b/testing/web-platform/tests/streams/readable-streams/default-reader.any.js
new file mode 100644
index 0000000000..f92862719e
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/default-reader.any.js
@@ -0,0 +1,539 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/rs-utils.js
+'use strict';
+
+test(() => {
+
+ assert_throws_js(TypeError, () => new ReadableStreamDefaultReader('potato'));
+ assert_throws_js(TypeError, () => new ReadableStreamDefaultReader({}));
+ assert_throws_js(TypeError, () => new ReadableStreamDefaultReader());
+
+}, 'ReadableStreamDefaultReader constructor should get a ReadableStream object as argument');
+
+test(() => {
+
+ const rsReader = new ReadableStreamDefaultReader(new ReadableStream());
+ assert_equals(rsReader.closed, rsReader.closed, 'closed should return the same promise');
+
+}, 'ReadableStreamDefaultReader closed should always return the same promise object');
+
+test(() => {
+
+ const rs = new ReadableStream();
+ new ReadableStreamDefaultReader(rs); // Constructing directly the first time should be fine.
+ assert_throws_js(TypeError, () => new ReadableStreamDefaultReader(rs),
+ 'constructing directly the second time should fail');
+
+}, 'Constructing a ReadableStreamDefaultReader directly should fail if the stream is already locked (via direct ' +
+ 'construction)');
+
+test(() => {
+
+ const rs = new ReadableStream();
+ new ReadableStreamDefaultReader(rs); // Constructing directly should be fine.
+ assert_throws_js(TypeError, () => rs.getReader(), 'getReader() should fail');
+
+}, 'Getting a ReadableStreamDefaultReader via getReader should fail if the stream is already locked (via direct ' +
+ 'construction)');
+
+test(() => {
+
+ const rs = new ReadableStream();
+ rs.getReader(); // getReader() should be fine.
+ assert_throws_js(TypeError, () => new ReadableStreamDefaultReader(rs), 'constructing directly should fail');
+
+}, 'Constructing a ReadableStreamDefaultReader directly should fail if the stream is already locked (via getReader)');
+
+test(() => {
+
+ const rs = new ReadableStream();
+ rs.getReader(); // getReader() should be fine.
+ assert_throws_js(TypeError, () => rs.getReader(), 'getReader() should fail');
+
+}, 'Getting a ReadableStreamDefaultReader via getReader should fail if the stream is already locked (via getReader)');
+
+test(() => {
+
+ const rs = new ReadableStream({
+ start(c) {
+ c.close();
+ }
+ });
+
+ new ReadableStreamDefaultReader(rs); // Constructing directly should not throw.
+
+}, 'Constructing a ReadableStreamDefaultReader directly should be OK if the stream is closed');
+
+test(() => {
+
+ const theError = new Error('don\'t say i didn\'t warn ya');
+ const rs = new ReadableStream({
+ start(c) {
+ c.error(theError);
+ }
+ });
+
+ new ReadableStreamDefaultReader(rs); // Constructing directly should not throw.
+
+}, 'Constructing a ReadableStreamDefaultReader directly should be OK if the stream is errored');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ const reader = rs.getReader();
+
+ const promise = reader.read().then(result => {
+ assert_object_equals(result, { value: 'a', done: false }, 'read() should fulfill with the enqueued chunk');
+ });
+
+ controller.enqueue('a');
+ return promise;
+
+}, 'Reading from a reader for an empty stream will wait until a chunk is available');
+
+promise_test(() => {
+
+ let cancelCalled = false;
+ const passedReason = new Error('it wasn\'t the right time, sorry');
+ const rs = new ReadableStream({
+ cancel(reason) {
+ assert_true(rs.locked, 'the stream should still be locked');
+ assert_throws_js(TypeError, () => rs.getReader(), 'should not be able to get another reader');
+ assert_equals(reason, passedReason, 'the cancellation reason is passed through to the underlying source');
+ cancelCalled = true;
+ }
+ });
+
+ const reader = rs.getReader();
+ return reader.cancel(passedReason).then(() => assert_true(cancelCalled));
+
+}, 'cancel() on a reader does not release the reader');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const reader = rs.getReader();
+ const promise = reader.closed;
+
+ controller.close();
+ return promise;
+
+}, 'closed should be fulfilled after stream is closed (.closed access before acquiring)');
+
+promise_test(t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const reader1 = rs.getReader();
+
+ reader1.releaseLock();
+
+ const reader2 = rs.getReader();
+ controller.close();
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, reader1.closed),
+ reader2.closed
+ ]);
+
+}, 'closed should be rejected after reader releases its lock (multiple stream locks)');
+
+promise_test(t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const reader = rs.getReader();
+ const promise1 = reader.closed;
+
+ controller.close();
+
+ reader.releaseLock();
+ const promise2 = reader.closed;
+
+ assert_not_equals(promise1, promise2, '.closed should be replaced');
+ return Promise.all([
+ promise1,
+ promise_rejects_js(t, TypeError, promise2, '.closed after releasing lock'),
+ ]);
+
+}, 'closed is replaced when stream closes and reader releases its lock');
+
+promise_test(t => {
+
+ const theError = { name: 'unique error' };
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const reader = rs.getReader();
+ const promise1 = reader.closed;
+
+ controller.error(theError);
+
+ reader.releaseLock();
+ const promise2 = reader.closed;
+
+ assert_not_equals(promise1, promise2, '.closed should be replaced');
+ return Promise.all([
+ promise_rejects_exactly(t, theError, promise1, '.closed before releasing lock'),
+ promise_rejects_js(t, TypeError, promise2, '.closed after releasing lock')
+ ]);
+
+}, 'closed is replaced when stream errors and reader releases its lock');
+
+promise_test(() => {
+
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ c.close();
+ }
+ });
+
+ const reader1 = rs.getReader();
+ const promise1 = reader1.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'reading the first chunk from reader1 works');
+ });
+ reader1.releaseLock();
+
+ const reader2 = rs.getReader();
+ const promise2 = reader2.read().then(r => {
+ assert_object_equals(r, { value: 'b', done: false }, 'reading the second chunk from reader2 works');
+ });
+ reader2.releaseLock();
+
+ return Promise.all([promise1, promise2]);
+
+}, 'Multiple readers can access the stream in sequence');
+
+promise_test(() => {
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ }
+ });
+
+ const reader1 = rs.getReader();
+ reader1.releaseLock();
+
+ const reader2 = rs.getReader();
+
+ // Should be a no-op
+ reader1.releaseLock();
+
+ return reader2.read().then(result => {
+ assert_object_equals(result, { value: 'a', done: false },
+ 'read() should still work on reader2 even after reader1 is released');
+ });
+
+}, 'Cannot use an already-released reader to unlock a stream again');
+
+promise_test(t => {
+
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ },
+ cancel() {
+ assert_unreached('underlying source cancel should not be called');
+ }
+ });
+
+ const reader = rs.getReader();
+ reader.releaseLock();
+ const cancelPromise = reader.cancel();
+
+ const reader2 = rs.getReader();
+ const readPromise = reader2.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'a new reader should be able to read a chunk');
+ });
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, cancelPromise),
+ readPromise
+ ]);
+
+}, 'cancel() on a released reader is a no-op and does not pass through');
+
+promise_test(t => {
+
+ const promiseAsserts = [];
+
+ let controller;
+ const theError = { name: 'unique error' };
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const reader1 = rs.getReader();
+
+ promiseAsserts.push(
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader1.read())
+ );
+
+ assert_throws_js(TypeError, () => rs.getReader(), 'trying to get another reader before erroring should throw');
+
+ controller.error(theError);
+
+ reader1.releaseLock();
+
+ const reader2 = rs.getReader();
+
+ promiseAsserts.push(
+ promise_rejects_exactly(t, theError, reader2.closed),
+ promise_rejects_exactly(t, theError, reader2.read())
+ );
+
+ return Promise.all(promiseAsserts);
+
+}, 'Getting a second reader after erroring the stream and releasing the reader should succeed');
+
+promise_test(t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const promise = rs.getReader().closed.then(
+ t.unreached_func('closed promise should not be fulfilled when stream is errored'),
+ err => {
+ assert_equals(err, undefined, 'passed error should be undefined as it was');
+ }
+ );
+
+ controller.error();
+ return promise;
+
+}, 'ReadableStreamDefaultReader closed promise should be rejected with undefined if that is the error');
+
+
+promise_test(t => {
+
+ const rs = new ReadableStream({
+ start() {
+ return Promise.reject();
+ }
+ });
+
+ return rs.getReader().read().then(
+ t.unreached_func('read promise should not be fulfilled when stream is errored'),
+ err => {
+ assert_equals(err, undefined, 'passed error should be undefined as it was');
+ }
+ );
+
+}, 'ReadableStreamDefaultReader: if start rejects with no parameter, it should error the stream with an undefined ' +
+ 'error');
+
+promise_test(t => {
+
+ const theError = { name: 'unique string' };
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const promise = promise_rejects_exactly(t, theError, rs.getReader().closed);
+
+ controller.error(theError);
+ return promise;
+
+}, 'Erroring a ReadableStream after checking closed should reject ReadableStreamDefaultReader closed promise');
+
+promise_test(t => {
+
+ const theError = { name: 'unique string' };
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ controller.error(theError);
+
+ // Let's call getReader twice for extra test coverage of this code path.
+ rs.getReader().releaseLock();
+
+ return promise_rejects_exactly(t, theError, rs.getReader().closed);
+
+}, 'Erroring a ReadableStream before checking closed should reject ReadableStreamDefaultReader closed promise');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ const reader = rs.getReader();
+
+ const promise = Promise.all([
+ reader.read().then(result => {
+ assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (1)');
+ }),
+ reader.read().then(result => {
+ assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (2)');
+ }),
+ reader.closed
+ ]);
+
+ controller.close();
+ return promise;
+
+}, 'Reading twice on a stream that gets closed');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ controller.close();
+ const reader = rs.getReader();
+
+ return Promise.all([
+ reader.read().then(result => {
+ assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (1)');
+ }),
+ reader.read().then(result => {
+ assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (2)');
+ }),
+ reader.closed
+ ]);
+
+}, 'Reading twice on a closed stream');
+
+promise_test(t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const myError = { name: 'mashed potatoes' };
+ controller.error(myError);
+
+ const reader = rs.getReader();
+
+ return Promise.all([
+ promise_rejects_exactly(t, myError, reader.read()),
+ promise_rejects_exactly(t, myError, reader.read()),
+ promise_rejects_exactly(t, myError, reader.closed)
+ ]);
+
+}, 'Reading twice on an errored stream');
+
+promise_test(t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const myError = { name: 'mashed potatoes' };
+ const reader = rs.getReader();
+
+ const promise = Promise.all([
+ promise_rejects_exactly(t, myError, reader.read()),
+ promise_rejects_exactly(t, myError, reader.read()),
+ promise_rejects_exactly(t, myError, reader.closed)
+ ]);
+
+ controller.error(myError);
+ return promise;
+
+}, 'Reading twice on a stream that gets errored');
+
+test(() => {
+ const rs = new ReadableStream();
+ let toStringCalled = false;
+ const mode = {
+ toString() {
+ toStringCalled = true;
+ return '';
+ }
+ };
+ assert_throws_js(TypeError, () => rs.getReader({ mode }), 'getReader() should throw');
+ assert_true(toStringCalled, 'toString() should be called');
+}, 'getReader() should call ToString() on mode');
+
+promise_test(() => {
+ const rs = new ReadableStream({
+ pull(controller) {
+ controller.close();
+ }
+ });
+
+ const reader = rs.getReader();
+ return reader.read().then(() => {
+ // The test passes if releaseLock() does not throw.
+ reader.releaseLock();
+ });
+}, 'controller.close() should clear the list of pending read requests');
+
+promise_test(t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const reader1 = rs.getReader();
+ const promise1 = promise_rejects_js(t, TypeError, reader1.read(), 'read() from reader1 should reject when reader1 is released');
+ reader1.releaseLock();
+
+ controller.enqueue('a');
+
+ const reader2 = rs.getReader();
+ const promise2 = reader2.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'read() from reader2 should resolve with enqueued chunk');
+ });
+ reader2.releaseLock();
+
+ return Promise.all([promise1, promise2]);
+
+}, 'Second reader can read chunks after first reader was released with pending read requests');
diff --git a/testing/web-platform/tests/streams/readable-streams/floating-point-total-queue-size.any.js b/testing/web-platform/tests/streams/readable-streams/floating-point-total-queue-size.any.js
new file mode 100644
index 0000000000..8b88c21d7f
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/floating-point-total-queue-size.any.js
@@ -0,0 +1,116 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+// Due to the limitations of floating-point precision, the calculation of desiredSize sometimes gives different answers
+// than adding up the items in the queue would. It is important that implementations give the same result in these edge
+// cases so that developers do not come to depend on non-standard behaviour. See
+// https://github.com/whatwg/streams/issues/582 and linked issues for further discussion.
+
+promise_test(() => {
+ const { reader, controller } = setupTestStream();
+
+ controller.enqueue(2);
+ assert_equals(controller.desiredSize, 0 - 2, 'desiredSize must be -2 after enqueueing such a chunk');
+
+ controller.enqueue(Number.MAX_SAFE_INTEGER);
+ assert_equals(controller.desiredSize, 0 - Number.MAX_SAFE_INTEGER - 2,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)');
+
+ return reader.read().then(() => {
+ assert_equals(controller.desiredSize, 0 - Number.MAX_SAFE_INTEGER - 2 + 2,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)');
+
+ return reader.read();
+ }).then(() => {
+ assert_equals(controller.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative');
+ });
+}, 'Floating point arithmetic must manifest near Number.MAX_SAFE_INTEGER (total ends up positive)');
+
+promise_test(() => {
+ const { reader, controller } = setupTestStream();
+
+ controller.enqueue(1e-16);
+ assert_equals(controller.desiredSize, 0 - 1e-16, 'desiredSize must be -1e-16 after enqueueing such a chunk');
+
+ controller.enqueue(1);
+ assert_equals(controller.desiredSize, 0 - 1e-16 - 1,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)');
+
+ return reader.read().then(() => {
+ assert_equals(controller.desiredSize, 0 - 1e-16 - 1 + 1e-16,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)');
+
+ return reader.read();
+ }).then(() => {
+ assert_equals(controller.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative');
+ });
+}, 'Floating point arithmetic must manifest near 0 (total ends up positive, but clamped)');
+
+promise_test(() => {
+ const { reader, controller } = setupTestStream();
+
+ controller.enqueue(1e-16);
+ assert_equals(controller.desiredSize, 0 - 1e-16, 'desiredSize must be -1e-16 after enqueueing such a chunk');
+
+ controller.enqueue(1);
+ assert_equals(controller.desiredSize, 0 - 1e-16 - 1,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)');
+
+ controller.enqueue(2e-16);
+ assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a third chunk)');
+
+ return reader.read().then(() => {
+ assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)');
+
+ return reader.read();
+ }).then(() => {
+ assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16 + 1,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a second chunk)');
+
+ return reader.read();
+ }).then(() => {
+ assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16 + 1 + 2e-16,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a third chunk)');
+ });
+}, 'Floating point arithmetic must manifest near 0 (total ends up positive, and not clamped)');
+
+promise_test(() => {
+ const { reader, controller } = setupTestStream();
+
+ controller.enqueue(2e-16);
+ assert_equals(controller.desiredSize, 0 - 2e-16, 'desiredSize must be -2e-16 after enqueueing such a chunk');
+
+ controller.enqueue(1);
+ assert_equals(controller.desiredSize, 0 - 2e-16 - 1,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)');
+
+ return reader.read().then(() => {
+ assert_equals(controller.desiredSize, 0 - 2e-16 - 1 + 2e-16,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)');
+
+ return reader.read();
+ }).then(() => {
+ assert_equals(controller.desiredSize, 0,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a second chunk)');
+ });
+}, 'Floating point arithmetic must manifest near 0 (total ends up zero)');
+
+function setupTestStream() {
+ const strategy = {
+ size(x) {
+ return x;
+ },
+ highWaterMark: 0
+ };
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }, strategy);
+
+ return { reader: rs.getReader(), controller };
+}
diff --git a/testing/web-platform/tests/streams/readable-streams/from.any.js b/testing/web-platform/tests/streams/readable-streams/from.any.js
new file mode 100644
index 0000000000..58ad4d4add
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/from.any.js
@@ -0,0 +1,474 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+'use strict';
+
+const iterableFactories = [
+ ['an array of values', () => {
+ return ['a', 'b'];
+ }],
+
+ ['an array of promises', () => {
+ return [
+ Promise.resolve('a'),
+ Promise.resolve('b')
+ ];
+ }],
+
+ ['an array iterator', () => {
+ return ['a', 'b'][Symbol.iterator]();
+ }],
+
+ ['a string', () => {
+ // This iterates over the code points of the string.
+ return 'ab';
+ }],
+
+ ['a Set', () => {
+ return new Set(['a', 'b']);
+ }],
+
+ ['a Set iterator', () => {
+ return new Set(['a', 'b'])[Symbol.iterator]();
+ }],
+
+ ['a sync generator', () => {
+ function* syncGenerator() {
+ yield 'a';
+ yield 'b';
+ }
+
+ return syncGenerator();
+ }],
+
+ ['an async generator', () => {
+ async function* asyncGenerator() {
+ yield 'a';
+ yield 'b';
+ }
+
+ return asyncGenerator();
+ }],
+
+ ['a sync iterable of values', () => {
+ const chunks = ['a', 'b'];
+ const it = {
+ next() {
+ return {
+ done: chunks.length === 0,
+ value: chunks.shift()
+ };
+ },
+ [Symbol.iterator]: () => it
+ };
+ return it;
+ }],
+
+ ['a sync iterable of promises', () => {
+ const chunks = ['a', 'b'];
+ const it = {
+ next() {
+ return chunks.length === 0 ? { done: true } : {
+ done: false,
+ value: Promise.resolve(chunks.shift())
+ };
+ },
+ [Symbol.iterator]: () => it
+ };
+ return it;
+ }],
+
+ ['an async iterable', () => {
+ const chunks = ['a', 'b'];
+ const it = {
+ next() {
+ return Promise.resolve({
+ done: chunks.length === 0,
+ value: chunks.shift()
+ })
+ },
+ [Symbol.asyncIterator]: () => it
+ };
+ return it;
+ }],
+
+ ['a ReadableStream', () => {
+ return new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ c.close();
+ }
+ });
+ }],
+
+ ['a ReadableStream async iterator', () => {
+ return new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ c.close();
+ }
+ })[Symbol.asyncIterator]();
+ }]
+];
+
+for (const [label, factory] of iterableFactories) {
+ promise_test(async () => {
+
+ const iterable = factory();
+ const rs = ReadableStream.from(iterable);
+ assert_equals(rs.constructor, ReadableStream, 'from() should return a ReadableStream');
+
+ const reader = rs.getReader();
+ assert_object_equals(await reader.read(), { value: 'a', done: false }, 'first read should be correct');
+ assert_object_equals(await reader.read(), { value: 'b', done: false }, 'second read should be correct');
+ assert_object_equals(await reader.read(), { value: undefined, done: true }, 'third read should be done');
+ await reader.closed;
+
+ }, `ReadableStream.from accepts ${label}`);
+}
+
+const badIterables = [
+ ['null', null],
+ ['undefined', undefined],
+ ['0', 0],
+ ['NaN', NaN],
+ ['true', true],
+ ['{}', {}],
+ ['Object.create(null)', Object.create(null)],
+ ['a function', () => 42],
+ ['a symbol', Symbol()],
+ ['an object with a non-callable @@iterator method', {
+ [Symbol.iterator]: 42
+ }],
+ ['an object with a non-callable @@asyncIterator method', {
+ [Symbol.asyncIterator]: 42
+ }],
+];
+
+for (const [label, iterable] of badIterables) {
+ test(() => {
+ assert_throws_js(TypeError, () => ReadableStream.from(iterable), 'from() should throw a TypeError')
+ }, `ReadableStream.from throws on invalid iterables; specifically ${label}`);
+}
+
+test(() => {
+ const theError = new Error('a unique string');
+ const iterable = {
+ [Symbol.iterator]() {
+ throw theError;
+ }
+ };
+
+ assert_throws_exactly(theError, () => ReadableStream.from(iterable), 'from() should re-throw the error');
+}, `ReadableStream.from re-throws errors from calling the @@iterator method`);
+
+test(() => {
+ const theError = new Error('a unique string');
+ const iterable = {
+ [Symbol.asyncIterator]() {
+ throw theError;
+ }
+ };
+
+ assert_throws_exactly(theError, () => ReadableStream.from(iterable), 'from() should re-throw the error');
+}, `ReadableStream.from re-throws errors from calling the @@asyncIterator method`);
+
+test(t => {
+ const theError = new Error('a unique string');
+ const iterable = {
+ [Symbol.iterator]: t.unreached_func('@@iterator should not be called'),
+ [Symbol.asyncIterator]() {
+ throw theError;
+ }
+ };
+
+ assert_throws_exactly(theError, () => ReadableStream.from(iterable), 'from() should re-throw the error');
+}, `ReadableStream.from ignores @@iterator if @@asyncIterator exists`);
+
+promise_test(async () => {
+
+ const iterable = {
+ async next() {
+ return { value: undefined, done: true };
+ },
+ [Symbol.asyncIterator]: () => iterable
+ };
+
+ const rs = ReadableStream.from(iterable);
+ const reader = rs.getReader();
+
+ const read = await reader.read();
+ assert_object_equals(read, { value: undefined, done: true }, 'first read should be done');
+
+ await reader.closed;
+
+}, `ReadableStream.from accepts an empty iterable`);
+
+promise_test(async t => {
+
+ const theError = new Error('a unique string');
+
+ const iterable = {
+ async next() {
+ throw theError;
+ },
+ [Symbol.asyncIterator]: () => iterable
+ };
+
+ const rs = ReadableStream.from(iterable);
+ const reader = rs.getReader();
+
+ await Promise.all([
+ promise_rejects_exactly(t, theError, reader.read()),
+ promise_rejects_exactly(t, theError, reader.closed)
+ ]);
+
+}, `ReadableStream.from: stream errors when next() rejects`);
+
+promise_test(async t => {
+
+ const iterable = {
+ next() {
+ return new Promise(() => {});
+ },
+ [Symbol.asyncIterator]: () => iterable
+ };
+
+ const rs = ReadableStream.from(iterable);
+ const reader = rs.getReader();
+
+ await Promise.race([
+ reader.read().then(t.unreached_func('read() should not resolve'), t.unreached_func('read() should not reject')),
+ reader.closed.then(t.unreached_func('closed should not resolve'), t.unreached_func('closed should not reject')),
+ flushAsyncEvents()
+ ]);
+
+}, 'ReadableStream.from: stream stalls when next() never settles');
+
+promise_test(async () => {
+
+ let nextCalls = 0;
+ let nextArgs;
+ const iterable = {
+ async next(...args) {
+ nextCalls += 1;
+ nextArgs = args;
+ return { value: 'a', done: false };
+ },
+ [Symbol.asyncIterator]: () => iterable
+ };
+
+ const rs = ReadableStream.from(iterable);
+ const reader = rs.getReader();
+
+ await flushAsyncEvents();
+ assert_equals(nextCalls, 0, 'next() should not be called yet');
+
+ const read = await reader.read();
+ assert_object_equals(read, { value: 'a', done: false }, 'first read should be correct');
+ assert_equals(nextCalls, 1, 'next() should be called after first read()');
+ assert_array_equals(nextArgs, [], 'next() should be called with no arguments');
+
+}, `ReadableStream.from: calls next() after first read()`);
+
+promise_test(async t => {
+
+ const theError = new Error('a unique string');
+
+ let returnCalls = 0;
+ let returnArgs;
+ let resolveReturn;
+ const iterable = {
+ next: t.unreached_func('next() should not be called'),
+ throw: t.unreached_func('throw() should not be called'),
+ async return(...args) {
+ returnCalls += 1;
+ returnArgs = args;
+ await new Promise(r => resolveReturn = r);
+ return { done: true };
+ },
+ [Symbol.asyncIterator]: () => iterable
+ };
+
+ const rs = ReadableStream.from(iterable);
+ const reader = rs.getReader();
+ assert_equals(returnCalls, 0, 'return() should not be called yet');
+
+ let cancelResolved = false;
+ const cancelPromise = reader.cancel(theError).then(() => {
+ cancelResolved = true;
+ });
+
+ await flushAsyncEvents();
+ assert_equals(returnCalls, 1, 'return() should be called');
+ assert_array_equals(returnArgs, [theError], 'return() should be called with cancel reason');
+ assert_false(cancelResolved, 'cancel() should not resolve while promise from return() is pending');
+
+ resolveReturn();
+ await Promise.all([
+ cancelPromise,
+ reader.closed
+ ]);
+
+}, `ReadableStream.from: cancelling the returned stream calls and awaits return()`);
+
+promise_test(async t => {
+
+ let nextCalls = 0;
+ let returnCalls = 0;
+
+ const iterable = {
+ async next() {
+ nextCalls += 1;
+ return { value: undefined, done: true };
+ },
+ throw: t.unreached_func('throw() should not be called'),
+ async return() {
+ returnCalls += 1;
+ },
+ [Symbol.asyncIterator]: () => iterable
+ };
+
+ const rs = ReadableStream.from(iterable);
+ const reader = rs.getReader();
+
+ const read = await reader.read();
+ assert_object_equals(read, { value: undefined, done: true }, 'first read should be done');
+ assert_equals(nextCalls, 1, 'next() should be called once');
+
+ await reader.closed;
+ assert_equals(returnCalls, 0, 'return() should not be called');
+
+}, `ReadableStream.from: return() is not called when iterator completes normally`);
+
+promise_test(async t => {
+
+ const theError = new Error('a unique string');
+
+ const iterable = {
+ next: t.unreached_func('next() should not be called'),
+ throw: t.unreached_func('throw() should not be called'),
+ async return() {
+ return 42;
+ },
+ [Symbol.asyncIterator]: () => iterable
+ };
+
+ const rs = ReadableStream.from(iterable);
+ const reader = rs.getReader();
+
+ await promise_rejects_js(t, TypeError, reader.cancel(theError), 'cancel() should reject with a TypeError');
+
+ await reader.closed;
+
+}, `ReadableStream.from: cancel() rejects when return() fulfills with a non-object`);
+
+promise_test(async () => {
+
+ let nextCalls = 0;
+ let reader;
+ let values = ['a', 'b', 'c'];
+
+ const iterable = {
+ async next() {
+ nextCalls += 1;
+ if (nextCalls === 1) {
+ reader.read();
+ }
+ return { value: values.shift(), done: false };
+ },
+ [Symbol.asyncIterator]: () => iterable
+ };
+
+ const rs = ReadableStream.from(iterable);
+ reader = rs.getReader();
+
+ const read1 = await reader.read();
+ assert_object_equals(read1, { value: 'a', done: false }, 'first read should be correct');
+ await flushAsyncEvents();
+ assert_equals(nextCalls, 2, 'next() should be called two times');
+
+ const read2 = await reader.read();
+ assert_object_equals(read2, { value: 'c', done: false }, 'second read should be correct');
+ assert_equals(nextCalls, 3, 'next() should be called three times');
+
+}, `ReadableStream.from: reader.read() inside next()`);
+
+promise_test(async () => {
+
+ let nextCalls = 0;
+ let returnCalls = 0;
+ let reader;
+
+ const iterable = {
+ async next() {
+ nextCalls++;
+ await reader.cancel();
+ assert_equals(returnCalls, 1, 'return() should be called once');
+ return { value: 'something else', done: false };
+ },
+ async return() {
+ returnCalls++;
+ },
+ [Symbol.asyncIterator]: () => iterable
+ };
+
+ const rs = ReadableStream.from(iterable);
+ reader = rs.getReader();
+
+ const read = await reader.read();
+ assert_object_equals(read, { value: undefined, done: true }, 'first read should be done');
+ assert_equals(nextCalls, 1, 'next() should be called once');
+
+ await reader.closed;
+
+}, `ReadableStream.from: reader.cancel() inside next()`);
+
+promise_test(async t => {
+
+ let returnCalls = 0;
+ let reader;
+
+ const iterable = {
+ next: t.unreached_func('next() should not be called'),
+ async return() {
+ returnCalls++;
+ await reader.cancel();
+ return { done: true };
+ },
+ [Symbol.asyncIterator]: () => iterable
+ };
+
+ const rs = ReadableStream.from(iterable);
+ reader = rs.getReader();
+
+ await reader.cancel();
+ assert_equals(returnCalls, 1, 'return() should be called once');
+
+ await reader.closed;
+
+}, `ReadableStream.from: reader.cancel() inside return()`);
+
+promise_test(async t => {
+
+ let array = ['a', 'b'];
+
+ const rs = ReadableStream.from(array);
+ const reader = rs.getReader();
+
+ const read1 = await reader.read();
+ assert_object_equals(read1, { value: 'a', done: false }, 'first read should be correct');
+ const read2 = await reader.read();
+ assert_object_equals(read2, { value: 'b', done: false }, 'second read should be correct');
+
+ array.push('c');
+
+ const read3 = await reader.read();
+ assert_object_equals(read3, { value: 'c', done: false }, 'third read after push() should be correct');
+ const read4 = await reader.read();
+ assert_object_equals(read4, { value: undefined, done: true }, 'fourth read should be done');
+
+ await reader.closed;
+
+}, `ReadableStream.from(array), push() to array while reading`);
diff --git a/testing/web-platform/tests/streams/readable-streams/garbage-collection.any.js b/testing/web-platform/tests/streams/readable-streams/garbage-collection.any.js
new file mode 100644
index 0000000000..13bd1fb343
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/garbage-collection.any.js
@@ -0,0 +1,71 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=/common/gc.js
+'use strict';
+
+promise_test(async () => {
+
+ let controller;
+ new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ await garbageCollect();
+
+ return delay(50).then(() => {
+ controller.close();
+ assert_throws_js(TypeError, () => controller.close(), 'close should throw a TypeError the second time');
+ controller.error();
+ });
+
+}, 'ReadableStreamController methods should continue working properly when scripts lose their reference to the ' +
+ 'readable stream');
+
+promise_test(async () => {
+
+ let controller;
+
+ const closedPromise = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }).getReader().closed;
+
+ await garbageCollect();
+
+ return delay(50).then(() => controller.close()).then(() => closedPromise);
+
+}, 'ReadableStream closed promise should fulfill even if the stream and reader JS references are lost');
+
+promise_test(async t => {
+
+ const theError = new Error('boo');
+ let controller;
+
+ const closedPromise = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }).getReader().closed;
+
+ await garbageCollect();
+
+ return delay(50).then(() => controller.error(theError))
+ .then(() => promise_rejects_exactly(t, theError, closedPromise));
+
+}, 'ReadableStream closed promise should reject even if stream and reader JS references are lost');
+
+promise_test(async () => {
+
+ const rs = new ReadableStream({});
+
+ rs.getReader();
+
+ await garbageCollect();
+
+ return delay(50).then(() => assert_throws_js(TypeError, () => rs.getReader(),
+ 'old reader should still be locking the stream even after garbage collection'));
+
+}, 'Garbage-collecting a ReadableStreamDefaultReader should not unlock its stream');
diff --git a/testing/web-platform/tests/streams/readable-streams/general.any.js b/testing/web-platform/tests/streams/readable-streams/general.any.js
new file mode 100644
index 0000000000..eee3f62215
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/general.any.js
@@ -0,0 +1,840 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/rs-utils.js
+'use strict';
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+test(() => {
+
+ new ReadableStream(); // ReadableStream constructed with no parameters
+ new ReadableStream({ }); // ReadableStream constructed with an empty object as parameter
+ new ReadableStream({ type: undefined }); // ReadableStream constructed with undefined type
+ new ReadableStream(undefined); // ReadableStream constructed with undefined as parameter
+
+ let x;
+ new ReadableStream(x); // ReadableStream constructed with an undefined variable as parameter
+
+}, 'ReadableStream can be constructed with no errors');
+
+test(() => {
+
+ assert_throws_js(TypeError, () => new ReadableStream(null), 'constructor should throw when the source is null');
+
+}, 'ReadableStream can\'t be constructed with garbage');
+
+test(() => {
+
+ assert_throws_js(TypeError, () => new ReadableStream({ type: null }),
+ 'constructor should throw when the type is null');
+ assert_throws_js(TypeError, () => new ReadableStream({ type: '' }),
+ 'constructor should throw when the type is empty string');
+ assert_throws_js(TypeError, () => new ReadableStream({ type: 'asdf' }),
+ 'constructor should throw when the type is asdf');
+ assert_throws_exactly(
+ error1,
+ () => new ReadableStream({ type: { get toString() { throw error1; } } }),
+ 'constructor should throw when ToString() throws'
+ );
+ assert_throws_exactly(
+ error1,
+ () => new ReadableStream({ type: { toString() { throw error1; } } }),
+ 'constructor should throw when ToString() throws'
+ );
+
+}, 'ReadableStream can\'t be constructed with an invalid type');
+
+test(() => {
+
+ assert_throws_js(TypeError, () => {
+ new ReadableStream({ start: 'potato' });
+ }, 'constructor should throw when start is not a function');
+
+}, 'ReadableStream constructor should throw for non-function start arguments');
+
+test(() => {
+
+ assert_throws_js(TypeError, () => new ReadableStream({ cancel: '2' }), 'constructor should throw');
+
+}, 'ReadableStream constructor will not tolerate initial garbage as cancel argument');
+
+test(() => {
+
+ assert_throws_js(TypeError, () => new ReadableStream({ pull: { } }), 'constructor should throw');
+
+}, 'ReadableStream constructor will not tolerate initial garbage as pull argument');
+
+test(() => {
+
+ let startCalled = false;
+
+ const source = {
+ start() {
+ assert_equals(this, source, 'source is this during start');
+ startCalled = true;
+ }
+ };
+
+ new ReadableStream(source);
+ assert_true(startCalled);
+
+}, 'ReadableStream start should be called with the proper thisArg');
+
+test(() => {
+
+ let startCalled = false;
+ const source = {
+ start(controller) {
+ const properties = ['close', 'constructor', 'desiredSize', 'enqueue', 'error'];
+ assert_array_equals(Object.getOwnPropertyNames(Object.getPrototypeOf(controller)).sort(), properties,
+ 'prototype should have the right properties');
+
+ controller.test = '';
+ assert_array_equals(Object.getOwnPropertyNames(Object.getPrototypeOf(controller)).sort(), properties,
+ 'prototype should still have the right properties');
+ assert_not_equals(Object.getOwnPropertyNames(controller).indexOf('test'), -1,
+ '"test" should be a property of the controller');
+
+ startCalled = true;
+ }
+ };
+
+ new ReadableStream(source);
+ assert_true(startCalled);
+
+}, 'ReadableStream start controller parameter should be extensible');
+
+test(() => {
+ (new ReadableStream()).getReader(undefined);
+ (new ReadableStream()).getReader({});
+ (new ReadableStream()).getReader({ mode: undefined, notmode: 'ignored' });
+ assert_throws_js(TypeError, () => (new ReadableStream()).getReader({ mode: 'potato' }));
+}, 'default ReadableStream getReader() should only accept mode:undefined');
+
+promise_test(() => {
+
+ function SimpleStreamSource() {}
+ let resolve;
+ const promise = new Promise(r => resolve = r);
+ SimpleStreamSource.prototype = {
+ start: resolve
+ };
+
+ new ReadableStream(new SimpleStreamSource());
+ return promise;
+
+}, 'ReadableStream should be able to call start method within prototype chain of its source');
+
+promise_test(() => {
+
+ const rs = new ReadableStream({
+ start(c) {
+ return delay(5).then(() => {
+ c.enqueue('a');
+ c.close();
+ });
+ }
+ });
+
+ const reader = rs.getReader();
+ return reader.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'value read should be the one enqueued');
+ return reader.closed;
+ });
+
+}, 'ReadableStream start should be able to return a promise');
+
+promise_test(() => {
+
+ const theError = new Error('rejected!');
+ const rs = new ReadableStream({
+ start() {
+ return delay(1).then(() => {
+ throw theError;
+ });
+ }
+ });
+
+ return rs.getReader().closed.then(() => {
+ assert_unreached('closed promise should be rejected');
+ }, e => {
+ assert_equals(e, theError, 'promise should be rejected with the same error');
+ });
+
+}, 'ReadableStream start should be able to return a promise and reject it');
+
+promise_test(() => {
+
+ const objects = [
+ { potato: 'Give me more!' },
+ 'test',
+ 1
+ ];
+
+ const rs = new ReadableStream({
+ start(c) {
+ for (const o of objects) {
+ c.enqueue(o);
+ }
+ c.close();
+ }
+ });
+
+ const reader = rs.getReader();
+
+ return Promise.all([reader.read(), reader.read(), reader.read(), reader.closed]).then(r => {
+ assert_object_equals(r[0], { value: objects[0], done: false }, 'value read should be the one enqueued');
+ assert_object_equals(r[1], { value: objects[1], done: false }, 'value read should be the one enqueued');
+ assert_object_equals(r[2], { value: objects[2], done: false }, 'value read should be the one enqueued');
+ });
+
+}, 'ReadableStream should be able to enqueue different objects.');
+
+promise_test(() => {
+
+ const error = new Error('pull failure');
+ const rs = new ReadableStream({
+ pull() {
+ return Promise.reject(error);
+ }
+ });
+
+ const reader = rs.getReader();
+
+ let closed = false;
+ let read = false;
+
+ return Promise.all([
+ reader.closed.then(() => {
+ assert_unreached('closed should be rejected');
+ }, e => {
+ closed = true;
+ assert_false(read);
+ assert_equals(e, error, 'closed should be rejected with the thrown error');
+ }),
+ reader.read().then(() => {
+ assert_unreached('read() should be rejected');
+ }, e => {
+ read = true;
+ assert_true(closed);
+ assert_equals(e, error, 'read() should be rejected with the thrown error');
+ })
+ ]);
+
+}, 'ReadableStream: if pull rejects, it should error the stream');
+
+promise_test(() => {
+
+ let pullCount = 0;
+
+ new ReadableStream({
+ pull() {
+ pullCount++;
+ }
+ });
+
+ return flushAsyncEvents().then(() => {
+ assert_equals(pullCount, 1, 'pull should be called once start finishes');
+ return delay(10);
+ }).then(() => {
+ assert_equals(pullCount, 1, 'pull should be called exactly once');
+ });
+
+}, 'ReadableStream: should only call pull once upon starting the stream');
+
+promise_test(() => {
+
+ let pullCount = 0;
+
+ const rs = new ReadableStream({
+ pull(c) {
+ // Don't enqueue immediately after start. We want the stream to be empty when we call .read() on it.
+ if (pullCount > 0) {
+ c.enqueue(pullCount);
+ }
+ ++pullCount;
+ }
+ });
+
+ return flushAsyncEvents().then(() => {
+ assert_equals(pullCount, 1, 'pull should be called once start finishes');
+ }).then(() => {
+ const reader = rs.getReader();
+ const read = reader.read();
+ assert_equals(pullCount, 2, 'pull should be called when read is called');
+ return read;
+ }).then(result => {
+ assert_equals(pullCount, 3, 'pull should be called again in reaction to calling read');
+ assert_object_equals(result, { value: 1, done: false }, 'the result read should be the one enqueued');
+ });
+
+}, 'ReadableStream: should call pull when trying to read from a started, empty stream');
+
+promise_test(() => {
+
+ let pullCount = 0;
+
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ },
+ pull() {
+ pullCount++;
+ }
+ });
+
+ const read = rs.getReader().read();
+ assert_equals(pullCount, 0, 'calling read() should not cause pull to be called yet');
+
+ return flushAsyncEvents().then(() => {
+ assert_equals(pullCount, 1, 'pull should be called once start finishes');
+ return read;
+ }).then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'first read() should return first chunk');
+ assert_equals(pullCount, 1, 'pull should not have been called again');
+ return delay(10);
+ }).then(() => {
+ assert_equals(pullCount, 1, 'pull should be called exactly once');
+ });
+
+}, 'ReadableStream: should only call pull once on a non-empty stream read from before start fulfills');
+
+promise_test(() => {
+
+  let pullCount = 0;
+
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ },
+ pull() {
+ pullCount++;
+ }
+ });
+
+ return flushAsyncEvents().then(() => {
+ assert_equals(pullCount, 0, 'pull should not be called once start finishes, since the queue is full');
+
+ const read = rs.getReader().read();
+ assert_equals(pullCount, 1, 'calling read() should cause pull to be called immediately');
+ return read;
+ }).then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'first read() should return first chunk');
+ return delay(10);
+ }).then(() => {
+ assert_equals(pullCount, 1, 'pull should be called exactly once');
+ });
+
+}, 'ReadableStream: should only call pull once on a non-empty stream read from after start fulfills');
+
+promise_test(() => {
+
+ let pullCount = 0;
+ let controller;
+
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ ++pullCount;
+ }
+ });
+
+ const reader = rs.getReader();
+ return flushAsyncEvents().then(() => {
+ assert_equals(pullCount, 1, 'pull should have been called once by the time the stream starts');
+
+ controller.enqueue('a');
+ assert_equals(pullCount, 1, 'pull should not have been called again after enqueue');
+
+ return reader.read();
+ }).then(() => {
+ assert_equals(pullCount, 2, 'pull should have been called again after read');
+
+ return delay(10);
+ }).then(() => {
+ assert_equals(pullCount, 2, 'pull should be called exactly twice');
+ });
+}, 'ReadableStream: should call pull in reaction to read()ing the last chunk, if not draining');
+
+promise_test(() => {
+
+ let pullCount = 0;
+ let controller;
+
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ pull() {
+ ++pullCount;
+ }
+ });
+
+ const reader = rs.getReader();
+
+ return flushAsyncEvents().then(() => {
+ assert_equals(pullCount, 1, 'pull should have been called once by the time the stream starts');
+
+ controller.enqueue('a');
+ assert_equals(pullCount, 1, 'pull should not have been called again after enqueue');
+
+ controller.close();
+
+ return reader.read();
+ }).then(() => {
+ assert_equals(pullCount, 1, 'pull should not have been called a second time after read');
+
+ return delay(10);
+ }).then(() => {
+ assert_equals(pullCount, 1, 'pull should be called exactly once');
+ });
+
+}, 'ReadableStream: should not call pull() in reaction to read()ing the last chunk, if draining');
+
+promise_test(() => {
+
+ let resolve;
+ let returnedPromise;
+ let timesCalled = 0;
+
+ const rs = new ReadableStream({
+ pull(c) {
+ c.enqueue(++timesCalled);
+ returnedPromise = new Promise(r => resolve = r);
+ return returnedPromise;
+ }
+ });
+ const reader = rs.getReader();
+
+ return reader.read()
+ .then(result1 => {
+ assert_equals(timesCalled, 1,
+ 'pull should have been called once after start, but not yet have been called a second time');
+ assert_object_equals(result1, { value: 1, done: false }, 'read() should fulfill with the enqueued value');
+
+ return delay(10);
+ }).then(() => {
+ assert_equals(timesCalled, 1, 'after 10 ms, pull should still only have been called once');
+
+ resolve();
+ return returnedPromise;
+ }).then(() => {
+ assert_equals(timesCalled, 2,
+ 'after the promise returned by pull is fulfilled, pull should be called a second time');
+ });
+
+}, 'ReadableStream: should not call pull until the previous pull call\'s promise fulfills');
+
+promise_test(() => {
+
+ let timesCalled = 0;
+
+ const rs = new ReadableStream(
+ {
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ c.enqueue('c');
+ },
+ pull() {
+ ++timesCalled;
+ }
+ },
+ {
+ size() {
+ return 1;
+ },
+ highWaterMark: Infinity
+ }
+ );
+ const reader = rs.getReader();
+
+ return flushAsyncEvents().then(() => {
+ return reader.read();
+ }).then(result1 => {
+ assert_object_equals(result1, { value: 'a', done: false }, 'first chunk should be as expected');
+
+ return reader.read();
+ }).then(result2 => {
+ assert_object_equals(result2, { value: 'b', done: false }, 'second chunk should be as expected');
+
+ return reader.read();
+ }).then(result3 => {
+ assert_object_equals(result3, { value: 'c', done: false }, 'third chunk should be as expected');
+
+ return delay(10);
+ }).then(() => {
+ // Once for after start, and once for every read.
+ assert_equals(timesCalled, 4, 'pull() should be called exactly four times');
+ });
+
+}, 'ReadableStream: should pull after start, and after every read');
+
+promise_test(() => {
+
+ let timesCalled = 0;
+ const startPromise = Promise.resolve();
+
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.close();
+ return startPromise;
+ },
+ pull() {
+ ++timesCalled;
+ }
+ });
+
+ const reader = rs.getReader();
+ return startPromise.then(() => {
+ assert_equals(timesCalled, 0, 'after start finishes, pull should not have been called');
+
+ return reader.read();
+ }).then(() => {
+ assert_equals(timesCalled, 0, 'reading should not have triggered a pull call');
+
+ return reader.closed;
+ }).then(() => {
+ assert_equals(timesCalled, 0, 'stream should have closed with still no calls to pull');
+ });
+
+}, 'ReadableStream: should not call pull after start if the stream is now closed');
+
+promise_test(() => {
+
+ let timesCalled = 0;
+ let resolve;
+ const ready = new Promise(r => resolve = r);
+
+ new ReadableStream(
+ {
+ start() {},
+ pull(c) {
+ c.enqueue(++timesCalled);
+
+ if (timesCalled === 4) {
+ resolve();
+ }
+ }
+ },
+ {
+ size() {
+ return 1;
+ },
+ highWaterMark: 4
+ }
+ );
+
+ return ready.then(() => {
+ // after start: size = 0, pull()
+ // after enqueue(1): size = 1, pull()
+ // after enqueue(2): size = 2, pull()
+ // after enqueue(3): size = 3, pull()
+ // after enqueue(4): size = 4, do not pull
+ assert_equals(timesCalled, 4, 'pull() should have been called four times');
+ });
+
+}, 'ReadableStream: should call pull after enqueueing from inside pull (with no read requests), if strategy allows');
+
+promise_test(() => {
+
+ let pullCalled = false;
+
+ const rs = new ReadableStream({
+ pull(c) {
+ pullCalled = true;
+ c.close();
+ }
+ });
+
+ const reader = rs.getReader();
+ return reader.closed.then(() => {
+ assert_true(pullCalled);
+ });
+
+}, 'ReadableStream pull should be able to close a stream.');
+
+promise_test(t => {
+
+ const controllerError = { name: 'controller error' };
+
+ const rs = new ReadableStream({
+ pull(c) {
+ c.error(controllerError);
+ }
+ });
+
+ return promise_rejects_exactly(t, controllerError, rs.getReader().closed);
+
+}, 'ReadableStream pull should be able to error a stream.');
+
+promise_test(t => {
+
+ const controllerError = { name: 'controller error' };
+ const thrownError = { name: 'thrown error' };
+
+ const rs = new ReadableStream({
+ pull(c) {
+ c.error(controllerError);
+ throw thrownError;
+ }
+ });
+
+ return promise_rejects_exactly(t, controllerError, rs.getReader().closed);
+
+}, 'ReadableStream pull should be able to error a stream and throw.');
+
+test(() => {
+
+ let startCalled = false;
+
+ new ReadableStream({
+ start(c) {
+ assert_equals(c.enqueue('a'), undefined, 'the first enqueue should return undefined');
+ c.close();
+
+ assert_throws_js(TypeError, () => c.enqueue('b'), 'enqueue after close should throw a TypeError');
+ startCalled = true;
+ }
+ });
+
+ assert_true(startCalled);
+
+}, 'ReadableStream: enqueue should throw when the stream is readable but draining');
+
+test(() => {
+
+ let startCalled = false;
+
+ new ReadableStream({
+ start(c) {
+ c.close();
+
+ assert_throws_js(TypeError, () => c.enqueue('a'), 'enqueue after close should throw a TypeError');
+ startCalled = true;
+ }
+ });
+
+ assert_true(startCalled);
+
+}, 'ReadableStream: enqueue should throw when the stream is closed');
+
+promise_test(() => {
+
+ let startCalled = 0;
+ let pullCalled = 0;
+ let cancelCalled = 0;
+
+ /* eslint-disable no-use-before-define */
+ class Source {
+ start(c) {
+ startCalled++;
+ assert_equals(this, theSource, 'start() should be called with the correct this');
+ c.enqueue('a');
+ }
+
+ pull() {
+ pullCalled++;
+ assert_equals(this, theSource, 'pull() should be called with the correct this');
+ }
+
+ cancel() {
+ cancelCalled++;
+ assert_equals(this, theSource, 'cancel() should be called with the correct this');
+ }
+ }
+ /* eslint-enable no-use-before-define */
+
+ const theSource = new Source();
+ theSource.debugName = 'the source object passed to the constructor'; // makes test failures easier to diagnose
+
+ const rs = new ReadableStream(theSource);
+ const reader = rs.getReader();
+
+ return reader.read().then(() => {
+ reader.releaseLock();
+ rs.cancel();
+ assert_equals(startCalled, 1);
+ assert_equals(pullCalled, 1);
+ assert_equals(cancelCalled, 1);
+ return rs.getReader().closed;
+ });
+
+}, 'ReadableStream: should call underlying source methods as methods');
+
+test(() => {
+ new ReadableStream({
+ start(c) {
+ assert_equals(c.desiredSize, 10, 'desiredSize must start at highWaterMark');
+ c.close();
+ assert_equals(c.desiredSize, 0, 'after closing, desiredSize must be 0');
+ }
+ }, {
+ highWaterMark: 10
+ });
+}, 'ReadableStream: desiredSize when closed');
+
+test(() => {
+ new ReadableStream({
+ start(c) {
+ assert_equals(c.desiredSize, 10, 'desiredSize must start at highWaterMark');
+ c.error();
+ assert_equals(c.desiredSize, null, 'after erroring, desiredSize must be null');
+ }
+ }, {
+ highWaterMark: 10
+ });
+}, 'ReadableStream: desiredSize when errored');
+
+test(() => {
+ class Subclass extends ReadableStream {
+ extraFunction() {
+ return true;
+ }
+ }
+ assert_equals(
+ Object.getPrototypeOf(Subclass.prototype), ReadableStream.prototype,
+ 'Subclass.prototype\'s prototype should be ReadableStream.prototype');
+ assert_equals(Object.getPrototypeOf(Subclass), ReadableStream,
+ 'Subclass\'s prototype should be ReadableStream');
+ const sub = new Subclass();
+ assert_true(sub instanceof ReadableStream,
+ 'Subclass object should be an instance of ReadableStream');
+ assert_true(sub instanceof Subclass,
+ 'Subclass object should be an instance of Subclass');
+ const lockedGetter = Object.getOwnPropertyDescriptor(
+ ReadableStream.prototype, 'locked').get;
+ assert_equals(lockedGetter.call(sub), sub.locked,
+ 'Subclass object should pass brand check');
+ assert_true(sub.extraFunction(),
+ 'extraFunction() should be present on Subclass object');
+}, 'Subclassing ReadableStream should work');
+
+test(() => {
+
+ let startCalled = false;
+ new ReadableStream({
+ start(c) {
+ assert_equals(c.desiredSize, 1);
+ c.enqueue('a');
+ assert_equals(c.desiredSize, 0);
+ c.enqueue('b');
+ assert_equals(c.desiredSize, -1);
+ c.enqueue('c');
+ assert_equals(c.desiredSize, -2);
+ c.enqueue('d');
+ assert_equals(c.desiredSize, -3);
+ c.enqueue('e');
+ startCalled = true;
+ }
+ });
+
+ assert_true(startCalled);
+
+}, 'ReadableStream strategies: the default strategy should give desiredSize of 1 to start, decreasing by 1 per enqueue');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ const reader = rs.getReader();
+
+ assert_equals(controller.desiredSize, 1, 'desiredSize should start at 1');
+ controller.enqueue('a');
+ assert_equals(controller.desiredSize, 0, 'desiredSize should decrease to 0 after first enqueue');
+
+ return reader.read().then(result1 => {
+ assert_object_equals(result1, { value: 'a', done: false }, 'first chunk read should be correct');
+
+ assert_equals(controller.desiredSize, 1, 'desiredSize should go up to 1 after the first read');
+ controller.enqueue('b');
+ assert_equals(controller.desiredSize, 0, 'desiredSize should go down to 0 after the second enqueue');
+
+ return reader.read();
+ }).then(result2 => {
+ assert_object_equals(result2, { value: 'b', done: false }, 'second chunk read should be correct');
+
+ assert_equals(controller.desiredSize, 1, 'desiredSize should go up to 1 after the second read');
+ controller.enqueue('c');
+ assert_equals(controller.desiredSize, 0, 'desiredSize should go down to 0 after the third enqueue');
+
+ return reader.read();
+ }).then(result3 => {
+ assert_object_equals(result3, { value: 'c', done: false }, 'third chunk read should be correct');
+
+ assert_equals(controller.desiredSize, 1, 'desiredSize should go up to 1 after the third read');
+ controller.enqueue('d');
+ assert_equals(controller.desiredSize, 0, 'desiredSize should go down to 0 after the fourth enqueue');
+ });
+
+}, 'ReadableStream strategies: the default strategy should continue giving desiredSize of 1 if the chunks are read immediately');
+
+promise_test(t => {
+
+ const randomSource = new RandomPushSource(8);
+
+ const rs = new ReadableStream({
+ start(c) {
+ assert_equals(typeof c, 'object', 'c should be an object in start');
+ assert_equals(typeof c.enqueue, 'function', 'enqueue should be a function in start');
+ assert_equals(typeof c.close, 'function', 'close should be a function in start');
+ assert_equals(typeof c.error, 'function', 'error should be a function in start');
+
+      randomSource.ondata = t.step_func(chunk => {
+        c.enqueue(chunk);
+        if (c.desiredSize <= 0) {
+          randomSource.readStop();
+        }
+      });
+
+ randomSource.onend = c.close.bind(c);
+ randomSource.onerror = c.error.bind(c);
+ },
+
+ pull(c) {
+ assert_equals(typeof c, 'object', 'c should be an object in pull');
+ assert_equals(typeof c.enqueue, 'function', 'enqueue should be a function in pull');
+ assert_equals(typeof c.close, 'function', 'close should be a function in pull');
+
+ randomSource.readStart();
+ }
+ });
+
+ return readableStreamToArray(rs).then(chunks => {
+ assert_equals(chunks.length, 8, '8 chunks should be read');
+ for (const chunk of chunks) {
+ assert_equals(chunk.length, 128, 'chunk should have 128 bytes');
+ }
+ });
+
+}, 'ReadableStream integration test: adapting a random push source');
+
+promise_test(() => {
+
+ const rs = sequentialReadableStream(10);
+
+ return readableStreamToArray(rs).then(chunks => {
+ assert_true(rs.source.closed, 'source should be closed after all chunks are read');
+ assert_array_equals(chunks, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 'the expected 10 chunks should be read');
+ });
+
+}, 'ReadableStream integration test: adapting a sync pull source');
+
+promise_test(() => {
+
+ const rs = sequentialReadableStream(10, { async: true });
+
+ return readableStreamToArray(rs).then(chunks => {
+ assert_true(rs.source.closed, 'source should be closed after all chunks are read');
+ assert_array_equals(chunks, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 'the expected 10 chunks should be read');
+ });
+
+}, 'ReadableStream integration test: adapting an async pull source');
diff --git a/testing/web-platform/tests/streams/readable-streams/global.html b/testing/web-platform/tests/streams/readable-streams/global.html
new file mode 100644
index 0000000000..08665d318e
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/global.html
@@ -0,0 +1,162 @@
+<!doctype html>
+<meta charset="utf-8">
+<title>Ensure Stream objects are created in expected globals. </title>
+
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+
+<body></body>
+<script>
+// These tests are loosely derived from Gecko's readable-stream-globals.js,
+// which is a test case designed around the JS Streams implementation.
+//
+// Unlike in JS Streams, where function calls switch realms and change
+// the resulting global of the resulting objects, in WebIDL streams,
+// the global of an object is (currently underspecified, but) intended
+// to be the "Relevant Global" of the 'this' object.
+//
+// See:
+// https://html.spec.whatwg.org/multipage/webappapis.html#relevant
+// https://github.com/whatwg/streams/issues/1213
+"use strict"
+
+const iframe = document.createElement("iframe")
+document.body.append(iframe)
+
+const otherGlobal = iframe.contentWindow;
+const OtherReadableStream = otherGlobal.ReadableStream
+const OtherReadableStreamDefaultReader = otherGlobal.ReadableStreamDefaultReader;
+const OtherReadableStreamDefaultController = otherGlobal.ReadableStreamDefaultController;
+
+promise_test(async () => {
+
+ // Controllers
+ let controller;
+ let otherController;
+
+ // Get Stream Prototypes and controllers.
+ let streamController;
+ let stream = new ReadableStream({start(c) { streamController = c; }});
+
+ const callReaderThisGlobal = OtherReadableStream.prototype.getReader.call(stream);
+ const newReaderOtherGlobal = new OtherReadableStreamDefaultReader(new ReadableStream());
+
+ // Relevant Global Checking.
+ assert_equals(callReaderThisGlobal instanceof ReadableStreamDefaultReader, true, "reader was created in this global (.call)");
+ assert_equals(newReaderOtherGlobal instanceof ReadableStreamDefaultReader, false, "reader was created in other global (new)");
+
+ assert_equals(callReaderThisGlobal instanceof OtherReadableStreamDefaultReader, false, "reader isn't coming from other global (.call)" );
+ assert_equals(newReaderOtherGlobal instanceof OtherReadableStreamDefaultReader, true, "reader isn't coming from other global (new)");
+
+  assert_equals(otherController instanceof ReadableStreamDefaultController, false, "otherController should come from other global")
+
+
+ const request = callReaderThisGlobal.read();
+ assert_equals(request instanceof Promise, true, "Promise comes from this global");
+
+ streamController.close();
+ const requestResult = await request;
+ assert_equals(requestResult instanceof Object, true, "returned object comes from this global");
+}, "Stream objects created in expected globals")
+
+promise_test(async () => {
+ const stream = new ReadableStream();
+ const otherReader = new OtherReadableStreamDefaultReader(stream);
+ const cancelPromise = ReadableStreamDefaultReader.prototype.cancel.call(otherReader);
+ assert_equals(cancelPromise instanceof Promise, true, "Cancel promise comes from the same global as the stream");
+ assert_equals(await cancelPromise, undefined, "Cancel promise resolves to undefined");
+}, "Cancel promise is created in same global as stream")
+
+// Refresh the streams and controllers.
+function getFreshInstances() {
+ let controller;
+ let otherController;
+ let stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ new OtherReadableStream({
+ start(c) {
+ otherController = c;
+ }
+ });
+
+ return {stream, controller, otherController}
+}
+
+
+promise_test(async () => {
+ // Test closed promise on reader from another global (connected to a this-global stream)
+ const {stream, controller, otherController} = getFreshInstances();
+
+ const otherReader = new OtherReadableStreamDefaultReader(stream);
+ const closedPromise = otherReader.closed;
+ assert_equals(closedPromise instanceof otherGlobal.Promise, true, "Closed promise in other global.");
+}, "Closed Promise in correct global");
+
+promise_test(async () => {
+ const {stream, controller, otherController} = getFreshInstances();
+
+ const otherReader = OtherReadableStream.prototype.getReader.call(stream);
+ assert_equals(otherReader instanceof ReadableStreamDefaultReader, true, "Reader comes from this global")
+ const request = otherReader.read();
+ assert_equals(request instanceof Promise, true, "Promise still comes from stream's realm (this realm)");
+ otherController.close.call(controller);
+ assert_equals((await request) instanceof otherGlobal.Object, true, "Object comes from other realm");
+}, "Reader objects in correct global");
+
+
+promise_test(async () => {
+ const {stream, controller, otherController} = getFreshInstances();
+ assert_equals(controller.desiredSize, 1, "Desired size is expected");
+ Object.defineProperty(controller, "desiredSize",
+ Object.getOwnPropertyDescriptor(OtherReadableStreamDefaultController.prototype, "desiredSize"));
+ assert_equals(controller.desiredSize, 1, "Grafting getter from other prototype still returns desired size");
+}, "Desired size can be grafted from one prototype to another");
+
+promise_test(async () => {
+ const {stream, controller, otherController} = getFreshInstances();
+
+ // Make sure the controller close method returns the correct TypeError
+ const enqueuedError = { name: "enqueuedError" };
+ controller.error(enqueuedError);
+
+ assert_throws_js(TypeError, () => controller.close(), "Current Global controller");
+ assert_throws_js(otherGlobal.TypeError, () => otherController.close.call(controller), "Other global controller");
+}, "Closing errored stream throws object in appropriate global")
+
+promise_test(async () => {
+ const {otherController} = getFreshInstances();
+ // We can enqueue chunks from multiple globals
+ const chunk = { name: "chunk" };
+
+ let controller;
+ const stream = new ReadableStream({ start(c) { controller = c; } }, { size() {return 1} });
+ otherController.enqueue.call(controller, chunk);
+ otherController.enqueue.call(controller, new otherGlobal.Uint8Array(10));
+ controller.enqueue(new otherGlobal.Uint8Array(10));
+}, "Can enqueue chunks from multiple globals")
+
+promise_test(async () => {
+ const {stream, controller, otherController} = getFreshInstances();
+ const chunk = { name: "chunk" };
+
+ // We get the correct type errors out of a closed stream.
+ controller.close();
+ assert_throws_js(TypeError, () => controller.enqueue(new otherGlobal.Uint8Array(10)));
+ assert_throws_js(otherGlobal.TypeError, () => otherController.enqueue.call(controller, chunk));
+ assert_throws_js(otherGlobal.TypeError, () => otherController.enqueue.call(controller, new otherGlobal.Uint8Array(10)));
+}, "Correct errors and globals for closed streams");
+
+
+promise_test(async () => {
+ const {stream, controller, otherController} = getFreshInstances();
+ // Branches out of tee are in the correct global
+
+ const [branch1, branch2] = otherGlobal.ReadableStream.prototype.tee.call(stream);
+ assert_equals(branch1 instanceof ReadableStream, true, "Branch created in this global (as stream is in this global)");
+ assert_equals(branch2 instanceof ReadableStream, true, "Branch created in this global (as stream is in this global)");
+}, "Tee Branches in correct global");
+</script>
diff --git a/testing/web-platform/tests/streams/readable-streams/owning-type-message-port.any.js b/testing/web-platform/tests/streams/readable-streams/owning-type-message-port.any.js
new file mode 100644
index 0000000000..282c1f4114
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/owning-type-message-port.any.js
@@ -0,0 +1,49 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/rs-utils.js
+'use strict';
+
+promise_test(async () => {
+ const channel = new MessageChannel;
+ const port1 = channel.port1;
+ const port2 = channel.port2;
+
+ const source = {
+ start(controller) {
+ controller.enqueue(port1, { transfer : [ port1 ] });
+ },
+ type: 'owning'
+ };
+
+ const stream = new ReadableStream(source);
+
+ const chunk = await stream.getReader().read();
+
+ assert_not_equals(chunk.value, port1);
+
+ let promise = new Promise(resolve => port2.onmessage = e => resolve(e.data));
+ chunk.value.postMessage("toPort2");
+ assert_equals(await promise, "toPort2");
+
+ promise = new Promise(resolve => chunk.value.onmessage = e => resolve(e.data));
+ port2.postMessage("toPort1");
+ assert_equals(await promise, "toPort1");
+}, 'Transferred MessageChannel works as expected');
+
+promise_test(async t => {
+ const channel = new MessageChannel;
+ const port1 = channel.port1;
+ const port2 = channel.port2;
+
+ const source = {
+ start(controller) {
+ controller.enqueue({ port1 }, { transfer : [ port1 ] });
+ },
+ type: 'owning'
+ };
+
+ const stream = new ReadableStream(source);
+ const [clone1, clone2] = stream.tee();
+
+ await promise_rejects_dom(t, "DataCloneError", clone2.getReader().read());
+}, 'Second branch of owning ReadableStream tee should end up into errors with transfer only values');
diff --git a/testing/web-platform/tests/streams/readable-streams/owning-type-video-frame.any.js b/testing/web-platform/tests/streams/readable-streams/owning-type-video-frame.any.js
new file mode 100644
index 0000000000..b652f9c5fc
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/owning-type-video-frame.any.js
@@ -0,0 +1,128 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/rs-utils.js
+'use strict';
+
+function createVideoFrame()
+{
+ let init = {
+ format: 'I420',
+ timestamp: 1234,
+ codedWidth: 4,
+ codedHeight: 2
+ };
+ let data = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, // y
+ 1, 2, // u
+ 1, 2, // v
+ ]);
+
+ return new VideoFrame(data, init);
+}
+
+promise_test(async () => {
+ const videoFrame = createVideoFrame();
+ videoFrame.test = 1;
+ const source = {
+ start(controller) {
+ assert_equals(videoFrame.format, 'I420');
+ controller.enqueue(videoFrame, { transfer : [ videoFrame ] });
+ assert_equals(videoFrame.format, null);
+ assert_equals(videoFrame.test, 1);
+ },
+ type: 'owning'
+ };
+
+ const stream = new ReadableStream(source);
+ // Cancelling the stream should close all video frames, thus no console messages of GCing VideoFrames should happen.
+ stream.cancel();
+}, 'ReadableStream of type owning should close serialized chunks');
+
+promise_test(async () => {
+ const videoFrame = createVideoFrame();
+ videoFrame.test = 1;
+ const source = {
+ start(controller) {
+ assert_equals(videoFrame.format, 'I420');
+ controller.enqueue({ videoFrame }, { transfer : [ videoFrame ] });
+ assert_equals(videoFrame.format, null);
+ assert_equals(videoFrame.test, 1);
+ },
+ type: 'owning'
+ };
+
+ const stream = new ReadableStream(source);
+ const reader = stream.getReader();
+
+ const chunk = await reader.read();
+ assert_equals(chunk.value.videoFrame.format, 'I420');
+ assert_equals(chunk.value.videoFrame.test, undefined);
+
+ chunk.value.videoFrame.close();
+}, 'ReadableStream of type owning should transfer JS chunks with transferred values');
+
+promise_test(async t => {
+ const videoFrame = createVideoFrame();
+ videoFrame.close();
+ const source = {
+ start(controller) {
+ assert_throws_dom("DataCloneError", () => controller.enqueue(videoFrame, { transfer : [ videoFrame ] }));
+ },
+ type: 'owning'
+ };
+
+ const stream = new ReadableStream(source);
+ const reader = stream.getReader();
+
+ await promise_rejects_dom(t, "DataCloneError", reader.read());
+}, 'ReadableStream of type owning should error when trying to enqueue not serializable values');
+
+promise_test(async () => {
+ const videoFrame = createVideoFrame();
+ const source = {
+ start(controller) {
+ controller.enqueue(videoFrame, { transfer : [ videoFrame ] });
+ },
+ type: 'owning'
+ };
+
+ const stream = new ReadableStream(source);
+ const [clone1, clone2] = stream.tee();
+
+ const chunk1 = await clone1.getReader().read();
+ const chunk2 = await clone2.getReader().read();
+
+ assert_equals(videoFrame.format, null);
+ assert_equals(chunk1.value.format, 'I420');
+ assert_equals(chunk2.value.format, 'I420');
+
+ chunk1.value.close();
+ chunk2.value.close();
+}, 'ReadableStream of type owning should clone serializable objects when teeing');
+
+promise_test(async () => {
+ const videoFrame = createVideoFrame();
+ videoFrame.test = 1;
+ const source = {
+ start(controller) {
+ assert_equals(videoFrame.format, 'I420');
+ controller.enqueue({ videoFrame }, { transfer : [ videoFrame ] });
+ assert_equals(videoFrame.format, null);
+ assert_equals(videoFrame.test, 1);
+ },
+ type: 'owning'
+ };
+
+ const stream = new ReadableStream(source);
+ const [clone1, clone2] = stream.tee();
+
+ const chunk1 = await clone1.getReader().read();
+ const chunk2 = await clone2.getReader().read();
+
+ assert_equals(videoFrame.format, null);
+ assert_equals(chunk1.value.videoFrame.format, 'I420');
+ assert_equals(chunk2.value.videoFrame.format, 'I420');
+
+ chunk1.value.videoFrame.close();
+ chunk2.value.videoFrame.close();
+}, 'ReadableStream of type owning should clone JS Objects with serializables when teeing');
diff --git a/testing/web-platform/tests/streams/readable-streams/owning-type.any.js b/testing/web-platform/tests/streams/readable-streams/owning-type.any.js
new file mode 100644
index 0000000000..34c2a55d51
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/owning-type.any.js
@@ -0,0 +1,91 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/rs-utils.js
+'use strict';
+
+test(() => {
+ new ReadableStream({ type: 'owning' }); // ReadableStream constructed with 'owning' type
+}, 'ReadableStream can be constructed with owning type');
+
+test(() => {
+ let startCalled = false;
+
+ const source = {
+ start(controller) {
+ assert_equals(this, source, 'source is this during start');
+ assert_true(controller instanceof ReadableStreamDefaultController, 'default controller');
+ startCalled = true;
+ },
+ type: 'owning'
+ };
+
+ new ReadableStream(source);
+ assert_true(startCalled);
+}, 'ReadableStream of type owning should call start with a ReadableStreamDefaultController');
+
+test(() => {
+ let startCalled = false;
+
+ const source = {
+ start(controller) {
+ controller.enqueue("a", { transfer: [] });
+ controller.enqueue("a", { transfer: undefined });
+ startCalled = true;
+ },
+ type: 'owning'
+ };
+
+ new ReadableStream(source);
+ assert_true(startCalled);
+}, 'ReadableStream should be able to call enqueue with an empty transfer list');
+
+test(() => {
+ let startCalled = false;
+
+ const uint8Array = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]);
+ const buffer = uint8Array.buffer;
+ let source = {
+ start(controller) {
+ startCalled = true;
+ assert_throws_js(TypeError, () => { controller.enqueue(buffer, { transfer : [ buffer ] }); }, "transfer list is not empty");
+ }
+ };
+
+ new ReadableStream(source);
+ assert_true(startCalled);
+
+ startCalled = false;
+ source = {
+ start(controller) {
+ startCalled = true;
+ assert_throws_js(TypeError, () => { controller.enqueue(buffer, { get transfer() { throw new TypeError(); } }) }, "getter throws");
+ }
+ };
+
+ new ReadableStream(source);
+ assert_true(startCalled);
+}, 'ReadableStream should check transfer parameter');
+
+promise_test(async () => {
+ const uint8Array = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]);
+ const buffer = uint8Array.buffer;
+ buffer.test = 1;
+ const source = {
+ start(controller) {
+ assert_equals(buffer.byteLength, 8);
+ controller.enqueue(buffer, { transfer : [ buffer ] });
+ assert_equals(buffer.byteLength, 0);
+ assert_equals(buffer.test, 1);
+ },
+ type: 'owning'
+ };
+
+ const stream = new ReadableStream(source);
+ const reader = stream.getReader();
+
+ const chunk = await reader.read();
+
+ assert_not_equals(chunk.value, buffer);
+ assert_equals(chunk.value.byteLength, 8);
+ assert_equals(chunk.value.test, undefined);
+}, 'ReadableStream of type owning should transfer enqueued chunks');
diff --git a/testing/web-platform/tests/streams/readable-streams/patched-global.any.js b/testing/web-platform/tests/streams/readable-streams/patched-global.any.js
new file mode 100644
index 0000000000..c208824c86
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/patched-global.any.js
@@ -0,0 +1,142 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+// Tests which patch the global environment are kept separate to avoid
+// interfering with other tests.
+
+const ReadableStream_prototype_locked_get =
+ Object.getOwnPropertyDescriptor(ReadableStream.prototype, 'locked').get;
+
+// Verify that |rs| passes the brand check as a readable stream.
+function isReadableStream(rs) {
+ try {
+ ReadableStream_prototype_locked_get.call(rs);
+ return true;
+ } catch (e) {
+ return false;
+ }
+}
+
+test(t => {
+ const rs = new ReadableStream();
+
+ const trappedProperties = ['highWaterMark', 'size', 'start', 'type', 'mode'];
+ for (const property of trappedProperties) {
+ // eslint-disable-next-line no-extend-native, accessor-pairs
+ Object.defineProperty(Object.prototype, property, {
+ get() { throw new Error(`${property} getter called`); },
+ configurable: true
+ });
+ }
+ t.add_cleanup(() => {
+ for (const property of trappedProperties) {
+ delete Object.prototype[property];
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+ assert_true(isReadableStream(branch1), 'branch1 should be a ReadableStream');
+ assert_true(isReadableStream(branch2), 'branch2 should be a ReadableStream');
+}, 'ReadableStream tee() should not touch Object.prototype properties');
+
+test(t => {
+ const rs = new ReadableStream();
+
+ const oldReadableStream = self.ReadableStream;
+
+ self.ReadableStream = function() {
+ throw new Error('ReadableStream called on global object');
+ };
+
+ t.add_cleanup(() => {
+ self.ReadableStream = oldReadableStream;
+ });
+
+ const [branch1, branch2] = rs.tee();
+
+ assert_true(isReadableStream(branch1), 'branch1 should be a ReadableStream');
+ assert_true(isReadableStream(branch2), 'branch2 should be a ReadableStream');
+}, 'ReadableStream tee() should not call the global ReadableStream');
+
+promise_test(async t => {
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue(1);
+ c.enqueue(2);
+ c.enqueue(3);
+ c.close();
+ }
+ });
+
+ const oldReadableStreamGetReader = ReadableStream.prototype.getReader;
+
+ const ReadableStreamDefaultReader = (new ReadableStream()).getReader().constructor;
+ const oldDefaultReaderRead = ReadableStreamDefaultReader.prototype.read;
+ const oldDefaultReaderCancel = ReadableStreamDefaultReader.prototype.cancel;
+ const oldDefaultReaderReleaseLock = ReadableStreamDefaultReader.prototype.releaseLock;
+
+ self.ReadableStream.prototype.getReader = function() {
+ throw new Error('patched getReader() called');
+ };
+
+ ReadableStreamDefaultReader.prototype.read = function() {
+ throw new Error('patched read() called');
+ };
+ ReadableStreamDefaultReader.prototype.cancel = function() {
+ throw new Error('patched cancel() called');
+ };
+ ReadableStreamDefaultReader.prototype.releaseLock = function() {
+ throw new Error('patched releaseLock() called');
+ };
+
+ t.add_cleanup(() => {
+ self.ReadableStream.prototype.getReader = oldReadableStreamGetReader;
+
+ ReadableStreamDefaultReader.prototype.read = oldDefaultReaderRead;
+ ReadableStreamDefaultReader.prototype.cancel = oldDefaultReaderCancel;
+ ReadableStreamDefaultReader.prototype.releaseLock = oldDefaultReaderReleaseLock;
+ });
+
+ // read the first chunk, then cancel
+ for await (const chunk of rs) {
+ break;
+ }
+
+ // should be able to acquire a new reader
+ const reader = oldReadableStreamGetReader.call(rs);
+ // stream should be cancelled
+ await reader.closed;
+}, 'ReadableStream async iterator should use the original values of getReader() and ReadableStreamDefaultReader ' +
+ 'methods');
+
+test(t => {
+ const oldPromiseThen = Promise.prototype.then;
+ Promise.prototype.then = () => {
+ throw new Error('patched then() called');
+ };
+ t.add_cleanup(() => {
+ Promise.prototype.then = oldPromiseThen;
+ });
+ const [branch1, branch2] = new ReadableStream().tee();
+ assert_true(isReadableStream(branch1), 'branch1 should be a ReadableStream');
+ assert_true(isReadableStream(branch2), 'branch2 should be a ReadableStream');
+}, 'tee() should not call Promise.prototype.then()');
+
+test(t => {
+ const oldPromiseThen = Promise.prototype.then;
+ Promise.prototype.then = () => {
+ throw new Error('patched then() called');
+ };
+ t.add_cleanup(() => {
+ Promise.prototype.then = oldPromiseThen;
+ });
+ let readableController;
+ const rs = new ReadableStream({
+ start(c) {
+ readableController = c;
+ }
+ });
+ const ws = new WritableStream();
+ rs.pipeTo(ws);
+ readableController.close();
+}, 'pipeTo() should not call Promise.prototype.then()');
diff --git a/testing/web-platform/tests/streams/readable-streams/read-task-handling.window.js b/testing/web-platform/tests/streams/readable-streams/read-task-handling.window.js
new file mode 100644
index 0000000000..2edc0ddddf
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/read-task-handling.window.js
@@ -0,0 +1,46 @@
+// META: global=window,worker
+'use strict';
+
+function performMicrotaskCheckpoint() {
+ document.createNodeIterator(document, -1, {
+ acceptNode() {
+ return NodeFilter.FILTER_ACCEPT;
+ }
+ }).nextNode();
+}
+
+test(() => {
+ // Add a getter for "then" that will incidentally be invoked
+ // during promise resolution.
+ Object.prototype.__defineGetter__('then', () => {
+ // Clean up behind ourselves.
+ delete Object.prototype.then;
+
+ // This promise should (like all promises) be resolved
+ // asynchronously.
+ var executed = false;
+ Promise.resolve().then(_ => { executed = true; });
+
+ // This shouldn't run microtasks! They should only run
+ // after the fetch is resolved.
+ performMicrotaskCheckpoint();
+
+ // The fulfill handler above shouldn't have run yet. If it has run,
+ // throw to reject this promise and fail the test.
+ assert_false(executed, "shouldn't have run microtasks yet");
+
+ // Otherwise act as if there's no "then" property so the promise
+ // fulfills and the test passes.
+ return undefined;
+ });
+
+ const readable = new ReadableStream({
+ pull(c) {
+ c.enqueue({});
+ }
+ }, { highWaterMark: 0 });
+
+ // Create a read request, incidentally resolving a promise with an
+ // object value, thereby invoking the getter installed above.
+ readable.getReader().read();
+}, "reading from a stream should occur in a microtask scope");
diff --git a/testing/web-platform/tests/streams/readable-streams/reentrant-strategies.any.js b/testing/web-platform/tests/streams/readable-streams/reentrant-strategies.any.js
new file mode 100644
index 0000000000..8ae7b98e8d
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/reentrant-strategies.any.js
@@ -0,0 +1,264 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/recording-streams.js
+// META: script=../resources/rs-utils.js
+// META: script=../resources/test-utils.js
+'use strict';
+
+// The size() function of the readable strategy can re-entrantly call back into the ReadableStream implementation. This
+// makes it risky to cache state across the call to ReadableStreamDefaultControllerEnqueue. These tests attempt to catch
+// such errors. They are separated from the other strategy tests because no real user code should ever do anything like
+// this.
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+promise_test(() => {
+ let controller;
+ let calls = 0;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }, {
+ size() {
+ ++calls;
+ if (calls < 2) {
+ controller.enqueue('b');
+ }
+ return 1;
+ }
+ });
+ controller.enqueue('a');
+ controller.close();
+ return readableStreamToArray(rs)
+ .then(array => assert_array_equals(array, ['b', 'a'], 'array should contain two chunks'));
+}, 'enqueue() inside size() should work');
+
+promise_test(() => {
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }, {
+ size() {
+ // The queue is empty.
+ controller.close();
+ // The state has gone from "readable" to "closed".
+ return 1;
+ // This chunk will be enqueued, but will be impossible to read because the state is already "closed".
+ }
+ });
+ controller.enqueue('a');
+ return readableStreamToArray(rs)
+ .then(array => assert_array_equals(array, [], 'array should contain no chunks'));
+ // The chunk 'a' is still in rs's queue. It is closed so 'a' cannot be read.
+}, 'close() inside size() should not crash');
+
+promise_test(() => {
+ let controller;
+ let calls = 0;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }, {
+ size() {
+ ++calls;
+ if (calls === 2) {
+ // The queue contains one chunk.
+ controller.close();
+ // The state is still "readable", but closeRequest is now true.
+ }
+ return 1;
+ }
+ });
+ controller.enqueue('a');
+ controller.enqueue('b');
+ return readableStreamToArray(rs)
+ .then(array => assert_array_equals(array, ['a', 'b'], 'array should contain two chunks'));
+}, 'close request inside size() should work');
+
+promise_test(t => {
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }, {
+ size() {
+ controller.error(error1);
+ return 1;
+ }
+ });
+ controller.enqueue('a');
+ return promise_rejects_exactly(t, error1, rs.getReader().read(), 'read() should reject');
+}, 'error() inside size() should work');
+
+promise_test(() => {
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }, {
+ size() {
+ assert_equals(controller.desiredSize, 1, 'desiredSize should be 1');
+ return 1;
+ },
+ highWaterMark: 1
+ });
+ controller.enqueue('a');
+ controller.close();
+ return readableStreamToArray(rs)
+ .then(array => assert_array_equals(array, ['a'], 'array should contain one chunk'));
+}, 'desiredSize inside size() should work');
+
+promise_test(t => {
+ let cancelPromise;
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ },
+ cancel: t.step_func(reason => {
+ assert_equals(reason, error1, 'reason should be error1');
+ assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue() should throw');
+ })
+ }, {
+ size() {
+ cancelPromise = rs.cancel(error1);
+ return 1;
+ },
+ highWaterMark: Infinity
+ });
+ controller.enqueue('a');
+ const reader = rs.getReader();
+ return Promise.all([
+ reader.closed,
+ cancelPromise
+ ]);
+}, 'cancel() inside size() should work');
+
+promise_test(() => {
+ let controller;
+ let pipeToPromise;
+ const ws = recordingWritableStream();
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }, {
+ size() {
+ if (!pipeToPromise) {
+ pipeToPromise = rs.pipeTo(ws);
+ }
+ return 1;
+ },
+ highWaterMark: 1
+ });
+ controller.enqueue('a');
+ assert_not_equals(pipeToPromise, undefined);
+
+ // Some pipeTo() implementations need an additional chunk enqueued in order for the first one to be processed. See
+ // https://github.com/whatwg/streams/issues/794 for background.
+ controller.enqueue('a');
+
+ // Give pipeTo() a chance to process the queued chunks.
+ return delay(0).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'a'], 'ws should contain two chunks');
+ controller.close();
+ return pipeToPromise;
+ }).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'a', 'close'], 'target should have been closed');
+ });
+}, 'pipeTo() inside size() should behave as expected');
+
+promise_test(() => {
+ let controller;
+ let readPromise;
+ let calls = 0;
+ let readResolved = false;
+ let reader;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }, {
+ size() {
+ // This is triggered by controller.enqueue(). The queue is empty and there are no pending reads. This read is
+ // added to the list of pending reads.
+ readPromise = reader.read();
+ ++calls;
+ return 1;
+ },
+ highWaterMark: 0
+ });
+ reader = rs.getReader();
+ controller.enqueue('a');
+ readPromise.then(() => {
+ readResolved = true;
+ });
+ return flushAsyncEvents().then(() => {
+ assert_false(readResolved);
+ controller.enqueue('b');
+ assert_equals(calls, 1, 'size() should have been called once');
+ return delay(0);
+ }).then(() => {
+ assert_true(readResolved);
+ assert_equals(calls, 1, 'size() should only be called once');
+ return readPromise;
+ }).then(({ value, done }) => {
+ assert_false(done, 'done should be false');
+ // See https://github.com/whatwg/streams/issues/794 for why this chunk is not 'a'.
+ assert_equals(value, 'b', 'chunk should have been read');
+ assert_equals(calls, 1, 'calls should still be 1');
+ return reader.read();
+ }).then(({ value, done }) => {
+ assert_false(done, 'done should be false again');
+ assert_equals(value, 'a', 'chunk a should come after b');
+ });
+}, 'read() inside of size() should behave as expected');
+
+promise_test(() => {
+ let controller;
+ let reader;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }, {
+ size() {
+ reader = rs.getReader();
+ return 1;
+ }
+ });
+ controller.enqueue('a');
+ return reader.read().then(({ value, done }) => {
+ assert_false(done, 'done should be false');
+ assert_equals(value, 'a', 'value should be a');
+ });
+}, 'getReader() inside size() should work');
+
+promise_test(() => {
+ let controller;
+ let branch1;
+ let branch2;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }, {
+ size() {
+ [branch1, branch2] = rs.tee();
+ return 1;
+ }
+ });
+ controller.enqueue('a');
+ assert_true(rs.locked, 'rs should be locked');
+ controller.close();
+ return Promise.all([
+ readableStreamToArray(branch1).then(array => assert_array_equals(array, ['a'], 'branch1 should have one chunk')),
+ readableStreamToArray(branch2).then(array => assert_array_equals(array, ['a'], 'branch2 should have one chunk'))
+ ]);
+}, 'tee() inside size() should work');
diff --git a/testing/web-platform/tests/streams/readable-streams/tee.any.js b/testing/web-platform/tests/streams/readable-streams/tee.any.js
new file mode 100644
index 0000000000..c2c2e48230
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/tee.any.js
@@ -0,0 +1,479 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/rs-utils.js
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+// META: script=../resources/rs-test-templates.js
+'use strict';
+
+test(() => {
+
+ const rs = new ReadableStream();
+ const result = rs.tee();
+
+ assert_true(Array.isArray(result), 'return value should be an array');
+ assert_equals(result.length, 2, 'array should have length 2');
+ assert_equals(result[0].constructor, ReadableStream, '0th element should be a ReadableStream');
+ assert_equals(result[1].constructor, ReadableStream, '1st element should be a ReadableStream');
+
+}, 'ReadableStream teeing: rs.tee() returns an array of two ReadableStreams');
+
+promise_test(t => {
+
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ c.close();
+ }
+ });
+
+ const branch = rs.tee();
+ const branch1 = branch[0];
+ const branch2 = branch[1];
+ const reader1 = branch1.getReader();
+ const reader2 = branch2.getReader();
+
+ reader2.closed.then(t.unreached_func('branch2 should not be closed'));
+
+ return Promise.all([
+ reader1.closed,
+ reader1.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'first chunk from branch1 should be correct');
+ }),
+ reader1.read().then(r => {
+ assert_object_equals(r, { value: 'b', done: false }, 'second chunk from branch1 should be correct');
+ }),
+ reader1.read().then(r => {
+ assert_object_equals(r, { value: undefined, done: true }, 'third read() from branch1 should be done');
+ }),
+ reader2.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'first chunk from branch2 should be correct');
+ })
+ ]);
+
+}, 'ReadableStream teeing: should be able to read one branch to the end without affecting the other');
+
+promise_test(() => {
+
+ const theObject = { the: 'test object' };
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue(theObject);
+ }
+ });
+
+ const branch = rs.tee();
+ const branch1 = branch[0];
+ const branch2 = branch[1];
+ const reader1 = branch1.getReader();
+ const reader2 = branch2.getReader();
+
+ return Promise.all([reader1.read(), reader2.read()]).then(values => {
+ assert_object_equals(values[0], values[1], 'the values should be equal');
+ });
+
+}, 'ReadableStream teeing: values should be equal across each branch');
+
+promise_test(t => {
+
+ const theError = { name: 'boo!' };
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ },
+ pull() {
+ throw theError;
+ }
+ });
+
+ const branches = rs.tee();
+ const reader1 = branches[0].getReader();
+ const reader2 = branches[1].getReader();
+
+ reader1.label = 'reader1';
+ reader2.label = 'reader2';
+
+ return Promise.all([
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader2.closed),
+ reader1.read().then(r => {
+ assert_object_equals(r, { value: 'a', done: false }, 'should be able to read the first chunk in branch1');
+ }),
+ reader1.read().then(r => {
+ assert_object_equals(r, { value: 'b', done: false }, 'should be able to read the second chunk in branch1');
+
+ return promise_rejects_exactly(t, theError, reader2.read());
+ })
+ .then(() => promise_rejects_exactly(t, theError, reader1.read()))
+ ]);
+
+}, 'ReadableStream teeing: errors in the source should propagate to both branches');
+
+promise_test(() => {
+
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ c.close();
+ }
+ });
+
+ const branches = rs.tee();
+ const branch1 = branches[0];
+ const branch2 = branches[1];
+ branch1.cancel();
+
+ return Promise.all([
+ readableStreamToArray(branch1).then(chunks => {
+ assert_array_equals(chunks, [], 'branch1 should have no chunks');
+ }),
+ readableStreamToArray(branch2).then(chunks => {
+ assert_array_equals(chunks, ['a', 'b'], 'branch2 should have two chunks');
+ })
+ ]);
+
+}, 'ReadableStream teeing: canceling branch1 should not impact branch2');
+
+promise_test(() => {
+
+ const rs = new ReadableStream({
+ start(c) {
+ c.enqueue('a');
+ c.enqueue('b');
+ c.close();
+ }
+ });
+
+ const branches = rs.tee();
+ const branch1 = branches[0];
+ const branch2 = branches[1];
+ branch2.cancel();
+
+ return Promise.all([
+ readableStreamToArray(branch1).then(chunks => {
+ assert_array_equals(chunks, ['a', 'b'], 'branch1 should have two chunks');
+ }),
+ readableStreamToArray(branch2).then(chunks => {
+ assert_array_equals(chunks, [], 'branch2 should have no chunks');
+ })
+ ]);
+
+}, 'ReadableStream teeing: canceling branch2 should not impact branch1');
+
+templatedRSTeeCancel('ReadableStream teeing', (extras) => {
+ return new ReadableStream({ ...extras });
+});
+
+promise_test(t => {
+
+ let controller;
+ const stream = new ReadableStream({ start(c) { controller = c; } });
+ const [branch1, branch2] = stream.tee();
+
+ const error = new Error();
+ error.name = 'distinctive';
+
+ // Ensure neither branch is waiting in ReadableStreamDefaultReaderRead().
+ controller.enqueue();
+ controller.enqueue();
+
+ return delay(0).then(() => {
+ // This error will have to be detected via [[closedPromise]].
+ controller.error(error);
+
+ const reader1 = branch1.getReader();
+ const reader2 = branch2.getReader();
+
+ return Promise.all([
+ promise_rejects_exactly(t, error, reader1.closed, 'reader1.closed should reject'),
+ promise_rejects_exactly(t, error, reader2.closed, 'reader2.closed should reject')
+ ]);
+ });
+
+}, 'ReadableStream teeing: erroring a teed stream should error both branches');
+
+promise_test(() => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const branches = rs.tee();
+ const reader1 = branches[0].getReader();
+ const reader2 = branches[1].getReader();
+
+ const promise = Promise.all([reader1.closed, reader2.closed]);
+
+ controller.close();
+ return promise;
+
+}, 'ReadableStream teeing: closing the original should immediately close the branches');
+
+promise_test(t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const branches = rs.tee();
+ const reader1 = branches[0].getReader();
+ const reader2 = branches[1].getReader();
+
+ const theError = { name: 'boo!' };
+ const promise = Promise.all([
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader2.closed)
+ ]);
+
+ controller.error(theError);
+ return promise;
+
+}, 'ReadableStream teeing: erroring the original should immediately error the branches');
+
+promise_test(async t => {
+
+ let controller;
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader());
+ const cancelPromise = reader2.cancel();
+
+ controller.enqueue('a');
+
+ const read1 = await reader1.read();
+ assert_object_equals(read1, { value: 'a', done: false }, 'first read() from branch1 should fulfill with the chunk');
+
+ controller.close();
+
+ const read2 = await reader1.read();
+ assert_object_equals(read2, { value: undefined, done: true }, 'second read() from branch1 should be done');
+
+ await Promise.all([
+ reader1.closed,
+ cancelPromise
+ ]);
+
+}, 'ReadableStream teeing: canceling branch1 should finish when branch2 reads until end of stream');
+
+promise_test(async t => {
+
+ let controller;
+ const theError = { name: 'boo!' };
+ const rs = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader());
+ const cancelPromise = reader2.cancel();
+
+ controller.error(theError);
+
+ await Promise.all([
+ promise_rejects_exactly(t, theError, reader1.read()),
+ cancelPromise
+ ]);
+
+}, 'ReadableStream teeing: canceling branch1 should finish when original stream errors');
+
+promise_test(async () => {
+
+ const rs = new ReadableStream({});
+
+ const [branch1, branch2] = rs.tee();
+
+ const cancel1 = branch1.cancel();
+ await flushAsyncEvents();
+ const cancel2 = branch2.cancel();
+
+ await Promise.all([cancel1, cancel2]);
+
+}, 'ReadableStream teeing: canceling both branches in sequence with delay');
+
+promise_test(async t => {
+
+ const theError = { name: 'boo!' };
+ const rs = new ReadableStream({
+ cancel() {
+ throw theError;
+ }
+ });
+
+ const [branch1, branch2] = rs.tee();
+
+ const cancel1 = branch1.cancel();
+ await flushAsyncEvents();
+ const cancel2 = branch2.cancel();
+
+ await Promise.all([
+ promise_rejects_exactly(t, theError, cancel1),
+ promise_rejects_exactly(t, theError, cancel2)
+ ]);
+
+}, 'ReadableStream teeing: failing to cancel when canceling both branches in sequence with delay');
+
+test(t => {
+
+ // Copy original global.
+ const oldReadableStream = ReadableStream;
+ const getReader = ReadableStream.prototype.getReader;
+
+ const origRS = new ReadableStream();
+
+ // Replace the global ReadableStream constructor with one that doesn't work.
+ ReadableStream = function() {
+ throw new Error('global ReadableStream constructor called');
+ };
+ t.add_cleanup(() => {
+ ReadableStream = oldReadableStream;
+ });
+
+ // This will probably fail if the global ReadableStream constructor was used.
+ const [rs1, rs2] = origRS.tee();
+
+ // These will definitely fail if the global ReadableStream constructor was used.
+ assert_not_equals(getReader.call(rs1), undefined, 'getReader should work on rs1');
+ assert_not_equals(getReader.call(rs2), undefined, 'getReader should work on rs2');
+
+}, 'ReadableStreamTee should not use a modified ReadableStream constructor from the global object');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({}, { highWaterMark: 0 });
+
+ // Create two branches, each with a HWM of 1. This should result in one
+ // chunk being pulled, not two.
+ rs.tee();
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(rs.events, ['pull'], 'pull should only be called once');
+ });
+
+}, 'ReadableStreamTee should not pull more chunks than can fit in the branch queue');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({
+ pull(controller) {
+ controller.enqueue('a');
+ }
+ }, { highWaterMark: 0 });
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader());
+ return Promise.all([reader1.read(), reader2.read()])
+ .then(() => {
+ assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice');
+ });
+
+}, 'ReadableStreamTee should only pull enough to fill the emptiest queue');
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({}, { highWaterMark: 0 });
+ const theError = { name: 'boo!' };
+
+ rs.controller.error(theError);
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader());
+
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(rs.events, [], 'pull should not be called');
+
+ return Promise.all([
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader2.closed)
+ ]);
+ });
+
+}, 'ReadableStreamTee should not pull when original is already errored');
+
+for (const branch of [1, 2]) {
+ promise_test(t => {
+
+ const rs = recordingReadableStream({}, { highWaterMark: 0 });
+ const theError = { name: 'boo!' };
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader());
+
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(rs.events, ['pull'], 'pull should be called once');
+
+ rs.controller.enqueue('a');
+
+ const reader = (branch === 1) ? reader1 : reader2;
+ return reader.read();
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice');
+
+ rs.controller.error(theError);
+
+ return Promise.all([
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader2.closed)
+ ]);
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice');
+ });
+
+ }, `ReadableStreamTee stops pulling when original stream errors while branch ${branch} is reading`);
+}
+
+promise_test(t => {
+
+ const rs = recordingReadableStream({}, { highWaterMark: 0 });
+ const theError = { name: 'boo!' };
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader());
+
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(rs.events, ['pull'], 'pull should be called once');
+
+ rs.controller.enqueue('a');
+
+ return Promise.all([reader1.read(), reader2.read()]);
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice');
+
+ rs.controller.error(theError);
+
+ return Promise.all([
+ promise_rejects_exactly(t, theError, reader1.closed),
+ promise_rejects_exactly(t, theError, reader2.closed)
+ ]);
+ }).then(() => flushAsyncEvents()).then(() => {
+ assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice');
+ });
+
+}, 'ReadableStreamTee stops pulling when original stream errors while both branches are reading');
+
+promise_test(async () => {
+
+ const rs = recordingReadableStream();
+
+ const [reader1, reader2] = rs.tee().map(branch => branch.getReader());
+ const branch1Reads = [reader1.read(), reader1.read()];
+ const branch2Reads = [reader2.read(), reader2.read()];
+
+ await flushAsyncEvents();
+ rs.controller.enqueue('a');
+ rs.controller.close();
+
+ assert_object_equals(await branch1Reads[0], { value: 'a', done: false }, 'first chunk from branch1 should be correct');
+ assert_object_equals(await branch2Reads[0], { value: 'a', done: false }, 'first chunk from branch2 should be correct');
+
+ assert_object_equals(await branch1Reads[1], { value: undefined, done: true }, 'second read() from branch1 should be done');
+ assert_object_equals(await branch2Reads[1], { value: undefined, done: true }, 'second read() from branch2 should be done');
+
+}, 'ReadableStream teeing: enqueue() and close() while both branches are pulling');
diff --git a/testing/web-platform/tests/streams/readable-streams/templated.any.js b/testing/web-platform/tests/streams/readable-streams/templated.any.js
new file mode 100644
index 0000000000..dc75b805a1
--- /dev/null
+++ b/testing/web-platform/tests/streams/readable-streams/templated.any.js
@@ -0,0 +1,143 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/rs-test-templates.js
+'use strict';
+
+// Run the readable stream test templates against readable streams created directly using the constructor
+
+const theError = { name: 'boo!' };
+const chunks = ['a', 'b'];
+
+templatedRSEmpty('ReadableStream (empty)', () => {
+ return new ReadableStream();
+});
+
+templatedRSEmptyReader('ReadableStream (empty) reader', () => {
+ return streamAndDefaultReader(new ReadableStream());
+});
+
+templatedRSClosed('ReadableStream (closed via call in start)', () => {
+ return new ReadableStream({
+ start(c) {
+ c.close();
+ }
+ });
+});
+
+templatedRSClosedReader('ReadableStream reader (closed before getting reader)', () => {
+ let controller;
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ controller.close();
+ const result = streamAndDefaultReader(stream);
+ return result;
+});
+
+templatedRSClosedReader('ReadableStream reader (closed after getting reader)', () => {
+ let controller;
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ const result = streamAndDefaultReader(stream);
+ controller.close();
+ return result;
+});
+
+templatedRSClosed('ReadableStream (closed via cancel)', () => {
+ const stream = new ReadableStream();
+ stream.cancel();
+ return stream;
+});
+
+templatedRSClosedReader('ReadableStream reader (closed via cancel after getting reader)', () => {
+ const stream = new ReadableStream();
+ const result = streamAndDefaultReader(stream);
+ result.reader.cancel();
+ return result;
+});
+
+templatedRSErrored('ReadableStream (errored via call in start)', () => {
+ return new ReadableStream({
+ start(c) {
+ c.error(theError);
+ }
+ });
+}, theError);
+
+templatedRSErroredSyncOnly('ReadableStream (errored via call in start)', () => {
+ return new ReadableStream({
+ start(c) {
+ c.error(theError);
+ }
+ });
+}, theError);
+
+templatedRSErrored('ReadableStream (errored via returning a rejected promise in start)', () => {
+ return new ReadableStream({
+ start() {
+ return Promise.reject(theError);
+ }
+ });
+}, theError);
+
+templatedRSErroredReader('ReadableStream (errored via returning a rejected promise in start) reader', () => {
+ return streamAndDefaultReader(new ReadableStream({
+ start() {
+ return Promise.reject(theError);
+ }
+ }));
+}, theError);
+
+templatedRSErroredReader('ReadableStream reader (errored before getting reader)', () => {
+ let controller;
+ const stream = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ controller.error(theError);
+ return streamAndDefaultReader(stream);
+}, theError);
+
+templatedRSErroredReader('ReadableStream reader (errored after getting reader)', () => {
+ let controller;
+ const result = streamAndDefaultReader(new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ }));
+ controller.error(theError);
+ return result;
+}, theError);
+
+templatedRSTwoChunksOpenReader('ReadableStream (two chunks enqueued, still open) reader', () => {
+ return streamAndDefaultReader(new ReadableStream({
+ start(c) {
+ c.enqueue(chunks[0]);
+ c.enqueue(chunks[1]);
+ }
+ }));
+}, chunks);
+
+templatedRSTwoChunksClosedReader('ReadableStream (two chunks enqueued, then closed) reader', () => {
+ let doClose;
+ const stream = new ReadableStream({
+ start(c) {
+ c.enqueue(chunks[0]);
+ c.enqueue(chunks[1]);
+ doClose = c.close.bind(c);
+ }
+ });
+ const result = streamAndDefaultReader(stream);
+ doClose();
+ return result;
+}, chunks);
+
+function streamAndDefaultReader(stream) {
+ return { stream, reader: stream.getReader() };
+}
diff --git a/testing/web-platform/tests/streams/resources/recording-streams.js b/testing/web-platform/tests/streams/resources/recording-streams.js
new file mode 100644
index 0000000000..661fe512f5
--- /dev/null
+++ b/testing/web-platform/tests/streams/resources/recording-streams.js
@@ -0,0 +1,131 @@
+'use strict';
+
+self.recordingReadableStream = (extras = {}, strategy) => {
+ let controllerToCopyOver;
+ const stream = new ReadableStream({
+ type: extras.type,
+ start(controller) {
+ controllerToCopyOver = controller;
+
+ if (extras.start) {
+ return extras.start(controller);
+ }
+
+ return undefined;
+ },
+ pull(controller) {
+ stream.events.push('pull');
+
+ if (extras.pull) {
+ return extras.pull(controller);
+ }
+
+ return undefined;
+ },
+ cancel(reason) {
+ stream.events.push('cancel', reason);
+ stream.eventsWithoutPulls.push('cancel', reason);
+
+ if (extras.cancel) {
+ return extras.cancel(reason);
+ }
+
+ return undefined;
+ }
+ }, strategy);
+
+ stream.controller = controllerToCopyOver;
+ stream.events = [];
+ stream.eventsWithoutPulls = [];
+
+ return stream;
+};
+
+self.recordingWritableStream = (extras = {}, strategy) => {
+ let controllerToCopyOver;
+ const stream = new WritableStream({
+ start(controller) {
+ controllerToCopyOver = controller;
+
+ if (extras.start) {
+ return extras.start(controller);
+ }
+
+ return undefined;
+ },
+ write(chunk, controller) {
+ stream.events.push('write', chunk);
+
+ if (extras.write) {
+ return extras.write(chunk, controller);
+ }
+
+ return undefined;
+ },
+ close() {
+ stream.events.push('close');
+
+ if (extras.close) {
+ return extras.close();
+ }
+
+ return undefined;
+ },
+ abort(e) {
+ stream.events.push('abort', e);
+
+ if (extras.abort) {
+ return extras.abort(e);
+ }
+
+ return undefined;
+ }
+ }, strategy);
+
+ stream.controller = controllerToCopyOver;
+ stream.events = [];
+
+ return stream;
+};
+
+self.recordingTransformStream = (extras = {}, writableStrategy, readableStrategy) => {
+ let controllerToCopyOver;
+ const stream = new TransformStream({
+ start(controller) {
+ controllerToCopyOver = controller;
+
+ if (extras.start) {
+ return extras.start(controller);
+ }
+
+ return undefined;
+ },
+
+ transform(chunk, controller) {
+ stream.events.push('transform', chunk);
+
+ if (extras.transform) {
+ return extras.transform(chunk, controller);
+ }
+
+ controller.enqueue(chunk);
+
+ return undefined;
+ },
+
+ flush(controller) {
+ stream.events.push('flush');
+
+ if (extras.flush) {
+ return extras.flush(controller);
+ }
+
+ return undefined;
+ }
+ }, writableStrategy, readableStrategy);
+
+ stream.controller = controllerToCopyOver;
+ stream.events = [];
+
+ return stream;
+};
diff --git a/testing/web-platform/tests/streams/resources/rs-test-templates.js b/testing/web-platform/tests/streams/resources/rs-test-templates.js
new file mode 100644
index 0000000000..25751c477f
--- /dev/null
+++ b/testing/web-platform/tests/streams/resources/rs-test-templates.js
@@ -0,0 +1,721 @@
+'use strict';
+
+// These tests can be run against any readable stream produced by the web platform that meets the given descriptions.
+// For readable stream tests, the factory should return the stream. For reader tests, the factory should return a
+// { stream, reader } object. (You can use this to vary the time at which you acquire a reader.)
+
+self.templatedRSEmpty = (label, factory) => {
+ test(() => {}, 'Running templatedRSEmpty with ' + label);
+
+ test(() => {
+
+ const rs = factory();
+
+ assert_equals(typeof rs.locked, 'boolean', 'has a boolean locked getter');
+ assert_equals(typeof rs.cancel, 'function', 'has a cancel method');
+ assert_equals(typeof rs.getReader, 'function', 'has a getReader method');
+ assert_equals(typeof rs.pipeThrough, 'function', 'has a pipeThrough method');
+ assert_equals(typeof rs.pipeTo, 'function', 'has a pipeTo method');
+ assert_equals(typeof rs.tee, 'function', 'has a tee method');
+
+ }, label + ': instances have the correct methods and properties');
+
+ test(() => {
+ const rs = factory();
+
+ assert_throws_js(TypeError, () => rs.getReader({ mode: '' }), 'empty string mode should throw');
+ assert_throws_js(TypeError, () => rs.getReader({ mode: null }), 'null mode should throw');
+ assert_throws_js(TypeError, () => rs.getReader({ mode: 'asdf' }), 'asdf mode should throw');
+ assert_throws_js(TypeError, () => rs.getReader(5), '5 should throw');
+
+ // Should not throw
+ rs.getReader(null);
+
+ }, label + ': calling getReader with invalid arguments should throw appropriate errors');
+};
+
+self.templatedRSClosed = (label, factory) => {
+ test(() => {}, 'Running templatedRSClosed with ' + label);
+
+ promise_test(() => {
+
+ const rs = factory();
+ const cancelPromise1 = rs.cancel();
+ const cancelPromise2 = rs.cancel();
+
+ assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct promises');
+
+ return Promise.all([
+ cancelPromise1.then(v => assert_equals(v, undefined, 'first cancel() call should fulfill with undefined')),
+ cancelPromise2.then(v => assert_equals(v, undefined, 'second cancel() call should fulfill with undefined'))
+ ]);
+
+ }, label + ': cancel() should return a distinct fulfilled promise each time');
+
+ test(() => {
+
+ const rs = factory();
+ assert_false(rs.locked, 'locked getter should return false');
+
+ }, label + ': locked should be false');
+
+ test(() => {
+
+ const rs = factory();
+ rs.getReader(); // getReader() should not throw.
+
+ }, label + ': getReader() should be OK');
+
+ test(() => {
+
+ const rs = factory();
+
+ const reader = rs.getReader();
+ reader.releaseLock();
+
+ const reader2 = rs.getReader(); // Getting a second reader should not throw.
+ reader2.releaseLock();
+
+ rs.getReader(); // Getting a third reader should not throw.
+
+ }, label + ': should be able to acquire multiple readers if they are released in succession');
+
+ test(() => {
+
+ const rs = factory();
+
+ rs.getReader();
+
+ assert_throws_js(TypeError, () => rs.getReader(), 'getting a second reader should throw');
+ assert_throws_js(TypeError, () => rs.getReader(), 'getting a third reader should throw');
+
+ }, label + ': should not be able to acquire a second reader if we don\'t release the first one');
+};
+
+self.templatedRSErrored = (label, factory, error) => {
+ test(() => {}, 'Running templatedRSErrored with ' + label);
+
+ promise_test(t => {
+
+ const rs = factory();
+ const reader = rs.getReader();
+
+ return Promise.all([
+ promise_rejects_exactly(t, error, reader.closed),
+ promise_rejects_exactly(t, error, reader.read())
+ ]);
+
+ }, label + ': getReader() should return a reader that acts errored');
+
+ promise_test(t => {
+
+ const rs = factory();
+ const reader = rs.getReader();
+
+ return Promise.all([
+ promise_rejects_exactly(t, error, reader.read()),
+ promise_rejects_exactly(t, error, reader.read()),
+ promise_rejects_exactly(t, error, reader.closed)
+ ]);
+
+ }, label + ': read() twice should give the error each time');
+
+ test(() => {
+ const rs = factory();
+
+ assert_false(rs.locked, 'locked getter should return false');
+ }, label + ': locked should be false');
+};
+
+self.templatedRSErroredSyncOnly = (label, factory, error) => {
+ test(() => {}, 'Running templatedRSErroredSyncOnly with ' + label);
+
+ promise_test(t => {
+
+ const rs = factory();
+ rs.getReader().releaseLock();
+ const reader = rs.getReader(); // Calling getReader() twice does not throw (the stream is not locked).
+
+ return promise_rejects_exactly(t, error, reader.closed);
+
+ }, label + ': should be able to obtain a second reader, with the correct closed promise');
+
+ test(() => {
+
+ const rs = factory();
+ rs.getReader();
+
+ assert_throws_js(TypeError, () => rs.getReader(), 'getting a second reader should throw a TypeError');
+ assert_throws_js(TypeError, () => rs.getReader(), 'getting a third reader should throw a TypeError');
+
+ }, label + ': should not be able to obtain additional readers if we don\'t release the first lock');
+
+ promise_test(t => {
+
+ const rs = factory();
+ const cancelPromise1 = rs.cancel();
+ const cancelPromise2 = rs.cancel();
+
+ assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct promises');
+
+ return Promise.all([
+ promise_rejects_exactly(t, error, cancelPromise1),
+ promise_rejects_exactly(t, error, cancelPromise2)
+ ]);
+
+ }, label + ': cancel() should return a distinct rejected promise each time');
+
+ promise_test(t => {
+
+ const rs = factory();
+ const reader = rs.getReader();
+ const cancelPromise1 = reader.cancel();
+ const cancelPromise2 = reader.cancel();
+
+ assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct promises');
+
+ return Promise.all([
+ promise_rejects_exactly(t, error, cancelPromise1),
+ promise_rejects_exactly(t, error, cancelPromise2)
+ ]);
+
+ }, label + ': reader cancel() should return a distinct rejected promise each time');
+};
+
+self.templatedRSEmptyReader = (label, factory) => {
+ test(() => {}, 'Running templatedRSEmptyReader with ' + label);
+
+ test(() => {
+
+ const reader = factory().reader;
+
+ assert_true('closed' in reader, 'has a closed property');
+ assert_equals(typeof reader.closed.then, 'function', 'closed property is thenable');
+
+ assert_equals(typeof reader.cancel, 'function', 'has a cancel method');
+ assert_equals(typeof reader.read, 'function', 'has a read method');
+ assert_equals(typeof reader.releaseLock, 'function', 'has a releaseLock method');
+
+ }, label + ': instances have the correct methods and properties');
+
+ test(() => {
+
+ const stream = factory().stream;
+
+ assert_true(stream.locked, 'locked getter should return true');
+
+ }, label + ': locked should be true');
+
+ promise_test(t => {
+
+ const reader = factory().reader;
+
+ reader.read().then(
+ t.unreached_func('read() should not fulfill'),
+ t.unreached_func('read() should not reject')
+ );
+
+ return delay(500);
+
+ }, label + ': read() should never settle');
+
+ promise_test(t => {
+
+ const reader = factory().reader;
+
+ reader.read().then(
+ t.unreached_func('read() should not fulfill'),
+ t.unreached_func('read() should not reject')
+ );
+
+ reader.read().then(
+ t.unreached_func('read() should not fulfill'),
+ t.unreached_func('read() should not reject')
+ );
+
+ return delay(500);
+
+ }, label + ': two read()s should both never settle');
+
+ test(() => {
+
+ const reader = factory().reader;
+ assert_not_equals(reader.read(), reader.read(), 'the promises returned should be distinct');
+
+ }, label + ': read() should return distinct promises each time');
+
+ test(() => {
+
+ const stream = factory().stream;
+ assert_throws_js(TypeError, () => stream.getReader(), 'stream.getReader() should throw a TypeError');
+
+ }, label + ': getReader() again on the stream should fail');
+
+ promise_test(async t => {
+
+ const streamAndReader = factory();
+ const stream = streamAndReader.stream;
+ const reader = streamAndReader.reader;
+
+ const read1 = reader.read();
+ const read2 = reader.read();
+ const closed = reader.closed;
+
+ reader.releaseLock();
+
+ assert_false(stream.locked, 'the stream should be unlocked');
+
+ await Promise.all([
+ promise_rejects_js(t, TypeError, read1, 'first read should reject'),
+ promise_rejects_js(t, TypeError, read2, 'second read should reject'),
+ promise_rejects_js(t, TypeError, closed, 'closed should reject')
+ ]);
+
+ }, label + ': releasing the lock should reject all pending read requests');
+
+ promise_test(t => {
+
+ const reader = factory().reader;
+ reader.releaseLock();
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, reader.read()),
+ promise_rejects_js(t, TypeError, reader.read())
+ ]);
+
+ }, label + ': releasing the lock should cause further read() calls to reject with a TypeError');
+
+ promise_test(t => {
+
+ const reader = factory().reader;
+
+ const closedBefore = reader.closed;
+ reader.releaseLock();
+ const closedAfter = reader.closed;
+
+ assert_equals(closedBefore, closedAfter, 'the closed promise should not change identity');
+
+ return promise_rejects_js(t, TypeError, closedBefore);
+
+ }, label + ': releasing the lock should cause closed calls to reject with a TypeError');
+
+ test(() => {
+
+ const streamAndReader = factory();
+ const stream = streamAndReader.stream;
+ const reader = streamAndReader.reader;
+
+ reader.releaseLock();
+ assert_false(stream.locked, 'locked getter should return false');
+
+ }, label + ': releasing the lock should cause locked to become false');
+
+ promise_test(() => {
+
+ const reader = factory().reader;
+ reader.cancel();
+
+ return reader.read().then(r => {
+ assert_object_equals(r, { value: undefined, done: true }, 'read()ing from the reader should give a done result');
+ });
+
+ }, label + ': canceling via the reader should cause the reader to act closed');
+
+ promise_test(t => {
+
+ const stream = factory().stream;
+ return promise_rejects_js(t, TypeError, stream.cancel());
+
+ }, label + ': canceling via the stream should fail');
+};
+
+// Templated tests for a reader obtained from an already-closed stream.
+// `factory` returns a fresh { stream, reader } pair per test; `label`
+// prefixes every generated test name.
+self.templatedRSClosedReader = (label, factory) => {
+  test(() => {}, 'Running templatedRSClosedReader with ' + label);
+
+  promise_test(() => {
+
+    const reader = factory().reader;
+
+    return reader.read().then(v => {
+      assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly');
+    });
+
+  }, label + ': read() should fulfill with { value: undefined, done: true }');
+
+  promise_test(() => {
+
+    const reader = factory().reader;
+
+    return Promise.all([
+      reader.read().then(v => {
+        assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly');
+      }),
+      reader.read().then(v => {
+        assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly');
+      })
+    ]);
+
+  }, label + ': read() multiple times should fulfill with { value: undefined, done: true }');
+
+  promise_test(() => {
+
+    const reader = factory().reader;
+
+    return reader.read().then(() => reader.read()).then(v => {
+      assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly');
+    });
+
+  }, label + ': read() should work when used within another read() fulfill callback');
+
+  promise_test(() => {
+
+    const reader = factory().reader;
+
+    return reader.closed.then(v => assert_equals(v, undefined, 'reader closed should fulfill with undefined'));
+
+  }, label + ': closed should fulfill with undefined');
+
+  promise_test(t => {
+
+    const reader = factory().reader;
+
+    // Grab closed before and after release: releasing must swap in a new,
+    // rejected promise while the old one still fulfills.
+    const closedBefore = reader.closed;
+    reader.releaseLock();
+    const closedAfter = reader.closed;
+
+    assert_not_equals(closedBefore, closedAfter, 'the closed promise should change identity');
+
+    return Promise.all([
+      closedBefore.then(v => assert_equals(v, undefined, 'reader.closed acquired before release should fulfill')),
+      promise_rejects_js(t, TypeError, closedAfter)
+    ]);
+
+  }, label + ': releasing the lock should cause closed to reject and change identity');
+
+  promise_test(() => {
+
+    const reader = factory().reader;
+    const cancelPromise1 = reader.cancel();
+    const cancelPromise2 = reader.cancel();
+    const closedReaderPromise = reader.closed;
+
+    assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct promises');
+    assert_not_equals(cancelPromise1, closedReaderPromise, 'cancel() promise 1 should be distinct from reader.closed');
+    assert_not_equals(cancelPromise2, closedReaderPromise, 'cancel() promise 2 should be distinct from reader.closed');
+
+    return Promise.all([
+      cancelPromise1.then(v => assert_equals(v, undefined, 'first cancel() should fulfill with undefined')),
+      cancelPromise2.then(v => assert_equals(v, undefined, 'second cancel() should fulfill with undefined'))
+    ]);
+
+  }, label + ': cancel() should return a distinct fulfilled promise each time');
+};
+
+// Templated tests for a reader on an errored stream. `error` is the exact
+// value the stream was errored with; closed and read() must reject with it
+// (checked with promise_rejects_exactly, i.e. same identity, not just type).
+self.templatedRSErroredReader = (label, factory, error) => {
+  test(() => {}, 'Running templatedRSErroredReader with ' + label);
+
+  promise_test(t => {
+
+    const reader = factory().reader;
+    return promise_rejects_exactly(t, error, reader.closed);
+
+  }, label + ': closed should reject with the error');
+
+  promise_test(t => {
+
+    const reader = factory().reader;
+    const closedBefore = reader.closed;
+
+    return promise_rejects_exactly(t, error, closedBefore).then(() => {
+      reader.releaseLock();
+
+      // After release the closed promise is replaced by one rejected with a
+      // TypeError, regardless of the original stream error.
+      const closedAfter = reader.closed;
+      assert_not_equals(closedBefore, closedAfter, 'the closed promise should change identity');
+
+      return promise_rejects_js(t, TypeError, closedAfter);
+    });
+
+  }, label + ': releasing the lock should cause closed to reject and change identity');
+
+  promise_test(t => {
+
+    const reader = factory().reader;
+    return promise_rejects_exactly(t, error, reader.read());
+
+  }, label + ': read() should reject with the error');
+};
+
+// Templated tests for a reader on a still-open stream that will deliver two
+// chunks; `chunks` holds the two expected values in read order.
+self.templatedRSTwoChunksOpenReader = (label, factory, chunks) => {
+  test(() => {}, 'Running templatedRSTwoChunksOpenReader with ' + label);
+
+  promise_test(() => {
+
+    const reader = factory().reader;
+
+    return Promise.all([
+      reader.read().then(r => {
+        assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct');
+      }),
+      reader.read().then(r => {
+        assert_object_equals(r, { value: chunks[1], done: false }, 'second result should be correct');
+      })
+    ]);
+
+  }, label + ': calling read() twice without waiting will eventually give both chunks (sequential)');
+
+  promise_test(() => {
+
+    const reader = factory().reader;
+
+    return reader.read().then(r => {
+      assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct');
+
+      return reader.read().then(r2 => {
+        assert_object_equals(r2, { value: chunks[1], done: false }, 'second result should be correct');
+      });
+    });
+
+  }, label + ': calling read() twice without waiting will eventually give both chunks (nested)');
+
+  test(() => {
+
+    const reader = factory().reader;
+    assert_not_equals(reader.read(), reader.read(), 'the promises returned should be distinct');
+
+  }, label + ': read() should return distinct promises each time');
+
+  promise_test(() => {
+
+    const reader = factory().reader;
+
+    const promise1 = reader.closed.then(v => {
+      assert_equals(v, undefined, 'reader closed should fulfill with undefined');
+    });
+
+    // A read() issued before cancel() must still deliver its chunk ...
+    const promise2 = reader.read().then(r => {
+      assert_object_equals(r, { value: chunks[0], done: false },
+                           'promise returned before cancellation should fulfill with a chunk');
+    });
+
+    reader.cancel();
+
+    // ... while a read() issued after cancel() sees end-of-stream.
+    const promise3 = reader.read().then(r => {
+      assert_object_equals(r, { value: undefined, done: true },
+                           'promise returned after cancellation should fulfill with an end-of-stream signal');
+    });
+
+    return Promise.all([promise1, promise2, promise3]);
+
+  }, label + ': cancel() after a read() should still give that single read result');
+};
+
+// Templated tests for a reader on a closed stream that still has two queued
+// chunks; the first two read()s deliver chunks, the third reports done.
+self.templatedRSTwoChunksClosedReader = function (label, factory, chunks) {
+  test(() => {}, 'Running templatedRSTwoChunksClosedReader with ' + label);
+
+  promise_test(() => {
+
+    const reader = factory().reader;
+
+    return Promise.all([
+      reader.read().then(r => {
+        assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct');
+      }),
+      reader.read().then(r => {
+        assert_object_equals(r, { value: chunks[1], done: false }, 'second result should be correct');
+      }),
+      reader.read().then(r => {
+        assert_object_equals(r, { value: undefined, done: true }, 'third result should be correct');
+      })
+    ]);
+
+  }, label + ': third read(), without waiting, should give { value: undefined, done: true } (sequential)');
+
+  promise_test(() => {
+
+    const reader = factory().reader;
+
+    return reader.read().then(r => {
+      assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct');
+
+      return reader.read().then(r2 => {
+        assert_object_equals(r2, { value: chunks[1], done: false }, 'second result should be correct');
+
+        return reader.read().then(r3 => {
+          assert_object_equals(r3, { value: undefined, done: true }, 'third result should be correct');
+        });
+      });
+    });
+
+  }, label + ': third read(), without waiting, should give { value: undefined, done: true } (nested)');
+
+  promise_test(() => {
+
+    const streamAndReader = factory();
+    const stream = streamAndReader.stream;
+    const reader = streamAndReader.reader;
+
+    assert_true(stream.locked, 'stream should start locked');
+
+    const promise = reader.closed.then(v => {
+      assert_equals(v, undefined, 'reader closed should fulfill with undefined');
+      // Draining closes the reader but does not release its lock.
+      assert_true(stream.locked, 'stream should remain locked');
+    });
+
+    reader.read();
+    reader.read();
+
+    return promise;
+
+  }, label +
+     ': draining the stream via read() should cause the reader closed promise to fulfill, but locked stays true');
+
+  promise_test(() => {
+
+    const streamAndReader = factory();
+    const stream = streamAndReader.stream;
+    const reader = streamAndReader.reader;
+
+    const promise = reader.closed.then(() => {
+      assert_true(stream.locked, 'the stream should start locked');
+      reader.releaseLock(); // Releasing the lock after reader closed should not throw.
+      assert_false(stream.locked, 'the stream should end unlocked');
+    });
+
+    reader.read();
+    reader.read();
+
+    return promise;
+
+  }, label + ': releasing the lock after the stream is closed should cause locked to become false');
+
+  promise_test(t => {
+
+    const reader = factory().reader;
+
+    reader.releaseLock();
+
+    return Promise.all([
+      promise_rejects_js(t, TypeError, reader.read()),
+      promise_rejects_js(t, TypeError, reader.read()),
+      promise_rejects_js(t, TypeError, reader.read())
+    ]);
+
+  }, label + ': releasing the lock should cause further read() calls to reject with a TypeError');
+
+  promise_test(() => {
+
+    const streamAndReader = factory();
+    const stream = streamAndReader.stream;
+    const reader = streamAndReader.reader;
+
+    // reader.closed must be the same promise object at every stage of the
+    // reader's life (before/after read(), even after releaseLock()).
+    const readerClosed = reader.closed;
+
+    assert_equals(reader.closed, readerClosed, 'accessing reader.closed twice in succession gives the same value');
+
+    const promise = reader.read().then(() => {
+      assert_equals(reader.closed, readerClosed, 'reader.closed is the same after read() fulfills');
+
+      reader.releaseLock();
+
+      assert_equals(reader.closed, readerClosed, 'reader.closed is the same after releasing the lock');
+
+      const newReader = stream.getReader();
+      return newReader.read();
+    });
+
+    assert_equals(reader.closed, readerClosed, 'reader.closed is the same after calling read()');
+
+    return promise;
+
+  }, label + ': reader\'s closed property always returns the same promise');
+};
+
+// Templated tests for tee() cancellation behavior. `factory` takes an
+// underlying-source dictionary and returns the stream to tee.
+self.templatedRSTeeCancel = (label, factory) => {
+  test(() => {}, `Running templatedRSTeeCancel with ${label}`);
+
+  promise_test(async () => {
+
+    const reason1 = new Error('We\'re wanted men.');
+    const reason2 = new Error('I have the death sentence on twelve systems.');
+
+    let resolve;
+    const promise = new Promise(r => resolve = r);
+    const rs = factory({
+      cancel(reason) {
+        assert_array_equals(reason, [reason1, reason2],
+                            'the cancel reason should be an array containing those from the branches');
+        resolve();
+      }
+    });
+
+    const [branch1, branch2] = rs.tee();
+    await Promise.all([
+      branch1.cancel(reason1),
+      branch2.cancel(reason2),
+      promise
+    ]);
+
+  }, `${label}: canceling both branches should aggregate the cancel reasons into an array`);
+
+  promise_test(async () => {
+
+    const reason1 = new Error('This little one\'s not worth the effort.');
+    const reason2 = new Error('Come, let me get you something.');
+
+    let resolve;
+    const promise = new Promise(r => resolve = r);
+    const rs = factory({
+      cancel(reason) {
+        // The aggregated reason array is ordered by branch, not by which
+        // branch canceled first.
+        assert_array_equals(reason, [reason1, reason2],
+                            'the cancel reason should be an array containing those from the branches');
+        resolve();
+      }
+    });
+
+    const [branch1, branch2] = rs.tee();
+    await Promise.all([
+      branch2.cancel(reason2),
+      branch1.cancel(reason1),
+      promise
+    ]);
+
+  }, `${label}: canceling both branches in reverse order should aggregate the cancel reasons into an array`);
+
+  promise_test(async t => {
+
+    const theError = { name: 'I\'ll be careful.' };
+    const rs = factory({
+      cancel() {
+        throw theError;
+      }
+    });
+
+    const [branch1, branch2] = rs.tee();
+    await Promise.all([
+      promise_rejects_exactly(t, theError, branch1.cancel()),
+      promise_rejects_exactly(t, theError, branch2.cancel())
+    ]);
+
+  }, `${label}: failing to cancel the original stream should cause cancel() to reject on branches`);
+
+  promise_test(async t => {
+
+    const theError = { name: 'You just watch yourself!' };
+    let controller;
+    const stream = factory({
+      start(c) {
+        controller = c;
+      }
+    });
+
+    const [branch1, branch2] = stream.tee();
+    controller.error(theError);
+
+    await Promise.all([
+      promise_rejects_exactly(t, theError, branch1.cancel()),
+      promise_rejects_exactly(t, theError, branch2.cancel())
+    ]);
+
+  }, `${label}: erroring a teed stream should properly handle canceled branches`);
+
+};
diff --git a/testing/web-platform/tests/streams/resources/rs-utils.js b/testing/web-platform/tests/streams/resources/rs-utils.js
new file mode 100644
index 0000000000..f1a014275a
--- /dev/null
+++ b/testing/web-platform/tests/streams/resources/rs-utils.js
@@ -0,0 +1,197 @@
+'use strict';
+(function () {
+
+  // Push source that delivers a random 128-character chunk every 2 ms via
+  // the consumer-assigned ondata() callback, and calls onend() after
+  // `toPush` chunks (toPush <= 0 means push indefinitely).
+  class RandomPushSource {
+    constructor(toPush) {
+      this.pushed = 0;
+      this.toPush = toPush;
+      this.started = false;
+      this.paused = false;
+      this.closed = false;
+
+      this._intervalHandle = null;
+    }
+
+    // Start pushing, or resume after readStop(); no-op once closed.
+    readStart() {
+      if (this.closed) {
+        return;
+      }
+
+      if (!this.started) {
+        this._intervalHandle = setInterval(writeChunk, 2);
+        this.started = true;
+      }
+
+      if (this.paused) {
+        this._intervalHandle = setInterval(writeChunk, 2);
+        this.paused = false;
+      }
+
+      const source = this;
+      // Hoisted, so usable in the setInterval calls above.
+      function writeChunk() {
+        if (source.paused) {
+          return;
+        }
+
+        source.pushed++;
+
+        if (source.toPush > 0 && source.pushed > source.toPush) {
+          if (source._intervalHandle) {
+            clearInterval(source._intervalHandle);
+            source._intervalHandle = undefined;
+          }
+          source.closed = true;
+          source.onend();
+        } else {
+          source.ondata(randomChunk(128));
+        }
+      }
+    }
+
+    // Pause pushing; throws if the source was never started.
+    readStop() {
+      if (this.paused) {
+        return;
+      }
+
+      if (this.started) {
+        this.paused = true;
+        clearInterval(this._intervalHandle);
+        this._intervalHandle = undefined;
+      } else {
+        throw new Error('Can\'t pause reading an unstarted source.');
+      }
+    }
+  }
+
+  // Returns a string of `size` random characters drawn from the basic
+  // printable ASCII range.
+  function randomChunk(size) {
+    let chunk = '';
+
+    for (let i = 0; i < size; ++i) {
+      // Add a random character from the basic printable ASCII set.
+      chunk += String.fromCharCode(Math.round(Math.random() * 84) + 32);
+    }
+
+    return chunk;
+  }
+
+  // Drains `readable` (via `reader` if supplied, otherwise a fresh default
+  // reader) and returns a promise for the array of all chunks read.
+  function readableStreamToArray(readable, reader) {
+    if (reader === undefined) {
+      reader = readable.getReader();
+    }
+
+    const chunks = [];
+
+    return pump();
+
+    function pump() {
+      return reader.read().then(result => {
+        if (result.done) {
+          return chunks;
+        }
+
+        chunks.push(result.value);
+        return pump();
+      });
+    }
+  }
+
+  // Pull source producing the integers 1..limit through node-style
+  // callbacks; with options.async truthy, callbacks run on a timeout
+  // instead of synchronously.
+  class SequentialPullSource {
+    constructor(limit, options) {
+      const async = options && options.async;
+
+      this.current = 0;
+      this.limit = limit;
+      this.opened = false;
+      this.closed = false;
+
+      this._exec = f => f();
+      if (async) {
+        this._exec = f => step_timeout(f, 0);
+      }
+    }
+
+    open(cb) {
+      this._exec(() => {
+        this.opened = true;
+        cb();
+      });
+    }
+
+    // cb(err, done, chunk): chunk is the next integer, or null when done.
+    read(cb) {
+      this._exec(() => {
+        if (++this.current <= this.limit) {
+          cb(null, false, this.current);
+        } else {
+          cb(null, true, null);
+        }
+      });
+    }
+
+    close(cb) {
+      this._exec(() => {
+        this.closed = true;
+        cb();
+      });
+    }
+  }
+
+  // Wraps a SequentialPullSource in a ReadableStream that enqueues 1..limit
+  // and then closes; the raw source is exposed as `stream.source`.
+  function sequentialReadableStream(limit, options) {
+    const sequentialSource = new SequentialPullSource(limit, options);
+
+    const stream = new ReadableStream({
+      start() {
+        return new Promise((resolve, reject) => {
+          sequentialSource.open(err => {
+            if (err) {
+              reject(err);
+            }
+            resolve();
+          });
+        });
+      },
+
+      pull(c) {
+        return new Promise((resolve, reject) => {
+          sequentialSource.read((err, done, chunk) => {
+            if (err) {
+              reject(err);
+            } else if (done) {
+              sequentialSource.close(err2 => {
+                if (err2) {
+                  reject(err2);
+                }
+                c.close();
+                resolve();
+              });
+            } else {
+              c.enqueue(chunk);
+              resolve();
+            }
+          });
+        });
+      }
+    });
+
+    stream.source = sequentialSource;
+
+    return stream;
+  }
+
+  // Detaches `view`'s buffer by round-tripping it through a BYOB read on a
+  // no-op byte stream; fulfills with the transferred view.
+  function transferArrayBufferView(view) {
+    const noopByteStream = new ReadableStream({
+      type: 'bytes',
+      pull(c) {
+        c.byobRequest.respond(c.byobRequest.view.byteLength);
+        c.close();
+      }
+    });
+    const reader = noopByteStream.getReader({ mode: 'byob' });
+    return reader.read(view).then((result) => result.value);
+  }
+
+  self.RandomPushSource = RandomPushSource;
+  self.readableStreamToArray = readableStreamToArray;
+  self.sequentialReadableStream = sequentialReadableStream;
+  self.transferArrayBufferView = transferArrayBufferView;
+
+}());
diff --git a/testing/web-platform/tests/streams/resources/test-utils.js b/testing/web-platform/tests/streams/resources/test-utils.js
new file mode 100644
index 0000000000..a38f78027b
--- /dev/null
+++ b/testing/web-platform/tests/streams/resources/test-utils.js
@@ -0,0 +1,27 @@
+'use strict';
+
+// Fulfills after `ms` milliseconds; uses step_timeout so testharness
+// timeout multipliers are honored.
+self.delay = ms => new Promise(resolve => step_timeout(resolve, ms));
+
+// For tests which verify that the implementation doesn't do something it shouldn't, it's better not to use a
+// timeout. Instead, assume that any reasonable implementation is going to finish work after 2 times around the event
+// loop, and use flushAsyncEvents().then(() => assert_array_equals(...));
+// Some tests include promise resolutions which may mean the test code takes a couple of event loop visits itself. So go
+// around an extra 2 times to avoid complicating those tests.
+self.flushAsyncEvents = () => delay(0).then(() => delay(0)).then(() => delay(0)).then(() => delay(0));
+
+// Asserts that two typed arrays agree on constructor, layout (byteOffset /
+// byteLength / backing-buffer size), element contents, and the full byte
+// contents of their backing buffers.
+self.assert_typed_array_equals = (actual, expected, message) => {
+  const prefix = message === undefined ? '' : `${message} `;
+  assert_equals(typeof actual, 'object', `${prefix}type is object`);
+  assert_equals(actual.constructor, expected.constructor, `${prefix}constructor`);
+  assert_equals(actual.byteOffset, expected.byteOffset, `${prefix}byteOffset`);
+  assert_equals(actual.byteLength, expected.byteLength, `${prefix}byteLength`);
+  assert_equals(actual.buffer.byteLength, expected.buffer.byteLength, `${prefix}buffer.byteLength`);
+  assert_array_equals([...actual], [...expected], `${prefix}contents`);
+  assert_array_equals([...new Uint8Array(actual.buffer)], [...new Uint8Array(expected.buffer)], `${prefix}buffer contents`);
+};
+
+// Returns [promise, resolve] so callers can settle the promise externally.
+self.makePromiseAndResolveFunc = () => {
+  let resolve;
+  const promise = new Promise(r => { resolve = r; });
+  return [promise, resolve];
+};
diff --git a/testing/web-platform/tests/streams/transferable/deserialize-error.window.js b/testing/web-platform/tests/streams/transferable/deserialize-error.window.js
new file mode 100644
index 0000000000..64cf2bbfb1
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/deserialize-error.window.js
@@ -0,0 +1,39 @@
+// META: script=/common/get-host-info.sub.js
+// META: script=resources/create-wasm-module.js
+// META: timeout=long
+
+// Drives the deserialize-error tests in a cross-site iframe: a WASM module
+// fails to deserialize cross-site, exercising the stream error path. The
+// parent and frame hand off via 'init done' / 'ws done' / 'rs done' messages.
+const { HTTPS_NOTSAMESITE_ORIGIN } = get_host_info();
+const iframe = document.createElement('iframe');
+iframe.src = `${HTTPS_NOTSAMESITE_ORIGIN}/streams/transferable/resources/deserialize-error-frame.html`;
+
+window.addEventListener('message', async evt => {
+  // Tests are serialized to make the results deterministic.
+  switch (evt.data) {
+    case 'init done': {
+      const ws = new WritableStream();
+      iframe.contentWindow.postMessage(ws, '*', [ws]);
+      return;
+    }
+
+    case 'ws done': {
+      const module = await createWasmModule();
+      const rs = new ReadableStream({
+        start(controller) {
+          controller.enqueue(module);
+        }
+      });
+      iframe.contentWindow.postMessage(rs, '*', [rs]);
+      return;
+    }
+
+    case 'rs done': {
+      iframe.remove();
+    }
+  }
+});
+
+// Need to do this after adding the listener to ensure we catch the first
+// message.
+document.body.appendChild(iframe);
+
+fetch_tests_from_window(iframe.contentWindow);
diff --git a/testing/web-platform/tests/streams/transferable/gc-crash.html b/testing/web-platform/tests/streams/transferable/gc-crash.html
new file mode 100644
index 0000000000..0d331e6be0
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/gc-crash.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<html class="test-wait">
+<script src="/common/gc.js"></script>
+<script type="module">
+  // Crash test: transferring a stream, starting a read, then forcing GC
+  // before the read's message transfer completes must not crash.
+  const b = new ReadableStream({
+    start(c) {
+      c.enqueue({}) // the value we will transfer
+    },
+  })
+  const transferred = structuredClone(b, { transfer: [b] })
+  // Here we request a read, triggering a message transfer
+  transferred.getReader().read()
+  // And immediately trigger GC without waiting for the read,
+  // causing the actual transfer to be done after GC
+  await garbageCollect()
+  document.documentElement.classList.remove("test-wait")
+</script>
diff --git a/testing/web-platform/tests/streams/transferable/readable-stream.html b/testing/web-platform/tests/streams/transferable/readable-stream.html
new file mode 100644
index 0000000000..b1ede4695b
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/readable-stream.html
@@ -0,0 +1,260 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="resources/helpers.js"></script>
+<script src="../resources/recording-streams.js"></script>
+<script src="../resources/test-utils.js"></script>
+<script>
+'use strict';
+
+// Tests for transferred ReadableStreams. createTransferredReadableStream /
+// recordingTransferredReadableStream come from resources/helpers.js and
+// resolve with the post-transfer stream.
+
+promise_test(async () => {
+  const rs = await createTransferredReadableStream({
+    start(controller) {
+      controller.enqueue('a');
+      controller.close();
+    }
+  });
+  const reader = rs.getReader();
+  {
+    const {value, done} = await reader.read();
+    assert_false(done, 'should not be done yet');
+    assert_equals(value, 'a', 'first chunk should be a');
+  }
+  {
+    const {done} = await reader.read();
+    assert_true(done, 'should be done now');
+  }
+}, 'sending one chunk through a transferred stream should work');
+
+promise_test(async () => {
+  let controller;
+  const rs = await createTransferredReadableStream({
+    start(c) {
+      controller = c;
+    }
+  });
+  for (let i = 0; i < 10; ++i) {
+    controller.enqueue(i);
+  }
+  controller.close();
+  const reader = rs.getReader();
+  for (let i = 0; i < 10; ++i) {
+    const {value, done} = await reader.read();
+    assert_false(done, 'should not be done yet');
+    assert_equals(value, i, 'chunk content should match index');
+  }
+  const {done} = await reader.read();
+  assert_true(done, 'should be done now');
+}, 'sending ten chunks through a transferred stream should work');
+
+promise_test(async () => {
+  let controller;
+  const rs = await createTransferredReadableStream({
+    start(c) {
+      controller = c;
+    }
+  });
+  const reader = rs.getReader();
+  for (let i = 0; i < 10; ++i) {
+    controller.enqueue(i);
+    const {value, done} = await reader.read();
+    assert_false(done, 'should not be done yet');
+    assert_equals(value, i, 'chunk content should match index');
+  }
+  controller.close();
+  const {done} = await reader.read();
+  assert_true(done, 'should be done now');
+}, 'sending ten chunks one at a time should work');
+
+promise_test(async () => {
+  let controller;
+  const rs = await createTransferredReadableStream({
+    start() {
+      this.counter = 0;
+    },
+    pull(controller) {
+      controller.enqueue(this.counter);
+      ++this.counter;
+      if (this.counter === 10)
+        controller.close();
+    }
+  });
+  const reader = rs.getReader();
+  for (let i = 0; i < 10; ++i) {
+    const {value, done} = await reader.read();
+    assert_false(done, 'should not be done yet');
+    assert_equals(value, i, 'chunk content should match index');
+  }
+  const {done} = await reader.read();
+  assert_true(done, 'should be done now');
+}, 'sending ten chunks on demand should work');
+
+promise_test(async () => {
+  const rs = recordingReadableStream({}, { highWaterMark: 0 });
+  await delay(0);
+  assert_array_equals(rs.events, [], 'pull() should not have been called');
+  // Eat the message so it can't interfere with other tests.
+  addEventListener('message', () => {}, {once: true});
+  // The transfer is done manually to verify that it is posting the stream that
+  // relieves backpressure, not receiving it.
+  postMessage(rs, '*', [rs]);
+  await delay(0);
+  assert_array_equals(rs.events, ['pull'], 'pull() should have been called');
+}, 'transferring a stream should relieve backpressure');
+
+promise_test(async () => {
+  const rs = await recordingTransferredReadableStream({
+    pull(controller) {
+      controller.enqueue('a');
+    }
+  }, { highWaterMark: 2 });
+  await delay(0);
+  assert_array_equals(rs.events, ['pull', 'pull', 'pull'],
+                      'pull() should have been called three times');
+}, 'transferring a stream should add one chunk to the queue size');
+
+promise_test(async () => {
+  const rs = await recordingTransferredReadableStream({
+    start(controller) {
+      controller.enqueue(new Uint8Array(1024));
+      controller.enqueue(new Uint8Array(1024));
+    }
+  }, new ByteLengthQueuingStrategy({highWaterMark: 512}));
+  await delay(0);
+  // At this point the queue contains 1024/512 bytes and 1/1 chunk, so it's full
+  // and pull() is not called.
+  assert_array_equals(rs.events, [], 'pull() should not have been called');
+  const reader = rs.getReader();
+  const {value, done} = await reader.read();
+  assert_false(done, 'we should not be done');
+  assert_equals(value.byteLength, 1024, 'expected chunk should be returned');
+  // Now the queue contains 0/512 bytes and 1/1 chunk, so pull() is called. If
+  // the implementation erroneously counted the extra queue space in bytes, then
+  // the queue would contain 1024/513 bytes and pull() wouldn't be called.
+  assert_array_equals(rs.events, ['pull'], 'pull() should have been called');
+}, 'the extra queue from transferring is counted in chunks');
+
+// Shared setup: a transferred recording stream plus a promise that resolves
+// once the underlying source's cancel() has been called.
+async function transferredReadableStreamWithCancelPromise() {
+  let resolveCancelCalled;
+  const cancelCalled = new Promise(resolve => {
+    resolveCancelCalled = resolve;
+  });
+  const rs = await recordingTransferredReadableStream({
+    cancel() {
+      resolveCancelCalled();
+    }
+  });
+  return { rs, cancelCalled };
+}
+
+promise_test(async () => {
+  const { rs, cancelCalled } = await transferredReadableStreamWithCancelPromise();
+  rs.cancel('message');
+  await cancelCalled;
+  assert_array_equals(rs.events, ['pull', 'cancel', 'message'],
+                      'cancel() should have been called');
+  const reader = rs.getReader();
+  // Check the stream really got closed.
+  await reader.closed;
+}, 'cancel should be propagated to the original');
+
+promise_test(async () => {
+  const { rs, cancelCalled } = await transferredReadableStreamWithCancelPromise();
+  const reader = rs.getReader();
+  const readPromise = reader.read();
+  reader.cancel('done');
+  const { done } = await readPromise;
+  assert_true(done, 'should be done');
+  await cancelCalled;
+  assert_array_equals(rs.events, ['pull', 'cancel', 'done'],
+                      'events should match');
+}, 'cancel should abort a pending read()');
+
+promise_test(async () => {
+  let cancelComplete = false;
+  const rs = await createTransferredReadableStream({
+    async cancel() {
+      await flushAsyncEvents();
+      cancelComplete = true;
+    }
+  });
+  await rs.cancel();
+  assert_false(cancelComplete,
+               'cancel() on the underlying sink should not have completed');
+}, 'stream cancel should not wait for underlying source cancel');
+
+promise_test(async t => {
+  const rs = await recordingTransferredReadableStream();
+  const reader = rs.getReader();
+  let serializationHappened = false;
+  rs.controller.enqueue({
+    get getter() {
+      serializationHappened = true;
+      return 'a';
+    }
+  });
+  await flushAsyncEvents();
+  assert_false(serializationHappened,
+               'serialization should not have happened yet');
+  const {value, done} = await reader.read();
+  assert_false(done, 'should not be done');
+  assert_equals(value.getter, 'a', 'getter should be a');
+  assert_true(serializationHappened,
+              'serialization should have happened');
+}, 'serialization should not happen until the value is read');
+
+promise_test(async t => {
+  const rs = await recordingTransferredReadableStream();
+  const reader = rs.getReader();
+  rs.controller.enqueue(new ReadableStream());
+  await promise_rejects_dom(t, 'DataCloneError', reader.read(),
+                            'closed promise should reject');
+  assert_throws_js(TypeError, () => rs.controller.enqueue(),
+                   'original stream should be errored');
+}, 'transferring a non-serializable chunk should error both sides');
+
+promise_test(async t => {
+  const rs = await createTransferredReadableStream({
+    start(controller) {
+      controller.error('foo');
+    }
+  });
+  const reader = rs.getReader();
+  return promise_rejects_exactly(t, 'foo', reader.read(),
+                                 'error should be passed through');
+}, 'errors should be passed through');
+
+promise_test(async () => {
+  const rs = await recordingTransferredReadableStream();
+  await delay(0);
+  const reader = rs.getReader();
+  reader.cancel();
+  rs.controller.error();
+  const {done} = await reader.read();
+  assert_true(done, 'should be done');
+  assert_throws_js(TypeError, () => rs.controller.enqueue(),
+                   'enqueue should throw');
+}, 'race between cancel() and error() should leave sides in different states');
+
+promise_test(async () => {
+  const rs = await recordingTransferredReadableStream();
+  await delay(0);
+  const reader = rs.getReader();
+  reader.cancel();
+  rs.controller.close();
+  const {done} = await reader.read();
+  assert_true(done, 'should be done');
+}, 'race between cancel() and close() should be benign');
+
+promise_test(async () => {
+  const rs = await recordingTransferredReadableStream();
+  await delay(0);
+  const reader = rs.getReader();
+  reader.cancel();
+  rs.controller.enqueue('a');
+  const {done} = await reader.read();
+  assert_true(done, 'should be done');
+}, 'race between cancel() and enqueue() should be benign');
+
+</script>
diff --git a/testing/web-platform/tests/streams/transferable/reason.html b/testing/web-platform/tests/streams/transferable/reason.html
new file mode 100644
index 0000000000..4251aa85b8
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/reason.html
@@ -0,0 +1,132 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="resources/helpers.js"></script>
+<script>
+'use strict';
+
+// Chrome used to special-case the reason for cancel() and abort() in order to
+// handle exceptions correctly. This is no longer necessary. These tests are
+// retained to avoid regressions.
+
+// Cancels a transferred stream with `originalReason` and fulfills with the
+// reason as seen by the original underlying source's cancel().
+async function getTransferredReason(originalReason) {
+  let resolvePromise;
+  const rv = new Promise(resolve => {
+    resolvePromise = resolve;
+  });
+  const rs = await createTransferredReadableStream({
+    cancel(reason) {
+      resolvePromise(reason);
+    }
+  });
+  await rs.cancel(originalReason);
+  return rv;
+}
+
+for (const value of ['hi', '\t\r\n', 7, 3.0, undefined, null, true, false,
+                     NaN, Infinity]) {
+  promise_test(async () => {
+    const reason = await getTransferredReason(value);
+    assert_equals(reason, value, 'reason should match');
+  }, `reason with a simple value of '${value}' should be preserved`);
+}
+
+for (const badType of [Symbol('hi'), _ => 'hi']) {
+  promise_test(async t => {
+    return promise_rejects_dom(t, 'DataCloneError',
+                               getTransferredReason(badType),
+                               'cancel() should reject');
+  }, `reason with a type of '${typeof badType}' should be rejected and ` +
+     `error the stream`);
+}
+
+promise_test(async () => {
+  const reasonAsJson =
+        `{"foo":[1,"col"],"bar":{"hoge":0.2,"baz":{},"shan":null}}`;
+  const reason = await getTransferredReason(JSON.parse(reasonAsJson));
+  assert_equals(JSON.stringify(reason), reasonAsJson,
+                'object should be preserved');
+}, 'objects that can be completely expressed in JSON should be preserved');
+
+promise_test(async () => {
+  const circularObject = {};
+  circularObject.self = circularObject;
+  const reason = await getTransferredReason(circularObject);
+  assert_true(reason instanceof Object, 'an Object should be output');
+  assert_equals(reason.self, reason,
+                'the object should have a circular reference');
+}, 'objects that cannot be expressed in JSON should also be preserved');
+
+promise_test(async () => {
+  const originalReason = new TypeError('hi');
+  const reason = await getTransferredReason(originalReason);
+  assert_true(reason instanceof TypeError,
+              'type should be preserved');
+  assert_equals(reason.message, originalReason.message,
+                'message should be preserved');
+}, 'the type and message of a TypeError should be preserved');
+
+promise_test(async () => {
+  const originalReason = new TypeError('hi');
+  originalReason.foo = 'bar';
+  const reason = await getTransferredReason(originalReason);
+  assert_false('foo' in reason,
+               'foo should not be preserved');
+}, 'other attributes of a TypeError should not be preserved');
+
+promise_test(async () => {
+  const originalReason = new TypeError();
+  originalReason.message = [1, 2, 3];
+  const reason = await getTransferredReason(originalReason);
+  assert_equals(reason.message, '1,2,3', 'message should be stringified');
+}, 'a TypeError message should be converted to a string');
+
+promise_test(async () => {
+  const originalReason = new TypeError();
+  Object.defineProperty(originalReason, 'message', {
+    get() { return 'words'; }
+  });
+  const reason = await getTransferredReason(originalReason);
+  assert_equals(reason.message, '', 'message should not be preserved');
+}, 'a TypeError message should not be preserved if it is a getter');
+
+promise_test(async () => {
+  const originalReason = new TypeError();
+  delete originalReason.message;
+  // Temporarily mutates TypeError.prototype; restored immediately below.
+  TypeError.prototype.message = 'inherited message';
+  const reason = await getTransferredReason(originalReason);
+  delete TypeError.prototype.message;
+  assert_equals(reason.message, '', 'message should not be preserved');
+}, 'a TypeError message should not be preserved if it is inherited');
+
+promise_test(async () => {
+  const originalReason = new DOMException('yes', 'AbortError');
+  const reason = await getTransferredReason(originalReason);
+  assert_true(reason instanceof DOMException,
+              'reason should be a DOMException');
+  assert_equals(reason.message, originalReason.message,
+                'the messages should match');
+  assert_equals(reason.name, originalReason.name,
+                'the names should match');
+}, 'DOMException errors should be preserved');
+
+for (const errorConstructor of [EvalError, RangeError,
+                                ReferenceError, SyntaxError, TypeError,
+                                URIError]) {
+  promise_test(async () => {
+    const originalReason = new errorConstructor('nope');
+    const reason = await getTransferredReason(originalReason);
+    assert_equals(typeof reason, 'object', 'reason should have type object');
+    assert_true(reason instanceof errorConstructor,
+                `reason should inherit ${errorConstructor.name}`);
+    assert_true(reason instanceof Error, 'reason should inherit Error');
+    assert_equals(reason.constructor, errorConstructor,
+                  'reason should have the right constructor');
+    assert_equals(reason.name, errorConstructor.name,
+                  `name should match constructor name`);
+    assert_equals(reason.message, 'nope', 'message should match');
+  }, `${errorConstructor.name} should be preserved`);
+}
+
+</script>
diff --git a/testing/web-platform/tests/streams/transferable/resources/create-wasm-module.js b/testing/web-platform/tests/streams/transferable/resources/create-wasm-module.js
new file mode 100644
index 0000000000..37064af95c
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/create-wasm-module.js
@@ -0,0 +1,11 @@
+// There aren't many cloneable types that will cause an error on
+// deserialization. WASM modules have the property that it's an error to
+// deserialize them cross-site, which works for our purposes.
+async function createWasmModule() {
+ // It doesn't matter what the module is, so we use one from another
+ // test.
+ const response =
+ await fetch("/wasm/serialization/module/resources/incrementer.wasm");
+ const ab = await response.arrayBuffer();
+ return WebAssembly.compile(ab);
+}
diff --git a/testing/web-platform/tests/streams/transferable/resources/deserialize-error-frame.html b/testing/web-platform/tests/streams/transferable/resources/deserialize-error-frame.html
new file mode 100644
index 0000000000..5ec2fcda2c
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/deserialize-error-frame.html
@@ -0,0 +1,39 @@
+<!DOCTYPE html>
+<script src="/resources/testharness.js"></script>
+<script src="create-wasm-module.js"></script>
+<script>
+async_test(t => {
+ parent.postMessage('init done', '*');
+ window.addEventListener('message', async evt => {
+ if (evt.data.constructor.name !== 'WritableStream') {
+ return;
+ }
+ const ws = evt.data;
+ const writer = ws.getWriter();
+ const module = await createWasmModule();
+ writer.write(module);
+ await promise_rejects_dom(t, 'DataCloneError', writer.closed,
+ 'should reject with a DataCloneError');
+ t.done();
+ // Signal that this test is done. When both tests are done the iframe will
+ // be removed.
+ parent.postMessage('ws done', '*');
+ });
+}, 'a WritableStream deserialization failure should result in a DataCloneError');
+
+async_test(t => {
+ window.addEventListener('message', async evt => {
+ if (evt.data.constructor.name !== 'ReadableStream') {
+ return;
+ }
+ const rs = evt.data;
+ const reader = rs.getReader();
+ await promise_rejects_dom(t, 'DataCloneError', reader.read(),
+ 'should reject with a DataCloneError');
+ t.done();
+ // Signal that this test is done. When both tests are done the iframe will
+ // be removed.
+ parent.postMessage('rs done', '*');
+ });
+}, 'a ReadableStream deserialization failure should result in a DataCloneError');
+</script>
diff --git a/testing/web-platform/tests/streams/transferable/resources/echo-iframe.html b/testing/web-platform/tests/streams/transferable/resources/echo-iframe.html
new file mode 100644
index 0000000000..68f6850343
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/echo-iframe.html
@@ -0,0 +1,7 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script>
+addEventListener('message', evt => {
+ evt.source.postMessage(evt.data, '*', [evt.data]);
+});
+</script>
diff --git a/testing/web-platform/tests/streams/transferable/resources/echo-worker.js b/testing/web-platform/tests/streams/transferable/resources/echo-worker.js
new file mode 100644
index 0000000000..806c237108
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/echo-worker.js
@@ -0,0 +1,2 @@
+// A worker that just transfers back any message that is sent to it.
+onmessage = evt => postMessage(evt.data, [evt.data]);
diff --git a/testing/web-platform/tests/streams/transferable/resources/helpers.js b/testing/web-platform/tests/streams/transferable/resources/helpers.js
new file mode 100644
index 0000000000..12504537f9
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/helpers.js
@@ -0,0 +1,132 @@
+'use strict';
+
+(() => {
+ // Create a ReadableStream that will pass the tests in
+ // testTransferredReadableStream(), below.
+ function createOriginalReadableStream() {
+ return new ReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.close();
+ }
+ });
+ }
+
+ // Common tests to roughly determine that |rs| is a correctly transferred
+ // version of a stream created by createOriginalReadableStream().
+ function testTransferredReadableStream(rs) {
+ assert_equals(rs.constructor, ReadableStream,
+ 'rs should be a ReadableStream in this realm');
+ assert_true(rs instanceof ReadableStream,
+ 'instanceof check should pass');
+
+ // Perform a brand-check on |rs| in the process of calling getReader().
+ const reader = ReadableStream.prototype.getReader.call(rs);
+
+ return reader.read().then(({value, done}) => {
+ assert_false(done, 'done should be false');
+ assert_equals(value, 'a', 'value should be "a"');
+ return reader.read();
+ }).then(({done}) => {
+ assert_true(done, 'done should be true');
+ });
+ }
+
+ function testMessage(msg) {
+ assert_array_equals(msg.ports, [], 'there should be no ports in the event');
+ return testTransferredReadableStream(msg.data);
+ }
+
+ function testMessageEvent(target) {
+ return new Promise((resolve, reject) => {
+ target.addEventListener('message', ev => {
+ try {
+ resolve(testMessage(ev));
+ } catch (e) {
+ reject(e);
+ }
+ }, {once: true});
+ });
+ }
+
+ function testMessageEventOrErrorMessage(target) {
+ return new Promise((resolve, reject) => {
+ target.addEventListener('message', ev => {
+ if (typeof ev.data === 'string') {
+ // Assume it's an error message and reject with it.
+ reject(ev.data);
+ return;
+ }
+
+ try {
+ resolve(testMessage(ev));
+ } catch (e) {
+ reject(e);
+ }
+ }, {once: true});
+ });
+ }
+
+ function checkTestResults(target) {
+ return new Promise((resolve, reject) => {
+ target.onmessage = msg => {
+ // testharness.js sends us objects which we need to ignore.
+ if (typeof msg.data !== 'string')
+ return;
+
+ if (msg.data === 'OK') {
+ resolve();
+ } else {
+ reject(msg.data);
+ }
+ };
+ });
+ }
+
+ // These tests assume that a transferred ReadableStream will behave the same
+ // regardless of how it was transferred. This enables us to simply transfer the
+ // stream to ourselves.
+ function createTransferredReadableStream(underlyingSource) {
+ const original = new ReadableStream(underlyingSource);
+ const promise = new Promise((resolve, reject) => {
+ addEventListener('message', msg => {
+ const rs = msg.data;
+ if (rs instanceof ReadableStream) {
+ resolve(rs);
+ } else {
+ reject(new Error(`what is this thing: "${rs}"?`));
+ }
+ }, {once: true});
+ });
+ postMessage(original, '*', [original]);
+ return promise;
+ }
+
+ function recordingTransferredReadableStream(underlyingSource, strategy) {
+ const original = recordingReadableStream(underlyingSource, strategy);
+ const promise = new Promise((resolve, reject) => {
+ addEventListener('message', msg => {
+ const rs = msg.data;
+ if (rs instanceof ReadableStream) {
+ rs.events = original.events;
+ rs.eventsWithoutPulls = original.eventsWithoutPulls;
+ rs.controller = original.controller;
+ resolve(rs);
+ } else {
+ reject(new Error(`what is this thing: "${rs}"?`));
+ }
+ }, {once: true});
+ });
+ postMessage(original, '*', [original]);
+ return promise;
+ }
+
+ self.createOriginalReadableStream = createOriginalReadableStream;
+ self.testMessage = testMessage;
+ self.testMessageEvent = testMessageEvent;
+ self.testMessageEventOrErrorMessage = testMessageEventOrErrorMessage;
+ self.checkTestResults = checkTestResults;
+ self.createTransferredReadableStream = createTransferredReadableStream;
+ self.recordingTransferredReadableStream = recordingTransferredReadableStream;
+
+})();
diff --git a/testing/web-platform/tests/streams/transferable/resources/receiving-shared-worker.js b/testing/web-platform/tests/streams/transferable/resources/receiving-shared-worker.js
new file mode 100644
index 0000000000..84f779c3db
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/receiving-shared-worker.js
@@ -0,0 +1,11 @@
+'use strict';
+importScripts('/resources/testharness.js', 'helpers.js');
+
+onconnect = evt => {
+ const port = evt.source;
+ const promise = testMessageEvent(port);
+ port.start();
+ promise
+ .then(() => port.postMessage('OK'))
+ .catch(err => port.postMessage(`BAD: ${err}`));
+};
diff --git a/testing/web-platform/tests/streams/transferable/resources/receiving-worker.js b/testing/web-platform/tests/streams/transferable/resources/receiving-worker.js
new file mode 100644
index 0000000000..4ebb9c5f8f
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/receiving-worker.js
@@ -0,0 +1,7 @@
+'use strict';
+importScripts('/resources/testharness.js', 'helpers.js');
+
+const promise = testMessageEvent(self);
+promise
+ .then(() => postMessage('OK'))
+ .catch(err => postMessage(`BAD: ${err}`));
diff --git a/testing/web-platform/tests/streams/transferable/resources/sending-shared-worker.js b/testing/web-platform/tests/streams/transferable/resources/sending-shared-worker.js
new file mode 100644
index 0000000000..e579077894
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/sending-shared-worker.js
@@ -0,0 +1,12 @@
+'use strict';
+importScripts('helpers.js');
+
+onconnect = msg => {
+ const port = msg.source;
+ const orig = createOriginalReadableStream();
+ try {
+ port.postMessage(orig, [orig]);
+ } catch (e) {
+ port.postMessage(e.message);
+ }
+};
diff --git a/testing/web-platform/tests/streams/transferable/resources/sending-worker.js b/testing/web-platform/tests/streams/transferable/resources/sending-worker.js
new file mode 100644
index 0000000000..0b79733f74
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/sending-worker.js
@@ -0,0 +1,5 @@
+'use strict';
+importScripts('helpers.js');
+
+const orig = createOriginalReadableStream();
+postMessage(orig, [orig]);
diff --git a/testing/web-platform/tests/streams/transferable/resources/service-worker-iframe.html b/testing/web-platform/tests/streams/transferable/resources/service-worker-iframe.html
new file mode 100644
index 0000000000..348d067c92
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/service-worker-iframe.html
@@ -0,0 +1,39 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="helpers.js"></script>
+<script>
+'use strict';
+
+setup({
+ explicit_done: true
+});
+
+function startTests() {
+ promise_test(() => {
+ const orig = createOriginalReadableStream();
+ const promise = checkTestResults(navigator.serviceWorker);
+ navigator.serviceWorker.controller.postMessage(orig, [orig]);
+ assert_true(orig.locked, 'the original stream should be locked');
+ return promise;
+ }, 'serviceWorker.controller.postMessage should be able to transfer a ' +
+ 'ReadableStream');
+
+ promise_test(() => {
+ const promise = testMessageEventOrErrorMessage(navigator.serviceWorker);
+ navigator.serviceWorker.controller.postMessage('SEND');
+ return promise;
+ }, 'postMessage in a service worker should be able to transfer ReadableStream');
+
+ done();
+}
+
+// Delay running the tests until we get a message from the page telling us to.
+// This is to work around an issue where testharness.js doesn't detect
+// completion of the tests if they fail too early.
+onmessage = msg => {
+ if (msg.data === 'explicit trigger')
+ startTests();
+};
+
+</script>
diff --git a/testing/web-platform/tests/streams/transferable/resources/service-worker.js b/testing/web-platform/tests/streams/transferable/resources/service-worker.js
new file mode 100644
index 0000000000..af76b6c11b
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/resources/service-worker.js
@@ -0,0 +1,30 @@
+'use strict';
+importScripts('/resources/testharness.js', 'helpers.js');
+
+onmessage = msg => {
+ const client = msg.source;
+ if (msg.data === 'SEND') {
+ sendingTest(client);
+ } else {
+ receivingTest(msg, client);
+ }
+};
+
+function sendingTest(client) {
+ const orig = createOriginalReadableStream();
+ try {
+ client.postMessage(orig, [orig]);
+ } catch (e) {
+ client.postMessage(e.message);
+ }
+}
+
+function receivingTest(msg, client) {
+ try {
+ msg.waitUntil(testMessage(msg)
+ .then(() => client.postMessage('OK'))
+ .catch(e => client.postMessage(`BAD: ${e}`)));
+ } catch (e) {
+ client.postMessage(`BAD: ${e}`);
+ }
+}
diff --git a/testing/web-platform/tests/streams/transferable/service-worker.https.html b/testing/web-platform/tests/streams/transferable/service-worker.https.html
new file mode 100644
index 0000000000..2ca7f19c91
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/service-worker.https.html
@@ -0,0 +1,28 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/service-workers/service-worker/resources/test-helpers.sub.js"></script>
+<script>
+'use strict';
+
+const kServiceWorkerUrl = 'resources/service-worker.js';
+const kIframeUrl = 'resources/service-worker-iframe.html';
+
+// A dummy test so that we can use the test-helpers.sub.js functions
+const test = async_test('service-worker');
+
+async function registerAndStart() {
+ const reg = await service_worker_unregister_and_register(
+ test, kServiceWorkerUrl, kIframeUrl);
+ await wait_for_state(test, reg.installing, 'activated');
+ const iframe = await with_iframe(kIframeUrl);
+ fetch_tests_from_window(iframe.contentWindow);
+ add_completion_callback(() => iframe.remove());
+ iframe.contentWindow.postMessage('explicit trigger', '*');
+ return service_worker_unregister_and_done(test, kIframeUrl);
+}
+
+onload = registerAndStart;
+
+</script>
diff --git a/testing/web-platform/tests/streams/transferable/shared-worker.html b/testing/web-platform/tests/streams/transferable/shared-worker.html
new file mode 100644
index 0000000000..cd0415402d
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/shared-worker.html
@@ -0,0 +1,25 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="resources/helpers.js"></script>
+<script>
+'use strict';
+
+promise_test(t => {
+ const orig = createOriginalReadableStream();
+ const w = new SharedWorker('resources/receiving-shared-worker.js');
+ const promise = checkTestResults(w.port);
+ w.port.postMessage(orig, [orig]);
+ assert_true(orig.locked, 'the original stream should be locked');
+ return promise;
+}, 'worker.postMessage should be able to transfer a ReadableStream');
+
+promise_test(t => {
+ const w = new SharedWorker('resources/sending-shared-worker.js');
+ const promise = testMessageEventOrErrorMessage(w.port);
+ w.port.start();
+ return promise;
+}, 'postMessage in a worker should be able to transfer a ReadableStream');
+
+</script>
diff --git a/testing/web-platform/tests/streams/transferable/transfer-with-messageport.window.js b/testing/web-platform/tests/streams/transferable/transfer-with-messageport.window.js
new file mode 100644
index 0000000000..37f8c9df16
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/transfer-with-messageport.window.js
@@ -0,0 +1,219 @@
+"use strict";
+
+function receiveEventOnce(target, name) {
+ return new Promise(resolve => {
+ target.addEventListener(
+ name,
+ ev => {
+ resolve(ev);
+ },
+ { once: true }
+ );
+ });
+}
+
+async function postAndTestMessageEvent(data, transfer, title) {
+ postMessage(data, "*", transfer);
+ const messagePortCount = transfer.filter(i => i instanceof MessagePort)
+ .length;
+ const ev = await receiveEventOnce(window, "message");
+ assert_equals(
+ ev.ports.length,
+ messagePortCount,
+ `Correct number of ports ${title}`
+ );
+ for (const [i, port] of ev.ports.entries()) {
+ assert_true(
+ port instanceof MessagePort,
+ `ports[${i}] include MessagePort ${title}`
+ );
+ }
+ for (const [key, value] of Object.entries(data)) {
+ assert_true(
+ ev.data[key] instanceof value.constructor,
+ `data.${key} has correct interface ${value.constructor.name} ${title}`
+ );
+ }
+}
+
+async function transferMessagePortWithOrder1(stream) {
+ const channel = new MessageChannel();
+ await postAndTestMessageEvent(
+ { stream, port2: channel.port2 },
+ [stream, channel.port2],
+ `when transferring [${stream.constructor.name}, MessagePort]`
+ );
+}
+
+async function transferMessagePortWithOrder2(stream) {
+ const channel = new MessageChannel();
+ await postAndTestMessageEvent(
+ { stream, port2: channel.port2 },
+ [channel.port2, stream],
+ `when transferring [MessagePort, ${stream.constructor.name}]`
+ );
+}
+
+async function transferMessagePortWithOrder3(stream) {
+ const channel = new MessageChannel();
+ await postAndTestMessageEvent(
+ { port1: channel.port1, stream, port2: channel.port2 },
+ [channel.port1, stream, channel.port2],
+ `when transferring [MessagePort, ${stream.constructor.name}, MessagePort]`
+ );
+}
+
+async function transferMessagePortWithOrder4(stream) {
+ const channel = new MessageChannel();
+ await postAndTestMessageEvent(
+ {},
+ [channel.port1, stream, channel.port2],
+ `when transferring [MessagePort, ${stream.constructor.name}, MessagePort] but with empty data`
+ );
+}
+
+async function transferMessagePortWithOrder5(stream) {
+ const channel = new MessageChannel();
+ await postAndTestMessageEvent(
+ { port2: channel.port2, port1: channel.port1, stream },
+ [channel.port1, stream, channel.port2],
+ `when transferring [MessagePort, ${stream.constructor.name}, MessagePort] but with data having different order`
+ );
+}
+
+async function transferMessagePortWithOrder6(stream) {
+ const channel = new MessageChannel();
+ await postAndTestMessageEvent(
+ { port2: channel.port2, port1: channel.port1 },
+ [channel.port1, stream, channel.port2],
+ `when transferring [MessagePort, ${stream.constructor.name}, MessagePort] but with stream not being in the data`
+ );
+}
+
+async function transferMessagePortWithOrder7(stream) {
+ const channel = new MessageChannel();
+ await postAndTestMessageEvent(
+ { stream },
+ [channel.port1, stream, channel.port2],
+ `when transferring [MessagePort, ${stream.constructor.name}, MessagePort] but with ports not being in the data`
+ );
+}
+
+async function transferMessagePortWith(constructor) {
+ await transferMessagePortWithOrder1(new constructor());
+ await transferMessagePortWithOrder2(new constructor());
+ await transferMessagePortWithOrder3(new constructor());
+}
+
+async function advancedTransferMesagePortWith(constructor) {
+ await transferMessagePortWithOrder4(new constructor());
+ await transferMessagePortWithOrder5(new constructor());
+ await transferMessagePortWithOrder6(new constructor());
+ await transferMessagePortWithOrder7(new constructor());
+}
+
+async function mixedTransferMessagePortWithOrder1() {
+ const channel = new MessageChannel();
+ const readable = new ReadableStream();
+ const writable = new WritableStream();
+ const transform = new TransformStream();
+ await postAndTestMessageEvent(
+ {
+ readable,
+ writable,
+ transform,
+ port1: channel.port1,
+ port2: channel.port2,
+ },
+ [readable, writable, transform, channel.port1, channel.port2],
+ `when transferring [ReadableStream, WritableStream, TransformStream, MessagePort, MessagePort]`
+ );
+}
+
+async function mixedTransferMessagePortWithOrder2() {
+ const channel = new MessageChannel();
+ const readable = new ReadableStream();
+ const writable = new WritableStream();
+ const transform = new TransformStream();
+ await postAndTestMessageEvent(
+ { readable, writable, transform },
+ [transform, channel.port1, readable, channel.port2, writable],
+ `when transferring [TransformStream, MessagePort, ReadableStream, MessagePort, WritableStream]`
+ );
+}
+
+async function mixedTransferMessagePortWithOrder3() {
+ const channel = new MessageChannel();
+ const readable1 = new ReadableStream();
+ const readable2 = new ReadableStream();
+ const writable1 = new WritableStream();
+ const writable2 = new WritableStream();
+ const transform1 = new TransformStream();
+ const transform2 = new TransformStream();
+ await postAndTestMessageEvent(
+ { readable1, writable1, transform1, readable2, writable2, transform2 },
+ [
+ transform2,
+ channel.port1,
+ readable1,
+ channel.port2,
+ writable2,
+ readable2,
+ writable1,
+ transform1,
+ ],
+ `when transferring [TransformStream, MessagePort, ReadableStream, MessagePort, WritableStream, ReadableStream, WritableStream, TransformStream] but with the data having different order`
+ );
+}
+
+async function mixedTransferMesagePortWith() {
+ await mixedTransferMessagePortWithOrder1();
+ await mixedTransferMessagePortWithOrder2();
+ await mixedTransferMessagePortWithOrder3();
+}
+
+promise_test(async t => {
+ await transferMessagePortWith(ReadableStream);
+}, "Transferring a MessagePort with a ReadableStream should set `.ports`");
+
+promise_test(async t => {
+ await transferMessagePortWith(WritableStream);
+}, "Transferring a MessagePort with a WritableStream should set `.ports`");
+
+promise_test(async t => {
+ await transferMessagePortWith(TransformStream);
+}, "Transferring a MessagePort with a TransformStream should set `.ports`");
+
+promise_test(async t => {
+ await transferMessagePortWith(ReadableStream);
+}, "Transferring a MessagePort with a ReadableStream should set `.ports`, advanced");
+
+promise_test(async t => {
+ await transferMessagePortWith(WritableStream);
+}, "Transferring a MessagePort with a WritableStream should set `.ports`, advanced");
+
+promise_test(async t => {
+ await transferMessagePortWith(TransformStream);
+}, "Transferring a MessagePort with a TransformStream should set `.ports`, advanced");
+
+promise_test(async t => {
+ await mixedTransferMesagePortWith();
+}, "Transferring a MessagePort with multiple streams should set `.ports`");
+
+test(() => {
+ assert_throws_dom("DataCloneError", () =>
+ postMessage({ stream: new ReadableStream() }, "*")
+ );
+}, "ReadableStream must not be serializable");
+
+test(() => {
+ assert_throws_dom("DataCloneError", () =>
+ postMessage({ stream: new WritableStream() }, "*")
+ );
+}, "WritableStream must not be serializable");
+
+test(() => {
+ assert_throws_dom("DataCloneError", () =>
+ postMessage({ stream: new TransformStream() }, "*")
+ );
+}, "TransformStream must not be serializable");
diff --git a/testing/web-platform/tests/streams/transferable/transform-stream-members.any.js b/testing/web-platform/tests/streams/transferable/transform-stream-members.any.js
new file mode 100644
index 0000000000..05914e12cc
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/transform-stream-members.any.js
@@ -0,0 +1,18 @@
+// META: global=window,dedicatedworker,shadowrealm
+
+const combinations = [
+ (t => [t, t.readable])(new TransformStream()),
+ (t => [t.readable, t])(new TransformStream()),
+ (t => [t, t.writable])(new TransformStream()),
+ (t => [t.writable, t])(new TransformStream()),
+];
+
+for (const combination of combinations) {
+ test(() => {
+ assert_throws_dom(
+ "DataCloneError",
+ () => structuredClone(combination, { transfer: combination }),
+ "structuredClone should throw"
+ );
+ }, `Transferring ${combination} should fail`);
+}
diff --git a/testing/web-platform/tests/streams/transferable/transform-stream.html b/testing/web-platform/tests/streams/transferable/transform-stream.html
new file mode 100644
index 0000000000..355d5d8074
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/transform-stream.html
@@ -0,0 +1,108 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="../resources/test-utils.js"></script>
+<script>
+'use strict';
+
+promise_test(t => {
+ const orig = new TransformStream();
+ const promise = new Promise(resolve => {
+ addEventListener('message', t.step_func(evt => {
+ const transferred = evt.data;
+ assert_equals(transferred.constructor, TransformStream,
+ 'transferred should be a TransformStream in this realm');
+ assert_true(transferred instanceof TransformStream,
+ 'instanceof check should pass');
+
+ // Perform a brand-check on |transferred|.
+ const readableGetter = Object.getOwnPropertyDescriptor(
+ TransformStream.prototype, 'readable').get;
+ assert_true(readableGetter.call(transferred) instanceof ReadableStream,
+ 'brand check should pass and readable stream should result');
+ const writableGetter = Object.getOwnPropertyDescriptor(
+ TransformStream.prototype, 'writable').get;
+ assert_true(writableGetter.call(transferred) instanceof WritableStream,
+ 'brand check should pass and writable stream should result');
+ resolve();
+ }), {once: true});
+ });
+ postMessage(orig, '*', [orig]);
+ assert_true(orig.readable.locked, 'the readable side should be locked');
+ assert_true(orig.writable.locked, 'the writable side should be locked');
+ return promise;
+}, 'window.postMessage should be able to transfer a TransformStream');
+
+test(() => {
+ const ts = new TransformStream();
+ const writer = ts.writable.getWriter();
+ assert_throws_dom('DataCloneError', () => postMessage(ts, '*', [ts]),
+ 'postMessage should throw');
+ assert_false(ts.readable.locked, 'readable side should not get locked');
+}, 'a TransformStream with a locked writable should not be transferable');
+
+test(() => {
+ const ts = new TransformStream();
+ const reader = ts.readable.getReader();
+ assert_throws_dom('DataCloneError', () => postMessage(ts, '*', [ts]),
+ 'postMessage should throw');
+ assert_false(ts.writable.locked, 'writable side should not get locked');
+}, 'a TransformStream with a locked readable should not be transferable');
+
+test(() => {
+ const ts = new TransformStream();
+ const reader = ts.readable.getReader();
+ const writer = ts.writable.getWriter();
+ assert_throws_dom('DataCloneError', () => postMessage(ts, '*', [ts]),
+ 'postMessage should throw');
+}, 'a TransformStream with both sides locked should not be transferable');
+
+promise_test(t => {
+ const source = new ReadableStream({
+ start(controller) {
+ controller.enqueue('hello ');
+ controller.enqueue('there ');
+ controller.close();
+ }
+ });
+ let resolve;
+ const ready = new Promise(r => resolve = r);
+ let result = '';
+ const sink = new WritableStream({
+ write(chunk) {
+ if (result) {
+ resolve();
+ }
+ result += chunk;
+ }
+ });
+ const transform1 = new TransformStream({
+ transform(chunk, controller) {
+ controller.enqueue(chunk.toUpperCase());
+ }
+ });
+ const transform2 = new TransformStream({
+ transform(chunk, controller) {
+ controller.enqueue(chunk + chunk);
+ }
+ });
+ const promise = new Promise(resolve => {
+ addEventListener('message', t.step_func(evt => {
+ const data = evt.data;
+ resolve(data.source
+ .pipeThrough(data.transform1)
+ .pipeThrough(data.transform2)
+ .pipeTo(data.sink));
+ }));
+ });
+ postMessage({source, sink, transform1, transform2}, '*',
+ [source, transform1, sink, transform2]);
+ return ready
+ .then(() => {
+ assert_equals(result, 'HELLO HELLO THERE THERE ',
+ 'transforms should have been applied');
+ });
+}, 'piping through transferred transforms should work');
+
+</script>
diff --git a/testing/web-platform/tests/streams/transferable/window.html b/testing/web-platform/tests/streams/transferable/window.html
new file mode 100644
index 0000000000..11c868356b
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/window.html
@@ -0,0 +1,55 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="resources/helpers.js"></script>
+<script>
+'use strict';
+
+promise_test(t => {
+ const orig = createOriginalReadableStream();
+ const promise = testMessageEvent(window);
+ postMessage(orig, '*', [orig]);
+ assert_true(orig.locked, 'the original stream should be locked');
+ return promise;
+}, 'window.postMessage should be able to transfer a ReadableStream');
+
+promise_test(t => {
+ const orig = createOriginalReadableStream();
+ const mc = new MessageChannel();
+ const promise = testMessageEvent(mc.port1);
+ mc.port1.start();
+
+ mc.port2.postMessage(orig, [orig]);
+ mc.port2.close();
+ assert_true(orig.locked, 'the original stream should be locked');
+ return promise;
+}, 'port.postMessage should be able to transfer a ReadableStream');
+
+promise_test(t => {
+ const orig = createOriginalReadableStream();
+ const promise = new Promise(resolve => {
+ addEventListener('message', t.step_func(evt => {
+ const [rs1, rs2] = evt.data;
+ assert_equals(rs1, rs2, 'both ReadableStreams should be the same object');
+ resolve();
+ }), {once: true});
+ });
+ postMessage([orig, orig], '*', [orig]);
+ return promise;
+}, 'the same ReadableStream posted multiple times should arrive together');
+
+const onloadPromise = new Promise(resolve => onload = resolve);
+
+promise_test(() => {
+ const orig = createOriginalReadableStream();
+ const promise = testMessageEvent(window);
+ return onloadPromise.then(() => {
+ const echoIframe = document.querySelector('#echo');
+ echoIframe.contentWindow.postMessage(orig, '*', [orig]);
+ return promise;
+ });
+}, 'transfer to and from an iframe should work');
+</script>
+
+<iframe id=echo src="resources/echo-iframe.html" style="display:none"></iframe>
diff --git a/testing/web-platform/tests/streams/transferable/worker.html b/testing/web-platform/tests/streams/transferable/worker.html
new file mode 100644
index 0000000000..c5dc9fc62f
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/worker.html
@@ -0,0 +1,76 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="resources/helpers.js"></script>
+<script src="../resources/test-utils.js"></script>
+<script>
+'use strict';
+
+promise_test(t => {
+ const orig = createOriginalReadableStream();
+ const w = new Worker('resources/receiving-worker.js');
+ t.add_cleanup(() => {
+ w.terminate();
+ });
+ const promise = new Promise((resolve, reject) => {
+ checkTestResults(w).then(resolve, reject);
+ w.onerror = () => reject('error in worker');
+ });
+ w.postMessage(orig, [orig]);
+ assert_true(orig.locked, 'the original stream should be locked');
+ return promise;
+}, 'worker.postMessage should be able to transfer a ReadableStream');
+
+promise_test(t => {
+ const w = new Worker('resources/sending-worker.js');
+ t.add_cleanup(() => {
+ w.terminate();
+ });
+ return new Promise((resolve, reject) => {
+ testMessageEvent(w).then(resolve, reject);
+ w.onerror = () => reject('error in worker');
+ });
+}, 'postMessage in a worker should be able to transfer a ReadableStream');
+
+promise_test(async t => {
+ const w = new Worker('resources/echo-worker.js');
+ let controller;
+ const orig = new ReadableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ const targetStream = await new Promise((resolve, reject) => {
+ w.onmessage = evt => resolve(evt.data);
+ w.onerror = () => reject('error in worker');
+ w.postMessage(orig, [orig]);
+ });
+ const reader = targetStream.getReader();
+ const reads = [];
+  // Place a lot of chunks "in transit". This should increase the likelihood
+  // that there is a chunk at each relevant step when the worker is terminated.
+ for (let i = 0; i < 50; ++i) {
+ await delay(0);
+ controller.enqueue(i);
+ const expected = i;
+ reads.push(reader.read().then(({value, done}) => {
+ assert_false(done, 'we should not be done');
+ assert_equals(value, expected, 'value should match expectation');
+ }));
+ }
+ w.terminate();
+ for (let i = 50; i < 60; ++i) {
+ controller.enqueue(i);
+ reads.push(
+ reader.read().then(t.unreached_func('read() should not resolve')));
+ await delay(0);
+ }
+ // We don't expect every read() to complete, but we want to give them a chance
+ // to reject if they're going to.
+ return Promise.race([
+ Promise.all(reads),
+ flushAsyncEvents()
+ ]);
+}, 'terminating a worker should not error the stream');
+</script>
diff --git a/testing/web-platform/tests/streams/transferable/writable-stream.html b/testing/web-platform/tests/streams/transferable/writable-stream.html
new file mode 100644
index 0000000000..7e25dad94d
--- /dev/null
+++ b/testing/web-platform/tests/streams/transferable/writable-stream.html
@@ -0,0 +1,146 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="resources/helpers.js"></script>
+<script src="../resources/test-utils.js"></script>
+<script src="../resources/recording-streams.js"></script>
+<script>
+'use strict';
+
+promise_test(t => {
+ const orig = new WritableStream();
+ const promise = new Promise(resolve => {
+ addEventListener('message', t.step_func(evt => {
+ const transferred = evt.data;
+ assert_equals(transferred.constructor, WritableStream,
+ 'transferred should be a WritableStream in this realm');
+ assert_true(transferred instanceof WritableStream,
+ 'instanceof check should pass');
+
+ // Perform a brand-check on |transferred|.
+ const writer = WritableStream.prototype.getWriter.call(transferred);
+ resolve();
+ }), {once: true});
+ });
+ postMessage(orig, '*', [orig]);
+ assert_true(orig.locked, 'the original stream should be locked');
+ return promise;
+}, 'window.postMessage should be able to transfer a WritableStream');
+
+test(() => {
+ const ws = new WritableStream();
+ const writer = ws.getWriter();
+ assert_throws_dom('DataCloneError', () => postMessage(ws, '*', [ws]),
+ 'postMessage should throw');
+}, 'a locked WritableStream should not be transferable');
+
+promise_test(t => {
+ const {writable, readable} = new TransformStream();
+ const promise = new Promise(resolve => {
+ addEventListener('message', t.step_func(async evt => {
+ const {writable, readable} = evt.data;
+ const reader = readable.getReader();
+ const writer = writable.getWriter();
+ const writerPromises = Promise.all([
+ writer.write('hi'),
+ writer.close(),
+ ]);
+ const {value, done} = await reader.read();
+ assert_false(done, 'we should not be done');
+ assert_equals(value, 'hi', 'chunk should have been delivered');
+ const readResult = await reader.read();
+ assert_true(readResult.done, 'readable should be closed');
+ await writerPromises;
+ resolve();
+ }), {once: true});
+ });
+ postMessage({writable, readable}, '*', [writable, readable]);
+ return promise;
+}, 'window.postMessage should be able to transfer a {readable, writable} pair');
+
+function transfer(stream) {
+ return new Promise(resolve => {
+ addEventListener('message', evt => resolve(evt.data), { once: true });
+ postMessage(stream, '*', [stream]);
+ });
+}
+
+promise_test(async () => {
+ const orig = new WritableStream(
+ {}, new ByteLengthQueuingStrategy({ highWaterMark: 65536 }));
+ const transferred = await transfer(orig);
+ const writer = transferred.getWriter();
+ assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');
+}, 'desiredSize for a newly-transferred stream should be 1');
+
+promise_test(async () => {
+ const orig = new WritableStream({
+ write() {
+ return new Promise(() => {});
+ }
+ });
+ const transferred = await transfer(orig);
+ const writer = transferred.getWriter();
+ await writer.write('a');
+ assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');
+}, 'effective queue size of a transferred writable should be 2');
+
+promise_test(async () => {
+ const [writeCalled, resolveWriteCalled] = makePromiseAndResolveFunc();
+ let resolveWrite;
+ const orig = new WritableStream({
+ write() {
+ resolveWriteCalled();
+ return new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ }
+ });
+ const transferred = await transfer(orig);
+ const writer = transferred.getWriter();
+ await writer.write('a');
+ let writeDone = false;
+ const writePromise = writer.write('b').then(() => {
+ writeDone = true;
+ });
+ await writeCalled;
+ assert_false(writeDone, 'second write should not have resolved yet');
+ resolveWrite();
+ await writePromise; // (makes sure this resolves)
+}, 'second write should wait for first underlying write to complete');
+
+async function transferredWritableStreamWithAbortPromise() {
+ const [abortCalled, resolveAbortCalled] = makePromiseAndResolveFunc();
+ const orig = recordingWritableStream({
+ abort() {
+ resolveAbortCalled();
+ }
+ });
+ const transferred = await transfer(orig);
+ return { orig, transferred, abortCalled };
+}
+
+promise_test(async t => {
+ const { orig, transferred, abortCalled } = await transferredWritableStreamWithAbortPromise();
+ transferred.abort('p');
+ await abortCalled;
+ assert_array_equals(orig.events, ['abort', 'p'],
+ 'abort() should have been called');
+}, 'abort() should work');
+
+promise_test(async t => {
+ const { orig, transferred, abortCalled } = await transferredWritableStreamWithAbortPromise();
+ const writer = transferred.getWriter();
+ // A WritableStream object cannot be cloned.
+ await promise_rejects_dom(t, 'DataCloneError', writer.write(new WritableStream()),
+ 'the write should reject');
+ await promise_rejects_dom(t, 'DataCloneError', writer.closed,
+ 'the stream should be errored');
+ await abortCalled;
+ assert_equals(orig.events.length, 2, 'abort should have been called');
+ assert_equals(orig.events[0], 'abort', 'first event should be abort');
+ assert_equals(orig.events[1].name, 'DataCloneError',
+ 'reason should be a DataCloneError');
+}, 'writing a unclonable object should error the stream');
+</script>
diff --git a/testing/web-platform/tests/streams/transform-streams/backpressure.any.js b/testing/web-platform/tests/streams/transform-streams/backpressure.any.js
new file mode 100644
index 0000000000..47a21fb7e7
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/backpressure.any.js
@@ -0,0 +1,195 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/recording-streams.js
+// META: script=../resources/test-utils.js
+'use strict';
+
+const error1 = new Error('error1 message');
+error1.name = 'error1';
+
+promise_test(() => {
+ const ts = recordingTransformStream();
+ const writer = ts.writable.getWriter();
+ // This call never resolves.
+ writer.write('a');
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ts.events, [], 'transform should not be called');
+ });
+}, 'backpressure allows no transforms with a default identity transform and no reader');
+
+promise_test(() => {
+ const ts = recordingTransformStream({}, undefined, { highWaterMark: 1 });
+ const writer = ts.writable.getWriter();
+ // This call to write() resolves asynchronously.
+ writer.write('a');
+ // This call to write() waits for backpressure that is never relieved and never calls transform().
+ writer.write('b');
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ts.events, ['transform', 'a'], 'transform should be called once');
+ });
+}, 'backpressure only allows one transform() with a identity transform with a readable HWM of 1 and no reader');
+
+promise_test(() => {
+ // Without a transform() implementation, recordingTransformStream() never enqueues anything.
+ const ts = recordingTransformStream({
+ transform() {
+ // Discard all chunks. As a result, the readable side is never full enough to exert backpressure and transform()
+ // keeps being called.
+ }
+ }, undefined, { highWaterMark: 1 });
+ const writer = ts.writable.getWriter();
+ const writePromises = [];
+ for (let i = 0; i < 4; ++i) {
+ writePromises.push(writer.write(i));
+ }
+ return Promise.all(writePromises).then(() => {
+ assert_array_equals(ts.events, ['transform', 0, 'transform', 1, 'transform', 2, 'transform', 3],
+ 'all 4 events should be transformed');
+ });
+}, 'transform() should keep being called as long as there is no backpressure');
+
+promise_test(() => {
+ const ts = new TransformStream({}, undefined, { highWaterMark: 1 });
+ const writer = ts.writable.getWriter();
+ const reader = ts.readable.getReader();
+ const events = [];
+ const writerPromises = [
+ writer.write('a').then(() => events.push('a')),
+ writer.write('b').then(() => events.push('b')),
+ writer.close().then(() => events.push('closed'))];
+ return delay(0).then(() => {
+ assert_array_equals(events, ['a'], 'the first write should have resolved');
+ return reader.read();
+ }).then(({ value, done }) => {
+ assert_false(done, 'done should not be true');
+ assert_equals('a', value, 'value should be "a"');
+ return delay(0);
+ }).then(() => {
+ assert_array_equals(events, ['a', 'b', 'closed'], 'both writes and close() should have resolved');
+ return reader.read();
+ }).then(({ value, done }) => {
+ assert_false(done, 'done should still not be true');
+ assert_equals('b', value, 'value should be "b"');
+ return reader.read();
+ }).then(({ done }) => {
+ assert_true(done, 'done should be true');
+ return writerPromises;
+ });
+}, 'writes should resolve as soon as transform completes');
+
+promise_test(() => {
+ const ts = new TransformStream(undefined, undefined, { highWaterMark: 0 });
+ const writer = ts.writable.getWriter();
+ const reader = ts.readable.getReader();
+ const readPromise = reader.read();
+ writer.write('a');
+ return readPromise.then(({ value, done }) => {
+ assert_false(done, 'not done');
+ assert_equals(value, 'a', 'value should be "a"');
+ });
+}, 'calling pull() before the first write() with backpressure should work');
+
+promise_test(() => {
+ let reader;
+ const ts = recordingTransformStream({
+ transform(chunk, controller) {
+ controller.enqueue(chunk);
+ return reader.read();
+ }
+ }, undefined, { highWaterMark: 1 });
+ const writer = ts.writable.getWriter();
+ reader = ts.readable.getReader();
+ return writer.write('a');
+}, 'transform() should be able to read the chunk it just enqueued');
+
+promise_test(() => {
+ let resolveTransform;
+ const transformPromise = new Promise(resolve => {
+ resolveTransform = resolve;
+ });
+ const ts = recordingTransformStream({
+ transform() {
+ return transformPromise;
+ }
+ }, undefined, new CountQueuingStrategy({ highWaterMark: Infinity }));
+ const writer = ts.writable.getWriter();
+ assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');
+ return delay(0).then(() => {
+ writer.write('a');
+ assert_array_equals(ts.events, ['transform', 'a']);
+ assert_equals(writer.desiredSize, 0, 'desiredSize should be 0');
+ return flushAsyncEvents();
+ }).then(() => {
+ assert_equals(writer.desiredSize, 0, 'desiredSize should still be 0');
+ resolveTransform();
+ return delay(0);
+ }).then(() => {
+ assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');
+ });
+}, 'blocking transform() should cause backpressure');
+
+promise_test(t => {
+ const ts = new TransformStream();
+ ts.readable.cancel(error1);
+ return promise_rejects_exactly(t, error1, ts.writable.getWriter().closed, 'closed should reject');
+}, 'writer.closed should resolve after readable is canceled during start');
+
+promise_test(t => {
+ const ts = new TransformStream({}, undefined, { highWaterMark: 0 });
+ return delay(0).then(() => {
+ ts.readable.cancel(error1);
+ return promise_rejects_exactly(t, error1, ts.writable.getWriter().closed, 'closed should reject');
+ });
+}, 'writer.closed should resolve after readable is canceled with backpressure');
+
+promise_test(t => {
+ const ts = new TransformStream({}, undefined, { highWaterMark: 1 });
+ return delay(0).then(() => {
+ ts.readable.cancel(error1);
+ return promise_rejects_exactly(t, error1, ts.writable.getWriter().closed, 'closed should reject');
+ });
+}, 'writer.closed should resolve after readable is canceled with no backpressure');
+
+promise_test(() => {
+ const ts = new TransformStream({}, undefined, { highWaterMark: 1 });
+ const writer = ts.writable.getWriter();
+ return delay(0).then(() => {
+ const writePromise = writer.write('a');
+ ts.readable.cancel(error1);
+ return writePromise;
+ });
+}, 'cancelling the readable should cause a pending write to resolve');
+
+promise_test(t => {
+ const rs = new ReadableStream();
+ const ts = new TransformStream();
+ const pipePromise = rs.pipeTo(ts.writable);
+ ts.readable.cancel(error1);
+ return promise_rejects_exactly(t, error1, pipePromise, 'promise returned from pipeTo() should be rejected');
+}, 'cancelling the readable side of a TransformStream should abort an empty pipe');
+
+promise_test(t => {
+ const rs = new ReadableStream();
+ const ts = new TransformStream();
+ const pipePromise = rs.pipeTo(ts.writable);
+ return delay(0).then(() => {
+ ts.readable.cancel(error1);
+ return promise_rejects_exactly(t, error1, pipePromise, 'promise returned from pipeTo() should be rejected');
+ });
+}, 'cancelling the readable side of a TransformStream should abort an empty pipe after startup');
+
+promise_test(t => {
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.enqueue('a');
+ controller.enqueue('b');
+ controller.enqueue('c');
+ }
+ });
+ const ts = new TransformStream();
+ const pipePromise = rs.pipeTo(ts.writable);
+ // Allow data to flow into the pipe.
+ return delay(0).then(() => {
+ ts.readable.cancel(error1);
+ return promise_rejects_exactly(t, error1, pipePromise, 'promise returned from pipeTo() should be rejected');
+ });
+}, 'cancelling the readable side of a TransformStream should abort a full pipe');
diff --git a/testing/web-platform/tests/streams/transform-streams/cancel.any.js b/testing/web-platform/tests/streams/transform-streams/cancel.any.js
new file mode 100644
index 0000000000..5c7fc4eae5
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/cancel.any.js
@@ -0,0 +1,115 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+'use strict';
+
+const thrownError = new Error('bad things are happening!');
+thrownError.name = 'error1';
+
+const originalReason = new Error('original reason');
+originalReason.name = 'error2';
+
+promise_test(async t => {
+ let cancelled = undefined;
+ const ts = new TransformStream({
+ cancel(reason) {
+ cancelled = reason;
+ }
+ });
+ const res = await ts.readable.cancel(thrownError);
+ assert_equals(res, undefined, 'readable.cancel() should return undefined');
+ assert_equals(cancelled, thrownError, 'transformer.cancel() should be called with the passed reason');
+}, 'cancelling the readable side should call transformer.cancel()');
+
+promise_test(async t => {
+ const ts = new TransformStream({
+ cancel(reason) {
+ assert_equals(reason, originalReason, 'transformer.cancel() should be called with the passed reason');
+ throw thrownError;
+ }
+ });
+ const writer = ts.writable.getWriter();
+ const cancelPromise = ts.readable.cancel(originalReason);
+ await promise_rejects_exactly(t, thrownError, cancelPromise, 'readable.cancel() should reject with thrownError');
+ await promise_rejects_exactly(t, thrownError, writer.closed, 'writer.closed should reject with thrownError');
+}, 'cancelling the readable side should reject if transformer.cancel() throws');
+
+promise_test(async t => {
+ let aborted = undefined;
+ const ts = new TransformStream({
+ cancel(reason) {
+ aborted = reason;
+ },
+ flush: t.unreached_func('flush should not be called')
+ });
+ const res = await ts.writable.abort(thrownError);
+ assert_equals(res, undefined, 'writable.abort() should return undefined');
+ assert_equals(aborted, thrownError, 'transformer.cancel() should be called with the passed reason');
+}, 'aborting the writable side should call transformer.abort()');
+
+promise_test(async t => {
+ const ts = new TransformStream({
+ cancel(reason) {
+ assert_equals(reason, originalReason, 'transformer.cancel() should be called with the passed reason');
+ throw thrownError;
+ },
+ flush: t.unreached_func('flush should not be called')
+ });
+ const reader = ts.readable.getReader();
+ const abortPromise = ts.writable.abort(originalReason);
+ await promise_rejects_exactly(t, thrownError, abortPromise, 'writable.abort() should reject with thrownError');
+ await promise_rejects_exactly(t, thrownError, reader.closed, 'reader.closed should reject with thrownError');
+}, 'aborting the writable side should reject if transformer.cancel() throws');
+
+promise_test(async t => {
+ const ts = new TransformStream({
+ async cancel(reason) {
+ assert_equals(reason, originalReason, 'transformer.cancel() should be called with the passed reason');
+ throw thrownError;
+ },
+ flush: t.unreached_func('flush should not be called')
+ });
+ const cancelPromise = ts.readable.cancel(originalReason);
+ const closePromise = ts.writable.close();
+ await Promise.all([
+ promise_rejects_exactly(t, thrownError, cancelPromise, 'cancelPromise should reject with thrownError'),
+ promise_rejects_exactly(t, thrownError, closePromise, 'closePromise should reject with thrownError'),
+ ]);
+}, 'closing the writable side should reject if a parallel transformer.cancel() throws');
+
+promise_test(async t => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ },
+ async cancel(reason) {
+ assert_equals(reason, originalReason, 'transformer.cancel() should be called with the passed reason');
+ controller.error(thrownError);
+ },
+ flush: t.unreached_func('flush should not be called')
+ });
+ const cancelPromise = ts.readable.cancel(originalReason);
+ const closePromise = ts.writable.close();
+ await Promise.all([
+ promise_rejects_exactly(t, thrownError, cancelPromise, 'cancelPromise should reject with thrownError'),
+ promise_rejects_exactly(t, thrownError, closePromise, 'closePromise should reject with thrownError'),
+ ]);
+}, 'readable.cancel() and a parallel writable.close() should reject if a transformer.cancel() calls controller.error()');
+
+promise_test(async t => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ },
+ async cancel(reason) {
+ assert_equals(reason, originalReason, 'transformer.cancel() should be called with the passed reason');
+ controller.error(thrownError);
+ },
+ flush: t.unreached_func('flush should not be called')
+ });
+ const cancelPromise = ts.writable.abort(originalReason);
+ await promise_rejects_exactly(t, thrownError, cancelPromise, 'cancelPromise should reject with thrownError');
+ const closePromise = ts.readable.cancel(1);
+ await promise_rejects_exactly(t, thrownError, closePromise, 'closePromise should reject with thrownError');
+}, 'writable.abort() and readable.cancel() should reject if a transformer.cancel() calls controller.error()');
diff --git a/testing/web-platform/tests/streams/transform-streams/errors.any.js b/testing/web-platform/tests/streams/transform-streams/errors.any.js
new file mode 100644
index 0000000000..bea060b659
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/errors.any.js
@@ -0,0 +1,360 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+'use strict';
+
+const thrownError = new Error('bad things are happening!');
+thrownError.name = 'error1';
+
+promise_test(t => {
+ const ts = new TransformStream({
+ transform() {
+ throw thrownError;
+ },
+ cancel: t.unreached_func('cancel should not be called')
+ });
+
+ const reader = ts.readable.getReader();
+
+ const writer = ts.writable.getWriter();
+
+ return Promise.all([
+ promise_rejects_exactly(t, thrownError, writer.write('a'),
+ 'writable\'s write should reject with the thrown error'),
+ promise_rejects_exactly(t, thrownError, reader.read(),
+ 'readable\'s read should reject with the thrown error'),
+ promise_rejects_exactly(t, thrownError, reader.closed,
+ 'readable\'s closed should be rejected with the thrown error'),
+ promise_rejects_exactly(t, thrownError, writer.closed,
+ 'writable\'s closed should be rejected with the thrown error')
+ ]);
+}, 'TransformStream errors thrown in transform put the writable and readable in an errored state');
+
+promise_test(t => {
+ const ts = new TransformStream({
+ transform() {
+ },
+ flush() {
+ throw thrownError;
+ },
+ cancel: t.unreached_func('cancel should not be called')
+ });
+
+ const reader = ts.readable.getReader();
+
+ const writer = ts.writable.getWriter();
+
+ return Promise.all([
+ writer.write('a'),
+ promise_rejects_exactly(t, thrownError, writer.close(),
+ 'writable\'s close should reject with the thrown error'),
+ promise_rejects_exactly(t, thrownError, reader.read(),
+ 'readable\'s read should reject with the thrown error'),
+ promise_rejects_exactly(t, thrownError, reader.closed,
+ 'readable\'s closed should be rejected with the thrown error'),
+ promise_rejects_exactly(t, thrownError, writer.closed,
+ 'writable\'s closed should be rejected with the thrown error')
+ ]);
+}, 'TransformStream errors thrown in flush put the writable and readable in an errored state');
+
+test(t => {
+ new TransformStream({
+ start(c) {
+ c.enqueue('a');
+ c.error(new Error('generic error'));
+ assert_throws_js(TypeError, () => c.enqueue('b'), 'enqueue() should throw');
+ },
+ cancel: t.unreached_func('cancel should not be called')
+ });
+}, 'errored TransformStream should not enqueue new chunks');
+
+promise_test(t => {
+ const ts = new TransformStream({
+ start() {
+ return flushAsyncEvents().then(() => {
+ throw thrownError;
+ });
+ },
+ transform: t.unreached_func('transform should not be called'),
+ flush: t.unreached_func('flush should not be called'),
+ cancel: t.unreached_func('cancel should not be called')
+ });
+
+ const writer = ts.writable.getWriter();
+ const reader = ts.readable.getReader();
+ return Promise.all([
+ promise_rejects_exactly(t, thrownError, writer.write('a'), 'writer should reject with thrownError'),
+ promise_rejects_exactly(t, thrownError, writer.close(), 'close() should reject with thrownError'),
+ promise_rejects_exactly(t, thrownError, reader.read(), 'reader should reject with thrownError')
+ ]);
+}, 'TransformStream transformer.start() rejected promise should error the stream');
+
+promise_test(t => {
+ const controllerError = new Error('start failure');
+ controllerError.name = 'controllerError';
+ const ts = new TransformStream({
+ start(c) {
+ return flushAsyncEvents()
+ .then(() => {
+ c.error(controllerError);
+ throw new Error('ignored error');
+ });
+ },
+ transform: t.unreached_func('transform should never be called if start() fails'),
+ flush: t.unreached_func('flush should never be called if start() fails'),
+ cancel: t.unreached_func('cancel should never be called if start() fails')
+ });
+
+ const writer = ts.writable.getWriter();
+ const reader = ts.readable.getReader();
+ return Promise.all([
+ promise_rejects_exactly(t, controllerError, writer.write('a'), 'writer should reject with controllerError'),
+ promise_rejects_exactly(t, controllerError, writer.close(), 'close should reject with same error'),
+ promise_rejects_exactly(t, controllerError, reader.read(), 'reader should reject with same error')
+ ]);
+}, 'when controller.error is followed by a rejection, the error reason should come from controller.error');
+
+test(() => {
+ assert_throws_js(URIError, () => new TransformStream({
+ start() { throw new URIError('start thrown error'); },
+ transform() {}
+ }), 'constructor should throw');
+}, 'TransformStream constructor should throw when start does');
+
+test(() => {
+ const strategy = {
+ size() { throw new URIError('size thrown error'); }
+ };
+
+ assert_throws_js(URIError, () => new TransformStream({
+ start(c) {
+ c.enqueue('a');
+ },
+ transform() {}
+ }, undefined, strategy), 'constructor should throw the same error strategy.size throws');
+}, 'when strategy.size throws inside start(), the constructor should throw the same error');
+
+test(() => {
+ const controllerError = new URIError('controller.error');
+
+ let controller;
+ const strategy = {
+ size() {
+ controller.error(controllerError);
+ throw new Error('redundant error');
+ }
+ };
+
+ assert_throws_js(URIError, () => new TransformStream({
+ start(c) {
+ controller = c;
+ c.enqueue('a');
+ },
+ transform() {}
+ }, undefined, strategy), 'the first error should be thrown');
+}, 'when strategy.size calls controller.error() then throws, the constructor should throw the first error');
+
+promise_test(t => {
+ const ts = new TransformStream();
+ const writer = ts.writable.getWriter();
+ const closedPromise = writer.closed;
+ return Promise.all([
+ ts.readable.cancel(thrownError),
+ promise_rejects_exactly(t, thrownError, closedPromise, 'closed should throw a TypeError')
+ ]);
+}, 'cancelling the readable side should error the writable');
+
+promise_test(t => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ const writer = ts.writable.getWriter();
+ const reader = ts.readable.getReader();
+ const writePromise = writer.write('a');
+ const closePromise = writer.close();
+ controller.error(thrownError);
+ return Promise.all([
+ promise_rejects_exactly(t, thrownError, reader.closed, 'reader.closed should reject'),
+ promise_rejects_exactly(t, thrownError, writePromise, 'writePromise should reject'),
+ promise_rejects_exactly(t, thrownError, closePromise, 'closePromise should reject')]);
+}, 'it should be possible to error the readable between close requested and complete');
+
+promise_test(t => {
+ const ts = new TransformStream({
+ transform(chunk, controller) {
+ controller.enqueue(chunk);
+ controller.terminate();
+ throw thrownError;
+ }
+ }, undefined, { highWaterMark: 1 });
+ const writePromise = ts.writable.getWriter().write('a');
+ const closedPromise = ts.readable.getReader().closed;
+ return Promise.all([
+ promise_rejects_exactly(t, thrownError, writePromise, 'write() should reject'),
+ promise_rejects_exactly(t, thrownError, closedPromise, 'reader.closed should reject')
+ ]);
+}, 'an exception from transform() should error the stream if terminate has been requested but not completed');
+
+promise_test(t => {
+ const ts = new TransformStream();
+ const writer = ts.writable.getWriter();
+ // The microtask following transformer.start() hasn't completed yet, so the abort is queued and not notified to the
+ // TransformStream yet.
+ const abortPromise = writer.abort(thrownError);
+ const cancelPromise = ts.readable.cancel(new Error('cancel reason'));
+ return Promise.all([
+ abortPromise,
+ cancelPromise,
+ promise_rejects_exactly(t, thrownError, writer.closed, 'writer.closed should reject'),
+ ]);
+}, 'abort should set the close reason for the writable when it happens before cancel during start, and cancel should ' +
+ 'reject');
+
+promise_test(t => {
+ let resolveTransform;
+ const transformPromise = new Promise(resolve => {
+ resolveTransform = resolve;
+ });
+ const ts = new TransformStream({
+ transform() {
+ return transformPromise;
+ }
+ }, undefined, { highWaterMark: 2 });
+ const writer = ts.writable.getWriter();
+ return delay(0).then(() => {
+ const writePromise = writer.write();
+ const abortPromise = writer.abort(thrownError);
+ const cancelPromise = ts.readable.cancel(new Error('cancel reason'));
+ resolveTransform();
+ return Promise.all([
+ writePromise,
+ abortPromise,
+ cancelPromise,
+ promise_rejects_exactly(t, thrownError, writer.closed, 'writer.closed should reject with thrownError')]);
+ });
+}, 'abort should set the close reason for the writable when it happens before cancel during underlying sink write, ' +
+ 'but cancel should still succeed');
+
+const ignoredError = new Error('ignoredError');
+ignoredError.name = 'ignoredError';
+
+promise_test(t => {
+ const ts = new TransformStream({
+ start(controller) {
+ controller.error(thrownError);
+ controller.error(ignoredError);
+ }
+ });
+ return promise_rejects_exactly(t, thrownError, ts.writable.abort(), 'abort() should reject with thrownError');
+}, 'controller.error() should do nothing the second time it is called');
+
+promise_test(t => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ const cancelPromise = ts.readable.cancel(ignoredError);
+ controller.error(thrownError);
+ return Promise.all([
+ cancelPromise,
+ promise_rejects_exactly(t, thrownError, ts.writable.getWriter().closed, 'closed should reject with thrownError')
+ ]);
+}, 'controller.error() should close writable immediately after readable.cancel()');
+
+promise_test(t => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ return ts.readable.cancel(thrownError).then(() => {
+ controller.error(ignoredError);
+ return promise_rejects_exactly(t, thrownError, ts.writable.getWriter().closed, 'closed should reject with thrownError');
+ });
+}, 'controller.error() should do nothing after readable.cancel() resolves');
+
+promise_test(t => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ return ts.writable.abort(thrownError).then(() => {
+ controller.error(ignoredError);
+ return promise_rejects_exactly(t, thrownError, ts.writable.getWriter().closed, 'closed should reject with thrownError');
+ });
+}, 'controller.error() should do nothing after writable.abort() has completed');
+
+promise_test(t => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ },
+ transform() {
+ throw thrownError;
+ }
+ }, undefined, { highWaterMark: Infinity });
+ const writer = ts.writable.getWriter();
+ return promise_rejects_exactly(t, thrownError, writer.write(), 'write() should reject').then(() => {
+ controller.error();
+ return promise_rejects_exactly(t, thrownError, writer.closed, 'closed should reject with thrownError');
+ });
+}, 'controller.error() should do nothing after a transformer method has thrown an exception');
+
+promise_test(t => {
+ let controller;
+ let calls = 0;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ },
+ transform() {
+ ++calls;
+ }
+ }, undefined, { highWaterMark: 1 });
+ return delay(0).then(() => {
+ // Create backpressure.
+ controller.enqueue('a');
+ const writer = ts.writable.getWriter();
+ // transform() will not be called until backpressure is relieved.
+ const writePromise = writer.write('b');
+ assert_equals(calls, 0, 'transform() should not have been called');
+ controller.error(thrownError);
+ // Now backpressure has been relieved and the write can proceed.
+ return promise_rejects_exactly(t, thrownError, writePromise, 'write() should reject').then(() => {
+ assert_equals(calls, 0, 'transform() should not be called');
+ });
+ });
+}, 'erroring during write with backpressure should result in the write failing');
+
+promise_test(t => {
+ const ts = new TransformStream({}, undefined, { highWaterMark: 0 });
+ return delay(0).then(() => {
+ const writer = ts.writable.getWriter();
+ // write should start synchronously
+ const writePromise = writer.write(0);
+ // The underlying sink's abort() is not called until the write() completes.
+ const abortPromise = writer.abort(thrownError);
+ // Perform a read to relieve backpressure and permit the write() to complete.
+ const readPromise = ts.readable.getReader().read();
+ return Promise.all([
+ promise_rejects_exactly(t, thrownError, readPromise, 'read() should reject'),
+ promise_rejects_exactly(t, thrownError, writePromise, 'write() should reject'),
+ abortPromise
+ ]);
+ });
+}, 'a write() that was waiting for backpressure should reject if the writable is aborted');
+
+promise_test(t => {
+ const ts = new TransformStream();
+ ts.writable.abort(thrownError);
+ const reader = ts.readable.getReader();
+ return promise_rejects_exactly(t, thrownError, reader.read(), 'read() should reject with thrownError');
+}, 'the readable should be errored with the reason passed to the writable abort() method');
diff --git a/testing/web-platform/tests/streams/transform-streams/flush.any.js b/testing/web-platform/tests/streams/transform-streams/flush.any.js
new file mode 100644
index 0000000000..c95d8ae118
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/flush.any.js
@@ -0,0 +1,146 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+'use strict';
+
+promise_test(() => {
+ let flushCalled = false;
+ const ts = new TransformStream({
+ transform() { },
+ flush() {
+ flushCalled = true;
+ }
+ });
+
+ return ts.writable.getWriter().close().then(() => {
+ return assert_true(flushCalled, 'closing the writable triggers the transform flush immediately');
+ });
+}, 'TransformStream flush is called immediately when the writable is closed, if no writes are queued');
+
+promise_test(() => {
+ let flushCalled = false;
+ let resolveTransform;
+ const ts = new TransformStream({
+ transform() {
+ return new Promise(resolve => {
+ resolveTransform = resolve;
+ });
+ },
+ flush() {
+ flushCalled = true;
+ return new Promise(() => {}); // never resolves
+ }
+ }, undefined, { highWaterMark: 1 });
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+ writer.close();
+ assert_false(flushCalled, 'closing the writable does not immediately call flush if writes are not finished');
+
+ let rsClosed = false;
+ ts.readable.getReader().closed.then(() => {
+ rsClosed = true;
+ });
+
+ return delay(0).then(() => {
+ assert_false(flushCalled, 'closing the writable does not asynchronously call flush if writes are not finished');
+ resolveTransform();
+ return delay(0);
+ }).then(() => {
+ assert_true(flushCalled, 'flush is eventually called');
+ assert_false(rsClosed, 'if flushPromise does not resolve, the readable does not become closed');
+ });
+}, 'TransformStream flush is called after all queued writes finish, once the writable is closed');
+
+promise_test(() => {
+ let c;
+ const ts = new TransformStream({
+ start(controller) {
+ c = controller;
+ },
+ transform() {
+ },
+ flush() {
+ c.enqueue('x');
+ c.enqueue('y');
+ }
+ });
+
+ const reader = ts.readable.getReader();
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+ writer.close();
+ return reader.read().then(result1 => {
+ assert_equals(result1.value, 'x', 'the first chunk read is the first one enqueued in flush');
+ assert_equals(result1.done, false, 'the first chunk read is the first one enqueued in flush');
+
+ return reader.read().then(result2 => {
+ assert_equals(result2.value, 'y', 'the second chunk read is the second one enqueued in flush');
+ assert_equals(result2.done, false, 'the second chunk read is the second one enqueued in flush');
+ });
+ });
+}, 'TransformStream flush gets a chance to enqueue more into the readable');
+
+promise_test(() => {
+ let c;
+ const ts = new TransformStream({
+ start(controller) {
+ c = controller;
+ },
+ transform() {
+ },
+ flush() {
+ c.enqueue('x');
+ c.enqueue('y');
+ return delay(0);
+ }
+ });
+
+ const reader = ts.readable.getReader();
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+ writer.close();
+
+ return Promise.all([
+ reader.read().then(result1 => {
+ assert_equals(result1.value, 'x', 'the first chunk read is the first one enqueued in flush');
+ assert_equals(result1.done, false, 'the first chunk read is the first one enqueued in flush');
+
+ return reader.read().then(result2 => {
+ assert_equals(result2.value, 'y', 'the second chunk read is the second one enqueued in flush');
+ assert_equals(result2.done, false, 'the second chunk read is the second one enqueued in flush');
+ });
+ }),
+ reader.closed.then(() => {
+ assert_true(true, 'readable reader becomes closed');
+ })
+ ]);
+}, 'TransformStream flush gets a chance to enqueue more into the readable, and can then async close');
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+promise_test(t => {
+ const ts = new TransformStream({
+ flush(controller) {
+ controller.error(error1);
+ }
+ });
+ return promise_rejects_exactly(t, error1, ts.writable.getWriter().close(), 'close() should reject');
+}, 'error() during flush should cause writer.close() to reject');
+
+promise_test(async t => {
+ let flushed = false;
+ const ts = new TransformStream({
+ flush() {
+ flushed = true;
+ },
+ cancel: t.unreached_func('cancel should not be called')
+ });
+ const closePromise = ts.writable.close();
+ await delay(0);
+ const cancelPromise = ts.readable.cancel(error1);
+ await Promise.all([closePromise, cancelPromise]);
+ assert_equals(flushed, true, 'transformer.flush() should be called');
+}, 'closing the writable side should call transformer.flush() and a parallel readable.cancel() should not reject');
diff --git a/testing/web-platform/tests/streams/transform-streams/general.any.js b/testing/web-platform/tests/streams/transform-streams/general.any.js
new file mode 100644
index 0000000000..a40ef30843
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/general.any.js
@@ -0,0 +1,452 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/rs-utils.js
+'use strict';
+
+test(() => {
+ new TransformStream({ transform() { } });
+}, 'TransformStream can be constructed with a transform function');
+
+test(() => {
+ new TransformStream();
+ new TransformStream({});
+}, 'TransformStream can be constructed with no transform function');
+
+test(() => {
+ const ts = new TransformStream({ transform() { } });
+
+ const writer = ts.writable.getWriter();
+ assert_equals(writer.desiredSize, 1, 'writer.desiredSize should be 1');
+}, 'TransformStream writable starts in the writable state');
+
+promise_test(() => {
+ const ts = new TransformStream();
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+ assert_equals(writer.desiredSize, 0, 'writer.desiredSize should be 0 after write()');
+
+ return ts.readable.getReader().read().then(result => {
+ assert_equals(result.value, 'a',
+ 'result from reading the readable is the same as was written to writable');
+ assert_false(result.done, 'stream should not be done');
+
+ return delay(0).then(() => assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 again'));
+ });
+}, 'Identity TransformStream: can read from readable what is put into writable');
+
+promise_test(() => {
+ let c;
+ const ts = new TransformStream({
+ start(controller) {
+ c = controller;
+ },
+ transform(chunk) {
+ c.enqueue(chunk.toUpperCase());
+ }
+ });
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+
+ return ts.readable.getReader().read().then(result => {
+ assert_equals(result.value, 'A',
+ 'result from reading the readable is the transformation of what was written to writable');
+ assert_false(result.done, 'stream should not be done');
+ });
+}, 'Uppercaser sync TransformStream: can read from readable transformed version of what is put into writable');
+
+promise_test(() => {
+ let c;
+ const ts = new TransformStream({
+ start(controller) {
+ c = controller;
+ },
+ transform(chunk) {
+ c.enqueue(chunk.toUpperCase());
+ c.enqueue(chunk.toUpperCase());
+ }
+ });
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+
+ const reader = ts.readable.getReader();
+
+ return reader.read().then(result1 => {
+ assert_equals(result1.value, 'A',
+ 'the first chunk read is the transformation of the single chunk written');
+ assert_false(result1.done, 'stream should not be done');
+
+ return reader.read().then(result2 => {
+ assert_equals(result2.value, 'A',
+ 'the second chunk read is also the transformation of the single chunk written');
+ assert_false(result2.done, 'stream should not be done');
+ });
+ });
+}, 'Uppercaser-doubler sync TransformStream: can read both chunks put into the readable');
+
+promise_test(() => {
+ let c;
+ const ts = new TransformStream({
+ start(controller) {
+ c = controller;
+ },
+ transform(chunk) {
+ return delay(0).then(() => c.enqueue(chunk.toUpperCase()));
+ }
+ });
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+
+ return ts.readable.getReader().read().then(result => {
+ assert_equals(result.value, 'A',
+ 'result from reading the readable is the transformation of what was written to writable');
+ assert_false(result.done, 'stream should not be done');
+ });
+}, 'Uppercaser async TransformStream: can read from readable transformed version of what is put into writable');
+
+promise_test(() => {
+ let doSecondEnqueue;
+ let returnFromTransform;
+ const ts = new TransformStream({
+ transform(chunk, controller) {
+ delay(0).then(() => controller.enqueue(chunk.toUpperCase()));
+ doSecondEnqueue = () => controller.enqueue(chunk.toUpperCase());
+ return new Promise(resolve => {
+ returnFromTransform = resolve;
+ });
+ }
+ });
+
+ const reader = ts.readable.getReader();
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+
+ return reader.read().then(result1 => {
+ assert_equals(result1.value, 'A',
+ 'the first chunk read is the transformation of the single chunk written');
+ assert_false(result1.done, 'stream should not be done');
+ doSecondEnqueue();
+
+ return reader.read().then(result2 => {
+ assert_equals(result2.value, 'A',
+ 'the second chunk read is also the transformation of the single chunk written');
+ assert_false(result2.done, 'stream should not be done');
+ returnFromTransform();
+ });
+ });
+}, 'Uppercaser-doubler async TransformStream: can read both chunks put into the readable');
+
+promise_test(() => {
+ const ts = new TransformStream({ transform() { } });
+
+ const writer = ts.writable.getWriter();
+ writer.close();
+
+ return Promise.all([writer.closed, ts.readable.getReader().closed]);
+}, 'TransformStream: by default, closing the writable closes the readable (when there are no queued writes)');
+
+promise_test(() => {
+ let transformResolve;
+ const transformPromise = new Promise(resolve => {
+ transformResolve = resolve;
+ });
+ const ts = new TransformStream({
+ transform() {
+ return transformPromise;
+ }
+ }, undefined, { highWaterMark: 1 });
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+ writer.close();
+
+ let rsClosed = false;
+ ts.readable.getReader().closed.then(() => {
+ rsClosed = true;
+ });
+
+ return delay(0).then(() => {
+ assert_equals(rsClosed, false, 'readable is not closed after a tick');
+ transformResolve();
+
+ return writer.closed.then(() => {
+ // TODO: Is this expectation correct?
+ assert_equals(rsClosed, true, 'readable is closed at that point');
+ });
+ });
+}, 'TransformStream: by default, closing the writable waits for transforms to finish before closing both');
+
+promise_test(() => {
+ let c;
+ const ts = new TransformStream({
+ start(controller) {
+ c = controller;
+ },
+ transform() {
+ c.enqueue('x');
+ c.enqueue('y');
+ return delay(0);
+ }
+ });
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+ writer.close();
+
+ const readableChunks = readableStreamToArray(ts.readable);
+
+ return writer.closed.then(() => {
+ return readableChunks.then(chunks => {
+ assert_array_equals(chunks, ['x', 'y'], 'both enqueued chunks can be read from the readable');
+ });
+ });
+}, 'TransformStream: by default, closing the writable closes the readable after sync enqueues and async done');
+
+promise_test(() => {
+ let c;
+ const ts = new TransformStream({
+ start(controller) {
+ c = controller;
+ },
+ transform() {
+ return delay(0)
+ .then(() => c.enqueue('x'))
+ .then(() => c.enqueue('y'))
+ .then(() => delay(0));
+ }
+ });
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+ writer.close();
+
+ const readableChunks = readableStreamToArray(ts.readable);
+
+ return writer.closed.then(() => {
+ return readableChunks.then(chunks => {
+ assert_array_equals(chunks, ['x', 'y'], 'both enqueued chunks can be read from the readable');
+ });
+ });
+}, 'TransformStream: by default, closing the writable closes the readable after async enqueues and async done');
+
+promise_test(() => {
+ let c;
+ const ts = new TransformStream({
+ suffix: '-suffix',
+
+ start(controller) {
+ c = controller;
+ c.enqueue('start' + this.suffix);
+ },
+
+ transform(chunk) {
+ c.enqueue(chunk + this.suffix);
+ },
+
+ flush() {
+ c.enqueue('flushed' + this.suffix);
+ }
+ });
+
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+ writer.close();
+
+ const readableChunks = readableStreamToArray(ts.readable);
+
+ return writer.closed.then(() => {
+ return readableChunks.then(chunks => {
+ assert_array_equals(chunks, ['start-suffix', 'a-suffix', 'flushed-suffix'], 'all enqueued chunks have suffixes');
+ });
+ });
+}, 'Transform stream should call transformer methods as methods');
+
+promise_test(() => {
+ function functionWithOverloads() {}
+ functionWithOverloads.apply = () => assert_unreached('apply() should not be called');
+ functionWithOverloads.call = () => assert_unreached('call() should not be called');
+ const ts = new TransformStream({
+ start: functionWithOverloads,
+ transform: functionWithOverloads,
+ flush: functionWithOverloads
+ });
+ const writer = ts.writable.getWriter();
+ writer.write('a');
+ writer.close();
+
+ return readableStreamToArray(ts.readable);
+}, 'methods should not not have .apply() or .call() called');
+
+promise_test(t => {
+ let startCalled = false;
+ let startDone = false;
+ let transformDone = false;
+ let flushDone = false;
+ const ts = new TransformStream({
+ start() {
+ startCalled = true;
+ return flushAsyncEvents().then(() => {
+ startDone = true;
+ });
+ },
+ transform() {
+ return t.step(() => {
+ assert_true(startDone, 'transform() should not be called until the promise returned from start() has resolved');
+ return flushAsyncEvents().then(() => {
+ transformDone = true;
+ });
+ });
+ },
+ flush() {
+ return t.step(() => {
+ assert_true(transformDone,
+ 'flush() should not be called until the promise returned from transform() has resolved');
+ return flushAsyncEvents().then(() => {
+ flushDone = true;
+ });
+ });
+ }
+ }, undefined, { highWaterMark: 1 });
+
+ assert_true(startCalled, 'start() should be called synchronously');
+
+ const writer = ts.writable.getWriter();
+ const writePromise = writer.write('a');
+ return writer.close().then(() => {
+ assert_true(flushDone, 'promise returned from flush() should have resolved');
+ return writePromise;
+ });
+}, 'TransformStream start, transform, and flush should be strictly ordered');
+
+promise_test(() => {
+ let transformCalled = false;
+ const ts = new TransformStream({
+ transform() {
+ transformCalled = true;
+ }
+ }, undefined, { highWaterMark: Infinity });
+ // transform() is only called synchronously when there is no backpressure and all microtasks have run.
+ return delay(0).then(() => {
+ const writePromise = ts.writable.getWriter().write();
+ assert_true(transformCalled, 'transform() should have been called');
+ return writePromise;
+ });
+}, 'it should be possible to call transform() synchronously');
+
+promise_test(() => {
+ const ts = new TransformStream({}, undefined, { highWaterMark: 0 });
+
+ const writer = ts.writable.getWriter();
+ writer.close();
+
+ return Promise.all([writer.closed, ts.readable.getReader().closed]);
+}, 'closing the writable should close the readable when there are no queued chunks, even with backpressure');
+
+test(() => {
+ new TransformStream({
+ start(controller) {
+ controller.terminate();
+ assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue should throw');
+ }
+ });
+}, 'enqueue() should throw after controller.terminate()');
+
+promise_test(() => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ const cancelPromise = ts.readable.cancel();
+ assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue should throw');
+ return cancelPromise;
+}, 'enqueue() should throw after readable.cancel()');
+
+test(() => {
+ new TransformStream({
+ start(controller) {
+ controller.terminate();
+ controller.terminate();
+ }
+ });
+}, 'controller.terminate() should do nothing the second time it is called');
+
+promise_test(t => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ const cancelReason = { name: 'cancelReason' };
+ const cancelPromise = ts.readable.cancel(cancelReason);
+ controller.terminate();
+ return Promise.all([
+ cancelPromise,
+ promise_rejects_js(t, TypeError, ts.writable.getWriter().closed, 'closed should reject with TypeError')
+ ]);
+}, 'terminate() should abort writable immediately after readable.cancel()');
+
+promise_test(t => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ const cancelReason = { name: 'cancelReason' };
+ return ts.readable.cancel(cancelReason).then(() => {
+ controller.terminate();
+ return promise_rejects_exactly(t, cancelReason, ts.writable.getWriter().closed, 'closed should reject with TypeError');
+ })
+}, 'terminate() should do nothing after readable.cancel() resolves');
+
+
+promise_test(() => {
+ let calls = 0;
+ new TransformStream({
+ start() {
+ ++calls;
+ }
+ });
+ return flushAsyncEvents().then(() => {
+ assert_equals(calls, 1, 'start() should have been called exactly once');
+ });
+}, 'start() should not be called twice');
+
+test(() => {
+ assert_throws_js(RangeError, () => new TransformStream({ readableType: 'bytes' }), 'constructor should throw');
+}, 'specifying a defined readableType should throw');
+
+test(() => {
+ assert_throws_js(RangeError, () => new TransformStream({ writableType: 'bytes' }), 'constructor should throw');
+}, 'specifying a defined writableType should throw');
+
+test(() => {
+ class Subclass extends TransformStream {
+ extraFunction() {
+ return true;
+ }
+ }
+ assert_equals(
+ Object.getPrototypeOf(Subclass.prototype), TransformStream.prototype,
+ 'Subclass.prototype\'s prototype should be TransformStream.prototype');
+ assert_equals(Object.getPrototypeOf(Subclass), TransformStream,
+ 'Subclass\'s prototype should be TransformStream');
+ const sub = new Subclass();
+ assert_true(sub instanceof TransformStream,
+ 'Subclass object should be an instance of TransformStream');
+ assert_true(sub instanceof Subclass,
+ 'Subclass object should be an instance of Subclass');
+ const readableGetter = Object.getOwnPropertyDescriptor(
+ TransformStream.prototype, 'readable').get;
+ assert_equals(readableGetter.call(sub), sub.readable,
+ 'Subclass object should pass brand check');
+ assert_true(sub.extraFunction(),
+ 'extraFunction() should be present on Subclass object');
+}, 'Subclassing TransformStream should work');
diff --git a/testing/web-platform/tests/streams/transform-streams/invalid-realm.tentative.window.js b/testing/web-platform/tests/streams/transform-streams/invalid-realm.tentative.window.js
new file mode 100644
index 0000000000..57cdfd9486
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/invalid-realm.tentative.window.js
@@ -0,0 +1,17 @@
+// TransformStream should still work even if the realm is detached.
+
+// Adds an iframe to the document and returns it.
+function addIframe() {
+ const iframe = document.createElement('iframe');
+ document.body.appendChild(iframe);
+ return iframe;
+}
+
+promise_test(async t => {
+ const iframe = addIframe();
+ const stream = new iframe.contentWindow.TransformStream();
+ const readPromise = stream.readable.getReader().read();
+ const writer = stream.writable.getWriter();
+ iframe.remove();
+ return Promise.all([writer.write('A'), readPromise]);
+}, 'TransformStream: write in detached realm should succeed');
diff --git a/testing/web-platform/tests/streams/transform-streams/lipfuzz.any.js b/testing/web-platform/tests/streams/transform-streams/lipfuzz.any.js
new file mode 100644
index 0000000000..e334705db4
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/lipfuzz.any.js
@@ -0,0 +1,163 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+class LipFuzzTransformer {
+ constructor(substitutions) {
+ this.substitutions = substitutions;
+ this.partialChunk = '';
+ this.lastIndex = undefined;
+ }
+
+ transform(chunk, controller) {
+ chunk = this.partialChunk + chunk;
+ this.partialChunk = '';
+ // lastIndex is the index of the first character after the last substitution.
+ this.lastIndex = 0;
+ chunk = chunk.replace(/\{\{([a-zA-Z0-9_-]+)\}\}/g, this.replaceTag.bind(this));
+ // Regular expression for an incomplete template at the end of a string.
+ const partialAtEndRegexp = /\{(\{([a-zA-Z0-9_-]+(\})?)?)?$/g;
+ // Avoid looking at any characters that have already been substituted.
+ partialAtEndRegexp.lastIndex = this.lastIndex;
+ this.lastIndex = undefined;
+ const match = partialAtEndRegexp.exec(chunk);
+ if (match) {
+ this.partialChunk = chunk.substring(match.index);
+ chunk = chunk.substring(0, match.index);
+ }
+ controller.enqueue(chunk);
+ }
+
+ flush(controller) {
+ if (this.partialChunk.length > 0) {
+ controller.enqueue(this.partialChunk);
+ }
+ }
+
+ replaceTag(match, p1, offset) {
+ let replacement = this.substitutions[p1];
+ if (replacement === undefined) {
+ replacement = '';
+ }
+ this.lastIndex = offset + replacement.length;
+ return replacement;
+ }
+}
+
+const substitutions = {
+ in1: 'out1',
+ in2: 'out2',
+ quine: '{{quine}}',
+ bogusPartial: '{{incompleteResult}'
+};
+
+const cases = [
+ {
+ input: [''],
+ output: ['']
+ },
+ {
+ input: [],
+ output: []
+ },
+ {
+ input: ['{{in1}}'],
+ output: ['out1']
+ },
+ {
+ input: ['z{{in1}}'],
+ output: ['zout1']
+ },
+ {
+ input: ['{{in1}}q'],
+ output: ['out1q']
+ },
+ {
+ input: ['{{in1}}{{in1}'],
+ output: ['out1', '{{in1}']
+ },
+ {
+ input: ['{{in1}}{{in1}', '}'],
+ output: ['out1', 'out1']
+ },
+ {
+ input: ['{{in1', '}}'],
+ output: ['', 'out1']
+ },
+ {
+ input: ['{{', 'in1}}'],
+ output: ['', 'out1']
+ },
+ {
+ input: ['{', '{in1}}'],
+ output: ['', 'out1']
+ },
+ {
+ input: ['{{', 'in1}'],
+ output: ['', '', '{{in1}']
+ },
+ {
+ input: ['{'],
+ output: ['', '{']
+ },
+ {
+ input: ['{', ''],
+ output: ['', '', '{']
+ },
+ {
+ input: ['{', '{', 'i', 'n', '1', '}', '}'],
+ output: ['', '', '', '', '', '', 'out1']
+ },
+ {
+ input: ['{{in1}}{{in2}}{{in1}}'],
+ output: ['out1out2out1']
+ },
+ {
+ input: ['{{wrong}}'],
+ output: ['']
+ },
+ {
+ input: ['{{wron', 'g}}'],
+ output: ['', '']
+ },
+ {
+ input: ['{{quine}}'],
+ output: ['{{quine}}']
+ },
+ {
+ input: ['{{bogusPartial}}'],
+ output: ['{{incompleteResult}']
+ },
+ {
+ input: ['{{bogusPartial}}}'],
+ output: ['{{incompleteResult}}']
+ }
+];
+
+for (const testCase of cases) {
+ const inputChunks = testCase.input;
+ const outputChunks = testCase.output;
+ promise_test(() => {
+ const lft = new TransformStream(new LipFuzzTransformer(substitutions));
+ const writer = lft.writable.getWriter();
+ const promises = [];
+ for (const inputChunk of inputChunks) {
+ promises.push(writer.write(inputChunk));
+ }
+ promises.push(writer.close());
+ const reader = lft.readable.getReader();
+ let readerChain = Promise.resolve();
+ for (const outputChunk of outputChunks) {
+ readerChain = readerChain.then(() => {
+ return reader.read().then(({ value, done }) => {
+ assert_false(done, `done should be false when reading ${outputChunk}`);
+ assert_equals(value, outputChunk, `value should match outputChunk`);
+ });
+ });
+ }
+ readerChain = readerChain.then(() => {
+ return reader.read().then(({ done }) => assert_true(done, `done should be true`));
+ });
+ promises.push(readerChain);
+ return Promise.all(promises);
+ }, `testing "${inputChunks}" (length ${inputChunks.length})`);
+}
diff --git a/testing/web-platform/tests/streams/transform-streams/patched-global.any.js b/testing/web-platform/tests/streams/transform-streams/patched-global.any.js
new file mode 100644
index 0000000000..cc111eda45
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/patched-global.any.js
@@ -0,0 +1,53 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+// Tests which patch the global environment are kept separate to avoid
+// interfering with other tests.
+
+test(t => {
+ // eslint-disable-next-line no-extend-native, accessor-pairs
+ Object.defineProperty(Object.prototype, 'highWaterMark', {
+ set() { throw new Error('highWaterMark setter called'); },
+ configurable: true
+ });
+
+ // eslint-disable-next-line no-extend-native, accessor-pairs
+ Object.defineProperty(Object.prototype, 'size', {
+ set() { throw new Error('size setter called'); },
+ configurable: true
+ });
+
+ t.add_cleanup(() => {
+ delete Object.prototype.highWaterMark;
+ delete Object.prototype.size;
+ });
+
+ assert_not_equals(new TransformStream(), null, 'constructor should work');
+}, 'TransformStream constructor should not call setters for highWaterMark or size');
+
+test(t => {
+ const oldReadableStream = ReadableStream;
+ const oldWritableStream = WritableStream;
+ const getReader = ReadableStream.prototype.getReader;
+ const getWriter = WritableStream.prototype.getWriter;
+
+ // Replace ReadableStream and WritableStream with broken versions.
+ ReadableStream = function () {
+ throw new Error('Called the global ReadableStream constructor');
+ };
+ WritableStream = function () {
+ throw new Error('Called the global WritableStream constructor');
+ };
+ t.add_cleanup(() => {
+ ReadableStream = oldReadableStream;
+ WritableStream = oldWritableStream;
+ });
+
+ const ts = new TransformStream();
+
+ // Just to be sure, ensure the readable and writable pass brand checks.
+ assert_not_equals(getReader.call(ts.readable), undefined,
+ 'getReader should work when called on ts.readable');
+ assert_not_equals(getWriter.call(ts.writable), undefined,
+ 'getWriter should work when called on ts.writable');
+}, 'TransformStream should use the original value of ReadableStream and WritableStream');
diff --git a/testing/web-platform/tests/streams/transform-streams/properties.any.js b/testing/web-platform/tests/streams/transform-streams/properties.any.js
new file mode 100644
index 0000000000..dbfd1aa372
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/properties.any.js
@@ -0,0 +1,49 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+const transformerMethods = {
+ start: {
+ length: 1,
+ trigger: () => Promise.resolve()
+ },
+ transform: {
+ length: 2,
+ trigger: ts => ts.writable.getWriter().write()
+ },
+ flush: {
+ length: 1,
+ trigger: ts => ts.writable.getWriter().close()
+ }
+};
+
+for (const method in transformerMethods) {
+ const { length, trigger } = transformerMethods[method];
+
+ // Some semantic tests of how transformer methods are called can be found in general.js, as well as in the test files
+ // specific to each method.
+ promise_test(() => {
+ let argCount;
+ const ts = new TransformStream({
+ [method](...args) {
+ argCount = args.length;
+ }
+ }, undefined, { highWaterMark: Infinity });
+ return Promise.resolve(trigger(ts)).then(() => {
+ assert_equals(argCount, length, `${method} should be called with ${length} arguments`);
+ });
+ }, `transformer method ${method} should be called with the right number of arguments`);
+
+ promise_test(() => {
+ let methodWasCalled = false;
+ function Transformer() {}
+ Transformer.prototype = {
+ [method]() {
+ methodWasCalled = true;
+ }
+ };
+ const ts = new TransformStream(new Transformer(), undefined, { highWaterMark: Infinity });
+ return Promise.resolve(trigger(ts)).then(() => {
+ assert_true(methodWasCalled, `${method} should be called`);
+ });
+ }, `transformer method ${method} should be called even when it's located on the prototype chain`);
+}
diff --git a/testing/web-platform/tests/streams/transform-streams/reentrant-strategies.any.js b/testing/web-platform/tests/streams/transform-streams/reentrant-strategies.any.js
new file mode 100644
index 0000000000..a6d4596558
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/reentrant-strategies.any.js
@@ -0,0 +1,323 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/recording-streams.js
+// META: script=../resources/rs-utils.js
+// META: script=../resources/test-utils.js
+'use strict';
+
+// The size() function of readableStrategy can re-entrantly call back into the TransformStream implementation. This
+// makes it risky to cache state across the call to ReadableStreamDefaultControllerEnqueue. These tests attempt to catch
+// such errors. They are separated from the other strategy tests because no real user code should ever do anything like
+// this.
+//
+// There is no such issue with writableStrategy size() because it is never called from within TransformStream
+// algorithms.
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+promise_test(() => {
+ let controller;
+ let calls = 0;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ }, undefined, {
+ size() {
+ ++calls;
+ if (calls < 2) {
+ controller.enqueue('b');
+ }
+ return 1;
+ },
+ highWaterMark: Infinity
+ });
+ const writer = ts.writable.getWriter();
+ return Promise.all([writer.write('a'), writer.close()])
+ .then(() => readableStreamToArray(ts.readable))
+ .then(array => assert_array_equals(array, ['b', 'a'], 'array should contain two chunks'));
+}, 'enqueue() inside size() should work');
+
+promise_test(() => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ }, undefined, {
+ size() {
+ // The readable queue is empty.
+ controller.terminate();
+ // The readable state has gone from "readable" to "closed".
+ return 1;
+ // This chunk will be enqueued, but will be impossible to read because the state is already "closed".
+ },
+ highWaterMark: Infinity
+ });
+ const writer = ts.writable.getWriter();
+ return writer.write('a')
+ .then(() => readableStreamToArray(ts.readable))
+ .then(array => assert_array_equals(array, [], 'array should contain no chunks'));
+ // The chunk 'a' is still in readable's queue. readable is closed so 'a' cannot be read. writable's queue is empty and
+ // it is still writable.
+}, 'terminate() inside size() should work');
+
+promise_test(t => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ }, undefined, {
+ size() {
+ controller.error(error1);
+ return 1;
+ },
+ highWaterMark: Infinity
+ });
+ const writer = ts.writable.getWriter();
+ return writer.write('a')
+ .then(() => promise_rejects_exactly(t, error1, ts.readable.getReader().read(), 'read() should reject'));
+}, 'error() inside size() should work');
+
+promise_test(() => {
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ }, undefined, {
+ size() {
+ assert_equals(controller.desiredSize, 1, 'desiredSize should be 1');
+ return 1;
+ },
+ highWaterMark: 1
+ });
+ const writer = ts.writable.getWriter();
+ return Promise.all([writer.write('a'), writer.close()])
+ .then(() => readableStreamToArray(ts.readable))
+ .then(array => assert_array_equals(array, ['a'], 'array should contain one chunk'));
+}, 'desiredSize inside size() should work');
+
+promise_test(t => {
+ let cancelPromise;
+ const ts = new TransformStream({}, undefined, {
+ size() {
+ cancelPromise = ts.readable.cancel(error1);
+ return 1;
+ },
+ highWaterMark: Infinity
+ });
+ const writer = ts.writable.getWriter();
+ return writer.write('a')
+ .then(() => {
+ promise_rejects_exactly(t, error1, writer.closed, 'writer.closed should reject');
+ return cancelPromise;
+ });
+}, 'readable cancel() inside size() should work');
+
+promise_test(() => {
+ let controller;
+ let pipeToPromise;
+ const ws = recordingWritableStream();
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ }, undefined, {
+ size() {
+ if (!pipeToPromise) {
+ pipeToPromise = ts.readable.pipeTo(ws);
+ }
+ return 1;
+ },
+ highWaterMark: 1
+ });
+ // Allow promise returned by start() to resolve so that enqueue() will happen synchronously.
+ return delay(0).then(() => {
+ controller.enqueue('a');
+ assert_not_equals(pipeToPromise, undefined);
+
+ // Some pipeTo() implementations need an additional chunk enqueued in order for the first one to be processed. See
+ // https://github.com/whatwg/streams/issues/794 for background.
+ controller.enqueue('a');
+
+ // Give pipeTo() a chance to process the queued chunks.
+ return delay(0);
+ }).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'a'], 'ws should contain two chunks');
+ controller.terminate();
+ return pipeToPromise;
+ }).then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'write', 'a', 'close'], 'target should have been closed');
+ });
+}, 'pipeTo() inside size() should work');
+
+promise_test(() => {
+ let controller;
+ let readPromise;
+ let calls = 0;
+ let reader;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ }, undefined, {
+ size() {
+ // This is triggered by controller.enqueue(). The queue is empty and there are no pending reads. pull() is called
+ // synchronously, allowing transform() to proceed asynchronously. This results in a second call to enqueue(),
+ // which resolves this pending read() without calling size() again.
+ readPromise = reader.read();
+ ++calls;
+ return 1;
+ },
+ highWaterMark: 0
+ });
+ reader = ts.readable.getReader();
+ const writer = ts.writable.getWriter();
+ let writeResolved = false;
+ const writePromise = writer.write('b').then(() => {
+ writeResolved = true;
+ });
+ return flushAsyncEvents().then(() => {
+ assert_false(writeResolved);
+ controller.enqueue('a');
+ assert_equals(calls, 1, 'size() should have been called once');
+ return delay(0);
+ }).then(() => {
+ assert_true(writeResolved);
+ assert_equals(calls, 1, 'size() should only be called once');
+ return readPromise;
+ }).then(({ value, done }) => {
+ assert_false(done, 'done should be false');
+ // See https://github.com/whatwg/streams/issues/794 for why this chunk is not 'a'.
+ assert_equals(value, 'b', 'chunk should have been read');
+ assert_equals(calls, 1, 'calls should still be 1');
+ return writePromise;
+ });
+}, 'read() inside of size() should work');
+
+promise_test(() => {
+ let writer;
+ let writePromise1;
+ let calls = 0;
+ const ts = new TransformStream({}, undefined, {
+ size() {
+ ++calls;
+ if (calls < 2) {
+ writePromise1 = writer.write('a');
+ }
+ return 1;
+ },
+ highWaterMark: Infinity
+ });
+ writer = ts.writable.getWriter();
+ // Give pull() a chance to be called.
+ return delay(0).then(() => {
+ // This write results in a synchronous call to transform(), enqueue(), and size().
+ const writePromise2 = writer.write('b');
+ assert_equals(calls, 1, 'size() should have been called once');
+ return Promise.all([writePromise1, writePromise2, writer.close()]);
+ }).then(() => {
+ assert_equals(calls, 2, 'size() should have been called twice');
+ return readableStreamToArray(ts.readable);
+ }).then(array => {
+ assert_array_equals(array, ['b', 'a'], 'both chunks should have been enqueued');
+ assert_equals(calls, 2, 'calls should still be 2');
+ });
+}, 'writer.write() inside size() should work');
+
+promise_test(() => {
+ let controller;
+ let writer;
+ let writePromise;
+ let calls = 0;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ }, undefined, {
+ size() {
+ ++calls;
+ if (calls < 2) {
+ writePromise = writer.write('a');
+ }
+ return 1;
+ },
+ highWaterMark: Infinity
+ });
+ writer = ts.writable.getWriter();
+ // Give pull() a chance to be called.
+ return delay(0).then(() => {
+ // This enqueue results in synchronous calls to size(), write(), transform() and enqueue().
+ controller.enqueue('b');
+ assert_equals(calls, 2, 'size() should have been called twice');
+ return Promise.all([writePromise, writer.close()]);
+ }).then(() => {
+ return readableStreamToArray(ts.readable);
+ }).then(array => {
+ // Because one call to enqueue() is nested inside the other, they finish in the opposite order that they were
+  // called, so the chunks end up in reverse order.
+ assert_array_equals(array, ['a', 'b'], 'both chunks should have been enqueued');
+ assert_equals(calls, 2, 'calls should still be 2');
+ });
+}, 'synchronous writer.write() inside size() should work');
+
+promise_test(() => {
+ let writer;
+ let closePromise;
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ }, undefined, {
+ size() {
+ closePromise = writer.close();
+ return 1;
+ },
+ highWaterMark: 1
+ });
+ writer = ts.writable.getWriter();
+ const reader = ts.readable.getReader();
+ // Wait for the promise returned by start() to be resolved so that the call to close() will result in a synchronous
+ // call to TransformStreamDefaultSink.
+ return delay(0).then(() => {
+ controller.enqueue('a');
+ return reader.read();
+ }).then(({ value, done }) => {
+ assert_false(done, 'done should be false');
+ assert_equals(value, 'a', 'value should be correct');
+ return reader.read();
+ }).then(({ done }) => {
+ assert_true(done, 'done should be true');
+ return closePromise;
+ });
+}, 'writer.close() inside size() should work');
+
+promise_test(t => {
+ let abortPromise;
+ let controller;
+ const ts = new TransformStream({
+ start(c) {
+ controller = c;
+ }
+ }, undefined, {
+ size() {
+ abortPromise = ts.writable.abort(error1);
+ return 1;
+ },
+ highWaterMark: 1
+ });
+ const reader = ts.readable.getReader();
+ // Wait for the promise returned by start() to be resolved so that the call to abort() will result in a synchronous
+ // call to TransformStreamDefaultSink.
+ return delay(0).then(() => {
+ controller.enqueue('a');
+ return reader.read();
+ }).then(({ value, done }) => {
+ assert_false(done, 'done should be false');
+ assert_equals(value, 'a', 'value should be correct');
+ return Promise.all([promise_rejects_exactly(t, error1, reader.read(), 'read() should reject'), abortPromise]);
+ });
+}, 'writer.abort() inside size() should work');
diff --git a/testing/web-platform/tests/streams/transform-streams/strategies.any.js b/testing/web-platform/tests/streams/transform-streams/strategies.any.js
new file mode 100644
index 0000000000..57e113e668
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/strategies.any.js
@@ -0,0 +1,150 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/recording-streams.js
+// META: script=../resources/test-utils.js
+'use strict';
+
+// Here we just test that the strategies are correctly passed to the readable and writable sides. We assume that
+// ReadableStream and WritableStream will correctly apply the strategies when they are being used by a TransformStream
+// and so it isn't necessary to repeat their tests here.
+
+test(() => {
+ const ts = new TransformStream({}, { highWaterMark: 17 });
+ assert_equals(ts.writable.getWriter().desiredSize, 17, 'desiredSize should be 17');
+}, 'writableStrategy highWaterMark should work');
+
+promise_test(() => {
+ const ts = recordingTransformStream({}, undefined, { highWaterMark: 9 });
+ const writer = ts.writable.getWriter();
+ for (let i = 0; i < 10; ++i) {
+ writer.write(i);
+ }
+ return delay(0).then(() => {
+ assert_array_equals(ts.events, [
+ 'transform', 0, 'transform', 1, 'transform', 2, 'transform', 3, 'transform', 4,
+ 'transform', 5, 'transform', 6, 'transform', 7, 'transform', 8],
+ 'transform() should have been called 9 times');
+ });
+}, 'readableStrategy highWaterMark should work');
+
+promise_test(t => {
+ let writableSizeCalled = false;
+ let readableSizeCalled = false;
+ let transformCalled = false;
+ const ts = new TransformStream(
+ {
+ transform(chunk, controller) {
+ t.step(() => {
+ transformCalled = true;
+ assert_true(writableSizeCalled, 'writableStrategy.size() should have been called');
+ assert_false(readableSizeCalled, 'readableStrategy.size() should not have been called');
+ controller.enqueue(chunk);
+ assert_true(readableSizeCalled, 'readableStrategy.size() should have been called');
+ });
+ }
+ },
+ {
+ size() {
+ writableSizeCalled = true;
+ return 1;
+ }
+ },
+ {
+ size() {
+ readableSizeCalled = true;
+ return 1;
+ },
+ highWaterMark: Infinity
+ });
+ return ts.writable.getWriter().write().then(() => {
+ assert_true(transformCalled, 'transform() should be called');
+ });
+}, 'writable should have the correct size() function');
+
+test(() => {
+ const ts = new TransformStream();
+ const writer = ts.writable.getWriter();
+ assert_equals(writer.desiredSize, 1, 'default writable HWM is 1');
+ writer.write(undefined);
+ assert_equals(writer.desiredSize, 0, 'default chunk size is 1');
+}, 'default writable strategy should be equivalent to { highWaterMark: 1 }');
+
+promise_test(t => {
+ const ts = new TransformStream({
+ transform(chunk, controller) {
+ return t.step(() => {
+ assert_equals(controller.desiredSize, 0, 'desiredSize should be 0');
+ controller.enqueue(undefined);
+ // The first chunk enqueued is consumed by the pending read().
+ assert_equals(controller.desiredSize, 0, 'desiredSize should still be 0');
+ controller.enqueue(undefined);
+ assert_equals(controller.desiredSize, -1, 'desiredSize should be -1');
+ });
+ }
+ });
+ const writePromise = ts.writable.getWriter().write();
+ return ts.readable.getReader().read().then(() => writePromise);
+}, 'default readable strategy should be equivalent to { highWaterMark: 0 }');
+
+test(() => {
+ assert_throws_js(RangeError, () => new TransformStream(undefined, { highWaterMark: -1 }),
+ 'should throw RangeError for negative writableHighWaterMark');
+ assert_throws_js(RangeError, () => new TransformStream(undefined, undefined, { highWaterMark: -1 }),
+ 'should throw RangeError for negative readableHighWaterMark');
+ assert_throws_js(RangeError, () => new TransformStream(undefined, { highWaterMark: NaN }),
+ 'should throw RangeError for NaN writableHighWaterMark');
+ assert_throws_js(RangeError, () => new TransformStream(undefined, undefined, { highWaterMark: NaN }),
+ 'should throw RangeError for NaN readableHighWaterMark');
+}, 'a RangeError should be thrown for an invalid highWaterMark');
+
+const objectThatConvertsTo42 = {
+ toString() {
+ return '42';
+ }
+};
+
+test(() => {
+ const ts = new TransformStream(undefined, { highWaterMark: objectThatConvertsTo42 });
+ const writer = ts.writable.getWriter();
+ assert_equals(writer.desiredSize, 42, 'writable HWM is 42');
+}, 'writableStrategy highWaterMark should be converted to a number');
+
+test(() => {
+ const ts = new TransformStream({
+ start(controller) {
+ assert_equals(controller.desiredSize, 42, 'desiredSize should be 42');
+ }
+ }, undefined, { highWaterMark: objectThatConvertsTo42 });
+}, 'readableStrategy highWaterMark should be converted to a number');
+
+promise_test(t => {
+ const ts = new TransformStream(undefined, undefined, {
+ size() { return NaN; },
+ highWaterMark: 1
+ });
+ const writer = ts.writable.getWriter();
+ return promise_rejects_js(t, RangeError, writer.write(), 'write should reject');
+}, 'a bad readableStrategy size function should cause writer.write() to reject on an identity transform');
+
+promise_test(t => {
+ const ts = new TransformStream({
+ transform(chunk, controller) {
+ // This assert has the important side-effect of catching the error, so transform() does not throw.
+ assert_throws_js(RangeError, () => controller.enqueue(chunk), 'enqueue should throw');
+ }
+ }, undefined, {
+ size() {
+ return -1;
+ },
+ highWaterMark: 1
+ });
+
+ const writer = ts.writable.getWriter();
+ return writer.write().then(() => {
+ return Promise.all([
+ promise_rejects_js(t, RangeError, writer.ready, 'ready should reject'),
+ promise_rejects_js(t, RangeError, writer.closed, 'closed should reject'),
+ promise_rejects_js(t, RangeError, ts.readable.getReader().closed, 'readable closed should reject')
+ ]);
+ });
+}, 'a bad readableStrategy size function should error the stream on enqueue even when transformer.transform() ' +
+ 'catches the exception');
diff --git a/testing/web-platform/tests/streams/transform-streams/terminate.any.js b/testing/web-platform/tests/streams/transform-streams/terminate.any.js
new file mode 100644
index 0000000000..a959e70fe6
--- /dev/null
+++ b/testing/web-platform/tests/streams/transform-streams/terminate.any.js
@@ -0,0 +1,100 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/recording-streams.js
+// META: script=../resources/test-utils.js
+'use strict';
+
+promise_test(t => {
+ const ts = recordingTransformStream({}, undefined, { highWaterMark: 0 });
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.enqueue(0);
+ }
+ });
+ let pipeToRejected = false;
+ const pipeToPromise = promise_rejects_js(t, TypeError, rs.pipeTo(ts.writable), 'pipeTo should reject').then(() => {
+ pipeToRejected = true;
+ });
+ return delay(0).then(() => {
+ assert_array_equals(ts.events, [], 'transform() should have seen no chunks');
+ assert_false(pipeToRejected, 'pipeTo() should not have rejected yet');
+ ts.controller.terminate();
+ return pipeToPromise;
+ }).then(() => {
+ assert_array_equals(ts.events, [], 'transform() should still have seen no chunks');
+ assert_true(pipeToRejected, 'pipeToRejected must be true');
+ });
+}, 'controller.terminate() should error pipeTo()');
+
+promise_test(t => {
+ const ts = recordingTransformStream({}, undefined, { highWaterMark: 1 });
+ const rs = new ReadableStream({
+ start(controller) {
+ controller.enqueue(0);
+ controller.enqueue(1);
+ }
+ });
+ const pipeToPromise = rs.pipeTo(ts.writable);
+ return delay(0).then(() => {
+ assert_array_equals(ts.events, ['transform', 0], 'transform() should have seen one chunk');
+ ts.controller.terminate();
+ return promise_rejects_js(t, TypeError, pipeToPromise, 'pipeTo() should reject');
+ }).then(() => {
+ assert_array_equals(ts.events, ['transform', 0], 'transform() should still have seen only one chunk');
+ });
+}, 'controller.terminate() should prevent remaining chunks from being processed');
+
+test(() => {
+ new TransformStream({
+ start(controller) {
+ controller.enqueue(0);
+ controller.terminate();
+ assert_throws_js(TypeError, () => controller.enqueue(1), 'enqueue should throw');
+ }
+ });
+}, 'controller.enqueue() should throw after controller.terminate()');
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+promise_test(t => {
+ const ts = new TransformStream({
+ start(controller) {
+ controller.enqueue(0);
+ controller.terminate();
+ controller.error(error1);
+ }
+ });
+ return Promise.all([
+ promise_rejects_js(t, TypeError, ts.writable.abort(), 'abort() should reject with a TypeError'),
+ promise_rejects_exactly(t, error1, ts.readable.cancel(), 'cancel() should reject with error1'),
+ promise_rejects_exactly(t, error1, ts.readable.getReader().closed, 'closed should reject with error1')
+ ]);
+}, 'controller.error() after controller.terminate() with queued chunk should error the readable');
+
+promise_test(t => {
+ const ts = new TransformStream({
+ start(controller) {
+ controller.terminate();
+ controller.error(error1);
+ }
+ });
+ return Promise.all([
+ promise_rejects_js(t, TypeError, ts.writable.abort(), 'abort() should reject with a TypeError'),
+ ts.readable.cancel(),
+ ts.readable.getReader().closed
+ ]);
+}, 'controller.error() after controller.terminate() without queued chunk should do nothing');
+
+promise_test(() => {
+ const ts = new TransformStream({
+ flush(controller) {
+ controller.terminate();
+ }
+ });
+ const writer = ts.writable.getWriter();
+ return Promise.all([
+ writer.close(),
+ writer.closed,
+ ts.readable.getReader().closed
+ ]);
+}, 'controller.terminate() inside flush() should not prevent writer.close() from succeeding');
diff --git a/testing/web-platform/tests/streams/writable-streams/aborting.any.js b/testing/web-platform/tests/streams/writable-streams/aborting.any.js
new file mode 100644
index 0000000000..9171dbe158
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/aborting.any.js
@@ -0,0 +1,1487 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+const error2 = new Error('error2');
+error2.name = 'error2';
+
+promise_test(t => {
+ const ws = new WritableStream({
+ write: t.unreached_func('write() should not be called')
+ });
+
+ const writer = ws.getWriter();
+ const writePromise = writer.write('a');
+
+ const readyPromise = writer.ready;
+
+ writer.abort(error1);
+
+ assert_equals(writer.ready, readyPromise, 'the ready promise property should not change');
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, readyPromise, 'the ready promise should reject with error1'),
+ promise_rejects_exactly(t, error1, writePromise, 'the write() promise should reject with error1')
+ ]);
+}, 'Aborting a WritableStream before it starts should cause the writer\'s unsettled ready promise to reject');
+
+promise_test(t => {
+ const ws = new WritableStream();
+
+ const writer = ws.getWriter();
+ writer.write('a');
+
+ const readyPromise = writer.ready;
+
+ return readyPromise.then(() => {
+ writer.abort(error1);
+
+ assert_not_equals(writer.ready, readyPromise, 'the ready promise property should change');
+ return promise_rejects_exactly(t, error1, writer.ready, 'the ready promise should reject with error1');
+ });
+}, 'Aborting a WritableStream should cause the writer\'s fulfilled ready promise to reset to a rejected one');
+
+promise_test(t => {
+ const ws = new WritableStream();
+ const writer = ws.getWriter();
+
+ writer.releaseLock();
+
+ return promise_rejects_js(t, TypeError, writer.abort(), 'abort() should reject with a TypeError');
+}, 'abort() on a released writer rejects');
+
+promise_test(t => {
+ const ws = recordingWritableStream();
+
+ return delay(0)
+ .then(() => {
+ const writer = ws.getWriter();
+
+ const abortPromise = writer.abort(error1);
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writer.write(1), 'write(1) must reject with error1'),
+ promise_rejects_exactly(t, error1, writer.write(2), 'write(2) must reject with error1'),
+ abortPromise
+ ]);
+ })
+ .then(() => {
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+}, 'Aborting a WritableStream immediately prevents future writes');
+
+promise_test(t => {
+ const ws = recordingWritableStream();
+ const results = [];
+
+ return delay(0)
+ .then(() => {
+ const writer = ws.getWriter();
+
+ results.push(
+ writer.write(1),
+ promise_rejects_exactly(t, error1, writer.write(2), 'write(2) must reject with error1'),
+ promise_rejects_exactly(t, error1, writer.write(3), 'write(3) must reject with error1')
+ );
+
+ const abortPromise = writer.abort(error1);
+
+ results.push(
+ promise_rejects_exactly(t, error1, writer.write(4), 'write(4) must reject with error1'),
+ promise_rejects_exactly(t, error1, writer.write(5), 'write(5) must reject with error1')
+ );
+
+ return abortPromise;
+ }).then(() => {
+ assert_array_equals(ws.events, ['write', 1, 'abort', error1]);
+
+ return Promise.all(results);
+ });
+}, 'Aborting a WritableStream prevents further writes after any that are in progress');
+
+promise_test(() => {
+ const ws = new WritableStream({
+ abort() {
+ return 'Hello';
+ }
+ });
+ const writer = ws.getWriter();
+
+ return writer.abort('a').then(value => {
+ assert_equals(value, undefined, 'fulfillment value must be undefined');
+ });
+}, 'Fulfillment value of writer.abort() call must be undefined even if the underlying sink returns a non-undefined ' +
+ 'value');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ abort() {
+ throw error1;
+ }
+ });
+ const writer = ws.getWriter();
+
+ return promise_rejects_exactly(t, error1, writer.abort(undefined),
+ 'rejection reason of abortPromise must be the error thrown by abort');
+}, 'WritableStream if sink\'s abort throws, the promise returned by writer.abort() rejects');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ abort() {
+ throw error1;
+ }
+ });
+ const writer = ws.getWriter();
+
+ const abortPromise1 = writer.abort(undefined);
+ const abortPromise2 = writer.abort(undefined);
+
+ assert_equals(abortPromise1, abortPromise2, 'the promises must be the same');
+
+ return promise_rejects_exactly(t, error1, abortPromise1, 'promise must have matching rejection');
+}, 'WritableStream if sink\'s abort throws, the promise returned by multiple writer.abort()s is the same and rejects');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ abort() {
+ throw error1;
+ }
+ });
+
+ return promise_rejects_exactly(t, error1, ws.abort(undefined),
+ 'rejection reason of abortPromise must be the error thrown by abort');
+}, 'WritableStream if sink\'s abort throws, the promise returned by ws.abort() rejects');
+
+promise_test(t => {
+ let resolveWritePromise;
+ const ws = new WritableStream({
+ write() {
+ return new Promise(resolve => {
+ resolveWritePromise = resolve;
+ });
+ },
+ abort() {
+ throw error1;
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ writer.write().catch(() => {});
+ return flushAsyncEvents().then(() => {
+ const abortPromise = writer.abort(undefined);
+
+ resolveWritePromise();
+ return promise_rejects_exactly(t, error1, abortPromise,
+ 'rejection reason of abortPromise must be the error thrown by abort');
+ });
+}, 'WritableStream if sink\'s abort throws, for an abort performed during a write, the promise returned by ' +
+ 'ws.abort() rejects');
+
+promise_test(() => {
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+
+ return writer.abort(error1).then(() => {
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+}, 'Aborting a WritableStream passes through the given reason');
+
+promise_test(t => {
+ const ws = new WritableStream();
+ const writer = ws.getWriter();
+
+ const abortPromise = writer.abort(error1);
+
+ const events = [];
+ writer.ready.catch(() => {
+ events.push('ready');
+ });
+ writer.closed.catch(() => {
+ events.push('closed');
+ });
+
+ return Promise.all([
+ abortPromise,
+ promise_rejects_exactly(t, error1, writer.write(), 'writing should reject with error1'),
+ promise_rejects_exactly(t, error1, writer.close(), 'closing should reject with error1'),
+ promise_rejects_exactly(t, error1, writer.ready, 'ready should reject with error1'),
+ promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1')
+ ]).then(() => {
+ assert_array_equals(['ready', 'closed'], events, 'ready should reject before closed');
+ });
+}, 'Aborting a WritableStream puts it in an errored state with the error passed to abort()');
+
+promise_test(t => {
+ const ws = new WritableStream();
+ const writer = ws.getWriter();
+
+ const writePromise = promise_rejects_exactly(t, error1, writer.write('a'),
+ 'writing should reject with error1');
+
+ writer.abort(error1);
+
+ return writePromise;
+}, 'Aborting a WritableStream causes any outstanding write() promises to be rejected with the reason supplied');
+
+promise_test(t => {
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+
+ const closePromise = writer.close();
+ const abortPromise = writer.abort(error1);
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1'),
+ promise_rejects_exactly(t, error1, closePromise, 'close() should reject with error1'),
+ abortPromise
+ ]).then(() => {
+ assert_array_equals(ws.events, ['abort', error1]);
+ });
+}, 'Closing but then immediately aborting a WritableStream causes the stream to error');
+
+promise_test(() => {
+ let resolveClose;
+ const ws = new WritableStream({
+ close() {
+ return new Promise(resolve => {
+ resolveClose = resolve;
+ });
+ }
+ });
+ const writer = ws.getWriter();
+
+ const closePromise = writer.close();
+
+ return delay(0).then(() => {
+ const abortPromise = writer.abort(error1);
+ resolveClose();
+ return Promise.all([
+ writer.closed,
+ abortPromise,
+ closePromise
+ ]);
+ });
+}, 'Closing a WritableStream and aborting it while it closes causes the stream to ignore the abort attempt');
+
+promise_test(() => {
+ const ws = new WritableStream();
+ const writer = ws.getWriter();
+
+ writer.close();
+
+ return delay(0).then(() => writer.abort());
+}, 'Aborting a WritableStream after it is closed is a no-op');
+
+promise_test(t => {
+ // Testing that per https://github.com/whatwg/streams/issues/620#issuecomment-263483953 the fallback to close was
+ // removed.
+
+ // Cannot use recordingWritableStream since it always has an abort
+ let closeCalled = false;
+ const ws = new WritableStream({
+ close() {
+ closeCalled = true;
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ writer.abort(error1);
+
+ return promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1').then(() => {
+ assert_false(closeCalled, 'close must not have been called');
+ });
+}, 'WritableStream should NOT call underlying sink\'s close if no abort is supplied (historical)');
+
+promise_test(() => {
+ let thenCalled = false;
+ const ws = new WritableStream({
+ abort() {
+ return {
+ then(onFulfilled) {
+ thenCalled = true;
+ onFulfilled();
+ }
+ };
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.abort().then(() => assert_true(thenCalled, 'then() should be called'));
+}, 'returning a thenable from abort() should work');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ write() {
+ return flushAsyncEvents();
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const writePromise = writer.write('a');
+ writer.abort(error1);
+ let closedRejected = false;
+ return Promise.all([
+ writePromise.then(() => assert_false(closedRejected, '.closed should not resolve before write()')),
+ promise_rejects_exactly(t, error1, writer.closed, '.closed should reject').then(() => {
+ closedRejected = true;
+ })
+ ]);
+ });
+}, '.closed should not resolve before fulfilled write()');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ write() {
+ return Promise.reject(error1);
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const writePromise = writer.write('a');
+ const abortPromise = writer.abort(error2);
+ let closedRejected = false;
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writePromise, 'write() should reject')
+ .then(() => assert_false(closedRejected, '.closed should not resolve before write()')),
+ promise_rejects_exactly(t, error2, writer.closed, '.closed should reject')
+ .then(() => {
+ closedRejected = true;
+ }),
+ abortPromise
+ ]);
+ });
+}, '.closed should not resolve before rejected write(); write() error should not overwrite abort() error');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ write() {
+ return flushAsyncEvents();
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 4 }));
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const settlementOrder = [];
+ return Promise.all([
+ writer.write('1').then(() => settlementOrder.push(1)),
+ promise_rejects_exactly(t, error1, writer.write('2'), 'first queued write should be rejected')
+ .then(() => settlementOrder.push(2)),
+ promise_rejects_exactly(t, error1, writer.write('3'), 'second queued write should be rejected')
+ .then(() => settlementOrder.push(3)),
+ writer.abort(error1)
+ ]).then(() => assert_array_equals([1, 2, 3], settlementOrder, 'writes should be satisfied in order'));
+ });
+}, 'writes should be satisfied in order when aborting');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ write() {
+ return Promise.reject(error1);
+ }
+ }, new CountQueuingStrategy({ highWaterMark: 4 }));
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const settlementOrder = [];
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writer.write('1'), 'in-flight write should be rejected')
+ .then(() => settlementOrder.push(1)),
+ promise_rejects_exactly(t, error2, writer.write('2'), 'first queued write should be rejected')
+ .then(() => settlementOrder.push(2)),
+ promise_rejects_exactly(t, error2, writer.write('3'), 'second queued write should be rejected')
+ .then(() => settlementOrder.push(3)),
+ writer.abort(error2)
+ ]).then(() => assert_array_equals([1, 2, 3], settlementOrder, 'writes should be satisfied in order'));
+ });
+}, 'writes should be satisfied in order after rejected write when aborting');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ write() {
+ return Promise.reject(error1);
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writer.write('a'), 'writer.write() should reject with error from underlying write()'),
+ promise_rejects_exactly(t, error2, writer.close(),
+ 'writer.close() should reject with error from underlying write()'),
+ writer.abort(error2)
+ ]);
+ });
+}, 'close() should reject with abort reason when abort() is first error');
+
+promise_test(() => {
+ let resolveWrite;
+ const ws = recordingWritableStream({
+ write() {
+ return new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ }
+ });
+
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ writer.write('a');
+ const abortPromise = writer.abort('b');
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ws.events, ['write', 'a'], 'abort should not be called while write is in-flight');
+ resolveWrite();
+ return abortPromise.then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'abort', 'b'], 'abort should be called after the write finishes');
+ });
+ });
+ });
+}, 'underlying abort() should not be called until underlying write() completes');
+
+promise_test(() => {
+ let resolveClose;
+ const ws = recordingWritableStream({
+ close() {
+ return new Promise(resolve => {
+ resolveClose = resolve;
+ });
+ }
+ });
+
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ writer.close();
+ const abortPromise = writer.abort();
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ws.events, ['close'], 'abort should not be called while close is in-flight');
+ resolveClose();
+ return abortPromise.then(() => {
+ assert_array_equals(ws.events, ['close'], 'abort should not be called');
+ });
+ });
+ });
+}, 'underlying abort() should not be called if underlying close() has started');
+
+promise_test(t => {
+ let rejectClose;
+ let abortCalled = false;
+ const ws = new WritableStream({
+ close() {
+ return new Promise((resolve, reject) => {
+ rejectClose = reject;
+ });
+ },
+ abort() {
+ abortCalled = true;
+ }
+ });
+
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const closePromise = writer.close();
+ const abortPromise = writer.abort();
+ return flushAsyncEvents().then(() => {
+ assert_false(abortCalled, 'underlying abort should not be called while close is in-flight');
+ rejectClose(error1);
+ return promise_rejects_exactly(t, error1, abortPromise, 'abort should reject with the same reason').then(() => {
+ return promise_rejects_exactly(t, error1, closePromise, 'close should reject with the same reason');
+ }).then(() => {
+ assert_false(abortCalled, 'underlying abort should not be called after close completes');
+ });
+ });
+ });
+}, 'if underlying close() has started and then rejects, the abort() and close() promises should reject with the ' +
+ 'underlying close rejection reason');
+
+promise_test(t => {
+ let resolveWrite;
+ const ws = recordingWritableStream({
+ write() {
+ return new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ }
+ });
+
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ writer.write('a');
+ const closePromise = writer.close();
+ const abortPromise = writer.abort(error1);
+
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ws.events, ['write', 'a'], 'abort should not be called while write is in-flight');
+ resolveWrite();
+ return abortPromise.then(() => {
+ assert_array_equals(ws.events, ['write', 'a', 'abort', error1], 'abort should be called after write completes');
+ return promise_rejects_exactly(t, error1, closePromise, 'promise returned by close() should be rejected');
+ });
+ });
+ });
+}, 'an abort() that happens during a write() should trigger the underlying abort() even with a close() queued');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ write() {
+ return new Promise(() => {});
+ }
+ });
+
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ writer.write('a');
+ writer.abort(error1);
+ writer.releaseLock();
+ const writer2 = ws.getWriter();
+ return promise_rejects_exactly(t, error1, writer2.ready,
+ 'ready of the second writer should be rejected with error1');
+ });
+}, 'if a writer is created for a stream with a pending abort, its ready should be rejected with the abort error');
+
+promise_test(() => {
+ const ws = new WritableStream();
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const closePromise = writer.close();
+ const abortPromise = writer.abort();
+ const events = [];
+ return Promise.all([
+ closePromise.then(() => { events.push('close'); }),
+ abortPromise.then(() => { events.push('abort'); })
+ ]).then(() => {
+ assert_array_equals(events, ['close', 'abort']);
+ });
+ });
+}, 'writer close() promise should resolve before abort() promise');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ write(chunk, controller) {
+ controller.error(error1);
+ return new Promise(() => {});
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ writer.write('a');
+ return promise_rejects_exactly(t, error1, writer.ready, 'writer.ready should reject');
+ });
+}, 'writer.ready should reject on controller error without waiting for underlying write');
+
+promise_test(t => {
+ let rejectWrite;
+ const ws = new WritableStream({
+ write() {
+ return new Promise((resolve, reject) => {
+ rejectWrite = reject;
+ });
+ }
+ });
+
+ let writePromise;
+ let abortPromise;
+
+ const events = [];
+
+ const writer = ws.getWriter();
+
+ writer.closed.catch(() => {
+ events.push('closed');
+ });
+
+ // Wait for ws to start
+ return flushAsyncEvents().then(() => {
+ writePromise = writer.write('a');
+ writePromise.catch(() => {
+ events.push('writePromise');
+ });
+
+ abortPromise = writer.abort(error1);
+ abortPromise.then(() => {
+ events.push('abortPromise');
+ });
+
+ const writePromise2 = writer.write('a');
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writePromise2, 'writePromise2 must reject with the error from abort'),
+ promise_rejects_exactly(t, error1, writer.ready, 'writer.ready must reject with the error from abort'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(events, [], 'writePromise, abortPromise and writer.closed must not be rejected yet');
+
+ rejectWrite(error2);
+
+ return Promise.all([
+ promise_rejects_exactly(t, error2, writePromise,
+ 'writePromise must reject with the error returned from the sink\'s write method'),
+ abortPromise,
+ promise_rejects_exactly(t, error1, writer.closed,
+ 'writer.closed must reject with the error from abort'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(events, ['writePromise', 'abortPromise', 'closed'],
+ 'writePromise, abortPromise and writer.closed must settle');
+
+ const writePromise3 = writer.write('a');
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writePromise3,
+ 'writePromise3 must reject with the error from abort'),
+ promise_rejects_exactly(t, error1, writer.ready,
+ 'writer.ready must be still rejected with the error indicating abort')
+ ]);
+ }).then(() => {
+ writer.releaseLock();
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, writer.ready,
+ 'writer.ready must be rejected with an error indicating release'),
+ promise_rejects_js(t, TypeError, writer.closed,
+ 'writer.closed must be rejected with an error indicating release')
+ ]);
+ });
+}, 'writer.abort() while there is an in-flight write, and then finish the write with rejection');
+
+promise_test(t => {
+ let resolveWrite;
+ let controller;
+ const ws = new WritableStream({
+ write(chunk, c) {
+ controller = c;
+ return new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ }
+ });
+
+ let writePromise;
+ let abortPromise;
+
+ const events = [];
+
+ const writer = ws.getWriter();
+
+ writer.closed.catch(() => {
+ events.push('closed');
+ });
+
+ // Wait for ws to start
+ return flushAsyncEvents().then(() => {
+ writePromise = writer.write('a');
+ writePromise.then(() => {
+ events.push('writePromise');
+ });
+
+ abortPromise = writer.abort(error1);
+ abortPromise.then(() => {
+ events.push('abortPromise');
+ });
+
+ const writePromise2 = writer.write('a');
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writePromise2, 'writePromise2 must reject with the error from abort'),
+ promise_rejects_exactly(t, error1, writer.ready, 'writer.ready must reject with the error from abort'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(events, [], 'writePromise, abortPromise and writer.closed must not be fulfilled/rejected yet');
+
+ // This error is too late to change anything. abort() has already changed the stream state to 'erroring'.
+ controller.error(error2);
+
+ const writePromise3 = writer.write('a');
+
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writePromise3,
+ 'writePromise3 must reject with the error from abort'),
+ promise_rejects_exactly(t, error1, writer.ready,
+ 'writer.ready must be still rejected with the error indicating abort'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(
+ events, [],
+ 'writePromise, abortPromise and writer.closed must not be fulfilled/rejected yet even after ' +
+ 'controller.error() call');
+
+ resolveWrite();
+
+ return Promise.all([
+ writePromise,
+ abortPromise,
+ promise_rejects_exactly(t, error1, writer.closed,
+ 'writer.closed must reject with the error from abort'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(events, ['writePromise', 'abortPromise', 'closed'],
+ 'writePromise, abortPromise and writer.closed must settle');
+
+ const writePromise4 = writer.write('a');
+
+ return Promise.all([
+ writePromise,
+ promise_rejects_exactly(t, error1, writePromise4,
+ 'writePromise4 must reject with the error from abort'),
+ promise_rejects_exactly(t, error1, writer.ready,
+ 'writer.ready must be still rejected with the error indicating abort')
+ ]);
+ }).then(() => {
+ writer.releaseLock();
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, writer.ready,
+ 'writer.ready must be rejected with an error indicating release'),
+ promise_rejects_js(t, TypeError, writer.closed,
+ 'writer.closed must be rejected with an error indicating release')
+ ]);
+ });
+}, 'writer.abort(), controller.error() while there is an in-flight write, and then finish the write');
+
+promise_test(t => {
+ let resolveClose;
+ let controller;
+ const ws = new WritableStream({
+ start(c) {
+ controller = c;
+ },
+ close() {
+ return new Promise(resolve => {
+ resolveClose = resolve;
+ });
+ }
+ });
+
+ let closePromise;
+ let abortPromise;
+
+ const events = [];
+
+ const writer = ws.getWriter();
+
+ writer.closed.then(() => {
+ events.push('closed');
+ });
+
+ // Wait for ws to start
+ return flushAsyncEvents().then(() => {
+ closePromise = writer.close();
+ closePromise.then(() => {
+ events.push('closePromise');
+ });
+
+ abortPromise = writer.abort(error1);
+ abortPromise.then(() => {
+ events.push('abortPromise');
+ });
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, writer.close(),
+ 'writer.close() must reject with an error indicating already closing'),
+ promise_rejects_exactly(t, error1, writer.ready, 'writer.ready must reject with the error from abort'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(events, [], 'closePromise, abortPromise and writer.closed must not be fulfilled/rejected yet');
+
+ controller.error(error2);
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, writer.close(),
+ 'writer.close() must reject with an error indicating already closing'),
+ promise_rejects_exactly(t, error1, writer.ready,
+ 'writer.ready must be still rejected with the error indicating abort'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(
+ events, [],
+ 'closePromise, abortPromise and writer.closed must not be fulfilled/rejected yet even after ' +
+ 'controller.error() call');
+
+ resolveClose();
+
+ return Promise.all([
+ closePromise,
+ abortPromise,
+ writer.closed,
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'],
+      'closePromise, abortPromise and writer.closed must fulfill');
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, writer.close(),
+ 'writer.close() must reject with an error indicating already closing'),
+ promise_rejects_exactly(t, error1, writer.ready,
+ 'writer.ready must be still rejected with the error indicating abort')
+ ]);
+ }).then(() => {
+ writer.releaseLock();
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, writer.close(),
+ 'writer.close() must reject with an error indicating release'),
+ promise_rejects_js(t, TypeError, writer.ready,
+ 'writer.ready must be rejected with an error indicating release'),
+ promise_rejects_js(t, TypeError, writer.closed,
+ 'writer.closed must be rejected with an error indicating release')
+ ]);
+ });
+}, 'writer.abort(), controller.error() while there is an in-flight close, and then finish the close');
+
+promise_test(t => {
+ let resolveWrite;
+ let controller;
+ const ws = recordingWritableStream({
+ write(chunk, c) {
+ controller = c;
+ return new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ }
+ });
+
+ let writePromise;
+ let abortPromise;
+
+ const events = [];
+
+ const writer = ws.getWriter();
+
+ writer.closed.catch(() => {
+ events.push('closed');
+ });
+
+ // Wait for ws to start
+ return flushAsyncEvents().then(() => {
+ writePromise = writer.write('a');
+ writePromise.then(() => {
+ events.push('writePromise');
+ });
+
+ controller.error(error2);
+
+ const writePromise2 = writer.write('a');
+
+ return Promise.all([
+ promise_rejects_exactly(t, error2, writePromise2,
+ 'writePromise2 must reject with the error passed to the controller\'s error method'),
+ promise_rejects_exactly(t, error2, writer.ready,
+ 'writer.ready must reject with the error passed to the controller\'s error method'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(events, [], 'writePromise and writer.closed must not be fulfilled/rejected yet');
+
+ abortPromise = writer.abort(error1);
+ abortPromise.catch(() => {
+ events.push('abortPromise');
+ });
+
+ const writePromise3 = writer.write('a');
+
+ return Promise.all([
+ promise_rejects_exactly(t, error2, writePromise3,
+ 'writePromise3 must reject with the error passed to the controller\'s error method'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(
+ events, [],
+ 'writePromise and writer.closed must not be fulfilled/rejected yet even after writer.abort()');
+
+ resolveWrite();
+
+ return Promise.all([
+ promise_rejects_exactly(t, error2, abortPromise,
+ 'abort() must reject with the error passed to the controller\'s error method'),
+ promise_rejects_exactly(t, error2, writer.closed,
+ 'writer.closed must reject with the error passed to the controller\'s error method'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(events, ['writePromise', 'abortPromise', 'closed'],
+ 'writePromise, abortPromise and writer.closed must fulfill/reject');
+ assert_array_equals(ws.events, ['write', 'a'], 'sink abort() should not be called');
+
+ const writePromise4 = writer.write('a');
+
+ return Promise.all([
+ writePromise,
+ promise_rejects_exactly(t, error2, writePromise4,
+ 'writePromise4 must reject with the error passed to the controller\'s error method'),
+ promise_rejects_exactly(t, error2, writer.ready,
+ 'writer.ready must be still rejected with the error passed to the controller\'s error method')
+ ]);
+ }).then(() => {
+ writer.releaseLock();
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, writer.ready,
+ 'writer.ready must be rejected with an error indicating release'),
+ promise_rejects_js(t, TypeError, writer.closed,
+ 'writer.closed must be rejected with an error indicating release')
+ ]);
+ });
+}, 'controller.error(), writer.abort() while there is an in-flight write, and then finish the write');
+
+promise_test(t => {
+ let resolveClose;
+ let controller;
+ const ws = new WritableStream({
+ start(c) {
+ controller = c;
+ },
+ close() {
+ return new Promise(resolve => {
+ resolveClose = resolve;
+ });
+ }
+ });
+
+ let closePromise;
+ let abortPromise;
+
+ const events = [];
+
+ const writer = ws.getWriter();
+
+ writer.closed.then(() => {
+ events.push('closed');
+ });
+
+ // Wait for ws to start
+ return flushAsyncEvents().then(() => {
+ closePromise = writer.close();
+ closePromise.then(() => {
+ events.push('closePromise');
+ });
+
+ controller.error(error2);
+
+ return flushAsyncEvents();
+ }).then(() => {
+ assert_array_equals(events, [], 'closePromise must not be fulfilled/rejected yet');
+
+ abortPromise = writer.abort(error1);
+ abortPromise.then(() => {
+ events.push('abortPromise');
+ });
+
+ return Promise.all([
+ promise_rejects_exactly(t, error2, writer.ready,
+ 'writer.ready must reject with the error passed to the controller\'s error method'),
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(
+ events, [],
+ 'closePromise and writer.closed must not be fulfilled/rejected yet even after writer.abort()');
+
+ resolveClose();
+
+ return Promise.all([
+ closePromise,
+ promise_rejects_exactly(t, error2, writer.ready,
+ 'writer.ready must be still rejected with the error passed to the controller\'s error method'),
+ writer.closed,
+ flushAsyncEvents()
+ ]);
+ }).then(() => {
+ assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'],
+ 'abortPromise, closePromise and writer.closed must fulfill/reject');
+ }).then(() => {
+ writer.releaseLock();
+
+ return Promise.all([
+ promise_rejects_js(t, TypeError, writer.ready,
+ 'writer.ready must be rejected with an error indicating release'),
+ promise_rejects_js(t, TypeError, writer.closed,
+ 'writer.closed must be rejected with an error indicating release')
+ ]);
+ });
+}, 'controller.error(), writer.abort() while there is an in-flight close, and then finish the close');
+
+promise_test(t => {
+ let resolveWrite;
+ const ws = new WritableStream({
+ write() {
+ return new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const writePromise = writer.write('a');
+ const closed = writer.closed;
+ const abortPromise = writer.abort();
+ writer.releaseLock();
+ resolveWrite();
+ return Promise.all([
+ writePromise,
+ abortPromise,
+ promise_rejects_js(t, TypeError, closed, 'closed should reject')]);
+ });
+}, 'releaseLock() while aborting should reject the original closed promise');
+
+// TODO(ricea): Consider removing this test if it is no longer useful.
+promise_test(t => {
+ let resolveWrite;
+ let resolveAbort;
+ let resolveAbortStarted;
+ const abortStarted = new Promise(resolve => {
+ resolveAbortStarted = resolve;
+ });
+ const ws = new WritableStream({
+ write() {
+ return new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ },
+ abort() {
+ resolveAbortStarted();
+ return new Promise(resolve => {
+ resolveAbort = resolve;
+ });
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const writePromise = writer.write('a');
+ const closed = writer.closed;
+ const abortPromise = writer.abort();
+ resolveWrite();
+ return abortStarted.then(() => {
+ writer.releaseLock();
+ assert_equals(writer.closed, closed, 'closed promise should not have changed');
+ resolveAbort();
+ return Promise.all([
+ writePromise,
+ abortPromise,
+ promise_rejects_js(t, TypeError, closed, 'closed should reject')]);
+ });
+ });
+}, 'releaseLock() during delayed async abort() should reject the writer.closed promise');
+
+promise_test(() => {
+ let resolveStart;
+ const ws = recordingWritableStream({
+ start() {
+ return new Promise(resolve => {
+ resolveStart = resolve;
+ });
+ }
+ });
+ const abortPromise = ws.abort('done');
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ws.events, [], 'abort() should not be called during start()');
+ resolveStart();
+ return abortPromise.then(() => {
+ assert_array_equals(ws.events, ['abort', 'done'], 'abort() should be called after start() is done');
+ });
+ });
+}, 'sink abort() should not be called until sink start() is done');
+
+promise_test(() => {
+ let resolveStart;
+ let controller;
+ const ws = recordingWritableStream({
+ start(c) {
+ controller = c;
+ return new Promise(resolve => {
+ resolveStart = resolve;
+ });
+ }
+ });
+ const abortPromise = ws.abort('done');
+ controller.error(error1);
+ resolveStart();
+ return abortPromise.then(() =>
+ assert_array_equals(ws.events, ['abort', 'done'],
+ 'abort() should still be called if start() errors the controller'));
+}, 'if start attempts to error the controller after abort() has been called, then it should lose');
+
+promise_test(() => {
+ const ws = recordingWritableStream({
+ start() {
+ return Promise.reject(error1);
+ }
+ });
+ return ws.abort('done').then(() =>
+ assert_array_equals(ws.events, ['abort', 'done'], 'abort() should still be called if start() rejects'));
+}, 'stream abort() promise should still resolve if sink start() rejects');
+
+promise_test(t => {
+ const ws = new WritableStream();
+ const writer = ws.getWriter();
+ const writerReady1 = writer.ready;
+ writer.abort(error1);
+ const writerReady2 = writer.ready;
+ assert_not_equals(writerReady1, writerReady2, 'abort() should replace the ready promise with a rejected one');
+ return Promise.all([writerReady1,
+ promise_rejects_exactly(t, error1, writerReady2, 'writerReady2 should reject')]);
+}, 'writer abort() during sink start() should replace the writer.ready promise synchronously');
+
+promise_test(t => {
+ const events = [];
+ const ws = recordingWritableStream();
+ const writer = ws.getWriter();
+ const writePromise1 = writer.write(1);
+ const abortPromise = writer.abort(error1);
+ const writePromise2 = writer.write(2);
+ const closePromise = writer.close();
+ writePromise1.catch(() => events.push('write1'));
+ abortPromise.then(() => events.push('abort'));
+ writePromise2.catch(() => events.push('write2'));
+ closePromise.catch(() => events.push('close'));
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writePromise1, 'first write() should reject'),
+ abortPromise,
+ promise_rejects_exactly(t, error1, writePromise2, 'second write() should reject'),
+ promise_rejects_exactly(t, error1, closePromise, 'close() should reject')
+ ])
+ .then(() => {
+ assert_array_equals(events, ['write2', 'write1', 'abort', 'close'],
+ 'promises should resolve in the standard order');
+ assert_array_equals(ws.events, ['abort', error1], 'underlying sink write() should not be called');
+ });
+}, 'promises returned from other writer methods should be rejected when writer abort() happens during sink start()');
+
+promise_test(t => {
+ let writeReject;
+ let controller;
+ const ws = new WritableStream({
+ write(chunk, c) {
+ controller = c;
+ return new Promise((resolve, reject) => {
+ writeReject = reject;
+ });
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const writePromise = writer.write('a');
+ const abortPromise = writer.abort();
+ controller.error(error1);
+ writeReject(error2);
+ return Promise.all([
+ promise_rejects_exactly(t, error2, writePromise, 'write() should reject with error2'),
+ abortPromise
+ ]);
+ });
+}, 'abort() should succeed despite rejection from write');
+
+promise_test(t => {
+ let closeReject;
+ let controller;
+ const ws = new WritableStream({
+ start(c) {
+ controller = c;
+ },
+ close() {
+ return new Promise((resolve, reject) => {
+ closeReject = reject;
+ });
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const closePromise = writer.close();
+ const abortPromise = writer.abort();
+ controller.error(error1);
+ closeReject(error2);
+ return Promise.all([
+ promise_rejects_exactly(t, error2, closePromise, 'close() should reject with error2'),
+ promise_rejects_exactly(t, error2, abortPromise, 'abort() should reject with error2')
+ ]);
+ });
+}, 'abort() should be rejected with the rejection returned from close()');
+
+promise_test(t => {
+ let rejectWrite;
+ const ws = recordingWritableStream({
+ write() {
+ return new Promise((resolve, reject) => {
+ rejectWrite = reject;
+ });
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const writePromise = writer.write('1');
+ const abortPromise = writer.abort(error2);
+ rejectWrite(error1);
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writePromise, 'write should reject'),
+ abortPromise,
+ promise_rejects_exactly(t, error2, writer.closed, 'closed should reject with error2')
+ ]);
+ }).then(() => {
+ assert_array_equals(ws.events, ['write', '1', 'abort', error2], 'abort sink method should be called');
+ });
+}, 'a rejecting sink.write() should not prevent sink.abort() from being called');
+
+promise_test(() => {
+ const ws = recordingWritableStream({
+ start() {
+ return Promise.reject(error1);
+ }
+ });
+ return ws.abort(error2)
+ .then(() => {
+ assert_array_equals(ws.events, ['abort', error2]);
+ });
+}, 'when start errors after stream abort(), underlying sink abort() should be called anyway');
+
+promise_test(() => {
+ const ws = new WritableStream();
+ const abortPromise1 = ws.abort();
+ const abortPromise2 = ws.abort();
+ assert_equals(abortPromise1, abortPromise2, 'the promises must be the same');
+
+ return abortPromise1.then(
+ v => assert_equals(v, undefined, 'abort() should fulfill with undefined'));
+}, 'when calling abort() twice on the same stream, both should give the same promise that fulfills with undefined');
+
+promise_test(() => {
+ const ws = new WritableStream();
+ const abortPromise1 = ws.abort();
+
+ return abortPromise1.then(v1 => {
+ assert_equals(v1, undefined, 'first abort() should fulfill with undefined');
+
+ const abortPromise2 = ws.abort();
+ assert_not_equals(abortPromise2, abortPromise1, 'because we waited, the second promise should be a new promise');
+
+ return abortPromise2.then(v2 => {
+ assert_equals(v2, undefined, 'second abort() should fulfill with undefined');
+ });
+ });
+}, 'when calling abort() twice on the same stream, but sequentially so there\'s no pending abort the second time, ' +
+ 'both should fulfill with undefined');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ start(c) {
+ c.error(error1);
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ return promise_rejects_exactly(t, error1, writer.closed, 'writer.closed should reject').then(() => {
+ return writer.abort().then(
+ v => assert_equals(v, undefined, 'abort() should fulfill with undefined'));
+ });
+}, 'calling abort() on an errored stream should fulfill with undefined');
+
+promise_test(t => {
+ let controller;
+ let resolveWrite;
+ const ws = recordingWritableStream({
+ start(c) {
+ controller = c;
+ },
+ write() {
+ return new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ }
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const writePromise = writer.write('chunk');
+ controller.error(error1);
+ const abortPromise = writer.abort(error2);
+ resolveWrite();
+ return Promise.all([
+ writePromise,
+ promise_rejects_exactly(t, error1, abortPromise, 'abort() should reject')
+ ]).then(() => {
+ assert_array_equals(ws.events, ['write', 'chunk'], 'sink abort() should not be called');
+ });
+ });
+}, 'sink abort() should not be called if stream was erroring due to controller.error() before abort() was called');
+
+promise_test(t => {
+ let resolveWrite;
+ let size = 1;
+ const ws = recordingWritableStream({
+ write() {
+ return new Promise(resolve => {
+ resolveWrite = resolve;
+ });
+ }
+ }, {
+ size() {
+ return size;
+ },
+ highWaterMark: 1
+ });
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ const writePromise1 = writer.write('chunk1');
+ size = NaN;
+ const writePromise2 = writer.write('chunk2');
+ const abortPromise = writer.abort(error2);
+ resolveWrite();
+ return Promise.all([
+ writePromise1,
+ promise_rejects_js(t, RangeError, writePromise2, 'second write() should reject'),
+ promise_rejects_js(t, RangeError, abortPromise, 'abort() should reject')
+ ]).then(() => {
+ assert_array_equals(ws.events, ['write', 'chunk1'], 'sink abort() should not be called');
+ });
+ });
+}, 'sink abort() should not be called if stream was erroring due to bad strategy before abort() was called');
+
+promise_test(t => {
+ const ws = new WritableStream();
+ return ws.abort().then(() => {
+ const writer = ws.getWriter();
+ return writer.closed.then(t.unreached_func('closed promise should not fulfill'),
+ e => assert_equals(e, undefined, 'e should be undefined'));
+ });
+}, 'abort with no arguments should set the stored error to undefined');
+
+promise_test(t => {
+ const ws = new WritableStream();
+ return ws.abort(undefined).then(() => {
+ const writer = ws.getWriter();
+ return writer.closed.then(t.unreached_func('closed promise should not fulfill'),
+ e => assert_equals(e, undefined, 'e should be undefined'));
+ });
+}, 'abort with an undefined argument should set the stored error to undefined');
+
+promise_test(t => {
+ const ws = new WritableStream();
+ return ws.abort('string argument').then(() => {
+ const writer = ws.getWriter();
+ return writer.closed.then(t.unreached_func('closed promise should not fulfill'),
+ e => assert_equals(e, 'string argument', 'e should be \'string argument\''));
+ });
+}, 'abort with a string argument should set the stored error to that argument');
+
+promise_test(t => {
+ const ws = new WritableStream();
+ const writer = ws.getWriter();
+ return promise_rejects_js(t, TypeError, ws.abort(), 'abort should reject')
+ .then(() => writer.ready);
+}, 'abort on a locked stream should reject');
+
+test(t => {
+ let ctrl;
+ const ws = new WritableStream({start(c) { ctrl = c; }});
+ const e = Error('hello');
+
+ assert_true(ctrl.signal instanceof AbortSignal);
+ assert_false(ctrl.signal.aborted);
+ assert_equals(ctrl.signal.reason, undefined, 'signal.reason before abort');
+ ws.abort(e);
+ assert_true(ctrl.signal.aborted);
+ assert_equals(ctrl.signal.reason, e);
+}, 'WritableStreamDefaultController.signal');
+
+promise_test(async t => {
+ let ctrl;
+ let resolve;
+ const called = new Promise(r => resolve = r);
+
+ const ws = new WritableStream({
+ start(c) { ctrl = c; },
+ write() { resolve(); return new Promise(() => {}); }
+ });
+ const writer = ws.getWriter();
+
+ writer.write(99);
+ await called;
+
+ assert_false(ctrl.signal.aborted);
+ assert_equals(ctrl.signal.reason, undefined, 'signal.reason before abort');
+ writer.abort();
+ assert_true(ctrl.signal.aborted);
+ assert_true(ctrl.signal.reason instanceof DOMException, 'signal.reason is a DOMException');
+ assert_equals(ctrl.signal.reason.name, 'AbortError', 'signal.reason is an AbortError');
+}, 'the abort signal is signalled synchronously - write');
+
+promise_test(async t => {
+ let ctrl;
+ let resolve;
+ const called = new Promise(r => resolve = r);
+
+ const ws = new WritableStream({
+ start(c) { ctrl = c; },
+ close() { resolve(); return new Promise(() => {}); }
+ });
+ const writer = ws.getWriter();
+
+ writer.close(99);
+ await called;
+
+ assert_false(ctrl.signal.aborted);
+ writer.abort();
+ assert_true(ctrl.signal.aborted);
+}, 'the abort signal is signalled synchronously - close');
+
+promise_test(async t => {
+ let ctrl;
+ const ws = new WritableStream({start(c) { ctrl = c; }});
+ const writer = ws.getWriter();
+
+ const e = TypeError();
+ ctrl.error(e);
+ await promise_rejects_exactly(t, e, writer.closed);
+ assert_false(ctrl.signal.aborted);
+}, 'the abort signal is not signalled on error');
+
+promise_test(async t => {
+ let ctrl;
+ const e = TypeError();
+ const ws = new WritableStream({
+ start(c) { ctrl = c; },
+ async write() { throw e; }
+ });
+ const writer = ws.getWriter();
+
+ await promise_rejects_exactly(t, e, writer.write('hello'), 'write result');
+ await promise_rejects_exactly(t, e, writer.closed, 'closed');
+ assert_false(ctrl.signal.aborted);
+}, 'the abort signal is not signalled on write failure');
+
+promise_test(async t => {
+ let ctrl;
+ const e = TypeError();
+ const ws = new WritableStream({
+ start(c) { ctrl = c; },
+ async close() { throw e; }
+ });
+ const writer = ws.getWriter();
+
+ await promise_rejects_exactly(t, e, writer.close(), 'close result');
+ await promise_rejects_exactly(t, e, writer.closed, 'closed');
+ assert_false(ctrl.signal.aborted);
+}, 'the abort signal is not signalled on close failure');
+
+promise_test(async t => {
+ let ctrl;
+ const e1 = SyntaxError();
+ const e2 = TypeError();
+ const ws = new WritableStream({
+ start(c) { ctrl = c; },
+ });
+
+ const writer = ws.getWriter();
+ ctrl.signal.addEventListener('abort', () => writer.abort(e2));
+ writer.abort(e1);
+ assert_true(ctrl.signal.aborted);
+
+ await promise_rejects_exactly(t, e2, writer.closed, 'closed');
+}, 'recursive abort() call');
diff --git a/testing/web-platform/tests/streams/writable-streams/bad-strategies.any.js b/testing/web-platform/tests/streams/writable-streams/bad-strategies.any.js
new file mode 100644
index 0000000000..a1ef079116
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/bad-strategies.any.js
@@ -0,0 +1,95 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+const error1 = new Error('a unique string');
+error1.name = 'error1';
+
+// Queuing-strategy validation: a throwing size getter is rethrown by the
+// constructor itself.
+test(() => {
+  assert_throws_exactly(error1, () => {
+    new WritableStream({}, {
+      get size() {
+        throw error1;
+      },
+      highWaterMark: 5
+    });
+  }, 'construction should re-throw the error');
+}, 'Writable stream: throwing strategy.size getter');
+
+// A non-callable size is rejected with a TypeError at construction time.
+test(() => {
+  assert_throws_js(TypeError, () => {
+    new WritableStream({}, { size: 'a string' });
+  });
+}, 'reject any non-function value for strategy.size');
+
+// A throwing highWaterMark getter is likewise rethrown by the constructor.
+test(() => {
+  assert_throws_exactly(error1, () => {
+    new WritableStream({}, {
+      size() {
+        return 1;
+      },
+      get highWaterMark() {
+        throw error1;
+      }
+    });
+  }, 'construction should re-throw the error');
+}, 'Writable stream: throwing strategy.highWaterMark getter');
+
+// highWaterMark must convert to a non-negative number; everything else is a
+// RangeError (note 'foo' and {} convert to NaN).
+test(() => {
+
+  for (const highWaterMark of [-1, -Infinity, NaN, 'foo', {}]) {
+    assert_throws_js(RangeError, () => {
+      new WritableStream({}, {
+        size() {
+          return 1;
+        },
+        highWaterMark
+      });
+    }, `construction should throw a RangeError for ${highWaterMark}`);
+  }
+}, 'Writable stream: invalid strategy.highWaterMark');
+
+// A size() that throws during write() errors the stream: both the write
+// promise and the closed promise reject with the thrown error.
+promise_test(t => {
+  const ws = new WritableStream({}, {
+    size() {
+      throw error1;
+    },
+    highWaterMark: 5
+  });
+
+  const writer = ws.getWriter();
+
+  const p1 = promise_rejects_exactly(t, error1, writer.write('a'), 'write should reject with the thrown error');
+
+  const p2 = promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with the thrown error');
+
+  return Promise.all([p1, p2]);
+}, 'Writable stream: throwing strategy.size method');
+
+promise_test(() => {
+ const sizes = [NaN, -Infinity, Infinity, -1];
+ return Promise.all(sizes.map(size => {
+ const ws = new WritableStream({}, {
+ size() {
+ return size;
+ },
+ highWaterMark: 5
+ });
+
+ const writer = ws.getWriter();
+
+ return writer.write('a').then(() => assert_unreached('write must reject'), writeE => {
+ assert_equals(writeE.name, 'RangeError', `write must reject with a RangeError for ${size}`);
+
+ return writer.closed.then(() => assert_unreached('write must reject'), closedE => {
+ assert_equals(closedE, writeE, `closed should reject with the same error as write`);
+ });
+ });
+ }));
+}, 'Writable stream: invalid strategy.size return value');
+
+// Both strategy members are invalid here; the TypeError for the non-function
+// size wins over the RangeError the NaN highWaterMark would produce.
+test(() => {
+  assert_throws_js(TypeError, () => new WritableStream(undefined, {
+    size: 'not a function',
+    highWaterMark: NaN
+  }), 'WritableStream constructor should throw a TypeError');
+}, 'Writable stream: invalid size beats invalid highWaterMark');
diff --git a/testing/web-platform/tests/streams/writable-streams/bad-underlying-sinks.any.js b/testing/web-platform/tests/streams/writable-streams/bad-underlying-sinks.any.js
new file mode 100644
index 0000000000..3c434ffe60
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/bad-underlying-sinks.any.js
@@ -0,0 +1,204 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+// start: every invalid-start shape is a synchronous constructor failure, so
+// all four cases are grouped in a single test.
+test(() => {
+  assert_throws_exactly(error1, () => {
+    new WritableStream({
+      get start() {
+        throw error1;
+      }
+    });
+  }, 'constructor should throw same error as throwing start getter');
+
+  assert_throws_exactly(error1, () => {
+    new WritableStream({
+      start() {
+        throw error1;
+      }
+    });
+  }, 'constructor should throw same error as throwing start method');
+
+  assert_throws_js(TypeError, () => {
+    new WritableStream({
+      start: 'not a function or undefined'
+    });
+  }, 'constructor should throw TypeError when passed a non-function start property');
+
+  // An object with an .apply method is still not callable.
+  assert_throws_js(TypeError, () => {
+    new WritableStream({
+      start: { apply() {} }
+    });
+  }, 'constructor should throw TypeError when passed a non-function start property with an .apply method');
+}, 'start: errors in start cause WritableStream constructor to throw');
+
+// recordingWritableStream (resources/recording-streams.js) logs sink calls in
+// ws.events so the tests below can assert exactly which methods ran.
+promise_test(t => {
+
+  const ws = recordingWritableStream({
+    close() {
+      throw error1;
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  return promise_rejects_exactly(t, error1, writer.close(), 'close() promise must reject with the thrown error')
+    .then(() => promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the thrown error'))
+    .then(() => promise_rejects_exactly(t, error1, writer.closed, 'closed promise must reject with the thrown error'))
+    .then(() => {
+      assert_array_equals(ws.events, ['close']);
+    });
+
+}, 'close: throwing method should cause writer close() and ready to reject');
+
+promise_test(t => {
+
+  const ws = recordingWritableStream({
+    close() {
+      return Promise.reject(error1);
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  return promise_rejects_exactly(t, error1, writer.close(), 'close() promise must reject with the same error')
+    .then(() => promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the same error'))
+    .then(() => assert_array_equals(ws.events, ['close']));
+
+}, 'close: returning a rejected promise should cause writer close() and ready to reject');
+
+test(() => {
+  assert_throws_exactly(error1, () => new WritableStream({
+    get close() {
+      throw error1;
+    }
+  }), 'constructor should throw');
+}, 'close: throwing getter should cause constructor to throw');
+
+test(() => {
+  assert_throws_exactly(error1, () => new WritableStream({
+    get write() {
+      throw error1;
+    }
+  }), 'constructor should throw');
+}, 'write: throwing getter should cause write() and closed to reject');
+
+promise_test(t => {
+  const ws = new WritableStream({
+    write() {
+      throw error1;
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  return promise_rejects_exactly(t, error1, writer.write('a'), 'write should reject with the thrown error')
+    .then(() => promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with the thrown error'));
+}, 'write: throwing method should cause write() and closed to reject');
+
+promise_test(t => {
+
+  let rejectSinkWritePromise;
+  const ws = recordingWritableStream({
+    write() {
+      return new Promise((r, reject) => {
+        rejectSinkWritePromise = reject;
+      });
+    }
+  });
+
+  // flushAsyncEvents (resources/test-utils.js) lets the stream finish starting
+  // before the write is queued.
+  return flushAsyncEvents().then(() => {
+    const writer = ws.getWriter();
+    const writePromise = writer.write('a');
+    rejectSinkWritePromise(error1);
+
+    return Promise.all([
+      promise_rejects_exactly(t, error1, writePromise, 'writer write must reject with the same error'),
+      promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the same error')
+    ]);
+  })
+  .then(() => {
+    assert_array_equals(ws.events, ['write', 'a']);
+  });
+
+}, 'write: returning a promise that becomes rejected after the writer write() should cause writer write() and ready ' +
+   'to reject');
+
+promise_test(t => {
+
+  const ws = recordingWritableStream({
+    write() {
+      // First write (events.length === 2 after recording ['write', chunk])
+      // succeeds slowly; the second write rejects.
+      if (ws.events.length === 2) {
+        return delay(0);
+      }
+
+      return Promise.reject(error1);
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  // Do not wait for this; we want to test the ready promise when the stream is "full" (desiredSize = 0), but if we wait
+  // then the stream will transition back to "empty" (desiredSize = 1)
+  writer.write('a');
+  const readyPromise = writer.ready;
+
+  return promise_rejects_exactly(t, error1, writer.write('b'), 'second write must reject with the same error').then(() => {
+    assert_equals(writer.ready, readyPromise,
+                  'the ready promise must not change, since the queue was full after the first write, so the pending one simply ' +
+                  'transitioned');
+    return promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the same error');
+  })
+  .then(() => assert_array_equals(ws.events, ['write', 'a', 'write', 'b']));
+
+}, 'write: returning a rejected promise (second write) should cause writer write() and ready to reject');
+
+test(() => {
+  assert_throws_js(TypeError, () => new WritableStream({
+    start: 'test'
+  }), 'constructor should throw');
+}, 'start: non-function start method');
+
+test(() => {
+  assert_throws_js(TypeError, () => new WritableStream({
+    write: 'test'
+  }), 'constructor should throw');
+}, 'write: non-function write method');
+
+test(() => {
+  assert_throws_js(TypeError, () => new WritableStream({
+    close: 'test'
+  }), 'constructor should throw');
+}, 'close: non-function close method');
+
+test(() => {
+  assert_throws_js(TypeError, () => new WritableStream({
+    abort: { apply() {} }
+  }), 'constructor should throw');
+}, 'abort: non-function abort method with .apply');
+
+test(() => {
+  assert_throws_exactly(error1, () => new WritableStream({
+    get abort() {
+      throw error1;
+    }
+  }), 'constructor should throw');
+}, 'abort: throwing getter should cause abort() and closed to reject');
+
+// abort() rejects with the sink's thrown error, but closed rejects with the
+// original abort reason.
+promise_test(t => {
+  const abortReason = new Error('different string');
+  const ws = new WritableStream({
+    abort() {
+      throw error1;
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  return promise_rejects_exactly(t, error1, writer.abort(abortReason), 'abort should reject with the thrown error')
+    .then(() => promise_rejects_exactly(t, abortReason, writer.closed, 'closed should reject with abortReason'));
+}, 'abort: throwing method should cause abort() and closed to reject');
diff --git a/testing/web-platform/tests/streams/writable-streams/byte-length-queuing-strategy.any.js b/testing/web-platform/tests/streams/writable-streams/byte-length-queuing-strategy.any.js
new file mode 100644
index 0000000000..eed86ee700
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/byte-length-queuing-strategy.any.js
@@ -0,0 +1,28 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+// With a 16 KiB high water mark, a 1 KiB chunk never applies backpressure, but
+// close() must still wait for the in-flight (timer-delayed) write to settle.
+promise_test(t => {
+  let isDone = false;
+  const ws = new WritableStream(
+    {
+      write() {
+        return new Promise(resolve => {
+          t.step_timeout(() => {
+            isDone = true;
+            resolve();
+          }, 200);
+        });
+      },
+
+      close() {
+        assert_true(isDone, 'close is only called once the promise has been resolved');
+      }
+    },
+    new ByteLengthQueuingStrategy({ highWaterMark: 1024 * 16 })
+  );
+
+  const writer = ws.getWriter();
+  writer.write({ byteLength: 1024 });
+
+  return writer.close();
+}, 'Closing a writable stream with in-flight writes below the high water mark delays the close call properly');
diff --git a/testing/web-platform/tests/streams/writable-streams/close.any.js b/testing/web-platform/tests/streams/writable-streams/close.any.js
new file mode 100644
index 0000000000..45261e7ca7
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/close.any.js
@@ -0,0 +1,470 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+const error2 = new Error('error2');
+error2.name = 'error2';
+
+// writer.close() always fulfills with undefined, regardless of the sink's
+// close() return value.
+promise_test(() => {
+  const ws = new WritableStream({
+    close() {
+      return 'Hello';
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  const closePromise = writer.close();
+  return closePromise.then(value => assert_equals(value, undefined, 'fulfillment value must be undefined'));
+}, 'fulfillment value of writer.close() call must be undefined even if the underlying sink returns a non-undefined ' +
+   'value');
+
+// controller.error() while the sink's close() is still pending must be a
+// no-op: the stream still closes cleanly once close() resolves.
+promise_test(() => {
+  let controller;
+  let resolveClose;
+  const ws = new WritableStream({
+    start(c) {
+      controller = c;
+    },
+    close() {
+      return new Promise(resolve => {
+        resolveClose = resolve;
+      });
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  const closePromise = writer.close();
+  return flushAsyncEvents().then(() => {
+    controller.error(error1);
+    return flushAsyncEvents();
+  }).then(() => {
+    resolveClose();
+    return Promise.all([
+      closePromise,
+      writer.closed,
+      flushAsyncEvents().then(() => writer.closed)]);
+  });
+}, 'when sink calls error asynchronously while sink close is in-flight, the stream should not become errored');
+
+// Same as above, but the error is raised synchronously from inside close().
+promise_test(() => {
+  let controller;
+  const passedError = new Error('error me');
+  const ws = new WritableStream({
+    start(c) {
+      controller = c;
+    },
+    close() {
+      controller.error(passedError);
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  return writer.close().then(() => writer.closed);
+}, 'when sink calls error synchronously while closing, the stream should not become errored');
+
+// If close() is requested while a write is in flight and the sink's close()
+// throws, both close() and closed reject with that error.
+promise_test(t => {
+  const ws = new WritableStream({
+    close() {
+      throw error1;
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  return Promise.all([
+    writer.write('y'),
+    promise_rejects_exactly(t, error1, writer.close(), 'close() must reject with the error'),
+    promise_rejects_exactly(t, error1, writer.closed, 'closed must reject with the error')
+  ]);
+}, 'when the sink throws during close, and the close is requested while a write is still in-flight, the stream should ' +
+   'become errored during the close');
+
+// releaseLock() must not throw even though a write is pending and the stream
+// has been errored.
+promise_test(() => {
+  const ws = new WritableStream({
+    write(chunk, controller) {
+      controller.error(error1);
+      return new Promise(() => {});
+    }
+  });
+
+  const writer = ws.getWriter();
+  writer.write('a');
+
+  return delay(0).then(() => {
+    writer.releaseLock();
+  });
+}, 'releaseLock on a stream with a pending write in which the stream has been errored');
+
+// As above, but with a pending close() instead of a pending write().
+promise_test(() => {
+  let controller;
+  const ws = new WritableStream({
+    start(c) {
+      controller = c;
+    },
+    close() {
+      controller.error(error1);
+      return new Promise(() => {});
+    }
+  });
+
+  const writer = ws.getWriter();
+  writer.close();
+
+  return delay(0).then(() => {
+    writer.releaseLock();
+  });
+}, 'releaseLock on a stream with a pending close in which controller.error() was called');
+
+// Closing a writable (non-full) stream leaves desiredSize alone and keeps
+// ready fulfilled; only the sink's close() runs.
+promise_test(() => {
+  const ws = recordingWritableStream();
+
+  const writer = ws.getWriter();
+
+  return writer.ready.then(() => {
+    assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');
+
+    writer.close();
+    assert_equals(writer.desiredSize, 1, 'desiredSize should be still 1');
+
+    return writer.ready.then(v => {
+      assert_equals(v, undefined, 'ready promise should be fulfilled with undefined');
+      assert_array_equals(ws.events, ['close'], 'write and abort should not be called');
+    });
+  });
+}, 'when close is called on a WritableStream in writable state, ready should return a fulfilled promise');
+
+// Once close() is requested on a backpressured ("waiting") stream, the ready
+// promise fulfills even though the pending write never completes.
+promise_test(() => {
+  const ws = recordingWritableStream({
+    write() {
+      return new Promise(() => {});
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  return writer.ready.then(() => {
+    writer.write('a');
+
+    assert_equals(writer.desiredSize, 0, 'desiredSize should be 0');
+
+    let calledClose = false;
+    return Promise.all([
+      writer.ready.then(v => {
+        assert_equals(v, undefined, 'ready promise should be fulfilled with undefined');
+        assert_true(calledClose, 'ready should not be fulfilled before writer.close() is called');
+        assert_array_equals(ws.events, ['write', 'a'], 'sink abort() should not be called');
+      }),
+      flushAsyncEvents().then(() => {
+        writer.close();
+        calledClose = true;
+      })
+    ]);
+  });
+}, 'when close is called on a WritableStream in waiting state, ready promise should be fulfilled');
+
+// ready must fulfill as soon as close is requested, not when the (slow) sink
+// close() finishes.
+promise_test(() => {
+  let asyncCloseFinished = false;
+  const ws = recordingWritableStream({
+    close() {
+      return flushAsyncEvents().then(() => {
+        asyncCloseFinished = true;
+      });
+    }
+  });
+
+  const writer = ws.getWriter();
+  return writer.ready.then(() => {
+    writer.write('a');
+
+    writer.close();
+
+    return writer.ready.then(v => {
+      assert_false(asyncCloseFinished, 'ready promise should be fulfilled before async close completes');
+      assert_equals(v, undefined, 'ready promise should be fulfilled with undefined');
+      assert_array_equals(ws.events, ['write', 'a', 'close'], 'sink abort() should not be called');
+    });
+  });
+}, 'when close is called on a WritableStream in waiting state, ready should be fulfilled immediately even if close ' +
+   'takes a long time');
+
+// close() may return any thenable, not just a real Promise; its rejection is
+// propagated to writer.close().
+promise_test(t => {
+  const rejection = { name: 'letter' };
+  const ws = new WritableStream({
+    close() {
+      return {
+        then(onFulfilled, onRejected) { onRejected(rejection); }
+      };
+    }
+  });
+  return promise_rejects_exactly(t, rejection, ws.getWriter().close(), 'close() should return a rejection');
+}, 'returning a thenable from close() should work');
+
+// Releasing the lock after close() rejects the writer's closed promise with a
+// TypeError but leaves the close() result itself fulfilled.
+promise_test(t => {
+  const ws = new WritableStream();
+  const writer = ws.getWriter();
+  return writer.ready.then(() => {
+    const closePromise = writer.close();
+    const closedPromise = writer.closed;
+    writer.releaseLock();
+    return Promise.all([
+      closePromise,
+      promise_rejects_js(t, TypeError, closedPromise, '.closed promise should be rejected')
+    ]);
+  });
+}, 'releaseLock() should not change the result of sync close()');
+
+// Same as above with an asynchronously-completing sink close().
+promise_test(t => {
+  const ws = new WritableStream({
+    close() {
+      return flushAsyncEvents();
+    }
+  });
+  const writer = ws.getWriter();
+  return writer.ready.then(() => {
+    const closePromise = writer.close();
+    const closedPromise = writer.closed;
+    writer.releaseLock();
+    return Promise.all([
+      closePromise,
+      promise_rejects_js(t, TypeError, closedPromise, '.closed promise should be rejected')
+    ]);
+  });
+}, 'releaseLock() should not change the result of async close()');
+
+// The stream still transitions to CLOSED after the original writer detaches;
+// a fresh writer on the closed stream reports desiredSize 0.
+promise_test(() => {
+  let resolveClose;
+  const ws = new WritableStream({
+    close() {
+      const promise = new Promise(resolve => {
+        resolveClose = resolve;
+      });
+      return promise;
+    }
+  });
+  const writer = ws.getWriter();
+  const closePromise = writer.close();
+  writer.releaseLock();
+  return delay(0).then(() => {
+    resolveClose();
+    return closePromise.then(() => {
+      assert_equals(ws.getWriter().desiredSize, 0, 'desiredSize should be 0');
+    });
+  });
+}, 'close() should set state to CLOSED even if writer has detached');
+
+// abort() issued while the sink close() is still pending must resolve once
+// the close completes.
+promise_test(() => {
+  let resolveClose;
+  const ws = new WritableStream({
+    close() {
+      const promise = new Promise(resolve => {
+        resolveClose = resolve;
+      });
+      return promise;
+    }
+  });
+  const writer = ws.getWriter();
+  writer.close();
+  writer.releaseLock();
+  return delay(0).then(() => {
+    const abortingWriter = ws.getWriter();
+    const abortPromise = abortingWriter.abort();
+    abortingWriter.releaseLock();
+    resolveClose();
+    return abortPromise;
+  });
+}, 'the promise returned by async abort during close should resolve');
+
+// Though the order in which the promises are fulfilled or rejected is arbitrary, we're checking it for
+// interoperability. We can change the order as long as we file bugs on all implementers to update to the latest tests
+// to keep them interoperable.
+
+promise_test(() => {
+  const ws = new WritableStream({});
+
+  const writer = ws.getWriter();
+
+  const closePromise = writer.close();
+
+  const events = [];
+  return Promise.all([
+    closePromise.then(() => {
+      events.push('closePromise');
+    }),
+    writer.closed.then(() => {
+      events.push('closed');
+    })
+  ]).then(() => {
+    assert_array_equals(events, ['closePromise', 'closed'],
+                        'promises must fulfill/reject in the expected order');
+  });
+}, 'promises must fulfill/reject in the expected order on closure');
+
+promise_test(() => {
+  const ws = new WritableStream({});
+
+  // Wait until the WritableStream starts so that the close() call gets processed. Otherwise, abort() will be
+  // processed without waiting for completion of the close().
+  return delay(0).then(() => {
+    const writer = ws.getWriter();
+
+    const closePromise = writer.close();
+    const abortPromise = writer.abort(error1);
+
+    const events = [];
+    return Promise.all([
+      closePromise.then(() => {
+        events.push('closePromise');
+      }),
+      abortPromise.then(() => {
+        events.push('abortPromise');
+      }),
+      writer.closed.then(() => {
+        events.push('closed');
+      })
+    ]).then(() => {
+      assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'],
+                          'promises must fulfill/reject in the expected order');
+    });
+  });
+}, 'promises must fulfill/reject in the expected order on aborted closure');
+
+// close() and abort() reject with the sink close() error, but closed rejects
+// with the abort reason (error2).
+promise_test(t => {
+  const ws = new WritableStream({
+    close() {
+      return Promise.reject(error1);
+    }
+  });
+
+  // Wait until the WritableStream starts so that the close() call gets processed.
+  return delay(0).then(() => {
+    const writer = ws.getWriter();
+
+    const closePromise = writer.close();
+    const abortPromise = writer.abort(error2);
+
+    const events = [];
+    closePromise.catch(() => events.push('closePromise'));
+    abortPromise.catch(() => events.push('abortPromise'));
+    writer.closed.catch(() => events.push('closed'));
+    return Promise.all([
+      promise_rejects_exactly(t, error1, closePromise,
+                              'closePromise must reject with the error returned from the sink\'s close method'),
+      promise_rejects_exactly(t, error1, abortPromise,
+                              'abortPromise must reject with the error returned from the sink\'s close method'),
+      promise_rejects_exactly(t, error2, writer.closed,
+                              'writer.closed must reject with error2')
+    ]).then(() => {
+      assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'],
+                          'promises must fulfill/reject in the expected order');
+    });
+  });
+}, 'promises must fulfill/reject in the expected order on aborted and errored closure');
+
+// close() must not settle while the sink's write() is still in flight, even
+// though the stream has already been errored.
+promise_test(t => {
+  let resolveWrite;
+  let controller;
+  const ws = new WritableStream({
+    write(chunk, c) {
+      controller = c;
+      return new Promise(resolve => {
+        resolveWrite = resolve;
+      });
+    }
+  });
+  const writer = ws.getWriter();
+  return writer.ready.then(() => {
+    const writePromise = writer.write('c');
+    controller.error(error1);
+    const closePromise = writer.close();
+    let closeRejected = false;
+    closePromise.catch(() => {
+      closeRejected = true;
+    });
+    return flushAsyncEvents().then(() => {
+      assert_false(closeRejected);
+      resolveWrite();
+      return Promise.all([
+        writePromise,
+        promise_rejects_exactly(t, error1, closePromise, 'close() should reject')
+      ]).then(() => {
+        assert_true(closeRejected);
+      });
+    });
+  });
+}, 'close() should not reject until no sink methods are in flight');
+
+// A writer acquired on an already-closed stream starts with a fulfilled ready.
+promise_test(() => {
+  const ws = new WritableStream();
+  const writer1 = ws.getWriter();
+  return writer1.close().then(() => {
+    writer1.releaseLock();
+    const writer2 = ws.getWriter();
+    const ready = writer2.ready;
+    assert_equals(ready.constructor, Promise);
+    return ready;
+  });
+}, 'ready promise should be initialised as fulfilled for a writer on a closed stream');
+
+// The remaining tests exercise WritableStream.prototype.close() directly.
+promise_test(() => {
+  const ws = new WritableStream();
+  ws.close();
+  const writer = ws.getWriter();
+  return writer.closed;
+}, 'close() on a writable stream should work');
+
+promise_test(t => {
+  const ws = new WritableStream();
+  ws.getWriter();
+  return promise_rejects_js(t, TypeError, ws.close(), 'close should reject');
+}, 'close() on a locked stream should reject');
+
+promise_test(t => {
+  const ws = new WritableStream({
+    start(controller) {
+      controller.error(error1);
+    }
+  });
+  return promise_rejects_exactly(t, error1, ws.close(), 'close should reject with error1');
+}, 'close() on an erroring stream should reject');
+
+promise_test(t => {
+  const ws = new WritableStream({
+    start(controller) {
+      controller.error(error1);
+    }
+  });
+  const writer = ws.getWriter();
+  // Once the stream has fully errored, close() rejects with a TypeError
+  // rather than the stored error.
+  return promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with the error').then(() => {
+    writer.releaseLock();
+    return promise_rejects_js(t, TypeError, ws.close(), 'close should reject');
+  });
+}, 'close() on an errored stream should reject');
+
+promise_test(t => {
+  const ws = new WritableStream();
+  const writer = ws.getWriter();
+  return writer.close().then(() => {
+    return promise_rejects_js(t, TypeError, ws.close(), 'close should reject');
+  });
+}, 'close() on an closed stream should reject');
+
+promise_test(t => {
+  const ws = new WritableStream({
+    close() {
+      return new Promise(() => {});
+    }
+  });
+
+  const writer = ws.getWriter();
+  writer.close();
+  writer.releaseLock();
+
+  return promise_rejects_js(t, TypeError, ws.close(), 'close should reject');
+}, 'close() on a stream with a pending close should reject');
diff --git a/testing/web-platform/tests/streams/writable-streams/constructor.any.js b/testing/web-platform/tests/streams/writable-streams/constructor.any.js
new file mode 100644
index 0000000000..0abc7ef545
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/constructor.any.js
@@ -0,0 +1,155 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+const error2 = new Error('error2');
+error2.name = 'error2';
+
+// The controller handed to start() stays usable after construction.
+promise_test(() => {
+  let controller;
+  const ws = new WritableStream({
+    start(c) {
+      controller = c;
+    }
+  });
+
+  // Now error the stream after its construction.
+  controller.error(error1);
+
+  const writer = ws.getWriter();
+
+  // desiredSize is null on an errored stream.
+  assert_equals(writer.desiredSize, null, 'desiredSize should be null');
+  return writer.closed.catch(r => {
+    assert_equals(r, error1, 'ws should be errored by the passed error');
+  });
+}, 'controller argument should be passed to start method');
+
+promise_test(t => {
+  const ws = new WritableStream({
+    write(chunk, controller) {
+      controller.error(error1);
+    }
+  });
+
+  const writer = ws.getWriter();
+
+  return Promise.all([
+    writer.write('a'),
+    promise_rejects_exactly(t, error1, writer.closed, 'controller.error() in write() should error the stream')
+  ]);
+}, 'controller argument should be passed to write method');
+
+// Older versions of the standard had the controller argument passed to close(). It wasn't useful, and so has been
+// removed. This test remains to identify implementations that haven't been updated.
+promise_test(t => {
+  const ws = new WritableStream({
+    close(...args) {
+      t.step(() => {
+        assert_array_equals(args, [], 'no arguments should be passed to close');
+      });
+    }
+  });
+
+  return ws.getWriter().close();
+}, 'controller argument should not be passed to close method');
+
+promise_test(() => {
+  const ws = new WritableStream({}, {
+    highWaterMark: 1000,
+    size() { return 1; }
+  });
+
+  const writer = ws.getWriter();
+
+  assert_equals(writer.desiredSize, 1000, 'desiredSize should be 1000');
+  return writer.ready.then(v => {
+    assert_equals(v, undefined, 'ready promise should fulfill with undefined');
+  });
+}, 'highWaterMark should be reflected to desiredSize');
+
+promise_test(() => {
+  const ws = new WritableStream({}, {
+    highWaterMark: Infinity,
+    size() { return 0; }
+  });
+
+  const writer = ws.getWriter();
+
+  assert_equals(writer.desiredSize, Infinity, 'desiredSize should be Infinity');
+
+  return writer.ready;
+}, 'WritableStream should be writable and ready should fulfill immediately if the strategy does not apply ' +
+   'backpressure');
+
+test(() => {
+  new WritableStream();
+}, 'WritableStream should be constructible with no arguments');
+
+test(() => {
+  const underlyingSink = { get start() { throw error1; } };
+  const queuingStrategy = { highWaterMark: 0, get size() { throw error2; } };
+
+  // underlyingSink is converted in prose in the method body, whereas queuingStrategy is done at the IDL layer.
+  // So the queuingStrategy exception should be encountered first.
+  assert_throws_exactly(error2, () => new WritableStream(underlyingSink, queuingStrategy));
+}, 'underlyingSink argument should be converted after queuingStrategy argument');
+
+test(() => {
+  const ws = new WritableStream({});
+
+  const writer = ws.getWriter();
+
+  assert_equals(typeof writer.write, 'function', 'writer should have a write method');
+  assert_equals(typeof writer.abort, 'function', 'writer should have an abort method');
+  assert_equals(typeof writer.close, 'function', 'writer should have a close method');
+
+  assert_equals(writer.desiredSize, 1, 'desiredSize should start at 1');
+
+  assert_not_equals(typeof writer.ready, 'undefined', 'writer should have a ready property');
+  assert_equals(typeof writer.ready.then, 'function', 'ready property should be thenable');
+  assert_not_equals(typeof writer.closed, 'undefined', 'writer should have a closed property');
+  assert_equals(typeof writer.closed.then, 'function', 'closed property should be thenable');
+}, 'WritableStream instances should have standard methods and properties');
+
+// The default controller and writer classes are not directly constructible;
+// their constructors are grabbed via instances handed out by the stream.
+test(() => {
+  let WritableStreamDefaultController;
+  new WritableStream({
+    start(c) {
+      WritableStreamDefaultController = c.constructor;
+    }
+  });
+
+  assert_throws_js(TypeError, () => new WritableStreamDefaultController({}),
+                   'constructor should throw a TypeError exception');
+}, 'WritableStreamDefaultController constructor should throw');
+
+test(() => {
+  let WritableStreamDefaultController;
+  const stream = new WritableStream({
+    start(c) {
+      WritableStreamDefaultController = c.constructor;
+    }
+  });
+
+  assert_throws_js(TypeError, () => new WritableStreamDefaultController(stream),
+                   'constructor should throw a TypeError exception');
+}, 'WritableStreamDefaultController constructor should throw when passed an initialised WritableStream');
+
+test(() => {
+  const stream = new WritableStream();
+  const writer = stream.getWriter();
+  const WritableStreamDefaultWriter = writer.constructor;
+  writer.releaseLock();
+  assert_throws_js(TypeError, () => new WritableStreamDefaultWriter({}),
+                   'constructor should throw a TypeError exception');
+}, 'WritableStreamDefaultWriter should throw unless passed a WritableStream');
+
+test(() => {
+  const stream = new WritableStream();
+  const writer = stream.getWriter();
+  const WritableStreamDefaultWriter = writer.constructor;
+  assert_throws_js(TypeError, () => new WritableStreamDefaultWriter(stream),
+                   'constructor should throw a TypeError exception');
+}, 'WritableStreamDefaultWriter constructor should throw when stream argument is locked');
diff --git a/testing/web-platform/tests/streams/writable-streams/count-queuing-strategy.any.js b/testing/web-platform/tests/streams/writable-streams/count-queuing-strategy.any.js
new file mode 100644
index 0000000000..8211757530
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/count-queuing-strategy.any.js
@@ -0,0 +1,124 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+test(() => {
+  new WritableStream({}, new CountQueuingStrategy({ highWaterMark: 4 }));
+}, 'Can construct a writable stream with a valid CountQueuingStrategy');
+
+// With HWM 0 every queued chunk drives desiredSize negative; each sink write
+// is completed manually via the `dones` resolver map keyed by chunk.
+promise_test(() => {
+  const dones = Object.create(null);
+
+  const ws = new WritableStream(
+    {
+      write(chunk) {
+        return new Promise(resolve => {
+          dones[chunk] = resolve;
+        });
+      }
+    },
+    new CountQueuingStrategy({ highWaterMark: 0 })
+  );
+
+  const writer = ws.getWriter();
+  let writePromiseB;
+  let writePromiseC;
+
+  return Promise.resolve().then(() => {
+    assert_equals(writer.desiredSize, 0, 'desiredSize should be initially 0');
+
+    const writePromiseA = writer.write('a');
+    assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 1st write()');
+
+    writePromiseB = writer.write('b');
+    assert_equals(writer.desiredSize, -2, 'desiredSize should be -2 after 2nd write()');
+
+    dones.a();
+    return writePromiseA;
+  }).then(() => {
+    assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after completing 1st write()');
+
+    dones.b();
+    return writePromiseB;
+  }).then(() => {
+    assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 2nd write()');
+
+    writePromiseC = writer.write('c');
+    assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 3rd write()');
+
+    dones.c();
+    return writePromiseC;
+  }).then(() => {
+    assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 3rd write()');
+  });
+}, 'Correctly governs the value of a WritableStream\'s state property (HWM = 0)');
+
+// Same bookkeeping with HWM 4: desiredSize = 4 - (queued + in-flight chunks).
+promise_test(() => {
+  const dones = Object.create(null);
+
+  const ws = new WritableStream(
+    {
+      write(chunk) {
+        return new Promise(resolve => {
+          dones[chunk] = resolve;
+        });
+      }
+    },
+    new CountQueuingStrategy({ highWaterMark: 4 })
+  );
+
+  const writer = ws.getWriter();
+  let writePromiseB;
+  let writePromiseC;
+  let writePromiseD;
+
+  return Promise.resolve().then(() => {
+    assert_equals(writer.desiredSize, 4, 'desiredSize should be initially 4');
+
+    const writePromiseA = writer.write('a');
+    assert_equals(writer.desiredSize, 3, 'desiredSize should be 3 after 1st write()');
+
+    writePromiseB = writer.write('b');
+    assert_equals(writer.desiredSize, 2, 'desiredSize should be 2 after 2nd write()');
+
+    writePromiseC = writer.write('c');
+    assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 after 3rd write()');
+
+    writePromiseD = writer.write('d');
+    assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after 4th write()');
+
+    writer.write('e');
+    assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 5th write()');
+
+    writer.write('f');
+    assert_equals(writer.desiredSize, -2, 'desiredSize should be -2 after 6th write()');
+
+    writer.write('g');
+    assert_equals(writer.desiredSize, -3, 'desiredSize should be -3 after 7th write()');
+
+    dones.a();
+    return writePromiseA;
+  }).then(() => {
+    assert_equals(writer.desiredSize, -2, 'desiredSize should be -2 after completing 1st write()');
+
+    dones.b();
+    return writePromiseB;
+  }).then(() => {
+    assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after completing 2nd write()');
+
+    dones.c();
+    return writePromiseC;
+  }).then(() => {
+    assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 3rd write()');
+
+    writer.write('h');
+    assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 8th write()');
+
+    dones.d();
+    return writePromiseD;
+  }).then(() => {
+    assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 4th write()');
+
+    writer.write('i');
+    assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 9th write()');
+  });
+}, 'Correctly governs the value of a WritableStream\'s state property (HWM = 4)');
diff --git a/testing/web-platform/tests/streams/writable-streams/error.any.js b/testing/web-platform/tests/streams/writable-streams/error.any.js
new file mode 100644
index 0000000000..d08c8a5486
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/error.any.js
@@ -0,0 +1,64 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+const error1 = new Error('error1');
+error1.name = 'error1';
+
+const error2 = new Error('error2');
+error2.name = 'error2';
+
+promise_test(t => {
+ const ws = new WritableStream({
+ start(controller) {
+ controller.error(error1);
+ }
+ });
+ return promise_rejects_exactly(t, error1, ws.getWriter().closed, 'stream should be errored');
+}, 'controller.error() should error the stream');
+
+test(() => {
+ let controller;
+ const ws = new WritableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ ws.abort();
+ controller.error(error1);
+}, 'controller.error() on erroring stream should not throw');
+
+promise_test(t => {
+ let controller;
+ const ws = new WritableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ controller.error(error1);
+ controller.error(error2);
+ return promise_rejects_exactly(t, error1, ws.getWriter().closed, 'first controller.error() should win');
+}, 'surplus calls to controller.error() should be a no-op');
+
+promise_test(() => {
+ let controller;
+ const ws = new WritableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ return ws.abort().then(() => {
+ controller.error(error1);
+ });
+}, 'controller.error() on errored stream should not throw');
+
+promise_test(() => {
+ let controller;
+ const ws = new WritableStream({
+ start(c) {
+ controller = c;
+ }
+ });
+ return ws.getWriter().close().then(() => {
+ controller.error(error1);
+ });
+}, 'controller.error() on closed stream should not throw');
diff --git a/testing/web-platform/tests/streams/writable-streams/floating-point-total-queue-size.any.js b/testing/web-platform/tests/streams/writable-streams/floating-point-total-queue-size.any.js
new file mode 100644
index 0000000000..20a14fc19a
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/floating-point-total-queue-size.any.js
@@ -0,0 +1,87 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+// Due to the limitations of floating-point precision, the calculation of desiredSize sometimes gives different answers
+// than adding up the items in the queue would. It is important that implementations give the same result in these edge
+// cases so that developers do not come to depend on non-standard behaviour. See
+// https://github.com/whatwg/streams/issues/582 and linked issues for further discussion.
+
+promise_test(() => {
+ const writer = setupTestStream();
+
+ const writePromises = [
+ writer.write(2),
+ writer.write(Number.MAX_SAFE_INTEGER)
+ ];
+
+ assert_equals(writer.desiredSize, 0 - 2 - Number.MAX_SAFE_INTEGER,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing two chunks)');
+
+ return Promise.all(writePromises).then(() => {
+ assert_equals(writer.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative');
+ });
+}, 'Floating point arithmetic must manifest near NUMBER.MAX_SAFE_INTEGER (total ends up positive)');
+
+promise_test(() => {
+ const writer = setupTestStream();
+
+ const writePromises = [
+ writer.write(1e-16),
+ writer.write(1)
+ ];
+
+ assert_equals(writer.desiredSize, 0 - 1e-16 - 1,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing two chunks)');
+
+ return Promise.all(writePromises).then(() => {
+ assert_equals(writer.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative');
+ });
+}, 'Floating point arithmetic must manifest near 0 (total ends up positive, but clamped)');
+
+promise_test(() => {
+ const writer = setupTestStream();
+
+ const writePromises = [
+ writer.write(1e-16),
+ writer.write(1),
+ writer.write(2e-16)
+ ];
+
+ assert_equals(writer.desiredSize, 0 - 1e-16 - 1 - 2e-16,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing three chunks)');
+
+ return Promise.all(writePromises).then(() => {
+ assert_equals(writer.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16 + 1 + 2e-16,
+ 'desiredSize must be calculated using floating-point arithmetic (after the three chunks have finished writing)');
+ });
+}, 'Floating point arithmetic must manifest near 0 (total ends up positive, and not clamped)');
+
+promise_test(() => {
+ const writer = setupTestStream();
+
+ const writePromises = [
+ writer.write(2e-16),
+ writer.write(1)
+ ];
+
+ assert_equals(writer.desiredSize, 0 - 2e-16 - 1,
+ 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing two chunks)');
+
+ return Promise.all(writePromises).then(() => {
+ assert_equals(writer.desiredSize, 0 - 2e-16 - 1 + 2e-16 + 1,
+ 'desiredSize must be calculated using floating-point arithmetic (after the two chunks have finished writing)');
+ });
+}, 'Floating point arithmetic must manifest near 0 (total ends up zero)');
+
+function setupTestStream() {
+ const strategy = {
+ size(x) {
+ return x;
+ },
+ highWaterMark: 0
+ };
+
+ const ws = new WritableStream({}, strategy);
+
+ return ws.getWriter();
+}
diff --git a/testing/web-platform/tests/streams/writable-streams/general.any.js b/testing/web-platform/tests/streams/writable-streams/general.any.js
new file mode 100644
index 0000000000..48f8eeb89e
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/general.any.js
@@ -0,0 +1,277 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+test(() => {
+ const ws = new WritableStream({});
+ const writer = ws.getWriter();
+ writer.releaseLock();
+
+ assert_throws_js(TypeError, () => writer.desiredSize, 'desiredSize should throw a TypeError');
+}, 'desiredSize on a released writer');
+
+test(() => {
+ const ws = new WritableStream({});
+
+ const writer = ws.getWriter();
+
+ assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');
+}, 'desiredSize initial value');
+
+promise_test(() => {
+ const ws = new WritableStream({});
+
+ const writer = ws.getWriter();
+
+ writer.close();
+
+ return writer.closed.then(() => {
+ assert_equals(writer.desiredSize, 0, 'desiredSize should be 0');
+ });
+}, 'desiredSize on a writer for a closed stream');
+
+test(() => {
+ const ws = new WritableStream({
+ start(c) {
+ c.error();
+ }
+ });
+
+ const writer = ws.getWriter();
+ assert_equals(writer.desiredSize, null, 'desiredSize should be null');
+}, 'desiredSize on a writer for an errored stream');
+
+test(() => {
+ const ws = new WritableStream({});
+
+ const writer = ws.getWriter();
+ writer.close();
+ writer.releaseLock();
+
+ ws.getWriter();
+}, 'ws.getWriter() on a closing WritableStream');
+
+promise_test(() => {
+ const ws = new WritableStream({});
+
+ const writer = ws.getWriter();
+ return writer.close().then(() => {
+ writer.releaseLock();
+
+ ws.getWriter();
+ });
+}, 'ws.getWriter() on a closed WritableStream');
+
+test(() => {
+ const ws = new WritableStream({});
+
+ const writer = ws.getWriter();
+ writer.abort();
+ writer.releaseLock();
+
+ ws.getWriter();
+}, 'ws.getWriter() on an aborted WritableStream');
+
+promise_test(() => {
+ const ws = new WritableStream({
+ start(c) {
+ c.error();
+ }
+ });
+
+ const writer = ws.getWriter();
+ return writer.closed.then(
+ v => assert_unreached('writer.closed fulfilled unexpectedly with: ' + v),
+ () => {
+ writer.releaseLock();
+
+ ws.getWriter();
+ }
+ );
+}, 'ws.getWriter() on an errored WritableStream');
+
+promise_test(() => {
+ const ws = new WritableStream({});
+
+ const writer = ws.getWriter();
+ writer.releaseLock();
+
+ return writer.closed.then(
+ v => assert_unreached('writer.closed fulfilled unexpectedly with: ' + v),
+ closedRejection => {
+ assert_equals(closedRejection.name, 'TypeError', 'closed promise should reject with a TypeError');
+ return writer.ready.then(
+ v => assert_unreached('writer.ready fulfilled unexpectedly with: ' + v),
+ readyRejection => assert_equals(readyRejection, closedRejection,
+ 'ready promise should reject with the same error')
+ );
+ }
+ );
+}, 'closed and ready on a released writer');
+
+promise_test(t => {
+ let thisObject = null;
+ // Calls to Sink methods after the first are implicitly ignored. Only the first value that is passed to the resolver
+ // is used.
+ class Sink {
+ start() {
+ // Called twice
+ t.step(() => {
+ assert_equals(this, thisObject, 'start should be called as a method');
+ });
+ }
+
+ write() {
+ t.step(() => {
+ assert_equals(this, thisObject, 'write should be called as a method');
+ });
+ }
+
+ close() {
+ t.step(() => {
+ assert_equals(this, thisObject, 'close should be called as a method');
+ });
+ }
+
+ abort() {
+ t.step(() => {
+ assert_equals(this, thisObject, 'abort should be called as a method');
+ });
+ }
+ }
+
+ const theSink = new Sink();
+ thisObject = theSink;
+ const ws = new WritableStream(theSink);
+
+ const writer = ws.getWriter();
+
+ writer.write('a');
+ const closePromise = writer.close();
+
+ const ws2 = new WritableStream(theSink);
+ const writer2 = ws2.getWriter();
+ const abortPromise = writer2.abort();
+
+ return Promise.all([
+ closePromise,
+ abortPromise
+ ]);
+}, 'WritableStream should call underlying sink methods as methods');
+
+promise_test(t => {
+ function functionWithOverloads() {}
+ functionWithOverloads.apply = t.unreached_func('apply() should not be called');
+ functionWithOverloads.call = t.unreached_func('call() should not be called');
+ const underlyingSink = {
+ start: functionWithOverloads,
+ write: functionWithOverloads,
+ close: functionWithOverloads,
+ abort: functionWithOverloads
+ };
+ // Test start(), write(), close().
+ const ws1 = new WritableStream(underlyingSink);
+ const writer1 = ws1.getWriter();
+ writer1.write('a');
+ writer1.close();
+
+ // Test abort().
+ const abortError = new Error();
+ abortError.name = 'abort error';
+
+ const ws2 = new WritableStream(underlyingSink);
+ const writer2 = ws2.getWriter();
+ writer2.abort(abortError);
+
+ // Test abort() with a close underlying sink method present. (Historical; see
+ // https://github.com/whatwg/streams/issues/620#issuecomment-263483953 for what used to be
+ // tested here. But more coverage can't hurt.)
+ const ws3 = new WritableStream({
+ start: functionWithOverloads,
+ write: functionWithOverloads,
+ close: functionWithOverloads
+ });
+ const writer3 = ws3.getWriter();
+ writer3.abort(abortError);
+
+ return writer1.closed
+ .then(() => promise_rejects_exactly(t, abortError, writer2.closed, 'writer2.closed should be rejected'))
+ .then(() => promise_rejects_exactly(t, abortError, writer3.closed, 'writer3.closed should be rejected'));
+}, 'methods should not not have .apply() or .call() called');
+
+promise_test(() => {
+ const strategy = {
+ size() {
+ if (this !== undefined) {
+ throw new Error('size called as a method');
+ }
+ return 1;
+ }
+ };
+
+ const ws = new WritableStream({}, strategy);
+ const writer = ws.getWriter();
+ return writer.write('a');
+}, 'WritableStream\'s strategy.size should not be called as a method');
+
+promise_test(() => {
+ const ws = new WritableStream();
+ const writer1 = ws.getWriter();
+ assert_equals(undefined, writer1.releaseLock(), 'releaseLock() should return undefined');
+ const writer2 = ws.getWriter();
+ assert_equals(undefined, writer1.releaseLock(), 'no-op releaseLock() should return undefined');
+ // Calling releaseLock() on writer1 should not interfere with writer2. If it did, then the ready promise would be
+ // rejected.
+ return writer2.ready;
+}, 'redundant releaseLock() is no-op');
+
+promise_test(() => {
+ const events = [];
+ const ws = new WritableStream();
+ const writer = ws.getWriter();
+ return writer.ready.then(() => {
+ // Force the ready promise back to a pending state.
+ const writerPromise = writer.write('dummy');
+ const readyPromise = writer.ready.catch(() => events.push('ready'));
+ const closedPromise = writer.closed.catch(() => events.push('closed'));
+ writer.releaseLock();
+ return Promise.all([readyPromise, closedPromise]).then(() => {
+ assert_array_equals(events, ['ready', 'closed'], 'ready promise should fire before closed promise');
+ // Stop the writer promise hanging around after the test has finished.
+ return Promise.all([
+ writerPromise,
+ ws.abort()
+ ]);
+ });
+ });
+}, 'ready promise should fire before closed on releaseLock');
+
+test(() => {
+ class Subclass extends WritableStream {
+ extraFunction() {
+ return true;
+ }
+ }
+ assert_equals(
+ Object.getPrototypeOf(Subclass.prototype), WritableStream.prototype,
+ 'Subclass.prototype\'s prototype should be WritableStream.prototype');
+ assert_equals(Object.getPrototypeOf(Subclass), WritableStream,
+ 'Subclass\'s prototype should be WritableStream');
+ const sub = new Subclass();
+ assert_true(sub instanceof WritableStream,
+ 'Subclass object should be an instance of WritableStream');
+ assert_true(sub instanceof Subclass,
+ 'Subclass object should be an instance of Subclass');
+ const lockedGetter = Object.getOwnPropertyDescriptor(
+ WritableStream.prototype, 'locked').get;
+ assert_equals(lockedGetter.call(sub), sub.locked,
+ 'Subclass object should pass brand check');
+ assert_true(sub.extraFunction(),
+ 'extraFunction() should be present on Subclass object');
+}, 'Subclassing WritableStream should work');
+
+test(() => {
+ const ws = new WritableStream();
+ assert_false(ws.locked, 'stream should not be locked');
+ ws.getWriter();
+ assert_true(ws.locked, 'stream should be locked');
+}, 'the locked getter should return true if the stream has a writer');
diff --git a/testing/web-platform/tests/streams/writable-streams/properties.any.js b/testing/web-platform/tests/streams/writable-streams/properties.any.js
new file mode 100644
index 0000000000..ae0549f087
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/properties.any.js
@@ -0,0 +1,53 @@
+// META: global=window,worker,shadowrealm
+'use strict';
+
+const sinkMethods = {
+ start: {
+ length: 1,
+ trigger: () => Promise.resolve()
+ },
+ write: {
+ length: 2,
+ trigger: writer => writer.write()
+ },
+ close: {
+ length: 0,
+ trigger: writer => writer.close()
+ },
+ abort: {
+ length: 1,
+ trigger: writer => writer.abort()
+ }
+};
+
+for (const method in sinkMethods) {
+ const { length, trigger } = sinkMethods[method];
+
+ // Some semantic tests of how sink methods are called can be found in general.js, as well as in the test files
+ // specific to each method.
+ promise_test(() => {
+ let argCount;
+ const ws = new WritableStream({
+ [method](...args) {
+ argCount = args.length;
+ }
+ });
+ return Promise.resolve(trigger(ws.getWriter())).then(() => {
+ assert_equals(argCount, length, `${method} should be called with ${length} arguments`);
+ });
+ }, `sink method ${method} should be called with the right number of arguments`);
+
+ promise_test(() => {
+ let methodWasCalled = false;
+ function Sink() {}
+ Sink.prototype = {
+ [method]() {
+ methodWasCalled = true;
+ }
+ };
+ const ws = new WritableStream(new Sink());
+ return Promise.resolve(trigger(ws.getWriter())).then(() => {
+ assert_true(methodWasCalled, `${method} should be called`);
+ });
+ }, `sink method ${method} should be called even when it's located on the prototype chain`);
+}
diff --git a/testing/web-platform/tests/streams/writable-streams/reentrant-strategy.any.js b/testing/web-platform/tests/streams/writable-streams/reentrant-strategy.any.js
new file mode 100644
index 0000000000..18ce9e8475
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/reentrant-strategy.any.js
@@ -0,0 +1,174 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+// These tests exercise the pathological case of calling WritableStream* methods from within the strategy.size()
+// callback. This is not something any real code should ever do. Failures here indicate subtle deviations from the
+// standard that may affect real, non-pathological code.
+
+const error1 = { name: 'error1' };
+
+promise_test(() => {
+ let writer;
+ const strategy = {
+ size(chunk) {
+ if (chunk > 0) {
+ writer.write(chunk - 1);
+ }
+ return chunk;
+ }
+ };
+
+ const ws = recordingWritableStream({}, strategy);
+ writer = ws.getWriter();
+ return writer.write(2)
+ .then(() => {
+ assert_array_equals(ws.events, ['write', 0, 'write', 1, 'write', 2], 'writes should appear in order');
+ });
+}, 'writes should be written in the standard order');
+
+promise_test(() => {
+ let writer;
+ const events = [];
+ const strategy = {
+ size(chunk) {
+ events.push('size', chunk);
+ if (chunk > 0) {
+ writer.write(chunk - 1)
+ .then(() => events.push('writer.write done', chunk - 1));
+ }
+ return chunk;
+ }
+ };
+ const ws = new WritableStream({
+ write(chunk) {
+ events.push('sink.write', chunk);
+ }
+ }, strategy);
+ writer = ws.getWriter();
+ return writer.write(2)
+ .then(() => events.push('writer.write done', 2))
+ .then(() => flushAsyncEvents())
+ .then(() => {
+ assert_array_equals(events, ['size', 2, 'size', 1, 'size', 0,
+ 'sink.write', 0, 'sink.write', 1, 'writer.write done', 0,
+ 'sink.write', 2, 'writer.write done', 1,
+ 'writer.write done', 2],
+ 'events should happen in standard order');
+ });
+}, 'writer.write() promises should resolve in the standard order');
+
+promise_test(t => {
+ let controller;
+ const strategy = {
+ size() {
+ controller.error(error1);
+ return 1;
+ }
+ };
+ const ws = recordingWritableStream({
+ start(c) {
+ controller = c;
+ }
+ }, strategy);
+ const resolved = [];
+ const writer = ws.getWriter();
+ const readyPromise1 = writer.ready.then(() => resolved.push('ready1'));
+ const writePromise = promise_rejects_exactly(t, error1, writer.write(),
+ 'write() should reject with the error')
+ .then(() => resolved.push('write'));
+ const readyPromise2 = promise_rejects_exactly(t, error1, writer.ready, 'ready should reject with error1')
+ .then(() => resolved.push('ready2'));
+ const closedPromise = promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1')
+ .then(() => resolved.push('closed'));
+ return Promise.all([readyPromise1, writePromise, readyPromise2, closedPromise])
+ .then(() => {
+ assert_array_equals(resolved, ['ready1', 'write', 'ready2', 'closed'],
+ 'promises should resolve in standard order');
+ assert_array_equals(ws.events, [], 'underlying sink write should not be called');
+ });
+}, 'controller.error() should work when called from within strategy.size()');
+
+promise_test(t => {
+ let writer;
+ const strategy = {
+ size() {
+ writer.close();
+ return 1;
+ }
+ };
+
+ const ws = recordingWritableStream({}, strategy);
+ writer = ws.getWriter();
+ return promise_rejects_js(t, TypeError, writer.write('a'), 'write() promise should reject')
+ .then(() => {
+ assert_array_equals(ws.events, ['close'], 'sink.write() should not be called');
+ });
+}, 'close() should work when called from within strategy.size()');
+
+promise_test(t => {
+ let writer;
+ const strategy = {
+ size() {
+ writer.abort(error1);
+ return 1;
+ }
+ };
+
+ const ws = recordingWritableStream({}, strategy);
+ writer = ws.getWriter();
+ return promise_rejects_exactly(t, error1, writer.write('a'), 'write() promise should reject')
+ .then(() => {
+ assert_array_equals(ws.events, ['abort', error1], 'sink.write() should not be called');
+ });
+}, 'abort() should work when called from within strategy.size()');
+
+promise_test(t => {
+ let writer;
+ const strategy = {
+ size() {
+ writer.releaseLock();
+ return 1;
+ }
+ };
+
+ const ws = recordingWritableStream({}, strategy);
+ writer = ws.getWriter();
+ const writePromise = promise_rejects_js(t, TypeError, writer.write('a'), 'write() promise should reject');
+ const readyPromise = promise_rejects_js(t, TypeError, writer.ready, 'ready promise should reject');
+ const closedPromise = promise_rejects_js(t, TypeError, writer.closed, 'closed promise should reject');
+ return Promise.all([writePromise, readyPromise, closedPromise])
+ .then(() => {
+ assert_array_equals(ws.events, [], 'sink.write() should not be called');
+ });
+}, 'releaseLock() should abort the write() when called within strategy.size()');
+
+promise_test(t => {
+ let writer1;
+ let ws;
+ let writePromise2;
+ let closePromise;
+ let closedPromise2;
+ const strategy = {
+ size(chunk) {
+ if (chunk > 0) {
+ writer1.releaseLock();
+ const writer2 = ws.getWriter();
+ writePromise2 = writer2.write(0);
+ closePromise = writer2.close();
+ closedPromise2 = writer2.closed;
+ }
+ return 1;
+ }
+ };
+ ws = recordingWritableStream({}, strategy);
+ writer1 = ws.getWriter();
+ const writePromise1 = promise_rejects_js(t, TypeError, writer1.write(1), 'write() promise should reject');
+ const readyPromise = promise_rejects_js(t, TypeError, writer1.ready, 'ready promise should reject');
+ const closedPromise1 = promise_rejects_js(t, TypeError, writer1.closed, 'closed promise should reject');
+ return Promise.all([writePromise1, readyPromise, closedPromise1, writePromise2, closePromise, closedPromise2])
+ .then(() => {
+ assert_array_equals(ws.events, ['write', 0, 'close'], 'sink.write() should only be called once');
+ });
+}, 'original reader should error when new reader is created within strategy.size()');
diff --git a/testing/web-platform/tests/streams/writable-streams/start.any.js b/testing/web-platform/tests/streams/writable-streams/start.any.js
new file mode 100644
index 0000000000..17972b568c
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/start.any.js
@@ -0,0 +1,163 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
+'use strict';
+
+const error1 = { name: 'error1' };
+
+promise_test(() => {
+ let resolveStartPromise;
+ const ws = recordingWritableStream({
+ start() {
+ return new Promise(resolve => {
+ resolveStartPromise = resolve;
+ });
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');
+ writer.write('a');
+ assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after writer.write()');
+
+ // Wait and verify that write isn't called.
+ return flushAsyncEvents()
+ .then(() => {
+ assert_array_equals(ws.events, [], 'write should not be called until start promise resolves');
+ resolveStartPromise();
+ return writer.ready;
+ })
+ .then(() => assert_array_equals(ws.events, ['write', 'a'],
+ 'write should not be called until start promise resolves'));
+}, 'underlying sink\'s write should not be called until start finishes');
+
+promise_test(() => {
+ let resolveStartPromise;
+ const ws = recordingWritableStream({
+ start() {
+ return new Promise(resolve => {
+ resolveStartPromise = resolve;
+ });
+ }
+ });
+
+ const writer = ws.getWriter();
+
+ writer.close();
+ assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');
+
+ // Wait and verify that write isn't called.
+ return flushAsyncEvents().then(() => {
+ assert_array_equals(ws.events, [], 'close should not be called until start promise resolves');
+ resolveStartPromise();
+ return writer.closed;
+ });
+}, 'underlying sink\'s close should not be called until start finishes');
+
+test(() => {
+ const passedError = new Error('horrible things');
+
+ let writeCalled = false;
+ let closeCalled = false;
+ assert_throws_exactly(passedError, () => {
+ // recordingWritableStream cannot be used here because the exception in the
+ // constructor prevents assigning the object to a variable.
+ new WritableStream({
+ start() {
+ throw passedError;
+ },
+ write() {
+ writeCalled = true;
+ },
+ close() {
+ closeCalled = true;
+ }
+ });
+ }, 'constructor should throw passedError');
+ assert_false(writeCalled, 'write should not be called');
+ assert_false(closeCalled, 'close should not be called');
+}, 'underlying sink\'s write or close should not be called if start throws');
+
+promise_test(() => {
+ const ws = recordingWritableStream({
+ start() {
+ return Promise.reject();
+ }
+ });
+
+ // Wait and verify that write or close aren't called.
+ return flushAsyncEvents()
+ .then(() => assert_array_equals(ws.events, [], 'write and close should not be called'));
+}, 'underlying sink\'s write or close should not be invoked if the promise returned by start is rejected');
+
+promise_test(t => {
+ const ws = new WritableStream({
+ start() {
+ return {
+ then(onFulfilled, onRejected) { onRejected(error1); }
+ };
+ }
+ });
+ return promise_rejects_exactly(t, error1, ws.getWriter().closed, 'closed promise should be rejected');
+}, 'returning a thenable from start() should work');
+
+promise_test(t => {
+ const ws = recordingWritableStream({
+ start(controller) {
+ controller.error(error1);
+ }
+ });
+ return promise_rejects_exactly(t, error1, ws.getWriter().write('a'), 'write() should reject with the error')
+ .then(() => {
+ assert_array_equals(ws.events, [], 'sink write() should not have been called');
+ });
+}, 'controller.error() during start should cause writes to fail');
+
+promise_test(t => {
+ let controller;
+ let resolveStart;
+ const ws = recordingWritableStream({
+ start(c) {
+ controller = c;
+ return new Promise(resolve => {
+ resolveStart = resolve;
+ });
+ }
+ });
+ const writer = ws.getWriter();
+ const writePromise = writer.write('a');
+ const closePromise = writer.close();
+ controller.error(error1);
+ resolveStart();
+ return Promise.all([
+ promise_rejects_exactly(t, error1, writePromise, 'write() should fail'),
+ promise_rejects_exactly(t, error1, closePromise, 'close() should fail')
+ ]).then(() => {
+ assert_array_equals(ws.events, [], 'sink write() and close() should not have been called');
+ });
+}, 'controller.error() during async start should cause existing writes to fail');
+
+promise_test(t => {
+ const events = [];
+ const promises = [];
+ function catchAndRecord(promise, name) {
+ promises.push(promise.then(t.unreached_func(`promise ${name} should not resolve`),
+ () => {
+ events.push(name);
+ }));
+ }
+ const ws = new WritableStream({
+ start() {
+ return Promise.reject();
+ }
+ }, { highWaterMark: 0 });
+ const writer = ws.getWriter();
+ catchAndRecord(writer.ready, 'ready');
+ catchAndRecord(writer.closed, 'closed');
+ catchAndRecord(writer.write(), 'write');
+ return Promise.all(promises)
+ .then(() => {
+ assert_array_equals(events, ['ready', 'write', 'closed'], 'promises should reject in standard order');
+ });
+}, 'when start() rejects, writer promises should reject in standard order');
diff --git a/testing/web-platform/tests/streams/writable-streams/write.any.js b/testing/web-platform/tests/streams/writable-streams/write.any.js
new file mode 100644
index 0000000000..20a2885bf3
--- /dev/null
+++ b/testing/web-platform/tests/streams/writable-streams/write.any.js
@@ -0,0 +1,284 @@
+// META: global=window,worker,shadowrealm
+// META: script=../resources/test-utils.js
+// META: script=../resources/recording-streams.js
'use strict';

// Two distinct sentinel errors, used to verify that the *exact* error object
// handed to the sink or controller is the one that surfaces on writer promises.
const error1 = new Error('error1');
error1.name = 'error1';

const error2 = new Error('error2');
error2.name = 'error2';
+
// Queue every chunk of `array` on the writer in order, then close it.
// Returns the close() promise, which settles only after all queued writes
// have been processed by the underlying sink.
function writeArrayToStream(array, writableStreamWriter) {
  for (const chunk of array) {
    writableStreamWriter.write(chunk);
  }
  return writableStreamWriter.close();
}
+
promise_test(() => {
  let sinkContents;
  const ws = new WritableStream({
    start() {
      sinkContents = [];
    },

    // Each chunk lands asynchronously, one macrotask after it is written.
    write(chunk) {
      return delay(0).then(() => sinkContents.push(chunk));
    },

    close() {
      return delay(0);
    }
  });

  const writer = ws.getWriter();
  const chunks = [1, 2, 3, 4, 5];

  // close() must not settle until every async write above has completed.
  return writeArrayToStream(chunks, writer).then(() => {
    assert_array_equals(sinkContents, chunks, 'correct data should be relayed to underlying sink');
  });
}, 'WritableStream should complete asynchronous writes before close resolves');
+
promise_test(() => {
  const ws = recordingWritableStream();
  const writer = ws.getWriter();
  const chunks = [1, 2, 3, 4, 5];

  // The recording sink logs each operation; close must come after all writes.
  return writeArrayToStream(chunks, writer).then(() => {
    const expectedEvents = ['write', 1, 'write', 2, 'write', 3, 'write', 4, 'write', 5, 'close'];
    assert_array_equals(ws.events, expectedEvents, 'correct data should be relayed to underlying sink');
  });
}, 'WritableStream should complete synchronous writes before close resolves');
+
promise_test(() => {
  const ws = new WritableStream({
    write() {
      // The sink's return value must not leak out through writer.write().
      return 'Hello';
    }
  });

  const writer = ws.getWriter();

  return writer.write('a')
      .then(value => assert_equals(value, undefined, 'fulfillment value must be undefined'));
}, 'fulfillment value of ws.write() call should be undefined even if the underlying sink returns a non-undefined ' +
    'value');
+
promise_test(() => {
  let resolveSinkWritePromise;
  const ws = new WritableStream({
    write() {
      // Hold the sink's write pending until the test resolves it explicitly.
      return new Promise(resolve => {
        resolveSinkWritePromise = resolve;
      });
    }
  });

  const writer = ws.getWriter();

  // Default HWM is 1, so one chunk of capacity is available up front.
  assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');

  return writer.ready.then(() => {
    const writePromise = writer.write('a');
    let writePromiseResolved = false;
    assert_not_equals(resolveSinkWritePromise, undefined, 'resolveSinkWritePromise should not be undefined');

    // The in-flight write consumes the queue's capacity.
    assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after writer.write()');

    return Promise.all([
      writePromise.then(value => {
        writePromiseResolved = true;
        // resolveSinkWritePromise is cleared (set to undefined) right after it
        // is called below, so observing undefined here proves the sink's write
        // settled before writePromise did.
        assert_equals(resolveSinkWritePromise, undefined, 'sinkWritePromise should be fulfilled before writePromise');

        assert_equals(value, undefined, 'writePromise should be fulfilled with undefined');
      }),
      writer.ready.then(value => {
        assert_equals(resolveSinkWritePromise, undefined, 'sinkWritePromise should be fulfilled before writer.ready');
        assert_true(writePromiseResolved, 'writePromise should be fulfilled before writer.ready');

        // Capacity is restored once the in-flight write completes.
        assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 again');

        assert_equals(value, undefined, 'writePromise should be fulfilled with undefined');
      }),
      // Let pending microtasks drain, then acknowledge the sink write and
      // clear the resolver so the assertions above can detect ordering.
      flushAsyncEvents().then(() => {
        resolveSinkWritePromise();
        resolveSinkWritePromise = undefined;
      })
    ]);
  });
}, 'WritableStream should transition to waiting until write is acknowledged');
+
promise_test(t => {
  let sinkWritePromiseRejectors = [];
  const ws = new WritableStream({
    write() {
      // Every sink write stays pending until the test rejects it explicitly.
      const sinkWritePromise = new Promise((r, reject) => sinkWritePromiseRejectors.push(reject));
      return sinkWritePromise;
    }
  });

  const writer = ws.getWriter();

  assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');

  return writer.ready.then(() => {
    // First write goes straight to the sink.
    const writePromise = writer.write('a');
    assert_equals(sinkWritePromiseRejectors.length, 1, 'there should be 1 rejector');
    assert_equals(writer.desiredSize, 0, 'desiredSize should be 0');

    // Second write is queued behind the pending first one: the sink is not
    // called again, but desiredSize keeps dropping.
    const writePromise2 = writer.write('b');
    assert_equals(sinkWritePromiseRejectors.length, 1, 'there should be still 1 rejector');
    assert_equals(writer.desiredSize, -1, 'desiredSize should be -1');

    const closedPromise = writer.close();

    // Queuing close() does not change desiredSize.
    assert_equals(writer.desiredSize, -1, 'desiredSize should still be -1');

    return Promise.all([
      // Rejecting the in-flight sink write must also fail the queued write and
      // the pending close, all with the same error. The rejector list is
      // emptied immediately after rejecting, so length 0 proves ordering.
      promise_rejects_exactly(t, error1, closedPromise,
                              'closedPromise should reject with the error returned from the sink\'s write method')
          .then(() => assert_equals(sinkWritePromiseRejectors.length, 0,
                                    'sinkWritePromise should reject before closedPromise')),
      promise_rejects_exactly(t, error1, writePromise,
                              'writePromise should reject with the error returned from the sink\'s write method')
          .then(() => assert_equals(sinkWritePromiseRejectors.length, 0,
                                    'sinkWritePromise should reject before writePromise')),
      promise_rejects_exactly(t, error1, writePromise2,
                              'writePromise2 should reject with the error returned from the sink\'s write method')
          .then(() => assert_equals(sinkWritePromiseRejectors.length, 0,
                                    'sinkWritePromise should reject before writePromise2')),
      // After microtasks settle, reject the first (and only) sink write.
      flushAsyncEvents().then(() => {
        sinkWritePromiseRejectors[0](error1);
        sinkWritePromiseRejectors = [];
      })
    ]);
  });
}, 'when write returns a rejected promise, queued writes and close should be cleared');
+
promise_test(t => {
  const ws = new WritableStream({
    write() {
      throw error1;
    }
  });

  const writer = ws.getWriter();

  // The synchronous throw errors the stream; subsequent operations such as
  // close() then reject with a TypeError.
  const failedWrite = promise_rejects_exactly(
      t, error1, writer.write('a'),
      'write() should reject with the error returned from the sink\'s write method');

  return failedWrite.then(() =>
      promise_rejects_js(t, TypeError, writer.close(), 'close() should be rejected'));
}, 'when sink\'s write throws an error, the stream should become errored and the promise should reject');
+
promise_test(t => {
  const ws = new WritableStream({
    write(chunk, controller) {
      // error1 is reported via the controller first, then error2 is thrown:
      // write() observes the thrown error, ready/closed observe the
      // controller's error.
      controller.error(error1);
      throw error2;
    }
  });

  const writer = ws.getWriter();

  const writeRejected = promise_rejects_exactly(
      t, error2, writer.write('a'),
      'write() should reject with the error returned from the sink\'s write method ');

  return writeRejected.then(() => Promise.all([
    promise_rejects_exactly(t, error1, writer.ready,
                            'writer.ready must reject with the error passed to the controller'),
    promise_rejects_exactly(t, error1, writer.closed,
                            'writer.closed must reject with the error passed to the controller')
  ]));
}, 'writer.write(), ready and closed reject with the error passed to controller.error() made before sink.write' +
    ' rejection');
+
promise_test(() => {
  const numberOfWrites = 1000;

  let unblockFirstWrite;
  let sinkWriteCalls = 0;
  const ws = new WritableStream({
    write() {
      ++sinkWriteCalls;
      // Block only the very first write so the rest pile up in the queue.
      if (!unblockFirstWrite) {
        return new Promise(resolve => {
          unblockFirstWrite = resolve;
        });
      }
      return Promise.resolve();
    }
  });

  const writer = ws.getWriter();
  return writer.ready.then(() => {
    for (let i = 1; i < numberOfWrites; ++i) {
      writer.write('a');
    }
    const lastWritePromise = writer.write('a');

    assert_equals(sinkWriteCalls, 1, 'should have called sink\'s write once');

    unblockFirstWrite();

    // Once unblocked, the entire queue should drain through the sink.
    return lastWritePromise.then(() =>
        assert_equals(sinkWriteCalls, numberOfWrites, `should have called sink's write ${numberOfWrites} times`));
  });
}, 'a large queue of writes should be processed completely');
+
promise_test(() => {
  const stream = recordingWritableStream();

  // Obtain the WritableStreamDefaultWriter constructor from an instance,
  // then release the lock so a new writer can be constructed by hand.
  const firstWriter = stream.getWriter();
  const WritableStreamDefaultWriter = firstWriter.constructor;
  firstWriter.releaseLock();

  const writer = new WritableStreamDefaultWriter(stream);
  return writer.ready.then(() => {
    writer.write('a');
    assert_array_equals(stream.events, ['write', 'a'], 'write() should be passed to sink');
  });
}, 'WritableStreamDefaultWriter should work when manually constructed');
+
promise_test(() => {
  let thenCalled = false;
  // A plain object with a then() method — not a real Promise — returned from
  // the sink's write() must still be awaited by the stream machinery.
  const thenable = {
    then(onFulfilled) {
      thenCalled = true;
      onFulfilled();
    }
  };
  const ws = new WritableStream({
    write() {
      return thenable;
    }
  });
  return ws.getWriter().write('a').then(() => assert_true(thenCalled, 'thenCalled should be true'));
}, 'returning a thenable from write() should work');
+
promise_test(() => {
  const ws = new WritableStream();
  const originalWriter = ws.getWriter();
  const WritableStreamDefaultWriter = originalWriter.constructor;

  // Constructing a second writer on a locked stream must throw...
  assert_throws_js(TypeError, () => new WritableStreamDefaultWriter(ws),
                   'should not be able to construct on locked stream');

  // ...and must not detach the original writer: if the stream's [[writer]]
  // slot no longer points at |originalWriter| then the closed Promise
  // won't work properly.
  return Promise.all([originalWriter.close(), originalWriter.closed]);
}, 'failing DefaultWriter constructor should not release an existing writer');
+
promise_test(t => {
  const ws = new WritableStream({
    start() {
      // Fail asynchronously before any write can be accepted.
      return Promise.reject(error1);
    }
  }, { highWaterMark: 0 });

  const writer = ws.getWriter();
  const readyFails = promise_rejects_exactly(t, error1, writer.ready, 'ready should be rejected');
  const writeFails = promise_rejects_exactly(t, error1, writer.write(), 'write() should be rejected');
  return Promise.all([readyFails, writeFails]);
}, 'write() on a stream with HWM 0 should not cause the ready Promise to resolve');
+
promise_test(t => {
  const writer = new WritableStream().getWriter();
  writer.releaseLock();
  // Once the lock is released the writer is detached, so write() must reject.
  return promise_rejects_js(t, TypeError, writer.write(), 'write should reject');
}, 'writing to a released writer should reject the returned promise');