author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-21 20:56:19 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-21 20:56:19 +0000
commit    0b6210cd37b68b94252cb798598b12974a20e1c1 (patch)
tree      e371686554a877842d95aa94f100bee552ff2a8e /test/wpt/tests/FileAPI/blob/Blob-stream.any.js
parent    Initial commit. (diff)
Adding upstream version 5.28.2+dfsg1+~cs23.11.12.3. (tag: upstream/5.28.2+dfsg1+_cs23.11.12.3, branch: upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'test/wpt/tests/FileAPI/blob/Blob-stream.any.js')
-rw-r--r--  test/wpt/tests/FileAPI/blob/Blob-stream.any.js  83
1 file changed, 83 insertions, 0 deletions
diff --git a/test/wpt/tests/FileAPI/blob/Blob-stream.any.js b/test/wpt/tests/FileAPI/blob/Blob-stream.any.js
new file mode 100644
index 0000000..87710a1
--- /dev/null
+++ b/test/wpt/tests/FileAPI/blob/Blob-stream.any.js
@@ -0,0 +1,83 @@
+// META: title=Blob Stream
+// META: script=../support/Blob.js
+// META: script=/common/gc.js
+'use strict';
+
+// Helper function that triggers garbage collection while reading a chunk
+// if perform_gc is true.
+async function read_and_gc(reader, perform_gc) {
+ // Passing Uint8Array for byte streams; non-byte streams will simply ignore it
+ const read_promise = reader.read(new Uint8Array(64));
+ if (perform_gc) {
+ await garbageCollect();
+ }
+ return read_promise;
+}
+
+// Takes in a ReadableStream and reads from it until it is done, returning
+// an array that contains the results of each read operation. If perform_gc
+// is true, garbage collection is triggered while reading every chunk.
+async function read_all_chunks(stream, { perform_gc = false, mode } = {}) {
+ assert_true(stream instanceof ReadableStream);
+ assert_true('getReader' in stream);
+ const reader = stream.getReader({ mode });
+
+ assert_true('read' in reader);
+ let read_value = await read_and_gc(reader, perform_gc);
+
+ let out = [];
+ let i = 0;
+ while (!read_value.done) {
+ for (let val of read_value.value) {
+ out[i++] = val;
+ }
+ read_value = await read_and_gc(reader, perform_gc);
+ }
+ return out;
+}
+
+promise_test(async () => {
+ const blob = new Blob(["PASS"]);
+ const stream = blob.stream();
+ const chunks = await read_all_chunks(stream);
+ for (let [index, value] of chunks.entries()) {
+ assert_equals(value, "PASS".charCodeAt(index));
+ }
+}, "Blob.stream()")
+
+promise_test(async () => {
+ const blob = new Blob();
+ const stream = blob.stream();
+ const chunks = await read_all_chunks(stream);
+ assert_array_equals(chunks, []);
+}, "Blob.stream() empty Blob")
+
+promise_test(async () => {
+ const input_arr = [8, 241, 48, 123, 151];
+ const typed_arr = new Uint8Array(input_arr);
+ const blob = new Blob([typed_arr]);
+ const stream = blob.stream();
+ const chunks = await read_all_chunks(stream);
+ assert_array_equals(chunks, input_arr);
+}, "Blob.stream() non-unicode input")
+
+promise_test(async() => {
+ const input_arr = [8, 241, 48, 123, 151];
+ const typed_arr = new Uint8Array(input_arr);
+ let blob = new Blob([typed_arr]);
+ const stream = blob.stream();
+ blob = null;
+ await garbageCollect();
+ const chunks = await read_all_chunks(stream, { perform_gc: true });
+ assert_array_equals(chunks, input_arr);
+}, "Blob.stream() garbage collection of blob shouldn't break stream" +
+ "consumption")
+
+promise_test(async () => {
+ const input_arr = [8, 241, 48, 123, 151];
+ const typed_arr = new Uint8Array(input_arr);
+ let blob = new Blob([typed_arr]);
+ const stream = blob.stream();
+ const chunks = await read_all_chunks(stream, { mode: "byob" });
+ assert_array_equals(chunks, input_arr);
+}, "Reading Blob.stream() with BYOB reader")