path: root/testing/web-platform/tests/compression/decompression-with-detach.tentative.window.js
// META: global=window,worker,shadowrealm
// META: script=resources/concatenate-stream.js

'use strict';

const kInputLength = 1000000;

async function createLargeCompressedInput() {
  const cs = new CompressionStream('deflate');
  // The input has to be large enough that it won't fit in a single chunk when
  // decompressed.
  const writer = cs.writable.getWriter();
  writer.write(new Uint8Array(kInputLength));
  writer.close();
  return concatenateStream(cs.readable);
}
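
// concatenateStream() is provided by resources/concatenate-stream.js, which is
// not shown in this file. As a rough, hypothetical sketch (the name
// concatenateStreamSketch and the details below are assumptions, not the real
// helper), such a function could drain a ReadableStream of Uint8Array chunks
// and join them into a single Uint8Array:
async function concatenateStreamSketch(readableStream) {
  const reader = readableStream.getReader();
  const chunks = [];
  let totalLength = 0;
  while (true) {
    const {value, done} = await reader.read();
    if (done) break;
    chunks.push(value);
    totalLength += value.byteLength;
  }
  // Copy every chunk into one contiguous buffer.
  const result = new Uint8Array(totalLength);
  let offset = 0;
  for (const chunk of chunks) {
    result.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return result;
}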

promise_test(async () => {
  const input = await createLargeCompressedInput();
  const ds = new DecompressionStream('deflate');
  const writer = ds.writable.getWriter();
  writer.write(input);
  writer.close();
  // Object.prototype.then will be looked up synchronously when the promise
  // returned by read() is resolved: resolving a promise with a plain object
  // (here the {value, done} result) triggers a synchronous Get of its 'then'
  // property, so this getter runs in the middle of decompression.
  Object.defineProperty(Object.prototype, 'then', {
    get() {
      // Cause input to become detached and unreferenced.
      try {
        postMessage(undefined, 'nowhere', [input.buffer]);
      } catch (e) {
        // It's already detached.
      }
    }
  });
  const output = await concatenateStream(ds.readable);
  // If output successfully decompressed and gave the right length, we can be
  // reasonably confident that no data corruption happened.
  assert_equals(
      output.byteLength, kInputLength, 'output should be the right length');
}, 'data should be correctly decompressed even if input is detached partway');
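
// Illustrative sketch, not part of the test above: detaching an ArrayBuffer in
// isolation. structuredClone() with a transfer list detaches the transferred
// buffer in the calling realm, which is the effect the Object.prototype.then
// getter above aims for with postMessage(). The helper name is hypothetical.
function detachBufferSketch(buffer) {
  structuredClone(buffer, {transfer: [buffer]});
  // A detached ArrayBuffer reports a byteLength of 0, and existing views over
  // it drop to zero length.
  return buffer.byteLength === 0;
}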