testing/web-platform/tests/compression/compression-large-flush-output.any.js
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: script=resources/concatenate-stream.js
// META: timeout=long

'use strict';

// This test verifies that a large output produced when the stream is flushed
// (on close) does not get truncated in the final result.

async function compressData(chunk, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  // The write() and close() promises are intentionally not awaited here:
  // concatenateStream() below drains cs.readable, which is what lets the
  // queued write and close complete.
  writer.write(chunk);
  writer.close();
  return await concatenateStream(cs.readable);
}
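
// Note: concatenateStream() is provided by resources/concatenate-stream.js,
// which is loaded by the META line above but not shown here. As an assumption
// about its behavior, a minimal sketch of an equivalent helper would be:
//
//   async function concatenateStream(readableStream) {
//     const reader = readableStream.getReader();
//     const chunks = [];
//     let totalSize = 0;
//     while (true) {
//       const {value, done} = await reader.read();
//       if (done) break;
//       chunks.push(value);
//       totalSize += value.byteLength;
//     }
//     // Copy every chunk into a single contiguous Uint8Array.
//     const result = new Uint8Array(totalSize);
//     let offset = 0;
//     for (const chunk of chunks) {
//       result.set(chunk, offset);
//       offset += chunk.byteLength;
//     }
//     return result;
//   }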

// JSON-encoded array of 10,000 numbers ("[0,1,2,...]"). This produces 48_891 bytes of data.
const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10_000 }, (_, i) => i)));
const data = fullData.subarray(0, 35_579);
const expectedValue = data;

promise_test(async t => {
  const compressedData = await compressData(data, 'deflate');
  // Decompress with pako and check that we got the same bytes as the original data.
  assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `deflate compression with large flush output`);

promise_test(async t => {
  const compressedData = await compressData(data, 'gzip');
  // Decompress with pako and check that we got the same bytes as the original data.
  assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `gzip compression with large flush output`);

promise_test(async t => {
  const compressedData = await compressData(data, 'deflate-raw');
  // Decompress with pako and check that we got the same bytes as the original data.
  assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
}, `deflate-raw compression with large flush output`);
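
// For reference only (not part of the test): a similar round trip could be
// checked with the native DecompressionStream API, e.g.
//
//   async function decompressData(chunk, format) {
//     const ds = new DecompressionStream(format);
//     const writer = ds.writable.getWriter();
//     writer.write(chunk);
//     writer.close();
//     return await concatenateStream(ds.readable);
//   }
//
// The test uses pako instead, presumably so that decompression is performed by
// an implementation independent of the engine under test.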