path: root/testing/web-platform/tests/compression/compression-stream.tentative.any.js
// META: global=window,worker
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long

'use strict';

const SMALL_FILE = "/media/foo.vtt";
const LARGE_FILE = "/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm";

// Compresses |input| with a CompressionStream of the given |format| and
// returns the compressed bytes as a single Uint8Array.
async function compressArrayBuffer(input, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  writer.write(input);
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  // Read every compressed chunk from the readable side.
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;
  // Concatenate the chunks into a single Uint8Array.
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}
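
// The tests below verify the compressed output by reinflating it with pako. As
// an illustrative sketch only (no test here relies on it), the reverse round
// trip could also be done natively with DecompressionStream, assuming the
// implementation supports the same "deflate" and "gzip" formats; the helper
// name decompressArrayBuffer is hypothetical.
async function decompressArrayBuffer(input, format) {
  const ds = new DecompressionStream(format);
  const decompressed = new Blob([input]).stream().pipeThrough(ds);
  return new Uint8Array(await new Response(decompressed).arrayBuffer());
}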

test(() => {
  assert_throws_js(TypeError, () => {
    const transformer = new CompressionStream("nonvalid");
  }, "non supported format should throw");
}, "CompressionStream constructor should throw on invalid format");

promise_test(async () => {
  const buffer = new ArrayBuffer(0);
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "deflate");
  // Decompress with pako and check that the result matches the original data.
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "deflated empty data should be reinflated back to its original form");

promise_test(async () => {
  const response = await fetch(SMALL_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "deflate");
  // Decompress with pako and check that the result matches the original data.
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "a small amount of deflated data should be reinflated back to its original form");

promise_test(async () => {
  const response = await fetch(LARGE_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "deflate");
  // Decompress with pako and check that the result matches the original data.
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "a large amount of deflated data should be reinflated back to its original form");

promise_test(async () => {
  const buffer = new ArrayBuffer(0);
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "gzip");
  // Decompress with pako and check that the result matches the original data.
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "gzipped empty data should be reinflated back to its original form");

promise_test(async () => {
  const response = await fetch(SMALL_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "gzip");
  // Decompress with pako and check that the result matches the original data.
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "a small amount of gzipped data should be reinflated back to its original form");

promise_test(async () => {
  const response = await fetch(LARGE_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const compressedData = await compressArrayBuffer(bufferView, "gzip");
  // Decompress with pako and check that the result matches the original data.
  assert_array_equals(bufferView, pako.inflate(compressedData));
}, "a large amount of gzipped data should be reinflated back to its original form");