summaryrefslogtreecommitdiffstats
path: root/dom/media/mediasource/test/test_AudioChange_mp4_WebAudio.html
blob: c76342f79398559b5d1989a23719584d8d537462 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
<!DOCTYPE HTML>
<html>
<head>
  <title>MSE: basic functionality</title>
  <script src="/tests/SimpleTest/SimpleTest.js"></script>
  <script type="text/javascript" src="mediasource.js"></script>
  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<pre id="test">
<script class="testbody" type="text/javascript">

SimpleTest.waitForExplicitFinish();

// Verify that appending a stereo segment followed by a 5.1 segment plays
// through without error while the element's audio output is routed into a
// Web Audio graph (AudioContext), exercising the channel-change path.

const audioCtx = new AudioContext();

runWithMSE(async (ms, el) => {
  el.controls = true;

  // Route the media element's output through the AudioContext.
  const elementSource = audioCtx.createMediaElementSource(el);
  elementSource.connect(audioCtx.destination);

  await once(ms, "sourceopen");
  ok(true, "Receive a sourceopen event");
  logEvents(el);

  const audiosb = ms.addSourceBuffer("audio/mp4");

  // Any error event means the channel change broke playback — fail fast.
  el.addEventListener("error", e => {
    ok(false, `should not fire ${e.type} event`);
    SimpleTest.finish();
  });

  is(el.readyState, el.HAVE_NOTHING, "readyState is HAVE_NOTHING");

  // Arm each event listener before the append so the event cannot be missed.
  const metadataLoaded = once(el, "loadedmetadata");
  await fetchAndLoad(audiosb, "aac20-48000-64000-", ["init"], ".mp4");
  await metadataLoaded;
  ok(true, "got loadedmetadata event");

  const readyToPlay = Promise.all([once(el, "loadeddata"), once(el, "canplay")]);
  await fetchAndLoad(audiosb, "aac20-48000-64000-", ["1"], ".m4s");
  await readyToPlay;
  ok(true, "got canplay event");

  el.play();

  // Switch configuration mid-stream: 5.1 init segment, then its media segment.
  await fetchAndLoad(audiosb, "aac51-48000-128000-", ["init"], ".mp4");
  await fetchAndLoad(audiosb, "aac51-48000-128000-", ["2"], ".m4s");
  ms.endOfStream();

  await once(el, "ended");
  ok(el.currentTime >= 6, "played to the end");
  SimpleTest.finish();
});

</script>
</pre>
</body>
</html>