path: root/dom/media/webaudio/test/test_audioContextParams_sampleRate.html
<!DOCTYPE HTML>
<html>
<head>
  <script type="application/javascript" src="mediaStreamPlayback.js"></script>
  <script type="text/javascript" src="webaudio.js"></script>
</head>
<body>
<pre id="test">

<script>
  createHTML({
    title: "Parallel MTG by setting AudioContextParam sample rate",
    bug: "1387454",
    visible: true
  });

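  // Assumption: FAKE_ENABLED (defined in mediaStreamPlayback.js) controls the
  // fake media-stream backend; it is disabled here so the analysers below see
  // real rendered audio.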
  scriptsReady
  .then(() => FAKE_ENABLED = false)
  .then(() => runTestWhenReady( async () => {
    // Test an AudioContext of specific sample rate.
    // Verify that the oscillator produces a tone.
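    // Note: rate1 is the oscillator's tone frequency in Hz, not a sample rate.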
    let rate1 = 500;
    let ac1 = new AudioContext({sampleRate: 44100});
    let dest_ac1 = ac1.createMediaStreamDestination();
    let osc_ac1 = ac1.createOscillator();
    osc_ac1.frequency.value = rate1;
    osc_ac1.connect(dest_ac1);
    osc_ac1.start(0);

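    // Analyse dest_ac1's stream: the FFT bin at the 500 Hz tone should
    // dominate (> 200) while the 50 Hz and 4000 Hz bins stay near the noise
    // floor (< 50).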
    let analyser = new AudioStreamAnalyser(ac1, dest_ac1.stream);
    analyser.enableDebugCanvas();
    await analyser.waitForAnalysisSuccess( array => {
      const freq_50Hz   = array[analyser.binIndexForFrequency(50)];
      const freq_rate1  = array[analyser.binIndexForFrequency(rate1)];
      const freq_4000Hz = array[analyser.binIndexForFrequency(4000)];

      info("Analysing audio frequency - low:target1:high = "
              + freq_50Hz + ':' + freq_rate1 + ':' + freq_4000Hz);
      return freq_50Hz < 50 && freq_rate1 > 200 && freq_4000Hz < 50;
    });
    osc_ac1.stop();

    // Same test, using a new AudioContext with a different sample rate.
    let rate2 = 1500;
    let ac2 = new AudioContext({sampleRate: 48000});
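    // Requesting a different sample rate spins up a second media graph (the
    // "parallel MTG" from the test title) alongside the 44100 Hz one above.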
    let dest_ac2 = ac2.createMediaStreamDestination();
    let osc_ac2 = ac2.createOscillator();
    osc_ac2.frequency.value = rate2;
    osc_ac2.connect(dest_ac2);
    osc_ac2.start(0);

    let analyser2 = new AudioStreamAnalyser(ac2, dest_ac2.stream);
    analyser2.enableDebugCanvas();
    await analyser2.waitForAnalysisSuccess( array => {
      const freq_50Hz   = array[analyser2.binIndexForFrequency(50)];
      const freq_rate2  = array[analyser2.binIndexForFrequency(rate2)];
      const freq_4000Hz = array[analyser2.binIndexForFrequency(4000)];

      info("Analysing audio frequency - low:target2:high = "
              + freq_50Hz + ':' + freq_rate2 + ':' + freq_4000Hz);
      return freq_50Hz < 50 && freq_rate2 > 200 && freq_4000Hz < 50;
    });
    osc_ac2.stop();

    // Two AudioContexts with different sample rates cannot communicate.
    mustThrowWith("Connect nodes with different sample rates", "NotSupportedError",
                  () => ac2.createMediaStreamSource(dest_ac1.stream));

    // Two AudioContexts with the same sample rate can communicate.
    let ac3 = new AudioContext({sampleRate: 48000});
    let dest_ac3 = ac3.createMediaStreamDestination();
    let source_ac2 = ac2.createMediaStreamSource(dest_ac3.stream);
    ok(true, "Connecting nodes with the same sample rate is OK");

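    // Invalid sample rates (zero or negative) must be rejected with
    // NotSupportedError.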
    expectException(function() {
      new AudioContext({sampleRate: 0});
    }, DOMException.NOT_SUPPORTED_ERR);

    expectException(function() {
      new AudioContext({sampleRate: -1});
    }, DOMException.NOT_SUPPORTED_ERR);

  }))
  .then(() => finish());
</script>
</pre>
</body>
</html>