<!DOCTYPE HTML>
<html>
<head>
  <script type="application/javascript" src="mediaStreamPlayback.js"></script>
</head>
<body>
<pre id="test">
<script>
/* import-globals-from ../../webrtc/tests/mochitests/mediaStreamPlayback.js */
createHTML({
title: "Parallel MTG by setting AudioContextParam sample rate",
bug: "1387454",
visible: true
});
runTest(async () => {
  // Test an AudioContext with a specific sample rate.
  // Verify that the oscillator produces a tone.
  const rate1 = 500;
  const ac1 = new AudioContext({sampleRate: 44100});
  const dest_ac1 = ac1.createMediaStreamDestination();
  const osc_ac1 = ac1.createOscillator();
  osc_ac1.frequency.value = rate1;
  osc_ac1.connect(dest_ac1);
  osc_ac1.start(0);
  const analyser = new AudioStreamAnalyser(ac1, dest_ac1.stream);
  analyser.enableDebugCanvas();
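  // Wait until the spectrum shows energy at the oscillator frequency (rate1)
  // and no significant energy at 50 Hz or 4000 Hz.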
  await analyser.waitForAnalysisSuccess(array => {
    const freq_50Hz = array[analyser.binIndexForFrequency(50)];
    const freq_rate1 = array[analyser.binIndexForFrequency(rate1)];
    const freq_4000Hz = array[analyser.binIndexForFrequency(4000)];
    info("Analysing audio frequency - low:target1:high = " +
         freq_50Hz + ':' + freq_rate1 + ':' + freq_4000Hz);
    return freq_50Hz < 50 && freq_rate1 > 200 && freq_4000Hz < 50;
  });
  osc_ac1.stop();
  // Same test using a new AudioContext with a different sample rate.
  const rate2 = 1500;
  const ac2 = new AudioContext({sampleRate: 48000});
  const dest_ac2 = ac2.createMediaStreamDestination();
  const osc_ac2 = ac2.createOscillator();
  osc_ac2.frequency.value = rate2;
  osc_ac2.connect(dest_ac2);
  osc_ac2.start(0);
  const analyser2 = new AudioStreamAnalyser(ac2, dest_ac2.stream);
  analyser2.enableDebugCanvas();
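  // Repeat the spectrum check, this time against the second oscillator frequency (rate2).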
  await analyser2.waitForAnalysisSuccess(array => {
    const freq_50Hz = array[analyser2.binIndexForFrequency(50)];
    const freq_rate2 = array[analyser2.binIndexForFrequency(rate2)];
    const freq_4000Hz = array[analyser2.binIndexForFrequency(4000)];
    info("Analysing audio frequency - low:target2:high = " +
         freq_50Hz + ':' + freq_rate2 + ':' + freq_4000Hz);
    return freq_50Hz < 50 && freq_rate2 > 200 && freq_4000Hz < 50;
  });
  osc_ac2.stop();
  // Two AudioContexts with different sample rates cannot communicate.
  mustThrowWith("Connect nodes with different sample rate", "NotSupportedError",
                () => ac2.createMediaStreamSource(dest_ac1.stream));
  // Two AudioContexts with the same sample rate can communicate.
  const ac3 = new AudioContext({sampleRate: 48000});
  const dest_ac3 = ac3.createMediaStreamDestination();
  ac2.createMediaStreamSource(dest_ac3.stream);
  ok(true, "Connecting nodes with the same sample rate is ok");
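  // The AudioContext constructor must reject non-positive sample rates.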
mustThrowWith("Invalid zero samplerate", "NotSupportedError",
() => new AudioContext({sampleRate: 0}));
mustThrowWith("Invalid negative samplerate", "NotSupportedError",
() => new AudioContext({sampleRate: -1}));
});
</script>
</pre>
</body>
</html>