path: root/dom/media/webrtc/tests/mochitests/test_getUserMedia_mediaElementCapture_audio.html
<!DOCTYPE HTML>
<html>
<head>
  <script type="application/javascript" src="mediaStreamPlayback.js"></script>
</head>
<body>
<pre id="test">
<script>

createHTML({
  bug: "1259788",
  title: "Test CaptureStream audio content on HTMLMediaElement playing a gUM MediaStream",
  visible: true
});

let audioContext;
let gUMAudioElement;
let analyser;
let tone;
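// Overall flow: play a gUM audio stream through an <audio> element, capture
// the element's output with mozCaptureStream(), and use a Web Audio analyser
// to check that audio flows (or stops flowing) as the element is paused,
// played, detached, reattached, and as tracks are added and removed.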
runTest(() => getUserMedia({audio: { echoCancellation: false }})
  .then(stream => {
    gUMAudioElement = createMediaElement("audio", "gUMAudio");
    gUMAudioElement.srcObject = stream;

    audioContext = new AudioContext();
    // Start a tone so that the gUM call will record something even with
    // --use-test-media-devices.
    tone = new LoopbackTone(audioContext, TEST_AUDIO_FREQ);
    tone.start();

    info("Capturing");

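    // Analyse the captured stream and wait until the gUM tone dominates its
    // frequency bin while the neighbouring 50 Hz and 2500 Hz bins stay quiet.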
    analyser = new AudioStreamAnalyser(audioContext,
                                       gUMAudioElement.mozCaptureStream());
    analyser.enableDebugCanvas();
    return analyser.waitForAnalysisSuccess(array =>
      array[analyser.binIndexForFrequency(50)]              < 50 &&
      array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] > 200 &&
      array[analyser.binIndexForFrequency(2500)]            < 50);
  })
  .then(() => {
    info("Audio flowing. Pausing.");
    gUMAudioElement.pause();

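    // While the element is paused, its captured output should render silence.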
    return analyser.waitForAnalysisSuccess(array =>
      array[analyser.binIndexForFrequency(50)]              < 50 &&
      array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] < 50 &&
      array[analyser.binIndexForFrequency(2500)]            < 50);
  })
  .then(() => {
    info("Audio stopped flowing. Playing.");
    gUMAudioElement.play();

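    // Once playback resumes, the gUM tone should be audible in the capture again.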
    return analyser.waitForAnalysisSuccess(array =>
      array[analyser.binIndexForFrequency(50)]              < 50 &&
      array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] > 200 &&
      array[analyser.binIndexForFrequency(2500)]            < 50);
  })
  .then(() => {
    info("Audio flowing. Removing source.");
    const stream = gUMAudioElement.srcObject;
    gUMAudioElement.srcObject = null;

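    // With no source attached the capture should go silent. Hold on to the
    // stream so it can be reattached in the next step.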
    return analyser.waitForAnalysisSuccess(array =>
      array[analyser.binIndexForFrequency(50)]              < 50 &&
      array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] < 50 &&
      array[analyser.binIndexForFrequency(2500)]            < 50)
        .then(() => stream);
  })
  .then(stream => {
    info("Audio stopped flowing. Setting source.");
    gUMAudioElement.srcObject = stream;

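    // Reattaching the same stream should make the gUM tone flow again.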
    return analyser.waitForAnalysisSuccess(array =>
      array[analyser.binIndexForFrequency(50)]              < 50 &&
      array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] > 200 &&
      array[analyser.binIndexForFrequency(2500)]            < 50);
  })
  .then(() => {
    info("Audio flowing from new source. Adding a track.");
    let oscillator = audioContext.createOscillator();
    oscillator.type = "sine";
    oscillator.frequency.value = 2000;
    oscillator.start();

    let oscOut = audioContext.createMediaStreamDestination();
    oscillator.connect(oscOut);

    gUMAudioElement.srcObject.addTrack(oscOut.stream.getTracks()[0]);

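    // Both the gUM tone and the new 2000 Hz oscillator track should now be
    // audible in the capture, with the neighbouring bins (50 Hz, 1500 Hz,
    // 2500 Hz) staying quiet.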
    return analyser.waitForAnalysisSuccess(array =>
      array[analyser.binIndexForFrequency(50)]              < 50 &&
      array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] > 200 &&
      array[analyser.binIndexForFrequency(1500)]            < 50 &&
      array[analyser.binIndexForFrequency(2000)]            > 200 &&
      array[analyser.binIndexForFrequency(2500)]            < 50);
  })
  .then(() => {
    info("Audio flowing from new track. Removing a track.");

    const gUMTrack = gUMAudioElement.srcObject.getTracks()[0];
    gUMAudioElement.srcObject.removeTrack(gUMTrack);

    is(gUMAudioElement.srcObject.getTracks().length, 1,
       "A track should have been removed");

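    // With the gUM track removed, only the 2000 Hz oscillator should remain
    // in the capture. Afterwards, stop all tracks (including the removed gUM
    // track) to clean up.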
    return analyser.waitForAnalysisSuccess(array =>
      array[analyser.binIndexForFrequency(50)]              < 50 &&
      array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] < 50 &&
      array[analyser.binIndexForFrequency(1500)]            < 50 &&
      array[analyser.binIndexForFrequency(2000)]            > 200 &&
      array[analyser.binIndexForFrequency(2500)]            < 50)
        .then(() => [gUMTrack, ...gUMAudioElement.srcObject.getTracks()]
            .forEach(t => t.stop()));
  })
  .then(() => ok(true, "Test passed."))
  .catch(e => ok(false, "Test failed: " + e + (e.stack ? "\n" + e.stack : "")))
  .finally(() => tone.stop()));

</script>
</pre>
</body>
</html>