path: root/dom/media/test/test_mediarecorder_record_addtracked_stream.html
<!DOCTYPE HTML>
<html>
<head>
  <title>Test MediaRecorder recording a constructed MediaStream</title>
  <script src="/tests/SimpleTest/SimpleTest.js"></script>
  <script src="/tests/dom/canvas/test/captureStream_common.js"></script>
  <script src="/tests/dom/media/webrtc/tests/mochitests/head.js"></script>
  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<pre id="test">
<div id="content">
</div>
<script>
SimpleTest.waitForExplicitFinish();
runTestWhenReady(async () => {
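  // Set up a 100x100 canvas painted red; its captured stream will provide the
  // video track that gets added to the recorded stream below.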
  const canvas = document.createElement("canvas");
  canvas.width = canvas.height = 100;
  document.getElementById("content").appendChild(canvas);

  const helper = new CaptureStreamTestHelper2D(100, 100);
  helper.drawColor(canvas, helper.red);

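  // Generate a 1kHz tone and route it to a MediaStreamAudioDestinationNode;
  // its stream provides the audio track for the recording.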
  const audioCtx = new AudioContext();
  const osc = audioCtx.createOscillator();
  osc.frequency.value = 1000;
  osc.start();
  const dest = audioCtx.createMediaStreamDestination();
  osc.connect(dest);

  const stream = dest.stream;

  // Intermittent timeouts occur on Linux when no sound has reached the
  // destination yet. As a workaround, wait for the source sound to arrive
  // before starting the recording.
  const sourceAnalyser = new AudioStreamAnalyser(audioCtx, stream);
  const sourceAudioReady = sourceAnalyser.waitForAnalysisSuccess(array => {
    const freq = osc.frequency.value;
    const lowerFreq = freq / 2;
    const upperFreq = freq + 1000;
    const lowerAmp = array[sourceAnalyser.binIndexForFrequency(lowerFreq)];
    const freqAmp = array[sourceAnalyser.binIndexForFrequency(freq)];
    const upperAmp = array[sourceAnalyser.binIndexForFrequency(upperFreq)];
    info("Analysing source audio. "
         + lowerFreq + ": " + lowerAmp + ", "
         + freq + ": " + freqAmp + ", "
         + upperFreq + ": " + upperAmp);
    return lowerAmp < 50 && freqAmp > 200 && upperAmp < 50;
  });
  await sourceAudioReady;
  info("Source Audio content ok");

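  // Add the canvas capture's video track to the audio-only stream so the
  // recorder gets both an audio and a video track.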
  canvas.captureStream(0).getVideoTracks().forEach(t => stream.addTrack(t));

  const blobs = [];

  const mediaRecorder = new MediaRecorder(stream);
  is(mediaRecorder.stream, stream,
     "Media recorder stream = constructed stream at the start of recording");


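  // start() is called without a timeslice below, so all recorded data is
  // expected in a single "dataavailable" event after the recorder is stopped.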
  mediaRecorder.ondataavailable = evt => {
    info("ondataavailable fired");

    is(mediaRecorder.state, "inactive", "Blob received after stopping");
    is(blobs.length, 0, "This is the first and only blob");
    ok(evt instanceof BlobEvent,
       "Event fired from ondataavailable should be a BlobEvent");
    is(evt.type, "dataavailable",
       "Event type should be dataavailable");
    ok(evt.data.size >= 0,
       "Blob data size received is greater than or equal to zero");

    blobs.push(evt.data);
  };

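  // Register these before start(): wait up to 5 seconds for the "stop" event
  // and fail if a "warning" or "error" event fires before it.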
  const stopped = haveEvent(mediaRecorder, "stop", wait(5000, new Error("Timeout")));
  const stoppedNoErrors = Promise.all([
    stopped,
    haveNoEvent(mediaRecorder, "warning", stopped),
    haveNoEvent(mediaRecorder, "error", stopped)
  ]);

  mediaRecorder.start();
  is(mediaRecorder.state, "recording", "Media recorder should be recording");

  await haveEvent(mediaRecorder, "start", wait(5000, new Error("Timeout")));
  info("onstart fired");

  // The recording can be too short for the waitForAnalysisSuccess() checks
  // below to ever pass. Waiting a bit here avoids that.
  await wait(500);

  is(mediaRecorder.state, "recording",
     "Media recorder is recording before being stopped");
  mediaRecorder.stop();
  is(mediaRecorder.state, "inactive",
     "Media recorder is inactive after being stopped");
  is(mediaRecorder.stream, stream,
     "Media recorder stream = constructed stream post recording");

  await stoppedNoErrors;
  info("Got 'stop' event");

  is(blobs.length, 1, "Should have gotten one data blob");

  // Clean up recording sources
  osc.stop();
  stream.getTracks().forEach(t => t.stop());

  // Sanity check the recording
  const video = document.createElement("video");
  document.getElementById("content").appendChild(video);
  video.id = "recorded-video";

  const blob = new Blob(blobs);
  ok(blob.size > 0, "Recorded blob should contain data");

  video.src = URL.createObjectURL(blob);
  video.preload = "metadata";

  info("Waiting for metadata to be preloaded");

  await haveEvent(video, "loadedmetadata", wait(5000, new Error("Timeout")));
  info("Playback of recording loaded metadata");

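  // Capture the playback with the Gecko-prefixed mozCaptureStream() so the
  // recording's tracks can be counted and its audio analysed.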
  const recordingStream = video.mozCaptureStream();
  is(recordingStream.getVideoTracks().length, 1,
     "Recording should have one video track");
  is(recordingStream.getAudioTracks().length, 1,
     "Recording should have one audio track");

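  // Playback must reach "ended" within 5 seconds and without an "error" event.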
  const ended = haveEvent(video, "ended", wait(5000, new Error("Timeout")));
  const endedNoError = Promise.all([
    ended,
    haveNoEvent(video, "error", ended),
  ]);

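  // Check the recorded audio: expect a peak at the oscillator frequency and
  // little energy one octave below and 1kHz above it.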
  const analyser = new AudioStreamAnalyser(audioCtx, recordingStream);
  const audioReady = analyser.waitForAnalysisSuccess(array => {
    const freq = osc.frequency.value;
    const lowerFreq = freq / 2;
    const upperFreq = freq + 1000;
    const lowerAmp = array[analyser.binIndexForFrequency(lowerFreq)];
    const freqAmp = array[analyser.binIndexForFrequency(freq)];
    const upperAmp = array[analyser.binIndexForFrequency(upperFreq)];
    info("Analysing audio. "
         + lowerFreq + ": " + lowerAmp + ", "
         + freq + ": " + freqAmp + ", "
         + upperFreq + ": " + upperAmp);
    return lowerAmp < 50 && freqAmp > 200 && upperAmp < 50;
  }, endedNoError.then(() => new Error("Audio check failed")));

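  // Check the recorded video: playback should show the red frame that was
  // drawn on the canvas at the start of the test.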
  const videoReady = helper.pixelMustBecome(
      video, helper.red, {
        threshold: 128,
        infoString: "Should become red",
        cancelPromise: endedNoError.then(() => new Error("Video check failed")),
      });

  video.play();

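  // Whether or not playback succeeds, disconnect the analyser and release the
  // blob URL.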
  try {
    await endedNoError;
  } finally {
    analyser.disconnect();
    const url = video.src;
    video.src = "";
    URL.revokeObjectURL(url);
  }

  info("Playback of recording ended without error");

  await audioReady;
  info("Audio content ok");

  await videoReady;
  info("Video content ok");
});
</script>
</pre>
</body>
</html>