<!DOCTYPE HTML>
<html>
<head>
<title>A/V sync test for stream capturing</title>
<script src="/tests/SimpleTest/SimpleTest.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<p>The following canvas captures and shows the video frame at the moment the video becomes audible.</p>
<canvas id="canvas" width="640" height="480"></canvas>
<script type="application/javascript">
/**
 * This test captures the stream before the video starts playing, then checks
 * whether A/V stays in sync during playback.
*/
add_task(async function testAVSyncForStreamCapturing() {
createVideo();
captureStreamFromVideo();
await playMedia();
await testAVSync();
destroyVideo();
});
/**
 * This test checks whether A/V is still in sync after we switch the media
 * sink from the playback-based sink to the mediatrack-based sink.
*/
add_task(async function testAVSyncWhenSwitchingMediaSink() {
createVideo();
await playMedia({ resolveAfterReceivingTimeupdate: 5 });
captureStreamFromVideo();
await testAVSync();
destroyVideo();
});
/**
 * This test checks whether A/V is still in sync after we change the playback
 * rate on the captured stream.
*/
add_task(async function testAVSyncWhenChangingPlaybackRate() {
createVideo();
captureStreamFromVideo();
await playMedia();
const playbackRates = [0.25, 0.5, 1.0, 1.5, 2.0];
for (let rate of playbackRates) {
setPlaybackRate(rate);
// TODO : when the playback rate is 1.5x or higher, A/V sync becomes less
// stable in testing, so we allow more fuzzy frames but also run the check
// more times. At that speed precise A/V sync matters less because we can't
// really tell the difference, but it would be good to investigate whether
// we could make A/V sync work better at such a high rate.
if (rate >= 1.5) {
await testAVSync({ expectedAVSyncTestTimes : 4, fuzzyFrames : 10});
} else {
await testAVSync({ expectedAVSyncTestTimes : 2 });
}
}
destroyVideo();
});
/**
* Following are helper functions
*/
const DEBUG = false;
function info_debug(msg) {
if (DEBUG) {
info(msg);
}
}
function createVideo() {
const video = document.createElement("video");
// This video is made for testing A/V sync: it only produces an audible sound
// once per second, and when the sound comes out, you can check the position
// of the square to tell whether A/V is in sync.
video.src = "sync.webm";
video.loop = true;
video.controls = true;
video.width = 640;
video.height = 480;
video.id = "video";
document.body.appendChild(video);
}
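// Releases the test video's resource and removes it from the document.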
function destroyVideo() {
const video = document.getElementById("video");
// Assigning null would coerce to the string "null"; clear the attribute
// instead and reload to release the resource.
video.removeAttribute("src");
video.load();
video.remove();
}
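// Starts playback; if `resolveAfterReceivingTimeupdate` is set, additionally
// waits for that many `timeupdate` events before resolving.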
async function playMedia({ resolveAfterReceivingTimeupdate } = {}) {
const video = document.getElementById("video");
ok(await video.play().then(() => true, () => false), "video started playing");
if (resolveAfterReceivingTimeupdate > 0) {
// Play it for a while to ensure the clock is advancing on the normal audio sink.
for (let idx = 0; idx < resolveAfterReceivingTimeupdate; idx++) {
await new Promise(r => video.ontimeupdate = r);
}
}
}
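// Routes the video element's audio through a MediaElementAudioSourceNode,
// which moves its audio output into the AudioContext graph (the
// mediatrack-based sink), and attaches an analyser used later to detect
// audible frames.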
function captureStreamFromVideo() {
const video = document.getElementById("video");
let ac = new AudioContext();
let analyser = ac.createAnalyser();
analyser.smoothingTimeConstant = 0;
analyser.fftSize = 2048; // frequencyBinCount = fftSize / 2 = 1024 bins
// Allocate the buffer after setting fftSize so its size matches the bin count.
analyser.frequencyBuf = new Float32Array(analyser.frequencyBinCount);
let sourceNode = ac.createMediaElementSource(video);
sourceNode.connect(analyser);
analyser.connect(ac.destination);
video.analyser = analyser;
}
// This method checks whether A/V stays in sync while the stream from the
// video element is being captured, and returns a promise that resolves once
// all checks have run.
// @param [optional] expectedAVSyncTestTimes
// The number of times the A/V sync check is performed.
// @param [optional] fuzzyFrames
// Allow the detected position to deviate from perfect sync (+0) by up to
// fuzzyFrames frames in either direction.
async function testAVSync({ expectedAVSyncTestTimes = 5, fuzzyFrames = 5} = {}) {
return new Promise(r => {
const analyser = document.getElementById("video").analyser;
let testIdx = 0;
let hasDetectedAudibleFrame = false;
// We only want to detect the audible frame at the moment the sound first
// becomes audible, so we always skip the first audible frame we see: it
// might not be the start of a beep but the tail part (where the audio is
// decaying to silence) when we start detecting.
let hasSkippedFirstFrame = false;
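// Poll the frequency data once per animation frame; when a new audible frame
// is detected, paint the current video frame and verify the square position.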
analyser.notifyAnalysis = () => {
let {frequencyBuf} = analyser;
analyser.getFloatFrequencyData(frequencyBuf);
if (checkIfBufferIsSilent(frequencyBuf)) {
info_debug("no need to paint the silent frame");
hasDetectedAudibleFrame = false;
requestAnimationFrame(analyser.notifyAnalysis);
return;
}
if (hasDetectedAudibleFrame) {
info_debug("detected audible frame already");
requestAnimationFrame(analyser.notifyAnalysis);
return;
}
hasDetectedAudibleFrame = true;
if (!hasSkippedFirstFrame) {
info("skip the first audible frame");
hasSkippedFirstFrame = true;
requestAnimationFrame(analyser.notifyAnalysis);
return;
}
const video = document.getElementById("video");
info(`paint audible frame`);
const cvs = document.getElementById("canvas");
let context = cvs.getContext('2d');
context.drawImage(video, 0, 0, 640, 480);
const isInSync = checkIfAVIsOnSyncFuzzy(context, fuzzyFrames);
ok(isInSync, `check ${testIdx++}: A/V is ${isInSync ? "in" : "out of"} sync`);
if (testIdx == expectedAVSyncTestTimes) {
r();
return;
}
requestAnimationFrame(analyser.notifyAnalysis);
};
analyser.notifyAnalysis();
});
}
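// Returns true when no bin in the frequency data rises above the audible
// threshold.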
function checkIfBufferIsSilent(buffer) {
for (let data of buffer) {
// When sound is audible the values are around -200, and the silence values
// are around -800.
if (data > -200) {
return false;
}
}
return true;
}
// This function checks the pixel data from the `context` to see whether the
// square appears in the right place. Since we can't control the exact timing
// of video frame rendering in the compositor, the result is fuzzy.
function checkIfAVIsOnSyncFuzzy(context, fuzzyFrames) {
const squareLength = 48;
// The canvas is 640*480, so at perfect sync the square is centered and
// `perfectSync` is the position of its top-left corner.
const perfectSync =
{ x: 320 - squareLength/2.0 ,
y: 240 - squareLength/2.0 };
let isAVSyncFuzzy = false;
// Grab the horizontal strip of the image where the square should appear and
// detect where the square actually is.
let imageData = context.getImageData(0, perfectSync.y, 640, squareLength);
for (let i = 0; i < imageData.data.length; i += 4) {
// If the pixel's color is red, this position is the top-left corner of
// the square.
if (isPixelColorRed(imageData.data[i], imageData.data[i+1],
imageData.data[i+2])) {
const pos = imageIdxToRelativeCoordinate(imageData, i);
let diff = calculateFrameDifferenceInXAxis(pos.x, perfectSync.x);
info(`found the square at diff=${diff}`);
// We may be checking A/V sync too early or too late; adjust the diff based
// on the media time to estimate where the square should have been.
if (diff > fuzzyFrames) {
diff = adjustFrameDiffBasedOnMediaTime(diff);
const video = document.getElementById("video");
info(`adjusted diff to ${diff} (time=${video.currentTime})`);
}
if (diff <= fuzzyFrames) {
isAVSyncFuzzy = true;
}
context.putImageData(imageData, 0, 0);
break;
}
}
if (!isAVSyncFuzzy) {
// Dump the canvas as a data URL to help debug an out-of-sync failure.
const canvas = document.getElementById("canvas");
info(canvas.toDataURL());
}
return isAVSyncFuzzy;
}
// Given an imageData and an index into its data array, return the coordinate
// of that pixel relative to the imageData.
function imageIdxToRelativeCoordinate(imageData, idx) {
const offset = idx / 4; // 4 bytes per pixel (RGBA)
return { x: offset % imageData.width, y: Math.floor(offset / imageData.width) };
}
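// The square sweeps across the full 640px width once per second at 60 fps,
// so dividing the x offset by the per-frame speed converts it to a
// difference measured in frames.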
function calculateFrameDifferenceInXAxis(squareX, targetX) {
const offsetX = Math.abs(targetX - squareX);
const xSpeedPerFrame = 640 / 60; // video is 60fps
return offsetX / xSpeedPerFrame;
}
function isPixelColorRed(r, g, b) {
// The rendered color varies across platforms and screens, so instead of
// requiring R to be exactly 255 we only check that it exceeds a certain
// threshold.
return r > 200 && g < 10 && b < 10;
}
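// Changes the playback rate on the video element, which the captured stream
// follows.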
function setPlaybackRate(rate) {
const video = document.getElementById("video");
info(`change playback rate from ${video.playbackRate} to ${rate}`);
document.getElementById("video").playbackRate = rate;
}
function adjustFrameDiffBasedOnMediaTime(currentDiff) {
// The audio wave can be regarded as being composed of a "start", a "peak"
// and a "tail". The "start" part is the sound gradually becoming louder and
// the "tail" is it gradually decaying to silence. We want to check the "peak"
// part, which should happen regularly on every second (1s, 2s, 3s, ...).
// However, this check is triggered by `requestAnimationFrame()` and we can't
// guarantee that we're checking the peak part when the function is called.
// Therefore, we adjust by the video time to tell whether we're checking the
// audio wave too early or too late, in order to get a consistent result.
const video = document.getElementById("video");
const videoCurrentTimeFloatPortion = video.currentTime % 1;
const timeOffset =
videoCurrentTimeFloatPortion > 0.5 ?
1 - videoCurrentTimeFloatPortion : // too early
videoCurrentTimeFloatPortion; // too late
const frameOffset = timeOffset / 0.016; // 60 fps, so one frame lasts ~0.016s
info(`timeOffset=${timeOffset}, frameOffset=${frameOffset}`);
return Math.abs(currentDiff - frameOffset);
}
</script>
</body>
</html>