<!DOCTYPE HTML>
<html>
<head>
  <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
  <script type="application/javascript" src="mediaStreamPlayback.js"></script>
</head>
<body>
<pre id="test">
<script>
createHTML({
  bug: "1259788",
  title: "Test CaptureStream video content on HTMLMediaElement playing a gUM MediaStream",
  visible: true
});

var gUMVideoElement;
var captureStreamElement;
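
// How long (in ms) checkVideoPaused() is given to verify that no new frames
// are being rendered by the capturing element.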
const pausedTimeout = 1000;
let h;
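
// The test: play a gUM stream in one <video>, capture that element with
// mozCaptureStream() into a second <video>, and verify that the captured
// stream renders new frames only while the source element is playing.
// Pausing the source, detaching its srcObject, and removing its video track
// must each stop the flow of frames; undoing each change must resume it.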
runTest(async () => {
  try {
    await pushPrefs(
      // This test expects fake video devices, because it expects captured
      // frames to change over time, which loopback devices do not currently
      // provide.
      ['media.video_loopback_dev', ''],
      ['media.navigator.streams.fake', true]);

    let stream = await getUserMedia({video: true});
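    // VideoStreamHelper provides the checkVideoPlaying()/checkVideoPaused()
    // helpers used below to verify whether captured frames keep changing.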
    h = new VideoStreamHelper();

    gUMVideoElement =
      createMediaElement("video", "gUMVideo");
    gUMVideoElement.srcObject = stream;
    gUMVideoElement.play();

    info("Capturing");
    captureStreamElement =
      createMediaElement("video", "captureStream");
    captureStreamElement.srcObject = gUMVideoElement.mozCaptureStream();
    captureStreamElement.play();

    await h.checkVideoPlaying(captureStreamElement);

    // Adding a dummy audio track to the stream will keep a consuming media
    // element from ending.
    // We could also solve this by repeatedly calling play(), or with autoplay,
    // but then we couldn't tell whether the media element stopped rendering
    // video because it reached the ended state or because there were no frames
    // for the track.
    let osc = createOscillatorStream(new AudioContext(), 1000);
    captureStreamElement.srcObject.addTrack(osc.getTracks()[0]);
info("Video flowing. Pausing.");
gUMVideoElement.pause();
await h.checkVideoPaused(captureStreamElement, { time: pausedTimeout });
info("Video stopped flowing. Playing.");
gUMVideoElement.play();
await h.checkVideoPlaying(captureStreamElement);
info("Video flowing. Removing source.");
stream = gUMVideoElement.srcObject;
gUMVideoElement.srcObject = null;
await h.checkVideoPaused(captureStreamElement, { time: pausedTimeout });
info("Video stopped flowing. Setting source.");
gUMVideoElement.srcObject = stream;
await h.checkVideoPlaying(captureStreamElement);
info("Video flowing. Changing source by track manipulation. Remove first.");
let track = gUMVideoElement.srcObject.getTracks()[0];
gUMVideoElement.srcObject.removeTrack(track);
await h.checkVideoPaused(captureStreamElement, { time: pausedTimeout });
info("Video paused. Changing source by track manipulation. Add first.");
gUMVideoElement.srcObject.addTrack(track);
gUMVideoElement.play();
await h.checkVideoPlaying(captureStreamElement);
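    // Clean up: stop all gUM tracks so capture from the fake device ends.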
    gUMVideoElement.srcObject.getTracks().forEach(t => t.stop());

    ok(true, "Test passed.");
  } catch (e) {
    ok(false, "Test failed: " + e + (e.stack ? "\n" + e.stack : ""));
  }
});
</script>
</pre>
</body>
</html>