<!doctype html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>WebRTC video.requestVideoFrameCallback() test</title>
<script src="/webrtc/RTCPeerConnection-helper.js"></script>
</head>
<body>
<div id="log"></div>
<div>
<video id="local-view" muted autoplay="autoplay"></video>
<video id="remote-view" muted autoplay="autoplay"/>
</video>
</div>
<!-- These files are in place when executing on W3C. -->
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script type="text/javascript">
var test = async_test('Test video.requestVideoFrameCallback() parameters for WebRTC applications.');
//
// This test is based on /webrtc/simplecall.https.html, but it calls
// video.requestVideoFrameCallback() before ending, to verify the required
// and the WebRTC-specific optional metadata parameters.
//
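// requestVideoFrameCallback() invokes its callback with a high-resolution
// timestamp ("now") and a metadata dictionary. For frames received over an
// RTCPeerConnection, the metadata is additionally expected to carry
// rtpTimestamp, receiveTime and, once round-trip time estimation has
// completed, captureTime; verify_params() below checks exactly that.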
var gFirstConnection = null;
var gSecondConnection = null;
var gCallbackCounter = 0;
var verify_params = (now, metadata) => {
  gCallbackCounter = gCallbackCounter + 1;
  assert_greater_than(now, 0);

  // Verify all required fields.
  assert_greater_than(metadata.presentationTime, 0);
  assert_greater_than(metadata.expectedDisplayTime, 0);
  assert_greater_than(metadata.presentedFrames, 0);
  assert_greater_than(metadata.width, 0);
  assert_greater_than(metadata.height, 0);
  assert_true("mediaTime" in metadata, "mediaTime should be present");

  // Verify WebRTC-only fields.
  assert_true("rtpTimestamp" in metadata, "rtpTimestamp should be present");
  assert_true("receiveTime" in metadata, "receiveTime should be present");

  // captureTime is not available until round-trip time estimation is done.
  if (gCallbackCounter > 60 || "captureTime" in metadata) {
    assert_true("captureTime" in metadata, "captureTime should be present");
    test.done();
  } else {
    // Keep requesting callbacks.
    document.getElementById('remote-view').requestVideoFrameCallback(
        test.step_func(verify_params));
  }
};
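// Metadata checks for the locally captured video. Unlike the remote side,
// captureTime should be available right away here, since the frames come from
// a local capture source.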
var verify_local_metadata = (now, metadata) => {
  assert_greater_than(metadata.expectedDisplayTime, 0);
  assert_greater_than(metadata.presentedFrames, 0);
  assert_greater_than(metadata.width, 0);
  assert_greater_than(metadata.height, 0);
  assert_true("captureTime" in metadata,
              "captureTime should always be present for local sources.");
  assert_greater_than(metadata.captureTime, 0);
};
// If the remote video gets video data, that implies that the negotiation
// as well as the ICE and DTLS connections are up.
document.getElementById('remote-view')
    .addEventListener('loadedmetadata', function() {
      document.getElementById('remote-view').requestVideoFrameCallback(
          test.step_func(verify_params));
    });

document.getElementById('local-view')
    .addEventListener('loadedmetadata', function() {
      document.getElementById('local-view').requestVideoFrameCallback(
          test.step_func_done(verify_local_metadata));
    });
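// Builds the call: two RTCPeerConnections in the same page exchange the same
// noise stream in both directions, trickle ICE candidates to each other, and
// render the received track in the remote-view element.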
function getNoiseStreamOkCallback(localStream) {
  gFirstConnection = new RTCPeerConnection(null);
  test.add_cleanup(() => gFirstConnection.close());
  gFirstConnection.onicecandidate = onIceCandidateToFirst;

  gSecondConnection = new RTCPeerConnection(null);
  test.add_cleanup(() => gSecondConnection.close());
  gSecondConnection.onicecandidate = onIceCandidateToSecond;
  gSecondConnection.ontrack = onRemoteTrack;

  localStream.getTracks().forEach(function(track) {
    // Bidirectional streams are needed in order for captureTime to be
    // populated. Use the same source in both directions.
    gFirstConnection.addTrack(track, localStream);
    gSecondConnection.addTrack(track, localStream);
  });

  gFirstConnection.createOffer().then(onOfferCreated, failed('createOffer'));

  var videoTag = document.getElementById('local-view');
  videoTag.srcObject = localStream;
};
var onOfferCreated = test.step_func(function(offer) {
  gFirstConnection.setLocalDescription(offer);
  // This would normally go across the application's signaling solution.
  // In our case, the "signaling" is to call this function.
  receiveCall(offer.sdp);
});
function receiveCall(offerSdp) {
  var parsedOffer = new RTCSessionDescription({ type: 'offer',
                                                sdp: offerSdp });
  gSecondConnection.setRemoteDescription(parsedOffer);
  gSecondConnection.createAnswer().then(onAnswerCreated,
                                        failed('createAnswer'));
};
var onAnswerCreated = test.step_func(function(answer) {
  gSecondConnection.setLocalDescription(answer);
  // Similarly, this would go over the application's signaling solution.
  handleAnswer(answer.sdp);
});
function handleAnswer(answerSdp) {
  var parsedAnswer = new RTCSessionDescription({ type: 'answer',
                                                 sdp: answerSdp });
  gFirstConnection.setRemoteDescription(parsedAnswer);
};
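// Trickle ICE: each candidate gathered by one connection is handed straight to
// the other, since both endpoints live in the same page.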
var onIceCandidateToFirst = test.step_func(function(event) {
  // A null event.candidate means there are no more candidates.
  if (event.candidate) {
    gSecondConnection.addIceCandidate(event.candidate);
  }
});
var onIceCandidateToSecond = test.step_func(function(event) {
  if (event.candidate) {
    gFirstConnection.addIceCandidate(event.candidate);
  }
});
var onRemoteTrack = test.step_func(function(event) {
  var videoTag = document.getElementById('remote-view');
  if (!videoTag.srcObject) {
    videoTag.srcObject = event.streams[0];
  }
});
// Returns a suitable error callback.
function failed(function_name) {
  return test.unreached_func(
      'WebRTC called error callback for ' + function_name);
}
// This call starts the test.
test.step(function() {
  getNoiseStream({ video: true, audio: true })
      .then(test.step_func(getNoiseStreamOkCallback), failed('getNoiseStream'));
});
</script>
</body>
</html>