<!doctype html>
<meta charset=utf-8>
<title>Stats exposing hardware capability</title>
<meta name="timeout" content="long">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/testdriver.js"></script>
<script src="/resources/testdriver-vendor.js"></script>
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
<script>
/*
 * Test that stats exposing hardware capabilities are only exposed under the
 * conditions described in
 * https://w3c.github.io/webrtc-stats/#limiting-exposure-of-hardware-capabilities.
 */
'use strict';
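// Returns the single stats entry in |report| matching |type| and |kind|,
// failing the test if there is not exactly one match.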
function getStatEntry(report, type, kind) {
  const values = [...report.values()];
  const for_kind = values.filter(
      stat => stat.type === type && stat.kind === kind);
  assert_equals(for_kind.length, 1,
      "Expected report to have exactly 1 entry with type '" + type +
      "' and kind '" + kind + "'. Found " + for_kind.length + " entries.");
  return for_kind[0];
}
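// Polls pc.getStats() until at least one video frame has been both encoded
// and decoded on the connection.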
async function hasEncodedAndDecodedFrames(pc, t) {
  while (true) {
    const report = await pc.getStats();
    const inboundRtp = getStatEntry(report, 'inbound-rtp', 'video');
    const outboundRtp = getStatEntry(report, 'outbound-rtp', 'video');
    if (inboundRtp.framesDecoded > 0 && outboundRtp.framesEncoded > 0) {
      return;
    }
    // Avoid any stats caching, which can otherwise make this an infinite loop.
    await new Promise(r => t.step_timeout(r, 100));
  }
}
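// Sets up a loopback call between two peer connections sending |stream|,
// waits until frames are flowing, and returns the requested stats entry
// from pc1's stats report.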
async function setupPcAndGetStatEntry(t, stream, type, kind, stat) {
  const pc1 = new RTCPeerConnection();
  t.add_cleanup(() => pc1.close());
  const pc2 = new RTCPeerConnection();
  t.add_cleanup(() => pc2.close());
  for (const track of stream.getTracks()) {
    pc1.addTrack(track, stream);
    pc2.addTrack(track, stream);
    t.add_cleanup(() => track.stop());
  }
  exchangeIceCandidates(pc1, pc2);
  await exchangeOfferAnswer(pc1, pc2);
  await hasEncodedAndDecodedFrames(pc1, t);
  const report = await pc1.getStats();
  return getStatEntry(report, type, kind);
}
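// For each hardware-capability stat, verify the three exposure scenarios:
// not capturing, fullscreen without capturing, and capturing via
// getUserMedia. Only the capturing case may expose the stat.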
for (const [type, kind, stat] of [
  // RTCOutboundRtpStreamStats.powerEfficientEncoder
  ['outbound-rtp', 'video', 'powerEfficientEncoder'],
  // RTCOutboundRtpStreamStats.encoderImplementation
  ['outbound-rtp', 'video', 'encoderImplementation'],
  // RTCInboundRtpStreamStats.powerEfficientDecoder
  ['inbound-rtp', 'video', 'powerEfficientDecoder'],
  // RTCInboundRtpStreamStats.decoderImplementation
  ['inbound-rtp', 'video', 'decoderImplementation'],
]) {
  promise_test(async (t) => {
    const stream = await getNoiseStream({video: true, audio: true});
    const statsEntry = await setupPcAndGetStatEntry(
        t, stream, type, kind, stat);
    assert_not_own_property(statsEntry, stat);
  }, stat + " not exposed when not capturing.");

  // Exposing hardware capabilities when there is a fullscreen element was
  // removed in https://github.com/w3c/webrtc-stats/pull/713.
  promise_test(async (t) => {
    const stream = await getNoiseStream({video: true, audio: true});
    const element = document.getElementById('elementToFullscreen');
    await test_driver.bless("fullscreen", () => element.requestFullscreen());
    t.add_cleanup(() => document.exitFullscreen());
    const statsEntry = await setupPcAndGetStatEntry(
        t, stream, type, kind, stat);
    assert_not_own_property(statsEntry, stat);
  }, stat + " not exposed when fullscreen and not capturing.");

  promise_test(async (t) => {
    const stream = await navigator.mediaDevices.getUserMedia(
        {video: true, audio: true});
    const statsEntry = await setupPcAndGetStatEntry(
        t, stream, type, kind, stat);
    assert_own_property(statsEntry, stat);
  }, stat + " exposed when capturing.");
}
</script>
<body>
<div id="elementToFullscreen"></div>
</body>