Diffstat (limited to 'testing/web-platform/tests/webrtc-stats')
8 files changed, 585 insertions, 0 deletions
diff --git a/testing/web-platform/tests/webrtc-stats/META.yml b/testing/web-platform/tests/webrtc-stats/META.yml
new file mode 100644
index 0000000000..10bcf856eb
--- /dev/null
+++ b/testing/web-platform/tests/webrtc-stats/META.yml
@@ -0,0 +1,5 @@
+spec: https://w3c.github.io/webrtc-stats/
+suggested_reviewers:
+  - henbos
+  - vr000m
+  - jan-ivar
diff --git a/testing/web-platform/tests/webrtc-stats/README.md b/testing/web-platform/tests/webrtc-stats/README.md
new file mode 100644
index 0000000000..2b69372894
--- /dev/null
+++ b/testing/web-platform/tests/webrtc-stats/README.md
@@ -0,0 +1,7 @@
+The following 5 test cases in the `webrtc/` directory test some of the mandatory-to-implement stats defined in WebRTC Statistics:
+
+* `getstats.html`
+* `RTCPeerConnection-getStats.https.html`
+* `RTCPeerConnection-track-stats.https.html`
+* `RTCRtpReceiver-getStats.https.html`
+* `RTCRtpSender-getStats.https.html`
diff --git a/testing/web-platform/tests/webrtc-stats/getStats-remote-candidate-address.html b/testing/web-platform/tests/webrtc-stats/getStats-remote-candidate-address.html
new file mode 100644
index 0000000000..08e2aec90e
--- /dev/null
+++ b/testing/web-platform/tests/webrtc-stats/getStats-remote-candidate-address.html
@@ -0,0 +1,81 @@
+<!doctype html>
+<meta charset=utf-8>
+<title>Exposure of remote candidate address on stats</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="../webrtc/RTCPeerConnection-helper.js"></script>
+<script src="../webrtc/RTCStats-helper.js"></script>
+<script>
+  'use strict';
+
+promise_test(async (test) => {
+  const localPc = new RTCPeerConnection();
+  test.add_cleanup(() => localPc.close());
+  const remotePc = new RTCPeerConnection();
+  test.add_cleanup(() => remotePc.close());
+
+  const promiseDataChannel = new Promise(resolve => {
+    remotePc.addEventListener('datachannel', (event) => {
+      resolve(event.channel);
+    });
+  });
+
+  const localDataChannel = localPc.createDataChannel('test');
+
+  localPc.addEventListener('icecandidate', event => {
+    if (event.candidate)
+      remotePc.addIceCandidate(event.candidate);
+  });
+  exchangeOfferAnswer(localPc, remotePc);
+
+  const remoteDataChannel = await promiseDataChannel;
+
+  localDataChannel.send("test");
+
+  await new Promise(resolve => {
+    remoteDataChannel.onmessage = resolve;
+  });
+
+  const remoteCandidateStats = getRequiredStats(await localPc.getStats(), "remote-candidate");
+  assert_equals(remoteCandidateStats.address, null, "address should be null");
+}, "Do not expose in stats remote addresses that are not known to be already exposed to JS");
+
+
+promise_test(async (test) => {
+  const localPc = new RTCPeerConnection();
+  test.add_cleanup(() => localPc.close());
+  const remotePc = new RTCPeerConnection();
+  test.add_cleanup(() => remotePc.close());
+
+  const promiseDataChannel = new Promise(resolve => {
+    remotePc.addEventListener('datachannel', (event) => {
+      resolve(event.channel);
+    });
+  });
+
+  const localDataChannel = localPc.createDataChannel('test');
+
+  localPc.addEventListener('icecandidate', event => {
+    if (event.candidate)
+      remotePc.addIceCandidate(event.candidate);
+  });
+  remotePc.addEventListener('icecandidate', event => {
+    if (event.candidate)
+      localPc.addIceCandidate(event.candidate);
+  });
+  exchangeOfferAnswer(localPc, remotePc);
+
+  const remoteDataChannel = await promiseDataChannel;
+
+  localDataChannel.send("test");
+
+  await new Promise(resolve => {
+    remoteDataChannel.onmessage = resolve;
+  });
+
+  const remoteCandidateStats = getRequiredStats(await localPc.getStats(), "remote-candidate");
+  assert_not_equals(remoteCandidateStats.address, null, "address should not be null");
+
+}, "Expose in stats remote addresses that are already exposed to JS");
+
+</script>
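The two tests above rely on getRequiredStats() from ../webrtc/RTCStats-helper.js, which is not part of this diff. Assuming it simply returns the single stats entry of the requested type and fails the test when none is present, it would look roughly like the following sketch (the body and the assertion message are illustrative, not taken from the actual helper):

function getRequiredStats(report, type) {
  // Collect every entry of the requested type from the RTCStatsReport.
  const matching = [...report.values()].filter(stats => stats.type === type);
  assert_greater_than(matching.length, 0,
      `Expected at least one "${type}" stats entry`);
  // The tests above only inspect a single entry, so return the first match.
  return matching[0];
}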
diff --git a/testing/web-platform/tests/webrtc-stats/hardware-capability-stats.https.html b/testing/web-platform/tests/webrtc-stats/hardware-capability-stats.https.html
new file mode 100644
index 0000000000..49f80d4b65
--- /dev/null
+++ b/testing/web-platform/tests/webrtc-stats/hardware-capability-stats.https.html
@@ -0,0 +1,107 @@
+<!doctype html>
+<meta charset=utf-8>
+<title>Stats exposing hardware capability</title>
+<meta name="timeout" content="long">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="../webrtc/RTCPeerConnection-helper.js"></script>
+<script src="../webrtc/RTCStats-helper.js"></script>
+<script>
+/*
+ * Test that stats exposing hardware capabilities are only exposed according to
+ * the conditions described in https://w3c.github.io/webrtc-stats/#limiting-exposure-of-hardware-capabilities.
+ */
+'use strict';
+
+function getStatEntry(report, type, kind) {
+  const values = [...report.values()];
+  const for_kind = values.filter(
+      stat => stat.type == type && stat.kind == kind);
+
+  assert_equals(1, for_kind.length,
+      "Expected report to have only 1 entry with type '" + type +
+      "' and kind '" + kind + "'. Found values " + for_kind);
+  return for_kind[0];
+}
+
+async function hasEncodedAndDecodedFrames(pc, t) {
+  while (true) {
+    const report = await pc.getStats();
+    const inboundRtp = getStatEntry(report, 'inbound-rtp', 'video');
+    const outboundRtp = getStatEntry(report, 'outbound-rtp', 'video');
+    if (inboundRtp.framesDecoded > 0 && outboundRtp.framesEncoded > 0) {
+      return;
+    }
+    // Avoid any stats caching, which can otherwise make this an infinite loop.
+    await (new Promise(r => t.step_timeout(r, 100)));
+  }
+}
+
+async function setupPcAndGetStatEntry(
+    t, stream, type, kind, stat) {
+  const pc1 = new RTCPeerConnection();
+  t.add_cleanup(() => pc1.close());
+  const pc2 = new RTCPeerConnection();
+  t.add_cleanup(() => pc2.close());
+  for (const track of stream.getTracks()) {
+    pc1.addTrack(track, stream);
+    pc2.addTrack(track, stream);
+    t.add_cleanup(() => track.stop());
+  }
+
+  exchangeIceCandidates(pc1, pc2);
+  await exchangeOfferAnswer(pc1, pc2);
+  await hasEncodedAndDecodedFrames(pc1, t);
+  const report = await pc1.getStats();
+  return getStatEntry(report, type, kind);
+}
+
+for (const args of [
+  // RTCOutboundRtpStreamStats.powerEfficientEncoder
+  ['outbound-rtp', 'video', 'powerEfficientEncoder'],
+  // RTCOutboundRtpStreamStats.encoderImplementation
+  ['outbound-rtp', 'video', 'encoderImplementation'],
+  // RTCInboundRtpStreamStats.powerEfficientDecoder
+  ['inbound-rtp', 'video', 'powerEfficientDecoder'],
+  // RTCInboundRtpStreamStats.decoderImplementation
+  ['inbound-rtp', 'video', 'decoderImplementation'],
+]) {
+  const type = args[0];
+  const kind = args[1];
+  const stat = args[2];
+
+  promise_test(async (t) => {
+    const stream = await getNoiseStream({video: true, audio: true});
+    const statsEntry = await setupPcAndGetStatEntry(t, stream, type, kind, stat);
+    assert_not_own_property(statsEntry, stat);
+  }, stat + " not exposed when not capturing.");
+
+  // Exposing hardware capabilities when there is a fullscreen element was
+  // removed with https://github.com/w3c/webrtc-stats/pull/713.
+  promise_test(async (t) => {
+    const stream = await getNoiseStream({video: true, audio: true});
+
+    const element = document.getElementById('elementToFullscreen');
+    await test_driver.bless("fullscreen", () => element.requestFullscreen());
+    t.add_cleanup(() => document.exitFullscreen());
+
+    const statsEntry = await setupPcAndGetStatEntry(
+        t, stream, type, kind, stat);
+    assert_not_own_property(statsEntry, stat);
+  }, stat + " not exposed when fullscreen and not capturing.");
+
+  promise_test(async (t) => {
+    const stream = await navigator.mediaDevices.getUserMedia(
+        {video: true, audio: true});
+    const statsEntry = await setupPcAndGetStatEntry(
+        t, stream, type, kind, stat);
+    assert_own_property(statsEntry, stat);
+  }, stat + " exposed when capturing.");
+}
+
+</script>
+<body>
+  <div id="elementToFullscreen"></div>
+</body>
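Because powerEfficientEncoder, encoderImplementation, powerEfficientDecoder and decoderImplementation are only present when the capability-exposure conditions in the spec hold (capturing via getUserMedia in the test above), application code reading them has to feature-detect per entry. A minimal sketch of that pattern, assuming an already-connected RTCPeerConnection pc (the logging is illustrative):

async function logHardwareCapabilityStats(pc) {
  const report = await pc.getStats();
  for (const stats of report.values()) {
    // These members only appear on video RTP stream stats, and only when the
    // hardware-capability exposure conditions are met.
    if (stats.type === 'outbound-rtp' && 'powerEfficientEncoder' in stats) {
      console.log(`encoder: ${stats.encoderImplementation}, ` +
                  `power efficient: ${stats.powerEfficientEncoder}`);
    }
    if (stats.type === 'inbound-rtp' && 'powerEfficientDecoder' in stats) {
      console.log(`decoder: ${stats.decoderImplementation}, ` +
                  `power efficient: ${stats.powerEfficientDecoder}`);
    }
  }
}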
diff --git a/testing/web-platform/tests/webrtc-stats/idlharness.window.js b/testing/web-platform/tests/webrtc-stats/idlharness.window.js
new file mode 100644
index 0000000000..d98712fc48
--- /dev/null
+++ b/testing/web-platform/tests/webrtc-stats/idlharness.window.js
@@ -0,0 +1,14 @@
+// META: script=/resources/WebIDLParser.js
+// META: script=/resources/idlharness.js
+
+'use strict';
+
+// https://w3c.github.io/webrtc-stats/
+
+idl_test(
+  ['webrtc-stats'],
+  ['webrtc'],
+  idl_array => {
+    // No interfaces to test
+  }
+);
src="../webrtc/RTCPeerConnection-helper.js"></script> +<script> +function extractOutboundRtpStats(stats) { + const wantedStats = []; + stats.forEach(report => { + if (report.type === 'outbound-rtp') { + wantedStats.push(report); + } + }); + return wantedStats; +} + +promise_test(async (test) => { + const pc1 = new RTCPeerConnection(); + test.add_cleanup(() => pc1.close()); + const pc2 = new RTCPeerConnection(); + test.add_cleanup(() => pc2.close()); + + const stream = await getNoiseStream({audio: true, video: true}); + stream.getTracks().forEach(t => pc1.addTrack(t, stream)); + exchangeIceCandidates(pc1, pc2); + exchangeOfferAnswer(pc1, pc2); + const {track} = await new Promise(r => pc2.ontrack = r); + await new Promise(r => track.onunmute = r); + let outboundStats = extractOutboundRtpStats(await pc1.getStats()); + assert_equals(outboundStats.length, 2); + assert_true(outboundStats[0].active); + assert_true(outboundStats[1].active); + + pc1.getSenders().forEach(async sender => { + const parameters = sender.getParameters(); + parameters.encodings[0].active = false; + await sender.setParameters(parameters); + }); + // Avoid any stats caching. + await (new Promise(r => test.step_timeout(r, 100))); + + outboundStats = extractOutboundRtpStats(await pc1.getStats()); + assert_equals(outboundStats.length, 2); + assert_false(outboundStats[0].active); + assert_false(outboundStats[1].active); +}, 'setting an encoding to false is reflected in outbound-rtp stats'); +</script> diff --git a/testing/web-platform/tests/webrtc-stats/rtp-stats-creation.html b/testing/web-platform/tests/webrtc-stats/rtp-stats-creation.html new file mode 100644 index 0000000000..7a6d9df456 --- /dev/null +++ b/testing/web-platform/tests/webrtc-stats/rtp-stats-creation.html @@ -0,0 +1,110 @@ +<!doctype html> +<meta charset=utf-8> +<title>No RTCRtpStreamStats should exist prior to RTP/RTCP packet flow</title> +<script src="/resources/testharness.js"></script> +<script src="/resources/testharnessreport.js"></script> +<script src="../webrtc/RTCPeerConnection-helper.js"></script> +<script> +'use strict'; + +promise_test(async (test) => { + const localPc = createPeerConnectionWithCleanup(test); + const remotePc = createPeerConnectionWithCleanup(test); + + localPc.addTransceiver("audio"); + localPc.addTransceiver("video"); + await exchangeOfferAndListenToOntrack(test, localPc, remotePc); + const report = await remotePc.getStats(); + const rtp = [...report.values()].filter(({type}) => type.endsWith("rtp")); + assert_equals(rtp.length, 0, "no rtp stats with only remote description"); +}, "No RTCRtpStreamStats exist when only remote description is set"); + +promise_test(async (test) => { + const localPc = createPeerConnectionWithCleanup(test); + const remotePc = createPeerConnectionWithCleanup(test); + + localPc.addTrack(...await createTrackAndStreamWithCleanup(test, "audio")); + localPc.addTrack(...await createTrackAndStreamWithCleanup(test, "video")); + await exchangeOfferAndListenToOntrack(test, localPc, remotePc); + const report = await localPc.getStats(); + const rtp = [...report.values()].filter(({type}) => type.endsWith("rtp")); + assert_equals(rtp.length, 0, "no rtp stats with only local description"); +}, "No RTCRtpStreamStats exist when only local description is set"); + +promise_test(async (test) => { + const localPc = createPeerConnectionWithCleanup(test); + const remotePc = createPeerConnectionWithCleanup(test); + + localPc.addTrack(...await createTrackAndStreamWithCleanup(test, "audio")); + localPc.addTrack(...await 
diff --git a/testing/web-platform/tests/webrtc-stats/rtp-stats-creation.html b/testing/web-platform/tests/webrtc-stats/rtp-stats-creation.html
new file mode 100644
index 0000000000..7a6d9df456
--- /dev/null
+++ b/testing/web-platform/tests/webrtc-stats/rtp-stats-creation.html
@@ -0,0 +1,110 @@
+<!doctype html>
+<meta charset=utf-8>
+<title>No RTCRtpStreamStats should exist prior to RTP/RTCP packet flow</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="../webrtc/RTCPeerConnection-helper.js"></script>
+<script>
+'use strict';
+
+promise_test(async (test) => {
+  const localPc = createPeerConnectionWithCleanup(test);
+  const remotePc = createPeerConnectionWithCleanup(test);
+
+  localPc.addTransceiver("audio");
+  localPc.addTransceiver("video");
+  await exchangeOfferAndListenToOntrack(test, localPc, remotePc);
+  const report = await remotePc.getStats();
+  const rtp = [...report.values()].filter(({type}) => type.endsWith("rtp"));
+  assert_equals(rtp.length, 0, "no rtp stats with only remote description");
+}, "No RTCRtpStreamStats exist when only remote description is set");
+
+promise_test(async (test) => {
+  const localPc = createPeerConnectionWithCleanup(test);
+  const remotePc = createPeerConnectionWithCleanup(test);
+
+  localPc.addTrack(...await createTrackAndStreamWithCleanup(test, "audio"));
+  localPc.addTrack(...await createTrackAndStreamWithCleanup(test, "video"));
+  await exchangeOfferAndListenToOntrack(test, localPc, remotePc);
+  const report = await localPc.getStats();
+  const rtp = [...report.values()].filter(({type}) => type.endsWith("rtp"));
+  assert_equals(rtp.length, 0, "no rtp stats with only local description");
+}, "No RTCRtpStreamStats exist when only local description is set");
+
+promise_test(async (test) => {
+  const localPc = createPeerConnectionWithCleanup(test);
+  const remotePc = createPeerConnectionWithCleanup(test);
+
+  localPc.addTrack(...await createTrackAndStreamWithCleanup(test, "audio"));
+  localPc.addTrack(...await createTrackAndStreamWithCleanup(test, "video"));
+  exchangeIceCandidates(localPc, remotePc);
+  await Promise.all([
+    exchangeOfferAnswer(localPc, remotePc),
+    new Promise(r => remotePc.ontrack = e => e.track.onunmute = r)
+  ]);
+  const start = performance.now();
+  while (true) {
+    const report = await localPc.getStats();
+    const outbound =
+        [...report.values()].filter(({type}) => type == "outbound-rtp");
+    assert_true(outbound.every(({packetsSent}) => packetsSent > 0),
+        "no outbound rtp stats before packets sent");
+    if (outbound.length == 2) {
+      // One outbound stat for each track is present. We're done.
+      break;
+    }
+    if (performance.now() > start + 5000) {
+      assert_unreached("outbound stats should become available");
+    }
+    await new Promise(r => test.step_timeout(r, 100));
+  }
+}, "No RTCOutboundRtpStreamStats exist until packets have been sent");
+
+promise_test(async (test) => {
+  const localPc = createPeerConnectionWithCleanup(test);
+  const remotePc = createPeerConnectionWithCleanup(test);
+
+  localPc.addTrack(...await createTrackAndStreamWithCleanup(test, "audio"));
+  localPc.addTrack(...await createTrackAndStreamWithCleanup(test, "video"));
+  exchangeIceCandidates(localPc, remotePc);
+  await exchangeOfferAnswer(localPc, remotePc);
+  const start = performance.now();
+  while (true) {
+    const report = await remotePc.getStats();
+    const inbound =
+        [...report.values()].filter(({type}) => type == "inbound-rtp");
+    assert_true(inbound.every(({packetsReceived}) => packetsReceived > 0),
+        "no inbound rtp stats before packets received");
+    if (inbound.length == 2) {
+      // One inbound stat for each track is present. We're done.
+      break;
+    }
+    if (performance.now() > start + 5000) {
+      assert_unreached("inbound stats should become available");
+    }
+    await new Promise(r => test.step_timeout(r, 100));
+  }
+}, "No RTCInboundRtpStreamStats exist until packets have been received");
+
+promise_test(async (test) => {
+  const localPc = createPeerConnectionWithCleanup(test);
+  const remotePc = createPeerConnectionWithCleanup(test);
+
+  localPc.addTrack(...await createTrackAndStreamWithCleanup(test, "audio"));
+  exchangeIceCandidates(localPc, remotePc);
+  await exchangeOfferAnswer(localPc, remotePc);
+  const start = performance.now();
+  while (true) {
+    const report = await remotePc.getStats();
+    const audioPlayout =
+        [...report.values()].filter(({type}) => type == "media-playout");
+    if (audioPlayout.length == 1) {
+      break;
+    }
+    if (performance.now() > start + 5000) {
+      assert_unreached("Audio playout stats should become available");
+    }
+    await new Promise(r => test.step_timeout(r, 100));
+  }
+}, "RTCAudioPlayoutStats should be present");
+</script>
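The last three tests above repeat the same poll-with-timeout loop over getStats(). If that pattern needs to be reused, it could be factored into a small helper along these lines (the helper name, timeout, and polling interval are illustrative choices, not part of the tests):

// Poll pc.getStats() until predicate(report) is true, or fail after timeoutMs.
async function pollStatsUntil(test, pc, predicate, timeoutMs = 5000) {
  const start = performance.now();
  while (true) {
    const report = await pc.getStats();
    if (predicate(report)) {
      return report;
    }
    if (performance.now() > start + timeoutMs) {
      assert_unreached("condition on stats report was not met in time");
    }
    await new Promise(r => test.step_timeout(r, 100));
  }
}

// Example: wait until both outbound-rtp entries are present.
// const report = await pollStatsUntil(test, localPc, r =>
//     [...r.values()].filter(({type}) => type == "outbound-rtp").length == 2);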
diff --git a/testing/web-platform/tests/webrtc-stats/supported-stats.https.html b/testing/web-platform/tests/webrtc-stats/supported-stats.https.html
new file mode 100644
index 0000000000..24b4d3f06f
--- /dev/null
+++ b/testing/web-platform/tests/webrtc-stats/supported-stats.https.html
@@ -0,0 +1,212 @@
+<!doctype html>
+<meta charset=utf-8>
+<meta name="timeout" content="long">
+<title>Support for all stats defined in WebRTC Stats</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src="../webrtc/RTCPeerConnection-helper.js"></script>
+<script src="../webrtc/dictionary-helper.js"></script>
+<script src="../webrtc/RTCStats-helper.js"></script>
+<script src="/resources/WebIDLParser.js"></script>
+<script>
+'use strict';
+
+// Inspired by the similar test for MTI stats in ../webrtc/RTCPeerConnection-mandatory-getStats.https.html
+
+
+
+// From https://w3c.github.io/webrtc-stats/webrtc-stats.html#rtcstatstype-str*
+
+const dictionaryNames = {
+  "codec": "RTCCodecStats",
+  "inbound-rtp": "RTCInboundRtpStreamStats",
+  "outbound-rtp": "RTCOutboundRtpStreamStats",
+  "remote-inbound-rtp": "RTCRemoteInboundRtpStreamStats",
+  "remote-outbound-rtp": "RTCRemoteOutboundRtpStreamStats",
+  "csrc": "RTCRtpContributingSourceStats",
+  "peer-connection": "RTCPeerConnectionStats",
+  "data-channel": "RTCDataChannelStats",
+  "media-source": {
+    audio: "RTCAudioSourceStats",
+    video: "RTCVideoSourceStats"
+  },
+  "media-playout": "RTCAudioPlayoutStats",
+  "sender": {
+    audio: "RTCAudioSenderStats",
+    video: "RTCVideoSenderStats"
+  },
+  "receiver": {
+    audio: "RTCAudioReceiverStats",
+    video: "RTCVideoReceiverStats",
+  },
+  "transport": "RTCTransportStats",
+  "candidate-pair": "RTCIceCandidatePairStats",
+  "local-candidate": "RTCIceCandidateStats",
+  "remote-candidate": "RTCIceCandidateStats",
+  "certificate": "RTCCertificateStats",
+};
+
+function isPropertyTestable(type, property) {
+  // List of properties which are not testable by this test.
+  // When adding something to this list, please explain why.
+  const untestablePropertiesByType = {
+    'candidate-pair': [
+      'availableIncomingBitrate', // requires REMB, no TWCC.
+    ],
+    'certificate': [
+      'issuerCertificateId', // we only use self-signed certificates.
+    ],
+    'local-candidate': [
+      'url', // requires a STUN/TURN server.
+      'relayProtocol', // requires a TURN server.
+      'relatedAddress', // requires a STUN/TURN server.
+      'relatedPort', // requires a STUN/TURN server.
+    ],
+    'remote-candidate': [
+      'url', // requires a STUN/TURN server.
+      'relayProtocol', // requires a TURN server.
+      'relatedAddress', // requires a STUN/TURN server.
+      'relatedPort', // requires a STUN/TURN server.
+      'tcpType', // requires ICE-TCP connection.
+    ],
+    'outbound-rtp': [
+      'rid', // requires simulcast.
+    ],
+    'media-source': [
+      'echoReturnLoss', // requires gUM with an audio input device.
+      'echoReturnLossEnhancement', // requires gUM with an audio input device.
+    ]
+  };
+  if (!untestablePropertiesByType[type]) {
+    return true;
+  }
+  return !untestablePropertiesByType[type].includes(property);
+}
+
+async function getAllStats(t, pc) {
+  // Try to obtain as many stats as possible, waiting up to 20 seconds for
+  // roundTripTime, which can take several RTCP messages to calculate.
+  let stats;
+  for (let i = 0; i < 20; i++) {
+    stats = await pc.getStats();
+    const values = [...stats.values()];
+    const [remoteInboundAudio, remoteInboundVideo] =
+        ["audio", "video"].map(kind =>
+            values.find(s => s.type == "remote-inbound-rtp" && s.kind == kind));
+    const [remoteOutboundAudio, remoteOutboundVideo] =
+        ["audio", "video"].map(kind =>
+            values.find(s => s.type == "remote-outbound-rtp" && s.kind == kind));
+    // We expect both audio and video remote-inbound-rtp RTT.
+    const hasRemoteInbound =
+        remoteInboundAudio && "roundTripTime" in remoteInboundAudio &&
+        remoteInboundVideo && "roundTripTime" in remoteInboundVideo;
+    // Due to current implementation limitations, we don't put as hard
+    // requirements on remote-outbound-rtp as on remote-inbound-rtp. It's
+    // enough if it is available for either kind; `roundTripTime` is not
+    // required. In Chromium, remote-outbound-rtp is only implemented for
+    // audio and `roundTripTime` is missing in this test, but waiting for any
+    // remote-outbound-rtp avoids flaky failures.
+    const hasRemoteOutbound = remoteOutboundAudio || remoteOutboundVideo;
+    const hasMediaPlayout = values.find(({type}) => type == "media-playout") != undefined;
+    if (hasRemoteInbound && hasRemoteOutbound && hasMediaPlayout) {
+      return stats;
+    }
+    await new Promise(r => t.step_timeout(r, 1000));
+  }
+  return stats;
+}
+
+
+promise_test(async t => {
+  // Load the IDL to know which members to look for.
+  const idl = await fetch("/interfaces/webrtc-stats.idl").then(r => r.text());
+  // For the RTCStats definition.
+  const webrtcIdl = await fetch("/interfaces/webrtc.idl").then(r => r.text());
+  const astArray = WebIDL2.parse(idl + webrtcIdl);
+
+  let all = {};
+  for (let type in dictionaryNames) {
+    // TODO: make use of audio/video distinction
+    let dictionaries = dictionaryNames[type].audio ? Object.values(dictionaryNames[type]) : [dictionaryNames[type]];
+    all[type] = [];
+    let i = 0;
+    // Recursively collect members from inherited dictionaries
+    while (i < dictionaries.length) {
+      const dictName = dictionaries[i];
+      const dict = astArray.find(i => i.name === dictName && i.type === "dictionary");
+      if (dict && dict.members) {
+        all[type] = all[type].concat(dict.members.map(m => m.name));
+        if (dict.inheritance) {
+          dictionaries.push(dict.inheritance);
+        }
+      }
+      i++;
+    }
+    // Unique-ify
+    all[type] = [...new Set(all[type])];
+  }
+
+  const remaining = JSON.parse(JSON.stringify(all));
+  for (const type in remaining) {
+    remaining[type] = new Set(remaining[type]);
+  }
+
+  const pc1 = new RTCPeerConnection();
+  t.add_cleanup(() => pc1.close());
+  const pc2 = new RTCPeerConnection();
+  t.add_cleanup(() => pc2.close());
+
+  const dc1 = pc1.createDataChannel("dummy", {negotiated: true, id: 0});
+  const dc2 = pc2.createDataChannel("dummy", {negotiated: true, id: 0});
+  // Use a real gUM stream to ensure that all stats exposing hardware
+  // capabilities are also exposed.
+  const stream = await navigator.mediaDevices.getUserMedia(
+      {video: true, audio: true});
+  for (const track of stream.getTracks()) {
+    pc1.addTrack(track, stream);
+    pc2.addTrack(track, stream);
+    t.add_cleanup(() => track.stop());
+  }
+
+  // Do a non-trickle ICE handshake to ensure that TCP candidates are gathered.
+  await pc1.setLocalDescription();
+  await waitForIceGatheringState(pc1, ['complete']);
+  await pc2.setRemoteDescription(pc1.localDescription);
+  await pc2.setLocalDescription();
+  await waitForIceGatheringState(pc2, ['complete']);
+  await pc1.setRemoteDescription(pc2.localDescription);
+
+  const stats = await getAllStats(t, pc1);
+
+  // The focus of this test is not API correctness, but rather to provide an
+  // accessible metric of implementation progress by dictionary member. We count
+  // whether we've seen each dictionary's members in getStats().
+
+  test(t => {
+    for (const stat of stats.values()) {
+      if (all[stat.type]) {
+        const memberNames = all[stat.type];
+        const remainingNames = remaining[stat.type];
+        assert_true(memberNames.length > 0, "Test error. No member found.");
+        for (const memberName of memberNames) {
+          if (memberName in stat) {
+            assert_not_equals(stat[memberName], undefined, "Not undefined");
+            remainingNames.delete(memberName);
+          }
+        }
+      }
+    }
+  }, "Validating stats");
+
+  for (const type in all) {
+    for (const memberName of all[type]) {
+      test(t => {
+        assert_implements_optional(isPropertyTestable(type, memberName),
+            `${type}.${memberName} marked as not testable.`);
+        assert_true(!remaining[type].has(memberName),
+            `Is ${memberName} present`);
+      }, `${type}'s ${memberName}`);
    }
+  }
+}, 'getStats succeeds');
+</script>