<!DOCTYPE HTML>
<html>
<head>
  <script type="application/javascript" src="pc.js"></script>
  <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
  <script type="application/javascript" src="helpers_from_wpt/sdp.js"></script>
  <script type="application/javascript" src="simulcast.js"></script>
  <script type="application/javascript" src="stats.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
  bug: "1231507",
  title: "Basic video-only peer connection with Simulcast answer",
  visible: true
});

runNetworkTest(async () => {
  await pushPrefs(
    // 180Kbps was determined empirically; set well above the 80Kbps+overhead
    // needed for the two simulcast streams. 100Kbps was apparently too low.
    ['media.peerconnection.video.min_bitrate_estimate', 180*1000]);

  // [TODO] re-enable HW decoder after bug 1526207 is fixed.
  if (navigator.userAgent.includes("Android")) {
    await pushPrefs(["media.navigator.mediadatadecoder_vpx_enabled", false]);
  }
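  // The offerer acts as the receiver: it offers two recvonly video m-sections,
  // while the answerer sends both simulcast layers from a single sender.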
  const offerer = new RTCPeerConnection();
  const answerer = new RTCPeerConnection();
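  // Trickle ICE candidates directly between the two in-page connections;
  // any addIceCandidate failure fails the test.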
  const add = (pc, can, failed) => can && pc.addIceCandidate(can).catch(failed);
  offerer.onicecandidate = e => add(answerer, e.candidate, generateErrorCallback());
  answerer.onicecandidate = e => add(offerer, e.candidate, generateErrorCallback());
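  // Collect a playback promise for each incoming track so we can later wait
  // for loadedmetadata on both simulcast layers.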
  const metadataToBeLoaded = [];
  offerer.ontrack = (e) => {
    metadataToBeLoaded.push(getPlaybackWithLoadedMetadata(e.track));
  };

  // Two recv transceivers, one for each simulcast stream
  offerer.addTransceiver('video', { direction: 'recvonly' });
  offerer.addTransceiver('video', { direction: 'recvonly' });

  const offer = await offerer.createOffer();
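  // midToRid (from simulcast.js) rewrites the two recvonly m-sections into a
  // single m-section that requests simulcast by rid, so the answerer
  // negotiates one sender with two encodings.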
  const mungedOffer = midToRid(offer);
  info(`Transformed recv offer to simulcast: ${offer.sdp} to ${mungedOffer}`);

  await answerer.setRemoteDescription({type: 'offer', sdp: mungedOffer});

  // One send transceiver, which will be used to send both simulcast streams
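  // The send source is a small canvas-captured test pattern provided by the
  // shared VideoFrameEmitter helper.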
  const emitter = new VideoFrameEmitter();
  const videoStream = emitter.stream();
  const sender = answerer.addTrack(videoStream.getVideoTracks()[0], videoStream);
  let parameters = sender.getParameters();
  is(parameters.encodings.length, 2, 'Sender should have 2 encodings');
  is(answerer.getSenders().length, 1, 'Answerer should have 1 sender');
  emitter.start();

  await offerer.setLocalDescription(offer);
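  // The offer was munged mid -> rid, so the offerer's mids double as the rids
  // the answerer sends with. (Transceiver mids are populated once the local
  // offer has been applied.)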
  const rids = offerer.getTransceivers().map(t => t.mid);
  is(rids.length, 2, 'Should have 2 mids in offer');
  ok(rids[0] != '', 'First mid should be non-empty');
  ok(rids[1] != '', 'Second mid should be non-empty');
  info(`rids: ${JSON.stringify(rids)}`);

  parameters = sender.getParameters();
  info(`parameters: ${JSON.stringify(parameters)}`);
  const observedRids = parameters.encodings.map(({rid}) => rid);
  isDeeply(observedRids, rids,
           'Sender rids should match the rids from the munged offer');
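  // Cap both layers at 40Kbps; send the first at full resolution and the
  // second at half resolution.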
  parameters.encodings[0].maxBitrate = 40000;
  parameters.encodings[0].scaleResolutionDownBy = 1;
  parameters.encodings[1].maxBitrate = 40000;
  parameters.encodings[1].scaleResolutionDownBy = 2;
  await sender.setParameters(parameters);

  const answer = await answerer.createAnswer();
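  // ridToMid (from simulcast.js) does the reverse transform: it expands the
  // simulcast answer's single m-section into two m-sections, one per rid, so
  // the non-simulcast offerer can accept it.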
  const mungedAnswer = ridToMid(answer);
  info(`Transformed send simulcast answer to multiple m-sections: ${answer.sdp} to ${mungedAnswer}`);
  await offerer.setRemoteDescription({type: 'answer', sdp: mungedAnswer});
  await answerer.setLocalDescription(answer);

  is(metadataToBeLoaded.length, 2, 'Offerer should have gotten 2 ontrack events');
  info('Waiting for 2 loadedmetadata events');
  const videoElems = await Promise.all(metadataToBeLoaded);
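  // Kick off RTCP synchronization now, but only await it after playback has
  // been verified, so the stats checks below run against reports that
  // include RTCP data.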
  const statsReady =
    Promise.all([waitForSyncedRtcp(offerer), waitForSyncedRtcp(answerer)]);

  const helper = new VideoStreamHelper();
  info('Waiting for first video element to start playing');
  await helper.checkVideoPlaying(videoElems[0]);
  info('Waiting for second video element to start playing');
  await helper.checkVideoPlaying(videoElems[1]);
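  // With scaleResolutionDownBy of 1 and 2, the first layer should arrive at
  // the full source resolution and the second at half of it.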
  is(videoElems[0].videoWidth, 64,
     "sink is same width as source, modulo our cropping algorithm");
  is(videoElems[0].videoHeight, 64,
     "sink is same height as source, modulo our cropping algorithm");
  is(videoElems[1].videoWidth, 32,
     "sink is 1/2 width of source, modulo our cropping algorithm");
  is(videoElems[1].videoHeight, 32,
     "sink is 1/2 height of source, modulo our cropping algorithm");

  await statsReady;
  info("Stats ready");
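  // Verify the sender's stats once RTCP is synced; the check helpers come
  // from stats.js, with 2 being the expected number of simulcast streams.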
  const senderStats = await sender.getStats();
  checkSenderStats(senderStats, 2);
  checkExpectedFields(senderStats);
  pedanticChecks(senderStats);
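  // Clean up: stop the frame source and the sent track, then close both
  // peer connections.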
  emitter.stop();
  videoStream.getVideoTracks()[0].stop();
  offerer.close();
  answerer.close();
});
</script>
</pre>
</body>
</html>