<!DOCTYPE html>
<html>
<head><meta charset="UTF-8"></head>
<body>
<div id="message"></div>
<script>
// Specifies whether we are using fake streams to run this automation
var useFakeStreams = true;
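// Probe the loopback-device prefs. If both are set we use the real
// (loopback) test devices; getCharPref is expected to throw for an unset
// pref, in which case we fall back to fake streams.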
try {
  var audioDevice = SpecialPowers.getCharPref("media.audio_loopback_dev");
  var videoDevice = SpecialPowers.getCharPref("media.video_loopback_dev");
  dump("TEST DEVICES: Using media devices:\n");
  dump("audio: " + audioDevice + "\nvideo: " + videoDevice + "\n");
  useFakeStreams = false;
} catch (e) {
  dump("TEST DEVICES: No test devices found (in media.{audio,video}_loopback_dev), using fake streams.\n");
  useFakeStreams = true;
}
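// Append a result to the page and forward it to the top window via postMessage.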
function message(m) {
  document.getElementById("message").innerHTML += `${m}<br>`;
  top.postMessage(m, "*");
}
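// All streams returned by getUserMedia so far, plus the mute/unmute/ended
// events recorded for their video and audio tracks.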
var gStreams = [];
var gVideoEvents = [];
var gAudioEvents = [];
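// Request a stream with the given audio/video constraints. aShare, if set, is
// used as the video mediaSource (e.g. screen or window capture), and
// aBadDevice forces a bogus deviceId so the request fails. Reports "ok" or
// "error: ..." via message().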
async function requestDevice(aAudio, aVideo, aShare, aBadDevice = false) {
  const opts = {video: aVideo, audio: aAudio};
  if (aShare) {
    opts.video = { mediaSource: aShare };
  }
  if (useFakeStreams) {
    opts.fake = true;
  }

  if (aVideo && aBadDevice) {
    opts.video = {
      deviceId: "bad device",
    };
    opts.fake = true;
  }

  if (aAudio && aBadDevice) {
    opts.audio = {
      deviceId: "bad device",
    };
    opts.fake = true;
  }
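  // Ask for the stream, then record mute/unmute/ended events on the first
  // video and audio track.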
  try {
    const stream = await navigator.mediaDevices.getUserMedia(opts);
    gStreams.push(stream);

    const videoTrack = stream.getVideoTracks()[0];
    if (videoTrack) {
      for (const name of ["mute", "unmute", "ended"]) {
        videoTrack.addEventListener(name, () => gVideoEvents.push(name));
      }
    }

    const audioTrack = stream.getAudioTracks()[0];
    if (audioTrack) {
      for (const name of ["mute", "unmute", "ended"]) {
        audioTrack.addEventListener(name, () => gAudioEvents.push(name));
      }
    }
    message("ok");
  } catch (err) {
    message("error: " + err);
  }
}
message("pending");
|
|
|
|
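// Stop and remove all tracks of the given kind ("audio" or "video") from the
// open streams, drop streams that end up empty, and clear the corresponding
// event log.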
function stopTracks(aKind) {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      if (track.kind == aKind) {
        track.stop();
        stream.removeTrack(track);
      }
    }
  }
  gStreams = gStreams.filter(s => !!s.getTracks().length);
  if (aKind == "video") {
    gVideoEvents = [];
  } else if (aKind == "audio") {
    gAudioEvents = [];
  }
}
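// Stop every track, forget all streams and recorded events, and report
// "closed" via message().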
function closeStream() {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      track.stop();
    }
  }
  gStreams = [];
  gVideoEvents = [];
  gAudioEvents = [];
  message("closed");
}
</script>
</body>
</html>