path: root/browser/base/content/test/webrtc/get_user_media2.html
<!DOCTYPE html>
<html>
<head><meta charset="UTF-8"></head>
<body>
<div id="message"></div>
<script>
// Whether to use fake streams when running this automated test.
var useFakeStreams = true;
try {
  var audioDevice = SpecialPowers.getCharPref("media.audio_loopback_dev");
  var videoDevice = SpecialPowers.getCharPref("media.video_loopback_dev");
  dump("TEST DEVICES: Using media devices:\n");
  dump("audio: " + audioDevice + "\nvideo: " + videoDevice + "\n");
  useFakeStreams = false;
} catch (e) {
  dump("TEST DEVICES: No test devices found (in media.{audio,video}_loopback_dev, using fake streams.\n");
  useFakeStreams = true;
}

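// Reports test progress: appends the message to the page and forwards it to
// the embedding test harness via postMessage.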
function message(m) {
  // eslint-disable-next-line no-unsanitized/property
  document.getElementById("message").innerHTML += `${m}<br>`;
  top.postMessage(m, "*");
}

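// Acquired streams, plus per-kind logs of track events ("mute", "unmute",
// "ended") for the harness to inspect.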
var gStreams = [];
var gVideoEvents = [];
var gAudioEvents = [];

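// Calls getUserMedia with the requested constraints. aAudio/aVideo toggle the
// basic tracks, aShare selects a screen-sharing mediaSource instead of a
// camera, and aBadDevice substitutes an unknown deviceId to force a failure.
// Reports "ok" or "error: <err>" back to the harness.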
async function requestDevice(aAudio, aVideo, aShare, aBadDevice = false) {
  const opts = {video: aVideo, audio: aAudio};
  if (aShare) {
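    // mediaSource is a Mozilla-specific constraint selecting e.g. "screen"
    // or "window" capture instead of a camera.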
    opts.video = { mediaSource: aShare };
  }
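  // "fake" is a Mozilla-only testing constraint that substitutes synthetic
  // streams for real capture devices.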
  if (useFakeStreams) {
    opts.fake = true;
  }

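  // A nonexistent deviceId makes the request reject, exercising the error
  // reporting path below.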
  if (aVideo && aBadDevice) {
    opts.video = {
      deviceId: "bad device",
    };
    opts.fake = true;
  }

  if (aAudio && aBadDevice) {
    opts.audio = {
      deviceId: "bad device",
    };
    opts.fake = true;
  }

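  // Request the stream and log subsequent track events so the harness can
  // assert on them later.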
  try {
    const stream = await navigator.mediaDevices.getUserMedia(opts);
    gStreams.push(stream);

    const videoTrack = stream.getVideoTracks()[0];
    if (videoTrack) {
      for (const name of ["mute", "unmute", "ended"]) {
        videoTrack.addEventListener(name, () => gVideoEvents.push(name));
      }
    }

    const audioTrack = stream.getAudioTracks()[0];
    if (audioTrack) {
      for (const name of ["mute", "unmute", "ended"]) {
        audioTrack.addEventListener(name, () => gAudioEvents.push(name));
      }
    }
    message("ok");
  } catch (err) {
    message("error: " + err);
  }
}
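// Signal the harness that the page has loaded.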
message("pending");

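// Stops and removes all tracks of the given kind ("audio" or "video"),
// drops streams left with no tracks, and resets that kind's event log.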
function stopTracks(aKind) {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      if (track.kind == aKind) {
        track.stop();
        stream.removeTrack(track);
      }
    }
  }
  gStreams = gStreams.filter(s => !!s.getTracks().length);
  if (aKind == "video") {
    gVideoEvents = [];
  } else if (aKind == "audio") {
    gAudioEvents = [];
  }
}

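// Stops every track on every stream, clears all state, and reports "closed".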
function closeStream() {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      track.stop();
    }
  }
  gStreams = [];
  gVideoEvents = [];
  gAudioEvents = [];
  message("closed");
}
</script>
</body>
</html>