path: root/browser/base/content/test/webrtc/get_user_media.html
<!DOCTYPE html>
<html>
<head><meta charset="UTF-8"></head>
<body>
<div id="message"></div>
<script>
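// Content page used by the browser-chrome WebRTC tests in this directory.
// The parent test drives the functions defined below and observes progress
// through the status strings posted via message().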
// Specifies whether we are using fake streams to run this automation
var useFakeStreams = true;
try {
  var audioDevice = SpecialPowers.getCharPref("media.audio_loopback_dev");
  var videoDevice = SpecialPowers.getCharPref("media.video_loopback_dev");
  dump("TEST DEVICES: Using media devices:\n");
  dump("audio: " + audioDevice + "\nvideo: " + videoDevice + "\n");
  useFakeStreams = false;
} catch (e) {
  dump("TEST DEVICES: No test devices found (in media.{audio,video}_loopback_dev, using fake streams.\n");
  useFakeStreams = true;
}

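// Display a status message in the page and forward it to the parent test.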
function message(m) {
  document.getElementById("message").innerHTML += `${m}<br>`;
  top.postMessage(m, "*");
}

var gStreams = [];
var gVideoEvents = [];
var gAudioEvents = [];

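// Request capture via getUserMedia. aAudio/aVideo toggle the corresponding
// constraints, aShare requests display capture with the given mediaSource
// (e.g. "screen"), and aBadDevice swaps in a deviceId that matches nothing.
// Posts "ok" on success or "error: ..." on failure, and logs
// mute/unmute/ended events on the acquired tracks.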
async function requestDevice(aAudio, aVideo, aShare, aBadDevice = false) {
  const opts = {video: aVideo, audio: aAudio};
  if (aShare) {
    opts.video = { mediaSource: aShare };
    SpecialPowers.wrap(document).notifyUserGestureActivation();
  }
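  // Without loopback devices, ask for synthesized capture streams; "fake"
  // is a Mozilla-specific, non-standard constraint used by automation.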
  if (useFakeStreams) {
    opts.fake = true;
  }

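  // Bad-device runs override the constraint with a deviceId that cannot
  // match any real device, to exercise the failure path.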
  if (aVideo && aBadDevice) {
    opts.video = {
      deviceId: "bad device",
    };
    opts.fake = true;
  }

  if (aAudio && aBadDevice) {
    opts.audio = {
      deviceId: "bad device",
    };
    opts.fake = true;
  }

  try {
    const stream = await navigator.mediaDevices.getUserMedia(opts);
    gStreams.push(stream);

    const videoTrack = stream.getVideoTracks()[0];
    if (videoTrack) {
      for (const name of ["mute", "unmute", "ended"]) {
        videoTrack.addEventListener(name, () => gVideoEvents.push(name));
      }
    }

    const audioTrack = stream.getAudioTracks()[0];
    if (audioTrack) {
      for (const name of ["mute", "unmute", "ended"]) {
        audioTrack.addEventListener(name, () => gAudioEvents.push(name));
      }
    }
    message("ok");
  } catch (err) {
    message("error: " + err);
  }
}

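// Prompt for an audio output device with selectAudioOutput(). When
// options.requestSameDevice is set, ask for the previously selected device;
// otherwise audioOutputOptions stays undefined and any output may be chosen.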
let selectedAudioOutputId;
async function requestAudioOutput(options = {}) {
  const audioOutputOptions = options.requestSameDevice && {
    deviceId: selectedAudioOutputId,
  };
  SpecialPowers.wrap(document).notifyUserGestureActivation();
  try {
    ({ deviceId: selectedAudioOutputId } =
     await navigator.mediaDevices.selectAudioOutput(audioOutputOptions));
    message("ok");
  } catch (err) {
    message("error: " + err);
  }
}

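// Tell the parent test that the page has finished loading.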
message("pending");

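// Stop and detach every track of the given kind ("audio" or "video"),
// drop streams left without tracks, and reset that kind's event log.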
function stopTracks(aKind) {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      if (track.kind == aKind) {
        track.stop();
        stream.removeTrack(track);
      }
    }
  }
  gStreams = gStreams.filter(s => !!s.getTracks().length);
  if (aKind == "video") {
    gVideoEvents = [];
  } else if (aKind == "audio") {
    gAudioEvents = [];
  }
}

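// Stop all tracks on all streams, clear the recorded state, and notify the
// parent test.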
function closeStream() {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      track.stop();
    }
  }
  gStreams = [];
  gVideoEvents = [];
  gAudioEvents = [];
  message("closed");
}
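
// A minimal sketch of how an embedder could drive this harness from a
// same-origin parent page (hypothetical driver; the real browser-chrome
// tests call these globals from test scripts instead):
//
//   const frame = document.querySelector("iframe");   // loads this page
//   window.addEventListener("message", ({ data }) => {
//     // data is one of "pending", "ok", "closed", or "error: ..."
//     console.log("harness:", data);
//   });
//   frame.contentWindow.requestDevice(true, true);    // audio + video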
</script>
</body>
</html>