summary refs log tree commit diff stats
path: root/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc
diff options
context:
space:
mode:
Diffstat (limited to 'comm/chat/protocols/matrix/lib/matrix-sdk/webrtc')
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/audioContext.js52
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/call.js2364
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callEventHandler.js339
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callEventTypes.js19
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callFeed.js294
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/groupCall.js1213
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/groupCallEventHandler.js181
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/mediaHandler.js395
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/callStatsReportGatherer.js194
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/callStatsReportSummary.js5
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStats.js34
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStatsBuilder.js33
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStatsReportBuilder.js127
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/groupCallStats.js80
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaSsrcHandler.js62
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackHandler.js69
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackStats.js150
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackStatsHandler.js82
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/statsReport.js28
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/statsReportEmitter.js36
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/summaryStatsReportGatherer.js103
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/trackStatsBuilder.js172
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/transportStats.js5
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/transportStatsBuilder.js40
-rw-r--r--comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/valueFormatter.js31
25 files changed, 6108 insertions, 0 deletions
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/audioContext.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/audioContext.js
new file mode 100644
index 0000000000..4cecf68ad3
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/audioContext.js
@@ -0,0 +1,52 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.releaseContext = exports.acquireContext = void 0;
+/*
+Copyright 2022 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
// The lazily-created shared AudioContext; null whenever no references are held.
let audioContext = null;
// Number of outstanding acquireContext() references not yet released.
let refCount = 0;
+
/**
 * Acquires a reference to the shared AudioContext.
 * It's highly recommended to reuse this AudioContext rather than creating your
 * own, because multiple AudioContexts can be problematic in some browsers.
 * Make sure to call releaseContext when you're done using it.
 * @returns The shared AudioContext
 */
const acquireContext = () => {
  if (audioContext === null) {
    audioContext = new AudioContext();
  }
  refCount += 1;
  return audioContext;
};
+
/**
 * Signals that one of the references to the shared AudioContext has been
 * released, allowing the context and associated hardware resources to be
 * cleaned up if nothing else is using it.
 */
exports.acquireContext = acquireContext;
const releaseContext = () => {
  // Guard against unbalanced release calls: previously an extra release drove
  // refCount negative, after which the count could never reach 0 again and
  // the AudioContext (and its hardware resources) leaked forever.
  if (refCount > 0) refCount--;
  if (refCount === 0 && audioContext !== null) {
    // close() returns a promise; a failure (e.g. context already closed) is
    // non-fatal, so swallow it rather than raise an unhandled rejection.
    audioContext.close().catch(() => {});
    audioContext = null;
  }
};
exports.releaseContext = releaseContext;
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/call.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/call.js
new file mode 100644
index 0000000000..862d7ce1f8
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/call.js
@@ -0,0 +1,2364 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.MatrixCall = exports.CallType = exports.CallState = exports.CallParty = exports.CallEvent = exports.CallErrorCode = exports.CallError = exports.CallDirection = void 0;
+exports.createNewMatrixCall = createNewMatrixCall;
+exports.genCallID = genCallID;
+exports.setTracksEnabled = setTracksEnabled;
+exports.supportsMatrixCall = supportsMatrixCall;
+var _uuid = require("uuid");
+var _sdpTransform = require("sdp-transform");
+var _logger = require("../logger");
+var _utils = require("../utils");
+var _event = require("../@types/event");
+var _randomstring = require("../randomstring");
+var _callEventTypes = require("./callEventTypes");
+var _callFeed = require("./callFeed");
+var _typedEventEmitter = require("../models/typed-event-emitter");
+var _deviceinfo = require("../crypto/deviceinfo");
+var _groupCall = require("./groupCall");
+var _httpApi = require("../http-api");
// Babel helper: collects an object's own string keys plus its own symbol keys
// (optionally only the enumerable symbols, matching the object-spread rules).
function ownKeys(object, enumerableOnly) {
  const keys = Object.keys(object);
  if (Object.getOwnPropertySymbols) {
    let symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly) {
      symbols = symbols.filter((sym) => Object.getOwnPropertyDescriptor(object, sym).enumerable);
    }
    keys.push(...symbols);
  }
  return keys;
}
+function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
// Babel helper: define `key` on `obj` as an enumerable/writable/configurable
// data property, using defineProperty when the key already exists anywhere on
// the prototype chain (so a setter there is not invoked) and plain assignment
// otherwise. Returns `obj`.
function _defineProperty(obj, key, value) {
  const propKey = _toPropertyKey(key);
  if (propKey in obj) {
    Object.defineProperty(obj, propKey, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[propKey] = value;
  }
  return obj;
}
// Babel helper: coerce a computed property key to a valid property key —
// symbols pass through untouched, everything else becomes a string.
function _toPropertyKey(arg) {
  const key = _toPrimitive(arg, "string");
  if (typeof key === "symbol") {
    return key;
  }
  return String(key);
}
// Babel helper: spec-style ToPrimitive. Primitives are returned as-is; an
// object's Symbol.toPrimitive method (if any) is tried first with the given
// hint (defaulting to "default"), and must yield a non-object; otherwise the
// value is coerced with String or Number depending on the hint.
function _toPrimitive(input, hint) {
  if (typeof input !== "object" || input === null) return input;
  const exoticToPrim = input[Symbol.toPrimitive];
  if (exoticToPrim !== undefined) {
    const result = exoticToPrim.call(input, hint || "default");
    if (typeof result !== "object") return result;
    throw new TypeError("@@toPrimitive must return a primitive value.");
  }
  return (hint === "string" ? String : Number)(input);
}
/*
  Copyright 2015, 2016 OpenMarket Ltd
  Copyright 2017 New Vector Ltd
  Copyright 2019, 2020 The Matrix.org Foundation C.I.C.
  Copyright 2021 - 2022 Šimon Brandner <simon.bra.ag@gmail.com>

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
 */
/**
 * This is an internal module. See {@link createNewMatrixCall} for the public API.
 */
// Transpiled TS enum: the kinds of media a transceiver can carry.
var MediaType = /*#__PURE__*/ (function (obj) {
  obj.AUDIO = "audio";
  obj.VIDEO = "video";
  return obj;
})(MediaType || {});
// Transpiled TS enum: codec names we know how to tweak in SDP.
var CodecName = /*#__PURE__*/ (function (obj) {
  obj.OPUS = "opus";
  return obj;
})(CodecName || {}); // add more as needed
// Used internally to specify modifications to codec parameters in SDP
// Transpiled TS enum: lifecycle states of a MatrixCall.
let CallState = /*#__PURE__*/ (function (obj) {
  obj.Fledgling = "fledgling";
  obj.InviteSent = "invite_sent";
  obj.WaitLocalMedia = "wait_local_media";
  obj.CreateOffer = "create_offer";
  obj.CreateAnswer = "create_answer";
  obj.Connecting = "connecting";
  obj.Connected = "connected";
  obj.Ringing = "ringing";
  obj.Ended = "ended";
  return obj;
})({});
+exports.CallState = CallState;
// Transpiled TS enum: voice-only vs. video call.
let CallType = /*#__PURE__*/ (function (obj) {
  obj.Voice = "voice";
  obj.Video = "video";
  return obj;
})({});
+exports.CallType = CallType;
// Transpiled TS enum: whether we placed or received the call.
let CallDirection = /*#__PURE__*/ (function (obj) {
  obj.Inbound = "inbound";
  obj.Outbound = "outbound";
  return obj;
})({});
+exports.CallDirection = CallDirection;
// Transpiled TS enum: which side of the call an action originated from.
let CallParty = /*#__PURE__*/ (function (obj) {
  obj.Local = "local";
  obj.Remote = "remote";
  return obj;
})({});
+exports.CallParty = CallParty;
// Transpiled TS enum: event names emitted by MatrixCall.
let CallEvent = /*#__PURE__*/ (function (obj) {
  obj.Hangup = "hangup";
  obj.State = "state";
  obj.Error = "error";
  obj.Replaced = "replaced";
  obj.LocalHoldUnhold = "local_hold_unhold";
  obj.RemoteHoldUnhold = "remote_hold_unhold";
  obj.HoldUnhold = "hold_unhold";
  obj.FeedsChanged = "feeds_changed";
  obj.AssertedIdentityChanged = "asserted_identity_changed";
  obj.LengthChanged = "length_changed";
  obj.DataChannel = "datachannel";
  obj.SendVoipEvent = "send_voip_event";
  obj.PeerConnectionCreated = "peer_connection_created";
  return obj;
})({});
+exports.CallEvent = CallEvent;
// Transpiled TS enum: machine-readable reasons a call can fail or end.
let CallErrorCode = /*#__PURE__*/ (function (obj) {
  obj.UserHangup = "user_hangup";
  obj.LocalOfferFailed = "local_offer_failed";
  obj.NoUserMedia = "no_user_media";
  obj.UnknownDevices = "unknown_devices";
  obj.SendInvite = "send_invite";
  obj.CreateAnswer = "create_answer";
  obj.CreateOffer = "create_offer";
  obj.SendAnswer = "send_answer";
  obj.SetRemoteDescription = "set_remote_description";
  obj.SetLocalDescription = "set_local_description";
  obj.AnsweredElsewhere = "answered_elsewhere";
  obj.IceFailed = "ice_failed";
  obj.InviteTimeout = "invite_timeout";
  obj.Replaced = "replaced";
  // NB. historical wire value really is "signalling_timeout".
  obj.SignallingFailed = "signalling_timeout";
  obj.UserBusy = "user_busy";
  obj.Transferred = "transferred";
  obj.NewSession = "new_session";
  return obj;
})({});
+/**
+ * The version field that we set in m.call.* events
+ */
+exports.CallErrorCode = CallErrorCode;
const VOIP_PROTO_VERSION = "1";

/** The fallback ICE server to use for STUN or TURN protocols. */
// Only consulted when no TURN servers were configured and the client permits
// fallback ICE servers (see the MatrixCall constructor).
const FALLBACK_ICE_SERVER = "stun:turn.matrix.org";

/** The length of time a call can be ringing for. */
const CALL_TIMEOUT_MS = 60 * 1000; // ms
/** The time after which we increment callLength */
const CALL_LENGTH_INTERVAL = 1000; // ms
/** The time after which we end the call, if ICE got disconnected */
const ICE_DISCONNECTED_TIMEOUT = 30 * 1000; // ms
/** The time after which we try an ICE restart, if ICE got disconnected */
// Kept shorter than ICE_DISCONNECTED_TIMEOUT so a restart is attempted before
// the call is hung up.
const ICE_RECONNECTING_TIMEOUT = 2 * 1000; // ms
/**
 * Error subclass carrying a machine-readable CallErrorCode.
 * The inner error is flattened into the message (historical wire-compatible
 * format) and additionally kept on `cause` so its stack trace is not lost.
 */
class CallError extends Error {
  constructor(code, msg, err) {
    // Message format is unchanged: "<msg>: <stringified err>".
    super(msg + ": " + err);
    // Plain assignment replaces the transpiled _defineProperty field-init,
    // which had the same net effect (enumerable/writable own property).
    this.code = code;
    // Preserve the original error so callers can inspect its stack.
    this.cause = err;
  }
}
+exports.CallError = CallError;
// Generates a unique-enough call ID: millisecond timestamp followed by 16
// random characters.
function genCallID() {
  return `${Date.now()}${(0, _randomstring.randomString)(16)}`;
}
// Builds the list of codec parameter modifications to apply to our SDP.
// PTT (push-to-talk) calls cap the Opus average bitrate at 12 kbps.
function getCodecParamMods(isPtt) {
  const opusMod = {
    mediaType: "audio",
    codec: "opus",
    enableDtx: true,
    maxAverageBitrate: isPtt ? 12000 : undefined
  };
  return [opusMod];
}
+
+/**
+ * These now all have the call object as an argument. Why? Well, to know which call a given event is
+ * about you have three options:
+ * 1. Use a closure as the callback that remembers what call it's listening to. This can be
+ * a pain because you need to pass the listener function again when you remove the listener,
+ * which might be somewhere else.
+ * 2. Use the not-very-well-known fact that EventEmitter sets 'this' to the emitter object in the
+ * callback. This doesn't really play well with modern Typescript and eslint and doesn't work
+ * with our pattern of re-emitting events.
+ * 3. Pass the object in question as an argument to the callback.
+ *
+ * Now that we have group calls which have to deal with multiple call objects, this will
+ * become more important, and I think methods 1 and 2 are just going to cause issues.
+ */
+
+// The key of the transceiver map (purpose + media type, separated by ':')
+
// generates keys for the map of transceivers
// kind is unfortunately a string rather than MediaType as this is the type of
// track.kind
function getTransceiverKey(purpose, kind) {
  return `${purpose}:${kind}`;
}
+class MatrixCall extends _typedEventEmitter.TypedEventEmitter {
  /**
   * Construct a new Matrix Call.
   * @param opts - Config options: room ID, invitee, owning client, optional
   *   forceTURN/turnServers, and (for group calls) the opponent's device and
   *   session IDs.
   * @throws If the client has no device ID.
   */
  constructor(opts) {
    super();
    // Class fields below are Babel-transpiled via _defineProperty; the arrow
    // functions are field initialisers so handlers stay bound to this call.
    _defineProperty(this, "roomId", void 0);
    _defineProperty(this, "callId", void 0);
    _defineProperty(this, "invitee", void 0);
    _defineProperty(this, "hangupParty", void 0);
    _defineProperty(this, "hangupReason", void 0);
    _defineProperty(this, "direction", void 0);
    _defineProperty(this, "ourPartyId", void 0);
    _defineProperty(this, "peerConn", void 0);
    _defineProperty(this, "toDeviceSeq", 0);
    // whether this call should have push-to-talk semantics
    // This should be set by the consumer on incoming & outgoing calls.
    _defineProperty(this, "isPtt", false);
    _defineProperty(this, "_state", CallState.Fledgling);
    _defineProperty(this, "client", void 0);
    _defineProperty(this, "forceTURN", void 0);
    _defineProperty(this, "turnServers", void 0);
    // A queue for candidates waiting to go out.
    // We try to amalgamate candidates into a single candidate message where
    // possible
    _defineProperty(this, "candidateSendQueue", []);
    _defineProperty(this, "candidateSendTries", 0);
    _defineProperty(this, "candidatesEnded", false);
    _defineProperty(this, "feeds", []);
    // our transceivers for each purpose and type of media
    _defineProperty(this, "transceivers", new Map());
    _defineProperty(this, "inviteOrAnswerSent", false);
    _defineProperty(this, "waitForLocalAVStream", false);
    _defineProperty(this, "successor", void 0);
    _defineProperty(this, "opponentMember", void 0);
    _defineProperty(this, "opponentVersion", void 0);
    // The party ID of the other side: undefined if we haven't chosen a partner
    // yet, null if we have but they didn't send a party ID.
    _defineProperty(this, "opponentPartyId", void 0);
    _defineProperty(this, "opponentCaps", void 0);
    _defineProperty(this, "iceDisconnectedTimeout", void 0);
    _defineProperty(this, "iceReconnectionTimeOut", void 0);
    _defineProperty(this, "inviteTimeout", void 0);
    _defineProperty(this, "removeTrackListeners", new Map());
    // The logic of when & if a call is on hold is nontrivial and explained in is*OnHold
    // This flag represents whether we want the other party to be on hold
    _defineProperty(this, "remoteOnHold", false);
    // the stats for the call at the point it ended. We can't get these after we
    // tear the call down, so we just grab a snapshot before we stop the call.
    // The typescript definitions have this type as 'any' :(
    _defineProperty(this, "callStatsAtEnd", void 0);
    // Perfect negotiation state: https://www.w3.org/TR/webrtc/#perfect-negotiation-example
    _defineProperty(this, "makingOffer", false);
    _defineProperty(this, "ignoreOffer", false);
    _defineProperty(this, "isSettingRemoteAnswerPending", false);
    _defineProperty(this, "responsePromiseChain", void 0);
    // If candidates arrive before we've picked an opponent (which, in particular,
    // will happen if the opponent sends candidates eagerly before the user answers
    // the call) we buffer them up here so we can then add the ones from the party we pick
    _defineProperty(this, "remoteCandidateBuffer", new Map());
    _defineProperty(this, "remoteAssertedIdentity", void 0);
    _defineProperty(this, "remoteSDPStreamMetadata", void 0);
    _defineProperty(this, "callLengthInterval", void 0);
    _defineProperty(this, "callStartTime", void 0);
    _defineProperty(this, "opponentDeviceId", void 0);
    _defineProperty(this, "opponentDeviceInfo", void 0);
    _defineProperty(this, "opponentSessionId", void 0);
    _defineProperty(this, "groupCallId", void 0);
    // Used to keep the timer for the delay before actually stopping our
    // video track after muting (see setLocalVideoMuted)
    _defineProperty(this, "stopVideoTrackTimer", void 0);
    // Used to allow connection without Video and Audio. To establish a webrtc connection without media a Data channel is
    // needed At the moment this property is true if we allow MatrixClient with isVoipWithNoMediaAllowed = true
    _defineProperty(this, "isOnlyDataChannelAllowed", void 0);
    _defineProperty(this, "stats", void 0);
    /**
     * Internal: RTCPeerConnection / signalling event handlers. They are
     * arrow-function class fields so they can be attached and detached as
     * listeners without explicit binding.
     */
    _defineProperty(this, "gotLocalIceCandidate", event => {
      if (event.candidate) {
        if (this.candidatesEnded) {
          _logger.logger.warn(`Call ${this.callId} gotLocalIceCandidate() got candidate after candidates have ended!`);
        }
        _logger.logger.debug(`Call ${this.callId} got local ICE ${event.candidate.sdpMid} ${event.candidate.candidate}`);
        if (this.callHasEnded()) return;

        // As with the offer, note we need to make a copy of this object, not
        // pass the original: that broke in Chrome ~m43.
        if (event.candidate.candidate === "") {
          // An empty candidate string signals end-of-candidates.
          this.queueCandidate(null);
        } else {
          this.queueCandidate(event.candidate);
        }
      }
    });
    _defineProperty(this, "onIceGatheringStateChange", event => {
      _logger.logger.debug(`Call ${this.callId} onIceGatheringStateChange() ice gathering state changed to ${this.peerConn.iceGatheringState}`);
      if (this.peerConn?.iceGatheringState === "complete") {
        this.queueCandidate(null); // We should leave it to WebRTC to announce the end
        _logger.logger.debug(`Call ${this.callId} onIceGatheringStateChange() ice gathering state complete, set candidates have ended`);
      }
    });
    _defineProperty(this, "getLocalOfferFailed", err => {
      _logger.logger.error(`Call ${this.callId} getLocalOfferFailed() running`, err);
      this.emit(CallEvent.Error, new CallError(CallErrorCode.LocalOfferFailed, "Failed to get local offer!", err), this);
      this.terminate(CallParty.Local, CallErrorCode.LocalOfferFailed, false);
    });
    _defineProperty(this, "getUserMediaFailed", err => {
      // If we've been replaced, delegate the failure to our successor call.
      if (this.successor) {
        this.successor.getUserMediaFailed(err);
        return;
      }
      _logger.logger.warn(`Call ${this.callId} getUserMediaFailed() failed to get user media - ending call`, err);
      this.emit(CallEvent.Error, new CallError(CallErrorCode.NoUserMedia, "Couldn't start capturing media! Is your microphone set up and " + "does this app have permission?", err), this);
      this.terminate(CallParty.Local, CallErrorCode.NoUserMedia, false);
    });
    _defineProperty(this, "onIceConnectionStateChanged", () => {
      if (this.callHasEnded()) {
        return; // because ICE can still complete as we're ending the call
      }

      _logger.logger.debug(`Call ${this.callId} onIceConnectionStateChanged() running (state=${this.peerConn?.iceConnectionState}, conn=${this.peerConn?.connectionState})`);

      // ideally we'd consider the call to be connected when we get media but
      // chrome doesn't implement any of the 'onstarted' events yet
      if (["connected", "completed"].includes(this.peerConn?.iceConnectionState ?? "")) {
        // Connection (re-)established: cancel any pending disconnect/restart
        // timers and start the call-length ticker on first connect.
        clearTimeout(this.iceDisconnectedTimeout);
        this.iceDisconnectedTimeout = undefined;
        if (this.iceReconnectionTimeOut) {
          clearTimeout(this.iceReconnectionTimeOut);
        }
        this.state = CallState.Connected;
        if (!this.callLengthInterval && !this.callStartTime) {
          this.callStartTime = Date.now();
          this.callLengthInterval = setInterval(() => {
            this.emit(CallEvent.LengthChanged, Math.round((Date.now() - this.callStartTime) / 1000), this);
          }, CALL_LENGTH_INTERVAL);
        }
      } else if (this.peerConn?.iceConnectionState == "failed") {
        this.candidatesEnded = false;
        // Firefox for Android does not yet have support for restartIce()
        // (the types say it's always defined though, so we have to cast
        // to prevent typescript from warning).
        if (this.peerConn?.restartIce) {
          this.candidatesEnded = false;
          _logger.logger.debug(`Call ${this.callId} onIceConnectionStateChanged() ice restart (state=${this.peerConn?.iceConnectionState})`);
          this.peerConn.restartIce();
        } else {
          _logger.logger.info(`Call ${this.callId} onIceConnectionStateChanged() hanging up call (ICE failed and no ICE restart method)`);
          this.hangup(CallErrorCode.IceFailed, false);
        }
      } else if (this.peerConn?.iceConnectionState == "disconnected") {
        this.candidatesEnded = false;
        // Two timers: try an ICE restart after ICE_RECONNECTING_TIMEOUT, and
        // give up entirely after the (longer) ICE_DISCONNECTED_TIMEOUT.
        this.iceReconnectionTimeOut = setTimeout(() => {
          _logger.logger.info(`Call ${this.callId} onIceConnectionStateChanged() ICE restarting because of ICE disconnected, (state=${this.peerConn?.iceConnectionState}, conn=${this.peerConn?.connectionState})`);
          if (this.peerConn?.restartIce) {
            this.candidatesEnded = false;
            this.peerConn.restartIce();
          }
          this.iceReconnectionTimeOut = undefined;
        }, ICE_RECONNECTING_TIMEOUT);
        this.iceDisconnectedTimeout = setTimeout(() => {
          _logger.logger.info(`Call ${this.callId} onIceConnectionStateChanged() hanging up call (ICE disconnected for too long)`);
          this.hangup(CallErrorCode.IceFailed, false);
        }, ICE_DISCONNECTED_TIMEOUT);
        this.state = CallState.Connecting;
      }

      // In PTT mode, override feed status to muted when we lose connection to
      // the peer, since we don't want to block the line if they're not saying anything.
      // Experimenting in Chrome, this happens after 5 or 6 seconds, which is probably
      // fast enough.
      if (this.isPtt && ["failed", "disconnected"].includes(this.peerConn.iceConnectionState)) {
        for (const feed of this.getRemoteFeeds()) {
          feed.setAudioVideoMuted(true, true);
        }
      }
    });
    _defineProperty(this, "onSignallingStateChanged", () => {
      _logger.logger.debug(`Call ${this.callId} onSignallingStateChanged() running (state=${this.peerConn?.signalingState})`);
    });
    _defineProperty(this, "onTrack", ev => {
      if (ev.streams.length === 0) {
        _logger.logger.warn(`Call ${this.callId} onTrack() called with streamless track streamless (kind=${ev.track.kind})`);
        return;
      }
      const stream = ev.streams[0];
      this.pushRemoteFeed(stream);
      // Drop the feed once its stream has lost all of its tracks; register
      // the listener only once per stream.
      if (!this.removeTrackListeners.has(stream)) {
        const onRemoveTrack = () => {
          if (stream.getTracks().length === 0) {
            _logger.logger.info(`Call ${this.callId} onTrack() removing track (streamId=${stream.id})`);
            this.deleteFeedByStream(stream);
            stream.removeEventListener("removetrack", onRemoveTrack);
            this.removeTrackListeners.delete(stream);
          }
        };
        stream.addEventListener("removetrack", onRemoveTrack);
        this.removeTrackListeners.set(stream, onRemoveTrack);
      }
    });
    _defineProperty(this, "onDataChannel", ev => {
      this.emit(CallEvent.DataChannel, ev.channel, this);
    });
    _defineProperty(this, "onNegotiationNeeded", async () => {
      _logger.logger.info(`Call ${this.callId} onNegotiationNeeded() negotiation is needed!`);
      if (this.state !== CallState.CreateOffer && this.opponentVersion === 0) {
        _logger.logger.info(`Call ${this.callId} onNegotiationNeeded() opponent does not support renegotiation: ignoring negotiationneeded event`);
        return;
      }
      this.queueGotLocalOffer();
    });
    _defineProperty(this, "onHangupReceived", msg => {
      _logger.logger.debug(`Call ${this.callId} onHangupReceived() running`);

      // party ID must match (our chosen partner hanging up the call) or be undefined (we haven't chosen
      // a partner yet but we're treating the hangup as a reject as per VoIP v0)
      if (this.partyIdMatches(msg) || this.state === CallState.Ringing) {
        // default reason is user_hangup
        this.terminate(CallParty.Remote, msg.reason || CallErrorCode.UserHangup, true);
      } else {
        _logger.logger.info(`Call ${this.callId} onHangupReceived() ignoring message from party ID ${msg.party_id}: our partner is ${this.opponentPartyId}`);
      }
    });
    _defineProperty(this, "onRejectReceived", msg => {
      _logger.logger.debug(`Call ${this.callId} onRejectReceived() running`);

      // No need to check party_id for reject because if we'd received either
      // an answer or reject, we wouldn't be in state InviteSent

      const shouldTerminate =
      // reject events also end the call if it's ringing: it's another of
      // our devices rejecting the call.
      [CallState.InviteSent, CallState.Ringing].includes(this.state) ||
      // also if we're in the init state and it's an inbound call, since
      // this means we just haven't entered the ringing state yet
      this.state === CallState.Fledgling && this.direction === CallDirection.Inbound;
      if (shouldTerminate) {
        this.terminate(CallParty.Remote, msg.reason || CallErrorCode.UserHangup, true);
      } else {
        _logger.logger.debug(`Call ${this.callId} onRejectReceived() called in wrong state (state=${this.state})`);
      }
    });
    _defineProperty(this, "onAnsweredElsewhere", msg => {
      _logger.logger.debug(`Call ${this.callId} onAnsweredElsewhere() running`);
      this.terminate(CallParty.Remote, CallErrorCode.AnsweredElsewhere, true);
    });
    // Actual constructor logic: copy the options onto the instance and
    // validate them.
    this.roomId = opts.roomId;
    this.invitee = opts.invitee;
    this.client = opts.client;
    if (!this.client.deviceId) throw new Error("Client must have a device ID to start calls");
    this.forceTURN = opts.forceTURN ?? false;
    this.ourPartyId = this.client.deviceId;
    this.opponentDeviceId = opts.opponentDeviceId;
    this.opponentSessionId = opts.opponentSessionId;
    this.groupCallId = opts.groupCallId;
    // Array of Objects with urls, username, credential keys
    this.turnServers = opts.turnServers || [];
    if (this.turnServers.length === 0 && this.client.isFallbackICEServerAllowed()) {
      this.turnServers.push({
        urls: [FALLBACK_ICE_SERVER]
      });
    }
    for (const server of this.turnServers) {
      (0, _utils.checkObjectHasKeys)(server, ["urls"]);
    }
    this.callId = genCallID();
    // If the Client provides calls without audio and video we need a datachannel for a webrtc connection
    this.isOnlyDataChannelAllowed = this.client.isVoipWithNoMediaAllowed;
  }
+
  /**
   * Place a voice call to this room.
   * Requests audio only (no video) from placeCall.
   * @throws If you have not specified a listener for 'error' events.
   */
  async placeVoiceCall() {
    await this.placeCall(true, false);
  }
+
  /**
   * Place a video call to this room.
   * Requests both audio and video from placeCall.
   * @throws If you have not specified a listener for 'error' events.
   */
  async placeVideoCall() {
    await this.placeCall(true, true);
  }
+
  /**
   * Create a datachannel using this call's peer connection.
   * Also emits a CallEvent.DataChannel event for the new channel.
   * @param label - A human readable label for this datachannel
   * @param options - An object providing configuration options for the data channel.
   * @returns The newly created RTCDataChannel.
   */
  createDataChannel(label, options) {
    const dataChannel = this.peerConn.createDataChannel(label, options);
    this.emit(CallEvent.DataChannel, dataChannel, this);
    return dataChannel;
  }
  /** @returns The remote party's room member, or undefined if not (yet) known. */
  getOpponentMember() {
    return this.opponentMember;
  }
  /** @returns The opponent's device ID, or undefined if not set (non-group calls). */
  getOpponentDeviceId() {
    return this.opponentDeviceId;
  }
  /** @returns The opponent's session ID, or undefined if not set. */
  getOpponentSessionId() {
    return this.opponentSessionId;
  }
+ opponentCanBeTransferred() {
+ return Boolean(this.opponentCaps && this.opponentCaps["m.call.transferee"]);
+ }
+ opponentSupportsDTMF() {
+ return Boolean(this.opponentCaps && this.opponentCaps["m.call.dtmf"]);
+ }
  /** @returns The remote party's asserted identity, if one has been received. */
  getRemoteAssertedIdentity() {
    return this.remoteAssertedIdentity;
  }
  /** The current call state (one of {@link CallState}). */
  get state() {
    return this._state;
  }
  /**
   * Sets the call state and emits a CallEvent.State event carrying the new
   * state, the previous state and this call.
   */
  set state(state) {
    const oldState = this._state;
    this._state = state;
    this.emit(CallEvent.State, state, oldState, this);
  }
  /** Video if we have a usermedia video sender or a remote video track, otherwise Voice. */
  get type() {
    // we may want to look for a video receiver here rather than a track to match the
    // sender behaviour, although in practice they should be the same thing
    return this.hasUserMediaVideoSender || this.hasRemoteUserMediaVideoTrack ? CallType.Video : CallType.Voice;
  }
+ get hasLocalUserMediaVideoTrack() {
+ return !!this.localUsermediaStream?.getVideoTracks().length;
+ }
+ get hasRemoteUserMediaVideoTrack() {
+ return this.getRemoteFeeds().some(feed => {
+ return feed.purpose === _callEventTypes.SDPStreamMetadataPurpose.Usermedia && feed.stream?.getVideoTracks().length;
+ });
+ }
+ get hasLocalUserMediaAudioTrack() {
+ return !!this.localUsermediaStream?.getAudioTracks().length;
+ }
+ get hasRemoteUserMediaAudioTrack() {
+ return this.getRemoteFeeds().some(feed => {
+ return feed.purpose === _callEventTypes.SDPStreamMetadataPurpose.Usermedia && !!feed.stream?.getAudioTracks().length;
+ });
+ }
+ get hasUserMediaAudioSender() {
+ return Boolean(this.transceivers.get(getTransceiverKey(_callEventTypes.SDPStreamMetadataPurpose.Usermedia, "audio"))?.sender);
+ }
+ get hasUserMediaVideoSender() {
+ return Boolean(this.transceivers.get(getTransceiverKey(_callEventTypes.SDPStreamMetadataPurpose.Usermedia, "video"))?.sender);
+ }
  /** The local feed with purpose Usermedia, if any. */
  get localUsermediaFeed() {
    return this.getLocalFeeds().find(feed => feed.purpose === _callEventTypes.SDPStreamMetadataPurpose.Usermedia);
  }
  /** The local feed with purpose Screenshare, if any. */
  get localScreensharingFeed() {
    return this.getLocalFeeds().find(feed => feed.purpose === _callEventTypes.SDPStreamMetadataPurpose.Screenshare);
  }
  /** The MediaStream of the local usermedia feed, if any. */
  get localUsermediaStream() {
    return this.localUsermediaFeed?.stream;
  }
  /** The MediaStream of the local screenshare feed, if any. */
  get localScreensharingStream() {
    return this.localScreensharingFeed?.stream;
  }
  /** The remote feed with purpose Usermedia, if any. */
  get remoteUsermediaFeed() {
    return this.getRemoteFeeds().find(feed => feed.purpose === _callEventTypes.SDPStreamMetadataPurpose.Usermedia);
  }
  /** The remote feed with purpose Screenshare, if any. */
  get remoteScreensharingFeed() {
    return this.getRemoteFeeds().find(feed => feed.purpose === _callEventTypes.SDPStreamMetadataPurpose.Screenshare);
  }
  /** The MediaStream of the remote usermedia feed, if any. */
  get remoteUsermediaStream() {
    return this.remoteUsermediaFeed?.stream;
  }
  /** The MediaStream of the remote screenshare feed, if any. */
  get remoteScreensharingStream() {
    return this.remoteScreensharingFeed?.stream;
  }
  /**
   * Finds a feed (local or remote) by its stream's ID.
   * @returns The matching CallFeed, or undefined.
   */
  getFeedByStreamId(streamId) {
    return this.getFeeds().find(feed => feed.stream.id === streamId);
  }
+
  /**
   * Returns an array of all CallFeeds (both local and remote).
   * @returns CallFeeds
   */
  getFeeds() {
    return this.feeds;
  }
+
  /**
   * Returns an array of all local CallFeeds
   * @returns local CallFeeds
   */
  getLocalFeeds() {
    return this.feeds.filter(feed => feed.isLocal());
  }
+
  /**
   * Returns an array of all remote CallFeeds
   * @returns remote CallFeeds
   */
  getRemoteFeeds() {
    return this.feeds.filter(feed => !feed.isLocal());
  }
  /**
   * Resolves the opponent's DeviceInfo when E2EE is wanted for group calls;
   * a no-op when there is no opponent device or E2EE is not in use.
   * @throws Error if crypto is expected but not initialised, or if no
   *   opponent user ID can be determined.
   * @throws GroupCallUnknownDeviceError if the opponent's device keys
   *   cannot be downloaded.
   */
  async initOpponentCrypto() {
    if (!this.opponentDeviceId) return;
    if (!this.client.getUseE2eForGroupCall()) return;
    // It's possible to want E2EE and yet not have the means to manage E2EE
    // ourselves (for example if the client is a RoomWidgetClient)
    if (!this.client.isCryptoEnabled()) {
      // All we know is the device ID
      this.opponentDeviceInfo = new _deviceinfo.DeviceInfo(this.opponentDeviceId);
      return;
    }
    // if we've got to this point, we do want to init crypto, so throw if we can't
    if (!this.client.crypto) throw new Error("Crypto is not initialised.");
    const userId = this.invitee || this.getOpponentMember()?.userId;
    if (!userId) throw new Error("Couldn't find opponent user ID to init crypto");
    const deviceInfoMap = await this.client.crypto.deviceList.downloadKeys([userId], false);
    this.opponentDeviceInfo = deviceInfoMap.get(userId)?.get(this.opponentDeviceId);
    if (this.opponentDeviceInfo === undefined) {
      throw new _groupCall.GroupCallUnknownDeviceError(userId);
    }
  }
+
+ /**
+  * Generates and returns localSDPStreamMetadata
+  * @param updateStreamIds - when true, refresh each local feed's
+  *     sdpMetadataStreamId from its current stream id before building the map
+  * @returns localSDPStreamMetadata, keyed by each feed's sdpMetadataStreamId
+  */
+ getLocalSDPStreamMetadata(updateStreamIds = false) {
+ const metadata = {};
+ for (const localFeed of this.getLocalFeeds()) {
+ if (updateStreamIds) {
+ localFeed.sdpMetadataStreamId = localFeed.stream.id;
+ }
+ // One entry per local feed: its purpose plus current mute state
+ metadata[localFeed.sdpMetadataStreamId] = {
+ purpose: localFeed.purpose,
+ audio_muted: localFeed.isAudioMuted(),
+ video_muted: localFeed.isVideoMuted()
+ };
+ }
+ return metadata;
+ }
+
+ /**
+  * Returns true if there are no incoming feeds,
+  * otherwise returns false
+  * @returns no incoming feeds
+  */
+ noIncomingFeeds() {
+ return !this.feeds.some(feed => !feed.isLocal());
+ }
+ pushRemoteFeed(stream) {
+ // Fallback to old behavior if the other side doesn't support SDPStreamMetadata
+ if (!this.opponentSupportsSDPStreamMetadata()) {
+ this.pushRemoteFeedWithoutMetadata(stream);
+ return;
+ }
+ const userId = this.getOpponentMember().userId;
+ const purpose = this.remoteSDPStreamMetadata[stream.id].purpose;
+ const audioMuted = this.remoteSDPStreamMetadata[stream.id].audio_muted;
+ const videoMuted = this.remoteSDPStreamMetadata[stream.id].video_muted;
+ if (!purpose) {
+ _logger.logger.warn(`Call ${this.callId} pushRemoteFeed() ignoring stream because we didn't get any metadata about it (streamId=${stream.id})`);
+ return;
+ }
+ if (this.getFeedByStreamId(stream.id)) {
+ _logger.logger.warn(`Call ${this.callId} pushRemoteFeed() ignoring stream because we already have a feed for it (streamId=${stream.id})`);
+ return;
+ }
+ this.feeds.push(new _callFeed.CallFeed({
+ client: this.client,
+ call: this,
+ roomId: this.roomId,
+ userId,
+ deviceId: this.getOpponentDeviceId(),
+ stream,
+ purpose,
+ audioMuted,
+ videoMuted
+ }));
+ this.emit(CallEvent.FeedsChanged, this.feeds, this);
+ _logger.logger.info(`Call ${this.callId} pushRemoteFeed() pushed stream (streamId=${stream.id}, active=${stream.active}, purpose=${purpose})`);
+ }
+
+ /**
+  * This method is used ONLY if the other client doesn't support sending SDPStreamMetadata.
+  * Adds the remote stream as a Usermedia CallFeed (the only purpose a
+  * metadata-less client can send) and emits CallEvent.FeedsChanged.
+  */
+ pushRemoteFeedWithoutMetadata(stream) {
+ const userId = this.getOpponentMember().userId;
+ // We can guess the purpose here since the other client can only send one stream
+ const purpose = _callEventTypes.SDPStreamMetadataPurpose.Usermedia;
+ const oldRemoteStream = this.feeds.find(feed => !feed.isLocal())?.stream;
+
+ // Note that we check by ID and always set the remote stream: Chrome appears
+ // to make new stream objects when transceiver directionality is changed and the 'active'
+ // status of streams change - Dave
+ // If we already have a stream, check this stream has the same id
+ if (oldRemoteStream && stream.id !== oldRemoteStream.id) {
+ _logger.logger.warn(`Call ${this.callId} pushRemoteFeedWithoutMetadata() ignoring new stream because we already have stream (streamId=${stream.id})`);
+ return;
+ }
+ if (this.getFeedByStreamId(stream.id)) {
+ _logger.logger.warn(`Call ${this.callId} pushRemoteFeedWithoutMetadata() ignoring stream because we already have a feed for it (streamId=${stream.id})`);
+ return;
+ }
+ // No metadata, so assume unmuted; mute state will be inferred from tracks
+ this.feeds.push(new _callFeed.CallFeed({
+ client: this.client,
+ call: this,
+ roomId: this.roomId,
+ audioMuted: false,
+ videoMuted: false,
+ userId,
+ deviceId: this.getOpponentDeviceId(),
+ stream,
+ purpose
+ }));
+ this.emit(CallEvent.FeedsChanged, this.feeds, this);
+ _logger.logger.info(`Call ${this.callId} pushRemoteFeedWithoutMetadata() pushed stream (streamId=${stream.id}, active=${stream.active})`);
+ }
+ /**
+  * Wraps a newly-acquired local MediaStream in a CallFeed and pushes it to
+  * the call via pushLocalFeed().
+  * @param stream - the local stream to add
+  * @param purpose - SDPStreamMetadataPurpose for the new feed
+  * @param addToPeerConnection - whether to also add its tracks to the peer connection
+  */
+ pushNewLocalFeed(stream, purpose, addToPeerConnection = true) {
+ const userId = this.client.getUserId();
+
+ // Tracks don't always start off enabled, eg. chrome will give a disabled
+ // audio track if you ask for user media audio and already had one that
+ // you'd set to disabled (presumably because it clones them internally).
+ setTracksEnabled(stream.getAudioTracks(), true);
+ setTracksEnabled(stream.getVideoTracks(), true);
+ if (this.getFeedByStreamId(stream.id)) {
+ _logger.logger.warn(`Call ${this.callId} pushNewLocalFeed() ignoring stream because we already have a feed for it (streamId=${stream.id})`);
+ return;
+ }
+ this.pushLocalFeed(new _callFeed.CallFeed({
+ client: this.client,
+ roomId: this.roomId,
+ audioMuted: false,
+ videoMuted: false,
+ userId,
+ deviceId: this.getOpponentDeviceId(),
+ stream,
+ purpose
+ }), addToPeerConnection);
+ }
+
+ /**
+  * Pushes supplied feed to the call
+  * @param callFeed - to push
+  * @param addToPeerConnection - whether to add the tracks to the peer connection
+  */
+ pushLocalFeed(callFeed, addToPeerConnection = true) {
+ // De-dupe by stream id: pushing the same stream twice is a no-op
+ if (this.feeds.some(feed => callFeed.stream.id === feed.stream.id)) {
+ _logger.logger.info(`Call ${this.callId} pushLocalFeed() ignoring duplicate local stream (streamId=${callFeed.stream.id})`);
+ return;
+ }
+ this.feeds.push(callFeed);
+ if (addToPeerConnection) {
+ for (const track of callFeed.stream.getTracks()) {
+ _logger.logger.info(`Call ${this.callId} pushLocalFeed() adding track to peer connection (id=${track.id}, kind=${track.kind}, streamId=${callFeed.stream.id}, streamPurpose=${callFeed.purpose}, enabled=${track.enabled})`);
+ // Transceivers are keyed by (purpose, kind) so each can be re-used
+ const tKey = getTransceiverKey(callFeed.purpose, track.kind);
+ if (this.transceivers.has(tKey)) {
+ // we already have a sender, so we re-use it. We try to re-use transceivers as much
+ // as possible because they can't be removed once added, so otherwise they just
+ // accumulate which makes the SDP very large very quickly: in fact it only takes
+ // about 6 video tracks to exceed the maximum size of an Olm-encrypted
+ // Matrix event.
+ const transceiver = this.transceivers.get(tKey);
+ transceiver.sender.replaceTrack(track);
+ // set the direction to indicate we're going to start sending again
+ // (this will trigger the re-negotiation)
+ transceiver.direction = transceiver.direction === "inactive" ? "sendonly" : "sendrecv";
+ } else {
+ // create a new one. We need to use addTrack rather addTransceiver for this because firefox
+ // doesn't yet implement RTCRTPSender.setStreams()
+ // (https://bugzilla.mozilla.org/show_bug.cgi?id=1510802) so we'd have no way to group the
+ // two tracks together into a stream.
+ const newSender = this.peerConn.addTrack(track, callFeed.stream);
+
+ // now go & fish for the new transceiver
+ const newTransceiver = this.peerConn.getTransceivers().find(t => t.sender === newSender);
+ if (newTransceiver) {
+ this.transceivers.set(tKey, newTransceiver);
+ } else {
+ _logger.logger.warn(`Call ${this.callId} pushLocalFeed() didn't find a matching transceiver after adding track!`);
+ }
+ }
+ }
+ }
+ _logger.logger.info(`Call ${this.callId} pushLocalFeed() pushed stream (id=${callFeed.stream.id}, active=${callFeed.stream.active}, purpose=${callFeed.purpose})`);
+ this.emit(CallEvent.FeedsChanged, this.feeds, this);
+ }
+
+ /**
+  * Removes local call feed from the call and its tracks from the peer
+  * connection
+  * @param callFeed - to remove
+  */
+ removeLocalFeed(callFeed) {
+ const audioTransceiverKey = getTransceiverKey(callFeed.purpose, "audio");
+ const videoTransceiverKey = getTransceiverKey(callFeed.purpose, "video");
+ for (const transceiverKey of [audioTransceiverKey, videoTransceiverKey]) {
+ // this is slightly mixing the track and transceiver API but is basically just shorthand.
+ // There is no way to actually remove a transceiver, so this just sets it to inactive
+ // (or recvonly) and replaces the source with nothing.
+ if (this.transceivers.has(transceiverKey)) {
+ const transceiver = this.transceivers.get(transceiverKey);
+ if (transceiver.sender) this.peerConn.removeTrack(transceiver.sender);
+ }
+ }
+ if (callFeed.purpose === _callEventTypes.SDPStreamMetadataPurpose.Screenshare) {
+ this.client.getMediaHandler().stopScreensharingStream(callFeed.stream);
+ }
+ this.deleteFeed(callFeed);
+ }
+ /**
+  * Disposes and removes all feeds. Local feeds belonging to a group call are
+  * not disposed (the group call owns their lifetime). Emits FeedsChanged.
+  */
+ deleteAllFeeds() {
+ for (const feed of this.feeds) {
+ if (!feed.isLocal() || !this.groupCallId) {
+ feed.dispose();
+ }
+ }
+ this.feeds = [];
+ this.emit(CallEvent.FeedsChanged, this.feeds, this);
+ }
+ // Deletes the feed backed by the given stream, warning if none is tracked.
+ deleteFeedByStream(stream) {
+ const feed = this.getFeedByStreamId(stream.id);
+ if (!feed) {
+ _logger.logger.warn(`Call ${this.callId} deleteFeedByStream() didn't find the feed to delete (streamId=${stream.id})`);
+ return;
+ }
+ this.deleteFeed(feed);
+ }
+ // Disposes the feed and removes it from this.feeds, emitting FeedsChanged.
+ // NOTE(review): if `feed` is not in this.feeds, indexOf() returns -1 and
+ // splice(-1, 1) would remove the *last* feed — callers must pass a tracked feed.
+ deleteFeed(feed) {
+ feed.dispose();
+ this.feeds.splice(this.feeds.indexOf(feed), 1);
+ this.emit(CallEvent.FeedsChanged, this.feeds, this);
+ }
+
+ // The typescript definitions have this type as 'any' :(
+ /**
+  * Returns the current WebRTC stats: the stats captured at hangup if the
+  * call has ended, otherwise a fresh snapshot from the peer connection.
+  */
+ async getCurrentCallStats() {
+ if (this.callHasEnded()) {
+ return this.callStatsAtEnd;
+ }
+ return this.collectCallStats();
+ }
+ /**
+  * Collects the peer connection's stats report into a plain array.
+  * @returns array of stats entries, or undefined if there is no peer connection
+  */
+ async collectCallStats() {
+ // This happens when the call fails before it starts.
+ // For example when we fail to get capture sources
+ if (!this.peerConn) return;
+ const statsReport = await this.peerConn.getStats();
+ const stats = [];
+ statsReport.forEach(item => {
+ stats.push(item);
+ });
+ return stats;
+ }
+
+ /**
+  * Configure this call from an invite event. Used by MatrixClient.
+  * @param event - The m.call.invite event
+  */
+ async initWithInvite(event) {
+ const invite = event.getContent();
+ this.direction = CallDirection.Inbound;
+
+ // make sure we have valid turn creds. Unless something's gone wrong, it should
+ // poll and keep the credentials valid so this should be instant.
+ const haveTurnCreds = await this.client.checkTurnServers();
+ if (!haveTurnCreds) {
+ _logger.logger.warn(`Call ${this.callId} initWithInvite() failed to get TURN credentials! Proceeding with call anyway...`);
+ }
+ const sdpStreamMetadata = invite[_callEventTypes.SDPStreamMetadataKey];
+ if (sdpStreamMetadata) {
+ this.updateRemoteSDPStreamMetadata(sdpStreamMetadata);
+ } else {
+ _logger.logger.debug(`Call ${this.callId} initWithInvite() did not get any SDPStreamMetadata! Can not send/receive multiple streams`);
+ }
+ this.peerConn = this.createPeerConnection();
+ this.emit(CallEvent.PeerConnectionCreated, this.peerConn, this);
+ // we must set the party ID before await-ing on anything: the call event
+ // handler will start giving us more call events (eg. candidates) so if
+ // we haven't set the party ID, we'll ignore them.
+ this.chooseOpponent(event);
+ await this.initOpponentCrypto();
+ try {
+ await this.peerConn.setRemoteDescription(invite.offer);
+ _logger.logger.debug(`Call ${this.callId} initWithInvite() set remote description: ${invite.offer.type}`);
+ // flush any ICE candidates that arrived before the remote description was set
+ await this.addBufferedIceCandidates();
+ } catch (e) {
+ _logger.logger.debug(`Call ${this.callId} initWithInvite() failed to set remote description`, e);
+ this.terminate(CallParty.Local, CallErrorCode.SetRemoteDescription, false);
+ return;
+ }
+ const remoteStream = this.feeds.find(feed => !feed.isLocal())?.stream;
+
+ // According to previous comments in this file, firefox at some point did not
+ // add streams until media started arriving on them. Testing latest firefox
+ // (81 at time of writing), this is no longer a problem, so let's do it the correct way.
+ //
+ // For example in case of no media webrtc connections like screen share only call we have to allow webrtc
+ // connections without remote media. In this case we always use a data channel. At the moment we allow as well
+ // only data channel as media in the WebRTC connection with this setup here.
+ if (!this.isOnlyDataChannelAllowed && (!remoteStream || remoteStream.getTracks().length === 0)) {
+ _logger.logger.error(`Call ${this.callId} initWithInvite() no remote stream or no tracks after setting remote description!`);
+ this.terminate(CallParty.Local, CallErrorCode.SetRemoteDescription, false);
+ return;
+ }
+ this.state = CallState.Ringing;
+ if (event.getLocalAge()) {
+ // Time out the call if it's ringing for too long
+ const ringingTimer = setTimeout(() => {
+ if (this.state == CallState.Ringing) {
+ _logger.logger.debug(`Call ${this.callId} initWithInvite() invite has expired. Hanging up.`);
+ this.hangupParty = CallParty.Remote; // effectively
+ this.state = CallState.Ended;
+ this.stopAllMedia();
+ if (this.peerConn.signalingState != "closed") {
+ this.peerConn.close();
+ }
+ this.stats?.removeStatsReportGatherer(this.callId);
+ this.emit(CallEvent.Hangup, this);
+ }
+ // remaining ring time = invite lifetime minus how long the event took to reach us
+ }, invite.lifetime - event.getLocalAge());
+ // cancel the ringing timeout as soon as the call leaves the Ringing state
+ const onState = state => {
+ if (state !== CallState.Ringing) {
+ clearTimeout(ringingTimer);
+ this.off(CallEvent.State, onState);
+ }
+ };
+ this.on(CallEvent.State, onState);
+ }
+ }
+
+ /**
+  * Configure this call from a hangup or reject event. Used by MatrixClient.
+  * @param event - The m.call.hangup event
+  */
+ initWithHangup(event) {
+ // perverse as it may seem, sometimes we want to instantiate a call with a
+ // hangup message (because when getting the state of the room on load, events
+ // come in reverse order and we want to remember that a call has been hung up)
+ this.state = CallState.Ended;
+ }
+ /**
+  * Decides whether to answer with a given media type.
+  * @param wantedValue - whether the local user wants this media type (may be null/undefined)
+  * @param valueOfTheOtherSide - whether the remote side is sending this media type
+  * @param type - "audio" or "video", used for logging only
+  * @returns whether to answer with this media type
+  */
+ shouldAnswerWithMediaType(wantedValue, valueOfTheOtherSide, type) {
+ if (wantedValue && !valueOfTheOtherSide) {
+ // TODO: Figure out how to do this
+ _logger.logger.warn(`Call ${this.callId} shouldAnswerWithMediaType() unable to answer with ${type} because the other side isn't sending it either.`);
+ return false;
+ } else if (!(0, _utils.isNullOrUndefined)(wantedValue) && wantedValue !== valueOfTheOtherSide && !this.opponentSupportsSDPStreamMetadata()) {
+ // Without SDPStreamMetadata we can't signal a mismatch, so mirror the other side
+ _logger.logger.warn(`Call ${this.callId} shouldAnswerWithMediaType() unable to answer with ${type}=${wantedValue} because the other side doesn't support it. Answering with ${type}=${valueOfTheOtherSide}.`);
+ return valueOfTheOtherSide;
+ }
+ // default to matching the remote side when the caller expressed no preference
+ return wantedValue ?? valueOfTheOtherSide;
+ }
+
+ /**
+  * Answer a call.
+  * @param audio - whether to answer with audio (defaults to mirroring the remote side)
+  * @param video - whether to answer with video (defaults to mirroring the remote side)
+  * @throws Error if both audio and video are explicitly false
+  */
+ async answer(audio, video) {
+ // idempotent: ignore if we already sent an invite or answer
+ if (this.inviteOrAnswerSent) return;
+ // TODO: Figure out how to do this
+ if (audio === false && video === false) throw new Error("You CANNOT answer a call without media");
+ if (!this.localUsermediaStream && !this.waitForLocalAVStream) {
+ const prevState = this.state;
+ const answerWithAudio = this.shouldAnswerWithMediaType(audio, this.hasRemoteUserMediaAudioTrack, "audio");
+ const answerWithVideo = this.shouldAnswerWithMediaType(video, this.hasRemoteUserMediaVideoTrack, "video");
+ this.state = CallState.WaitLocalMedia;
+ this.waitForLocalAVStream = true;
+ try {
+ const stream = await this.client.getMediaHandler().getUserMediaStream(answerWithAudio, answerWithVideo);
+ this.waitForLocalAVStream = false;
+ const usermediaFeed = new _callFeed.CallFeed({
+ client: this.client,
+ roomId: this.roomId,
+ userId: this.client.getUserId(),
+ deviceId: this.client.getDeviceId() ?? undefined,
+ stream,
+ purpose: _callEventTypes.SDPStreamMetadataPurpose.Usermedia,
+ audioMuted: false,
+ videoMuted: false
+ });
+ const feeds = [usermediaFeed];
+ // preserve any screenshare that was already running
+ if (this.localScreensharingFeed) {
+ feeds.push(this.localScreensharingFeed);
+ }
+ this.answerWithCallFeeds(feeds);
+ } catch (e) {
+ if (answerWithVideo) {
+ // Try to answer without video
+ _logger.logger.warn(`Call ${this.callId} answer() failed to getUserMedia(), trying to getUserMedia() without video`);
+ this.state = prevState;
+ this.waitForLocalAVStream = false;
+ await this.answer(answerWithAudio, false);
+ } else {
+ this.getUserMediaFailed(e);
+ return;
+ }
+ }
+ } else if (this.waitForLocalAVStream) {
+ // another answer attempt is already fetching media; just reflect the state
+ this.state = CallState.WaitLocalMedia;
+ }
+ }
+ /**
+  * Answer the call using the supplied, already-acquired CallFeeds.
+  * @param callFeeds - feeds to answer with
+  */
+ answerWithCallFeeds(callFeeds) {
+ if (this.inviteOrAnswerSent) return;
+ this.queueGotCallFeedsForAnswer(callFeeds);
+ }
+
+ /**
+  * Replace this call with a new call, e.g. for glare resolution. Used by
+  * MatrixClient.
+  * @param newCall - The new call.
+  */
+ replacedBy(newCall) {
+ _logger.logger.debug(`Call ${this.callId} replacedBy() running (newCallId=${newCall.callId})`);
+ if (this.state === CallState.WaitLocalMedia) {
+ _logger.logger.debug(`Call ${this.callId} replacedBy() telling new call to wait for local media (newCallId=${newCall.callId})`);
+ newCall.waitForLocalAVStream = true;
+ } else if ([CallState.CreateOffer, CallState.InviteSent].includes(this.state)) {
+ if (newCall.direction === CallDirection.Outbound) {
+ newCall.queueGotCallFeedsForAnswer([]);
+ } else {
+ _logger.logger.debug(`Call ${this.callId} replacedBy() handing local stream to new call(newCallId=${newCall.callId})`);
+ // clone the feeds: this call is about to be hung up, which disposes its feeds
+ newCall.queueGotCallFeedsForAnswer(this.getLocalFeeds().map(feed => feed.clone()));
+ }
+ }
+ this.successor = newCall;
+ this.emit(CallEvent.Replaced, newCall, this);
+ this.hangup(CallErrorCode.Replaced, true);
+ }
+
+ /**
+  * Hangup a call.
+  * @param reason - The reason why the call is being hung up.
+  * @param suppressEvent - True to suppress emitting an event.
+  */
+ hangup(reason, suppressEvent) {
+ if (this.callHasEnded()) return;
+ _logger.logger.debug(`Call ${this.callId} hangup() ending call (reason=${reason})`);
+ this.terminate(CallParty.Local, reason, !suppressEvent);
+ // We don't want to send hangup here if we didn't even get to sending an invite
+ if ([CallState.Fledgling, CallState.WaitLocalMedia].includes(this.state)) return;
+ const content = {};
+ // Don't send UserHangup reason to older clients
+ // (precedence: (opponentVersion && opponentVersion !== 0) || reason !== UserHangup)
+ if (this.opponentVersion && this.opponentVersion !== 0 || reason !== CallErrorCode.UserHangup) {
+ content["reason"] = reason;
+ }
+ this.sendVoipEvent(_event.EventType.CallHangup, content);
+ }
+
+ /**
+  * Reject a call
+  * This used to be done by calling hangup, but is a separate method and protocol
+  * event as of MSC2746.
+  * @throws Error if the call is not in the Ringing state
+  */
+ reject() {
+ if (this.state !== CallState.Ringing) {
+ throw Error("Call must be in 'ringing' state to reject!");
+ }
+ // v0 opponents don't understand m.call.reject; send a hangup instead
+ if (this.opponentVersion === 0) {
+ _logger.logger.info(`Call ${this.callId} reject() opponent version is less than 1: sending hangup instead of reject (opponentVersion=${this.opponentVersion})`);
+ this.hangup(CallErrorCode.UserHangup, true);
+ return;
+ }
+ _logger.logger.debug("Rejecting call: " + this.callId);
+ this.terminate(CallParty.Local, CallErrorCode.UserHangup, true);
+ this.sendVoipEvent(_event.EventType.CallReject, {});
+ }
+
+ /**
+  * Adds an audio and/or video track - upgrades the call
+  * @param audio - should add an audio track
+  * @param video - should add an video track
+  * Emits CallEvent.Error (NoUserMedia) instead of throwing on failure.
+  */
+ async upgradeCall(audio, video) {
+ // We don't do call downgrades
+ if (!audio && !video) return;
+ if (!this.opponentSupportsSDPStreamMetadata()) return;
+ try {
+ _logger.logger.debug(`Call ${this.callId} upgradeCall() upgrading call (audio=${audio}, video=${video})`);
+ // keep whatever tracks we already send, in addition to the requested ones
+ const getAudio = audio || this.hasLocalUserMediaAudioTrack;
+ const getVideo = video || this.hasLocalUserMediaVideoTrack;
+
+ // updateLocalUsermediaStream() will take the tracks, use them as
+ // replacement and throw the stream away, so it isn't reusable
+ const stream = await this.client.getMediaHandler().getUserMediaStream(getAudio, getVideo, false);
+ await this.updateLocalUsermediaStream(stream, audio, video);
+ } catch (error) {
+ _logger.logger.error(`Call ${this.callId} upgradeCall() failed to upgrade the call`, error);
+ this.emit(CallEvent.Error, new CallError(CallErrorCode.NoUserMedia, "Failed to get camera access: ", error), this);
+ }
+ }
+
+ /**
+  * Returns true if this.remoteSDPStreamMetadata is defined, otherwise returns false
+  * @returns can screenshare
+  */
+ opponentSupportsSDPStreamMetadata() {
+ return Boolean(this.remoteSDPStreamMetadata);
+ }
+
+ /**
+  * If there is a screensharing stream returns true, otherwise returns false
+  * @returns is screensharing
+  */
+ isScreensharing() {
+ return Boolean(this.localScreensharingStream);
+ }
+
+ /**
+  * Starts/stops screensharing
+  * @param enabled - the desired screensharing state
+  * @param opts - optional screensharing options (e.g. desktop capturer source id)
+  * @returns new screensharing state
+  */
+ async setScreensharingEnabled(enabled, opts) {
+ // Skip if there is nothing to do
+ if (enabled && this.isScreensharing()) {
+ _logger.logger.warn(`Call ${this.callId} setScreensharingEnabled() there is already a screensharing stream - there is nothing to do!`);
+ return true;
+ } else if (!enabled && !this.isScreensharing()) {
+ _logger.logger.warn(`Call ${this.callId} setScreensharingEnabled() there already isn't a screensharing stream - there is nothing to do!`);
+ return false;
+ }
+
+ // Fallback to replaceTrack()
+ if (!this.opponentSupportsSDPStreamMetadata()) {
+ return this.setScreensharingEnabledWithoutMetadataSupport(enabled, opts);
+ }
+ _logger.logger.debug(`Call ${this.callId} setScreensharingEnabled() running (enabled=${enabled})`);
+ if (enabled) {
+ try {
+ const stream = await this.client.getMediaHandler().getScreensharingStream(opts);
+ // user cancelled the picker, or no stream available
+ if (!stream) return false;
+ this.pushNewLocalFeed(stream, _callEventTypes.SDPStreamMetadataPurpose.Screenshare);
+ return true;
+ } catch (err) {
+ _logger.logger.error(`Call ${this.callId} setScreensharingEnabled() failed to get screen-sharing stream:`, err);
+ return false;
+ }
+ } else {
+ const audioTransceiver = this.transceivers.get(getTransceiverKey(_callEventTypes.SDPStreamMetadataPurpose.Screenshare, "audio"));
+ const videoTransceiver = this.transceivers.get(getTransceiverKey(_callEventTypes.SDPStreamMetadataPurpose.Screenshare, "video"));
+ for (const transceiver of [audioTransceiver, videoTransceiver]) {
+ // this is slightly mixing the track and transceiver API but is basically just shorthand
+ // for removing the sender.
+ if (transceiver && transceiver.sender) this.peerConn.removeTrack(transceiver.sender);
+ }
+ this.client.getMediaHandler().stopScreensharingStream(this.localScreensharingStream);
+ this.deleteFeedByStream(this.localScreensharingStream);
+ return false;
+ }
+ }
+
+ /**
+  * Starts/stops screensharing
+  * Should be used ONLY if the opponent doesn't support SDPStreamMetadata:
+  * swaps the screenshare track into the existing usermedia video sender
+  * instead of adding a separate stream.
+  * @param enabled - the desired screensharing state
+  * @param opts - optional screensharing options (e.g. desktop capturer source id)
+  * @returns new screensharing state
+  */
+ async setScreensharingEnabledWithoutMetadataSupport(enabled, opts) {
+ _logger.logger.debug(`Call ${this.callId} setScreensharingEnabledWithoutMetadataSupport() running (enabled=${enabled})`);
+ if (enabled) {
+ try {
+ const stream = await this.client.getMediaHandler().getScreensharingStream(opts);
+ if (!stream) return false;
+ // replace the outgoing camera track with the screenshare track
+ const track = stream.getTracks().find(track => track.kind === "video");
+ const sender = this.transceivers.get(getTransceiverKey(_callEventTypes.SDPStreamMetadataPurpose.Usermedia, "video"))?.sender;
+ sender?.replaceTrack(track ?? null);
+ // track the feed locally but don't add it to the peer connection
+ this.pushNewLocalFeed(stream, _callEventTypes.SDPStreamMetadataPurpose.Screenshare, false);
+ return true;
+ } catch (err) {
+ _logger.logger.error(`Call ${this.callId} setScreensharingEnabledWithoutMetadataSupport() failed to get screen-sharing stream:`, err);
+ return false;
+ }
+ } else {
+ // restore the camera track on the usermedia video sender
+ const track = this.localUsermediaStream?.getTracks().find(track => track.kind === "video");
+ const sender = this.transceivers.get(getTransceiverKey(_callEventTypes.SDPStreamMetadataPurpose.Usermedia, "video"))?.sender;
+ sender?.replaceTrack(track ?? null);
+ this.client.getMediaHandler().stopScreensharingStream(this.localScreensharingStream);
+ this.deleteFeedByStream(this.localScreensharingStream);
+ return false;
+ }
+ }
+
+ /**
+  * Replaces/adds the tracks from the passed stream to the localUsermediaStream
+  * @param stream - to use a replacement for the local usermedia stream
+  * @param forceAudio - enable the new audio tracks even if the feed is muted
+  * @param forceVideo - enable the new video tracks even if the feed is muted
+  */
+ async updateLocalUsermediaStream(stream, forceAudio = false, forceVideo = false) {
+ const callFeed = this.localUsermediaFeed;
+ const audioEnabled = forceAudio || !callFeed.isAudioMuted() && !this.remoteOnHold;
+ const videoEnabled = forceVideo || !callFeed.isVideoMuted() && !this.remoteOnHold;
+ _logger.logger.log(`Call ${this.callId} updateLocalUsermediaStream() running (streamId=${stream.id}, audio=${audioEnabled}, video=${videoEnabled})`);
+ setTracksEnabled(stream.getAudioTracks(), audioEnabled);
+ setTracksEnabled(stream.getVideoTracks(), videoEnabled);
+
+ // We want to keep the same stream id, so we replace the tracks rather
+ // than the whole stream.
+
+ // Firstly, we replace the tracks in our localUsermediaStream.
+ for (const track of this.localUsermediaStream.getTracks()) {
+ this.localUsermediaStream.removeTrack(track);
+ track.stop();
+ }
+ for (const track of stream.getTracks()) {
+ this.localUsermediaStream.addTrack(track);
+ }
+
+ // Then replace the old tracks, if possible.
+ for (const track of stream.getTracks()) {
+ const tKey = getTransceiverKey(_callEventTypes.SDPStreamMetadataPurpose.Usermedia, track.kind);
+ const transceiver = this.transceivers.get(tKey);
+ const oldSender = transceiver?.sender;
+ let added = false;
+ if (oldSender) {
+ try {
+ _logger.logger.info(`Call ${this.callId} updateLocalUsermediaStream() replacing track (id=${track.id}, kind=${track.kind}, streamId=${stream.id}, streamPurpose=${callFeed.purpose})`);
+ await oldSender.replaceTrack(track);
+ // Set the direction to indicate we're going to be sending.
+ // This is only necessary in the cases where we're upgrading
+ // the call to video after downgrading it.
+ transceiver.direction = transceiver.direction === "inactive" ? "sendonly" : "sendrecv";
+ added = true;
+ } catch (error) {
+ _logger.logger.warn(`Call ${this.callId} updateLocalUsermediaStream() replaceTrack failed: adding new transceiver instead`, error);
+ }
+ }
+ if (!added) {
+ // no (usable) existing sender for this kind: add a fresh track/transceiver
+ _logger.logger.info(`Call ${this.callId} updateLocalUsermediaStream() adding track to peer connection (id=${track.id}, kind=${track.kind}, streamId=${stream.id}, streamPurpose=${callFeed.purpose})`);
+ const newSender = this.peerConn.addTrack(track, this.localUsermediaStream);
+ const newTransceiver = this.peerConn.getTransceivers().find(t => t.sender === newSender);
+ if (newTransceiver) {
+ this.transceivers.set(tKey, newTransceiver);
+ } else {
+ _logger.logger.warn(`Call ${this.callId} updateLocalUsermediaStream() couldn't find matching transceiver for newly added track!`);
+ }
+ }
+ }
+ }
+
+ /**
+  * Set whether our outbound video should be muted or not.
+  * @param muted - True to mute the outbound video.
+  * @returns the new mute state
+  */
+ async setLocalVideoMuted(muted) {
+ _logger.logger.log(`Call ${this.callId} setLocalVideoMuted() running ${muted}`);
+
+ // if we were still thinking about stopping and removing the video
+ // track: don't, because we want it back.
+ if (!muted && this.stopVideoTrackTimer !== undefined) {
+ clearTimeout(this.stopVideoTrackTimer);
+ this.stopVideoTrackTimer = undefined;
+ }
+ // no camera at all: nothing to change
+ if (!(await this.client.getMediaHandler().hasVideoDevice())) {
+ return this.isLocalVideoMuted();
+ }
+ // unmuting but we never had a video sender: upgrade the call to video
+ if (!this.hasUserMediaVideoSender && !muted) {
+ this.localUsermediaFeed?.setAudioVideoMuted(null, muted);
+ await this.upgradeCall(false, true);
+ return this.isLocalVideoMuted();
+ }
+
+ // we may not have a video track - if not, re-request usermedia
+ if (!muted && this.localUsermediaStream.getVideoTracks().length === 0) {
+ const stream = await this.client.getMediaHandler().getUserMediaStream(true, true);
+ await this.updateLocalUsermediaStream(stream);
+ }
+ this.localUsermediaFeed?.setAudioVideoMuted(null, muted);
+ this.updateMuteStatus();
+ await this.sendMetadataUpdate();
+
+ // if we're muting video, set a timeout to stop & remove the video track so we release
+ // the camera. We wait a short time to do this because when we disable a track, WebRTC
+ // will send black video for it. If we just stop and remove it straight away, the video
+ // will just freeze which means that when we unmute video, the other side will briefly
+ // get a static frame of us from before we muted. This way, the still frame is just black.
+ // A very small delay is not always enough so the theory here is that it needs to be long
+ // enough for WebRTC to encode a frame: 120ms should be long enough even if we're only
+ // doing 10fps.
+ if (muted) {
+ this.stopVideoTrackTimer = setTimeout(() => {
+ for (const t of this.localUsermediaStream.getVideoTracks()) {
+ t.stop();
+ this.localUsermediaStream.removeTrack(t);
+ }
+ }, 120);
+ }
+ return this.isLocalVideoMuted();
+ }
+
+ /**
+ * Check if local video is muted.
+ *
+ * If there are multiple video tracks, <i>all</i> of the tracks need to be muted
+ * for this to return true. This means if there are no video tracks, this will
+ * return true.
+ * @returns True if the local preview video is muted, else false
+ * (including if the call is not set up yet).
+ */
+ isLocalVideoMuted() {
+ return this.localUsermediaFeed?.isVideoMuted() ?? false;
+ }
+
+ /**
+  * Set whether the microphone should be muted or not.
+  * @param muted - True to mute the mic.
+  * @returns the new mute state
+  */
+ async setMicrophoneMuted(muted) {
+ _logger.logger.log(`Call ${this.callId} setMicrophoneMuted() running ${muted}`);
+ // no microphone at all: nothing to change
+ if (!(await this.client.getMediaHandler().hasAudioDevice())) {
+ return this.isMicrophoneMuted();
+ }
+ // unmuting but we have no audio sender/track: upgrade the call to audio
+ if (!muted && (!this.hasUserMediaAudioSender || !this.hasLocalUserMediaAudioTrack)) {
+ await this.upgradeCall(true, false);
+ return this.isMicrophoneMuted();
+ }
+ this.localUsermediaFeed?.setAudioVideoMuted(muted, null);
+ this.updateMuteStatus();
+ await this.sendMetadataUpdate();
+ return this.isMicrophoneMuted();
+ }
+
+ /**
+  * Check if the microphone is muted.
+  *
+  * If there are multiple audio tracks, <i>all</i> of the tracks need to be muted
+  * for this to return true. This means if there are no audio tracks, this will
+  * return true.
+  * @returns True if the mic is muted, else false (including if the call
+  * is not set up yet).
+  */
+ isMicrophoneMuted() {
+ return this.localUsermediaFeed?.isAudioMuted() ?? false;
+ }
+
+ /**
+  * @returns true if we have put the party on the other side of the call on hold
+  * (that is, we are signalling to them that we are not listening)
+  */
+ isRemoteOnHold() {
+ return this.remoteOnHold;
+ }
+ /**
+  * Puts the remote party on (or takes them off) hold by flipping every
+  * transceiver's direction, then re-sends stream metadata and emits
+  * CallEvent.RemoteHoldUnhold. No-op if already in the requested state.
+  * @param onHold - true to hold, false to resume
+  */
+ setRemoteOnHold(onHold) {
+ if (this.isRemoteOnHold() === onHold) return;
+ this.remoteOnHold = onHold;
+ for (const transceiver of this.peerConn.getTransceivers()) {
+ // We don't send hold music or anything so we're not actually
+ // sending anything, but sendrecv is fairly standard for hold and
+ // it makes it a lot easier to figure out who's put who on hold.
+ transceiver.direction = onHold ? "sendonly" : "sendrecv";
+ }
+ this.updateMuteStatus();
+ // NOTE(review): sendMetadataUpdate() is async but not awaited here — a send
+ // failure would surface as an unhandled rejection; confirm this is intended.
+ this.sendMetadataUpdate();
+ this.emit(CallEvent.RemoteHoldUnhold, this.remoteOnHold, this);
+ }
+
+ /**
+ * Indicates whether we are 'on hold' to the remote party (ie. if true,
+ * they cannot hear us).
+ * @returns true if the other party has put us on hold
+ */
+ isLocalOnHold() {
+ if (this.state !== CallState.Connected) return false;
+ let callOnHold = true;
+
+ // We consider a call to be on hold only if *all* the tracks are on hold
+ // (is this the right thing to do?)
+ for (const transceiver of this.peerConn.getTransceivers()) {
+ const trackOnHold = ["inactive", "recvonly"].includes(transceiver.currentDirection);
+ if (!trackOnHold) callOnHold = false;
+ }
+ return callOnHold;
+ }
+
+ /**
+ * Sends a DTMF digit to the other party
+ * @param digit - The digit (nb. string - '#' and '*' are dtmf too)
+ */
+ sendDtmfDigit(digit) {
+ for (const sender of this.peerConn.getSenders()) {
+ if (sender.track?.kind === "audio" && sender.dtmf) {
+ sender.dtmf.insertDTMF(digit);
+ return;
+ }
+ }
+ throw new Error("Unable to find a track to send DTMF on");
+ }
+ /**
+  * Applies the effective mute state to the local usermedia tracks: a track is
+  * disabled when its feed is muted OR the remote party is on hold.
+  */
+ updateMuteStatus() {
+ const micShouldBeMuted = this.isMicrophoneMuted() || this.remoteOnHold;
+ const vidShouldBeMuted = this.isLocalVideoMuted() || this.remoteOnHold;
+ _logger.logger.log(`Call ${this.callId} updateMuteStatus stream ${this.localUsermediaStream.id} micShouldBeMuted ${micShouldBeMuted} vidShouldBeMuted ${vidShouldBeMuted}`);
+ setTracksEnabled(this.localUsermediaStream.getAudioTracks(), !micShouldBeMuted);
+ setTracksEnabled(this.localUsermediaStream.getVideoTracks(), !vidShouldBeMuted);
+ }
+ /**
+  * Sends the current local SDPStreamMetadata to the opponent via an
+  * m.call.sdp_stream_metadata.changed event.
+  */
+ async sendMetadataUpdate() {
+ await this.sendVoipEvent(_event.EventType.CallSDPStreamMetadataChangedPrefix, {
+ [_callEventTypes.SDPStreamMetadataKey]: this.getLocalSDPStreamMetadata()
+ });
+ }
+ gotCallFeedsForInvite(callFeeds, requestScreenshareFeed = false) {
+ if (this.successor) {
+ this.successor.queueGotCallFeedsForAnswer(callFeeds);
+ return;
+ }
+ if (this.callHasEnded()) {
+ this.stopAllMedia();
+ return;
+ }
+ for (const feed of callFeeds) {
+ this.pushLocalFeed(feed);
+ }
+ if (requestScreenshareFeed) {
+ this.peerConn.addTransceiver("video", {
+ direction: "recvonly"
+ });
+ }
+ this.state = CallState.CreateOffer;
+ _logger.logger.debug(`Call ${this.callId} gotUserMediaForInvite() run`);
+ // Now we wait for the negotiationneeded event
+ }
+
/**
 * Builds and sends the m.call.answer event from the current local
 * description. On failure the call is put back into the Ringing state and
 * the error re-thrown; on success any queued ICE candidates are flushed.
 */
async sendAnswer() {
  const answerContent = {
    answer: {
      sdp: this.peerConn.localDescription.sdp,
      // type is now deprecated as of Matrix VoIP v1, but
      // required to still be sent for backwards compat
      type: this.peerConn.localDescription.type
    },
    [_callEventTypes.SDPStreamMetadataKey]: this.getLocalSDPStreamMetadata(true)
  };
  answerContent.capabilities = {
    "m.call.transferee": this.client.supportsCallTransfer,
    "m.call.dtmf": false
  };

  // We have just taken the local description from the peerConn which will
  // contain all the local candidates added so far, so we can discard any candidates
  // we had queued up because they'll be in the answer.
  const discardCount = this.discardDuplicateCandidates();
  _logger.logger.info(`Call ${this.callId} sendAnswer() discarding ${discardCount} candidates that will be sent in answer`);
  try {
    await this.sendVoipEvent(_event.EventType.CallAnswer, answerContent);
    // If this isn't the first time we've tried to send the answer,
    // we may have candidates queued up, so send them now.
    this.inviteOrAnswerSent = true;
  } catch (error) {
    // We've failed to answer: back to the ringing state
    this.state = CallState.Ringing;
    // Don't leave the failed event stuck in the client's pending queue.
    if (error instanceof _httpApi.MatrixError && error.event) this.client.cancelPendingEvent(error.event);
    let code = CallErrorCode.SendAnswer;
    let message = "Failed to send answer";
    if (error.name == "UnknownDeviceError") {
      code = CallErrorCode.UnknownDevices;
      message = "Unknown devices present in the room";
    }
    this.emit(CallEvent.Error, new CallError(code, message, error), this);
    throw error;
  }

  // error handler re-throws so this won't happen on error, but
  // we don't want the same error handling on the candidate queue
  this.sendCandidateQueue();
}
+ queueGotCallFeedsForAnswer(callFeeds) {
+ // Ensure only one negotiate/answer event is being processed at a time.
+ if (this.responsePromiseChain) {
+ this.responsePromiseChain = this.responsePromiseChain.then(() => this.gotCallFeedsForAnswer(callFeeds));
+ } else {
+ this.responsePromiseChain = this.gotCallFeedsForAnswer(callFeeds);
+ }
+ }
+
// Enables DTX (discontinuous transmission) on the given session to reduce
// bandwidth when transmitting silence
mungeSdp(description, mods) {
  // The only way to enable DTX at this time is through SDP munging
  const sdp = (0, _sdpTransform.parse)(description.sdp);
  sdp.media.forEach(media => {
    // Build payload-type <-> codec lookup tables for this m-section.
    const payloadTypeToCodecMap = new Map();
    const codecToPayloadTypeMap = new Map();
    for (const rtp of media.rtp) {
      payloadTypeToCodecMap.set(rtp.payload, rtp.codec);
      codecToPayloadTypeMap.set(rtp.codec, rtp.payload);
    }
    for (const mod of mods) {
      // Only apply mods targeting this media type (audio/video).
      if (mod.mediaType !== media.type) continue;
      if (!codecToPayloadTypeMap.has(mod.codec)) {
        _logger.logger.info(`Call ${this.callId} mungeSdp() ignoring SDP modifications for ${mod.codec} as it's not present.`);
        continue;
      }
      // Collect the fmtp parameters this mod wants to set.
      const extraConfig = [];
      if (mod.enableDtx !== undefined) {
        extraConfig.push(`usedtx=${mod.enableDtx ? "1" : "0"}`);
      }
      if (mod.maxAverageBitrate !== undefined) {
        extraConfig.push(`maxaveragebitrate=${mod.maxAverageBitrate}`);
      }
      // Append to any existing fmtp line(s) for this codec...
      let found = false;
      for (const fmtp of media.fmtp) {
        if (payloadTypeToCodecMap.get(fmtp.payload) === mod.codec) {
          found = true;
          fmtp.config += ";" + extraConfig.join(";");
        }
      }
      // ...otherwise add a new fmtp line for it.
      if (!found) {
        media.fmtp.push({
          payload: codecToPayloadTypeMap.get(mod.codec),
          config: extraConfig.join(";")
        });
      }
    }
  });
  // Serialise the modified SDP back onto the description in place.
  description.sdp = (0, _sdpTransform.write)(sdp);
}
+ async createOffer() {
+ const offer = await this.peerConn.createOffer();
+ this.mungeSdp(offer, getCodecParamMods(this.isPtt));
+ return offer;
+ }
+ async createAnswer() {
+ const answer = await this.peerConn.createAnswer();
+ this.mungeSdp(answer, getCodecParamMods(this.isPtt));
+ return answer;
+ }
/**
 * Attaches the local feeds, creates and sets the local answer, then sends
 * it after a short pause to allow initial ICE candidates to be gathered.
 * Failures terminate the call with CreateAnswer/SetLocalDescription codes.
 */
async gotCallFeedsForAnswer(callFeeds) {
  if (this.callHasEnded()) return;
  this.waitForLocalAVStream = false;
  for (const feed of callFeeds) {
    this.pushLocalFeed(feed);
  }
  this.state = CallState.CreateAnswer;
  let answer;
  try {
    // Work around Chromium RTX codec issues before answering (see
    // getRidOfRTXCodecs for details).
    this.getRidOfRTXCodecs();
    answer = await this.createAnswer();
  } catch (err) {
    _logger.logger.debug(`Call ${this.callId} gotCallFeedsForAnswer() failed to create answer: `, err);
    this.terminate(CallParty.Local, CallErrorCode.CreateAnswer, true);
    return;
  }
  try {
    await this.peerConn.setLocalDescription(answer);

    // make sure we're still going
    if (this.callHasEnded()) return;
    this.state = CallState.Connecting;

    // Allow a short time for initial candidates to be gathered
    await new Promise(resolve => {
      setTimeout(resolve, 200);
    });

    // make sure the call hasn't ended before we continue
    if (this.callHasEnded()) return;
    // NOTE(review): deliberately not awaited here — sendAnswer() re-throws
    // on failure, so a rejection would escape this catch; confirm upstream.
    this.sendAnswer();
  } catch (err) {
    _logger.logger.debug(`Call ${this.callId} gotCallFeedsForAnswer() error setting local description!`, err);
    this.terminate(CallParty.Local, CallErrorCode.SetLocalDescription, true);
    return;
  }
}
/**
 * Handles an incoming m.call.candidates event. Candidates are buffered per
 * party ID until we have chosen an opponent, then only candidates from the
 * chosen party are added to the peer connection.
 */
async onRemoteIceCandidatesReceived(ev) {
  if (this.callHasEnded()) {
    //debuglog("Ignoring remote ICE candidate because call has ended");
    return;
  }
  const content = ev.getContent();
  const candidates = content.candidates;
  if (!candidates) {
    _logger.logger.info(`Call ${this.callId} onRemoteIceCandidatesReceived() ignoring candidates event with no candidates!`);
    return;
  }
  // VoIP v0 events carry no party_id; normalise absent values to null.
  const fromPartyId = content.version === 0 ? null : content.party_id || null;
  if (this.opponentPartyId === undefined) {
    // we haven't picked an opponent yet so save the candidates
    if (fromPartyId) {
      _logger.logger.info(`Call ${this.callId} onRemoteIceCandidatesReceived() buffering ${candidates.length} candidates until we pick an opponent`);
      const bufferedCandidates = this.remoteCandidateBuffer.get(fromPartyId) || [];
      bufferedCandidates.push(...candidates);
      this.remoteCandidateBuffer.set(fromPartyId, bufferedCandidates);
    }
    return;
  }
  if (!this.partyIdMatches(content)) {
    _logger.logger.info(`Call ${this.callId} onRemoteIceCandidatesReceived() ignoring candidates from party ID ${content.party_id}: we have chosen party ID ${this.opponentPartyId}`);
    return;
  }
  await this.addIceCandidates(candidates);
}
+
/**
 * Used by MatrixClient.
 * Handles an incoming m.call.answer: picks the answering party as our
 * opponent, applies their buffered ICE candidates and stream metadata,
 * sets the remote description, and acknowledges with select_answer.
 */
async onAnswerReceived(event) {
  const content = event.getContent();
  _logger.logger.debug(`Call ${this.callId} onAnswerReceived() running (hangupParty=${content.party_id})`);
  if (this.callHasEnded()) {
    _logger.logger.debug(`Call ${this.callId} onAnswerReceived() ignoring answer because call has ended`);
    return;
  }
  // First answer/reject wins: ignore any later answers from other parties.
  if (this.opponentPartyId !== undefined) {
    _logger.logger.info(`Call ${this.callId} onAnswerReceived() ignoring answer from party ID ${content.party_id}: we already have an answer/reject from ${this.opponentPartyId}`);
    return;
  }
  this.chooseOpponent(event);
  await this.addBufferedIceCandidates();
  this.state = CallState.Connecting;
  const sdpStreamMetadata = content[_callEventTypes.SDPStreamMetadataKey];
  if (sdpStreamMetadata) {
    this.updateRemoteSDPStreamMetadata(sdpStreamMetadata);
  } else {
    _logger.logger.warn(`Call ${this.callId} onAnswerReceived() did not get any SDPStreamMetadata! Can not send/receive multiple streams`);
  }
  try {
    // Flag used by the perfect-negotiation logic in onNegotiateReceived.
    this.isSettingRemoteAnswerPending = true;
    await this.peerConn.setRemoteDescription(content.answer);
    this.isSettingRemoteAnswerPending = false;
    _logger.logger.debug(`Call ${this.callId} onAnswerReceived() set remote description: ${content.answer.type}`);
  } catch (e) {
    this.isSettingRemoteAnswerPending = false;
    _logger.logger.debug(`Call ${this.callId} onAnswerReceived() failed to set remote description`, e);
    this.terminate(CallParty.Local, CallErrorCode.SetRemoteDescription, false);
    return;
  }

  // If the answer we selected has a party_id, send a select_answer event
  // We do this after setting the remote description since otherwise we'd block
  // call setup on it
  if (this.opponentPartyId !== null) {
    try {
      await this.sendVoipEvent(_event.EventType.CallSelectAnswer, {
        selected_party_id: this.opponentPartyId
      });
    } catch (err) {
      // This isn't fatal, and will just mean that if another party has raced to answer
      // the call, they won't know they got rejected, so we carry on & don't retry.
      _logger.logger.warn(`Call ${this.callId} onAnswerReceived() failed to send select_answer event`, err);
    }
  }
}
+ async onSelectAnswerReceived(event) {
+ if (this.direction !== CallDirection.Inbound) {
+ _logger.logger.warn(`Call ${this.callId} onSelectAnswerReceived() got select_answer for an outbound call: ignoring`);
+ return;
+ }
+ const selectedPartyId = event.getContent().selected_party_id;
+ if (selectedPartyId === undefined || selectedPartyId === null) {
+ _logger.logger.warn(`Call ${this.callId} onSelectAnswerReceived() got nonsensical select_answer with null/undefined selected_party_id: ignoring`);
+ return;
+ }
+ if (selectedPartyId !== this.ourPartyId) {
+ _logger.logger.info(`Call ${this.callId} onSelectAnswerReceived() got select_answer for party ID ${selectedPartyId}: we are party ID ${this.ourPartyId}.`);
+ // The other party has picked somebody else's answer
+ await this.terminate(CallParty.Remote, CallErrorCode.AnsweredElsewhere, true);
+ }
+ }
+ async onNegotiateReceived(event) {
+ const content = event.getContent();
+ const description = content.description;
+ if (!description || !description.sdp || !description.type) {
+ _logger.logger.info(`Call ${this.callId} onNegotiateReceived() ignoring invalid m.call.negotiate event`);
+ return;
+ }
+ // Politeness always follows the direction of the call: in a glare situation,
+ // we pick either the inbound or outbound call, so one side will always be
+ // inbound and one outbound
+ const polite = this.direction === CallDirection.Inbound;
+
+ // Here we follow the perfect negotiation logic from
+ // https://w3c.github.io/webrtc-pc/#perfect-negotiation-example
+ const readyForOffer = !this.makingOffer && (this.peerConn.signalingState === "stable" || this.isSettingRemoteAnswerPending);
+ const offerCollision = description.type === "offer" && !readyForOffer;
+ this.ignoreOffer = !polite && offerCollision;
+ if (this.ignoreOffer) {
+ _logger.logger.info(`Call ${this.callId} onNegotiateReceived() ignoring colliding negotiate event because we're impolite`);
+ return;
+ }
+ const prevLocalOnHold = this.isLocalOnHold();
+ const sdpStreamMetadata = content[_callEventTypes.SDPStreamMetadataKey];
+ if (sdpStreamMetadata) {
+ this.updateRemoteSDPStreamMetadata(sdpStreamMetadata);
+ } else {
+ _logger.logger.warn(`Call ${this.callId} onNegotiateReceived() received negotiation event without SDPStreamMetadata!`);
+ }
+ try {
+ this.isSettingRemoteAnswerPending = description.type == "answer";
+ await this.peerConn.setRemoteDescription(description); // SRD rolls back as needed
+ this.isSettingRemoteAnswerPending = false;
+ _logger.logger.debug(`Call ${this.callId} onNegotiateReceived() set remote description: ${description.type}`);
+ if (description.type === "offer") {
+ let answer;
+ try {
+ this.getRidOfRTXCodecs();
+ answer = await this.createAnswer();
+ } catch (err) {
+ _logger.logger.debug(`Call ${this.callId} onNegotiateReceived() failed to create answer: `, err);
+ this.terminate(CallParty.Local, CallErrorCode.CreateAnswer, true);
+ return;
+ }
+ await this.peerConn.setLocalDescription(answer);
+ _logger.logger.debug(`Call ${this.callId} onNegotiateReceived() create an answer`);
+ this.sendVoipEvent(_event.EventType.CallNegotiate, {
+ description: this.peerConn.localDescription?.toJSON(),
+ [_callEventTypes.SDPStreamMetadataKey]: this.getLocalSDPStreamMetadata(true)
+ });
+ }
+ } catch (err) {
+ this.isSettingRemoteAnswerPending = false;
+ _logger.logger.warn(`Call ${this.callId} onNegotiateReceived() failed to complete negotiation`, err);
+ }
+ const newLocalOnHold = this.isLocalOnHold();
+ if (prevLocalOnHold !== newLocalOnHold) {
+ this.emit(CallEvent.LocalHoldUnhold, newLocalOnHold, this);
+ // also this one for backwards compat
+ this.emit(CallEvent.HoldUnhold, newLocalOnHold);
+ }
+ }
+ updateRemoteSDPStreamMetadata(metadata) {
+ this.remoteSDPStreamMetadata = (0, _utils.recursivelyAssign)(this.remoteSDPStreamMetadata || {}, metadata, true);
+ for (const feed of this.getRemoteFeeds()) {
+ const streamId = feed.stream.id;
+ const metadata = this.remoteSDPStreamMetadata[streamId];
+ feed.setAudioVideoMuted(metadata?.audio_muted, metadata?.video_muted);
+ feed.purpose = this.remoteSDPStreamMetadata[streamId]?.purpose;
+ }
+ }
+ onSDPStreamMetadataChangedReceived(event) {
+ const content = event.getContent();
+ const metadata = content[_callEventTypes.SDPStreamMetadataKey];
+ this.updateRemoteSDPStreamMetadata(metadata);
+ }
+ async onAssertedIdentityReceived(event) {
+ const content = event.getContent();
+ if (!content.asserted_identity) return;
+ this.remoteAssertedIdentity = {
+ id: content.asserted_identity.id,
+ displayName: content.asserted_identity.display_name
+ };
+ this.emit(CallEvent.AssertedIdentityChanged, this);
+ }
+ callHasEnded() {
+ // This exists as workaround to typescript trying to be clever and erroring
+ // when putting if (this.state === CallState.Ended) return; twice in the same
+ // function, even though that function is async.
+ return this.state === CallState.Ended;
+ }
+ queueGotLocalOffer() {
+ // Ensure only one negotiate/answer event is being processed at a time.
+ if (this.responsePromiseChain) {
+ this.responsePromiseChain = this.responsePromiseChain.then(() => this.wrappedGotLocalOffer());
+ } else {
+ this.responsePromiseChain = this.wrappedGotLocalOffer();
+ }
+ }
/**
 * Runs gotLocalOffer() with the makingOffer flag set for the duration, so
 * the perfect-negotiation logic knows an offer is in flight.
 */
async wrappedGotLocalOffer() {
  this.makingOffer = true;
  try {
    // XXX: in what situations do we believe gotLocalOffer actually throws? It appears
    // to handle most of its exceptions itself and terminate the call. I'm not entirely
    // sure it would ever throw, so I can't add a test for these lines.
    // Also the tense is different between "gotLocalOffer" and "getLocalOfferFailed" so
    // it's not entirely clear whether getLocalOfferFailed is just misnamed or whether
    // they've been cross-polinated somehow at some point.
    await this.gotLocalOffer();
  } catch (e) {
    this.getLocalOfferFailed(e);
    return;
  } finally {
    // Cleared on both success and failure paths.
    this.makingOffer = false;
  }
}
/**
 * Creates and sets the local offer, then sends it as either m.call.invite
 * (first offer) or m.call.negotiate (renegotiation). Failures terminate the
 * call; on success the candidate queue is flushed and, for invites, an
 * invite timeout is armed.
 */
async gotLocalOffer() {
  _logger.logger.debug(`Call ${this.callId} gotLocalOffer() running`);
  if (this.callHasEnded()) {
    _logger.logger.debug(`Call ${this.callId} gotLocalOffer() ignoring newly created offer because the call has ended"`);
    return;
  }
  let offer;
  try {
    // Work around Chromium RTX codec issues (see getRidOfRTXCodecs).
    this.getRidOfRTXCodecs();
    offer = await this.createOffer();
  } catch (err) {
    _logger.logger.debug(`Call ${this.callId} gotLocalOffer() failed to create offer: `, err);
    this.terminate(CallParty.Local, CallErrorCode.CreateOffer, true);
    return;
  }
  try {
    await this.peerConn.setLocalDescription(offer);
  } catch (err) {
    _logger.logger.debug(`Call ${this.callId} gotLocalOffer() error setting local description!`, err);
    this.terminate(CallParty.Local, CallErrorCode.SetLocalDescription, true);
    return;
  }
  if (this.peerConn.iceGatheringState === "gathering") {
    // Allow a short time for initial candidates to be gathered
    await new Promise(resolve => {
      setTimeout(resolve, 200);
    });
  }
  if (this.callHasEnded()) return;
  // First offer goes out as an invite; subsequent ones as negotiate events.
  const eventType = this.state === CallState.CreateOffer ? _event.EventType.CallInvite : _event.EventType.CallNegotiate;
  const content = {
    lifetime: CALL_TIMEOUT_MS
  };
  if (eventType === _event.EventType.CallInvite && this.invitee) {
    content.invitee = this.invitee;
  }

  // clunky because TypeScript can't follow the types through if we use an expression as the key
  if (this.state === CallState.CreateOffer) {
    content.offer = this.peerConn.localDescription?.toJSON();
  } else {
    content.description = this.peerConn.localDescription?.toJSON();
  }
  content.capabilities = {
    "m.call.transferee": this.client.supportsCallTransfer,
    "m.call.dtmf": false
  };
  content[_callEventTypes.SDPStreamMetadataKey] = this.getLocalSDPStreamMetadata(true);

  // Get rid of any candidates waiting to be sent: they'll be included in the local
  // description we just got and will send in the offer.
  const discardCount = this.discardDuplicateCandidates();
  _logger.logger.info(`Call ${this.callId} gotLocalOffer() discarding ${discardCount} candidates that will be sent in offer`);
  try {
    await this.sendVoipEvent(eventType, content);
  } catch (error) {
    _logger.logger.error(`Call ${this.callId} gotLocalOffer() failed to send invite`, error);
    if (error instanceof _httpApi.MatrixError && error.event) this.client.cancelPendingEvent(error.event);
    let code = CallErrorCode.SignallingFailed;
    let message = "Signalling failed";
    if (this.state === CallState.CreateOffer) {
      code = CallErrorCode.SendInvite;
      message = "Failed to send invite";
    }
    if (error.name == "UnknownDeviceError") {
      code = CallErrorCode.UnknownDevices;
      message = "Unknown devices present in the room";
    }
    this.emit(CallEvent.Error, new CallError(code, message, error), this);
    this.terminate(CallParty.Local, code, false);

    // no need to carry on & send the candidate queue, but we also
    // don't want to rethrow the error
    return;
  }
  this.sendCandidateQueue();
  if (this.state === CallState.CreateOffer) {
    this.inviteOrAnswerSent = true;
    this.state = CallState.InviteSent;
    // Hang up automatically if the invite is never answered.
    this.inviteTimeout = setTimeout(() => {
      this.inviteTimeout = undefined;
      if (this.state === CallState.InviteSent) {
        this.hangup(CallErrorCode.InviteTimeout, false);
      }
    }, CALL_TIMEOUT_MS);
  }
}
+ /**
+ * This method removes all video/rtx codecs from screensharing video
+ * transceivers. This is necessary since they can cause problems. Without
+ * this the following steps should produce an error:
+ * Chromium calls Firefox
+ * Firefox answers
+ * Firefox starts screen-sharing
+ * Chromium starts screen-sharing
+ * Call crashes for Chromium with:
+ * [96685:23:0518/162603.933321:ERROR:webrtc_video_engine.cc(3296)] RTX codec (PT=97) mapped to PT=96 which is not in the codec list.
+ * [96685:23:0518/162603.933377:ERROR:webrtc_video_engine.cc(1171)] GetChangedRecvParameters called without any video codecs.
+ * [96685:23:0518/162603.933430:ERROR:sdp_offer_answer.cc(4302)] Failed to set local video description recv parameters for m-section with mid='2'. (INVALID_PARAMETER)
+ */
+ getRidOfRTXCodecs() {
+ // RTCRtpReceiver.getCapabilities and RTCRtpSender.getCapabilities don't seem to be supported on FF before v113
+ if (!RTCRtpReceiver.getCapabilities || !RTCRtpSender.getCapabilities) return;
+ const recvCodecs = RTCRtpReceiver.getCapabilities("video").codecs;
+ const sendCodecs = RTCRtpSender.getCapabilities("video").codecs;
+ const codecs = [...sendCodecs, ...recvCodecs];
+ for (const codec of codecs) {
+ if (codec.mimeType === "video/rtx") {
+ const rtxCodecIndex = codecs.indexOf(codec);
+ codecs.splice(rtxCodecIndex, 1);
+ }
+ }
+ const screenshareVideoTransceiver = this.transceivers.get(getTransceiverKey(_callEventTypes.SDPStreamMetadataPurpose.Screenshare, "video"));
+ // setCodecPreferences isn't supported on FF (as of v113)
+ screenshareVideoTransceiver?.setCodecPreferences?.(codecs);
+ }
+ /**
+ * @internal
+ */
+ async sendVoipEvent(eventType, content) {
+ const realContent = Object.assign({}, content, {
+ version: VOIP_PROTO_VERSION,
+ call_id: this.callId,
+ party_id: this.ourPartyId,
+ conf_id: this.groupCallId
+ });
+ if (this.opponentDeviceId) {
+ const toDeviceSeq = this.toDeviceSeq++;
+ const content = _objectSpread(_objectSpread({}, realContent), {}, {
+ device_id: this.client.deviceId,
+ sender_session_id: this.client.getSessionId(),
+ dest_session_id: this.opponentSessionId,
+ seq: toDeviceSeq,
+ [_event.ToDeviceMessageId]: (0, _uuid.v4)()
+ });
+ this.emit(CallEvent.SendVoipEvent, {
+ type: "toDevice",
+ eventType,
+ userId: this.invitee || this.getOpponentMember()?.userId,
+ opponentDeviceId: this.opponentDeviceId,
+ content
+ }, this);
+ const userId = this.invitee || this.getOpponentMember().userId;
+ if (this.client.getUseE2eForGroupCall()) {
+ if (!this.opponentDeviceInfo) {
+ _logger.logger.warn(`Call ${this.callId} sendVoipEvent() failed: we do not have opponentDeviceInfo`);
+ return;
+ }
+ await this.client.encryptAndSendToDevices([{
+ userId,
+ deviceInfo: this.opponentDeviceInfo
+ }], {
+ type: eventType,
+ content
+ });
+ } else {
+ await this.client.sendToDevice(eventType, new Map([[userId, new Map([[this.opponentDeviceId, content]])]]));
+ }
+ } else {
+ this.emit(CallEvent.SendVoipEvent, {
+ type: "sendEvent",
+ eventType,
+ roomId: this.roomId,
+ content: realContent,
+ userId: this.invitee || this.getOpponentMember()?.userId
+ }, this);
+ await this.client.sendEvent(this.roomId, eventType, realContent);
+ }
+ }
+
/**
 * Queue a candidate to be sent
 * @param content - The candidate to queue up, or null if candidates have finished being generated
 * and end-of-candidates should be signalled
 */
queueCandidate(content) {
  // We partially de-trickle candidates by waiting for `delay` before sending them
  // amalgamated, in order to avoid sending too many m.call.candidates events and hitting
  // rate limits in Matrix.
  // In practice, it'd be better to remove rate limits for m.call.*

  // N.B. this deliberately lets you queue and send blank candidates, which MSC2746
  // currently proposes as the way to indicate that candidate gathering is complete.
  // This will hopefully be changed to an explicit rather than implicit notification
  // shortly.
  if (content) {
    this.candidateSendQueue.push(content);
  } else {
    this.candidatesEnded = true;
  }

  // Don't send the ICE candidates yet if the call is in the ringing state: this
  // means we tried to pick (ie. started generating candidates) and then failed to
  // send the answer and went back to the ringing state. Queue up the candidates
  // to send if we successfully send the answer.
  // Equally don't send if we haven't yet sent the answer because we can send the
  // first batch of candidates along with the answer
  if (this.state === CallState.Ringing || !this.inviteOrAnswerSent) return;

  // MSC2746 recommends these values (can be quite long when calling because the
  // callee will need a while to answer the call)
  const delay = this.direction === CallDirection.Inbound ? 500 : 2000;
  // candidateSendTries is 0 only when no send is in flight (it is reset on
  // success by sendCandidateQueue), so this avoids scheduling parallel sends.
  if (this.candidateSendTries === 0) {
    setTimeout(() => {
      this.sendCandidateQueue();
    }, delay);
  }
}
+
+ // Discard all non-end-of-candidates messages
+ // Return the number of candidate messages that were discarded.
+ // Call this method before sending an invite or answer message
+ discardDuplicateCandidates() {
+ let discardCount = 0;
+ const newQueue = [];
+ for (let i = 0; i < this.candidateSendQueue.length; i++) {
+ const candidate = this.candidateSendQueue[i];
+ if (candidate.candidate === "") {
+ newQueue.push(candidate);
+ } else {
+ discardCount++;
+ }
+ }
+ this.candidateSendQueue = newQueue;
+ return discardCount;
+ }
+
/*
 * Transfers this call to another user
 */
async transfer(targetUserId) {
  // Fetch the target user's global profile info: their room avatar / displayname
  // could be different in whatever room we share with them.
  const profileInfo = await this.client.getProfileInfo(targetUserId);
  const replacementId = genCallID();
  const body = {
    // NOTE(review): replacement_id is a fresh ID while create_call uses
    // `replacementId` above — they appear to be distinct fields of
    // m.call.replaces, but confirm against MSC2747 that this is intended.
    replacement_id: genCallID(),
    target_user: {
      id: targetUserId,
      display_name: profileInfo.displayname,
      avatar_url: profileInfo.avatar_url
    },
    create_call: replacementId
  };
  await this.sendVoipEvent(_event.EventType.CallReplaces, body);
  // We initiated the transfer, so end our own leg of the call.
  await this.terminate(CallParty.Local, CallErrorCode.Transferred, true);
}
+
/*
 * Transfers this call to the target call, effectively 'joining' the
 * two calls (so the remote parties on each call are connected together).
 */
async transferToCall(transferTargetCall) {
  // Look up global profile info for both remote parties so each side of the
  // transfer can display the other correctly.
  const targetUserId = transferTargetCall.getOpponentMember()?.userId;
  const targetProfileInfo = targetUserId ? await this.client.getProfileInfo(targetUserId) : undefined;
  const opponentUserId = this.getOpponentMember()?.userId;
  const transfereeProfileInfo = opponentUserId ? await this.client.getProfileInfo(opponentUserId) : undefined;
  // Single new call ID shared by both replaces events: one side awaits it,
  // the other creates it.
  const newCallId = genCallID();
  const bodyToTransferTarget = {
    // the replacements on each side have their own ID, and it's distinct from the
    // ID of the new call (but we can use the same function to generate it)
    replacement_id: genCallID(),
    target_user: {
      id: opponentUserId,
      display_name: transfereeProfileInfo?.displayname,
      avatar_url: transfereeProfileInfo?.avatar_url
    },
    await_call: newCallId
  };
  await transferTargetCall.sendVoipEvent(_event.EventType.CallReplaces, bodyToTransferTarget);
  const bodyToTransferee = {
    replacement_id: genCallID(),
    target_user: {
      id: targetUserId,
      display_name: targetProfileInfo?.displayname,
      avatar_url: targetProfileInfo?.avatar_url
    },
    create_call: newCallId
  };
  await this.sendVoipEvent(_event.EventType.CallReplaces, bodyToTransferee);
  // Both of our legs end once the replaces events have been sent.
  await this.terminate(CallParty.Local, CallErrorCode.Transferred, true);
  await transferTargetCall.terminate(CallParty.Local, CallErrorCode.Transferred, true);
}
/**
 * Ends the call: records the hangup party/reason, cancels all timers and
 * listeners, collects final stats, tears down media and the peer
 * connection, and optionally emits the Hangup event. Idempotent.
 */
async terminate(hangupParty, hangupReason, shouldEmit) {
  if (this.callHasEnded()) return;
  this.hangupParty = hangupParty;
  this.hangupReason = hangupReason;
  this.state = CallState.Ended;
  if (this.inviteTimeout) {
    clearTimeout(this.inviteTimeout);
    this.inviteTimeout = undefined;
  }
  if (this.iceDisconnectedTimeout !== undefined) {
    clearTimeout(this.iceDisconnectedTimeout);
    this.iceDisconnectedTimeout = undefined;
  }
  if (this.callLengthInterval) {
    clearInterval(this.callLengthInterval);
    this.callLengthInterval = undefined;
  }
  if (this.stopVideoTrackTimer !== undefined) {
    clearTimeout(this.stopVideoTrackTimer);
    this.stopVideoTrackTimer = undefined;
  }
  for (const [stream, listener] of this.removeTrackListeners) {
    stream.removeEventListener("removetrack", listener);
  }
  this.removeTrackListeners.clear();
  // Grab final stats before the peer connection is closed.
  this.callStatsAtEnd = await this.collectCallStats();

  // Order is important here: first we stopAllMedia() and only then we can deleteAllFeeds()
  this.stopAllMedia();
  this.deleteAllFeeds();
  if (this.peerConn && this.peerConn.signalingState !== "closed") {
    this.peerConn.close();
  }
  this.stats?.removeStatsReportGatherer(this.callId);
  if (shouldEmit) {
    this.emit(CallEvent.Hangup, this);
  }
  // Drop ourselves from the client's live-call registry.
  this.client.callEventHandler.calls.delete(this.callId);
}
+ stopAllMedia() {
+ _logger.logger.debug(`Call ${this.callId} stopAllMedia() running`);
+ for (const feed of this.feeds) {
+ // Slightly awkward as local feed need to go via the correct method on
+ // the MediaHandler so they get removed from MediaHandler (remote tracks
+ // don't)
+ // NB. We clone local streams when passing them to individual calls in a group
+ // call, so we can (and should) stop the clones once we no longer need them:
+ // the other clones will continue fine.
+ if (feed.isLocal() && feed.purpose === _callEventTypes.SDPStreamMetadataPurpose.Usermedia) {
+ this.client.getMediaHandler().stopUserMediaStream(feed.stream);
+ } else if (feed.isLocal() && feed.purpose === _callEventTypes.SDPStreamMetadataPurpose.Screenshare) {
+ this.client.getMediaHandler().stopScreensharingStream(feed.stream);
+ } else if (!feed.isLocal()) {
+ _logger.logger.debug(`Call ${this.callId} stopAllMedia() stopping stream (streamId=${feed.stream.id})`);
+ for (const track of feed.stream.getTracks()) {
+ track.stop();
+ }
+ }
+ }
+ }
+ checkForErrorListener() {
+ if (this.listeners(_typedEventEmitter.EventEmitterEvents.Error).length === 0) {
+ throw new Error("You MUST attach an error listener using call.on('error', function() {})");
+ }
+ }
/**
 * Flushes the queued ICE candidates in one m.call.candidates event, with
 * exponential-backoff retries (giving up and hanging up after 5 attempts).
 * An empty-string candidate is appended to signal end-of-candidates.
 */
async sendCandidateQueue() {
  if (this.candidateSendQueue.length === 0 || this.callHasEnded()) {
    return;
  }
  // Take ownership of the current queue; anything queued after this point
  // goes into a fresh batch.
  const candidates = this.candidateSendQueue;
  this.candidateSendQueue = [];
  ++this.candidateSendTries;
  const content = {
    candidates: candidates.map(candidate => candidate.toJSON())
  };
  if (this.candidatesEnded) {
    // If there are no more candidates, signal this by adding an empty string candidate
    content.candidates.push({
      candidate: ""
    });
  }
  _logger.logger.debug(`Call ${this.callId} sendCandidateQueue() attempting to send ${candidates.length} candidates`);
  try {
    await this.sendVoipEvent(_event.EventType.CallCandidates, content);
    // reset our retry count if we have successfully sent our candidates
    // otherwise queueCandidate() will refuse to try to flush the queue
    this.candidateSendTries = 0;

    // Try to send candidates again just in case we received more candidates while sending.
    this.sendCandidateQueue();
  } catch (error) {
    // don't retry this event: we'll send another one later as we might
    // have more candidates by then.
    if (error instanceof _httpApi.MatrixError && error.event) this.client.cancelPendingEvent(error.event);

    // put all the candidates we failed to send back in the queue
    this.candidateSendQueue.push(...candidates);
    if (this.candidateSendTries > 5) {
      _logger.logger.debug(`Call ${this.callId} sendCandidateQueue() failed to send candidates on attempt ${this.candidateSendTries}. Giving up on this call.`, error);
      const code = CallErrorCode.SignallingFailed;
      const message = "Signalling failed";
      this.emit(CallEvent.Error, new CallError(code, message, error), this);
      this.hangup(code, false);
      return;
    }
    // Exponential backoff: 1s, 2s, 4s, ... based on the attempt number.
    const delayMs = 500 * Math.pow(2, this.candidateSendTries);
    ++this.candidateSendTries;
    _logger.logger.debug(`Call ${this.callId} sendCandidateQueue() failed to send candidates. Retrying in ${delayMs}ms`, error);
    setTimeout(() => {
      this.sendCandidateQueue();
    }, delayMs);
  }
}
+
/**
 * Place a call to this room.
 * Acquires a user-media stream, wraps it in a CallFeed and delegates to
 * placeCallWithCallFeeds(). On media failure getUserMediaFailed() handles
 * the error (no rethrow).
 * @throws if you have not specified a listener for 'error' events.
 * @throws if have passed audio=false.
 */
async placeCall(audio, video) {
  if (!audio) {
    throw new Error("You CANNOT start a call without audio");
  }
  this.state = CallState.WaitLocalMedia;
  try {
    const stream = await this.client.getMediaHandler().getUserMediaStream(audio, video);

    // make sure all the tracks are enabled (same as pushNewLocalFeed -
    // we probably ought to just have one code path for adding streams)
    setTracksEnabled(stream.getAudioTracks(), true);
    setTracksEnabled(stream.getVideoTracks(), true);
    const callFeed = new _callFeed.CallFeed({
      client: this.client,
      roomId: this.roomId,
      userId: this.client.getUserId(),
      deviceId: this.client.getDeviceId() ?? undefined,
      stream,
      purpose: _callEventTypes.SDPStreamMetadataPurpose.Usermedia,
      audioMuted: false,
      videoMuted: false
    });
    await this.placeCallWithCallFeeds([callFeed]);
  } catch (e) {
    // Media/placement failures are surfaced via getUserMediaFailed rather
    // than thrown to the caller.
    this.getUserMediaFailed(e);
    return;
  }
}
+
/**
 * Place a call to this room with call feed.
 * @param callFeeds - to use
 * @param requestScreenshareFeed - if true, also request a recvonly screenshare track
 * @throws if you have not specified a listener for 'error' events.
 * @throws if have passed audio=false.
 */
async placeCallWithCallFeeds(callFeeds, requestScreenshareFeed = false) {
  this.checkForErrorListener();
  this.direction = CallDirection.Outbound;
  await this.initOpponentCrypto();

  // XXX Find a better way to do this
  this.client.callEventHandler.calls.set(this.callId, this);

  // make sure we have valid turn creds. Unless something's gone wrong, it should
  // poll and keep the credentials valid so this should be instant.
  const haveTurnCreds = await this.client.checkTurnServers();
  if (!haveTurnCreds) {
    // Not fatal: the call proceeds and may still connect without TURN.
    _logger.logger.warn(`Call ${this.callId} placeCallWithCallFeeds() failed to get TURN credentials! Proceeding with call anyway...`);
  }

  // create the peer connection now so it can be gathering candidates while we get user
  // media (assuming a candidate pool size is configured)
  this.peerConn = this.createPeerConnection();
  this.emit(CallEvent.PeerConnectionCreated, this.peerConn, this);
  this.gotCallFeedsForInvite(callFeeds, requestScreenshareFeed);
}
+ // Builds the RTCPeerConnection for this call: forces relay-only ICE when
+ // forceTURN is set, wires up all the state/track/candidate listeners, and
+ // registers the connection with the stats gatherer (if stats are enabled).
+ createPeerConnection() {
+ const pc = new window.RTCPeerConnection({
+ iceTransportPolicy: this.forceTURN ? "relay" : undefined,
+ iceServers: this.turnServers,
+ iceCandidatePoolSize: this.client.iceCandidatePoolSize,
+ bundlePolicy: "max-bundle"
+ });
+
+ // 'connectionstatechange' would be better, but firefox doesn't implement that.
+ pc.addEventListener("iceconnectionstatechange", this.onIceConnectionStateChanged);
+ pc.addEventListener("signalingstatechange", this.onSignallingStateChanged);
+ pc.addEventListener("icecandidate", this.gotLocalIceCandidate);
+ pc.addEventListener("icegatheringstatechange", this.onIceGatheringStateChange);
+ pc.addEventListener("track", this.onTrack);
+ pc.addEventListener("negotiationneeded", this.onNegotiationNeeded);
+ pc.addEventListener("datachannel", this.onDataChannel);
+ const opponentMember = this.getOpponentMember();
+ // Stats are keyed by opponent user id; fall back to "unknown" if we have
+ // not yet chosen an opponent at this point.
+ const opponentMemberId = opponentMember ? opponentMember.userId : "unknown";
+ this.stats?.addStatsReportGatherer(this.callId, opponentMemberId, pc);
+ return pc;
+ }
+ // Returns true if the party ID in the given signalling message refers to
+ // the opponent we have committed to (see chooseOpponent()).
+ partyIdMatches(msg) {
+ // They must either match or both be absent (in which case opponentPartyId will be null)
+ // Also we ignore party IDs on the invite/offer if the version is 0, so we must do the same
+ // here and use null if the version is 0 (woe betide any opponent sending messages in the
+ // same call with different versions)
+ const msgPartyId = msg.version === 0 ? null : msg.party_id || null;
+ return msgPartyId === this.opponentPartyId;
+ }
+
+ // Commits to an opponent for the call: records their signalling version,
+ // party ID, capabilities and room member, and points the stats gatherer
+ // at them.
+ // ev: An invite or answer event
+ chooseOpponent(ev) {
+ // I choo-choo-choose you
+ const msg = ev.getContent();
+ _logger.logger.debug(`Call ${this.callId} chooseOpponent() running (partyId=${msg.party_id})`);
+ this.opponentVersion = msg.version;
+ if (this.opponentVersion === 0) {
+ // set to null to indicate that we've chosen an opponent, but because
+ // they're v0 they have no party ID (even if they sent one, we're ignoring it)
+ this.opponentPartyId = null;
+ } else {
+ // set to their party ID, or if they're naughty and didn't send one despite
+ // not being v0, set it to null to indicate we picked an opponent with no
+ // party ID
+ this.opponentPartyId = msg.party_id || null;
+ }
+ this.opponentCaps = msg.capabilities || {};
+ this.opponentMember = this.client.getRoom(this.roomId).getMember(ev.getSender()) ?? undefined;
+ if (this.opponentMember) {
+ this.stats?.updateOpponentMember(this.callId, this.opponentMember.userId);
+ }
+ }
+ // Flushes any remote ICE candidates that arrived (and were buffered per
+ // party ID) before we committed to an opponent, then drops the whole
+ // buffer — candidates from non-chosen parties are discarded.
+ async addBufferedIceCandidates() {
+ const bufferedCandidates = this.remoteCandidateBuffer.get(this.opponentPartyId);
+ if (bufferedCandidates) {
+ _logger.logger.info(`Call ${this.callId} addBufferedIceCandidates() adding ${bufferedCandidates.length} buffered candidates for opponent ${this.opponentPartyId}`);
+ await this.addIceCandidates(bufferedCandidates);
+ }
+ this.remoteCandidateBuffer.clear();
+ }
+ // Adds a batch of remote ICE candidates to the peer connection.
+ // A candidate with neither sdpMid nor sdpMLineIndex is treated as the
+ // end-of-candidates marker. Failures are logged (quietly if we are
+ // currently ignoring an offer due to glare) but never thrown.
+ async addIceCandidates(candidates) {
+ for (const candidate of candidates) {
+ if ((candidate.sdpMid === null || candidate.sdpMid === undefined) && (candidate.sdpMLineIndex === null || candidate.sdpMLineIndex === undefined)) {
+ _logger.logger.debug(`Call ${this.callId} addIceCandidates() got remote ICE end-of-candidates`);
+ } else {
+ _logger.logger.debug(`Call ${this.callId} addIceCandidates() got remote ICE candidate (sdpMid=${candidate.sdpMid}, candidate=${candidate.candidate})`);
+ }
+ try {
+ await this.peerConn.addIceCandidate(candidate);
+ } catch (err) {
+ if (!this.ignoreOffer) {
+ _logger.logger.info(`Call ${this.callId} addIceCandidates() failed to add remote ICE candidate`, err);
+ } else {
+ _logger.logger.debug(`Call ${this.callId} addIceCandidates() failed to add remote ICE candidate because ignoring offer`, err);
+ }
+ }
+ }
+ }
+ // True once createPeerConnection() has run for this call.
+ get hasPeerConnection() {
+ return Boolean(this.peerConn);
+ }
+ // Attaches a GroupCallStats collector to this call and starts it.
+ // NOTE(review): the `peerId` parameter is accepted but never used in this
+ // body — confirm against upstream whether it is vestigial.
+ initStats(stats, peerId = "unknown") {
+ this.stats = stats;
+ this.stats.start();
+ }
+}
+exports.MatrixCall = MatrixCall;
+// Sets the `enabled` flag on every MediaStreamTrack in the given array.
+function setTracksEnabled(tracks, enabled) {
+ for (const track of tracks) {
+ track.enabled = enabled;
+ }
+}
+// Returns true if this environment appears to support WebRTC calling:
+// we must be in a browser-like environment (window + document) and at
+// least one of the WebRTC entry points must be reachable without throwing.
+function supportsMatrixCall() {
+ // typeof prevents Node from erroring on an undefined reference
+ if (typeof window === "undefined" || typeof document === "undefined") {
+ // NB. We don't log here as apps try to create a call object as a test for
+ // whether calls are supported, so we shouldn't fill the logs up.
+ return false;
+ }
+
+ // Firefox throws on so little as accessing the RTCPeerConnection when operating in a secure mode.
+ // There's some information at https://bugzilla.mozilla.org/show_bug.cgi?id=1542616 though the concern
+ // is that the browser throwing a SecurityError will brick the client creation process.
+ try {
+ // Any one of these being present counts as "supported" here.
+ const supported = Boolean(window.RTCPeerConnection || window.RTCSessionDescription || window.RTCIceCandidate || navigator.mediaDevices);
+ if (!supported) {
+ /* istanbul ignore if */ // Adds a lot of noise to test runs, so disable logging there.
+ if (process.env.NODE_ENV !== "test") {
+ _logger.logger.error("WebRTC is not supported in this browser / environment");
+ }
+ return false;
+ }
+ } catch (e) {
+ _logger.logger.error("Exception thrown when trying to access WebRTC", e);
+ return false;
+ }
+ return true;
+}
+
+/**
+ * DEPRECATED
+ * Use client.createCall()
+ *
+ * Create a new Matrix call for the browser. Returns null when the
+ * environment lacks WebRTC support (see supportsMatrixCall()). The created
+ * call re-emits all CallEvent events on the client.
+ * @param client - The client instance to use.
+ * @param roomId - The room the call is in.
+ * @param options - DEPRECATED optional options map.
+ * @returns the call or null if the browser doesn't support calling.
+ */
+function createNewMatrixCall(client, roomId, options) {
+ if (!supportsMatrixCall()) return null;
+ const optionsForceTURN = options ? options.forceTURN : false;
+ const opts = {
+ client: client,
+ roomId: roomId,
+ invitee: options?.invitee,
+ turnServers: client.getTurnServers(),
+ // call level options
+ forceTURN: client.forceTURN || optionsForceTURN,
+ opponentDeviceId: options?.opponentDeviceId,
+ opponentSessionId: options?.opponentSessionId,
+ groupCallId: options?.groupCallId
+ };
+ const call = new MatrixCall(opts);
+ client.reEmitter.reEmit(call, Object.values(CallEvent));
+ return call;
+}
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callEventHandler.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callEventHandler.js
new file mode 100644
index 0000000000..caf1cc9d2b
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callEventHandler.js
@@ -0,0 +1,339 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CallEventHandlerEvent = exports.CallEventHandler = void 0;
+var _logger = require("../logger");
+var _call = require("./call");
+var _event = require("../@types/event");
+var _client = require("../client");
+var _groupCall = require("./groupCall");
+var _room = require("../models/room");
+// Babel-generated helpers for transpiled class-field initialisation; do not edit.
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } /*
+ Copyright 2020 The Matrix.org Foundation C.I.C.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+ Copyright 2020 The Matrix.org Foundation C.I.C.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+// Don't ring unless we'd be ringing for at least 3 seconds: the user needs some
+// time to press the 'accept' button
+const RING_GRACE_PERIOD = 3000;
+// Transpiled TypeScript enum: events emitted by CallEventHandler.
+let CallEventHandlerEvent = /*#__PURE__*/function (CallEventHandlerEvent) {
+ CallEventHandlerEvent["Incoming"] = "Call.incoming";
+ return CallEventHandlerEvent;
+}({});
+exports.CallEventHandlerEvent = CallEventHandlerEvent;
+// Receives m.call.* events from sync (room timeline and to-device), buffers
+// them so a whole sync's worth can be inspected at once, re-orders to-device
+// events by their `seq` field, and dispatches each event to the right
+// MatrixCall (creating calls for incoming invites).
+class CallEventHandler {
+ constructor(client) {
+ // XXX: Most of these are only public because of the tests
+ _defineProperty(this, "calls", void 0);
+ _defineProperty(this, "callEventBuffer", void 0);
+ _defineProperty(this, "nextSeqByCall", new Map());
+ _defineProperty(this, "toDeviceEventBuffers", new Map());
+ _defineProperty(this, "client", void 0);
+ _defineProperty(this, "candidateEventsByCall", void 0);
+ _defineProperty(this, "eventBufferPromiseChain", void 0);
+ // Runs after every sync: swaps in a fresh buffer and processes the old
+ // one, chaining onto any still-running evaluation to keep ordering.
+ _defineProperty(this, "onSync", () => {
+ // Process the current event buffer and start queuing into a new one.
+ const currentEventBuffer = this.callEventBuffer;
+ this.callEventBuffer = [];
+
+ // Ensure correct ordering by only processing this queue after the previous one has finished processing
+ if (this.eventBufferPromiseChain) {
+ this.eventBufferPromiseChain = this.eventBufferPromiseChain.then(() => this.evaluateEventBuffer(currentEventBuffer));
+ } else {
+ this.eventBufferPromiseChain = this.evaluateEventBuffer(currentEventBuffer);
+ }
+ });
+ // Room timeline events are simply buffered until the next sync completes.
+ _defineProperty(this, "onRoomTimeline", event => {
+ this.callEventBuffer.push(event);
+ });
+ // To-device events carry a per-call `seq`; out-of-order events are held in
+ // a per-call sorted buffer until the missing sequence numbers arrive.
+ _defineProperty(this, "onToDeviceEvent", event => {
+ const content = event.getContent();
+ if (!content.call_id) {
+ this.callEventBuffer.push(event);
+ return;
+ }
+ if (!this.nextSeqByCall.has(content.call_id)) {
+ this.nextSeqByCall.set(content.call_id, 0);
+ }
+ if (content.seq === undefined) {
+ this.callEventBuffer.push(event);
+ return;
+ }
+ const nextSeq = this.nextSeqByCall.get(content.call_id) || 0;
+ if (content.seq !== nextSeq) {
+ // Not the sequence number we expected next: insert into the per-call
+ // buffer, keeping it sorted by seq.
+ if (!this.toDeviceEventBuffers.has(content.call_id)) {
+ this.toDeviceEventBuffers.set(content.call_id, []);
+ }
+ const buffer = this.toDeviceEventBuffers.get(content.call_id);
+ const index = buffer.findIndex(e => e.getContent().seq > content.seq);
+ if (index === -1) {
+ buffer.push(event);
+ } else {
+ buffer.splice(index, 0, event);
+ }
+ } else {
+ // Expected event arrived: deliver it, then drain any consecutive
+ // buffered successors.
+ const callId = content.call_id;
+ this.callEventBuffer.push(event);
+ this.nextSeqByCall.set(callId, content.seq + 1);
+ const buffer = this.toDeviceEventBuffers.get(callId);
+ let nextEvent = buffer && buffer.shift();
+ while (nextEvent && nextEvent.getContent().seq === this.nextSeqByCall.get(callId)) {
+ this.callEventBuffer.push(nextEvent);
+ this.nextSeqByCall.set(callId, nextEvent.getContent().seq + 1);
+ nextEvent = buffer.shift();
+ }
+ }
+ });
+ this.client = client;
+ this.calls = new Map();
+ // The sync code always emits one event at a time, so it will patiently
+ // wait for us to finish processing a call invite before delivering the
+ // next event, even if that next event is a hangup. We therefore accumulate
+ // all our call events and then process them on the 'sync' event, ie.
+ // each time a sync has completed. This way, we can avoid emitting incoming
+ // call events if we get both the invite and answer/hangup in the same sync.
+ // This happens quite often, eg. replaying sync from storage, catchup sync
+ // after loading and after we've been offline for a bit.
+ this.callEventBuffer = [];
+ this.candidateEventsByCall = new Map();
+ }
+ // Subscribe to the client events this handler consumes.
+ start() {
+ this.client.on(_client.ClientEvent.Sync, this.onSync);
+ this.client.on(_room.RoomEvent.Timeline, this.onRoomTimeline);
+ this.client.on(_client.ClientEvent.ToDeviceEvent, this.onToDeviceEvent);
+ }
+ // Unsubscribe from everything start() subscribed to.
+ stop() {
+ this.client.removeListener(_client.ClientEvent.Sync, this.onSync);
+ this.client.removeListener(_room.RoomEvent.Timeline, this.onRoomTimeline);
+ this.client.removeListener(_client.ClientEvent.ToDeviceEvent, this.onToDeviceEvent);
+ }
+ // Decrypts the buffered events, then dispatches the call-related ones in
+ // arrival order — skipping invites for calls that the same buffer already
+ // shows as answered or hung up.
+ async evaluateEventBuffer(eventBuffer) {
+ await Promise.all(eventBuffer.map(event => this.client.decryptEventIfNeeded(event)));
+ const callEvents = eventBuffer.filter(event => {
+ const eventType = event.getType();
+ return eventType.startsWith("m.call.") || eventType.startsWith("org.matrix.call.");
+ });
+ const ignoreCallIds = new Set();
+
+ // inspect the buffer and mark all calls which have been answered
+ // or hung up before passing them to the call event handler.
+ for (const event of callEvents) {
+ const eventType = event.getType();
+ if (eventType === _event.EventType.CallAnswer || eventType === _event.EventType.CallHangup) {
+ ignoreCallIds.add(event.getContent().call_id);
+ }
+ }
+
+ // Process call events in the order that they were received
+ for (const event of callEvents) {
+ const eventType = event.getType();
+ const callId = event.getContent().call_id;
+ if (eventType === _event.EventType.CallInvite && ignoreCallIds.has(callId)) {
+ // This call has previously been answered or hung up: ignore it
+ continue;
+ }
+ try {
+ await this.handleCallEvent(event);
+ } catch (e) {
+ _logger.logger.error("CallEventHandler evaluateEventBuffer() caught exception handling call event", e);
+ }
+ }
+ }
+ // Dispatches one decrypted call event: validates group-call routing
+ // (device id, session id), then handles invites, candidates,
+ // hangups/rejects and the remaining per-call signalling events.
+ async handleCallEvent(event) {
+ this.client.emit(_client.ClientEvent.ReceivedVoipEvent, event);
+ const content = event.getContent();
+ const callRoomId = event.getRoomId() || this.client.groupCallEventHandler.getGroupCallById(content.conf_id)?.room?.roomId;
+ const groupCallId = content.conf_id;
+ const type = event.getType();
+ const senderId = event.getSender();
+ let call = content.call_id ? this.calls.get(content.call_id) : undefined;
+ let opponentDeviceId;
+ let groupCall;
+ if (groupCallId) {
+ groupCall = this.client.groupCallEventHandler.getGroupCallById(groupCallId);
+ if (!groupCall) {
+ _logger.logger.warn(`CallEventHandler handleCallEvent() could not find a group call - ignoring event (groupCallId=${groupCallId}, type=${type})`);
+ return;
+ }
+ opponentDeviceId = content.device_id;
+ if (!opponentDeviceId) {
+ _logger.logger.warn(`CallEventHandler handleCallEvent() could not find a device id - ignoring event (senderId=${senderId})`);
+ groupCall.emit(_groupCall.GroupCallEvent.Error, new _groupCall.GroupCallUnknownDeviceError(senderId));
+ return;
+ }
+ if (content.dest_session_id !== this.client.getSessionId()) {
+ _logger.logger.warn("CallEventHandler handleCallEvent() call event does not match current session id - ignoring");
+ return;
+ }
+ }
+ const weSentTheEvent = senderId === this.client.credentials.userId && (opponentDeviceId === undefined || opponentDeviceId === this.client.getDeviceId());
+ if (!callRoomId) return;
+ if (type === _event.EventType.CallInvite) {
+ // ignore invites you send
+ if (weSentTheEvent) return;
+ // expired call
+ if (event.getLocalAge() > content.lifetime - RING_GRACE_PERIOD) return;
+ // stale/old invite event
+ if (call && call.state === _call.CallState.Ended) return;
+ if (call) {
+ _logger.logger.warn(`CallEventHandler handleCallEvent() already has a call but got an invite - clobbering (callId=${content.call_id})`);
+ }
+ if (content.invitee && content.invitee !== this.client.getUserId()) {
+ return; // This invite was meant for another user in the room
+ }
+
+ const timeUntilTurnCresExpire = (this.client.getTurnServersExpiry() ?? 0) - Date.now();
+ _logger.logger.info("CallEventHandler handleCallEvent() current turn creds expire in " + timeUntilTurnCresExpire + " ms");
+ call = (0, _call.createNewMatrixCall)(this.client, callRoomId, {
+ forceTURN: this.client.forceTURN,
+ opponentDeviceId,
+ groupCallId,
+ opponentSessionId: content.sender_session_id
+ }) ?? undefined;
+ if (!call) {
+ _logger.logger.log(`CallEventHandler handleCallEvent() this client does not support WebRTC (callId=${content.call_id})`);
+ // don't hang up the call: there could be other clients
+ // connected that do support WebRTC and declining the
+ // the call on their behalf would be really annoying.
+ return;
+ }
+ call.callId = content.call_id;
+ const stats = groupCall?.getGroupCallStats();
+ if (stats) {
+ call.initStats(stats);
+ }
+ try {
+ await call.initWithInvite(event);
+ } catch (e) {
+ if (e instanceof _call.CallError) {
+ if (e.code === _groupCall.GroupCallErrorCode.UnknownDevice) {
+ groupCall?.emit(_groupCall.GroupCallEvent.Error, e);
+ } else {
+ _logger.logger.error(e);
+ }
+ }
+ }
+ this.calls.set(call.callId, call);
+
+ // if we stashed candidate events for that call ID, play them back now
+ if (this.candidateEventsByCall.get(call.callId)) {
+ for (const ev of this.candidateEventsByCall.get(call.callId)) {
+ call.onRemoteIceCandidatesReceived(ev);
+ }
+ }
+
+ // Were we trying to call that user (room)?
+ let existingCall;
+ for (const thisCall of this.calls.values()) {
+ const isCalling = [_call.CallState.WaitLocalMedia, _call.CallState.CreateOffer, _call.CallState.InviteSent].includes(thisCall.state);
+ if (call.roomId === thisCall.roomId && thisCall.direction === _call.CallDirection.Outbound && call.getOpponentMember()?.userId === thisCall.invitee && isCalling) {
+ existingCall = thisCall;
+ break;
+ }
+ }
+ if (existingCall) {
+ // Glare resolution: the call with the lexically greater callId wins.
+ if (existingCall.callId > call.callId) {
+ _logger.logger.log(`CallEventHandler handleCallEvent() detected glare - answering incoming call and canceling outgoing call (incomingId=${call.callId}, outgoingId=${existingCall.callId})`);
+ existingCall.replacedBy(call);
+ } else {
+ _logger.logger.log(`CallEventHandler handleCallEvent() detected glare - hanging up incoming call (incomingId=${call.callId}, outgoingId=${existingCall.callId})`);
+ call.hangup(_call.CallErrorCode.Replaced, true);
+ }
+ } else {
+ this.client.emit(CallEventHandlerEvent.Incoming, call);
+ }
+ return;
+ } else if (type === _event.EventType.CallCandidates) {
+ if (weSentTheEvent) return;
+ if (!call) {
+ // store the candidates; we may get a call eventually.
+ if (!this.candidateEventsByCall.has(content.call_id)) {
+ this.candidateEventsByCall.set(content.call_id, []);
+ }
+ this.candidateEventsByCall.get(content.call_id).push(event);
+ } else {
+ call.onRemoteIceCandidatesReceived(event);
+ }
+ return;
+ } else if ([_event.EventType.CallHangup, _event.EventType.CallReject].includes(type)) {
+ // Note that we also observe our own hangups here so we can see
+ // if we've already rejected a call that would otherwise be valid
+ if (!call) {
+ // if not live, store the fact that the call has ended because
+ // we're probably getting events backwards so
+ // the hangup will come before the invite
+ call = (0, _call.createNewMatrixCall)(this.client, callRoomId, {
+ opponentDeviceId,
+ opponentSessionId: content.sender_session_id
+ }) ?? undefined;
+ if (call) {
+ call.callId = content.call_id;
+ call.initWithHangup(event);
+ this.calls.set(content.call_id, call);
+ }
+ } else {
+ if (call.state !== _call.CallState.Ended) {
+ if (type === _event.EventType.CallHangup) {
+ call.onHangupReceived(content);
+ } else {
+ call.onRejectReceived(content);
+ }
+
+ // @ts-expect-error typescript thinks the state can't be 'ended' because we're
+ // inside the if block where it wasn't, but it could have changed because
+ // on[Hangup|Reject]Received are side-effecty.
+ if (call.state === _call.CallState.Ended) this.calls.delete(content.call_id);
+ }
+ }
+ return;
+ }
+
+ // The following events need a call and a peer connection
+ if (!call || !call.hasPeerConnection) {
+ _logger.logger.info(`CallEventHandler handleCallEvent() discarding possible call event as we don't have a call (type=${type})`);
+ return;
+ }
+ // Ignore remote echo
+ if (event.getContent().party_id === call.ourPartyId) return;
+ switch (type) {
+ case _event.EventType.CallAnswer:
+ if (weSentTheEvent) {
+ if (call.state === _call.CallState.Ringing) {
+ call.onAnsweredElsewhere(content);
+ }
+ } else {
+ call.onAnswerReceived(event);
+ }
+ break;
+ case _event.EventType.CallSelectAnswer:
+ call.onSelectAnswerReceived(event);
+ break;
+ case _event.EventType.CallNegotiate:
+ call.onNegotiateReceived(event);
+ break;
+ case _event.EventType.CallAssertedIdentity:
+ case _event.EventType.CallAssertedIdentityPrefix:
+ call.onAssertedIdentityReceived(event);
+ break;
+ case _event.EventType.CallSDPStreamMetadataChanged:
+ case _event.EventType.CallSDPStreamMetadataChangedPrefix:
+ call.onSDPStreamMetadataChangedReceived(event);
+ break;
+ }
+ }
+}
+exports.CallEventHandler = CallEventHandler; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callEventTypes.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callEventTypes.js
new file mode 100644
index 0000000000..fae0c8f1e9
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callEventTypes.js
@@ -0,0 +1,19 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.SDPStreamMetadataPurpose = exports.SDPStreamMetadataKey = void 0;
+// allow non-camelcase as these are events type that go onto the wire
+/* eslint-disable camelcase */
+
+// TODO: Change to "sdp_stream_metadata" when MSC3077 is merged
+const SDPStreamMetadataKey = "org.matrix.msc3077.sdp_stream_metadata";
+exports.SDPStreamMetadataKey = SDPStreamMetadataKey;
+// Transpiled TypeScript enum: the purpose of a stream as advertised in SDP
+// stream metadata (MSC3077).
+let SDPStreamMetadataPurpose = /*#__PURE__*/function (SDPStreamMetadataPurpose) {
+ SDPStreamMetadataPurpose["Usermedia"] = "m.usermedia";
+ SDPStreamMetadataPurpose["Screenshare"] = "m.screenshare";
+ return SDPStreamMetadataPurpose;
+}({});
+/* eslint-enable camelcase */
+exports.SDPStreamMetadataPurpose = SDPStreamMetadataPurpose; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callFeed.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callFeed.js
new file mode 100644
index 0000000000..25af7aa5e8
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/callFeed.js
@@ -0,0 +1,294 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.SPEAKING_THRESHOLD = exports.CallFeedEvent = exports.CallFeed = void 0;
+var _callEventTypes = require("./callEventTypes");
+var _audioContext = require("./audioContext");
+var _logger = require("../logger");
+var _typedEventEmitter = require("../models/typed-event-emitter");
+var _call = require("./call");
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } /*
+ Copyright 2021 Šimon Brandner <simon.bra.ag@gmail.com>
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+const POLLING_INTERVAL = 200; // ms
+const SPEAKING_THRESHOLD = -60; // dB
+exports.SPEAKING_THRESHOLD = SPEAKING_THRESHOLD;
+const SPEAKING_SAMPLE_COUNT = 8; // samples
+// Transpiled TypeScript enum: events emitted by CallFeed.
+let CallFeedEvent = /*#__PURE__*/function (CallFeedEvent) {
+ CallFeedEvent["NewStream"] = "new_stream";
+ CallFeedEvent["MuteStateChanged"] = "mute_state_changed";
+ CallFeedEvent["LocalVolumeChanged"] = "local_volume_changed";
+ CallFeedEvent["VolumeChanged"] = "volume_changed";
+ CallFeedEvent["ConnectedChanged"] = "connected_changed";
+ CallFeedEvent["Speaking"] = "speaking";
+ CallFeedEvent["Disposed"] = "disposed";
+ return CallFeedEvent;
+}({});
+exports.CallFeedEvent = CallFeedEvent;
+// Wraps one MediaStream belonging to a call (local or remote): tracks mute
+// state, connectedness, and — via a Web Audio AnalyserNode — emits volume
+// and speaking-detection events.
+class CallFeed extends _typedEventEmitter.TypedEventEmitter {
+ constructor(opts) {
+ super();
+ _defineProperty(this, "stream", void 0);
+ _defineProperty(this, "sdpMetadataStreamId", void 0);
+ _defineProperty(this, "userId", void 0);
+ _defineProperty(this, "deviceId", void 0);
+ _defineProperty(this, "purpose", void 0);
+ _defineProperty(this, "speakingVolumeSamples", void 0);
+ _defineProperty(this, "client", void 0);
+ _defineProperty(this, "call", void 0);
+ _defineProperty(this, "roomId", void 0);
+ _defineProperty(this, "audioMuted", void 0);
+ _defineProperty(this, "videoMuted", void 0);
+ _defineProperty(this, "localVolume", 1);
+ _defineProperty(this, "measuringVolumeActivity", false);
+ _defineProperty(this, "audioContext", void 0);
+ _defineProperty(this, "analyser", void 0);
+ _defineProperty(this, "frequencyBinCount", void 0);
+ _defineProperty(this, "speakingThreshold", SPEAKING_THRESHOLD);
+ _defineProperty(this, "speaking", false);
+ _defineProperty(this, "volumeLooperTimeout", void 0);
+ _defineProperty(this, "_disposed", false);
+ _defineProperty(this, "_connected", false);
+ // Re-announce the stream when a track is added to it.
+ _defineProperty(this, "onAddTrack", () => {
+ this.emit(CallFeedEvent.NewStream, this.stream);
+ });
+ // Mirror the owning call's connection state onto this feed.
+ _defineProperty(this, "onCallState", state => {
+ if (state === _call.CallState.Connected) {
+ this.connected = true;
+ } else if (state === _call.CallState.Connecting) {
+ this.connected = false;
+ }
+ });
+ // Periodic (POLLING_INTERVAL ms) volume sampler: takes the loudest
+ // frequency bin, keeps a rolling window of samples, and flips the
+ // `speaking` flag when any sample exceeds the speaking threshold.
+ _defineProperty(this, "volumeLooper", () => {
+ if (!this.analyser) return;
+ if (!this.measuringVolumeActivity) return;
+ this.analyser.getFloatFrequencyData(this.frequencyBinCount);
+ let maxVolume = -Infinity;
+ for (const volume of this.frequencyBinCount) {
+ if (volume > maxVolume) {
+ maxVolume = volume;
+ }
+ }
+ this.speakingVolumeSamples.shift();
+ this.speakingVolumeSamples.push(maxVolume);
+ this.emit(CallFeedEvent.VolumeChanged, maxVolume);
+ let newSpeaking = false;
+ for (const volume of this.speakingVolumeSamples) {
+ if (volume > this.speakingThreshold) {
+ newSpeaking = true;
+ break;
+ }
+ }
+ if (this.speaking !== newSpeaking) {
+ this.speaking = newSpeaking;
+ this.emit(CallFeedEvent.Speaking, this.speaking);
+ }
+ this.volumeLooperTimeout = setTimeout(this.volumeLooper, POLLING_INTERVAL);
+ });
+ this.client = opts.client;
+ this.call = opts.call;
+ this.roomId = opts.roomId;
+ this.userId = opts.userId;
+ this.deviceId = opts.deviceId;
+ this.purpose = opts.purpose;
+ this.audioMuted = opts.audioMuted;
+ this.videoMuted = opts.videoMuted;
+ this.speakingVolumeSamples = new Array(SPEAKING_SAMPLE_COUNT).fill(-Infinity);
+ this.sdpMetadataStreamId = opts.stream.id;
+ this.updateStream(null, opts.stream);
+ this.stream = opts.stream; // updateStream does this, but this makes TS happier
+
+ if (this.hasAudioTrack) {
+ this.initVolumeMeasuring();
+ }
+ if (opts.call) {
+ opts.call.addListener(_call.CallEvent.State, this.onCallState);
+ this.onCallState(opts.call.state);
+ }
+ }
+ get connected() {
+ // Local feeds are always considered connected
+ return this.isLocal() || this._connected;
+ }
+ set connected(connected) {
+ this._connected = connected;
+ this.emit(CallFeedEvent.ConnectedChanged, this.connected);
+ }
+ get hasAudioTrack() {
+ return this.stream.getAudioTracks().length > 0;
+ }
+ // Swaps the underlying MediaStream, moving the addtrack listener and
+ // volume measurement across, and emits NewStream.
+ updateStream(oldStream, newStream) {
+ if (newStream === oldStream) return;
+ const wasMeasuringVolumeActivity = this.measuringVolumeActivity;
+ if (oldStream) {
+ oldStream.removeEventListener("addtrack", this.onAddTrack);
+ this.measureVolumeActivity(false);
+ }
+ this.stream = newStream;
+ newStream.addEventListener("addtrack", this.onAddTrack);
+ if (this.hasAudioTrack) {
+ this.initVolumeMeasuring();
+ if (wasMeasuringVolumeActivity) this.measureVolumeActivity(true);
+ } else {
+ this.measureVolumeActivity(false);
+ }
+ this.emit(CallFeedEvent.NewStream, this.stream);
+ }
+ // Sets up the shared AudioContext and an AnalyserNode fed from this
+ // feed's stream; no-op when the feed has no audio track.
+ initVolumeMeasuring() {
+ if (!this.hasAudioTrack) return;
+ if (!this.audioContext) this.audioContext = (0, _audioContext.acquireContext)();
+ this.analyser = this.audioContext.createAnalyser();
+ this.analyser.fftSize = 512;
+ this.analyser.smoothingTimeConstant = 0.1;
+ const mediaStreamAudioSourceNode = this.audioContext.createMediaStreamSource(this.stream);
+ mediaStreamAudioSourceNode.connect(this.analyser);
+ this.frequencyBinCount = new Float32Array(this.analyser.frequencyBinCount);
+ }
+ /**
+ * Returns callRoom member
+ * @returns member of the callRoom, or null if the room or member is unknown
+ */
+ getMember() {
+ const callRoom = this.client.getRoom(this.roomId);
+ return callRoom?.getMember(this.userId) ?? null;
+ }
+
+ /**
+ * Returns true if CallFeed is local, otherwise returns false
+ * @returns is local?
+ */
+ isLocal() {
+ return this.userId === this.client.getUserId() && (this.deviceId === undefined || this.deviceId === this.client.getDeviceId());
+ }
+
+ /**
+ * Returns true if audio is muted or if there are no audio
+ * tracks, otherwise returns false
+ * @returns is audio muted?
+ */
+ isAudioMuted() {
+ return this.stream.getAudioTracks().length === 0 || this.audioMuted;
+ }
+
+ /**
+ * Returns true if video is muted or if there are no video
+ * tracks, otherwise returns false
+ * @returns is video muted?
+ */
+ isVideoMuted() {
+ // We assume only one video track
+ return this.stream.getVideoTracks().length === 0 || this.videoMuted;
+ }
+ isSpeaking() {
+ return this.speaking;
+ }
+
+ /**
+ * Replaces the current MediaStream with a new one.
+ * The stream will be different and new stream as remote parties are
+ * concerned, but this can be used for convenience locally to set up
+ * volume listeners automatically on the new stream etc.
+ * @param newStream - new stream with which to replace the current one
+ */
+ setNewStream(newStream) {
+ this.updateStream(this.stream, newStream);
+ }
+
+ /**
+ * Set one or both of feed's internal audio and video video mute state
+ * Either value may be null to leave it as-is
+ * @param audioMuted - is the feed's audio muted?
+ * @param videoMuted - is the feed's video muted?
+ */
+ setAudioVideoMuted(audioMuted, videoMuted) {
+ if (audioMuted !== null) {
+ if (this.audioMuted !== audioMuted) {
+ // Reset speaking detection when the audio mute state flips.
+ this.speakingVolumeSamples.fill(-Infinity);
+ }
+ this.audioMuted = audioMuted;
+ }
+ if (videoMuted !== null) this.videoMuted = videoMuted;
+ this.emit(CallFeedEvent.MuteStateChanged, this.audioMuted, this.videoMuted);
+ }
+
+ /**
+ * Starts emitting volume_changed events where the emitter value is in decibels
+ * @param enabled - emit volume changes
+ */
+ measureVolumeActivity(enabled) {
+ if (enabled) {
+ if (!this.analyser || !this.frequencyBinCount || !this.hasAudioTrack) return;
+ this.measuringVolumeActivity = true;
+ this.volumeLooper();
+ } else {
+ this.measuringVolumeActivity = false;
+ this.speakingVolumeSamples.fill(-Infinity);
+ this.emit(CallFeedEvent.VolumeChanged, -Infinity);
+ }
+ }
+ setSpeakingThreshold(threshold) {
+ this.speakingThreshold = threshold;
+ }
+ // Clones the underlying stream into a new CallFeed with the same metadata,
+ // registering the cloned stream with the media handler so it gets cleaned up.
+ clone() {
+ const mediaHandler = this.client.getMediaHandler();
+ const stream = this.stream.clone();
+ _logger.logger.log(`CallFeed clone() cloning stream (originalStreamId=${this.stream.id}, newStreamId${stream.id})`);
+ if (this.purpose === _callEventTypes.SDPStreamMetadataPurpose.Usermedia) {
+ mediaHandler.userMediaStreams.push(stream);
+ } else {
+ mediaHandler.screensharingStreams.push(stream);
+ }
+ return new CallFeed({
+ client: this.client,
+ roomId: this.roomId,
+ userId: this.userId,
+ deviceId: this.deviceId,
+ stream,
+ purpose: this.purpose,
+ audioMuted: this.audioMuted,
+ videoMuted: this.videoMuted
+ });
+ }
+ // Tears down listeners, the volume polling loop and the shared AudioContext
+ // reference, then emits Disposed.
+ dispose() {
+ clearTimeout(this.volumeLooperTimeout);
+ this.stream?.removeEventListener("addtrack", this.onAddTrack);
+ this.call?.removeListener(_call.CallEvent.State, this.onCallState);
+ if (this.audioContext) {
+ this.audioContext = undefined;
+ this.analyser = undefined;
+ (0, _audioContext.releaseContext)();
+ }
+ this._disposed = true;
+ this.emit(CallFeedEvent.Disposed);
+ }
+ get disposed() {
+ return this._disposed;
+ }
+ set disposed(value) {
+ this._disposed = value;
+ }
+ getLocalVolume() {
+ return this.localVolume;
+ }
+ setLocalVolume(localVolume) {
+ this.localVolume = localVolume;
+ this.emit(CallFeedEvent.LocalVolumeChanged, localVolume);
+ }
+}
+exports.CallFeed = CallFeed; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/groupCall.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/groupCall.js
new file mode 100644
index 0000000000..ac6da49d3b
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/groupCall.js
@@ -0,0 +1,1213 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.OtherUserSpeakingError = exports.GroupCallUnknownDeviceError = exports.GroupCallType = exports.GroupCallTerminationReason = exports.GroupCallStatsReportEvent = exports.GroupCallState = exports.GroupCallIntent = exports.GroupCallEvent = exports.GroupCallErrorCode = exports.GroupCallError = exports.GroupCall = void 0;
+var _typedEventEmitter = require("../models/typed-event-emitter");
+var _callFeed = require("./callFeed");
+var _call = require("./call");
+var _roomState = require("../models/room-state");
+var _logger = require("../logger");
+var _ReEmitter = require("../ReEmitter");
+var _callEventTypes = require("./callEventTypes");
+var _event = require("../@types/event");
+var _callEventHandler = require("./callEventHandler");
+var _groupCallEventHandler = require("./groupCallEventHandler");
+var _utils = require("../utils");
+var _groupCallStats = require("./stats/groupCallStats");
+var _statsReport = require("./stats/statsReport");
+function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
+function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); }
/**
 * The `m.intent` values a group call state event may declare.
 */
let GroupCallIntent = {
  Ring: "m.ring",
  Prompt: "m.prompt",
  Room: "m.room"
};
+exports.GroupCallIntent = GroupCallIntent;
/**
 * The `m.type` of a group call: video or voice-only.
 */
let GroupCallType = {
  Video: "m.video",
  Voice: "m.voice"
};
+exports.GroupCallType = GroupCallType;
/**
 * Reasons recorded in the `m.terminated` field of a group call state event.
 */
let GroupCallTerminationReason = {
  CallEnded: "call_ended"
};
+exports.GroupCallTerminationReason = GroupCallTerminationReason;
+/**
+ * Because event names are just strings, they do need
+ * to be unique over all event types of event emitter.
+ * Some objects could emit more then one set of events.
+ */
/**
 * Event names emitted by GroupCall. Values are namespaced strings so they
 * stay unique across all event types an emitter may carry.
 */
let GroupCallEvent = {
  GroupCallStateChanged: "group_call_state_changed",
  ActiveSpeakerChanged: "active_speaker_changed",
  CallsChanged: "calls_changed",
  UserMediaFeedsChanged: "user_media_feeds_changed",
  ScreenshareFeedsChanged: "screenshare_feeds_changed",
  LocalScreenshareStateChanged: "local_screenshare_state_changed",
  LocalMuteStateChanged: "local_mute_state_changed",
  ParticipantsChanged: "participants_changed",
  Error: "group_call_error"
};
+exports.GroupCallEvent = GroupCallEvent;
/**
 * Event names for the periodic webrtc stats reports emitted by GroupCall.
 */
let GroupCallStatsReportEvent = {
  ConnectionStats: "GroupCall.connection_stats",
  ByteSentStats: "GroupCall.byte_sent_stats",
  SummaryStats: "GroupCall.summary_stats"
};
+exports.GroupCallStatsReportEvent = GroupCallStatsReportEvent;
/**
 * Machine-readable error codes carried by GroupCallError.
 */
let GroupCallErrorCode = {
  NoUserMedia: "no_user_media",
  UnknownDevice: "unknown_device",
  PlaceCallFailed: "place_call_failed"
};
+exports.GroupCallErrorCode = GroupCallErrorCode;
/**
 * Error raised by group-call operations.
 * Nested errors aren't supported here, so any underlying error is folded
 * into the message string instead of being kept as a cause.
 */
class GroupCallError extends Error {
  /**
   * @param {string} code - machine-readable GroupCallErrorCode value
   * @param {string} msg - human-readable description
   * @param {Error} [err] - underlying error, appended to the message
   */
  constructor(code, msg, err) {
    // Fix: both branches of the original if/else duplicated the field
    // initialization; fold them into a single super() expression.
    super(err ? msg + ": " + err : msg);
    this.code = code;
  }
}
+exports.GroupCallError = GroupCallError;
/**
 * Raised when no callable device could be found for a user.
 */
class GroupCallUnknownDeviceError extends GroupCallError {
  /**
   * @param {string} userId - the user for whom no device was found
   */
  constructor(userId) {
    super(GroupCallErrorCode.UnknownDevice, "No device found for " + userId);
    this.userId = userId;
  }
}
+exports.GroupCallUnknownDeviceError = GroupCallUnknownDeviceError;
/**
 * Raised on PTT calls when unmuting is refused because another
 * participant currently holds the floor.
 */
class OtherUserSpeakingError extends Error {
  constructor() {
    super("Cannot unmute: another user is speaking");
  }
}
+exports.OtherUserSpeakingError = OtherUserSpeakingError;
/**
 * Lifecycle states of a GroupCall, from "no local media yet" through
 * "entered" to "ended".
 */
let GroupCallState = {
  LocalCallFeedUninitialized: "local_call_feed_uninitialized",
  InitializingLocalCallFeed: "initializing_local_call_feed",
  LocalCallFeedInitialized: "local_call_feed_initialized",
  Entered: "entered",
  Ended: "ended"
};
+exports.GroupCallState = GroupCallState;
+const DEVICE_TIMEOUT = 1000 * 60 * 60; // 1 hour
+
/**
 * Resolves the user ID on the far side of a call: the opponent member if
 * known, otherwise the invitee, otherwise null.
 * @param {MatrixCall} call - the 1:1 call to inspect
 * @returns {string|null} the remote user ID, or null if unknown
 */
function getCallUserId(call) {
  const opponent = call.getOpponentMember();
  if (opponent?.userId) {
    return opponent.userId;
  }
  return call.invitee || null;
}
+class GroupCall extends _typedEventEmitter.TypedEventEmitter {
  /**
   * @param client - client used for signalling, media and room state
   * @param room - the room this group call belongs to
   * @param type - GroupCallType (m.video / m.voice)
   * @param isPtt - whether this is a push-to-talk call
   * @param intent - GroupCallIntent (m.ring / m.prompt / m.room)
   * @param groupCallId - existing call ID; a fresh one is generated if omitted
   * @param dataChannelsEnabled - whether to open RTC data channels
   * @param dataChannelOptions - options for the data channels, if enabled
   * @param isCallWithoutVideoAndAudio - allow joining with no capture devices
   */
  constructor(client, room, type, isPtt, intent, groupCallId, dataChannelsEnabled, dataChannelOptions, isCallWithoutVideoAndAudio) {
    super();
    this.client = client;
    this.room = room;
    this.type = type;
    this.isPtt = isPtt;
    this.intent = intent;
    this.dataChannelsEnabled = dataChannelsEnabled;
    this.dataChannelOptions = dataChannelOptions;
    // Config
    _defineProperty(this, "activeSpeakerInterval", 1000);
    _defineProperty(this, "retryCallInterval", 5000);
    _defineProperty(this, "participantTimeout", 1000 * 15);
    _defineProperty(this, "pttMaxTransmitTime", 1000 * 20);
    _defineProperty(this, "activeSpeaker", void 0);
    _defineProperty(this, "localCallFeed", void 0);
    _defineProperty(this, "localScreenshareFeed", void 0);
    _defineProperty(this, "localDesktopCapturerSourceId", void 0);
    _defineProperty(this, "userMediaFeeds", []);
    _defineProperty(this, "screenshareFeeds", []);
    _defineProperty(this, "groupCallId", void 0);
    _defineProperty(this, "allowCallWithoutVideoAndAudio", void 0);
    _defineProperty(this, "calls", new Map());
    // user_id -> device_id -> MatrixCall
    _defineProperty(this, "callHandlers", new Map());
    // user_id -> device_id -> ICallHandlers
    _defineProperty(this, "activeSpeakerLoopInterval", void 0);
    _defineProperty(this, "retryCallLoopInterval", void 0);
    _defineProperty(this, "retryCallCounts", new Map());
    // user_id -> device_id -> count
    _defineProperty(this, "reEmitter", void 0);
    _defineProperty(this, "transmitTimer", null);
    _defineProperty(this, "participantsExpirationTimer", null);
    _defineProperty(this, "resendMemberStateTimer", null);
    _defineProperty(this, "initWithAudioMuted", false);
    _defineProperty(this, "initWithVideoMuted", false);
    _defineProperty(this, "initCallFeedPromise", void 0);
    _defineProperty(this, "stats", void 0);
    /**
     * Configure default webrtc stats collection interval in ms
     * Disable collecting webrtc stats by setting interval to 0
     */
    _defineProperty(this, "statsCollectIntervalTime", 0);
    // Stats forwarders: re-emit gatherer reports as GroupCall events.
    _defineProperty(this, "onConnectionStats", report => {
      this.emit(GroupCallStatsReportEvent.ConnectionStats, {
        report
      });
    });
    _defineProperty(this, "onByteSentStats", report => {
      this.emit(GroupCallStatsReportEvent.ByteSentStats, {
        report
      });
    });
    _defineProperty(this, "onSummaryStats", report => {
      this.emit(GroupCallStatsReportEvent.SummaryStats, {
        report
      });
    });
    _defineProperty(this, "_state", GroupCallState.LocalCallFeedUninitialized);
    _defineProperty(this, "_participants", new Map());
    _defineProperty(this, "_creationTs", null);
    _defineProperty(this, "_enteredViaAnotherSession", false);
    /*
     * Call Setup
     *
     * There are two different paths for calls to be created:
     * 1. Incoming calls triggered by the Call.incoming event.
     * 2. Outgoing calls to the initial members of a room or new members
     *    as they are observed by the RoomState.members event.
     */
    _defineProperty(this, "onIncomingCall", newCall => {
      // The incoming calls may be for another room, which we will ignore.
      if (newCall.roomId !== this.room.roomId) {
        return;
      }
      if (newCall.state !== _call.CallState.Ringing) {
        _logger.logger.warn(`GroupCall ${this.groupCallId} onIncomingCall() incoming call no longer in ringing state - ignoring`);
        return;
      }
      if (!newCall.groupCallId || newCall.groupCallId !== this.groupCallId) {
        _logger.logger.log(`GroupCall ${this.groupCallId} onIncomingCall() ignored because it doesn't match the current group call`);
        newCall.reject();
        return;
      }
      const opponentUserId = newCall.getOpponentMember()?.userId;
      if (opponentUserId === undefined) {
        _logger.logger.warn(`GroupCall ${this.groupCallId} onIncomingCall() incoming call with no member - ignoring`);
        return;
      }
      const deviceMap = this.calls.get(opponentUserId) ?? new Map();
      const prevCall = deviceMap.get(newCall.getOpponentDeviceId());
      // Ignore duplicate notifications for a call we already track.
      if (prevCall?.callId === newCall.callId) return;
      _logger.logger.log(`GroupCall ${this.groupCallId} onIncomingCall() incoming call (userId=${opponentUserId}, callId=${newCall.callId})`);
      if (prevCall) prevCall.hangup(_call.CallErrorCode.Replaced, false);
      // We must do this before we start initialising / answering the call as we
      // need to know it is the active call for this user+deviceId and to not ignore
      // events from it.
      deviceMap.set(newCall.getOpponentDeviceId(), newCall);
      this.calls.set(opponentUserId, deviceMap);
      this.initCall(newCall);
      const feeds = this.getLocalFeeds().map(feed => feed.clone());
      if (!this.callExpected(newCall)) {
        // Disable our tracks for users not explicitly participating in the
        // call but trying to receive the feeds
        for (const feed of feeds) {
          (0, _call.setTracksEnabled)(feed.stream.getAudioTracks(), false);
          (0, _call.setTracksEnabled)(feed.stream.getVideoTracks(), false);
        }
      }
      newCall.answerWithCallFeeds(feeds);
      this.emit(GroupCallEvent.CallsChanged, this.calls);
    });
    // Periodically retries outgoing calls (up to 3 attempts per device)
    // whose session doesn't match the advertised participant session.
    _defineProperty(this, "onRetryCallLoop", () => {
      let needsRetry = false;
      for (const [{
        userId
      }, participantMap] of this.participants) {
        const callMap = this.calls.get(userId);
        let retriesMap = this.retryCallCounts.get(userId);
        for (const [deviceId, participant] of participantMap) {
          const call = callMap?.get(deviceId);
          const retries = retriesMap?.get(deviceId) ?? 0;
          if (call?.getOpponentSessionId() !== participant.sessionId && this.wantsOutgoingCall(userId, deviceId) && retries < 3) {
            if (retriesMap === undefined) {
              retriesMap = new Map();
              this.retryCallCounts.set(userId, retriesMap);
            }
            retriesMap.set(deviceId, retries + 1);
            needsRetry = true;
          }
        }
      }
      if (needsRetry) this.placeOutgoingCalls();
    });
    // Syncs the per-user usermedia/screenshare feed lists whenever a
    // member call's remote feeds change.
    _defineProperty(this, "onCallFeedsChanged", call => {
      const opponentMemberId = getCallUserId(call);
      const opponentDeviceId = call.getOpponentDeviceId();
      if (!opponentMemberId) {
        throw new Error("Cannot change call feeds without user id");
      }
      const currentUserMediaFeed = this.getUserMediaFeed(opponentMemberId, opponentDeviceId);
      const remoteUsermediaFeed = call.remoteUsermediaFeed;
      const remoteFeedChanged = remoteUsermediaFeed !== currentUserMediaFeed;
      const deviceMap = this.calls.get(opponentMemberId);
      const currentCallForUserDevice = deviceMap?.get(opponentDeviceId);
      if (currentCallForUserDevice?.callId !== call.callId) {
        // the call in question is not the current call for this user/deviceId
        // so ignore feed events from it otherwise we'll remove our real feeds
        return;
      }
      if (remoteFeedChanged) {
        if (!currentUserMediaFeed && remoteUsermediaFeed) {
          this.addUserMediaFeed(remoteUsermediaFeed);
        } else if (currentUserMediaFeed && remoteUsermediaFeed) {
          this.replaceUserMediaFeed(currentUserMediaFeed, remoteUsermediaFeed);
        } else if (currentUserMediaFeed && !remoteUsermediaFeed) {
          this.removeUserMediaFeed(currentUserMediaFeed);
        }
      }
      const currentScreenshareFeed = this.getScreenshareFeed(opponentMemberId, opponentDeviceId);
      const remoteScreensharingFeed = call.remoteScreensharingFeed;
      const remoteScreenshareFeedChanged = remoteScreensharingFeed !== currentScreenshareFeed;
      if (remoteScreenshareFeedChanged) {
        if (!currentScreenshareFeed && remoteScreensharingFeed) {
          this.addScreenshareFeed(remoteScreensharingFeed);
        } else if (currentScreenshareFeed && remoteScreensharingFeed) {
          this.replaceScreenshareFeed(currentScreenshareFeed, remoteScreensharingFeed);
        } else if (currentScreenshareFeed && !remoteScreensharingFeed) {
          this.removeScreenshareFeed(currentScreenshareFeed);
        }
      }
    });
    // Propagates our local mute state onto member calls and resets retry
    // counters when a call connects.
    _defineProperty(this, "onCallStateChanged", (call, state, _oldState) => {
      if (state === _call.CallState.Ended) return;
      // NOTE(review): assumes localCallFeed is set once member calls are
      // live — TODO confirm against callers.
      const audioMuted = this.localCallFeed.isAudioMuted();
      if (call.localUsermediaStream && call.isMicrophoneMuted() !== audioMuted) {
        call.setMicrophoneMuted(audioMuted);
      }
      const videoMuted = this.localCallFeed.isVideoMuted();
      if (call.localUsermediaStream && call.isLocalVideoMuted() !== videoMuted) {
        call.setLocalVideoMuted(videoMuted);
      }
      const opponentUserId = call.getOpponentMember()?.userId;
      if (state === _call.CallState.Connected && opponentUserId) {
        const retriesMap = this.retryCallCounts.get(opponentUserId);
        retriesMap?.delete(call.getOpponentDeviceId());
        if (retriesMap?.size === 0) this.retryCallCounts.delete(opponentUserId);
      }
    });
    // Removes a hung-up call from the call map (unless it was replaced).
    _defineProperty(this, "onCallHangup", call => {
      if (call.hangupReason === _call.CallErrorCode.Replaced) return;
      // NOTE(review): getMember(call.invitee) may return null if the
      // invitee is no longer a room member — TODO confirm.
      const opponentUserId = call.getOpponentMember()?.userId ?? this.room.getMember(call.invitee).userId;
      const deviceMap = this.calls.get(opponentUserId);

      // Sanity check that this call is in fact in the map
      if (deviceMap?.get(call.getOpponentDeviceId()) === call) {
        this.disposeCall(call, call.hangupReason);
        deviceMap.delete(call.getOpponentDeviceId());
        if (deviceMap.size === 0) this.calls.delete(opponentUserId);
        this.emit(GroupCallEvent.CallsChanged, this.calls);
      }
    });
    // Swaps a call that was replaced (e.g. by glare resolution) for its
    // replacement and initialises the new call.
    _defineProperty(this, "onCallReplaced", (prevCall, newCall) => {
      const opponentUserId = prevCall.getOpponentMember().userId;
      let deviceMap = this.calls.get(opponentUserId);
      if (deviceMap === undefined) {
        deviceMap = new Map();
        this.calls.set(opponentUserId, deviceMap);
      }
      prevCall.hangup(_call.CallErrorCode.Replaced, false);
      this.initCall(newCall);
      deviceMap.set(prevCall.getOpponentDeviceId(), newCall);
      this.emit(GroupCallEvent.CallsChanged, this.calls);
    });
    // Picks the feed with the highest average speaking volume as the
    // active speaker; runs once per activeSpeakerInterval.
    _defineProperty(this, "onActiveSpeakerLoop", () => {
      let topAvg = undefined;
      let nextActiveSpeaker = undefined;
      for (const callFeed of this.userMediaFeeds) {
        // Skip our own feed unless we're the only participant.
        if (callFeed.isLocal() && this.userMediaFeeds.length > 1) continue;
        // NOTE(review): reduce() has no initial value, so the first sample
        // is used as-is and is NOT clamped by SPEAKING_THRESHOLD — looks
        // unintended; confirm against upstream matrix-js-sdk.
        const total = callFeed.speakingVolumeSamples.reduce((acc, volume) => acc + Math.max(volume, _callFeed.SPEAKING_THRESHOLD));
        const avg = total / callFeed.speakingVolumeSamples.length;
        if (!topAvg || avg > topAvg) {
          topAvg = avg;
          nextActiveSpeaker = callFeed;
        }
      }
      if (nextActiveSpeaker && this.activeSpeaker !== nextActiveSpeaker && topAvg && topAvg > _callFeed.SPEAKING_THRESHOLD) {
        this.activeSpeaker = nextActiveSpeaker;
        this.emit(GroupCallEvent.ActiveSpeakerChanged, this.activeSpeaker);
      }
    });
    _defineProperty(this, "onRoomState", () => this.updateParticipants());
    _defineProperty(this, "onParticipantsChanged", () => {
      // Re-run setTracksEnabled on all calls, so that participants that just
      // left get denied access to our media, and participants that just
      // joined get granted access
      this.forEachCall(call => {
        const expected = this.callExpected(call);
        for (const feed of call.getLocalFeeds()) {
          (0, _call.setTracksEnabled)(feed.stream.getAudioTracks(), !feed.isAudioMuted() && expected);
          (0, _call.setTracksEnabled)(feed.stream.getVideoTracks(), !feed.isVideoMuted() && expected);
        }
      });
      if (this.state === GroupCallState.Entered) this.placeOutgoingCalls();
    });
    _defineProperty(this, "onStateChanged", (newState, oldState) => {
      if (newState === GroupCallState.Entered || oldState === GroupCallState.Entered || newState === GroupCallState.Ended) {
        // We either entered, left, or ended the call
        this.updateParticipants();
        this.updateMemberState().catch(e => _logger.logger.error(`GroupCall ${this.groupCallId} onStateChanged() failed to update member state devices"`, e));
      }
    });
    _defineProperty(this, "onLocalFeedsChanged", () => {
      if (this.state === GroupCallState.Entered) {
        this.updateMemberState().catch(e => _logger.logger.error(`GroupCall ${this.groupCallId} onLocalFeedsChanged() failed to update member state feeds`, e));
      }
    });
    this.reEmitter = new _ReEmitter.ReEmitter(this);
    this.groupCallId = groupCallId ?? (0, _call.genCallID)();
    // Read the creation timestamp from an existing state event, if any.
    this.creationTs = room.currentState.getStateEvents(_event.EventType.GroupCallPrefix, this.groupCallId)?.getTs() ?? null;
    this.updateParticipants();
    room.on(_roomState.RoomStateEvent.Update, this.onRoomState);
    this.on(GroupCallEvent.ParticipantsChanged, this.onParticipantsChanged);
    this.on(GroupCallEvent.GroupCallStateChanged, this.onStateChanged);
    this.on(GroupCallEvent.LocalScreenshareStateChanged, this.onLocalFeedsChanged);
    this.allowCallWithoutVideoAndAudio = !!isCallWithoutVideoAndAudio;
  }
+ async create() {
+ this.creationTs = Date.now();
+ this.client.groupCallEventHandler.groupCalls.set(this.room.roomId, this);
+ this.client.emit(_groupCallEventHandler.GroupCallEventHandlerEvent.Outgoing, this);
+ const groupCallState = {
+ "m.intent": this.intent,
+ "m.type": this.type,
+ "io.element.ptt": this.isPtt,
+ // TODO: Specify data-channels better
+ "dataChannelsEnabled": this.dataChannelsEnabled,
+ "dataChannelOptions": this.dataChannelsEnabled ? this.dataChannelOptions : undefined
+ };
+ await this.client.sendStateEvent(this.room.roomId, _event.EventType.GroupCallPrefix, groupCallState, this.groupCallId);
+ return this;
+ }
+ /**
+ * The group call's state.
+ */
+ get state() {
+ return this._state;
+ }
+ set state(value) {
+ const prevValue = this._state;
+ if (value !== prevValue) {
+ this._state = value;
+ this.emit(GroupCallEvent.GroupCallStateChanged, value, prevValue);
+ }
+ }
+ /**
+ * The current participants in the call, as a map from members to device IDs
+ * to participant info.
+ */
+ get participants() {
+ return this._participants;
+ }
+ set participants(value) {
+ const prevValue = this._participants;
+ const participantStateEqual = (x, y) => x.sessionId === y.sessionId && x.screensharing === y.screensharing;
+ const deviceMapsEqual = (x, y) => (0, _utils.mapsEqual)(x, y, participantStateEqual);
+
+ // Only update if the map actually changed
+ if (!(0, _utils.mapsEqual)(value, prevValue, deviceMapsEqual)) {
+ this._participants = value;
+ this.emit(GroupCallEvent.ParticipantsChanged, value);
+ }
+ }
  /**
   * The timestamp at which the call was created, or null if it has not yet
   * been created.
   */
  get creationTs() {
    return this._creationTs;
  }
  set creationTs(value) {
    this._creationTs = value;
  }
  /**
   * Whether the local device has entered this call via another session, such
   * as a widget.
   */
  get enteredViaAnotherSession() {
    return this._enteredViaAnotherSession;
  }
  set enteredViaAnotherSession(value) {
    this._enteredViaAnotherSession = value;
    // Entering/leaving via another session changes our own membership info.
    this.updateParticipants();
  }
+
+ /**
+ * Executes the given callback on all calls in this group call.
+ * @param f - The callback.
+ */
+ forEachCall(f) {
+ for (const deviceMap of this.calls.values()) {
+ for (const call of deviceMap.values()) f(call);
+ }
+ }
+ getLocalFeeds() {
+ const feeds = [];
+ if (this.localCallFeed) feeds.push(this.localCallFeed);
+ if (this.localScreenshareFeed) feeds.push(this.localScreenshareFeed);
+ return feeds;
+ }
+ hasLocalParticipant() {
+ return this.participants.get(this.room.getMember(this.client.getUserId()))?.has(this.client.getDeviceId()) ?? false;
+ }
+
+ /**
+ * Determines whether the given call is one that we were expecting to exist
+ * given our knowledge of who is participating in the group call.
+ */
+ callExpected(call) {
+ const userId = getCallUserId(call);
+ const member = userId === null ? null : this.room.getMember(userId);
+ const deviceId = call.getOpponentDeviceId();
+ return member !== null && deviceId !== undefined && this.participants.get(member)?.get(deviceId) !== undefined;
+ }
  /**
   * Captures the local usermedia stream and moves the call into the
   * LocalCallFeedInitialized state. Concurrent callers share the same
   * in-flight promise rather than starting a second capture.
   * @throws if the call is not in the LocalCallFeedUninitialized state
   */
  async initLocalCallFeed() {
    if (this.state !== GroupCallState.LocalCallFeedUninitialized) {
      throw new Error(`Cannot initialize local call feed in the "${this.state}" state.`);
    }
    this.state = GroupCallState.InitializingLocalCallFeed;

    // wraps the real method to serialise calls, because we don't want to try starting
    // multiple call feeds at once
    if (this.initCallFeedPromise) return this.initCallFeedPromise;
    try {
      this.initCallFeedPromise = this.initLocalCallFeedInternal();
      await this.initCallFeedPromise;
    } finally {
      // Always clear the in-flight marker, even on failure.
      this.initCallFeedPromise = undefined;
    }
  }
  /**
   * Does the actual local media capture for initLocalCallFeed(): obtains a
   * usermedia stream (or an empty one, if calls without devices are
   * allowed), wraps it in a CallFeed with the requested initial mute
   * state, and registers it as our local feed.
   * @throws if media access fails (and empty calls are not allowed), or if
   *   the call was disposed while the stream was being gathered
   */
  async initLocalCallFeedInternal() {
    _logger.logger.log(`GroupCall ${this.groupCallId} initLocalCallFeedInternal() running`);
    let stream;
    try {
      stream = await this.client.getMediaHandler().getUserMediaStream(true, this.type === GroupCallType.Video);
    } catch (error) {
      // If is allowed to join a call without a media stream, then we
      // don't throw an error here. But we need an empty Local Feed to establish
      // a connection later.
      if (this.allowCallWithoutVideoAndAudio) {
        stream = new MediaStream();
      } else {
        this.state = GroupCallState.LocalCallFeedUninitialized;
        throw error;
      }
    }

    // The call could've been disposed while we were waiting, and could
    // also have been started back up again (hello, React 18) so if we're
    // still in this 'initializing' state, carry on, otherwise bail.
    if (this._state !== GroupCallState.InitializingLocalCallFeed) {
      this.client.getMediaHandler().stopUserMediaStream(stream);
      throw new Error("Group call disposed while gathering media stream");
    }
    const callFeed = new _callFeed.CallFeed({
      client: this.client,
      roomId: this.room.roomId,
      userId: this.client.getUserId(),
      deviceId: this.client.getDeviceId(),
      stream,
      purpose: _callEventTypes.SDPStreamMetadataPurpose.Usermedia,
      // PTT calls always start muted; otherwise honor the requested
      // initial mute state, and treat a track-less stream as muted.
      audioMuted: this.initWithAudioMuted || stream.getAudioTracks().length === 0 || this.isPtt,
      videoMuted: this.initWithVideoMuted || stream.getVideoTracks().length === 0
    });
    (0, _call.setTracksEnabled)(stream.getAudioTracks(), !callFeed.isAudioMuted());
    (0, _call.setTracksEnabled)(stream.getVideoTracks(), !callFeed.isVideoMuted());
    this.localCallFeed = callFeed;
    this.addUserMediaFeed(callFeed);
    this.state = GroupCallState.LocalCallFeedInitialized;
  }
+ async updateLocalUsermediaStream(stream) {
+ if (this.localCallFeed) {
+ const oldStream = this.localCallFeed.stream;
+ this.localCallFeed.setNewStream(stream);
+ const micShouldBeMuted = this.localCallFeed.isAudioMuted();
+ const vidShouldBeMuted = this.localCallFeed.isVideoMuted();
+ _logger.logger.log(`GroupCall ${this.groupCallId} updateLocalUsermediaStream() (oldStreamId=${oldStream.id}, newStreamId=${stream.id}, micShouldBeMuted=${micShouldBeMuted}, vidShouldBeMuted=${vidShouldBeMuted})`);
+ (0, _call.setTracksEnabled)(stream.getAudioTracks(), !micShouldBeMuted);
+ (0, _call.setTracksEnabled)(stream.getVideoTracks(), !vidShouldBeMuted);
+ this.client.getMediaHandler().stopUserMediaStream(oldStream);
+ }
+ }
  /**
   * Enters the call: initializes the local feed if needed, starts
   * listening for incoming calls (processing any already pending), and
   * starts the retry and active-speaker loops.
   * @throws if the call is in any state other than
   *   LocalCallFeedUninitialized or LocalCallFeedInitialized
   */
  async enter() {
    if (this.state === GroupCallState.LocalCallFeedUninitialized) {
      await this.initLocalCallFeed();
    } else if (this.state !== GroupCallState.LocalCallFeedInitialized) {
      throw new Error(`Cannot enter call in the "${this.state}" state`);
    }
    _logger.logger.log(`GroupCall ${this.groupCallId} enter() running`);
    this.state = GroupCallState.Entered;
    this.client.on(_callEventHandler.CallEventHandlerEvent.Incoming, this.onIncomingCall);
    // Process any incoming calls that arrived before we entered.
    for (const call of this.client.callEventHandler.calls.values()) {
      this.onIncomingCall(call);
    }
    this.retryCallLoopInterval = setInterval(this.onRetryCallLoop, this.retryCallInterval);
    this.activeSpeaker = undefined;
    // Run the loop once immediately, then on a fixed interval.
    this.onActiveSpeakerLoop();
    this.activeSpeakerLoopInterval = setInterval(this.onActiveSpeakerLoop, this.activeSpeakerInterval);
  }
+ dispose() {
+ if (this.localCallFeed) {
+ this.removeUserMediaFeed(this.localCallFeed);
+ this.localCallFeed = undefined;
+ }
+ if (this.localScreenshareFeed) {
+ this.client.getMediaHandler().stopScreensharingStream(this.localScreenshareFeed.stream);
+ this.removeScreenshareFeed(this.localScreenshareFeed);
+ this.localScreenshareFeed = undefined;
+ this.localDesktopCapturerSourceId = undefined;
+ }
+ this.client.getMediaHandler().stopAllStreams();
+ if (this.transmitTimer !== null) {
+ clearTimeout(this.transmitTimer);
+ this.transmitTimer = null;
+ }
+ if (this.retryCallLoopInterval !== undefined) {
+ clearInterval(this.retryCallLoopInterval);
+ this.retryCallLoopInterval = undefined;
+ }
+ if (this.participantsExpirationTimer !== null) {
+ clearTimeout(this.participantsExpirationTimer);
+ this.participantsExpirationTimer = null;
+ }
+ if (this.state !== GroupCallState.Entered) {
+ return;
+ }
+ this.forEachCall(call => call.hangup(_call.CallErrorCode.UserHangup, false));
+ this.activeSpeaker = undefined;
+ clearInterval(this.activeSpeakerLoopInterval);
+ this.retryCallCounts.clear();
+ clearInterval(this.retryCallLoopInterval);
+ this.client.removeListener(_callEventHandler.CallEventHandlerEvent.Incoming, this.onIncomingCall);
+ this.stats?.stop();
+ }
  /**
   * Leaves the call: tears down all local resources and returns the call
   * to the LocalCallFeedUninitialized state so it can be re-entered.
   */
  leave() {
    this.dispose();
    this.state = GroupCallState.LocalCallFeedUninitialized;
  }
  /**
   * Ends the call for everyone: disposes local resources, deregisters the
   * call from the client, and (optionally) marks the group call state
   * event as terminated.
   * @param emitStateEvent - whether to send the "m.terminated" state event
   */
  async terminate(emitStateEvent = true) {
    this.dispose();
    this.room.off(_roomState.RoomStateEvent.Update, this.onRoomState);
    this.client.groupCallEventHandler.groupCalls.delete(this.room.roomId);
    this.client.emit(_groupCallEventHandler.GroupCallEventHandlerEvent.Ended, this);
    this.state = GroupCallState.Ended;
    if (emitStateEvent) {
      // assumes the group call state event still exists in room state —
      // TODO confirm (getStateEvents may return undefined).
      const existingStateEvent = this.room.currentState.getStateEvents(_event.EventType.GroupCallPrefix, this.groupCallId);
      await this.client.sendStateEvent(this.room.roomId, _event.EventType.GroupCallPrefix, _objectSpread(_objectSpread({}, existingStateEvent.getContent()), {}, {
        "m.terminated": GroupCallTerminationReason.CallEnded
      }), this.groupCallId);
    }
  }
+
+ /*
+ * Local Usermedia
+ */
+
+ isLocalVideoMuted() {
+ if (this.localCallFeed) {
+ return this.localCallFeed.isVideoMuted();
+ }
+ return true;
+ }
+ isMicrophoneMuted() {
+ if (this.localCallFeed) {
+ return this.localCallFeed.isAudioMuted();
+ }
+ return true;
+ }
+
  /**
   * Sets the mute state of the local participants's microphone.
   * For PTT calls, a max-transmit timer is armed on unmute and metadata
   * updates are sent before unmuting (so the floor is claimed first).
   * @param muted - Whether to mute the microphone
   * @returns Whether muting/unmuting was successful
   */
  async setMicrophoneMuted(muted) {
    // hasAudioDevice can block indefinitely if the window has lost focus,
    // and it doesn't make much sense to keep a device from being muted, so
    // we always allow muted = true changes to go through
    if (!muted && !(await this.client.getMediaHandler().hasAudioDevice())) {
      return false;
    }
    const sendUpdatesBefore = !muted && this.isPtt;

    // set a timer for the maximum transmit time on PTT calls
    if (this.isPtt) {
      // Set or clear the max transmit timer
      if (!muted && this.isMicrophoneMuted()) {
        this.transmitTimer = setTimeout(() => {
          this.setMicrophoneMuted(true);
        }, this.pttMaxTransmitTime);
      } else if (muted && !this.isMicrophoneMuted()) {
        if (this.transmitTimer !== null) clearTimeout(this.transmitTimer);
        this.transmitTimer = null;
      }
    }
    this.forEachCall(call => call.localUsermediaFeed?.setAudioVideoMuted(muted, null));
    const sendUpdates = async () => {
      const updates = [];
      this.forEachCall(call => updates.push(call.sendMetadataUpdate()));
      await Promise.all(updates).catch(e => _logger.logger.info(`GroupCall ${this.groupCallId} setMicrophoneMuted() failed to send some metadata updates`, e));
    };
    if (sendUpdatesBefore) await sendUpdates();
    if (this.localCallFeed) {
      _logger.logger.log(`GroupCall ${this.groupCallId} setMicrophoneMuted() (streamId=${this.localCallFeed.stream.id}, muted=${muted})`);
      // Unmuting may require requesting device permission first.
      const hasPermission = await this.checkAudioPermissionIfNecessary(muted);
      if (!hasPermission) {
        return false;
      }
      this.localCallFeed.setAudioVideoMuted(muted, null);
      // I don't believe its actually necessary to enable these tracks: they
      // are the one on the GroupCall's own CallFeed and are cloned before being
      // given to any of the actual calls, so these tracks don't actually go
      // anywhere. Let's do it anyway to avoid confusion.
      (0, _call.setTracksEnabled)(this.localCallFeed.stream.getAudioTracks(), !muted);
    } else {
      _logger.logger.log(`GroupCall ${this.groupCallId} setMicrophoneMuted() no stream muted (muted=${muted})`);
      // No feed yet — remember the requested state for initialization.
      this.initWithAudioMuted = muted;
    }
    this.forEachCall(call => (0, _call.setTracksEnabled)(call.localUsermediaFeed.stream.getAudioTracks(), !muted && this.callExpected(call)));
    this.emit(GroupCallEvent.LocalMuteStateChanged, muted, this.isLocalVideoMuted());
    if (!sendUpdatesBefore) await sendUpdates();
    return true;
  }
+
  /**
   * If joining a call without camera/microphone is allowed, device
   * permissions may not have been requested yet. A local feed with no
   * audio track is taken to mean the permissions are still unchecked, so
   * on unmute we request a stream here to trigger the permission prompt.
   *
   * `this.client.getMediaHandler().getUserMediaStream` clones the current
   * stream, so it should only be called when no audio track exists yet.
   * This is a compromise: ideally the access rights would always be
   * queried before the call.
   * @param muted - the requested mute state; only relevant when unmuting
   * @returns whether we have (or just obtained) permission to capture audio
   */
  async checkAudioPermissionIfNecessary(muted) {
    // We needed this here to avoid an error in case user join a call without a device.
    try {
      if (!muted && this.localCallFeed && !this.localCallFeed.hasAudioTrack) {
        const stream = await this.client.getMediaHandler().getUserMediaStream(true, !this.localCallFeed.isVideoMuted());
        if (stream?.getTracks().length === 0) {
          // if case permission denied to get a stream stop this here
          /* istanbul ignore next */
          _logger.logger.log(`GroupCall ${this.groupCallId} setMicrophoneMuted() no device to receive local stream, muted=${muted}`);
          return false;
        }
      }
    } catch (e) {
      /* istanbul ignore next */
      _logger.logger.log(`GroupCall ${this.groupCallId} setMicrophoneMuted() no device or permission to receive local stream, muted=${muted}`);
      return false;
    }
    return true;
  }
+
  /**
   * Sets the mute state of the local participant's video.
   * @param muted - Whether to mute the video
   * @returns Whether muting/unmuting was successful
   */
  async setLocalVideoMuted(muted) {
    // hasVideoDevice can block indefinitely if the window has lost focus,
    // and it doesn't make much sense to keep a device from being muted, so
    // we always allow muted = true changes to go through
    if (!muted && !(await this.client.getMediaHandler().hasVideoDevice())) {
      return false;
    }
    if (this.localCallFeed) {
      /* istanbul ignore next */
      _logger.logger.log(`GroupCall ${this.groupCallId} setLocalVideoMuted() (stream=${this.localCallFeed.stream.id}, muted=${muted})`);
      try {
        // Re-acquire the user media stream with the new video state and
        // swap it into the local feed and all existing calls.
        const stream = await this.client.getMediaHandler().getUserMediaStream(true, !muted);
        await this.updateLocalUsermediaStream(stream);
        this.localCallFeed.setAudioVideoMuted(null, muted);
        (0, _call.setTracksEnabled)(this.localCallFeed.stream.getVideoTracks(), !muted);
      } catch (_) {
        // No permission to video device
        /* istanbul ignore next */
        _logger.logger.log(`GroupCall ${this.groupCallId} setLocalVideoMuted() no device or permission to receive local stream, muted=${muted}`);
        return false;
      }
    } else {
      // No stream yet: remember the desired state for when we init media.
      _logger.logger.log(`GroupCall ${this.groupCallId} setLocalVideoMuted() no stream muted (muted=${muted})`);
      this.initWithVideoMuted = muted;
    }
    const updates = [];
    this.forEachCall(call => updates.push(call.setLocalVideoMuted(muted)));
    await Promise.all(updates);

    // We setTracksEnabled again, independently from the call doing it
    // internally, since we might not be expecting the call
    this.forEachCall(call => (0, _call.setTracksEnabled)(call.localUsermediaFeed.stream.getVideoTracks(), !muted && this.callExpected(call)));
    this.emit(GroupCallEvent.LocalMuteStateChanged, this.isMicrophoneMuted(), muted);
    return true;
  }
  /**
   * Starts or stops sharing the local screen with the other participants.
   * @param enabled - Whether screensharing should be enabled.
   * @param opts - Options forwarded to the media handler (e.g.
   *     desktopCapturerSourceId, throwOnFail).
   * @returns The resulting screensharing state (true when enabled).
   */
  async setScreensharingEnabled(enabled, opts = {}) {
    if (enabled === this.isScreensharing()) {
      return enabled;
    }
    if (enabled) {
      try {
        _logger.logger.log(`GroupCall ${this.groupCallId} setScreensharingEnabled() is asking for screensharing permissions`);
        const stream = await this.client.getMediaHandler().getScreensharingStream(opts);
        // Stop sharing automatically when the user ends the capture from the
        // browser/OS UI (each track fires "ended").
        for (const track of stream.getTracks()) {
          const onTrackEnded = () => {
            this.setScreensharingEnabled(false);
            track.removeEventListener("ended", onTrackEnded);
          };
          track.addEventListener("ended", onTrackEnded);
        }
        _logger.logger.log(`GroupCall ${this.groupCallId} setScreensharingEnabled() granted screensharing permissions. Setting screensharing enabled on all calls`);
        this.localDesktopCapturerSourceId = opts.desktopCapturerSourceId;
        this.localScreenshareFeed = new _callFeed.CallFeed({
          client: this.client,
          roomId: this.room.roomId,
          userId: this.client.getUserId(),
          deviceId: this.client.getDeviceId(),
          stream,
          purpose: _callEventTypes.SDPStreamMetadataPurpose.Screenshare,
          audioMuted: false,
          videoMuted: false
        });
        this.addScreenshareFeed(this.localScreenshareFeed);
        this.emit(GroupCallEvent.LocalScreenshareStateChanged, true, this.localScreenshareFeed, this.localDesktopCapturerSourceId);

        // TODO: handle errors
        this.forEachCall(call => call.pushLocalFeed(this.localScreenshareFeed.clone()));
        return true;
      } catch (error) {
        if (opts.throwOnFail) throw error;
        _logger.logger.error(`GroupCall ${this.groupCallId} setScreensharingEnabled() enabling screensharing error`, error);
        this.emit(GroupCallEvent.Error, new GroupCallError(GroupCallErrorCode.NoUserMedia, "Failed to get screen-sharing stream: ", error));
        return false;
      }
    } else {
      // Tear down: remove the feed from every call, stop the capture
      // stream, and clear the local screenshare state.
      this.forEachCall(call => {
        if (call.localScreensharingFeed) call.removeLocalFeed(call.localScreensharingFeed);
      });
      this.client.getMediaHandler().stopScreensharingStream(this.localScreenshareFeed.stream);
      this.removeScreenshareFeed(this.localScreenshareFeed);
      this.localScreenshareFeed = undefined;
      this.localDesktopCapturerSourceId = undefined;
      this.emit(GroupCallEvent.LocalScreenshareStateChanged, false, undefined, undefined);
      return false;
    }
  }
+ isScreensharing() {
+ return !!this.localScreenshareFeed;
+ }
+ /**
+ * Determines whether a given participant expects us to call them (versus
+ * them calling us).
+ * @param userId - The participant's user ID.
+ * @param deviceId - The participant's device ID.
+ * @returns Whether we need to place an outgoing call to the participant.
+ */
+ wantsOutgoingCall(userId, deviceId) {
+ const localUserId = this.client.getUserId();
+ const localDeviceId = this.client.getDeviceId();
+ return (
+ // If a user's ID is less than our own, they'll call us
+ userId >= localUserId && (
+ // If this is another one of our devices, compare device IDs to tell whether it'll call us
+ userId !== localUserId || deviceId > localDeviceId)
+ );
+ }
+
  /**
   * Places calls to all participants that we're responsible for calling.
   * Replaces calls whose opponent session has changed, and emits
   * CallsChanged when the call map was modified.
   */
  placeOutgoingCalls() {
    let callsChanged = false;
    for (const [{
      userId
    }, participantMap] of this.participants) {
      const callMap = this.calls.get(userId) ?? new Map();
      for (const [deviceId, participant] of participantMap) {
        const prevCall = callMap.get(deviceId);
        // Only act when the session changed (or there is no call yet) and
        // we're the side responsible for calling this participant.
        if (prevCall?.getOpponentSessionId() !== participant.sessionId && this.wantsOutgoingCall(userId, deviceId)) {
          callsChanged = true;
          if (prevCall !== undefined) {
            _logger.logger.debug(`GroupCall ${this.groupCallId} placeOutgoingCalls() replacing call (userId=${userId}, deviceId=${deviceId}, callId=${prevCall.callId})`);
            prevCall.hangup(_call.CallErrorCode.NewSession, false);
          }
          const newCall = (0, _call.createNewMatrixCall)(this.client, this.room.roomId, {
            invitee: userId,
            opponentDeviceId: deviceId,
            opponentSessionId: participant.sessionId,
            groupCallId: this.groupCallId
          });
          if (newCall === null) {
            _logger.logger.error(`GroupCall ${this.groupCallId} placeOutgoingCalls() failed to create call (userId=${userId}, device=${deviceId})`);
            callMap.delete(deviceId);
          } else {
            this.initCall(newCall);
            callMap.set(deviceId, newCall);
            _logger.logger.debug(`GroupCall ${this.groupCallId} placeOutgoingCalls() placing call (userId=${userId}, deviceId=${deviceId}, sessionId=${participant.sessionId})`);
            newCall.placeCallWithCallFeeds(this.getLocalFeeds().map(feed => feed.clone()), participant.screensharing).then(() => {
              if (this.dataChannelsEnabled) {
                newCall.createDataChannel("datachannel", this.dataChannelOptions);
              }
            }).catch(e => {
              _logger.logger.warn(`GroupCall ${this.groupCallId} placeOutgoingCalls() failed to place call (userId=${userId})`, e);
              if (e instanceof _call.CallError && e.code === GroupCallErrorCode.UnknownDevice) {
                this.emit(GroupCallEvent.Error, e);
              } else {
                this.emit(GroupCallEvent.Error, new GroupCallError(GroupCallErrorCode.PlaceCallFailed, `Failed to place call to ${userId}`));
              }
              // Clean up the failed call, unless it was already replaced.
              newCall.hangup(_call.CallErrorCode.SignallingFailed, false);
              if (callMap.get(deviceId) === newCall) callMap.delete(deviceId);
            });
          }
        }
      }
      if (callMap.size > 0) {
        this.calls.set(userId, callMap);
      } else {
        this.calls.delete(userId);
      }
    }
    if (callsChanged) this.emit(GroupCallEvent.CallsChanged, this.calls);
  }
+
+ /*
+ * Room Member State
+ */
+
+ getMemberStateEvents(userId) {
+ return userId === undefined ? this.room.currentState.getStateEvents(_event.EventType.GroupCallMemberPrefix) : this.room.currentState.getStateEvents(_event.EventType.GroupCallMemberPrefix, userId);
+ }
  /**
   * Wires up event handlers for a newly-created call and records them
   * (keyed by opponent user ID, then device ID) so disposeCall() can
   * unregister them later.
   * @param call - The call to initialise.
   */
  initCall(call) {
    const opponentMemberId = getCallUserId(call);
    if (!opponentMemberId) {
      throw new Error("Cannot init call without user id");
    }
    const onCallFeedsChanged = () => this.onCallFeedsChanged(call);
    const onCallStateChanged = (state, oldState) => this.onCallStateChanged(call, state, oldState);
    const onCallHangup = this.onCallHangup;
    const onCallReplaced = newCall => this.onCallReplaced(call, newCall);
    // Store the handlers per user/device so they can be removed symmetrically.
    let deviceMap = this.callHandlers.get(opponentMemberId);
    if (deviceMap === undefined) {
      deviceMap = new Map();
      this.callHandlers.set(opponentMemberId, deviceMap);
    }
    deviceMap.set(call.getOpponentDeviceId(), {
      onCallFeedsChanged,
      onCallStateChanged,
      onCallHangup,
      onCallReplaced
    });
    call.on(_call.CallEvent.FeedsChanged, onCallFeedsChanged);
    call.on(_call.CallEvent.State, onCallStateChanged);
    call.on(_call.CallEvent.Hangup, onCallHangup);
    call.on(_call.CallEvent.Replaced, onCallReplaced);
    call.isPtt = this.isPtt;
    this.reEmitter.reEmit(call, Object.values(_call.CallEvent));
    call.initStats(this.getGroupCallStats());
    // Process any feeds the call already has.
    onCallFeedsChanged();
  }
+ disposeCall(call, hangupReason) {
+ const opponentMemberId = getCallUserId(call);
+ const opponentDeviceId = call.getOpponentDeviceId();
+ if (!opponentMemberId) {
+ throw new Error("Cannot dispose call without user id");
+ }
+ const deviceMap = this.callHandlers.get(opponentMemberId);
+ const {
+ onCallFeedsChanged,
+ onCallStateChanged,
+ onCallHangup,
+ onCallReplaced
+ } = deviceMap.get(opponentDeviceId);
+ call.removeListener(_call.CallEvent.FeedsChanged, onCallFeedsChanged);
+ call.removeListener(_call.CallEvent.State, onCallStateChanged);
+ call.removeListener(_call.CallEvent.Hangup, onCallHangup);
+ call.removeListener(_call.CallEvent.Replaced, onCallReplaced);
+ deviceMap.delete(opponentMemberId);
+ if (deviceMap.size === 0) this.callHandlers.delete(opponentMemberId);
+ if (call.hangupReason === _call.CallErrorCode.Replaced) {
+ return;
+ }
+ const usermediaFeed = this.getUserMediaFeed(opponentMemberId, opponentDeviceId);
+ if (usermediaFeed) {
+ this.removeUserMediaFeed(usermediaFeed);
+ }
+ const screenshareFeed = this.getScreenshareFeed(opponentMemberId, opponentDeviceId);
+ if (screenshareFeed) {
+ this.removeScreenshareFeed(screenshareFeed);
+ }
+ }
+ /*
+ * UserMedia CallFeed Event Handlers
+ */
+
+ getUserMediaFeed(userId, deviceId) {
+ return this.userMediaFeeds.find(f => f.userId === userId && f.deviceId === deviceId);
+ }
+ addUserMediaFeed(callFeed) {
+ this.userMediaFeeds.push(callFeed);
+ callFeed.measureVolumeActivity(true);
+ this.emit(GroupCallEvent.UserMediaFeedsChanged, this.userMediaFeeds);
+ }
+ replaceUserMediaFeed(existingFeed, replacementFeed) {
+ const feedIndex = this.userMediaFeeds.findIndex(f => f.userId === existingFeed.userId && f.deviceId === existingFeed.deviceId);
+ if (feedIndex === -1) {
+ throw new Error("Couldn't find user media feed to replace");
+ }
+ this.userMediaFeeds.splice(feedIndex, 1, replacementFeed);
+ existingFeed.dispose();
+ replacementFeed.measureVolumeActivity(true);
+ this.emit(GroupCallEvent.UserMediaFeedsChanged, this.userMediaFeeds);
+ }
+ removeUserMediaFeed(callFeed) {
+ const feedIndex = this.userMediaFeeds.findIndex(f => f.userId === callFeed.userId && f.deviceId === callFeed.deviceId);
+ if (feedIndex === -1) {
+ throw new Error("Couldn't find user media feed to remove");
+ }
+ this.userMediaFeeds.splice(feedIndex, 1);
+ callFeed.dispose();
+ this.emit(GroupCallEvent.UserMediaFeedsChanged, this.userMediaFeeds);
+ if (this.activeSpeaker === callFeed) {
+ this.activeSpeaker = this.userMediaFeeds[0];
+ this.emit(GroupCallEvent.ActiveSpeakerChanged, this.activeSpeaker);
+ }
+ }
+ /*
+ * Screenshare Call Feed Event Handlers
+ */
+
+ getScreenshareFeed(userId, deviceId) {
+ return this.screenshareFeeds.find(f => f.userId === userId && f.deviceId === deviceId);
+ }
+ addScreenshareFeed(callFeed) {
+ this.screenshareFeeds.push(callFeed);
+ this.emit(GroupCallEvent.ScreenshareFeedsChanged, this.screenshareFeeds);
+ }
+ replaceScreenshareFeed(existingFeed, replacementFeed) {
+ const feedIndex = this.screenshareFeeds.findIndex(f => f.userId === existingFeed.userId && f.deviceId === existingFeed.deviceId);
+ if (feedIndex === -1) {
+ throw new Error("Couldn't find screenshare feed to replace");
+ }
+ this.screenshareFeeds.splice(feedIndex, 1, replacementFeed);
+ existingFeed.dispose();
+ this.emit(GroupCallEvent.ScreenshareFeedsChanged, this.screenshareFeeds);
+ }
+ removeScreenshareFeed(callFeed) {
+ const feedIndex = this.screenshareFeeds.findIndex(f => f.userId === callFeed.userId && f.deviceId === callFeed.deviceId);
+ if (feedIndex === -1) {
+ throw new Error("Couldn't find screenshare feed to remove");
+ }
+ this.screenshareFeeds.splice(feedIndex, 1);
+ callFeed.dispose();
+ this.emit(GroupCallEvent.ScreenshareFeedsChanged, this.screenshareFeeds);
+ }
+
+ /**
+ * Recalculates and updates the participant map to match the room state.
+ */
+ updateParticipants() {
+ const localMember = this.room.getMember(this.client.getUserId());
+ if (!localMember) {
+ // The client hasn't fetched enough of the room state to get our own member
+ // event. This probably shouldn't happen, but sanity check & exit for now.
+ _logger.logger.warn(`GroupCall ${this.groupCallId} updateParticipants() tried to update participants before local room member is available`);
+ return;
+ }
+ if (this.participantsExpirationTimer !== null) {
+ clearTimeout(this.participantsExpirationTimer);
+ this.participantsExpirationTimer = null;
+ }
+ if (this.state === GroupCallState.Ended) {
+ this.participants = new Map();
+ return;
+ }
+ const participants = new Map();
+ const now = Date.now();
+ const entered = this.state === GroupCallState.Entered || this.enteredViaAnotherSession;
+ let nextExpiration = Infinity;
+ for (const e of this.getMemberStateEvents()) {
+ const member = this.room.getMember(e.getStateKey());
+ const content = e.getContent();
+ const calls = Array.isArray(content["m.calls"]) ? content["m.calls"] : [];
+ const call = calls.find(call => call["m.call_id"] === this.groupCallId);
+ const devices = Array.isArray(call?.["m.devices"]) ? call["m.devices"] : [];
+
+ // Filter out invalid and expired devices
+ let validDevices = devices.filter(d => typeof d.device_id === "string" && typeof d.session_id === "string" && typeof d.expires_ts === "number" && d.expires_ts > now && Array.isArray(d.feeds));
+
+ // Apply local echo for the unentered case
+ if (!entered && member?.userId === this.client.getUserId()) {
+ validDevices = validDevices.filter(d => d.device_id !== this.client.getDeviceId());
+ }
+
+ // Must have a connected device and be joined to the room
+ if (validDevices.length > 0 && member?.membership === "join") {
+ const deviceMap = new Map();
+ participants.set(member, deviceMap);
+ for (const d of validDevices) {
+ deviceMap.set(d.device_id, {
+ sessionId: d.session_id,
+ screensharing: d.feeds.some(f => f.purpose === _callEventTypes.SDPStreamMetadataPurpose.Screenshare)
+ });
+ if (d.expires_ts < nextExpiration) nextExpiration = d.expires_ts;
+ }
+ }
+ }
+
+ // Apply local echo for the entered case
+ if (entered) {
+ let deviceMap = participants.get(localMember);
+ if (deviceMap === undefined) {
+ deviceMap = new Map();
+ participants.set(localMember, deviceMap);
+ }
+ if (!deviceMap.has(this.client.getDeviceId())) {
+ deviceMap.set(this.client.getDeviceId(), {
+ sessionId: this.client.getSessionId(),
+ screensharing: this.getLocalFeeds().some(f => f.purpose === _callEventTypes.SDPStreamMetadataPurpose.Screenshare)
+ });
+ }
+ }
+ this.participants = participants;
+ if (nextExpiration < Infinity) {
+ this.participantsExpirationTimer = setTimeout(() => this.updateParticipants(), nextExpiration - now);
+ }
+ }
+
  /**
   * Updates the local user's member state with the devices returned by the given function.
   * @param fn - A function from the current devices to the new devices. If it
   * returns null, the update will be skipped.
   * @param keepAlive - Whether the request should outlive the window.
   */
  async updateDevices(fn, keepAlive = false) {
    const now = Date.now();
    const localUserId = this.client.getUserId();
    const event = this.getMemberStateEvents(localUserId);
    const content = event?.getContent() ?? {};
    const calls = Array.isArray(content["m.calls"]) ? content["m.calls"] : [];
    // Separate this group call's entry from entries for other calls, which
    // must be written back untouched.
    let call = null;
    const otherCalls = [];
    for (const c of calls) {
      if (c["m.call_id"] === this.groupCallId) {
        call = c;
      } else {
        otherCalls.push(c);
      }
    }
    if (call === null) call = {};
    const devices = Array.isArray(call["m.devices"]) ? call["m.devices"] : [];

    // Filter out invalid and expired devices
    const validDevices = devices.filter(d => typeof d.device_id === "string" && typeof d.session_id === "string" && typeof d.expires_ts === "number" && d.expires_ts > now && Array.isArray(d.feeds));
    const newDevices = fn(validDevices);
    if (newDevices === null) return;
    const newCalls = [...otherCalls];
    // Drop this call's entry entirely when no devices remain.
    if (newDevices.length > 0) {
      newCalls.push(_objectSpread(_objectSpread({}, call), {}, {
        "m.call_id": this.groupCallId,
        "m.devices": newDevices
      }));
    }
    const newContent = {
      "m.calls": newCalls
    };
    await this.client.sendStateEvent(this.room.roomId, _event.EventType.GroupCallMemberPrefix, newContent, localUserId, {
      keepAlive
    });
  }
+ async addDeviceToMemberState() {
+ await this.updateDevices(devices => [...devices.filter(d => d.device_id !== this.client.getDeviceId()), {
+ device_id: this.client.getDeviceId(),
+ session_id: this.client.getSessionId(),
+ expires_ts: Date.now() + DEVICE_TIMEOUT,
+ feeds: this.getLocalFeeds().map(feed => ({
+ purpose: feed.purpose
+ }))
+ // TODO: Add data channels
+ }]);
+ }
+
+ async updateMemberState() {
+ // Clear the old update interval before proceeding
+ if (this.resendMemberStateTimer !== null) {
+ clearInterval(this.resendMemberStateTimer);
+ this.resendMemberStateTimer = null;
+ }
+ if (this.state === GroupCallState.Entered) {
+ // Add the local device
+ await this.addDeviceToMemberState();
+
+ // Resend the state event every so often so it doesn't become stale
+ this.resendMemberStateTimer = setInterval(async () => {
+ _logger.logger.log(`GroupCall ${this.groupCallId} updateMemberState() resending call member state"`);
+ try {
+ await this.addDeviceToMemberState();
+ } catch (e) {
+ _logger.logger.error(`GroupCall ${this.groupCallId} updateMemberState() failed to resend call member state`, e);
+ }
+ }, DEVICE_TIMEOUT * 3 / 4);
+ } else {
+ // Remove the local device
+ await this.updateDevices(devices => devices.filter(d => d.device_id !== this.client.getDeviceId()), true);
+ }
+ }
+
  /**
   * Cleans up our member state by filtering out logged out devices, inactive
   * devices, and our own device (if we know we haven't entered).
   */
  async cleanMemberState() {
    const {
      devices: myDevices
    } = await this.client.getDevices();
    const deviceMap = new Map(myDevices.map(d => [d.device_id, d]));

    // updateDevices takes care of filtering out inactive devices for us
    await this.updateDevices(devices => {
      const newDevices = devices.filter(d => {
        const device = deviceMap.get(d.device_id);
        // Keep only devices the server still knows about, and drop our own
        // device if we haven't entered the call from any session.
        return device?.last_seen_ts !== undefined && !(d.device_id === this.client.getDeviceId() && this.state !== GroupCallState.Entered && !this.enteredViaAnotherSession);
      });

      // Skip the update if the devices are unchanged
      return newDevices.length === devices.length ? null : newDevices;
    });
  }
+ getGroupCallStats() {
+ if (this.stats === undefined) {
+ const userID = this.client.getUserId() || "unknown";
+ this.stats = new _groupCallStats.GroupCallStats(this.groupCallId, userID, this.statsCollectIntervalTime);
+ this.stats.reports.on(_statsReport.StatsReport.CONNECTION_STATS, this.onConnectionStats);
+ this.stats.reports.on(_statsReport.StatsReport.BYTE_SENT_STATS, this.onByteSentStats);
+ this.stats.reports.on(_statsReport.StatsReport.SUMMARY_STATS, this.onSummaryStats);
+ }
+ return this.stats;
+ }
+ setGroupCallStatsInterval(interval) {
+ this.statsCollectIntervalTime = interval;
+ if (this.stats !== undefined) {
+ this.stats.stop();
+ this.stats.setInterval(interval);
+ if (interval > 0) {
+ this.stats.start();
+ }
+ }
+ }
+}
+exports.GroupCall = GroupCall; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/groupCallEventHandler.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/groupCallEventHandler.js
new file mode 100644
index 0000000000..6c80c5b4da
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/groupCallEventHandler.js
@@ -0,0 +1,181 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.GroupCallEventHandlerEvent = exports.GroupCallEventHandler = void 0;
+var _client = require("../client");
+var _groupCall = require("./groupCall");
+var _roomState = require("../models/room-state");
+var _logger = require("../logger");
+var _event = require("../@types/event");
+var _sync = require("../sync");
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } /*
+ Copyright 2021 Šimon Brandner <simon.bra.ag@gmail.com>
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
// Transpiled TypeScript string enum: the events GroupCallEventHandler emits.
let GroupCallEventHandlerEvent = /*#__PURE__*/function (GroupCallEventHandlerEvent) {
  GroupCallEventHandlerEvent["Incoming"] = "GroupCall.incoming";
  GroupCallEventHandlerEvent["Outgoing"] = "GroupCall.outgoing";
  GroupCallEventHandlerEvent["Ended"] = "GroupCall.ended";
  GroupCallEventHandlerEvent["Participants"] = "GroupCall.participants";
  return GroupCallEventHandlerEvent;
}({});
exports.GroupCallEventHandlerEvent = GroupCallEventHandlerEvent;
/**
 * Watches rooms and room state for group call events and maintains a map of
 * roomId -> GroupCall, emitting GroupCallEventHandlerEvent.Incoming when a
 * new call is discovered.
 */
class GroupCallEventHandler {
  constructor(client) {
    this.client = client;
    // roomId -> GroupCall
    _defineProperty(this, "groupCalls", new Map());
    // All rooms we know about and whether we've seen a 'Room' event
    // for them. The promise will be fulfilled once we've processed that
    // event, which means we're "up to date" on what calls are in the room.
    _defineProperty(this, "roomDeferreds", new Map());
    _defineProperty(this, "onRoomsChanged", room => {
      this.createGroupCallForRoom(room);
    });
    // Reacts to group call state events: creates new calls, terminates
    // existing ones, and warns on unsupported transitions.
    _defineProperty(this, "onRoomStateChanged", (event, state) => {
      const eventType = event.getType();
      if (eventType === _event.EventType.GroupCallPrefix) {
        const groupCallId = event.getStateKey();
        const content = event.getContent();
        const currentGroupCall = this.groupCalls.get(state.roomId);
        if (!currentGroupCall && !content["m.terminated"] && !event.isRedacted()) {
          this.createGroupCallFromRoomStateEvent(event);
        } else if (currentGroupCall && currentGroupCall.groupCallId === groupCallId) {
          if (content["m.terminated"] || event.isRedacted()) {
            currentGroupCall.terminate(false);
          } else if (content["m.type"] !== currentGroupCall.type) {
            // TODO: Handle the callType changing when the room state changes
            _logger.logger.warn(`GroupCallEventHandler onRoomStateChanged() currently does not support changing type (roomId=${state.roomId})`);
          }
        } else if (currentGroupCall && currentGroupCall.groupCallId !== groupCallId) {
          // TODO: Handle new group calls and multiple group calls
          _logger.logger.warn(`GroupCallEventHandler onRoomStateChanged() currently does not support multiple calls (roomId=${state.roomId})`);
        }
      }
    });
  }

  /**
   * Scans existing rooms for group calls and subscribes to room/room-state
   * updates. Waits for the client to reach the Syncing state first so the
   * room state we read is reasonably up to date.
   */
  async start() {
    // We wait until the client has started syncing for real.
    // This is because we only support one call at a time, and want
    // the latest. We therefore want the latest state of the room before
    // we create a group call for the room so we can be fairly sure that
    // the group call we create is really the latest one.
    if (this.client.getSyncState() !== _sync.SyncState.Syncing) {
      _logger.logger.debug("GroupCallEventHandler start() waiting for client to start syncing");
      await new Promise(resolve => {
        const onSync = () => {
          if (this.client.getSyncState() === _sync.SyncState.Syncing) {
            this.client.off(_client.ClientEvent.Sync, onSync);
            return resolve();
          }
        };
        this.client.on(_client.ClientEvent.Sync, onSync);
      });
    }
    const rooms = this.client.getRooms();
    for (const room of rooms) {
      this.createGroupCallForRoom(room);
    }
    this.client.on(_client.ClientEvent.Room, this.onRoomsChanged);
    this.client.on(_roomState.RoomStateEvent.Events, this.onRoomStateChanged);
  }

  /**
   * Unsubscribes from client events. Removes BOTH listeners registered in
   * start(); previously the ClientEvent.Room listener was never removed,
   * leaking it (and this handler) after stop().
   */
  stop() {
    this.client.removeListener(_client.ClientEvent.Room, this.onRoomsChanged);
    this.client.removeListener(_roomState.RoomStateEvent.Events, this.onRoomStateChanged);
  }

  /**
   * Returns (creating if needed) the deferred that resolves once the given
   * room has been processed for group calls.
   */
  getRoomDeferred(roomId) {
    let deferred = this.roomDeferreds.get(roomId);
    if (deferred === undefined) {
      let resolveFunc;
      deferred = {
        prom: new Promise(resolve => {
          resolveFunc = resolve;
        })
      };
      deferred.resolve = resolveFunc;
      this.roomDeferreds.set(roomId, deferred);
    }
    return deferred;
  }

  /**
   * Resolves once we know what group calls exist in the given room.
   */
  waitUntilRoomReadyForGroupCalls(roomId) {
    return this.getRoomDeferred(roomId).prom;
  }

  /**
   * Looks up a known group call by its call ID.
   */
  getGroupCallById(groupCallId) {
    return [...this.groupCalls.values()].find(groupCall => groupCall.groupCallId === groupCallId);
  }

  /**
   * Picks the most recent live (non-terminated, non-redacted) group call
   * event in the room, creates a GroupCall for it, and marks the room as
   * processed.
   */
  createGroupCallForRoom(room) {
    const callEvents = room.currentState.getStateEvents(_event.EventType.GroupCallPrefix);
    // Newest first: we only support one call per room, so take the latest.
    const sortedCallEvents = callEvents.sort((a, b) => b.getTs() - a.getTs());
    for (const callEvent of sortedCallEvents) {
      const content = callEvent.getContent();
      if (content["m.terminated"] || callEvent.isRedacted()) {
        continue;
      }
      _logger.logger.debug(`GroupCallEventHandler createGroupCallForRoom() choosing group call from possible calls (stateKey=${callEvent.getStateKey()}, ts=${callEvent.getTs()}, roomId=${room.roomId}, numOfPossibleCalls=${callEvents.length})`);
      this.createGroupCallFromRoomStateEvent(callEvent);
      break;
    }
    _logger.logger.info(`GroupCallEventHandler createGroupCallForRoom() processed room (roomId=${room.roomId})`);
    this.getRoomDeferred(room.roomId).resolve();
  }

  /**
   * Validates a group call state event and, if valid, constructs and
   * registers a GroupCall for it, emitting Incoming.
   * @returns The new GroupCall, or undefined if the event was invalid.
   */
  createGroupCallFromRoomStateEvent(event) {
    const roomId = event.getRoomId();
    const content = event.getContent();
    const room = this.client.getRoom(roomId);
    if (!room) {
      _logger.logger.warn(`GroupCallEventHandler createGroupCallFromRoomStateEvent() couldn't find room for call (roomId=${roomId})`);
      return;
    }
    const groupCallId = event.getStateKey();
    const callType = content["m.type"];
    if (!Object.values(_groupCall.GroupCallType).includes(callType)) {
      _logger.logger.warn(`GroupCallEventHandler createGroupCallFromRoomStateEvent() received invalid call type (type=${callType}, roomId=${roomId})`);
      return;
    }
    const callIntent = content["m.intent"];
    if (!Object.values(_groupCall.GroupCallIntent).includes(callIntent)) {
      _logger.logger.warn(`Received invalid group call intent (type=${callType}, roomId=${roomId})`);
      return;
    }
    const isPtt = Boolean(content["io.element.ptt"]);
    let dataChannelOptions;
    if (content?.dataChannelsEnabled && content?.dataChannelOptions) {
      // Pull out just the dataChannelOptions we want to support.
      const {
        ordered,
        maxPacketLifeTime,
        maxRetransmits,
        protocol
      } = content.dataChannelOptions;
      dataChannelOptions = {
        ordered,
        maxPacketLifeTime,
        maxRetransmits,
        protocol
      };
    }
    const groupCall = new _groupCall.GroupCall(this.client, room, callType, isPtt, callIntent, groupCallId,
    // Because without Media section a WebRTC connection is not possible, so need a RTCDataChannel to set up a
    // no media WebRTC connection anyway.
    content?.dataChannelsEnabled || this.client.isVoipWithNoMediaAllowed, dataChannelOptions, this.client.isVoipWithNoMediaAllowed);
    this.groupCalls.set(room.roomId, groupCall);
    this.client.emit(GroupCallEventHandlerEvent.Incoming, groupCall);
    return groupCall;
  }
}
+exports.GroupCallEventHandler = GroupCallEventHandler; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/mediaHandler.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/mediaHandler.js
new file mode 100644
index 0000000000..2077d05a27
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/mediaHandler.js
@@ -0,0 +1,395 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.MediaHandlerEvent = exports.MediaHandler = void 0;
+var _typedEventEmitter = require("../models/typed-event-emitter");
+var _groupCall = require("../webrtc/groupCall");
+var _logger = require("../logger");
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } /*
+ Copyright 2015, 2016 OpenMarket Ltd
+ Copyright 2017 New Vector Ltd
+ Copyright 2019, 2020 The Matrix.org Foundation C.I.C.
+ Copyright 2021 - 2022 Šimon Brandner <simon.bra.ag@gmail.com>
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
/**
 * Events emitted by {@link MediaHandler}.
 * Transpiled string enum: maps enum member names to their wire values.
 */
let MediaHandlerEvent = /*#__PURE__*/(function () {
  const values = {};
  values["LocalStreamsChanged"] = "local_streams_changed";
  return values;
})();
+exports.MediaHandlerEvent = MediaHandlerEvent;
/**
 * Manages the local user's media: audio/video input device selection,
 * acquisition of usermedia and screensharing streams (getUserMedia /
 * getDisplayMedia), stream re-use, and teardown.
 *
 * Emits MediaHandlerEvent.LocalStreamsChanged whenever the set of local
 * streams it tracks changes.
 */
class MediaHandler extends _typedEventEmitter.TypedEventEmitter {
  constructor(client) {
    super();
    this.client = client;
    // Selected input device IDs; undefined means "browser default".
    _defineProperty(this, "audioInput", void 0);
    _defineProperty(this, "audioSettings", void 0);
    _defineProperty(this, "videoInput", void 0);
    // Most recent reusable usermedia stream (tracks still live), used as a
    // clone source by getUserMediaStreamInternal().
    _defineProperty(this, "localUserMediaStream", void 0);
    // All reusable usermedia / screensharing streams handed out so far.
    _defineProperty(this, "userMediaStreams", []);
    _defineProperty(this, "screensharingStreams", []);
    // Promise chain to serialise calls to getMediaStream
    _defineProperty(this, "getMediaStreamPromise", void 0);
  }

  /**
   * Restores previously persisted device choices without re-acquiring any
   * streams (unlike setAudioInput()/setVideoInput(), which refresh streams).
   */
  restoreMediaSettings(audioInput, videoInput) {
    this.audioInput = audioInput;
    this.videoInput = videoInput;
  }

  /**
   * Set an audio input device to use for MatrixCalls
   * @param deviceId - the identifier for the device
   * undefined treated as unset
   */
  async setAudioInput(deviceId) {
    _logger.logger.info(`MediaHandler setAudioInput() running (deviceId=${deviceId})`);
    if (this.audioInput === deviceId) return;
    this.audioInput = deviceId;
    await this.updateLocalUsermediaStreams();
  }

  /**
   * Set audio settings for MatrixCalls
   * @param opts - audio options to set
   */
  async setAudioSettings(opts) {
    _logger.logger.info(`MediaHandler setAudioSettings() running (opts=${JSON.stringify(opts)})`);
    this.audioSettings = Object.assign({}, opts);
    await this.updateLocalUsermediaStreams();
  }

  /**
   * Set a video input device to use for MatrixCalls
   * @param deviceId - the identifier for the device
   * undefined treated as unset
   */
  async setVideoInput(deviceId) {
    _logger.logger.info(`MediaHandler setVideoInput() running (deviceId=${deviceId})`);
    if (this.videoInput === deviceId) return;
    this.videoInput = deviceId;
    await this.updateLocalUsermediaStreams();
  }

  /**
   * Set media input devices to use for MatrixCalls
   * @param audioInput - the identifier for the audio device
   * @param videoInput - the identifier for the video device
   * undefined treated as unset
   */
  async setMediaInputs(audioInput, videoInput) {
    _logger.logger.log(`MediaHandler setMediaInputs() running (audioInput: ${audioInput} videoInput: ${videoInput})`);
    this.audioInput = audioInput;
    this.videoInput = videoInput;
    await this.updateLocalUsermediaStreams();
  }

  /*
   * Requests new usermedia streams and replace the old ones
   */
  async updateLocalUsermediaStreams() {
    // Nothing to refresh if we never handed out a reusable stream.
    if (this.userMediaStreams.length === 0) return;

    // Snapshot each call's current mute state (presence of audio/video
    // tracks) before stopping anything, so replacements match it.
    const callMediaStreamParams = new Map();
    for (const call of this.client.callEventHandler.calls.values()) {
      callMediaStreamParams.set(call.callId, {
        audio: call.hasLocalUserMediaAudioTrack,
        video: call.hasLocalUserMediaVideoTrack
      });
    }
    for (const stream of this.userMediaStreams) {
      _logger.logger.log(`MediaHandler updateLocalUsermediaStreams() stopping all tracks (streamId=${stream.id})`);
      for (const track of stream.getTracks()) {
        track.stop();
      }
    }
    this.userMediaStreams = [];
    this.localUserMediaStream = undefined;

    // Re-acquire a stream for every live 1:1 call with the same mute state.
    for (const call of this.client.callEventHandler.calls.values()) {
      if (call.callHasEnded() || !callMediaStreamParams.has(call.callId)) {
        continue;
      }
      const {
        audio,
        video
      } = callMediaStreamParams.get(call.callId);
      _logger.logger.log(`MediaHandler updateLocalUsermediaStreams() calling getUserMediaStream() (callId=${call.callId})`);
      const stream = await this.getUserMediaStream(audio, video);
      // The call may have ended while we were awaiting the new stream.
      if (call.callHasEnded()) {
        continue;
      }
      await call.updateLocalUsermediaStream(stream);
    }

    // Likewise for group calls that have a local feed.
    for (const groupCall of this.client.groupCallEventHandler.groupCalls.values()) {
      if (!groupCall.localCallFeed) {
        continue;
      }
      _logger.logger.log(`MediaHandler updateLocalUsermediaStreams() calling getUserMediaStream() (groupCallId=${groupCall.groupCallId})`);
      const stream = await this.getUserMediaStream(true, groupCall.type === _groupCall.GroupCallType.Video);
      if (groupCall.state === _groupCall.GroupCallState.Ended) {
        continue;
      }
      await groupCall.updateLocalUsermediaStream(stream);
    }
    this.emit(MediaHandlerEvent.LocalStreamsChanged);
  }

  /**
   * @returns true if at least one audio input device is available.
   * Returns false (rather than throwing) when device enumeration fails.
   */
  async hasAudioDevice() {
    try {
      const devices = await navigator.mediaDevices.enumerateDevices();
      return devices.filter(device => device.kind === "audioinput").length > 0;
    } catch (err) {
      _logger.logger.log(`MediaHandler hasAudioDevice() calling navigator.mediaDevices.enumerateDevices with error`, err);
      return false;
    }
  }

  /**
   * @returns true if at least one video input device is available.
   * Returns false (rather than throwing) when device enumeration fails.
   */
  async hasVideoDevice() {
    try {
      const devices = await navigator.mediaDevices.enumerateDevices();
      return devices.filter(device => device.kind === "videoinput").length > 0;
    } catch (err) {
      _logger.logger.log(`MediaHandler hasVideoDevice() calling navigator.mediaDevices.enumerateDevices with error`, err);
      return false;
    }
  }

  /**
   * @param audio - should have an audio track
   * @param video - should have a video track
   * @param reusable - is allowed to be reused by the MediaHandler
   * @returns based on passed parameters
   */
  async getUserMediaStream(audio, video, reusable = true) {
    // Serialise calls, otherwise we can't sensibly re-use the stream.
    // Chain on both fulfilment AND rejection: previously only `.then(fn)`
    // was used, so a single failed getUserMedia call (e.g. permission
    // denied) left the chain permanently rejected and every subsequent
    // call failed with the stale error.
    const next = () => this.getUserMediaStreamInternal(audio, video, reusable);
    this.getMediaStreamPromise = this.getMediaStreamPromise ? this.getMediaStreamPromise.then(next, next) : next();
    return this.getMediaStreamPromise;
  }

  /**
   * Acquires (or clones from the reusable local stream) a usermedia stream
   * matching the requested audio/video presence. Do not call directly:
   * callers go through getUserMediaStream() which serialises access.
   */
  async getUserMediaStreamInternal(audio, video, reusable) {
    // Only request track kinds for which a device actually exists.
    const shouldRequestAudio = audio && (await this.hasAudioDevice());
    const shouldRequestVideo = video && (await this.hasVideoDevice());
    let stream;
    let canReuseStream = true;
    if (this.localUserMediaStream) {
      // This figures out if we can reuse the current localUsermediaStream
      // based on whether or not the "mute state" (presence of tracks of a
      // given kind) matches what is being requested
      if (shouldRequestAudio !== this.localUserMediaStream.getAudioTracks().length > 0) {
        canReuseStream = false;
      }
      if (shouldRequestVideo !== this.localUserMediaStream.getVideoTracks().length > 0) {
        canReuseStream = false;
      }

      // This code checks that the device ID is the same as the localUserMediaStream stream, but we update
      // the localUserMediaStream whenever the device ID changes (apart from when restoring) so it's not
      // clear why this would ever be different, unless there's a race.
      if (shouldRequestAudio && this.localUserMediaStream.getAudioTracks()[0]?.getSettings()?.deviceId !== this.audioInput) {
        canReuseStream = false;
      }
      if (shouldRequestVideo && this.localUserMediaStream.getVideoTracks()[0]?.getSettings()?.deviceId !== this.videoInput) {
        canReuseStream = false;
      }
    } else {
      canReuseStream = false;
    }
    if (!canReuseStream) {
      const constraints = this.getUserMediaContraints(shouldRequestAudio, shouldRequestVideo);
      stream = await navigator.mediaDevices.getUserMedia(constraints);
      _logger.logger.log(`MediaHandler getUserMediaStreamInternal() calling getUserMediaStream (streamId=${stream.id}, shouldRequestAudio=${shouldRequestAudio}, shouldRequestVideo=${shouldRequestVideo}, constraints=${JSON.stringify(constraints)})`);
      // Remember which devices the browser actually picked, so later
      // requests (and reuse checks above) target the same devices.
      for (const track of stream.getTracks()) {
        const settings = track.getSettings();
        if (track.kind === "audio") {
          this.audioInput = settings.deviceId;
        } else if (track.kind === "video") {
          this.videoInput = settings.deviceId;
        }
      }
      if (reusable) {
        this.localUserMediaStream = stream;
      }
    } else {
      stream = this.localUserMediaStream.clone();
      _logger.logger.log(`MediaHandler getUserMediaStreamInternal() cloning (oldStreamId=${this.localUserMediaStream?.id} newStreamId=${stream.id} shouldRequestAudio=${shouldRequestAudio} shouldRequestVideo=${shouldRequestVideo})`);
      // Strip any track kinds the caller did not ask for from the clone.
      if (!shouldRequestAudio) {
        for (const track of stream.getAudioTracks()) {
          stream.removeTrack(track);
        }
      }
      if (!shouldRequestVideo) {
        for (const track of stream.getVideoTracks()) {
          stream.removeTrack(track);
        }
      }
    }
    if (reusable) {
      this.userMediaStreams.push(stream);
    }
    this.emit(MediaHandlerEvent.LocalStreamsChanged);
    return stream;
  }

  /**
   * Stops all tracks on the provided usermedia stream
   */
  stopUserMediaStream(mediaStream) {
    _logger.logger.log(`MediaHandler stopUserMediaStream() stopping (streamId=${mediaStream.id})`);
    for (const track of mediaStream.getTracks()) {
      track.stop();
    }
    const index = this.userMediaStreams.indexOf(mediaStream);
    if (index !== -1) {
      _logger.logger.debug(`MediaHandler stopUserMediaStream() splicing usermedia stream out stream array (streamId=${mediaStream.id})`, mediaStream.id);
      this.userMediaStreams.splice(index, 1);
    }
    this.emit(MediaHandlerEvent.LocalStreamsChanged);
    // Drop the clone source if it's the stream being stopped, so the next
    // request acquires a fresh stream instead of cloning dead tracks.
    if (this.localUserMediaStream === mediaStream) {
      this.localUserMediaStream = undefined;
    }
  }

  /**
   * @param desktopCapturerSourceId - sourceId for Electron DesktopCapturer
   * @param reusable - is allowed to be reused by the MediaHandler
   * @returns based on passed parameters
   */
  async getScreensharingStream(opts = {}, reusable = true) {
    let stream;
    if (this.screensharingStreams.length === 0) {
      const screenshareConstraints = this.getScreenshareContraints(opts);
      if (opts.desktopCapturerSourceId) {
        // We are using Electron
        _logger.logger.debug(`MediaHandler getScreensharingStream() calling getUserMedia() (opts=${JSON.stringify(opts)})`);
        stream = await navigator.mediaDevices.getUserMedia(screenshareConstraints);
      } else {
        // We are not using Electron
        _logger.logger.debug(`MediaHandler getScreensharingStream() calling getDisplayMedia() (opts=${JSON.stringify(opts)})`);
        stream = await navigator.mediaDevices.getDisplayMedia(screenshareConstraints);
      }
    } else {
      // Re-use the most recent screenshare rather than prompting again.
      const matchingStream = this.screensharingStreams[this.screensharingStreams.length - 1];
      _logger.logger.log(`MediaHandler getScreensharingStream() cloning (streamId=${matchingStream.id})`);
      stream = matchingStream.clone();
    }
    if (reusable) {
      this.screensharingStreams.push(stream);
    }
    this.emit(MediaHandlerEvent.LocalStreamsChanged);
    return stream;
  }

  /**
   * Stops all tracks on the provided screensharing stream
   */
  stopScreensharingStream(mediaStream) {
    _logger.logger.debug(`MediaHandler stopScreensharingStream() stopping stream (streamId=${mediaStream.id})`);
    for (const track of mediaStream.getTracks()) {
      track.stop();
    }
    const index = this.screensharingStreams.indexOf(mediaStream);
    if (index !== -1) {
      _logger.logger.debug(`MediaHandler stopScreensharingStream() splicing stream out (streamId=${mediaStream.id})`);
      this.screensharingStreams.splice(index, 1);
    }
    this.emit(MediaHandlerEvent.LocalStreamsChanged);
  }

  /**
   * Stops all local media tracks
   */
  stopAllStreams() {
    for (const stream of this.userMediaStreams) {
      _logger.logger.log(`MediaHandler stopAllStreams() stopping (streamId=${stream.id})`);
      for (const track of stream.getTracks()) {
        track.stop();
      }
    }
    for (const stream of this.screensharingStreams) {
      for (const track of stream.getTracks()) {
        track.stop();
      }
    }
    this.userMediaStreams = [];
    this.screensharingStreams = [];
    this.localUserMediaStream = undefined;
    this.emit(MediaHandlerEvent.LocalStreamsChanged);
  }

  /**
   * Builds MediaStreamConstraints for getUserMedia() from the selected
   * devices and audio settings. (Name keeps the historical "Contraints"
   * spelling for interface compatibility.)
   */
  getUserMediaContraints(audio, video) {
    const isWebkit = !!navigator.webkitGetUserMedia;
    return {
      audio: audio ? {
        deviceId: this.audioInput ? {
          ideal: this.audioInput
        } : undefined,
        autoGainControl: this.audioSettings ? {
          ideal: this.audioSettings.autoGainControl
        } : undefined,
        echoCancellation: this.audioSettings ? {
          ideal: this.audioSettings.echoCancellation
        } : undefined,
        noiseSuppression: this.audioSettings ? {
          ideal: this.audioSettings.noiseSuppression
        } : undefined
      } : false,
      video: video ? {
        deviceId: this.videoInput ? {
          ideal: this.videoInput
        } : undefined,
        /* We want 640x360. Chrome will give it only if we ask exactly,
           FF refuses entirely if we ask exactly, so have to ask for ideal
           instead
           XXX: Is this still true?
         */
        width: isWebkit ? {
          exact: 640
        } : {
          ideal: 640
        },
        height: isWebkit ? {
          exact: 360
        } : {
          ideal: 360
        }
      } : false
    };
  }

  /**
   * Builds constraints for screensharing: Chrome/Electron desktopCapturer
   * "mandatory" form when a sourceId is given, plain getDisplayMedia
   * constraints otherwise.
   */
  getScreenshareContraints(opts) {
    const {
      desktopCapturerSourceId,
      audio
    } = opts;
    if (desktopCapturerSourceId) {
      return {
        audio: audio ?? false,
        video: {
          mandatory: {
            chromeMediaSource: "desktop",
            chromeMediaSourceId: desktopCapturerSourceId
          }
        }
      };
    } else {
      return {
        audio: audio ?? false,
        video: true
      };
    }
  }
}
+exports.MediaHandler = MediaHandler; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/callStatsReportGatherer.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/callStatsReportGatherer.js
new file mode 100644
index 0000000000..b830e469a1
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/callStatsReportGatherer.js
@@ -0,0 +1,194 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CallStatsReportGatherer = void 0;
+var _connectionStats = require("./connectionStats");
+var _connectionStatsBuilder = require("./connectionStatsBuilder");
+var _transportStatsBuilder = require("./transportStatsBuilder");
+var _mediaSsrcHandler = require("./media/mediaSsrcHandler");
+var _mediaTrackHandler = require("./media/mediaTrackHandler");
+var _mediaTrackStatsHandler = require("./media/mediaTrackStatsHandler");
+var _trackStatsBuilder = require("./trackStatsBuilder");
+var _connectionStatsReportBuilder = require("./connectionStatsReportBuilder");
+var _valueFormatter = require("./valueFormatter");
+function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
+function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } /*
+ Copyright 2023 The Matrix.org Foundation C.I.C.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
/**
 * Collects WebRTC statistics for one peer connection, diffing successive
 * RTCStatsReport snapshots into connection/track stats and emitting them
 * through the supplied StatsReportEmitter.
 */
class CallStatsReportGatherer {
  /**
   * @param callId - ID of the call this peer connection belongs to
   * @param opponentMemberId - remote member the connection is with
   * @param pc - the RTCPeerConnection to gather stats from
   * @param emitter - sink for the generated stats reports
   * @param isFocus - whether the remote end is an SFU/focus (affects transport report)
   */
  constructor(callId, opponentMemberId, pc, emitter, isFocus = true) {
    this.callId = callId;
    this.opponentMemberId = opponentMemberId;
    this.pc = pc;
    this.emitter = emitter;
    this.isFocus = isFocus;
    // Set to false permanently once stats collection fails.
    _defineProperty(this, "isActive", true);
    // Previous/current RTCStatsReport snapshots; deltas between the two
    // drive the bitrate/packet-loss calculations.
    _defineProperty(this, "previousStatsReport", void 0);
    _defineProperty(this, "currentStatsReport", void 0);
    _defineProperty(this, "connectionStats", new _connectionStats.ConnectionStats());
    _defineProperty(this, "trackStats", void 0);
    // SDP is re-parsed for ssrc->track mapping on each (re)negotiation.
    pc.addEventListener("signalingstatechange", this.onSignalStateChange.bind(this));
    this.trackStats = new _mediaTrackStatsHandler.MediaTrackStatsHandler(new _mediaSsrcHandler.MediaSsrcHandler(), new _mediaTrackHandler.MediaTrackHandler(pc));
  }

  /**
   * Takes one stats snapshot and returns a summary of received media and
   * per-kind track quality. Returns a zeroed summary when inactive or when
   * collection fails (and deactivates the gatherer on failure).
   */
  async processStats(groupCallId, localUserId) {
    const summary = {
      isFirstCollection: this.previousStatsReport === undefined,
      receivedMedia: 0,
      receivedAudioMedia: 0,
      receivedVideoMedia: 0,
      audioTrackSummary: {
        count: 0,
        muted: 0,
        maxPacketLoss: 0,
        maxJitter: 0,
        concealedAudio: 0,
        totalAudio: 0
      },
      videoTrackSummary: {
        count: 0,
        muted: 0,
        maxPacketLoss: 0,
        maxJitter: 0,
        concealedAudio: 0,
        totalAudio: 0
      }
    };
    if (this.isActive) {
      const statsPromise = this.pc.getStats();
      // Some implementations expose the legacy callback-based getStats();
      // only proceed when we got a thenable back.
      if (typeof statsPromise?.then === "function") {
        return statsPromise.then(report => {
          // @ts-ignore
          this.currentStatsReport = typeof report?.result === "function" ? report.result() : report;
          try {
            this.processStatsReport(groupCallId, localUserId);
          } catch (error) {
            // Processing failure is terminal: stop gathering, return zeros.
            this.isActive = false;
            return summary;
          }
          this.previousStatsReport = this.currentStatsReport;
          summary.receivedMedia = this.connectionStats.bitrate.download;
          summary.receivedAudioMedia = this.connectionStats.bitrate.audio?.download || 0;
          summary.receivedVideoMedia = this.connectionStats.bitrate.video?.download || 0;
          const trackSummary = _trackStatsBuilder.TrackStatsBuilder.buildTrackSummary(Array.from(this.trackStats.getTrack2stats().values()));
          return _objectSpread(_objectSpread({}, summary), {}, {
            audioTrackSummary: trackSummary.audioTrackSummary,
            videoTrackSummary: trackSummary.videoTrackSummary
          });
        }).catch(error => {
          this.handleError(error);
          return summary;
        });
      }
      this.isActive = false;
    }
    return Promise.resolve(summary);
  }

  /**
   * Walks the current RTCStatsReport, updating connection and per-track
   * stats from deltas against the previous report, then emits the
   * byte-sent and connection stats reports.
   */
  processStatsReport(groupCallId, localUserId) {
    const byteSentStatsReport = new Map();
    // Extra identification fields are attached directly to the Map.
    byteSentStatsReport.callId = this.callId;
    byteSentStatsReport.opponentMemberId = this.opponentMemberId;
    this.currentStatsReport?.forEach(now => {
      const before = this.previousStatsReport ? this.previousStatsReport.get(now.id) : null;
      // RTCIceCandidatePairStats - https://w3c.github.io/webrtc-stats/#candidatepair-dict*
      if (now.type === "candidate-pair" && now.nominated && now.state === "succeeded") {
        this.connectionStats.bandwidth = _connectionStatsBuilder.ConnectionStatsBuilder.buildBandwidthReport(now);
        this.connectionStats.transport = _transportStatsBuilder.TransportStatsBuilder.buildReport(this.currentStatsReport, now, this.connectionStats.transport, this.isFocus);

        // RTCReceivedRtpStreamStats
        // https://w3c.github.io/webrtc-stats/#receivedrtpstats-dict*
        // RTCSentRtpStreamStats
        // https://w3c.github.io/webrtc-stats/#sentrtpstats-dict*
      } else if (now.type === "inbound-rtp" || now.type === "outbound-rtp") {
        const trackStats = this.trackStats.findTrack2Stats(now, now.type === "inbound-rtp" ? "remote" : "local");
        if (!trackStats) {
          return;
        }
        if (before) {
          _trackStatsBuilder.TrackStatsBuilder.buildPacketsLost(trackStats, now, before);
        }

        // Get the resolution and framerate for only remote video sources here. For the local video sources,
        // 'track' stats will be used since they have the updated resolution based on the simulcast streams
        // currently being sent. Promise based getStats reports three 'outbound-rtp' streams and there will be
        // more calculations needed to determine what is the highest resolution stream sent by the client if the
        // 'outbound-rtp' stats are used.
        if (now.type === "inbound-rtp") {
          _trackStatsBuilder.TrackStatsBuilder.buildFramerateResolution(trackStats, now);
          if (before) {
            _trackStatsBuilder.TrackStatsBuilder.buildBitrateReceived(trackStats, now, before);
          }
          const ts = this.trackStats.findTransceiverByTrackId(trackStats.trackId);
          _trackStatsBuilder.TrackStatsBuilder.setTrackStatsState(trackStats, ts);
          _trackStatsBuilder.TrackStatsBuilder.buildJitter(trackStats, now);
          _trackStatsBuilder.TrackStatsBuilder.buildAudioConcealment(trackStats, now);
        } else if (before) {
          byteSentStatsReport.set(trackStats.trackId, _valueFormatter.ValueFormatter.getNonNegativeValue(now.bytesSent));
          _trackStatsBuilder.TrackStatsBuilder.buildBitrateSend(trackStats, now, before);
        }
        _trackStatsBuilder.TrackStatsBuilder.buildCodec(this.currentStatsReport, trackStats, now);
      } else if (now.type === "track" && now.kind === "video" && !now.remoteSource) {
        const trackStats = this.trackStats.findLocalVideoTrackStats(now);
        if (!trackStats) {
          return;
        }
        _trackStatsBuilder.TrackStatsBuilder.buildFramerateResolution(trackStats, now);
        _trackStatsBuilder.TrackStatsBuilder.calculateSimulcastFramerate(trackStats, now, before, this.trackStats.mediaTrackHandler.getActiveSimulcastStreams());
      }
    });
    this.emitter.emitByteSendReport(byteSentStatsReport);
    this.processAndEmitConnectionStatsReport();
  }

  // Enables/disables future stats collection.
  setActive(isActive) {
    this.isActive = isActive;
  }

  // Whether this gatherer is still collecting stats.
  getActive() {
    return this.isActive;
  }

  // Any collection error permanently deactivates the gatherer.
  handleError(_) {
    this.isActive = false;
  }

  /**
   * Builds the aggregated connection stats report from the per-track stats,
   * emits it, and resets the transport list for the next interval.
   */
  processAndEmitConnectionStatsReport() {
    const report = _connectionStatsReportBuilder.ConnectionStatsReportBuilder.build(this.trackStats.getTrack2stats());
    report.callId = this.callId;
    report.opponentMemberId = this.opponentMemberId;
    this.connectionStats.bandwidth = report.bandwidth;
    this.connectionStats.bitrate = report.bitrate;
    this.connectionStats.packetLoss = report.packetLoss;
    this.emitter.emitConnectionStatsReport(_objectSpread(_objectSpread({}, report), {}, {
      transport: this.connectionStats.transport
    }));
    this.connectionStats.transport = [];
  }

  // No-op: kept for interface symmetry with other gatherers.
  stopProcessingStats() {}

  // Re-parse local/remote SDP for ssrc mappings once negotiation settles.
  onSignalStateChange() {
    if (this.pc.signalingState === "stable") {
      if (this.pc.currentRemoteDescription) {
        this.trackStats.mediaSsrcHandler.parse(this.pc.currentRemoteDescription.sdp, "remote");
      }
      if (this.pc.currentLocalDescription) {
        this.trackStats.mediaSsrcHandler.parse(this.pc.currentLocalDescription.sdp, "local");
      }
    }
  }

  // Updates the remote member attached to subsequent reports (e.g. once known).
  setOpponentMemberId(id) {
    this.opponentMemberId = id;
  }
}
+exports.CallStatsReportGatherer = CallStatsReportGatherer; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/callStatsReportSummary.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/callStatsReportSummary.js
new file mode 100644
index 0000000000..430afc16cd
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/callStatsReportSummary.js
@@ -0,0 +1,5 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+}); \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStats.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStats.js
new file mode 100644
index 0000000000..16374812d0
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStats.js
@@ -0,0 +1,34 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.ConnectionStats = void 0;
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); }
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
/**
 * Mutable container for the latest aggregated connection statistics of a
 * peer connection: bandwidth, bitrate, packet loss and transport entries.
 * All fields start empty and are filled in by the stats gatherers.
 */
class ConnectionStats {
  constructor() {
    this.bandwidth = {};
    this.bitrate = {};
    this.packetLoss = {};
    this.transport = [];
  }
}
+exports.ConnectionStats = ConnectionStats; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStatsBuilder.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStatsBuilder.js
new file mode 100644
index 0000000000..64bf4082ff
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStatsBuilder.js
@@ -0,0 +1,33 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.ConnectionStatsBuilder = void 0;
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
/**
 * Builds connection-level bandwidth reports from candidate-pair stats.
 */
class ConnectionStatsBuilder {
  /**
   * Converts the available incoming/outgoing bitrate fields of a nominated
   * RTCIceCandidatePairStats entry from bits/s to whole kbit/s.
   * @param now - the current candidate-pair stats entry
   * @returns download/upload bandwidth in kbit/s; 0 when the field is absent
   */
  static buildBandwidthReport(now) {
    const toKbps = bitsPerSecond => bitsPerSecond ? Math.round(bitsPerSecond / 1000) : 0;
    return {
      download: toKbps(now.availableIncomingBitrate),
      upload: toKbps(now.availableOutgoingBitrate)
    };
  }
}
+exports.ConnectionStatsBuilder = ConnectionStatsBuilder; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStatsReportBuilder.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStatsReportBuilder.js
new file mode 100644
index 0000000000..7178b5411e
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/connectionStatsReportBuilder.js
@@ -0,0 +1,127 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.ConnectionStatsReportBuilder = void 0;
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
/**
 * Aggregates per-track media statistics into a single connection-level
 * report (bitrates, packet loss, resolutions, framerates, codecs, jitter
 * and audio concealment).
 */
class ConnectionStatsReportBuilder {
  /**
   * @param stats - Map of trackId -> track stats object
   * @returns the aggregated connection stats report. As a side effect,
   *          resets each track's bitrate accumulator for the next interval.
   */
  static build(stats) {
    // Packet counters, bucketed by stream direction.
    const packets = {
      download: { total: 0, lost: 0 },
      upload: { total: 0, lost: 0 }
    };
    // Bitrate totals: overall and split by media kind.
    let downloadRate = 0;
    let uploadRate = 0;
    const kindRates = {
      audio: { download: 0, upload: 0 },
      video: { download: 0, upload: 0 }
    };
    // Per-track collections, bucketed by local/remote side.
    const resolution = { local: new Map(), remote: new Map() };
    const framerate = { local: new Map(), remote: new Map() };
    const codec = { local: new Map(), remote: new Map() };
    const jitter = new Map();
    const audioConcealment = new Map();
    let concealedAudio = 0;
    let totalAudioDuration = 0;
    for (const [trackId, track] of stats) {
      // Packet loss, attributed to the stream's direction.
      const loss = track.getLoss();
      const direction = loss.isDownloadStream ? "download" : "upload";
      packets[direction].total += loss.packetsTotal;
      packets[direction].lost += loss.packetsLost;

      // Bitrate, overall and per media kind.
      const rate = track.getBitrate();
      downloadRate += rate.download;
      uploadRate += rate.upload;
      const kind = track.kind === "audio" ? "audio" : "video";
      kindRates[kind].download += rate.download;
      kindRates[kind].upload += rate.upload;

      // Audio quality: total concealment across all audio tracks.
      if (track.kind === "audio") {
        const concealment = track.getAudioConcealment();
        concealedAudio += concealment.concealedAudio;
        totalAudioDuration += concealment.totalAudioDuration;
      }

      // Resolution / framerate / codec keyed by side, then trackId.
      const side = track.getType();
      resolution[side].set(trackId, track.getResolution());
      framerate[side].set(trackId, track.getFramerate());
      codec[side].set(trackId, track.getCodec());

      // Jitter and per-track concealment only make sense for remote tracks.
      if (side === "remote") {
        jitter.set(trackId, track.getJitter());
        if (track.kind === "audio") {
          audioConcealment.set(trackId, track.getAudioConcealment());
        }
      }
      track.resetBitrate();
    }
    const lostTotal = packets.download.lost + packets.upload.lost;
    const packetTotal = packets.download.total + packets.upload.total;
    return {
      bitrate: {
        upload: uploadRate,
        download: downloadRate,
        audio: { upload: kindRates.audio.upload, download: kindRates.audio.download },
        video: { upload: kindRates.video.upload, download: kindRates.video.download }
      },
      packetLoss: {
        total: ConnectionStatsReportBuilder.calculatePacketLoss(lostTotal, packetTotal),
        download: ConnectionStatsReportBuilder.calculatePacketLoss(packets.download.lost, packets.download.total),
        upload: ConnectionStatsReportBuilder.calculatePacketLoss(packets.upload.lost, packets.upload.total)
      },
      audioConcealment,
      totalAudioConcealment: {
        concealedAudio,
        totalAudioDuration
      },
      framerate,
      resolution,
      codec,
      jitter
    };
  }

  /**
   * @returns packet loss as a whole-number percentage; 0 when either count
   *          is missing or non-positive.
   */
  static calculatePacketLoss(lostPackets, totalPackets) {
    const haveData = Boolean(totalPackets) && totalPackets > 0 && Boolean(lostPackets) && lostPackets > 0;
    return haveData ? Math.round(lostPackets / totalPackets * 100) : 0;
  }
}
+exports.ConnectionStatsReportBuilder = ConnectionStatsReportBuilder; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/groupCallStats.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/groupCallStats.js
new file mode 100644
index 0000000000..4ed8a1062f
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/groupCallStats.js
@@ -0,0 +1,80 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.GroupCallStats = void 0;
+var _callStatsReportGatherer = require("./callStatsReportGatherer");
+var _statsReportEmitter = require("./statsReportEmitter");
+var _summaryStatsReportGatherer = require("./summaryStatsReportGatherer");
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } /*
+ Copyright 2023 The Matrix.org Foundation C.I.C.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+class GroupCallStats {
+ constructor(groupCallId, userId, interval = 10000) {
+ this.groupCallId = groupCallId;
+ this.userId = userId;
+ this.interval = interval;
+ _defineProperty(this, "timer", void 0);
+ _defineProperty(this, "gatherers", new Map());
+ _defineProperty(this, "reports", new _statsReportEmitter.StatsReportEmitter());
+ _defineProperty(this, "summaryStatsReportGatherer", new _summaryStatsReportGatherer.SummaryStatsReportGatherer(this.reports));
+ }
+ start() {
+ if (this.timer === undefined && this.interval > 0) {
+ this.timer = setInterval(() => {
+ this.processStats();
+ }, this.interval);
+ }
+ }
+ stop() {
+ if (this.timer !== undefined) {
+ clearInterval(this.timer);
+ this.gatherers.forEach(c => c.stopProcessingStats());
+ }
+ }
+ hasStatsReportGatherer(callId) {
+ return this.gatherers.has(callId);
+ }
+ addStatsReportGatherer(callId, opponentMemberId, peerConnection) {
+ if (this.hasStatsReportGatherer(callId)) {
+ return false;
+ }
+ this.gatherers.set(callId, new _callStatsReportGatherer.CallStatsReportGatherer(callId, opponentMemberId, peerConnection, this.reports));
+ return true;
+ }
+ removeStatsReportGatherer(callId) {
+ return this.gatherers.delete(callId);
+ }
+ getStatsReportGatherer(callId) {
+ return this.hasStatsReportGatherer(callId) ? this.gatherers.get(callId) : undefined;
+ }
+ updateOpponentMember(callId, opponentMember) {
+ this.getStatsReportGatherer(callId)?.setOpponentMemberId(opponentMember);
+ }
+ processStats() {
+ const summary = [];
+ this.gatherers.forEach(c => {
+ summary.push(c.processStats(this.groupCallId, this.userId));
+ });
+ Promise.all(summary).then(s => this.summaryStatsReportGatherer.build(s));
+ }
+ setInterval(interval) {
+ this.interval = interval;
+ }
+}
+exports.GroupCallStats = GroupCallStats; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaSsrcHandler.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaSsrcHandler.js
new file mode 100644
index 0000000000..5e43415558
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaSsrcHandler.js
@@ -0,0 +1,62 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.MediaSsrcHandler = void 0;
+var _sdpTransform = require("sdp-transform");
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } /*
+ Copyright 2023 The Matrix.org Foundation C.I.C.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+class MediaSsrcHandler { // Parses SDP and answers "which mid does this ssrc belong to?", kept separately per direction.
+ constructor() {
+ _defineProperty(this, "ssrcToMid", { // Despite the name: Map<mid, ssrc[]> for each of the "local" and "remote" descriptions.
+ local: new Map(),
+ remote: new Map()
+ });
+ }
+ findMidBySsrc(ssrc, type) { // Reverse lookup: first mid whose ssrc list contains the given ssrc, else undefined.
+ let mid;
+ this.ssrcToMid[type].forEach((ssrcs, m) => { // forEach cannot break early; the return below only skips one callback
+ if (ssrcs.find(s => s == ssrc)) { // loose == on purpose: stats reports carry numeric ssrc, the map stores strings
+ mid = m;
+ return;
+ }
+ });
+ return mid;
+ }
+ parse(description, type) { // Rebuilds the mid -> ssrc[] map for one direction from a raw SDP string.
+ const sdp = (0, _sdpTransform.parse)(description);
+ const ssrcToMid = new Map();
+ sdp.media.forEach(m => {
+ if (!!m.mid && m.type === "video" || m.type === "audio") { // NOTE(review): && binds tighter than ||, so an audio m-line passes even without a mid (key becomes "undefined") — confirm intended
+ const ssrcs = [];
+ m.ssrcs?.forEach(ssrc => {
+ if (ssrc.attribute === "cname") { // each ssrc has exactly one cname line, so this collects each ssrc once
+ ssrcs.push(`${ssrc.id}`);
+ }
+ });
+ ssrcToMid.set(`${m.mid}`, ssrcs);
+ }
+ });
+ this.ssrcToMid[type] = ssrcToMid; // replaces the previous mapping wholesale on every (re)negotiation
+ }
+ getSsrcToMidMap(type) {
+ return this.ssrcToMid[type];
+ }
+}
+exports.MediaSsrcHandler = MediaSsrcHandler; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackHandler.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackHandler.js
new file mode 100644
index 0000000000..c4252a9cbd
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackHandler.js
@@ -0,0 +1,69 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.MediaTrackHandler = void 0;
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+class MediaTrackHandler { // Thin lookup helpers over RTCPeerConnection.getTransceivers() for track/mid resolution.
+ constructor(pc) {
+ this.pc = pc; // the RTCPeerConnection this handler inspects
+ }
+ getLocalTracks(kind) { // All currently-sending local tracks of the given kind ("audio" | "video").
+ const isNotNullAndKind = track => {
+ return track !== null && track.kind === kind;
+ };
+ // @ts-ignore The linter doesn't get it
+ return this.pc.getTransceivers().filter(t => t.currentDirection === "sendonly" || t.currentDirection === "sendrecv").filter(t => t.sender !== null).map(t => t.sender).map(s => s.track).filter(isNotNullAndKind);
+ }
+ getTackById(trackId) { // NOTE(review): name keeps the upstream typo ("Tack"); callers depend on it, so do not rename.
+ return this.pc.getTransceivers().map(t => {
+ if (t?.sender.track !== null && t.sender.track.id === trackId) {
+ return t.sender.track;
+ }
+ if (t?.receiver.track !== null && t.receiver.track.id === trackId) {
+ return t.receiver.track;
+ }
+ return undefined;
+ }).find(t => t !== undefined);
+ }
+ getLocalTrackIdByMid(mid) { // Track id of the sender attached to the transceiver with this mid, if any.
+ const transceiver = this.pc.getTransceivers().find(t => t.mid === mid);
+ if (transceiver !== undefined && !!transceiver.sender && !!transceiver.sender.track) {
+ return transceiver.sender.track.id;
+ }
+ return undefined;
+ }
+ getRemoteTrackIdByMid(mid) { // Track id of the receiver attached to the transceiver with this mid, if any.
+ const transceiver = this.pc.getTransceivers().find(t => t.mid === mid);
+ if (transceiver !== undefined && !!transceiver.receiver && !!transceiver.receiver.track) {
+ return transceiver.receiver.track.id;
+ }
+ return undefined;
+ }
+ getActiveSimulcastStreams() {
+ //@TODO implement this right.. Check how many layer configured
+ return 3; // hard-coded layer count until the TODO above is resolved
+ }
+ getTransceiverByTrackId(trackId) { // Matches against either the receiver's or the sender's track id.
+ return this.pc.getTransceivers().find(t => {
+ return t.receiver.track.id === trackId || t.sender.track !== null && t.sender.track.id === trackId;
+ });
+ }
+}
+exports.MediaTrackHandler = MediaTrackHandler; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackStats.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackStats.js
new file mode 100644
index 0000000000..d5a7963c23
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackStats.js
@@ -0,0 +1,150 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.MediaTrackStats = void 0;
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); }
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+class MediaTrackStats { // Mutable per-track accumulator; values are written by the stats builders and read when a report is assembled.
+ constructor(trackId, type, kind) {
+ this.trackId = trackId;
+ this.type = type; // "local" or "remote"
+ this.kind = kind; // "audio" or "video"
+ _defineProperty(this, "loss", {
+ packetsTotal: 0,
+ packetsLost: 0,
+ isDownloadStream: false
+ });
+ _defineProperty(this, "bitrate", { // kbps in each direction; reset after every report cycle
+ download: 0,
+ upload: 0
+ });
+ _defineProperty(this, "resolution", { // -1 means "not known yet"
+ width: -1,
+ height: -1
+ });
+ _defineProperty(this, "audioConcealment", { // durations in ms, see setAudioConcealment()
+ concealedAudio: 0,
+ totalAudioDuration: 0
+ });
+ _defineProperty(this, "framerate", 0);
+ _defineProperty(this, "jitter", 0);
+ _defineProperty(this, "codec", "");
+ _defineProperty(this, "isAlive", true);
+ _defineProperty(this, "isMuted", false);
+ _defineProperty(this, "isEnabled", true);
+ }
+ getType() {
+ return this.type;
+ }
+ setLoss(loss) {
+ this.loss = loss;
+ }
+ getLoss() {
+ return this.loss;
+ }
+ setResolution(resolution) {
+ this.resolution = resolution;
+ }
+ getResolution() {
+ return this.resolution;
+ }
+ setFramerate(framerate) {
+ this.framerate = framerate;
+ }
+ getFramerate() {
+ return this.framerate;
+ }
+ setBitrate(bitrate) {
+ this.bitrate = bitrate;
+ }
+ getBitrate() {
+ return this.bitrate;
+ }
+ setCodec(codecShortType) { // always returns true; callers may use it in boolean chains
+ this.codec = codecShortType;
+ return true;
+ }
+ getCodec() {
+ return this.codec;
+ }
+ resetBitrate() { // called after each report so bitrate stays a per-interval value
+ this.bitrate = {
+ download: 0,
+ upload: 0
+ };
+ }
+ set alive(isAlive) {
+ this.isAlive = isAlive;
+ }
+
+ /**
+ * A MediaTrackState is alive if the corresponding MediaStreamTrack is bound to a transceiver and the
+ * MediaStreamTrack is in state MediaStreamTrack.readyState === "live"
+ */
+ get alive() {
+ return this.isAlive;
+ }
+ set muted(isMuted) {
+ this.isMuted = isMuted;
+ }
+
+ /**
+ * MediaTrackState.isMuted mirrors MediaStreamTrack.muted.
+ * The two values only match while MediaTrackState.isAlive.
+ */
+ get muted() {
+ return this.isMuted;
+ }
+ set enabled(isEnabled) {
+ this.isEnabled = isEnabled;
+ }
+
+ /**
+ * MediaTrackState.isEnabled mirrors MediaStreamTrack.enabled.
+ * The two values only match while MediaTrackState.isAlive.
+ */
+ get enabled() {
+ return this.isEnabled;
+ }
+ setJitter(jitter) {
+ this.jitter = jitter;
+ }
+
+ /**
+ * Jitter in milliseconds
+ */
+ getJitter() {
+ return this.jitter;
+ }
+
+ /**
+ * Audio concealment ratio (concealed duration / total duration)
+ */
+ setAudioConcealment(concealedAudioDuration, totalAudioDuration) {
+ this.audioConcealment.concealedAudio = concealedAudioDuration;
+ this.audioConcealment.totalAudioDuration = totalAudioDuration;
+ }
+ getAudioConcealment() {
+ return this.audioConcealment;
+ }
+}
+exports.MediaTrackStats = MediaTrackStats; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackStatsHandler.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackStatsHandler.js
new file mode 100644
index 0000000000..f72f644cb3
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/media/mediaTrackStatsHandler.js
@@ -0,0 +1,82 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.MediaTrackStatsHandler = void 0;
+var _mediaTrackStats = require("./mediaTrackStats");
+function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
+function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } /*
+ Copyright 2023 The Matrix.org Foundation C.I.C.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+class MediaTrackStatsHandler {
+ constructor(mediaSsrcHandler, mediaTrackHandler) {
+ this.mediaSsrcHandler = mediaSsrcHandler;
+ this.mediaTrackHandler = mediaTrackHandler;
+ _defineProperty(this, "track2stats", new Map());
+ }
+
+ /**
+ * Find tracks by rtc stats
+ * Argument report is any because the stats api is not consistent:
+ * For example `trackIdentifier`, `mid` not existing in every implementations
+ * https://www.w3.org/TR/webrtc-stats/#dom-rtcinboundrtpstreamstats
+ * https://developer.mozilla.org/en-US/docs/Web/API/RTCInboundRtpStreamStats
+ */
+ findTrack2Stats(report, type) {
+ let trackID;
+ if (report.trackIdentifier) {
+ trackID = report.trackIdentifier;
+ } else if (report.mid) {
+ trackID = type === "remote" ? this.mediaTrackHandler.getRemoteTrackIdByMid(report.mid) : this.mediaTrackHandler.getLocalTrackIdByMid(report.mid);
+ } else if (report.ssrc) {
+ const mid = this.mediaSsrcHandler.findMidBySsrc(report.ssrc, type);
+ if (!mid) {
+ return undefined;
+ }
+ trackID = type === "remote" ? this.mediaTrackHandler.getRemoteTrackIdByMid(report.mid) : this.mediaTrackHandler.getLocalTrackIdByMid(report.mid);
+ }
+ if (!trackID) {
+ return undefined;
+ }
+ let trackStats = this.track2stats.get(trackID);
+ if (!trackStats) {
+ const track = this.mediaTrackHandler.getTackById(trackID);
+ if (track !== undefined) {
+ const kind = track.kind === "audio" ? track.kind : "video";
+ trackStats = new _mediaTrackStats.MediaTrackStats(trackID, type, kind);
+ this.track2stats.set(trackID, trackStats);
+ } else {
+ return undefined;
+ }
+ }
+ return trackStats;
+ }
+ findLocalVideoTrackStats(report) {
+ const localVideoTracks = this.mediaTrackHandler.getLocalTracks("video");
+ if (localVideoTracks.length === 0) {
+ return undefined;
+ }
+ return this.findTrack2Stats(report, "local");
+ }
+ getTrack2stats() {
+ return this.track2stats;
+ }
+ findTransceiverByTrackId(trackID) {
+ return this.mediaTrackHandler.getTransceiverByTrackId(trackID);
+ }
+}
+exports.MediaTrackStatsHandler = MediaTrackStatsHandler; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/statsReport.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/statsReport.js
new file mode 100644
index 0000000000..d020a9e7f9
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/statsReport.js
@@ -0,0 +1,28 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.StatsReport = void 0;
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+let StatsReport = /*#__PURE__*/function (StatsReport) {
+ StatsReport["CONNECTION_STATS"] = "StatsReport.connection_stats";
+ StatsReport["BYTE_SENT_STATS"] = "StatsReport.byte_sent_stats";
+ StatsReport["SUMMARY_STATS"] = "StatsReport.summary_stats";
+ return StatsReport;
+}({});
+exports.StatsReport = StatsReport; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/statsReportEmitter.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/statsReportEmitter.js
new file mode 100644
index 0000000000..c25da81743
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/statsReportEmitter.js
@@ -0,0 +1,36 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.StatsReportEmitter = void 0;
+var _typedEventEmitter = require("../../models/typed-event-emitter");
+var _statsReport = require("./statsReport");
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+class StatsReportEmitter extends _typedEventEmitter.TypedEventEmitter { // Fan-out hub: the stats gatherers push reports in, consumers subscribe via the StatsReport event names.
+ emitByteSendReport(byteSentStats) { // emits StatsReport.BYTE_SENT_STATS
+ this.emit(_statsReport.StatsReport.BYTE_SENT_STATS, byteSentStats);
+ }
+ emitConnectionStatsReport(report) { // emits StatsReport.CONNECTION_STATS
+ this.emit(_statsReport.StatsReport.CONNECTION_STATS, report);
+ }
+ emitSummaryStatsReport(report) { // emits StatsReport.SUMMARY_STATS
+ this.emit(_statsReport.StatsReport.SUMMARY_STATS, report);
+ }
+}
+exports.StatsReportEmitter = StatsReportEmitter; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/summaryStatsReportGatherer.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/summaryStatsReportGatherer.js
new file mode 100644
index 0000000000..fb78690e64
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/summaryStatsReportGatherer.js
@@ -0,0 +1,103 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.SummaryStatsReportGatherer = void 0;
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+class SummaryStatsReportGatherer { // Aggregates the per-connection summaries into one report and emits it.
+ constructor(emitter) {
+ this.emitter = emitter;
+ }
+ build(allSummary) {
+ // Skip summaries from a peer connection's first collection cycle:
+ // interval-based stats need a previous sample to diff against, so the
+ // first pass yields all-zero values that would skew the aggregate.
+ const summary = allSummary.filter(s => !s.isFirstCollection);
+ const summaryTotalCount = summary.length;
+ if (summaryTotalCount === 0) {
+ return; // nothing usable yet -> emit no report at all
+ }
+ const summaryCounter = {
+ receivedAudio: 0,
+ receivedVideo: 0,
+ receivedMedia: 0,
+ concealedAudio: 0,
+ totalAudio: 0
+ };
+ let maxJitter = 0;
+ let maxPacketLoss = 0;
+ summary.forEach(stats => {
+ this.countTrackListReceivedMedia(summaryCounter, stats);
+ this.countConcealedAudio(summaryCounter, stats);
+ maxJitter = this.buildMaxJitter(maxJitter, stats);
+ maxPacketLoss = this.buildMaxPacketLoss(maxPacketLoss, stats);
+ });
+ const decimalPlaces = 5;
+ const report = { // percentages are fractions in [0, 1], rounded to 5 decimal places
+ percentageReceivedMedia: Number((summaryCounter.receivedMedia / summaryTotalCount).toFixed(decimalPlaces)),
+ percentageReceivedVideoMedia: Number((summaryCounter.receivedVideo / summaryTotalCount).toFixed(decimalPlaces)),
+ percentageReceivedAudioMedia: Number((summaryCounter.receivedAudio / summaryTotalCount).toFixed(decimalPlaces)),
+ maxJitter,
+ maxPacketLoss,
+ percentageConcealedAudio: Number(summaryCounter.totalAudio > 0 ? (summaryCounter.concealedAudio / summaryCounter.totalAudio).toFixed(decimalPlaces) : 0),
+ peerConnections: summaryTotalCount
+ };
+ this.emitter.emitSummaryStatsReport(report);
+ }
+ countTrackListReceivedMedia(counter, stats) { // A connection counts as "receiving" a kind when bytes arrived or no track of that kind was expected.
+ let hasReceivedAudio = false;
+ let hasReceivedVideo = false;
+ if (stats.receivedAudioMedia > 0 || stats.audioTrackSummary.count === 0) {
+ counter.receivedAudio++;
+ hasReceivedAudio = true;
+ }
+ if (stats.receivedVideoMedia > 0 || stats.videoTrackSummary.count === 0) {
+ counter.receivedVideo++;
+ hasReceivedVideo = true;
+ } else {
+ if (stats.videoTrackSummary.muted > 0 && stats.videoTrackSummary.muted === stats.videoTrackSummary.count) { // all remote video muted: treat as received rather than broken
+ counter.receivedVideo++;
+ hasReceivedVideo = true;
+ }
+ }
+ if (hasReceivedVideo && hasReceivedAudio) { // "media" means both kinds arrived (or were not expected)
+ counter.receivedMedia++;
+ }
+ }
+ buildMaxJitter(maxJitter, stats) { // running maximum over both track kinds
+ if (maxJitter < stats.videoTrackSummary.maxJitter) {
+ maxJitter = stats.videoTrackSummary.maxJitter;
+ }
+ if (maxJitter < stats.audioTrackSummary.maxJitter) {
+ maxJitter = stats.audioTrackSummary.maxJitter;
+ }
+ return maxJitter;
+ }
+ buildMaxPacketLoss(maxPacketLoss, stats) { // running maximum over both track kinds
+ if (maxPacketLoss < stats.videoTrackSummary.maxPacketLoss) {
+ maxPacketLoss = stats.videoTrackSummary.maxPacketLoss;
+ }
+ if (maxPacketLoss < stats.audioTrackSummary.maxPacketLoss) {
+ maxPacketLoss = stats.audioTrackSummary.maxPacketLoss;
+ }
+ return maxPacketLoss;
+ }
+ countConcealedAudio(summaryCounter, stats) { // accumulate audio concealment durations across all connections
+ summaryCounter.concealedAudio += stats.audioTrackSummary.concealedAudio;
+ summaryCounter.totalAudio += stats.audioTrackSummary.totalAudio;
+ }
+}
+exports.SummaryStatsReportGatherer = SummaryStatsReportGatherer; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/trackStatsBuilder.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/trackStatsBuilder.js
new file mode 100644
index 0000000000..563a14b784
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/trackStatsBuilder.js
@@ -0,0 +1,172 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.TrackStatsBuilder = void 0;
+var _valueFormatter = require("./valueFormatter");
+class TrackStatsBuilder { // Static helpers that translate raw RTCStats entries into MediaTrackStats fields.
+ static buildFramerateResolution(trackStats, now) {
+ const resolution = {
+ height: now.frameHeight,
+ width: now.frameWidth
+ };
+ const frameRate = now.framesPerSecond;
+ if (resolution.height && resolution.width) { // keep the previous resolution when the report lacks dimensions
+ trackStats.setResolution(resolution);
+ }
+ trackStats.setFramerate(Math.round(frameRate || 0)); // framerate is always overwritten, falling back to 0
+ }
+ static calculateSimulcastFramerate(trackStats, now, before, layer) {
+ let frameRate = trackStats.getFramerate();
+ if (!frameRate) {
+ if (before) {
+ const timeMs = now.timestamp - before.timestamp;
+ if (timeMs > 0 && now.framesSent) {
+ const numberOfFramesSinceBefore = now.framesSent - before.framesSent; // NOTE(review): before.framesSent may be undefined on the first sample -> NaN; confirm callers guard this
+ frameRate = numberOfFramesSinceBefore / timeMs * 1000;
+ }
+ }
+ if (!frameRate) {
+ return;
+ }
+ }
+
+ // Reset frame rate to 0 when video is suspended as a result of endpoint falling out of last-n.
+ frameRate = layer ? Math.round(frameRate / layer) : 0;
+ trackStats.setFramerate(frameRate);
+ }
+ static buildCodec(report, trackStats, now) {
+ const codec = report?.get(now.codecId);
+ if (codec) {
+ /**
+ * The mime type has the following form: video/VP8 or audio/ISAC,
+ * so we want to keep just the type after the '/', audio and video
+ * keys will be added on the processing side.
+ */
+ const codecShortType = codec.mimeType.split("/")[1];
+ codecShortType && trackStats.setCodec(codecShortType);
+ }
+ }
+ static buildBitrateReceived(trackStats, now, before) { // inbound-rtp: only the download direction is meaningful
+ trackStats.setBitrate({
+ download: TrackStatsBuilder.calculateBitrate(now.bytesReceived, before.bytesReceived, now.timestamp, before.timestamp),
+ upload: 0
+ });
+ }
+ static buildBitrateSend(trackStats, now, before) { // outbound-rtp: only the upload direction is meaningful
+ trackStats.setBitrate({
+ download: 0,
+ upload: this.calculateBitrate(now.bytesSent, before.bytesSent, now.timestamp, before.timestamp)
+ });
+ }
+ static buildPacketsLost(trackStats, now, before) {
+ const key = now.type === "outbound-rtp" ? "packetsSent" : "packetsReceived"; // direction decides which counter to diff
+ let packetsNow = now[key];
+ if (!packetsNow || packetsNow < 0) {
+ packetsNow = 0;
+ }
+ const packetsBefore = _valueFormatter.ValueFormatter.getNonNegativeValue(before[key]);
+ const packetsDiff = Math.max(0, packetsNow - packetsBefore);
+ const packetsLostNow = _valueFormatter.ValueFormatter.getNonNegativeValue(now.packetsLost);
+ const packetsLostBefore = _valueFormatter.ValueFormatter.getNonNegativeValue(before.packetsLost);
+ const packetsLostDiff = Math.max(0, packetsLostNow - packetsLostBefore);
+ trackStats.setLoss({ // per-interval deltas, not cumulative counters
+ packetsTotal: packetsDiff + packetsLostDiff,
+ packetsLost: packetsLostDiff,
+ isDownloadStream: now.type !== "outbound-rtp"
+ });
+ }
+ static calculateBitrate(bytesNowAny, bytesBeforeAny, nowTimestamp, beforeTimestamp) { // bytes*8 bits over elapsed ms == kbit/s
+ const bytesNow = _valueFormatter.ValueFormatter.getNonNegativeValue(bytesNowAny);
+ const bytesBefore = _valueFormatter.ValueFormatter.getNonNegativeValue(bytesBeforeAny);
+ const bytesProcessed = Math.max(0, bytesNow - bytesBefore);
+ const timeMs = nowTimestamp - beforeTimestamp;
+ let bitrateKbps = 0;
+ if (timeMs > 0) {
+ bitrateKbps = Math.round(bytesProcessed * 8 / timeMs);
+ }
+ return bitrateKbps;
+ }
+ static setTrackStatsState(trackStats, transceiver) { // Marks the stats dead when no usable track is attached; otherwise mirrors muted/enabled.
+ if (transceiver === undefined) {
+ trackStats.alive = false;
+ return;
+ }
+ const track = trackStats.getType() === "remote" ? transceiver.receiver.track : transceiver?.sender?.track;
+ if (track === undefined || track === null) {
+ trackStats.alive = false;
+ return;
+ }
+ if (track.readyState === "ended") {
+ trackStats.alive = false;
+ return;
+ }
+ trackStats.muted = track.muted;
+ trackStats.enabled = track.enabled;
+ trackStats.alive = true;
+ }
+ static buildTrackSummary(trackStatsList) { // Collapses all remote track stats into one audio and one video summary.
+ const videoTrackSummary = {
+ count: 0,
+ muted: 0,
+ maxJitter: 0,
+ maxPacketLoss: 0,
+ concealedAudio: 0,
+ totalAudio: 0
+ };
+ const audioTrackSummary = {
+ count: 0,
+ muted: 0,
+ maxJitter: 0,
+ maxPacketLoss: 0,
+ concealedAudio: 0,
+ totalAudio: 0
+ };
+ const remoteTrackList = trackStatsList.filter(t => t.getType() === "remote");
+ const audioTrackList = remoteTrackList.filter(t => t.kind === "audio");
+ remoteTrackList.forEach(stats => {
+ const trackSummary = stats.kind === "video" ? videoTrackSummary : audioTrackSummary;
+ trackSummary.count++;
+ if (stats.alive && stats.muted) { // muted only counts for live tracks
+ trackSummary.muted++;
+ }
+ if (trackSummary.maxJitter < stats.getJitter()) {
+ trackSummary.maxJitter = stats.getJitter();
+ }
+ if (trackSummary.maxPacketLoss < stats.getLoss().packetsLost) {
+ trackSummary.maxPacketLoss = stats.getLoss().packetsLost;
+ }
+ if (audioTrackList.length > 0) { // NOTE(review): this guard is global, so video summaries also accumulate concealment fields here -- confirm intended
+ trackSummary.concealedAudio += stats.getAudioConcealment()?.concealedAudio;
+ trackSummary.totalAudio += stats.getAudioConcealment()?.totalAudioDuration;
+ }
+ });
+ return {
+ audioTrackSummary,
+ videoTrackSummary
+ };
+ }
+ static buildJitter(trackStats, statsReport) { // Jitter arrives in seconds; stored rounded to ms, -1 when the field is missing.
+ if (statsReport.type !== "inbound-rtp") {
+ return;
+ }
+ const jitterStr = statsReport?.jitter;
+ if (jitterStr !== undefined) {
+ const jitter = _valueFormatter.ValueFormatter.getNonNegativeValue(jitterStr);
+ trackStats.setJitter(Math.round(jitter * 1000));
+ } else {
+ trackStats.setJitter(-1);
+ }
+ }
+ static buildAudioConcealment(trackStats, statsReport) {
+ if (statsReport.type !== "inbound-rtp") {
+ return;
+ }
+ const msPerSample = 1000 * statsReport?.totalSamplesDuration / statsReport?.totalSamplesReceived; // NOTE(review): totalSamplesReceived may be 0/undefined -> NaN propagates into the stats; confirm consumers tolerate this
+ const concealedAudioDuration = msPerSample * statsReport?.concealedSamples;
+ const totalAudioDuration = 1000 * statsReport?.totalSamplesDuration;
+ trackStats.setAudioConcealment(concealedAudioDuration, totalAudioDuration);
+ }
+}
+exports.TrackStatsBuilder = TrackStatsBuilder; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/transportStats.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/transportStats.js
new file mode 100644
index 0000000000..430afc16cd
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/transportStats.js
@@ -0,0 +1,5 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+}); \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/transportStatsBuilder.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/transportStatsBuilder.js
new file mode 100644
index 0000000000..d65aa28dba
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/transportStatsBuilder.js
@@ -0,0 +1,40 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.TransportStatsBuilder = void 0;
class TransportStatsBuilder {
  /**
   * Resolves the nominated ICE candidate pair from a stats report and
   * appends one transport entry to the accumulator, deduplicated by
   * (remote ip:port, protocol, local ip:port).
   *
   * @param {RTCStatsReport|undefined} report - full stats report, used to look up candidate stats by id.
   * @param {RTCIceCandidatePairStats} now - the currently active candidate pair.
   * @param {Array} conferenceStatsTransport - accumulator of transport entries; mutated in place.
   * @param {boolean} isFocus - whether this connection targets the conference focus.
   * @returns {Array} the same accumulator, for convenient chaining.
   */
  static buildReport(report, now, conferenceStatsTransport, isFocus) {
    const localUsedCandidate = report?.get(now.localCandidateId);
    const remoteUsedCandidate = report?.get(now.remoteCandidateId);

    // RTCIceCandidateStats
    // https://w3c.github.io/webrtc-stats/#icecandidate-dict*
    if (remoteUsedCandidate && localUsedCandidate) {
      // Older implementations expose `ip`; the current spec name is `address`.
      const remoteIpAddress = remoteUsedCandidate.ip !== undefined ? remoteUsedCandidate.ip : remoteUsedCandidate.address;
      const remotePort = remoteUsedCandidate.port;
      const ip = `${remoteIpAddress}:${remotePort}`;
      const localIpAddress = localUsedCandidate.ip !== undefined ? localUsedCandidate.ip : localUsedCandidate.address;
      const localPort = localUsedCandidate.port;
      const localIp = `${localIpAddress}:${localPort}`;
      const type = remoteUsedCandidate.protocol;

      // Save the address unless it has been saved already.
      if (!conferenceStatsTransport.some(t => t.ip === ip && t.type === type && t.localIp === localIp)) {
        conferenceStatsTransport.push({
          ip,
          type,
          localIp,
          isFocus,
          localCandidateType: localUsedCandidate.candidateType,
          remoteCandidateType: remoteUsedCandidate.candidateType,
          networkType: localUsedCandidate.networkType,
          // Bug fix: check for undefined explicitly so a legitimate RTT of
          // 0 seconds is reported as 0 ms rather than collapsing to NaN
          // (the previous truthiness test dropped falsy 0).
          rtt: now.currentRoundTripTime !== undefined ? now.currentRoundTripTime * 1000 : NaN
        });
      }
    }
    return conferenceStatsTransport;
  }
}
+exports.TransportStatsBuilder = TransportStatsBuilder; \ No newline at end of file
diff --git a/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/valueFormatter.js b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/valueFormatter.js
new file mode 100644
index 0000000000..17050d260e
--- /dev/null
+++ b/comm/chat/protocols/matrix/lib/matrix-sdk/webrtc/stats/valueFormatter.js
@@ -0,0 +1,31 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.ValueFormatter = void 0;
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
class ValueFormatter {
  /**
   * Coerces an arbitrary stats value to a non-negative number.
   *
   * @param {unknown} input - raw value taken from a WebRTC stats report
   *   (may be a number, a numeric string, or anything else).
   * @returns {number} the value as a number, clamped below at 0; returns 0
   *   when the input is not numeric.
   */
  static getNonNegativeValue(input) {
    // Fixed typo in the parameter name (`imput`) and collapsed the
    // let-then-reassign pattern into a single const.
    const value = typeof input === "number" ? input : Number(input);
    // Number.isNaN avoids the implicit re-coercion of the global isNaN.
    if (Number.isNaN(value)) {
      return 0;
    }
    return Math.max(0, value);
  }
}
+exports.ValueFormatter = ValueFormatter; \ No newline at end of file