author     RaindropsSys <contact@minteck.org>   2023-04-24 14:03:36 +0200
committer  RaindropsSys <contact@minteck.org>   2023-04-24 14:03:36 +0200
commit     633c92eae865e957121e08de634aeee11a8b3992 (patch)
tree       09d881bee1dae0b6eee49db1dfaf0f500240606c /includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc
parent     c4657e4509733699c0f26a3c900bab47e915d5a0 (diff)
download   pluralconnect-633c92eae865e957121e08de634aeee11a8b3992.tar.gz
           pluralconnect-633c92eae865e957121e08de634aeee11a8b3992.tar.bz2
           pluralconnect-633c92eae865e957121e08de634aeee11a8b3992.zip
Updated 18 files, added 1692 files and deleted includes/system/compare.inc (automated)
Diffstat (limited to 'includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc')
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/audioContext.ts | 44
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/call.ts | 2962
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callEventHandler.ts | 425
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callEventTypes.ts | 92
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callFeed.ts | 361
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/groupCall.ts | 1598
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/groupCallEventHandler.ts | 232
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/mediaHandler.ts | 469
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/connectionStats.ts | 47
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/connectionStatsReporter.ts | 28
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/groupCallStats.ts | 64
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaSsrcHandler.ts | 57
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackHandler.ts | 71
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackStats.ts | 104
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackStatsHandler.ts | 86
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReport.ts | 56
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportBuilder.ts | 110
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportEmitter.ts | 33
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportGatherer.ts | 183
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsValueFormatter.ts | 27
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/trackStatsReporter.ts | 117
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/transportStats.ts | 26
-rw-r--r--  includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/transportStatsReporter.ts | 48
23 files changed, 7240 insertions, 0 deletions
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/audioContext.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/audioContext.ts
new file mode 100644
index 0000000..7cf3ed3
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/audioContext.ts
@@ -0,0 +1,44 @@
+/*
+Copyright 2022 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+let audioContext: AudioContext | null = null;
+let refCount = 0;
+
+/**
+ * Acquires a reference to the shared AudioContext.
+ * It's highly recommended to reuse this AudioContext rather than creating your
+ * own, because multiple AudioContexts can be problematic in some browsers.
+ * Make sure to call releaseContext when you're done using it.
+ * @returns The shared AudioContext
+ */
+export const acquireContext = (): AudioContext => {
+ if (audioContext === null) audioContext = new AudioContext();
+ refCount++;
+ return audioContext;
+};
+
+/**
+ * Signals that one of the references to the shared AudioContext has been
+ * released, allowing the context and associated hardware resources to be
+ * cleaned up if nothing else is using it.
+ */
+export const releaseContext = (): void => {
+ refCount--;
+ if (refCount === 0) {
+ audioContext?.close();
+ audioContext = null;
+ }
+};
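[Editorial note: a minimal usage sketch of the ref-counted context above, not part of the diff. The import path and the try/finally pairing are illustrative only.]

import { acquireContext, releaseContext } from "matrix-js-sdk/src/webrtc/audioContext";

async function playBuffer(buffer: AudioBuffer): Promise<void> {
    // Reuse the shared AudioContext instead of constructing a new one.
    const ctx = acquireContext();
    try {
        const source = ctx.createBufferSource();
        source.buffer = buffer;
        source.connect(ctx.destination);
        source.start();
        // Wait for playback to finish before dropping our reference.
        await new Promise<void>((resolve) => (source.onended = (): void => resolve()));
    } finally {
        // Once the ref count reaches zero the context is closed and recreated on the next acquire.
        releaseContext();
    }
}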
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/call.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/call.ts
new file mode 100644
index 0000000..cd75c10
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/call.ts
@@ -0,0 +1,2962 @@
+/*
+Copyright 2015, 2016 OpenMarket Ltd
+Copyright 2017 New Vector Ltd
+Copyright 2019, 2020 The Matrix.org Foundation C.I.C.
+Copyright 2021 - 2022 Šimon Brandner <simon.bra.ag@gmail.com>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+/**
+ * This is an internal module. See {@link createNewMatrixCall} for the public API.
+ */
+
+import { v4 as uuidv4 } from "uuid";
+import { parse as parseSdp, write as writeSdp } from "sdp-transform";
+
+import { logger } from "../logger";
+import * as utils from "../utils";
+import { IContent, MatrixEvent } from "../models/event";
+import { EventType, ToDeviceMessageId } from "../@types/event";
+import { RoomMember } from "../models/room-member";
+import { randomString } from "../randomstring";
+import {
+ MCallReplacesEvent,
+ MCallAnswer,
+ MCallInviteNegotiate,
+ CallCapabilities,
+ SDPStreamMetadataPurpose,
+ SDPStreamMetadata,
+ SDPStreamMetadataKey,
+ MCallSDPStreamMetadataChanged,
+ MCallSelectAnswer,
+ MCAllAssertedIdentity,
+ MCallCandidates,
+ MCallBase,
+ MCallHangupReject,
+} from "./callEventTypes";
+import { CallFeed } from "./callFeed";
+import { MatrixClient } from "../client";
+import { EventEmitterEvents, TypedEventEmitter } from "../models/typed-event-emitter";
+import { DeviceInfo } from "../crypto/deviceinfo";
+import { GroupCallUnknownDeviceError } from "./groupCall";
+import { IScreensharingOpts } from "./mediaHandler";
+import { MatrixError } from "../http-api";
+import { GroupCallStats } from "./stats/groupCallStats";
+
+interface CallOpts {
+ // The room ID for this call.
+ roomId: string;
+ invitee?: string;
+ // The Matrix Client instance to send events to.
+ client: MatrixClient;
+ /**
+ * Whether relay through TURN should be forced.
+ * @deprecated use opts.forceTURN when creating the matrix client
+ * since it's only possible to set this option on outbound calls.
+ */
+ forceTURN?: boolean;
+ // A list of TURN servers.
+ turnServers?: Array<TurnServer>;
+ opponentDeviceId?: string;
+ opponentSessionId?: string;
+ groupCallId?: string;
+}
+
+interface TurnServer {
+ urls: Array<string>;
+ username?: string;
+ password?: string;
+ ttl?: number;
+}
+
+interface AssertedIdentity {
+ id: string;
+ displayName: string;
+}
+
+enum MediaType {
+ AUDIO = "audio",
+ VIDEO = "video",
+}
+
+enum CodecName {
+ OPUS = "opus",
+ // add more as needed
+}
+
+// Used internally to specify modifications to codec parameters in SDP
+interface CodecParamsMod {
+ mediaType: MediaType;
+ codec: CodecName;
+ enableDtx?: boolean; // true to enable discontinuous transmission, false to disable, undefined to leave as-is
+ maxAverageBitrate?: number; // sets the max average bitrate, or undefined to leave as-is
+}
+
+export enum CallState {
+ Fledgling = "fledgling",
+ InviteSent = "invite_sent",
+ WaitLocalMedia = "wait_local_media",
+ CreateOffer = "create_offer",
+ CreateAnswer = "create_answer",
+ Connecting = "connecting",
+ Connected = "connected",
+ Ringing = "ringing",
+ Ended = "ended",
+}
+
+export enum CallType {
+ Voice = "voice",
+ Video = "video",
+}
+
+export enum CallDirection {
+ Inbound = "inbound",
+ Outbound = "outbound",
+}
+
+export enum CallParty {
+ Local = "local",
+ Remote = "remote",
+}
+
+export enum CallEvent {
+ Hangup = "hangup",
+ State = "state",
+ Error = "error",
+ Replaced = "replaced",
+
+ // The value of isLocalOnHold() has changed
+ LocalHoldUnhold = "local_hold_unhold",
+ // The value of isRemoteOnHold() has changed
+ RemoteHoldUnhold = "remote_hold_unhold",
+ // backwards compat alias for LocalHoldUnhold: remove in a major version bump
+ HoldUnhold = "hold_unhold",
+ // Feeds have changed
+ FeedsChanged = "feeds_changed",
+
+ AssertedIdentityChanged = "asserted_identity_changed",
+
+ LengthChanged = "length_changed",
+
+ DataChannel = "datachannel",
+
+ SendVoipEvent = "send_voip_event",
+}
+
+export enum CallErrorCode {
+ /** The user chose to end the call */
+ UserHangup = "user_hangup",
+
+ /** An error code when the local client failed to create an offer. */
+ LocalOfferFailed = "local_offer_failed",
+ /**
+ * An error code when there is no local mic/camera to use. This may be because
+ * the hardware isn't plugged in, or the user has explicitly denied access.
+ */
+ NoUserMedia = "no_user_media",
+
+ /**
+ * Error code used when a call event failed to send
+ * because unknown devices were present in the room
+ */
+ UnknownDevices = "unknown_devices",
+
+ /**
+ * Error code used when we fail to send the invite
+ * for some reason other than there being unknown devices
+ */
+ SendInvite = "send_invite",
+
+ /**
+ * An answer could not be created
+ */
+ CreateAnswer = "create_answer",
+
+ /**
+ * An offer could not be created
+ */
+ CreateOffer = "create_offer",
+
+ /**
+ * Error code used when we fail to send the answer
+ * for some reason other than there being unknown devices
+ */
+ SendAnswer = "send_answer",
+
+ /**
+ * The session description from the other side could not be set
+ */
+ SetRemoteDescription = "set_remote_description",
+
+ /**
+ * The session description from this side could not be set
+ */
+ SetLocalDescription = "set_local_description",
+
+ /**
+ * A different device answered the call
+ */
+ AnsweredElsewhere = "answered_elsewhere",
+
+ /**
+ * No media connection could be established to the other party
+ */
+ IceFailed = "ice_failed",
+
+ /**
+ * The invite timed out whilst waiting for an answer
+ */
+ InviteTimeout = "invite_timeout",
+
+ /**
+ * The call was replaced by another call
+ */
+ Replaced = "replaced",
+
+ /**
+ * Signalling for the call could not be sent (other than the initial invite)
+ */
+ SignallingFailed = "signalling_timeout",
+
+ /**
+ * The remote party is busy
+ */
+ UserBusy = "user_busy",
+
+ /**
+ * We transferred the call off to somewhere else
+ */
+ Transferred = "transferred",
+
+ /**
+ * A call from the same user was found with a new session id
+ */
+ NewSession = "new_session",
+}
+
+/**
+ * The version field that we set in m.call.* events
+ */
+const VOIP_PROTO_VERSION = "1";
+
+/** The fallback ICE server to use for STUN or TURN protocols. */
+const FALLBACK_ICE_SERVER = "stun:turn.matrix.org";
+
+/** The length of time a call can be ringing for. */
+const CALL_TIMEOUT_MS = 60 * 1000; // ms
+/** The time after which we increment callLength */
+const CALL_LENGTH_INTERVAL = 1000; // ms
+/** The time after which we end the call, if ICE got disconnected */
+const ICE_DISCONNECTED_TIMEOUT = 30 * 1000; // ms
+
+export class CallError extends Error {
+ public readonly code: string;
+
+ public constructor(code: CallErrorCode, msg: string, err: Error) {
+ // Still don't think there's any way to have proper nested errors
+ super(msg + ": " + err);
+
+ this.code = code;
+ }
+}
+
+export function genCallID(): string {
+ return Date.now().toString() + randomString(16);
+}
+
+function getCodecParamMods(isPtt: boolean): CodecParamsMod[] {
+ const mods = [
+ {
+ mediaType: "audio",
+ codec: "opus",
+ enableDtx: true,
+ maxAverageBitrate: isPtt ? 12000 : undefined,
+ },
+ ] as CodecParamsMod[];
+
+ return mods;
+}
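[Editorial note: for a push-to-talk call the helper above yields a single Opus mod; a sketch of the resulting value, not additional source.]

// getCodecParamMods(true)  -> [{ mediaType: "audio", codec: "opus", enableDtx: true, maxAverageBitrate: 12000 }]
// getCodecParamMods(false) -> [{ mediaType: "audio", codec: "opus", enableDtx: true, maxAverageBitrate: undefined }]
// i.e. DTX is always requested, and the bitrate cap is only applied for PTT calls.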
+
+export interface VoipEvent {
+ type: "toDevice" | "sendEvent";
+ eventType: string;
+ userId?: string;
+ opponentDeviceId?: string;
+ roomId?: string;
+ content: Record<string, unknown>;
+}
+
+/**
+ * These now all have the call object as an argument. Why? Well, to know which call a given event is
+ * about you have three options:
+ * 1. Use a closure as the callback that remembers what call it's listening to. This can be
+ * a pain because you need to pass the listener function again when you remove the listener,
+ * which might be somewhere else.
+ * 2. Use the not-very-well-known fact that EventEmitter sets 'this' to the emitter object in the
+ * callback. This doesn't really play well with modern Typescript and eslint and doesn't work
+ * with our pattern of re-emitting events.
+ * 3. Pass the object in question as an argument to the callback.
+ *
+ * Now that we have group calls which have to deal with multiple call objects, this will
+ * become more important, and I think methods 1 and 2 are just going to cause issues.
+ */
+export type CallEventHandlerMap = {
+ [CallEvent.DataChannel]: (channel: RTCDataChannel, call: MatrixCall) => void;
+ [CallEvent.FeedsChanged]: (feeds: CallFeed[], call: MatrixCall) => void;
+ [CallEvent.Replaced]: (newCall: MatrixCall, oldCall: MatrixCall) => void;
+ [CallEvent.Error]: (error: CallError, call: MatrixCall) => void;
+ [CallEvent.RemoteHoldUnhold]: (onHold: boolean, call: MatrixCall) => void;
+ [CallEvent.LocalHoldUnhold]: (onHold: boolean, call: MatrixCall) => void;
+ [CallEvent.LengthChanged]: (length: number, call: MatrixCall) => void;
+ [CallEvent.State]: (state: CallState, oldState: CallState, call: MatrixCall) => void;
+ [CallEvent.Hangup]: (call: MatrixCall) => void;
+ [CallEvent.AssertedIdentityChanged]: (call: MatrixCall) => void;
+ /* @deprecated */
+ [CallEvent.HoldUnhold]: (onHold: boolean) => void;
+ [CallEvent.SendVoipEvent]: (event: VoipEvent, call: MatrixCall) => void;
+};
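[Editorial note: to illustrate option 3 above, a sketch (not part of the diff) of one shared listener that uses the trailing call argument to manage several concurrent calls.]

import { MatrixCall, CallEvent, CallState } from "matrix-js-sdk/src/webrtc/call";

const activeCalls = new Map<string, MatrixCall>();

function trackCall(call: MatrixCall): void {
    activeCalls.set(call.callId, call);
    // The call is handed back as the last argument, so one shared handler
    // can serve every call without a per-call closure.
    call.on(CallEvent.State, (state: CallState, oldState: CallState, c: MatrixCall) => {
        if (state === CallState.Ended) activeCalls.delete(c.callId);
    });
    call.on(CallEvent.Hangup, (c: MatrixCall) => activeCalls.delete(c.callId));
}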
+
+// The key of the transceiver map (purpose + media type, separated by ':')
+type TransceiverKey = string;
+
+// generates keys for the map of transceivers
+// kind is unfortunately a string rather than MediaType as this is the type of
+// track.kind
+function getTransceiverKey(purpose: SDPStreamMetadataPurpose, kind: TransceiverKey): string {
+ return purpose + ":" + kind;
+}
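[Editorial note: assuming the purpose values from callEventTypes.ts are the strings "m.usermedia" and "m.screenshare", the keys produced look like this.]

// getTransceiverKey(SDPStreamMetadataPurpose.Usermedia, "audio")   -> "m.usermedia:audio"
// getTransceiverKey(SDPStreamMetadataPurpose.Screenshare, "video") -> "m.screenshare:video"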
+
+export class MatrixCall extends TypedEventEmitter<CallEvent, CallEventHandlerMap> {
+ public roomId: string;
+ public callId: string;
+ public invitee?: string;
+ public hangupParty?: CallParty;
+ public hangupReason?: string;
+ public direction?: CallDirection;
+ public ourPartyId: string;
+ public peerConn?: RTCPeerConnection;
+ public toDeviceSeq = 0;
+
+ // whether this call should have push-to-talk semantics
+ // This should be set by the consumer on incoming & outgoing calls.
+ public isPtt = false;
+
+ private _state = CallState.Fledgling;
+ private readonly client: MatrixClient;
+ private readonly forceTURN?: boolean;
+ private readonly turnServers: Array<TurnServer>;
+ // A queue for candidates waiting to go out.
+ // We try to amalgamate candidates into a single candidate message where
+ // possible
+ private candidateSendQueue: Array<RTCIceCandidate> = [];
+ private candidateSendTries = 0;
+ private candidatesEnded = false;
+ private feeds: Array<CallFeed> = [];
+
+ // our transceivers for each purpose and type of media
+ private transceivers = new Map<TransceiverKey, RTCRtpTransceiver>();
+
+ private inviteOrAnswerSent = false;
+ private waitForLocalAVStream = false;
+ private successor?: MatrixCall;
+ private opponentMember?: RoomMember;
+ private opponentVersion?: number | string;
+ // The party ID of the other side: undefined if we haven't chosen a partner
+ // yet, null if we have but they didn't send a party ID.
+ private opponentPartyId: string | null | undefined;
+ private opponentCaps?: CallCapabilities;
+ private iceDisconnectedTimeout?: ReturnType<typeof setTimeout>;
+ private inviteTimeout?: ReturnType<typeof setTimeout>;
+ private readonly removeTrackListeners = new Map<MediaStream, () => void>();
+
+ // The logic of when & if a call is on hold is nontrivial and explained in is*OnHold
+ // This flag represents whether we want the other party to be on hold
+ private remoteOnHold = false;
+
+ // the stats for the call at the point it ended. We can't get these after we
+ // tear the call down, so we just grab a snapshot before we stop the call.
+ // The typescript definitions have this type as 'any' :(
+ private callStatsAtEnd?: any[];
+
+ // Perfect negotiation state: https://www.w3.org/TR/webrtc/#perfect-negotiation-example
+ private makingOffer = false;
+ private ignoreOffer = false;
+
+ private responsePromiseChain?: Promise<void>;
+
+ // If candidates arrive before we've picked an opponent (which, in particular,
+ // will happen if the opponent sends candidates eagerly before the user answers
+ // the call) we buffer them up here so we can then add the ones from the party we pick
+ private remoteCandidateBuffer = new Map<string, RTCIceCandidate[]>();
+
+ private remoteAssertedIdentity?: AssertedIdentity;
+ private remoteSDPStreamMetadata?: SDPStreamMetadata;
+
+ private callLengthInterval?: ReturnType<typeof setInterval>;
+ private callStartTime?: number;
+
+ private opponentDeviceId?: string;
+ private opponentDeviceInfo?: DeviceInfo;
+ private opponentSessionId?: string;
+ public groupCallId?: string;
+
+ // Used to keep the timer for the delay before actually stopping our
+ // video track after muting (see setLocalVideoMuted)
+ private stopVideoTrackTimer?: ReturnType<typeof setTimeout>;
+ // Used to allow connection without video and audio. To establish a WebRTC connection without media, a data channel is
+ // needed. At the moment this property is true if we allow a MatrixClient with isVoipWithNoMediaAllowed = true.
+ private readonly isOnlyDataChannelAllowed: boolean;
+ private stats: GroupCallStats | undefined;
+
+ /**
+ * Construct a new Matrix Call.
+ * @param opts - Config options.
+ */
+ public constructor(opts: CallOpts) {
+ super();
+
+ this.roomId = opts.roomId;
+ this.invitee = opts.invitee;
+ this.client = opts.client;
+
+ if (!this.client.deviceId) throw new Error("Client must have a device ID to start calls");
+
+ this.forceTURN = opts.forceTURN ?? false;
+ this.ourPartyId = this.client.deviceId;
+ this.opponentDeviceId = opts.opponentDeviceId;
+ this.opponentSessionId = opts.opponentSessionId;
+ this.groupCallId = opts.groupCallId;
+ // Array of Objects with urls, username, credential keys
+ this.turnServers = opts.turnServers || [];
+ if (this.turnServers.length === 0 && this.client.isFallbackICEServerAllowed()) {
+ this.turnServers.push({
+ urls: [FALLBACK_ICE_SERVER],
+ });
+ }
+ for (const server of this.turnServers) {
+ utils.checkObjectHasKeys(server, ["urls"]);
+ }
+ this.callId = genCallID();
+ // If the Client provides calls without audio and video we need a datachannel for a webrtc connection
+ this.isOnlyDataChannelAllowed = this.client.isVoipWithNoMediaAllowed;
+ }
+
+ /**
+ * Place a voice call to this room.
+ * @throws If you have not specified a listener for 'error' events.
+ */
+ public async placeVoiceCall(): Promise<void> {
+ await this.placeCall(true, false);
+ }
+
+ /**
+ * Place a video call to this room.
+ * @throws If you have not specified a listener for 'error' events.
+ */
+ public async placeVideoCall(): Promise<void> {
+ await this.placeCall(true, true);
+ }
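[Editorial note: as the @throws notes above suggest, callers should register an 'error' listener before placing a call. A minimal sketch, not part of the diff; createNewMatrixCall is the public entry point referenced at the top of this module, and roomId is hypothetical.]

import { createNewMatrixCall, CallEvent, CallError } from "matrix-js-sdk/src/webrtc/call";
import { MatrixClient } from "matrix-js-sdk/src/client";

async function startVoiceCall(client: MatrixClient, roomId: string): Promise<void> {
    const call = createNewMatrixCall(client, roomId);
    if (!call) throw new Error("WebRTC is not supported in this environment");
    // Without an 'error' listener, emitting CallEvent.Error would throw.
    call.on(CallEvent.Error, (err: CallError) => console.error("Call failed:", err.code, err.message));
    await call.placeVoiceCall();
}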
+
+ /**
+ * Create a datachannel using this call's peer connection.
+ * @param label - A human readable label for this datachannel
+ * @param options - An object providing configuration options for the data channel.
+ */
+ public createDataChannel(label: string, options: RTCDataChannelInit | undefined): RTCDataChannel {
+ const dataChannel = this.peerConn!.createDataChannel(label, options);
+ this.emit(CallEvent.DataChannel, dataChannel, this);
+ return dataChannel;
+ }
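[Editorial note: a sketch of wiring a data channel on both ends of an established call; the label and handlers are arbitrary and not part of the diff.]

import { MatrixCall, CallEvent } from "matrix-js-sdk/src/webrtc/call";

function attachChatChannel(call: MatrixCall): void {
    // Local side: create the channel (the label "chat" is arbitrary).
    const channel = call.createDataChannel("chat", { ordered: true });
    channel.onopen = (): void => channel.send("hello");
    // Either side: channels created by the peer surface via CallEvent.DataChannel.
    call.on(CallEvent.DataChannel, (ch: RTCDataChannel) => {
        ch.onmessage = (ev: MessageEvent): void => console.log("datachannel message:", ev.data);
    });
}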
+
+ public getOpponentMember(): RoomMember | undefined {
+ return this.opponentMember;
+ }
+
+ public getOpponentDeviceId(): string | undefined {
+ return this.opponentDeviceId;
+ }
+
+ public getOpponentSessionId(): string | undefined {
+ return this.opponentSessionId;
+ }
+
+ public opponentCanBeTransferred(): boolean {
+ return Boolean(this.opponentCaps && this.opponentCaps["m.call.transferee"]);
+ }
+
+ public opponentSupportsDTMF(): boolean {
+ return Boolean(this.opponentCaps && this.opponentCaps["m.call.dtmf"]);
+ }
+
+ public getRemoteAssertedIdentity(): AssertedIdentity | undefined {
+ return this.remoteAssertedIdentity;
+ }
+
+ public get state(): CallState {
+ return this._state;
+ }
+
+ private set state(state: CallState) {
+ const oldState = this._state;
+ this._state = state;
+ this.emit(CallEvent.State, state, oldState, this);
+ }
+
+ public get type(): CallType {
+ // we may want to look for a video receiver here rather than a track to match the
+ // sender behaviour, although in practice they should be the same thing
+ return this.hasUserMediaVideoSender || this.hasRemoteUserMediaVideoTrack ? CallType.Video : CallType.Voice;
+ }
+
+ public get hasLocalUserMediaVideoTrack(): boolean {
+ return !!this.localUsermediaStream?.getVideoTracks().length;
+ }
+
+ public get hasRemoteUserMediaVideoTrack(): boolean {
+ return this.getRemoteFeeds().some((feed) => {
+ return feed.purpose === SDPStreamMetadataPurpose.Usermedia && feed.stream?.getVideoTracks().length;
+ });
+ }
+
+ public get hasLocalUserMediaAudioTrack(): boolean {
+ return !!this.localUsermediaStream?.getAudioTracks().length;
+ }
+
+ public get hasRemoteUserMediaAudioTrack(): boolean {
+ return this.getRemoteFeeds().some((feed) => {
+ return feed.purpose === SDPStreamMetadataPurpose.Usermedia && !!feed.stream?.getAudioTracks().length;
+ });
+ }
+
+ private get hasUserMediaAudioSender(): boolean {
+ return Boolean(this.transceivers.get(getTransceiverKey(SDPStreamMetadataPurpose.Usermedia, "audio"))?.sender);
+ }
+
+ private get hasUserMediaVideoSender(): boolean {
+ return Boolean(this.transceivers.get(getTransceiverKey(SDPStreamMetadataPurpose.Usermedia, "video"))?.sender);
+ }
+
+ public get localUsermediaFeed(): CallFeed | undefined {
+ return this.getLocalFeeds().find((feed) => feed.purpose === SDPStreamMetadataPurpose.Usermedia);
+ }
+
+ public get localScreensharingFeed(): CallFeed | undefined {
+ return this.getLocalFeeds().find((feed) => feed.purpose === SDPStreamMetadataPurpose.Screenshare);
+ }
+
+ public get localUsermediaStream(): MediaStream | undefined {
+ return this.localUsermediaFeed?.stream;
+ }
+
+ public get localScreensharingStream(): MediaStream | undefined {
+ return this.localScreensharingFeed?.stream;
+ }
+
+ public get remoteUsermediaFeed(): CallFeed | undefined {
+ return this.getRemoteFeeds().find((feed) => feed.purpose === SDPStreamMetadataPurpose.Usermedia);
+ }
+
+ public get remoteScreensharingFeed(): CallFeed | undefined {
+ return this.getRemoteFeeds().find((feed) => feed.purpose === SDPStreamMetadataPurpose.Screenshare);
+ }
+
+ public get remoteUsermediaStream(): MediaStream | undefined {
+ return this.remoteUsermediaFeed?.stream;
+ }
+
+ public get remoteScreensharingStream(): MediaStream | undefined {
+ return this.remoteScreensharingFeed?.stream;
+ }
+
+ private getFeedByStreamId(streamId: string): CallFeed | undefined {
+ return this.getFeeds().find((feed) => feed.stream.id === streamId);
+ }
+
+ /**
+ * Returns an array of all CallFeeds
+ * @returns CallFeeds
+ */
+ public getFeeds(): Array<CallFeed> {
+ return this.feeds;
+ }
+
+ /**
+ * Returns an array of all local CallFeeds
+ * @returns local CallFeeds
+ */
+ public getLocalFeeds(): Array<CallFeed> {
+ return this.feeds.filter((feed) => feed.isLocal());
+ }
+
+ /**
+ * Returns an array of all remote CallFeeds
+ * @returns remote CallFeeds
+ */
+ public getRemoteFeeds(): Array<CallFeed> {
+ return this.feeds.filter((feed) => !feed.isLocal());
+ }
+
+ private async initOpponentCrypto(): Promise<void> {
+ if (!this.opponentDeviceId) return;
+ if (!this.client.getUseE2eForGroupCall()) return;
+ // It's possible to want E2EE and yet not have the means to manage E2EE
+ // ourselves (for example if the client is a RoomWidgetClient)
+ if (!this.client.isCryptoEnabled()) {
+ // All we know is the device ID
+ this.opponentDeviceInfo = new DeviceInfo(this.opponentDeviceId);
+ return;
+ }
+ // if we've got to this point, we do want to init crypto, so throw if we can't
+ if (!this.client.crypto) throw new Error("Crypto is not initialised.");
+
+ const userId = this.invitee || this.getOpponentMember()?.userId;
+
+ if (!userId) throw new Error("Couldn't find opponent user ID to init crypto");
+
+ const deviceInfoMap = await this.client.crypto.deviceList.downloadKeys([userId], false);
+ this.opponentDeviceInfo = deviceInfoMap.get(userId)?.get(this.opponentDeviceId);
+ if (this.opponentDeviceInfo === undefined) {
+ throw new GroupCallUnknownDeviceError(userId);
+ }
+ }
+
+ /**
+ * Generates and returns localSDPStreamMetadata
+ * @returns localSDPStreamMetadata
+ */
+ private getLocalSDPStreamMetadata(updateStreamIds = false): SDPStreamMetadata {
+ const metadata: SDPStreamMetadata = {};
+ for (const localFeed of this.getLocalFeeds()) {
+ if (updateStreamIds) {
+ localFeed.sdpMetadataStreamId = localFeed.stream.id;
+ }
+
+ metadata[localFeed.sdpMetadataStreamId] = {
+ purpose: localFeed.purpose,
+ audio_muted: localFeed.isAudioMuted(),
+ video_muted: localFeed.isVideoMuted(),
+ };
+ }
+ return metadata;
+ }
+
+ /**
+ * Returns true if there are no incoming feeds,
+ * otherwise returns false
+ * @returns no incoming feeds
+ */
+ public noIncomingFeeds(): boolean {
+ return !this.feeds.some((feed) => !feed.isLocal());
+ }
+
+ private pushRemoteFeed(stream: MediaStream): void {
+ // Fallback to old behavior if the other side doesn't support SDPStreamMetadata
+ if (!this.opponentSupportsSDPStreamMetadata()) {
+ this.pushRemoteFeedWithoutMetadata(stream);
+ return;
+ }
+
+ const userId = this.getOpponentMember()!.userId;
+ const purpose = this.remoteSDPStreamMetadata![stream.id].purpose;
+ const audioMuted = this.remoteSDPStreamMetadata![stream.id].audio_muted;
+ const videoMuted = this.remoteSDPStreamMetadata![stream.id].video_muted;
+
+ if (!purpose) {
+ logger.warn(
+ `Call ${this.callId} pushRemoteFeed() ignoring stream because we didn't get any metadata about it (streamId=${stream.id})`,
+ );
+ return;
+ }
+
+ if (this.getFeedByStreamId(stream.id)) {
+ logger.warn(
+ `Call ${this.callId} pushRemoteFeed() ignoring stream because we already have a feed for it (streamId=${stream.id})`,
+ );
+ return;
+ }
+
+ this.feeds.push(
+ new CallFeed({
+ client: this.client,
+ call: this,
+ roomId: this.roomId,
+ userId,
+ deviceId: this.getOpponentDeviceId(),
+ stream,
+ purpose,
+ audioMuted,
+ videoMuted,
+ }),
+ );
+
+ this.emit(CallEvent.FeedsChanged, this.feeds, this);
+
+ logger.info(
+ `Call ${this.callId} pushRemoteFeed() pushed stream (streamId=${stream.id}, active=${stream.active}, purpose=${purpose})`,
+ );
+ }
+
+ /**
+ * This method is used ONLY if the other client doesn't support sending SDPStreamMetadata
+ */
+ private pushRemoteFeedWithoutMetadata(stream: MediaStream): void {
+ const userId = this.getOpponentMember()!.userId;
+ // We can guess the purpose here since the other client can only send one stream
+ const purpose = SDPStreamMetadataPurpose.Usermedia;
+ const oldRemoteStream = this.feeds.find((feed) => !feed.isLocal())?.stream;
+
+ // Note that we check by ID and always set the remote stream: Chrome appears
+ // to make new stream objects when transceiver directionality is changed and the 'active'
+ // status of streams change - Dave
+ // If we already have a stream, check this stream has the same id
+ if (oldRemoteStream && stream.id !== oldRemoteStream.id) {
+ logger.warn(
+ `Call ${this.callId} pushRemoteFeedWithoutMetadata() ignoring new stream because we already have stream (streamId=${stream.id})`,
+ );
+ return;
+ }
+
+ if (this.getFeedByStreamId(stream.id)) {
+ logger.warn(
+ `Call ${this.callId} pushRemoteFeedWithoutMetadata() ignoring stream because we already have a feed for it (streamId=${stream.id})`,
+ );
+ return;
+ }
+
+ this.feeds.push(
+ new CallFeed({
+ client: this.client,
+ call: this,
+ roomId: this.roomId,
+ audioMuted: false,
+ videoMuted: false,
+ userId,
+ deviceId: this.getOpponentDeviceId(),
+ stream,
+ purpose,
+ }),
+ );
+
+ this.emit(CallEvent.FeedsChanged, this.feeds, this);
+
+ logger.info(
+ `Call ${this.callId} pushRemoteFeedWithoutMetadata() pushed stream (streamId=${stream.id}, active=${stream.active})`,
+ );
+ }
+
+ private pushNewLocalFeed(stream: MediaStream, purpose: SDPStreamMetadataPurpose, addToPeerConnection = true): void {
+ const userId = this.client.getUserId()!;
+
+ // Tracks don't always start off enabled, eg. chrome will give a disabled
+ // audio track if you ask for user media audio and already had one that
+ // you'd set to disabled (presumably because it clones them internally).
+ setTracksEnabled(stream.getAudioTracks(), true);
+ setTracksEnabled(stream.getVideoTracks(), true);
+
+ if (this.getFeedByStreamId(stream.id)) {
+ logger.warn(
+ `Call ${this.callId} pushNewLocalFeed() ignoring stream because we already have a feed for it (streamId=${stream.id})`,
+ );
+ return;
+ }
+
+ this.pushLocalFeed(
+ new CallFeed({
+ client: this.client,
+ roomId: this.roomId,
+ audioMuted: false,
+ videoMuted: false,
+ userId,
+ deviceId: this.getOpponentDeviceId(),
+ stream,
+ purpose,
+ }),
+ addToPeerConnection,
+ );
+ }
+
+ /**
+ * Pushes supplied feed to the call
+ * @param callFeed - to push
+ * @param addToPeerConnection - whether to add the tracks to the peer connection
+ */
+ public pushLocalFeed(callFeed: CallFeed, addToPeerConnection = true): void {
+ if (this.feeds.some((feed) => callFeed.stream.id === feed.stream.id)) {
+ logger.info(
+ `Call ${this.callId} pushLocalFeed() ignoring duplicate local stream (streamId=${callFeed.stream.id})`,
+ );
+ return;
+ }
+
+ this.feeds.push(callFeed);
+
+ if (addToPeerConnection) {
+ for (const track of callFeed.stream.getTracks()) {
+ logger.info(
+ `Call ${this.callId} pushLocalFeed() adding track to peer connection (id=${track.id}, kind=${track.kind}, streamId=${callFeed.stream.id}, streamPurpose=${callFeed.purpose}, enabled=${track.enabled})`,
+ );
+
+ const tKey = getTransceiverKey(callFeed.purpose, track.kind);
+ if (this.transceivers.has(tKey)) {
+ // we already have a sender, so we re-use it. We try to re-use transceivers as much
+ // as possible because they can't be removed once added, so otherwise they just
+ // accumulate which makes the SDP very large very quickly: in fact it only takes
+ // about 6 video tracks to exceed the maximum size of an Olm-encrypted
+ // Matrix event.
+ const transceiver = this.transceivers.get(tKey)!;
+
+ transceiver.sender.replaceTrack(track);
+ // set the direction to indicate we're going to start sending again
+ // (this will trigger the re-negotiation)
+ transceiver.direction = transceiver.direction === "inactive" ? "sendonly" : "sendrecv";
+ } else {
+ // create a new one. We need to use addTrack rather than addTransceiver for this because firefox
+ // doesn't yet implement RTCRTPSender.setStreams()
+ // (https://bugzilla.mozilla.org/show_bug.cgi?id=1510802) so we'd have no way to group the
+ // two tracks together into a stream.
+ const newSender = this.peerConn!.addTrack(track, callFeed.stream);
+
+ // now go & fish for the new transceiver
+ const newTransceiver = this.peerConn!.getTransceivers().find((t) => t.sender === newSender);
+ if (newTransceiver) {
+ this.transceivers.set(tKey, newTransceiver);
+ } else {
+ logger.warn(
+ `Call ${this.callId} pushLocalFeed() didn't find a matching transceiver after adding track!`,
+ );
+ }
+ }
+ }
+ }
+
+ logger.info(
+ `Call ${this.callId} pushLocalFeed() pushed stream (id=${callFeed.stream.id}, active=${callFeed.stream.active}, purpose=${callFeed.purpose})`,
+ );
+
+ this.emit(CallEvent.FeedsChanged, this.feeds, this);
+ }
+
+ /**
+ * Removes local call feed from the call and its tracks from the peer
+ * connection
+ * @param callFeed - to remove
+ */
+ public removeLocalFeed(callFeed: CallFeed): void {
+ const audioTransceiverKey = getTransceiverKey(callFeed.purpose, "audio");
+ const videoTransceiverKey = getTransceiverKey(callFeed.purpose, "video");
+
+ for (const transceiverKey of [audioTransceiverKey, videoTransceiverKey]) {
+ // this is slightly mixing the track and transceiver API but is basically just shorthand.
+ // There is no way to actually remove a transceiver, so this just sets it to inactive
+ // (or recvonly) and replaces the source with nothing.
+ if (this.transceivers.has(transceiverKey)) {
+ const transceiver = this.transceivers.get(transceiverKey)!;
+ if (transceiver.sender) this.peerConn!.removeTrack(transceiver.sender);
+ }
+ }
+
+ if (callFeed.purpose === SDPStreamMetadataPurpose.Screenshare) {
+ this.client.getMediaHandler().stopScreensharingStream(callFeed.stream);
+ }
+
+ this.deleteFeed(callFeed);
+ }
+
+ private deleteAllFeeds(): void {
+ for (const feed of this.feeds) {
+ if (!feed.isLocal() || !this.groupCallId) {
+ feed.dispose();
+ }
+ }
+
+ this.feeds = [];
+ this.emit(CallEvent.FeedsChanged, this.feeds, this);
+ }
+
+ private deleteFeedByStream(stream: MediaStream): void {
+ const feed = this.getFeedByStreamId(stream.id);
+ if (!feed) {
+ logger.warn(
+ `Call ${this.callId} deleteFeedByStream() didn't find the feed to delete (streamId=${stream.id})`,
+ );
+ return;
+ }
+ this.deleteFeed(feed);
+ }
+
+ private deleteFeed(feed: CallFeed): void {
+ feed.dispose();
+ this.feeds.splice(this.feeds.indexOf(feed), 1);
+ this.emit(CallEvent.FeedsChanged, this.feeds, this);
+ }
+
+ // The typescript definitions have this type as 'any' :(
+ public async getCurrentCallStats(): Promise<any[] | undefined> {
+ if (this.callHasEnded()) {
+ return this.callStatsAtEnd;
+ }
+
+ return this.collectCallStats();
+ }
+
+ private async collectCallStats(): Promise<any[] | undefined> {
+ // This happens when the call fails before it starts.
+ // For example when we fail to get capture sources
+ if (!this.peerConn) return;
+
+ const statsReport = await this.peerConn.getStats();
+ const stats: any[] = [];
+ statsReport.forEach((item) => {
+ stats.push(item);
+ });
+
+ return stats;
+ }
+
+ /**
+ * Configure this call from an invite event. Used by MatrixClient.
+ * @param event - The m.call.invite event
+ */
+ public async initWithInvite(event: MatrixEvent): Promise<void> {
+ const invite = event.getContent<MCallInviteNegotiate>();
+ this.direction = CallDirection.Inbound;
+
+ // make sure we have valid turn creds. Unless something's gone wrong, it should
+ // poll and keep the credentials valid so this should be instant.
+ const haveTurnCreds = await this.client.checkTurnServers();
+ if (!haveTurnCreds) {
+ logger.warn(
+ `Call ${this.callId} initWithInvite() failed to get TURN credentials! Proceeding with call anyway...`,
+ );
+ }
+
+ const sdpStreamMetadata = invite[SDPStreamMetadataKey];
+ if (sdpStreamMetadata) {
+ this.updateRemoteSDPStreamMetadata(sdpStreamMetadata);
+ } else {
+ logger.debug(
+ `Call ${this.callId} initWithInvite() did not get any SDPStreamMetadata! Can not send/receive multiple streams`,
+ );
+ }
+
+ this.peerConn = this.createPeerConnection();
+ // we must set the party ID before await-ing on anything: the call event
+ // handler will start giving us more call events (eg. candidates) so if
+ // we haven't set the party ID, we'll ignore them.
+ this.chooseOpponent(event);
+ await this.initOpponentCrypto();
+ try {
+ await this.peerConn.setRemoteDescription(invite.offer);
+ await this.addBufferedIceCandidates();
+ } catch (e) {
+ logger.debug(`Call ${this.callId} initWithInvite() failed to set remote description`, e);
+ this.terminate(CallParty.Local, CallErrorCode.SetRemoteDescription, false);
+ return;
+ }
+
+ const remoteStream = this.feeds.find((feed) => !feed.isLocal())?.stream;
+
+ // According to previous comments in this file, firefox at some point did not
+ // add streams until media started arriving on them. Testing latest firefox
+ // (81 at time of writing), this is no longer a problem, so let's do it the correct way.
+ //
+ // For example, in the case of a media-less WebRTC connection (such as a screen-share-only call) we have to allow
+ // a connection without remote media. In that case we always use a data channel; at the moment, with this setup,
+ // a data channel is the only media we allow in the WebRTC connection.
+ if (!this.isOnlyDataChannelAllowed && (!remoteStream || remoteStream.getTracks().length === 0)) {
+ logger.error(
+ `Call ${this.callId} initWithInvite() no remote stream or no tracks after setting remote description!`,
+ );
+ this.terminate(CallParty.Local, CallErrorCode.SetRemoteDescription, false);
+ return;
+ }
+
+ this.state = CallState.Ringing;
+
+ if (event.getLocalAge()) {
+ // Time out the call if it's ringing for too long
+ const ringingTimer = setTimeout(() => {
+ if (this.state == CallState.Ringing) {
+ logger.debug(`Call ${this.callId} initWithInvite() invite has expired. Hanging up.`);
+ this.hangupParty = CallParty.Remote; // effectively
+ this.state = CallState.Ended;
+ this.stopAllMedia();
+ if (this.peerConn!.signalingState != "closed") {
+ this.peerConn!.close();
+ }
+ this.stats?.removeStatsReportGatherer(this.callId);
+ this.emit(CallEvent.Hangup, this);
+ }
+ }, invite.lifetime - event.getLocalAge());
+
+ const onState = (state: CallState): void => {
+ if (state !== CallState.Ringing) {
+ clearTimeout(ringingTimer);
+ this.off(CallEvent.State, onState);
+ }
+ };
+ this.on(CallEvent.State, onState);
+ }
+ }
+
+ /**
+ * Configure this call from a hangup or reject event. Used by MatrixClient.
+ * @param event - The m.call.hangup event
+ */
+ public initWithHangup(event: MatrixEvent): void {
+ // perverse as it may seem, sometimes we want to instantiate a call with a
+ // hangup message (because when getting the state of the room on load, events
+ // come in reverse order and we want to remember that a call has been hung up)
+ this.state = CallState.Ended;
+ }
+
+ private shouldAnswerWithMediaType(
+ wantedValue: boolean | undefined,
+ valueOfTheOtherSide: boolean,
+ type: "audio" | "video",
+ ): boolean {
+ if (wantedValue && !valueOfTheOtherSide) {
+ // TODO: Figure out how to do this
+ logger.warn(
+ `Call ${this.callId} shouldAnswerWithMediaType() unable to answer with ${type} because the other side isn't sending it either.`,
+ );
+ return false;
+ } else if (
+ !utils.isNullOrUndefined(wantedValue) &&
+ wantedValue !== valueOfTheOtherSide &&
+ !this.opponentSupportsSDPStreamMetadata()
+ ) {
+ logger.warn(
+ `Call ${this.callId} shouldAnswerWithMediaType() unable to answer with ${type}=${wantedValue} because the other side doesn't support it. Answering with ${type}=${valueOfTheOtherSide}.`,
+ );
+ return valueOfTheOtherSide!;
+ }
+ return wantedValue ?? valueOfTheOtherSide!;
+ }
+
+ /**
+ * Answer a call.
+ */
+ public async answer(audio?: boolean, video?: boolean): Promise<void> {
+ if (this.inviteOrAnswerSent) return;
+ // TODO: Figure out how to do this
+ if (audio === false && video === false) throw new Error("You CANNOT answer a call without media");
+
+ if (!this.localUsermediaStream && !this.waitForLocalAVStream) {
+ const prevState = this.state;
+ const answerWithAudio = this.shouldAnswerWithMediaType(audio, this.hasRemoteUserMediaAudioTrack, "audio");
+ const answerWithVideo = this.shouldAnswerWithMediaType(video, this.hasRemoteUserMediaVideoTrack, "video");
+
+ this.state = CallState.WaitLocalMedia;
+ this.waitForLocalAVStream = true;
+
+ try {
+ const stream = await this.client.getMediaHandler().getUserMediaStream(answerWithAudio, answerWithVideo);
+ this.waitForLocalAVStream = false;
+ const usermediaFeed = new CallFeed({
+ client: this.client,
+ roomId: this.roomId,
+ userId: this.client.getUserId()!,
+ deviceId: this.client.getDeviceId() ?? undefined,
+ stream,
+ purpose: SDPStreamMetadataPurpose.Usermedia,
+ audioMuted: false,
+ videoMuted: false,
+ });
+
+ const feeds = [usermediaFeed];
+
+ if (this.localScreensharingFeed) {
+ feeds.push(this.localScreensharingFeed);
+ }
+
+ this.answerWithCallFeeds(feeds);
+ } catch (e) {
+ if (answerWithVideo) {
+ // Try to answer without video
+ logger.warn(
+ `Call ${this.callId} answer() failed to getUserMedia(), trying to getUserMedia() without video`,
+ );
+ this.state = prevState;
+ this.waitForLocalAVStream = false;
+ await this.answer(answerWithAudio, false);
+ } else {
+ this.getUserMediaFailed(<Error>e);
+ return;
+ }
+ }
+ } else if (this.waitForLocalAVStream) {
+ this.state = CallState.WaitLocalMedia;
+ }
+ }
+
+ public answerWithCallFeeds(callFeeds: CallFeed[]): void {
+ if (this.inviteOrAnswerSent) return;
+
+ this.queueGotCallFeedsForAnswer(callFeeds);
+ }
+
+ /**
+ * Replace this call with a new call, e.g. for glare resolution. Used by
+ * MatrixClient.
+ * @param newCall - The new call.
+ */
+ public replacedBy(newCall: MatrixCall): void {
+ logger.debug(`Call ${this.callId} replacedBy() running (newCallId=${newCall.callId})`);
+ if (this.state === CallState.WaitLocalMedia) {
+ logger.debug(
+ `Call ${this.callId} replacedBy() telling new call to wait for local media (newCallId=${newCall.callId})`,
+ );
+ newCall.waitForLocalAVStream = true;
+ } else if ([CallState.CreateOffer, CallState.InviteSent].includes(this.state)) {
+ if (newCall.direction === CallDirection.Outbound) {
+ newCall.queueGotCallFeedsForAnswer([]);
+ } else {
+ logger.debug(
+ `Call ${this.callId} replacedBy() handing local stream to new call(newCallId=${newCall.callId})`,
+ );
+ newCall.queueGotCallFeedsForAnswer(this.getLocalFeeds().map((feed) => feed.clone()));
+ }
+ }
+ this.successor = newCall;
+ this.emit(CallEvent.Replaced, newCall, this);
+ this.hangup(CallErrorCode.Replaced, true);
+ }
+
+ /**
+ * Hangup a call.
+ * @param reason - The reason why the call is being hung up.
+ * @param suppressEvent - True to suppress emitting an event.
+ */
+ public hangup(reason: CallErrorCode, suppressEvent: boolean): void {
+ if (this.callHasEnded()) return;
+
+ logger.debug(`Call ${this.callId} hangup() ending call (reason=${reason})`);
+ this.terminate(CallParty.Local, reason, !suppressEvent);
+ // We don't want to send hangup here if we didn't even get to sending an invite
+ if ([CallState.Fledgling, CallState.WaitLocalMedia].includes(this.state)) return;
+ const content: IContent = {};
+ // Don't send UserHangup reason to older clients
+ if ((this.opponentVersion && this.opponentVersion !== 0) || reason !== CallErrorCode.UserHangup) {
+ content["reason"] = reason;
+ }
+ this.sendVoipEvent(EventType.CallHangup, content);
+ }
+
+ /**
+ * Reject a call
+ * This used to be done by calling hangup, but is a separate method and protocol
+ * event as of MSC2746.
+ */
+ public reject(): void {
+ if (this.state !== CallState.Ringing) {
+ throw Error("Call must be in 'ringing' state to reject!");
+ }
+
+ if (this.opponentVersion === 0) {
+ logger.info(
+ `Call ${this.callId} reject() opponent version is less than 1: sending hangup instead of reject (opponentVersion=${this.opponentVersion})`,
+ );
+ this.hangup(CallErrorCode.UserHangup, true);
+ return;
+ }
+
+ logger.debug("Rejecting call: " + this.callId);
+ this.terminate(CallParty.Local, CallErrorCode.UserHangup, true);
+ this.sendVoipEvent(EventType.CallReject, {});
+ }
+
+ /**
+ * Adds an audio and/or video track - upgrades the call
+ * @param audio - should add an audio track
+ * @param video - should add a video track
+ */
+ private async upgradeCall(audio: boolean, video: boolean): Promise<void> {
+ // We don't do call downgrades
+ if (!audio && !video) return;
+ if (!this.opponentSupportsSDPStreamMetadata()) return;
+
+ try {
+ logger.debug(`Call ${this.callId} upgradeCall() upgrading call (audio=${audio}, video=${video})`);
+ const getAudio = audio || this.hasLocalUserMediaAudioTrack;
+ const getVideo = video || this.hasLocalUserMediaVideoTrack;
+
+ // updateLocalUsermediaStream() will take the tracks, use them as
+ // replacement and throw the stream away, so it isn't reusable
+ const stream = await this.client.getMediaHandler().getUserMediaStream(getAudio, getVideo, false);
+ await this.updateLocalUsermediaStream(stream, audio, video);
+ } catch (error) {
+ logger.error(`Call ${this.callId} upgradeCall() failed to upgrade the call`, error);
+ this.emit(
+ CallEvent.Error,
+ new CallError(CallErrorCode.NoUserMedia, "Failed to get camera access: ", <Error>error),
+ this,
+ );
+ }
+ }
+
+ /**
+ * Returns true if this.remoteSDPStreamMetadata is defined, otherwise returns false
+ * @returns can screenshare
+ */
+ public opponentSupportsSDPStreamMetadata(): boolean {
+ return Boolean(this.remoteSDPStreamMetadata);
+ }
+
+ /**
+ * If there is a screensharing stream returns true, otherwise returns false
+ * @returns is screensharing
+ */
+ public isScreensharing(): boolean {
+ return Boolean(this.localScreensharingStream);
+ }
+
+ /**
+ * Starts/stops screensharing
+ * @param enabled - the desired screensharing state
+ * @param opts - optional screensharing options, e.g. the id of the desktop capturer source to use
+ * @returns new screensharing state
+ */
+ public async setScreensharingEnabled(enabled: boolean, opts?: IScreensharingOpts): Promise<boolean> {
+ // Skip if there is nothing to do
+ if (enabled && this.isScreensharing()) {
+ logger.warn(
+ `Call ${this.callId} setScreensharingEnabled() there is already a screensharing stream - there is nothing to do!`,
+ );
+ return true;
+ } else if (!enabled && !this.isScreensharing()) {
+ logger.warn(
+ `Call ${this.callId} setScreensharingEnabled() there already isn't a screensharing stream - there is nothing to do!`,
+ );
+ return false;
+ }
+
+ // Fallback to replaceTrack()
+ if (!this.opponentSupportsSDPStreamMetadata()) {
+ return this.setScreensharingEnabledWithoutMetadataSupport(enabled, opts);
+ }
+
+ logger.debug(`Call ${this.callId} setScreensharingEnabled() running (enabled=${enabled})`);
+ if (enabled) {
+ try {
+ const stream = await this.client.getMediaHandler().getScreensharingStream(opts);
+ if (!stream) return false;
+ this.pushNewLocalFeed(stream, SDPStreamMetadataPurpose.Screenshare);
+ return true;
+ } catch (err) {
+ logger.error(`Call ${this.callId} setScreensharingEnabled() failed to get screen-sharing stream:`, err);
+ return false;
+ }
+ } else {
+ const audioTransceiver = this.transceivers.get(
+ getTransceiverKey(SDPStreamMetadataPurpose.Screenshare, "audio"),
+ );
+ const videoTransceiver = this.transceivers.get(
+ getTransceiverKey(SDPStreamMetadataPurpose.Screenshare, "video"),
+ );
+
+ for (const transceiver of [audioTransceiver, videoTransceiver]) {
+ // this is slightly mixing the track and transceiver API but is basically just shorthand
+ // for removing the sender.
+ if (transceiver && transceiver.sender) this.peerConn!.removeTrack(transceiver.sender);
+ }
+
+ this.client.getMediaHandler().stopScreensharingStream(this.localScreensharingStream!);
+ this.deleteFeedByStream(this.localScreensharingStream!);
+ return false;
+ }
+ }
+
+ /**
+ * Starts/stops screensharing
+ * Should be used ONLY if the opponent doesn't support SDPStreamMetadata
+ * @param enabled - the desired screensharing state
+ * @param opts - optional screensharing options, e.g. the id of the desktop capturer source to use
+ * @returns new screensharing state
+ */
+ private async setScreensharingEnabledWithoutMetadataSupport(
+ enabled: boolean,
+ opts?: IScreensharingOpts,
+ ): Promise<boolean> {
+ logger.debug(
+ `Call ${this.callId} setScreensharingEnabledWithoutMetadataSupport() running (enabled=${enabled})`,
+ );
+ if (enabled) {
+ try {
+ const stream = await this.client.getMediaHandler().getScreensharingStream(opts);
+ if (!stream) return false;
+
+ const track = stream.getTracks().find((track) => track.kind === "video");
+
+ const sender = this.transceivers.get(
+ getTransceiverKey(SDPStreamMetadataPurpose.Usermedia, "video"),
+ )?.sender;
+
+ sender?.replaceTrack(track ?? null);
+
+ this.pushNewLocalFeed(stream, SDPStreamMetadataPurpose.Screenshare, false);
+
+ return true;
+ } catch (err) {
+ logger.error(
+ `Call ${this.callId} setScreensharingEnabledWithoutMetadataSupport() failed to get screen-sharing stream:`,
+ err,
+ );
+ return false;
+ }
+ } else {
+ const track = this.localUsermediaStream?.getTracks().find((track) => track.kind === "video");
+ const sender = this.transceivers.get(
+ getTransceiverKey(SDPStreamMetadataPurpose.Usermedia, "video"),
+ )?.sender;
+ sender?.replaceTrack(track ?? null);
+
+ this.client.getMediaHandler().stopScreensharingStream(this.localScreensharingStream!);
+ this.deleteFeedByStream(this.localScreensharingStream!);
+
+ return false;
+ }
+ }
+
+ /**
+ * Replaces/adds the tracks from the passed stream to the localUsermediaStream
+ * @param stream - to use a replacement for the local usermedia stream
+ */
+ public async updateLocalUsermediaStream(
+ stream: MediaStream,
+ forceAudio = false,
+ forceVideo = false,
+ ): Promise<void> {
+ const callFeed = this.localUsermediaFeed!;
+ const audioEnabled = forceAudio || (!callFeed.isAudioMuted() && !this.remoteOnHold);
+ const videoEnabled = forceVideo || (!callFeed.isVideoMuted() && !this.remoteOnHold);
+ logger.log(
+ `Call ${this.callId} updateLocalUsermediaStream() running (streamId=${stream.id}, audio=${audioEnabled}, video=${videoEnabled})`,
+ );
+ setTracksEnabled(stream.getAudioTracks(), audioEnabled);
+ setTracksEnabled(stream.getVideoTracks(), videoEnabled);
+
+ // We want to keep the same stream id, so we replace the tracks rather
+ // than the whole stream.
+
+ // Firstly, we replace the tracks in our localUsermediaStream.
+ for (const track of this.localUsermediaStream!.getTracks()) {
+ this.localUsermediaStream!.removeTrack(track);
+ track.stop();
+ }
+ for (const track of stream.getTracks()) {
+ this.localUsermediaStream!.addTrack(track);
+ }
+
+ // Then replace the old tracks, if possible.
+ for (const track of stream.getTracks()) {
+ const tKey = getTransceiverKey(SDPStreamMetadataPurpose.Usermedia, track.kind);
+
+ const transceiver = this.transceivers.get(tKey);
+ const oldSender = transceiver?.sender;
+ let added = false;
+ if (oldSender) {
+ try {
+ logger.info(
+ `Call ${this.callId} updateLocalUsermediaStream() replacing track (id=${track.id}, kind=${track.kind}, streamId=${stream.id}, streamPurpose=${callFeed.purpose})`,
+ );
+ await oldSender.replaceTrack(track);
+ // Set the direction to indicate we're going to be sending.
+ // This is only necessary in the cases where we're upgrading
+ // the call to video after downgrading it.
+ transceiver.direction = transceiver.direction === "inactive" ? "sendonly" : "sendrecv";
+ added = true;
+ } catch (error) {
+ logger.warn(
+ `Call ${this.callId} updateLocalUsermediaStream() replaceTrack failed: adding new transceiver instead`,
+ error,
+ );
+ }
+ }
+
+ if (!added) {
+ logger.info(
+ `Call ${this.callId} updateLocalUsermediaStream() adding track to peer connection (id=${track.id}, kind=${track.kind}, streamId=${stream.id}, streamPurpose=${callFeed.purpose})`,
+ );
+
+ const newSender = this.peerConn!.addTrack(track, this.localUsermediaStream!);
+ const newTransceiver = this.peerConn!.getTransceivers().find((t) => t.sender === newSender);
+ if (newTransceiver) {
+ this.transceivers.set(tKey, newTransceiver);
+ } else {
+ logger.warn(
+ `Call ${this.callId} updateLocalUsermediaStream() couldn't find matching transceiver for newly added track!`,
+ );
+ }
+ }
+ }
+ }
+
+ /**
+ * Set whether our outbound video should be muted or not.
+ * @param muted - True to mute the outbound video.
+ * @returns the new mute state
+ */
+ public async setLocalVideoMuted(muted: boolean): Promise<boolean> {
+ logger.log(`Call ${this.callId} setLocalVideoMuted() running ${muted}`);
+
+ // if we were still thinking about stopping and removing the video
+ // track: don't, because we want it back.
+ if (!muted && this.stopVideoTrackTimer !== undefined) {
+ clearTimeout(this.stopVideoTrackTimer);
+ this.stopVideoTrackTimer = undefined;
+ }
+
+ if (!(await this.client.getMediaHandler().hasVideoDevice())) {
+ return this.isLocalVideoMuted();
+ }
+
+ if (!this.hasUserMediaVideoSender && !muted) {
+ this.localUsermediaFeed?.setAudioVideoMuted(null, muted);
+ await this.upgradeCall(false, true);
+ return this.isLocalVideoMuted();
+ }
+
+ // we may not have a video track - if not, re-request usermedia
+ if (!muted && this.localUsermediaStream!.getVideoTracks().length === 0) {
+ const stream = await this.client.getMediaHandler().getUserMediaStream(true, true);
+ await this.updateLocalUsermediaStream(stream);
+ }
+
+ this.localUsermediaFeed?.setAudioVideoMuted(null, muted);
+
+ this.updateMuteStatus();
+ await this.sendMetadataUpdate();
+
+ // if we're muting video, set a timeout to stop & remove the video track so we release
+ // the camera. We wait a short time to do this because when we disable a track, WebRTC
+ // will send black video for it. If we just stop and remove it straight away, the video
+ // will just freeze which means that when we unmute video, the other side will briefly
+ // get a static frame of us from before we muted. This way, the still frame is just black.
+ // A very small delay is not always enough so the theory here is that it needs to be long
+ // enough for WebRTC to encode a frame: 120ms should be long enough even if we're only
+ // doing 10fps.
+ if (muted) {
+ this.stopVideoTrackTimer = setTimeout(() => {
+ for (const t of this.localUsermediaStream!.getVideoTracks()) {
+ t.stop();
+ this.localUsermediaStream!.removeTrack(t);
+ }
+ }, 120);
+ }
+
+ return this.isLocalVideoMuted();
+ }
+
+ /**
+ * Check if local video is muted.
+ *
+ * If there are multiple video tracks, <i>all</i> of the tracks need to be muted
+ * for this to return true. This means if there are no video tracks, this will
+ * return true.
+ * @returns True if the local preview video is muted, else false
+ * (including if the call is not set up yet).
+ */
+ public isLocalVideoMuted(): boolean {
+ return this.localUsermediaFeed?.isVideoMuted() ?? false;
+ }
+
+ /**
+ * Set whether the microphone should be muted or not.
+ * @param muted - True to mute the mic.
+ * @returns the new mute state
+ */
+ public async setMicrophoneMuted(muted: boolean): Promise<boolean> {
+ logger.log(`Call ${this.callId} setMicrophoneMuted() running ${muted}`);
+ if (!(await this.client.getMediaHandler().hasAudioDevice())) {
+ return this.isMicrophoneMuted();
+ }
+
+ if (!muted && (!this.hasUserMediaAudioSender || !this.hasLocalUserMediaAudioTrack)) {
+ await this.upgradeCall(true, false);
+ return this.isMicrophoneMuted();
+ }
+ this.localUsermediaFeed?.setAudioVideoMuted(muted, null);
+ this.updateMuteStatus();
+ await this.sendMetadataUpdate();
+ return this.isMicrophoneMuted();
+ }
+
+ /**
+ * Check if the microphone is muted.
+ *
+ * If there are multiple audio tracks, <i>all</i> of the tracks need to be muted
+ * for this to return true. This means if there are no audio tracks, this will
+ * return true.
+ * @returns True if the mic is muted, else false (including if the call
+ * is not set up yet).
+ */
+ public isMicrophoneMuted(): boolean {
+ return this.localUsermediaFeed?.isAudioMuted() ?? false;
+ }
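[Editorial note: a small sketch of a mute toggle built on the two methods above, not part of the diff.]

import { MatrixCall } from "matrix-js-sdk/src/webrtc/call";

async function toggleMicrophone(call: MatrixCall): Promise<boolean> {
    // setMicrophoneMuted resolves to the state that actually took effect,
    // which can differ from the request (e.g. when no audio device is available).
    return call.setMicrophoneMuted(!call.isMicrophoneMuted());
}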
+
+ /**
+ * @returns true if we have put the party on the other side of the call on hold
+ * (that is, we are signalling to them that we are not listening)
+ */
+ public isRemoteOnHold(): boolean {
+ return this.remoteOnHold;
+ }
+
+ public setRemoteOnHold(onHold: boolean): void {
+ if (this.isRemoteOnHold() === onHold) return;
+ this.remoteOnHold = onHold;
+
+ for (const transceiver of this.peerConn!.getTransceivers()) {
+ // We don't send hold music or anything so we're not actually
+ // sending anything, but sendonly is fairly standard for hold and
+ // it makes it a lot easier to figure out who's put who on hold.
+ transceiver.direction = onHold ? "sendonly" : "sendrecv";
+ }
+ this.updateMuteStatus();
+ this.sendMetadataUpdate();
+
+ this.emit(CallEvent.RemoteHoldUnhold, this.remoteOnHold, this);
+ }
+
+ /**
+ * Indicates whether we are 'on hold' to the remote party (ie. if true,
+ * they cannot hear us).
+ * @returns true if the other party has put us on hold
+ */
+ public isLocalOnHold(): boolean {
+ if (this.state !== CallState.Connected) return false;
+
+ let callOnHold = true;
+
+ // We consider a call to be on hold only if *all* the tracks are on hold
+ // (is this the right thing to do?)
+ for (const transceiver of this.peerConn!.getTransceivers()) {
+ const trackOnHold = ["inactive", "recvonly"].includes(transceiver.currentDirection!);
+
+ if (!trackOnHold) callOnHold = false;
+ }
+
+ return callOnHold;
+ }
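+
+ /*
+ * A sketch of the hold flow, assuming a connected `call` (illustration only):
+ * ```
+ * call.setRemoteOnHold(true); // our transceivers switch to sendonly and renegotiation runs
+ * call.isRemoteOnHold();      // true immediately
+ * ```
+ * On the peer's side, once the renegotiated directions apply, their
+ * isLocalOnHold() starts returning true because every transceiver is then
+ * inactive or recvonly from their point of view.
+ */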
+
+ /**
+ * Sends a DTMF digit to the other party
+ * @param digit - The digit (nb. string - '#' and '*' are dtmf too)
+ */
+ public sendDtmfDigit(digit: string): void {
+ for (const sender of this.peerConn!.getSenders()) {
+ if (sender.track?.kind === "audio" && sender.dtmf) {
+ sender.dtmf.insertDTMF(digit);
+ return;
+ }
+ }
+
+ throw new Error("Unable to find a track to send DTMF on");
+ }
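+
+ /*
+ * Example of sending a tone from application code (assumes an active call with
+ * an audio sender). Note that RTCDTMFSender.insertDTMF() replaces any tones
+ * still queued, so sending a long sequence digit-by-digit in a tight loop may
+ * drop earlier digits:
+ * ```
+ * call.sendDtmfDigit("#"); // '#' and '*' are valid tones as well as 0-9
+ * ```
+ */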
+
+ private updateMuteStatus(): void {
+ const micShouldBeMuted = this.isMicrophoneMuted() || this.remoteOnHold;
+ const vidShouldBeMuted = this.isLocalVideoMuted() || this.remoteOnHold;
+
+ logger.log(
+ `Call ${this.callId} updateMuteStatus stream ${
+ this.localUsermediaStream!.id
+ } micShouldBeMuted ${micShouldBeMuted} vidShouldBeMuted ${vidShouldBeMuted}`,
+ );
+
+ setTracksEnabled(this.localUsermediaStream!.getAudioTracks(), !micShouldBeMuted);
+ setTracksEnabled(this.localUsermediaStream!.getVideoTracks(), !vidShouldBeMuted);
+ }
+
+ public async sendMetadataUpdate(): Promise<void> {
+ await this.sendVoipEvent(EventType.CallSDPStreamMetadataChangedPrefix, {
+ [SDPStreamMetadataKey]: this.getLocalSDPStreamMetadata(),
+ });
+ }
+
+ private gotCallFeedsForInvite(callFeeds: CallFeed[], requestScreenshareFeed = false): void {
+ if (this.successor) {
+ this.successor.queueGotCallFeedsForAnswer(callFeeds);
+ return;
+ }
+ if (this.callHasEnded()) {
+ this.stopAllMedia();
+ return;
+ }
+
+ for (const feed of callFeeds) {
+ this.pushLocalFeed(feed);
+ }
+
+ if (requestScreenshareFeed) {
+ this.peerConn!.addTransceiver("video", {
+ direction: "recvonly",
+ });
+ }
+
+ this.state = CallState.CreateOffer;
+
+ logger.debug(`Call ${this.callId} gotCallFeedsForInvite() running`);
+ // Now we wait for the negotiationneeded event
+ }
+
+ private async sendAnswer(): Promise<void> {
+ const answerContent = {
+ answer: {
+ sdp: this.peerConn!.localDescription!.sdp,
+ // type is now deprecated as of Matrix VoIP v1, but
+ // required to still be sent for backwards compat
+ type: this.peerConn!.localDescription!.type,
+ },
+ [SDPStreamMetadataKey]: this.getLocalSDPStreamMetadata(true),
+ } as MCallAnswer;
+
+ answerContent.capabilities = {
+ "m.call.transferee": this.client.supportsCallTransfer,
+ "m.call.dtmf": false,
+ };
+
+ // We have just taken the local description from the peerConn which will
+ // contain all the local candidates added so far, so we can discard any candidates
+ // we had queued up because they'll be in the answer.
+ const discardCount = this.discardDuplicateCandidates();
+ logger.info(
+ `Call ${this.callId} sendAnswer() discarding ${discardCount} candidates that will be sent in answer`,
+ );
+
+ try {
+ await this.sendVoipEvent(EventType.CallAnswer, answerContent);
+ // If this isn't the first time we've tried to send the answer,
+ // we may have candidates queued up, so send them now.
+ this.inviteOrAnswerSent = true;
+ } catch (error) {
+ // We've failed to answer: back to the ringing state
+ this.state = CallState.Ringing;
+ if (error instanceof MatrixError && error.event) this.client.cancelPendingEvent(error.event);
+
+ let code = CallErrorCode.SendAnswer;
+ let message = "Failed to send answer";
+ if ((<Error>error).name == "UnknownDeviceError") {
+ code = CallErrorCode.UnknownDevices;
+ message = "Unknown devices present in the room";
+ }
+ this.emit(CallEvent.Error, new CallError(code, message, <Error>error), this);
+ throw error;
+ }
+
+ // error handler re-throws so this won't happen on error, but
+ // we don't want the same error handling on the candidate queue
+ this.sendCandidateQueue();
+ }
+
+ private queueGotCallFeedsForAnswer(callFeeds: CallFeed[]): void {
+ // Ensure only one negotiate/answer event is being processed at a time.
+ if (this.responsePromiseChain) {
+ this.responsePromiseChain = this.responsePromiseChain.then(() => this.gotCallFeedsForAnswer(callFeeds));
+ } else {
+ this.responsePromiseChain = this.gotCallFeedsForAnswer(callFeeds);
+ }
+ }
+
+ // Enables DTX (discontinuous transmission) on the given session to reduce
+ // bandwidth when transmitting silence
+ private mungeSdp(description: RTCSessionDescriptionInit, mods: CodecParamsMod[]): void {
+ // The only way to enable DTX at this time is through SDP munging
+ const sdp = parseSdp(description.sdp!);
+
+ sdp.media.forEach((media) => {
+ const payloadTypeToCodecMap = new Map<number, string>();
+ const codecToPayloadTypeMap = new Map<string, number>();
+ for (const rtp of media.rtp) {
+ payloadTypeToCodecMap.set(rtp.payload, rtp.codec);
+ codecToPayloadTypeMap.set(rtp.codec, rtp.payload);
+ }
+
+ for (const mod of mods) {
+ if (mod.mediaType !== media.type) continue;
+
+ if (!codecToPayloadTypeMap.has(mod.codec)) {
+ logger.info(
+ `Call ${this.callId} mungeSdp() ignoring SDP modifications for ${mod.codec} as it's not present.`,
+ );
+ continue;
+ }
+
+ const extraConfig: string[] = [];
+ if (mod.enableDtx !== undefined) {
+ extraConfig.push(`usedtx=${mod.enableDtx ? "1" : "0"}`);
+ }
+ if (mod.maxAverageBitrate !== undefined) {
+ extraConfig.push(`maxaveragebitrate=${mod.maxAverageBitrate}`);
+ }
+
+ let found = false;
+ for (const fmtp of media.fmtp) {
+ if (payloadTypeToCodecMap.get(fmtp.payload) === mod.codec) {
+ found = true;
+ fmtp.config += ";" + extraConfig.join(";");
+ }
+ }
+ if (!found) {
+ media.fmtp.push({
+ payload: codecToPayloadTypeMap.get(mod.codec)!,
+ config: extraConfig.join(";"),
+ });
+ }
+ }
+ });
+ description.sdp = writeSdp(sdp);
+ }
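+
+ /*
+ * For illustration (the payload type and existing parameters here are made up):
+ * with enableDtx and maxAverageBitrate set for opus, an fmtp line such as
+ * ```
+ * a=fmtp:111 minptime=10;useinbandfec=1
+ * ```
+ * becomes
+ * ```
+ * a=fmtp:111 minptime=10;useinbandfec=1;usedtx=1;maxaveragebitrate=32000
+ * ```
+ * and a new fmtp line is appended if the codec had none before.
+ */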
+
+ private async createOffer(): Promise<RTCSessionDescriptionInit> {
+ const offer = await this.peerConn!.createOffer();
+ this.mungeSdp(offer, getCodecParamMods(this.isPtt));
+ return offer;
+ }
+
+ private async createAnswer(): Promise<RTCSessionDescriptionInit> {
+ const answer = await this.peerConn!.createAnswer();
+ this.mungeSdp(answer, getCodecParamMods(this.isPtt));
+ return answer;
+ }
+
+ private async gotCallFeedsForAnswer(callFeeds: CallFeed[]): Promise<void> {
+ if (this.callHasEnded()) return;
+
+ this.waitForLocalAVStream = false;
+
+ for (const feed of callFeeds) {
+ this.pushLocalFeed(feed);
+ }
+
+ this.state = CallState.CreateAnswer;
+
+ let answer: RTCSessionDescriptionInit;
+ try {
+ this.getRidOfRTXCodecs();
+ answer = await this.createAnswer();
+ } catch (err) {
+ logger.debug(`Call ${this.callId} gotCallFeedsForAnswer() failed to create answer: `, err);
+ this.terminate(CallParty.Local, CallErrorCode.CreateAnswer, true);
+ return;
+ }
+
+ try {
+ await this.peerConn!.setLocalDescription(answer);
+
+ // make sure we're still going
+ if (this.callHasEnded()) return;
+
+ this.state = CallState.Connecting;
+
+ // Allow a short time for initial candidates to be gathered
+ await new Promise((resolve) => {
+ setTimeout(resolve, 200);
+ });
+
+ // make sure the call hasn't ended before we continue
+ if (this.callHasEnded()) return;
+
+ this.sendAnswer();
+ } catch (err) {
+ logger.debug(`Call ${this.callId} gotCallFeedsForAnswer() error setting local description!`, err);
+ this.terminate(CallParty.Local, CallErrorCode.SetLocalDescription, true);
+ return;
+ }
+ }
+
+ /**
+ * Internal
+ */
+ private gotLocalIceCandidate = (event: RTCPeerConnectionIceEvent): void => {
+ if (event.candidate) {
+ if (this.candidatesEnded) {
+ logger.warn(
+ `Call ${this.callId} gotLocalIceCandidate() got candidate after candidates have ended - ignoring!`,
+ );
+ return;
+ }
+
+ logger.debug(`Call ${this.callId} got local ICE ${event.candidate.sdpMid} ${event.candidate.candidate}`);
+
+ if (this.callHasEnded()) return;
+
+ // As with the offer, note we need to make a copy of this object, not
+ // pass the original: that broke in Chrome ~m43.
+ if (event.candidate.candidate === "") {
+ this.queueCandidate(null);
+ } else {
+ this.queueCandidate(event.candidate);
+ }
+ }
+ };
+
+ private onIceGatheringStateChange = (event: Event): void => {
+ logger.debug(
+ `Call ${this.callId} onIceGatheringStateChange() ice gathering state changed to ${
+ this.peerConn!.iceGatheringState
+ }`,
+ );
+ if (this.peerConn?.iceGatheringState === "complete") {
+ this.queueCandidate(null);
+ }
+ };
+
+ public async onRemoteIceCandidatesReceived(ev: MatrixEvent): Promise<void> {
+ if (this.callHasEnded()) {
+ //debuglog("Ignoring remote ICE candidate because call has ended");
+ return;
+ }
+
+ const content = ev.getContent<MCallCandidates>();
+ const candidates = content.candidates;
+ if (!candidates) {
+ logger.info(
+ `Call ${this.callId} onRemoteIceCandidatesReceived() ignoring candidates event with no candidates!`,
+ );
+ return;
+ }
+
+ const fromPartyId = content.version === 0 ? null : content.party_id || null;
+
+ if (this.opponentPartyId === undefined) {
+ // we haven't picked an opponent yet so save the candidates
+ if (fromPartyId) {
+ logger.info(
+ `Call ${this.callId} onRemoteIceCandidatesReceived() buffering ${candidates.length} candidates until we pick an opponent`,
+ );
+ const bufferedCandidates = this.remoteCandidateBuffer.get(fromPartyId) || [];
+ bufferedCandidates.push(...candidates);
+ this.remoteCandidateBuffer.set(fromPartyId, bufferedCandidates);
+ }
+ return;
+ }
+
+ if (!this.partyIdMatches(content)) {
+ logger.info(
+ `Call ${this.callId} onRemoteIceCandidatesReceived() ignoring candidates from party ID ${content.party_id}: we have chosen party ID ${this.opponentPartyId}`,
+ );
+
+ return;
+ }
+
+ await this.addIceCandidates(candidates);
+ }
+
+ /**
+ * Used by MatrixClient.
+ */
+ public async onAnswerReceived(event: MatrixEvent): Promise<void> {
+ const content = event.getContent<MCallAnswer>();
+ logger.debug(`Call ${this.callId} onAnswerReceived() running (partyId=${content.party_id})`);
+
+ if (this.callHasEnded()) {
+ logger.debug(`Call ${this.callId} onAnswerReceived() ignoring answer because call has ended`);
+ return;
+ }
+
+ if (this.opponentPartyId !== undefined) {
+ logger.info(
+ `Call ${this.callId} onAnswerReceived() ignoring answer from party ID ${content.party_id}: we already have an answer/reject from ${this.opponentPartyId}`,
+ );
+ return;
+ }
+
+ this.chooseOpponent(event);
+ await this.addBufferedIceCandidates();
+
+ this.state = CallState.Connecting;
+
+ const sdpStreamMetadata = content[SDPStreamMetadataKey];
+ if (sdpStreamMetadata) {
+ this.updateRemoteSDPStreamMetadata(sdpStreamMetadata);
+ } else {
+ logger.warn(
+ `Call ${this.callId} onAnswerReceived() did not get any SDPStreamMetadata! Can not send/receive multiple streams`,
+ );
+ }
+
+ try {
+ await this.peerConn!.setRemoteDescription(content.answer);
+ } catch (e) {
+ logger.debug(`Call ${this.callId} onAnswerReceived() failed to set remote description`, e);
+ this.terminate(CallParty.Local, CallErrorCode.SetRemoteDescription, false);
+ return;
+ }
+
+ // If the answer we selected has a party_id, send a select_answer event
+ // We do this after setting the remote description since otherwise we'd block
+ // call setup on it
+ if (this.opponentPartyId !== null) {
+ try {
+ await this.sendVoipEvent(EventType.CallSelectAnswer, {
+ selected_party_id: this.opponentPartyId,
+ });
+ } catch (err) {
+ // This isn't fatal, and will just mean that if another party has raced to answer
+ // the call, they won't know they got rejected, so we carry on & don't retry.
+ logger.warn(`Call ${this.callId} onAnswerReceived() failed to send select_answer event`, err);
+ }
+ }
+ }
+
+ public async onSelectAnswerReceived(event: MatrixEvent): Promise<void> {
+ if (this.direction !== CallDirection.Inbound) {
+ logger.warn(
+ `Call ${this.callId} onSelectAnswerReceived() got select_answer for an outbound call: ignoring`,
+ );
+ return;
+ }
+
+ const selectedPartyId = event.getContent<MCallSelectAnswer>().selected_party_id;
+
+ if (selectedPartyId === undefined || selectedPartyId === null) {
+ logger.warn(
+ `Call ${this.callId} onSelectAnswerReceived() got nonsensical select_answer with null/undefined selected_party_id: ignoring`,
+ );
+ return;
+ }
+
+ if (selectedPartyId !== this.ourPartyId) {
+ logger.info(
+ `Call ${this.callId} onSelectAnswerReceived() got select_answer for party ID ${selectedPartyId}: we are party ID ${this.ourPartyId}.`,
+ );
+ // The other party has picked somebody else's answer
+ await this.terminate(CallParty.Remote, CallErrorCode.AnsweredElsewhere, true);
+ }
+ }
+
+ public async onNegotiateReceived(event: MatrixEvent): Promise<void> {
+ const content = event.getContent<MCallInviteNegotiate>();
+ const description = content.description;
+ if (!description || !description.sdp || !description.type) {
+ logger.info(`Call ${this.callId} onNegotiateReceived() ignoring invalid m.call.negotiate event`);
+ return;
+ }
+ // Politeness always follows the direction of the call: in a glare situation,
+ // we pick either the inbound or outbound call, so one side will always be
+ // inbound and one outbound
+ const polite = this.direction === CallDirection.Inbound;
+
+ // Here we follow the perfect negotiation logic from
+ // https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API/Perfect_negotiation
+ const offerCollision =
+ description.type === "offer" && (this.makingOffer || this.peerConn!.signalingState !== "stable");
+
+ this.ignoreOffer = !polite && offerCollision;
+ if (this.ignoreOffer) {
+ logger.info(
+ `Call ${this.callId} onNegotiateReceived() ignoring colliding negotiate event because we're impolite`,
+ );
+ return;
+ }
+
+ const prevLocalOnHold = this.isLocalOnHold();
+
+ const sdpStreamMetadata = content[SDPStreamMetadataKey];
+ if (sdpStreamMetadata) {
+ this.updateRemoteSDPStreamMetadata(sdpStreamMetadata);
+ } else {
+ logger.warn(
+ `Call ${this.callId} onNegotiateReceived() received negotiation event without SDPStreamMetadata!`,
+ );
+ }
+
+ try {
+ await this.peerConn!.setRemoteDescription(description);
+
+ if (description.type === "offer") {
+ let answer: RTCSessionDescriptionInit;
+ try {
+ this.getRidOfRTXCodecs();
+ answer = await this.createAnswer();
+ } catch (err) {
+ logger.debug(`Call ${this.callId} onNegotiateReceived() failed to create answer: `, err);
+ this.terminate(CallParty.Local, CallErrorCode.CreateAnswer, true);
+ return;
+ }
+
+ await this.peerConn!.setLocalDescription(answer);
+
+ this.sendVoipEvent(EventType.CallNegotiate, {
+ description: this.peerConn!.localDescription?.toJSON(),
+ [SDPStreamMetadataKey]: this.getLocalSDPStreamMetadata(true),
+ });
+ }
+ } catch (err) {
+ logger.warn(`Call ${this.callId} onNegotiateReceived() failed to complete negotiation`, err);
+ }
+
+ const newLocalOnHold = this.isLocalOnHold();
+ if (prevLocalOnHold !== newLocalOnHold) {
+ this.emit(CallEvent.LocalHoldUnhold, newLocalOnHold, this);
+ // also this one for backwards compat
+ this.emit(CallEvent.HoldUnhold, newLocalOnHold);
+ }
+ }
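+
+ /*
+ * The collision handling above boils down to this predicate, evaluated only
+ * when the remote description is an offer (standalone sketch, not used by the
+ * class directly):
+ * ```
+ * function shouldIgnoreRemoteOffer(
+ *     polite: boolean,
+ *     makingOffer: boolean,
+ *     signalingState: RTCSignalingState,
+ * ): boolean {
+ *     const collision = makingOffer || signalingState !== "stable";
+ *     return !polite && collision;
+ * }
+ * ```
+ * Since the inbound side is always the polite peer, exactly one side of a
+ * glare situation drops the remote offer.
+ */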
+
+ private updateRemoteSDPStreamMetadata(metadata: SDPStreamMetadata): void {
+ this.remoteSDPStreamMetadata = utils.recursivelyAssign(this.remoteSDPStreamMetadata || {}, metadata, true);
+ for (const feed of this.getRemoteFeeds()) {
+ const streamId = feed.stream.id;
+ const metadata = this.remoteSDPStreamMetadata![streamId];
+
+ feed.setAudioVideoMuted(metadata?.audio_muted, metadata?.video_muted);
+ feed.purpose = this.remoteSDPStreamMetadata![streamId]?.purpose;
+ }
+ }
+
+ public onSDPStreamMetadataChangedReceived(event: MatrixEvent): void {
+ const content = event.getContent<MCallSDPStreamMetadataChanged>();
+ const metadata = content[SDPStreamMetadataKey];
+ this.updateRemoteSDPStreamMetadata(metadata);
+ }
+
+ public async onAssertedIdentityReceived(event: MatrixEvent): Promise<void> {
+ const content = event.getContent<MCAllAssertedIdentity>();
+ if (!content.asserted_identity) return;
+
+ this.remoteAssertedIdentity = {
+ id: content.asserted_identity.id,
+ displayName: content.asserted_identity.display_name,
+ };
+ this.emit(CallEvent.AssertedIdentityChanged, this);
+ }
+
+ public callHasEnded(): boolean {
+ // This exists as a workaround for TypeScript trying to be clever and erroring
+ // when putting if (this.state === CallState.Ended) return; twice in the same
+ // function, even though that function is async.
+ return this.state === CallState.Ended;
+ }
+
+ private queueGotLocalOffer(): void {
+ // Ensure only one negotiate/answer event is being processed at a time.
+ if (this.responsePromiseChain) {
+ this.responsePromiseChain = this.responsePromiseChain.then(() => this.wrappedGotLocalOffer());
+ } else {
+ this.responsePromiseChain = this.wrappedGotLocalOffer();
+ }
+ }
+
+ private async wrappedGotLocalOffer(): Promise<void> {
+ this.makingOffer = true;
+ try {
+ // XXX: in what situations do we believe gotLocalOffer actually throws? It appears
+ // to handle most of its exceptions itself and terminate the call. I'm not entirely
+ // sure it would ever throw, so I can't add a test for these lines.
+ // Also the tense is different between "gotLocalOffer" and "getLocalOfferFailed" so
+ // it's not entirely clear whether getLocalOfferFailed is just misnamed or whether
+ // they've been cross-pollinated somehow at some point.
+ await this.gotLocalOffer();
+ } catch (e) {
+ this.getLocalOfferFailed(e as Error);
+ return;
+ } finally {
+ this.makingOffer = false;
+ }
+ }
+
+ private async gotLocalOffer(): Promise<void> {
+ logger.debug(`Call ${this.callId} gotLocalOffer() running`);
+
+ if (this.callHasEnded()) {
+ logger.debug(
+ `Call ${this.callId} gotLocalOffer() ignoring newly created offer because the call has ended`,
+ );
+ return;
+ }
+
+ let offer: RTCSessionDescriptionInit;
+ try {
+ this.getRidOfRTXCodecs();
+ offer = await this.createOffer();
+ } catch (err) {
+ logger.debug(`Call ${this.callId} gotLocalOffer() failed to create offer: `, err);
+ this.terminate(CallParty.Local, CallErrorCode.CreateOffer, true);
+ return;
+ }
+
+ try {
+ await this.peerConn!.setLocalDescription(offer);
+ } catch (err) {
+ logger.debug(`Call ${this.callId} gotLocalOffer() error setting local description!`, err);
+ this.terminate(CallParty.Local, CallErrorCode.SetLocalDescription, true);
+ return;
+ }
+
+ if (this.peerConn!.iceGatheringState === "gathering") {
+ // Allow a short time for initial candidates to be gathered
+ await new Promise((resolve) => {
+ setTimeout(resolve, 200);
+ });
+ }
+
+ if (this.callHasEnded()) return;
+
+ const eventType = this.state === CallState.CreateOffer ? EventType.CallInvite : EventType.CallNegotiate;
+
+ const content = {
+ lifetime: CALL_TIMEOUT_MS,
+ } as MCallInviteNegotiate;
+
+ if (eventType === EventType.CallInvite && this.invitee) {
+ content.invitee = this.invitee;
+ }
+
+ // clunky because TypeScript can't follow the types through if we use an expression as the key
+ if (this.state === CallState.CreateOffer) {
+ content.offer = this.peerConn!.localDescription?.toJSON();
+ } else {
+ content.description = this.peerConn!.localDescription?.toJSON();
+ }
+
+ content.capabilities = {
+ "m.call.transferee": this.client.supportsCallTransfer,
+ "m.call.dtmf": false,
+ };
+
+ content[SDPStreamMetadataKey] = this.getLocalSDPStreamMetadata(true);
+
+ // Get rid of any candidates waiting to be sent: they'll be included in the local
+ // description we just got and will send in the offer.
+ const discardCount = this.discardDuplicateCandidates();
+ logger.info(
+ `Call ${this.callId} gotLocalOffer() discarding ${discardCount} candidates that will be sent in offer`,
+ );
+
+ try {
+ await this.sendVoipEvent(eventType, content);
+ } catch (error) {
+ logger.error(`Call ${this.callId} gotLocalOffer() failed to send invite`, error);
+ if (error instanceof MatrixError && error.event) this.client.cancelPendingEvent(error.event);
+
+ let code = CallErrorCode.SignallingFailed;
+ let message = "Signalling failed";
+ if (this.state === CallState.CreateOffer) {
+ code = CallErrorCode.SendInvite;
+ message = "Failed to send invite";
+ }
+ if ((<Error>error).name == "UnknownDeviceError") {
+ code = CallErrorCode.UnknownDevices;
+ message = "Unknown devices present in the room";
+ }
+
+ this.emit(CallEvent.Error, new CallError(code, message, <Error>error), this);
+ this.terminate(CallParty.Local, code, false);
+
+ // no need to carry on & send the candidate queue, but we also
+ // don't want to rethrow the error
+ return;
+ }
+
+ this.sendCandidateQueue();
+ if (this.state === CallState.CreateOffer) {
+ this.inviteOrAnswerSent = true;
+ this.state = CallState.InviteSent;
+ this.inviteTimeout = setTimeout(() => {
+ this.inviteTimeout = undefined;
+ if (this.state === CallState.InviteSent) {
+ this.hangup(CallErrorCode.InviteTimeout, false);
+ }
+ }, CALL_TIMEOUT_MS);
+ }
+ }
+
+ private getLocalOfferFailed = (err: Error): void => {
+ logger.error(`Call ${this.callId} getLocalOfferFailed() running`, err);
+
+ this.emit(
+ CallEvent.Error,
+ new CallError(CallErrorCode.LocalOfferFailed, "Failed to get local offer!", err),
+ this,
+ );
+ this.terminate(CallParty.Local, CallErrorCode.LocalOfferFailed, false);
+ };
+
+ private getUserMediaFailed = (err: Error): void => {
+ if (this.successor) {
+ this.successor.getUserMediaFailed(err);
+ return;
+ }
+
+ logger.warn(`Call ${this.callId} getUserMediaFailed() failed to get user media - ending call`, err);
+
+ this.emit(
+ CallEvent.Error,
+ new CallError(
+ CallErrorCode.NoUserMedia,
+ "Couldn't start capturing media! Is your microphone set up and " + "does this app have permission?",
+ err,
+ ),
+ this,
+ );
+ this.terminate(CallParty.Local, CallErrorCode.NoUserMedia, false);
+ };
+
+ private onIceConnectionStateChanged = (): void => {
+ if (this.callHasEnded()) {
+ return; // because ICE can still complete as we're ending the call
+ }
+ logger.debug(
+ `Call ${this.callId} onIceConnectionStateChanged() running (state=${this.peerConn?.iceConnectionState})`,
+ );
+
+ // ideally we'd consider the call to be connected when we get media but
+ // chrome doesn't implement any of the 'onstarted' events yet
+ if (["connected", "completed"].includes(this.peerConn?.iceConnectionState ?? "")) {
+ clearTimeout(this.iceDisconnectedTimeout);
+ this.iceDisconnectedTimeout = undefined;
+ this.state = CallState.Connected;
+
+ if (!this.callLengthInterval && !this.callStartTime) {
+ this.callStartTime = Date.now();
+
+ this.callLengthInterval = setInterval(() => {
+ this.emit(CallEvent.LengthChanged, Math.round((Date.now() - this.callStartTime!) / 1000), this);
+ }, CALL_LENGTH_INTERVAL);
+ }
+ } else if (this.peerConn?.iceConnectionState == "failed") {
+ // Firefox for Android does not yet have support for restartIce()
+ // (the types say it's always defined though, so we have to cast
+ // to prevent typescript from warning).
+ if (this.peerConn?.restartIce as (() => void) | null) {
+ this.candidatesEnded = false;
+ this.peerConn!.restartIce();
+ } else {
+ logger.info(
+ `Call ${this.callId} onIceConnectionStateChanged() hanging up call (ICE failed and no ICE restart method)`,
+ );
+ this.hangup(CallErrorCode.IceFailed, false);
+ }
+ } else if (this.peerConn?.iceConnectionState == "disconnected") {
+ this.iceDisconnectedTimeout = setTimeout(() => {
+ logger.info(
+ `Call ${this.callId} onIceConnectionStateChanged() hanging up call (ICE disconnected for too long)`,
+ );
+ this.hangup(CallErrorCode.IceFailed, false);
+ }, ICE_DISCONNECTED_TIMEOUT);
+ this.state = CallState.Connecting;
+ }
+
+ // In PTT mode, override feed status to muted when we lose connection to
+ // the peer, since we don't want to block the line if they're not saying anything.
+ // Experimenting in Chrome, this happens after 5 or 6 seconds, which is probably
+ // fast enough.
+ if (this.isPtt && ["failed", "disconnected"].includes(this.peerConn!.iceConnectionState)) {
+ for (const feed of this.getRemoteFeeds()) {
+ feed.setAudioVideoMuted(true, true);
+ }
+ }
+ };
+
+ private onSignallingStateChanged = (): void => {
+ logger.debug(`Call ${this.callId} onSignallingStateChanged() running (state=${this.peerConn?.signalingState})`);
+ };
+
+ private onTrack = (ev: RTCTrackEvent): void => {
+ if (ev.streams.length === 0) {
+ logger.warn(
+ `Call ${this.callId} onTrack() called with a streamless track (kind=${ev.track.kind})`,
+ );
+ return;
+ }
+
+ const stream = ev.streams[0];
+ this.pushRemoteFeed(stream);
+
+ if (!this.removeTrackListeners.has(stream)) {
+ const onRemoveTrack = (): void => {
+ if (stream.getTracks().length === 0) {
+ logger.info(`Call ${this.callId} onTrack() removing track (streamId=${stream.id})`);
+ this.deleteFeedByStream(stream);
+ stream.removeEventListener("removetrack", onRemoveTrack);
+ this.removeTrackListeners.delete(stream);
+ }
+ };
+ stream.addEventListener("removetrack", onRemoveTrack);
+ this.removeTrackListeners.set(stream, onRemoveTrack);
+ }
+ };
+
+ private onDataChannel = (ev: RTCDataChannelEvent): void => {
+ this.emit(CallEvent.DataChannel, ev.channel, this);
+ };
+
+ /**
+ * This method removes all video/rtx codecs from screensharing video
+ * transceivers. This is necessary since they can cause problems. Without
+ * this the following steps should produce an error:
+ * Chromium calls Firefox
+ * Firefox answers
+ * Firefox starts screen-sharing
+ * Chromium starts screen-sharing
+ * Call crashes for Chromium with:
+ * [96685:23:0518/162603.933321:ERROR:webrtc_video_engine.cc(3296)] RTX codec (PT=97) mapped to PT=96 which is not in the codec list.
+ * [96685:23:0518/162603.933377:ERROR:webrtc_video_engine.cc(1171)] GetChangedRecvParameters called without any video codecs.
+ * [96685:23:0518/162603.933430:ERROR:sdp_offer_answer.cc(4302)] Failed to set local video description recv parameters for m-section with mid='2'. (INVALID_PARAMETER)
+ */
+ private getRidOfRTXCodecs(): void {
+ // RTCRtpReceiver.getCapabilities and RTCRtpSender.getCapabilities don't seem to be supported on FF
+ if (!RTCRtpReceiver.getCapabilities || !RTCRtpSender.getCapabilities) return;
+
+ const recvCodecs = RTCRtpReceiver.getCapabilities("video")!.codecs;
+ const sendCodecs = RTCRtpSender.getCapabilities("video")!.codecs;
+ const codecs = [...sendCodecs, ...recvCodecs];
+
+ for (const codec of codecs) {
+ if (codec.mimeType === "video/rtx") {
+ const rtxCodecIndex = codecs.indexOf(codec);
+ codecs.splice(rtxCodecIndex, 1);
+ }
+ }
+
+ const screenshareVideoTransceiver = this.transceivers.get(
+ getTransceiverKey(SDPStreamMetadataPurpose.Screenshare, "video"),
+ );
+ if (screenshareVideoTransceiver) screenshareVideoTransceiver.setCodecPreferences(codecs);
+ }
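+
+ /*
+ * Note: splicing the array while iterating it means the element that slides
+ * into the removed slot is skipped, so adjacent video/rtx entries can survive.
+ * A non-mutating filter expresses the same intent (sketch only):
+ * ```
+ * const codecs = [
+ *     ...RTCRtpSender.getCapabilities("video")!.codecs,
+ *     ...RTCRtpReceiver.getCapabilities("video")!.codecs,
+ * ].filter((codec) => codec.mimeType !== "video/rtx");
+ * ```
+ */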
+
+ private onNegotiationNeeded = async (): Promise<void> => {
+ logger.info(`Call ${this.callId} onNegotiationNeeded() negotiation is needed!`);
+
+ if (this.state !== CallState.CreateOffer && this.opponentVersion === 0) {
+ logger.info(
+ `Call ${this.callId} onNegotiationNeeded() opponent does not support renegotiation: ignoring negotiationneeded event`,
+ );
+ return;
+ }
+
+ this.queueGotLocalOffer();
+ };
+
+ public onHangupReceived = (msg: MCallHangupReject): void => {
+ logger.debug(`Call ${this.callId} onHangupReceived() running`);
+
+ // party ID must match (our chosen partner hanging up the call) or be undefined (we haven't chosen
+ // a partner yet but we're treating the hangup as a reject as per VoIP v0)
+ if (this.partyIdMatches(msg) || this.state === CallState.Ringing) {
+ // default reason is user_hangup
+ this.terminate(CallParty.Remote, msg.reason || CallErrorCode.UserHangup, true);
+ } else {
+ logger.info(
+ `Call ${this.callId} onHangupReceived() ignoring message from party ID ${msg.party_id}: our partner is ${this.opponentPartyId}`,
+ );
+ }
+ };
+
+ public onRejectReceived = (msg: MCallHangupReject): void => {
+ logger.debug(`Call ${this.callId} onRejectReceived() running`);
+
+ // No need to check party_id for reject because if we'd received either
+ // an answer or reject, we wouldn't be in state InviteSent
+
+ const shouldTerminate =
+ // reject events also end the call if it's ringing: it's another of
+ // our devices rejecting the call.
+ [CallState.InviteSent, CallState.Ringing].includes(this.state) ||
+ // also if we're in the init state and it's an inbound call, since
+ // this means we just haven't entered the ringing state yet
+ (this.state === CallState.Fledgling && this.direction === CallDirection.Inbound);
+
+ if (shouldTerminate) {
+ this.terminate(CallParty.Remote, msg.reason || CallErrorCode.UserHangup, true);
+ } else {
+ logger.debug(`Call ${this.callId} onRejectReceived() called in wrong state (state=${this.state})`);
+ }
+ };
+
+ public onAnsweredElsewhere = (msg: MCallAnswer): void => {
+ logger.debug(`Call ${this.callId} onAnsweredElsewhere() running`);
+ this.terminate(CallParty.Remote, CallErrorCode.AnsweredElsewhere, true);
+ };
+
+ /**
+ * @internal
+ */
+ private async sendVoipEvent(eventType: string, content: object): Promise<void> {
+ const realContent = Object.assign({}, content, {
+ version: VOIP_PROTO_VERSION,
+ call_id: this.callId,
+ party_id: this.ourPartyId,
+ conf_id: this.groupCallId,
+ });
+
+ if (this.opponentDeviceId) {
+ const toDeviceSeq = this.toDeviceSeq++;
+ const content = {
+ ...realContent,
+ device_id: this.client.deviceId,
+ sender_session_id: this.client.getSessionId(),
+ dest_session_id: this.opponentSessionId,
+ seq: toDeviceSeq,
+ [ToDeviceMessageId]: uuidv4(),
+ };
+
+ this.emit(
+ CallEvent.SendVoipEvent,
+ {
+ type: "toDevice",
+ eventType,
+ userId: this.invitee || this.getOpponentMember()?.userId,
+ opponentDeviceId: this.opponentDeviceId,
+ content,
+ },
+ this,
+ );
+
+ const userId = this.invitee || this.getOpponentMember()!.userId;
+ if (this.client.getUseE2eForGroupCall()) {
+ if (!this.opponentDeviceInfo) {
+ logger.warn(`Call ${this.callId} sendVoipEvent() failed: we do not have opponentDeviceInfo`);
+ return;
+ }
+
+ await this.client.encryptAndSendToDevices(
+ [
+ {
+ userId,
+ deviceInfo: this.opponentDeviceInfo,
+ },
+ ],
+ {
+ type: eventType,
+ content,
+ },
+ );
+ } else {
+ await this.client.sendToDevice(
+ eventType,
+ new Map<string, any>([[userId, new Map([[this.opponentDeviceId, content]])]]),
+ );
+ }
+ } else {
+ this.emit(
+ CallEvent.SendVoipEvent,
+ {
+ type: "sendEvent",
+ eventType,
+ roomId: this.roomId,
+ content: realContent,
+ userId: this.invitee || this.getOpponentMember()?.userId,
+ },
+ this,
+ );
+
+ await this.client.sendEvent(this.roomId!, eventType, realContent);
+ }
+ }
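+
+ /*
+ * Roughly, a to-device VoIP event carries the common fields plus the
+ * session/sequence fields added above (values below are placeholders):
+ * ```
+ * {
+ *     call_id, party_id, conf_id, version,     // common to all VoIP events
+ *     device_id: "<our device id>",
+ *     sender_session_id: "<our session id>",
+ *     dest_session_id: "<opponent session id>",
+ *     seq: 0,                                  // incremented per message
+ *     [ToDeviceMessageId]: "<uuid>",
+ *     ...                                      // event-specific fields
+ * }
+ * ```
+ */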
+
+ /**
+ * Queue a candidate to be sent
+ * @param content - The candidate to queue up, or null if candidates have finished being generated
+ * and end-of-candidates should be signalled
+ */
+ private queueCandidate(content: RTCIceCandidate | null): void {
+ // We partially de-trickle candidates by waiting for `delay` before sending them
+ // amalgamated, in order to avoid sending too many m.call.candidates events and hitting
+ // rate limits in Matrix.
+ // In practice, it'd be better to remove rate limits for m.call.*
+
+ // N.B. this deliberately lets you queue and send blank candidates, which MSC2746
+ // currently proposes as the way to indicate that candidate gathering is complete.
+ // This will hopefully be changed to an explicit rather than implicit notification
+ // shortly.
+ if (content) {
+ this.candidateSendQueue.push(content);
+ } else {
+ this.candidatesEnded = true;
+ }
+
+ // Don't send the ICE candidates yet if the call is in the ringing state: this
+ // means we tried to pick (ie. started generating candidates) and then failed to
+ // send the answer and went back to the ringing state. Queue up the candidates
+ // to send if we successfully send the answer.
+ // Equally don't send if we haven't yet sent the answer because we can send the
+ // first batch of candidates along with the answer
+ if (this.state === CallState.Ringing || !this.inviteOrAnswerSent) return;
+
+ // MSC2746 recommends these values (can be quite long when calling because the
+ // callee will need a while to answer the call)
+ const delay = this.direction === CallDirection.Inbound ? 500 : 2000;
+
+ if (this.candidateSendTries === 0) {
+ setTimeout(() => {
+ this.sendCandidateQueue();
+ }, delay);
+ }
+ }
+
+ // Discard all non-end-of-candidates messages
+ // Return the number of candidate messages that were discarded.
+ // Call this method before sending an invite or answer message
+ private discardDuplicateCandidates(): number {
+ let discardCount = 0;
+ const newQueue: RTCIceCandidate[] = [];
+
+ for (let i = 0; i < this.candidateSendQueue.length; i++) {
+ const candidate = this.candidateSendQueue[i];
+ if (candidate.candidate === "") {
+ newQueue.push(candidate);
+ } else {
+ discardCount++;
+ }
+ }
+
+ this.candidateSendQueue = newQueue;
+
+ return discardCount;
+ }
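+
+ /*
+ * The queue ultimately forms the body of an m.call.candidates event, roughly
+ * (sdpMid/candidate values are placeholders; call_id, party_id and version are
+ * added by sendVoipEvent):
+ * ```
+ * {
+ *     candidates: [
+ *         { sdpMid: "0", sdpMLineIndex: 0, candidate: "candidate:..." },
+ *         { candidate: "" }, // appended once gathering ends (end-of-candidates)
+ *     ],
+ * }
+ * ```
+ */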
+
+ /**
+ * Transfers this call to another user
+ */
+ public async transfer(targetUserId: string): Promise<void> {
+ // Fetch the target user's global profile info: their room avatar / displayname
+ // could be different in whatever room we share with them.
+ const profileInfo = await this.client.getProfileInfo(targetUserId);
+
+ const replacementId = genCallID();
+
+ const body = {
+ replacement_id: genCallID(),
+ target_user: {
+ id: targetUserId,
+ display_name: profileInfo.displayname,
+ avatar_url: profileInfo.avatar_url,
+ },
+ create_call: replacementId,
+ } as MCallReplacesEvent;
+
+ await this.sendVoipEvent(EventType.CallReplaces, body);
+
+ await this.terminate(CallParty.Local, CallErrorCode.Transferred, true);
+ }
+
+ /**
+ * Transfers this call to the target call, effectively 'joining' the
+ * two calls (so the remote parties on each call are connected together).
+ */
+ public async transferToCall(transferTargetCall: MatrixCall): Promise<void> {
+ const targetUserId = transferTargetCall.getOpponentMember()?.userId;
+ const targetProfileInfo = targetUserId ? await this.client.getProfileInfo(targetUserId) : undefined;
+ const opponentUserId = this.getOpponentMember()?.userId;
+ const transfereeProfileInfo = opponentUserId ? await this.client.getProfileInfo(opponentUserId) : undefined;
+
+ const newCallId = genCallID();
+
+ const bodyToTransferTarget = {
+ // the replacements on each side have their own ID, and it's distinct from the
+ // ID of the new call (but we can use the same function to generate it)
+ replacement_id: genCallID(),
+ target_user: {
+ id: opponentUserId,
+ display_name: transfereeProfileInfo?.displayname,
+ avatar_url: transfereeProfileInfo?.avatar_url,
+ },
+ await_call: newCallId,
+ } as MCallReplacesEvent;
+
+ await transferTargetCall.sendVoipEvent(EventType.CallReplaces, bodyToTransferTarget);
+
+ const bodyToTransferee = {
+ replacement_id: genCallID(),
+ target_user: {
+ id: targetUserId,
+ display_name: targetProfileInfo?.displayname,
+ avatar_url: targetProfileInfo?.avatar_url,
+ },
+ create_call: newCallId,
+ } as MCallReplacesEvent;
+
+ await this.sendVoipEvent(EventType.CallReplaces, bodyToTransferee);
+
+ await this.terminate(CallParty.Local, CallErrorCode.Transferred, true);
+ await transferTargetCall.terminate(CallParty.Local, CallErrorCode.Transferred, true);
+ }
+
+ private async terminate(hangupParty: CallParty, hangupReason: CallErrorCode, shouldEmit: boolean): Promise<void> {
+ if (this.callHasEnded()) return;
+
+ this.hangupParty = hangupParty;
+ this.hangupReason = hangupReason;
+ this.state = CallState.Ended;
+
+ if (this.inviteTimeout) {
+ clearTimeout(this.inviteTimeout);
+ this.inviteTimeout = undefined;
+ }
+ if (this.iceDisconnectedTimeout !== undefined) {
+ clearTimeout(this.iceDisconnectedTimeout);
+ this.iceDisconnectedTimeout = undefined;
+ }
+ if (this.callLengthInterval) {
+ clearInterval(this.callLengthInterval);
+ this.callLengthInterval = undefined;
+ }
+ if (this.stopVideoTrackTimer !== undefined) {
+ clearTimeout(this.stopVideoTrackTimer);
+ this.stopVideoTrackTimer = undefined;
+ }
+
+ for (const [stream, listener] of this.removeTrackListeners) {
+ stream.removeEventListener("removetrack", listener);
+ }
+ this.removeTrackListeners.clear();
+
+ this.callStatsAtEnd = await this.collectCallStats();
+
+ // Order is important here: first we stopAllMedia() and only then we can deleteAllFeeds()
+ this.stopAllMedia();
+ this.deleteAllFeeds();
+
+ if (this.peerConn && this.peerConn.signalingState !== "closed") {
+ this.peerConn.close();
+ }
+ this.stats?.removeStatsReportGatherer(this.callId);
+
+ if (shouldEmit) {
+ this.emit(CallEvent.Hangup, this);
+ }
+
+ this.client.callEventHandler!.calls.delete(this.callId);
+ }
+
+ private stopAllMedia(): void {
+ logger.debug(`Call ${this.callId} stopAllMedia() running`);
+
+ for (const feed of this.feeds) {
+ // Slightly awkward as local feed need to go via the correct method on
+ // the MediaHandler so they get removed from MediaHandler (remote tracks
+ // don't)
+ // NB. We clone local streams when passing them to individual calls in a group
+ // call, so we can (and should) stop the clones once we no longer need them:
+ // the other clones will continue fine.
+ if (feed.isLocal() && feed.purpose === SDPStreamMetadataPurpose.Usermedia) {
+ this.client.getMediaHandler().stopUserMediaStream(feed.stream);
+ } else if (feed.isLocal() && feed.purpose === SDPStreamMetadataPurpose.Screenshare) {
+ this.client.getMediaHandler().stopScreensharingStream(feed.stream);
+ } else if (!feed.isLocal()) {
+ logger.debug(`Call ${this.callId} stopAllMedia() stopping stream (streamId=${feed.stream.id})`);
+ for (const track of feed.stream.getTracks()) {
+ track.stop();
+ }
+ }
+ }
+ }
+
+ private checkForErrorListener(): void {
+ if (this.listeners(EventEmitterEvents.Error).length === 0) {
+ throw new Error("You MUST attach an error listener using call.on('error', function() {})");
+ }
+ }
+
+ private async sendCandidateQueue(): Promise<void> {
+ if (this.candidateSendQueue.length === 0 || this.callHasEnded()) {
+ return;
+ }
+
+ const candidates = this.candidateSendQueue;
+ this.candidateSendQueue = [];
+ ++this.candidateSendTries;
+ const content = { candidates: candidates.map((candidate) => candidate.toJSON()) };
+ if (this.candidatesEnded) {
+ // If there are no more candidates, signal this by adding an empty string candidate
+ content.candidates.push({
+ candidate: "",
+ });
+ }
+ logger.debug(`Call ${this.callId} sendCandidateQueue() attempting to send ${candidates.length} candidates`);
+ try {
+ await this.sendVoipEvent(EventType.CallCandidates, content);
+ // reset our retry count if we have successfully sent our candidates
+ // otherwise queueCandidate() will refuse to try to flush the queue
+ this.candidateSendTries = 0;
+
+ // Try to send candidates again just in case we received more candidates while sending.
+ this.sendCandidateQueue();
+ } catch (error) {
+ // don't retry this event: we'll send another one later as we might
+ // have more candidates by then.
+ if (error instanceof MatrixError && error.event) this.client.cancelPendingEvent(error.event);
+
+ // put all the candidates we failed to send back in the queue
+ this.candidateSendQueue.push(...candidates);
+
+ if (this.candidateSendTries > 5) {
+ logger.debug(
+ `Call ${this.callId} sendCandidateQueue() failed to send candidates on attempt ${this.candidateSendTries}. Giving up on this call.`,
+ error,
+ );
+
+ const code = CallErrorCode.SignallingFailed;
+ const message = "Signalling failed";
+
+ this.emit(CallEvent.Error, new CallError(code, message, <Error>error), this);
+ this.hangup(code, false);
+
+ return;
+ }
+
+ const delayMs = 500 * Math.pow(2, this.candidateSendTries);
+ ++this.candidateSendTries;
+ logger.debug(
+ `Call ${this.callId} sendCandidateQueue() failed to send candidates. Retrying in ${delayMs}ms`,
+ error,
+ );
+ setTimeout(() => {
+ this.sendCandidateQueue();
+ }, delayMs);
+ }
+ }
+
+ /**
+ * Place a call to this room.
+ * @throws if you have not specified a listener for 'error' events.
+ * @throws if you have passed audio=false.
+ */
+ public async placeCall(audio: boolean, video: boolean): Promise<void> {
+ if (!audio) {
+ throw new Error("You CANNOT start a call without audio");
+ }
+ this.state = CallState.WaitLocalMedia;
+
+ try {
+ const stream = await this.client.getMediaHandler().getUserMediaStream(audio, video);
+
+ // make sure all the tracks are enabled (same as pushNewLocalFeed -
+ // we probably ought to just have one code path for adding streams)
+ setTracksEnabled(stream.getAudioTracks(), true);
+ setTracksEnabled(stream.getVideoTracks(), true);
+
+ const callFeed = new CallFeed({
+ client: this.client,
+ roomId: this.roomId,
+ userId: this.client.getUserId()!,
+ deviceId: this.client.getDeviceId() ?? undefined,
+ stream,
+ purpose: SDPStreamMetadataPurpose.Usermedia,
+ audioMuted: false,
+ videoMuted: false,
+ });
+ await this.placeCallWithCallFeeds([callFeed]);
+ } catch (e) {
+ this.getUserMediaFailed(<Error>e);
+ return;
+ }
+ }
+
+ /**
+ * Place a call to this room with call feed.
+ * @param callFeeds - to use
+ * @throws if you have not specified a listener for 'error' events.
+ * @throws if you have passed audio=false.
+ */
+ public async placeCallWithCallFeeds(callFeeds: CallFeed[], requestScreenshareFeed = false): Promise<void> {
+ this.checkForErrorListener();
+ this.direction = CallDirection.Outbound;
+
+ await this.initOpponentCrypto();
+
+ // XXX Find a better way to do this
+ this.client.callEventHandler!.calls.set(this.callId, this);
+
+ // make sure we have valid turn creds. Unless something's gone wrong, it should
+ // poll and keep the credentials valid so this should be instant.
+ const haveTurnCreds = await this.client.checkTurnServers();
+ if (!haveTurnCreds) {
+ logger.warn(
+ `Call ${this.callId} placeCallWithCallFeeds() failed to get TURN credentials! Proceeding with call anyway...`,
+ );
+ }
+
+ // create the peer connection now so it can be gathering candidates while we get user
+ // media (assuming a candidate pool size is configured)
+ this.peerConn = this.createPeerConnection();
+ this.gotCallFeedsForInvite(callFeeds, requestScreenshareFeed);
+ }
+
+ private createPeerConnection(): RTCPeerConnection {
+ const pc = new window.RTCPeerConnection({
+ iceTransportPolicy: this.forceTURN ? "relay" : undefined,
+ iceServers: this.turnServers,
+ iceCandidatePoolSize: this.client.iceCandidatePoolSize,
+ bundlePolicy: "max-bundle",
+ });
+
+ // 'connectionstatechange' would be better, but firefox doesn't implement that.
+ pc.addEventListener("iceconnectionstatechange", this.onIceConnectionStateChanged);
+ pc.addEventListener("signalingstatechange", this.onSignallingStateChanged);
+ pc.addEventListener("icecandidate", this.gotLocalIceCandidate);
+ pc.addEventListener("icegatheringstatechange", this.onIceGatheringStateChange);
+ pc.addEventListener("track", this.onTrack);
+ pc.addEventListener("negotiationneeded", this.onNegotiationNeeded);
+ pc.addEventListener("datachannel", this.onDataChannel);
+
+ this.stats?.addStatsReportGatherer(this.callId, "unknown", pc);
+ return pc;
+ }
+
+ private partyIdMatches(msg: MCallBase): boolean {
+ // They must either match or both be absent (in which case opponentPartyId will be null)
+ // Also we ignore party IDs on the invite/offer if the version is 0, so we must do the same
+ // here and use null if the version is 0 (woe betide any opponent sending messages in the
+ // same call with different versions)
+ const msgPartyId = msg.version === 0 ? null : msg.party_id || null;
+ return msgPartyId === this.opponentPartyId;
+ }
+
+ // Commits to an opponent for the call
+ // ev: An invite or answer event
+ private chooseOpponent(ev: MatrixEvent): void {
+ // I choo-choo-choose you
+ const msg = ev.getContent<MCallInviteNegotiate | MCallAnswer>();
+
+ logger.debug(`Call ${this.callId} chooseOpponent() running (partyId=${msg.party_id})`);
+
+ this.opponentVersion = msg.version;
+ if (this.opponentVersion === 0) {
+ // set to null to indicate that we've chosen an opponent, but because
+ // they're v0 they have no party ID (even if they sent one, we're ignoring it)
+ this.opponentPartyId = null;
+ } else {
+ // set to their party ID, or if they're naughty and didn't send one despite
+ // not being v0, set it to null to indicate we picked an opponent with no
+ // party ID
+ this.opponentPartyId = msg.party_id || null;
+ }
+ this.opponentCaps = msg.capabilities || ({} as CallCapabilities);
+ this.opponentMember = this.client.getRoom(this.roomId)!.getMember(ev.getSender()!) ?? undefined;
+ }
+
+ private async addBufferedIceCandidates(): Promise<void> {
+ const bufferedCandidates = this.remoteCandidateBuffer.get(this.opponentPartyId!);
+ if (bufferedCandidates) {
+ logger.info(
+ `Call ${this.callId} addBufferedIceCandidates() adding ${bufferedCandidates.length} buffered candidates for opponent ${this.opponentPartyId}`,
+ );
+ await this.addIceCandidates(bufferedCandidates);
+ }
+ this.remoteCandidateBuffer.clear();
+ }
+
+ private async addIceCandidates(candidates: RTCIceCandidate[]): Promise<void> {
+ for (const candidate of candidates) {
+ if (
+ (candidate.sdpMid === null || candidate.sdpMid === undefined) &&
+ (candidate.sdpMLineIndex === null || candidate.sdpMLineIndex === undefined)
+ ) {
+ logger.debug(`Call ${this.callId} addIceCandidates() got remote ICE end-of-candidates`);
+ } else {
+ logger.debug(
+ `Call ${this.callId} addIceCandidates() got remote ICE candidate (sdpMid=${candidate.sdpMid}, candidate=${candidate.candidate})`,
+ );
+ }
+
+ try {
+ await this.peerConn!.addIceCandidate(candidate);
+ } catch (err) {
+ if (!this.ignoreOffer) {
+ logger.info(`Call ${this.callId} addIceCandidates() failed to add remote ICE candidate`, err);
+ }
+ }
+ }
+ }
+
+ public get hasPeerConnection(): boolean {
+ return Boolean(this.peerConn);
+ }
+
+ public initStats(stats: GroupCallStats, peerId = "unknown"): void {
+ this.stats = stats;
+ this.stats.start();
+ }
+}
+
+export function setTracksEnabled(tracks: Array<MediaStreamTrack>, enabled: boolean): void {
+ for (const track of tracks) {
+ track.enabled = enabled;
+ }
+}
+
+export function supportsMatrixCall(): boolean {
+ // typeof prevents Node from erroring on an undefined reference
+ if (typeof window === "undefined" || typeof document === "undefined") {
+ // NB. We don't log here as apps try to create a call object as a test for
+ // whether calls are supported, so we shouldn't fill the logs up.
+ return false;
+ }
+
+ // Firefox can throw on something as simple as accessing RTCPeerConnection when operating in a secure mode.
+ // There's some information at https://bugzilla.mozilla.org/show_bug.cgi?id=1542616 though the concern
+ // is that the browser throwing a SecurityError will brick the client creation process.
+ try {
+ const supported = Boolean(
+ window.RTCPeerConnection ||
+ window.RTCSessionDescription ||
+ window.RTCIceCandidate ||
+ navigator.mediaDevices,
+ );
+ if (!supported) {
+ /* istanbul ignore if */ // Adds a lot of noise to test runs, so disable logging there.
+ if (process.env.NODE_ENV !== "test") {
+ logger.error("WebRTC is not supported in this browser / environment");
+ }
+ return false;
+ }
+ } catch (e) {
+ logger.error("Exception thrown when trying to access WebRTC", e);
+ return false;
+ }
+
+ return true;
+}
+
+/**
+ * DEPRECATED
+ * Use client.createCall()
+ *
+ * Create a new Matrix call for the browser.
+ * @param client - The client instance to use.
+ * @param roomId - The room the call is in.
+ * @param options - DEPRECATED optional options map.
+ * @returns the call or null if the browser doesn't support calling.
+ */
+export function createNewMatrixCall(
+ client: MatrixClient,
+ roomId: string,
+ options?: Pick<CallOpts, "forceTURN" | "invitee" | "opponentDeviceId" | "opponentSessionId" | "groupCallId">,
+): MatrixCall | null {
+ if (!supportsMatrixCall()) return null;
+
+ const optionsForceTURN = options ? options.forceTURN : false;
+
+ const opts: CallOpts = {
+ client: client,
+ roomId: roomId,
+ invitee: options?.invitee,
+ turnServers: client.getTurnServers(),
+ // call level options
+ forceTURN: client.forceTURN || optionsForceTURN,
+ opponentDeviceId: options?.opponentDeviceId,
+ opponentSessionId: options?.opponentSessionId,
+ groupCallId: options?.groupCallId,
+ };
+ const call = new MatrixCall(opts);
+
+ client.reEmitter.reEmit(call, Object.values(CallEvent));
+
+ return call;
+}
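+
+/*
+ * A usage sketch (client.createCall() is the preferred entry point, as noted
+ * above; the room ID is a placeholder):
+ * ```
+ * const call = createNewMatrixCall(client, "!room:example.org");
+ * if (call) {
+ *     call.on(CallEvent.Error, (err) => logger.error(err));
+ *     await call.placeCall(true, false); // audio-only
+ * }
+ * ```
+ */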
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callEventHandler.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callEventHandler.ts
new file mode 100644
index 0000000..4ee183a
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callEventHandler.ts
@@ -0,0 +1,425 @@
+/*
+Copyright 2020 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { MatrixEvent } from "../models/event";
+import { logger } from "../logger";
+import { CallDirection, CallError, CallErrorCode, CallState, createNewMatrixCall, MatrixCall } from "./call";
+import { EventType } from "../@types/event";
+import { ClientEvent, MatrixClient } from "../client";
+import { MCallAnswer, MCallHangupReject } from "./callEventTypes";
+import { GroupCall, GroupCallErrorCode, GroupCallEvent, GroupCallUnknownDeviceError } from "./groupCall";
+import { RoomEvent } from "../models/room";
+
+// Don't ring unless we'd be ringing for at least 3 seconds: the user needs some
+// time to press the 'accept' button
+const RING_GRACE_PERIOD = 3000;
+
+export enum CallEventHandlerEvent {
+ Incoming = "Call.incoming",
+}
+
+export type CallEventHandlerEventHandlerMap = {
+ /**
+ * Fires whenever an incoming call arrives.
+ * @param call - The incoming call.
+ * @example
+ * ```
+ * matrixClient.on("Call.incoming", function(call){
+ * call.answer(); // auto-answer
+ * });
+ * ```
+ */
+ [CallEventHandlerEvent.Incoming]: (call: MatrixCall) => void;
+};
+
+export class CallEventHandler {
+ // XXX: Most of these are only public because of the tests
+ public calls: Map<string, MatrixCall>;
+ public callEventBuffer: MatrixEvent[];
+ public nextSeqByCall: Map<string, number> = new Map();
+ public toDeviceEventBuffers: Map<string, Array<MatrixEvent>> = new Map();
+
+ private client: MatrixClient;
+ private candidateEventsByCall: Map<string, Array<MatrixEvent>>;
+ private eventBufferPromiseChain?: Promise<void>;
+
+ public constructor(client: MatrixClient) {
+ this.client = client;
+ this.calls = new Map<string, MatrixCall>();
+ // The sync code always emits one event at a time, so it will patiently
+ // wait for us to finish processing a call invite before delivering the
+ // next event, even if that next event is a hangup. We therefore accumulate
+ // all our call events and then process them on the 'sync' event, ie.
+ // each time a sync has completed. This way, we can avoid emitting incoming
+ // call events if we get both the invite and answer/hangup in the same sync.
+ // This happens quite often, eg. replaying sync from storage, catchup sync
+ // after loading and after we've been offline for a bit.
+ this.callEventBuffer = [];
+ this.candidateEventsByCall = new Map<string, Array<MatrixEvent>>();
+ }
+
+ public start(): void {
+ this.client.on(ClientEvent.Sync, this.onSync);
+ this.client.on(RoomEvent.Timeline, this.onRoomTimeline);
+ this.client.on(ClientEvent.ToDeviceEvent, this.onToDeviceEvent);
+ }
+
+ public stop(): void {
+ this.client.removeListener(ClientEvent.Sync, this.onSync);
+ this.client.removeListener(RoomEvent.Timeline, this.onRoomTimeline);
+ this.client.removeListener(ClientEvent.ToDeviceEvent, this.onToDeviceEvent);
+ }
+
+ private onSync = (): void => {
+ // Process the current event buffer and start queuing into a new one.
+ const currentEventBuffer = this.callEventBuffer;
+ this.callEventBuffer = [];
+
+ // Ensure correct ordering by only processing this queue after the previous one has finished processing
+ if (this.eventBufferPromiseChain) {
+ this.eventBufferPromiseChain = this.eventBufferPromiseChain.then(() =>
+ this.evaluateEventBuffer(currentEventBuffer),
+ );
+ } else {
+ this.eventBufferPromiseChain = this.evaluateEventBuffer(currentEventBuffer);
+ }
+ };
+
+ private async evaluateEventBuffer(eventBuffer: MatrixEvent[]): Promise<void> {
+ await Promise.all(eventBuffer.map((event) => this.client.decryptEventIfNeeded(event)));
+
+ const callEvents = eventBuffer.filter((event) => {
+ const eventType = event.getType();
+ return eventType.startsWith("m.call.") || eventType.startsWith("org.matrix.call.");
+ });
+
+ const ignoreCallIds = new Set<string>();
+
+ // inspect the buffer and mark all calls which have been answered
+ // or hung up before passing them to the call event handler.
+ for (const event of callEvents) {
+ const eventType = event.getType();
+
+ if (eventType === EventType.CallAnswer || eventType === EventType.CallHangup) {
+ ignoreCallIds.add(event.getContent().call_id);
+ }
+ }
+
+ // Process call events in the order that they were received
+ for (const event of callEvents) {
+ const eventType = event.getType();
+ const callId = event.getContent().call_id;
+
+ if (eventType === EventType.CallInvite && ignoreCallIds.has(callId)) {
+ // This call has previously been answered or hung up: ignore it
+ continue;
+ }
+
+ try {
+ await this.handleCallEvent(event);
+ } catch (e) {
+ logger.error("CallEventHandler evaluateEventBuffer() caught exception handling call event", e);
+ }
+ }
+ }
+
+ private onRoomTimeline = (event: MatrixEvent): void => {
+ this.callEventBuffer.push(event);
+ };
+
+ private onToDeviceEvent = (event: MatrixEvent): void => {
+ const content = event.getContent();
+
+ if (!content.call_id) {
+ this.callEventBuffer.push(event);
+ return;
+ }
+
+ if (!this.nextSeqByCall.has(content.call_id)) {
+ this.nextSeqByCall.set(content.call_id, 0);
+ }
+
+ if (content.seq === undefined) {
+ this.callEventBuffer.push(event);
+ return;
+ }
+
+ const nextSeq = this.nextSeqByCall.get(content.call_id) || 0;
+
+ if (content.seq !== nextSeq) {
+ if (!this.toDeviceEventBuffers.has(content.call_id)) {
+ this.toDeviceEventBuffers.set(content.call_id, []);
+ }
+
+ const buffer = this.toDeviceEventBuffers.get(content.call_id)!;
+ const index = buffer.findIndex((e) => e.getContent().seq > content.seq);
+
+ if (index === -1) {
+ buffer.push(event);
+ } else {
+ buffer.splice(index, 0, event);
+ }
+ } else {
+ const callId = content.call_id;
+ this.callEventBuffer.push(event);
+ this.nextSeqByCall.set(callId, content.seq + 1);
+
+ const buffer = this.toDeviceEventBuffers.get(callId);
+
+ let nextEvent = buffer && buffer.shift();
+
+ while (nextEvent && nextEvent.getContent().seq === this.nextSeqByCall.get(callId)) {
+ this.callEventBuffer.push(nextEvent);
+ this.nextSeqByCall.set(callId, nextEvent.getContent().seq + 1);
+ nextEvent = buffer!.shift();
+ }
+ }
+ };
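+
+ /*
+ * Worked example of the re-ordering above: if to-device events for one call
+ * arrive with seq 0, 2, 3, 1, then 0 is queued for processing straight away,
+ * 2 and 3 are parked in toDeviceEventBuffers, and when 1 arrives it is queued
+ * followed by the parked 2 and 3, leaving nextSeqByCall at 4.
+ */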
+
+ private async handleCallEvent(event: MatrixEvent): Promise<void> {
+ this.client.emit(ClientEvent.ReceivedVoipEvent, event);
+
+ const content = event.getContent();
+ const callRoomId =
+ event.getRoomId() || this.client.groupCallEventHandler!.getGroupCallById(content.conf_id)?.room?.roomId;
+ const groupCallId = content.conf_id;
+ const type = event.getType() as EventType;
+ const senderId = event.getSender()!;
+ let call = content.call_id ? this.calls.get(content.call_id) : undefined;
+
+ let opponentDeviceId: string | undefined;
+
+ let groupCall: GroupCall | undefined;
+ if (groupCallId) {
+ groupCall = this.client.groupCallEventHandler!.getGroupCallById(groupCallId);
+
+ if (!groupCall) {
+ logger.warn(
+ `CallEventHandler handleCallEvent() could not find a group call - ignoring event (groupCallId=${groupCallId}, type=${type})`,
+ );
+ return;
+ }
+
+ opponentDeviceId = content.device_id;
+
+ if (!opponentDeviceId) {
+ logger.warn(
+ `CallEventHandler handleCallEvent() could not find a device id - ignoring event (senderId=${senderId})`,
+ );
+ groupCall.emit(GroupCallEvent.Error, new GroupCallUnknownDeviceError(senderId));
+ return;
+ }
+
+ if (content.dest_session_id !== this.client.getSessionId()) {
+ logger.warn(
+ "CallEventHandler handleCallEvent() call event does not match current session id - ignoring",
+ );
+ return;
+ }
+ }
+
+ const weSentTheEvent =
+ senderId === this.client.credentials.userId &&
+ (opponentDeviceId === undefined || opponentDeviceId === this.client.getDeviceId()!);
+
+ if (!callRoomId) return;
+
+ if (type === EventType.CallInvite) {
+ // ignore invites you send
+ if (weSentTheEvent) return;
+ // expired call
+ if (event.getLocalAge() > content.lifetime - RING_GRACE_PERIOD) return;
+ // stale/old invite event
+ if (call && call.state === CallState.Ended) return;
+
+ if (call) {
+ logger.warn(
+ `CallEventHandler handleCallEvent() already has a call but got an invite - clobbering (callId=${content.call_id})`,
+ );
+ }
+
+ if (content.invitee && content.invitee !== this.client.getUserId()) {
+ return; // This invite was meant for another user in the room
+ }
+
+            const timeUntilTurnCredsExpire = (this.client.getTurnServersExpiry() ?? 0) - Date.now();
+            logger.info(
+                "CallEventHandler handleCallEvent() current turn creds expire in " + timeUntilTurnCredsExpire + " ms",
+ );
+ call =
+ createNewMatrixCall(this.client, callRoomId, {
+ forceTURN: this.client.forceTURN,
+ opponentDeviceId,
+ groupCallId,
+ opponentSessionId: content.sender_session_id,
+ }) ?? undefined;
+ if (!call) {
+ logger.log(
+ `CallEventHandler handleCallEvent() this client does not support WebRTC (callId=${content.call_id})`,
+ );
+                // don't hang up the call: there could be other clients
+                // connected that do support WebRTC and declining the
+                // call on their behalf would be really annoying.
+ return;
+ }
+
+ call.callId = content.call_id;
+ const stats = groupCall?.getGroupCallStats();
+ if (stats) {
+ call.initStats(stats);
+ }
+
+ try {
+ await call.initWithInvite(event);
+ } catch (e) {
+ if (e instanceof CallError) {
+ if (e.code === GroupCallErrorCode.UnknownDevice) {
+ groupCall?.emit(GroupCallEvent.Error, e);
+ } else {
+ logger.error(e);
+ }
+ }
+ }
+ this.calls.set(call.callId, call);
+
+ // if we stashed candidate events for that call ID, play them back now
+ if (this.candidateEventsByCall.get(call.callId)) {
+ for (const ev of this.candidateEventsByCall.get(call.callId)!) {
+ call.onRemoteIceCandidatesReceived(ev);
+ }
+ }
+
+ // Were we trying to call that user (room)?
+ let existingCall: MatrixCall | undefined;
+ for (const thisCall of this.calls.values()) {
+ const isCalling = [CallState.WaitLocalMedia, CallState.CreateOffer, CallState.InviteSent].includes(
+ thisCall.state,
+ );
+
+ if (
+ call.roomId === thisCall.roomId &&
+ thisCall.direction === CallDirection.Outbound &&
+ call.getOpponentMember()?.userId === thisCall.invitee &&
+ isCalling
+ ) {
+ existingCall = thisCall;
+ break;
+ }
+ }
+
+ if (existingCall) {
+ if (existingCall.callId > call.callId) {
+ logger.log(
+ `CallEventHandler handleCallEvent() detected glare - answering incoming call and canceling outgoing call (incomingId=${call.callId}, outgoingId=${existingCall.callId})`,
+ );
+ existingCall.replacedBy(call);
+ } else {
+ logger.log(
+ `CallEventHandler handleCallEvent() detected glare - hanging up incoming call (incomingId=${call.callId}, outgoingId=${existingCall.callId})`,
+ );
+ call.hangup(CallErrorCode.Replaced, true);
+ }
+ } else {
+ this.client.emit(CallEventHandlerEvent.Incoming, call);
+ }
+ return;
+ } else if (type === EventType.CallCandidates) {
+ if (weSentTheEvent) return;
+
+ if (!call) {
+ // store the candidates; we may get a call eventually.
+ if (!this.candidateEventsByCall.has(content.call_id)) {
+ this.candidateEventsByCall.set(content.call_id, []);
+ }
+ this.candidateEventsByCall.get(content.call_id)!.push(event);
+ } else {
+ call.onRemoteIceCandidatesReceived(event);
+ }
+ return;
+ } else if ([EventType.CallHangup, EventType.CallReject].includes(type)) {
+ // Note that we also observe our own hangups here so we can see
+ // if we've already rejected a call that would otherwise be valid
+ if (!call) {
+ // if not live, store the fact that the call has ended because
+ // we're probably getting events backwards so
+ // the hangup will come before the invite
+ call =
+ createNewMatrixCall(this.client, callRoomId, {
+ opponentDeviceId,
+ opponentSessionId: content.sender_session_id,
+ }) ?? undefined;
+ if (call) {
+ call.callId = content.call_id;
+ call.initWithHangup(event);
+ this.calls.set(content.call_id, call);
+ }
+ } else {
+ if (call.state !== CallState.Ended) {
+ if (type === EventType.CallHangup) {
+ call.onHangupReceived(content as MCallHangupReject);
+ } else {
+ call.onRejectReceived(content as MCallHangupReject);
+ }
+
+ // @ts-expect-error typescript thinks the state can't be 'ended' because we're
+ // inside the if block where it wasn't, but it could have changed because
+ // on[Hangup|Reject]Received are side-effecty.
+ if (call.state === CallState.Ended) this.calls.delete(content.call_id);
+ }
+ }
+ return;
+ }
+
+ // The following events need a call and a peer connection
+ if (!call || !call.hasPeerConnection) {
+ logger.info(
+ `CallEventHandler handleCallEvent() discarding possible call event as we don't have a call (type=${type})`,
+ );
+ return;
+ }
+ // Ignore remote echo
+ if (event.getContent().party_id === call.ourPartyId) return;
+
+ switch (type) {
+ case EventType.CallAnswer:
+ if (weSentTheEvent) {
+ if (call.state === CallState.Ringing) {
+ call.onAnsweredElsewhere(content as MCallAnswer);
+ }
+ } else {
+ call.onAnswerReceived(event);
+ }
+ break;
+ case EventType.CallSelectAnswer:
+ call.onSelectAnswerReceived(event);
+ break;
+
+ case EventType.CallNegotiate:
+ call.onNegotiateReceived(event);
+ break;
+
+ case EventType.CallAssertedIdentity:
+ case EventType.CallAssertedIdentityPrefix:
+ call.onAssertedIdentityReceived(event);
+ break;
+
+ case EventType.CallSDPStreamMetadataChanged:
+ case EventType.CallSDPStreamMetadataChangedPrefix:
+ call.onSDPStreamMetadataChangedReceived(event);
+ break;
+ }
+ }
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callEventTypes.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callEventTypes.ts
new file mode 100644
index 0000000..f06ed5b
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callEventTypes.ts
@@ -0,0 +1,92 @@
+// allow non-camelcase as these are event types that go onto the wire
+/* eslint-disable camelcase */
+
+import { CallErrorCode } from "./call";
+
+// TODO: Change to "sdp_stream_metadata" when MSC3077 is merged
+export const SDPStreamMetadataKey = "org.matrix.msc3077.sdp_stream_metadata";
+
+export enum SDPStreamMetadataPurpose {
+ Usermedia = "m.usermedia",
+ Screenshare = "m.screenshare",
+}
+
+export interface SDPStreamMetadataObject {
+ purpose: SDPStreamMetadataPurpose;
+ audio_muted: boolean;
+ video_muted: boolean;
+}
+
+export interface SDPStreamMetadata {
+ [key: string]: SDPStreamMetadataObject;
+}
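+
+/*
+ * A minimal sketch of the shape this metadata takes on the wire, keyed by
+ * stream id (the stream id below is hypothetical):
+ * ```
+ * const metadata: SDPStreamMetadata = {
+ *     "stream_id_1": {
+ *         purpose: SDPStreamMetadataPurpose.Usermedia,
+ *         audio_muted: false,
+ *         video_muted: true,
+ *     },
+ * };
+ * ```
+ */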
+
+export interface CallCapabilities {
+ "m.call.transferee": boolean;
+ "m.call.dtmf": boolean;
+}
+
+export interface CallReplacesTarget {
+ id: string;
+ display_name: string;
+ avatar_url: string;
+}
+
+export interface MCallBase {
+ call_id: string;
+ version: string | number;
+ party_id?: string;
+ sender_session_id?: string;
+ dest_session_id?: string;
+}
+
+export interface MCallAnswer extends MCallBase {
+ answer: RTCSessionDescription;
+ capabilities?: CallCapabilities;
+ [SDPStreamMetadataKey]: SDPStreamMetadata;
+}
+
+export interface MCallSelectAnswer extends MCallBase {
+ selected_party_id: string;
+}
+
+export interface MCallInviteNegotiate extends MCallBase {
+ offer: RTCSessionDescription;
+ description: RTCSessionDescription;
+ lifetime: number;
+ capabilities?: CallCapabilities;
+ invitee?: string;
+ sender_session_id?: string;
+ dest_session_id?: string;
+ [SDPStreamMetadataKey]: SDPStreamMetadata;
+}
+
+export interface MCallSDPStreamMetadataChanged extends MCallBase {
+ [SDPStreamMetadataKey]: SDPStreamMetadata;
+}
+
+export interface MCallReplacesEvent extends MCallBase {
+ replacement_id: string;
+ target_user: CallReplacesTarget;
+ create_call: string;
+ await_call: string;
+ target_room: string;
+}
+
+export interface MCAllAssertedIdentity extends MCallBase {
+ asserted_identity: {
+ id: string;
+ display_name: string;
+ avatar_url: string;
+ };
+}
+
+export interface MCallCandidates extends MCallBase {
+ candidates: RTCIceCandidate[];
+}
+
+export interface MCallHangupReject extends MCallBase {
+ reason?: CallErrorCode;
+}
+
+/* eslint-enable camelcase */
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callFeed.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callFeed.ts
new file mode 100644
index 0000000..505cf56
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/callFeed.ts
@@ -0,0 +1,361 @@
+/*
+Copyright 2021 Šimon Brandner <simon.bra.ag@gmail.com>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { SDPStreamMetadataPurpose } from "./callEventTypes";
+import { acquireContext, releaseContext } from "./audioContext";
+import { MatrixClient } from "../client";
+import { RoomMember } from "../models/room-member";
+import { logger } from "../logger";
+import { TypedEventEmitter } from "../models/typed-event-emitter";
+import { CallEvent, CallState, MatrixCall } from "./call";
+
+const POLLING_INTERVAL = 200; // ms
+export const SPEAKING_THRESHOLD = -60; // dB
+const SPEAKING_SAMPLE_COUNT = 8; // samples
+
+export interface ICallFeedOpts {
+ client: MatrixClient;
+ roomId?: string;
+ userId: string;
+ deviceId: string | undefined;
+ stream: MediaStream;
+ purpose: SDPStreamMetadataPurpose;
+ /**
+ * Whether or not the remote SDPStreamMetadata says audio is muted
+ */
+ audioMuted: boolean;
+ /**
+ * Whether or not the remote SDPStreamMetadata says video is muted
+ */
+ videoMuted: boolean;
+ /**
+ * The MatrixCall which is the source of this CallFeed
+ */
+ call?: MatrixCall;
+}
+
+export enum CallFeedEvent {
+ NewStream = "new_stream",
+ MuteStateChanged = "mute_state_changed",
+ LocalVolumeChanged = "local_volume_changed",
+ VolumeChanged = "volume_changed",
+ ConnectedChanged = "connected_changed",
+ Speaking = "speaking",
+ Disposed = "disposed",
+}
+
+type EventHandlerMap = {
+ [CallFeedEvent.NewStream]: (stream: MediaStream) => void;
+ [CallFeedEvent.MuteStateChanged]: (audioMuted: boolean, videoMuted: boolean) => void;
+ [CallFeedEvent.LocalVolumeChanged]: (localVolume: number) => void;
+ [CallFeedEvent.VolumeChanged]: (volume: number) => void;
+ [CallFeedEvent.ConnectedChanged]: (connected: boolean) => void;
+ [CallFeedEvent.Speaking]: (speaking: boolean) => void;
+ [CallFeedEvent.Disposed]: () => void;
+};
+
+export class CallFeed extends TypedEventEmitter<CallFeedEvent, EventHandlerMap> {
+ public stream: MediaStream;
+ public sdpMetadataStreamId: string;
+ public userId: string;
+ public readonly deviceId: string | undefined;
+ public purpose: SDPStreamMetadataPurpose;
+ public speakingVolumeSamples: number[];
+
+ private client: MatrixClient;
+ private call?: MatrixCall;
+ private roomId?: string;
+ private audioMuted: boolean;
+ private videoMuted: boolean;
+ private localVolume = 1;
+ private measuringVolumeActivity = false;
+ private audioContext?: AudioContext;
+ private analyser?: AnalyserNode;
+ private frequencyBinCount?: Float32Array;
+ private speakingThreshold = SPEAKING_THRESHOLD;
+ private speaking = false;
+ private volumeLooperTimeout?: ReturnType<typeof setTimeout>;
+ private _disposed = false;
+ private _connected = false;
+
+ public constructor(opts: ICallFeedOpts) {
+ super();
+
+ this.client = opts.client;
+ this.call = opts.call;
+ this.roomId = opts.roomId;
+ this.userId = opts.userId;
+ this.deviceId = opts.deviceId;
+ this.purpose = opts.purpose;
+ this.audioMuted = opts.audioMuted;
+ this.videoMuted = opts.videoMuted;
+ this.speakingVolumeSamples = new Array(SPEAKING_SAMPLE_COUNT).fill(-Infinity);
+ this.sdpMetadataStreamId = opts.stream.id;
+
+ this.updateStream(null, opts.stream);
+ this.stream = opts.stream; // updateStream does this, but this makes TS happier
+
+ if (this.hasAudioTrack) {
+ this.initVolumeMeasuring();
+ }
+
+ if (opts.call) {
+ opts.call.addListener(CallEvent.State, this.onCallState);
+ this.onCallState(opts.call.state);
+ }
+ }
+
+ public get connected(): boolean {
+ // Local feeds are always considered connected
+ return this.isLocal() || this._connected;
+ }
+
+ private set connected(connected: boolean) {
+ this._connected = connected;
+ this.emit(CallFeedEvent.ConnectedChanged, this.connected);
+ }
+
+ private get hasAudioTrack(): boolean {
+ return this.stream.getAudioTracks().length > 0;
+ }
+
+ private updateStream(oldStream: MediaStream | null, newStream: MediaStream): void {
+ if (newStream === oldStream) return;
+
+ if (oldStream) {
+ oldStream.removeEventListener("addtrack", this.onAddTrack);
+ this.measureVolumeActivity(false);
+ }
+
+ this.stream = newStream;
+ newStream.addEventListener("addtrack", this.onAddTrack);
+
+ if (this.hasAudioTrack) {
+ this.initVolumeMeasuring();
+ } else {
+ this.measureVolumeActivity(false);
+ }
+
+ this.emit(CallFeedEvent.NewStream, this.stream);
+ }
+
+ private initVolumeMeasuring(): void {
+ if (!this.hasAudioTrack) return;
+ if (!this.audioContext) this.audioContext = acquireContext();
+
+ this.analyser = this.audioContext.createAnalyser();
+ this.analyser.fftSize = 512;
+ this.analyser.smoothingTimeConstant = 0.1;
+
+ const mediaStreamAudioSourceNode = this.audioContext.createMediaStreamSource(this.stream);
+ mediaStreamAudioSourceNode.connect(this.analyser);
+
+ this.frequencyBinCount = new Float32Array(this.analyser.frequencyBinCount);
+ }
+
+ private onAddTrack = (): void => {
+ this.emit(CallFeedEvent.NewStream, this.stream);
+ };
+
+ private onCallState = (state: CallState): void => {
+ if (state === CallState.Connected) {
+ this.connected = true;
+ } else if (state === CallState.Connecting) {
+ this.connected = false;
+ }
+ };
+
+ /**
+     * Returns the call room member for this feed's user
+     * @returns the member of the call room, or null if not found
+ */
+ public getMember(): RoomMember | null {
+ const callRoom = this.client.getRoom(this.roomId);
+ return callRoom?.getMember(this.userId) ?? null;
+ }
+
+ /**
+ * Returns true if CallFeed is local, otherwise returns false
+ * @returns is local?
+ */
+ public isLocal(): boolean {
+ return (
+ this.userId === this.client.getUserId() &&
+ (this.deviceId === undefined || this.deviceId === this.client.getDeviceId())
+ );
+ }
+
+ /**
+ * Returns true if audio is muted or if there are no audio
+ * tracks, otherwise returns false
+ * @returns is audio muted?
+ */
+ public isAudioMuted(): boolean {
+ return this.stream.getAudioTracks().length === 0 || this.audioMuted;
+ }
+
+ /**
+     * Returns true if video is muted or if there are no video
+ * tracks, otherwise returns false
+ * @returns is video muted?
+ */
+ public isVideoMuted(): boolean {
+ // We assume only one video track
+ return this.stream.getVideoTracks().length === 0 || this.videoMuted;
+ }
+
+ public isSpeaking(): boolean {
+ return this.speaking;
+ }
+
+ /**
+ * Replaces the current MediaStream with a new one.
+     * As far as remote parties are concerned this will be a different, new
+     * stream, but this can be used for convenience locally to set up
+     * volume listeners automatically on the new stream etc.
+ * @param newStream - new stream with which to replace the current one
+ */
+ public setNewStream(newStream: MediaStream): void {
+ this.updateStream(this.stream, newStream);
+ }
+
+ /**
+     * Set one or both of the feed's internal audio and video mute states.
+ * Either value may be null to leave it as-is
+ * @param audioMuted - is the feed's audio muted?
+ * @param videoMuted - is the feed's video muted?
+ */
+ public setAudioVideoMuted(audioMuted: boolean | null, videoMuted: boolean | null): void {
+ if (audioMuted !== null) {
+ if (this.audioMuted !== audioMuted) {
+ this.speakingVolumeSamples.fill(-Infinity);
+ }
+ this.audioMuted = audioMuted;
+ }
+ if (videoMuted !== null) this.videoMuted = videoMuted;
+ this.emit(CallFeedEvent.MuteStateChanged, this.audioMuted, this.videoMuted);
+ }
+
+ /**
+     * Starts emitting volume_changed events where the emitted value is in decibels
+ * @param enabled - emit volume changes
+ */
+ public measureVolumeActivity(enabled: boolean): void {
+ if (enabled) {
+ if (!this.analyser || !this.frequencyBinCount || !this.hasAudioTrack) return;
+
+ this.measuringVolumeActivity = true;
+ this.volumeLooper();
+ } else {
+ this.measuringVolumeActivity = false;
+ this.speakingVolumeSamples.fill(-Infinity);
+ this.emit(CallFeedEvent.VolumeChanged, -Infinity);
+ }
+ }
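+
+    /*
+     * A minimal usage sketch (the `feed` variable is hypothetical): volume
+     * metering is opt-in, so a consumer has to enable it before VolumeChanged
+     * or Speaking events will be emitted.
+     * ```
+     * feed.measureVolumeActivity(true);
+     * feed.on(CallFeedEvent.VolumeChanged, (volume) => console.log(volume)); // dB
+     * feed.on(CallFeedEvent.Speaking, (speaking) => console.log(speaking));
+     * ```
+     */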
+
+ public setSpeakingThreshold(threshold: number): void {
+ this.speakingThreshold = threshold;
+ }
+
+ private volumeLooper = (): void => {
+ if (!this.analyser) return;
+
+ if (!this.measuringVolumeActivity) return;
+
+ this.analyser.getFloatFrequencyData(this.frequencyBinCount!);
+
+ let maxVolume = -Infinity;
+ for (const volume of this.frequencyBinCount!) {
+ if (volume > maxVolume) {
+ maxVolume = volume;
+ }
+ }
+
+ this.speakingVolumeSamples.shift();
+ this.speakingVolumeSamples.push(maxVolume);
+
+ this.emit(CallFeedEvent.VolumeChanged, maxVolume);
+
+ let newSpeaking = false;
+
+ for (const volume of this.speakingVolumeSamples) {
+ if (volume > this.speakingThreshold) {
+ newSpeaking = true;
+ break;
+ }
+ }
+
+ if (this.speaking !== newSpeaking) {
+ this.speaking = newSpeaking;
+ this.emit(CallFeedEvent.Speaking, this.speaking);
+ }
+
+ this.volumeLooperTimeout = setTimeout(this.volumeLooper, POLLING_INTERVAL);
+ };
+
+ public clone(): CallFeed {
+ const mediaHandler = this.client.getMediaHandler();
+ const stream = this.stream.clone();
+        logger.log(`CallFeed clone() cloning stream (originalStreamId=${this.stream.id}, newStreamId=${stream.id})`);
+
+ if (this.purpose === SDPStreamMetadataPurpose.Usermedia) {
+ mediaHandler.userMediaStreams.push(stream);
+ } else {
+ mediaHandler.screensharingStreams.push(stream);
+ }
+
+ return new CallFeed({
+ client: this.client,
+ roomId: this.roomId,
+ userId: this.userId,
+ deviceId: this.deviceId,
+ stream,
+ purpose: this.purpose,
+ audioMuted: this.audioMuted,
+ videoMuted: this.videoMuted,
+ });
+ }
+
+ public dispose(): void {
+ clearTimeout(this.volumeLooperTimeout);
+ this.stream?.removeEventListener("addtrack", this.onAddTrack);
+ this.call?.removeListener(CallEvent.State, this.onCallState);
+ if (this.audioContext) {
+ this.audioContext = undefined;
+ this.analyser = undefined;
+ releaseContext();
+ }
+ this._disposed = true;
+ this.emit(CallFeedEvent.Disposed);
+ }
+
+ public get disposed(): boolean {
+ return this._disposed;
+ }
+
+ private set disposed(value: boolean) {
+ this._disposed = value;
+ }
+
+ public getLocalVolume(): number {
+ return this.localVolume;
+ }
+
+ public setLocalVolume(localVolume: number): void {
+ this.localVolume = localVolume;
+ this.emit(CallFeedEvent.LocalVolumeChanged, localVolume);
+ }
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/groupCall.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/groupCall.ts
new file mode 100644
index 0000000..c0896c4
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/groupCall.ts
@@ -0,0 +1,1598 @@
+import { TypedEventEmitter } from "../models/typed-event-emitter";
+import { CallFeed, SPEAKING_THRESHOLD } from "./callFeed";
+import { MatrixClient, IMyDevice } from "../client";
+import {
+ CallErrorCode,
+ CallEvent,
+ CallEventHandlerMap,
+ CallState,
+ genCallID,
+ MatrixCall,
+ setTracksEnabled,
+ createNewMatrixCall,
+ CallError,
+} from "./call";
+import { RoomMember } from "../models/room-member";
+import { Room } from "../models/room";
+import { RoomStateEvent } from "../models/room-state";
+import { logger } from "../logger";
+import { ReEmitter } from "../ReEmitter";
+import { SDPStreamMetadataPurpose } from "./callEventTypes";
+import { MatrixEvent } from "../models/event";
+import { EventType } from "../@types/event";
+import { CallEventHandlerEvent } from "./callEventHandler";
+import { GroupCallEventHandlerEvent } from "./groupCallEventHandler";
+import { IScreensharingOpts } from "./mediaHandler";
+import { mapsEqual } from "../utils";
+import { GroupCallStats } from "./stats/groupCallStats";
+import { ByteSentStatsReport, ConnectionStatsReport, StatsReport } from "./stats/statsReport";
+
+export enum GroupCallIntent {
+ Ring = "m.ring",
+ Prompt = "m.prompt",
+ Room = "m.room",
+}
+
+export enum GroupCallType {
+ Video = "m.video",
+ Voice = "m.voice",
+}
+
+export enum GroupCallTerminationReason {
+ CallEnded = "call_ended",
+}
+
+export type CallsByUserAndDevice = Map<string, Map<string, MatrixCall>>;
+
+/**
+ * Because event names are just strings, they need to be unique
+ * across all event types of the event emitter.
+ * Some objects could emit more than one set of events.
+ */
+export enum GroupCallEvent {
+ GroupCallStateChanged = "group_call_state_changed",
+ ActiveSpeakerChanged = "active_speaker_changed",
+ CallsChanged = "calls_changed",
+ UserMediaFeedsChanged = "user_media_feeds_changed",
+ ScreenshareFeedsChanged = "screenshare_feeds_changed",
+ LocalScreenshareStateChanged = "local_screenshare_state_changed",
+ LocalMuteStateChanged = "local_mute_state_changed",
+ ParticipantsChanged = "participants_changed",
+ Error = "group_call_error",
+}
+
+export type GroupCallEventHandlerMap = {
+ [GroupCallEvent.GroupCallStateChanged]: (newState: GroupCallState, oldState: GroupCallState) => void;
+ [GroupCallEvent.ActiveSpeakerChanged]: (activeSpeaker: CallFeed | undefined) => void;
+ [GroupCallEvent.CallsChanged]: (calls: CallsByUserAndDevice) => void;
+ [GroupCallEvent.UserMediaFeedsChanged]: (feeds: CallFeed[]) => void;
+ [GroupCallEvent.ScreenshareFeedsChanged]: (feeds: CallFeed[]) => void;
+ [GroupCallEvent.LocalScreenshareStateChanged]: (
+ isScreensharing: boolean,
+ feed?: CallFeed,
+ sourceId?: string,
+ ) => void;
+ [GroupCallEvent.LocalMuteStateChanged]: (audioMuted: boolean, videoMuted: boolean) => void;
+ [GroupCallEvent.ParticipantsChanged]: (participants: Map<RoomMember, Map<string, ParticipantState>>) => void;
+ /**
+     * Fires whenever call.js encounters an error while setting up the call.
+ * <p>
+ * The error given will have a code equal to either `MatrixCall.ERR_LOCAL_OFFER_FAILED` or
+ * `MatrixCall.ERR_NO_USER_MEDIA`. `ERR_LOCAL_OFFER_FAILED` is emitted when the local client
+ * fails to create an offer. `ERR_NO_USER_MEDIA` is emitted when the user has denied access
+ * to their audio/video hardware.
+ * @param err - The error raised by MatrixCall.
+ * @example
+ * ```
+ * matrixCall.on("error", function(err){
+ * console.error(err.code, err);
+ * });
+ * ```
+ */
+ [GroupCallEvent.Error]: (error: GroupCallError) => void;
+};
+
+export enum GroupCallStatsReportEvent {
+ ConnectionStats = "GroupCall.connection_stats",
+ ByteSentStats = "GroupCall.byte_sent_stats",
+}
+
+export type GroupCallStatsReportEventHandlerMap = {
+ [GroupCallStatsReportEvent.ConnectionStats]: (report: GroupCallStatsReport<ConnectionStatsReport>) => void;
+ [GroupCallStatsReportEvent.ByteSentStats]: (report: GroupCallStatsReport<ByteSentStatsReport>) => void;
+};
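+
+/*
+ * A minimal sketch of consuming these stats reports (the `groupCall` variable
+ * is hypothetical); GroupCall re-emits the reports gathered by its
+ * GroupCallStats instance:
+ * ```
+ * groupCall.on(GroupCallStatsReportEvent.ConnectionStats, ({ report }) => {
+ *     console.log(report);
+ * });
+ * ```
+ */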
+
+export enum GroupCallErrorCode {
+ NoUserMedia = "no_user_media",
+ UnknownDevice = "unknown_device",
+ PlaceCallFailed = "place_call_failed",
+}
+
+export interface GroupCallStatsReport<T extends ConnectionStatsReport | ByteSentStatsReport> {
+ report: T;
+}
+
+export class GroupCallError extends Error {
+ public code: string;
+
+ public constructor(code: GroupCallErrorCode, msg: string, err?: Error) {
+ // Still don't think there's any way to have proper nested errors
+ if (err) {
+ super(msg + ": " + err);
+ } else {
+ super(msg);
+ }
+
+ this.code = code;
+ }
+}
+
+export class GroupCallUnknownDeviceError extends GroupCallError {
+ public constructor(public userId: string) {
+ super(GroupCallErrorCode.UnknownDevice, "No device found for " + userId);
+ }
+}
+
+export class OtherUserSpeakingError extends Error {
+ public constructor() {
+ super("Cannot unmute: another user is speaking");
+ }
+}
+
+export interface IGroupCallDataChannelOptions {
+ ordered: boolean;
+ maxPacketLifeTime: number;
+ maxRetransmits: number;
+ protocol: string;
+}
+
+export interface IGroupCallRoomState {
+ "m.intent": GroupCallIntent;
+ "m.type": GroupCallType;
+ "io.element.ptt"?: boolean;
+ // TODO: Specify data-channels
+ "dataChannelsEnabled"?: boolean;
+ "dataChannelOptions"?: IGroupCallDataChannelOptions;
+}
+
+export interface IGroupCallRoomMemberFeed {
+ purpose: SDPStreamMetadataPurpose;
+}
+
+export interface IGroupCallRoomMemberDevice {
+ device_id: string;
+ session_id: string;
+ expires_ts: number;
+ feeds: IGroupCallRoomMemberFeed[];
+}
+
+export interface IGroupCallRoomMemberCallState {
+ "m.call_id": string;
+ "m.foci"?: string[];
+ "m.devices": IGroupCallRoomMemberDevice[];
+}
+
+export interface IGroupCallRoomMemberState {
+ "m.calls": IGroupCallRoomMemberCallState[];
+}
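+
+/*
+ * A minimal sketch of a member's group call state content (all ids and
+ * timestamps are hypothetical):
+ * ```
+ * const memberState: IGroupCallRoomMemberState = {
+ *     "m.calls": [
+ *         {
+ *             "m.call_id": "1675123456789abcdef",
+ *             "m.devices": [
+ *                 {
+ *                     device_id: "ABCDEFGHIJ",
+ *                     session_id: "session1",
+ *                     expires_ts: 1675123456789,
+ *                     feeds: [{ purpose: SDPStreamMetadataPurpose.Usermedia }],
+ *                 },
+ *             ],
+ *         },
+ *     ],
+ * };
+ * ```
+ */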
+
+export enum GroupCallState {
+ LocalCallFeedUninitialized = "local_call_feed_uninitialized",
+ InitializingLocalCallFeed = "initializing_local_call_feed",
+ LocalCallFeedInitialized = "local_call_feed_initialized",
+ Entered = "entered",
+ Ended = "ended",
+}
+
+export interface ParticipantState {
+ sessionId: string;
+ screensharing: boolean;
+}
+
+interface ICallHandlers {
+ onCallFeedsChanged: (feeds: CallFeed[]) => void;
+ onCallStateChanged: (state: CallState, oldState: CallState | undefined) => void;
+ onCallHangup: (call: MatrixCall) => void;
+ onCallReplaced: (newCall: MatrixCall) => void;
+}
+
+const DEVICE_TIMEOUT = 1000 * 60 * 60; // 1 hour
+
+function getCallUserId(call: MatrixCall): string | null {
+ return call.getOpponentMember()?.userId || call.invitee || null;
+}
+
+export class GroupCall extends TypedEventEmitter<
+ GroupCallEvent | CallEvent | GroupCallStatsReportEvent,
+ GroupCallEventHandlerMap & CallEventHandlerMap & GroupCallStatsReportEventHandlerMap
+> {
+ // Config
+ public activeSpeakerInterval = 1000;
+ public retryCallInterval = 5000;
+ public participantTimeout = 1000 * 15;
+ public pttMaxTransmitTime = 1000 * 20;
+
+ public activeSpeaker?: CallFeed;
+ public localCallFeed?: CallFeed;
+ public localScreenshareFeed?: CallFeed;
+ public localDesktopCapturerSourceId?: string;
+ public readonly userMediaFeeds: CallFeed[] = [];
+ public readonly screenshareFeeds: CallFeed[] = [];
+ public groupCallId: string;
+ public readonly allowCallWithoutVideoAndAudio: boolean;
+
+ private readonly calls = new Map<string, Map<string, MatrixCall>>(); // user_id -> device_id -> MatrixCall
+ private callHandlers = new Map<string, Map<string, ICallHandlers>>(); // user_id -> device_id -> ICallHandlers
+ private activeSpeakerLoopInterval?: ReturnType<typeof setTimeout>;
+ private retryCallLoopInterval?: ReturnType<typeof setTimeout>;
+ private retryCallCounts: Map<string, Map<string, number>> = new Map(); // user_id -> device_id -> count
+ private reEmitter: ReEmitter;
+ private transmitTimer: ReturnType<typeof setTimeout> | null = null;
+ private participantsExpirationTimer: ReturnType<typeof setTimeout> | null = null;
+ private resendMemberStateTimer: ReturnType<typeof setInterval> | null = null;
+ private initWithAudioMuted = false;
+ private initWithVideoMuted = false;
+ private initCallFeedPromise?: Promise<void>;
+
+ private readonly stats: GroupCallStats;
+
+ public constructor(
+ private client: MatrixClient,
+ public room: Room,
+ public type: GroupCallType,
+ public isPtt: boolean,
+ public intent: GroupCallIntent,
+ groupCallId?: string,
+ private dataChannelsEnabled?: boolean,
+ private dataChannelOptions?: IGroupCallDataChannelOptions,
+ isCallWithoutVideoAndAudio?: boolean,
+ ) {
+ super();
+ this.reEmitter = new ReEmitter(this);
+ this.groupCallId = groupCallId ?? genCallID();
+ this.creationTs =
+ room.currentState.getStateEvents(EventType.GroupCallPrefix, this.groupCallId)?.getTs() ?? null;
+ this.updateParticipants();
+
+ room.on(RoomStateEvent.Update, this.onRoomState);
+ this.on(GroupCallEvent.ParticipantsChanged, this.onParticipantsChanged);
+ this.on(GroupCallEvent.GroupCallStateChanged, this.onStateChanged);
+ this.on(GroupCallEvent.LocalScreenshareStateChanged, this.onLocalFeedsChanged);
+ this.allowCallWithoutVideoAndAudio = !!isCallWithoutVideoAndAudio;
+
+ const userID = this.client.getUserId() || "unknown";
+ this.stats = new GroupCallStats(this.groupCallId, userID);
+ this.stats.reports.on(StatsReport.CONNECTION_STATS, this.onConnectionStats);
+ this.stats.reports.on(StatsReport.BYTE_SENT_STATS, this.onByteSentStats);
+ }
+
+ private onConnectionStats = (report: ConnectionStatsReport): void => {
+ // @TODO: Implement data argumentation
+ this.emit(GroupCallStatsReportEvent.ConnectionStats, { report });
+ };
+
+ private onByteSentStats = (report: ByteSentStatsReport): void => {
+ // @TODO: Implement data argumentation
+ this.emit(GroupCallStatsReportEvent.ByteSentStats, { report });
+ };
+
+ public async create(): Promise<GroupCall> {
+ this.creationTs = Date.now();
+ this.client.groupCallEventHandler!.groupCalls.set(this.room.roomId, this);
+ this.client.emit(GroupCallEventHandlerEvent.Outgoing, this);
+
+ const groupCallState: IGroupCallRoomState = {
+ "m.intent": this.intent,
+ "m.type": this.type,
+ "io.element.ptt": this.isPtt,
+ // TODO: Specify data-channels better
+ "dataChannelsEnabled": this.dataChannelsEnabled,
+ "dataChannelOptions": this.dataChannelsEnabled ? this.dataChannelOptions : undefined,
+ };
+
+ await this.client.sendStateEvent(this.room.roomId, EventType.GroupCallPrefix, groupCallState, this.groupCallId);
+
+ return this;
+ }
+
+ private _state = GroupCallState.LocalCallFeedUninitialized;
+
+ /**
+ * The group call's state.
+ */
+ public get state(): GroupCallState {
+ return this._state;
+ }
+
+ private set state(value: GroupCallState) {
+ const prevValue = this._state;
+ if (value !== prevValue) {
+ this._state = value;
+ this.emit(GroupCallEvent.GroupCallStateChanged, value, prevValue);
+ }
+ }
+
+ private _participants = new Map<RoomMember, Map<string, ParticipantState>>();
+
+ /**
+ * The current participants in the call, as a map from members to device IDs
+ * to participant info.
+ */
+ public get participants(): Map<RoomMember, Map<string, ParticipantState>> {
+ return this._participants;
+ }
+
+ private set participants(value: Map<RoomMember, Map<string, ParticipantState>>) {
+ const prevValue = this._participants;
+ const participantStateEqual = (x: ParticipantState, y: ParticipantState): boolean =>
+ x.sessionId === y.sessionId && x.screensharing === y.screensharing;
+ const deviceMapsEqual = (x: Map<string, ParticipantState>, y: Map<string, ParticipantState>): boolean =>
+ mapsEqual(x, y, participantStateEqual);
+
+ // Only update if the map actually changed
+ if (!mapsEqual(value, prevValue, deviceMapsEqual)) {
+ this._participants = value;
+ this.emit(GroupCallEvent.ParticipantsChanged, value);
+ }
+ }
+
+ private _creationTs: number | null = null;
+
+ /**
+ * The timestamp at which the call was created, or null if it has not yet
+ * been created.
+ */
+ public get creationTs(): number | null {
+ return this._creationTs;
+ }
+
+ private set creationTs(value: number | null) {
+ this._creationTs = value;
+ }
+
+ private _enteredViaAnotherSession = false;
+
+ /**
+ * Whether the local device has entered this call via another session, such
+ * as a widget.
+ */
+ public get enteredViaAnotherSession(): boolean {
+ return this._enteredViaAnotherSession;
+ }
+
+ public set enteredViaAnotherSession(value: boolean) {
+ this._enteredViaAnotherSession = value;
+ this.updateParticipants();
+ }
+
+ /**
+ * Executes the given callback on all calls in this group call.
+ * @param f - The callback.
+ */
+ public forEachCall(f: (call: MatrixCall) => void): void {
+ for (const deviceMap of this.calls.values()) {
+ for (const call of deviceMap.values()) f(call);
+ }
+ }
+
+ public getLocalFeeds(): CallFeed[] {
+ const feeds: CallFeed[] = [];
+
+ if (this.localCallFeed) feeds.push(this.localCallFeed);
+ if (this.localScreenshareFeed) feeds.push(this.localScreenshareFeed);
+
+ return feeds;
+ }
+
+ public hasLocalParticipant(): boolean {
+ return (
+ this.participants.get(this.room.getMember(this.client.getUserId()!)!)?.has(this.client.getDeviceId()!) ??
+ false
+ );
+ }
+
+ /**
+ * Determines whether the given call is one that we were expecting to exist
+ * given our knowledge of who is participating in the group call.
+ */
+ private callExpected(call: MatrixCall): boolean {
+ const userId = getCallUserId(call);
+ const member = userId === null ? null : this.room.getMember(userId);
+ const deviceId = call.getOpponentDeviceId();
+ return member !== null && deviceId !== undefined && this.participants.get(member)?.get(deviceId) !== undefined;
+ }
+
+ public async initLocalCallFeed(): Promise<void> {
+ if (this.state !== GroupCallState.LocalCallFeedUninitialized) {
+ throw new Error(`Cannot initialize local call feed in the "${this.state}" state.`);
+ }
+ this.state = GroupCallState.InitializingLocalCallFeed;
+
+ // wraps the real method to serialise calls, because we don't want to try starting
+ // multiple call feeds at once
+ if (this.initCallFeedPromise) return this.initCallFeedPromise;
+
+ try {
+ this.initCallFeedPromise = this.initLocalCallFeedInternal();
+ await this.initCallFeedPromise;
+ } finally {
+ this.initCallFeedPromise = undefined;
+ }
+ }
+
+ private async initLocalCallFeedInternal(): Promise<void> {
+ logger.log(`GroupCall ${this.groupCallId} initLocalCallFeedInternal() running`);
+
+ let stream: MediaStream;
+
+ try {
+ stream = await this.client.getMediaHandler().getUserMediaStream(true, this.type === GroupCallType.Video);
+ } catch (error) {
+            // If the user is allowed to join a call without a media stream, we
+            // don't throw an error here. But we need an empty local feed to establish
+            // a connection later.
+ if (this.allowCallWithoutVideoAndAudio) {
+ stream = new MediaStream();
+ } else {
+ this.state = GroupCallState.LocalCallFeedUninitialized;
+ throw error;
+ }
+ }
+
+ // The call could've been disposed while we were waiting, and could
+ // also have been started back up again (hello, React 18) so if we're
+ // still in this 'initializing' state, carry on, otherwise bail.
+ if (this._state !== GroupCallState.InitializingLocalCallFeed) {
+ this.client.getMediaHandler().stopUserMediaStream(stream);
+ throw new Error("Group call disposed while gathering media stream");
+ }
+
+ const callFeed = new CallFeed({
+ client: this.client,
+ roomId: this.room.roomId,
+ userId: this.client.getUserId()!,
+ deviceId: this.client.getDeviceId()!,
+ stream,
+ purpose: SDPStreamMetadataPurpose.Usermedia,
+ audioMuted: this.initWithAudioMuted || stream.getAudioTracks().length === 0 || this.isPtt,
+ videoMuted: this.initWithVideoMuted || stream.getVideoTracks().length === 0,
+ });
+
+ setTracksEnabled(stream.getAudioTracks(), !callFeed.isAudioMuted());
+ setTracksEnabled(stream.getVideoTracks(), !callFeed.isVideoMuted());
+
+ this.localCallFeed = callFeed;
+ this.addUserMediaFeed(callFeed);
+
+ this.state = GroupCallState.LocalCallFeedInitialized;
+ }
+
+ public async updateLocalUsermediaStream(stream: MediaStream): Promise<void> {
+ if (this.localCallFeed) {
+ const oldStream = this.localCallFeed.stream;
+ this.localCallFeed.setNewStream(stream);
+ const micShouldBeMuted = this.localCallFeed.isAudioMuted();
+ const vidShouldBeMuted = this.localCallFeed.isVideoMuted();
+ logger.log(
+ `GroupCall ${this.groupCallId} updateLocalUsermediaStream() (oldStreamId=${oldStream.id}, newStreamId=${stream.id}, micShouldBeMuted=${micShouldBeMuted}, vidShouldBeMuted=${vidShouldBeMuted})`,
+ );
+ setTracksEnabled(stream.getAudioTracks(), !micShouldBeMuted);
+ setTracksEnabled(stream.getVideoTracks(), !vidShouldBeMuted);
+ this.client.getMediaHandler().stopUserMediaStream(oldStream);
+ }
+ }
+
+ public async enter(): Promise<void> {
+ if (this.state === GroupCallState.LocalCallFeedUninitialized) {
+ await this.initLocalCallFeed();
+ } else if (this.state !== GroupCallState.LocalCallFeedInitialized) {
+ throw new Error(`Cannot enter call in the "${this.state}" state`);
+ }
+
+ logger.log(`GroupCall ${this.groupCallId} enter() running`);
+ this.state = GroupCallState.Entered;
+
+ this.client.on(CallEventHandlerEvent.Incoming, this.onIncomingCall);
+
+ for (const call of this.client.callEventHandler!.calls.values()) {
+ this.onIncomingCall(call);
+ }
+
+ this.retryCallLoopInterval = setInterval(this.onRetryCallLoop, this.retryCallInterval);
+
+ this.activeSpeaker = undefined;
+ this.onActiveSpeakerLoop();
+ this.activeSpeakerLoopInterval = setInterval(this.onActiveSpeakerLoop, this.activeSpeakerInterval);
+ }
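+
+    /*
+     * A minimal join/leave sketch (the `groupCall` variable is hypothetical):
+     * enter() initializes the local feed if necessary, subscribes to incoming
+     * calls for this group call and starts the retry and active-speaker loops.
+     * ```
+     * await groupCall.initLocalCallFeed(); // optional; enter() does this too
+     * await groupCall.enter();
+     * // ... later
+     * groupCall.leave();
+     * ```
+     */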
+
+ private dispose(): void {
+ if (this.localCallFeed) {
+ this.removeUserMediaFeed(this.localCallFeed);
+ this.localCallFeed = undefined;
+ }
+
+ if (this.localScreenshareFeed) {
+ this.client.getMediaHandler().stopScreensharingStream(this.localScreenshareFeed.stream);
+ this.removeScreenshareFeed(this.localScreenshareFeed);
+ this.localScreenshareFeed = undefined;
+ this.localDesktopCapturerSourceId = undefined;
+ }
+
+ this.client.getMediaHandler().stopAllStreams();
+
+ if (this.transmitTimer !== null) {
+ clearTimeout(this.transmitTimer);
+ this.transmitTimer = null;
+ }
+
+ if (this.retryCallLoopInterval !== undefined) {
+ clearInterval(this.retryCallLoopInterval);
+ this.retryCallLoopInterval = undefined;
+ }
+
+ if (this.participantsExpirationTimer !== null) {
+ clearTimeout(this.participantsExpirationTimer);
+ this.participantsExpirationTimer = null;
+ }
+
+ if (this.state !== GroupCallState.Entered) {
+ return;
+ }
+
+ this.forEachCall((call) => call.hangup(CallErrorCode.UserHangup, false));
+
+ this.activeSpeaker = undefined;
+ clearInterval(this.activeSpeakerLoopInterval);
+
+ this.retryCallCounts.clear();
+ clearInterval(this.retryCallLoopInterval);
+
+ this.client.removeListener(CallEventHandlerEvent.Incoming, this.onIncomingCall);
+ this.stats.stop();
+ }
+
+ public leave(): void {
+ this.dispose();
+ this.state = GroupCallState.LocalCallFeedUninitialized;
+ }
+
+ public async terminate(emitStateEvent = true): Promise<void> {
+ this.dispose();
+
+ this.room.off(RoomStateEvent.Update, this.onRoomState);
+ this.client.groupCallEventHandler!.groupCalls.delete(this.room.roomId);
+ this.client.emit(GroupCallEventHandlerEvent.Ended, this);
+ this.state = GroupCallState.Ended;
+
+ if (emitStateEvent) {
+ const existingStateEvent = this.room.currentState.getStateEvents(
+ EventType.GroupCallPrefix,
+ this.groupCallId,
+ )!;
+
+ await this.client.sendStateEvent(
+ this.room.roomId,
+ EventType.GroupCallPrefix,
+ {
+ ...existingStateEvent.getContent(),
+ "m.terminated": GroupCallTerminationReason.CallEnded,
+ },
+ this.groupCallId,
+ );
+ }
+ }
+
+ /*
+ * Local Usermedia
+ */
+
+ public isLocalVideoMuted(): boolean {
+ if (this.localCallFeed) {
+ return this.localCallFeed.isVideoMuted();
+ }
+
+ return true;
+ }
+
+ public isMicrophoneMuted(): boolean {
+ if (this.localCallFeed) {
+ return this.localCallFeed.isAudioMuted();
+ }
+
+ return true;
+ }
+
+ /**
+     * Sets the mute state of the local participant's microphone.
+ * @param muted - Whether to mute the microphone
+ * @returns Whether muting/unmuting was successful
+ */
+ public async setMicrophoneMuted(muted: boolean): Promise<boolean> {
+ // hasAudioDevice can block indefinitely if the window has lost focus,
+ // and it doesn't make much sense to keep a device from being muted, so
+ // we always allow muted = true changes to go through
+ if (!muted && !(await this.client.getMediaHandler().hasAudioDevice())) {
+ return false;
+ }
+
+ const sendUpdatesBefore = !muted && this.isPtt;
+
+ // set a timer for the maximum transmit time on PTT calls
+ if (this.isPtt) {
+ // Set or clear the max transmit timer
+ if (!muted && this.isMicrophoneMuted()) {
+ this.transmitTimer = setTimeout(() => {
+ this.setMicrophoneMuted(true);
+ }, this.pttMaxTransmitTime);
+ } else if (muted && !this.isMicrophoneMuted()) {
+ if (this.transmitTimer !== null) clearTimeout(this.transmitTimer);
+ this.transmitTimer = null;
+ }
+ }
+
+ this.forEachCall((call) => call.localUsermediaFeed?.setAudioVideoMuted(muted, null));
+
+ const sendUpdates = async (): Promise<void> => {
+ const updates: Promise<void>[] = [];
+ this.forEachCall((call) => updates.push(call.sendMetadataUpdate()));
+
+ await Promise.all(updates).catch((e) =>
+ logger.info(
+ `GroupCall ${this.groupCallId} setMicrophoneMuted() failed to send some metadata updates`,
+ e,
+ ),
+ );
+ };
+
+ if (sendUpdatesBefore) await sendUpdates();
+
+ if (this.localCallFeed) {
+ logger.log(
+ `GroupCall ${this.groupCallId} setMicrophoneMuted() (streamId=${this.localCallFeed.stream.id}, muted=${muted})`,
+ );
+
+            // We need this here to avoid an error in case the user joins a call without a device.
+            // We can't use .then/.catch here because of the linter.
+ try {
+ if (!muted) {
+ const stream = await this.client
+ .getMediaHandler()
+ .getUserMediaStream(true, !this.localCallFeed.isVideoMuted());
+ if (stream === null) {
+                        // if permission to get a stream was denied, stop here
+ /* istanbul ignore next */
+ logger.log(
+ `GroupCall ${this.groupCallId} setMicrophoneMuted() no device to receive local stream, muted=${muted}`,
+ );
+ return false;
+ }
+ }
+ } catch (e) {
+ /* istanbul ignore next */
+ logger.log(
+ `GroupCall ${this.groupCallId} setMicrophoneMuted() no device or permission to receive local stream, muted=${muted}`,
+ );
+ return false;
+ }
+
+ this.localCallFeed.setAudioVideoMuted(muted, null);
+            // I don't believe it's actually necessary to enable these tracks: they
+            // are the ones on the GroupCall's own CallFeed and are cloned before being
+ // given to any of the actual calls, so these tracks don't actually go
+ // anywhere. Let's do it anyway to avoid confusion.
+ setTracksEnabled(this.localCallFeed.stream.getAudioTracks(), !muted);
+ } else {
+ logger.log(`GroupCall ${this.groupCallId} setMicrophoneMuted() no stream muted (muted=${muted})`);
+ this.initWithAudioMuted = muted;
+ }
+
+ this.forEachCall((call) =>
+ setTracksEnabled(call.localUsermediaFeed!.stream.getAudioTracks(), !muted && this.callExpected(call)),
+ );
+ this.emit(GroupCallEvent.LocalMuteStateChanged, muted, this.isLocalVideoMuted());
+
+ if (!sendUpdatesBefore) await sendUpdates();
+
+ return true;
+ }
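+
+    /*
+     * A minimal usage sketch (the `groupCall` variable is hypothetical): the
+     * returned boolean indicates whether the change was applied, and
+     * LocalMuteStateChanged fires with the resulting audio/video mute state.
+     * ```
+     * groupCall.on(GroupCallEvent.LocalMuteStateChanged, (audioMuted, videoMuted) => {
+     *     console.log(audioMuted, videoMuted);
+     * });
+     * const ok = await groupCall.setMicrophoneMuted(true);
+     * ```
+     */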
+
+ /**
+     * Sets the mute state of the local participant's video.
+ * @param muted - Whether to mute the video
+ * @returns Whether muting/unmuting was successful
+ */
+ public async setLocalVideoMuted(muted: boolean): Promise<boolean> {
+        // hasVideoDevice can block indefinitely if the window has lost focus,
+ // and it doesn't make much sense to keep a device from being muted, so
+ // we always allow muted = true changes to go through
+ if (!muted && !(await this.client.getMediaHandler().hasVideoDevice())) {
+ return false;
+ }
+
+ if (this.localCallFeed) {
+ /* istanbul ignore next */
+ logger.log(
+ `GroupCall ${this.groupCallId} setLocalVideoMuted() (stream=${this.localCallFeed.stream.id}, muted=${muted})`,
+ );
+
+ try {
+ const stream = await this.client.getMediaHandler().getUserMediaStream(true, !muted);
+ await this.updateLocalUsermediaStream(stream);
+ this.localCallFeed.setAudioVideoMuted(null, muted);
+ setTracksEnabled(this.localCallFeed.stream.getVideoTracks(), !muted);
+ } catch (_) {
+ // No permission to video device
+ /* istanbul ignore next */
+ logger.log(
+ `GroupCall ${this.groupCallId} setLocalVideoMuted() no device or permission to receive local stream, muted=${muted}`,
+ );
+ return false;
+ }
+ } else {
+ logger.log(`GroupCall ${this.groupCallId} setLocalVideoMuted() no stream muted (muted=${muted})`);
+ this.initWithVideoMuted = muted;
+ }
+
+ const updates: Promise<unknown>[] = [];
+ this.forEachCall((call) => updates.push(call.setLocalVideoMuted(muted)));
+ await Promise.all(updates);
+
+ // We setTracksEnabled again, independently from the call doing it
+ // internally, since we might not be expecting the call
+ this.forEachCall((call) =>
+ setTracksEnabled(call.localUsermediaFeed!.stream.getVideoTracks(), !muted && this.callExpected(call)),
+ );
+
+ this.emit(GroupCallEvent.LocalMuteStateChanged, this.isMicrophoneMuted(), muted);
+
+ return true;
+ }
+
+ public async setScreensharingEnabled(enabled: boolean, opts: IScreensharingOpts = {}): Promise<boolean> {
+ if (enabled === this.isScreensharing()) {
+ return enabled;
+ }
+
+ if (enabled) {
+ try {
+ logger.log(
+ `GroupCall ${this.groupCallId} setScreensharingEnabled() is asking for screensharing permissions`,
+ );
+ const stream = await this.client.getMediaHandler().getScreensharingStream(opts);
+
+ for (const track of stream.getTracks()) {
+ const onTrackEnded = (): void => {
+ this.setScreensharingEnabled(false);
+ track.removeEventListener("ended", onTrackEnded);
+ };
+
+ track.addEventListener("ended", onTrackEnded);
+ }
+
+ logger.log(
+ `GroupCall ${this.groupCallId} setScreensharingEnabled() granted screensharing permissions. Setting screensharing enabled on all calls`,
+ );
+
+ this.localDesktopCapturerSourceId = opts.desktopCapturerSourceId;
+ this.localScreenshareFeed = new CallFeed({
+ client: this.client,
+ roomId: this.room.roomId,
+ userId: this.client.getUserId()!,
+ deviceId: this.client.getDeviceId()!,
+ stream,
+ purpose: SDPStreamMetadataPurpose.Screenshare,
+ audioMuted: false,
+ videoMuted: false,
+ });
+ this.addScreenshareFeed(this.localScreenshareFeed);
+
+ this.emit(
+ GroupCallEvent.LocalScreenshareStateChanged,
+ true,
+ this.localScreenshareFeed,
+ this.localDesktopCapturerSourceId,
+ );
+
+ // TODO: handle errors
+ this.forEachCall((call) => call.pushLocalFeed(this.localScreenshareFeed!.clone()));
+
+ return true;
+ } catch (error) {
+ if (opts.throwOnFail) throw error;
+ logger.error(
+ `GroupCall ${this.groupCallId} setScreensharingEnabled() enabling screensharing error`,
+ error,
+ );
+ this.emit(
+ GroupCallEvent.Error,
+ new GroupCallError(
+ GroupCallErrorCode.NoUserMedia,
+ "Failed to get screen-sharing stream: ",
+ error as Error,
+ ),
+ );
+ return false;
+ }
+ } else {
+ this.forEachCall((call) => {
+ if (call.localScreensharingFeed) call.removeLocalFeed(call.localScreensharingFeed);
+ });
+ this.client.getMediaHandler().stopScreensharingStream(this.localScreenshareFeed!.stream);
+ this.removeScreenshareFeed(this.localScreenshareFeed!);
+ this.localScreenshareFeed = undefined;
+ this.localDesktopCapturerSourceId = undefined;
+ this.emit(GroupCallEvent.LocalScreenshareStateChanged, false, undefined, undefined);
+ return false;
+ }
+ }
+
+ public isScreensharing(): boolean {
+ return !!this.localScreenshareFeed;
+ }
+
+ /*
+ * Call Setup
+ *
+ * There are two different paths for calls to be created:
+ * 1. Incoming calls triggered by the Call.incoming event.
+ * 2. Outgoing calls to the initial members of a room or new members
+ * as they are observed by the RoomState.members event.
+ */
+
+ private onIncomingCall = (newCall: MatrixCall): void => {
+ // The incoming calls may be for another room, which we will ignore.
+ if (newCall.roomId !== this.room.roomId) {
+ return;
+ }
+
+ if (newCall.state !== CallState.Ringing) {
+ logger.warn(
+ `GroupCall ${this.groupCallId} onIncomingCall() incoming call no longer in ringing state - ignoring`,
+ );
+ return;
+ }
+
+ if (!newCall.groupCallId || newCall.groupCallId !== this.groupCallId) {
+ logger.log(
+ `GroupCall ${this.groupCallId} onIncomingCall() ignored because it doesn't match the current group call`,
+ );
+ newCall.reject();
+ return;
+ }
+
+ const opponentUserId = newCall.getOpponentMember()?.userId;
+ if (opponentUserId === undefined) {
+ logger.warn(`GroupCall ${this.groupCallId} onIncomingCall() incoming call with no member - ignoring`);
+ return;
+ }
+
+ const deviceMap = this.calls.get(opponentUserId) ?? new Map<string, MatrixCall>();
+ const prevCall = deviceMap.get(newCall.getOpponentDeviceId()!);
+
+ if (prevCall?.callId === newCall.callId) return;
+
+ logger.log(
+ `GroupCall ${this.groupCallId} onIncomingCall() incoming call (userId=${opponentUserId}, callId=${newCall.callId})`,
+ );
+
+ if (prevCall) prevCall.hangup(CallErrorCode.Replaced, false);
+
+ this.initCall(newCall);
+
+ const feeds = this.getLocalFeeds().map((feed) => feed.clone());
+ if (!this.callExpected(newCall)) {
+ // Disable our tracks for users not explicitly participating in the
+ // call but trying to receive the feeds
+ for (const feed of feeds) {
+ setTracksEnabled(feed.stream.getAudioTracks(), false);
+ setTracksEnabled(feed.stream.getVideoTracks(), false);
+ }
+ }
+ newCall.answerWithCallFeeds(feeds);
+
+ deviceMap.set(newCall.getOpponentDeviceId()!, newCall);
+ this.calls.set(opponentUserId, deviceMap);
+ this.emit(GroupCallEvent.CallsChanged, this.calls);
+ };
+
+ /**
+ * Determines whether a given participant expects us to call them (versus
+ * them calling us).
+ * @param userId - The participant's user ID.
+ * @param deviceId - The participant's device ID.
+ * @returns Whether we need to place an outgoing call to the participant.
+ */
+ private wantsOutgoingCall(userId: string, deviceId: string): boolean {
+ const localUserId = this.client.getUserId()!;
+ const localDeviceId = this.client.getDeviceId()!;
+ return (
+ // If a user's ID is less than our own, they'll call us
+ userId >= localUserId &&
+ // If this is another one of our devices, compare device IDs to tell whether it'll call us
+ (userId !== localUserId || deviceId > localDeviceId)
+ );
+ }
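+
+    /*
+     * Illustrative example with hypothetical ids: if our user id is
+     * "@a:example.org" and the participant is "@b:example.org", then
+     * "@b:example.org" >= "@a:example.org", so we place the outgoing call and
+     * they wait to be called. For two devices of the same user, the device
+     * ids are compared instead.
+     */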
+
+ /**
+ * Places calls to all participants that we're responsible for calling.
+ */
+ private placeOutgoingCalls(): void {
+ let callsChanged = false;
+
+ for (const [{ userId }, participantMap] of this.participants) {
+ const callMap = this.calls.get(userId) ?? new Map<string, MatrixCall>();
+
+ for (const [deviceId, participant] of participantMap) {
+ const prevCall = callMap.get(deviceId);
+
+ if (
+ prevCall?.getOpponentSessionId() !== participant.sessionId &&
+ this.wantsOutgoingCall(userId, deviceId)
+ ) {
+ callsChanged = true;
+
+ if (prevCall !== undefined) {
+ logger.debug(
+ `GroupCall ${this.groupCallId} placeOutgoingCalls() replacing call (userId=${userId}, deviceId=${deviceId}, callId=${prevCall.callId})`,
+ );
+ prevCall.hangup(CallErrorCode.NewSession, false);
+ }
+
+ const newCall = createNewMatrixCall(this.client, this.room.roomId, {
+ invitee: userId,
+ opponentDeviceId: deviceId,
+ opponentSessionId: participant.sessionId,
+ groupCallId: this.groupCallId,
+ });
+
+ if (newCall === null) {
+ logger.error(
+ `GroupCall ${this.groupCallId} placeOutgoingCalls() failed to create call (userId=${userId}, device=${deviceId})`,
+ );
+ callMap.delete(deviceId);
+ } else {
+ this.initCall(newCall);
+ callMap.set(deviceId, newCall);
+
+ logger.debug(
+ `GroupCall ${this.groupCallId} placeOutgoingCalls() placing call (userId=${userId}, deviceId=${deviceId}, sessionId=${participant.sessionId})`,
+ );
+
+ newCall
+ .placeCallWithCallFeeds(
+ this.getLocalFeeds().map((feed) => feed.clone()),
+ participant.screensharing,
+ )
+ .then(() => {
+ if (this.dataChannelsEnabled) {
+ newCall.createDataChannel("datachannel", this.dataChannelOptions);
+ }
+ })
+ .catch((e) => {
+ logger.warn(
+ `GroupCall ${this.groupCallId} placeOutgoingCalls() failed to place call (userId=${userId})`,
+ e,
+ );
+
+ if (e instanceof CallError && e.code === GroupCallErrorCode.UnknownDevice) {
+ this.emit(GroupCallEvent.Error, e);
+ } else {
+ this.emit(
+ GroupCallEvent.Error,
+ new GroupCallError(
+ GroupCallErrorCode.PlaceCallFailed,
+ `Failed to place call to ${userId}`,
+ ),
+ );
+ }
+
+ newCall.hangup(CallErrorCode.SignallingFailed, false);
+ if (callMap.get(deviceId) === newCall) callMap.delete(deviceId);
+ });
+ }
+ }
+ }
+
+ if (callMap.size > 0) {
+ this.calls.set(userId, callMap);
+ } else {
+ this.calls.delete(userId);
+ }
+ }
+
+ if (callsChanged) this.emit(GroupCallEvent.CallsChanged, this.calls);
+ }
+
+ /*
+ * Room Member State
+ */
+
+ private getMemberStateEvents(): MatrixEvent[];
+ private getMemberStateEvents(userId: string): MatrixEvent | null;
+ private getMemberStateEvents(userId?: string): MatrixEvent[] | MatrixEvent | null {
+ return userId === undefined
+ ? this.room.currentState.getStateEvents(EventType.GroupCallMemberPrefix)
+ : this.room.currentState.getStateEvents(EventType.GroupCallMemberPrefix, userId);
+ }
+
+ private onRetryCallLoop = (): void => {
+ let needsRetry = false;
+
+ for (const [{ userId }, participantMap] of this.participants) {
+ const callMap = this.calls.get(userId);
+ let retriesMap = this.retryCallCounts.get(userId);
+
+ for (const [deviceId, participant] of participantMap) {
+ const call = callMap?.get(deviceId);
+ const retries = retriesMap?.get(deviceId) ?? 0;
+
+ if (
+ call?.getOpponentSessionId() !== participant.sessionId &&
+ this.wantsOutgoingCall(userId, deviceId) &&
+ retries < 3
+ ) {
+ if (retriesMap === undefined) {
+ retriesMap = new Map();
+ this.retryCallCounts.set(userId, retriesMap);
+ }
+ retriesMap.set(deviceId, retries + 1);
+ needsRetry = true;
+ }
+ }
+ }
+
+ if (needsRetry) this.placeOutgoingCalls();
+ };
+
+ private initCall(call: MatrixCall): void {
+ const opponentMemberId = getCallUserId(call);
+
+ if (!opponentMemberId) {
+ throw new Error("Cannot init call without user id");
+ }
+
+ const onCallFeedsChanged = (): void => this.onCallFeedsChanged(call);
+ const onCallStateChanged = (state: CallState, oldState?: CallState): void =>
+ this.onCallStateChanged(call, state, oldState);
+ const onCallHangup = this.onCallHangup;
+ const onCallReplaced = (newCall: MatrixCall): void => this.onCallReplaced(call, newCall);
+
+ let deviceMap = this.callHandlers.get(opponentMemberId);
+ if (deviceMap === undefined) {
+ deviceMap = new Map();
+ this.callHandlers.set(opponentMemberId, deviceMap);
+ }
+
+ deviceMap.set(call.getOpponentDeviceId()!, {
+ onCallFeedsChanged,
+ onCallStateChanged,
+ onCallHangup,
+ onCallReplaced,
+ });
+
+ call.on(CallEvent.FeedsChanged, onCallFeedsChanged);
+ call.on(CallEvent.State, onCallStateChanged);
+ call.on(CallEvent.Hangup, onCallHangup);
+ call.on(CallEvent.Replaced, onCallReplaced);
+
+ call.isPtt = this.isPtt;
+
+ this.reEmitter.reEmit(call, Object.values(CallEvent));
+
+ call.initStats(this.stats);
+
+ onCallFeedsChanged();
+ }
+
+ private disposeCall(call: MatrixCall, hangupReason: CallErrorCode): void {
+ const opponentMemberId = getCallUserId(call);
+ const opponentDeviceId = call.getOpponentDeviceId()!;
+
+ if (!opponentMemberId) {
+ throw new Error("Cannot dispose call without user id");
+ }
+
+ const deviceMap = this.callHandlers.get(opponentMemberId)!;
+ const { onCallFeedsChanged, onCallStateChanged, onCallHangup, onCallReplaced } =
+ deviceMap.get(opponentDeviceId)!;
+
+ call.removeListener(CallEvent.FeedsChanged, onCallFeedsChanged);
+ call.removeListener(CallEvent.State, onCallStateChanged);
+ call.removeListener(CallEvent.Hangup, onCallHangup);
+ call.removeListener(CallEvent.Replaced, onCallReplaced);
+
+ deviceMap.delete(opponentMemberId);
+ if (deviceMap.size === 0) this.callHandlers.delete(opponentMemberId);
+
+ if (call.hangupReason === CallErrorCode.Replaced) {
+ return;
+ }
+
+ const usermediaFeed = this.getUserMediaFeed(opponentMemberId, opponentDeviceId);
+
+ if (usermediaFeed) {
+ this.removeUserMediaFeed(usermediaFeed);
+ }
+
+ const screenshareFeed = this.getScreenshareFeed(opponentMemberId, opponentDeviceId);
+
+ if (screenshareFeed) {
+ this.removeScreenshareFeed(screenshareFeed);
+ }
+ }
+
+ private onCallFeedsChanged = (call: MatrixCall): void => {
+ const opponentMemberId = getCallUserId(call);
+ const opponentDeviceId = call.getOpponentDeviceId()!;
+
+ if (!opponentMemberId) {
+ throw new Error("Cannot change call feeds without user id");
+ }
+
+ const currentUserMediaFeed = this.getUserMediaFeed(opponentMemberId, opponentDeviceId);
+ const remoteUsermediaFeed = call.remoteUsermediaFeed;
+ const remoteFeedChanged = remoteUsermediaFeed !== currentUserMediaFeed;
+
+ if (remoteFeedChanged) {
+ if (!currentUserMediaFeed && remoteUsermediaFeed) {
+ this.addUserMediaFeed(remoteUsermediaFeed);
+ } else if (currentUserMediaFeed && remoteUsermediaFeed) {
+ this.replaceUserMediaFeed(currentUserMediaFeed, remoteUsermediaFeed);
+ } else if (currentUserMediaFeed && !remoteUsermediaFeed) {
+ this.removeUserMediaFeed(currentUserMediaFeed);
+ }
+ }
+
+ const currentScreenshareFeed = this.getScreenshareFeed(opponentMemberId, opponentDeviceId);
+ const remoteScreensharingFeed = call.remoteScreensharingFeed;
+ const remoteScreenshareFeedChanged = remoteScreensharingFeed !== currentScreenshareFeed;
+
+ if (remoteScreenshareFeedChanged) {
+ if (!currentScreenshareFeed && remoteScreensharingFeed) {
+ this.addScreenshareFeed(remoteScreensharingFeed);
+ } else if (currentScreenshareFeed && remoteScreensharingFeed) {
+ this.replaceScreenshareFeed(currentScreenshareFeed, remoteScreensharingFeed);
+ } else if (currentScreenshareFeed && !remoteScreensharingFeed) {
+ this.removeScreenshareFeed(currentScreenshareFeed);
+ }
+ }
+ };
+
+ private onCallStateChanged = (call: MatrixCall, state: CallState, _oldState: CallState | undefined): void => {
+ if (state === CallState.Ended) return;
+
+ const audioMuted = this.localCallFeed!.isAudioMuted();
+
+ if (call.localUsermediaStream && call.isMicrophoneMuted() !== audioMuted) {
+ call.setMicrophoneMuted(audioMuted);
+ }
+
+ const videoMuted = this.localCallFeed!.isVideoMuted();
+
+ if (call.localUsermediaStream && call.isLocalVideoMuted() !== videoMuted) {
+ call.setLocalVideoMuted(videoMuted);
+ }
+
+ const opponentUserId = call.getOpponentMember()?.userId;
+ if (state === CallState.Connected && opponentUserId) {
+ const retriesMap = this.retryCallCounts.get(opponentUserId);
+ retriesMap?.delete(call.getOpponentDeviceId()!);
+ if (retriesMap?.size === 0) this.retryCallCounts.delete(opponentUserId);
+ }
+ };
+
+ private onCallHangup = (call: MatrixCall): void => {
+ if (call.hangupReason === CallErrorCode.Replaced) return;
+
+ const opponentUserId = call.getOpponentMember()?.userId ?? this.room.getMember(call.invitee!)!.userId;
+ const deviceMap = this.calls.get(opponentUserId);
+
+ // Sanity check that this call is in fact in the map
+ if (deviceMap?.get(call.getOpponentDeviceId()!) === call) {
+ this.disposeCall(call, call.hangupReason as CallErrorCode);
+ deviceMap.delete(call.getOpponentDeviceId()!);
+ if (deviceMap.size === 0) this.calls.delete(opponentUserId);
+ this.emit(GroupCallEvent.CallsChanged, this.calls);
+ }
+ };
+
+ private onCallReplaced = (prevCall: MatrixCall, newCall: MatrixCall): void => {
+ const opponentUserId = prevCall.getOpponentMember()!.userId;
+
+ let deviceMap = this.calls.get(opponentUserId);
+ if (deviceMap === undefined) {
+ deviceMap = new Map();
+ this.calls.set(opponentUserId, deviceMap);
+ }
+
+ prevCall.hangup(CallErrorCode.Replaced, false);
+ this.initCall(newCall);
+ deviceMap.set(prevCall.getOpponentDeviceId()!, newCall);
+ this.emit(GroupCallEvent.CallsChanged, this.calls);
+ };
+
+ /*
+ * UserMedia CallFeed Event Handlers
+ */
+
+ public getUserMediaFeed(userId: string, deviceId: string): CallFeed | undefined {
+ return this.userMediaFeeds.find((f) => f.userId === userId && f.deviceId! === deviceId);
+ }
+
+ private addUserMediaFeed(callFeed: CallFeed): void {
+ this.userMediaFeeds.push(callFeed);
+ callFeed.measureVolumeActivity(true);
+ this.emit(GroupCallEvent.UserMediaFeedsChanged, this.userMediaFeeds);
+ }
+
+ private replaceUserMediaFeed(existingFeed: CallFeed, replacementFeed: CallFeed): void {
+ const feedIndex = this.userMediaFeeds.findIndex(
+ (f) => f.userId === existingFeed.userId && f.deviceId! === existingFeed.deviceId,
+ );
+
+ if (feedIndex === -1) {
+ throw new Error("Couldn't find user media feed to replace");
+ }
+
+ this.userMediaFeeds.splice(feedIndex, 1, replacementFeed);
+
+ existingFeed.dispose();
+ replacementFeed.measureVolumeActivity(true);
+ this.emit(GroupCallEvent.UserMediaFeedsChanged, this.userMediaFeeds);
+ }
+
+ private removeUserMediaFeed(callFeed: CallFeed): void {
+ const feedIndex = this.userMediaFeeds.findIndex(
+ (f) => f.userId === callFeed.userId && f.deviceId! === callFeed.deviceId,
+ );
+
+ if (feedIndex === -1) {
+ throw new Error("Couldn't find user media feed to remove");
+ }
+
+ this.userMediaFeeds.splice(feedIndex, 1);
+
+ callFeed.dispose();
+ this.emit(GroupCallEvent.UserMediaFeedsChanged, this.userMediaFeeds);
+
+ if (this.activeSpeaker === callFeed) {
+ this.activeSpeaker = this.userMediaFeeds[0];
+ this.emit(GroupCallEvent.ActiveSpeakerChanged, this.activeSpeaker);
+ }
+ }
+
+ private onActiveSpeakerLoop = (): void => {
+ let topAvg: number | undefined = undefined;
+ let nextActiveSpeaker: CallFeed | undefined = undefined;
+
+ for (const callFeed of this.userMediaFeeds) {
+ if (callFeed.isLocal() && this.userMediaFeeds.length > 1) continue;
+
+ const total = callFeed.speakingVolumeSamples.reduce(
+ (acc, volume) => acc + Math.max(volume, SPEAKING_THRESHOLD),
+ );
+ const avg = total / callFeed.speakingVolumeSamples.length;
+
+ if (!topAvg || avg > topAvg) {
+ topAvg = avg;
+ nextActiveSpeaker = callFeed;
+ }
+ }
+
+ if (nextActiveSpeaker && this.activeSpeaker !== nextActiveSpeaker && topAvg && topAvg > SPEAKING_THRESHOLD) {
+ this.activeSpeaker = nextActiveSpeaker;
+ this.emit(GroupCallEvent.ActiveSpeakerChanged, this.activeSpeaker);
+ }
+ };
+
+ /*
+ * Screenshare Call Feed Event Handlers
+ */
+
+ public getScreenshareFeed(userId: string, deviceId: string): CallFeed | undefined {
+ return this.screenshareFeeds.find((f) => f.userId === userId && f.deviceId! === deviceId);
+ }
+
+ private addScreenshareFeed(callFeed: CallFeed): void {
+ this.screenshareFeeds.push(callFeed);
+ this.emit(GroupCallEvent.ScreenshareFeedsChanged, this.screenshareFeeds);
+ }
+
+ private replaceScreenshareFeed(existingFeed: CallFeed, replacementFeed: CallFeed): void {
+ const feedIndex = this.screenshareFeeds.findIndex(
+ (f) => f.userId === existingFeed.userId && f.deviceId! === existingFeed.deviceId,
+ );
+
+ if (feedIndex === -1) {
+ throw new Error("Couldn't find screenshare feed to replace");
+ }
+
+ this.screenshareFeeds.splice(feedIndex, 1, replacementFeed);
+
+ existingFeed.dispose();
+ this.emit(GroupCallEvent.ScreenshareFeedsChanged, this.screenshareFeeds);
+ }
+
+ private removeScreenshareFeed(callFeed: CallFeed): void {
+ const feedIndex = this.screenshareFeeds.findIndex(
+ (f) => f.userId === callFeed.userId && f.deviceId! === callFeed.deviceId,
+ );
+
+ if (feedIndex === -1) {
+ throw new Error("Couldn't find screenshare feed to remove");
+ }
+
+ this.screenshareFeeds.splice(feedIndex, 1);
+
+ callFeed.dispose();
+ this.emit(GroupCallEvent.ScreenshareFeedsChanged, this.screenshareFeeds);
+ }
+
+ /**
+ * Recalculates and updates the participant map to match the room state.
+ */
+ private updateParticipants(): void {
+ const localMember = this.room.getMember(this.client.getUserId()!)!;
+ if (!localMember) {
+ // The client hasn't fetched enough of the room state to get our own member
+ // event. This probably shouldn't happen, but sanity check & exit for now.
+ logger.warn(
+ `GroupCall ${this.groupCallId} updateParticipants() tried to update participants before local room member is available`,
+ );
+ return;
+ }
+
+ if (this.participantsExpirationTimer !== null) {
+ clearTimeout(this.participantsExpirationTimer);
+ this.participantsExpirationTimer = null;
+ }
+
+ if (this.state === GroupCallState.Ended) {
+ this.participants = new Map();
+ return;
+ }
+
+ const participants = new Map<RoomMember, Map<string, ParticipantState>>();
+ const now = Date.now();
+ const entered = this.state === GroupCallState.Entered || this.enteredViaAnotherSession;
+ let nextExpiration = Infinity;
+
+ for (const e of this.getMemberStateEvents()) {
+ const member = this.room.getMember(e.getStateKey()!);
+ const content = e.getContent<Record<any, unknown>>();
+ const calls: Record<any, unknown>[] = Array.isArray(content["m.calls"]) ? content["m.calls"] : [];
+ const call = calls.find((call) => call["m.call_id"] === this.groupCallId);
+ const devices: Record<any, unknown>[] = Array.isArray(call?.["m.devices"]) ? call!["m.devices"] : [];
+
+ // Filter out invalid and expired devices
+ let validDevices = devices.filter(
+ (d) =>
+ typeof d.device_id === "string" &&
+ typeof d.session_id === "string" &&
+ typeof d.expires_ts === "number" &&
+ d.expires_ts > now &&
+ Array.isArray(d.feeds),
+ ) as unknown as IGroupCallRoomMemberDevice[];
+
+ // Apply local echo for the unentered case
+ if (!entered && member?.userId === this.client.getUserId()!) {
+ validDevices = validDevices.filter((d) => d.device_id !== this.client.getDeviceId()!);
+ }
+
+ // Must have a connected device and be joined to the room
+ if (validDevices.length > 0 && member?.membership === "join") {
+ const deviceMap = new Map<string, ParticipantState>();
+ participants.set(member, deviceMap);
+
+ for (const d of validDevices) {
+ deviceMap.set(d.device_id, {
+ sessionId: d.session_id,
+ screensharing: d.feeds.some((f) => f.purpose === SDPStreamMetadataPurpose.Screenshare),
+ });
+ if (d.expires_ts < nextExpiration) nextExpiration = d.expires_ts;
+ }
+ }
+ }
+
+ // Apply local echo for the entered case
+ if (entered) {
+ let deviceMap = participants.get(localMember);
+ if (deviceMap === undefined) {
+ deviceMap = new Map();
+ participants.set(localMember, deviceMap);
+ }
+
+ if (!deviceMap.has(this.client.getDeviceId()!)) {
+ deviceMap.set(this.client.getDeviceId()!, {
+ sessionId: this.client.getSessionId(),
+ screensharing: this.getLocalFeeds().some((f) => f.purpose === SDPStreamMetadataPurpose.Screenshare),
+ });
+ }
+ }
+
+ this.participants = participants;
+ if (nextExpiration < Infinity) {
+ this.participantsExpirationTimer = setTimeout(() => this.updateParticipants(), nextExpiration - now);
+ }
+ }
+
+ /**
+ * Updates the local user's member state with the devices returned by the given function.
+ * @param fn - A function from the current devices to the new devices. If it
+ * returns null, the update will be skipped.
+ * @param keepAlive - Whether the request should outlive the window.
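+     *
+     * @example
+     * A minimal sketch of an internal caller (cf. `addDeviceToMemberState` below), refreshing
+     * only our own device's expiry. `DEVICE_TIMEOUT` is the constant used elsewhere in this
+     * file and the callback shape is the one described above:
+     *
+     *     await this.updateDevices((devices) =>
+     *         devices.map((d) =>
+     *             d.device_id === this.client.getDeviceId() ? { ...d, expires_ts: Date.now() + DEVICE_TIMEOUT } : d,
+     *         ),
+     *     );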
+ */
+ private async updateDevices(
+ fn: (devices: IGroupCallRoomMemberDevice[]) => IGroupCallRoomMemberDevice[] | null,
+ keepAlive = false,
+ ): Promise<void> {
+ const now = Date.now();
+ const localUserId = this.client.getUserId()!;
+
+ const event = this.getMemberStateEvents(localUserId);
+ const content = event?.getContent<Record<any, unknown>>() ?? {};
+ const calls: Record<any, unknown>[] = Array.isArray(content["m.calls"]) ? content["m.calls"] : [];
+
+ let call: Record<any, unknown> | null = null;
+ const otherCalls: Record<any, unknown>[] = [];
+ for (const c of calls) {
+ if (c["m.call_id"] === this.groupCallId) {
+ call = c;
+ } else {
+ otherCalls.push(c);
+ }
+ }
+ if (call === null) call = {};
+
+ const devices: Record<any, unknown>[] = Array.isArray(call["m.devices"]) ? call["m.devices"] : [];
+
+ // Filter out invalid and expired devices
+ const validDevices = devices.filter(
+ (d) =>
+ typeof d.device_id === "string" &&
+ typeof d.session_id === "string" &&
+ typeof d.expires_ts === "number" &&
+ d.expires_ts > now &&
+ Array.isArray(d.feeds),
+ ) as unknown as IGroupCallRoomMemberDevice[];
+
+ const newDevices = fn(validDevices);
+ if (newDevices === null) return;
+
+ const newCalls = [...(otherCalls as unknown as IGroupCallRoomMemberCallState[])];
+ if (newDevices.length > 0) {
+ newCalls.push({
+ ...call,
+ "m.call_id": this.groupCallId,
+ "m.devices": newDevices,
+ });
+ }
+
+ const newContent: IGroupCallRoomMemberState = { "m.calls": newCalls };
+
+ await this.client.sendStateEvent(this.room.roomId, EventType.GroupCallMemberPrefix, newContent, localUserId, {
+ keepAlive,
+ });
+ }
+
+ private async addDeviceToMemberState(): Promise<void> {
+ await this.updateDevices((devices) => [
+ ...devices.filter((d) => d.device_id !== this.client.getDeviceId()!),
+ {
+ device_id: this.client.getDeviceId()!,
+ session_id: this.client.getSessionId(),
+ expires_ts: Date.now() + DEVICE_TIMEOUT,
+ feeds: this.getLocalFeeds().map((feed) => ({ purpose: feed.purpose })),
+ // TODO: Add data channels
+ },
+ ]);
+ }
+
+ private async updateMemberState(): Promise<void> {
+ // Clear the old update interval before proceeding
+ if (this.resendMemberStateTimer !== null) {
+ clearInterval(this.resendMemberStateTimer);
+ this.resendMemberStateTimer = null;
+ }
+
+ if (this.state === GroupCallState.Entered) {
+ // Add the local device
+ await this.addDeviceToMemberState();
+
+ // Resend the state event every so often so it doesn't become stale
+ this.resendMemberStateTimer = setInterval(async () => {
+                logger.log(`GroupCall ${this.groupCallId} updateMemberState() resending call member state`);
+ try {
+ await this.addDeviceToMemberState();
+ } catch (e) {
+ logger.error(
+ `GroupCall ${this.groupCallId} updateMemberState() failed to resend call member state`,
+ e,
+ );
+ }
+ }, (DEVICE_TIMEOUT * 3) / 4);
+ } else {
+ // Remove the local device
+ await this.updateDevices(
+ (devices) => devices.filter((d) => d.device_id !== this.client.getDeviceId()!),
+ true,
+ );
+ }
+ }
+
+ /**
+ * Cleans up our member state by filtering out logged out devices, inactive
+ * devices, and our own device (if we know we haven't entered).
+ */
+ public async cleanMemberState(): Promise<void> {
+ const { devices: myDevices } = await this.client.getDevices();
+ const deviceMap = new Map<string, IMyDevice>(myDevices.map((d) => [d.device_id, d]));
+
+ // updateDevices takes care of filtering out inactive devices for us
+ await this.updateDevices((devices) => {
+ const newDevices = devices.filter((d) => {
+ const device = deviceMap.get(d.device_id);
+ return (
+ device?.last_seen_ts !== undefined &&
+ !(
+ d.device_id === this.client.getDeviceId()! &&
+ this.state !== GroupCallState.Entered &&
+ !this.enteredViaAnotherSession
+ )
+ );
+ });
+
+ // Skip the update if the devices are unchanged
+ return newDevices.length === devices.length ? null : newDevices;
+ });
+ }
+
+ private onRoomState = (): void => this.updateParticipants();
+
+ private onParticipantsChanged = (): void => {
+ // Re-run setTracksEnabled on all calls, so that participants that just
+ // left get denied access to our media, and participants that just
+ // joined get granted access
+ this.forEachCall((call) => {
+ const expected = this.callExpected(call);
+ for (const feed of call.getLocalFeeds()) {
+ setTracksEnabled(feed.stream.getAudioTracks(), !feed.isAudioMuted() && expected);
+ setTracksEnabled(feed.stream.getVideoTracks(), !feed.isVideoMuted() && expected);
+ }
+ });
+
+ if (this.state === GroupCallState.Entered) this.placeOutgoingCalls();
+ };
+
+ private onStateChanged = (newState: GroupCallState, oldState: GroupCallState): void => {
+ if (
+ newState === GroupCallState.Entered ||
+ oldState === GroupCallState.Entered ||
+ newState === GroupCallState.Ended
+ ) {
+ // We either entered, left, or ended the call
+ this.updateParticipants();
+ this.updateMemberState().catch((e) =>
+ logger.error(
+                    `GroupCall ${this.groupCallId} onStateChanged() failed to update member state devices`,
+ e,
+ ),
+ );
+ }
+ };
+
+ private onLocalFeedsChanged = (): void => {
+ if (this.state === GroupCallState.Entered) {
+ this.updateMemberState().catch((e) =>
+ logger.error(
+ `GroupCall ${this.groupCallId} onLocalFeedsChanged() failed to update member state feeds`,
+ e,
+ ),
+ );
+ }
+ };
+
+ public getGroupCallStats(): GroupCallStats {
+ return this.stats;
+ }
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/groupCallEventHandler.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/groupCallEventHandler.ts
new file mode 100644
index 0000000..08487bd
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/groupCallEventHandler.ts
@@ -0,0 +1,232 @@
+/*
+Copyright 2021 Šimon Brandner <simon.bra.ag@gmail.com>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { MatrixEvent } from "../models/event";
+import { MatrixClient, ClientEvent } from "../client";
+import { GroupCall, GroupCallIntent, GroupCallType, IGroupCallDataChannelOptions } from "./groupCall";
+import { Room } from "../models/room";
+import { RoomState, RoomStateEvent } from "../models/room-state";
+import { RoomMember } from "../models/room-member";
+import { logger } from "../logger";
+import { EventType } from "../@types/event";
+import { SyncState } from "../sync";
+
+export enum GroupCallEventHandlerEvent {
+ Incoming = "GroupCall.incoming",
+ Outgoing = "GroupCall.outgoing",
+ Ended = "GroupCall.ended",
+ Participants = "GroupCall.participants",
+}
+
+export type GroupCallEventHandlerEventHandlerMap = {
+ [GroupCallEventHandlerEvent.Incoming]: (call: GroupCall) => void;
+ [GroupCallEventHandlerEvent.Outgoing]: (call: GroupCall) => void;
+ [GroupCallEventHandlerEvent.Ended]: (call: GroupCall) => void;
+ [GroupCallEventHandlerEvent.Participants]: (participants: RoomMember[], call: GroupCall) => void;
+};
+
+interface RoomDeferred {
+ prom: Promise<void>;
+ resolve?: () => void;
+}
+
+export class GroupCallEventHandler {
+ public groupCalls = new Map<string, GroupCall>(); // roomId -> GroupCall
+
+ // All rooms we know about and whether we've seen a 'Room' event
+ // for them. The promise will be fulfilled once we've processed that
+    // event, which means we're "up to date" on what calls are in a room
+    // and can safely look up its group call.
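+    //
+    // As an illustrative sketch only (assuming `handler` is an instance of this
+    // class), a consumer that needs the up-to-date call state can do:
+    //
+    //     await handler.waitUntilRoomReadyForGroupCalls(roomId);
+    //     const groupCall = handler.groupCalls.get(roomId);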
+ private roomDeferreds = new Map<string, RoomDeferred>();
+
+ public constructor(private client: MatrixClient) {}
+
+ public async start(): Promise<void> {
+ // We wait until the client has started syncing for real.
+ // This is because we only support one call at a time, and want
+ // the latest. We therefore want the latest state of the room before
+ // we create a group call for the room so we can be fairly sure that
+ // the group call we create is really the latest one.
+ if (this.client.getSyncState() !== SyncState.Syncing) {
+ logger.debug("GroupCallEventHandler start() waiting for client to start syncing");
+ await new Promise<void>((resolve) => {
+ const onSync = (): void => {
+ if (this.client.getSyncState() === SyncState.Syncing) {
+ this.client.off(ClientEvent.Sync, onSync);
+ return resolve();
+ }
+ };
+ this.client.on(ClientEvent.Sync, onSync);
+ });
+ }
+
+ const rooms = this.client.getRooms();
+
+ for (const room of rooms) {
+ this.createGroupCallForRoom(room);
+ }
+
+ this.client.on(ClientEvent.Room, this.onRoomsChanged);
+ this.client.on(RoomStateEvent.Events, this.onRoomStateChanged);
+ }
+
+    public stop(): void {
+        this.client.removeListener(ClientEvent.Room, this.onRoomsChanged);
+        this.client.removeListener(RoomStateEvent.Events, this.onRoomStateChanged);
+    }
+
+ private getRoomDeferred(roomId: string): RoomDeferred {
+ let deferred = this.roomDeferreds.get(roomId);
+ if (deferred === undefined) {
+ let resolveFunc: () => void;
+ deferred = {
+ prom: new Promise<void>((resolve) => {
+ resolveFunc = resolve;
+ }),
+ };
+ deferred.resolve = resolveFunc!;
+ this.roomDeferreds.set(roomId, deferred);
+ }
+
+ return deferred;
+ }
+
+ public waitUntilRoomReadyForGroupCalls(roomId: string): Promise<void> {
+ return this.getRoomDeferred(roomId).prom;
+ }
+
+ public getGroupCallById(groupCallId: string): GroupCall | undefined {
+ return [...this.groupCalls.values()].find((groupCall) => groupCall.groupCallId === groupCallId);
+ }
+
+ private createGroupCallForRoom(room: Room): void {
+ const callEvents = room.currentState.getStateEvents(EventType.GroupCallPrefix);
+ const sortedCallEvents = callEvents.sort((a, b) => b.getTs() - a.getTs());
+
+ for (const callEvent of sortedCallEvents) {
+ const content = callEvent.getContent();
+
+ if (content["m.terminated"] || callEvent.isRedacted()) {
+ continue;
+ }
+
+ logger.debug(
+ `GroupCallEventHandler createGroupCallForRoom() choosing group call from possible calls (stateKey=${callEvent.getStateKey()}, ts=${callEvent.getTs()}, roomId=${
+ room.roomId
+ }, numOfPossibleCalls=${callEvents.length})`,
+ );
+
+ this.createGroupCallFromRoomStateEvent(callEvent);
+ break;
+ }
+
+ logger.info(`GroupCallEventHandler createGroupCallForRoom() processed room (roomId=${room.roomId})`);
+ this.getRoomDeferred(room.roomId).resolve!();
+ }
+
+ private createGroupCallFromRoomStateEvent(event: MatrixEvent): GroupCall | undefined {
+ const roomId = event.getRoomId();
+ const content = event.getContent();
+
+ const room = this.client.getRoom(roomId);
+
+ if (!room) {
+ logger.warn(
+ `GroupCallEventHandler createGroupCallFromRoomStateEvent() couldn't find room for call (roomId=${roomId})`,
+ );
+ return;
+ }
+
+ const groupCallId = event.getStateKey();
+
+ const callType = content["m.type"];
+
+ if (!Object.values(GroupCallType).includes(callType)) {
+ logger.warn(
+ `GroupCallEventHandler createGroupCallFromRoomStateEvent() received invalid call type (type=${callType}, roomId=${roomId})`,
+ );
+ return;
+ }
+
+ const callIntent = content["m.intent"];
+
+ if (!Object.values(GroupCallIntent).includes(callIntent)) {
+            logger.warn(`Received invalid group call intent (intent=${callIntent}, roomId=${roomId})`);
+ return;
+ }
+
+ const isPtt = Boolean(content["io.element.ptt"]);
+
+ let dataChannelOptions: IGroupCallDataChannelOptions | undefined;
+
+ if (content?.dataChannelsEnabled && content?.dataChannelOptions) {
+ // Pull out just the dataChannelOptions we want to support.
+ const { ordered, maxPacketLifeTime, maxRetransmits, protocol } = content.dataChannelOptions;
+ dataChannelOptions = { ordered, maxPacketLifeTime, maxRetransmits, protocol };
+ }
+
+ const groupCall = new GroupCall(
+ this.client,
+ room,
+ callType,
+ isPtt,
+ callIntent,
+ groupCallId,
+            // Without a media section a WebRTC connection is not possible, so an RTCDataChannel is needed to be
+            // able to set up a no-media WebRTC connection anyway.
+ content?.dataChannelsEnabled || this.client.isVoipWithNoMediaAllowed,
+ dataChannelOptions,
+ this.client.isVoipWithNoMediaAllowed,
+ );
+
+ this.groupCalls.set(room.roomId, groupCall);
+ this.client.emit(GroupCallEventHandlerEvent.Incoming, groupCall);
+
+ return groupCall;
+ }
+
+ private onRoomsChanged = (room: Room): void => {
+ this.createGroupCallForRoom(room);
+ };
+
+ private onRoomStateChanged = (event: MatrixEvent, state: RoomState): void => {
+ const eventType = event.getType();
+
+ if (eventType === EventType.GroupCallPrefix) {
+ const groupCallId = event.getStateKey();
+ const content = event.getContent();
+
+ const currentGroupCall = this.groupCalls.get(state.roomId);
+
+ if (!currentGroupCall && !content["m.terminated"] && !event.isRedacted()) {
+ this.createGroupCallFromRoomStateEvent(event);
+ } else if (currentGroupCall && currentGroupCall.groupCallId === groupCallId) {
+ if (content["m.terminated"] || event.isRedacted()) {
+ currentGroupCall.terminate(false);
+ } else if (content["m.type"] !== currentGroupCall.type) {
+ // TODO: Handle the callType changing when the room state changes
+ logger.warn(
+ `GroupCallEventHandler onRoomStateChanged() currently does not support changing type (roomId=${state.roomId})`,
+ );
+ }
+ } else if (currentGroupCall && currentGroupCall.groupCallId !== groupCallId) {
+ // TODO: Handle new group calls and multiple group calls
+ logger.warn(
+ `GroupCallEventHandler onRoomStateChanged() currently does not support multiple calls (roomId=${state.roomId})`,
+ );
+ }
+ }
+ };
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/mediaHandler.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/mediaHandler.ts
new file mode 100644
index 0000000..7f65835
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/mediaHandler.ts
@@ -0,0 +1,469 @@
+/*
+Copyright 2015, 2016 OpenMarket Ltd
+Copyright 2017 New Vector Ltd
+Copyright 2019, 2020 The Matrix.org Foundation C.I.C.
+Copyright 2021 - 2022 Šimon Brandner <simon.bra.ag@gmail.com>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { TypedEventEmitter } from "../models/typed-event-emitter";
+import { GroupCallType, GroupCallState } from "../webrtc/groupCall";
+import { logger } from "../logger";
+import { MatrixClient } from "../client";
+
+export enum MediaHandlerEvent {
+ LocalStreamsChanged = "local_streams_changed",
+}
+
+export type MediaHandlerEventHandlerMap = {
+ [MediaHandlerEvent.LocalStreamsChanged]: () => void;
+};
+
+export interface IScreensharingOpts {
+ desktopCapturerSourceId?: string;
+ audio?: boolean;
+ // For electron screen capture, there are very few options for detecting electron
+ // apart from inspecting the user agent or just trying getDisplayMedia() and
+ // catching the failure, so we do the latter - this flag tells the function to just
+ // throw an error so we can catch it in this case, rather than logging and emitting.
+ throwOnFail?: boolean;
+}
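+
+// Illustrative sketch only (not part of this module): the detection pattern described
+// in the comment above amounts to attempting capture and treating a rejection as
+// "probably Electron / no usable getDisplayMedia", e.g.:
+//
+//     try {
+//         await navigator.mediaDevices.getDisplayMedia({ video: true });
+//     } catch (e) {
+//         // likely Electron (or capture denied); fall back to passing a
+//         // desktopCapturerSourceId via IScreensharingOpts instead
+//     }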
+
+export interface AudioSettings {
+ autoGainControl: boolean;
+ echoCancellation: boolean;
+ noiseSuppression: boolean;
+}
+
+export class MediaHandler extends TypedEventEmitter<
+ MediaHandlerEvent.LocalStreamsChanged,
+ MediaHandlerEventHandlerMap
+> {
+ private audioInput?: string;
+ private audioSettings?: AudioSettings;
+ private videoInput?: string;
+ private localUserMediaStream?: MediaStream;
+ public userMediaStreams: MediaStream[] = [];
+ public screensharingStreams: MediaStream[] = [];
+
+ // Promise chain to serialise calls to getMediaStream
+ private getMediaStreamPromise?: Promise<MediaStream>;
+
+ public constructor(private client: MatrixClient) {
+ super();
+ }
+
+ public restoreMediaSettings(audioInput: string, videoInput: string): void {
+ this.audioInput = audioInput;
+ this.videoInput = videoInput;
+ }
+
+ /**
+ * Set an audio input device to use for MatrixCalls
+ * @param deviceId - the identifier for the device
+ * undefined treated as unset
+ */
+ public async setAudioInput(deviceId: string): Promise<void> {
+ logger.info(`MediaHandler setAudioInput() running (deviceId=${deviceId})`);
+
+ if (this.audioInput === deviceId) return;
+
+ this.audioInput = deviceId;
+ await this.updateLocalUsermediaStreams();
+ }
+
+ /**
+ * Set audio settings for MatrixCalls
+ * @param opts - audio options to set
+ */
+ public async setAudioSettings(opts: AudioSettings): Promise<void> {
+ logger.info(`MediaHandler setAudioSettings() running (opts=${JSON.stringify(opts)})`);
+
+ this.audioSettings = Object.assign({}, opts) as AudioSettings;
+ await this.updateLocalUsermediaStreams();
+ }
+
+ /**
+ * Set a video input device to use for MatrixCalls
+ * @param deviceId - the identifier for the device
+ * undefined treated as unset
+ */
+ public async setVideoInput(deviceId: string): Promise<void> {
+ logger.info(`MediaHandler setVideoInput() running (deviceId=${deviceId})`);
+
+ if (this.videoInput === deviceId) return;
+
+ this.videoInput = deviceId;
+ await this.updateLocalUsermediaStreams();
+ }
+
+ /**
+ * Set media input devices to use for MatrixCalls
+ * @param audioInput - the identifier for the audio device
+ * @param videoInput - the identifier for the video device
+ * undefined treated as unset
+ */
+ public async setMediaInputs(audioInput: string, videoInput: string): Promise<void> {
+ logger.log(`MediaHandler setMediaInputs() running (audioInput: ${audioInput} videoInput: ${videoInput})`);
+ this.audioInput = audioInput;
+ this.videoInput = videoInput;
+ await this.updateLocalUsermediaStreams();
+ }
+
+ /*
+ * Requests new usermedia streams and replace the old ones
+ */
+ public async updateLocalUsermediaStreams(): Promise<void> {
+ if (this.userMediaStreams.length === 0) return;
+
+ const callMediaStreamParams: Map<string, { audio: boolean; video: boolean }> = new Map();
+ for (const call of this.client.callEventHandler!.calls.values()) {
+ callMediaStreamParams.set(call.callId, {
+ audio: call.hasLocalUserMediaAudioTrack,
+ video: call.hasLocalUserMediaVideoTrack,
+ });
+ }
+
+ for (const stream of this.userMediaStreams) {
+ logger.log(`MediaHandler updateLocalUsermediaStreams() stopping all tracks (streamId=${stream.id})`);
+ for (const track of stream.getTracks()) {
+ track.stop();
+ }
+ }
+
+ this.userMediaStreams = [];
+ this.localUserMediaStream = undefined;
+
+ for (const call of this.client.callEventHandler!.calls.values()) {
+ if (call.callHasEnded() || !callMediaStreamParams.has(call.callId)) {
+ continue;
+ }
+
+ const { audio, video } = callMediaStreamParams.get(call.callId)!;
+
+ logger.log(
+ `MediaHandler updateLocalUsermediaStreams() calling getUserMediaStream() (callId=${call.callId})`,
+ );
+ const stream = await this.getUserMediaStream(audio, video);
+
+ if (call.callHasEnded()) {
+ continue;
+ }
+
+ await call.updateLocalUsermediaStream(stream);
+ }
+
+ for (const groupCall of this.client.groupCallEventHandler!.groupCalls.values()) {
+ if (!groupCall.localCallFeed) {
+ continue;
+ }
+
+ logger.log(
+ `MediaHandler updateLocalUsermediaStreams() calling getUserMediaStream() (groupCallId=${groupCall.groupCallId})`,
+ );
+ const stream = await this.getUserMediaStream(true, groupCall.type === GroupCallType.Video);
+
+ if (groupCall.state === GroupCallState.Ended) {
+ continue;
+ }
+
+ await groupCall.updateLocalUsermediaStream(stream);
+ }
+
+ this.emit(MediaHandlerEvent.LocalStreamsChanged);
+ }
+
+ public async hasAudioDevice(): Promise<boolean> {
+ try {
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ return devices.filter((device) => device.kind === "audioinput").length > 0;
+ } catch (err) {
+            logger.log(`MediaHandler hasAudioDevice() navigator.mediaDevices.enumerateDevices() failed`, err);
+ return false;
+ }
+ }
+
+ public async hasVideoDevice(): Promise<boolean> {
+ try {
+ const devices = await navigator.mediaDevices.enumerateDevices();
+ return devices.filter((device) => device.kind === "videoinput").length > 0;
+ } catch (err) {
+            logger.log(`MediaHandler hasVideoDevice() navigator.mediaDevices.enumerateDevices() failed`, err);
+ return false;
+ }
+ }
+
+ /**
+ * @param audio - should have an audio track
+ * @param video - should have a video track
+     * @param reusable - whether the stream is allowed to be reused by the MediaHandler
+     * @returns a local usermedia stream with the requested kinds of tracks
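+     *
+     * @example
+     * A minimal usage sketch (assuming `mediaHandler` is the client's MediaHandler
+     * instance): request an audio-only stream and release it again when done.
+     *
+     *     const stream = await mediaHandler.getUserMediaStream(true, false);
+     *     // ... attach it to a call or CallFeed ...
+     *     mediaHandler.stopUserMediaStream(stream);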
+ */
+ public async getUserMediaStream(audio: boolean, video: boolean, reusable = true): Promise<MediaStream> {
+        // Serialise calls, otherwise we can't sensibly re-use the stream
+ if (this.getMediaStreamPromise) {
+ this.getMediaStreamPromise = this.getMediaStreamPromise.then(() => {
+ return this.getUserMediaStreamInternal(audio, video, reusable);
+ });
+ } else {
+ this.getMediaStreamPromise = this.getUserMediaStreamInternal(audio, video, reusable);
+ }
+
+ return this.getMediaStreamPromise;
+ }
+
+ private async getUserMediaStreamInternal(audio: boolean, video: boolean, reusable: boolean): Promise<MediaStream> {
+ const shouldRequestAudio = audio && (await this.hasAudioDevice());
+ const shouldRequestVideo = video && (await this.hasVideoDevice());
+
+ let stream: MediaStream;
+
+ let canReuseStream = true;
+ if (this.localUserMediaStream) {
+ // This figures out if we can reuse the current localUsermediaStream
+ // based on whether or not the "mute state" (presence of tracks of a
+ // given kind) matches what is being requested
+ if (shouldRequestAudio !== this.localUserMediaStream.getAudioTracks().length > 0) {
+ canReuseStream = false;
+ }
+ if (shouldRequestVideo !== this.localUserMediaStream.getVideoTracks().length > 0) {
+ canReuseStream = false;
+ }
+
+ // This code checks that the device ID is the same as the localUserMediaStream stream, but we update
+ // the localUserMediaStream whenever the device ID changes (apart from when restoring) so it's not
+ // clear why this would ever be different, unless there's a race.
+ if (
+ shouldRequestAudio &&
+ this.localUserMediaStream.getAudioTracks()[0]?.getSettings()?.deviceId !== this.audioInput
+ ) {
+ canReuseStream = false;
+ }
+ if (
+ shouldRequestVideo &&
+ this.localUserMediaStream.getVideoTracks()[0]?.getSettings()?.deviceId !== this.videoInput
+ ) {
+ canReuseStream = false;
+ }
+ } else {
+ canReuseStream = false;
+ }
+
+ if (!canReuseStream) {
+ const constraints = this.getUserMediaContraints(shouldRequestAudio, shouldRequestVideo);
+ stream = await navigator.mediaDevices.getUserMedia(constraints);
+ logger.log(
+ `MediaHandler getUserMediaStreamInternal() calling getUserMediaStream (streamId=${
+ stream.id
+ }, shouldRequestAudio=${shouldRequestAudio}, shouldRequestVideo=${shouldRequestVideo}, constraints=${JSON.stringify(
+ constraints,
+ )})`,
+ );
+
+ for (const track of stream.getTracks()) {
+ const settings = track.getSettings();
+
+ if (track.kind === "audio") {
+ this.audioInput = settings.deviceId!;
+ } else if (track.kind === "video") {
+ this.videoInput = settings.deviceId!;
+ }
+ }
+
+ if (reusable) {
+ this.localUserMediaStream = stream;
+ }
+ } else {
+ stream = this.localUserMediaStream!.clone();
+ logger.log(
+ `MediaHandler getUserMediaStreamInternal() cloning (oldStreamId=${this.localUserMediaStream?.id} newStreamId=${stream.id} shouldRequestAudio=${shouldRequestAudio} shouldRequestVideo=${shouldRequestVideo})`,
+ );
+
+ if (!shouldRequestAudio) {
+ for (const track of stream.getAudioTracks()) {
+ stream.removeTrack(track);
+ }
+ }
+
+ if (!shouldRequestVideo) {
+ for (const track of stream.getVideoTracks()) {
+ stream.removeTrack(track);
+ }
+ }
+ }
+
+ if (reusable) {
+ this.userMediaStreams.push(stream);
+ }
+
+ this.emit(MediaHandlerEvent.LocalStreamsChanged);
+
+ return stream;
+ }
+
+ /**
+ * Stops all tracks on the provided usermedia stream
+ */
+ public stopUserMediaStream(mediaStream: MediaStream): void {
+ logger.log(`MediaHandler stopUserMediaStream() stopping (streamId=${mediaStream.id})`);
+ for (const track of mediaStream.getTracks()) {
+ track.stop();
+ }
+
+ const index = this.userMediaStreams.indexOf(mediaStream);
+
+ if (index !== -1) {
+ logger.debug(
+ `MediaHandler stopUserMediaStream() splicing usermedia stream out stream array (streamId=${mediaStream.id})`,
+ mediaStream.id,
+ );
+ this.userMediaStreams.splice(index, 1);
+ }
+
+ this.emit(MediaHandlerEvent.LocalStreamsChanged);
+
+ if (this.localUserMediaStream === mediaStream) {
+ this.localUserMediaStream = undefined;
+ }
+ }
+
+ /**
+     * @param opts - screensharing options, including the sourceId for the Electron DesktopCapturer
+     * @param reusable - whether the stream is allowed to be reused by the MediaHandler
+     * @returns a screensharing stream based on the passed options
+ */
+ public async getScreensharingStream(opts: IScreensharingOpts = {}, reusable = true): Promise<MediaStream> {
+ let stream: MediaStream;
+
+ if (this.screensharingStreams.length === 0) {
+ const screenshareConstraints = this.getScreenshareContraints(opts);
+
+ if (opts.desktopCapturerSourceId) {
+ // We are using Electron
+ logger.debug(
+ `MediaHandler getScreensharingStream() calling getUserMedia() (opts=${JSON.stringify(opts)})`,
+ );
+ stream = await navigator.mediaDevices.getUserMedia(screenshareConstraints);
+ } else {
+ // We are not using Electron
+ logger.debug(
+ `MediaHandler getScreensharingStream() calling getDisplayMedia() (opts=${JSON.stringify(opts)})`,
+ );
+ stream = await navigator.mediaDevices.getDisplayMedia(screenshareConstraints);
+ }
+ } else {
+ const matchingStream = this.screensharingStreams[this.screensharingStreams.length - 1];
+ logger.log(`MediaHandler getScreensharingStream() cloning (streamId=${matchingStream.id})`);
+ stream = matchingStream.clone();
+ }
+
+ if (reusable) {
+ this.screensharingStreams.push(stream);
+ }
+
+ this.emit(MediaHandlerEvent.LocalStreamsChanged);
+
+ return stream;
+ }
+
+ /**
+ * Stops all tracks on the provided screensharing stream
+ */
+ public stopScreensharingStream(mediaStream: MediaStream): void {
+ logger.debug(`MediaHandler stopScreensharingStream() stopping stream (streamId=${mediaStream.id})`);
+ for (const track of mediaStream.getTracks()) {
+ track.stop();
+ }
+
+ const index = this.screensharingStreams.indexOf(mediaStream);
+
+ if (index !== -1) {
+ logger.debug(`MediaHandler stopScreensharingStream() splicing stream out (streamId=${mediaStream.id})`);
+ this.screensharingStreams.splice(index, 1);
+ }
+
+ this.emit(MediaHandlerEvent.LocalStreamsChanged);
+ }
+
+ /**
+ * Stops all local media tracks
+ */
+ public stopAllStreams(): void {
+ for (const stream of this.userMediaStreams) {
+ logger.log(`MediaHandler stopAllStreams() stopping (streamId=${stream.id})`);
+ for (const track of stream.getTracks()) {
+ track.stop();
+ }
+ }
+
+ for (const stream of this.screensharingStreams) {
+ for (const track of stream.getTracks()) {
+ track.stop();
+ }
+ }
+
+ this.userMediaStreams = [];
+ this.screensharingStreams = [];
+ this.localUserMediaStream = undefined;
+
+ this.emit(MediaHandlerEvent.LocalStreamsChanged);
+ }
+
+ private getUserMediaContraints(audio: boolean, video: boolean): MediaStreamConstraints {
+ const isWebkit = !!navigator.webkitGetUserMedia;
+
+ return {
+ audio: audio
+ ? {
+ deviceId: this.audioInput ? { ideal: this.audioInput } : undefined,
+ autoGainControl: this.audioSettings ? { ideal: this.audioSettings.autoGainControl } : undefined,
+ echoCancellation: this.audioSettings ? { ideal: this.audioSettings.echoCancellation } : undefined,
+ noiseSuppression: this.audioSettings ? { ideal: this.audioSettings.noiseSuppression } : undefined,
+ }
+ : false,
+ video: video
+ ? {
+ deviceId: this.videoInput ? { ideal: this.videoInput } : undefined,
+ /* We want 640x360. Chrome will give it only if we ask exactly,
+ FF refuses entirely if we ask exactly, so have to ask for ideal
+ instead
+ XXX: Is this still true?
+ */
+ width: isWebkit ? { exact: 640 } : { ideal: 640 },
+ height: isWebkit ? { exact: 360 } : { ideal: 360 },
+ }
+ : false,
+ };
+ }
+
+ private getScreenshareContraints(opts: IScreensharingOpts): DesktopCapturerConstraints {
+ const { desktopCapturerSourceId, audio } = opts;
+ if (desktopCapturerSourceId) {
+ return {
+ audio: audio ?? false,
+ video: {
+ mandatory: {
+ chromeMediaSource: "desktop",
+ chromeMediaSourceId: desktopCapturerSourceId,
+ },
+ },
+ };
+ } else {
+ return {
+ audio: audio ?? false,
+ video: true,
+ };
+ }
+ }
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/connectionStats.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/connectionStats.ts
new file mode 100644
index 0000000..dbde6e5
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/connectionStats.ts
@@ -0,0 +1,47 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { TransportStats } from "./transportStats";
+import { Bitrate } from "./media/mediaTrackStats";
+
+export interface ConnectionStatsBandwidth {
+ /**
+ * bytes per second
+ */
+ download: number;
+ /**
+ * bytes per second
+ */
+ upload: number;
+}
+
+export interface ConnectionStatsBitrate extends Bitrate {
+ audio?: Bitrate;
+ video?: Bitrate;
+}
+
+export interface PacketLoos {
+ total: number;
+ download: number;
+ upload: number;
+}
+
+export class ConnectionStats {
+ public bandwidth: ConnectionStatsBitrate = {} as ConnectionStatsBitrate;
+ public bitrate: ConnectionStatsBitrate = {} as ConnectionStatsBitrate;
+ public packetLoss: PacketLoos = {} as PacketLoos;
+ public transport: TransportStats[] = [];
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/connectionStatsReporter.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/connectionStatsReporter.ts
new file mode 100644
index 0000000..c43b9b4
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/connectionStatsReporter.ts
@@ -0,0 +1,28 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+import { Bitrate } from "./media/mediaTrackStats";
+
+export class ConnectionStatsReporter {
+ public static buildBandwidthReport(now: RTCIceCandidatePairStats): Bitrate {
+ const availableIncomingBitrate = now.availableIncomingBitrate;
+ const availableOutgoingBitrate = now.availableOutgoingBitrate;
+
+ return {
+ download: availableIncomingBitrate ? Math.round(availableIncomingBitrate / 1000) : 0,
+ upload: availableOutgoingBitrate ? Math.round(availableOutgoingBitrate / 1000) : 0,
+ };
+ }
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/groupCallStats.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/groupCallStats.ts
new file mode 100644
index 0000000..6d8c566
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/groupCallStats.ts
@@ -0,0 +1,64 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+import { StatsReportGatherer } from "./statsReportGatherer";
+import { StatsReportEmitter } from "./statsReportEmitter";
+
+export class GroupCallStats {
+ private timer: undefined | ReturnType<typeof setTimeout>;
+ private readonly gatherers: Map<string, StatsReportGatherer> = new Map<string, StatsReportGatherer>();
+ public readonly reports = new StatsReportEmitter();
+
+ public constructor(private groupCallId: string, private userId: string, private interval: number = 10000) {}
+
+ public start(): void {
+ if (this.timer === undefined) {
+ this.timer = setInterval(() => {
+ this.processStats();
+ }, this.interval);
+ }
+ }
+
+ public stop(): void {
+ if (this.timer !== undefined) {
+ clearInterval(this.timer);
+ this.gatherers.forEach((c) => c.stopProcessingStats());
+ }
+ }
+
+ public hasStatsReportGatherer(callId: string): boolean {
+ return this.gatherers.has(callId);
+ }
+
+ public addStatsReportGatherer(callId: string, userId: string, peerConnection: RTCPeerConnection): boolean {
+ if (this.hasStatsReportGatherer(callId)) {
+ return false;
+ }
+ this.gatherers.set(callId, new StatsReportGatherer(callId, userId, peerConnection, this.reports));
+ return true;
+ }
+
+ public removeStatsReportGatherer(callId: string): boolean {
+ return this.gatherers.delete(callId);
+ }
+
+ public getStatsReportGatherer(callId: string): StatsReportGatherer | undefined {
+ return this.hasStatsReportGatherer(callId) ? this.gatherers.get(callId) : undefined;
+ }
+
+ private processStats(): void {
+ this.gatherers.forEach((c) => c.processStats(this.groupCallId, this.userId));
+ }
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaSsrcHandler.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaSsrcHandler.ts
new file mode 100644
index 0000000..e606051
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaSsrcHandler.ts
@@ -0,0 +1,57 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { parse as parseSdp } from "sdp-transform";
+
+export type Mid = string;
+export type Ssrc = string;
+export type MapType = "local" | "remote";
+
+export class MediaSsrcHandler {
+ private readonly ssrcToMid = { local: new Map<Mid, Ssrc[]>(), remote: new Map<Mid, Ssrc[]>() };
+
+ public findMidBySsrc(ssrc: Ssrc, type: "local" | "remote"): Mid | undefined {
+ let mid: Mid | undefined;
+ this.ssrcToMid[type].forEach((ssrcs, m) => {
+ if (ssrcs.find((s) => s == ssrc)) {
+ mid = m;
+ return;
+ }
+ });
+ return mid;
+ }
+
+ public parse(description: string, type: MapType): void {
+ const sdp = parseSdp(description);
+ const ssrcToMid = new Map<Mid, Ssrc[]>();
+ sdp.media.forEach((m) => {
+ if ((!!m.mid && m.type === "video") || m.type === "audio") {
+ const ssrcs: Ssrc[] = [];
+ m.ssrcs?.forEach((ssrc) => {
+ if (ssrc.attribute === "cname") {
+ ssrcs.push(`${ssrc.id}`);
+ }
+ });
+ ssrcToMid.set(`${m.mid}`, ssrcs);
+ }
+ });
+ this.ssrcToMid[type] = ssrcToMid;
+ }
+
+ public getSsrcToMidMap(type: MapType): Map<Mid, Ssrc[]> {
+ return this.ssrcToMid[type];
+ }
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackHandler.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackHandler.ts
new file mode 100644
index 0000000..32580b1
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackHandler.ts
@@ -0,0 +1,71 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+export type TrackId = string;
+
+export class MediaTrackHandler {
+ public constructor(private readonly pc: RTCPeerConnection) {}
+
+ public getLocalTracks(kind: "audio" | "video"): MediaStreamTrack[] {
+ const isNotNullAndKind = (track: MediaStreamTrack | null): boolean => {
+ return track !== null && track.kind === kind;
+ };
+        // @ts-ignore The linter doesn't get it
+ return this.pc
+ .getTransceivers()
+ .filter((t) => t.currentDirection === "sendonly" || t.currentDirection === "sendrecv")
+ .filter((t) => t.sender !== null)
+ .map((t) => t.sender)
+ .map((s) => s.track)
+ .filter(isNotNullAndKind);
+ }
+
+ public getTackById(trackId: string): MediaStreamTrack | undefined {
+ return this.pc
+ .getTransceivers()
+ .map((t) => {
+ if (t?.sender.track !== null && t.sender.track.id === trackId) {
+ return t.sender.track;
+ }
+ if (t?.receiver.track !== null && t.receiver.track.id === trackId) {
+ return t.receiver.track;
+ }
+ return undefined;
+ })
+ .find((t) => t !== undefined);
+ }
+
+ public getLocalTrackIdByMid(mid: string): string | undefined {
+ const transceiver = this.pc.getTransceivers().find((t) => t.mid === mid);
+ if (transceiver !== undefined && !!transceiver.sender && !!transceiver.sender.track) {
+ return transceiver.sender.track.id;
+ }
+ return undefined;
+ }
+
+ public getRemoteTrackIdByMid(mid: string): string | undefined {
+ const transceiver = this.pc.getTransceivers().find((t) => t.mid === mid);
+ if (transceiver !== undefined && !!transceiver.receiver && !!transceiver.receiver.track) {
+ return transceiver.receiver.track.id;
+ }
+ return undefined;
+ }
+
+ public getActiveSimulcastStreams(): number {
+        // TODO: implement this properly; check how many layers are configured
+ return 3;
+ }
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackStats.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackStats.ts
new file mode 100644
index 0000000..69ee9bd
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackStats.ts
@@ -0,0 +1,104 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { TrackId } from "./mediaTrackHandler";
+
+export interface PacketLoss {
+ packetsTotal: number;
+ packetsLost: number;
+ isDownloadStream: boolean;
+}
+
+export interface Bitrate {
+ /**
+ * bytes per second
+ */
+ download: number;
+ /**
+ * bytes per second
+ */
+ upload: number;
+}
+
+export interface Resolution {
+ width: number;
+ height: number;
+}
+
+export type TrackStatsType = "local" | "remote";
+
+export class MediaTrackStats {
+ private loss: PacketLoss = { packetsTotal: 0, packetsLost: 0, isDownloadStream: false };
+ private bitrate: Bitrate = { download: 0, upload: 0 };
+ private resolution: Resolution = { width: -1, height: -1 };
+ private framerate = 0;
+ private codec = "";
+
+ public constructor(
+ public readonly trackId: TrackId,
+ public readonly type: TrackStatsType,
+ public readonly kind: "audio" | "video",
+ ) {}
+
+ public getType(): TrackStatsType {
+ return this.type;
+ }
+
+    public setLoss(loss: PacketLoss): void {
+        this.loss = loss;
+    }
+
+ public getLoss(): PacketLoss {
+ return this.loss;
+ }
+
+ public setResolution(resolution: Resolution): void {
+ this.resolution = resolution;
+ }
+
+ public getResolution(): Resolution {
+ return this.resolution;
+ }
+
+ public setFramerate(framerate: number): void {
+ this.framerate = framerate;
+ }
+
+ public getFramerate(): number {
+ return this.framerate;
+ }
+
+ public setBitrate(bitrate: Bitrate): void {
+ this.bitrate = bitrate;
+ }
+
+ public getBitrate(): Bitrate {
+ return this.bitrate;
+ }
+
+ public setCodec(codecShortType: string): boolean {
+ this.codec = codecShortType;
+ return true;
+ }
+
+ public getCodec(): string {
+ return this.codec;
+ }
+
+ public resetBitrate(): void {
+ this.bitrate = { download: 0, upload: 0 };
+ }
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackStatsHandler.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackStatsHandler.ts
new file mode 100644
index 0000000..6fb119c
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/media/mediaTrackStatsHandler.ts
@@ -0,0 +1,86 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+import { TrackID } from "../statsReport";
+import { MediaTrackStats } from "./mediaTrackStats";
+import { MediaTrackHandler } from "./mediaTrackHandler";
+import { MediaSsrcHandler } from "./mediaSsrcHandler";
+
+export class MediaTrackStatsHandler {
+ private readonly track2stats = new Map<TrackID, MediaTrackStats>();
+
+ public constructor(
+ public readonly mediaSsrcHandler: MediaSsrcHandler,
+ public readonly mediaTrackHandler: MediaTrackHandler,
+ ) {}
+
+ /**
+     * Find tracks by rtc stats
+     * The `report` argument is typed `any` because the stats API is not consistent:
+     * for example, `trackIdentifier` and `mid` do not exist in every implementation
+ * https://www.w3.org/TR/webrtc-stats/#dom-rtcinboundrtpstreamstats
+ * https://developer.mozilla.org/en-US/docs/Web/API/RTCInboundRtpStreamStats
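+     *
+     * @example
+     * Hedged sketches of report shapes this method can resolve (field names follow the
+     * stats spec linked above; the values here are made up):
+     *
+     *     { trackIdentifier: "some-track-id" }   // resolved directly
+     *     { mid: "0" }                           // resolved via the transceiver mid
+     *     { ssrc: 1234567 }                      // resolved via the SDP ssrc-to-mid map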
+ */
+ public findTrack2Stats(report: any, type: "remote" | "local"): MediaTrackStats | undefined {
+ let trackID;
+ if (report.trackIdentifier) {
+ trackID = report.trackIdentifier;
+ } else if (report.mid) {
+ trackID =
+ type === "remote"
+ ? this.mediaTrackHandler.getRemoteTrackIdByMid(report.mid)
+ : this.mediaTrackHandler.getLocalTrackIdByMid(report.mid);
+ } else if (report.ssrc) {
+ const mid = this.mediaSsrcHandler.findMidBySsrc(report.ssrc, type);
+ if (!mid) {
+ return undefined;
+ }
+            trackID =
+                type === "remote"
+                    ? this.mediaTrackHandler.getRemoteTrackIdByMid(mid)
+                    : this.mediaTrackHandler.getLocalTrackIdByMid(mid);
+ }
+
+ if (!trackID) {
+ return undefined;
+ }
+
+ let trackStats = this.track2stats.get(trackID);
+
+ if (!trackStats) {
+ const track = this.mediaTrackHandler.getTackById(trackID);
+ if (track !== undefined) {
+ const kind: "audio" | "video" = track.kind === "audio" ? track.kind : "video";
+ trackStats = new MediaTrackStats(trackID, type, kind);
+ this.track2stats.set(trackID, trackStats);
+ } else {
+ return undefined;
+ }
+ }
+ return trackStats;
+ }
+
+ public findLocalVideoTrackStats(report: any): MediaTrackStats | undefined {
+ const localVideoTracks = this.mediaTrackHandler.getLocalTracks("video");
+ if (localVideoTracks.length === 0) {
+ return undefined;
+ }
+ return this.findTrack2Stats(report, "local");
+ }
+
+ public getTrack2stats(): Map<TrackID, MediaTrackStats> {
+ return this.track2stats;
+ }
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReport.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReport.ts
new file mode 100644
index 0000000..56d6c4b
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReport.ts
@@ -0,0 +1,56 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { ConnectionStatsBandwidth, ConnectionStatsBitrate, PacketLoos } from "./connectionStats";
+import { TransportStats } from "./transportStats";
+import { Resolution } from "./media/mediaTrackStats";
+
+export enum StatsReport {
+ CONNECTION_STATS = "StatsReport.connection_stats",
+ BYTE_SENT_STATS = "StatsReport.byte_sent_stats",
+}
+
+export type TrackID = string;
+export type ByteSend = number;
+
+export interface ByteSentStatsReport extends Map<TrackID, ByteSend> {
+    // is a map: `local trackID` => bytes sent
+}
+
+export interface ConnectionStatsReport {
+ bandwidth: ConnectionStatsBandwidth;
+ bitrate: ConnectionStatsBitrate;
+ packetLoss: PacketLoos;
+ resolution: ResolutionMap;
+ framerate: FramerateMap;
+ codec: CodecMap;
+ transport: TransportStats[];
+}
+
+export interface ResolutionMap {
+ local: Map<TrackID, Resolution>;
+ remote: Map<TrackID, Resolution>;
+}
+
+export interface FramerateMap {
+ local: Map<TrackID, number>;
+ remote: Map<TrackID, number>;
+}
+
+export interface CodecMap {
+ local: Map<TrackID, string>;
+ remote: Map<TrackID, string>;
+}
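+
+ /*
+ * Hypothetical consumer sketch (illustration only): a ByteSentStatsReport is a plain Map
+ * keyed by the local track id, so reading it is ordinary Map iteration:
+ *
+ *   const byteSent: ByteSentStatsReport = new Map<TrackID, ByteSend>();
+ *   byteSent.set("local-track-1", 42000);
+ *   for (const [trackId, bytes] of byteSent) {
+ *       console.log(`${trackId} sent ${bytes} bytes`);
+ *   }
+ */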
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportBuilder.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportBuilder.ts
new file mode 100644
index 0000000..c1af471
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportBuilder.ts
@@ -0,0 +1,110 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+import { CodecMap, ConnectionStatsReport, FramerateMap, ResolutionMap, TrackID } from "./statsReport";
+import { MediaTrackStats, Resolution } from "./media/mediaTrackStats";
+
+export class StatsReportBuilder {
+ public static build(stats: Map<TrackID, MediaTrackStats>): ConnectionStatsReport {
+ const report = {} as ConnectionStatsReport;
+
+ // process stats
+ const totalPackets = {
+ download: 0,
+ upload: 0,
+ };
+ const lostPackets = {
+ download: 0,
+ upload: 0,
+ };
+ let bitrateDownload = 0;
+ let bitrateUpload = 0;
+ const resolutions: ResolutionMap = {
+ local: new Map<TrackID, Resolution>(),
+ remote: new Map<TrackID, Resolution>(),
+ };
+ const framerates: FramerateMap = { local: new Map<TrackID, number>(), remote: new Map<TrackID, number>() };
+ const codecs: CodecMap = { local: new Map<TrackID, string>(), remote: new Map<TrackID, string>() };
+
+ let audioBitrateDownload = 0;
+ let audioBitrateUpload = 0;
+ let videoBitrateDownload = 0;
+ let videoBitrateUpload = 0;
+
+ for (const [trackId, trackStats] of stats) {
+ // process packet loss stats
+ const loss = trackStats.getLoss();
+ const type = loss.isDownloadStream ? "download" : "upload";
+
+ totalPackets[type] += loss.packetsTotal;
+ lostPackets[type] += loss.packetsLost;
+
+ // process bitrate stats
+ bitrateDownload += trackStats.getBitrate().download;
+ bitrateUpload += trackStats.getBitrate().upload;
+
+ // collect resolutions and framerates
+ if (trackStats.kind === "audio") {
+ audioBitrateDownload += trackStats.getBitrate().download;
+ audioBitrateUpload += trackStats.getBitrate().upload;
+ } else {
+ videoBitrateDownload += trackStats.getBitrate().download;
+ videoBitrateUpload += trackStats.getBitrate().upload;
+ }
+
+ resolutions[trackStats.getType()].set(trackId, trackStats.getResolution());
+ framerates[trackStats.getType()].set(trackId, trackStats.getFramerate());
+ codecs[trackStats.getType()].set(trackId, trackStats.getCodec());
+
+ trackStats.resetBitrate();
+ }
+
+ report.bitrate = {
+ upload: bitrateUpload,
+ download: bitrateDownload,
+ };
+
+ report.bitrate.audio = {
+ upload: audioBitrateUpload,
+ download: audioBitrateDownload,
+ };
+
+ report.bitrate.video = {
+ upload: videoBitrateUpload,
+ download: videoBitrateDownload,
+ };
+
+ report.packetLoss = {
+ total: StatsReportBuilder.calculatePacketLoss(
+ lostPackets.download + lostPackets.upload,
+ totalPackets.download + totalPackets.upload,
+ ),
+ download: StatsReportBuilder.calculatePacketLoss(lostPackets.download, totalPackets.download),
+ upload: StatsReportBuilder.calculatePacketLoss(lostPackets.upload, totalPackets.upload),
+ };
+ report.framerate = framerates;
+ report.resolution = resolutions;
+ report.codec = codecs;
+ return report;
+ }
+
+ private static calculatePacketLoss(lostPackets: number, totalPackets: number): number {
+ if (!totalPackets || totalPackets <= 0 || !lostPackets || lostPackets <= 0) {
+ return 0;
+ }
+
+ return Math.round((lostPackets / totalPackets) * 100);
+ }
+}
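+
+ /*
+ * Worked example (illustration only): with 5 packets lost out of 100 total on the download
+ * side, calculatePacketLoss(5, 100) returns Math.round((5 / 100) * 100) = 5, i.e. a 5%
+ * download loss; a zero or missing total short-circuits to 0 so no division by zero occurs.
+ */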
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportEmitter.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportEmitter.ts
new file mode 100644
index 0000000..cf01470
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportEmitter.ts
@@ -0,0 +1,33 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { TypedEventEmitter } from "../../models/typed-event-emitter";
+import { ByteSentStatsReport, ConnectionStatsReport, StatsReport } from "./statsReport";
+
+export type StatsReportHandlerMap = {
+ [StatsReport.BYTE_SENT_STATS]: (report: ByteSentStatsReport) => void;
+ [StatsReport.CONNECTION_STATS]: (report: ConnectionStatsReport) => void;
+};
+
+export class StatsReportEmitter extends TypedEventEmitter<StatsReport, StatsReportHandlerMap> {
+ public emitByteSendReport(byteSentStats: ByteSentStatsReport): void {
+ this.emit(StatsReport.BYTE_SENT_STATS, byteSentStats);
+ }
+
+ public emitConnectionStatsReport(report: ConnectionStatsReport): void {
+ this.emit(StatsReport.CONNECTION_STATS, report);
+ }
+}
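+
+ /*
+ * Hypothetical usage sketch (not part of the upstream file): since this is a
+ * TypedEventEmitter, consumers subscribe using the StatsReport enum as event keys:
+ *
+ *   const emitter = new StatsReportEmitter();
+ *   emitter.on(StatsReport.CONNECTION_STATS, (report) => console.log(report.bitrate));
+ *   emitter.on(StatsReport.BYTE_SENT_STATS, (report) => console.log(report.size));
+ */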
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportGatherer.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportGatherer.ts
new file mode 100644
index 0000000..769ba6e
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsReportGatherer.ts
@@ -0,0 +1,183 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+import { ConnectionStats } from "./connectionStats";
+import { StatsReportEmitter } from "./statsReportEmitter";
+import { ByteSend, ByteSentStatsReport, TrackID } from "./statsReport";
+import { ConnectionStatsReporter } from "./connectionStatsReporter";
+import { TransportStatsReporter } from "./transportStatsReporter";
+import { MediaSsrcHandler } from "./media/mediaSsrcHandler";
+import { MediaTrackHandler } from "./media/mediaTrackHandler";
+import { MediaTrackStatsHandler } from "./media/mediaTrackStatsHandler";
+import { TrackStatsReporter } from "./trackStatsReporter";
+import { StatsReportBuilder } from "./statsReportBuilder";
+import { StatsValueFormatter } from "./statsValueFormatter";
+
+export class StatsReportGatherer {
+ private isActive = true;
+ private previousStatsReport: RTCStatsReport | undefined;
+ private currentStatsReport: RTCStatsReport | undefined;
+ private readonly connectionStats = new ConnectionStats();
+
+ private readonly trackStats: MediaTrackStatsHandler;
+
+ // private readonly ssrcToMid = { local: new Map<Mid, Ssrc[]>(), remote: new Map<Mid, Ssrc[]>() };
+
+ public constructor(
+ public readonly callId: string,
+ public readonly remoteUserId: string,
+ private readonly pc: RTCPeerConnection,
+ private readonly emitter: StatsReportEmitter,
+ private readonly isFocus = true,
+ ) {
+ pc.addEventListener("signalingstatechange", this.onSignalStateChange.bind(this));
+ this.trackStats = new MediaTrackStatsHandler(new MediaSsrcHandler(), new MediaTrackHandler(pc));
+ }
+
+ public async processStats(groupCallId: string, localUserId: string): Promise<boolean> {
+ if (this.isActive) {
+ const statsPromise = this.pc.getStats();
+ if (typeof statsPromise?.then === "function") {
+ return statsPromise
+ .then((report) => {
+ // @ts-ignore
+ this.currentStatsReport = typeof report?.result === "function" ? report.result() : report;
+ try {
+ this.processStatsReport(groupCallId, localUserId);
+ } catch (error) {
+ this.isActive = false;
+ return false;
+ }
+
+ this.previousStatsReport = this.currentStatsReport;
+ return true;
+ })
+ .catch((error) => {
+ this.handleError(error);
+ return false;
+ });
+ }
+ this.isActive = false;
+ }
+ return Promise.resolve(false);
+ }
+
+ private processStatsReport(groupCallId: string, localUserId: string): void {
+ const byteSentStats: ByteSentStatsReport = new Map<TrackID, ByteSend>();
+
+ this.currentStatsReport?.forEach((now) => {
+ const before = this.previousStatsReport ? this.previousStatsReport.get(now.id) : null;
+ // RTCIceCandidatePairStats - https://w3c.github.io/webrtc-stats/#candidatepair-dict*
+ if (now.type === "candidate-pair" && now.nominated && now.state === "succeeded") {
+ this.connectionStats.bandwidth = ConnectionStatsReporter.buildBandwidthReport(now);
+ this.connectionStats.transport = TransportStatsReporter.buildReport(
+ this.currentStatsReport,
+ now,
+ this.connectionStats.transport,
+ this.isFocus,
+ );
+
+ // RTCReceivedRtpStreamStats
+ // https://w3c.github.io/webrtc-stats/#receivedrtpstats-dict*
+ // RTCSentRtpStreamStats
+ // https://w3c.github.io/webrtc-stats/#sentrtpstats-dict*
+ } else if (now.type === "inbound-rtp" || now.type === "outbound-rtp") {
+ const trackStats = this.trackStats.findTrack2Stats(
+ now,
+ now.type === "inbound-rtp" ? "remote" : "local",
+ );
+ if (!trackStats) {
+ return;
+ }
+
+ if (before) {
+ TrackStatsReporter.buildPacketsLost(trackStats, now, before);
+ }
+
+ // Get the resolution and framerate here for remote video sources only. For local video sources,
+ // the 'track' stats are used instead, since they carry the up-to-date resolution of the simulcast
+ // streams currently being sent. Promise-based getStats reports three 'outbound-rtp' streams, so
+ // extra calculation would be needed to determine the highest-resolution stream sent by the client
+ // if the 'outbound-rtp' stats were used.
+ if (now.type === "inbound-rtp") {
+ TrackStatsReporter.buildFramerateResolution(trackStats, now);
+ if (before) {
+ TrackStatsReporter.buildBitrateReceived(trackStats, now, before);
+ }
+ } else if (before) {
+ byteSentStats.set(trackStats.trackId, StatsValueFormatter.getNonNegativeValue(now.bytesSent));
+ TrackStatsReporter.buildBitrateSend(trackStats, now, before);
+ }
+ TrackStatsReporter.buildCodec(this.currentStatsReport, trackStats, now);
+ } else if (now.type === "track" && now.kind === "video" && !now.remoteSource) {
+ const trackStats = this.trackStats.findLocalVideoTrackStats(now);
+ if (!trackStats) {
+ return;
+ }
+ TrackStatsReporter.buildFramerateResolution(trackStats, now);
+ TrackStatsReporter.calculateSimulcastFramerate(
+ trackStats,
+ now,
+ before,
+ this.trackStats.mediaTrackHandler.getActiveSimulcastStreams(),
+ );
+ }
+ });
+
+ this.emitter.emitByteSendReport(byteSentStats);
+ this.processAndEmitReport();
+ }
+
+ public setActive(isActive: boolean): void {
+ this.isActive = isActive;
+ }
+
+ public getActive(): boolean {
+ return this.isActive;
+ }
+
+ private handleError(_: any): void {
+ this.isActive = false;
+ }
+
+ private processAndEmitReport(): void {
+ const report = StatsReportBuilder.build(this.trackStats.getTrack2stats());
+
+ this.connectionStats.bandwidth = report.bandwidth;
+ this.connectionStats.bitrate = report.bitrate;
+ this.connectionStats.packetLoss = report.packetLoss;
+
+ this.emitter.emitConnectionStatsReport({
+ ...report,
+ transport: this.connectionStats.transport,
+ });
+
+ this.connectionStats.transport = [];
+ }
+
+ public stopProcessingStats(): void {}
+
+ private onSignalStateChange(): void {
+ if (this.pc.signalingState === "stable") {
+ if (this.pc.currentRemoteDescription) {
+ this.trackStats.mediaSsrcHandler.parse(this.pc.currentRemoteDescription.sdp, "remote");
+ }
+ if (this.pc.currentLocalDescription) {
+ this.trackStats.mediaSsrcHandler.parse(this.pc.currentLocalDescription.sdp, "local");
+ }
+ }
+ }
+}
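+
+ /*
+ * Hypothetical polling sketch (illustration only; `pc`, `emitter`, the ids and the interval
+ * are assumptions, not part of the upstream code):
+ *
+ *   const gatherer = new StatsReportGatherer("callId", "@remote:example.org", pc, emitter);
+ *   setInterval(() => {
+ *       gatherer.processStats("groupCallId", "@local:example.org");
+ *   }, 3000);
+ *
+ * processStats resolves to false and deactivates the gatherer when getStats is not
+ * promise-based or when processing the report throws.
+ */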
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsValueFormatter.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsValueFormatter.ts
new file mode 100644
index 0000000..c658fa6
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/statsValueFormatter.ts
@@ -0,0 +1,27 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+export class StatsValueFormatter {
+ public static getNonNegativeValue(input: any): number {
+ let value = input;
+
+ if (typeof value !== "number") {
+ value = Number(value);
+ }
+
+ if (isNaN(value)) {
+ return 0;
+ }
+
+ return Math.max(0, value);
+ }
+}
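+
+ /*
+ * Illustration only: getNonNegativeValue("12") === 12, getNonNegativeValue(undefined) === 0
+ * and getNonNegativeValue(-5) === 0, so stats fields of unknown shape are coerced to a safe,
+ * non-negative number.
+ */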
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/trackStatsReporter.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/trackStatsReporter.ts
new file mode 100644
index 0000000..1f6fcd6
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/trackStatsReporter.ts
@@ -0,0 +1,117 @@
+import { MediaTrackStats } from "./media/mediaTrackStats";
+import { StatsValueFormatter } from "./statsValueFormatter";
+
+export class TrackStatsReporter {
+ public static buildFramerateResolution(trackStats: MediaTrackStats, now: any): void {
+ const resolution = {
+ height: now.frameHeight,
+ width: now.frameWidth,
+ };
+ const frameRate = now.framesPerSecond;
+
+ if (resolution.height && resolution.width) {
+ trackStats.setResolution(resolution);
+ }
+ trackStats.setFramerate(Math.round(frameRate || 0));
+ }
+
+ public static calculateSimulcastFramerate(trackStats: MediaTrackStats, now: any, before: any, layer: number): void {
+ let frameRate = trackStats.getFramerate();
+ if (!frameRate) {
+ if (before) {
+ const timeMs = now.timestamp - before.timestamp;
+
+ if (timeMs > 0 && now.framesSent) {
+ const numberOfFramesSinceBefore = now.framesSent - before.framesSent;
+
+ frameRate = (numberOfFramesSinceBefore / timeMs) * 1000;
+ }
+ }
+
+ if (!frameRate) {
+ return;
+ }
+ }
+
+ // Reset frame rate to 0 when video is suspended as a result of endpoint falling out of last-n.
+ frameRate = layer ? Math.round(frameRate / layer) : 0;
+ trackStats.setFramerate(frameRate);
+ }
+
+ public static buildCodec(report: RTCStatsReport | undefined, trackStats: MediaTrackStats, now: any): void {
+ const codec = report?.get(now.codecId);
+
+ if (codec) {
+ /**
+ * The mime type has the following form: video/VP8 or audio/ISAC,
+ * so we want to keep just the type after the '/'; audio and video
+ * keys will be added on the processing side.
+ */
+ const codecShortType = codec.mimeType.split("/")[1];
+
+ codecShortType && trackStats.setCodec(codecShortType);
+ }
+ }
+
+ public static buildBitrateReceived(trackStats: MediaTrackStats, now: any, before: any): void {
+ trackStats.setBitrate({
+ download: TrackStatsReporter.calculateBitrate(
+ now.bytesReceived,
+ before.bytesReceived,
+ now.timestamp,
+ before.timestamp,
+ ),
+ upload: 0,
+ });
+ }
+
+ public static buildBitrateSend(trackStats: MediaTrackStats, now: any, before: any): void {
+ trackStats.setBitrate({
+ download: 0,
+ upload: this.calculateBitrate(now.bytesSent, before.bytesSent, now.timestamp, before.timestamp),
+ });
+ }
+
+ public static buildPacketsLost(trackStats: MediaTrackStats, now: any, before: any): void {
+ const key = now.type === "outbound-rtp" ? "packetsSent" : "packetsReceived";
+
+ let packetsNow = now[key];
+ if (!packetsNow || packetsNow < 0) {
+ packetsNow = 0;
+ }
+
+ const packetsBefore = StatsValueFormatter.getNonNegativeValue(before[key]);
+ const packetsDiff = Math.max(0, packetsNow - packetsBefore);
+
+ const packetsLostNow = StatsValueFormatter.getNonNegativeValue(now.packetsLost);
+ const packetsLostBefore = StatsValueFormatter.getNonNegativeValue(before.packetsLost);
+ const packetsLostDiff = Math.max(0, packetsLostNow - packetsLostBefore);
+
+ trackStats.setLoss({
+ packetsTotal: packetsDiff + packetsLostDiff,
+ packetsLost: packetsLostDiff,
+ isDownloadStream: now.type !== "outbound-rtp",
+ });
+ }
+
+ private static calculateBitrate(
+ bytesNowAny: any,
+ bytesBeforeAny: any,
+ nowTimestamp: number,
+ beforeTimestamp: number,
+ ): number {
+ const bytesNow = StatsValueFormatter.getNonNegativeValue(bytesNowAny);
+ const bytesBefore = StatsValueFormatter.getNonNegativeValue(bytesBeforeAny);
+ const bytesProcessed = Math.max(0, bytesNow - bytesBefore);
+
+ const timeMs = nowTimestamp - beforeTimestamp;
+ let bitrateKbps = 0;
+
+ if (timeMs > 0) {
+ // TODO is there any reason to round here?
+ bitrateKbps = Math.round((bytesProcessed * 8) / timeMs);
+ }
+
+ return bitrateKbps;
+ }
+}
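+
+ /*
+ * Worked example (illustration only): if 125000 bytes were sent between two samples taken
+ * 1000 ms apart, calculateBitrate returns Math.round((125000 * 8) / 1000) = 1000, so the
+ * bitrate values stored on MediaTrackStats are in kbps.
+ */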
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/transportStats.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/transportStats.ts
new file mode 100644
index 0000000..2b6e975
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/transportStats.ts
@@ -0,0 +1,26 @@
+/*
+Copyright 2023 The Matrix.org Foundation C.I.C.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+export interface TransportStats {
+ ip: string;
+ type: string;
+ localIp: string;
+ isFocus: boolean;
+ localCandidateType: string;
+ remoteCandidateType: string;
+ networkType: string;
+ rtt: number;
+}
diff --git a/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/transportStatsReporter.ts b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/transportStatsReporter.ts
new file mode 100644
index 0000000..d419a73
--- /dev/null
+++ b/includes/external/matrix/node_modules/matrix-js-sdk/src/webrtc/stats/transportStatsReporter.ts
@@ -0,0 +1,48 @@
+import { TransportStats } from "./transportStats";
+
+export class TransportStatsReporter {
+ public static buildReport(
+ report: RTCStatsReport | undefined,
+ now: RTCIceCandidatePairStats,
+ conferenceStatsTransport: TransportStats[],
+ isFocus: boolean,
+ ): TransportStats[] {
+ const localUsedCandidate = report?.get(now.localCandidateId);
+ const remoteUsedCandidate = report?.get(now.remoteCandidateId);
+
+ // RTCIceCandidateStats
+ // https://w3c.github.io/webrtc-stats/#icecandidate-dict*
+ if (remoteUsedCandidate && localUsedCandidate) {
+ const remoteIpAddress =
+ remoteUsedCandidate.ip !== undefined ? remoteUsedCandidate.ip : remoteUsedCandidate.address;
+ const remotePort = remoteUsedCandidate.port;
+ const ip = `${remoteIpAddress}:${remotePort}`;
+
+ const localIpAddress =
+ localUsedCandidate.ip !== undefined ? localUsedCandidate.ip : localUsedCandidate.address;
+ const localPort = localUsedCandidate.port;
+ const localIp = `${localIpAddress}:${localPort}`;
+
+ const type = remoteUsedCandidate.protocol;
+
+ // Save the address unless it has been saved already.
+ if (
+ !conferenceStatsTransport.some(
+ (t: TransportStats) => t.ip === ip && t.type === type && t.localIp === localIp,
+ )
+ ) {
+ conferenceStatsTransport.push({
+ ip,
+ type,
+ localIp,
+ isFocus,
+ localCandidateType: localUsedCandidate.candidateType,
+ remoteCandidateType: remoteUsedCandidate.candidateType,
+ networkType: localUsedCandidate.networkType,
+ rtt: now.currentRoundTripTime ? now.currentRoundTripTime * 1000 : NaN,
+ } as TransportStats);
+ }
+ }
+ return conferenceStatsTransport;
+ }
+}
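+
+ /*
+ * Illustration only: currentRoundTripTime is reported in seconds by the WebRTC stats API, so
+ * a measured RTT of 0.045 is stored as 45 (ms) in the TransportStats entry. Entries are
+ * deduplicated on the (ip, type, localIp) triple, so repeated candidate-pair reports for the
+ * same transport do not grow the list.
+ */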