import { getLogger } from '@jitsi/logger';
import { Interop } from '@jitsi/sdp-interop';
import transform from 'sdp-transform';
import * as CodecMimeType from '../../service/RTC/CodecMimeType';
import MediaDirection from '../../service/RTC/MediaDirection';
import * as MediaType from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import * as SignalingEvents from '../../service/RTC/SignalingEvents';
import { getSourceNameForJitsiTrack } from '../../service/RTC/SignalingLayer';
import * as VideoType from '../../service/RTC/VideoType';
import { SS_DEFAULT_FRAME_RATE } from '../RTC/ScreenObtainer';
import browser from '../browser';
import FeatureFlags from '../flags/FeatureFlags';
import LocalSdpMunger from '../sdp/LocalSdpMunger';
import RtxModifier from '../sdp/RtxModifier';
import SDP from '../sdp/SDP';
import SDPUtil from '../sdp/SDPUtil';
import SdpConsistency from '../sdp/SdpConsistency';
import { SdpTransformWrap } from '../sdp/SdpTransformUtil';
import * as GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
import JitsiRemoteTrack from './JitsiRemoteTrack';
import RTC from './RTC';
import RTCUtils from './RTCUtils';
import {
HD_BITRATE,
HD_SCALE_FACTOR,
SIM_LAYER_RIDS,
TPCUtils
} from './TPCUtils';
// FIXME SDP tools should end up in some kind of util module
const logger = getLogger(__filename);

// RTCDegradationPreference value applied to camera video senders.
const DEGRADATION_PREFERENCE_CAMERA = 'maintain-framerate';

// RTCDegradationPreference value applied to screenshare video senders.
const DEGRADATION_PREFERENCE_DESKTOP = 'maintain-resolution';
/* eslint-disable max-params */

/**
 * Creates new instance of 'TraceablePeerConnection'.
 *
 * @param {RTC} rtc the instance of RTC service
 * @param {number} id the peer connection id assigned by the parent RTC module.
 * @param {SignalingLayer} signalingLayer the signaling layer instance
 * @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
 * @param {object} constraints WebRTC 'PeerConnection' constraints
 * @param {boolean} isP2P indicates whether or not the new instance will be used in a peer to peer connection.
 * @param {object} options TraceablePeerConnection config options.
 * @param {boolean} options.disableSimulcast if set to 'true' will disable the simulcast.
 * @param {boolean} options.disableRtx if set to 'true' will disable the RTX.
 * @param {string} options.disabledCodec the mime type of the codec that should not be negotiated on the
 * peerconnection.
 * @param {string} options.preferredCodec the mime type of the codec that needs to be made the preferred codec for the
 * peerconnection.
 * @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
 * @param {boolean} options.usesUnifiedPlan Indicates if the browser is running in unified plan mode.
 *
 * FIXME: initially the purpose of TraceablePeerConnection was to be able to
 * debug the peer connection. Since many other responsibilities have been added
 * it would make sense to extract a separate class from it and come up with
 * a more suitable name.
 *
 * @constructor
 */
export default function TraceablePeerConnection(
        rtc,
        id,
        signalingLayer,
        pcConfig,
        constraints,
        isP2P,
        options) {

    /**
     * Indicates whether or not this peer connection instance is actively
     * sending/receiving audio media. When set to false the SDP audio
     * media direction will be adjusted to 'inactive' in order to suspend
     * the transmission.
     * @type {boolean}
     * @private
     */
    this.audioTransferActive = !(options.startSilent === true);

    /**
     * The DTMF sender instance used to send DTMF tones.
     *
     * @type {RTCDTMFSender|undefined}
     * @private
     */
    this._dtmfSender = undefined;

    /**
     * @typedef {Object} TouchToneRequest
     * @property {string} tones - The DTMF tones string as defined by
     * {@code RTCDTMFSender.insertDTMF}, 'tones' argument.
     * @property {number} duration - The amount of time in milliseconds that
     * each DTMF should last.
     * @property {string} interToneGap - The length of time in milliseconds to
     * wait between tones.
     */
    /**
     * TouchToneRequests which are waiting to be played. This queue is filled
     * if there are touch tones currently being played.
     *
     * @type {Array<TouchToneRequest>}
     * @private
     */
    this._dtmfTonesQueue = [];

    /**
     * Indicates whether or not this peer connection instance is actively
     * sending/receiving video media. When set to false the SDP video
     * media direction will be adjusted to 'inactive' in order to suspend
     * the transmission.
     * @type {boolean}
     * @private
     */
    this.videoTransferActive = true;

    /**
     * The parent instance of RTC service which created this
     * TraceablePeerConnection.
     * @type {RTC}
     */
    this.rtc = rtc;

    /**
     * The peer connection identifier assigned by the RTC module.
     * @type {number}
     */
    this.id = id;

    /**
     * Indicates whether or not this instance is used in a peer to peer
     * connection.
     * @type {boolean}
     */
    this.isP2P = isP2P;

    // FIXME: We should support multiple streams per jid.
    /**
     * The map holds remote tracks associated with this peer connection.
     * It maps user's JID to media type and remote track
     * (one track per media type per user's JID).
     * @type {Map<string, Map<MediaType, JitsiRemoteTrack>>}
     */
    this.remoteTracks = new Map();

    /**
     * A map which stores local tracks mapped by {@link JitsiLocalTrack.rtcId}
     * @type {Map<number, JitsiLocalTrack>}
     */
    this.localTracks = new Map();

    /**
     * Keeps tracks of the WebRTC MediaStreams that have been added to
     * the underlying WebRTC PeerConnection.
     * @type {Array<MediaStream>}
     * @private
     */
    this._addedStreams = [];

    /**
     * @typedef {Object} TPCGroupInfo
     * @property {string} semantics the SSRC groups semantics
     * @property {Array<number>} ssrcs group's SSRCs in order where the first
     * one is group's primary SSRC, the second one is secondary (RTX) and so
     * on...
     */
    /**
     * @typedef {Object} TPCSSRCInfo
     * @property {Array<number>} ssrcs an array which holds all track's SSRCs
     * @property {Array<TPCGroupInfo>} groups an array stores all track's SSRC
     * groups
     */
    /**
     * Holds the info about local track's SSRCs mapped per their
     * {@link JitsiLocalTrack.rtcId}
     * @type {Map<number, TPCSSRCInfo>}
     */
    this.localSSRCs = new Map();

    /**
     * The local ICE username fragment for this session.
     */
    this.localUfrag = null;

    /**
     * The remote ICE username fragment for this session.
     */
    this.remoteUfrag = null;

    /**
     * The signaling layer which operates this peer connection.
     * @type {SignalingLayer}
     */
    this.signalingLayer = signalingLayer;

    // SignalingLayer listeners. The handlers are bound once here so the same
    // references can be used later to unsubscribe.
    this._peerVideoTypeChanged = this._peerVideoTypeChanged.bind(this);
    this.signalingLayer.on(
        SignalingEvents.PEER_VIDEO_TYPE_CHANGED,
        this._peerVideoTypeChanged);

    this._peerMutedChanged = this._peerMutedChanged.bind(this);
    this.signalingLayer.on(
        SignalingEvents.PEER_MUTED_CHANGED,
        this._peerMutedChanged);
    this.options = options;

    // Make sure constraints is properly formatted in order to provide information about whether or not this
    // connection is P2P to rtcstats.
    const safeConstraints = constraints || {};

    safeConstraints.optional = safeConstraints.optional || [];

    // The `optional` parameter needs to be of type array, otherwise chrome will throw an error.
    // Firefox and Safari just ignore it.
    if (Array.isArray(safeConstraints.optional)) {
        safeConstraints.optional.push({ rtcStatsSFUP2P: this.isP2P });
    } else {
        logger.warn('Optional param is not an array, rtcstats p2p data is omitted.');
    }

    this.peerconnection = new RTCUtils.RTCPeerConnectionType(pcConfig, safeConstraints);

    this.tpcUtils = new TPCUtils(this);
    this.updateLog = [];
    this.stats = {};
    this.statsinterval = null;

    /**
     * Flag used to indicate if simulcast is turned off and a cap of 500 Kbps is applied on screensharing.
     */
    this._capScreenshareBitrate = this.options.capScreenshareBitrate;

    /**
     * Flag used to indicate if the browser is running in unified plan mode.
     */
    this._usesUnifiedPlan = options.usesUnifiedPlan;

    /**
     * Flag used to indicate if RTCRtpTransceiver#setCodecPreferences is to be used instead of SDP
     * munging for codec selection.
     */
    this._usesTransceiverCodecPreferences = browser.supportsCodecPreferences() && this._usesUnifiedPlan;
    this._usesTransceiverCodecPreferences
        && logger.info('Using RTCRtpTransceiver#setCodecPreferences for codec selection');

    /**
     * @type {number} The max number of stats to keep in this.stats. Limit to
     * 300 values, i.e. 5 minutes; set to 0 to disable
     */
    this.maxstats = options.maxstats;

    this.interop = new Interop();
    const Simulcast = require('@jitsi/sdp-simulcast');

    this.simulcast = new Simulcast(
        {
            numOfLayers: SIM_LAYER_RIDS.length,
            explodeRemoteSimulcast: false,
            usesUnifiedPlan: this._usesUnifiedPlan
        });
    this.sdpConsistency = new SdpConsistency(this.toString());

    /**
     * Munges local SDP provided to the Jingle Session in order to prevent from
     * sending SSRC updates on attach/detach and mute/unmute (for video).
     * @type {LocalSdpMunger}
     */
    this.localSdpMunger = new LocalSdpMunger(this, this.rtc.getLocalEndpointId());

    /**
     * TraceablePeerConnection uses RTC's eventEmitter
     * @type {EventEmitter}
     */
    this.eventEmitter = rtc.eventEmitter;
    this.rtxModifier = new RtxModifier();

    /**
     * The height constraint applied on the video sender. The default value is 2160 (4K) when layer suspension is
     * explicitly disabled.
     */
    this._senderVideoMaxHeight = 2160;

    // override as desired
    this.trace = (what, info) => {
        logger.debug(what, info);

        this.updateLog.push({
            time: new Date(),
            type: what,
            value: info || ''
        });
    };
    this.onicecandidate = null;
    this.peerconnection.onicecandidate = event => {
        this.trace(
            'onicecandidate',
            JSON.stringify(event.candidate, null, ' '));

        if (this.onicecandidate !== null) {
            this.onicecandidate(event);
        }
    };

    // Use track events when browser is running in unified plan mode and stream events in plan-b mode.
    if (this._usesUnifiedPlan) {
        this.onTrack = evt => {
            const stream = evt.streams[0];

            this._remoteTrackAdded(stream, evt.track, evt.transceiver);
            stream.addEventListener('removetrack', e => {
                this._remoteTrackRemoved(stream, e.track);
            });
        };
        this.peerconnection.addEventListener('track', this.onTrack);
    } else {
        this.peerconnection.onaddstream = event => this._remoteStreamAdded(event.stream);
        this.peerconnection.onremovestream = event => this._remoteStreamRemoved(event.stream);
    }
    this.onsignalingstatechange = null;
    this.peerconnection.onsignalingstatechange = event => {
        this.trace('onsignalingstatechange', this.signalingState);

        if (this.onsignalingstatechange !== null) {
            this.onsignalingstatechange(event);
        }
    };
    this.oniceconnectionstatechange = null;
    this.peerconnection.oniceconnectionstatechange = event => {
        this.trace('oniceconnectionstatechange', this.iceConnectionState);

        if (this.oniceconnectionstatechange !== null) {
            this.oniceconnectionstatechange(event);
        }
    };
    this.onnegotiationneeded = null;
    this.peerconnection.onnegotiationneeded = event => {
        this.trace('onnegotiationneeded');

        if (this.onnegotiationneeded !== null) {
            this.onnegotiationneeded(event);
        }
    };
    this.onconnectionstatechange = null;
    this.peerconnection.onconnectionstatechange = event => {
        this.trace('onconnectionstatechange', this.connectionState);

        if (this.onconnectionstatechange !== null) {
            this.onconnectionstatechange(event);
        }
    };
    this.ondatachannel = null;
    this.peerconnection.ondatachannel = event => {
        this.trace('ondatachannel');

        if (this.ondatachannel !== null) {
            this.ondatachannel(event);
        }
    };

    if (this.maxstats) {
        this.statsinterval = window.setInterval(() => {
            this.getStats().then(stats => {
                // Legacy (callback-based) stats expose a result() method,
                // spec-compliant stats are an iterable report.
                // NOTE(review): if getStats() resolves with a nullish value the
                // else branch would throw inside the promise — confirm callers.
                if (typeof stats?.result === 'function') {
                    const results = stats.result();

                    for (let i = 0; i < results.length; ++i) {
                        const res = results[i];

                        res.names().forEach(name => {
                            this._processStat(res, name, res.stat(name));
                        });
                    }
                } else {
                    stats.forEach(r => this._processStat(r, '', r));
                }
            });
        }, 1000);
    }
    logger.info(`Create new ${this}`);
}

/* eslint-enable max-params */
/**
 * Records a single stat sample into {@code this.stats}, keyed by the report id
 * and the stat name, capping the per-key history at {@code this.maxstats}
 * samples.
 *
 * @param report the current stats report.
 * @param name the name of the report, if available
 * @param statValue the value to add.
 * @private
 */
TraceablePeerConnection.prototype._processStat
    = function(report, name, statValue) {
        const key = `${report.id}-${name}`;
        const now = new Date();
        let entry = this.stats[key];

        if (!entry) {
            entry = {
                startTime: now,
                endTime: now,
                values: [],
                times: []
            };
            this.stats[key] = entry;
        }
        entry.values.push(statValue);
        entry.times.push(now.getTime());

        // Drop the oldest sample once the history exceeds the cap so memory
        // stays bounded.
        if (entry.values.length > this.maxstats) {
            entry.values.shift();
            entry.times.shift();
        }
        entry.endTime = now;
    };
/**
 * Returns a string representation of a SessionDescription object.
 *
 * @param {RTCSessionDescription} description - the description to dump.
 * @returns {string} the type and the SDP text, or an empty string when the
 * description is null/undefined.
 */
const dumpSDP = function(description) {
    if (description === null || typeof description === 'undefined') {
        return '';
    }

    return `type: ${description.type}\r\n${description.sdp}`;
};
/**
 * Forwards the {@link peerconnection.iceConnectionState} state except that it
 * will convert "completed" into "connected" where both mean that the ICE has
 * succeeded and is up and running. We never see "completed" state for
 * the JVB connection, but it started appearing for the P2P one. This method
 * allows to adapt old logic to this new situation.
 * @return {string}
 */
TraceablePeerConnection.prototype.getConnectionState = function() {
    const { iceConnectionState } = this.peerconnection;

    // 'completed' and 'connected' both mean ICE succeeded - normalize them.
    return iceConnectionState === 'completed' ? 'connected' : iceConnectionState;
};
/**
 * Obtains the media direction for given {@link MediaType}. The method takes
 * into account whether or not there are any local tracks for media and
 * the {@link audioTransferActive} and {@link videoTransferActive} flags.
 *
 * @param {MediaType} mediaType - the media type to compute the direction for.
 * @param {boolean} isAddOperation - whether the direction is to be calculated
 * after a source-add action.
 * @return {string} one of the SDP direction constants ('sendrecv', 'recvonly'
 * etc.) which should be used when setting local description on the peer
 * connection.
 * @private
 */
TraceablePeerConnection.prototype.getDesiredMediaDirection = function(mediaType, isAddOperation = false) {
    const hasLocalSource = this.hasAnyTracksOfType(mediaType);

    if (this._usesUnifiedPlan) {
        if (isAddOperation) {
            return hasLocalSource ? MediaDirection.SENDRECV : MediaDirection.SENDONLY;
        }

        return hasLocalSource ? MediaDirection.RECVONLY : MediaDirection.INACTIVE;
    }

    // Plan-b: the per-media-type transfer-active flag gates the direction.
    const mediaTransferActive = mediaType === MediaType.AUDIO
        ? this.audioTransferActive
        : this.videoTransferActive;

    if (!mediaTransferActive) {
        return MediaDirection.INACTIVE;
    }

    return hasLocalSource ? MediaDirection.SENDRECV : MediaDirection.RECVONLY;
};
/**
 * Returns the list of RTCRtpReceivers created for the source of the given media type associated with
 * the set of remote endpoints specified.
 *
 * @param {Array<string>} endpoints list of the endpoints
 * @param {string} mediaType 'audio' or 'video'
 * @returns {Array<RTCRtpReceiver>} list of receivers created by the peerconnection.
 */
TraceablePeerConnection.prototype._getReceiversByEndpointIds = function(endpoints, mediaType) {
    // Collect the remote tracks owned by the given endpoints.
    const tracks = endpoints.reduce(
        (acc, endpoint) => acc.concat(this.getRemoteTracks(endpoint, mediaType)), []);

    // The ids of the MediaStreamTracks associated with those remote tracks.
    const trackIds = tracks.map(remote => remote.track?.id);

    // NOTE .find (rather than .includes) is used deliberately: a receiver
    // whose track id is undefined must never match an undefined entry.
    return this.peerconnection.getReceivers()
        .filter(receiver => receiver.track
            && receiver.track.kind === mediaType
            && trackIds.find(trackId => trackId === receiver.track.id));
};
/**
 * Tells whether or not this TPC instance is using Simulcast.
 *
 * @return {boolean} true if simulcast is enabled and active or
 * false if it's turned off.
 */
TraceablePeerConnection.prototype.isSimulcastOn = function() {
    const { disableSimulcast } = this.options;

    return !disableSimulcast;
};
/**
 * Handles {@link SignalingEvents.PEER_VIDEO_TYPE_CHANGED}
 *
 * @param {string} endpointId the video owner's ID (MUC nickname)
 * @param {VideoType} videoType the new value
 * @private
 */
TraceablePeerConnection.prototype._peerVideoTypeChanged = function(
        endpointId,
        videoType) {
    // Without an owner id we could end up mutating a random track.
    if (!endpointId) {
        logger.error(`${this} No endpointID on peerVideoTypeChanged`);

        return;
    }
    const [ videoTrack ] = this.getRemoteTracks(endpointId, MediaType.VIDEO);

    // NOTE 1 track per media type is assumed
    videoTrack && videoTrack._setVideoType(videoType);
};
/**
 * Handles remote track mute / unmute events.
 *
 * @param {string} endpointId the track owner's identifier (MUC nickname)
 * @param {MediaType} mediaType "audio" or "video"
 * @param {boolean} isMuted the new mute state
 * @private
 */
TraceablePeerConnection.prototype._peerMutedChanged = function(
        endpointId,
        mediaType,
        isMuted) {
    // Without an owner id we would act on all remote tracks.
    if (!endpointId) {
        logger.error(`${this} On peerMuteChanged - no endpoint ID`);

        return;
    }
    const [ track ] = this.getRemoteTracks(endpointId, mediaType);

    // NOTE 1 track per media type is assumed
    track && track.setMute(isMuted);
};
/**
 * Obtains audio levels of the remote audio tracks by getting the source information on the RTCRtpReceivers.
 * The information relevant to the ssrc is updated each time a RTP packet containing the ssrc is received.
 *
 * @param {Array<string>} speakerList list of endpoint ids for which audio levels are to be gathered.
 * @returns {Object} containing ssrc and audio level information as a key-value pair.
 */
TraceablePeerConnection.prototype.getAudioLevels = function(speakerList = []) {
    const audioLevels = {};
    let audioReceivers;

    if (speakerList.length) {
        audioReceivers = this._getReceiversByEndpointIds(speakerList, MediaType.AUDIO);
    } else {
        audioReceivers = this.peerconnection.getReceivers()
            .filter(receiver => receiver.track && receiver.track.kind === MediaType.AUDIO && receiver.track.enabled);
    }

    for (const receiver of audioReceivers) {
        const sources = receiver.getSynchronizationSources();

        if (sources && sources.length) {
            // As per spec, this audiolevel is a value between 0..1 (linear), where 1.0
            // represents 0 dBov, 0 represents silence, and 0.5 represents approximately
            // 6 dBSPL change in the sound pressure level from 0 dBov.
            // https://www.w3.org/TR/webrtc/#dom-rtcrtpcontributingsource-audiolevel
            audioLevels[sources[0].source] = sources[0].audioLevel;
        }
    }

    return audioLevels;
};
/**
 * Obtains local tracks for given {@link MediaType}. If the mediaType
 * argument is omitted the list of all local tracks will be returned.
 *
 * @param {MediaType} [mediaType] - optional media type filter.
 * @return {Array<JitsiLocalTrack>}
 */
TraceablePeerConnection.prototype.getLocalTracks = function(mediaType) {
    const allTracks = [ ...this.localTracks.values() ];

    if (mediaType === undefined) {
        return allTracks;
    }

    return allTracks.filter(track => track.getType() === mediaType);
};
/**
 * Retrieves the local video track.
 *
 * @returns {JitsiLocalTrack|undefined} - local video track.
 */
TraceablePeerConnection.prototype.getLocalVideoTrack = function() {
    const [ videoTrack ] = this.getLocalTracks(MediaType.VIDEO);

    return videoTrack;
};
/**
 * Checks whether or not this {@link TraceablePeerConnection} instance contains
 * any local tracks for given mediaType.
 *
 * @param {MediaType} mediaType - the media type to look for.
 * @return {boolean}
 * @throws {Error} when no mediaType is given.
 */
TraceablePeerConnection.prototype.hasAnyTracksOfType = function(mediaType) {
    if (!mediaType) {
        throw new Error('"mediaType" is required');
    }
    const matching = this.getLocalTracks(mediaType);

    return matching.length !== 0;
};
/**
 * Obtains all remote tracks currently known to this PeerConnection instance.
 *
 * @param {string} [endpointId] the track owner's identifier (MUC nickname)
 * @param {MediaType} [mediaType] the remote tracks will be filtered
 * by their media type if this argument is specified.
 * @return {Array<JitsiRemoteTrack>}
 */
TraceablePeerConnection.prototype.getRemoteTracks = function(
        endpointId,
        mediaType) {
    const matching = [];
    const owners = endpointId ? [ endpointId ] : this.remoteTracks.keys();

    for (const owner of owners) {
        const trackMap = this.remoteTracks.get(owner);

        // A requested endpoint may have no tracks registered at all.
        if (trackMap) {
            for (const [ type, mediaTrack ] of trackMap.entries()) {
                if ((!mediaType || mediaType === type) && mediaTrack) {
                    matching.push(mediaTrack);
                }
            }
        }
    }

    return matching;
};
/**
 * Parses the remote description and returns the sdp lines of the sources associated with a remote participant.
 *
 * @param {string} id Endpoint id of the remote participant.
 * @returns {Array<string>} The sdp lines that have the ssrc information.
 */
TraceablePeerConnection.prototype.getRemoteSourceInfoByParticipant = function(id) {
    const removeSsrcInfo = [];
    const remoteTracks = this.getRemoteTracks(id);

    // No tracks known for this endpoint - nothing to collect.
    if (!remoteTracks?.length) {
        return removeSsrcInfo;
    }
    const primarySsrcs = remoteTracks.map(track => track.getSSRC());
    const sdp = new SDP(this.remoteDescription.sdp);

    primarySsrcs.forEach((ssrc, idx) => {
        // Scan every m-line: the same index in removeSsrcInfo accumulates all
        // the lines found for this primary ssrc.
        for (const media of sdp.media) {
            let lines = '';
            let ssrcLines = SDPUtil.findLines(media, `a=ssrc:${ssrc}`);

            if (ssrcLines.length) {
                if (!removeSsrcInfo[idx]) {
                    removeSsrcInfo[idx] = '';
                }

                // Check if there are any FID groups present for the primary ssrc.
                const fidLines = SDPUtil.findLines(media, `a=ssrc-group:FID ${ssrc}`);

                if (fidLines.length) {
                    // The secondary (RTX) ssrc is the third token of the FID
                    // group line; its a=ssrc lines are removed as well.
                    const secondarySsrc = fidLines[0].split(' ')[2];

                    lines += `${fidLines[0]}\r\n`;
                    ssrcLines = ssrcLines.concat(SDPUtil.findLines(media, `a=ssrc:${secondarySsrc}`));
                }
                // The ssrc attribute lines are emitted first, followed by the
                // group line(s) collected in 'lines'.
                removeSsrcInfo[idx] += `${ssrcLines.join('\r\n')}\r\n`;
                removeSsrcInfo[idx] += lines;
            }
        }
    });

    return removeSsrcInfo;
};
/**
 * Returns the target bitrates configured for the local video source.
 *
 * @returns {Object} the codec-specific bitrates when configured, otherwise the
 * full bitrate configuration.
 */
TraceablePeerConnection.prototype.getTargetVideoBitrates = function() {
    const { videoBitrates } = this.tpcUtils;
    const codec = this.getConfiguredVideoCodec();

    return videoBitrates[codec.toUpperCase()] || videoBitrates;
};
/**
 * Tries to find {@link JitsiTrack} for given SSRC number. It will search both
 * local and remote tracks bound to this instance.
 *
 * @param {number} ssrc - the SSRC to look for.
 * @return {JitsiTrack|null}
 * @throws {Error} when the given ssrc is not a number.
 */
TraceablePeerConnection.prototype.getTrackBySSRC = function(ssrc) {
    if (typeof ssrc !== 'number') {
        throw new Error(`SSRC ${ssrc} is not a number`);
    }

    const localTrack
        = [ ...this.localTracks.values() ]
            .find(track => this.getLocalSSRC(track) === ssrc);

    if (localTrack) {
        return localTrack;
    }

    const remoteTrack = this.getRemoteTracks().find(track => track.getSSRC() === ssrc);

    return remoteTrack || null;
};
/**
 * Tries to find SSRC number for given {@link JitsiTrack} id. It will search
 * both local and remote tracks bound to this instance.
 *
 * @param {string} id - the WebRTC MediaStreamTrack id.
 * @return {number|null}
 */
TraceablePeerConnection.prototype.getSsrcByTrackId = function(id) {
    const matchesId = track => track.getTrack().id === id;

    const localTrack = this.getLocalTracks().find(matchesId);

    if (localTrack) {
        return this.getLocalSSRC(localTrack);
    }

    const remoteTrack = this.getRemoteTracks().find(matchesId);

    return remoteTrack ? remoteTrack.getSSRC() : null;
};
/**
 * Called when new remote MediaStream is added to the PeerConnection.
 *
 * @param {MediaStream} stream the WebRTC MediaStream for remote participant
 */
TraceablePeerConnection.prototype._remoteStreamAdded = function(stream) {
    const streamId = RTC.getStreamID(stream);

    if (!RTC.isUserStreamById(streamId)) {
        logger.info(`${this} ignored remote 'stream added' event for non-user stream[id=${streamId}]`);

        return;
    }

    // Bind 'addtrack'/'removetrack' event handlers
    if (browser.isChromiumBased()) {
        stream.onaddtrack = event => this._remoteTrackAdded(stream, event.track);
        stream.onremovetrack = event => this._remoteTrackRemoved(stream, event.track);
    }

    // Surface each track of the stream individually, audio tracks first.
    for (const track of [ ...stream.getAudioTracks(), ...stream.getVideoTracks() ]) {
        this._remoteTrackAdded(stream, track);
    }
};
/**
 * Called on "track added" and "stream added" PeerConnection events (because we
 * handle streams on per track basis). Finds the owner and the SSRC for
 * the track and passes that to ChatRoom for further processing.
 *
 * @param {MediaStream} stream the WebRTC MediaStream instance which is
 * the parent of the track
 * @param {MediaStreamTrack} track the WebRTC MediaStreamTrack added for remote
 * participant.
 * @param {RTCRtpTransceiver} transceiver the WebRTC transceiver that is created
 * for the remote participant in unified plan.
 */
TraceablePeerConnection.prototype._remoteTrackAdded = function(stream, track, transceiver = null) {
    const streamId = RTC.getStreamID(stream);
    const mediaType = track.kind;

    // JVB streams that don't belong to a user (e.g. mixed streams) are ignored;
    // on a P2P connection every stream is a user stream.
    if (!this.isP2P && !RTC.isUserStreamById(streamId)) {
        logger.info(`${this} ignored remote 'stream added' event for non-user stream[id=${streamId}]`);

        return;
    }
    logger.info(`${this} adding remote track for stream[id=${streamId},type=${mediaType}]`);

    // look up an associated JID for a stream id
    if (!mediaType) {
        GlobalOnErrorHandler.callErrorHandler(
            new Error(
                `MediaType undefined for remote track, stream id: ${streamId}`
            ));

        // Abort
        return;
    }

    const remoteSDP = this._usesUnifiedPlan
        ? new SDP(this.peerconnection.remoteDescription.sdp)
        : new SDP(this.remoteDescription.sdp);
    let mediaLines;

    // In unified plan mode, find the matching mline using 'mid' if it's available, otherwise use the
    // 'msid' attribute of the stream.
    if (this._usesUnifiedPlan) {
        if (transceiver && transceiver.mid) {
            const mid = transceiver.mid;

            mediaLines = remoteSDP.media.filter(mls => SDPUtil.findLine(mls, `a=mid:${mid}`));
        } else {
            mediaLines = remoteSDP.media.filter(mls => {
                const msid = SDPUtil.findLine(mls, 'a=msid:');

                // 'a=msid:'.length === 7 - the stream id is the first token
                // after the attribute name.
                return typeof msid !== 'undefined' && streamId === msid.substring(7).split(' ')[0];
            });
        }
    } else {
        // Plan-b has a single m-line per media type.
        mediaLines = remoteSDP.media.filter(mls => mls.startsWith(`m=${mediaType}`));
    }

    if (!mediaLines.length) {
        GlobalOnErrorHandler.callErrorHandler(
            new Error(`No media lines found in remote SDP for remote stream[id=${streamId},type=${mediaType}]`));

        // Abort
        return;
    }

    let ssrcLines = SDPUtil.findLines(mediaLines[0], 'a=ssrc:');

    // Only keep the ssrc lines that belong to this stream's msid.
    ssrcLines
        = ssrcLines.filter(line => line.indexOf(`msid:${streamId}`) !== -1);
    if (!ssrcLines.length) {
        GlobalOnErrorHandler.callErrorHandler(
            new Error(`No SSRC lines found in remote SDP for remote stream[msid=${streamId},type=${mediaType}]`));

        // Abort
        return;
    }

    // FIXME the length of ssrcLines[0] not verified, but it will fail
    // with global error handler anyway
    const ssrcStr = ssrcLines[0].substring(7).split(' ')[0];
    const trackSsrc = Number(ssrcStr);
    const ownerEndpointId = this.signalingLayer.getSSRCOwner(trackSsrc);

    // NOTE(review): global isNaN (coercing) is used here; trackSsrc is already
    // a Number so the behavior matches Number.isNaN.
    if (isNaN(trackSsrc) || trackSsrc < 0) {
        GlobalOnErrorHandler.callErrorHandler(
            new Error(
                `Invalid SSRC for remote stream[ssrc=${trackSsrc},id=${streamId},type=${mediaType}]`));

        // Abort
        return;
    } else if (!ownerEndpointId) {
        GlobalOnErrorHandler.callErrorHandler(
            new Error(
                `No SSRC owner known for remote stream[ssrc=${trackSsrc},id=${streamId},type=${mediaType}]`));

        // Abort
        return;
    }

    let sourceName;

    if (FeatureFlags.isSourceNameSignalingEnabled()) {
        sourceName = this.signalingLayer.getTrackSourceName(trackSsrc);

        // If source name was not signaled, we'll generate one which allows testing signaling
        // when mixing legacy(mobile) with new clients.
        if (!sourceName) {
            sourceName = getSourceNameForJitsiTrack(ownerEndpointId, mediaType, 0);
        }
    }

    // eslint-disable-next-line no-undef
    logger.info(`${this} creating remote track[endpoint=${ownerEndpointId},ssrc=${trackSsrc},`
        + `type=${mediaType},sourceName=${sourceName}]`);

    const peerMediaInfo
        = this.signalingLayer.getPeerMediaInfo(ownerEndpointId, mediaType);

    if (!peerMediaInfo) {
        GlobalOnErrorHandler.callErrorHandler(
            new Error(`${this}: no peer media info available for ${ownerEndpointId}`));

        return;
    }

    const muted = peerMediaInfo.muted;
    const videoType = peerMediaInfo.videoType; // can be undefined

    // eslint-disable-next-line no-undef
    this._createRemoteTrack(
        ownerEndpointId, stream, track, mediaType, videoType, trackSsrc, muted, sourceName);
};
// FIXME cleanup params
/* eslint-disable max-params */

/**
 * Initializes a new JitsiRemoteTrack instance with the data provided by
 * the signaling layer and SDP.
 *
 * @param {string} ownerEndpointId the owner's endpoint ID (MUC nickname)
 * @param {MediaStream} stream the WebRTC stream instance
 * @param {MediaStreamTrack} track the WebRTC track instance
 * @param {MediaType} mediaType the track's type of the media
 * @param {VideoType} [videoType] the track's type of the video (if applicable)
 * @param {number} ssrc the track's main SSRC number
 * @param {boolean} muted the initial muted status
 * @param {String} sourceName the track's source name
 */
TraceablePeerConnection.prototype._createRemoteTrack = function(
        ownerEndpointId,
        stream,
        track,
        mediaType,
        videoType,
        ssrc,
        muted,
        sourceName) {
    let tracksByType = this.remoteTracks.get(ownerEndpointId);

    if (!tracksByType) {
        tracksByType = new Map();
        this.remoteTracks.set(ownerEndpointId, tracksByType);
    }

    const existingTrack = tracksByType.get(mediaType);

    if (existingTrack) {
        if (existingTrack.getTrack() === track) {
            // Ignore duplicated event which can originate either from 'onStreamAdded' or 'onTrackAdded'.
            logger.info(
                `${this} ignored duplicated track event for track[endpoint=${ownerEndpointId},type=${mediaType}]`);

            return;
        }
        logger.error(`${this} received a second remote track for track[endpoint=${ownerEndpointId},type=${mediaType}]`
            + 'deleting the existing track');

        // The existing track needs to be removed here. We can get here when Jicofo reverses the order of source-add
        // and source-remove messages. Ideally, when a remote endpoint changes source, like switching devices, it
        // sends a source-remove (for old ssrc) followed by a source-add (for new ssrc) and Jicofo then should forward
        // these two messages to all the other endpoints in the conference in the same order. However, sometimes,
        // these messages arrive at the client in the reverse order resulting in two remote tracks (of same media
        // type) being created and in case of video, a black strip (that of the first track which has ended) appears
        // over the live track obscuring it. Removing the existing track when that happens will fix this issue.
        this._remoteTrackRemoved(existingTrack.getOriginalStream(), existingTrack.getTrack());
    }

    const remoteTrack = new JitsiRemoteTrack(
        this.rtc,
        this.rtc.conference,
        ownerEndpointId,
        stream,
        track,
        mediaType,
        videoType,
        ssrc,
        muted,
        this.isP2P,
        sourceName);

    tracksByType.set(mediaType, remoteTrack);
    this.eventEmitter.emit(RTCEvents.REMOTE_TRACK_ADDED, remoteTrack, this);
};

/* eslint-enable max-params */
/**
 * Handles remote stream removal.
 *
 * @param stream the WebRTC MediaStream object which is being removed from the
 * PeerConnection
 */
TraceablePeerConnection.prototype._remoteStreamRemoved = function(stream) {
    if (!RTC.isUserStream(stream)) {
        const id = RTC.getStreamID(stream);

        logger.info(`Ignored remote 'stream removed' event for stream[id=${id}]`);

        return;
    }

    // Remove every track of the stream, video tracks first.
    for (const track of [ ...stream.getVideoTracks(), ...stream.getAudioTracks() ]) {
        this._remoteTrackRemoved(stream, track);
    }
};
/**
 * Handles remote media track removal.
 *
 * @param {MediaStream} stream WebRTC MediaStream instance which is the parent
 * of the track.
 * @param {MediaStreamTrack} track the WebRTC MediaStreamTrack which has been
 * removed from the PeerConnection.
 */
TraceablePeerConnection.prototype._remoteTrackRemoved = function(
        stream,
        track) {
    const streamId = RTC.getStreamID(stream);
    const trackId = track && RTC.getTrackID(track);
    const reportError
        = message => GlobalOnErrorHandler.callErrorHandler(new Error(message));

    if (!RTC.isUserStreamById(streamId)) {
        logger.info(`${this} ignored remote 'stream removed' event for non-user stream[id=${streamId}]`);

        return;
    }
    logger.info(`${this} remote track removed stream[id=${streamId},trackId=${trackId}]`);

    if (!streamId) {
        reportError(`${this} remote track removal failed - no stream ID`);

        return;
    }
    if (!trackId) {
        reportError(`${this} remote track removal failed - no track ID`);

        return;
    }

    if (!this._removeRemoteTrackById(streamId, trackId)) {
        // NOTE this warning is always printed when user leaves the room,
        // because we remove remote tracks manually on MUC member left event,
        // before the SSRCs are removed by Jicofo. In most cases it is fine to
        // ignore this warning, but still it's better to keep it printed for
        // debugging purposes.
        //
        // We could change the behaviour to emit track removed only from here,
        // but the order of the events will change and consuming apps could
        // behave unexpectedly (the "user left" event would come before "track
        // removed" events).
        logger.warn(`${this} Removed track not found for stream[id=${streamId},trackId=${trackId}]`);
    }
};
/**
 * Finds a remote track by its stream and track ids.
 * @param {string} streamId the media stream id as defined by the WebRTC
 * @param {string} trackId the media track id as defined by the WebRTC
 * @return {JitsiRemoteTrack|undefined} the track's instance or
 * undefined if not found.
 * @private
 */
TraceablePeerConnection.prototype._getRemoteTrackById = function(streamId, trackId) {
    // Scan every endpoint's media-type -> track map and stop at the first hit.
    for (const tracksByType of this.remoteTracks.values()) {
        for (const candidate of tracksByType.values()) {
            // FIXME verify and try to use ===
            /* eslint-disable eqeqeq */
            const matches = candidate.getStreamId() == streamId
                && candidate.getTrackId() == trackId;
            /* eslint-enable eqeqeq */

            if (matches) {
                return candidate;
            }
        }
    }

    return undefined;
};
/**
 * Removes all JitsiRemoteTracks associated with given MUC nickname
 * (resource part of the JID). Returns array of removed tracks.
 *
 * @param {string} owner - The resource part of the MUC JID.
 * @returns {JitsiRemoteTrack[]} the removed tracks (at most one audio and one
 * video track; empty when no tracks are mapped for the owner).
 */
TraceablePeerConnection.prototype.removeRemoteTracks = function(owner) {
    const removedTracks = [];
    const remoteTracksMap = this.remoteTracks.get(owner);

    if (remoteTracksMap) {
        const removedAudioTrack = remoteTracksMap.get(MediaType.AUDIO);
        const removedVideoTrack = remoteTracksMap.get(MediaType.VIDEO);

        removedAudioTrack && removedTracks.push(removedAudioTrack);
        removedVideoTrack && removedTracks.push(removedVideoTrack);

        this.remoteTracks.delete(owner);
    }

    // Fixed: the log line was missing the closing ']' after the count.
    logger.debug(`${this} removed remote tracks[endpoint=${owner},count=${removedTracks.length}]`);

    return removedTracks;
};
/**
 * Removes and disposes given JitsiRemoteTrack instance. Emits
 * {@link RTCEvents.REMOTE_TRACK_REMOVED}.
 * @param {JitsiRemoteTrack} toBeRemoved
 */
TraceablePeerConnection.prototype._removeRemoteTrack = function(toBeRemoved) {
    toBeRemoved.dispose();

    const participantId = toBeRemoved.getParticipantId();
    const tracksByType = this.remoteTracks.get(participantId);

    // A missing endpoint map or a failed delete indicates inconsistent
    // bookkeeping - log it, but still emit the removal event below.
    if (tracksByType) {
        tracksByType.delete(toBeRemoved.getType())
            || logger.error(`${this} Failed to remove ${toBeRemoved} - type mapping messed up ?`);
    } else {
        logger.error(`${this} removeRemoteTrack: no remote tracks map for endpoint=${participantId}`);
    }

    this.eventEmitter.emit(RTCEvents.REMOTE_TRACK_REMOVED, toBeRemoved);
};
/**
 * Removes and disposes the JitsiRemoteTrack identified by given stream and
 * track ids.
 *
 * @param {string} streamId the media stream id as defined by the WebRTC
 * @param {string} trackId the media track id as defined by the WebRTC
 * @returns {JitsiRemoteTrack|undefined} the track which has been removed or
 * undefined if no track matching given stream and track ids was
 * found.
 */
TraceablePeerConnection.prototype._removeRemoteTrackById = function(streamId, trackId) {
    const track = this._getRemoteTrackById(streamId, trackId);

    track && this._removeRemoteTrack(track);

    return track;
};
/**
 * Returns a map with keys msid/mediaType and TrackSSRCInfo values.
 * Each TrackSSRCInfo has the shape { ssrcs: number[], groups: [], msid }.
 * For plan-b the keys are the msid values of the local sources; for
 * unified-plan the keys are the media types ('audio'/'video'), because only
 * the first audio/video m-line carries local-source SSRCs there.
 * @param {RTCSessionDescription} desc the local description.
 * @return {Map}
 */
TraceablePeerConnection.prototype._extractSSRCMap = function(desc) {
/**
 * Track SSRC infos mapped by stream ID (msid) or mediaType (unified-plan)
 * @type {Map}
 */
const ssrcMap = new Map();
/**
 * Groups mapped by primary SSRC number (the first SSRC of each ssrc-group)
 * @type {Map}
 */
const groupsMap = new Map();
// Guard: a missing/invalid description yields an empty map.
if (typeof desc !== 'object' || desc === null
|| typeof desc.sdp !== 'string') {
logger.warn('An empty description was passed as an argument');
return ssrcMap;
}
const session = transform.parse(desc.sdp);
if (!Array.isArray(session.media)) {
return ssrcMap;
}
let media = session.media;
// For unified plan clients, only the first audio and video mlines will have ssrcs for the local sources.
// The rest of the m-lines are for the recv-only sources, one for each remote source.
if (this._usesUnifiedPlan) {
media = [];
[ MediaType.AUDIO, MediaType.VIDEO ].forEach(mediaType => {
const mLine = session.media.find(m => m.type === mediaType);
mLine && media.push(mLine);
});
}
for (const mLine of media) {
if (!Array.isArray(mLine.ssrcs)) {
continue; // eslint-disable-line no-continue
}
// First pass: index every ssrc-group by its primary (first) SSRC so the
// groups can be attached to the matching TrackSSRCInfo entries below.
if (Array.isArray(mLine.ssrcGroups)) {
for (const group of mLine.ssrcGroups) {
if (typeof group.semantics !== 'undefined'
&& typeof group.ssrcs !== 'undefined') {
// Parse SSRCs and store as numbers
const groupSSRCs = group.ssrcs.split(' ').map(ssrcStr => parseInt(ssrcStr, 10));
const primarySSRC = groupSSRCs[0];
// Note that group.semantics is already present
// NOTE this mutates the parsed session: group.ssrcs is replaced
// with the numeric array, so the stored groups carry numbers.
group.ssrcs = groupSSRCs;
// eslint-disable-next-line max-depth
if (!groupsMap.has(primarySSRC)) {
groupsMap.set(primarySSRC, []);
}
groupsMap.get(primarySSRC).push(group);
}
}
}
let ssrcs = mLine.ssrcs;
// Filter the ssrcs with 'msid' attribute for plan-b clients and 'cname' for unified-plan clients.
ssrcs = this._usesUnifiedPlan
? ssrcs.filter(s => s.attribute === 'cname')
: ssrcs.filter(s => s.attribute === 'msid');
for (const ssrc of ssrcs) {
// Use the mediaType as key for the source map for unified plan clients since msids are not part of
// the standard and the unified plan SDPs do not have a proper msid attribute for the sources.
// Also the ssrcs for sources do not change for Unified plan clients since RTCRtpSender#replaceTrack is
// used for switching the tracks so it is safe to use the mediaType as the key for the TrackSSRCInfo map.
const key = this._usesUnifiedPlan ? mLine.type : ssrc.value;
const ssrcNumber = ssrc.id;
let ssrcInfo = ssrcMap.get(key);
if (!ssrcInfo) {
ssrcInfo = {
ssrcs: [],
groups: [],
msid: key
};
ssrcMap.set(key, ssrcInfo);
}
ssrcInfo.ssrcs.push(ssrcNumber);
// Attach every group whose primary SSRC is this one.
if (groupsMap.has(ssrcNumber)) {
const ssrcGroups = groupsMap.get(ssrcNumber);
for (const group of ssrcGroups) {
ssrcInfo.groups.push(group);
}
}
}
}
return ssrcMap;
};
/**
 * Takes a SessionDescription object and returns a "normalized" version.
 * Currently it takes care of ordering the a=ssrc lines and denoting receive
 * only SSRCs.
 *
 * @param {RTCSessionDescription} desc - the description to normalize.
 * @returns {RTCSessionDescription} a new description with the reordered
 * a=ssrc lines, or the input value unchanged when it carries no SDP string.
 */
const normalizePlanB = function(desc) {
    if (typeof desc !== 'object' || desc === null
        || typeof desc.sdp !== 'string') {
        logger.warn('An empty description was passed as an argument');

        return desc;
    }

    // Use the module-level 'transform' import directly - the previous local
    // require('sdp-transform') resolved to the same module and only shadowed
    // the import.
    const session = transform.parse(desc.sdp);

    if (typeof session !== 'undefined' && Array.isArray(session.media)) {
        session.media.forEach(mLine => {
            // Chrome appears to be picky about the order in which a=ssrc lines
            // are listed in an m-line when rtx is enabled (and thus there are
            // a=ssrc-group lines with FID semantics). Specifically if we have
            // "a=ssrc-group:FID S1 S2" and the "a=ssrc:S2" lines appear before
            // the "a=ssrc:S1" lines, SRD fails.
            // So, put SSRC which appear as the first SSRC in an FID ssrc-group
            // first.
            const firstSsrcs = [];

            if (Array.isArray(mLine.ssrcGroups)) {
                for (const group of mLine.ssrcGroups) {
                    if (group.semantics === 'FID'
                            && typeof group.ssrcs !== 'undefined') {
                        firstSsrcs.push(Number(group.ssrcs.split(' ')[0]));
                    }
                }
            }

            if (Array.isArray(mLine.ssrcs)) {
                // Stable-partition the a=ssrc lines: the primary FID SSRCs
                // first, everything else after, preserving relative order.
                // This replaces the old delete-then-compact loops which left
                // holes in the array.
                const isFidPrimary = line => typeof line === 'object'
                    && typeof line.id !== 'undefined'
                    && firstSsrcs.indexOf(line.id) >= 0;
                const newSsrcLines = [
                    ...mLine.ssrcs.filter(isFidPrimary),
                    ...mLine.ssrcs.filter(
                        line => typeof line !== 'undefined' && !isFidPrimary(line))
                ];

                mLine.ssrcs = replaceDefaultUnifiedPlanMsid(newSsrcLines);
            }
        });
    }

    return new RTCSessionDescription({
        type: desc.type,
        sdp: transform.write(session)
    });
};
/**
* Unified plan differentiates a remote track not associated with a stream using
* the msid "-", which can incorrectly trigger an onaddstream event in plan-b.
* For jitsi, these tracks are actually receive-only ssrcs. To prevent
* onaddstream from firing, remove the ssrcs with msid "-" except the cname
* line. Normally the ssrcs are not used by the client, as the bridge controls
* media flow, but keep one reference to the ssrc for the p2p case.
*
* @param {Array