
Remove support for legacy endpoint based signaling. (#2147)

* Remove support for legacy endpoint-based signaling and make source-name signaling the only signaling mode. The legacy screensharing mode is no longer supported. The client will still be able to process remote presence sent in the old format, to support interop with very old mobile clients and jigasi that do not support source-name signaling.
* Remove code related to presenter mode.
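
For clients that tracked connectivity through the removed conference-level event, a minimal migration sketch (assumed application code, not part of this commit; the exact callback signature is an assumption):

// Before: conference.on(JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED, handler)
// After: streaming status is reported per remote track.
conference.on(JitsiMeetJS.events.conference.TRACK_ADDED, track => {
    if (!track.isLocal()) {
        track.addEventListener(
            JitsiMeetJS.events.track.TRACK_STREAMING_STATUS_CHANGED,
            (jitsiTrack, status) => {
                // status is one of 'active', 'inactive', 'interrupted', 'restoring'.
                console.log(`${jitsiTrack.getSourceName()}: ${status}`);
            });
    }
});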
tags/v0.0.2
Jaya Allamsetty 1 year ago
commit 7b25768663
39 changed files with 200 additions and 2043 deletions
  1. JitsiConference.js (+14, -107)
  2. JitsiConferenceEvents.spec.ts (+0, -3)
  3. JitsiConferenceEvents.ts (+0, -17)
  4. JitsiMeetJS.ts (+0, -13)
  5. JitsiParticipant.js (+0, -24)
  6. modules/RTC/BridgeChannel.js (+8, -34)
  7. modules/RTC/JitsiLocalTrack.js (+3, -5)
  8. modules/RTC/JitsiRemoteTrack.js (+3, -8)
  9. modules/RTC/RTC.js (+3, -56)
  10. modules/RTC/TPCUtils.js (+2, -7)
  11. modules/RTC/TraceablePeerConnection.js (+31, -72)
  12. modules/connectivity/ParticipantConnectionStatus.js (+0, -900)
  13. modules/flags/FeatureFlags.js (+3, -39)
  14. modules/proxyconnection/ProxyConnectionPC.js (+0, -5)
  15. modules/qualitycontrol/ReceiveVideoController.js (+11, -32)
  16. modules/qualitycontrol/ReceiveVideoController.spec.js (+0, -33)
  17. modules/qualitycontrol/SendVideoController.js (+32, -59)
  18. modules/qualitycontrol/SendVideoController.spec.js (+0, -153)
  19. modules/sdp/LocalSdpMunger.js (+18, -23)
  20. modules/sdp/LocalSdpMunger.spec.js (+4, -26)
  21. modules/sdp/SDP.js (+1, -2)
  22. modules/sdp/SDPDiffer.js (+1, -2)
  23. modules/xmpp/ChatRoom.js (+0, -91)
  24. modules/xmpp/JingleHelperFunctions.js (+1, -2)
  25. modules/xmpp/JingleSessionPC.js (+10, -18)
  26. modules/xmpp/JingleSessionPC.spec.js (+0, -80)
  27. modules/xmpp/SignalingLayerImpl.js (+43, -98)
  28. modules/xmpp/SignalingLayerImpl.spec.js (+7, -106)
  29. modules/xmpp/xmpp.js (+5, -9)
  30. service/RTC/MediaType.spec.ts (+0, -1)
  31. service/RTC/MediaType.ts (+0, -5)
  32. service/RTC/RTCEvents.spec.ts (+0, -4)
  33. service/RTC/RTCEvents.ts (+0, -2)
  34. types/hand-crafted/JitsiConference.d.ts (+0, -1)
  35. types/hand-crafted/JitsiConferenceEvents.d.ts (+0, -1)
  36. types/hand-crafted/JitsiMeetJS.d.ts (+0, -2)
  37. types/hand-crafted/modules/RTC/RTC.d.ts (+0, -1)
  38. types/hand-crafted/modules/xmpp/ChatRoom.d.ts (+0, -1)
  39. types/hand-crafted/service/RTC/MediaType.d.ts (+0, -1)

JitsiConference.js (+14, -107)

@@ -17,10 +17,7 @@ import RTC from './modules/RTC/RTC';
 import { SS_DEFAULT_FRAME_RATE } from './modules/RTC/ScreenObtainer';
 import browser from './modules/browser';
 import ConnectionQuality from './modules/connectivity/ConnectionQuality';
-import IceFailedHandling
-    from './modules/connectivity/IceFailedHandling';
-import ParticipantConnectionStatusHandler
-    from './modules/connectivity/ParticipantConnectionStatus';
+import IceFailedHandling from './modules/connectivity/IceFailedHandling';
 import * as DetectionEvents from './modules/detection/DetectionEvents';
 import NoAudioSignalDetection from './modules/detection/NoAudioSignalDetection';
 import P2PDominantSpeakerDetection from './modules/detection/P2PDominantSpeakerDetection';
@@ -422,30 +419,12 @@ JitsiConference.prototype._init = function(options = {}) {
     if (!this.rtc) {
         this.rtc = new RTC(this, options);
         this.eventManager.setupRTCListeners();
-        if (FeatureFlags.isSourceNameSignalingEnabled()) {
-            this._registerRtcListeners(this.rtc);
-        }
+        this._registerRtcListeners(this.rtc);
     }
 
     this.receiveVideoController = new ReceiveVideoController(this, this.rtc);
     this.sendVideoController = new SendVideoController(this, this.rtc);
 
-    // Do not initialize ParticipantConnectionStatusHandler when source-name signaling is enabled.
-    if (!FeatureFlags.isSourceNameSignalingEnabled()) {
-        this.participantConnectionStatus
-        = new ParticipantConnectionStatusHandler(
-            this.rtc,
-            this,
-            {
-                // These options are not public API, leaving it here only as an entry point through config for tuning
-                // up purposes. Default values should be adjusted as soon as optimal values are discovered.
-                p2pRtcMuteTimeout: config._p2pConnStatusRtcMuteTimeout,
-                rtcMuteTimeout: config._peerConnStatusRtcMuteTimeout,
-                outOfLastNTimeout: config._peerConnStatusOutOfLastNTimeout
-            });
-        this.participantConnectionStatus.init();
-    }
-
     // Add the ability to enable callStats only on a percentage of users based on config.js settings.
     let enableCallStats = true;
 
@@ -807,11 +786,7 @@ JitsiConference.prototype._sendBridgeVideoTypeMessage = function(localtrack) {
         videoType = BridgeVideoType.DESKTOP_HIGH_FPS;
     }
 
-    if (FeatureFlags.isSourceNameSignalingEnabled() && localtrack) {
-        this.rtc.sendSourceVideoType(localtrack.getSourceName(), videoType);
-    } else if (!FeatureFlags.isSourceNameSignalingEnabled()) {
-        this.rtc.setVideoType(videoType);
-    }
+    localtrack && this.rtc.sendSourceVideoType(localtrack.getSourceName(), videoType);
 };
 
 /**
@@ -1129,7 +1104,7 @@ JitsiConference.prototype.addTrack = function(track) {
 
         // Currently, only adding multiple video streams of different video types is supported.
         // TODO - remove this limitation once issues with jitsi-meet trying to add multiple camera streams is fixed.
-        if (FeatureFlags.isMultiStreamSupportEnabled()
+        if (FeatureFlags.isMultiStreamSendSupportEnabled()
            && mediaType === MediaType.VIDEO
            && !localTracks.find(t => t.getVideoType() === track.getVideoType())) {
            const sourceName = getSourceNameForJitsiTrack(
@@ -1163,7 +1138,7 @@ JitsiConference.prototype.addTrack = function(track) {
             // Presence needs to be sent here for desktop track since we need the presence to reach the remote peer
             // before signaling so that a fake participant tile is created for screenshare. Otherwise, presence will
             // only be sent after a session-accept or source-add is ack'ed.
-            if (track.getVideoType() === VideoType.DESKTOP && FeatureFlags.isMultiStreamSupportEnabled()) {
+            if (track.getVideoType() === VideoType.DESKTOP && FeatureFlags.isMultiStreamSendSupportEnabled()) {
                 this._updateRoomPresence(this.getActiveMediaSession());
             }
         });
@@ -1301,22 +1276,20 @@ JitsiConference.prototype.replaceTrack = function(oldTrack, newTrack) {
     const mediaType = oldTrack?.getType() || newTrack?.getType();
     const newVideoType = newTrack?.getVideoType();
 
-    if (FeatureFlags.isMultiStreamSupportEnabled() && oldTrack && newTrack && oldVideoType !== newVideoType) {
+    if (FeatureFlags.isMultiStreamSendSupportEnabled() && oldTrack && newTrack && oldVideoType !== newVideoType) {
         throw new Error(`Replacing a track of videoType=${oldVideoType} with a track of videoType=${newVideoType} is`
             + ' not supported in this mode.');
     }
 
-    if (FeatureFlags.isSourceNameSignalingEnabled() && newTrack) {
-        if (oldTrack) {
-            newTrack.setSourceName(oldTrack.getSourceName());
-        } else {
-            const sourceName = getSourceNameForJitsiTrack(
+    if (newTrack) {
+        const sourceName = oldTrack
+            ? oldTrack.getSourceName()
+            : getSourceNameForJitsiTrack(
                 this.myUserId(),
                 mediaType,
                 this.getLocalTracks(mediaType)?.length);
 
-            newTrack.setSourceName(sourceName);
-        }
+        newTrack.setSourceName(sourceName);
     }
     const oldTrackBelongsToConference = this === oldTrack?.conference;
 
@@ -1422,7 +1395,7 @@ JitsiConference.prototype._setupNewTrack = function(newTrack) {
     }
 
     // Create a source name for this track if it doesn't exist.
-    if (FeatureFlags.isSourceNameSignalingEnabled() && !newTrack.getSourceName()) {
+    if (!newTrack.getSourceName()) {
         const sourceName = getSourceNameForJitsiTrack(
             this.myUserId(),
             mediaType,
@@ -1455,27 +1428,10 @@ JitsiConference.prototype._setupNewTrack = function(newTrack) {
 JitsiConference.prototype._setNewVideoType = function(track) {
     let videoTypeChanged = false;
 
-    if (FeatureFlags.isSourceNameSignalingEnabled() && track) {
+    if (track) {
         videoTypeChanged = this._signalingLayer.setTrackVideoType(track.getSourceName(), track.videoType);
     }
 
-    if (!FeatureFlags.isMultiStreamSupportEnabled()) {
-        const videoTypeTagName = 'videoType';
-
-        // If track is missing we revert to default type Camera, the case where we screenshare and
-        // we return to be video muted.
-        const trackVideoType = track ? track.videoType : VideoType.CAMERA;
-
-        // If video type is camera and there is no videoType in presence, we skip adding it, as this is the default one
-        if (trackVideoType !== VideoType.CAMERA || this.room.getFromPresence(videoTypeTagName)) {
-            // We will not use this.sendCommand here to avoid sending the presence immediately, as later we may also
-            // set the mute status.
-            const legacyTypeChanged = this.room.addOrReplaceInPresence(videoTypeTagName, { value: trackVideoType });
-
-            videoTypeChanged = videoTypeChanged || legacyTypeChanged;
-        }
-    }
-
     return videoTypeChanged;
 };
 
@@ -1490,28 +1446,10 @@ JitsiConference.prototype._setNewVideoType = function(track) {
 JitsiConference.prototype._setTrackMuteStatus = function(mediaType, localTrack, isMuted) {
     let presenceChanged = false;
 
-    if (FeatureFlags.isSourceNameSignalingEnabled() && localTrack) {
+    if (localTrack) {
         presenceChanged = this._signalingLayer.setTrackMuteStatus(localTrack.getSourceName(), isMuted);
     }
 
-    // Add the 'audioMuted' and 'videoMuted' tags when source name signaling is enabled for backward compatibility.
-    // It won't be used anymore when multiple stream support is enabled.
-    if (!FeatureFlags.isMultiStreamSupportEnabled()) {
-        let audioMuteChanged, videoMuteChanged;
-
-        if (!this.room) {
-            return false;
-        }
-
-        if (mediaType === MediaType.AUDIO) {
-            audioMuteChanged = this.room.addAudioInfoToPresence(isMuted);
-        } else {
-            videoMuteChanged = this.room.addVideoInfoToPresence(isMuted);
-        }
-
-        presenceChanged = presenceChanged || audioMuteChanged || videoMuteChanged;
-    }
-
     return presenceChanged;
 };
 
@@ -1705,20 +1643,6 @@ JitsiConference.prototype.setLastN = function(lastN) {
     }
 };
 
-/**
- * Checks if the participant given by participantId is currently included in
- * the last N.
- * @param {string} participantId the identifier of the participant we would
- * like to check.
- * @return {boolean} true if the participant with id is in the last N set or
- * if there's no last N set, false otherwise.
- * @deprecated this method should never be used to figure out the UI, but
- * {@link ParticipantConnectionStatus} should be used instead.
- */
-JitsiConference.prototype.isInLastN = function(participantId) {
-    return this.rtc.isInLastN(participantId);
-};
-
 /**
  * @return Array<JitsiParticipant> an array of all participants in this
  * conference.
@@ -3691,8 +3615,6 @@ JitsiConference.prototype._updateRoomPresence = function(jingleSession, ctx) {
     let presenceChanged = false;
     let muteStatusChanged, videoTypeChanged;
     const localTracks = jingleSession.peerconnection.getLocalTracks();
-    const localAudioTracks = localTracks.filter(track => track.getType() === MediaType.AUDIO);
-    const localVideoTracks = localTracks.filter(track => track.getType() === MediaType.VIDEO);
 
     // Set presence for all the available local tracks.
     for (const track of localTracks) {
@@ -3703,21 +3625,6 @@ JitsiConference.prototype._updateRoomPresence = function(jingleSession, ctx) {
         presenceChanged = presenceChanged || muteStatusChanged || videoTypeChanged;
     }
 
-    // Set the presence in the legacy format if there are no local tracks and multi stream support is not enabled.
-    if (!FeatureFlags.isMultiStreamSupportEnabled()) {
-        let audioMuteStatusChanged, videoMuteStatusChanged;
-
-        if (!localAudioTracks?.length) {
-            audioMuteStatusChanged = this._setTrackMuteStatus(MediaType.AUDIO, undefined, true);
-        }
-        if (!localVideoTracks?.length) {
-            videoMuteStatusChanged = this._setTrackMuteStatus(MediaType.VIDEO, undefined, true);
-            videoTypeChanged = this._setNewVideoType();
-        }
-
-        presenceChanged = presenceChanged || audioMuteStatusChanged || videoMuteStatusChanged || videoTypeChanged;
-    }
-
     presenceChanged && this.room.sendPresence();
 };
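
getSourceNameForJitsiTrack, used throughout the changes above, derives a name from the endpoint ID, the media type and the track index; the convention is understood to be the following (illustrative note, not part of this diff):

// e.g. getSourceNameForJitsiTrack('abcd1234', MediaType.VIDEO, 0) -> 'abcd1234-v0'
//      getSourceNameForJitsiTrack('abcd1234', MediaType.AUDIO, 0) -> 'abcd1234-a0'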
 

JitsiConferenceEvents.spec.ts (+0, -3)

@@ -39,7 +39,6 @@ describe( "/JitsiConferenceEvents members", () => {
         NOISY_MIC,
         NON_PARTICIPANT_MESSAGE_RECEIVED,
         PRIVATE_MESSAGE_RECEIVED,
-        PARTICIPANT_CONN_STATUS_CHANGED,
         PARTCIPANT_FEATURES_CHANGED,
         PARTICIPANT_PROPERTY_CHANGED,
         P2P_STATUS,
@@ -117,7 +116,6 @@ describe( "/JitsiConferenceEvents members", () => {
         expect( NOISY_MIC ).toBe( 'conference.noisy_mic' );
         expect( NON_PARTICIPANT_MESSAGE_RECEIVED ).toBe( 'conference.non_participant_message_received' );
         expect( PRIVATE_MESSAGE_RECEIVED ).toBe( 'conference.privateMessageReceived' );
-        expect( PARTICIPANT_CONN_STATUS_CHANGED ).toBe( 'conference.participant_conn_status_changed' );
         expect( PARTCIPANT_FEATURES_CHANGED ).toBe( 'conference.partcipant_features_changed' );
         expect( PARTICIPANT_PROPERTY_CHANGED ).toBe( 'conference.participant_property_changed' );
         expect( P2P_STATUS ).toBe( 'conference.p2pStatus' );
@@ -193,7 +191,6 @@ describe( "/JitsiConferenceEvents members", () => {
         expect( JitsiConferenceEvents.NOISY_MIC ).toBe( 'conference.noisy_mic' );
         expect( JitsiConferenceEvents.NON_PARTICIPANT_MESSAGE_RECEIVED ).toBe( 'conference.non_participant_message_received' );
         expect( JitsiConferenceEvents.PRIVATE_MESSAGE_RECEIVED ).toBe( 'conference.privateMessageReceived' );
-        expect( JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED ).toBe( 'conference.participant_conn_status_changed' );
         expect( JitsiConferenceEvents.PARTCIPANT_FEATURES_CHANGED ).toBe( 'conference.partcipant_features_changed' );
         expect( JitsiConferenceEvents.PARTICIPANT_PROPERTY_CHANGED ).toBe( 'conference.participant_property_changed' );
         expect( JitsiConferenceEvents.P2P_STATUS ).toBe( 'conference.p2pStatus' );

JitsiConferenceEvents.ts (+0, -17)

@@ -223,22 +223,6 @@ export enum JitsiConferenceEvents {
      */
     PRIVATE_MESSAGE_RECEIVED = 'conference.privateMessageReceived',
 
-    /**
-     * Event fired when JVB sends notification about interrupted/restored user's
-     * ICE connection status or we detect local problem with the video track.
-     * First argument is the ID of the participant and
-     * the seconds is a string indicating if the connection is currently
-     * - active - the connection is active
-     * - inactive - the connection is inactive, was intentionally interrupted by
-     * the bridge
-     * - interrupted - a network problem occurred
-     * - restoring - the connection was inactive and is restoring now
-     *
-     * The current status value can be obtained by calling
-     * JitsiParticipant.getConnectionStatus().
-     */
-    PARTICIPANT_CONN_STATUS_CHANGED = 'conference.participant_conn_status_changed',
-
     /**
      * Indicates that the features of the participant has been changed.
      * TODO: there is a spelling mistake in this event name and associated constants
@@ -507,7 +491,6 @@ export const NO_AUDIO_INPUT = JitsiConferenceEvents.NO_AUDIO_INPUT;
 export const NOISY_MIC = JitsiConferenceEvents.NOISY_MIC;
 export const NON_PARTICIPANT_MESSAGE_RECEIVED = JitsiConferenceEvents.NON_PARTICIPANT_MESSAGE_RECEIVED;
 export const PRIVATE_MESSAGE_RECEIVED = JitsiConferenceEvents.PRIVATE_MESSAGE_RECEIVED;
-export const PARTICIPANT_CONN_STATUS_CHANGED = JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED;
 export const PARTCIPANT_FEATURES_CHANGED = JitsiConferenceEvents.PARTCIPANT_FEATURES_CHANGED;
 export const PARTICIPANT_PROPERTY_CHANGED = JitsiConferenceEvents.PARTICIPANT_PROPERTY_CHANGED;
 export const P2P_STATUS = JitsiConferenceEvents.P2P_STATUS;

JitsiMeetJS.ts (+0, -13)

@@ -14,8 +14,6 @@ import * as JitsiTranscriptionStatus from './JitsiTranscriptionStatus';
 import RTC from './modules/RTC/RTC';
 import browser from './modules/browser';
 import NetworkInfo from './modules/connectivity/NetworkInfo';
-import { ParticipantConnectionStatus }
-    from './modules/connectivity/ParticipantConnectionStatus';
 import { TrackStreamingStatus } from './modules/connectivity/TrackStreamingStatus';
 import getActiveAudioDevice from './modules/detection/ActiveDeviceDetector';
 import * as DetectionEvents from './modules/detection/DetectionEvents';
@@ -83,11 +81,7 @@ interface IJitsiMeetJSOptions {
     enableWindowOnErrorHandler?: boolean;
     externalStorage?: Storage;
     flags?: {
-        enableUnifiedOnChrome?: boolean;
-        receiveMultipleVideoStreams?: boolean;
         runInLiteMode?: boolean;
-        sendMultipleVideoStreams?: boolean;
-        sourceNameSignaling?: boolean;
         ssrcRewritingEnabled?: boolean;
     }
 }
@@ -111,7 +105,6 @@ export default {
     ProxyConnectionService,
 
     constants: {
-        participantConnectionStatus: ParticipantConnectionStatus,
         recording: recordingConstants,
         sipVideoGW: VideoSIPGWConstants,
         transcriptionStatus: JitsiTranscriptionStatus,
@@ -142,12 +135,6 @@ export default {
         Statistics.init(options);
         const flags = options.flags || {};
 
-        // Multi-stream is supported only on endpoints running in Unified plan mode and the flag to disable unified
-        // plan also needs to be taken into consideration.
-        if (typeof options.enableUnifiedOnChrome !== 'undefined') {
-            flags.enableUnifiedOnChrome = options.enableUnifiedOnChrome;
-        }
-
         // Configure the feature flags.
         FeatureFlags.init(flags);
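
With the removed flags gone, an init() call after this change would only carry the remaining ones; a sketch (values illustrative):

JitsiMeetJS.init({
    flags: {
        // sourceNameSignaling, sendMultipleVideoStreams, receiveMultipleVideoStreams
        // and enableUnifiedOnChrome no longer exist as options.
        runInLiteMode: false,
        ssrcRewritingEnabled: false
    }
});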
 

JitsiParticipant.js (+0, -24)

@@ -3,8 +3,6 @@ import { Strophe } from 'strophe.js';
 
 
 import * as JitsiConferenceEvents from './JitsiConferenceEvents';
-import { ParticipantConnectionStatus }
-    from './modules/connectivity/ParticipantConnectionStatus';
 import { MediaType } from './service/RTC/MediaType';
 
 /**
@@ -40,7 +38,6 @@ export default class JitsiParticipant {
         this._status = status;
         this._hidden = hidden;
         this._statsID = statsID;
-        this._connectionStatus = ParticipantConnectionStatus.ACTIVE;
         this._properties = {};
         this._identity = identity;
         this._isReplacing = isReplacing;
@@ -81,27 +78,6 @@ export default class JitsiParticipant {
                         && jitsiTrack.isWebRTCTrackMuted()));
     }
 
-    /**
-     * Updates participant's connection status.
-     * @param {string} state the current participant connection state.
-     * {@link ParticipantConnectionStatus}.
-     * @private
-     */
-    _setConnectionStatus(status) {
-        this._connectionStatus = status;
-    }
-
-    /**
-     * Return participant's connectivity status.
-     *
-     * @returns {string} the connection status
-     * <tt>ParticipantConnectionStatus</tt> of the user.
-     * {@link ParticipantConnectionStatus}.
-     */
-    getConnectionStatus() {
-        return this._connectionStatus;
-    }
-
     /**
      * Sets the value of a property of this participant, and fires an event if
      * the value has changed.

modules/RTC/BridgeChannel.js (+8, -34)

@@ -2,7 +2,6 @@ import { getLogger } from '@jitsi/logger';
 
 import RTCEvents from '../../service/RTC/RTCEvents';
 import { createBridgeChannelClosedEvent } from '../../service/statistics/AnalyticsEvents';
-import FeatureFlags from '../flags/FeatureFlags';
 import Statistics from '../statistics/statistics';
 import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
 
@@ -358,26 +357,11 @@ export default class BridgeChannel {
 
                 break;
             }
-            case 'LastNEndpointsChangeEvent': {
-                if (!FeatureFlags.isSourceNameSignalingEnabled()) {
-                    // The new/latest list of last-n endpoint IDs (i.e. endpoints for which the bridge is sending
-                    // video).
-                    const lastNEndpoints = obj.lastNEndpoints;
-
-                    logger.info(`New forwarded endpoints: ${lastNEndpoints}`);
-                    emitter.emit(RTCEvents.LASTN_ENDPOINT_CHANGED, lastNEndpoints);
-                }
-
-                break;
-            }
             case 'ForwardedSources': {
-                if (FeatureFlags.isSourceNameSignalingEnabled()) {
-                    // The new/latest list of forwarded sources
-                    const forwardedSources = obj.forwardedSources;
+                const forwardedSources = obj.forwardedSources;
 
-                    logger.info(`New forwarded sources: ${forwardedSources}`);
-                    emitter.emit(RTCEvents.FORWARDED_SOURCES_CHANGED, forwardedSources);
-                }
+                logger.info(`New forwarded sources: ${forwardedSources}`);
+                emitter.emit(RTCEvents.FORWARDED_SOURCES_CHANGED, forwardedSources);
 
                 break;
             }
@@ -391,21 +375,11 @@ export default class BridgeChannel {
                 break;
             }
             case 'SenderSourceConstraints': {
-                if (FeatureFlags.isSourceNameSignalingEnabled()) {
-                    const { sourceName, maxHeight } = obj;
-
-                    if (typeof sourceName === 'string' && typeof maxHeight === 'number') {
-                        // eslint-disable-next-line object-property-newline
-                        logger.info(`SenderSourceConstraints: ${JSON.stringify({ sourceName, maxHeight })}`);
-                        emitter.emit(
-                            RTCEvents.SENDER_VIDEO_CONSTRAINTS_CHANGED, {
-                                sourceName,
-                                maxHeight
-                            }
-                        );
-                    } else {
-                        logger.error(`Invalid SenderSourceConstraints: ${JSON.stringify(obj)}`);
-                    }
+                if (typeof obj.sourceName === 'string' && typeof obj.maxHeight === 'number') {
+                    logger.info(`SenderSourceConstraints: ${obj.sourceName} - ${obj.maxHeight}`);
+                    emitter.emit(RTCEvents.SENDER_VIDEO_CONSTRAINTS_CHANGED, obj);
+                } else {
+                    logger.error(`Invalid SenderSourceConstraints: ${obj.sourceName} - ${obj.maxHeight}`);
                 }
                 break;
             }
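
For reference, sketches of the two data-channel payloads this switch still handles, assuming the usual colibriClass envelope used by the bridge (field values illustrative):

{ "colibriClass": "ForwardedSources", "forwardedSources": [ "abcd1234-v0", "efgh5678-v0" ] }
{ "colibriClass": "SenderSourceConstraints", "sourceName": "abcd1234-v0", "maxHeight": 360 }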

modules/RTC/JitsiLocalTrack.js (+3, -5)

@@ -356,7 +356,7 @@ export default class JitsiLocalTrack extends JitsiTrack {
      */
     _setMuted(muted) {
         if (this.isMuted() === muted
-            && !(this.videoType === VideoType.DESKTOP && FeatureFlags.isMultiStreamSupportEnabled())) {
+            && !(this.videoType === VideoType.DESKTOP && FeatureFlags.isMultiStreamSendSupportEnabled())) {
             return Promise.resolve();
         }
 
@@ -374,7 +374,7 @@ export default class JitsiLocalTrack extends JitsiTrack {
         // the desktop track when screenshare is stopped. Later when screenshare is started again, the same sender will
         // be re-used without the need for signaling a new ssrc through source-add.
         if (this.isAudioTrack()
-                || (this.videoType === VideoType.DESKTOP && !FeatureFlags.isMultiStreamSupportEnabled())
+                || (this.videoType === VideoType.DESKTOP && !FeatureFlags.isMultiStreamSendSupportEnabled())
                 || !browser.doesVideoMuteByStreamRemove()) {
             logMuteInfo();
 
@@ -424,9 +424,7 @@ export default class JitsiLocalTrack extends JitsiTrack {
                     { constraints: { video: this._constraints } }));
 
             promise = promise.then(streamsInfo => {
-                // The track kind for presenter track is video as well.
-                const mediaType = this.getType() === MediaType.PRESENTER ? MediaType.VIDEO : this.getType();
-                const streamInfo = streamsInfo.find(info => info.track.kind === mediaType);
+                const streamInfo = streamsInfo.find(info => info.track.kind === this.getType());
 
                 if (streamInfo) {
                     this._setStream(streamInfo.stream);

modules/RTC/JitsiRemoteTrack.js (+3, -8)

@@ -1,7 +1,6 @@
 import * as JitsiTrackEvents from '../../JitsiTrackEvents';
 import { createTtfmEvent } from '../../service/statistics/AnalyticsEvents';
 import TrackStreamingStatusImpl, { TrackStreamingStatus } from '../connectivity/TrackStreamingStatus';
-import FeatureFlags from '../flags/FeatureFlags';
 import Statistics from '../statistics/statistics';
 
 import JitsiTrack from './JitsiTrack';
@@ -131,8 +130,7 @@ export default class JitsiRemoteTrack extends JitsiTrack {
     _addEventListener(event, handler) {
         super.addListener(event, handler);
 
-        if (FeatureFlags.isSourceNameSignalingEnabled()
-            && event === JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED
+        if (event === JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED
             && this.listenerCount(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED)
             && !this._trackStreamingStatusImpl
         ) {
@@ -150,8 +148,7 @@ export default class JitsiRemoteTrack extends JitsiTrack {
     _removeEventListener(event, handler) {
         super.removeListener(event, handler);
 
-        if (FeatureFlags.isSourceNameSignalingEnabled()
-            && event === JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED
+        if (event === JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED
             && !this.listenerCount(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED)
         ) {
             this._disposeTrackStreamingStatus();
@@ -191,9 +188,7 @@ export default class JitsiRemoteTrack extends JitsiTrack {
      * @returns {Promise}
      */
     dispose() {
-        if (FeatureFlags.isSourceNameSignalingEnabled()) {
-            this._disposeTrackStreamingStatus();
-        }
+        this._disposeTrackStreamingStatus();
 
         return super.dispose();
     }

modules/RTC/RTC.js (+3, -56)

@@ -5,7 +5,6 @@ import BridgeVideoType from '../../service/RTC/BridgeVideoType';
 import { MediaType } from '../../service/RTC/MediaType';
 import RTCEvents from '../../service/RTC/RTCEvents';
 import browser from '../browser';
-import FeatureFlags from '../flags/FeatureFlags';
 import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
 import Listenable from '../util/Listenable';
 import { safeCounterIncrement } from '../util/MathUtil';
@@ -151,9 +150,6 @@ export default class RTC extends Listenable {
          */
         this._selectedEndpoints = null;
 
-        // The last N change listener.
-        this._lastNChangeListener = this._onLastNChanged.bind(this);
-
         // The forwarded sources change listener.
         this._forwardedSourcesChangeListener = this._onForwardedSourcesChanged.bind(this);
 
@@ -270,23 +266,11 @@ export default class RTC extends Listenable {
                     logError(error, 'LastNChangedEvent', this._lastN);
                 }
             }
-            if (!FeatureFlags.isSourceNameSignalingEnabled()) {
-                try {
-                    this._channel.sendVideoTypeMessage(this._videoType);
-                } catch (error) {
-                    logError(error, 'VideoTypeMessage', this._videoType);
-                }
-            }
         };
         this.addListener(RTCEvents.DATA_CHANNEL_OPEN, this._channelOpenListener);
 
-        // Add Last N change listener.
-        this.addListener(RTCEvents.LASTN_ENDPOINT_CHANGED, this._lastNChangeListener);
-
-        if (FeatureFlags.isSourceNameSignalingEnabled()) {
-            // Add forwarded sources change listener.
-            this.addListener(RTCEvents.FORWARDED_SOURCES_CHANGED, this._forwardedSourcesChangeListener);
-        }
+        // Add forwarded sources change listener.
+        this.addListener(RTCEvents.FORWARDED_SOURCES_CHANGED, this._forwardedSourcesChangeListener);
     }
 
     /**
@@ -301,30 +285,6 @@ export default class RTC extends Listenable {
         this._updateAudioOutputForAudioTracks(RTCUtils.getAudioOutputDevice());
     }
 
-    /**
-     * Receives events when Last N had changed.
-     * @param {array} lastNEndpoints The new Last N endpoints.
-     * @private
-     */
-    _onLastNChanged(lastNEndpoints = []) {
-        const oldLastNEndpoints = this._lastNEndpoints || [];
-        let leavingLastNEndpoints = [];
-        let enteringLastNEndpoints = [];
-
-        this._lastNEndpoints = lastNEndpoints;
-
-        leavingLastNEndpoints = oldLastNEndpoints.filter(
-            id => !this.isInLastN(id));
-
-        enteringLastNEndpoints = lastNEndpoints.filter(
-            id => oldLastNEndpoints.indexOf(id) === -1);
-
-        this.conference.eventEmitter.emit(
-            JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
-            leavingLastNEndpoints,
-            enteringLastNEndpoints);
-    }
-
     /**
      * Receives events when forwarded sources had changed.
      *
@@ -690,7 +650,7 @@ export default class RTC extends Listenable {
     setVideoMute(value) {
         const mutePromises = [];
 
-        this.getLocalTracks(MediaType.VIDEO).concat(this.getLocalTracks(MediaType.PRESENTER))
+        this.getLocalTracks(MediaType.VIDEO)
             .forEach(videoTrack => {
                 // this is a Promise
                 mutePromises.push(value ? videoTrack.mute() : videoTrack.unmute());
@@ -866,8 +826,6 @@ export default class RTC extends Listenable {
         if (this._channel) {
             this._channel.close();
             this._channel = null;
-
-            this.removeListener(RTCEvents.LASTN_ENDPOINT_CHANGED, this._lastNChangeListener);
         }
     }
 
@@ -939,17 +897,6 @@ export default class RTC extends Listenable {
         }
     }
 
-    /**
-     * Indicates if the endpoint id is currently included in the last N.
-     * @param {string} id The endpoint id that we check for last N.
-     * @returns {boolean} true if the endpoint id is in the last N or if we
-     * don't have bridge channel support, otherwise we return false.
-     */
-    isInLastN(id) {
-        return !this._lastNEndpoints // lastNEndpoints not initialised yet.
-            || this._lastNEndpoints.indexOf(id) > -1;
-    }
-
    /**
     * Indicates if the source name is currently included in the forwarded sources.
    *
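
By analogy with the removed _onLastNChanged above, a forwarded-sources listener can diff the old and new lists to find which sources entered or left; a self-contained sketch (names illustrative, not copied from _onForwardedSourcesChanged):

function diffForwardedSources(oldSources = [], newSources = []) {
    // Sources the bridge stopped forwarding since the last update.
    const leaving = oldSources.filter(s => !newSources.includes(s));
    // Sources the bridge newly started forwarding.
    const entering = newSources.filter(s => !oldSources.includes(s));

    return { leaving, entering };
}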

modules/RTC/TPCUtils.js (+2, -7)

@@ -322,18 +322,13 @@ export class TPCUtils {
     calculateEncodingsBitrates(localVideoTrack) {
         const videoType = localVideoTrack.getVideoType();
         const desktopShareBitrate = this.pc.options?.videoQuality?.desktopBitrate || DESKTOP_SHARE_RATE;
-        const presenterEnabled = localVideoTrack._originalStream
-            && localVideoTrack._originalStream.id !== localVideoTrack.getStreamId();
         const lowFpsScreenshare = localVideoTrack.getVideoType() === VideoType.DESKTOP
             && this.pc._capScreenshareBitrate
             && !browser.isWebKitBased();
         const encodingsBitrates = this.localStreamEncodingsConfig
         .map(encoding => {
             const bitrate = lowFpsScreenshare
-
-                // For low fps screensharing, set a max bitrate of 500 Kbps when presenter is not turned on, 2500 Kbps
-                // otherwise.
-                ? presenterEnabled ? HD_BITRATE : desktopShareBitrate
+                ? desktopShareBitrate
 
                 // For high fps screenshare, 'maxBitrate' setting must be cleared on Chrome in plan-b, because
                 // if simulcast is enabled for screen and maxBitrates are set then Chrome will not send the
@@ -359,7 +354,7 @@ export class TPCUtils {
         const mediaType = newTrack?.getType() ?? oldTrack?.getType();
         const localTracks = this.pc.getLocalTracks(mediaType);
         const track = newTrack?.getTrack() ?? null;
-        const isNewLocalSource = FeatureFlags.isMultiStreamSupportEnabled()
+        const isNewLocalSource = FeatureFlags.isMultiStreamSendSupportEnabled()
             && localTracks?.length
             && !oldTrack
             && newTrack

modules/RTC/TraceablePeerConnection.js (+31, -72)

@@ -978,17 +978,12 @@ TraceablePeerConnection.prototype._remoteTrackAdded = function(stream, track, tr
         return;
     }
 
+    let sourceName = this.signalingLayer.getTrackSourceName(trackSsrc);
 
-    let sourceName;
-
-    if (FeatureFlags.isSourceNameSignalingEnabled()) {
-        sourceName = this.signalingLayer.getTrackSourceName(trackSsrc);
-
-        // If source name was not signaled, we'll generate one which allows testing signaling
-        // when mixing legacy(mobile) with new clients.
-        if (!sourceName) {
-            sourceName = getSourceNameForJitsiTrack(ownerEndpointId, mediaType, 0);
-        }
+    // If source name was not signaled, we'll generate one which allows testing signaling
+    // when mixing legacy(mobile) with new clients.
+    if (!sourceName) {
+        sourceName = getSourceNameForJitsiTrack(ownerEndpointId, mediaType, 0);
     }
 
     // eslint-disable-next-line no-undef
@@ -1054,14 +1049,6 @@ TraceablePeerConnection.prototype._createRemoteTrack = function(
         logger.info(`${this} ignored duplicated track event for track[endpoint=${ownerEndpointId},type=${mediaType}]`);
 
         return;
-    } else if (userTracksByMediaType?.size && !FeatureFlags.isSourceNameSignalingEnabled()) {
-        logger.error(`${this} received a second remote track for track[endpoint=${ownerEndpointId},type=${mediaType}]`
-            + 'deleting the existing track');
-        const existingTrack = Array.from(userTracksByMediaType)[0];
-
-        // The existing track needs to be removed here. This happens on Safari sometimes when an SSRC is removed from
-        // the remote description and the browser doesn't fire a 'removetrack' event on the associated MediaStream.
-        this._remoteTrackRemoved(existingTrack.getOriginalStream(), existingTrack.getTrack());
     }
 
     const remoteTrack
@@ -1224,20 +1211,16 @@ TraceablePeerConnection.prototype._extractSSRCMap = function(desc) {
 
     let media = session.media;
 
-    // For unified plan clients, only the first audio and video mlines will have ssrcs for the local sources.
-    // The rest of the m-lines are for the recv-only sources, one for each remote source.
     if (this._usesUnifiedPlan) {
-        if (FeatureFlags.isMultiStreamSupportEnabled()) {
-            media = media.filter(mline => mline.direction === MediaDirection.SENDONLY
-                || mline.direction === MediaDirection.SENDRECV);
-        } else {
-            media = [];
-            [ MediaType.AUDIO, MediaType.VIDEO ].forEach(mediaType => {
-                const mLine = session.media.find(m => m.type === mediaType);
+        media = media.filter(mline => mline.direction === MediaDirection.SENDONLY
+            || mline.direction === MediaDirection.SENDRECV);
+    } else {
+        media = [];
+        [ MediaType.AUDIO, MediaType.VIDEO ].forEach(mediaType => {
+            const mLine = session.media.find(m => m.type === mediaType);
 
-                mLine && media.push(mLine);
-            });
-        }
+            mLine && media.push(mLine);
+        });
     }
 
     let index = 0;
@@ -1278,9 +1261,7 @@ TraceablePeerConnection.prototype._extractSSRCMap = function(desc) {
             // the standard and the unified plan SDPs do not have a proper msid attribute for the sources.
             // Also the ssrcs for sources do not change for Unified plan clients since RTCRtpSender#replaceTrack is
             // used for switching the tracks so it is safe to use the mediaType as the key for the TrackSSRCInfo map.
-            const key = this._usesUnifiedPlan
-                ? FeatureFlags.isMultiStreamSupportEnabled() ? `${mLine.type}-${index}` : mLine.type
-                : ssrc.value;
+            const key = this._usesUnifiedPlan ? `${mLine.type}-${index}` : ssrc.value;
             const ssrcNumber = ssrc.id;
             let ssrcInfo = ssrcMap.get(key);
 
@@ -2431,16 +2412,14 @@ TraceablePeerConnection.prototype._setVp9MaxBitrates = function(description, isL
 
     // Find all the m-lines associated with the local sources.
     const direction = isLocalSdp ? MediaDirection.RECVONLY : MediaDirection.SENDONLY;
-    const mLines = FeatureFlags.isMultiStreamSupportEnabled()
-        ? parsedSdp.media.filter(m => m.type === MediaType.VIDEO && m.direction !== direction)
-        : [ parsedSdp.media.find(m => m.type === MediaType.VIDEO) ];
+    const mLines = parsedSdp.media.filter(m => m.type === MediaType.VIDEO && m.direction !== direction);
 
     for (const mLine of mLines) {
         if (this.codecPreference.mimeType === CodecMimeType.VP9) {
             const bitrates = this.tpcUtils.videoBitrates.VP9 || this.tpcUtils.videoBitrates;
             const hdBitrate = bitrates.high ? bitrates.high : HD_BITRATE;
             const mid = mLine.mid;
-            const isSharingScreen = FeatureFlags.isMultiStreamSupportEnabled()
+            const isSharingScreen = FeatureFlags.isMultiStreamSendSupportEnabled()
                 ? mid === this._getDesktopTrackMid()
                 : this._isSharingScreen();
             const limit = Math.floor((isSharingScreen ? HD_BITRATE : hdBitrate) / 1000);
@@ -2473,28 +2452,18 @@ TraceablePeerConnection.prototype._setVp9MaxBitrates = function(description, isL
  * @returns {Promise} promise that will be resolved when the operation is successful and rejected otherwise.
  */
 TraceablePeerConnection.prototype.configureSenderVideoEncodings = function(localVideoTrack = null) {
-    if (FeatureFlags.isSourceNameSignalingEnabled()) {
-        if (localVideoTrack) {
-            return this.setSenderVideoConstraints(
-                this._senderMaxHeights.get(localVideoTrack.getSourceName()),
-                localVideoTrack);
-        }
-        const promises = [];
-
-        for (const track of this.getLocalVideoTracks()) {
-            promises.push(this.setSenderVideoConstraints(this._senderMaxHeights.get(track.getSourceName()), track));
-        }
-
-        return Promise.allSettled(promises);
+    if (localVideoTrack) {
+        return this.setSenderVideoConstraints(
+            this._senderMaxHeights.get(localVideoTrack.getSourceName()),
+            localVideoTrack);
    }
+    const promises = [];
 
-    let localTrack = localVideoTrack;
-
-    if (!localTrack) {
-        localTrack = this.getLocalVideoTracks()[0];
+    for (const track of this.getLocalVideoTracks()) {
+        promises.push(this.setSenderVideoConstraints(this._senderMaxHeights.get(track.getSourceName()), track));
     }
 
-    return this.setSenderVideoConstraints(this._senderVideoMaxHeight, localTrack);
+    return Promise.allSettled(promises);
 };
 
 TraceablePeerConnection.prototype.setLocalDescription = function(description) {
@@ -2650,16 +2619,12 @@ TraceablePeerConnection.prototype.setSenderVideoConstraints = function(frameHeig
         return Promise.resolve();
     }
 
-    if (FeatureFlags.isSourceNameSignalingEnabled()) {
-        const sourceName = localVideoTrack.getSourceName();
+    const sourceName = localVideoTrack.getSourceName();
 
-        if (this._senderMaxHeights.get(sourceName) === frameHeight) {
-            return Promise.resolve();
-        }
-        this._senderMaxHeights.set(sourceName, frameHeight);
-    } else {
-        this._senderVideoMaxHeight = frameHeight;
+    if (this._senderMaxHeights.get(sourceName) === frameHeight) {
+        return Promise.resolve();
     }
+    this._senderMaxHeights.set(sourceName, frameHeight);
 
     if (!localVideoTrack || localVideoTrack.isMuted()) {
         return Promise.resolve();
@@ -3069,16 +3034,10 @@ TraceablePeerConnection.prototype._extractPrimarySSRC = function(ssrcObj) {
  */
 TraceablePeerConnection.prototype._processLocalSSRCsMap = function(ssrcMap) {
     for (const track of this.localTracks.values()) {
-        let sourceIndex, sourceName;
-
-        if (FeatureFlags.isMultiStreamSupportEnabled()) {
-            sourceName = track.getSourceName();
-            sourceIndex = getSourceIndexFromSourceName(sourceName);
-        }
-
+        const sourceName = track.getSourceName();
+        const sourceIndex = getSourceIndexFromSourceName(sourceName);
         const sourceIdentifier = this._usesUnifiedPlan
-            ? FeatureFlags.isMultiStreamSupportEnabled()
-                ? `${track.getType()}-${sourceIndex}` : track.getType()
+            ? `${track.getType()}-${sourceIndex}`
             : track.storedMSID;
 
         if (ssrcMap.has(sourceIdentifier)) {
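
As the diff shows, sender constraints are now tracked per source name in _senderMaxHeights rather than in a single _senderVideoMaxHeight, so a caller constrains each local video track independently; a hypothetical usage (the variable names are assumptions):

// Cap the camera source at 360p; other local sources keep their own entries.
tpc.setSenderVideoConstraints(360, cameraTrack);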

modules/connectivity/ParticipantConnectionStatus.js (+0, -900)

@@ -1,900 +0,0 @@
-import { getLogger } from '@jitsi/logger';
-
-import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
-import * as JitsiTrackEvents from '../../JitsiTrackEvents';
-import { MediaType } from '../../service/RTC/MediaType';
-import RTCEvents from '../../service/RTC/RTCEvents';
-import { createParticipantConnectionStatusEvent } from '../../service/statistics/AnalyticsEvents';
-import browser from '../browser';
-import Statistics from '../statistics/statistics';
-
-const logger = getLogger(__filename);
-
-/**
- * Default value of 500 milliseconds for {@link ParticipantConnectionStatus.outOfLastNTimeout}.
- *
- * @type {number}
- */
-const DEFAULT_NOT_IN_LAST_N_TIMEOUT = 500;
-
-/**
- * Default value of 2500 milliseconds for {@link ParticipantConnectionStatus.p2pRtcMuteTimeout}.
- */
-const DEFAULT_P2P_RTC_MUTE_TIMEOUT = 2500;
-
-/**
- * Default value of 10000 milliseconds for {@link ParticipantConnectionStatus.rtcMuteTimeout}.
- *
- * @type {number}
- */
-const DEFAULT_RTC_MUTE_TIMEOUT = 10000;
-
-/**
- * The time to wait a track to be restored. Track which was out of lastN
- * should be inactive and when entering lastN it becomes restoring and when
- * data is received from bridge it will become active, but if no data is
- * received for some time we set status of that participant connection to
- * interrupted.
- * @type {number}
- */
-const DEFAULT_RESTORING_TIMEOUT = 10000;
-
-/**
- * Participant connection statuses.
- *
- * @type {{
- *      ACTIVE: string,
- *      INACTIVE: string,
- *      INTERRUPTED: string,
- *      RESTORING: string
- * }}
- */
-export const ParticipantConnectionStatus = {
-    /**
-     * Status indicating that connection is currently active.
-     */
-    ACTIVE: 'active',
-
-    /**
-     * Status indicating that connection is currently inactive.
-     * Inactive means the connection was stopped on purpose from the bridge,
-     * like exiting lastN or adaptivity decided to drop video because of not
-     * enough bandwidth.
-     */
-    INACTIVE: 'inactive',
-
-    /**
-     * Status indicating that connection is currently interrupted.
-     */
-    INTERRUPTED: 'interrupted',
-
-    /**
-     * Status indicating that connection is currently restoring.
-     */
-    RESTORING: 'restoring'
-};
-
-/**
- * Class is responsible for emitting
- * JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED events.
- */
-export default class ParticipantConnectionStatusHandler {
-    /* eslint-disable max-params*/
-    /**
-     * Calculates the new {@link ParticipantConnectionStatus} based on
-     * the values given for some specific remote user. It is assumed that
-     * the conference is currently in the JVB mode (in contrary to the P2P mode)
-     * @param {boolean} isConnectionActiveByJvb true if the JVB did not get any
-     * data from the user for the last 15 seconds.
-     * @param {boolean} isInLastN indicates whether the user is in the last N
-     * set. When set to false it means that JVB is not sending any video for
-     * the user.
-     * @param {boolean} isRestoringTimedout if true it means that the user has
-     * been outside of last N too long to be considered
-     * {@link ParticipantConnectionStatus.RESTORING}.
-     * @param {boolean} isVideoMuted true if the user is video muted and we
-     * should not expect to receive any video.
-     * @param {boolean} isVideoTrackFrozen if the current browser support video
-     * frozen detection then it will be set to true when the video track is
-     * frozen. If the current browser does not support frozen detection the it's
-     * always false.
-     * @return {ParticipantConnectionStatus} the new connection status for
-     * the user for whom the values above were provided.
-     * @private
-     */
-    static _getNewStateForJvbMode(
-            isConnectionActiveByJvb,
-            isInLastN,
-            isRestoringTimedout,
-            isVideoMuted,
-            isVideoTrackFrozen) {
-        if (!isConnectionActiveByJvb) {
-            // when there is a connection problem signaled from jvb
-            // it means no media was flowing for at least 15secs, so both audio
-            // and video are most likely interrupted
-            return ParticipantConnectionStatus.INTERRUPTED;
-        } else if (isVideoMuted) {
-            // If the connection is active according to JVB and the user is
-            // video muted there is no way for the connection to be inactive,
-            // because the detection logic below only makes sense for video.
-            return ParticipantConnectionStatus.ACTIVE;
-        }
-
-        // Logic when isVideoTrackFrozen is supported
-        if (browser.supportsVideoMuteOnConnInterrupted()) {
-            if (!isVideoTrackFrozen) {
-                // If the video is playing we're good
-                return ParticipantConnectionStatus.ACTIVE;
-            } else if (isInLastN) {
-                return isRestoringTimedout
-                    ? ParticipantConnectionStatus.INTERRUPTED
-                    : ParticipantConnectionStatus.RESTORING;
-            }
-
-            return ParticipantConnectionStatus.INACTIVE;
-        }
-
-        // Because this browser is incapable of detecting frozen video we must
-        // rely on the lastN value
-        return isInLastN
-            ? ParticipantConnectionStatus.ACTIVE
-            : ParticipantConnectionStatus.INACTIVE;
-    }
-
-    /* eslint-enable max-params*/
-
-    /**
-     * In P2P mode we don't care about any values coming from the JVB and
-     * the connection status can be only active or interrupted.
-     * @param {boolean} isVideoMuted the user if video muted
-     * @param {boolean} isVideoTrackFrozen true if the video track for
-     * the remote user is currently frozen. If the current browser does not
-     * support video frozen detection then it's always false.
-     * @return {ParticipantConnectionStatus}
-     * @private
-     */
-    static _getNewStateForP2PMode(isVideoMuted, isVideoTrackFrozen) {
-        if (!browser.supportsVideoMuteOnConnInterrupted()) {
-            // There's no way to detect problems in P2P when there's no video
-            // track frozen detection...
-            return ParticipantConnectionStatus.ACTIVE;
-        }
-
-        return isVideoMuted || !isVideoTrackFrozen
-            ? ParticipantConnectionStatus.ACTIVE
-            : ParticipantConnectionStatus.INTERRUPTED;
-    }
-
-    /**
-     * Creates new instance of <tt>ParticipantConnectionStatus</tt>.
-     *
-     * @constructor
-     * @param {RTC} rtc the RTC service instance
-     * @param {JitsiConference} conference parent conference instance
-     * @param {Object} options
-     * @param {number} [options.p2pRtcMuteTimeout=2500] custom value for
-     * {@link ParticipantConnectionStatus.p2pRtcMuteTimeout}.
-     * @param {number} [options.rtcMuteTimeout=10000] custom value for
-     * {@link ParticipantConnectionStatus.rtcMuteTimeout}.
-     * @param {number} [options.outOfLastNTimeout=500] custom value for
-     * {@link ParticipantConnectionStatus.outOfLastNTimeout}.
-     */
-    constructor(rtc, conference, options) {
-        this.rtc = rtc;
-        this.conference = conference;
-
-        /**
-         * A map of the "endpoint ID"(which corresponds to the resource part
-         * of MUC JID(nickname)) to the timeout callback IDs scheduled using
-         * window.setTimeout.
-         * @type {Object.<string, number>}
-         */
-        this.trackTimers = {};
-
-        /**
-         * This map holds the endpoint connection status received from the JVB
-         * (as it might be different than the one stored in JitsiParticipant).
-         * Required for getting back in sync when remote video track is removed.
-         * @type {Object.<string, boolean>}
-         */
-        this.connStatusFromJvb = { };
-
-        /**
-         * If video track frozen detection through RTC mute event is supported,
-         * we wait some time until video track is considered frozen. But because
-         * when the user falls out of last N it is expected for the video to
-         * freeze this timeout must be significantly reduced in "out of last N"
-         * case.
-         *
-         * Basically this value is used instead of {@link rtcMuteTimeout} when
-         * user is not in last N.
-         * @type {number}
-         */
-        this.outOfLastNTimeout
-            = typeof options.outOfLastNTimeout === 'number'
-                ? options.outOfLastNTimeout : DEFAULT_NOT_IN_LAST_N_TIMEOUT;
-
-        /**
-         * How long we are going to wait for the corresponding signaling mute event after the RTC video track muted
-         * event is fired on the Media stream, before the connection interrupted is fired. The default value is
-         * {@link DEFAULT_P2P_RTC_MUTE_TIMEOUT}.
-         *
-         * @type {number} amount of time in milliseconds.
-         */
-        this.p2pRtcMuteTimeout = typeof options.p2pRtcMuteTimeout === 'number'
-            ? options.p2pRtcMuteTimeout : DEFAULT_P2P_RTC_MUTE_TIMEOUT;
-
-        /**
-         * How long we're going to wait after the RTC video track muted event
-         * for the corresponding signalling mute event, before the connection
-         * interrupted is fired. The default value is
-         * {@link DEFAULT_RTC_MUTE_TIMEOUT}.
-         *
-         * @type {number} amount of time in milliseconds
-         */
-        this.rtcMuteTimeout
-            = typeof options.rtcMuteTimeout === 'number'
-                ? options.rtcMuteTimeout : DEFAULT_RTC_MUTE_TIMEOUT;
-
-        /**
-         * This map holds a timestamp indicating  when participant's video track
-         * was RTC muted (it is assumed that each participant can have only 1
-         * video track at a time). The purpose of storing the timestamp is to
-         * avoid the transition to disconnected status in case of legitimate
-         * video mute operation where the signalling video muted event can
-         * arrive shortly after RTC muted event.
-         *
-         * The key is participant's ID which is the same as endpoint id in
-         * the Colibri conference allocated on the JVB.
-         *
-         * The value is a timestamp measured in milliseconds obtained with
-         * <tt>Date.now()</tt>.
-         *
-         * FIXME merge this logic with NO_DATA_FROM_SOURCE event
-         *       implemented in JitsiLocalTrack by extending the event to
-         *       the remote track and allowing to set different timeout for
-         *       local and remote tracks.
-         *
-         * @type {Object.<string, number>}
-         */
-        this.rtcMutedTimestamp = { };
-        logger.info(`RtcMuteTimeout set to: ${this.rtcMuteTimeout}`);
-
-        /**
-         * This map holds the timestamps indicating when participant's video
-         * entered lastN set. Participants entering lastN will have connection
-         * status restoring and when we start receiving video will become
-         * active, but if video is not received for certain time
-         * {@link DEFAULT_RESTORING_TIMEOUT} that participant connection status
-         * will become interrupted.
-         *
-         * @type {Map<string, number>}
-         */
-        this.enteredLastNTimestamp = new Map();
-
-        /**
-         * A map of the "endpoint ID"(which corresponds to the resource part
-         * of MUC JID(nickname)) to the restoring timeout callback IDs
-         * scheduled using window.setTimeout.
-         *
-         * @type {Map<string, number>}
-         */
-        this.restoringTimers = new Map();
-
-        /**
-         * A map that holds the current connection status (along with all the internal events that happen
-         * while in that state).
-         *
-         * The goal is to send this information to the analytics backend for post-mortem analysis.
-         */
-        this.connectionStatusMap = new Map();
-    }
-
-    /**
-     * Gets the video frozen timeout for given user.
-     * @param {string} id endpoint/participant ID
-     * @return {number} how long are we going to wait since RTC video muted
-     * even, before a video track is considered frozen.
-     * @private
-     */
-    _getVideoFrozenTimeout(id) {
-        return this.rtc.isInLastN(id)
-            ? this.rtcMuteTimeout
-            : this.conference.isP2PActive() ? this.p2pRtcMuteTimeout : this.outOfLastNTimeout;
-    }
-
-    /**
-     * Initializes <tt>ParticipantConnectionStatus</tt> and bind required event
-     * listeners.
-     */
-    init() {
-
-        this._onEndpointConnStatusChanged
-            = this.onEndpointConnStatusChanged.bind(this);
-            = this.onEndpointConnStatusChanged.bind(this);
314
-
315
-        this.rtc.addListener(
316
-            RTCEvents.ENDPOINT_CONN_STATUS_CHANGED,
317
-            this._onEndpointConnStatusChanged);
318
-
319
-        // Handles P2P status changes
320
-        this._onP2PStatus = this.refreshConnectionStatusForAll.bind(this);
321
-        this.conference.on(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);
322
-
323
-        // Used to send analytics events for the participant that left the call.
324
-        this._onUserLeft = this.onUserLeft.bind(this);
325
-        this.conference.on(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);
326
-
327
-        // On some browsers MediaStreamTrack trigger "onmute"/"onunmute"
328
-        // events for video type tracks when they stop receiving data which is
329
-        // often a sign that remote user is having connectivity issues
330
-        if (browser.supportsVideoMuteOnConnInterrupted()) {
331
-
332
-            this._onTrackRtcMuted = this.onTrackRtcMuted.bind(this);
333
-            this.rtc.addListener(
334
-                RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);
335
-
336
-            this._onTrackRtcUnmuted = this.onTrackRtcUnmuted.bind(this);
337
-            this.rtc.addListener(
338
-                RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);
339
-
340
-            // Track added/removed listeners are used to bind "mute"/"unmute"
341
-            // event handlers
342
-            this._onRemoteTrackAdded = this.onRemoteTrackAdded.bind(this);
343
-            this.conference.on(
344
-                JitsiConferenceEvents.TRACK_ADDED,
345
-                this._onRemoteTrackAdded);
346
-
347
-            this._onRemoteTrackRemoved = this.onRemoteTrackRemoved.bind(this);
348
-            this.conference.on(
349
-                JitsiConferenceEvents.TRACK_REMOVED,
350
-                this._onRemoteTrackRemoved);
351
-
352
-            // Listened which will be bound to JitsiRemoteTrack to listen for
353
-            // signalling mute/unmute events.
354
-            this._onSignallingMuteChanged
355
-                = this.onSignallingMuteChanged.bind(this);
356
-
357
-            // Used to send an analytics event when the video type changes.
358
-            this._onTrackVideoTypeChanged
359
-                = this.onTrackVideoTypeChanged.bind(this);
360
-        }
361
-
362
-        this._onLastNChanged = this._onLastNChanged.bind(this);
363
-        this.conference.on(
364
-            JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
365
-            this._onLastNChanged);
366
-
367
-        this._onLastNValueChanged
368
-            = this.refreshConnectionStatusForAll.bind(this);
369
-        this.rtc.on(
370
-            RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);
371
-    }
372
-
373
-    /**
374
-     * Removes all event listeners and disposes of all resources held by this
375
-     * instance.
376
-     */
377
-    dispose() {
378
-
379
-        this.rtc.removeListener(
380
-            RTCEvents.ENDPOINT_CONN_STATUS_CHANGED,
381
-            this._onEndpointConnStatusChanged);
382
-
383
-        if (browser.supportsVideoMuteOnConnInterrupted()) {
384
-            this.rtc.removeListener(
385
-                RTCEvents.REMOTE_TRACK_MUTE,
386
-                this._onTrackRtcMuted);
387
-            this.rtc.removeListener(
388
-                RTCEvents.REMOTE_TRACK_UNMUTE,
389
-                this._onTrackRtcUnmuted);
390
-
391
-            this.conference.off(
392
-                JitsiConferenceEvents.TRACK_ADDED,
393
-                this._onRemoteTrackAdded);
394
-            this.conference.off(
395
-                JitsiConferenceEvents.TRACK_REMOVED,
396
-                this._onRemoteTrackRemoved);
397
-        }
398
-
399
-        this.conference.off(
400
-            JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
401
-            this._onLastNChanged);
402
-
403
-        this.rtc.removeListener(
404
-            RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);
405
-
406
-        this.conference.off(
407
-            JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);
408
-
409
-        this.conference.off(
410
-            JitsiConferenceEvents.USER_LEFT, this._onUserLeft);
411
-
412
-        const participantIds = Object.keys(this.trackTimers);
413
-
414
-        for (const participantId of participantIds) {
415
-            this.clearTimeout(participantId);
416
-            this.clearRtcMutedTimestamp(participantId);
417
-        }
418
-
419
-        for (const id in this.connectionStatusMap) {
420
-            if (this.connectionStatusMap.hasOwnProperty(id)) {
421
-                this.onUserLeft(id);
422
-            }
423
-        }
424
-
425
-        // Clear RTC connection status cache
426
-        this.connStatusFromJvb = {};
427
-    }
428
-
429
-    /**
430
-     * Handles RTCEvents.ENDPOINT_CONN_STATUS_CHANGED triggered when we receive
431
-     * notification over the data channel from the bridge about endpoint's
432
-     * connection status update.
433
-     * @param {string} endpointId - The endpoint ID(MUC nickname/resource JID).
434
-     * @param {boolean} isActive - true if the connection is OK or false otherwise.
435
-     */
436
-    onEndpointConnStatusChanged(endpointId, isActive) {
437
-
438
-        logger.debug(
439
-            `Detector RTCEvents.ENDPOINT_CONN_STATUS_CHANGED(${Date.now()}): ${
440
-                endpointId}: ${isActive}`);
441
-
442
-        // Filter out events for the local JID for now
443
-        if (endpointId !== this.conference.myUserId()) {
444
-            // Store the status received over the data channels
445
-            this.connStatusFromJvb[endpointId] = isActive;
446
-            this.figureOutConnectionStatus(endpointId);
447
-        }
448
-    }
449
-
450
-    /**
451
-     * Changes connection status.
452
-     * @param {JitsiParticipant} participant
453
-     * @param newStatus
454
-     */
455
-    _changeConnectionStatus(participant, newStatus) {
456
-        if (participant.getConnectionStatus() !== newStatus) {
457
-
458
-            const endpointId = participant.getId();
459
-
460
-            participant._setConnectionStatus(newStatus);
461
-
462
-            logger.debug(
463
-                `Emit endpoint conn status(${Date.now()}) ${endpointId}: ${
464
-                    newStatus}`);
465
-
466
-            // Log the event on CallStats
467
-            Statistics.sendLog(
468
-                JSON.stringify({
469
-                    id: 'peer.conn.status',
470
-                    participant: endpointId,
471
-                    status: newStatus
472
-                }));
473
-
474
-
475
-            this.conference.eventEmitter.emit(
476
-                JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED,
477
-                endpointId, newStatus);
478
-        }
479
-    }
480
-
481
-    /**
482
-     * Reset the postponed "connection interrupted" event which was previously
483
-     * scheduled as a timeout on RTC 'onmute' event.
484
-     *
485
-     * @param {string} participantId - The participant for which the "connection
486
-     * interrupted" timeout was scheduled.
487
-     */
488
-    clearTimeout(participantId) {
489
-        if (this.trackTimers[participantId]) {
490
-            window.clearTimeout(this.trackTimers[participantId]);
491
-            this.trackTimers[participantId] = null;
492
-        }
493
-    }
494
-
495
-    /**
496
-     * Clears the timestamp of the RTC muted event for participant's video track
497
-     * @param {string} participantId the id of the conference participant which
498
-     * is the same as the Colibri endpoint ID of the video channel allocated for
499
-     * the user on the videobridge.
500
-     */
501
-    clearRtcMutedTimestamp(participantId) {
502
-        this.rtcMutedTimestamp[participantId] = null;
503
-    }
504
-
505
-    /**
506
-     * Bind signalling mute event listeners for video {JitsiRemoteTrack} when
507
-     * a new one is added to the conference.
508
-     *
509
-     * @param {JitsiTrack} remoteTrack - The {JitsiTrack} which is being added to
510
-     * the conference.
511
-     */
512
-    onRemoteTrackAdded(remoteTrack) {
513
-        if (!remoteTrack.isLocal()
514
-                && remoteTrack.getType() === MediaType.VIDEO) {
515
-
516
-            logger.debug(
517
-                `Detector on remote track added for: ${
518
-                    remoteTrack.getParticipantId()}`);
519
-
520
-            remoteTrack.on(
521
-                JitsiTrackEvents.TRACK_MUTE_CHANGED,
522
-                this._onSignallingMuteChanged);
523
-            remoteTrack.on(
524
-                JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED,
525
-                videoType => this._onTrackVideoTypeChanged(remoteTrack, videoType));
526
-        }
527
-    }
528
-
529
-    /**
530
-     * Removes all event listeners bound to the remote video track and clears
531
-     * any related timeouts.
532
-     *
533
-     * @param {JitsiRemoteTrack} remoteTrack - The remote track which is being
534
-     * removed from the conference.
535
-     */
536
-    onRemoteTrackRemoved(remoteTrack) {
537
-        if (!remoteTrack.isLocal()
538
-                && remoteTrack.getType() === MediaType.VIDEO) {
539
-
540
-            const endpointId = remoteTrack.getParticipantId();
541
-
542
-            logger.debug(`Detector on remote track removed: ${endpointId}`);
543
-
544
-            remoteTrack.off(
545
-                JitsiTrackEvents.TRACK_MUTE_CHANGED,
546
-                this._onSignallingMuteChanged);
547
-
548
-            this.clearTimeout(endpointId);
549
-            this.clearRtcMutedTimestamp(endpointId);
550
-
551
-            this.figureOutConnectionStatus(endpointId);
552
-        }
553
-    }
554
-
555
-    /**
556
-     * Checks if given participant's video is considered frozen.
557
-     * @param {JitsiParticipant} participant - The participant.
558
-     * @return {boolean} <tt>true</tt> if the video has frozen for given
559
-     * participant or <tt>false</tt> when it's either not considered frozen
560
-     * (yet) or if freeze detection is not supported by the current browser.
561
-     *
562
-     * FIXME merge this logic with NO_DATA_FROM_SOURCE event
563
-     *       implemented in JitsiLocalTrack by extending the event to
564
-     *       the remote track and allowing to set different timeout for
565
-     *       local and remote tracks.
566
-     *
567
-     */
568
-    isVideoTrackFrozen(participant) {
569
-        if (!browser.supportsVideoMuteOnConnInterrupted()) {
570
-            return false;
571
-        }
572
-
573
-        const id = participant.getId();
574
-        const hasAnyVideoRTCMuted = participant.hasAnyVideoTrackWebRTCMuted();
575
-        const rtcMutedTimestamp = this.rtcMutedTimestamp[id];
576
-        const timeout = this._getVideoFrozenTimeout(id);
577
-
578
-        return hasAnyVideoRTCMuted
579
-            && typeof rtcMutedTimestamp === 'number'
580
-            && (Date.now() - rtcMutedTimestamp) >= timeout;
581
-    }
582
-
583
-    /**
584
-     * Goes over every participant and updates connectivity status.
585
-     * Should be called when a parameter which affects all of the participants
586
-     * is changed (P2P for example).
587
-     */
588
-    refreshConnectionStatusForAll() {
589
-        const participants = this.conference.getParticipants();
590
-
591
-        for (const participant of participants) {
592
-            this.figureOutConnectionStatus(participant.getId());
593
-        }
594
-    }
595
-
596
-    /**
597
-     * Figures out (and updates) the current connectivity status for
598
-     * the participant identified by the given id.
599
-     *
600
-     * @param {string} id - The participant's id (MUC nickname or Colibri endpoint ID).
601
-     */
602
-    figureOutConnectionStatus(id) {
603
-        const participant = this.conference.getParticipantById(id);
604
-
605
-        if (!participant) {
606
-            // Probably the participant is no longer in the conference
607
-            // (at the time of writing this code, participant is
608
-            // detached from the conference and TRACK_REMOVED events are
609
-            // fired),
610
-            // so we don't care, but let's print a log message for debugging purposes.
611
-            logger.debug(`figure out conn status - no participant for: ${id}`);
612
-
613
-            return;
614
-        }
615
-
616
-        const inP2PMode = this.conference.isP2PActive();
617
-        const isRestoringTimedOut = this._isRestoringTimedout(id);
618
-        const audioOnlyMode = this.conference.getLastN() === 0;
619
-
620
-        // NOTE Overriding videoMuted to true for audioOnlyMode should disable
621
-        // any detection based on video playback or the last N.
622
-        const isVideoMuted = participant.isVideoMuted() || audioOnlyMode;
623
-        const isVideoTrackFrozen = this.isVideoTrackFrozen(participant);
624
-        const isInLastN = this.rtc.isInLastN(id);
625
-        let isConnActiveByJvb = this.connStatusFromJvb[id];
626
-
627
-        if (typeof isConnActiveByJvb !== 'boolean') {
628
-            // If no status was received from the JVB it means that it's active
629
-            // (the bridge does not send notification unless there is a problem)
630
-            isConnActiveByJvb = true;
631
-        }
632
-
633
-        const newState
634
-            = inP2PMode
635
-                ? ParticipantConnectionStatusHandler._getNewStateForP2PMode(
636
-                    isVideoMuted,
637
-                    isVideoTrackFrozen)
638
-                : ParticipantConnectionStatusHandler._getNewStateForJvbMode(
639
-                    isConnActiveByJvb,
640
-                    isInLastN,
641
-                    isRestoringTimedOut,
642
-                    isVideoMuted,
643
-                    isVideoTrackFrozen);
644
-
645
-        // if the new state is not restoring clear timers and timestamps
646
-        // that we use to track the restoring state
647
-        if (newState !== ParticipantConnectionStatus.RESTORING) {
648
-            this._clearRestoringTimer(id);
649
-        }
650
-
651
-        logger.debug(
652
-            `Figure out conn status for ${id}, is video muted: ${
653
-                isVideoMuted} is active(jvb): ${
654
-                isConnActiveByJvb} video track frozen: ${
655
-                isVideoTrackFrozen} p2p mode: ${
656
-                inP2PMode} is in last N: ${
657
-                isInLastN} currentStatus => newStatus: ${
658
-                participant.getConnectionStatus()} => ${newState}`);
659
-
660
-        const oldConnectionStatus = this.connectionStatusMap[id] || {};
661
-
662
-        // Send an analytics event (guard on either the p2p flag or the connection status has changed
663
-        // since the last time this code block run).
664
-        if (!('p2p' in oldConnectionStatus)
665
-            || !('connectionStatus' in oldConnectionStatus)
666
-            || oldConnectionStatus.p2p !== inP2PMode
667
-            || oldConnectionStatus.connectionStatus !== newState) {
668
-
669
-            const nowMs = Date.now();
670
-
671
-            this.maybeSendParticipantConnectionStatusEvent(id, nowMs);
672
-
673
-            this.connectionStatusMap[id] = {
674
-                ...oldConnectionStatus,
675
-                connectionStatus: newState,
676
-                p2p: inP2PMode,
677
-                startedMs: nowMs
678
-            };
679
-
680
-            // sometimes (always?) we're late to hook the TRACK_VIDEOTYPE_CHANGED event and the
681
-            // video type is not in oldConnectionStatus.
682
-            if (!('videoType' in this.connectionStatusMap[id])) {
683
-                const videoTracks = participant.getTracksByMediaType(MediaType.VIDEO);
684
-
685
-                if (Array.isArray(videoTracks) && videoTracks.length !== 0) {
686
-                    this.connectionStatusMap[id].videoType = videoTracks[0].videoType;
687
-                }
688
-            }
689
-        }
690
-        this._changeConnectionStatus(participant, newState);
691
-    }
692
-
693
-    /**
694
-     * Computes the duration of the current connection status for the participant with the specified id (i.e. 15 seconds
695
-     * in the INTERRUPTED state) and sends a participant connection status event.
696
-     * @param {string} id - The jid of the participant.
697
-     * @param {Number} nowMs - The current time (in millis).
698
-     * @returns {void}
699
-     */
700
-    maybeSendParticipantConnectionStatusEvent(id, nowMs) {
701
-        const participantConnectionStatus = this.connectionStatusMap[id];
702
-
703
-        if (participantConnectionStatus
704
-            && 'startedMs' in participantConnectionStatus
705
-            && 'videoType' in participantConnectionStatus
706
-            && 'connectionStatus' in participantConnectionStatus
707
-            && 'p2p' in participantConnectionStatus) {
708
-            participantConnectionStatus.value = nowMs - participantConnectionStatus.startedMs;
709
-            Statistics.sendAnalytics(
710
-                createParticipantConnectionStatusEvent(participantConnectionStatus));
711
-        }
712
-    }
713
-
714
-    /**
715
-     * On change in Last N set check all leaving and entering participants to
716
-     * change their corresponding statuses.
717
-     *
718
-     * @param {Array<string>} leavingLastN - The array of ids leaving lastN.
719
-     * @param {Array<string>} enteringLastN - The array of ids entering lastN.
720
-     * @private
721
-     */
722
-    _onLastNChanged(leavingLastN = [], enteringLastN = []) {
723
-        const now = Date.now();
724
-
725
-        logger.debug(`LastN endpoints changed leaving=${leavingLastN}, entering=${enteringLastN} at ${now}`);
726
-
727
-        // If the browser doesn't fire the mute/onmute events when the remote peer stops/starts sending media,
728
-        // calculate the connection status for all the endpoints since it won't get triggered automatically on
729
-        // the endpoint that has started/stopped receiving media.
730
-        if (!browser.supportsVideoMuteOnConnInterrupted()) {
731
-            this.refreshConnectionStatusForAll();
732
-        }
733
-
734
-        for (const id of leavingLastN) {
735
-            this.enteredLastNTimestamp.delete(id);
736
-            this._clearRestoringTimer(id);
737
-            browser.supportsVideoMuteOnConnInterrupted() && this.figureOutConnectionStatus(id);
738
-        }
739
-        for (const id of enteringLastN) {
740
-            // store the timestamp this id is entering lastN
741
-            this.enteredLastNTimestamp.set(id, now);
742
-            browser.supportsVideoMuteOnConnInterrupted() && this.figureOutConnectionStatus(id);
743
-        }
744
-    }
745
-
746
-    /**
747
-     * Clears the restoring timer for participant's video track and the
748
-     * timestamp for entering lastN.
749
-     *
750
-     * @param {string} participantId - The id of the conference participant which
751
-     * is the same as the Colibri endpoint ID of the video channel allocated for
752
-     * the user on the videobridge.
753
-     */
754
-    _clearRestoringTimer(participantId) {
755
-        const rTimer = this.restoringTimers.get(participantId);
756
-
757
-        if (rTimer) {
758
-            clearTimeout(rTimer);
759
-            this.restoringTimers.delete(participantId);
760
-        }
761
-    }
762
-
763
-    /**
764
-     * Checks whether a track had stayed enough in restoring state, compares
765
-     * current time and the time the track entered in lastN. If it hasn't
766
-     * timedout and there is no timer added, add new timer in order to give it
767
-     * more time to become active or mark it as interrupted on next check.
768
-     *
769
-     * @param {string} participantId - The id of the conference participant which
770
-     * is the same as the Colibri endpoint ID of the video channel allocated for
771
-     * the user on the videobridge.
772
-     * @returns {boolean} <tt>true</tt> if the track was in restoring state
773
-     * more than the timeout ({@link DEFAULT_RESTORING_TIMEOUT}.) in order to
774
-     * set its status to interrupted.
775
-     * @private
776
-     */
777
-    _isRestoringTimedout(participantId) {
778
-        const enteredLastNTimestamp
779
-            = this.enteredLastNTimestamp.get(participantId);
780
-
781
-        if (enteredLastNTimestamp
782
-            && (Date.now() - enteredLastNTimestamp)
783
-                >= DEFAULT_RESTORING_TIMEOUT) {
784
-            return true;
785
-        }
786
-
787
-        // still haven't reached timeout, if there is no timer scheduled,
788
-        // schedule one so we can track the restoring state and change it after
789
-        // reaching the timeout
790
-        const rTimer = this.restoringTimers.get(participantId);
791
-
792
-        if (!rTimer) {
793
-            this.restoringTimers.set(participantId, setTimeout(
794
-                () => this.figureOutConnectionStatus(participantId),
795
-                DEFAULT_RESTORING_TIMEOUT));
796
-        }
797
-
798
-        return false;
799
-    }
800
-
801
-    /**
802
-     * Sends a last/final participant connection status event for the participant that left the conference.
803
-     * @param {string} id - The id of the participant that left the conference.
804
-     * @returns {void}
805
-     */
806
-    onUserLeft(id) {
807
-        this.maybeSendParticipantConnectionStatusEvent(id, Date.now());
808
-        delete this.connectionStatusMap[id];
809
-    }
810
-
811
-    /**
812
-     * Handles RTC 'onmute' event for the video track.
813
-     *
814
-     * @param {JitsiRemoteTrack} track - The video track for which 'onmute' event
815
-     * will be processed.
816
-     */
817
-    onTrackRtcMuted(track) {
818
-        const participantId = track.getParticipantId();
819
-        const participant = this.conference.getParticipantById(participantId);
820
-
821
-        logger.debug(`Detector track RTC muted: ${participantId}`, Date.now());
822
-        if (!participant) {
823
-            logger.error(`No participant for id: ${participantId}`);
824
-
825
-            return;
826
-        }
827
-        this.rtcMutedTimestamp[participantId] = Date.now();
828
-        if (!participant.isVideoMuted()) {
829
-            // If the user is not muted according to the signalling we'll give
830
-            // it some time, before the connection interrupted event is
831
-            // triggered.
832
-            this.clearTimeout(participantId);
833
-
834
-            // The timeout is reduced when user is not in the last N
835
-            const timeout = this._getVideoFrozenTimeout(participantId);
836
-
837
-            this.trackTimers[participantId] = window.setTimeout(() => {
838
-                logger.debug(
839
-                    `Set RTC mute timeout for: ${participantId}\
840
-                     of ${timeout} ms`);
841
-                this.clearTimeout(participantId);
842
-                this.figureOutConnectionStatus(participantId);
843
-            }, timeout);
844
-        }
845
-    }
846
-
847
-    /**
848
-     * Handles RTC 'onunmute' event for the video track.
849
-     *
850
-     * @param {JitsiRemoteTrack} track - The video track for which 'onunmute'
851
-     * event will be processed.
852
-     */
853
-    onTrackRtcUnmuted(track) {
854
-        const participantId = track.getParticipantId();
855
-
856
-        logger.debug(
857
-            `Detector track RTC unmuted: ${participantId}`, Date.now());
858
-
859
-        this.clearTimeout(participantId);
860
-        this.clearRtcMutedTimestamp(participantId);
861
-
862
-        this.figureOutConnectionStatus(participantId);
863
-    }
864
-
865
-    /**
866
-     * Here the signalling "mute"/"unmute" events are processed.
867
-     *
868
-     * @param {JitsiRemoteTrack} track - The remote video track for which
869
-     * the signalling mute/unmute event will be processed.
870
-     */
871
-    onSignallingMuteChanged(track) {
872
-        const participantId = track.getParticipantId();
873
-
874
-        logger.debug(
875
-            `Detector on track signalling mute changed: ${participantId}`,
876
-            track.isMuted());
877
-
878
-        this.figureOutConnectionStatus(participantId);
879
-    }
880
-
881
-    /**
882
-     * Sends a participant connection status event as a result of the video type
883
-     * changing.
884
-     * @param {JitsiRemoteTrack} track - The track.
885
-     * @param {VideoType} type - The video type.
886
-     * @returns {void}
887
-     */
888
-    onTrackVideoTypeChanged(track, type) {
889
-        const id = track.getParticipantId();
890
-        const nowMs = Date.now();
891
-
892
-        this.maybeSendParticipantConnectionStatusEvent(id, nowMs);
893
-
894
-        this.connectionStatusMap[id] = {
895
-            ...this.connectionStatusMap[id] || {},
896
-            videoType: type,
897
-            startedMs: nowMs
898
-        };
899
-    }
900
-}
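For reference, the per-participant record that the deleted handler kept in connectionStatusMap (its only externally visible output besides the PARTICIPANT_CONN_STATUS_CHANGED event) had the following shape. This is a sketch assembled from the removed code above; the literal values are illustrative:

    // Keyed by endpoint ID in connectionStatusMap.
    const record = {
        connectionStatus: 'interrupted', // last status computed by figureOutConnectionStatus()
        p2p: false,                      // whether that status was computed while P2P was active
        startedMs: 1689000000000,        // Date.now() when the current status began
        videoType: 'camera'              // from the participant's first video track, when known
    };

    // On the next transition (and on USER_LEFT), maybeSendParticipantConnectionStatusEvent()
    // adds record.value = nowMs - record.startedMs (time spent in that status) and passes the
    // record to createParticipantConnectionStatusEvent() via Statistics.sendAnalytics().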

+ 3
- 39
modules/flags/FeatureFlags.js View File

@@ -1,9 +1,6 @@
-import { getLogger } from '@jitsi/logger';
 
 import browser from '../browser';
 
-const logger = getLogger('FeatureFlags');
-
 /**
  * A global module for accessing information about different feature flags state.
  */
@@ -13,26 +10,11 @@ class FeatureFlags {
      *
      * @param {object} flags - The feature flags.
     * @param {boolean=} flags.runInLiteMode - Enables lite mode for testing to disable media decoding.
-     * @param {boolean=} flags.receiveMultipleVideoStreams - Signal support for receiving multiple video streams.
-     * @param {boolean=} flags.sendMultipleVideoStreams - Signal support for sending multiple video streams.
-     * @param {boolean=} flags.sourceNameSignaling - Enables source names in the signaling.
      * @param {boolean=} flags.ssrcRewritingEnabled - Use SSRC rewriting. Requires sourceNameSignaling to be enabled.
-     * @param {boolean=} flags.enableUnifiedOnChrome - Use unified plan signaling on chrome browsers.
      */
     init(flags) {
         this._runInLiteMode = Boolean(flags.runInLiteMode);
-        this._receiveMultipleVideoStreams = flags.receiveMultipleVideoStreams ?? true;
-        this._sendMultipleVideoStreams = flags.sendMultipleVideoStreams ?? true;
-        this._sourceNameSignaling = flags.sourceNameSignaling ?? true;
-        this._ssrcRewriting = this._sourceNameSignaling && Boolean(flags.ssrcRewritingEnabled);
-
-        // For Chromium, check if Unified plan is enabled.
-        this._usesUnifiedPlan = browser.supportsUnifiedPlan()
-            && (!browser.isChromiumBased() || (flags.enableUnifiedOnChrome ?? true));
-
-        logger.info(`Send multiple video streams: ${this._sendMultipleVideoStreams},`
-            + ` Source name signaling: ${this._sourceNameSignaling},`
-            + ` Unified plan: ${this._usesUnifiedPlan}`);
+        this._ssrcRewriting = Boolean(flags.ssrcRewritingEnabled);
     }
 
     /**
@@ -40,17 +22,8 @@ class FeatureFlags {
      *
      * @returns {boolean}
      */
-    isMultiStreamSupportEnabled() {
-        return this._sourceNameSignaling && this._sendMultipleVideoStreams && this._usesUnifiedPlan;
-    }
-
-    /**
-     * Checks if receiving multiple video streams is supported.
-     *
-     * @returns {boolean}
-     */
-    isReceiveMultipleVideoStreamsSupported() {
-        return this._receiveMultipleVideoStreams;
+    isMultiStreamSendSupportEnabled() {
+        return browser.supportsUnifiedPlan();
     }
 
     /**
@@ -64,15 +37,6 @@ class FeatureFlags {
         return this._runInLiteMode;
     }
 
-    /**
-     * Checks if the source name signaling is enabled.
-     *
-     * @returns {boolean}
-     */
-    isSourceNameSignalingEnabled() {
-        return this._sourceNameSignaling;
-    }
-
     /**
      * Checks if the clients supports re-writing of the SSRCs on the media streams by the bridge.
      * @returns {boolean}
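With the legacy toggles gone, flag initialization reduces to the two surviving inputs. A minimal sketch (import path relative to the library root; values illustrative):

    import FeatureFlags from 'modules/flags/FeatureFlags';

    FeatureFlags.init({
        runInLiteMode: false,       // keep media decoding enabled
        ssrcRewritingEnabled: true  // opt in to bridge-side SSRC rewriting
    });

    // No longer a flag: multi-stream send support now simply mirrors
    // browser.supportsUnifiedPlan().
    FeatureFlags.isMultiStreamSendSupportEnabled();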

+ 0
- 5
modules/proxyconnection/ProxyConnectionPC.js View File

@@ -221,11 +221,6 @@ export default class ProxyConnectionPC {
             addPresenceListener: () => { /* no-op */ },
             connectionTimes: [],
             eventEmitter: { emit: emitter },
-            getMediaPresenceInfo: () => {
-                // Errors occur if this function does not return an object
-
-                return {};
-            },
             removeEventListener: () => { /* no op */ },
             removePresenceListener: () => { /* no-op */ },
             supportsRestartByTerminate: () => false

+ 11
- 32
modules/qualitycontrol/ReceiveVideoController.js View File

@@ -3,7 +3,6 @@ import isEqual from 'lodash.isequal';
 
 import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
 import { MediaType } from '../../service/RTC/MediaType';
-import FeatureFlags from '../flags/FeatureFlags';
 
 const logger = getLogger(__filename);
 const MAX_HEIGHT_ONSTAGE = 2160;
@@ -215,9 +214,6 @@ export default class ReceiveVideoController {
      * @returns
      */
     _getDefaultSourceReceiverConstraints(mediaSession, maxFrameHeight) {
-        if (!FeatureFlags.isSourceNameSignalingEnabled()) {
-            return null;
-        }
         const remoteVideoTracks = mediaSession.peerconnection?.getRemoteTracks(null, MediaType.VIDEO) || [];
         const receiverConstraints = new Map();
 
@@ -334,20 +330,12 @@ export default class ReceiveVideoController {
      * @param {Object} constraints The video constraints.
      */
     setReceiverConstraints(constraints) {
-        if (!this._receiverVideoConstraints) {
-            this._receiverVideoConstraints = new ReceiverVideoConstraints();
+        if (!constraints) {
+            return;
         }
-
         const isEndpointsFormat = Object.keys(constraints).includes('onStageEndpoints', 'selectedEndpoints');
-        const isSourcesFormat = Object.keys(constraints).includes('onStageSources', 'selectedSources');
 
-        if (!FeatureFlags.isSourceNameSignalingEnabled() && isSourcesFormat) {
-            throw new Error(
-                '"onStageSources" and "selectedSources" are not supported when sourceNameSignaling is disabled.'
-            );
-        }
-
-        if (FeatureFlags.isSourceNameSignalingEnabled() && isEndpointsFormat) {
+        if (isEndpointsFormat) {
             throw new Error(
                 '"onStageEndpoints" and "selectedEndpoints" are not supported when sourceNameSignaling is enabled.'
             );
@@ -365,26 +353,17 @@ export default class ReceiveVideoController {
                 return;
             }
 
-            if (FeatureFlags.isSourceNameSignalingEnabled()) {
-                const mappedConstraints = Array.from(Object.entries(constraints.constraints))
-                    .map(constraint => {
-                        constraint[1] = constraint[1].maxHeight;
+            const mappedConstraints = Array.from(Object.entries(constraints.constraints))
+                .map(constraint => {
+                    constraint[1] = constraint[1].maxHeight;
 
-                        return constraint;
-                    });
+                    return constraint;
+                });
 
-                this._sourceReceiverConstraints = new Map(mappedConstraints);
+            this._sourceReceiverConstraints = new Map(mappedConstraints);
 
-                // Send the receiver constraints to the peer through a "content-modify" message.
-                p2pSession.setReceiverVideoConstraint(null, this._sourceReceiverConstraints);
-            } else {
-                let maxFrameHeight = Object.values(constraints.constraints)[0]?.maxHeight;
-
-                if (!maxFrameHeight) {
-                    maxFrameHeight = constraints.defaultConstraints?.maxHeight;
-                }
-                maxFrameHeight && p2pSession.setReceiverVideoConstraint(maxFrameHeight);
-            }
+            // Send the receiver constraints to the peer through a "content-modify" message.
+            p2pSession.setReceiverVideoConstraint(null, this._sourceReceiverConstraints);
         }
     }
 }
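Callers must now use the source-name format exclusively; the endpoint-based keys throw. An illustrative payload for the surviving path, sent through the JitsiConference wrapper that delegates to this controller (the source names are hypothetical):

    conference.setReceiverConstraints({
        onStageSources: [ 'A_camera_1' ],
        selectedSources: [ 'B_screen_2' ],
        defaultConstraints: { maxHeight: 180 },
        constraints: {
            'A_camera_1': { maxHeight: 720 },
            'B_screen_2': { maxHeight: 1080 }
        }
    });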

+ 0
- 33
modules/qualitycontrol/ReceiveVideoController.spec.js View File

@@ -89,37 +89,4 @@ describe('ReceiveVideoController', () => {
             }
         });
     });
-
-    describe('when sourceNameSignaling is disabled', () => {
-        beforeEach(() => {
-            FeatureFlags.init({ sourceNameSignaling: false });
-        });
-
-        it('should call setNewReceiverVideoConstraints with the endpoints format.', () => {
-            const rtcSpy = spyOn(rtc, 'setNewReceiverVideoConstraints');
-            const constraints = {
-                onStageEndpoints: [ 'A', 'B', 'C' ],
-                selectedEndpoints: [ 'A' ]
-            };
-
-            receiveVideoController.setReceiverConstraints(constraints);
-            expect(rtcSpy).toHaveBeenCalledWith(constraints);
-        });
-
-        it('should not allow the source names format.', () => {
-            const constraints = {
-                onStageSources: [ 'A_camera_1', 'B_screen_2', 'C_camera_1' ],
-                selectedSources: [ 'A_camera_1' ]
-            };
-
-            try {
-                receiveVideoController.setReceiverConstraints(constraints);
-                fail();
-            } catch (error) {
-                expect(error).toEqual(new Error(
-                    '"onStageSources" and "selectedSources" are not supported when sourceNameSignaling is disabled.'
-                ));
-            }
-        });
-    });
 });

+ 32
- 59
modules/qualitycontrol/SendVideoController.js View File

@@ -2,7 +2,6 @@ import { getLogger } from '@jitsi/logger';
 
 import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
 import RTCEvents from '../../service/RTC/RTCEvents';
-import FeatureFlags from '../flags/FeatureFlags';
 import MediaSessionEvents from '../xmpp/MediaSessionEvents';
 
 const logger = getLogger(__filename);
@@ -52,14 +51,10 @@ export default class SendVideoController {
      * @private
      */
     _configureConstraintsForLocalSources() {
-        if (FeatureFlags.isSourceNameSignalingEnabled()) {
-            for (const track of this._rtc.getLocalVideoTracks()) {
-                const sourceName = track.getSourceName();
+        for (const track of this._rtc.getLocalVideoTracks()) {
+            const sourceName = track.getSourceName();
 
-                sourceName && this._propagateSendMaxFrameHeight(sourceName);
-            }
-        } else {
-            this._propagateSendMaxFrameHeight();
+            sourceName && this._propagateSendMaxFrameHeight(sourceName);
         }
     }
 
@@ -72,19 +67,12 @@ export default class SendVideoController {
      * @private
      */
     _onMediaSessionStarted(mediaSession) {
-        if (FeatureFlags.isSourceNameSignalingEnabled()) {
-            mediaSession.addListener(
-                MediaSessionEvents.REMOTE_SOURCE_CONSTRAINTS_CHANGED,
-                (session, sourceConstraints) => {
-                    session === this._conference.getActiveMediaSession()
-                        && sourceConstraints.forEach(constraint => this._onSenderConstraintsReceived(constraint));
-                });
-        } else {
-            mediaSession.addListener(
-                MediaSessionEvents.REMOTE_VIDEO_CONSTRAINTS_CHANGED,
-                session => session === this._conference.getActiveMediaSession()
-                    && this._configureConstraintsForLocalSources());
-        }
+        mediaSession.addListener(
+            MediaSessionEvents.REMOTE_SOURCE_CONSTRAINTS_CHANGED,
+            (session, sourceConstraints) => {
+                session === this._conference.getActiveMediaSession()
+                    && sourceConstraints.forEach(constraint => this._onSenderConstraintsReceived(constraint));
+            });
     }
 
     /**
@@ -95,27 +83,21 @@
      * @private
      */
     _onSenderConstraintsReceived(videoConstraints) {
-        if (FeatureFlags.isSourceNameSignalingEnabled()) {
-            const { maxHeight, sourceName } = videoConstraints;
-            const localVideoTracks = this._conference.getLocalVideoTracks() ?? [];
-
-            for (const track of localVideoTracks) {
-                // Propagate the sender constraint only if it has changed.
-                if (track.getSourceName() === sourceName
-                    && (!this._sourceSenderConstraints.has(sourceName)
-                    || this._sourceSenderConstraints.get(sourceName) !== maxHeight)) {
-                    this._sourceSenderConstraints.set(
-                        sourceName,
-                        maxHeight === -1
-                            ? Math.min(MAX_LOCAL_RESOLUTION, this._preferredSendMaxFrameHeight)
-                            : maxHeight);
-                    logger.debug(`Sender constraints for source:${sourceName} changed to maxHeight:${maxHeight}`);
-                    this._propagateSendMaxFrameHeight(sourceName);
-                }
+        const { maxHeight, sourceName } = videoConstraints;
+        const localVideoTracks = this._conference.getLocalVideoTracks() ?? [];
+
+        for (const track of localVideoTracks) {
+            // Propagate the sender constraint only if it has changed.
+            if (track.getSourceName() === sourceName
+                && this._sourceSenderConstraints.get(sourceName) !== maxHeight) {
+                this._sourceSenderConstraints.set(
+                    sourceName,
+                    maxHeight === -1
+                        ? Math.min(MAX_LOCAL_RESOLUTION, this._preferredSendMaxFrameHeight)
+                        : maxHeight);
+                logger.debug(`Sender constraints for source:${sourceName} changed to maxHeight:${maxHeight}`);
+                this._propagateSendMaxFrameHeight(sourceName);
             }
-        } else if (this._senderVideoConstraints?.idealHeight !== videoConstraints.idealHeight) {
-            this._senderVideoConstraints = videoConstraints;
-            this._propagateSendMaxFrameHeight();
         }
     }
 
@@ -127,8 +109,8 @@
      * @returns {Promise<void[]>}
      * @private
      */
-    _propagateSendMaxFrameHeight(sourceName = null) {
-        if (FeatureFlags.isSourceNameSignalingEnabled() && !sourceName) {
+    _propagateSendMaxFrameHeight(sourceName) {
+        if (!sourceName) {
             throw new Error('sourceName missing for calculating the sendMaxHeight for video tracks');
         }
         const sendMaxFrameHeight = this._selectSendMaxFrameHeight(sourceName);
@@ -151,17 +133,13 @@
      * @returns {number|undefined}
      * @private
     */
-    _selectSendMaxFrameHeight(sourceName = null) {
-        if (FeatureFlags.isSourceNameSignalingEnabled() && !sourceName) {
+    _selectSendMaxFrameHeight(sourceName) {
+        if (!sourceName) {
            throw new Error('sourceName missing for calculating the sendMaxHeight for video tracks');
         }
         const activeMediaSession = this._conference.getActiveMediaSession();
         const remoteRecvMaxFrameHeight = activeMediaSession
-            ? activeMediaSession.isP2P
-                ? sourceName
-                    ? this._sourceSenderConstraints.get(sourceName)
-                    : activeMediaSession.getRemoteRecvMaxFrameHeight()
-                : sourceName ? this._sourceSenderConstraints.get(sourceName) : this._senderVideoConstraints?.idealHeight
+            ? this._sourceSenderConstraints.get(sourceName)
             : undefined;
 
         if (this._preferredSendMaxFrameHeight >= 0 && remoteRecvMaxFrameHeight >= 0) {
@@ -181,17 +159,12 @@
      */
     setPreferredSendMaxFrameHeight(maxFrameHeight) {
         this._preferredSendMaxFrameHeight = maxFrameHeight;
+        const promises = [];
 
-        if (FeatureFlags.isSourceNameSignalingEnabled()) {
-            const promises = [];
-
-            for (const sourceName of this._sourceSenderConstraints.keys()) {
-                promises.push(this._propagateSendMaxFrameHeight(sourceName));
-            }
-
-            return Promise.allSettled(promises);
+        for (const sourceName of this._sourceSenderConstraints.keys()) {
+            promises.push(this._propagateSendMaxFrameHeight(sourceName));
         }
 
-        return this._propagateSendMaxFrameHeight();
+        return Promise.allSettled(promises);
     }
 }
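Sender constraints likewise arrive per source only. A sketch of the surviving flow, with a hypothetical bridge message in the shape consumed by _onSenderConstraintsReceived() above:

    // Delivered via MediaSessionEvents.REMOTE_SOURCE_CONSTRAINTS_CHANGED for the
    // active media session:
    const videoConstraints = { maxHeight: 360, sourceName: 'A_camera_1' };

    // maxHeight === -1 means "no remote limit": the cached constraint becomes
    // Math.min(MAX_LOCAL_RESOLUTION, this._preferredSendMaxFrameHeight), and
    // _propagateSendMaxFrameHeight('A_camera_1') runs only when the cached value
    // for that source actually changed.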

+ 0
- 153
modules/qualitycontrol/SendVideoController.spec.js View File

@@ -1,153 +0,0 @@
-import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
-import RTCEvents from '../../service/RTC/RTCEvents';
-import FeatureFlags from '../flags/FeatureFlags';
-import Listenable from '../util/Listenable';
-import MediaSessionEvents from '../xmpp/MediaSessionEvents';
-
-import SendVideoController from './SendVideoController';
-
-// JSDocs disabled for Mock classes to avoid duplication - check on the original classes for info.
-/* eslint-disable require-jsdoc */
-/**
- * A mock JingleSessionPC impl that fit the needs of the SendVideoController module.
- * Should a generic, shared one exist in the future this test file should switch to use it too.
- */
-class MockJingleSessionPC extends Listenable {
-    constructor(rtc, isP2P) {
-        super();
-        this.rtc = rtc;
-        this.isP2P = isP2P;
-        this._remoteRecvMaxFrameHeight = undefined;
-        this.senderVideoConstraint = undefined;
-    }
-
-    getRemoteRecvMaxFrameHeight() {
-        return this._remoteRecvMaxFrameHeight;
-    }
-
-    setSenderVideoConstraint(senderVideoConstraint) {
-        this.senderVideoConstraint = senderVideoConstraint;
-    }
-
-    setRemoteRecvMaxFrameHeight(remoteRecvMaxFrameHeight) {
-        this._remoteRecvMaxFrameHeight = remoteRecvMaxFrameHeight;
-        if (this.isP2P) {
-            this.eventEmitter.emit(
-                MediaSessionEvents.REMOTE_VIDEO_CONSTRAINTS_CHANGED,
-                this);
-        } else {
-            this.rtc.eventEmitter.emit(
-                RTCEvents.SENDER_VIDEO_CONSTRAINTS_CHANGED,
-                { idealHeight: remoteRecvMaxFrameHeight });
-        }
-    }
-}
-
-/**
- * Mock conference for the purpose of this test file.
- */
-class MockConference extends Listenable {
-    /**
-     * A constructor...
-     */
-    constructor() {
-        super();
-        this.options = {};
-        this.activeMediaSession = undefined;
-        this.mediaSessions = [];
-    }
-
-    addMediaSession(mediaSession) {
-        this.mediaSessions.push(mediaSession);
-
-        this.eventEmitter.emit(JitsiConferenceEvents._MEDIA_SESSION_STARTED, mediaSession);
-    }
-
-    setActiveMediaSession(mediaSession) {
-        if (this.mediaSessions.indexOf(mediaSession) === -1) {
-            throw new Error('Given session is not part of this conference');
-        }
-
-        this.activeMediaSession = mediaSession;
-
-        this.eventEmitter.emit(JitsiConferenceEvents._MEDIA_SESSION_ACTIVE_CHANGED, this.activeMediaSession);
-    }
-
-    getActiveMediaSession() {
-        return this.activeMediaSession;
-    }
-
-    getLocalVideoTracks() {
-        return [];
-    }
-
-    getMediaSessions() {
-        return this.mediaSessions;
-    }
-}
-
-/**
- * Mock {@link RTC} - add things as needed, but only things useful for all tests.
- */
-export class MockRTC extends Listenable {
-    /**
-     * constructor
-     */
-    /* eslint-disable no-useless-constructor */
-    constructor() {
-        super();
-    }
-}
-
-/* eslint-enable require-jsdoc */
-
-describe('SendVideoController', () => {
-    let conference;
-    let rtc;
-    let sendVideoController;
-    let jvbConnection;
-    let p2pConnection;
-
-    beforeEach(() => {
-        conference = new MockConference();
-        rtc = new MockRTC();
-        FeatureFlags.init({ sourceNameSignaling: false });
-        sendVideoController = new SendVideoController(conference, rtc);
-        jvbConnection = new MockJingleSessionPC(rtc, false /* isP2P */);
-        p2pConnection = new MockJingleSessionPC(rtc, true /* isP2P */);
-
-        conference.addMediaSession(jvbConnection);
-        conference.addMediaSession(p2pConnection);
-    });
-    describe('handles 0 as receiver/sender video constraint', () => {
-        it('0 if it\'s the active sessions\'s remote recv constraint', () => {
-            jvbConnection.setRemoteRecvMaxFrameHeight(0);
-            p2pConnection.setRemoteRecvMaxFrameHeight(720);
-
-            conference.setActiveMediaSession(jvbConnection);
-
-            expect(jvbConnection.senderVideoConstraint).toBe(0);
-            expect(p2pConnection.senderVideoConstraint).toBe(0);
-        });
-        it('720 if 0 is set on the non-active session', () => {
-            jvbConnection.setRemoteRecvMaxFrameHeight(0);
-            p2pConnection.setRemoteRecvMaxFrameHeight(720);
-
-            conference.setActiveMediaSession(p2pConnection);
-
-            expect(jvbConnection.senderVideoConstraint).toBe(720);
-            expect(p2pConnection.senderVideoConstraint).toBe(720);
-        });
-        it('0 if it\'s the local send preference while remote are 720', () => {
-            conference.setActiveMediaSession(p2pConnection);
-
-            jvbConnection.setRemoteRecvMaxFrameHeight(720);
-            p2pConnection.setRemoteRecvMaxFrameHeight(720);
-
-            sendVideoController.setPreferredSendMaxFrameHeight(0);
-
-            expect(jvbConnection.senderVideoConstraint).toBe(0);
-            expect(p2pConnection.senderVideoConstraint).toBe(0);
-        });
-    });
-});

+ 18
- 23
modules/sdp/LocalSdpMunger.js View File

@@ -211,27 +211,26 @@ export default class LocalSdpMunger {
                     let streamId = streamAndTrackIDs[0];
                     const trackId = streamAndTrackIDs[1];
 
-                    if (FeatureFlags.isSourceNameSignalingEnabled()) {
-                        // Always overwrite streamId since we want the msid to be in this format even if the browser
-                        // generates one (in p2p mode).
-                        streamId = `${this.localEndpointId}-${mediaType}`;
+                    // Always overwrite streamId since we want the msid to be in this format even if the browser
+                    // generates one (in p2p mode).
+                    streamId = `${this.localEndpointId}-${mediaType}`;
 
+                    // eslint-disable-next-line max-depth
+                    if (mediaType === MediaType.VIDEO) {
                         // eslint-disable-next-line max-depth
-                        if (mediaType === MediaType.VIDEO) {
-                            // eslint-disable-next-line max-depth
-                            if (!this.videoSourcesToMsidMap.has(trackId)) {
-                                streamId = `${streamId}-${this.videoSourcesToMsidMap.size}`;
-                                this.videoSourcesToMsidMap.set(trackId, streamId);
-                            }
-                        } else if (!this.audioSourcesToMsidMap.has(trackId)) {
-                            streamId = `${streamId}-${this.audioSourcesToMsidMap.size}`;
-                            this.audioSourcesToMsidMap.set(trackId, streamId);
+                        if (!this.videoSourcesToMsidMap.has(trackId)) {
+                            streamId = `${streamId}-${this.videoSourcesToMsidMap.size}`;
+                            this.videoSourcesToMsidMap.set(trackId, streamId);
                         }
-
-                        streamId = mediaType === MediaType.VIDEO
-                            ? this.videoSourcesToMsidMap.get(trackId)
-                            : this.audioSourcesToMsidMap.get(trackId);
+                    } else if (!this.audioSourcesToMsidMap.has(trackId)) {
+                        streamId = `${streamId}-${this.audioSourcesToMsidMap.size}`;
+                        this.audioSourcesToMsidMap.set(trackId, streamId);
                     }
+
+                    streamId = mediaType === MediaType.VIDEO
+                        ? this.videoSourcesToMsidMap.get(trackId)
+                        : this.audioSourcesToMsidMap.get(trackId);
+
                     ssrcLine.value = this._generateMsidAttribute(mediaType, trackId, streamId);
                 } else {
                     logger.warn(`Unable to munge local MSID - weird format detected: ${ssrcLine.value}`);
@@ -344,7 +343,7 @@ export default class LocalSdpMunger {
 
         const videoMlines = transformer.selectMedia(MediaType.VIDEO);
 
-        if (!FeatureFlags.isMultiStreamSupportEnabled()) {
+        if (!FeatureFlags.isMultiStreamSendSupportEnabled()) {
             videoMlines.splice(1);
         }
 
@@ -355,7 +354,7 @@ export default class LocalSdpMunger {
 
         // Plan-b clients generate new SSRCs and trackIds whenever tracks are removed and added back to the
         // peerconnection, therefore local track based map for msids needs to be reset after every transformation.
-        if (FeatureFlags.isSourceNameSignalingEnabled() && !this.tpc._usesUnifiedPlan) {
+        if (!this.tpc._usesUnifiedPlan) {
             this.audioSourcesToMsidMap.clear();
            this.videoSourcesToMsidMap.clear();
         }
@@ -377,10 +376,6 @@ export default class LocalSdpMunger {
      * @private
      */
     _injectSourceNames(mediaSection) {
-        if (!FeatureFlags.isSourceNameSignalingEnabled()) {
-            return;
-        }
-
         const sources = [ ...new Set(mediaSection.mLine?.ssrcs?.map(s => s.id)) ];
         const mediaType = mediaSection.mLine?.type;
 
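Since local descriptions are now always munged, the msid deterministically encodes the endpoint ID, media type, source index and peerconnection ID, even for browser-generated p2p streams. For endpoint 'sRdpsdg', the first video source on peerconnection 1 comes out roughly as follows (format inferred from the spec changes below; the ssrc value is invented):

    // a=ssrc:2002 msid:sRdpsdg-video-0-1 <trackId>
    //             i.e. `${endpointId}-${mediaType}-${sourceIndex}-${tpc.id}`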

+ 4
- 26
modules/sdp/LocalSdpMunger.spec.js View File

@@ -26,7 +26,7 @@ describe('TransformSdpsForUnifiedPlan', () => {
     const localEndpointId = 'sRdpsdg';
 
     beforeEach(() => {
-        FeatureFlags.init({ sourceNameSignaling: false });
+        FeatureFlags.init({ });
         localSdpMunger = new LocalSdpMunger(tpc, localEndpointId);
     });
     describe('dontStripSsrcs', () => {
@@ -43,8 +43,8 @@ describe('TransformSdpsForUnifiedPlan', () => {
             const audioSsrcs = getSsrcLines(newSdp, 'audio');
             const videoSsrcs = getSsrcLines(newSdp, 'video');
 
-            expect(audioSsrcs.length).toEqual(1);
-            expect(videoSsrcs.length).toEqual(1);
+            expect(audioSsrcs.length).toEqual(2);
+            expect(videoSsrcs.length).toEqual(2);
         });
 
         describe('should do nothing to an sdp with msid', () => {
@@ -63,14 +63,7 @@ describe('TransformSdpsForUnifiedPlan', () => {
                 videoSsrcs = getSsrcLines(newSdp, 'video');
             };
 
-            it('without source name signaling enabled (no injected source name)', () => {
-                transformStreamIdentifiers();
-
-                expect(audioSsrcs.length).toEqual(4);
-                expect(videoSsrcs.length).toEqual(6);
-            });
             it('with source name signaling enabled (injected source name)', () => {
-                FeatureFlags.init({ });
                 transformStreamIdentifiers();
 
                 expect(audioSsrcs.length).toEqual(4 + 1 /* injected source name */);
@@ -95,7 +88,7 @@ describe('TransformSdpsForUnifiedPlan', () => {
                 if (ssrcLine.attribute === 'msid') {
                     const msid = ssrcLine.value.split(' ')[0];
 
-                    expect(msid).toBe(`${localEndpointId}-video-${tpc.id}`);
+                    expect(msid).toBe(`${localEndpointId}-video-0-${tpc.id}`);
                 }
             }
         });
@@ -125,7 +118,6 @@ describe('DoNotTransformSdpForPlanB', () => {
     const localEndpointId = 'sRdpsdg';
 
     beforeEach(() => {
-        FeatureFlags.init({ sourceNameSignaling: false });
         localSdpMunger = new LocalSdpMunger(tpc, localEndpointId);
     });
     describe('stripSsrcs', () => {
@@ -147,12 +139,6 @@ describe('DoNotTransformSdpForPlanB', () => {
                 videoSsrcs = getSsrcLines(newSdp, 'video');
             };
 
-            it('without source name signaling', () => {
-                transformStreamIdentifiers();
-
-                expect(audioSsrcs.length).toEqual(1);
-                expect(videoSsrcs.length).toEqual(1);
-            });
             it('with source name signaling', () => {
                 FeatureFlags.init({ });
                 transformStreamIdentifiers();
@@ -185,14 +171,6 @@ describe('Transform msids for source-name signaling', () => {
         videoMsid = videoMsidLine.split(' ')[0];
     };
 
-    it('should not transform', () => {
-        FeatureFlags.init({ sourceNameSignaling: false });
-        transformStreamIdentifiers();
-
-        expect(audioMsid).toBe('dcbb0236-cea5-402e-9e9a-595c65ffcc2a-1');
-        expect(videoMsid).toBe('0836cc8e-a7bb-47e9-affb-0599414bc56d-1');
-    });
-
     it('should transform', () => {
         FeatureFlags.init({ });
         transformStreamIdentifiers();

+ 1 - 2 modules/sdp/SDP.js

@@ -4,7 +4,6 @@ import transform from 'sdp-transform';
 
 import { MediaDirection } from '../../service/RTC/MediaDirection';
 import browser from '../browser';
-import FeatureFlags from '../flags/FeatureFlags';
 
 import SDPUtil from './SDPUtil';
 
@@ -245,7 +244,7 @@ SDP.prototype.toJingle = function(elem, thecreator) {
 
                     elem.c('source', {
                         ssrc: availableSsrc,
-                        name: FeatureFlags.isSourceNameSignalingEnabled() ? sourceName : undefined,
+                        name: sourceName,
                         videoType,
                         xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0'
                     });

+ 1 - 2 modules/sdp/SDPDiffer.js

@@ -1,4 +1,3 @@
-import FeatureFlags from '../flags/FeatureFlags';
 
 import SDPUtil from './SDPUtil';
 
@@ -175,7 +174,7 @@ SDPDiffer.prototype.toJingle = function(modify) {
 
             modify.c('source', { xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
             modify.attrs({
-                name: FeatureFlags.isSourceNameSignalingEnabled() ? sourceName : undefined,
+                name: sourceName,
                 videoType,
                 ssrc: mediaSsrc.ssrc
             });

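Note: in both SDP.js and SDPDiffer.js the `name` attribute on the Jingle `source` extension is now set unconditionally instead of being gated on the feature flag. A sketch of the resulting builder call (the ssrc and source-name values here are illustrative):

    import { $build } from 'strophe.js';

    // The source name is always signaled now; remote endpoints key
    // per-source state (mute, videoType) off this name.
    const node = $build('source', {
        xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0',
        ssrc: 257838819,
        name: 'sRdpsdg-v0',
        videoType: 'camera'
    });
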
+ 0 - 91 modules/xmpp/ChatRoom.js

@@ -4,8 +4,6 @@ import isEqual from 'lodash.isequal';
 import { $iq, $msg, $pres, Strophe } from 'strophe.js';
 
 import * as JitsiTranscriptionStatus from '../../JitsiTranscriptionStatus';
-import { MediaType } from '../../service/RTC/MediaType';
-import { VideoType } from '../../service/RTC/VideoType';
 import { XMPPEvents } from '../../service/xmpp/XMPPEvents';
 import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
 import Listenable from '../util/Listenable';
@@ -1593,95 +1591,6 @@ export default class ChatRoom extends Listenable {
         return null;
     }
 
-    /**
-     *
-     * @param mute
-     */
-    addAudioInfoToPresence(mute) {
-        const audioMutedTagName = 'audiomuted';
-
-        // we skip adding it as muted is default value
-        if (mute && !this.getFromPresence(audioMutedTagName)) {
-            return false;
-        }
-
-        return this.addOrReplaceInPresence(
-            audioMutedTagName,
-            {
-                value: mute.toString()
-            });
-    }
-
-    /**
-     *
-     * @param mute
-     */
-    addVideoInfoToPresence(mute) {
-        const videoMutedTagName = 'videomuted';
-
-        // we skip adding it as muted is default value
-        if (mute && !this.getFromPresence(videoMutedTagName)) {
-            return false;
-        }
-
-        return this.addOrReplaceInPresence(
-            videoMutedTagName,
-            {
-                value: mute.toString()
-            });
-    }
-
-    /**
-     * Obtains the info about given media advertised in the MUC presence of
-     * the participant identified by the given endpoint JID.
-     * @param {string} endpointId the endpoint ID mapped to the participant
-     * which corresponds to MUC nickname.
-     * @param {MediaType} mediaType the type of the media for which presence
-     * info will be obtained.
-     * @return {PeerMediaInfo} presenceInfo an object with media presence
-     * info or <tt>null</tt> either if there is no presence available or if
-     * the media type given is invalid.
-     */
-    getMediaPresenceInfo(endpointId, mediaType) {
-        // Will figure out current muted status by looking up owner's presence
-        const pres = this.lastPresences[`${this.roomjid}/${endpointId}`];
-
-        if (!pres) {
-            // No presence available
-            return null;
-        }
-        const data = {
-            muted: true, // muted by default
-            videoType: mediaType === MediaType.VIDEO ? VideoType.CAMERA : undefined // 'camera' by default
-        };
-        let mutedNode = null;
-
-        if (mediaType === MediaType.AUDIO) {
-            mutedNode = filterNodeFromPresenceJSON(pres, 'audiomuted');
-        } else if (mediaType === MediaType.VIDEO) {
-            mutedNode = filterNodeFromPresenceJSON(pres, 'videomuted');
-            const codecTypeNode = filterNodeFromPresenceJSON(pres, 'jitsi_participant_codecType');
-            const videoTypeNode = filterNodeFromPresenceJSON(pres, 'videoType');
-
-            if (videoTypeNode.length > 0) {
-                data.videoType = videoTypeNode[0].value;
-            }
-            if (codecTypeNode.length > 0) {
-                data.codecType = codecTypeNode[0].value;
-            }
-        } else {
-            logger.error(`Unsupported media type: ${mediaType}`);
-
-            return null;
-        }
-
-        if (mutedNode.length > 0) {
-            data.muted = mutedNode[0].value === 'true';
-        }
-
-        return data;
-    }
-
     /**
      * Returns the last presence advertised by a MUC member.
      * @param {string} mucNick

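Note: with `addAudioInfoToPresence`, `addVideoInfoToPresence` and `getMediaPresenceInfo` removed, mute and video-type state no longer travels in dedicated presence tags; it lives in the SourceInfo presence element keyed by source name. Judging from the spec fixtures later in this diff, the payload is shaped roughly like:

    // SourceInfo payload keyed by source name: '<endpointId>-a0' for the
    // first audio source, '<endpointId>-v0' for the first video source.
    const sourceInfo = {
        '12345678-a0': { muted: true },
        '12345678-v0': { muted: true, videoType: 'desktop' }
    };
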
+ 1 - 2 modules/xmpp/JingleHelperFunctions.js

@@ -3,7 +3,6 @@ import $ from 'jquery';
 import { $build } from 'strophe.js';
 
 import { MediaType } from '../../service/RTC/MediaType';
-import FeatureFlags from '../flags/FeatureFlags';
 
 const logger = getLogger(__filename);
 
@@ -17,7 +16,7 @@ function _createSourceExtension(owner, sourceCompactJson) {
     const node = $build('source', {
         xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0',
        ssrc: sourceCompactJson.s,
-        name: FeatureFlags.isSourceNameSignalingEnabled() ? sourceCompactJson.n : undefined
+        name: sourceCompactJson.n
    });
 
     if (sourceCompactJson.m) {

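Note: `_createSourceExtension` reads the compact source JSON signaled over the wire; `s` and `n` map to the `ssrc` and `name` attributes above, and `m` (checked further down in the function) appears to carry the msid. An illustrative input, assuming that reading of the field names:

    // Compact source JSON as consumed by _createSourceExtension
    // (field meanings inferred from the surrounding code).
    const sourceCompactJson = {
        s: 257838819,          // ssrc
        n: 'sRdpsdg-v0',       // source name, now signaled unconditionally
        m: 'sRdpsdg-video-0-1' // msid (optional)
    };
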
+ 10 - 18 modules/xmpp/JingleSessionPC.js

@@ -973,12 +973,10 @@ export default class JingleSessionPC extends JingleSession {
         ssrcs.each((i, ssrcElement) => {
             const ssrc = Number(ssrcElement.getAttribute('ssrc'));
 
-            if (FeatureFlags.isSourceNameSignalingEnabled()) {
-                if (ssrcElement.hasAttribute('name')) {
-                    const sourceName = ssrcElement.getAttribute('name');
+            if (ssrcElement.hasAttribute('name')) {
+                const sourceName = ssrcElement.getAttribute('name');
 
-                    this._signalingLayer.setTrackSourceName(ssrc, sourceName);
-                }
+                this._signalingLayer.setTrackSourceName(ssrc, sourceName);
             }
 
             if (this.isP2P) {
@@ -1055,7 +1053,7 @@ export default class JingleSessionPC extends JingleSession {
                     const videoTracks = localTracks.filter(track => track.getType() === MediaType.VIDEO);
 
                     videoTracks.length && videoTracks.splice(0, 1);
-                    if (FeatureFlags.isMultiStreamSupportEnabled() && videoTracks.length) {
+                    if (FeatureFlags.isMultiStreamSendSupportEnabled() && videoTracks.length) {
                         this.addTracks(videoTracks);
                     }
                 },
@@ -1211,7 +1209,7 @@ export default class JingleSessionPC extends JingleSession {
             // Add only 1 video track at a time. Adding 2 or more video tracks to the peerconnection at the same time
             // makes the browser go into a renegotiation loop by firing 'negotiationneeded' event after every
             // renegotiation.
-            if (FeatureFlags.isMultiStreamSupportEnabled() && videoTracks.length > 1) {
+            if (FeatureFlags.isMultiStreamSendSupportEnabled() && videoTracks.length > 1) {
                 tracks = [ ...audioTracks, videoTracks[0] ];
             }
             for (const track of tracks) {
@@ -1526,11 +1524,7 @@ export default class JingleSessionPC extends JingleSession {
         logger.info(`${this} setReceiverVideoConstraint - max frame height: ${maxFrameHeight}`
             + ` sourceReceiverConstraints: ${sourceReceiverConstraints}`);
 
-        if (FeatureFlags.isSourceNameSignalingEnabled()) {
-            this._sourceReceiverConstraints = sourceReceiverConstraints;
-        } else {
-            this.localRecvMaxFrameHeight = maxFrameHeight;
-        }
+        this._sourceReceiverConstraints = sourceReceiverConstraints;
 
         if (this.isP2P) {
             // Tell the remote peer about our receive constraint. If Jingle session is not yet active the state will
@@ -1819,7 +1813,7 @@ export default class JingleSessionPC extends JingleSession {
 
             // In p2p unified mode with multi-stream enabled, the new sources will have content name that doesn't exist
             // in the current remote description. Add a new m-line for this newly signaled source.
-            if (!midFound && this.isP2P && FeatureFlags.isSourceNameSignalingEnabled()) {
+            if (!midFound && this.isP2P) {
                 addSsrcInfo[name] = lines;
             }
         });
@@ -2124,7 +2118,7 @@ export default class JingleSessionPC extends JingleSession {
                 // Reject the m-line so that the browser removes the associated transceiver from the list of available
                 // transceivers. This will prevent the client from trying to re-use these inactive transceivers when
                 // additional video sources are added to the peerconnection.
-                if (mid > -1 && !this.isP2P && FeatureFlags.isMultiStreamSupportEnabled()) {
+                if (mid > -1 && !this.isP2P && FeatureFlags.isMultiStreamSendSupportEnabled()) {
                     const { media, port } = SDPUtil.parseMLine(remoteSdp.media[mid].split('\r\n')[0]);
 
                     remoteSdp.media[mid] = remoteSdp.media[mid].replace(`m=${media} ${port}`, `m=${media} 0`);
@@ -2153,7 +2147,6 @@ export default class JingleSessionPC extends JingleSession {
         // Add a new m-line in the remote description if the source info for a secondary video source is recceived from
         // the remote p2p peer when multi-stream support is enabled.
         if (addSsrcInfo.length > remoteSdp.media.length
-            && FeatureFlags.isSourceNameSignalingEnabled()
             && this.isP2P
             && this.usesUnifiedPlan) {
             remoteSdp.addMlineForNewLocalSource(MediaType.VIDEO);
@@ -2274,7 +2267,7 @@ export default class JingleSessionPC extends JingleSession {
      * otherwise.
      */
     addTracks(localTracks = null) {
-        if (!FeatureFlags.isMultiStreamSupportEnabled()
+        if (!FeatureFlags.isMultiStreamSendSupportEnabled()
             || !localTracks?.length
             || localTracks.find(track => track.getType() !== MediaType.VIDEO)) {
             return Promise.reject(new Error('Multiple tracks of the given media type are not supported'));
@@ -2416,8 +2409,7 @@ export default class JingleSessionPC extends JingleSession {
 
                     return promise.then(() => {
                         // Set the source name of the new track.
-                        if (FeatureFlags.isSourceNameSignalingEnabled()
-                            && oldTrack
+                        if (oldTrack
                             && newTrack
                             && oldTrack.isVideoTrack()) {
                             newTrack.setSourceName(oldTrack.getSourceName());

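Note: `addTracks` now rejects anything other than a non-empty list of video tracks, and the session-accept path negotiates only one video track up front, feeding the rest through `addTracks` to dodge the 'negotiationneeded' renegotiation loop mentioned in the comment above. A hypothetical caller-side sketch of that flow (names as in the diff):

    // Add secondary camera/desktop tracks one negotiation cycle after accept.
    const videoTracks = localTracks.filter(t => t.getType() === MediaType.VIDEO);

    if (FeatureFlags.isMultiStreamSendSupportEnabled() && videoTracks.length > 1) {
        jingleSession.addTracks(videoTracks.slice(1))
            .catch(error => logger.error(`Failed to add extra video tracks: ${error}`));
    }
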
+ 0 - 80 modules/xmpp/JingleSessionPC.spec.js

@@ -8,23 +8,6 @@ import * as JingleSessionState from './JingleSessionState';
 import MediaSessionEvents from './MediaSessionEvents';
 import { MockChatRoom, MockStropheConnection } from './MockClasses';
 
-/**
- * Creates 'content-modify' Jingle IQ.
- * @param {string} senders - 'both' or 'none'.
- * @param {number|undefined} maxFrameHeight - the receive max video frame height.
- * @returns {jQuery}
- */
-function createContentModify(senders = 'both', maxFrameHeight) {
-    const modifyContentsIq = $.parseXML(
-        '<jingle action="content-modify" initiator="peer2" sid="sid12345" xmlns="urn:xmpp:jingle:1">'
-        + `<content name="video" senders="${senders}">`
-        + `<max-frame-height xmlns="http://jitsi.org/jitmeet/video">${maxFrameHeight}</max-frame-height>`
-        + '</content>'
-        + '</jingle>');
-
-    return $(modifyContentsIq).find('>jingle');
-}
-
 /**
  * Creates 'content-modify' Jingle IQ.
  * @returns {jQuery}
@@ -80,69 +63,6 @@ describe('JingleSessionPC', () => {
         // connection.connect('jid', undefined, () => { }); */
     });
 
-    describe('send/receive video constraints w/o source-name', () => {
-        beforeEach(() => {
-            FeatureFlags.init({ sourceNameSignaling: false });
-        });
-
-        it('sends content-modify with recv frame size', () => {
-            const sendIQSpy = spyOn(connection, 'sendIQ').and.callThrough();
-
-            jingleSession.setReceiverVideoConstraint(180);
-
-            expect(jingleSession.getState()).toBe(JingleSessionState.PENDING);
-
-            return new Promise((resolve, reject) => {
-                jingleSession.acceptOffer(
-                    offerIQ,
-                    resolve,
-                    reject,
-                    /* local tracks */ []);
-            }).then(() => {
-                expect(jingleSession.getState()).toBe(JingleSessionState.ACTIVE);
-
-                // FIXME content-modify is sent before session-accept
-                expect(sendIQSpy.calls.count()).toBe(2);
-
-                expect(sendIQSpy.calls.first().args[0].toString()).toBe(
-                    '<iq to="peer2" type="set" xmlns="jabber:client">'
-                    + '<jingle action="content-modify" initiator="peer2" sid="sid12345" xmlns="urn:xmpp:jingle:1">'
-                    + '<content name="video" senders="both">'
-                    + '<max-frame-height xmlns="http://jitsi.org/jitmeet/video">180</max-frame-height>'
-                    + '</content>'
-                    + '</jingle>'
-                    + '</iq>');
-            });
-        });
-        it('fires an event when remote peer sends content-modify', () => {
-            let remoteRecvMaxFrameHeight;
-            const remoteVideoConstraintsListener = session => {
-                remoteRecvMaxFrameHeight = session.getRemoteRecvMaxFrameHeight();
-            };
-
-            jingleSession.addListener(
-                MediaSessionEvents.REMOTE_VIDEO_CONSTRAINTS_CHANGED,
-                remoteVideoConstraintsListener);
-
-            return new Promise((resolve, reject) => {
-                jingleSession.acceptOffer(
-                    offerIQ,
-                    resolve,
-                    reject,
-                    /* local tracks */ []);
-            }).then(() => {
-                jingleSession.modifyContents(createContentModify('both', 180));
-                expect(remoteRecvMaxFrameHeight).toBe(180);
-
-                jingleSession.modifyContents(createContentModify('both', 360));
-                expect(remoteRecvMaxFrameHeight).toBe(360);
-
-                jingleSession.modifyContents(createContentModify('both', 180));
-                expect(remoteRecvMaxFrameHeight).toBe(180);
-            });
-        });
-    });
-
     describe('send/receive video constraints w/ source-name', () => {
         beforeEach(() => {
             FeatureFlags.init({ sourceNameSignaling: true });

+ 43 - 98 modules/xmpp/SignalingLayerImpl.js

@@ -6,7 +6,6 @@ import * as SignalingEvents from '../../service/RTC/SignalingEvents';
 import SignalingLayer, { getMediaTypeFromSourceName } from '../../service/RTC/SignalingLayer';
 import { VideoType } from '../../service/RTC/VideoType';
 import { XMPPEvents } from '../../service/xmpp/XMPPEvents';
-import FeatureFlags from '../flags/FeatureFlags';
 
 import { filterNodeFromPresenceJSON } from './ChatRoom';
 
@@ -108,44 +107,14 @@ export default class SignalingLayerImpl extends SignalingLayer {
                 'videomuted', this._videoMuteHandler);
             oldChatRoom.removePresenceListener(
                 'videoType', this._videoTypeHandler);
-            if (FeatureFlags.isSourceNameSignalingEnabled()) {
-                this._sourceInfoHandler
-                    && oldChatRoom.removePresenceListener(
-                        SOURCE_INFO_PRESENCE_ELEMENT, this._sourceInfoHandler);
-                this._memberLeftHandler
-                    && oldChatRoom.removeEventListener(
-                        XMPPEvents.MUC_MEMBER_LEFT, this._memberLeftHandler);
-            }
+            this._sourceInfoHandler
+                && oldChatRoom.removePresenceListener(SOURCE_INFO_PRESENCE_ELEMENT, this._sourceInfoHandler);
+            this._memberLeftHandler
+                && oldChatRoom.removeEventListener(XMPPEvents.MUC_MEMBER_LEFT, this._memberLeftHandler);
         }
         if (room) {
-            if (FeatureFlags.isSourceNameSignalingEnabled()) {
-                this._bindChatRoomEventHandlers(room);
-                this._addLocalSourceInfoToPresence();
-            } else {
-                // TODO the logic below has been duplicated in _bindChatRoomEventHandlers, clean this up once
-                //  the new impl has been tested well enough
-                // SignalingEvents
-                this._audioMuteHandler = (node, from) => {
-                    this.eventEmitter.emit(
-                        SignalingEvents.PEER_MUTED_CHANGED,
-                        from, MediaType.AUDIO, node.value === 'true');
-                };
-                room.addPresenceListener('audiomuted', this._audioMuteHandler);
-
-                this._videoMuteHandler = (node, from) => {
-                    this.eventEmitter.emit(
-                        SignalingEvents.PEER_MUTED_CHANGED,
-                        from, MediaType.VIDEO, node.value === 'true');
-                };
-                room.addPresenceListener('videomuted', this._videoMuteHandler);
-
-                this._videoTypeHandler = (node, from) => {
-                    this.eventEmitter.emit(
-                        SignalingEvents.PEER_VIDEO_TYPE_CHANGED,
-                        from, node.value);
-                };
-                room.addPresenceListener('videoType', this._videoTypeHandler);
-            }
+            this._bindChatRoomEventHandlers(room);
+            this._addLocalSourceInfoToPresence();
         }
     }
 
@@ -156,6 +125,8 @@ export default class SignalingLayerImpl extends SignalingLayer {
      * @returns {void}
      */
     _bindChatRoomEventHandlers(room) {
+        // Add handlers for 'audiomuted', 'videomuted' and 'videoType' fields in presence in order to support interop
+        // with very old versions of mobile clients and jigasi that do not support source-name signaling.
         const emitAudioMutedEvent = (endpointId, muted) => {
             this.eventEmitter.emit(
                 SignalingEvents.PEER_MUTED_CHANGED,
@@ -163,15 +134,7 @@ export default class SignalingLayerImpl extends SignalingLayer {
                 MediaType.AUDIO,
                 muted);
         };
-        const emitVideoMutedEvent = (endpointId, muted) => {
-            this.eventEmitter.emit(
-                SignalingEvents.PEER_MUTED_CHANGED,
-                endpointId,
-                MediaType.VIDEO,
-                muted);
-        };
 
-        // SignalingEvents
         this._audioMuteHandler = (node, from) => {
             if (!this._doesEndpointSendNewSourceInfo(from)) {
                 emitAudioMutedEvent(from, node.value === 'true');
@@ -179,6 +142,14 @@ export default class SignalingLayerImpl extends SignalingLayer {
         };
         room.addPresenceListener('audiomuted', this._audioMuteHandler);
 
+        const emitVideoMutedEvent = (endpointId, muted) => {
+            this.eventEmitter.emit(
+                SignalingEvents.PEER_MUTED_CHANGED,
+                endpointId,
+                MediaType.VIDEO,
+                muted);
+        };
+
         this._videoMuteHandler = (node, from) => {
             if (!this._doesEndpointSendNewSourceInfo(from)) {
                 emitVideoMutedEvent(from, node.value === 'true');
@@ -197,11 +168,9 @@ export default class SignalingLayerImpl extends SignalingLayer {
                 emitVideoTypeEvent(from, node.value);
             }
         };
+        room.addPresenceListener('videoType', this._videoTypeHandler);
 
-        if (!FeatureFlags.isMultiStreamSupportEnabled()) {
-            room.addPresenceListener('videoType', this._videoTypeHandler);
-        }
-
+        // Add handlers for presence in the new format.
         this._sourceInfoHandler = (node, mucNick) => {
             const endpointId = mucNick;
             const { value } = node;
@@ -256,15 +225,12 @@ export default class SignalingLayerImpl extends SignalingLayer {
 
             delete this._remoteSourceState[endpointId];
 
-            if (FeatureFlags.isSourceNameSignalingEnabled()) {
-                for (const [ key, value ] of this.ssrcOwners.entries()) {
-                    if (value === endpointId) {
-                        delete this._sourceNames[key];
-                    }
+            for (const [ key, value ] of this.ssrcOwners.entries()) {
+                if (value === endpointId) {
+                    delete this._sourceNames[key];
                 }
             }
         };
-
         room.addEventListener(XMPPEvents.MUC_MEMBER_LEFT, this._memberLeftHandler);
     }
 
@@ -304,50 +270,35 @@ export default class SignalingLayerImpl extends SignalingLayer {
             logger.warn('Requested peer media info, before room was set');
         };
 
-        if (FeatureFlags.isSourceNameSignalingEnabled()) {
-            const lastPresence = this.chatRoom?.getLastPresence(owner);
+        const lastPresence = this.chatRoom?.getLastPresence(owner);
 
-            if (!lastPresence) {
-                logger.warn(`getPeerMediaInfo - no presence stored for: ${owner}`);
+        if (!lastPresence) {
+            logger.warn(`getPeerMediaInfo - no presence stored for: ${owner}`);
 
-                return;
-            }
-            if (!this._doesEndpointSendNewSourceInfo(owner)) {
-                return legacyGetPeerMediaInfo();
-            }
+            return;
+        }
+        if (!this._doesEndpointSendNewSourceInfo(owner)) {
+            return legacyGetPeerMediaInfo();
+        }
 
-            if (sourceName) {
-                return this.getPeerSourceInfo(owner, sourceName);
-            }
+        if (sourceName) {
+            return this.getPeerSourceInfo(owner, sourceName);
+        }
 
-            /**
-             * @type {PeerMediaInfo}
-             */
-            const mediaInfo = {};
-            const endpointMediaSource = this._findEndpointSourceInfoForMediaType(owner, mediaType);
-
-            // The defaults are provided only, because getPeerMediaInfo is a legacy method. This will be eventually
-            // changed into a getSourceInfo method which returns undefined if there's no source. Also there will be
-            // no mediaType argument there.
-            if (mediaType === MediaType.AUDIO) {
-                mediaInfo.muted = endpointMediaSource ? endpointMediaSource.muted : true;
-            } else if (mediaType === MediaType.VIDEO) {
-                mediaInfo.muted = endpointMediaSource ? endpointMediaSource.muted : true;
-                mediaInfo.videoType = endpointMediaSource ? endpointMediaSource.videoType : undefined;
-
-                const codecTypeNode = filterNodeFromPresenceJSON(lastPresence, 'jitsi_participant_codecType');
-
-                if (codecTypeNode.length > 0) {
-                    mediaInfo.codecType = codecTypeNode[0].value;
-                }
-            } else {
-                throw new Error(`Unsupported media type: ${mediaType}`);
-            }
+        const mediaInfo = {
+            muted: true
+        };
 
-            return mediaInfo;
+        if (mediaType === MediaType.VIDEO) {
+            mediaInfo.videoType = undefined;
+            const codecTypeNode = filterNodeFromPresenceJSON(lastPresence, 'jitsi_participant_codecType');
+
+            if (codecTypeNode.length > 0) {
+                mediaInfo.codecType = codecTypeNode[0].value;
+            }
         }
 
-        return legacyGetPeerMediaInfo();
+        return mediaInfo;
     }
 
     /**
@@ -400,9 +351,6 @@ export default class SignalingLayerImpl extends SignalingLayer {
         this._localSourceState[sourceName].muted = muted;
 
         if (this.chatRoom) {
-            // FIXME This only adjusts the presence, but doesn't actually send it. Here we temporarily rely on
-            // the legacy signaling part to send the presence. Remember to add "send presence" here when the legacy
-            // signaling is removed.
            return this._addLocalSourceInfoToPresence();
         }
 
@@ -421,9 +369,6 @@ export default class SignalingLayerImpl extends SignalingLayer {
             // Include only if not a camera (default)
             this._localSourceState[sourceName].videoType = videoType === VideoType.CAMERA ? undefined : videoType;
 
-            // NOTE this doesn't send the actual presence, because is called from the same place where the legacy video
-            // type is emitted which does the actual sending. A send presence statement needs to be added when
-            // the legacy part is removed.
             return this._addLocalSourceInfoToPresence();
         }
 

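Note: `getPeerMediaInfo` keeps a legacy fallback for old clients, but the default path now reads SourceInfo. A simplified sketch of the decision order after this change (standalone function form; the real method lives on SignalingLayerImpl and resolves per-source state via `getPeerSourceInfo`):

    function getPeerMediaInfo(owner, mediaType, sourceName) {
        const lastPresence = chatRoom?.getLastPresence(owner);

        if (!lastPresence) {
            return undefined; // nothing known about this peer yet
        }
        if (!doesEndpointSendNewSourceInfo(owner)) {
            // Very old mobile clients / jigasi: parse 'audiomuted'/'videomuted'.
            return legacyGetPeerMediaInfo();
        }
        if (sourceName) {
            return getPeerSourceInfo(owner, sourceName);
        }

        // No per-source lookup requested: default to muted (plus codecType
        // for video when it is advertised in presence).
        return mediaType === MediaType.VIDEO
            ? { muted: true, videoType: undefined }
            : { muted: true };
    }
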
+ 7 - 106 modules/xmpp/SignalingLayerImpl.spec.js

@@ -121,7 +121,6 @@ describe('SignalingLayerImpl', () => {
         let chatRoom = createMockChatRoom();
 
         beforeEach(() => {
-            FeatureFlags.init({ sourceNameSignaling: true });
             signalingLayer = new SignalingLayerImpl();
             chatRoom = createMockChatRoom();
             signalingLayer.setChatRoom(chatRoom);
@@ -160,7 +159,6 @@ describe('SignalingLayerImpl', () => {
             let chatRoom = createMockChatRoom();
 
             beforeEach(() => {
-                FeatureFlags.init({ sourceNameSignaling: true });
                 signalingLayer = new SignalingLayerImpl();
                 chatRoom = createMockChatRoom();
                 signalingLayer.setChatRoom(chatRoom);
@@ -206,29 +204,6 @@ describe('SignalingLayerImpl', () => {
                 );
             });
         });
-        describe('with:  sourceNameSignaling: false', () => {
-            let signalingLayer;
-            let chatRoom;
-
-            beforeEach(() => {
-                FeatureFlags.init({ sourceNameSignaling: false });
-                signalingLayer = new SignalingLayerImpl();
-                chatRoom = createMockChatRoom();
-                signalingLayer.setChatRoom(chatRoom);
-            });
-            it('does not react to SourceInfo', () => {
-                const emitterSpy = spyOn(signalingLayer.eventEmitter, 'emit');
-                const sourceInfo = {
-                    '12345678-a0': {
-                        muted: true
-                    }
-                };
-
-                chatRoom.mockSourceInfoPresence('endpoint1', sourceInfo);
-
-                expect(emitterSpy).not.toHaveBeenCalled();
-            });
-        });
     });
     describe('getPeerMediaInfo', () => {
         describe('with:  sourceNameSignaling: true', () => {
@@ -236,7 +211,6 @@ describe('SignalingLayerImpl', () => {
             let chatRoom;
 
             beforeEach(() => {
-                FeatureFlags.init({ sourceNameSignaling: true });
                 signalingLayer = new SignalingLayerImpl();
                 chatRoom = createMockChatRoom();
                 signalingLayer.setChatRoom(chatRoom);
@@ -268,9 +242,12 @@ describe('SignalingLayerImpl', () => {
 
                     chatRoom.mockSourceInfoPresence(endpointId, sourceInfo);
 
-                    const peerMediaInfo = signalingLayer.getPeerMediaInfo(endpointId, MediaType.AUDIO);
+                    const peerMediaInfo = signalingLayer.getPeerMediaInfo(endpointId, MediaType.AUDIO, '12345678-a0');
 
-                    expect(peerMediaInfo).toEqual({ muted: true });
+                    expect(peerMediaInfo).toEqual({
+                        muted: true,
+                        sourceName: '12345678-a0'
+                    });
                 });
                 it('for video', () => {
                     const endointId = '12345678';
@@ -283,90 +260,16 @@ describe('SignalingLayerImpl', () => {
 
                     chatRoom.mockSourceInfoPresence(endointId, sourceInfo);
 
-                    const peerMediaInfo = signalingLayer.getPeerMediaInfo(endointId, MediaType.VIDEO);
-
-                    expect(peerMediaInfo).toEqual({
-                        muted: true,
-                        videoType: 'desktop'
-                    });
-                });
-            });
-            describe('if there\'s no SourceInfo then will read from the legacy element', () => {
-                const endointId = '12345678';
-
-                it('for audio', () => {
-                    // There's no 'SourceInfo' in the presence
-                    chatRoom.getLastPresence = () => [ { } ];
-
-                    // This test is very implementation specific and relies on the fact that the backwards compat logic
-                    // is supposed to call into 'chatRoom.getMediaPresenceInfo' and return whatever it returns.
-                    // To be removed once legacy signaling is deprecated.
-                    chatRoom.getMediaPresenceInfo = () => {
-                        return {
-                            muted: true
-                        };
-                    };
-
-                    const peerMediaInfo = signalingLayer.getPeerMediaInfo(endointId, MediaType.AUDIO);
-
-                    expect(peerMediaInfo).toEqual({ muted: true });
-                });
-                it('for video', () => {
-                    // There's no 'SourceInfo' in the presence
-                    chatRoom.getLastPresence = () => [ { } ];
-
-                    // This test is very implementation specific and relies on the fact that the backwards compat logic
-                    // is supposed to call into 'chatRoom.getMediaPresenceInfo' and return whatever it returns.
-                    // To be removed once legacy signaling is deprecated.
-                    chatRoom.getMediaPresenceInfo = () => {
-                        return {
-                            muted: true,
-                            videoType: 'desktop'
-                        };
-                    };
-
-                    const peerMediaInfo = signalingLayer.getPeerMediaInfo(endointId, MediaType.VIDEO);
+                    const peerMediaInfo = signalingLayer.getPeerMediaInfo(endointId, MediaType.VIDEO, '12345678-v0');
 
                     expect(peerMediaInfo).toEqual({
                         muted: true,
+                        sourceName: '12345678-v0',
                         videoType: 'desktop'
                     });
                 });
             });
         });
-        describe('with:  sourceNameSignaling: false', () => {
-            beforeEach(() => {
-                FeatureFlags.init({ sourceNameSignaling: false });
-            });
-            it('should not read from SourceInfo element', () => {
-                const signalingLayer = new SignalingLayerImpl();
-                const chatRoom = createMockChatRoom();
-
-                signalingLayer.setChatRoom(chatRoom);
-
-                const endointId = '12345678';
-                const sourceInfo = {
-                    '12345678-v0': {
-                        muted: true,
-                        videoType: 'desktop'
-                    }
-                };
-
-                chatRoom.mockSourceInfoPresence(endointId, sourceInfo);
-
-                // This is the value the legacy flow will use (the values are different that the SourceInfo one).
-                const legacyMediaInfoValue = {
-                    muted: false,
-                    videoType: 'camera'
-                };
-
-                chatRoom.getMediaPresenceInfo = () => legacyMediaInfoValue;
-
-                const peerMediaInfo = signalingLayer.getPeerMediaInfo(endointId, MediaType.VIDEO);
-
-                expect(peerMediaInfo).toEqual(legacyMediaInfoValue);
-            });
-        });
     });
     describe('will remove source info(cleanup corner cases)', () => {
         let signalingLayer;
@@ -374,8 +277,6 @@ describe('SignalingLayerImpl', () => {
         const endpointId = '12345678';
 
         beforeEach(() => {
-            FeatureFlags.init({ sourceNameSignaling: true });
-
             signalingLayer = new SignalingLayerImpl();
             chatRoom = createMockChatRoom();
 

+ 5 - 9 modules/xmpp/xmpp.js

@@ -8,7 +8,6 @@ import * as JitsiConnectionEvents from '../../JitsiConnectionEvents';
 import { XMPPEvents } from '../../service/xmpp/XMPPEvents';
 import browser from '../browser';
 import { E2EEncryption } from '../e2ee/E2EEncryption';
-import FeatureFlags from '../flags/FeatureFlags';
 import Statistics from '../statistics/statistics';
 import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
 import Listenable from '../util/Listenable';
@@ -255,14 +254,11 @@ export default class XMPP extends Listenable {
         }
 
         // Advertise source-name signaling when the endpoint supports it.
-        if (FeatureFlags.isSourceNameSignalingEnabled()) {
-            logger.info('Source-name signaling is enabled');
-            this.caps.addFeature('http://jitsi.org/source-name');
-        }
-        if (FeatureFlags.isReceiveMultipleVideoStreamsSupported()) {
-            logger.info('Receiving multiple video streams is enabled');
-            this.caps.addFeature('http://jitsi.org/receive-multiple-video-streams');
-        }
+        logger.debug('Source-name signaling is enabled');
+        this.caps.addFeature('http://jitsi.org/source-name');
+
+        logger.debug('Receiving multiple video streams is enabled');
+        this.caps.addFeature('http://jitsi.org/receive-multiple-video-streams');
     }
 
     /**

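Note: both capabilities are now advertised unconditionally in disco#info, so remote endpoints can keep using feature discovery to decide which signaling format a peer speaks. An illustrative receiving-side check (hypothetical helper; the real Caps API may differ):

    const SOURCE_NAME_FEATURE = 'http://jitsi.org/source-name';

    // `features` would be the feature-var list parsed from the peer's
    // disco#info response, e.g. [ 'http://jitsi.org/source-name', ... ].
    function peerSupportsSourceNames(features) {
        return features.includes(SOURCE_NAME_FEATURE);
    }
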
+ 0 - 1 service/RTC/MediaType.spec.ts

@@ -12,7 +12,6 @@ describe( "/service/RTC/MediaType members", () => {
         expect( MediaType ).toBeDefined();
 
         expect( MediaType.AUDIO ).toBe( 'audio' );
-        expect( MediaType.PRESENTER ).toBe( 'presenter' );
         expect( MediaType.VIDEO ).toBe( 'video' );
     } );
 

+ 0 - 5 service/RTC/MediaType.ts

@@ -4,11 +4,6 @@ export enum MediaType {
      */
     AUDIO = 'audio',
 
-    /**
-     * The presenter type.
-     */
-    PRESENTER = 'presenter',
-
     /**
      * The video type.
      */

+ 0 - 4 service/RTC/RTCEvents.spec.ts

@@ -9,7 +9,6 @@ describe( "/service/RTC/RTCEvents members", () => {
         DATA_CHANNEL_OPEN,
         ENDPOINT_CONN_STATUS_CHANGED,
         DOMINANT_SPEAKER_CHANGED,
-        LASTN_ENDPOINT_CHANGED,
         FORWARDED_SOURCES_CHANGED,
         PERMISSIONS_CHANGED,
         SENDER_VIDEO_CONSTRAINTS_CHANGED,
@@ -44,7 +43,6 @@ describe( "/service/RTC/RTCEvents members", () => {
         expect( DATA_CHANNEL_OPEN ).toBe( 'rtc.data_channel_open' );
         expect( ENDPOINT_CONN_STATUS_CHANGED ).toBe( 'rtc.endpoint_conn_status_changed' );
         expect( DOMINANT_SPEAKER_CHANGED ).toBe( 'rtc.dominant_speaker_changed' );
-        expect( LASTN_ENDPOINT_CHANGED ).toBe( 'rtc.lastn_endpoint_changed' );
         expect( FORWARDED_SOURCES_CHANGED ).toBe( 'rtc.forwarded_sources_changed' );
         expect( PERMISSIONS_CHANGED ).toBe( 'rtc.permissions_changed' );
         expect( SENDER_VIDEO_CONSTRAINTS_CHANGED ).toBe( 'rtc.sender_video_constraints_changed' );
@@ -75,7 +73,6 @@ describe( "/service/RTC/RTCEvents members", () => {
             expect( RTCEvents.DATA_CHANNEL_OPEN ).toBe( 'rtc.data_channel_open' );
             expect( RTCEvents.ENDPOINT_CONN_STATUS_CHANGED ).toBe( 'rtc.endpoint_conn_status_changed' );
             expect( RTCEvents.DOMINANT_SPEAKER_CHANGED ).toBe( 'rtc.dominant_speaker_changed' );
-            expect( RTCEvents.LASTN_ENDPOINT_CHANGED ).toBe( 'rtc.lastn_endpoint_changed' );
             expect( RTCEvents.PERMISSIONS_CHANGED ).toBe( 'rtc.permissions_changed' );
             expect( RTCEvents.SENDER_VIDEO_CONSTRAINTS_CHANGED ).toBe( 'rtc.sender_video_constraints_changed' );
             expect( RTCEvents.LASTN_VALUE_CHANGED ).toBe( 'rtc.lastn_value_changed' );
@@ -106,7 +103,6 @@ describe( "/service/RTC/RTCEvents members", () => {
             expect( RTCEventsDefault.DATA_CHANNEL_OPEN ).toBe( 'rtc.data_channel_open' );
             expect( RTCEventsDefault.ENDPOINT_CONN_STATUS_CHANGED ).toBe( 'rtc.endpoint_conn_status_changed' );
             expect( RTCEventsDefault.DOMINANT_SPEAKER_CHANGED ).toBe( 'rtc.dominant_speaker_changed' );
-            expect( RTCEventsDefault.LASTN_ENDPOINT_CHANGED ).toBe( 'rtc.lastn_endpoint_changed' );
             expect( RTCEventsDefault.PERMISSIONS_CHANGED ).toBe( 'rtc.permissions_changed' );
             expect( RTCEventsDefault.SENDER_VIDEO_CONSTRAINTS_CHANGED ).toBe( 'rtc.sender_video_constraints_changed' );
             expect( RTCEventsDefault.LASTN_VALUE_CHANGED ).toBe( 'rtc.lastn_value_changed' );

+ 0 - 2 service/RTC/RTCEvents.ts

@@ -11,7 +11,6 @@ export enum RTCEvents {
     DATA_CHANNEL_OPEN = 'rtc.data_channel_open',
     ENDPOINT_CONN_STATUS_CHANGED = 'rtc.endpoint_conn_status_changed',
     DOMINANT_SPEAKER_CHANGED = 'rtc.dominant_speaker_changed',
-    LASTN_ENDPOINT_CHANGED = 'rtc.lastn_endpoint_changed',
     FORWARDED_SOURCES_CHANGED = 'rtc.forwarded_sources_changed',
 
     /**
@@ -130,7 +129,6 @@ export const CREATE_OFFER_FAILED = RTCEvents.CREATE_OFFER_FAILED;
 export const DATA_CHANNEL_OPEN = RTCEvents.DATA_CHANNEL_OPEN;
 export const ENDPOINT_CONN_STATUS_CHANGED = RTCEvents.ENDPOINT_CONN_STATUS_CHANGED;
 export const DOMINANT_SPEAKER_CHANGED = RTCEvents.DOMINANT_SPEAKER_CHANGED;
-export const LASTN_ENDPOINT_CHANGED = RTCEvents.LASTN_ENDPOINT_CHANGED;
 export const FORWARDED_SOURCES_CHANGED = RTCEvents.FORWARDED_SOURCES_CHANGED;
 export const PERMISSIONS_CHANGED = RTCEvents.PERMISSIONS_CHANGED;
 export const SENDER_VIDEO_CONSTRAINTS_CHANGED = RTCEvents.SENDER_VIDEO_CONSTRAINTS_CHANGED;

+ 0 - 1 types/hand-crafted/JitsiConference.d.ts

@@ -74,7 +74,6 @@ export default class JitsiConference {
   selectParticipants: ( participantIds: string[] ) => void;
   getLastN: () => number;
   setLastN: ( lastN: number ) => void;
-  isInLastN: ( participantId: string ) => boolean;
   getParticipants: () => JitsiParticipant[];
   getParticipantCount: ( countHidden?: boolean ) => number;
   getParticipantById: ( id: string ) => JitsiParticipant;

+ 0 - 1 types/hand-crafted/JitsiConferenceEvents.d.ts

@@ -32,7 +32,6 @@ export enum JitsiConferenceEvents {
   NOISY_MIC = 'conference.noisy_mic',
   NON_PARTICIPANT_MESSAGE_RECEIVED = 'conference.non_participant_message_received',
   PRIVATE_MESSAGE_RECEIVED = 'conference.privateMessageReceived',
-  PARTICIPANT_CONN_STATUS_CHANGED = 'conference.participant_conn_status_changed',
   PARTCIPANT_FEATURES_CHANGED = 'conference.partcipant_features_changed',
   PARTICIPANT_PROPERTY_CHANGED = 'conference.participant_property_changed',
   P2P_STATUS = 'conference.p2pStatus',

+ 0 - 2 types/hand-crafted/JitsiMeetJS.d.ts

@@ -11,7 +11,6 @@ import { JitsiConnectionEvents } from './JitsiConnectionEvents';
 import { JitsiConferenceEvents } from './JitsiConferenceEvents';
 import { JitsiTranscriptionStatus } from './JitsiTranscriptionStatus';
 import BrowserCapabilities from './modules/browser/BrowserCapabilities';
-import { ParticipantConnectionStatus } from './modules/connectivity/ParticipantConnectionStatus';
 import { DetectionEvents } from './modules/detection/DetectionEvents';
 import TrackVADEmitter, { VADProcessor } from './modules/detection/TrackVADEmitter';
 import RecordingConstants from './modules/recording/recordingConstants';
@@ -69,7 +68,6 @@ export type JitsiMeetJSType = {
   //USER_MEDIA_SLOW_PROMISE_TIMEOUT: 1000;
 
   constants: {
-    participantConnectionStatus: typeof ParticipantConnectionStatus,
     recording: typeof RecordingConstants,
     sipVideoGW: typeof VideoSIPGWConstants,
     transcriptionStatus: typeof JitsiTranscriptionStatus,

+ 0 - 1 types/hand-crafted/modules/RTC/RTC.d.ts

@@ -43,7 +43,6 @@ export default class RTC extends Listenable {
   setAudioLevel: ( tpc: TraceablePeerConnection, ssrc: number, audioLevel: number, isLocal: boolean ) => void;
   sendChannelMessage: ( to: string, payload: unknown ) => void; // TODO:
   setLastN: ( value: number ) => void;
-  isInLastN: ( id: string ) => boolean;
   isInForwardedSources: ( sourceName: string ) => boolean;
   setNewReceiverVideoConstraints: ( constraints: unknown ) => void; // TODO:
   setVideoType: ( videoType: string ) => void;

+ 0 - 1 types/hand-crafted/modules/xmpp/ChatRoom.d.ts

@@ -48,7 +48,6 @@ export default class ChatRoom extends Listenable {
   sendAudioInfoPresence: ( mute: unknown, callback: ( params: unknown ) => unknown ) => void; // TODO:
   addVideoInfoToPresence: ( mute: unknown ) => void; // TODO:
   sendVideoInfoPresence: ( mute: unknown ) => void; // TODO:
-  getMediaPresenceInfo: ( endpointId: string, mediaType: MediaType ) => unknown; // TODO: what is PeerMediaInfo
   isSIPCallingSupported: () => boolean;
   dial: ( number: string ) => unknown; // TODO:
   hangup: () => unknown; // TODO:

+ 0 - 1 types/hand-crafted/service/RTC/MediaType.d.ts

@@ -1,5 +1,4 @@
 export enum MediaType {
   AUDIO = 'audio',
-  PRESENTER = 'presenter',
   VIDEO = 'video'
 }
