浏览代码

feat: use source names in presence

...to advertise track's muted state and the video type.

For now, if the source name signaling flag is enabled, both the legacy
elements and the new <SourceInfo> element will be used at the same
time. This makes it possible to interoperate with legacy clients while
testing the new format. Whenever possible, <SourceInfo> will be used
as the main source of truth, with a fallback to the legacy
<audiomuted/>, <videomuted/> and <videotype/> elements.
tags/v0.0.2
Pawel Domas 4 年前
父节点
当前提交
03809d48bc

+ 42
- 11
JitsiConference.js 查看文件

@@ -31,6 +31,7 @@ import VADTalkMutedDetection from './modules/detection/VADTalkMutedDetection';
31 31
 import { E2EEncryption } from './modules/e2ee/E2EEncryption';
32 32
 import E2ePing from './modules/e2eping/e2eping';
33 33
 import Jvb121EventGenerator from './modules/event/Jvb121EventGenerator';
34
+import FeatureFlags from './modules/flags/FeatureFlags';
34 35
 import { ReceiveVideoController } from './modules/qualitycontrol/ReceiveVideoController';
35 36
 import { SendVideoController } from './modules/qualitycontrol/SendVideoController';
36 37
 import RecordingManager from './modules/recording/RecordingManager';
@@ -54,6 +55,7 @@ import {
54 55
 import BridgeVideoType from './service/RTC/BridgeVideoType';
55 56
 import CodecMimeType from './service/RTC/CodecMimeType';
56 57
 import * as MediaType from './service/RTC/MediaType';
58
+import { getSourceNameForJitsiTrack } from './service/RTC/SignalingLayer';
57 59
 import VideoType from './service/RTC/VideoType';
58 60
 import {
59 61
     ACTION_JINGLE_RESTART,
@@ -1274,21 +1276,12 @@ JitsiConference.prototype._setupNewTrack = function(newTrack) {
1274 1276
         }
1275 1277
     }
1276 1278
     if (newTrack.isVideoTrack()) {
1277
-        const videoTypeTagName = 'videoType';
1278
-
1279
-        // if video type is camera and there is no videoType in presence, we skip adding it, as this is the default one
1280
-        if (newTrack.videoType !== VideoType.CAMERA || this.room.getFromPresence(videoTypeTagName)) {
1281
-            this.sendCommand(videoTypeTagName, { value: newTrack.videoType });
1282
-        }
1279
+        this._sendNewVideoType(newTrack);
1283 1280
     }
1284 1281
     this.rtc.addLocalTrack(newTrack);
1285 1282
 
1286 1283
     // ensure that we're sharing proper "is muted" state
1287
-    if (newTrack.isAudioTrack()) {
1288
-        this.room.setAudioMute(newTrack.isMuted());
1289
-    } else {
1290
-        this.room.setVideoMute(newTrack.isMuted());
1291
-    }
1284
+    this._setTrackMuteStatus(newTrack, newTrack.isMuted());
1292 1285
 
1293 1286
     newTrack.muteHandler = this._fireMuteChangeEvent.bind(this, newTrack);
1294 1287
     newTrack.audioLevelHandler = this._fireAudioLevelChangeEvent.bind(this);
@@ -1304,6 +1297,44 @@ JitsiConference.prototype._setupNewTrack = function(newTrack) {
1304 1297
     this.eventEmitter.emit(JitsiConferenceEvents.TRACK_ADDED, newTrack);
1305 1298
 };
1306 1299
 
1300
+JitsiConference.prototype._sendNewVideoType = function(track) {
1301
+    if (FeatureFlags.isSourceNameSignalingEnabled()) {
1302
+        // FIXME once legacy signaling using 'sendCommand' is removed, signalingLayer.setTrackVideoType must be adjusted
1303
+        // to send the presence (not just modify it).
1304
+        this._signalingLayer.setTrackVideoType(
1305
+            getSourceNameForJitsiTrack(
1306
+                this.myUserId(),
1307
+                track.getType(),
1308
+                0
1309
+            ),
1310
+            track.videoType);
1311
+    }
1312
+
1313
+    const videoTypeTagName = 'videoType';
1314
+
1315
+    // if video type is camera and there is no videoType in presence, we skip adding it, as this is the default one
1316
+    if (track.videoType !== VideoType.CAMERA || this.room.getFromPresence(videoTypeTagName)) {
1317
+        this.sendCommand(videoTypeTagName, { value: track.videoType });
1318
+    }
1319
+};
1320
+
1321
+JitsiConference.prototype._setTrackMuteStatus = function(localTrack, isMuted) {
1322
+    if (FeatureFlags.isSourceNameSignalingEnabled()) {
1323
+        // TODO When legacy signaling part is removed, remember to adjust signalingLayer.setTrackMuteStatus, so that
1324
+        // it triggers sending the presence (it only updates it for now, because the legacy code below sends).
1325
+        this._signalingLayer.setTrackMuteStatus(
1326
+            getSourceNameForJitsiTrack(this.myUserId(), localTrack.getType(), 0),
1327
+            isMuted
1328
+        );
1329
+    }
1330
+
1331
+    if (localTrack.isAudioTrack()) {
1332
+        this.room && this.room.setAudioMute(isMuted);
1333
+    } else {
1334
+        this.room && this.room.setVideoMute(isMuted);
1335
+    }
1336
+};
1337
+
1307 1338
 /**
1308 1339
  * Method called by the {@link JitsiLocalTrack} (a video one) in order to add
1309 1340
  * back the underlying WebRTC MediaStream to the PeerConnection (which has

+ 2
- 5
modules/RTC/JitsiLocalTrack.js 查看文件

@@ -624,11 +624,8 @@ export default class JitsiLocalTrack extends JitsiTrack {
624 624
      * @returns {void}
625 625
      */
626 626
     _sendMuteStatus(mute) {
627
-        if (this.conference && this.conference.room) {
628
-            this.conference.room[
629
-                this.isAudioTrack()
630
-                    ? 'setAudioMute'
631
-                    : 'setVideoMute'](mute);
627
+        if (this.conference) {
628
+            this.conference._setTrackMuteStatus(this, mute);
632 629
         }
633 630
     }
634 631
 

+ 2
- 3
modules/sdp/LocalSdpMunger.js 查看文件

@@ -4,6 +4,7 @@ import { getLogger } from 'jitsi-meet-logger';
4 4
 
5 5
 import MediaDirection from '../../service/RTC/MediaDirection';
6 6
 import * as MediaType from '../../service/RTC/MediaType';
7
+import { getSourceNameForJitsiTrack } from '../../service/RTC/SignalingLayer';
7 8
 import VideoType from '../../service/RTC/VideoType';
8 9
 import FeatureFlags from '../flags/FeatureFlags';
9 10
 
@@ -352,13 +353,11 @@ export default class LocalSdpMunger {
352 353
             const nameExists = mediaSection.ssrcs.find(ssrc => ssrc.id === source && ssrc.attribute === 'name');
353 354
 
354 355
             if (!nameExists) {
355
-                const firstLetterOfMediaType = mediaType.substring(0, 1);
356
-
357 356
                 // Inject source names as a=ssrc:3124985624 name:endpointA-v0
358 357
                 mediaSection.ssrcs.push({
359 358
                     id: source,
360 359
                     attribute: 'name',
361
-                    value: `${this.localEndpointId}-${firstLetterOfMediaType}0`
360
+                    value: getSourceNameForJitsiTrack(this.localEndpointId, mediaType, 0)
362 361
                 });
363 362
             }
364 363
         }

+ 10
- 1
modules/xmpp/ChatRoom.js 查看文件

@@ -67,7 +67,7 @@ export const parser = {
67 67
  * @param pres the presence JSON
68 68
  * @param nodeName the name of the node (videomuted, audiomuted, etc)
69 69
  */
70
-function filterNodeFromPresenceJSON(pres, nodeName) {
70
+export function filterNodeFromPresenceJSON(pres, nodeName) {
71 71
     const res = [];
72 72
 
73 73
     for (let i = 0; i < pres.length; i++) {
@@ -1657,6 +1657,15 @@ export default class ChatRoom extends Listenable {
1657 1657
         return data;
1658 1658
     }
1659 1659
 
1660
+    /**
1661
+     * Returns the last presence advertised by a MUC member.
1662
+     * @param {string} mucNick
1663
+     * @returns {*}
1664
+     */
1665
+    getLastPresence(mucNick) {
1666
+        return this.lastPresences[`${this.roomjid}/${mucNick}`];
1667
+    }
1668
+
1660 1669
     /**
1661 1670
      * Returns true if the SIP calls are supported and false otherwise
1662 1671
      */

+ 1
- 0
modules/xmpp/JingleSessionPC.spec.js 查看文件

@@ -55,6 +55,7 @@ describe('JingleSessionPC', () => {
55 55
         jingleSession.initialize(
56 56
             /* ChatRoom */ new MockChatRoom(),
57 57
             /* RTC */ rtc,
58
+            /* Signaling layer */ { },
58 59
             /* options */ { });
59 60
 
60 61
         // eslint-disable-next-line no-empty-function

+ 306
- 25
modules/xmpp/SignalingLayerImpl.js 查看文件

@@ -1,13 +1,21 @@
1 1
 /* global __filename */
2 2
 
3 3
 import { getLogger } from 'jitsi-meet-logger';
4
+import { Strophe } from 'strophe.js';
4 5
 
5 6
 import * as MediaType from '../../service/RTC/MediaType';
6 7
 import * as SignalingEvents from '../../service/RTC/SignalingEvents';
7
-import SignalingLayer from '../../service/RTC/SignalingLayer';
8
+import SignalingLayer, { getMediaTypeFromSourceName } from '../../service/RTC/SignalingLayer';
9
+import VideoType from '../../service/RTC/VideoType';
10
+import XMPPEvents from '../../service/xmpp/XMPPEvents';
11
+import FeatureFlags from '../flags/FeatureFlags';
12
+
13
+import { filterNodeFromPresenceJSON } from './ChatRoom';
8 14
 
9 15
 const logger = getLogger(__filename);
10 16
 
17
+export const SOURCE_INFO_PRESENCE_ELEMENT = 'SourceInfo';
18
+
11 19
 /**
12 20
  * Default XMPP implementation of the {@link SignalingLayer} interface. Obtains
13 21
  * the data from the MUC presence.
@@ -34,6 +42,45 @@ export default class SignalingLayerImpl extends SignalingLayer {
34 42
          * @type {ChatRoom|null}
35 43
          */
36 44
         this.chatRoom = null;
45
+
46
+        /**
47
+         * @type {Map<SourceName, SourceInfo>}
48
+         * @private
49
+         */
50
+        this._localSourceState = { };
51
+
52
+        /**
53
+         * @type {Map<EndpointId, Map<SourceName, SourceInfo>>}
54
+         * @private
55
+         */
56
+        this._remoteSourceState = { };
57
+    }
58
+
59
+    /**
60
+     * Adds <SourceInfo> element to the local presence.
61
+     *
62
+     * @returns {void}
63
+     * @private
64
+     */
65
+    _addLocalSourceInfoToPresence() {
66
+        if (this.chatRoom) {
67
+            this.chatRoom.addOrReplaceInPresence(
68
+                SOURCE_INFO_PRESENCE_ELEMENT,
69
+                { value: JSON.stringify(this._localSourceState) });
70
+        }
71
+    }
72
+
73
+    /**
74
+     * Checks if the given endpoint has advertised <SourceInfo/> in its presence, which means that the source name signaling
75
+     * is used by this endpoint.
76
+     *
77
+     * @param {EndpointId} endpointId
78
+     * @returns {boolean}
79
+     */
80
+    _doesEndpointSendNewSourceInfo(endpointId) {
81
+        const presence = this.chatRoom?.getLastPresence(endpointId);
82
+
83
+        return Boolean(presence && presence.find(node => node.tagName === SOURCE_INFO_PRESENCE_ELEMENT));
37 84
     }
38 85
 
39 86
     /**
@@ -51,40 +98,231 @@ export default class SignalingLayerImpl extends SignalingLayer {
51 98
                 'videomuted', this._videoMuteHandler);
52 99
             oldChatRoom.removePresenceListener(
53 100
                 'videoType', this._videoTypeHandler);
101
+            if (FeatureFlags.isSourceNameSignalingEnabled()) {
102
+                this._sourceInfoHandler
103
+                    && oldChatRoom.removePresenceListener(
104
+                        SOURCE_INFO_PRESENCE_ELEMENT, this._sourceInfoHandler);
105
+                this._memberLeftHandler
106
+                    && oldChatRoom.removeEventListener(
107
+                        XMPPEvents.MUC_MEMBER_LEFT, this._memberLeftHandler);
108
+            }
54 109
         }
55 110
         if (room) {
56
-            // SignalingEvents
57
-            this._audioMuteHandler = (node, from) => {
58
-                this.eventEmitter.emit(
59
-                    SignalingEvents.PEER_MUTED_CHANGED,
60
-                    from, MediaType.AUDIO, node.value === 'true');
61
-            };
62
-            room.addPresenceListener('audiomuted', this._audioMuteHandler);
63
-
64
-            this._videoMuteHandler = (node, from) => {
65
-                this.eventEmitter.emit(
66
-                    SignalingEvents.PEER_MUTED_CHANGED,
67
-                    from, MediaType.VIDEO, node.value === 'true');
68
-            };
69
-            room.addPresenceListener('videomuted', this._videoMuteHandler);
70
-
71
-            this._videoTypeHandler = (node, from) => {
72
-                this.eventEmitter.emit(
73
-                    SignalingEvents.PEER_VIDEO_TYPE_CHANGED,
74
-                    from, node.value);
75
-            };
76
-            room.addPresenceListener('videoType', this._videoTypeHandler);
111
+            if (FeatureFlags.isSourceNameSignalingEnabled()) {
112
+                this._bindChatRoomEventHandlers(room);
113
+                this._addLocalSourceInfoToPresence();
114
+            } else {
115
+                // TODO the logic below has been duplicated in _bindChatRoomEventHandlers, clean this up once
116
+                //  the new impl has been tested well enough
117
+                // SignalingEvents
118
+                this._audioMuteHandler = (node, from) => {
119
+                    this.eventEmitter.emit(
120
+                        SignalingEvents.PEER_MUTED_CHANGED,
121
+                        from, MediaType.AUDIO, node.value === 'true');
122
+                };
123
+                room.addPresenceListener('audiomuted', this._audioMuteHandler);
124
+
125
+                this._videoMuteHandler = (node, from) => {
126
+                    this.eventEmitter.emit(
127
+                        SignalingEvents.PEER_MUTED_CHANGED,
128
+                        from, MediaType.VIDEO, node.value === 'true');
129
+                };
130
+                room.addPresenceListener('videomuted', this._videoMuteHandler);
131
+
132
+                this._videoTypeHandler = (node, from) => {
133
+                    this.eventEmitter.emit(
134
+                        SignalingEvents.PEER_VIDEO_TYPE_CHANGED,
135
+                        from, node.value);
136
+                };
137
+                room.addPresenceListener('videoType', this._videoTypeHandler);
138
+            }
139
+        }
140
+    }
141
+
142
+    /**
143
+     * Binds event listeners to the chat room instance.
144
+     * @param {ChatRoom} room
145
+     * @private
146
+     * @returns {void}
147
+     */
148
+    _bindChatRoomEventHandlers(room) {
149
+        const emitAudioMutedEvent = (endpointId, muted) => {
150
+            this.eventEmitter.emit(
151
+                SignalingEvents.PEER_MUTED_CHANGED,
152
+                endpointId,
153
+                MediaType.AUDIO,
154
+                muted);
155
+        };
156
+        const emitVideoMutedEvent = (endpointId, muted) => {
157
+            this.eventEmitter.emit(
158
+                SignalingEvents.PEER_MUTED_CHANGED,
159
+                endpointId,
160
+                MediaType.VIDEO,
161
+                muted);
162
+        };
163
+
164
+        // SignalingEvents
165
+        this._audioMuteHandler = (node, from) => {
166
+            if (!this._doesEndpointSendNewSourceInfo(from)) {
167
+                emitAudioMutedEvent(from, node.value === 'true');
168
+            }
169
+        };
170
+        room.addPresenceListener('audiomuted', this._audioMuteHandler);
171
+
172
+        this._videoMuteHandler = (node, from) => {
173
+            if (!this._doesEndpointSendNewSourceInfo(from)) {
174
+                emitVideoMutedEvent(from, node.value === 'true');
175
+            }
176
+        };
177
+        room.addPresenceListener('videomuted', this._videoMuteHandler);
178
+
179
+        const emitVideoTypeEvent = (endpointId, videoType) => {
180
+            this.eventEmitter.emit(
181
+                SignalingEvents.PEER_VIDEO_TYPE_CHANGED,
182
+                endpointId, videoType);
183
+        };
184
+
185
+        this._videoTypeHandler = (node, from) => {
186
+            if (!this._doesEndpointSendNewSourceInfo(from)) {
187
+                emitVideoTypeEvent(from, node.value);
188
+            }
189
+        };
190
+        room.addPresenceListener('videoType', this._videoTypeHandler);
191
+
192
+        this._sourceInfoHandler = (node, mucNick) => {
193
+            const endpointId = mucNick;
194
+            const { value } = node;
195
+            const sourceInfoJSON = JSON.parse(value);
196
+            const emitEventsFromHere = this._doesEndpointSendNewSourceInfo(endpointId);
197
+            const endpointSourceState
198
+                = this._remoteSourceState[endpointId] || (this._remoteSourceState[endpointId] = {});
199
+
200
+            for (const sourceName of Object.keys(sourceInfoJSON)) {
201
+                const mediaType = getMediaTypeFromSourceName(sourceName);
202
+                const newMutedState = Boolean(sourceInfoJSON[sourceName].muted);
203
+                const oldSourceState = endpointSourceState[sourceName]
204
+                    || (endpointSourceState[sourceName] = { sourceName });
205
+
206
+                if (oldSourceState.muted !== newMutedState) {
207
+                    oldSourceState.muted = newMutedState;
208
+                    if (emitEventsFromHere && mediaType === MediaType.AUDIO) {
209
+                        emitAudioMutedEvent(endpointId, newMutedState);
210
+                    } else {
211
+                        emitVideoMutedEvent(endpointId, newMutedState);
212
+                    }
213
+                }
214
+
215
+                const newVideoType = sourceInfoJSON[sourceName].videoType;
216
+
217
+                if (oldSourceState.videoType !== newVideoType) {
218
+                    oldSourceState.videoType = newVideoType;
219
+                    emitEventsFromHere && emitVideoTypeEvent(endpointId, newVideoType);
220
+                }
221
+            }
222
+
223
+            // Cleanup removed source names
224
+            const newSourceNames = Object.keys(sourceInfoJSON);
225
+
226
+            for (const sourceName of Object.keys(endpointSourceState)) {
227
+                if (newSourceNames.indexOf(sourceName) === -1) {
228
+                    delete endpointSourceState[sourceName];
229
+                }
230
+            }
231
+        };
232
+        room.addPresenceListener('SourceInfo', this._sourceInfoHandler);
233
+
234
+        // Cleanup when participant leaves
235
+        this._memberLeftHandler = jid => {
236
+            const endpointId = Strophe.getResourceFromJid(jid);
237
+
238
+            delete this._remoteSourceState[endpointId];
239
+        };
240
+
241
+        room.addEventListener(XMPPEvents.MUC_MEMBER_LEFT, this._memberLeftHandler);
242
+    }
243
+
244
+    /**
245
+     * Finds the first source of given media type for the given endpoint.
246
+     * @param endpointId
247
+     * @param mediaType
248
+     * @returns {SourceInfo|null}
249
+     * @private
250
+     */
251
+    _findEndpointSourceInfoForMediaType(endpointId, mediaType) {
252
+        const remoteSourceState = this._remoteSourceState[endpointId];
253
+
254
+        if (!remoteSourceState) {
255
+            return null;
77 256
         }
257
+
258
+        for (const sourceInfo of Object.values(remoteSourceState)) {
259
+            const _mediaType = getMediaTypeFromSourceName(sourceInfo.sourceName);
260
+
261
+            if (_mediaType === mediaType) {
262
+                return sourceInfo;
263
+            }
264
+        }
265
+
266
+        return null;
78 267
     }
79 268
 
80 269
     /**
81 270
      * @inheritDoc
82 271
      */
83 272
     getPeerMediaInfo(owner, mediaType) {
84
-        if (this.chatRoom) {
85
-            return this.chatRoom.getMediaPresenceInfo(owner, mediaType);
273
+        const legacyGetPeerMediaInfo = () => {
274
+            if (this.chatRoom) {
275
+                return this.chatRoom.getMediaPresenceInfo(owner, mediaType);
276
+            }
277
+            logger.error('Requested peer media info, before room was set');
278
+        };
279
+
280
+        if (FeatureFlags.isSourceNameSignalingEnabled()) {
281
+            const lastPresence = this.chatRoom.getLastPresence(owner);
282
+
283
+            if (!lastPresence) {
284
+                throw new Error(`getPeerMediaInfo - no presence stored for: ${owner}`);
285
+            }
286
+
287
+            if (!this._doesEndpointSendNewSourceInfo(owner)) {
288
+                return legacyGetPeerMediaInfo();
289
+            }
290
+
291
+            /**
292
+             * @type {PeerMediaInfo}
293
+             */
294
+            const mediaInfo = {};
295
+            const endpointMediaSource = this._findEndpointSourceInfoForMediaType(owner, mediaType);
296
+
297
+            // The defaults are provided only, because getPeerMediaInfo is a legacy method. This will be eventually
298
+            // changed into a getSourceInfo method which returns undefined if there's no source. Also there will be
299
+            // no mediaType argument there.
300
+            if (mediaType === MediaType.AUDIO) {
301
+                mediaInfo.muted = endpointMediaSource ? endpointMediaSource.muted : true;
302
+            } else if (mediaType === MediaType.VIDEO) {
303
+                mediaInfo.muted = endpointMediaSource ? endpointMediaSource.muted : true;
304
+                mediaInfo.videoType = endpointMediaSource ? endpointMediaSource.videoType : undefined;
305
+
306
+                const codecTypeNode = filterNodeFromPresenceJSON(lastPresence, 'jitsi_participant_codecType');
307
+
308
+                if (codecTypeNode.length > 0) {
309
+                    mediaInfo.codecType = codecTypeNode[0].value;
310
+                }
311
+            } else {
312
+                throw new Error(`Unsupported media type: ${mediaType}`);
313
+            }
314
+
315
+            return mediaInfo;
86 316
         }
87
-        logger.error('Requested peer media info, before room was set');
317
+
318
+        return legacyGetPeerMediaInfo();
319
+    }
320
+
321
+    /**
322
+     * @inheritDoc
323
+     */
324
+    getPeerSourceInfo(owner, sourceName) {
325
+        return this._remoteSourceState[owner] ? this._remoteSourceState[owner][sourceName] : undefined;
88 326
     }
89 327
 
90 328
     /**
@@ -112,4 +350,47 @@ export default class SignalingLayerImpl extends SignalingLayer {
112 350
         }
113 351
         this.ssrcOwners.set(ssrc, endpointId);
114 352
     }
353
+
354
+    /**
355
+     * Adjusts muted status of given track.
356
+     *
357
+     * @param {SourceName} sourceName - the name of the track's source.
358
+     * @param {boolean} muted - the new muted status.
359
+     * @returns {boolean}
360
+     */
361
+    setTrackMuteStatus(sourceName, muted) {
362
+        if (!this._localSourceState[sourceName]) {
363
+            this._localSourceState[sourceName] = {};
364
+        }
365
+
366
+        this._localSourceState[sourceName].muted = muted;
367
+
368
+        if (this.chatRoom) {
369
+            // FIXME This only adjusts the presence, but doesn't actually send it. Here we temporarily rely on
370
+            // the legacy signaling part to send the presence. Remember to add "send presence" here when the legacy
371
+            // signaling is removed.
372
+            this._addLocalSourceInfoToPresence();
373
+        }
374
+    }
375
+
376
+    /**
377
+     * Sets track's video type.
378
+     * @param {SourceName} sourceName - the track's source name.
379
+     * @param {VideoType} videoType - the new video type.
380
+     */
381
+    setTrackVideoType(sourceName, videoType) {
382
+        if (!this._localSourceState[sourceName]) {
383
+            this._localSourceState[sourceName] = {};
384
+        }
385
+
386
+        if (this._localSourceState[sourceName].videoType !== videoType) {
387
+            // Include only if not a camera (default)
388
+            this._localSourceState[sourceName].videoType = videoType === VideoType.CAMERA ? undefined : videoType;
389
+
390
+            // NOTE this doesn't send the actual presence, because is called from the same place where the legacy video
391
+            // type is emitted which does the actual sending. A send presence statement needs to be added when
392
+            // the legacy part is removed.
393
+            this._addLocalSourceInfoToPresence();
394
+        }
395
+    }
115 396
 }

+ 416
- 0
modules/xmpp/SignalingLayerImpl.spec.js 查看文件

@@ -0,0 +1,416 @@
1
+import * as MediaType from '../../service/RTC/MediaType';
2
+import * as SignalingEvents from '../../service/RTC/SignalingEvents';
3
+import { getSourceNameForJitsiTrack } from '../../service/RTC/SignalingLayer';
4
+import VideoType from '../../service/RTC/VideoType';
5
+import XMPPEvents from '../../service/xmpp/XMPPEvents';
6
+import FeatureFlags from '../flags/FeatureFlags';
7
+import Listenable from '../util/Listenable';
8
+
9
+import SignalingLayerImpl, { SOURCE_INFO_PRESENCE_ELEMENT } from './SignalingLayerImpl';
10
+
11
+const INITIAL_SOURCE_INFO = { value: JSON.stringify({}) };
12
+
13
+// eslint-disable-next-line require-jsdoc
14
+function createMockChatRoom() {
15
+    const chatRoom = {
16
+        ...new Listenable(),
17
+        ...jasmine.createSpyObj('', [
18
+            'addOrReplaceInPresence',
19
+            'setAudioMute',
20
+            'setVideoMute'
21
+        ])
22
+    };
23
+
24
+    const listeners = {};
25
+
26
+    // Stores presence listeners
27
+    chatRoom.addPresenceListener = (tagName, l) => {
28
+        listeners[tagName] || (listeners[tagName] = []);
29
+        listeners[tagName].push(l);
30
+    };
31
+
32
+    // Notify presence listeners
33
+    chatRoom.emitPresenceListener = (node, mucNick) => {
34
+        const nodeListeners = listeners[node.tagName];
35
+
36
+        if (nodeListeners) {
37
+            for (const l of nodeListeners) {
38
+                l(node, mucNick);
39
+            }
40
+        }
41
+    };
42
+
43
+    // Fakes 'SourceInfo' in the presence by adjusting getLastPresence return value and emitting a presence event.
44
+    chatRoom.mockSourceInfoPresence = (endpointId, sourceInfo) => {
45
+        chatRoom.getLastPresence = () => [ {
46
+            tagName: SOURCE_INFO_PRESENCE_ELEMENT,
47
+            value: JSON.stringify(sourceInfo)
48
+        } ];
49
+        chatRoom.emitPresenceListener({
50
+            tagName: SOURCE_INFO_PRESENCE_ELEMENT,
51
+            value: JSON.stringify(sourceInfo)
52
+        }, endpointId);
53
+    };
54
+
55
+    chatRoom.emitParticipantLeft = endpointId => {
56
+        // Only the resource part (MUC nick) is relevant
57
+        chatRoom.eventEmitter.emit(XMPPEvents.MUC_MEMBER_LEFT, `room@server.com/${endpointId}`);
58
+    };
59
+
60
+    return chatRoom;
61
+}
62
+
63
+describe('SignalingLayerImpl', () => {
64
+    describe('setTrackMuteStatus advertises the track muted status in the chat room', () => {
65
+        describe('with source name signaling enabled', () => {
66
+            const endpointId = 'abcdef12';
67
+            let signalingLayer;
68
+            let chatRoom;
69
+
70
+            beforeEach(() => {
71
+                FeatureFlags.init({ sourceNameSignaling: true });
72
+                signalingLayer = new SignalingLayerImpl();
73
+                chatRoom = createMockChatRoom();
74
+                signalingLayer.setChatRoom(chatRoom);
75
+
76
+                // No tracks yet
77
+                expect(chatRoom.addOrReplaceInPresence)
78
+                    .toHaveBeenCalledWith(
79
+                        SOURCE_INFO_PRESENCE_ELEMENT,
80
+                        INITIAL_SOURCE_INFO);
81
+            });
82
+            it('for audio track', () => {
83
+                const audioSourceName = getSourceNameForJitsiTrack(endpointId, MediaType.AUDIO, 0);
84
+
85
+                // Audio track: muted
86
+                signalingLayer.setTrackMuteStatus(audioSourceName, true);
87
+                expect(chatRoom.addOrReplaceInPresence)
88
+                    .toHaveBeenCalledWith(
89
+                        SOURCE_INFO_PRESENCE_ELEMENT,
90
+                        { value: `{"${audioSourceName}":{"muted":true}}` });
91
+
92
+                // Audio track: unmuted
93
+                signalingLayer.setTrackMuteStatus(audioSourceName, false);
94
+                expect(chatRoom.addOrReplaceInPresence)
95
+                    .toHaveBeenCalledWith(
96
+                        SOURCE_INFO_PRESENCE_ELEMENT,
97
+                        { value: `{"${audioSourceName}":{"muted":false}}` });
98
+            });
99
+            it('for video track', () => {
100
+                const videoSourceName = getSourceNameForJitsiTrack(endpointId, MediaType.VIDEO, 0);
101
+
102
+                // Video track: muted
103
+                signalingLayer.setTrackMuteStatus(videoSourceName, true);
104
+                expect(chatRoom.addOrReplaceInPresence)
105
+                    .toHaveBeenCalledWith(
106
+                        SOURCE_INFO_PRESENCE_ELEMENT,
107
+                        { value: `{"${videoSourceName}":{"muted":true}}` });
108
+
109
+                // Video track: unmuted
110
+                signalingLayer.setTrackMuteStatus(videoSourceName, false);
111
+                expect(chatRoom.addOrReplaceInPresence)
112
+                    .toHaveBeenCalledWith(
113
+                        SOURCE_INFO_PRESENCE_ELEMENT,
114
+                        { value: `{"${videoSourceName}":{"muted":false}}` });
115
+            });
116
+        });
117
+    });
118
+    describe('setTrackVideoType', () => {
119
+        const endpointId = 'abcdef12';
120
+        let signalingLayer;
121
+        let chatRoom = createMockChatRoom();
122
+
123
+        beforeEach(() => {
124
+            FeatureFlags.init({ sourceNameSignaling: true });
125
+            signalingLayer = new SignalingLayerImpl();
126
+            chatRoom = createMockChatRoom();
127
+            signalingLayer.setChatRoom(chatRoom);
128
+
129
+            // Initial value is set in signalingLayer.setChatRoom
130
+            expect(chatRoom.addOrReplaceInPresence)
131
+                .toHaveBeenCalledWith(
132
+                    SOURCE_INFO_PRESENCE_ELEMENT,
133
+                    INITIAL_SOURCE_INFO);
134
+        });
135
+        it('sends video type in chat room presence', () => {
136
+            const videoSourceName = getSourceNameForJitsiTrack(endpointId, MediaType.VIDEO, 0);
137
+
138
+            signalingLayer.setTrackVideoType(videoSourceName, VideoType.CAMERA);
139
+            expect(chatRoom.addOrReplaceInPresence)
140
+                .toHaveBeenCalledWith(
141
+                    SOURCE_INFO_PRESENCE_ELEMENT,
142
+                    { value: '{"abcdef12-v0":{}}' });
143
+
144
+            signalingLayer.setTrackVideoType(videoSourceName, VideoType.DESKTOP);
145
+            expect(chatRoom.addOrReplaceInPresence)
146
+                .toHaveBeenCalledWith(
147
+                    SOURCE_INFO_PRESENCE_ELEMENT,
148
+                    { value: '{"abcdef12-v0":{"videoType":"desktop"}}' });
149
+
150
+            signalingLayer.setTrackVideoType(videoSourceName, VideoType.CAMERA);
151
+            expect(chatRoom.addOrReplaceInPresence)
152
+                .toHaveBeenCalledWith(
153
+                    SOURCE_INFO_PRESENCE_ELEMENT,
154
+                    { value: '{"abcdef12-v0":{}}' });
155
+        });
156
+    });
157
+    describe('should emit muted/video type events based on presence', () => {
158
+        describe('with:  sourceNameSignaling: true', () => {
159
+            let signalingLayer;
160
+            let chatRoom = createMockChatRoom();
161
+
162
+            beforeEach(() => {
163
+                FeatureFlags.init({ sourceNameSignaling: true });
164
+                signalingLayer = new SignalingLayerImpl();
165
+                chatRoom = createMockChatRoom();
166
+                signalingLayer.setChatRoom(chatRoom);
167
+            });
168
+            it('from a legacy user (no SourceInfo)', () => {
169
+                const emitterSpy = spyOn(signalingLayer.eventEmitter, 'emit');
170
+
171
+                chatRoom.getLastPresence = () => [];
172
+                chatRoom.emitPresenceListener({
173
+                    tagName: 'audiomuted',
174
+                    value: 'true'
175
+                }, 'endpoint1');
176
+
177
+                expect(emitterSpy).toHaveBeenCalledWith(
178
+                    SignalingEvents.PEER_MUTED_CHANGED,
179
+                    'endpoint1',
180
+                    'audio',
181
+                    true
182
+                );
183
+            });
184
+            it('from a user with SourceInfo', () => {
185
+                const emitterSpy = spyOn(signalingLayer.eventEmitter, 'emit');
186
+                const sourceInfo = {
187
+                    '12345678-a0': {
188
+                        muted: true
189
+                    }
190
+                };
191
+
192
+                chatRoom.mockSourceInfoPresence('endpoint1', sourceInfo);
193
+
194
+                // <audiomuted/> still included for backwards compat and ChatRoom will emit the presence event
195
+                chatRoom.emitPresenceListener({
196
+                    tagName: 'audiomuted',
197
+                    value: 'true'
198
+                }, 'endpoint1');
199
+
200
+                // Just one event even though the legacy presence is there as well
201
+                expect(emitterSpy).toHaveBeenCalledTimes(1);
202
+                expect(emitterSpy).toHaveBeenCalledWith(
203
+                    SignalingEvents.PEER_MUTED_CHANGED,
204
+                    'endpoint1',
205
+                    'audio',
206
+                    true
207
+                );
208
+            });
209
+        });
210
+        describe('with:  sourceNameSignaling: false', () => {
211
+            let signalingLayer;
212
+            let chatRoom;
213
+
214
+            beforeEach(() => {
215
+                FeatureFlags.init({ sourceNameSignaling: false });
216
+                signalingLayer = new SignalingLayerImpl();
217
+                chatRoom = createMockChatRoom();
218
+                signalingLayer.setChatRoom(chatRoom);
219
+            });
220
+            it('does not react to SourceInfo', () => {
221
+                const emitterSpy = spyOn(signalingLayer.eventEmitter, 'emit');
222
+                const sourceInfo = {
223
+                    '12345678-a0': {
224
+                        muted: true
225
+                    }
226
+                };
227
+
228
+                chatRoom.mockSourceInfoPresence('endpoint1', sourceInfo);
229
+
230
+                expect(emitterSpy).not.toHaveBeenCalled();
231
+            });
232
+        });
233
+    });
234
+    describe('getPeerMediaInfo', () => {
235
+        describe('with:  sourceNameSignaling: true', () => {
236
+            let signalingLayer;
237
+            let chatRoom;
238
+
239
+            beforeEach(() => {
240
+                FeatureFlags.init({ sourceNameSignaling: true });
241
+                signalingLayer = new SignalingLayerImpl();
242
+                chatRoom = createMockChatRoom();
243
+                signalingLayer.setChatRoom(chatRoom);
244
+            });
245
+            it('will provide default value if only empty source info was sent so far', () => {
246
+                const endpointId = '12345678';
247
+
248
+                chatRoom.mockSourceInfoPresence(endpointId, { });
249
+
250
+                const audioPeerMediaInfo = signalingLayer.getPeerMediaInfo(endpointId, MediaType.AUDIO);
251
+
252
+                expect(audioPeerMediaInfo).toEqual({ muted: true });
253
+
254
+                const videoPeerMediaInfo = signalingLayer.getPeerMediaInfo(endpointId, MediaType.VIDEO);
255
+
256
+                expect(videoPeerMediaInfo).toEqual({
257
+                    muted: true,
258
+                    videoType: undefined
259
+                });
260
+            });
261
+            describe('will read from SourceInfo if available', () => {
262
+                it('for audio', () => {
263
+                    const endpointId = '12345678';
264
+                    const sourceInfo = {
265
+                        '12345678-a0': {
266
+                            muted: true
267
+                        }
268
+                    };
269
+
270
+                    chatRoom.mockSourceInfoPresence(endpointId, sourceInfo);
271
+
272
+                    const peerMediaInfo = signalingLayer.getPeerMediaInfo(endpointId, MediaType.AUDIO);
273
+
274
+                    expect(peerMediaInfo).toEqual({ muted: true });
275
+                });
276
+                it('for video', () => {
277
+                    const endointId = '12345678';
278
+                    const sourceInfo = {
279
+                        '12345678-v0': {
280
+                            muted: true,
281
+                            videoType: 'desktop'
282
+                        }
283
+                    };
284
+
285
+                    chatRoom.mockSourceInfoPresence(endointId, sourceInfo);
286
+
287
+                    const peerMediaInfo = signalingLayer.getPeerMediaInfo(endointId, MediaType.VIDEO);
288
+
289
+                    expect(peerMediaInfo).toEqual({
290
+                        muted: true,
291
+                        videoType: 'desktop'
292
+                    });
293
+                });
294
+            });
295
+            describe('if there\'s no SourceInfo then will read from the legacy element', () => {
296
+                const endointId = '12345678';
297
+
298
+                it('for audio', () => {
299
+                    // There's no 'SourceInfo' in the presence
300
+                    chatRoom.getLastPresence = () => [ { } ];
301
+
302
+                    // This test is very implementation specific and relies on the fact that the backwards compat logic
303
+                    // is supposed to call into 'chatRoom.getMediaPresenceInfo' and return whatever it returns.
304
+                    // To be removed once legacy signaling is deprecated.
305
+                    chatRoom.getMediaPresenceInfo = () => {
306
+                        return {
307
+                            muted: true
308
+                        };
309
+                    };
310
+
311
+                    const peerMediaInfo = signalingLayer.getPeerMediaInfo(endointId, MediaType.AUDIO);
312
+
313
+                    expect(peerMediaInfo).toEqual({ muted: true });
314
+                });
315
+                it('for video', () => {
316
+                    // There's no 'SourceInfo' in the presence
317
+                    chatRoom.getLastPresence = () => [ { } ];
318
+
319
+                    // This test is very implementation specific and relies on the fact that the backwards compat logic
320
+                    // is supposed to call into 'chatRoom.getMediaPresenceInfo' and return whatever it returns.
321
+                    // To be removed once legacy signaling is deprecated.
322
+                    chatRoom.getMediaPresenceInfo = () => {
323
+                        return {
324
+                            muted: true,
325
+                            videoType: 'desktop'
326
+                        };
327
+                    };
328
+
329
+                    const peerMediaInfo = signalingLayer.getPeerMediaInfo(endointId, MediaType.VIDEO);
330
+
331
+                    expect(peerMediaInfo).toEqual({
332
+                        muted: true,
333
+                        videoType: 'desktop'
334
+                    });
335
+                });
336
+            });
337
+        });
338
+        describe('with:  sourceNameSignaling: false', () => {
339
+            beforeEach(() => {
340
+                FeatureFlags.init({ sourceNameSignaling: false });
341
+            });
342
+            it('should not read from SourceInfo element', () => {
343
+                const signalingLayer = new SignalingLayerImpl();
344
+                const chatRoom = createMockChatRoom();
345
+
346
+                signalingLayer.setChatRoom(chatRoom);
347
+
348
+                const endointId = '12345678';
349
+                const sourceInfo = {
350
+                    '12345678-v0': {
351
+                        muted: true,
352
+                        videoType: 'desktop'
353
+                    }
354
+                };
355
+
356
+                chatRoom.mockSourceInfoPresence(endointId, sourceInfo);
357
+
358
+                // This is the value the legacy flow will use (the values are different from the SourceInfo ones).
359
+                const legacyMediaInfoValue = {
360
+                    muted: false,
361
+                    videoType: 'camera'
362
+                };
363
+
364
+                chatRoom.getMediaPresenceInfo = () => legacyMediaInfoValue;
365
+
366
+                const peerMediaInfo = signalingLayer.getPeerMediaInfo(endointId, MediaType.VIDEO);
367
+
368
+                expect(peerMediaInfo).toEqual(legacyMediaInfoValue);
369
+            });
370
+        });
371
+    });
372
+    describe('will remove source info(cleanup corner cases)', () => {
373
+        let signalingLayer;
374
+        let chatRoom;
375
+        const endpointId = '12345678';
376
+
377
+        beforeEach(() => {
378
+            FeatureFlags.init({ sourceNameSignaling: true });
379
+
380
+            signalingLayer = new SignalingLayerImpl();
381
+            chatRoom = createMockChatRoom();
382
+
383
+            signalingLayer.setChatRoom(chatRoom);
384
+        });
385
+        it('when participant leaves', () => {
386
+            const sourceInfo = {
387
+                '12345678-v0': {
388
+                    muted: false,
389
+                    videoType: 'desktop'
390
+                }
391
+            };
392
+
393
+            chatRoom.mockSourceInfoPresence(endpointId, sourceInfo);
394
+
395
+            expect(signalingLayer.getPeerSourceInfo(endpointId, '12345678-v0')).toBeDefined();
396
+
397
+            chatRoom.emitParticipantLeft(endpointId);
398
+
399
+            expect(signalingLayer.getPeerSourceInfo(endpointId, '12345678-v0')).toBeUndefined();
400
+        });
401
+        it('when it\'s no longer in the presence', () => {
402
+            chatRoom.mockSourceInfoPresence(endpointId, {
403
+                '12345678-v0': { muted: false }
404
+            });
405
+
406
+            expect(signalingLayer.getPeerSourceInfo(endpointId, '12345678-v0')).toBeDefined();
407
+
408
+            chatRoom.mockSourceInfoPresence(endpointId, {
409
+                '12345678-v1': { muted: false }
410
+            });
411
+
412
+            expect(signalingLayer.getPeerSourceInfo(endpointId, '12345678-v0')).toBeUndefined();
413
+            expect(signalingLayer.getPeerSourceInfo(endpointId, '12345678-v1')).toBeDefined();
414
+        });
415
+    });
416
+});

+ 66
- 0
service/RTC/SignalingLayer.js 查看文件

@@ -1,5 +1,59 @@
1 1
 
2 2
 import Listenable from '../../modules/util/Listenable';
3
+import * as MediaType from '../../service/RTC/MediaType';
4
+
5
+/**
6
+ * @typedef {string} EndpointId
7
+ */
8
+/**
9
+ * @typedef {string} SourceName
10
+ */
11
+/**
12
+ * @typedef {Object} SourceInfo
13
+ *
14
+ * @property {SourceName} sourceName - Name of the media source.
15
+ * @property {boolean} [muted=false] - Tells if the source is muted (paused?).
16
+ * @property {string} [videoType] - The type of the video source (e.g. 'camera' or 'desktop'); applies to video sources only.
17
+ */
18
+
19
/**
 * Builds the source name advertised in presence for a given track.
 *
 * @param {EndpointId} endpointId - Jitsi Endpoint Id.
 * @param {MediaType} mediaType - the media type string.
 * @param {number} trackIdx - Track index (or sender idx? - to be figured out) starting from 0.
 * @returns {SourceName} eg. endpointA-v0
 */
export function getSourceNameForJitsiTrack(endpointId, mediaType, trackIdx) {
    // The suffix is the first letter of the media type followed by the track index,
    // e.g. 'v0' for the first video track.
    const mediaTypeLetter = mediaType[0];

    return `${endpointId}-${mediaTypeLetter}${trackIdx}`;
}
32
+
33
/**
 * Extracts MediaType from a given source name (must be in the correct format as generated by
 * {@link getSourceNameForJitsiTrack}).
 *
 * @param {SourceName} sourceName - the source name.
 * @returns {MediaType}
 * @throws {Error} if the source name does not contain a '-' separator or the media type
 * letter does not match any known {@link MediaType}.
 */
export function getMediaTypeFromSourceName(sourceName) {
    // Parse from the LAST '-' so that endpoint IDs which themselves contain a '-' are
    // handled correctly: getSourceNameForJitsiTrack always appends the
    // "-{mediaTypeLetter}{trackIdx}" suffix at the very end of the name.
    const separatorIdx = sourceName.lastIndexOf('-');

    if (separatorIdx < 0) {
        throw new Error(`Invalid source name: ${sourceName}`);
    }

    const firstLetterOfMediaType = sourceName.charAt(separatorIdx + 1);

    // Match the letter against the first letter of every known media type.
    for (const type of Object.values(MediaType)) {
        if (type.charAt(0) === firstLetterOfMediaType) {
            return type;
        }
    }

    throw new Error(`Invalid source name: ${sourceName}`);
}
3 57
 
4 58
 /**
5 59
  * An object that carries the info about specific media type advertised by
@@ -36,8 +90,20 @@ export default class SignalingLayer extends Listenable {
36 90
      * @return {PeerMediaInfo|null} presenceInfo an object with media presence
37 91
      * info or <tt>null</tt> either if there is no presence available for given
38 92
      * JID or if the media type given is invalid.
93
+     *
94
+     * @deprecated This method is to be replaced with getPeerSourceInfo.
39 95
      */
40 96
     getPeerMediaInfo(owner, mediaType) { // eslint-disable-line no-unused-vars
41 97
         throw new Error('not implemented');
42 98
     }
99
+
100
+    /**
101
+     * Obtains the info about a source for given name and endpoint ID.
102
+     * @param {EndpointId} owner - The owner's endpoint ID.
103
+     * @param {SourceName} sourceName - The name of the source for which the info is to be obtained.
104
+     * @returns {SourceInfo | undefined}
105
+     */
106
+    getPeerSourceInfo(owner, sourceName) { // eslint-disable-line no-unused-vars
107
+        throw new Error('not implemented');
108
+    }
43 109
 }

正在加载...
取消
保存