
Deal with the WebRTC streams on per track basis

master
paweldomas committed 9 years ago
Commit f6d730794e

JitsiConference.js (+6, -5)

     });

     conference.room.addListener(XMPPEvents.REMOTE_TRACK_ADDED,
-        function (data, sid, thessrc) {
-            var track = conference.rtc.createRemoteTrack(data, sid, thessrc);
+        function (data) {
+            var track = conference.rtc.createRemoteTrack(data);
             if (track) {
                 conference.onTrackAdded(track);
             }
         }
     );
     conference.room.addListener(XMPPEvents.REMOTE_TRACK_REMOVED,
-        function (streamId) {
+        function (streamId, trackId) {
             conference.getParticipants().forEach(function(participant) {
                 var tracks = participant.getTracks();
                 for(var i = 0; i < tracks.length; i++) {
-                    if(tracks[i] && tracks[i].stream &&
-                        RTC.getStreamID(tracks[i].stream) == streamId){
+                    if(tracks[i]
+                        && tracks[i].getStreamId() == streamId
+                        && tracks[i].getTrackId() == trackId) {
                         var track = participant._tracks.splice(i, 1)[0];
                         conference.eventEmitter.emit(
                             JitsiConferenceEvents.TRACK_REMOVED, track);

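With REMOTE_TRACK_REMOVED now carrying both a stream ID and a track ID, the removed remote track is identified by the pair rather than by its parent stream alone. A minimal sketch of that lookup, assuming a participant object exposing getTracks() and tracks exposing the new getStreamId()/getTrackId() accessors (the helper name is hypothetical, not part of the commit):

    // Hypothetical helper mirroring the matching done in the listener above.
    function findRemovedTrackIndex(participant, streamId, trackId) {
        var tracks = participant.getTracks();
        for (var i = 0; i < tracks.length; i++) {
            if (tracks[i]
                    && tracks[i].getStreamId() == streamId
                    && tracks[i].getTrackId() == trackId) {
                return i;
            }
        }
        return -1; // no JitsiRemoteTrack matches this (stream, track) pair
    }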
modules/DTMF/JitsiDTMFManager.js (+4, -3)

 var logger = require("jitsi-meet-logger").getLogger(__filename);

 function JitsiDTMFManager (localAudio, peerConnection) {
-    var tracks = localAudio._getTracks();
-    if (!tracks.length) {
+    var audioTrack = localAudio.getTrack();
+    if (!audioTrack) {
         throw new Error("Failed to initialize DTMFSender: no audio track.");
     }
-    this.dtmfSender = peerConnection.peerconnection.createDTMFSender(tracks[0]);
+    this.dtmfSender
+        = peerConnection.peerconnection.createDTMFSender(audioTrack);
     logger.debug("Initialized DTMFSender");
 }


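createDTMFSender() takes a single MediaStreamTrack, so the manager now hands it the one track returned by the new getTrack() accessor instead of the first element of the removed _getTracks() array. Once constructed, tones go out through the standard RTCDTMFSender API; a hedged usage sketch (tone string and timing values are illustrative):

    // 'dtmfManager' is assumed to be a JitsiDTMFManager built from a local
    // audio track and a peer connection wrapper, as in the constructor above.
    dtmfManager.dtmfSender.insertDTMF("1234#", 200 /* tone ms */, 50 /* gap ms */);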
modules/RTC/JitsiLocalTrack.js (+26, -30)

 var VideoType = require('../../service/RTC/VideoType');

 /**
- * Represents a single media track (either audio or video).
+ * Represents a single media track(either audio or video).
+ * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
+ * @param stream WebRTC MediaStream, parent of the track
+ * @param track underlying WebRTC MediaStreamTrack for new JitsiRemoteTrack
+ * @param mediaType the MediaType of the JitsiRemoteTrack
+ * @param videoType the VideoType of the JitsiRemoteTrack
+ * @param resolution the video resoultion if it's a video track
+ * @param deviceId the ID of the local device for this track
  * @constructor
  */
-function JitsiLocalTrack(stream, videoType,
-  resolution, deviceId)
-{
-    this.videoType = videoType;
+function JitsiLocalTrack(stream, track, mediaType, videoType, resolution,
+                         deviceId) {
+    JitsiTrack.call(this,
+        null /* RTC */, stream, track,
+        function () {
+            if(!this.dontFireRemoveEvent)
+                this.eventEmitter.emit(
+                    JitsiTrackEvents.LOCAL_TRACK_STOPPED);
+            this.dontFireRemoveEvent = false;
+        }.bind(this) /* inactiveHandler */,
+        mediaType, videoType, null /* ssrc */);
     this.dontFireRemoveEvent = false;
     this.resolution = resolution;
     this.deviceId = deviceId;
     this.startMuted = false;
-    this.ssrc = null;
     this.disposed = false;
     //FIXME: This dependacy is not necessary.
     this.conference = null;
-    JitsiTrack.call(this, null, stream,
-        function () {
-            if(!this.dontFireRemoveEvent)
-                this.eventEmitter.emit(
-                    JitsiTrackEvents.LOCAL_TRACK_STOPPED);
-            this.dontFireRemoveEvent = false;
-        }.bind(this));
     this.initialMSID = this.getMSID();
     this.inMuteOrUnmuteProgress = false;
 }
...
         // FIXME FF does not support 'removeStream' method used to mute
         RTCBrowserType.isFirefox()) {

-        var tracks = this._getTracks();
-        for (var idx = 0; idx < tracks.length; idx++) {
-            tracks[idx].enabled = !mute;
-        }
+        if (this.track)
+            this.track.enabled = !mute;
         if(isAudio)
             this.rtc.room.setAudioMute(mute, callbackFunction);
         else
...
                 .then(function (streamsInfo) {
                     var streamInfo = null;
                     for(var i = 0; i < streamsInfo.length; i++) {
-                        if(streamsInfo[i].type === self.type) {
+                        if(streamsInfo[i].mediaType === self.getType()) {
                             streamInfo = streamsInfo[i];
                             self.stream = streamInfo.stream;
+                            self.track = streamInfo.track;
                             // This is not good when video type changes after
                             // unmute, but let's not crash here
                             if (self.videoType != streamInfo.videoType) {
...
     // this.stream will be null when we mute local video on Chrome
     if (!this.stream)
         return true;
-    var tracks = [];
-    var isAudio = this.isAudioTrack();
-    if (isAudio) {
-        tracks = this.stream.getAudioTracks();
+    if (this.isVideoTrack() && !this.isActive()) {
+        return true;
     } else {
-        if (!this.isActive())
-            return true;
-        tracks = this.stream.getVideoTracks();
+        return !this.track || !this.track.enabled;
     }
-    for (var idx = 0; idx < tracks.length; idx++) {
-        if(tracks[idx].enabled)
-            return false;
-    }
-    return true;
 };

 /**

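Because a JitsiLocalTrack now wraps exactly one MediaStreamTrack, muting no longer loops over stream.getAudioTracks()/getVideoTracks(); it simply flips the enabled flag of that single track. A compressed, hypothetical standalone version of the mute toggle shown in the Firefox branch above:

    // Sketch only; 'jitsiLocalTrack' stands for a JitsiLocalTrack instance.
    function setLocalTrackMuted(jitsiLocalTrack, mute) {
        if (jitsiLocalTrack.track)
            jitsiLocalTrack.track.enabled = !mute; // 'enabled' is the WebRTC mute switch
    }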
modules/RTC/JitsiRemoteTrack.js (+13, -17)

 /**
  * Represents a single media track (either audio or video).
  * @param RTC the rtc instance.
- * @param data object with the stream and some details about it(participant id, video type, etc.)
- * @param sid sid for the Media Stream
- * @param ssrc ssrc for the Media Stream
- * @param eventEmitter the event emitter
+ * @param ownerJid the MUC JID of the track owner
+ * @param stream WebRTC MediaStream, parent of the track
+ * @param track underlying WebRTC MediaStreamTrack for new JitsiRemoteTrack
+ * @param mediaType the MediaType of the JitsiRemoteTrack
+ * @param videoType the VideoType of the JitsiRemoteTrack
+ * @param ssrc the SSRC number of the Media Stream
+ * @param muted intial muted state of the JitsiRemoteTrack
  * @constructor
  */
-function JitsiRemoteTrack(RTC, data, sid, ssrc) {
-    JitsiTrack.call(this, RTC, data.stream,
-        function () {}, data.jitsiTrackType);
+function JitsiRemoteTrack(RTC, ownerJid, stream, track, mediaType, videoType,
+                          ssrc, muted) {
+    JitsiTrack.call(
+        this, RTC, stream, track, function () {}, mediaType, videoType, ssrc);
     this.rtc = RTC;
-    this.sid = sid;
-    this.stream = data.stream;
-    this.peerjid = data.peerjid;
-    this.videoType = data.videoType;
-    this.ssrc = ssrc;
-    this.muted = false;
-    if((this.isAudioTrack() && data.audiomuted)
-      || (this.isVideoTrack() && data.videomuted)) {
-        this.muted = true;
-    }
+    this.peerjid = ownerJid;
+    this.muted = muted;
 }

 JitsiRemoteTrack.prototype = Object.create(JitsiTrack.prototype);

modules/RTC/JitsiTrack.js (+41, -25)

  * Represents a single media track (either audio or video).
  * @constructor
  * @param rtc the rtc instance
- * @param stream the stream
+ * @param stream the WebRTC MediaStream instance
+ * @param track the WebRTC MediaStreamTrack instance, must be part of
+ * the given <tt>stream</tt>.
  * @param streamInactiveHandler the function that will handle
  *        onended/oninactive events of the stream.
- * @param jitsiTrackType optionally a type can be specified.
- *        This is the case where we are creating a dummy track with no stream
- *        Currently this happens when a remote side is starting with video muted
+ * @param trackMediaType the media type of the JitsiTrack
+ * @param videoType the VideoType for this track if any
+ * @param ssrc the SSRC of this track if known
  */
-function JitsiTrack(rtc, stream, streamInactiveHandler, jitsiTrackType)
+function JitsiTrack(rtc, stream, track, streamInactiveHandler, trackMediaType,
+                    videoType, ssrc)
 {
     /**
      * Array with the HTML elements that are displaying the streams.
     this.containers = [];
     this.rtc = rtc;
     this.stream = stream;
+    this.ssrc = ssrc;
     this.eventEmitter = new EventEmitter();
     this.audioLevel = -1;
-    this.type = jitsiTrackType || ((this.stream.getVideoTracks().length > 0)?
-        MediaType.VIDEO : MediaType.AUDIO);
-    if(this.isAudioTrack()) {
-        this._getTracks = function () {
-            return this.stream? this.stream.getAudioTracks() : [];
-        }.bind(this);
-    } else {
-        this._getTracks = function () {
-            return this.stream? this.stream.getVideoTracks() : [];
-        }.bind(this);
-    }
-
+    this.type = trackMediaType;
+    this.track = track;
+    this.videoType = videoType;
     if(stream) {
         if (RTCBrowserType.isFirefox()) {
             implementOnEndedHandling(this);
...
     return this.stream;
 };

+/**
+ * Returns the ID of the underlying WebRTC Media Stream(if any)
+ * @returns {String|null}
+ */
+JitsiTrack.prototype.getStreamId = function () {
+    return this.stream ? this.stream.id : null;
+};
+
+/**
+ * Return the underlying WebRTC MediaStreamTrack
+ * @returns {MediaStreamTrack}
+ */
+JitsiTrack.prototype.getTrack = function () {
+    return this.track;
+};
+
+/**
+ * Returns the ID of the underlying WebRTC MediaStreamTrack(if any)
+ * @returns {String|null}
+ */
+JitsiTrack.prototype.getTrackId = function () {
+    return this.track ? this.track.id : null;
+};
+
 /**
  * Return meaningful usage label for this track depending on it's media and
  * eventual video type.
...
 };

 /**
+ * FIXME remove hack in SDP.js and this method
  * Returns id of the track.
  * @returns {string|null} id of the track or null if this is fake track.
  */
 JitsiTrack.prototype._getId = function () {
-    var tracks = this.stream.getTracks();
-    if(!tracks || tracks.length === 0)
-        return null;
-    return tracks[0].id;
+    return this.getTrackId();
 };

 /**
...
  * no stream is attached.
  */
 JitsiTrack.prototype.getMSID = function () {
-    var tracks, track;
-    return (!this.stream || !this.stream.id || !(tracks = this._getTracks()) ||
-        !tracks.length || !(track = tracks[0]) || !track.id)?
-            null : this.stream.id + " " + track.id;
+    var streamId = this.getStreamId();
+    var trackId = this.getTrackId();
+    return (streamId && trackId) ? (streamId + " " + trackId) : null;
 };

 module.exports = JitsiTrack;

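With these accessors in place, the MSID is simply the stream ID and the track ID joined by a space. Roughly, in terms of plain WebRTC objects (variable names are illustrative):

    // Illustrative only: how the new getters map onto raw WebRTC objects.
    var track = stream.getAudioTracks()[0]; // 'stream' is a MediaStream
    var streamId = stream.id;               // what getStreamId() returns
    var trackId = track.id;                 // what getTrackId() returns
    var msid = streamId + " " + trackId;    // what getMSID() returns when both exist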
modules/RTC/RTC.js (+34, -12)

     var newTracks = [];
     var deviceId = null;
     tracksInfo.forEach(function(trackInfo){
-        if (trackInfo.type === MediaType.AUDIO) {
+        if (trackInfo.mediaType === MediaType.AUDIO) {
           deviceId = options.micDeviceId;
         } else if (trackInfo.videoType === VideoType.CAMERA){
           deviceId = options.cameraDeviceId;
         }
-        var localTrack = new JitsiLocalTrack(trackInfo.stream,
-            trackInfo.videoType, trackInfo.resolution, deviceId);
+        var localTrack
+            = new JitsiLocalTrack(
+                trackInfo.stream,
+                trackInfo.track,
+                trackInfo.mediaType,
+                trackInfo.videoType, trackInfo.resolution, deviceId);
         newTracks.push(localTrack);
     });
     return newTracks;
...
         // we need to create a dummy track which we will mute, so we can
         // notify interested about the muting
         if (!videoTrack) {
-            videoTrack = self.createRemoteTrack(
-                {
-                    peerjid: room.roomjid + "/" + from,
+            videoTrack = self.createRemoteTrack({
+                    owner: room.roomjid + "/" + from,
                     videoType: VideoType.CAMERA,
-                    jitsiTrackType: MediaType.VIDEO
+                    mediaType: MediaType.VIDEO
                 },
                 null, null);
             self.eventEmitter
...
     }
 };

-RTC.prototype.createRemoteTrack = function (data, sid, thessrc) {
-    var remoteTrack = new JitsiRemoteTrack(this, data, sid, thessrc);
-    if(!data.peerjid)
-        return;
-    var resource = Strophe.getResourceFromJid(data.peerjid);
+RTC.prototype.createRemoteTrack = function (event) {
+    var ownerJid = event.owner;
+    var remoteTrack = new JitsiRemoteTrack(
+        this,  ownerJid, event.stream,    event.track,
+        event.mediaType, event.videoType, event.ssrc, event.muted);
+    var resource = Strophe.getResourceFromJid(ownerJid);
     if(!this.remoteTracks[resource]) {
         this.remoteTracks[resource] = {};
     }
...
 RTC.isDeviceChangeAvailable = function () {
     return RTCUtils.isDeviceChangeAvailable();
 };
+
+/**
+ * Returns <tt>true<tt/> if given WebRTC MediaStream is considered a valid
+ * "user" stream which means that it's not a "receive only" stream nor a "mixed"
+ * JVB stream.
+ *
+ * Clients that implement Unified Plan, such as Firefox use recvonly
+ * "streams/channels/tracks" for receiving remote stream/tracks, as opposed to
+ * Plan B where there are only 3 channels: audio, video and data.
+ *
+ * @param stream WebRTC MediaStream instance
+ * @returns {boolean}
+ */
+RTC.isUserStream = function (stream) {
+    var streamId = RTCUtils.getStreamID(stream);
+    return streamId && streamId !== "mixedmslabel" && streamId !== "default";
+};
+
 /**
  * Allows to receive list of available cameras/microphones.
  * @param {function} callback would receive array of devices as an argument

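createRemoteTrack() now takes the single event object that travels through XMPPEvents.REMOTE_TRACK_ADDED instead of positional (data, sid, ssrc) arguments. A hedged call sketch with made-up values:

    // All field values below are illustrative.
    var remoteTrack = rtc.createRemoteTrack({
        owner: "room@conference.example.com/abcd1234", // MUC JID of the sender
        stream: stream,     // WebRTC MediaStream
        track: track,       // WebRTC MediaStreamTrack inside 'stream'
        mediaType: "audio", // MediaType.AUDIO
        videoType: null,
        ssrc: "2002",
        muted: false
    });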
modules/RTC/RTCUIHelper.js (+8, -15)

-/* global $ */
+/* global $, __filename */
+var logger = require("jitsi-meet-logger").getLogger(__filename);
 var RTCBrowserType = require("./RTCBrowserType");
 var RTC = require('./RTC');

...
         } else {
             var matching = $(containerElement).find(
                 ' ' + videoElemName + '>param[value="video"]');
-            if (matching.length < 2) {
-                return matching.parent()[0];
-            } else {
-                // there are 2 video objects from FF
-                // object with id which ends with '_default'
-                // (like 'remoteVideo_default')
-                // doesn't contain video, so we ignore it
-                for (var i = 0; i < matching.length; i += 1) {
-                    var el = matching[i].parentNode;
-
-                    // check id suffix
-                    if (el.id.substr(-8) !== '_default') {
-                        return el;
-                    }
+            if (matching.length) {
+                if (matching.length > 1) {
+                    logger.warn(
+                        "Container with more than one video elements: ",
+                        containerElement);
                 }
+                return matching.parent()[0];
             }
         }
         return undefined;

modules/RTC/RTCUtils.js (+6, -3)

     if (desktopStream)
         res.push({
             stream: desktopStream,
-            type: MediaType.VIDEO,
+            track: desktopStream.getVideoTracks()[0],
+            mediaType: MediaType.VIDEO,
             videoType: VideoType.DESKTOP
         });

     if(audioStream)
         res.push({
             stream: audioStream,
-            type: MediaType.AUDIO,
+            track: audioStream.getAudioTracks()[0],
+            mediaType: MediaType.AUDIO,
             videoType: null
         });

     if(videoStream)
         res.push({
             stream: videoStream,
-            type: MediaType.VIDEO,
+            track: videoStream.getVideoTracks()[0],
+            mediaType: MediaType.VIDEO,
             videoType: VideoType.CAMERA,
             resolution: resolution
         });

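Each streamsInfo entry produced here now carries the concrete MediaStreamTrack alongside its parent stream, under the renamed mediaType field. Roughly, one camera entry looks like this (the variable name and the requires are assumptions for illustration):

    var MediaType = require("../../service/RTC/MediaType");
    var VideoType = require("../../service/RTC/VideoType");
    // 'cameraStream' stands for the MediaStream obtained from getUserMedia.
    var exampleEntry = {
        stream: cameraStream,
        track: cameraStream.getVideoTracks()[0],
        mediaType: MediaType.VIDEO,   // field renamed from 'type'
        videoType: VideoType.CAMERA,
        resolution: "720"
    };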
modules/xmpp/ChatRoom.js (+23, -15)

 /* jshint -W101,-W069 */
 var logger = require("jitsi-meet-logger").getLogger(__filename);
 var XMPPEvents = require("../../service/xmpp/XMPPEvents");
+var MediaType = require("../../service/RTC/MediaType");
 var Moderator = require("./moderator");
 var EventEmitter = require("events");
 var Recorder = require("./recording");
...
     this.eventEmitter.removeListener(type, listener);
 };

-ChatRoom.prototype.remoteStreamAdded = function(data, sid, thessrc) {
-    if(this.lastPresences[data.peerjid])
-    {
-        var pres = this.lastPresences[data.peerjid];
-        var audiomuted = filterNodeFromPresenceJSON(pres, "audiomuted");
-        var videomuted = filterNodeFromPresenceJSON(pres, "videomuted");
-        data.videomuted = ((videomuted.length > 0
-            && videomuted[0]
-            && videomuted[0]["value"] === "true")? true : false);
-        data.audiomuted = ((audiomuted.length > 0
-            && audiomuted[0]
-            && audiomuted[0]["value"] === "true")? true : false);
-    }
-
-    this.eventEmitter.emit(XMPPEvents.REMOTE_TRACK_ADDED, data, sid, thessrc);
+ChatRoom.prototype.remoteTrackAdded = function(data) {
+    // Will figure out current muted status by looking up owner's presence
+    var pres = this.lastPresences[data.owner];
+    var mediaType = data.mediaType;
+    if(pres) {
+        var mutedNode = null;
+        if (mediaType === MediaType.AUDIO) {
+            mutedNode = filterNodeFromPresenceJSON(pres, "audiomuted");
+        } else if (mediaType === MediaType.VIDEO) {
+            mutedNode = filterNodeFromPresenceJSON(pres, "videomuted");
+        } else {
+            logger.warn("Unsupported media type: " + mediaType);
+            data.muted= null;
+        }
+
+        if (mutedNode) {
+            data.muted = !!(mutedNode.length > 0 &&
+                            mutedNode[0] && mutedNode[0]["value"] === "true");
+        }
+    }
+
+    this.eventEmitter.emit(XMPPEvents.REMOTE_TRACK_ADDED, data);
 };

 /**

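remoteTrackAdded seeds the event's muted flag from the owner's last presence: for an audio track it reads the 'audiomuted' node, for video 'videomuted'. A compressed, hypothetical standalone version of that lookup, assuming filterNodeFromPresenceJSON(pres, name) returns the matching presence nodes as an array of objects with a 'value' field, as used in this module:

    // Sketch only; "audio" here stands for MediaType.AUDIO.
    function isMutedInPresence(pres, mediaType) {
        var nodeName = mediaType === "audio" ? "audiomuted" : "videomuted";
        var nodes = filterNodeFromPresenceJSON(pres, nodeName);
        return !!(nodes.length > 0 && nodes[0] && nodes[0]["value"] === "true");
    }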
modules/xmpp/JingleSessionPC.js (+121, -59)

 var logger = require("jitsi-meet-logger").getLogger(__filename);
 var JingleSession = require("./JingleSession");
 var TraceablePeerConnection = require("./TraceablePeerConnection");
+var MediaType = require("../../service/RTC/MediaType");
 var SDPDiffer = require("./SDPDiffer");
 var SDPUtil = require("./SDPUtil");
 var SDP = require("./SDP");
...
         self.sendIceCandidate(candidate);
     };
     this.peerconnection.onaddstream = function (event) {
-        if (event.stream.id !== 'default') {
-            logger.log("REMOTE STREAM ADDED: ", event.stream , event.stream.id);
-            self.remoteStreamAdded(event);
-        } else {
-            // This is a recvonly stream. Clients that implement Unified Plan,
-            // such as Firefox use recvonly "streams/channels/tracks" for
-            // receiving remote stream/tracks, as opposed to Plan B where there
-            // are only 3 channels: audio, video and data.
-            logger.log("RECVONLY REMOTE STREAM IGNORED: " + event.stream + " - " + event.stream.id);
-        }
+        self.remoteStreamAdded(event.stream);
     };
     this.peerconnection.onremovestream = function (event) {
-        // Remove the stream from remoteStreams
-        if (event.stream.id !== 'default') {
-            logger.log("REMOTE STREAM REMOVED: ", event.stream , event.stream.id);
-            self.remoteStreamRemoved(event);
-        } else {
-            // This is a recvonly stream. Clients that implement Unified Plan,
-            // such as Firefox use recvonly "streams/channels/tracks" for
-            // receiving remote stream/tracks, as opposed to Plan B where there
-            // are only 3 channels: audio, video and data.
-            logger.log("RECVONLY REMOTE STREAM IGNORED: " + event.stream + " - " + event.stream.id);
-        }
+        self.remoteStreamRemoved(event.stream);
     };
     this.peerconnection.onsignalingstatechange = function (event) {
         if (!(self && self.peerconnection)) return;
...
     this.room.eventEmitter.emit(XMPPEvents.JINGLE_FATAL_ERROR, session, error);
 };

-JingleSessionPC.prototype.remoteStreamAdded = function (data, times) {
+/**
+ * Called when new remote MediaStream is added to the PeerConnection.
+ * @param stream the WebRTC MediaStream for remote participant
+ */
+JingleSessionPC.prototype.remoteStreamAdded = function (stream) {
     var self = this;
-    var thessrc;
-    var streamId = RTC.getStreamID(data.stream);
+    if (!RTC.isUserStream(stream)) {
+        logger.info(
+            "Ignored remote 'stream added' event for non-user stream", stream);
+        return;
+    }
+    // Bind 'addtrack'/'removetrack' event handlers
+    if (RTCBrowserType.isChrome()) {
+        stream.onaddtrack = function (event) {
+            self.remoteTrackAdded(event.target, event.track);
+        };
+        stream.onremovetrack = function (event) {
+            self.remoteTrackRemoved(event.target, event.track);
+        };
+    }
+    // Call remoteTrackAdded for each track in the stream
+    stream.getAudioTracks().forEach(function (track) {
+        self.remoteTrackAdded(stream, track);
+    });
+    stream.getVideoTracks().forEach(function (track) {
+        self.remoteTrackAdded(stream, track);
+    });
+};
+
+/**
+ * Called on "track added" and "stream added" PeerConnection events(cause we
+ * handle streams on per track basis). Does find the owner and the SSRC for
+ * the track and passes that to ChatRoom for further processing.
+ * @param stream WebRTC MediaStream instance which is the parent of the track
+ * @param track the WebRTC MediaStreamTrack added for remote participant
+ */
+JingleSessionPC.prototype.remoteTrackAdded = function (stream, track) {
+    logger.info("Remote track added", stream, track);
+    var streamId = RTC.getStreamID(stream);
+    var mediaType = track.kind;
+
+    // This is our event structure which will be passed by the ChatRoom as
+    // XMPPEvents.REMOTE_TRACK_ADDED data
+    var jitsiTrackAddedEvent = {
+        stream: stream,
+        track: track,
+        mediaType: track.kind, /* 'audio' or 'video' */
+        owner: undefined, /* to be determined below */
+        muted: null /* will be set in the ChatRoom */
+    };

     // look up an associated JID for a stream id
-    if (!streamId) {
-        logger.error("No stream ID for", data.stream);
-    } else if (streamId && streamId.indexOf('mixedmslabel') === -1) {
-        // look only at a=ssrc: and _not_ at a=ssrc-group: lines
-
-        var ssrclines = this.peerconnection.remoteDescription?
-            SDPUtil.find_lines(this.peerconnection.remoteDescription.sdp, 'a=ssrc:') : [];
-        ssrclines = ssrclines.filter(function (line) {
-            // NOTE(gp) previously we filtered on the mslabel, but that property
-            // is not always present.
-            // return line.indexOf('mslabel:' + data.stream.label) !== -1;
-
-            if (RTCBrowserType.isTemasysPluginUsed()) {
-                return ((line.indexOf('mslabel:' + streamId) !== -1));
-            } else {
-                return ((line.indexOf('msid:' + streamId) !== -1));
-            }
-        });
-        if (ssrclines.length) {
-            thessrc = ssrclines[0].substring(7).split(' ')[0];
+    if (!mediaType) {
+        logger.error("MediaType undefined", track);
+        return;
+    }

-            if (!self.ssrcOwners[thessrc]) {
-                logger.error("No SSRC owner known for: " + thessrc);
-                return;
-            }
-            data.peerjid = self.ssrcOwners[thessrc];
-            logger.log('associated jid', self.ssrcOwners[thessrc]);
+    var remoteSDP = new SDP(this.peerconnection.remoteDescription.sdp);
+    var medialines = remoteSDP.media.filter(function (mediaLines){
+        return mediaLines.startsWith("m=" + mediaType);
+    });
+
+    if (!medialines.length) {
+        logger.error("No media for type " + mediaType + " found in remote SDP");
+        return;
+    }
+
+    var ssrclines = SDPUtil.find_lines(medialines[0], 'a=ssrc:');
+    ssrclines = ssrclines.filter(function (line) {
+        if (RTCBrowserType.isTemasysPluginUsed()) {
+            return ((line.indexOf('mslabel:' + streamId) !== -1));
         } else {
-            logger.error("No SSRC lines for ", streamId);
+            return ((line.indexOf('msid:' + streamId) !== -1));
         }
+    });
+
+    var thessrc;
+    if (ssrclines.length) {
+        thessrc = ssrclines[0].substring(7).split(' ')[0];
+        if (!this.ssrcOwners[thessrc]) {
+            logger.error("No SSRC owner known for: " + thessrc);
+            return;
+        }
+        jitsiTrackAddedEvent.owner = this.ssrcOwners[thessrc];
+        logger.log('associated jid', this.ssrcOwners[thessrc], thessrc);
+    } else {
+        logger.error("No SSRC lines for ", streamId);
+        return;
     }
+    jitsiTrackAddedEvent.ssrc = thessrc;

-    this.room.remoteStreamAdded(data, this.sid, thessrc);
+    this.room.remoteTrackAdded(jitsiTrackAddedEvent);
 };

 /**
  * Handles remote stream removal.
- * @param event The event object associated with the removal.
+ * @param stream the WebRTC MediaStream object which is being removed from the
+ * PeerConnection
  */
-JingleSessionPC.prototype.remoteStreamRemoved = function (event) {
-    var thessrc;
-    var streamId = RTC.getStreamID(event.stream);
+JingleSessionPC.prototype.remoteStreamRemoved = function (stream) {
+    var self = this;
+    if (!RTC.isUserStream(stream)) {
+        logger.info(
+            "Ignored remote 'stream removed' event for non-user stream", stream);
+        return;
+    }
+    // Call remoteTrackRemoved for each track in the stream
+    stream.getVideoTracks().forEach(function(track){
+        self.remoteTrackRemoved(stream, track);
+    });
+    stream.getAudioTracks().forEach(function(track) {
+       self.remoteTrackRemoved(stream, track);
+    });
+};
+
+/**
+ * Handles remote media track removal.
+ * @param stream WebRTC MediaStream instance which is the parent of the track
+ * @param track the WebRTC MediaStreamTrack which has been removed from
+ * the PeerConnection.
+ */
+JingleSessionPC.prototype.remoteTrackRemoved = function (stream, track) {
+    logger.info("Remote track removed", stream, track);
+    var streamId = RTC.getStreamID(stream);
+    var trackId = track && track.id;
     if (!streamId) {
-        logger.error("No stream ID for", event.stream);
-    } else if (streamId && streamId.indexOf('mixedmslabel') === -1) {
-        this.room.eventEmitter.emit(XMPPEvents.REMOTE_STREAM_REMOVED, streamId);
+        logger.error("No stream ID for", stream);
+    } else if (!trackId) {
+        logger.error("No track ID for", track);
+    } else {
+        this.room.eventEmitter.emit(
+            XMPPEvents.REMOTE_TRACK_REMOVED, streamId, trackId);
     }
 };


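The SSRC for a newly surfaced remote track is read back from the remote SDP: pick the m-line whose kind matches the track, keep its a=ssrc: lines whose msid (or mslabel, under the Temasys plugin) contains the parent stream ID, and take the number right after the "a=ssrc:" prefix. A standalone sketch of that last step (the sample SDP line is fabricated for illustration):

    var line = "a=ssrc:2002 msid:someStreamId someTrackId";
    var thessrc = line.substring(7).split(' ')[0]; // -> "2002"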
modules/xmpp/SDP.js (+1, -0)

                     elem.up();
                     var msid = null;
                     if(mline.media == "audio") {
+                        // FIXME what is this ? global APP.RTC in SDP ?
                         msid = APP.RTC.localAudio._getId();
                     } else {
                         msid = APP.RTC.localVideo._getId();

service/xmpp/XMPPEvents.js (+14, -0)

     // Designates an event indicating that we received statistics from a
     // participant in the MUC.
     REMOTE_STATS: "xmpp.remote_stats",
+    /**
+     * Event fired when we remote track is added to the conference.
+     * The following structure is passed as an argument:
+     * {
+     *   stream: the WebRTC MediaStream instance
+     *   track: the WebRTC MediaStreamTrack
+     *   mediaType: the MediaType instance
+     *   owner: the MUC JID of the stream owner
+     *   muted: a boolean indicating initial 'muted' status of the track or
+     *         'null' if unknown
+     **/
     REMOTE_TRACK_ADDED: "xmpp.remote_track_added",
     /**
      * Indicates that the remote track has been removed from the conference.
+     * 1st event argument is the ID of the parent WebRTC stream to which
+     * the track being removed belongs to.
+     * 2nd event argument is the ID of the removed track.
      */
     REMOTE_TRACK_REMOVED: "xmpp.remote_track_removed",
     RESERVATION_ERROR: "xmpp.room_reservation_error",

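For reference, listeners for the two events with the argument shapes documented above would look roughly like this ('room' stands for any emitter that relays these XMPPEvents; the handler bodies are placeholders):

    room.addListener(XMPPEvents.REMOTE_TRACK_ADDED, function (data) {
        // data: { stream, track, mediaType, owner, muted, ssrc }
        console.log("remote track added by", data.owner, "muted:", data.muted);
    });
    room.addListener(XMPPEvents.REMOTE_TRACK_REMOVED, function (streamId, trackId) {
        console.log("remote track removed:", streamId, trackId);
    });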