@@ -1,4 +1,5 @@
-/* global APP, module */
+/* global __filename, APP, module */
+var logger = require("jitsi-meet-logger").getLogger(__filename);
 var EventEmitter = require("events");
 var RTCBrowserType = require("./RTCBrowserType");
 var RTCEvents = require("../../service/RTC/RTCEvents.js");
@@ -10,62 +11,61 @@ var JitsiRemoteTrack = require("./JitsiRemoteTrack.js");
 var MediaType = require("../../service/RTC/MediaType");
 var VideoType = require("../../service/RTC/VideoType");
 
-function createLocalTracks(streams, options) {
-    var newStreams = []
+function createLocalTracks(tracksInfo, options) {
+    var newTracks = [];
     var deviceId = null;
-    for (var i = 0; i < streams.length; i++) {
-        if (streams[i].type === MediaType.AUDIO) {
+    tracksInfo.forEach(function(trackInfo){
+        if (trackInfo.type === MediaType.AUDIO) {
             deviceId = options.micDeviceId;
-        } else if (streams[i].videoType === VideoType.CAMERA){
+        } else if (trackInfo.videoType === VideoType.CAMERA){
             deviceId = options.cameraDeviceId;
         }
-        var localStream = new JitsiLocalTrack(streams[i].stream,
-            streams[i].videoType, streams[i].resolution, deviceId);
-        newStreams.push(localStream);
-    }
-    return newStreams;
+        var localTrack = new JitsiLocalTrack(trackInfo.stream,
+            trackInfo.videoType, trackInfo.resolution, deviceId);
+        newTracks.push(localTrack);
+    });
+    return newTracks;
 }
 
 function RTC(room, options) {
     this.room = room;
-    this.localStreams = [];
+    this.localTracks = [];
     //FIXME: We should support multiple streams per jid.
-    this.remoteStreams = {};
+    this.remoteTracks = {};
     this.localAudio = null;
     this.localVideo = null;
     this.eventEmitter = new EventEmitter();
     var self = this;
     this.options = options || {};
     room.addPresenceListener("videomuted", function (values, from) {
-        if(self.remoteStreams[from]) {
-            // If there is no video track, but we receive it is muted,
-            // we need to create a dummy track which we will mute, so we can
-            // notify interested about the muting
-            if(!self.remoteStreams[from][MediaType.VIDEO]) {
-                var track = self.createRemoteStream(
-                    {peerjid:room.roomjid + "/" + from,
-                    videoType: VideoType.CAMERA,
-                    jitsiTrackType: MediaType.VIDEO},
-                    null, null);
-                self.eventEmitter
-                    .emit(RTCEvents.FAKE_VIDEO_TRACK_CREATED, track);
-            }
-
-            self.remoteStreams[from][MediaType.VIDEO]
-                .setMute(values.value == "true");
+        var videoTrack = self.getRemoteVideoTrack(from);
+        // If there is no video track, but we receive it is muted,
+        // we need to create a dummy track which we will mute, so we can
+        // notify interested about the muting
+        if (!videoTrack) {
+            videoTrack = self.createRemoteTrack(
+                {
+                    peerjid: room.roomjid + "/" + from,
+                    videoType: VideoType.CAMERA,
+                    jitsiTrackType: MediaType.VIDEO
+                },
+                null, null);
+            self.eventEmitter
+                .emit(RTCEvents.FAKE_VIDEO_TRACK_CREATED, videoTrack);
         }
+        videoTrack.setMute(values.value == "true");
     });
     room.addPresenceListener("audiomuted", function (values, from) {
-        if(self.remoteStreams[from]) {
-            self.remoteStreams[from][MediaType.AUDIO]
-                .setMute(values.value == "true");
+        var audioTrack = self.getRemoteAudioTrack(from);
+        if (audioTrack) {
+            audioTrack.setMute(values.value == "true");
         }
     });
     room.addPresenceListener("videoType", function(data, from) {
-        if(!self.remoteStreams[from] ||
-            (!self.remoteStreams[from][MediaType.VIDEO]))
-            return;
-        self.remoteStreams[from][MediaType.VIDEO]._setVideoType(data.value);
+        var videoTrack = self.getRemoteVideoTrack(from);
+        if (videoTrack) {
+            videoTrack._setVideoType(data.value);
+        }
     });
 }
 
@@ -85,8 +85,9 @@ function RTC(room, options) {
  */
 
 RTC.obtainAudioAndVideoPermissions = function (options) {
-    return RTCUtils.obtainAudioAndVideoPermissions(options).then(function (streams) {
-        return createLocalTracks(streams, options);
+    return RTCUtils.obtainAudioAndVideoPermissions(options).then(
+        function (tracksInfo) {
+            return createLocalTracks(tracksInfo, options);
     });
 };
 
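The reworked promise chain above still resolves with the array built by createLocalTracks, now fed from the renamed tracksInfo objects. A minimal caller sketch follows; the rtc instance name and the literal "default" device ids are illustrative assumptions, while micDeviceId, cameraDeviceId, addLocalTrack and the JitsiLocalTrack result come from this diff.

    // Illustrative caller, not part of this change.
    RTC.obtainAudioAndVideoPermissions({
        micDeviceId: "default",    // read by createLocalTracks for audio tracks
        cameraDeviceId: "default"  // read by createLocalTracks for camera tracks
    }).then(function (localTracks) {
        // localTracks is an array of JitsiLocalTrack instances.
        localTracks.forEach(function (track) {
            rtc.addLocalTrack(track); // rtc: an already-constructed RTC (assumed)
        });
    });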
@@ -94,12 +95,12 @@ RTC.prototype.onIncommingCall = function(event) {
     if(this.options.config.openSctp)
         this.dataChannels = new DataChannels(event.peerconnection,
             this.eventEmitter);
-    for(var i = 0; i < this.localStreams.length; i++)
-        if(this.localStreams[i])
+    for(var i = 0; i < this.localTracks.length; i++)
+        if(this.localTracks[i])
         {
             var ssrcInfo = null;
-            if(this.localStreams[i].isMuted() &&
-                this.localStreams[i].getType() === MediaType.VIDEO) {
+            if(this.localTracks[i].isMuted() &&
+                this.localTracks[i].getType() === MediaType.VIDEO) {
                 /**
                  * Handles issues when the stream is added before the peerconnection is created.
                  * The peerconnection is created when second participant enters the call. In
@@ -114,16 +115,16 @@ RTC.prototype.onIncommingCall = function(event) {
                  * In order to solve issues like the above one here we have to generate the ssrc
                  * information for the track .
                  */
-                this.localStreams[i]._setSSRC(
+                this.localTracks[i]._setSSRC(
                     this.room.generateNewStreamSSRCInfo());
                 ssrcInfo = {
-                    mtype: this.localStreams[i].getType(),
+                    mtype: this.localTracks[i].getType(),
                     type: "addMuted",
-                    ssrc: this.localStreams[i].ssrc,
-                    msid: this.localStreams[i].initialMSID
+                    ssrc: this.localTracks[i].ssrc,
+                    msid: this.localTracks[i].initialMSID
                 }
             }
-            this.room.addStream(this.localStreams[i].getOriginalStream(),
+            this.room.addStream(this.localTracks[i].getOriginalStream(),
                 function () {}, ssrcInfo, true);
         }
 };
@@ -167,14 +168,14 @@ RTC.getDeviceAvailability = function () {
     return RTCUtils.getDeviceAvailability();
 };
 
-RTC.prototype.addLocalStream = function (stream) {
-    this.localStreams.push(stream);
-    stream._setRTC(this);
+RTC.prototype.addLocalTrack = function (track) {
+    this.localTracks.push(track);
+    track._setRTC(this);
 
-    if (stream.isAudioTrack()) {
-        this.localAudio = stream;
+    if (track.isAudioTrack()) {
+        this.localAudio = track;
     } else {
-        this.localVideo = stream;
+        this.localVideo = track;
     }
 };
 
@@ -182,10 +183,36 @@ RTC.prototype.addLocalStream = function (stream) {
  * Get local video track.
  * @returns {JitsiLocalTrack}
  */
-RTC.prototype.getLocalVideoStream = function () {
+RTC.prototype.getLocalVideoTrack = function () {
     return this.localVideo;
 };
 
+/**
+ * Gets JitsiRemoteTrack for AUDIO MediaType associated with given MUC nickname
+ * (resource part of the JID).
+ * @param resource the resource part of the MUC JID
+ * @returns {JitsiRemoteTrack|null}
+ */
+RTC.prototype.getRemoteAudioTrack = function (resource) {
+    if (this.remoteTracks[resource])
+        return this.remoteTracks[resource][MediaType.AUDIO];
+    else
+        return null;
+};
+
+/**
+ * Gets JitsiRemoteTrack for VIDEO MediaType associated with given MUC nickname
+ * (resource part of the JID).
+ * @param resource the resource part of the MUC JID
+ * @returns {JitsiRemoteTrack|null}
+ */
+RTC.prototype.getRemoteVideoTrack = function (resource) {
+    if (this.remoteTracks[resource])
+        return this.remoteTracks[resource][MediaType.VIDEO];
+    else
+        return null;
+};
+
 /**
  * Set mute for all local audio streams attached to the conference.
  * @param value the mute value
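The two getters introduced above centralize the remoteTracks[resource][MediaType] lookups that the presence listeners and setAudioLevel previously open-coded. A short usage sketch, assuming an RTC instance named rtc; the resource string "abcd1234" is hypothetical, and setMute is the same method the "videomuted" listener calls.

    // Hypothetical caller; "abcd1234" stands in for a real MUC resource (nickname).
    var audioTrack = rtc.getRemoteAudioTrack("abcd1234");
    var videoTrack = rtc.getRemoteVideoTrack("abcd1234");
    // Both getters return null when no track of that media type is known,
    // so callers can guard with a plain truthiness check.
    if (videoTrack) {
        videoTrack.setMute(true);
    }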
@@ -193,48 +220,54 @@ RTC.prototype.getLocalVideoStream = function () {
  */
 RTC.prototype.setAudioMute = function (value) {
     var mutePromises = [];
-    for(var i = 0; i < this.localStreams.length; i++) {
-        var stream = this.localStreams[i];
-        if(stream.getType() !== MediaType.AUDIO) {
+    for(var i = 0; i < this.localTracks.length; i++) {
+        var track = this.localTracks[i];
+        if(track.getType() !== MediaType.AUDIO) {
             continue;
         }
         // this is a Promise
-        mutePromises.push(value ? stream.mute() : stream.unmute());
+        mutePromises.push(value ? track.mute() : track.unmute());
     }
     // we return a Promise from all Promises so we can wait for their execution
     return Promise.all(mutePromises);
 };
 
-RTC.prototype.removeLocalStream = function (stream) {
-    var pos = this.localStreams.indexOf(stream);
+RTC.prototype.removeLocalTrack = function (track) {
+    var pos = this.localTracks.indexOf(track);
     if (pos === -1) {
         return;
     }
 
-    this.localStreams.splice(pos, 1);
+    this.localTracks.splice(pos, 1);
 
-    if (stream.isAudioTrack()) {
+    if (track.isAudioTrack()) {
         this.localAudio = null;
     } else {
         this.localVideo = null;
     }
 };
 
-RTC.prototype.createRemoteStream = function (data, sid, thessrc) {
-    var remoteStream = new JitsiRemoteTrack(this, data, sid, thessrc);
+RTC.prototype.createRemoteTrack = function (data, sid, thessrc) {
+    var remoteTrack = new JitsiRemoteTrack(this, data, sid, thessrc);
     if(!data.peerjid)
         return;
     var resource = Strophe.getResourceFromJid(data.peerjid);
-    if(!this.remoteStreams[resource]) {
-        this.remoteStreams[resource] = {};
+    if(!this.remoteTracks[resource]) {
+        this.remoteTracks[resource] = {};
+    }
+    var mediaType = remoteTrack.getType();
+    if (this.remoteTracks[resource][mediaType]) {
+        logger.warn(
+            "Overwriting remote track !", resource, mediaType);
     }
-    this.remoteStreams[resource][remoteStream.getType()]= remoteStream;
-    return remoteStream;
+    this.remoteTracks[resource][mediaType] = remoteTrack;
+    return remoteTrack;
 };
 
-RTC.prototype.removeRemoteStream = function (resource) {
-    if(this.remoteStreams[resource]) {
-        delete this.remoteStreams[resource];
+RTC.prototype.removeRemoteTrack = function (resource) {
+    // FIXME this clears both audio and video tracks!
+    if(this.remoteTracks[resource]) {
+        delete this.remoteTracks[resource];
     }
 };
 
@@ -301,29 +334,29 @@ RTC.isDesktopSharingEnabled = function () {
 RTC.prototype.dispose = function() {
 };
 
-RTC.prototype.switchVideoStreams = function (newStream) {
+RTC.prototype.switchVideoTracks = function (newStream) {
     this.localVideo.stream = newStream;
 
-    this.localStreams = [];
+    this.localTracks = [];
 
     //in firefox we have only one stream object
     if (this.localAudio.getOriginalStream() != newStream)
-        this.localStreams.push(this.localAudio);
-    this.localStreams.push(this.localVideo);
+        this.localTracks.push(this.localAudio);
+    this.localTracks.push(this.localVideo);
 };
 
 RTC.prototype.setAudioLevel = function (resource, audioLevel) {
     if(!resource)
         return;
-    if(this.remoteStreams[resource] &&
-       this.remoteStreams[resource][MediaType.AUDIO]) {
-        this.remoteStreams[resource][MediaType.AUDIO].setAudioLevel(audioLevel);
+    var audioTrack = this.getRemoteAudioTrack(resource);
+    if(audioTrack) {
+        audioTrack.setAudioLevel(audioLevel);
     }
 };
 
 /**
- * Searches in localStreams(session stores ssrc for audio and video) and
- * remoteStreams for the ssrc and returns the corresponding resource.
+ * Searches in localTracks(session stores ssrc for audio and video) and
+ * remoteTracks for the ssrc and returns the corresponding resource.
  * @param ssrc the ssrc to check.
  */
 RTC.prototype.getResourceBySSRC = function (ssrc) {
@@ -332,13 +365,15 @@ RTC.prototype.getResourceBySSRC = function (ssrc) {
         return Strophe.getResourceFromJid(this.room.myroomjid);
     }
 
+    var self = this;
     var resultResource = null;
-    $.each(this.remoteStreams, function (resource, remoteTracks) {
-        if((remoteTracks[MediaType.AUDIO]
-            && remoteTracks[MediaType.AUDIO].getSSRC() == ssrc)
-            || (remoteTracks[MediaType.VIDEO]
-            && remoteTracks[MediaType.VIDEO].getSSRC() == ssrc))
+    Object.keys(this.remoteTracks).forEach(function (resource) {
+        var audioTrack = self.getRemoteAudioTrack(resource);
+        var videoTrack = self.getRemoteVideoTrack(resource);
+        if((audioTrack && audioTrack.getSSRC() == ssrc) ||
+            (videoTrack && videoTrack.getSSRC() == ssrc)) {
             resultResource = resource;
+        }
     });
 
     return resultResource;
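The old helper names are simply replaced in this diff (no aliases are added), so any call sites outside this file would need to move to the new names as well. A before/after sketch; the caller code is illustrative, only the method names are taken from the diff.

    // Before this change (illustrative caller):
    rtc.addLocalStream(track);
    var video = rtc.getLocalVideoStream();
    rtc.removeLocalStream(track);
    rtc.removeRemoteStream(resource);

    // After this change:
    rtc.addLocalTrack(track);
    var video = rtc.getLocalVideoTrack();
    rtc.removeLocalTrack(track);
    rtc.removeRemoteTrack(resource);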