|
@@ -131,157 +131,197 @@ JitsiLocalTrack.prototype.unmute = function () {
|
131
|
131
|
|
132
|
132
|
/**
 * Creates Promise for mute/unmute operation.
 *
 * @param {JitsiLocalTrack} track - The track that will be muted/unmuted.
 * @param {boolean} mute - Whether to mute or unmute the track.
 * @returns {Promise}
 */
function createMuteUnmutePromise(track, mute) {
    if (track.inMuteOrUnmuteProgress) {
        // Only one mute/unmute operation may be in flight per track.
        return Promise.reject(
            new JitsiTrackError(JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS)
        );
    }

    track.inMuteOrUnmuteProgress = true;

    // Drops the in-progress flag no matter how the operation ends.
    var clearInProgress = function () {
        track.inMuteOrUnmuteProgress = false;
    };

    return track._setMute(mute).then(
        clearInProgress,
        function (status) {
            clearInProgress();

            // Re-throw so callers still observe the rejection reason.
            throw status;
        });
}
|
159
|
157
|
|
160
|
158
|
/**
 * Mutes / unmutes the track.
 *
 * @param {boolean} mute - If true the track will be muted. Otherwise the track
 * will be unmuted.
 * @private
 * @returns {Promise} resolved after the local stream has been updated, the
 * conference mute state has been signaled and TRACK_MUTE_CHANGED emitted.
 */
JitsiLocalTrack.prototype._setMute = function (mute) {
    // No-op when the track is already in the requested state.
    if (this.isMuted() === mute) {
        return Promise.resolve();
    }

    var promise = Promise.resolve();
    var self = this;

    // Local track can be used out of conference, so we need to handle that
    // case and mark that track should start muted or not when added to
    // conference.
    if(!this.conference || !this.conference.room) {
        this.startMuted = mute;
    }

    this.dontFireRemoveEvent = false;

    // FIXME FF does not support 'removeStream' method used to mute
    if (window.location.protocol !== "https:" ||
        this.isAudioTrack() ||
        this.videoType === VideoType.DESKTOP ||
        RTCBrowserType.isFirefox()) {

        // Cheap path: just toggle the underlying MediaStreamTrack. Used for
        // audio, desktop sharing, non-https pages and Firefox (see FIXME
        // above).
        if(this.track)
            this.track.enabled = !mute;
    } else {
        if(mute) {
            // Mark that the upcoming stream removal is part of a mute, not
            // the track going away — presumably consumed by a remove handler
            // elsewhere in this file (not visible here).
            this.dontFireRemoveEvent = true;

            promise = this._removeStreamFromConferenceAsMute()
                .then(function() {
                    //FIXME: Maybe here we should set the SRC for the containers
                    // to something
                    // Stop the local stream while muted; the unmute branch
                    // below re-acquires a fresh one.
                    RTCUtils.stopMediaStream(self.stream);
                    self.stream = null;
                });
        } else {
            // This path is only for camera.
            var streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: ["video"],
                resolution: this.resolution
            };

            // Re-acquire a camera stream and pick the info entry matching
            // this track's media type.
            promise = RTCUtils.obtainAudioAndVideoPermissions(streamOptions)
                .then(function (streamsInfo) {
                    var mediaType = self.getType();
                    var streamInfo = streamsInfo.find(function(info) {
                        return info.mediaType === mediaType;
                    });

                    if(!streamInfo) {
                        // FIXME Introduce a new JitsiTrackError.
                        throw new Error('track.no_stream_found');
                    }else {
                        self.stream = streamInfo.stream;
                        self.track = streamInfo.track;
                        // This is not good when video type changes after
                        // unmute, but let's not crash here
                        if (self.videoType !== streamInfo.videoType) {
                            logger.warn(
                                "Video type has changed after unmute!",
                                self.videoType, streamInfo.videoType);
                            self.videoType = streamInfo.videoType;
                        }
                    }

                    // Re-attach the freshly acquired stream to every container
                    // element that rendered the old one.
                    self.containers = self.containers.map(function(cont) {
                        return RTCUtils.attachMediaStream(cont, self.stream);
                    });

                    return self._addStreamToConferenceAsUnmute();
                });
        }
    }

    // Once the local-stream work settles, sync the mute state with the
    // conference signaling and notify listeners.
    return promise
        .then(function() {
            return self._setConferenceMute(mute);
        })
        .then(function() {
            self.eventEmitter.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED);
        });
};
|
|
250
|
+
|
|
251
|
/**
 * Adds stream to conference and marks it as "unmute" operation.
 *
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._addStreamToConferenceAsUnmute = function () {
    var conference = this.conference;

    // Nothing to signal when the track is used outside of a conference.
    if (!conference || !conference.room) {
        return Promise.resolve();
    }

    var self = this;

    return new Promise(function (resolve, reject) {
        var options = {
            mtype: self.type,
            type: "unmute",
            ssrc: self.ssrc,
            msid: self.getMSID()
        };

        conference.room.addStream(self.stream, resolve, reject, options);
    });
};
|
|
277
|
+
|
|
278
|
/**
 * Removes stream from conference and marks it as "mute" operation.
 *
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._removeStreamFromConferenceAsMute = function () {
    var conference = this.conference;

    // Nothing to signal when the track is used outside of a conference.
    if (!conference || !conference.room) {
        return Promise.resolve();
    }

    var self = this;

    return new Promise(function (resolve, reject) {
        var options = {
            mtype: self.type,
            type: "mute",
            ssrc: self.ssrc
        };

        conference.room.removeStream(self.stream, resolve, reject, options);
    });
};
|
|
303
|
+
|
|
304
|
/**
 * Synchronizes muted stated of track with muted state of conference.
 *
 * @param {boolean} mute - If track is muted.
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._setConferenceMute = function(mute) {
    var conference = this.conference;

    // Nothing to signal when the track is used outside of a conference.
    if (!conference || !conference.room) {
        return Promise.resolve();
    }

    var room = conference.room;
    var isAudio = this.isAudioTrack();

    return new Promise(function (resolve) {
        // The room API reports completion through a callback; adapt it to a
        // promise. There is no error callback on these calls.
        if (isAudio) {
            room.setAudioMute(mute, resolve);
        } else {
            room.setVideoMute(mute, resolve);
        }
    });
};
|
286
|
326
|
|
287
|
327
|
/**
|