|
@@ -33,23 +33,21 @@ const logger = getLogger(__filename);
|
33
|
33
|
*/
|
34
|
34
|
export default class JitsiLocalTrack extends JitsiTrack {
|
35
|
35
|
/**
|
36
|
|
- * Constructs new JitsiLocalTrack instance.
|
|
36
|
+ * Constructs a new JitsiLocalTrack instance.
|
37
|
37
|
*
|
38
|
38
|
* @constructor
|
39
|
39
|
* @param {Object} trackInfo
|
40
|
|
- * @param {number} trackInfo.rtcId the ID assigned by the RTC module
|
41
|
|
- * @param trackInfo.stream WebRTC MediaStream, parent of the track
|
42
|
|
- * @param trackInfo.track underlying WebRTC MediaStreamTrack for new
|
43
|
|
- * JitsiRemoteTrack
|
44
|
|
- * @param trackInfo.mediaType the MediaType of the JitsiRemoteTrack
|
45
|
|
- * @param trackInfo.videoType the VideoType of the JitsiRemoteTrack
|
46
|
|
- * @param trackInfo.effects the effects array contains the effect instance to use
|
47
|
|
- * @param trackInfo.resolution the video resolution if it's a video track
|
48
|
|
- * @param trackInfo.deviceId the ID of the local device for this track
|
49
|
|
- * @param trackInfo.facingMode the camera facing mode used in getUserMedia
|
50
|
|
- * call
|
51
|
|
- * @param {sourceId} trackInfo.sourceId - The id of the desktop sharing
|
52
|
|
- * source. NOTE: defined for desktop sharing tracks only.
|
|
40
|
+ * @param {number} trackInfo.rtcId - The ID assigned by the RTC module.
|
|
41
|
+ * @param {Object} trackInfo.stream - The WebRTC MediaStream, parent of the track.
|
|
42
|
+ * @param {Object} trackInfo.track - The underlying WebRTC MediaStreamTrack for new JitsiLocalTrack.
|
|
43
|
+ * @param {string} trackInfo.mediaType - The MediaType of the JitsiLocalTrack.
|
|
44
|
+ * @param {string} trackInfo.videoType - The VideoType of the JitsiLocalTrack.
|
|
45
|
+ * @param {Array<Object>} trackInfo.effects - The effects to be applied to the JitsiLocalTrack.
|
|
46
|
+ * @param {number} trackInfo.resolution - The video resolution if it's a video track.
|
|
47
|
+ * @param {string} trackInfo.deviceId - The ID of the local device for this track.
|
|
48
|
+ * @param {string} trackInfo.facingMode - The camera facing mode used in getUserMedia call (for mobile only).
|
|
49
|
+ * @param {sourceId} trackInfo.sourceId - The id of the desktop sharing source. NOTE: defined for desktop sharing
|
|
50
|
+ * tracks only.
|
53
|
51
|
*/
|
54
|
52
|
constructor({
|
55
|
53
|
deviceId,
|
|
@@ -208,32 +206,52 @@ export default class JitsiLocalTrack extends JitsiTrack {
|
208
|
206
|
}
|
209
|
207
|
|
210
|
208
|
/**
|
211
|
|
- * Get the duration of the track.
|
|
209
|
+ * Adds stream to conference and marks it as "unmute" operation.
|
212
|
210
|
*
|
213
|
|
- * @returns {Number} the duration of the track in seconds
|
|
211
|
+ * @private
|
|
212
|
+ * @returns {Promise}
|
214
|
213
|
*/
|
215
|
|
- getDuration() {
|
216
|
|
- return (Date.now() / 1000) - (this.metadata.timestamp / 1000);
|
|
214
|
+ _addStreamToConferenceAsUnmute() {
|
|
215
|
+ if (!this.conference) {
|
|
216
|
+ return Promise.resolve();
|
|
217
|
+ }
|
|
218
|
+
|
|
219
|
+ // FIXME it would be good to not include the conference as part of this process. Only TraceablePeerConnections to
|
|
220
|
+ // which the track is attached should care about this action. The TPCs to which the track is not attached can
|
|
221
|
+ // sync up when track is re-attached. A problem with that is that the "modify sources" queue is part of the
|
|
222
|
+ // JingleSessionPC and it would be excluded from the process. One solution would be to extract class between
|
|
223
|
+ // TPC and JingleSessionPC which would contain the queue and would notify the signaling layer when local SSRCs
|
|
224
|
+ // are changed. This would help to separate XMPP from the RTC module.
|
|
225
|
+ return new Promise((resolve, reject) => {
|
|
226
|
+ this.conference._addLocalTrackAsUnmute(this)
|
|
227
|
+ .then(resolve, error => reject(new Error(error)));
|
|
228
|
+ });
|
217
|
229
|
}
|
218
|
230
|
|
219
|
231
|
/**
|
220
|
|
- * Returns if associated MediaStreamTrack is in the 'ended' state
|
|
232
|
+ * Fires NO_DATA_FROM_SOURCE event and logs it to analytics and callstats.
|
221
|
233
|
*
|
222
|
|
- * @returns {boolean}
|
|
234
|
+ * @private
|
|
235
|
+ * @returns {void}
|
223
|
236
|
*/
|
224
|
|
- isEnded() {
|
225
|
|
- if (this.isVideoTrack() && this.isMuted()) {
|
226
|
|
- // If a video track is muted the readyState will be ended, that's why we need to rely only on the
|
227
|
|
- // _trackEnded flag.
|
228
|
|
- return this._trackEnded;
|
229
|
|
- }
|
|
237
|
+ _fireNoDataFromSourceEvent() {
|
|
238
|
+ const value = !this.isReceivingData();
|
230
|
239
|
|
231
|
|
- return this.getTrack().readyState === 'ended' || this._trackEnded;
|
|
240
|
+ this.emit(NO_DATA_FROM_SOURCE, value);
|
|
241
|
+
|
|
242
|
+ // FIXME: Should we report all of those events
|
|
243
|
+ Statistics.sendAnalytics(createNoDataFromSourceEvent(this.getType(), value));
|
|
244
|
+ Statistics.sendLog(JSON.stringify({
|
|
245
|
+ name: NO_DATA_FROM_SOURCE,
|
|
246
|
+ log: value
|
|
247
|
+ }));
|
232
|
248
|
}
|
233
|
249
|
|
234
|
250
|
/**
|
235
|
|
- * Sets handlers to the MediaStreamTrack object that will detect camera
|
236
|
|
- * issues.
|
|
251
|
+ * Sets handlers to the MediaStreamTrack object that will detect camera issues.
|
|
252
|
+ *
|
|
253
|
+ * @private
|
|
254
|
+ * @returns {void}
|
237
|
255
|
*/
|
238
|
256
|
_initNoDataFromSourceHandlers() {
|
239
|
257
|
if (!this._isNoDataFromSourceEventsEnabled()) {
|
|
@@ -268,6 +286,7 @@ export default class JitsiLocalTrack extends JitsiTrack {
|
268
|
286
|
/**
|
269
|
287
|
* Returns true if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
|
270
|
288
|
*
|
|
289
|
+ * @private
|
271
|
290
|
* @returns {boolean} - True if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
|
272
|
291
|
*/
|
273
|
292
|
_isNoDataFromSourceEventsEnabled() {
|
|
@@ -276,216 +295,58 @@ export default class JitsiLocalTrack extends JitsiTrack {
|
276
|
295
|
}
|
277
|
296
|
|
278
|
297
|
/**
|
279
|
|
- * Fires NO_DATA_FROM_SOURCE event and logs it to analytics and callstats.
|
280
|
|
- */
|
281
|
|
- _fireNoDataFromSourceEvent() {
|
282
|
|
- const value = !this.isReceivingData();
|
283
|
|
-
|
284
|
|
- this.emit(NO_DATA_FROM_SOURCE, value);
|
285
|
|
-
|
286
|
|
- // FIXME: Should we report all of those events
|
287
|
|
- Statistics.sendAnalytics(createNoDataFromSourceEvent(this.getType(), value));
|
288
|
|
- Statistics.sendLog(JSON.stringify({
|
289
|
|
- name: NO_DATA_FROM_SOURCE,
|
290
|
|
- log: value
|
291
|
|
- }));
|
292
|
|
- }
|
293
|
|
-
|
294
|
|
- /**
|
295
|
|
- * Sets real device ID by comparing track information with device
|
296
|
|
- * information. This is temporary solution until getConstraints() method
|
297
|
|
- * will be implemented in browsers.
|
|
298
|
+ * Initializes a new Promise to execute {@link #_setMuted}. May be called multiple times in a row and the
|
|
299
|
+ * invocations of {@link #_setMuted} and, consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
|
|
300
|
+ * serialized fashion.
|
298
|
301
|
*
|
299
|
|
- * @param {MediaDeviceInfo[]} devices - list of devices obtained from
|
300
|
|
- * enumerateDevices() call
|
|
302
|
+ * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
|
|
303
|
+ * @private
|
|
304
|
+ * @returns {Promise}
|
301
|
305
|
*/
|
302
|
|
- _setRealDeviceIdFromDeviceList(devices) {
|
303
|
|
- const track = this.getTrack();
|
304
|
|
- const kind = `${track.kind}input`;
|
305
|
|
- let device = devices.find(d => d.kind === kind && d.label === track.label);
|
306
|
|
-
|
307
|
|
- if (!device && this._realDeviceId === 'default') { // the default device has been changed.
|
308
|
|
- // If the default device was 'A' and the default device is changed to 'B' the label for the track will
|
309
|
|
- // remain 'Default - A' but the label for the device in the device list will be updated to 'A'. That's
|
310
|
|
- // why in order to match it we need to remove the 'Default - ' part.
|
311
|
|
- const label = (track.label || '').replace('Default - ', '');
|
|
306
|
+ _queueSetMuted(muted) {
|
|
307
|
+ const setMuted = this._setMuted.bind(this, muted);
|
312
|
308
|
|
313
|
|
- device = devices.find(d => d.kind === kind && d.label === label);
|
314
|
|
- }
|
|
309
|
+ this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);
|
315
|
310
|
|
316
|
|
- if (device) {
|
317
|
|
- this._realDeviceId = device.deviceId;
|
318
|
|
- } else {
|
319
|
|
- this._realDeviceId = undefined;
|
320
|
|
- }
|
|
311
|
+ return this._prevSetMuted;
|
321
|
312
|
}
|
322
|
313
|
|
323
|
314
|
/**
|
324
|
|
- * Sets the stream property of JitsiLocalTrack object and sets all stored
|
325
|
|
- * handlers to it.
|
|
315
|
+ * Removes stream from conference and marks it as "mute" operation.
|
326
|
316
|
*
|
327
|
|
- * @param {MediaStream} stream the new stream.
|
328
|
|
- * @protected
|
|
317
|
+ * @param {Function} successCallback - Callback that will be called when the operation is successful.
|
|
318
|
+ * @param {Function} errorCallback - Callback that will be called when the operation fails.
|
|
319
|
+ * @private
|
|
320
|
+ * @returns {Promise}
|
329
|
321
|
*/
|
330
|
|
- _setStream(stream) {
|
331
|
|
- super._setStream(stream);
|
|
322
|
+ _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
|
|
323
|
+ if (!this.conference) {
|
|
324
|
+ successCallback();
|
332
|
325
|
|
333
|
|
- if (stream) {
|
334
|
|
- // Store the MSID for video mute/unmute purposes.
|
335
|
|
- this.storedMSID = this.getMSID();
|
336
|
|
- logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
|
337
|
|
- } else {
|
338
|
|
- logger.debug(`Setting 'null' stream on ${this}`);
|
|
326
|
+ return;
|
339
|
327
|
}
|
|
328
|
+ this.conference._removeLocalTrackAsMute(this).then(
|
|
329
|
+ successCallback,
|
|
330
|
+ error => errorCallback(new Error(error)));
|
340
|
331
|
}
|
341
|
332
|
|
342
|
333
|
/**
|
343
|
|
- * Starts the effect process and returns the modified stream.
|
344
|
|
- *
|
345
|
|
- * @private
|
346
|
|
- * @param {*} effect - Represents effect instance
|
347
|
|
- * @returns {void}
|
348
|
|
- */
|
349
|
|
- _startStreamEffect(effect) {
|
350
|
|
- this._streamEffect = effect;
|
351
|
|
- this._originalStream = this.stream;
|
352
|
|
- this._setStream(this._streamEffect.startEffect(this._originalStream));
|
353
|
|
- this.track = this.stream.getTracks()[0];
|
354
|
|
- }
|
355
|
|
-
|
356
|
|
- /**
|
357
|
|
- * Stops the effect process and returns the original stream.
|
|
334
|
+ * Sends mute status for a track to conference if any.
|
358
|
335
|
*
|
|
336
|
+ * @param {boolean} mute - If track is muted.
|
359
|
337
|
* @private
|
360
|
338
|
* @returns {void}
|
361
|
339
|
*/
|
362
|
|
- _stopStreamEffect() {
|
363
|
|
- if (this._streamEffect) {
|
364
|
|
- this._streamEffect.stopEffect();
|
365
|
|
- this._setStream(this._originalStream);
|
366
|
|
- this._originalStream = null;
|
367
|
|
- this.track = this.stream ? this.stream.getTracks()[0] : null;
|
368
|
|
- }
|
369
|
|
- }
|
370
|
|
-
|
371
|
|
- /**
|
372
|
|
- * Stops the currently used effect (if there is one) and starts the passed effect (if there is one).
|
373
|
|
- *
|
374
|
|
- * @param {Object|undefined} effect - The new effect to be set.
|
375
|
|
- */
|
376
|
|
- _switchStreamEffect(effect) {
|
377
|
|
- if (this._streamEffect) {
|
378
|
|
- this._stopStreamEffect();
|
379
|
|
- this._streamEffect = undefined;
|
380
|
|
- }
|
381
|
|
- if (effect) {
|
382
|
|
- this._startStreamEffect(effect);
|
383
|
|
- }
|
384
|
|
- }
|
385
|
|
-
|
386
|
|
- /**
|
387
|
|
- * Sets the effect and switches between the modified stream and original one.
|
388
|
|
- *
|
389
|
|
- * @param {Object} effect - Represents the effect instance to be used.
|
390
|
|
- * @returns {Promise}
|
391
|
|
- */
|
392
|
|
- setEffect(effect) {
|
393
|
|
- if (typeof this._streamEffect === 'undefined' && typeof effect === 'undefined') {
|
394
|
|
- return Promise.resolve();
|
395
|
|
- }
|
396
|
|
-
|
397
|
|
- if (typeof effect !== 'undefined' && !effect.isEnabled(this)) {
|
398
|
|
- return Promise.reject(new Error('Incompatible effect instance!'));
|
399
|
|
- }
|
400
|
|
-
|
401
|
|
- if (this._setEffectInProgress === true) {
|
402
|
|
- return Promise.reject(new Error('setEffect already in progress!'));
|
403
|
|
- }
|
404
|
|
-
|
405
|
|
- // In case we have an audio track that is being enhanced with an effect, we still want it to be applied,
|
406
|
|
- // even if the track is muted. Where as for video the actual track doesn't exists if it's muted.
|
407
|
|
- if (this.isMuted() && !this.isAudioTrack()) {
|
408
|
|
- this._streamEffect = effect;
|
409
|
|
-
|
410
|
|
- return Promise.resolve();
|
411
|
|
- }
|
412
|
|
-
|
413
|
|
- const conference = this.conference;
|
414
|
|
-
|
415
|
|
- if (!conference) {
|
416
|
|
- this._switchStreamEffect(effect);
|
417
|
|
- if (this.isVideoTrack()) {
|
418
|
|
- this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));
|
419
|
|
- }
|
420
|
|
-
|
421
|
|
- return Promise.resolve();
|
|
340
|
+ _sendMuteStatus(mute) {
|
|
341
|
+ if (this.conference) {
|
|
342
|
+ this.conference._setTrackMuteStatus(this.getType(), this, mute) && this.conference.room.sendPresence();
|
422
|
343
|
}
|
423
|
|
-
|
424
|
|
- this._setEffectInProgress = true;
|
425
|
|
-
|
426
|
|
- // TODO: Create new JingleSessionPC method for replacing a stream in JitsiLocalTrack without offer answer.
|
427
|
|
- return conference.removeTrack(this)
|
428
|
|
- .then(() => {
|
429
|
|
- this._switchStreamEffect(effect);
|
430
|
|
- if (this.isVideoTrack()) {
|
431
|
|
- this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));
|
432
|
|
- }
|
433
|
|
-
|
434
|
|
- return conference.addTrack(this);
|
435
|
|
- })
|
436
|
|
- .then(() => {
|
437
|
|
- this._setEffectInProgress = false;
|
438
|
|
- })
|
439
|
|
- .catch(error => {
|
440
|
|
- // Any error will be not recovarable and will trigger CONFERENCE_FAILED event. But let's try to cleanup
|
441
|
|
- // everyhting related to the effect functionality.
|
442
|
|
- this._setEffectInProgress = false;
|
443
|
|
- this._switchStreamEffect();
|
444
|
|
- logger.error('Failed to switch to the new stream!', error);
|
445
|
|
- throw error;
|
446
|
|
- });
|
447
|
|
- }
|
448
|
|
-
|
449
|
|
- /**
|
450
|
|
- * Asynchronously mutes this track.
|
451
|
|
- *
|
452
|
|
- * @returns {Promise}
|
453
|
|
- */
|
454
|
|
- mute() {
|
455
|
|
- return this._queueSetMuted(true);
|
456
|
|
- }
|
457
|
|
-
|
458
|
|
- /**
|
459
|
|
- * Asynchronously unmutes this track.
|
460
|
|
- *
|
461
|
|
- * @returns {Promise}
|
462
|
|
- */
|
463
|
|
- unmute() {
|
464
|
|
- return this._queueSetMuted(false);
|
465
|
|
- }
|
466
|
|
-
|
467
|
|
- /**
|
468
|
|
- * Initializes a new Promise to execute {@link #_setMuted}. May be called
|
469
|
|
- * multiple times in a row and the invocations of {@link #_setMuted} and,
|
470
|
|
- * consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
|
471
|
|
- * serialized fashion.
|
472
|
|
- *
|
473
|
|
- * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
|
474
|
|
- * @returns {Promise}
|
475
|
|
- */
|
476
|
|
- _queueSetMuted(muted) {
|
477
|
|
- const setMuted = this._setMuted.bind(this, muted);
|
478
|
|
-
|
479
|
|
- this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);
|
480
|
|
-
|
481
|
|
- return this._prevSetMuted;
|
482
|
344
|
}
|
483
|
345
|
|
484
|
346
|
/**
|
485
|
347
|
* Mutes / unmutes this track.
|
486
|
348
|
*
|
487
|
|
- * @param {boolean} muted - If <tt>true</tt>, this track will be muted;
|
488
|
|
- * otherwise, this track will be unmuted.
|
|
349
|
+ * @param {boolean} muted - If <tt>true</tt>, this track will be muted; otherwise, this track will be unmuted.
|
489
|
350
|
* @private
|
490
|
351
|
* @returns {Promise}
|
491
|
352
|
*/
|
|
@@ -592,68 +453,127 @@ export default class JitsiLocalTrack extends JitsiTrack {
|
592
|
453
|
}
|
593
|
454
|
|
594
|
455
|
/**
|
595
|
|
- * Adds stream to conference and marks it as "unmute" operation.
|
|
456
|
+ * Sets real device ID by comparing track information with device information. This is temporary solution until
|
|
457
|
+ * getConstraints() method will be implemented in browsers.
|
596
|
458
|
*
|
|
459
|
+ * @param {MediaDeviceInfo[]} devices - The list of devices obtained from enumerateDevices() call.
|
597
|
460
|
* @private
|
598
|
|
- * @returns {Promise}
|
|
461
|
+ * @returns {void}
|
599
|
462
|
*/
|
600
|
|
- _addStreamToConferenceAsUnmute() {
|
601
|
|
- if (!this.conference) {
|
602
|
|
- return Promise.resolve();
|
|
463
|
+ _setRealDeviceIdFromDeviceList(devices) {
|
|
464
|
+ const track = this.getTrack();
|
|
465
|
+ const kind = `${track.kind}input`;
|
|
466
|
+ let device = devices.find(d => d.kind === kind && d.label === track.label);
|
|
467
|
+
|
|
468
|
+ if (!device && this._realDeviceId === 'default') { // the default device has been changed.
|
|
469
|
+ // If the default device was 'A' and the default device is changed to 'B' the label for the track will
|
|
470
|
+ // remain 'Default - A' but the label for the device in the device list will be updated to 'A'. That's
|
|
471
|
+ // why in order to match it we need to remove the 'Default - ' part.
|
|
472
|
+ const label = (track.label || '').replace('Default - ', '');
|
|
473
|
+
|
|
474
|
+ device = devices.find(d => d.kind === kind && d.label === label);
|
603
|
475
|
}
|
604
|
476
|
|
605
|
|
- // FIXME it would be good to not included conference as part of this
|
606
|
|
- // process. Only TraceablePeerConnections to which the track is attached
|
607
|
|
- // should care about this action. The TPCs to which the track is not
|
608
|
|
- // attached can sync up when track is re-attached.
|
609
|
|
- // A problem with that is that the "modify sources" queue is part of
|
610
|
|
- // the JingleSessionPC and it would be excluded from the process. One
|
611
|
|
- // solution would be to extract class between TPC and JingleSessionPC
|
612
|
|
- // which would contain the queue and would notify the signaling layer
|
613
|
|
- // when local SSRCs are changed. This would help to separate XMPP from
|
614
|
|
- // the RTC module.
|
615
|
|
- return new Promise((resolve, reject) => {
|
616
|
|
- this.conference._addLocalTrackAsUnmute(this)
|
617
|
|
- .then(resolve, error => reject(new Error(error)));
|
618
|
|
- });
|
|
477
|
+ if (device) {
|
|
478
|
+ this._realDeviceId = device.deviceId;
|
|
479
|
+ } else {
|
|
480
|
+ this._realDeviceId = undefined;
|
|
481
|
+ }
|
619
|
482
|
}
|
620
|
483
|
|
621
|
484
|
/**
|
622
|
|
- * Removes stream from conference and marks it as "mute" operation.
|
|
485
|
+ * Sets the stream property of JitsiLocalTrack object and sets all stored handlers to it.
|
623
|
486
|
*
|
624
|
|
- * @param {Function} successCallback will be called on success
|
625
|
|
- * @param {Function} errorCallback will be called on error
|
|
487
|
+ * @param {MediaStream} stream - The new MediaStream.
|
626
|
488
|
* @private
|
|
489
|
+ * @returns {void}
|
627
|
490
|
*/
|
628
|
|
- _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
|
629
|
|
- if (!this.conference) {
|
630
|
|
- successCallback();
|
|
491
|
+ _setStream(stream) {
|
|
492
|
+ super._setStream(stream);
|
631
|
493
|
|
632
|
|
- return;
|
|
494
|
+ if (stream) {
|
|
495
|
+ // Store the MSID for video mute/unmute purposes.
|
|
496
|
+ this.storedMSID = this.getMSID();
|
|
497
|
+ logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
|
|
498
|
+ } else {
|
|
499
|
+ logger.debug(`Setting 'null' stream on ${this}`);
|
633
|
500
|
}
|
634
|
|
- this.conference._removeLocalTrackAsMute(this).then(
|
635
|
|
- successCallback,
|
636
|
|
- error => errorCallback(new Error(error)));
|
637
|
501
|
}
|
638
|
502
|
|
639
|
503
|
/**
|
640
|
|
- * Sends mute status for a track to conference if any.
|
|
504
|
+ * Starts the effect process and returns the modified stream.
|
641
|
505
|
*
|
642
|
|
- * @param {boolean} mute - If track is muted.
|
|
506
|
+ * @param {Object} effect - Represents effect instance
|
643
|
507
|
* @private
|
644
|
508
|
* @returns {void}
|
645
|
509
|
*/
|
646
|
|
- _sendMuteStatus(mute) {
|
647
|
|
- if (this.conference) {
|
648
|
|
- this.conference._setTrackMuteStatus(this.getType(), this, mute) && this.conference.room.sendPresence();
|
|
510
|
+ _startStreamEffect(effect) {
|
|
511
|
+ this._streamEffect = effect;
|
|
512
|
+ this._originalStream = this.stream;
|
|
513
|
+ this._setStream(this._streamEffect.startEffect(this._originalStream));
|
|
514
|
+ this.track = this.stream.getTracks()[0];
|
|
515
|
+ }
|
|
516
|
+
|
|
517
|
+ /**
|
|
518
|
+ * Stops the effect process and returns the original stream.
|
|
519
|
+ *
|
|
520
|
+ * @private
|
|
521
|
+ * @returns {void}
|
|
522
|
+ */
|
|
523
|
+ _stopStreamEffect() {
|
|
524
|
+ if (this._streamEffect) {
|
|
525
|
+ this._streamEffect.stopEffect();
|
|
526
|
+ this._setStream(this._originalStream);
|
|
527
|
+ this._originalStream = null;
|
|
528
|
+ this.track = this.stream ? this.stream.getTracks()[0] : null;
|
|
529
|
+ }
|
|
530
|
+ }
|
|
531
|
+
|
|
532
|
+ /**
|
|
533
|
+ * Switches the camera facing mode if the WebRTC implementation supports the custom MediaStreamTrack._switchCamera
|
|
534
|
+ * method. Currently, the method in question is implemented in react-native-webrtc only. When such a WebRTC
|
|
535
|
+ * implementation is executing, the method is the preferred way to switch between the front/user-facing and the
|
|
536
|
+ * back/environment-facing cameras because it will likely be (as is the case of react-native-webrtc) noticeably
|
|
537
|
+ * faster than creating a new MediaStreamTrack via a new getUserMedia call with the switched facingMode constraint
|
|
538
|
+ * value. Moreover, the approach with a new getUserMedia call may not even work: WebRTC on Android and iOS is
|
|
539
|
+ * either very slow to open the camera a second time or plainly freezes attempting to do that.
|
|
540
|
+ *
|
|
541
|
+ * @returns {void}
|
|
542
|
+ */
|
|
543
|
+ _switchCamera() {
|
|
544
|
+ if (this.isVideoTrack()
|
|
545
|
+ && this.videoType === VideoType.CAMERA
|
|
546
|
+ && typeof this.track._switchCamera === 'function') {
|
|
547
|
+ this.track._switchCamera();
|
|
548
|
+
|
|
549
|
+ this._facingMode
|
|
550
|
+ = this._facingMode === CameraFacingMode.ENVIRONMENT
|
|
551
|
+ ? CameraFacingMode.USER
|
|
552
|
+ : CameraFacingMode.ENVIRONMENT;
|
|
553
|
+ }
|
|
554
|
+ }
|
|
555
|
+
|
|
556
|
+ /**
|
|
557
|
+ * Stops the currently used effect (if there is one) and starts the passed effect (if there is one).
|
|
558
|
+ *
|
|
559
|
+ * @param {Object|undefined} effect - The new effect to be set.
|
|
560
|
+ * @private
|
|
561
|
+ * @returns {void}
|
|
562
|
+ */
|
|
563
|
+ _switchStreamEffect(effect) {
|
|
564
|
+ if (this._streamEffect) {
|
|
565
|
+ this._stopStreamEffect();
|
|
566
|
+ this._streamEffect = undefined;
|
|
567
|
+ }
|
|
568
|
+ if (effect) {
|
|
569
|
+ this._startStreamEffect(effect);
|
649
|
570
|
}
|
650
|
571
|
}
|
651
|
572
|
|
652
|
573
|
/**
|
653
|
574
|
* @inheritdoc
|
654
|
575
|
*
|
655
|
|
- * Stops sending the media track. And removes it from the HTML.
|
656
|
|
- * NOTE: Works for local tracks only.
|
|
576
|
+ * Stops sending the media track. And removes it from the HTML. NOTE: Works for local tracks only.
|
657
|
577
|
*
|
658
|
578
|
* @extends JitsiTrack#dispose
|
659
|
579
|
* @returns {Promise}
|
|
@@ -687,45 +607,80 @@ export default class JitsiLocalTrack extends JitsiTrack {
|
687
|
607
|
}
|
688
|
608
|
|
689
|
609
|
/**
|
690
|
|
- * Returns <tt>true</tt> - if the stream is muted and <tt>false</tt>
|
691
|
|
- * otherwise.
|
|
610
|
+ * Returns facing mode for video track from camera. For other cases (e.g. audio track or 'desktop' video track)
|
|
611
|
+ * returns undefined.
|
692
|
612
|
*
|
693
|
|
- * @returns {boolean} <tt>true</tt> - if the stream is muted and
|
694
|
|
- * <tt>false</tt> otherwise.
|
|
613
|
+ * @returns {CameraFacingMode|undefined}
|
695
|
614
|
*/
|
696
|
|
- isMuted() {
|
697
|
|
- // this.stream will be null when we mute local video on Chrome
|
698
|
|
- if (!this.stream) {
|
699
|
|
- return true;
|
700
|
|
- }
|
701
|
|
- if (this.isVideoTrack() && !this.isActive()) {
|
702
|
|
- return true;
|
703
|
|
- }
|
|
615
|
+ getCameraFacingMode() {
|
|
616
|
+ if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
|
|
617
|
+ // MediaStreamTrack#getSettings() is not implemented in many
|
|
618
|
+ // browsers, so we need feature checking here. Progress on the
|
|
619
|
+ // respective browser's implementation can be tracked at
|
|
620
|
+ // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
|
|
621
|
+ // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
|
|
622
|
+ // for Firefox. Even if a browser implements getSettings() already,
|
|
623
|
+ // it might still not return anything for 'facingMode'.
|
|
624
|
+ const trackSettings = this.track.getSettings?.();
|
704
|
625
|
|
705
|
|
- // If currently used stream effect has its own muted state, use that.
|
706
|
|
- if (this._streamEffect && this._streamEffect.isMuted) {
|
707
|
|
- return this._streamEffect.isMuted();
|
|
626
|
+ if (trackSettings && 'facingMode' in trackSettings) {
|
|
627
|
+ return trackSettings.facingMode;
|
|
628
|
+ }
|
|
629
|
+
|
|
630
|
+ if (typeof this._facingMode !== 'undefined') {
|
|
631
|
+ return this._facingMode;
|
|
632
|
+ }
|
|
633
|
+
|
|
634
|
+ // In most cases we are showing a webcam. So if we've gotten here,
|
|
635
|
+ // it should be relatively safe to assume that we are probably
|
|
636
|
+ // showing the user-facing camera.
|
|
637
|
+ return CameraFacingMode.USER;
|
708
|
638
|
}
|
709
|
639
|
|
710
|
|
- return !this.track || !this.track.enabled;
|
|
640
|
+ return undefined;
|
711
|
641
|
}
|
712
|
642
|
|
713
|
643
|
/**
|
714
|
|
- * Sets the JitsiConference object associated with the track. This is temp
|
715
|
|
- * solution.
|
|
644
|
+ * Returns device id associated with track.
|
716
|
645
|
*
|
717
|
|
- * @param conference the JitsiConference object
|
|
646
|
+ * @returns {string}
|
718
|
647
|
*/
|
719
|
|
- _setConference(conference) {
|
720
|
|
- this.conference = conference;
|
|
648
|
+ getDeviceId() {
|
|
649
|
+ return this._realDeviceId || this.deviceId;
|
|
650
|
+ }
|
721
|
651
|
|
722
|
|
- // We want to keep up with postponed events which should have been fired
|
723
|
|
- // on "attach" call, but for local track we not always have the
|
724
|
|
- // conference before attaching. However this may result in duplicated
|
725
|
|
- // events if they have been triggered on "attach" already.
|
726
|
|
- for (let i = 0; i < this.containers.length; i++) {
|
727
|
|
- this._maybeFireTrackAttached(this.containers[i]);
|
|
652
|
+ /**
|
|
653
|
+ * Get the duration of the track.
|
|
654
|
+ *
|
|
655
|
+ * @returns {Number} the duration of the track in seconds
|
|
656
|
+ */
|
|
657
|
+ getDuration() {
|
|
658
|
+ return (Date.now() / 1000) - (this.metadata.timestamp / 1000);
|
|
659
|
+ }
|
|
660
|
+
|
|
661
|
+ /**
|
|
662
|
+ * Returns the participant id which owns the track.
|
|
663
|
+ *
|
|
664
|
+ * @returns {string} the id of the participants. It corresponds to the
|
|
665
|
+ * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
|
|
666
|
+ */
|
|
667
|
+ getParticipantId() {
|
|
668
|
+ return this.conference && this.conference.myUserId();
|
|
669
|
+ }
|
|
670
|
+
|
|
671
|
+ /**
|
|
672
|
+ * Returns if associated MediaStreamTrack is in the 'ended' state
|
|
673
|
+ *
|
|
674
|
+ * @returns {boolean}
|
|
675
|
+ */
|
|
676
|
+ isEnded() {
|
|
677
|
+ if (this.isVideoTrack() && this.isMuted()) {
|
|
678
|
+ // If a video track is muted the readyState will be ended, that's why we need to rely only on the
|
|
679
|
+ // _trackEnded flag.
|
|
680
|
+ return this._trackEnded;
|
728
|
681
|
}
|
|
682
|
+
|
|
683
|
+ return this.getTrack().readyState === 'ended' || this._trackEnded;
|
729
|
684
|
}
|
730
|
685
|
|
731
|
686
|
/**
|
|
@@ -738,32 +693,75 @@ export default class JitsiLocalTrack extends JitsiTrack {
|
738
|
693
|
}
|
739
|
694
|
|
740
|
695
|
/**
|
741
|
|
- * Returns device id associated with track.
|
|
696
|
+ * Returns <tt>true</tt> - if the stream is muted and <tt>false</tt> otherwise.
|
742
|
697
|
*
|
743
|
|
- * @returns {string}
|
|
698
|
+ * @returns {boolean} <tt>true</tt> - if the stream is muted and <tt>false</tt> otherwise.
|
744
|
699
|
*/
|
745
|
|
- getDeviceId() {
|
746
|
|
- return this._realDeviceId || this.deviceId;
|
|
700
|
+ isMuted() {
|
|
701
|
+ // this.stream will be null when we mute local video on Chrome
|
|
702
|
+ if (!this.stream) {
|
|
703
|
+ return true;
|
|
704
|
+ }
|
|
705
|
+ if (this.isVideoTrack() && !this.isActive()) {
|
|
706
|
+ return true;
|
|
707
|
+ }
|
|
708
|
+
|
|
709
|
+ // If currently used stream effect has its own muted state, use that.
|
|
710
|
+ if (this._streamEffect && this._streamEffect.isMuted) {
|
|
711
|
+ return this._streamEffect.isMuted();
|
|
712
|
+ }
|
|
713
|
+
|
|
714
|
+ return !this.track || !this.track.enabled;
|
747
|
715
|
}
|
748
|
716
|
|
749
|
717
|
/**
|
750
|
|
- * Returns the participant id which owns the track.
|
|
718
|
+ * Checks whether the attached MediaStream is receiving data from source or not. If the stream property is null
|
|
719
|
+ * (because of mute or another reason) this method will return false.
|
|
720
|
+ * NOTE: This method doesn't indicate problem with the streams directly. For example in case of video mute the
|
|
721
|
+ * method will return false or if the user has disposed the track.
|
751
|
722
|
*
|
752
|
|
- * @returns {string} the id of the participants. It corresponds to the
|
753
|
|
- * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
|
|
723
|
+ * @returns {boolean} true if the stream is receiving data and false otherwise.
|
754
|
724
|
*/
|
755
|
|
- getParticipantId() {
|
756
|
|
- return this.conference && this.conference.myUserId();
|
|
725
|
+ isReceivingData() {
|
|
726
|
+ if (this.isVideoTrack()
|
|
727
|
+ && (this.isMuted() || this._stopStreamInProgress || this.videoType === VideoType.DESKTOP)) {
|
|
728
|
+ return true;
|
|
729
|
+ }
|
|
730
|
+
|
|
731
|
+ if (!this.stream) {
|
|
732
|
+ return false;
|
|
733
|
+ }
|
|
734
|
+
|
|
735
|
+ // In older version of the spec there is no muted property and readyState can have value muted. In the latest
|
|
736
|
+ // versions readyState can have values "live" and "ended" and there is muted boolean property. If the stream is
|
|
737
|
+ // muted that means that we aren't receiving any data from the source. We want to notify the users for error if
|
|
738
|
+ // the stream is muted or ended on it's creation.
|
|
739
|
+
|
|
740
|
+ // For video blur enabled use the original video stream
|
|
741
|
+ const stream = this._effectEnabled ? this._originalStream : this.stream;
|
|
742
|
+
|
|
743
|
+ return stream.getTracks().some(track =>
|
|
744
|
+ (!('readyState' in track) || track.readyState === 'live')
|
|
745
|
+ && (!('muted' in track) || track.muted !== true));
|
|
746
|
+ }
|
|
747
|
+
|
|
748
|
+ /**
|
|
749
|
+ * Asynchronously mutes this track.
|
|
750
|
+ *
|
|
751
|
+ * @returns {Promise}
|
|
752
|
+ */
|
|
753
|
+ mute() {
|
|
754
|
+ return this._queueSetMuted(true);
|
757
|
755
|
}
|
758
|
756
|
|
759
|
757
|
/**
|
760
|
|
- * Handles bytes sent statistics.
|
|
758
|
+ * Handles bytes sent statistics. NOTE: used only for audio tracks to detect audio issues.
|
761
|
759
|
*
|
762
|
|
- * @param {TraceablePeerConnection} tpc the source of the "bytes sent" stat
|
763
|
|
- * @param {number} bytesSent the new value
|
764
|
|
- * NOTE: used only for audio tracks to detect audio issues.
|
|
760
|
+ * @param {TraceablePeerConnection} tpc - The peerconnection that is reporting the bytes sent stat.
|
|
761
|
+ * @param {number} bytesSent - The new value.
|
|
762
|
+ * @returns {void}
|
765
|
763
|
*/
|
766
|
|
- _onByteSentStatsReceived(tpc, bytesSent) {
|
|
764
|
+ onByteSentStatsReceived(tpc, bytesSent) {
|
767
|
765
|
if (bytesSent > 0) {
|
768
|
766
|
this._hasSentData = true;
|
769
|
767
|
}
|
|
@@ -783,41 +781,90 @@ export default class JitsiLocalTrack extends JitsiTrack {
|
783
|
781
|
}
|
784
|
782
|
|
785
|
783
|
/**
|
786
|
|
- * Returns facing mode for video track from camera. For other cases (e.g.
|
787
|
|
- * audio track or 'desktop' video track) returns undefined.
|
|
784
|
+ * Sets the JitsiConference object associated with the track. This is temp solution.
|
788
|
785
|
*
|
789
|
|
- * @returns {CameraFacingMode|undefined}
|
|
786
|
+ * @param conference - JitsiConference object.
|
|
787
|
+ * @returns {void}
|
790
|
788
|
*/
|
791
|
|
- getCameraFacingMode() {
|
792
|
|
- if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
|
793
|
|
- // MediaStreamTrack#getSettings() is not implemented in many
|
794
|
|
- // browsers, so we need feature checking here. Progress on the
|
795
|
|
- // respective browser's implementation can be tracked at
|
796
|
|
- // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
|
797
|
|
- // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
|
798
|
|
- // for Firefox. Even if a browser implements getSettings() already,
|
799
|
|
- // it might still not return anything for 'facingMode'.
|
800
|
|
- const trackSettings = this.track.getSettings?.();
|
|
789
|
+ setConference(conference) {
|
|
790
|
+ this.conference = conference;
|
801
|
791
|
|
802
|
|
- if (trackSettings && 'facingMode' in trackSettings) {
|
803
|
|
- return trackSettings.facingMode;
|
804
|
|
- }
|
|
792
|
+ // We want to keep up with postponed events which should have been fired
|
|
793
|
+ // on "attach" call, but for local track we not always have the
|
|
794
|
+ // conference before attaching. However this may result in duplicated
|
|
795
|
+ // events if they have been triggered on "attach" already.
|
|
796
|
+ for (let i = 0; i < this.containers.length; i++) {
|
|
797
|
+ this._maybeFireTrackAttached(this.containers[i]);
|
|
798
|
+ }
|
|
799
|
+ }
|
805
|
800
|
|
806
|
|
- if (typeof this._facingMode !== 'undefined') {
|
807
|
|
- return this._facingMode;
|
|
801
|
+ /**
|
|
802
|
+ * Sets the effect and switches between the modified stream and original one.
|
|
803
|
+ *
|
|
804
|
+ * @param {Object} effect - Represents the effect instance to be used.
|
|
805
|
+ * @returns {Promise}
|
|
806
|
+ */
|
|
807
|
+ setEffect(effect) {
|
|
808
|
+ if (typeof this._streamEffect === 'undefined' && typeof effect === 'undefined') {
|
|
809
|
+ return Promise.resolve();
|
|
810
|
+ }
|
|
811
|
+
|
|
812
|
+ if (typeof effect !== 'undefined' && !effect.isEnabled(this)) {
|
|
813
|
+ return Promise.reject(new Error('Incompatible effect instance!'));
|
|
814
|
+ }
|
|
815
|
+
|
|
816
|
+ if (this._setEffectInProgress === true) {
|
|
817
|
+ return Promise.reject(new Error('setEffect already in progress!'));
|
|
818
|
+ }
|
|
819
|
+
|
|
820
|
+ // In case we have an audio track that is being enhanced with an effect, we still want it to be applied,
|
|
821
|
+ // even if the track is muted. Where as for video the actual track doesn't exists if it's muted.
|
|
822
|
+ if (this.isMuted() && !this.isAudioTrack()) {
|
|
823
|
+ this._streamEffect = effect;
|
|
824
|
+
|
|
825
|
+ return Promise.resolve();
|
|
826
|
+ }
|
|
827
|
+
|
|
828
|
+ const conference = this.conference;
|
|
829
|
+
|
|
830
|
+ if (!conference) {
|
|
831
|
+ this._switchStreamEffect(effect);
|
|
832
|
+ if (this.isVideoTrack()) {
|
|
833
|
+ this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));
|
808
|
834
|
}
|
809
|
835
|
|
810
|
|
- // In most cases we are showing a webcam. So if we've gotten here,
|
811
|
|
- // it should be relatively safe to assume that we are probably
|
812
|
|
- // showing the user-facing camera.
|
813
|
|
- return CameraFacingMode.USER;
|
|
836
|
+ return Promise.resolve();
|
814
|
837
|
}
|
815
|
838
|
|
816
|
|
- return undefined;
|
|
839
|
+ this._setEffectInProgress = true;
|
|
840
|
+
|
|
841
|
+ // TODO: Create new JingleSessionPC method for replacing a stream in JitsiLocalTrack without offer answer.
|
|
842
|
+ return conference.removeTrack(this)
|
|
843
|
+ .then(() => {
|
|
844
|
+ this._switchStreamEffect(effect);
|
|
845
|
+ if (this.isVideoTrack()) {
|
|
846
|
+ this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));
|
|
847
|
+ }
|
|
848
|
+
|
|
849
|
+ return conference.addTrack(this);
|
|
850
|
+ })
|
|
851
|
+ .then(() => {
|
|
852
|
+ this._setEffectInProgress = false;
|
|
853
|
+ })
|
|
854
|
+ .catch(error => {
|
|
855
|
+ // Any error will be not recovarable and will trigger CONFERENCE_FAILED event. But let's try to cleanup
|
|
856
|
+ // everyhting related to the effect functionality.
|
|
857
|
+ this._setEffectInProgress = false;
|
|
858
|
+ this._switchStreamEffect();
|
|
859
|
+ logger.error('Failed to switch to the new stream!', error);
|
|
860
|
+ throw error;
|
|
861
|
+ });
|
817
|
862
|
}
|
818
|
863
|
|
819
|
864
|
/**
|
820
|
865
|
* Stops the associated MediaStream.
|
|
866
|
+ *
|
|
867
|
+ * @returns {void}
|
821
|
868
|
*/
|
822
|
869
|
stopStream() {
|
823
|
870
|
/**
|
|
@@ -837,69 +884,6 @@ export default class JitsiLocalTrack extends JitsiTrack {
|
837
|
884
|
}
|
838
|
885
|
}
|
839
|
886
|
|
840
|
|
- /**
|
841
|
|
- * Switches the camera facing mode if the WebRTC implementation supports the
|
842
|
|
- * custom MediaStreamTrack._switchCamera method. Currently, the method in
|
843
|
|
- * question is implemented in react-native-webrtc only. When such a WebRTC
|
844
|
|
- * implementation is executing, the method is the preferred way to switch
|
845
|
|
- * between the front/user-facing and the back/environment-facing cameras
|
846
|
|
- * because it will likely be (as is the case of react-native-webrtc)
|
847
|
|
- * noticeably faster that creating a new MediaStreamTrack via a new
|
848
|
|
- * getUserMedia call with the switched facingMode constraint value.
|
849
|
|
- * Moreover, the approach with a new getUserMedia call may not even work:
|
850
|
|
- * WebRTC on Android and iOS is either very slow to open the camera a second
|
851
|
|
- * time or plainly freezes attempting to do that.
|
852
|
|
- */
|
853
|
|
- _switchCamera() {
|
854
|
|
- if (this.isVideoTrack()
|
855
|
|
- && this.videoType === VideoType.CAMERA
|
856
|
|
- && typeof this.track._switchCamera === 'function') {
|
857
|
|
- this.track._switchCamera();
|
858
|
|
-
|
859
|
|
- this._facingMode
|
860
|
|
- = this._facingMode === CameraFacingMode.ENVIRONMENT
|
861
|
|
- ? CameraFacingMode.USER
|
862
|
|
- : CameraFacingMode.ENVIRONMENT;
|
863
|
|
- }
|
864
|
|
- }
|
865
|
|
-
|
866
|
|
- /**
|
867
|
|
- * Checks whether the attached MediaStream is receiving data from source or
|
868
|
|
- * not. If the stream property is null(because of mute or another reason)
|
869
|
|
- * this method will return false.
|
870
|
|
- * NOTE: This method doesn't indicate problem with the streams directly.
|
871
|
|
- * For example in case of video mute the method will return false or if the
|
872
|
|
- * user has disposed the track.
|
873
|
|
- *
|
874
|
|
- * @returns {boolean} true if the stream is receiving data and false
|
875
|
|
- * this otherwise.
|
876
|
|
- */
|
877
|
|
- isReceivingData() {
|
878
|
|
- if (this.isVideoTrack()
|
879
|
|
- && (this.isMuted() || this._stopStreamInProgress || this.videoType === VideoType.DESKTOP)) {
|
880
|
|
- return true;
|
881
|
|
- }
|
882
|
|
-
|
883
|
|
- if (!this.stream) {
|
884
|
|
- return false;
|
885
|
|
- }
|
886
|
|
-
|
887
|
|
- // In older version of the spec there is no muted property and
|
888
|
|
- // readyState can have value muted. In the latest versions
|
889
|
|
- // readyState can have values "live" and "ended" and there is
|
890
|
|
- // muted boolean property. If the stream is muted that means that
|
891
|
|
- // we aren't receiving any data from the source. We want to notify
|
892
|
|
- // the users for error if the stream is muted or ended on it's
|
893
|
|
- // creation.
|
894
|
|
-
|
895
|
|
- // For video blur enabled use the original video stream
|
896
|
|
- const stream = this._effectEnabled ? this._originalStream : this.stream;
|
897
|
|
-
|
898
|
|
- return stream.getTracks().some(track =>
|
899
|
|
- (!('readyState' in track) || track.readyState === 'live')
|
900
|
|
- && (!('muted' in track) || track.muted !== true));
|
901
|
|
- }
|
902
|
|
-
|
903
|
887
|
/**
|
904
|
888
|
* Creates a text representation of this local track instance.
|
905
|
889
|
*
|
|
@@ -908,4 +892,13 @@ export default class JitsiLocalTrack extends JitsiTrack {
|
908
|
892
|
toString() {
|
909
|
893
|
return `LocalTrack[${this.rtcId},${this.getType()}]`;
|
910
|
894
|
}
|
|
895
|
+
|
|
896
|
+ /**
|
|
897
|
+ * Asynchronously unmutes this track.
|
|
898
|
+ *
|
|
899
|
+ * @returns {Promise}
|
|
900
|
+ */
|
|
901
|
+ unmute() {
|
|
902
|
+ return this._queueSetMuted(false);
|
|
903
|
+ }
|
911
|
904
|
}
|