
feat(multi-stream-support) Add track streaming status (#1855)

* feat(multi-stream-support) Add TrackStreamingStatus class
update JitsiRemoteTrack to init and dispose TrackStreamingStatus
stop emitting LASTN_ENDPOINT_CHANGED event when source name signaling is enabled
convert TrackStreamingStatus class to typescript

* rename methods and use TrackStreamingStatus enum

* update jdocs for JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED
tags/v0.0.2
William Liang, 3 years ago
commit c6b79dc257

JitsiConference.js (+9, -2)

@@ -1612,6 +1612,14 @@ JitsiConference.prototype.getLastN = function() {
     return this.receiveVideoController.getLastN();
 };
 
+/**
+ * Obtains the forwarded sources list in this conference.
+ * @return {Array<string>|null}
+ */
+JitsiConference.prototype.getForwardedSources = function() {
+    return this.rtc.getForwardedSources();
+};
+
 /**
  * Selects a new value for "lastN". The requested amount of videos are going
  * to be delivered after the value is in effect. Set to -1 for unlimited or
@@ -2136,8 +2144,7 @@ JitsiConference.prototype.onRemoteTrackRemoved = function(removedTrack) {
                 // considered equal the result of splice can be ignored.
                 participant._tracks.splice(i, 1);
 
-                this.eventEmitter.emit(
-                    JitsiConferenceEvents.TRACK_REMOVED, removedTrack);
+                this.eventEmitter.emit(JitsiConferenceEvents.TRACK_REMOVED, removedTrack);
 
                 if (this.transcriber) {
                     this.transcriber.removeTrack(removedTrack);
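
For context, a minimal consumer-side sketch (not part of this commit) of the new accessor; conference is assumed to be an already-joined JitsiConference with source name signaling enabled:

    // Hypothetical usage sketch. getForwardedSources() returns null until the
    // bridge channel has delivered the first forwarded-sources update.
    const forwardedSources = conference.getForwardedSources();

    if (forwardedSources === null) {
        console.log('Forwarded sources not initialised yet');
    } else {
        console.log(`Bridge is forwarding: ${forwardedSources.join(', ')}`);
    }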

JitsiConferenceEvents.spec.ts (+4, -1)

@@ -28,6 +28,7 @@ describe( "/JitsiConferenceEvents members", () => {
         KICKED,
         PARTICIPANT_KICKED,
         LAST_N_ENDPOINTS_CHANGED,
+        FORWARDED_SOURCES_CHANGED,
         LOCK_STATE_CHANGED,
         SERVER_REGION_CHANGED,
         _MEDIA_SESSION_STARTED,
@@ -104,6 +105,7 @@ describe( "/JitsiConferenceEvents members", () => {
         expect( KICKED ).toBe( 'conference.kicked' );
         expect( PARTICIPANT_KICKED ).toBe( 'conference.participant_kicked' );
         expect( LAST_N_ENDPOINTS_CHANGED ).toBe( 'conference.lastNEndpointsChanged' );
+        expect( FORWARDED_SOURCES_CHANGED ).toBe( 'conference.forwardedSourcesChanged' );
         expect( LOCK_STATE_CHANGED ).toBe( 'conference.lock_state_changed' );
         expect( SERVER_REGION_CHANGED ).toBe( 'conference.server_region_changed' );
         expect( _MEDIA_SESSION_STARTED ).toBe( 'conference.media_session.started' );
@@ -176,6 +178,7 @@ describe( "/JitsiConferenceEvents members", () => {
             expect( JitsiConferenceEvents.KICKED ).toBe( 'conference.kicked' );
             expect( JitsiConferenceEvents.PARTICIPANT_KICKED ).toBe( 'conference.participant_kicked' );
             expect( JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED ).toBe( 'conference.lastNEndpointsChanged' );
+            expect( JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED ).toBe( 'conference.forwardedSourcesChanged' );
             expect( JitsiConferenceEvents.LOCK_STATE_CHANGED ).toBe( 'conference.lock_state_changed' );
             expect( JitsiConferenceEvents.SERVER_REGION_CHANGED ).toBe( 'conference.server_region_changed' );
             expect( JitsiConferenceEvents._MEDIA_SESSION_STARTED ).toBe( 'conference.media_session.started' );
@@ -230,4 +233,4 @@ describe( "/JitsiConferenceEvents members", () => {
         const keys = Object.keys( others );
         expect( keys ).withContext( `Extra members: ${ keys.join( ", " ) }` ).toEqual( [] );
     } );
-} );
+} );

JitsiConferenceEvents.ts (+11, -0)

@@ -153,6 +153,16 @@ export enum JitsiConferenceEvents {
      */
     LAST_N_ENDPOINTS_CHANGED = 'conference.lastNEndpointsChanged',
 
+    /**
+     * The forwarded sources set is changed.
+     *
+     * @param {Array<string>} leavingForwardedSources the sourceNames of all the tracks which are leaving forwarded
+     * sources
+     * @param {Array<string>} enteringForwardedSources the sourceNames of all the tracks which are entering forwarded
+     * sources
+     */
+    FORWARDED_SOURCES_CHANGED = 'conference.forwardedSourcesChanged',
+
     /**
      * Indicates that the room has been locked or unlocked.
      */
@@ -481,6 +491,7 @@ export const JVB121_STATUS = JitsiConferenceEvents.JVB121_STATUS;
 export const KICKED = JitsiConferenceEvents.KICKED;
 export const PARTICIPANT_KICKED = JitsiConferenceEvents.PARTICIPANT_KICKED;
 export const LAST_N_ENDPOINTS_CHANGED = JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED;
+export const FORWARDED_SOURCES_CHANGED = JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED;
 export const LOCK_STATE_CHANGED = JitsiConferenceEvents.LOCK_STATE_CHANGED;
 export const SERVER_REGION_CHANGED = JitsiConferenceEvents.SERVER_REGION_CHANGED;
 export const _MEDIA_SESSION_STARTED = JitsiConferenceEvents._MEDIA_SESSION_STARTED;
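
A minimal listener sketch (not part of the diff) for the new conference-level event; the argument order follows the JSDoc above, and RTC.js further down additionally passes a Date.now() timestamp as a third argument:

    // Hypothetical listener sketch.
    conference.on(
        JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED,
        (leavingForwardedSources, enteringForwardedSources) => {
            console.log(`Left forwarded sources: ${leavingForwardedSources}`);
            console.log(`Entered forwarded sources: ${enteringForwardedSources}`);
        });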

JitsiMeetJS.js (+3, -1)

@@ -16,6 +16,7 @@ import browser from './modules/browser';
 import NetworkInfo from './modules/connectivity/NetworkInfo';
 import { ParticipantConnectionStatus }
     from './modules/connectivity/ParticipantConnectionStatus';
+import { TrackStreamingStatus } from './modules/connectivity/TrackStreamingStatus';
 import getActiveAudioDevice from './modules/detection/ActiveDeviceDetector';
 import * as DetectionEvents from './modules/detection/DetectionEvents';
 import TrackVADEmitter from './modules/detection/TrackVADEmitter';
@@ -119,7 +120,8 @@ export default _mergeNamespaceAndModule({
         participantConnectionStatus: ParticipantConnectionStatus,
         recording: recordingConstants,
         sipVideoGW: VideoSIPGWConstants,
-        transcriptionStatus: JitsiTranscriptionStatus
+        transcriptionStatus: JitsiTranscriptionStatus,
+        trackStreamingStatus: TrackStreamingStatus
     },
     events: {
         conference: JitsiConferenceEvents,
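
A small usage sketch for the newly exposed enum. The assumption here is that these entries live under the public constants namespace of JitsiMeetJS, which is not visible in this hunk:

    // Hypothetical access sketch; the 'constants' namespace name is an assumption.
    const { trackStreamingStatus } = JitsiMeetJS.constants;

    // Enum values defined in TrackStreamingStatus.ts:
    // 'active', 'inactive', 'interrupted', 'restoring'
    console.log(Object.values(trackStreamingStatus));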

JitsiTrackEvents.js (+13, -0)

@@ -43,3 +43,16 @@ export const NO_DATA_FROM_SOURCE = 'track.no_data_from_source';
  * the microphone that is currently selected.
  */
 export const NO_AUDIO_INPUT = 'track.no_audio_input';
+
+/**
+ * Event fired whenever video track's streaming changes.
+ * First argument is the sourceName of the track and the second is a string indicating if the connection is currently
+ * - active - the connection is active.
+ * - inactive - the connection is inactive, was intentionally interrupted by the bridge because of low BWE or because
+ *   of the endpoint falling out of last N.
+ * - interrupted - a network problem occurred.
+ * - restoring - the connection was inactive and is restoring now.
+ *
+ * The current status value can be obtained by calling JitsiRemoteTrack.getTrackStreamingStatus().
+ */
+export const TRACK_STREAMING_STATUS_CHANGED = 'track.streaming_status_changed';
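
For orientation, a minimal sketch (not part of the diff) of consuming the new per-track event; remoteTrack is assumed to be a JitsiRemoteTrack obtained from the conference. Attaching the listener is what lazily initialises TrackStreamingStatus in JitsiRemoteTrack._addEventListener further down in this commit, and TrackStreamingStatusImpl emits the new status value on the track itself:

    // Hypothetical listener sketch.
    remoteTrack.on(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED, status => {
        console.log(`Streaming status of ${remoteTrack.getSourceName()}: ${status}`);
    });

    // The current value can also be polled; it is null until the impl is initialised.
    console.log(remoteTrack.getTrackStreamingStatus());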

JitsiTrackEvents.spec.ts (+3, -1)

@@ -8,6 +8,7 @@ describe( "/JitsiTrackEvents members", () => {
         TRACK_AUDIO_LEVEL_CHANGED,
         TRACK_AUDIO_OUTPUT_CHANGED,
         TRACK_MUTE_CHANGED,
+        TRACK_STREAMING_STATUS_CHANGED,
         TRACK_VIDEOTYPE_CHANGED,
         NO_DATA_FROM_SOURCE,
         NO_AUDIO_INPUT,
@@ -28,6 +29,7 @@ describe( "/JitsiTrackEvents members", () => {
             expect( JitsiTrackEvents.TRACK_AUDIO_LEVEL_CHANGED ).toBe( 'track.audioLevelsChanged' );
             expect( JitsiTrackEvents.TRACK_AUDIO_OUTPUT_CHANGED ).toBe( 'track.audioOutputChanged' );
             expect( JitsiTrackEvents.TRACK_MUTE_CHANGED ).toBe( 'track.trackMuteChanged' );
+            expect( JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED ).toBe( 'track.streaming_status_changed' );
             expect( JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED ).toBe( 'track.videoTypeChanged' );
             expect( JitsiTrackEvents.NO_DATA_FROM_SOURCE ).toBe( 'track.no_data_from_source' );
             expect( JitsiTrackEvents.NO_AUDIO_INPUT ).toBe( 'track.no_audio_input' );
@@ -38,4 +40,4 @@ describe( "/JitsiTrackEvents members", () => {
         const keys = Object.keys( others );
         expect( keys ).withContext( `Extra members: ${ keys.join( ", " ) }` ).toEqual( [] );
     } );
-} );
+} );

modules/RTC/BridgeChannel.js (+18, -4)

@@ -359,11 +359,25 @@ export default class BridgeChannel {
                 break;
             }
             case 'LastNEndpointsChangeEvent': {
-                // The new/latest list of last-n endpoint IDs (i.e. endpoints for which the bridge is sending video).
-                const lastNEndpoints = obj.lastNEndpoints;
+                if (!FeatureFlags.isSourceNameSignalingEnabled()) {
+                    // The new/latest list of last-n endpoint IDs (i.e. endpoints for which the bridge is sending
+                    // video).
+                    const lastNEndpoints = obj.lastNEndpoints;
 
-                logger.info(`New forwarded endpoints: ${lastNEndpoints}`);
-                emitter.emit(RTCEvents.LASTN_ENDPOINT_CHANGED, lastNEndpoints);
+                    logger.info(`New forwarded endpoints: ${lastNEndpoints}`);
+                    emitter.emit(RTCEvents.LASTN_ENDPOINT_CHANGED, lastNEndpoints);
+                }
+
+                break;
+            }
+            case 'ForwardedSources': {
+                if (FeatureFlags.isSourceNameSignalingEnabled()) {
+                    // The new/latest list of forwarded sources
+                    const forwardedSources = obj.forwardedSources;
+
+                    logger.info(`New forwarded sources: ${forwardedSources}`);
+                    emitter.emit(RTCEvents.FORWARDED_SOURCES_CHANGED, forwardedSources);
+                }
 
                 break;
             }
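
For illustration only, a hedged sketch of a bridge channel message that would land in the new 'ForwardedSources' case. Only the forwardedSources field appears in this hunk; the colibriClass dispatch key is an assumption based on how BridgeChannel handles its other message types:

    // Hypothetical message shape (field names other than forwardedSources are assumptions).
    const exampleMessage = {
        colibriClass: 'ForwardedSources',
        forwardedSources: [ 'endpointA-v0', 'endpointB-v0' ]
    };

With source name signaling enabled, such a message results in RTCEvents.FORWARDED_SOURCES_CHANGED being emitted with the forwardedSources array.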

modules/RTC/JitsiRemoteTrack.js (+146, -0)

@@ -1,5 +1,7 @@
 import * as JitsiTrackEvents from '../../JitsiTrackEvents';
 import { createTtfmEvent } from '../../service/statistics/AnalyticsEvents';
+import TrackStreamingStatusImpl, { TrackStreamingStatus } from '../connectivity/TrackStreamingStatus';
+import FeatureFlags from '../flags/FeatureFlags';
 import Statistics from '../statistics/statistics';
 
 import JitsiTrack from './JitsiTrack';
@@ -74,6 +76,19 @@ export default class JitsiRemoteTrack extends JitsiTrack {
         this.muted = muted;
         this.isP2P = isP2P;
         this._sourceName = sourceName;
+        this._trackStreamingStatus = null;
+        this._trackStreamingStatusImpl = null;
+
+        /**
+         * This holds the timestamp indicating when remote video track entered forwarded sources set. Track entering
+         * forwardedSources will have streaming status restoring and when we start receiving video will become active,
+         * but if video is not received for certain time {@link DEFAULT_RESTORING_TIMEOUT} that track streaming status
+         * will become interrupted.
+         */
+        this._enteredForwardedSourcesTimestamp = null;
+
+        this.addEventListener = this.on = this._addEventListener.bind(this);
+        this.removeEventListener = this.off = this._removeEventListener.bind(this);
 
         logger.debug(`New remote track added: ${this}`);
 
@@ -106,6 +121,44 @@ export default class JitsiRemoteTrack extends JitsiTrack {
         });
     }
 
+    /**
+     * Overrides addEventListener method to init TrackStreamingStatus instance when there are listeners for the
+     * {@link JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED} event.
+     *
+     * @param {string} event - event name
+     * @param {function} handler - event handler
+     */
+    _addEventListener(event, handler) {
+        super.addListener(event, handler);
+
+        if (FeatureFlags.isSourceNameSignalingEnabled()
+            && event === JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED
+            && this.listenerCount(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED)
+            && !this._trackStreamingStatusImpl
+        ) {
+            this._initTrackStreamingStatus();
+            logger.debug(`Initializing track streaming status: ${this._sourceName}`);
+        }
+    }
+
+    /**
+     * Overrides removeEventListener method to dispose TrackStreamingStatus instance.
+     *
+     * @param {string} event - event name
+     * @param {function} handler - event handler
+     */
+    _removeEventListener(event, handler) {
+        super.removeListener(event, handler);
+
+        if (FeatureFlags.isSourceNameSignalingEnabled()
+            && event === JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED
+            && !this.listenerCount(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED)
+        ) {
+            this._disposeTrackStreamingStatus();
+            logger.debug(`Disposing track streaming status: ${this._sourceName}`);
+        }
+    }
+
     /**
      * Callback invoked when the track is muted. Emits an event notifying
      * listeners of the mute event.
@@ -132,6 +185,19 @@ export default class JitsiRemoteTrack extends JitsiTrack {
         this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_UNMUTE, this);
     }
 
+    /**
+     * Removes attached event listeners and dispose TrackStreamingStatus .
+     *
+     * @returns {Promise}
+     */
+    dispose() {
+        if (FeatureFlags.isSourceNameSignalingEnabled()) {
+            this._disposeTrackStreamingStatus();
+        }
+
+        return super.dispose();
+    }
+
     /**
      * Sets current muted status and fires an events for the change.
      * @param value the muted status.
@@ -318,6 +384,86 @@ export default class JitsiRemoteTrack extends JitsiTrack {
         return `readyState: ${readyState}, muted: ${muted}, enabled: ${enabled}`;
     }
 
+    /**
+     * Initializes trackStreamingStatusImpl.
+     */
+    _initTrackStreamingStatus() {
+        const config = this.conference.options.config;
+
+        this._trackStreamingStatus = TrackStreamingStatus.ACTIVE;
+
+        this._trackStreamingStatusImpl = new TrackStreamingStatusImpl(
+            this.rtc,
+            this.conference,
+            this,
+            {
+                // These options are not public API, leaving it here only as an entry point through config for
+                // tuning up purposes. Default values should be adjusted as soon as optimal values are discovered.
+                p2pRtcMuteTimeout: config._p2pConnStatusRtcMuteTimeout,
+                rtcMuteTimeout: config._peerConnStatusRtcMuteTimeout,
+                outOfForwardedSourcesTimeout: config._peerConnStatusOutOfLastNTimeout
+            });
+
+        this._trackStreamingStatusImpl.init();
+    }
+
+    /**
+     * Disposes trackStreamingStatusImpl and clears trackStreamingStatus.
+     */
+    _disposeTrackStreamingStatus() {
+        if (this._trackStreamingStatusImpl) {
+            this._trackStreamingStatusImpl.dispose();
+            this._trackStreamingStatusImpl = null;
+            this._trackStreamingStatus = null;
+        }
+    }
+
+    /**
+     * Updates track's streaming status.
+     *
+     * @param {string} state the current track streaming state. {@link TrackStreamingStatus}.
+     */
+    _setTrackStreamingStatus(status) {
+        this._trackStreamingStatus = status;
+    }
+
+    /**
+     * Returns track's streaming status.
+     *
+     * @returns {string} the streaming status <tt>TrackStreamingStatus</tt> of the track. Returns null
+     * if trackStreamingStatusImpl hasn't been initialized.
+     *
+     * {@link TrackStreamingStatus}.
+     */
+    getTrackStreamingStatus() {
+        return this._trackStreamingStatus;
+    }
+
+    /**
+     * Clears the timestamp of when the track entered forwarded sources.
+     */
+    _clearEnteredForwardedSourcesTimestamp() {
+        this._enteredForwardedSourcesTimestamp = null;
+    }
+
+    /**
+     * Updates the timestamp of when the track entered forwarded sources.
+     *
+     * @param {number} timestamp the time in millis
+     */
+    _setEnteredForwardedSourcesTimestamp(timestamp) {
+        this._enteredForwardedSourcesTimestamp = timestamp;
+    }
+
+    /**
+     * Returns the timestamp of when the track entered forwarded sources.
+     *
+     * @returns {number} the time in millis
+     */
+    _getEnteredForwardedSourcesTimestamp() {
+        return this._enteredForwardedSourcesTimestamp;
+    }
+
     /**
      * Creates a text representation of this remote track instance.
      * @return {string}
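
The timeouts passed to TrackStreamingStatusImpl above are read from the conference configuration. A hedged sketch of what overriding them could look like; these underscore-prefixed keys are explicitly not public API (see the comment in _initTrackStreamingStatus) and the values shown are simply the defaults from TrackStreamingStatus.ts:

    // Hypothetical tuning sketch; internal knobs, not public API.
    const config = {
        _p2pConnStatusRtcMuteTimeout: 2500,      // maps to p2pRtcMuteTimeout
        _peerConnStatusRtcMuteTimeout: 10000,    // maps to rtcMuteTimeout
        _peerConnStatusOutOfLastNTimeout: 500    // maps to outOfForwardedSourcesTimeout
    };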

modules/RTC/RTC.js (+62, -0)

@@ -126,6 +126,15 @@ export default class RTC extends Listenable {
          */
         this._lastNEndpoints = null;
 
+        /**
+         * Defines the forwarded sources list. It can be null or an array once initialised with a channel forwarded
+         * sources event.
+         *
+         * @type {Array<string>|null}
+         * @private
+         */
+        this._forwardedSources = null;
+
         /**
          * The number representing the maximum video height the local client
          * should receive from the bridge.
@@ -146,6 +155,9 @@ export default class RTC extends Listenable {
         // The last N change listener.
         this._lastNChangeListener = this._onLastNChanged.bind(this);
 
+        // The forwarded sources change listener.
+        this._forwardedSourcesChangeListener = this._onForwardedSourcesChanged.bind(this);
+
         this._onDeviceListChanged = this._onDeviceListChanged.bind(this);
         this._updateAudioOutputForAudioTracks
             = this._updateAudioOutputForAudioTracks.bind(this);
@@ -277,6 +289,11 @@ export default class RTC extends Listenable {
 
         // Add Last N change listener.
         this.addListener(RTCEvents.LASTN_ENDPOINT_CHANGED, this._lastNChangeListener);
+
+        if (FeatureFlags.isSourceNameSignalingEnabled()) {
+            // Add forwarded sources change listener.
+            this.addListener(RTCEvents.FORWARDED_SOURCES_CHANGED, this._forwardedSourcesChangeListener);
+        }
     }
 
     /**
@@ -315,6 +332,31 @@ export default class RTC extends Listenable {
             enteringLastNEndpoints);
     }
 
+    /**
+     * Receives events when forwarded sources had changed.
+     *
+     * @param {array} forwardedSources The new forwarded sources.
+     * @private
+     */
+    _onForwardedSourcesChanged(forwardedSources = []) {
+        const oldForwardedSources = this._forwardedSources || [];
+        let leavingForwardedSources = [];
+        let enteringForwardedSources = [];
+
+        this._forwardedSources = forwardedSources;
+
+        leavingForwardedSources = oldForwardedSources.filter(sourceName => !this.isInForwardedSources(sourceName));
+
+        enteringForwardedSources = forwardedSources.filter(
+            sourceName => oldForwardedSources.indexOf(sourceName) === -1);
+
+        this.conference.eventEmitter.emit(
+            JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED,
+            leavingForwardedSources,
+            enteringForwardedSources,
+            Date.now());
+    }
+
     /**
      * Should be called when current media session ends and after the
      * PeerConnection has been closed using PeerConnection.close() method.
@@ -550,6 +592,14 @@ export default class RTC extends Listenable {
         track.conference = this.conference;
     }
 
+    /**
+     * Get forwarded sources list.
+     * @returns {Array<string>|null}
+     */
+    getForwardedSources() {
+        return this._forwardedSources;
+    }
+
     /**
      * Get local video track.
      * @returns {JitsiLocalTrack|undefined}
@@ -927,6 +977,18 @@ export default class RTC extends Listenable {
             || this._lastNEndpoints.indexOf(id) > -1;
     }
 
+    /**
+     * Indicates if the source name is currently included in the forwarded sources.
+     *
+     * @param {string} sourceName The source name that we check for forwarded sources.
+     * @returns {boolean} true if the source name is in the forwarded sources or if we don't have bridge channel
+     * support, otherwise we return false.
+     */
+    isInForwardedSources(sourceName) {
+        return !this._forwardedSources // forwardedSources not initialised yet.
+            || this._forwardedSources.indexOf(sourceName) > -1;
+    }
+
     /**
      * Updates the target audio output device for all remote audio tracks.
     *
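
To make the set arithmetic in _onForwardedSourcesChanged concrete, a small worked example with made-up source names:

    // Hypothetical worked example of the leaving/entering computation above.
    // oldForwardedSources = [ 'a-v0', 'b-v0' ], new forwardedSources = [ 'b-v0', 'c-v0' ]
    // leavingForwardedSources  -> [ 'a-v0' ]  (was forwarded, no longer is)
    // enteringForwardedSources -> [ 'c-v0' ]  (newly forwarded)
    // Both arrays, plus Date.now(), are then emitted as FORWARDED_SOURCES_CHANGED.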

modules/connectivity/TrackStreamingStatus.ts (+648, -0)

@@ -0,0 +1,648 @@
+import { getLogger } from '@jitsi/logger';
+
+import { JitsiConferenceEvents } from '../../JitsiConferenceEvents';
+import * as JitsiTrackEvents from '../../JitsiTrackEvents';
+import RTCEvents from '../../service/RTC/RTCEvents';
+import { createTrackStreamingStatusEvent } from '../../service/statistics/AnalyticsEvents';
+import JitsiConference from '../../types/hand-crafted/JitsiConference';
+import JitsiRemoteTrack from '../../types/hand-crafted/modules/RTC/JitsiRemoteTrack';
+import RTC from '../../types/hand-crafted/modules/RTC/RTC';
+import { VideoType } from '../../types/hand-crafted/service/RTC/VideoType';
+import browser from '../browser';
+import Statistics from '../statistics/statistics';
+
+/** Track streaming statuses. */
+export enum TrackStreamingStatus {
+
+    /**
+     * Status indicating that streaming is currently active.
+     */
+    ACTIVE = 'active',
+
+    /**
+     * Status indicating that streaming is currently inactive.
+     * Inactive means the streaming was stopped on purpose from the bridge, like exiting forwarded sources or
+     * adaptivity decided to drop video because of not enough bandwidth.
+     */
+    INACTIVE = 'inactive',
+
+    /**
+     * Status indicating that streaming is currently interrupted.
+     */
+    INTERRUPTED = 'interrupted',
+
+    /**
+     * Status indicating that streaming is currently restoring.
+     */
+    RESTORING = 'restoring',
+  }
+
+type StreamingStatusMap = {
+    // TODO: Replace this hand crafted VideoType when we convert VideoType.js to Typescript.
+    videoType?: VideoType,
+    startedMs?: number,
+    p2p?: boolean,
+    streamingStatus?: string,
+    value?: number
+};
+
+const logger = getLogger(__filename);
+
+/**
+ * Default value of 500 milliseconds for {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
+ */
+const DEFAULT_NOT_IN_FORWARDED_SOURCES_TIMEOUT = 500;
+
+/**
+ * Default value of 2500 milliseconds for {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
+ */
+const DEFAULT_P2P_RTC_MUTE_TIMEOUT = 2500;
+
+/**
+ * Default value of 10000 milliseconds for {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
+ */
+const DEFAULT_RTC_MUTE_TIMEOUT = 10000;
+
+/**
+ * The time to wait a track to be restored. Track which was out of forwarded sources should be inactive and when
+ * entering forwarded sources it becomes restoring and when data is received from bridge it will become active, but if
+ * no data is received for some time we set status of that track streaming to interrupted.
+ */
+const DEFAULT_RESTORING_TIMEOUT = 10000;
+
+/**
+ * Class is responsible for emitting JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED events.
+ */
+export class TrackStreamingStatusImpl {
+    rtc: RTC;
+    conference: JitsiConference;
+    track: JitsiRemoteTrack;
+
+    /**  This holds the timeout callback ID scheduled using window.setTimeout. */
+    trackTimer: number | null;
+
+    /**
+     * If video track frozen detection through RTC mute event is supported, we wait some time until video track is
+     * considered frozen. But because when the track falls out of forwarded sources it is expected for the video to
+     * freeze this timeout must be significantly reduced in "out of forwarded sources" case.
+     *
+     * Basically this value is used instead of {@link rtcMuteTimeout} when track is not in forwarded sources.
+     */
+    outOfForwardedSourcesTimeout: number;
+
+    /**
+     * How long we are going to wait for the corresponding signaling mute event after the RTC video track muted
+     * event is fired on the Media stream, before the connection interrupted is fired. The default value is
+     * {@link DEFAULT_P2P_RTC_MUTE_TIMEOUT}.
+     */
+    p2pRtcMuteTimeout: number;
+
+    /**
+     * How long we're going to wait after the RTC video track muted event for the corresponding signalling mute
+     * event, before the connection interrupted is fired. The default value is {@link DEFAULT_RTC_MUTE_TIMEOUT}.
+     *
+     * @returns amount of time in milliseconds
+     */
+    rtcMuteTimeout: number;
+
+    /**
+     * This holds a timestamp indicating  when remote video track was RTC muted. The purpose of storing the
+     * timestamp is to avoid the transition to disconnected status in case of legitimate video mute operation where
+     * the signalling video muted event can arrive shortly after RTC muted event.
+     *
+     * The timestamp is measured in milliseconds obtained with <tt>Date.now()</tt>.
+     *
+     * FIXME merge this logic with NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the event
+     * to the remote track and allowing to set different timeout for local and remote tracks.
+     */
+    rtcMutedTimestamp: number | null;
+
+    /** This holds the restoring timeout callback ID scheduled using window.setTimeout. */
+    restoringTimer: ReturnType<typeof setTimeout> | null;
+
+    /**
+     * This holds the current streaming status (along with all the internal events that happen while in that
+     * state).
+     *
+     * The goal is to send this information to the analytics backend for post-mortem analysis.
+     */
+    streamingStatusMap: StreamingStatusMap;
+
+    _onP2PStatus: () => void;
+    _onUserLeft: () => void;
+    _onTrackRtcMuted: () => void;
+    _onTrackRtcUnmuted: () => void;
+    _onSignallingMuteChanged: () => void;
+    _onTrackVideoTypeChanged: () => void;
+    _onLastNValueChanged: () => void;
+    _onForwardedSourcesChanged: () => void;
+
+    /* eslint-disable max-params*/
+    /**
+     * Calculates the new {@link TrackStreamingStatus} based on the values given for some specific remote track. It is
+     * assumed that the conference is currently in the JVB mode (in contrary to the P2P mode)
+     * @param isInForwardedSources - indicates whether the track is in the forwarded sources set. When set to
+     * false it means that JVB is not sending any video for the track.
+     * @param isRestoringTimedout - if true it means that the track has been outside of forwarded sources too
+     * long to be considered {@link TrackStreamingStatus.RESTORING}.
+     * @param isVideoMuted - true if the track is video muted and we should not expect to receive any video.
+     * @param isVideoTrackFrozen - if the current browser support video frozen detection then it will be set to
+     * true when the video track is frozen. If the current browser does not support frozen detection the it's always
+     * false.
+     * @return {TrackStreamingStatus} the new streaming status for the track for whom the values above were provided.
+     * @private
+     */
+    static _getNewStateForJvbMode(
+            isInForwardedSources: boolean,
+            isRestoringTimedout: boolean,
+            isVideoMuted: boolean,
+            isVideoTrackFrozen: boolean): TrackStreamingStatus {
+
+        // We are currently not checking the endpoint connection status received from the JVB.
+        if (isVideoMuted) {
+            // If the connection is active according to JVB and the track is video muted there is no way for the
+            // connection to be inactive, because the detection logic below only makes sense for video.
+            return TrackStreamingStatus.ACTIVE;
+        }
+
+        // Logic when isVideoTrackFrozen is supported
+        if (browser.supportsVideoMuteOnConnInterrupted()) {
+            if (!isVideoTrackFrozen) {
+                // If the video is playing we're good
+                return TrackStreamingStatus.ACTIVE;
+            } else if (isInForwardedSources) {
+                return isRestoringTimedout ? TrackStreamingStatus.INTERRUPTED : TrackStreamingStatus.RESTORING;
+            }
+
+            return TrackStreamingStatus.INACTIVE;
+        }
+
+        // Because this browser is incapable of detecting frozen video we must rely on the forwarded sources value
+        return isInForwardedSources ? TrackStreamingStatus.ACTIVE : TrackStreamingStatus.INACTIVE;
+    }
+
+    /* eslint-enable max-params*/
+
+    /**
+     * In P2P mode we don't care about any values coming from the JVB and the streaming status can be only active or
+     * interrupted.
+     * @param isVideoMuted - true if video muted
+     * @param isVideoTrackFrozen - true if the video track for the remote track is currently frozen. If the
+     * current browser does not support video frozen detection then it's always false.
+     * @return {TrackStreamingStatus}
+     * @private
+     */
+    static _getNewStateForP2PMode(isVideoMuted: boolean, isVideoTrackFrozen: boolean): TrackStreamingStatus {
+        if (!browser.supportsVideoMuteOnConnInterrupted()) {
+            // There's no way to detect problems in P2P when there's no video track frozen detection...
+            return TrackStreamingStatus.ACTIVE;
+        }
+
+        return isVideoMuted || !isVideoTrackFrozen
+            ? TrackStreamingStatus.ACTIVE : TrackStreamingStatus.INTERRUPTED;
+    }
+
+    /**
+     * Creates new instance of <tt>TrackStreamingStatus</tt>.
+     *
+     * @constructor
+     * @param rtc - the RTC service instance
+     * @param conference - parent conference instance
+     * @param {Object} options
+     * @param {number} [options.p2pRtcMuteTimeout=2500] custom value for
+     * {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
+     * @param {number} [options.rtcMuteTimeout=2000] custom value for
+     * {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
+     * @param {number} [options.outOfForwardedSourcesTimeout=500] custom value for
+     * {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
+     */
+    constructor(rtc: RTC, conference: JitsiConference, track: JitsiRemoteTrack, options: {
+        outOfForwardedSourcesTimeout: number,
+        p2pRtcMuteTimeout: number,
+        rtcMuteTimeout: number
+    }) {
+        this.rtc = rtc;
+        this.conference = conference;
+        this.track = track;
+
+        this.restoringTimer = null;
+        this.rtcMutedTimestamp = null;
+        this.streamingStatusMap = {};
+        this.trackTimer = null;
+
+        this.outOfForwardedSourcesTimeout = typeof options.outOfForwardedSourcesTimeout === 'number'
+            ? options.outOfForwardedSourcesTimeout : DEFAULT_NOT_IN_FORWARDED_SOURCES_TIMEOUT;
+
+        this.p2pRtcMuteTimeout = typeof options.p2pRtcMuteTimeout === 'number'
+            ? options.p2pRtcMuteTimeout : DEFAULT_P2P_RTC_MUTE_TIMEOUT;
+
+        this.rtcMuteTimeout = typeof options.rtcMuteTimeout === 'number'
+            ? options.rtcMuteTimeout : DEFAULT_RTC_MUTE_TIMEOUT;
+        logger.info(`RtcMuteTimeout set to: ${this.rtcMuteTimeout}`);
+    }
+
+    /**
+     * Gets the video frozen timeout for given source name.
+     * @return how long are we going to wait since RTC video muted even, before a video track is considered
+     * frozen.
+     * @private
+     */
+    _getVideoFrozenTimeout(): number {
+        const sourceName = this.track.getSourceName();
+
+        return this.rtc.isInForwardedSources(sourceName)
+            ? this.rtcMuteTimeout
+            : this.conference.isP2PActive() ? this.p2pRtcMuteTimeout : this.outOfForwardedSourcesTimeout;
+    }
+
+    /**
+     * Initializes <tt>TrackStreamingStatus</tt> and bind required event listeners.
+     */
+    init(): void {
+        // Handles P2P status changes
+        this._onP2PStatus = this.figureOutStreamingStatus.bind(this);
+        this.conference.on(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);
+
+        // Used to send analytics events for the participant that left the call.
+        this._onUserLeft = this.onUserLeft.bind(this);
+        this.conference.on(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);
+
+        // On some browsers MediaStreamTrack trigger "onmute"/"onunmute" events for video type tracks when they stop
+        // receiving data which is often a sign that remote user is having connectivity issues.
+        if (browser.supportsVideoMuteOnConnInterrupted()) {
+
+            this._onTrackRtcMuted = this.onTrackRtcMuted.bind(this);
+            this.rtc.addListener(RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);
+
+            this._onTrackRtcUnmuted = this.onTrackRtcUnmuted.bind(this);
+            this.rtc.addListener(RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);
+
+            // Listened which will be bound to JitsiRemoteTrack to listen for signalling mute/unmute events.
+            this._onSignallingMuteChanged = this.onSignallingMuteChanged.bind(this);
+            this.track.on(JitsiTrackEvents.TRACK_MUTE_CHANGED, this._onSignallingMuteChanged);
+
+            // Used to send an analytics event when the video type changes.
+            this._onTrackVideoTypeChanged = this.onTrackVideoTypeChanged.bind(this);
+            this.track.on(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, this._onTrackVideoTypeChanged);
+        }
+
+        this._onForwardedSourcesChanged = this.onForwardedSourcesChanged.bind(this);
+        this.conference.on(JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED, this._onForwardedSourcesChanged);
+
+        this._onLastNValueChanged = this.figureOutStreamingStatus.bind(this);
+        this.rtc.on(RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);
+    }
+
+    /**
+     * Removes all event listeners and disposes of all resources held by this instance.
+     */
+    dispose(): void {
+        if (browser.supportsVideoMuteOnConnInterrupted()) {
+            this.rtc.removeListener(RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);
+            this.rtc.removeListener(RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);
+
+            this.track.off(JitsiTrackEvents.TRACK_MUTE_CHANGED, this._onSignallingMuteChanged);
+        }
+
+        this.conference.off(JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED, this._onForwardedSourcesChanged);
+        this.conference.off(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);
+        this.conference.off(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);
+        this.rtc.removeListener(RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);
+
+        this.clearTimeout();
+        this.clearRtcMutedTimestamp();
+        this.maybeSendTrackStreamingStatusEvent(Date.now());
+        this.figureOutStreamingStatus();
+    }
+
+    /**
+     * Changes streaming status.
+     * @param newStatus
+     */
+    _changeStreamingStatus(newStatus: TrackStreamingStatus): void {
+        if (this.track.getTrackStreamingStatus() !== newStatus) {
+
+            const sourceName = this.track.getSourceName();
+
+            this.track._setTrackStreamingStatus(newStatus);
+
+            logger.debug(`Emit track streaming status(${Date.now()}) ${sourceName}: ${newStatus}`);
+
+            // Log the event on CallStats
+            Statistics.sendLog(
+                JSON.stringify({
+                    id: 'track.streaming.status',
+                    track: sourceName,
+                    status: newStatus
+                }));
+
+            this.track.emit(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED, newStatus);
+        }
+    }
+
+    /**
+     * Reset the postponed "streaming interrupted" event which was previously scheduled as a timeout on RTC 'onmute'
+     * event.
+     */
+    clearTimeout(): void {
+        if (this.trackTimer) {
+            window.clearTimeout(this.trackTimer);
+            this.trackTimer = null;
+        }
+    }
+
+    /**
+     * Clears the timestamp of the RTC muted event for remote video track.
+     */
+    clearRtcMutedTimestamp(): void {
+        this.rtcMutedTimestamp = null;
+    }
+
+    /**
+     * Checks if track is considered frozen.
+     * @return <tt>true</tt> if the video has frozen or <tt>false</tt> when it's either not considered frozen
+     * (yet) or if freeze detection is not supported by the current browser.
+     *
+     * FIXME merge this logic with NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the event to
+     *       the remote track and allowing to set different timeout for local and remote tracks.
+     */
+    isVideoTrackFrozen(): boolean {
+        if (!browser.supportsVideoMuteOnConnInterrupted()) {
+            return false;
+        }
+
+        const isVideoRTCMuted = this.track.isWebRTCTrackMuted();
+        const rtcMutedTimestamp = this.rtcMutedTimestamp;
+        const timeout = this._getVideoFrozenTimeout();
+
+        return isVideoRTCMuted && typeof rtcMutedTimestamp === 'number' && (Date.now() - rtcMutedTimestamp) >= timeout;
+    }
+
+    /**
+     * Figures out (and updates) the current streaming status for the track identified by the source name.
+     */
+    figureOutStreamingStatus(): void {
+        const sourceName = this.track.getSourceName();
+        const inP2PMode = this.conference.isP2PActive();
+        const isRestoringTimedOut = this._isRestoringTimedout();
+        const audioOnlyMode = this.conference.getLastN() === 0;
+
+        // NOTE Overriding videoMuted to true for audioOnlyMode should disable any detection based on video playback or
+        // forwarded sources.
+        const isVideoMuted = this.track.isMuted() || audioOnlyMode;
+        const isVideoTrackFrozen = this.isVideoTrackFrozen();
+        const isInForwardedSources = this.rtc.isInForwardedSources(sourceName);
+
+        const newState
+            = inP2PMode
+                ? TrackStreamingStatusImpl._getNewStateForP2PMode(
+                    isVideoMuted,
+                    isVideoTrackFrozen)
+                : TrackStreamingStatusImpl._getNewStateForJvbMode(
+                    isInForwardedSources,
+                    isRestoringTimedOut,
+                    isVideoMuted,
+                    isVideoTrackFrozen);
+
+        // if the new state is not restoring clear timers and timestamps that we use to track the restoring state
+        if (newState !== TrackStreamingStatus.RESTORING) {
+            this._clearRestoringTimer();
+        }
+
+        logger.debug(
+            `Figure out conn status for ${sourceName}, is video muted: ${
+                isVideoMuted} video track frozen: ${
+                isVideoTrackFrozen} p2p mode: ${
+                inP2PMode} is in forwarded sources: ${
+                isInForwardedSources} currentStatus => newStatus: ${
+                this.track.getTrackStreamingStatus()} => ${newState}`);
+
+        const oldStreamingStatus = this.streamingStatusMap || {};
+
+        // Send an analytics event (guard on either the p2p flag or the streaming status has changed since the last
+        // time this code block run).
+        if (!('p2p' in oldStreamingStatus)
+            || !('streamingStatus' in oldStreamingStatus)
+            || oldStreamingStatus.p2p !== inP2PMode
+            || oldStreamingStatus.streamingStatus !== newState) {
+
+            const nowMs = Date.now();
+
+            this.maybeSendTrackStreamingStatusEvent(nowMs);
+
+            this.streamingStatusMap = {
+                ...oldStreamingStatus,
+                streamingStatus: newState,
+                p2p: inP2PMode,
+                startedMs: nowMs
+            };
+
+            // sometimes (always?) we're late to hook the TRACK_VIDEOTYPE_CHANGED event and the video type is not in
+            // oldStreamingStatus.
+            if (!('videoType' in this.streamingStatusMap)) {
+                this.streamingStatusMap.videoType = this.track.getVideoType();
+            }
+        }
+        this._changeStreamingStatus(newState);
+    }
+
+    /**
+     * Computes the duration of the current streaming status for the track (i.e. 15 seconds in the INTERRUPTED state)
+     * and sends a track streaming status event.
+     * @param nowMs - The current time (in millis).
+     */
+    maybeSendTrackStreamingStatusEvent(nowMs: number): void {
+        const trackStreamingStatus = this.streamingStatusMap;
+
+        if (trackStreamingStatus
+            && 'startedMs' in trackStreamingStatus
+            && 'videoType' in trackStreamingStatus
+            && 'streamingStatus' in trackStreamingStatus
+            && 'p2p' in trackStreamingStatus) {
+            trackStreamingStatus.value = nowMs - trackStreamingStatus.startedMs;
+            Statistics.sendAnalytics(createTrackStreamingStatusEvent(trackStreamingStatus));
+        }
+    }
+
+    /**
+     * On change in forwarded sources set check all leaving and entering track to change their corresponding statuses.
+     *
+     * @param leavingForwardedSources - The array of sourceName leaving forwarded sources.
+     * @param enteringForwardedSources - The array of sourceName entering forwarded sources.
+     * @param timestamp - The time in millis
+     * @private
+     */
+    onForwardedSourcesChanged(
+            leavingForwardedSources: string[] = [],
+            enteringForwardedSources: string[] = [],
+            timestamp: number): void {
+
+        const sourceName = this.track.getSourceName();
+
+        logger.debug(`Fowarded sources changed leaving=${leavingForwardedSources}, entering=${
+            enteringForwardedSources} at ${timestamp}`);
+
+        // If the browser doesn't fire the mute/onmute events when the remote peer stops/starts sending media,
+        // calculate the streaming status for all the tracks since it won't get triggered automatically on the track
+        // that has started/stopped receiving media.
+        if (!browser.supportsVideoMuteOnConnInterrupted()) {
+            this.figureOutStreamingStatus();
+        }
+
+        if (leavingForwardedSources.includes(sourceName)) {
+            this.track._clearEnteredForwardedSourcesTimestamp();
+            this._clearRestoringTimer();
+            browser.supportsVideoMuteOnConnInterrupted() && this.figureOutStreamingStatus();
+        }
+
+        if (enteringForwardedSources.includes(sourceName)) {
+            // store the timestamp this track is entering forwarded sources
+            this.track._setEnteredForwardedSourcesTimestamp(timestamp);
+            browser.supportsVideoMuteOnConnInterrupted() && this.figureOutStreamingStatus();
+        }
+    }
+
+    /**
+     * Clears the restoring timer for video track and the timestamp for entering forwarded sources.
+     */
+    _clearRestoringTimer(): void {
+        const rTimer = this.restoringTimer;
+
+        if (rTimer) {
+            clearTimeout(rTimer);
+            this.restoringTimer = null;
+        }
+    }
+
+    /**
+     * Checks whether a track had stayed enough in restoring state, compares current time and the time the track
+     * entered in forwarded sources. If it hasn't timedout and there is no timer added, add new timer in order to give
+     * it more time to become active or mark it as interrupted on next check.
+     *
+     * @returns <tt>true</tt> if the track was in restoring state more than the timeout
+     * ({@link DEFAULT_RESTORING_TIMEOUT}.) in order to set its status to interrupted.
+     * @private
+     */
+    _isRestoringTimedout(): boolean {
+        const enteredForwardedSourcesTimestamp = this.track._getEnteredForwardedSourcesTimestamp();
+
+        if (enteredForwardedSourcesTimestamp
+            && (Date.now() - enteredForwardedSourcesTimestamp) >= DEFAULT_RESTORING_TIMEOUT) {
+            return true;
+        }
+
+        // still haven't reached timeout, if there is no timer scheduled, schedule one so we can track the restoring
+        // state and change it after reaching the timeout
+        const rTimer = this.restoringTimer;
+
+        if (!rTimer) {
+            this.restoringTimer = setTimeout(() => this.figureOutStreamingStatus(), DEFAULT_RESTORING_TIMEOUT);
+        }
+
+        return false;
+    }
+
+    /** Checks whether a track is the current track. */
+    _isCurrentTrack(track: JitsiRemoteTrack): boolean {
+        return track.getSourceName() === this.track.getSourceName();
+    }
+
+    /**
+     * Sends a last/final track streaming status event for the track of the user that left the conference.
+     * @param id - The id of the participant that left the conference.
+     */
+    onUserLeft(id: string): void {
+        if (this.track.getParticipantId() === id) {
+            this.maybeSendTrackStreamingStatusEvent(Date.now());
+            this.streamingStatusMap = {};
+        }
+    }
+
+    /**
+     * Handles RTC 'onmute' event for the video track.
+     *
+     * @param track - The video track for which 'onmute' event will be processed.
+     */
+    onTrackRtcMuted(track: JitsiRemoteTrack): void {
+        if (!this._isCurrentTrack(track)) {
+            return;
+        }
+
+        const sourceName = track.getSourceName();
+
+        logger.debug(`Detector track RTC muted: ${sourceName}`, Date.now());
+
+        this.rtcMutedTimestamp = Date.now();
+        if (!track.isMuted()) {
+            // If the user is not muted according to the signalling we'll give it some time, before the streaming
+            // interrupted event is triggered.
+            this.clearTimeout();
+
+            // The timeout is reduced when track is not in the forwarded sources
+            const timeout = this._getVideoFrozenTimeout();
+
+            this.trackTimer = window.setTimeout(() => {
+                logger.debug(`Set RTC mute timeout for: ${sourceName} of ${timeout} ms`);
+                this.clearTimeout();
+                this.figureOutStreamingStatus();
+            }, timeout);
+        }
+    }
+
+    /**
+     * Handles RTC 'onunmute' event for the video track.
+     *
+     * @param track - The video track for which 'onunmute' event will be processed.
+     */
+    onTrackRtcUnmuted(track: JitsiRemoteTrack): void {
+        if (!this._isCurrentTrack(track)) {
+            return;
+        }
+
+        const sourceName = this.track.getSourceName();
+
+        logger.debug(`Detector track RTC unmuted: ${sourceName}`, Date.now());
+
+        this.clearTimeout();
+        this.clearRtcMutedTimestamp();
+
+        this.figureOutStreamingStatus();
+    }
+
+    /**
+     * Here the signalling "mute"/"unmute" events are processed.
+     *
+     * @param track - The remote video track for which the signalling mute/unmute event will be
+     * processed.
+     */
+    onSignallingMuteChanged(track: JitsiRemoteTrack): void {
+        if (!this._isCurrentTrack(track)) {
+            return;
+        }
+
+        const sourceName = this.track.getSourceName();
+
+        logger.debug(`Detector on track signalling mute changed: ${sourceName}`, track.isMuted());
+
+        this.figureOutStreamingStatus();
+    }
+
+    /**
+     * Sends a track streaming status event as a result of the video type changing.
+     * @deprecated this will go away with full multiple streams support
+     * @param type - The video type.
+     */
+    onTrackVideoTypeChanged(type: VideoType): void {
+        const nowMs = Date.now();
+
+        this.maybeSendTrackStreamingStatusEvent(nowMs);
+
+        this.streamingStatusMap = {
+            ...this.streamingStatusMap || {},
+            videoType: type,
+            startedMs: nowMs
+        };
+    }
+}
+
+export default TrackStreamingStatusImpl;
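
To summarise the JVB-mode decision logic, a few hedged example evaluations. They assume a browser for which browser.supportsVideoMuteOnConnInterrupted() returns true and call the conventionally private static helper directly:

    // Hypothetical illustration of _getNewStateForJvbMode
    // (isInForwardedSources, isRestoringTimedout, isVideoMuted, isVideoTrackFrozen)
    TrackStreamingStatusImpl._getNewStateForJvbMode(true, false, false, false);  // 'active': video is playing
    TrackStreamingStatusImpl._getNewStateForJvbMode(true, false, false, true);   // 'restoring': frozen, just entered forwarded sources
    TrackStreamingStatusImpl._getNewStateForJvbMode(true, true, false, true);    // 'interrupted': frozen past DEFAULT_RESTORING_TIMEOUT
    TrackStreamingStatusImpl._getNewStateForJvbMode(false, false, false, true);  // 'inactive': bridge stopped forwarding the source
    TrackStreamingStatusImpl._getNewStateForJvbMode(false, false, true, true);   // 'active': video muted, detection does not apply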

service/RTC/RTCEvents.js (+1, -0)

@@ -12,6 +12,7 @@ const RTCEvents = {
     ENDPOINT_CONN_STATUS_CHANGED: 'rtc.endpoint_conn_status_changed',
     DOMINANT_SPEAKER_CHANGED: 'rtc.dominant_speaker_changed',
     LASTN_ENDPOINT_CHANGED: 'rtc.lastn_endpoint_changed',
+    FORWARDED_SOURCES_CHANGED: 'rtc.forwarded_sources_changed',
 
     /**
      * Event emitted when the user granted/blocked a permission for the camera / mic.

service/RTC/RTCEvents.spec.ts (+3, -1)

@@ -10,6 +10,7 @@ describe( "/service/RTC/RTCEvents members", () => {
         ENDPOINT_CONN_STATUS_CHANGED,
         DOMINANT_SPEAKER_CHANGED,
         LASTN_ENDPOINT_CHANGED,
+        FORWARDED_SOURCES_CHANGED,
         PERMISSIONS_CHANGED,
         SENDER_VIDEO_CONSTRAINTS_CHANGED,
         LASTN_VALUE_CHANGED,
@@ -41,6 +42,7 @@ describe( "/service/RTC/RTCEvents members", () => {
         expect( ENDPOINT_CONN_STATUS_CHANGED ).toBe( 'rtc.endpoint_conn_status_changed' );
         expect( DOMINANT_SPEAKER_CHANGED ).toBe( 'rtc.dominant_speaker_changed' );
         expect( LASTN_ENDPOINT_CHANGED ).toBe( 'rtc.lastn_endpoint_changed' );
+        expect( FORWARDED_SOURCES_CHANGED ).toBe( 'rtc.forwarded_sources_changed' );
         expect( PERMISSIONS_CHANGED ).toBe( 'rtc.permissions_changed' );
         expect( SENDER_VIDEO_CONSTRAINTS_CHANGED ).toBe( 'rtc.sender_video_constraints_changed' );
         expect( LASTN_VALUE_CHANGED ).toBe( 'rtc.lastn_value_changed' );
@@ -95,4 +97,4 @@ describe( "/service/RTC/RTCEvents members", () => {
         const keys = Object.keys( others );
         expect( keys ).withContext( `Extra members: ${ keys.join( ", " ) }` ).toEqual( [] );
     } );
-} );
+} );

service/statistics/AnalyticsEvents.js (+17, -0)

@@ -371,6 +371,23 @@ export const createParticipantConnectionStatusEvent = function(attributes = {})
     };
 };
 
+/**
+ * Creates an event related to remote track streaming status changes.
+ *
+ * @param attributes the attributes to attach to the event.
+ * @returns {{type: string, source: string, name: string}}
+ */
+export const createTrackStreamingStatusEvent = function(attributes = {}) {
+    const action = 'duration';
+
+    return {
+        type: TYPE_OPERATIONAL,
+        source: 'track.streaming.status',
+        action,
+        attributes
+    };
+};
+
 /**
  * Creates an event for a Jingle-related event.
  * @param action the action of the event
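
For illustration, a hedged example of what the new factory returns; the attribute values mirror the streamingStatusMap built in TrackStreamingStatus.ts, and TYPE_OPERATIONAL is defined elsewhere in this file (its concrete value is not shown in this hunk):

    // Hypothetical call and resulting event object.
    createTrackStreamingStatusEvent({
        p2p: false,
        streamingStatus: 'interrupted',
        value: 15000,        // ms spent in the current status
        videoType: 'camera'
    });
    // => { type: TYPE_OPERATIONAL, source: 'track.streaming.status', action: 'duration',
    //      attributes: { p2p: false, streamingStatus: 'interrupted', value: 15000, videoType: 'camera' } }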

service/statistics/AnalyticsEvents.spec.ts (+3, -1)

@@ -35,6 +35,7 @@ describe( "/service/statistics/AnalyticsEvents members", () => {
         createFocusLeftEvent,
         createGetUserMediaEvent,
         createParticipantConnectionStatusEvent,
+        createTrackStreamingStatusEvent,
         createJingleEvent,
         createNoDataFromSourceEvent,
         createP2PEvent,
@@ -81,6 +82,7 @@ describe( "/service/statistics/AnalyticsEvents members", () => {
         expect( typeof ( createFocusLeftEvent ) ).toBe( 'function' );
         expect( typeof ( createGetUserMediaEvent ) ).toBe( 'function' );
         expect( typeof ( createParticipantConnectionStatusEvent ) ).toBe( 'function' );
+        expect( typeof ( createTrackStreamingStatusEvent ) ).toBe( 'function' );
         expect( typeof ( createJingleEvent ) ).toBe( 'function' );
         expect( typeof ( createNoDataFromSourceEvent ) ).toBe( 'function' );
         expect( typeof ( createP2PEvent ) ).toBe( 'function' );
@@ -97,4 +99,4 @@ describe( "/service/statistics/AnalyticsEvents members", () => {
         const keys = Object.keys( others );
         expect( keys ).withContext( `Extra members: ${ keys.join( ", " ) }` ).toEqual( [] );
     } );
-} );
+} );

types/auto/JitsiConference.d.ts (+5, -0)

@@ -534,6 +534,11 @@ declare class JitsiConference {
      * @returns {number}
      */
     getLastN(): number;
+    /**
+     * Obtains the forwarded sources list in this conference.
+     * @return {Array<string>|null}
+     */
+    getForwardedSources(): Array<string> | null;
     /**
      * Selects a new value for "lastN". The requested amount of videos are going
      * to be delivered after the value is in effect. Set to -1 for unlimited or

+ 10 - 0  types/auto/JitsiConferenceEvents.d.ts

@@ -128,6 +128,15 @@ export declare enum JitsiConferenceEvents {
     * which are entering Last N
     */
    LAST_N_ENDPOINTS_CHANGED = "conference.lastNEndpointsChanged",
+    /**
+     * The forwarded sources set is changed.
+     *
+     * @param {Array<string>} leavingForwardedSources the sourceNames of all the tracks which are leaving forwarded
+     * sources
+     * @param {Array<string>} enteringForwardedSources the sourceNames of all the tracks which are entering forwarded
+     * sources
+     */
+    FORWARDED_SOURCES_CHANGED = "conference.forwardedSourcesChanged",
    /**
     * Indicates that the room has been locked or unlocked.
     */
@@ -408,6 +417,7 @@ export declare const JVB121_STATUS = JitsiConferenceEvents.JVB121_STATUS;
 export declare const KICKED = JitsiConferenceEvents.KICKED;
 export declare const PARTICIPANT_KICKED = JitsiConferenceEvents.PARTICIPANT_KICKED;
 export declare const LAST_N_ENDPOINTS_CHANGED = JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED;
+export declare const FORWARDED_SOURCES_CHANGED = JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED;
 export declare const LOCK_STATE_CHANGED = JitsiConferenceEvents.LOCK_STATE_CHANGED;
 export declare const SERVER_REGION_CHANGED = JitsiConferenceEvents.SERVER_REGION_CHANGED;
 export declare const _MEDIA_SESSION_STARTED = JitsiConferenceEvents._MEDIA_SESSION_STARTED;
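A sketch of how an application might consume the new conference-level event. The handler arguments follow the JSDoc above, the event string matches the declared enum value, and the conference object (typed structurally here) is assumed:

// Structural stand-in for the conference listener API (illustrative only).
type ConferenceLike = {
    on: (eventId: string, handler: (...args: any[]) => unknown) => void;
};

function watchForwardedSources(conference: ConferenceLike): void {
    conference.on('conference.forwardedSourcesChanged',
        (leavingForwardedSources: string[], enteringForwardedSources: string[]) => {
            // Sources that left the set are intentionally paused by the bridge;
            // sources that entered should start rendering again shortly.
            console.log('leaving:', leavingForwardedSources);
            console.log('entering:', enteringForwardedSources);
        });
}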

+ 12 - 0  types/auto/JitsiTrackEvents.d.ts

@@ -37,3 +37,15 @@ export const NO_DATA_FROM_SOURCE: "track.no_data_from_source";
  * the microphone that is currently selected.
  */
 export const NO_AUDIO_INPUT: "track.no_audio_input";
+/**
+ * Event fired whenever video track's streaming changes.
+ * First argument is the sourceName of the track and the second is a string indicating if the connection is currently
+ * - active - the connection is active.
+ * - inactive - the connection is inactive, was intentionally interrupted by the bridge because of low BWE or because
+ *   of the endpoint falling out of last N.
+ * - interrupted - a network problem occurred.
+ * - restoring - the connection was inactive and is restoring now.
+ *
+ * The current status value can be obtained by calling JitsiRemoteTrack.getTrackStreamingStatus().
+ */
+export const TRACK_STREAMING_STATUS_CHANGED: "track.streaming_status_changed";
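A minimal subscription sketch; the handler arguments follow the JSDoc above, and the remote track is typed structurally because how it is obtained from the conference is outside this diff:

// The literal below matches the constant declared above.
const TRACK_STREAMING_STATUS_CHANGED = 'track.streaming_status_changed';

// Structural stand-in for the remote-track listener API (illustrative only).
type RemoteTrackLike = {
    on: (eventName: string, listener: (...args: any[]) => unknown) => void;
};

function watchStreamingStatus(track: RemoteTrackLike): void {
    track.on(TRACK_STREAMING_STATUS_CHANGED,
        (sourceName: string, status: 'active' | 'inactive' | 'interrupted' | 'restoring') => {
            // E.g. toggle a "connection problems" overlay for this source when not active.
            console.log(`streaming status for ${sourceName}: ${status}`);
        });
}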

+ 64 - 0  types/auto/modules/RTC/JitsiRemoteTrack.d.ts

@@ -28,6 +28,15 @@ export default class JitsiRemoteTrack extends JitsiTrack {
     muted: boolean;
     isP2P: boolean;
     _sourceName: string;
+    _trackStreamingStatus: any;
+    _trackStreamingStatusImpl: TrackStreamingStatusImpl;
+    /**
+     * This holds the timestamp indicating when remote video track entered forwarded sources set. Track entering
+     * forwardedSources will have streaming status restoring and when we start receiving video will become active,
+     * but if video is not received for certain time {@link DEFAULT_RESTORING_TIMEOUT} that track streaming status
+     * will become interrupted.
+     */
+    _enteredForwardedSourcesTimestamp: number;
     hasBeenMuted: boolean;
     _containerHandlers: {};
     /**
@@ -36,6 +45,21 @@ export default class JitsiRemoteTrack extends JitsiTrack {
     * @returns {void}
     */
    _bindTrackHandlers(): void;
+    /**
+     * Overrides addEventListener method to init TrackStreamingStatus instance when there are listeners for the
+     * {@link JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED} event.
+     *
+     * @param {string} event - event name
+     * @param {function} handler - event handler
+     */
+    _addEventListener(event: string, handler: Function): void;
+    /**
+     * Overrides removeEventListener method to dispose TrackStreamingStatus instance.
+     *
+     * @param {string} event - event name
+     * @param {function} handler - event handler
+     */
+    _removeEventListener(event: string, handler: Function): void;
    /**
     * Callback invoked when the track is muted. Emits an event notifying
     * listeners of the mute event.
@@ -105,5 +129,45 @@ export default class JitsiRemoteTrack extends JitsiTrack {
     * @returns {string}
     */
    _getStatus(): string;
+    /**
+     * Initializes trackStreamingStatusImpl.
+     */
+    _initTrackStreamingStatus(): void;
+    /**
+     * Disposes trackStreamingStatusImpl and clears trackStreamingStatus.
+     */
+    _disposeTrackStreamingStatus(): void;
+    /**
+     * Updates track's streaming status.
+     *
+     * @param {string} state the current track streaming state. {@link TrackStreamingStatus}.
+     */
+    _setTrackStreamingStatus(status: any): void;
+    /**
+     * Returns track's streaming status.
+     *
+     * @returns {string} the streaming status <tt>TrackStreamingStatus</tt> of the track. Returns null
+     * if trackStreamingStatusImpl hasn't been initialized.
+     *
+     * {@link TrackStreamingStatus}.
+     */
+    getTrackStreamingStatus(): string;
+    /**
+     * Clears the timestamp of when the track entered forwarded sources.
+     */
+    _clearEnteredForwardedSourcesTimestamp(): void;
+    /**
+     * Updates the timestamp of when the track entered forwarded sources.
+     *
+     * @param {number} timestamp the time in millis
+     */
+    _setEnteredForwardedSourcesTimestamp(timestamp: number): void;
+    /**
+     * Returns the timestamp of when the track entered forwarded sources.
+     *
+     * @returns {number} the time in millis
+     */
+    _getEnteredForwardedSourcesTimestamp(): number;
 }
 import JitsiTrack from "./JitsiTrack";
+import TrackStreamingStatusImpl from "../connectivity/TrackStreamingStatus";
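One consumer-side reading of the overridden listener hooks above: the status machinery is created lazily when the first TRACK_STREAMING_STATUS_CHANGED listener is added and disposed when listeners go away, so getTrackStreamingStatus() is only meaningful after subscribing. A sketch under those assumptions, with the track typed structurally:

const TRACK_STREAMING_STATUS_CHANGED = 'track.streaming_status_changed';

// Structural view of the JitsiRemoteTrack surface added above (illustrative only).
type RemoteTrackLike = {
    getSourceName: () => string;
    getTrackStreamingStatus: () => string;
    addEventListener: (event: string, handler: (...args: any[]) => void) => void;
    removeEventListener: (event: string, handler: (...args: any[]) => void) => void;
};

function attachStatusLogger(track: RemoteTrackLike): () => void {
    const handler = (_sourceName: string, status: string): void => {
        console.log(`${track.getSourceName()} is now ${status}`);
    };

    // Registering a listener is what initializes TrackStreamingStatusImpl for the track,
    // so only read getTrackStreamingStatus() after subscribing.
    track.addEventListener(TRACK_STREAMING_STATUS_CHANGED, handler);
    console.log('initial status:', track.getTrackStreamingStatus());

    // Removing the listener lets the track dispose the status machinery again.
    return () => track.removeEventListener(TRACK_STREAMING_STATUS_CHANGED, handler);
}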

+ 29 - 0  types/auto/modules/RTC/RTC.d.ts

@@ -184,6 +184,14 @@ export default class RTC extends Listenable {
     * @private
     */
    private _lastNEndpoints;
+    /**
+     * Defines the forwarded sources list. It can be null or an array once initialised with a channel forwarded
+     * sources event.
+     *
+     * @type {Array<string>|null}
+     * @private
+     */
+    private _forwardedSources;
    /**
     * The number representing the maximum video height the local client
     * should receive from the bridge.
@@ -200,6 +208,7 @@ export default class RTC extends Listenable {
     */
    private _selectedEndpoints;
    _lastNChangeListener: any;
+    _forwardedSourcesChangeListener: any;
    /**
     * Callback invoked when the list of known audio and video devices has
     * been updated. Attempts to update the known available audio output
@@ -247,6 +256,13 @@ export default class RTC extends Listenable {
     * @private
     */
    private _onLastNChanged;
+    /**
+     * Receives events when forwarded sources had changed.
+     *
+     * @param {array} forwardedSources The new forwarded sources.
+     * @private
+     */
+    private _onForwardedSourcesChanged;
    /**
     * Should be called when current media session ends and after the
     * PeerConnection has been closed using PeerConnection.close() method.
@@ -340,6 +356,11 @@ export default class RTC extends Listenable {
     * @param track
     */
    addLocalTrack(track: any): void;
+    /**
+     * Get forwarded sources list.
+     * @returns {Array<string>|null}
+     */
+    getForwardedSources(): Array<string> | null;
    /**
     * Get local video track.
     * @returns {JitsiLocalTrack|undefined}
@@ -432,6 +453,14 @@ export default class RTC extends Listenable {
     * don't have bridge channel support, otherwise we return false.
     */
    isInLastN(id: string): boolean;
+    /**
+     * Indicates if the source name is currently included in the forwarded sources.
+     *
+     * @param {string} sourceName The source name that we check for forwarded sources.
+     * @returns {boolean} true if the source name is in the forwarded sources or if we don't have bridge channel
+     * support, otherwise we return false.
+     */
+    isInForwardedSources(sourceName: string): boolean;
 }
 import Listenable from "../util/Listenable";
 import TraceablePeerConnection from "./TraceablePeerConnection";
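A small sketch of what isInForwardedSources lets a consumer decide; the RTC surface is typed structurally, and the interpretation in the comment follows the JSDoc above (the method also returns true when there is no bridge channel support):

// Structural stand-in for the RTC surface used here (illustrative only).
type RtcLike = {
    getForwardedSources: () => Array<string> | null;
    isInForwardedSources: (sourceName: string) => boolean;
};

function describeSource(rtc: RtcLike, sourceName: string): string {
    // True either when the source is in the forwarded set or when there is no
    // bridge channel, i.e. a frozen video for this source should be treated as a problem.
    if (rtc.isInForwardedSources(sourceName)) {
        return `${sourceName}: video is expected to flow`;
    }

    return `${sourceName}: intentionally not forwarded by the bridge`;
}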

+ 239 - 0  types/auto/modules/connectivity/TrackStreamingStatus.d.ts

@@ -0,0 +1,239 @@
+import JitsiConference from '../../types/hand-crafted/JitsiConference';
+import JitsiRemoteTrack from '../../types/hand-crafted/modules/RTC/JitsiRemoteTrack';
+import RTC from '../../types/hand-crafted/modules/RTC/RTC';
+import { VideoType } from '../../types/hand-crafted/service/RTC/VideoType';
+/** Track streaming statuses. */
+export declare enum TrackStreamingStatus {
+    /**
+     * Status indicating that streaming is currently active.
+     */
+    ACTIVE = "active",
+    /**
+     * Status indicating that streaming is currently inactive.
+     * Inactive means the streaming was stopped on purpose from the bridge, like exiting forwarded sources or
+     * adaptivity decided to drop video because of not enough bandwidth.
+     */
+    INACTIVE = "inactive",
+    /**
+     * Status indicating that streaming is currently interrupted.
+     */
+    INTERRUPTED = "interrupted",
+    /**
+     * Status indicating that streaming is currently restoring.
+     */
+    RESTORING = "restoring"
+}
+declare type StreamingStatusMap = {
+    videoType?: VideoType;
+    startedMs?: number;
+    p2p?: boolean;
+    streamingStatus?: string;
+    value?: number;
+};
+/**
+ * Class is responsible for emitting JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED events.
+ */
+export declare class TrackStreamingStatusImpl {
+    rtc: RTC;
+    conference: JitsiConference;
+    track: JitsiRemoteTrack;
+    /**  This holds the timeout callback ID scheduled using window.setTimeout. */
+    trackTimer: number | null;
+    /**
+     * If video track frozen detection through RTC mute event is supported, we wait some time until video track is
+     * considered frozen. But because when the track falls out of forwarded sources it is expected for the video to
+     * freeze this timeout must be significantly reduced in "out of forwarded sources" case.
+     *
+     * Basically this value is used instead of {@link rtcMuteTimeout} when track is not in forwarded sources.
+     */
+    outOfForwardedSourcesTimeout: number;
+    /**
+     * How long we are going to wait for the corresponding signaling mute event after the RTC video track muted
+     * event is fired on the Media stream, before the connection interrupted is fired. The default value is
+     * {@link DEFAULT_P2P_RTC_MUTE_TIMEOUT}.
+     */
+    p2pRtcMuteTimeout: number;
+    /**
+     * How long we're going to wait after the RTC video track muted event for the corresponding signalling mute
+     * event, before the connection interrupted is fired. The default value is {@link DEFAULT_RTC_MUTE_TIMEOUT}.
+     *
+     * @returns amount of time in milliseconds
+     */
+    rtcMuteTimeout: number;
+    /**
+     * This holds a timestamp indicating  when remote video track was RTC muted. The purpose of storing the
+     * timestamp is to avoid the transition to disconnected status in case of legitimate video mute operation where
+     * the signalling video muted event can arrive shortly after RTC muted event.
+     *
+     * The timestamp is measured in milliseconds obtained with <tt>Date.now()</tt>.
+     *
+     * FIXME merge this logic with NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the event
+     * to the remote track and allowing to set different timeout for local and remote tracks.
+     */
+    rtcMutedTimestamp: number | null;
+    /** This holds the restoring timeout callback ID scheduled using window.setTimeout. */
+    restoringTimer: ReturnType<typeof setTimeout> | null;
+    /**
+     * This holds the current streaming status (along with all the internal events that happen while in that
+     * state).
+     *
+     * The goal is to send this information to the analytics backend for post-mortem analysis.
+     */
+    streamingStatusMap: StreamingStatusMap;
+    _onP2PStatus: () => void;
+    _onUserLeft: () => void;
+    _onTrackRtcMuted: () => void;
+    _onTrackRtcUnmuted: () => void;
+    _onSignallingMuteChanged: () => void;
+    _onTrackVideoTypeChanged: () => void;
+    _onLastNValueChanged: () => void;
+    _onForwardedSourcesChanged: () => void;
+    /**
+     * Calculates the new {@link TrackStreamingStatus} based on the values given for some specific remote track. It is
+     * assumed that the conference is currently in the JVB mode (in contrary to the P2P mode)
+     * @param isInForwardedSources - indicates whether the track is in the forwarded sources set. When set to
+     * false it means that JVB is not sending any video for the track.
+     * @param isRestoringTimedout - if true it means that the track has been outside of forwarded sources too
+     * long to be considered {@link TrackStreamingStatus.RESTORING}.
+     * @param isVideoMuted - true if the track is video muted and we should not expect to receive any video.
+     * @param isVideoTrackFrozen - if the current browser support video frozen detection then it will be set to
+     * true when the video track is frozen. If the current browser does not support frozen detection the it's always
+     * false.
+     * @return {TrackStreamingStatus} the new streaming status for the track for whom the values above were provided.
+     * @private
+     */
+    static _getNewStateForJvbMode(isInForwardedSources: boolean, isRestoringTimedout: boolean, isVideoMuted: boolean, isVideoTrackFrozen: boolean): TrackStreamingStatus;
+    /**
+     * In P2P mode we don't care about any values coming from the JVB and the streaming status can be only active or
+     * interrupted.
+     * @param isVideoMuted - true if video muted
+     * @param isVideoTrackFrozen - true if the video track for the remote track is currently frozen. If the
+     * current browser does not support video frozen detection then it's always false.
+     * @return {TrackStreamingStatus}
+     * @private
+     */
+    static _getNewStateForP2PMode(isVideoMuted: boolean, isVideoTrackFrozen: boolean): TrackStreamingStatus;
+    /**
+     * Creates new instance of <tt>TrackStreamingStatus</tt>.
+     *
+     * @constructor
+     * @param rtc - the RTC service instance
+     * @param conference - parent conference instance
+     * @param {Object} options
+     * @param {number} [options.p2pRtcMuteTimeout=2500] custom value for
+     * {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
+     * @param {number} [options.rtcMuteTimeout=2000] custom value for
+     * {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
+     * @param {number} [options.outOfForwardedSourcesTimeout=500] custom value for
+     * {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
+     */
+    constructor(rtc: RTC, conference: JitsiConference, track: JitsiRemoteTrack, options: {
+        outOfForwardedSourcesTimeout: number;
+        p2pRtcMuteTimeout: number;
+        rtcMuteTimeout: number;
+    });
+    /**
+     * Gets the video frozen timeout for given source name.
+     * @return how long are we going to wait since RTC video muted even, before a video track is considered
+     * frozen.
+     * @private
+     */
+    _getVideoFrozenTimeout(): number;
+    /**
+     * Initializes <tt>TrackStreamingStatus</tt> and bind required event listeners.
+     */
+    init(): void;
+    /**
+     * Removes all event listeners and disposes of all resources held by this instance.
+     */
+    dispose(): void;
+    /**
+     * Changes streaming status.
+     * @param newStatus
+     */
+    _changeStreamingStatus(newStatus: TrackStreamingStatus): void;
+    /**
+     * Reset the postponed "streaming interrupted" event which was previously scheduled as a timeout on RTC 'onmute'
+     * event.
+     */
+    clearTimeout(): void;
+    /**
+     * Clears the timestamp of the RTC muted event for remote video track.
+     */
+    clearRtcMutedTimestamp(): void;
+    /**
+     * Checks if track is considered frozen.
+     * @return <tt>true</tt> if the video has frozen or <tt>false</tt> when it's either not considered frozen
+     * (yet) or if freeze detection is not supported by the current browser.
+     *
+     * FIXME merge this logic with NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the event to
+     *       the remote track and allowing to set different timeout for local and remote tracks.
+     */
+    isVideoTrackFrozen(): boolean;
+    /**
+     * Figures out (and updates) the current streaming status for the track identified by the source name.
+     */
+    figureOutStreamingStatus(): void;
+    /**
+     * Computes the duration of the current streaming status for the track (i.e. 15 seconds in the INTERRUPTED state)
+     * and sends a track streaming status event.
+     * @param nowMs - The current time (in millis).
+     */
+    maybeSendTrackStreamingStatusEvent(nowMs: number): void;
+    /**
+     * On change in forwarded sources set check all leaving and entering track to change their corresponding statuses.
+     *
+     * @param leavingForwardedSources - The array of sourceName leaving forwarded sources.
+     * @param enteringForwardedSources - The array of sourceName entering forwarded sources.
+     * @param timestamp - The time in millis
+     * @private
+     */
+    onForwardedSourcesChanged(leavingForwardedSources: string[], enteringForwardedSources: string[], timestamp: number): void;
+    /**
+     * Clears the restoring timer for video track and the timestamp for entering forwarded sources.
+     */
+    _clearRestoringTimer(): void;
+    /**
+     * Checks whether a track had stayed enough in restoring state, compares current time and the time the track
+     * entered in forwarded sources. If it hasn't timedout and there is no timer added, add new timer in order to give
+     * it more time to become active or mark it as interrupted on next check.
+     *
+     * @returns <tt>true</tt> if the track was in restoring state more than the timeout
+     * ({@link DEFAULT_RESTORING_TIMEOUT}.) in order to set its status to interrupted.
+     * @private
+     */
+    _isRestoringTimedout(): boolean;
+    /** Checks whether a track is the current track. */
+    _isCurrentTrack(track: JitsiRemoteTrack): boolean;
+    /**
+     * Sends a last/final track streaming status event for the track of the user that left the conference.
+     * @param id - The id of the participant that left the conference.
+     */
+    onUserLeft(id: string): void;
+    /**
+     * Handles RTC 'onmute' event for the video track.
+     *
+     * @param track - The video track for which 'onmute' event will be processed.
+     */
+    onTrackRtcMuted(track: JitsiRemoteTrack): void;
+    /**
+     * Handles RTC 'onunmute' event for the video track.
+     *
+     * @param track - The video track for which 'onunmute' event will be processed.
+     */
+    onTrackRtcUnmuted(track: JitsiRemoteTrack): void;
+    /**
+     * Here the signalling "mute"/"unmute" events are processed.
+     *
+     * @param track - The remote video track for which the signalling mute/unmute event will be
+     * processed.
+     */
+    onSignallingMuteChanged(track: JitsiRemoteTrack): void;
+    /**
+     * Sends a track streaming status event as a result of the video type changing.
+     * @deprecated this will go away with full multiple streams support
+     * @param type - The video type.
+     */
+    onTrackVideoTypeChanged(type: VideoType): void;
+}
+export default TrackStreamingStatusImpl;
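The declaration above only describes the inputs of _getNewStateForJvbMode. As a reading aid, here is one way the documented rules and the enum semantics could combine; this is an illustrative sketch, not the library's verbatim implementation:

enum TrackStreamingStatusSketch {
    ACTIVE = 'active',
    INACTIVE = 'inactive',
    INTERRUPTED = 'interrupted',
    RESTORING = 'restoring'
}

// Illustrative combination of the documented inputs for JVB mode.
function getNewStateForJvbModeSketch(
        isInForwardedSources: boolean,
        isRestoringTimedout: boolean,
        isVideoMuted: boolean,
        isVideoTrackFrozen: boolean): TrackStreamingStatusSketch {
    if (isVideoMuted || !isVideoTrackFrozen) {
        // Muted tracks are not expected to send video, and unfrozen video means data is flowing.
        return TrackStreamingStatusSketch.ACTIVE;
    }

    if (!isInForwardedSources) {
        // The bridge stopped the stream on purpose (out of forwarded sources or low bandwidth).
        return TrackStreamingStatusSketch.INACTIVE;
    }

    // In forwarded sources but frozen: allow some time to restore, then report an interruption.
    return isRestoringTimedout
        ? TrackStreamingStatusSketch.INTERRUPTED
        : TrackStreamingStatusSketch.RESTORING;
}

In P2P mode, per the JSDoc, only ACTIVE and INTERRUPTED apply, keyed on the video mute state and frozen detection.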

+ 1 - 0  types/auto/service/RTC/RTCEvents.d.ts

@@ -4,6 +4,7 @@ export const DATA_CHANNEL_OPEN: string;
 export const ENDPOINT_CONN_STATUS_CHANGED: string;
 export const DOMINANT_SPEAKER_CHANGED: string;
 export const LASTN_ENDPOINT_CHANGED: string;
+export const FORWARDED_SOURCES_CHANGED: string;
 export const PERMISSIONS_CHANGED: string;
 export const SENDER_VIDEO_CONSTRAINTS_CHANGED: string;
 export const LASTN_VALUE_CHANGED: string;

+ 5 - 0  types/auto/service/statistics/AnalyticsEvents.d.ts

@@ -275,6 +275,11 @@ export function createParticipantConnectionStatusEvent(attributes?: {}): {
    source: string;
    name: string;
 };
+export function createTrackStreamingStatusEvent(attributes?: {}): {
+    type: string;
+    source: string;
+    name: string;
+};
 export function createJingleEvent(action: any, attributes?: {}): {
    type: string;
    action: any;

+ 5 - 5  types/hand-crafted/JitsiConference.d.ts

@@ -1,4 +1,4 @@
-import { JitsiConferenceEvents } from './JitsiConferenceEvents';
+import { JitsiConferenceEvents } from '../../JitsiConferenceEvents';
 import JitsiConnection from './JitsiConnection';
 import JitsiTrackError from './JitsiTrackError';
 import JitsiParticipant from './JitsiParticipant';
@@ -42,10 +42,10 @@ export default class JitsiConference {
   getLocalAudioTrack: () => JitsiLocalTrack | null;
   getLocalVideoTrack: () => JitsiLocalTrack | null;
   getPerformanceStats: () => unknown | null; // TODO:
-  on: ( eventId: JitsiConferenceEvents, handler: () => unknown ) => void; // TODO:
-  off: ( eventId: JitsiConferenceEvents, handler: () => unknown ) => void; // TODO:
-  addEventListener: ( eventId: JitsiConferenceEvents, handler: () => unknown ) => void; // TODO:
-  removeEventListener: ( eventId: JitsiConferenceEvents, handler: () => unknown ) => void; // TODO:
+  on: ( eventId: JitsiConferenceEvents, handler: (...args: any[]) => unknown ) => void; // TODO:
+  off: ( eventId: JitsiConferenceEvents, handler: (...args: any[]) => unknown ) => void; // TODO:
+  addEventListener: ( eventId: JitsiConferenceEvents, handler: (...args: any[]) => unknown ) => void; // TODO:
+  removeEventListener: ( eventId: JitsiConferenceEvents, handler: (...args: any[]) => unknown ) => void; // TODO:
   addCommandListener: ( command: string, handler: () => unknown ) => void; // TODO:
   removeCommandListener: ( command: string, handler: () => unknown ) => void; // TODO:
   // sendTextMessage: (message: string, elementName: string) => void; // obsolete
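The handler type is widened from () => unknown to (...args: any[]) => unknown because several conference events carry payloads, and a handler that declares parameters is not assignable to a zero-argument signature. A short sketch of the kind of handler the widened type now admits; the conference object is typed structurally and the wiring around it is assumed:

// Structural stand-in for the hand-crafted conference type (illustrative only).
type ConferenceLike = {
    on: (eventId: string, handler: (...args: any[]) => unknown) => void;
};

function subscribe(conference: ConferenceLike): void {
    // A handler with typed arguments now type-checks against the listener signature.
    conference.on('conference.forwardedSourcesChanged',
        (leaving: string[], entering: string[]) => {
            console.log('forwarded sources changed', { leaving, entering });
        });
}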

+ 1 - 0  types/hand-crafted/JitsiConferenceEvents.d.ts

@@ -21,6 +21,7 @@ export enum JitsiConferenceEvents {
   KICKED = 'conference.kicked',
   PARTICIPANT_KICKED = 'conference.participant_kicked',
   LAST_N_ENDPOINTS_CHANGED = 'conference.lastNEndpointsChanged',
+  FORWARDED_SOURCES_CHANGED = 'conference.forwardedSourcesChanged',
   LOCK_STATE_CHANGED = 'conference.lock_state_changed',
   SERVER_REGION_CHANGED = 'conference.server_region_changed',
   _MEDIA_SESSION_STARTED = 'conference.media_session.started',

+ 7 - 1  types/hand-crafted/modules/RTC/JitsiRemoteTrack.d.ts

@@ -10,8 +10,14 @@ export default class JitsiRemoteTrack extends JitsiTrack {
   isLocal: () => false;
   getSSRC: () => number;
   toString: () => string;
+  getSourceName: () => string;
+  getTrackStreamingStatus: () => string;
+  _setTrackStreamingStatus: (newStatus: string) => void;
+  _clearEnteredForwardedSourcesTimestamp: () => void;
+  _setEnteredForwardedSourcesTimestamp: (timestamp: number) => void;
+  _getEnteredForwardedSourcesTimestamp: () => number | null;
 
   containerEvents: [ 'abort', 'canplay', 'canplaythrough', 'emptied', 'ended', 'error', 'loadeddata',
     'loadedmetadata', 'loadstart', 'pause', 'play', 'playing', 'ratechange', 'stalled', 'suspend',
     'waiting' ]; // TODO: this might be private
-}
+}

+ 2 - 1  types/hand-crafted/modules/RTC/JitsiTrack.d.ts

@@ -1,9 +1,10 @@
+import EventEmitter from 'events';
 import JitsiConference from '../../JitsiConference';
 import { MediaType } from '../../service/RTC/MediaType';
 import { VideoType } from '../../service/RTC/VideoType';
 import TraceablePeerConnection from './TraceablePeerConnection';
 
-export default class JitsiTrack {
+export default class JitsiTrack extends EventEmitter {
   constructor( conference: JitsiConference, stream: unknown, track: unknown, streamInactiveHandler: unknown, trackMediaType: unknown, videoType: unknown ); // TODO:
   disposed: boolean;
   getVideoType: () => VideoType;

+ 1 - 0  types/hand-crafted/modules/RTC/RTC.d.ts

@@ -46,6 +46,7 @@ export default class RTC extends Listenable {
   sendChannelMessage: ( to: string, payload: unknown ) => void; // TODO:
   setLastN: ( value: number ) => void;
   isInLastN: ( id: string ) => boolean;
+  isInForwardedSources: ( sourceName: string ) => boolean;
   setNewReceiverVideoConstraints: ( constraints: unknown ) => void; // TODO:
   setVideoType: ( videoType: string ) => void;
   setVideoMute: ( value: unknown ) => Promise<unknown>; // TODO:

+ 2 - 0  types/hand-crafted/modules/util/Listenable.d.ts

@@ -4,4 +4,6 @@ export default class Listenable {
   constructor( eventEmitter?: EventEmitter<unknown> ); // TODO:
   addListener: ( eventName: string, listener: () => unknown ) => () => unknown; // TODO: returns remove listener func
   removeListener: ( eventName: string, listener: () => unknown ) => void;
+  on: (eventName: string, listener: (...args: any[]) => unknown) => unknown; // TODO: returns remove listener func
+  off: (eventName: string, listener: (...args: any[]) => unknown) => void;
 }
