|
@@ -0,0 +1,648 @@
|
|
1
|
+import { getLogger } from '@jitsi/logger';
|
|
2
|
+
|
|
3
|
+import { JitsiConferenceEvents } from '../../JitsiConferenceEvents';
|
|
4
|
+import * as JitsiTrackEvents from '../../JitsiTrackEvents';
|
|
5
|
+import RTCEvents from '../../service/RTC/RTCEvents';
|
|
6
|
+import { createTrackStreamingStatusEvent } from '../../service/statistics/AnalyticsEvents';
|
|
7
|
+import JitsiConference from '../../types/hand-crafted/JitsiConference';
|
|
8
|
+import JitsiRemoteTrack from '../../types/hand-crafted/modules/RTC/JitsiRemoteTrack';
|
|
9
|
+import RTC from '../../types/hand-crafted/modules/RTC/RTC';
|
|
10
|
+import { VideoType } from '../../types/hand-crafted/service/RTC/VideoType';
|
|
11
|
+import browser from '../browser';
|
|
12
|
+import Statistics from '../statistics/statistics';
|
|
13
|
+
|
|
14
|
/**
 * Track streaming statuses. These are the values emitted with
 * JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED.
 */
export enum TrackStreamingStatus {

    /**
     * Status indicating that streaming is currently active.
     */
    ACTIVE = 'active',

    /**
     * Status indicating that streaming is currently inactive.
     * Inactive means the streaming was stopped on purpose from the bridge, like exiting forwarded sources or
     * adaptivity decided to drop video because of not enough bandwidth.
     */
    INACTIVE = 'inactive',

    /**
     * Status indicating that streaming is currently interrupted.
     */
    INTERRUPTED = 'interrupted',

    /**
     * Status indicating that streaming is currently restoring.
     */
    RESTORING = 'restoring',
}
|
|
39
|
+
|
|
40
|
/**
 * Snapshot of the current streaming status plus the context it was entered in.
 * Accumulated here so it can be reported to the analytics backend for post-mortem analysis
 * whenever the status (or the p2p flag) changes.
 */
type StreamingStatusMap = {
    // TODO: Replace this hand crafted VideoType when we convert VideoType.js to Typescript.
    videoType?: VideoType,

    // Timestamp (Date.now()) when the current status was entered.
    startedMs?: number,

    // Whether the conference was in P2P mode while in this status.
    p2p?: boolean,

    // The TrackStreamingStatus value in effect, as a string.
    streamingStatus?: string,

    // Duration (ms) spent in the status; filled in right before the analytics event is sent.
    value?: number
};
|
|
48
|
+
|
|
49
|
// Module-scoped logger tagged with this file's name.
const logger = getLogger(__filename);

/**
 * Default value of 500 milliseconds for {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
 */
const DEFAULT_NOT_IN_FORWARDED_SOURCES_TIMEOUT = 500;

/**
 * Default value of 2500 milliseconds for {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
 */
const DEFAULT_P2P_RTC_MUTE_TIMEOUT = 2500;

/**
 * Default value of 10000 milliseconds for {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
 */
const DEFAULT_RTC_MUTE_TIMEOUT = 10000;

/**
 * The time to wait a track to be restored. Track which was out of forwarded sources should be inactive and when
 * entering forwarded sources it becomes restoring and when data is received from bridge it will become active, but if
 * no data is received for some time we set status of that track streaming to interrupted.
 */
const DEFAULT_RESTORING_TIMEOUT = 10000;
|
|
72
|
+
|
|
73
|
+/**
|
|
74
|
+ * Class is responsible for emitting JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED events.
|
|
75
|
+ */
|
|
76
|
+export class TrackStreamingStatusImpl {
|
|
77
|
+ rtc: RTC;
|
|
78
|
+ conference: JitsiConference;
|
|
79
|
+ track: JitsiRemoteTrack;
|
|
80
|
+
|
|
81
|
+ /** This holds the timeout callback ID scheduled using window.setTimeout. */
|
|
82
|
+ trackTimer: number | null;
|
|
83
|
+
|
|
84
|
+ /**
|
|
85
|
+ * If video track frozen detection through RTC mute event is supported, we wait some time until video track is
|
|
86
|
+ * considered frozen. But because when the track falls out of forwarded sources it is expected for the video to
|
|
87
|
+ * freeze this timeout must be significantly reduced in "out of forwarded sources" case.
|
|
88
|
+ *
|
|
89
|
+ * Basically this value is used instead of {@link rtcMuteTimeout} when track is not in forwarded sources.
|
|
90
|
+ */
|
|
91
|
+ outOfForwardedSourcesTimeout: number;
|
|
92
|
+
|
|
93
|
+ /**
|
|
94
|
+ * How long we are going to wait for the corresponding signaling mute event after the RTC video track muted
|
|
95
|
+ * event is fired on the Media stream, before the connection interrupted is fired. The default value is
|
|
96
|
+ * {@link DEFAULT_P2P_RTC_MUTE_TIMEOUT}.
|
|
97
|
+ */
|
|
98
|
+ p2pRtcMuteTimeout: number;
|
|
99
|
+
|
|
100
|
+ /**
|
|
101
|
+ * How long we're going to wait after the RTC video track muted event for the corresponding signalling mute
|
|
102
|
+ * event, before the connection interrupted is fired. The default value is {@link DEFAULT_RTC_MUTE_TIMEOUT}.
|
|
103
|
+ *
|
|
104
|
+ * @returns amount of time in milliseconds
|
|
105
|
+ */
|
|
106
|
+ rtcMuteTimeout: number;
|
|
107
|
+
|
|
108
|
+ /**
|
|
109
|
+ * This holds a timestamp indicating when remote video track was RTC muted. The purpose of storing the
|
|
110
|
+ * timestamp is to avoid the transition to disconnected status in case of legitimate video mute operation where
|
|
111
|
+ * the signalling video muted event can arrive shortly after RTC muted event.
|
|
112
|
+ *
|
|
113
|
+ * The timestamp is measured in milliseconds obtained with <tt>Date.now()</tt>.
|
|
114
|
+ *
|
|
115
|
+ * FIXME merge this logic with NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the event
|
|
116
|
+ * to the remote track and allowing to set different timeout for local and remote tracks.
|
|
117
|
+ */
|
|
118
|
+ rtcMutedTimestamp: number | null;
|
|
119
|
+
|
|
120
|
+ /** This holds the restoring timeout callback ID scheduled using window.setTimeout. */
|
|
121
|
+ restoringTimer: ReturnType<typeof setTimeout> | null;
|
|
122
|
+
|
|
123
|
+ /**
|
|
124
|
+ * This holds the current streaming status (along with all the internal events that happen while in that
|
|
125
|
+ * state).
|
|
126
|
+ *
|
|
127
|
+ * The goal is to send this information to the analytics backend for post-mortem analysis.
|
|
128
|
+ */
|
|
129
|
+ streamingStatusMap: StreamingStatusMap;
|
|
130
|
+
|
|
131
|
+ _onP2PStatus: () => void;
|
|
132
|
+ _onUserLeft: () => void;
|
|
133
|
+ _onTrackRtcMuted: () => void;
|
|
134
|
+ _onTrackRtcUnmuted: () => void;
|
|
135
|
+ _onSignallingMuteChanged: () => void;
|
|
136
|
+ _onTrackVideoTypeChanged: () => void;
|
|
137
|
+ _onLastNValueChanged: () => void;
|
|
138
|
+ _onForwardedSourcesChanged: () => void;
|
|
139
|
+
|
|
140
|
+ /* eslint-disable max-params*/
|
|
141
|
+ /**
|
|
142
|
+ * Calculates the new {@link TrackStreamingStatus} based on the values given for some specific remote track. It is
|
|
143
|
+ * assumed that the conference is currently in the JVB mode (in contrary to the P2P mode)
|
|
144
|
+ * @param isInForwardedSources - indicates whether the track is in the forwarded sources set. When set to
|
|
145
|
+ * false it means that JVB is not sending any video for the track.
|
|
146
|
+ * @param isRestoringTimedout - if true it means that the track has been outside of forwarded sources too
|
|
147
|
+ * long to be considered {@link TrackStreamingStatus.RESTORING}.
|
|
148
|
+ * @param isVideoMuted - true if the track is video muted and we should not expect to receive any video.
|
|
149
|
+ * @param isVideoTrackFrozen - if the current browser support video frozen detection then it will be set to
|
|
150
|
+ * true when the video track is frozen. If the current browser does not support frozen detection the it's always
|
|
151
|
+ * false.
|
|
152
|
+ * @return {TrackStreamingStatus} the new streaming status for the track for whom the values above were provided.
|
|
153
|
+ * @private
|
|
154
|
+ */
|
|
155
|
+ static _getNewStateForJvbMode(
|
|
156
|
+ isInForwardedSources: boolean,
|
|
157
|
+ isRestoringTimedout: boolean,
|
|
158
|
+ isVideoMuted: boolean,
|
|
159
|
+ isVideoTrackFrozen: boolean): TrackStreamingStatus {
|
|
160
|
+
|
|
161
|
+ // We are currently not checking the endpoint connection status received from the JVB.
|
|
162
|
+ if (isVideoMuted) {
|
|
163
|
+ // If the connection is active according to JVB and the track is video muted there is no way for the
|
|
164
|
+ // connection to be inactive, because the detection logic below only makes sense for video.
|
|
165
|
+ return TrackStreamingStatus.ACTIVE;
|
|
166
|
+ }
|
|
167
|
+
|
|
168
|
+ // Logic when isVideoTrackFrozen is supported
|
|
169
|
+ if (browser.supportsVideoMuteOnConnInterrupted()) {
|
|
170
|
+ if (!isVideoTrackFrozen) {
|
|
171
|
+ // If the video is playing we're good
|
|
172
|
+ return TrackStreamingStatus.ACTIVE;
|
|
173
|
+ } else if (isInForwardedSources) {
|
|
174
|
+ return isRestoringTimedout ? TrackStreamingStatus.INTERRUPTED : TrackStreamingStatus.RESTORING;
|
|
175
|
+ }
|
|
176
|
+
|
|
177
|
+ return TrackStreamingStatus.INACTIVE;
|
|
178
|
+ }
|
|
179
|
+
|
|
180
|
+ // Because this browser is incapable of detecting frozen video we must rely on the forwarded sources value
|
|
181
|
+ return isInForwardedSources ? TrackStreamingStatus.ACTIVE : TrackStreamingStatus.INACTIVE;
|
|
182
|
+ }
|
|
183
|
+
|
|
184
|
+ /* eslint-enable max-params*/
|
|
185
|
+
|
|
186
|
+ /**
|
|
187
|
+ * In P2P mode we don't care about any values coming from the JVB and the streaming status can be only active or
|
|
188
|
+ * interrupted.
|
|
189
|
+ * @param isVideoMuted - true if video muted
|
|
190
|
+ * @param isVideoTrackFrozen - true if the video track for the remote track is currently frozen. If the
|
|
191
|
+ * current browser does not support video frozen detection then it's always false.
|
|
192
|
+ * @return {TrackStreamingStatus}
|
|
193
|
+ * @private
|
|
194
|
+ */
|
|
195
|
+ static _getNewStateForP2PMode(isVideoMuted: boolean, isVideoTrackFrozen: boolean): TrackStreamingStatus {
|
|
196
|
+ if (!browser.supportsVideoMuteOnConnInterrupted()) {
|
|
197
|
+ // There's no way to detect problems in P2P when there's no video track frozen detection...
|
|
198
|
+ return TrackStreamingStatus.ACTIVE;
|
|
199
|
+ }
|
|
200
|
+
|
|
201
|
+ return isVideoMuted || !isVideoTrackFrozen
|
|
202
|
+ ? TrackStreamingStatus.ACTIVE : TrackStreamingStatus.INTERRUPTED;
|
|
203
|
+ }
|
|
204
|
+
|
|
205
|
+ /**
|
|
206
|
+ * Creates new instance of <tt>TrackStreamingStatus</tt>.
|
|
207
|
+ *
|
|
208
|
+ * @constructor
|
|
209
|
+ * @param rtc - the RTC service instance
|
|
210
|
+ * @param conference - parent conference instance
|
|
211
|
+ * @param {Object} options
|
|
212
|
+ * @param {number} [options.p2pRtcMuteTimeout=2500] custom value for
|
|
213
|
+ * {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
|
|
214
|
+ * @param {number} [options.rtcMuteTimeout=2000] custom value for
|
|
215
|
+ * {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
|
|
216
|
+ * @param {number} [options.outOfForwardedSourcesTimeout=500] custom value for
|
|
217
|
+ * {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
|
|
218
|
+ */
|
|
219
|
+ constructor(rtc: RTC, conference: JitsiConference, track: JitsiRemoteTrack, options: {
|
|
220
|
+ outOfForwardedSourcesTimeout: number,
|
|
221
|
+ p2pRtcMuteTimeout: number,
|
|
222
|
+ rtcMuteTimeout: number
|
|
223
|
+ }) {
|
|
224
|
+ this.rtc = rtc;
|
|
225
|
+ this.conference = conference;
|
|
226
|
+ this.track = track;
|
|
227
|
+
|
|
228
|
+ this.restoringTimer = null;
|
|
229
|
+ this.rtcMutedTimestamp = null;
|
|
230
|
+ this.streamingStatusMap = {};
|
|
231
|
+ this.trackTimer = null;
|
|
232
|
+
|
|
233
|
+ this.outOfForwardedSourcesTimeout = typeof options.outOfForwardedSourcesTimeout === 'number'
|
|
234
|
+ ? options.outOfForwardedSourcesTimeout : DEFAULT_NOT_IN_FORWARDED_SOURCES_TIMEOUT;
|
|
235
|
+
|
|
236
|
+ this.p2pRtcMuteTimeout = typeof options.p2pRtcMuteTimeout === 'number'
|
|
237
|
+ ? options.p2pRtcMuteTimeout : DEFAULT_P2P_RTC_MUTE_TIMEOUT;
|
|
238
|
+
|
|
239
|
+ this.rtcMuteTimeout = typeof options.rtcMuteTimeout === 'number'
|
|
240
|
+ ? options.rtcMuteTimeout : DEFAULT_RTC_MUTE_TIMEOUT;
|
|
241
|
+ logger.info(`RtcMuteTimeout set to: ${this.rtcMuteTimeout}`);
|
|
242
|
+ }
|
|
243
|
+
|
|
244
|
+ /**
|
|
245
|
+ * Gets the video frozen timeout for given source name.
|
|
246
|
+ * @return how long are we going to wait since RTC video muted even, before a video track is considered
|
|
247
|
+ * frozen.
|
|
248
|
+ * @private
|
|
249
|
+ */
|
|
250
|
+ _getVideoFrozenTimeout(): number {
|
|
251
|
+ const sourceName = this.track.getSourceName();
|
|
252
|
+
|
|
253
|
+ return this.rtc.isInForwardedSources(sourceName)
|
|
254
|
+ ? this.rtcMuteTimeout
|
|
255
|
+ : this.conference.isP2PActive() ? this.p2pRtcMuteTimeout : this.outOfForwardedSourcesTimeout;
|
|
256
|
+ }
|
|
257
|
+
|
|
258
|
+ /**
|
|
259
|
+ * Initializes <tt>TrackStreamingStatus</tt> and bind required event listeners.
|
|
260
|
+ */
|
|
261
|
+ init(): void {
|
|
262
|
+ // Handles P2P status changes
|
|
263
|
+ this._onP2PStatus = this.figureOutStreamingStatus.bind(this);
|
|
264
|
+ this.conference.on(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);
|
|
265
|
+
|
|
266
|
+ // Used to send analytics events for the participant that left the call.
|
|
267
|
+ this._onUserLeft = this.onUserLeft.bind(this);
|
|
268
|
+ this.conference.on(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);
|
|
269
|
+
|
|
270
|
+ // On some browsers MediaStreamTrack trigger "onmute"/"onunmute" events for video type tracks when they stop
|
|
271
|
+ // receiving data which is often a sign that remote user is having connectivity issues.
|
|
272
|
+ if (browser.supportsVideoMuteOnConnInterrupted()) {
|
|
273
|
+
|
|
274
|
+ this._onTrackRtcMuted = this.onTrackRtcMuted.bind(this);
|
|
275
|
+ this.rtc.addListener(RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);
|
|
276
|
+
|
|
277
|
+ this._onTrackRtcUnmuted = this.onTrackRtcUnmuted.bind(this);
|
|
278
|
+ this.rtc.addListener(RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);
|
|
279
|
+
|
|
280
|
+ // Listened which will be bound to JitsiRemoteTrack to listen for signalling mute/unmute events.
|
|
281
|
+ this._onSignallingMuteChanged = this.onSignallingMuteChanged.bind(this);
|
|
282
|
+ this.track.on(JitsiTrackEvents.TRACK_MUTE_CHANGED, this._onSignallingMuteChanged);
|
|
283
|
+
|
|
284
|
+ // Used to send an analytics event when the video type changes.
|
|
285
|
+ this._onTrackVideoTypeChanged = this.onTrackVideoTypeChanged.bind(this);
|
|
286
|
+ this.track.on(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, this._onTrackVideoTypeChanged);
|
|
287
|
+ }
|
|
288
|
+
|
|
289
|
+ this._onForwardedSourcesChanged = this.onForwardedSourcesChanged.bind(this);
|
|
290
|
+ this.conference.on(JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED, this._onForwardedSourcesChanged);
|
|
291
|
+
|
|
292
|
+ this._onLastNValueChanged = this.figureOutStreamingStatus.bind(this);
|
|
293
|
+ this.rtc.on(RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);
|
|
294
|
+ }
|
|
295
|
+
|
|
296
|
+ /**
|
|
297
|
+ * Removes all event listeners and disposes of all resources held by this instance.
|
|
298
|
+ */
|
|
299
|
+ dispose(): void {
|
|
300
|
+ if (browser.supportsVideoMuteOnConnInterrupted()) {
|
|
301
|
+ this.rtc.removeListener(RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);
|
|
302
|
+ this.rtc.removeListener(RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);
|
|
303
|
+
|
|
304
|
+ this.track.off(JitsiTrackEvents.TRACK_MUTE_CHANGED, this._onSignallingMuteChanged);
|
|
305
|
+ }
|
|
306
|
+
|
|
307
|
+ this.conference.off(JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED, this._onForwardedSourcesChanged);
|
|
308
|
+ this.conference.off(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);
|
|
309
|
+ this.conference.off(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);
|
|
310
|
+ this.rtc.removeListener(RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);
|
|
311
|
+
|
|
312
|
+ this.clearTimeout();
|
|
313
|
+ this.clearRtcMutedTimestamp();
|
|
314
|
+ this.maybeSendTrackStreamingStatusEvent(Date.now());
|
|
315
|
+ this.figureOutStreamingStatus();
|
|
316
|
+ }
|
|
317
|
+
|
|
318
|
+ /**
|
|
319
|
+ * Changes streaming status.
|
|
320
|
+ * @param newStatus
|
|
321
|
+ */
|
|
322
|
+ _changeStreamingStatus(newStatus: TrackStreamingStatus): void {
|
|
323
|
+ if (this.track.getTrackStreamingStatus() !== newStatus) {
|
|
324
|
+
|
|
325
|
+ const sourceName = this.track.getSourceName();
|
|
326
|
+
|
|
327
|
+ this.track._setTrackStreamingStatus(newStatus);
|
|
328
|
+
|
|
329
|
+ logger.debug(`Emit track streaming status(${Date.now()}) ${sourceName}: ${newStatus}`);
|
|
330
|
+
|
|
331
|
+ // Log the event on CallStats
|
|
332
|
+ Statistics.sendLog(
|
|
333
|
+ JSON.stringify({
|
|
334
|
+ id: 'track.streaming.status',
|
|
335
|
+ track: sourceName,
|
|
336
|
+ status: newStatus
|
|
337
|
+ }));
|
|
338
|
+
|
|
339
|
+ this.track.emit(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED, newStatus);
|
|
340
|
+ }
|
|
341
|
+ }
|
|
342
|
+
|
|
343
|
+ /**
|
|
344
|
+ * Reset the postponed "streaming interrupted" event which was previously scheduled as a timeout on RTC 'onmute'
|
|
345
|
+ * event.
|
|
346
|
+ */
|
|
347
|
+ clearTimeout(): void {
|
|
348
|
+ if (this.trackTimer) {
|
|
349
|
+ window.clearTimeout(this.trackTimer);
|
|
350
|
+ this.trackTimer = null;
|
|
351
|
+ }
|
|
352
|
+ }
|
|
353
|
+
|
|
354
|
+ /**
|
|
355
|
+ * Clears the timestamp of the RTC muted event for remote video track.
|
|
356
|
+ */
|
|
357
|
+ clearRtcMutedTimestamp(): void {
|
|
358
|
+ this.rtcMutedTimestamp = null;
|
|
359
|
+ }
|
|
360
|
+
|
|
361
|
+ /**
|
|
362
|
+ * Checks if track is considered frozen.
|
|
363
|
+ * @return <tt>true</tt> if the video has frozen or <tt>false</tt> when it's either not considered frozen
|
|
364
|
+ * (yet) or if freeze detection is not supported by the current browser.
|
|
365
|
+ *
|
|
366
|
+ * FIXME merge this logic with NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the event to
|
|
367
|
+ * the remote track and allowing to set different timeout for local and remote tracks.
|
|
368
|
+ */
|
|
369
|
+ isVideoTrackFrozen(): boolean {
|
|
370
|
+ if (!browser.supportsVideoMuteOnConnInterrupted()) {
|
|
371
|
+ return false;
|
|
372
|
+ }
|
|
373
|
+
|
|
374
|
+ const isVideoRTCMuted = this.track.isWebRTCTrackMuted();
|
|
375
|
+ const rtcMutedTimestamp = this.rtcMutedTimestamp;
|
|
376
|
+ const timeout = this._getVideoFrozenTimeout();
|
|
377
|
+
|
|
378
|
+ return isVideoRTCMuted && typeof rtcMutedTimestamp === 'number' && (Date.now() - rtcMutedTimestamp) >= timeout;
|
|
379
|
+ }
|
|
380
|
+
|
|
381
|
+ /**
|
|
382
|
+ * Figures out (and updates) the current streaming status for the track identified by the source name.
|
|
383
|
+ */
|
|
384
|
+ figureOutStreamingStatus(): void {
|
|
385
|
+ const sourceName = this.track.getSourceName();
|
|
386
|
+ const inP2PMode = this.conference.isP2PActive();
|
|
387
|
+ const isRestoringTimedOut = this._isRestoringTimedout();
|
|
388
|
+ const audioOnlyMode = this.conference.getLastN() === 0;
|
|
389
|
+
|
|
390
|
+ // NOTE Overriding videoMuted to true for audioOnlyMode should disable any detection based on video playback or
|
|
391
|
+ // forwarded sources.
|
|
392
|
+ const isVideoMuted = this.track.isMuted() || audioOnlyMode;
|
|
393
|
+ const isVideoTrackFrozen = this.isVideoTrackFrozen();
|
|
394
|
+ const isInForwardedSources = this.rtc.isInForwardedSources(sourceName);
|
|
395
|
+
|
|
396
|
+ const newState
|
|
397
|
+ = inP2PMode
|
|
398
|
+ ? TrackStreamingStatusImpl._getNewStateForP2PMode(
|
|
399
|
+ isVideoMuted,
|
|
400
|
+ isVideoTrackFrozen)
|
|
401
|
+ : TrackStreamingStatusImpl._getNewStateForJvbMode(
|
|
402
|
+ isInForwardedSources,
|
|
403
|
+ isRestoringTimedOut,
|
|
404
|
+ isVideoMuted,
|
|
405
|
+ isVideoTrackFrozen);
|
|
406
|
+
|
|
407
|
+ // if the new state is not restoring clear timers and timestamps that we use to track the restoring state
|
|
408
|
+ if (newState !== TrackStreamingStatus.RESTORING) {
|
|
409
|
+ this._clearRestoringTimer();
|
|
410
|
+ }
|
|
411
|
+
|
|
412
|
+ logger.debug(
|
|
413
|
+ `Figure out conn status for ${sourceName}, is video muted: ${
|
|
414
|
+ isVideoMuted} video track frozen: ${
|
|
415
|
+ isVideoTrackFrozen} p2p mode: ${
|
|
416
|
+ inP2PMode} is in forwarded sources: ${
|
|
417
|
+ isInForwardedSources} currentStatus => newStatus: ${
|
|
418
|
+ this.track.getTrackStreamingStatus()} => ${newState}`);
|
|
419
|
+
|
|
420
|
+ const oldStreamingStatus = this.streamingStatusMap || {};
|
|
421
|
+
|
|
422
|
+ // Send an analytics event (guard on either the p2p flag or the streaming status has changed since the last
|
|
423
|
+ // time this code block run).
|
|
424
|
+ if (!('p2p' in oldStreamingStatus)
|
|
425
|
+ || !('streamingStatus' in oldStreamingStatus)
|
|
426
|
+ || oldStreamingStatus.p2p !== inP2PMode
|
|
427
|
+ || oldStreamingStatus.streamingStatus !== newState) {
|
|
428
|
+
|
|
429
|
+ const nowMs = Date.now();
|
|
430
|
+
|
|
431
|
+ this.maybeSendTrackStreamingStatusEvent(nowMs);
|
|
432
|
+
|
|
433
|
+ this.streamingStatusMap = {
|
|
434
|
+ ...oldStreamingStatus,
|
|
435
|
+ streamingStatus: newState,
|
|
436
|
+ p2p: inP2PMode,
|
|
437
|
+ startedMs: nowMs
|
|
438
|
+ };
|
|
439
|
+
|
|
440
|
+ // sometimes (always?) we're late to hook the TRACK_VIDEOTYPE_CHANGED event and the video type is not in
|
|
441
|
+ // oldStreamingStatus.
|
|
442
|
+ if (!('videoType' in this.streamingStatusMap)) {
|
|
443
|
+ this.streamingStatusMap.videoType = this.track.getVideoType();
|
|
444
|
+ }
|
|
445
|
+ }
|
|
446
|
+ this._changeStreamingStatus(newState);
|
|
447
|
+ }
|
|
448
|
+
|
|
449
|
+ /**
|
|
450
|
+ * Computes the duration of the current streaming status for the track (i.e. 15 seconds in the INTERRUPTED state)
|
|
451
|
+ * and sends a track streaming status event.
|
|
452
|
+ * @param nowMs - The current time (in millis).
|
|
453
|
+ */
|
|
454
|
+ maybeSendTrackStreamingStatusEvent(nowMs: number): void {
|
|
455
|
+ const trackStreamingStatus = this.streamingStatusMap;
|
|
456
|
+
|
|
457
|
+ if (trackStreamingStatus
|
|
458
|
+ && 'startedMs' in trackStreamingStatus
|
|
459
|
+ && 'videoType' in trackStreamingStatus
|
|
460
|
+ && 'streamingStatus' in trackStreamingStatus
|
|
461
|
+ && 'p2p' in trackStreamingStatus) {
|
|
462
|
+ trackStreamingStatus.value = nowMs - trackStreamingStatus.startedMs;
|
|
463
|
+ Statistics.sendAnalytics(createTrackStreamingStatusEvent(trackStreamingStatus));
|
|
464
|
+ }
|
|
465
|
+ }
|
|
466
|
+
|
|
467
|
+ /**
|
|
468
|
+ * On change in forwarded sources set check all leaving and entering track to change their corresponding statuses.
|
|
469
|
+ *
|
|
470
|
+ * @param leavingForwardedSources - The array of sourceName leaving forwarded sources.
|
|
471
|
+ * @param enteringForwardedSources - The array of sourceName entering forwarded sources.
|
|
472
|
+ * @param timestamp - The time in millis
|
|
473
|
+ * @private
|
|
474
|
+ */
|
|
475
|
+ onForwardedSourcesChanged(
|
|
476
|
+ leavingForwardedSources: string[] = [],
|
|
477
|
+ enteringForwardedSources: string[] = [],
|
|
478
|
+ timestamp: number): void {
|
|
479
|
+
|
|
480
|
+ const sourceName = this.track.getSourceName();
|
|
481
|
+
|
|
482
|
+ logger.debug(`Fowarded sources changed leaving=${leavingForwardedSources}, entering=${
|
|
483
|
+ enteringForwardedSources} at ${timestamp}`);
|
|
484
|
+
|
|
485
|
+ // If the browser doesn't fire the mute/onmute events when the remote peer stops/starts sending media,
|
|
486
|
+ // calculate the streaming status for all the tracks since it won't get triggered automatically on the track
|
|
487
|
+ // that has started/stopped receiving media.
|
|
488
|
+ if (!browser.supportsVideoMuteOnConnInterrupted()) {
|
|
489
|
+ this.figureOutStreamingStatus();
|
|
490
|
+ }
|
|
491
|
+
|
|
492
|
+ if (leavingForwardedSources.includes(sourceName)) {
|
|
493
|
+ this.track._clearEnteredForwardedSourcesTimestamp();
|
|
494
|
+ this._clearRestoringTimer();
|
|
495
|
+ browser.supportsVideoMuteOnConnInterrupted() && this.figureOutStreamingStatus();
|
|
496
|
+ }
|
|
497
|
+
|
|
498
|
+ if (enteringForwardedSources.includes(sourceName)) {
|
|
499
|
+ // store the timestamp this track is entering forwarded sources
|
|
500
|
+ this.track._setEnteredForwardedSourcesTimestamp(timestamp);
|
|
501
|
+ browser.supportsVideoMuteOnConnInterrupted() && this.figureOutStreamingStatus();
|
|
502
|
+ }
|
|
503
|
+ }
|
|
504
|
+
|
|
505
|
+ /**
|
|
506
|
+ * Clears the restoring timer for video track and the timestamp for entering forwarded sources.
|
|
507
|
+ */
|
|
508
|
+ _clearRestoringTimer(): void {
|
|
509
|
+ const rTimer = this.restoringTimer;
|
|
510
|
+
|
|
511
|
+ if (rTimer) {
|
|
512
|
+ clearTimeout(rTimer);
|
|
513
|
+ this.restoringTimer = null;
|
|
514
|
+ }
|
|
515
|
+ }
|
|
516
|
+
|
|
517
|
+ /**
|
|
518
|
+ * Checks whether a track had stayed enough in restoring state, compares current time and the time the track
|
|
519
|
+ * entered in forwarded sources. If it hasn't timedout and there is no timer added, add new timer in order to give
|
|
520
|
+ * it more time to become active or mark it as interrupted on next check.
|
|
521
|
+ *
|
|
522
|
+ * @returns <tt>true</tt> if the track was in restoring state more than the timeout
|
|
523
|
+ * ({@link DEFAULT_RESTORING_TIMEOUT}.) in order to set its status to interrupted.
|
|
524
|
+ * @private
|
|
525
|
+ */
|
|
526
|
+ _isRestoringTimedout(): boolean {
|
|
527
|
+ const enteredForwardedSourcesTimestamp = this.track._getEnteredForwardedSourcesTimestamp();
|
|
528
|
+
|
|
529
|
+ if (enteredForwardedSourcesTimestamp
|
|
530
|
+ && (Date.now() - enteredForwardedSourcesTimestamp) >= DEFAULT_RESTORING_TIMEOUT) {
|
|
531
|
+ return true;
|
|
532
|
+ }
|
|
533
|
+
|
|
534
|
+ // still haven't reached timeout, if there is no timer scheduled, schedule one so we can track the restoring
|
|
535
|
+ // state and change it after reaching the timeout
|
|
536
|
+ const rTimer = this.restoringTimer;
|
|
537
|
+
|
|
538
|
+ if (!rTimer) {
|
|
539
|
+ this.restoringTimer = setTimeout(() => this.figureOutStreamingStatus(), DEFAULT_RESTORING_TIMEOUT);
|
|
540
|
+ }
|
|
541
|
+
|
|
542
|
+ return false;
|
|
543
|
+ }
|
|
544
|
+
|
|
545
|
+ /** Checks whether a track is the current track. */
|
|
546
|
+ _isCurrentTrack(track: JitsiRemoteTrack): boolean {
|
|
547
|
+ return track.getSourceName() === this.track.getSourceName();
|
|
548
|
+ }
|
|
549
|
+
|
|
550
|
+ /**
|
|
551
|
+ * Sends a last/final track streaming status event for the track of the user that left the conference.
|
|
552
|
+ * @param id - The id of the participant that left the conference.
|
|
553
|
+ */
|
|
554
|
+ onUserLeft(id: string): void {
|
|
555
|
+ if (this.track.getParticipantId() === id) {
|
|
556
|
+ this.maybeSendTrackStreamingStatusEvent(Date.now());
|
|
557
|
+ this.streamingStatusMap = {};
|
|
558
|
+ }
|
|
559
|
+ }
|
|
560
|
+
|
|
561
|
+ /**
|
|
562
|
+ * Handles RTC 'onmute' event for the video track.
|
|
563
|
+ *
|
|
564
|
+ * @param track - The video track for which 'onmute' event will be processed.
|
|
565
|
+ */
|
|
566
|
+ onTrackRtcMuted(track: JitsiRemoteTrack): void {
|
|
567
|
+ if (!this._isCurrentTrack(track)) {
|
|
568
|
+ return;
|
|
569
|
+ }
|
|
570
|
+
|
|
571
|
+ const sourceName = track.getSourceName();
|
|
572
|
+
|
|
573
|
+ logger.debug(`Detector track RTC muted: ${sourceName}`, Date.now());
|
|
574
|
+
|
|
575
|
+ this.rtcMutedTimestamp = Date.now();
|
|
576
|
+ if (!track.isMuted()) {
|
|
577
|
+ // If the user is not muted according to the signalling we'll give it some time, before the streaming
|
|
578
|
+ // interrupted event is triggered.
|
|
579
|
+ this.clearTimeout();
|
|
580
|
+
|
|
581
|
+ // The timeout is reduced when track is not in the forwarded sources
|
|
582
|
+ const timeout = this._getVideoFrozenTimeout();
|
|
583
|
+
|
|
584
|
+ this.trackTimer = window.setTimeout(() => {
|
|
585
|
+ logger.debug(`Set RTC mute timeout for: ${sourceName} of ${timeout} ms`);
|
|
586
|
+ this.clearTimeout();
|
|
587
|
+ this.figureOutStreamingStatus();
|
|
588
|
+ }, timeout);
|
|
589
|
+ }
|
|
590
|
+ }
|
|
591
|
+
|
|
592
|
+ /**
|
|
593
|
+ * Handles RTC 'onunmute' event for the video track.
|
|
594
|
+ *
|
|
595
|
+ * @param track - The video track for which 'onunmute' event will be processed.
|
|
596
|
+ */
|
|
597
|
+ onTrackRtcUnmuted(track: JitsiRemoteTrack): void {
|
|
598
|
+ if (!this._isCurrentTrack(track)) {
|
|
599
|
+ return;
|
|
600
|
+ }
|
|
601
|
+
|
|
602
|
+ const sourceName = this.track.getSourceName();
|
|
603
|
+
|
|
604
|
+ logger.debug(`Detector track RTC unmuted: ${sourceName}`, Date.now());
|
|
605
|
+
|
|
606
|
+ this.clearTimeout();
|
|
607
|
+ this.clearRtcMutedTimestamp();
|
|
608
|
+
|
|
609
|
+ this.figureOutStreamingStatus();
|
|
610
|
+ }
|
|
611
|
+
|
|
612
|
+ /**
|
|
613
|
+ * Here the signalling "mute"/"unmute" events are processed.
|
|
614
|
+ *
|
|
615
|
+ * @param track - The remote video track for which the signalling mute/unmute event will be
|
|
616
|
+ * processed.
|
|
617
|
+ */
|
|
618
|
+ onSignallingMuteChanged(track: JitsiRemoteTrack): void {
|
|
619
|
+ if (!this._isCurrentTrack(track)) {
|
|
620
|
+ return;
|
|
621
|
+ }
|
|
622
|
+
|
|
623
|
+ const sourceName = this.track.getSourceName();
|
|
624
|
+
|
|
625
|
+ logger.debug(`Detector on track signalling mute changed: ${sourceName}`, track.isMuted());
|
|
626
|
+
|
|
627
|
+ this.figureOutStreamingStatus();
|
|
628
|
+ }
|
|
629
|
+
|
|
630
|
+ /**
|
|
631
|
+ * Sends a track streaming status event as a result of the video type changing.
|
|
632
|
+ * @deprecated this will go away with full multiple streams support
|
|
633
|
+ * @param type - The video type.
|
|
634
|
+ */
|
|
635
|
+ onTrackVideoTypeChanged(type: VideoType): void {
|
|
636
|
+ const nowMs = Date.now();
|
|
637
|
+
|
|
638
|
+ this.maybeSendTrackStreamingStatusEvent(nowMs);
|
|
639
|
+
|
|
640
|
+ this.streamingStatusMap = {
|
|
641
|
+ ...this.streamingStatusMap || {},
|
|
642
|
+ videoType: type,
|
|
643
|
+ startedMs: nowMs
|
|
644
|
+ };
|
|
645
|
+ }
|
|
646
|
+}
|
|
647
|
+
|
|
648
|
+export default TrackStreamingStatusImpl;
|