You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

TrackStreamingStatus.ts 25KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633
  1. import { getLogger } from '@jitsi/logger';
  2. import { JitsiConferenceEvents } from '../../JitsiConferenceEvents';
  3. import * as JitsiTrackEvents from '../../JitsiTrackEvents';
  4. import RTCEvents from '../../service/RTC/RTCEvents';
  5. import { VideoType } from '../../service/RTC/VideoType';
  6. import { createTrackStreamingStatusEvent } from '../../service/statistics/AnalyticsEvents';
  7. import JitsiRemoteTrack from '../RTC/JitsiRemoteTrack';
  8. import RTC from '../RTC/RTC';
  9. import browser from '../browser';
  10. import Statistics from '../statistics/statistics';
/**
 * Track streaming statuses emitted via JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED.
 */
export enum TrackStreamingStatus {
    /**
     * Status indicating that streaming is currently active.
     */
    ACTIVE = 'active',

    /**
     * Status indicating that streaming is currently inactive.
     * Inactive means the streaming was stopped on purpose from the bridge, like exiting forwarded sources or
     * adaptivity decided to drop video because of not enough bandwidth.
     */
    INACTIVE = 'inactive',

    /**
     * Status indicating that streaming is currently interrupted.
     */
    INTERRUPTED = 'interrupted',

    /**
     * Status indicating that streaming is currently restoring.
     */
    RESTORING = 'restoring',
}
/**
 * Record describing the current streaming status period, accumulated for analytics
 * (see {@link TrackStreamingStatusImpl.maybeSendTrackStreamingStatusEvent}).
 */
type StreamingStatusMap = {
    // Whether the conference was in P2P mode when this status period started.
    p2p?: boolean;

    // Timestamp (Date.now()) when this status period started.
    startedMs?: number;

    // The TrackStreamingStatus value that was in effect for this period.
    streamingStatus?: string;

    // Duration of the period in milliseconds, computed when the analytics event is sent.
    value?: number;

    // The video type of the track during this period (camera/desktop).
    videoType?: VideoType;
};
const logger = getLogger(__filename);

/**
 * Default value of 500 milliseconds for {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
 */
const DEFAULT_NOT_IN_FORWARDED_SOURCES_TIMEOUT = 500;

/**
 * Default value of 2500 milliseconds for {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
 */
const DEFAULT_P2P_RTC_MUTE_TIMEOUT = 2500;

/**
 * Default value of 10000 milliseconds for {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
 */
const DEFAULT_RTC_MUTE_TIMEOUT = 10000;

/**
 * The time to wait for a track to be restored. A track which was out of forwarded sources should be inactive and when
 * entering forwarded sources it becomes restoring, and when data is received from the bridge it will become active.
 * But if no data is received for some time we set the status of that track's streaming to interrupted.
 */
const DEFAULT_RESTORING_TIMEOUT = 10000;
/**
 * Class is responsible for emitting JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED events.
 */
export class TrackStreamingStatusImpl {
    rtc: RTC;
    conference: any; // TODO: needs JitsiConference type
    track: JitsiRemoteTrack;

    /** This holds the timeout callback ID scheduled using window.setTimeout. */
    trackTimer: number | null;

    /**
     * If video track frozen detection through RTC mute event is supported, we wait some time until video track is
     * considered frozen. But because when the track falls out of forwarded sources it is expected for the video to
     * freeze this timeout must be significantly reduced in "out of forwarded sources" case.
     *
     * Basically this value is used instead of {@link rtcMuteTimeout} when track is not in forwarded sources.
     */
    outOfForwardedSourcesTimeout: number;

    /**
     * How long we are going to wait for the corresponding signaling mute event after the RTC video track muted
     * event is fired on the Media stream, before the connection interrupted is fired. The default value is
     * {@link DEFAULT_P2P_RTC_MUTE_TIMEOUT}.
     */
    p2pRtcMuteTimeout: number;

    /**
     * How long we're going to wait after the RTC video track muted event for the corresponding signalling mute
     * event, before the connection interrupted is fired. The default value is {@link DEFAULT_RTC_MUTE_TIMEOUT}.
     *
     * @returns amount of time in milliseconds
     */
    rtcMuteTimeout: number;

    /**
     * This holds a timestamp indicating when remote video track was RTC muted. The purpose of storing the
     * timestamp is to avoid the transition to disconnected status in case of legitimate video mute operation where
     * the signalling video muted event can arrive shortly after RTC muted event.
     *
     * The timestamp is measured in milliseconds obtained with <tt>Date.now()</tt>.
     *
     * FIXME merge this logic with NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the event
     * to the remote track and allowing to set different timeout for local and remote tracks.
     */
    rtcMutedTimestamp: number | null;

    /** This holds the restoring timeout callback ID scheduled using window.setTimeout. */
    restoringTimer: ReturnType<typeof setTimeout> | null;

    /**
     * This holds the current streaming status (along with all the internal events that happen while in that
     * state).
     *
     * The goal is to send this information to the analytics backend for post-mortem analysis.
     */
    streamingStatusMap: StreamingStatusMap;

    // Bound event handler references, stored so they can be removed in dispose().
    _onP2PStatus: () => void;
    _onUserLeft: () => void;
    _onTrackRtcMuted: () => void;
    _onTrackRtcUnmuted: () => void;
    _onSignallingMuteChanged: () => void;
    _onTrackVideoTypeChanged: () => void;
    _onLastNValueChanged: () => void;
    _onForwardedSourcesChanged: () => void;

    /* eslint-disable max-params */
    /**
     * Calculates the new {@link TrackStreamingStatus} based on the values given for some specific remote track. It is
     * assumed that the conference is currently in the JVB mode (in contrary to the P2P mode)
     * @param isInForwardedSources - indicates whether the track is in the forwarded sources set. When set to
     * false it means that JVB is not sending any video for the track.
     * @param isRestoringTimedout - if true it means that the track has been outside of forwarded sources too
     * long to be considered {@link TrackStreamingStatus.RESTORING}.
     * @param isVideoMuted - true if the track is video muted and we should not expect to receive any video.
     * @param isVideoTrackFrozen - if the current browser supports video frozen detection then it will be set to
     * true when the video track is frozen. If the current browser does not support frozen detection then it's always
     * false.
     * @return {TrackStreamingStatus} the new streaming status for the track for whom the values above were provided.
     * @private
     */
    static _getNewStateForJvbMode(
            isInForwardedSources: boolean,
            isRestoringTimedout: boolean,
            isVideoMuted: boolean,
            isVideoTrackFrozen: boolean): TrackStreamingStatus {
        // We are currently not checking the endpoint connection status received from the JVB.
        if (isVideoMuted) {
            // If the connection is active according to JVB and the track is video muted there is no way for the
            // connection to be inactive, because the detection logic below only makes sense for video.
            return TrackStreamingStatus.ACTIVE;
        }

        // Logic when isVideoTrackFrozen is supported
        if (browser.supportsVideoMuteOnConnInterrupted()) {
            if (!isVideoTrackFrozen) {
                // If the video is playing we're good
                return TrackStreamingStatus.ACTIVE;
            } else if (isInForwardedSources) {
                return isRestoringTimedout ? TrackStreamingStatus.INTERRUPTED : TrackStreamingStatus.RESTORING;
            }

            return TrackStreamingStatus.INACTIVE;
        }

        // Because this browser is incapable of detecting frozen video we must rely on the forwarded sources value
        return isInForwardedSources ? TrackStreamingStatus.ACTIVE : TrackStreamingStatus.INACTIVE;
    }
    /* eslint-enable max-params */

    /**
     * In P2P mode we don't care about any values coming from the JVB and the streaming status can be only active or
     * interrupted.
     * @param isVideoMuted - true if video muted
     * @param isVideoTrackFrozen - true if the video track for the remote track is currently frozen. If the
     * current browser does not support video frozen detection then it's always false.
     * @return {TrackStreamingStatus}
     * @private
     */
    static _getNewStateForP2PMode(isVideoMuted: boolean, isVideoTrackFrozen: boolean): TrackStreamingStatus {
        if (!browser.supportsVideoMuteOnConnInterrupted()) {
            // There's no way to detect problems in P2P when there's no video track frozen detection...
            return TrackStreamingStatus.ACTIVE;
        }

        return isVideoMuted || !isVideoTrackFrozen
            ? TrackStreamingStatus.ACTIVE : TrackStreamingStatus.INTERRUPTED;
    }

    /**
     * Creates new instance of <tt>TrackStreamingStatus</tt>.
     *
     * @constructor
     * @param rtc - the RTC service instance
     * @param conference - parent conference instance // TODO: Needs JitsiConference Type
     * @param track - the remote track to monitor
     * @param {Object} options
     * @param {number} [options.p2pRtcMuteTimeout=2500] custom value for
     * {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
     * @param {number} [options.rtcMuteTimeout=10000] custom value for
     * {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
     * @param {number} [options.outOfForwardedSourcesTimeout=500] custom value for
     * {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
     */
    constructor(rtc: RTC, conference: any, track: JitsiRemoteTrack, options: {
        outOfForwardedSourcesTimeout: number;
        p2pRtcMuteTimeout: number;
        rtcMuteTimeout: number;
    }) {
        this.rtc = rtc;
        this.conference = conference;
        this.track = track;
        this.restoringTimer = null;
        this.rtcMutedTimestamp = null;
        this.streamingStatusMap = {};
        this.trackTimer = null;

        // Fall back to the defaults when the corresponding option is not a number.
        this.outOfForwardedSourcesTimeout = typeof options.outOfForwardedSourcesTimeout === 'number'
            ? options.outOfForwardedSourcesTimeout : DEFAULT_NOT_IN_FORWARDED_SOURCES_TIMEOUT;
        this.p2pRtcMuteTimeout = typeof options.p2pRtcMuteTimeout === 'number'
            ? options.p2pRtcMuteTimeout : DEFAULT_P2P_RTC_MUTE_TIMEOUT;
        this.rtcMuteTimeout = typeof options.rtcMuteTimeout === 'number'
            ? options.rtcMuteTimeout : DEFAULT_RTC_MUTE_TIMEOUT;
        logger.info(`RtcMuteTimeout set to: ${this.rtcMuteTimeout}`);
    }

    /**
     * Gets the video frozen timeout for the given source name.
     * @return how long are we going to wait since RTC video muted event, before a video track is considered
     * frozen.
     * @private
     */
    _getVideoFrozenTimeout(): number {
        const sourceName = this.track.getSourceName();

        // Full timeout while the track is in forwarded sources; otherwise the P2P timeout when P2P is active,
        // or the much shorter out-of-forwarded-sources timeout (freezing is expected in that case).
        return this.rtc.isInForwardedSources(sourceName)
            ? this.rtcMuteTimeout
            : this.conference.isP2PActive() ? this.p2pRtcMuteTimeout : this.outOfForwardedSourcesTimeout;
    }

    /**
     * Initializes <tt>TrackStreamingStatus</tt> and binds required event listeners.
     */
    init(): void {
        // Handles P2P status changes
        this._onP2PStatus = this.figureOutStreamingStatus.bind(this);
        this.conference.on(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);

        // Used to send analytics events for the participant that left the call.
        this._onUserLeft = this.onUserLeft.bind(this);
        this.conference.on(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);

        // On some browsers MediaStreamTrack trigger "onmute"/"onunmute" events for video type tracks when they stop
        // receiving data which is often a sign that remote user is having connectivity issues.
        if (browser.supportsVideoMuteOnConnInterrupted()) {
            this._onTrackRtcMuted = this.onTrackRtcMuted.bind(this);
            this.rtc.addListener(RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);

            this._onTrackRtcUnmuted = this.onTrackRtcUnmuted.bind(this);
            this.rtc.addListener(RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);

            // Listener which will be bound to JitsiRemoteTrack to listen for signalling mute/unmute events.
            this._onSignallingMuteChanged = this.onSignallingMuteChanged.bind(this);
            this.track.on(JitsiTrackEvents.TRACK_MUTE_CHANGED, this._onSignallingMuteChanged);

            // Used to send an analytics event when the video type changes.
            this._onTrackVideoTypeChanged = this.onTrackVideoTypeChanged.bind(this);
            this.track.on(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, this._onTrackVideoTypeChanged);
        }

        this._onForwardedSourcesChanged = this.onForwardedSourcesChanged.bind(this);
        this.conference.on(JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED, this._onForwardedSourcesChanged);

        this._onLastNValueChanged = this.figureOutStreamingStatus.bind(this);
        this.rtc.on(RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);
    }

    /**
     * Removes all event listeners and disposes of all resources held by this instance.
     */
    dispose(): void {
        if (browser.supportsVideoMuteOnConnInterrupted()) {
            this.rtc.removeListener(RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);
            this.rtc.removeListener(RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);
            this.track.off(JitsiTrackEvents.TRACK_MUTE_CHANGED, this._onSignallingMuteChanged);
            this.track.off(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, this._onTrackVideoTypeChanged);
        }

        this.conference.off(JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED, this._onForwardedSourcesChanged);
        this.conference.off(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);
        this.conference.off(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);
        this.rtc.removeListener(RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);

        // Clear any pending timers, flush the final analytics event and compute the status one last time.
        this.clearTimeout();
        this.clearRtcMutedTimestamp();
        this.maybeSendTrackStreamingStatusEvent(Date.now());
        this.figureOutStreamingStatus();
    }

    /**
     * Changes streaming status, emitting TRACK_STREAMING_STATUS_CHANGED only on an actual transition.
     * @param newStatus
     */
    _changeStreamingStatus(newStatus: TrackStreamingStatus): void {
        if (this.track.getTrackStreamingStatus() !== newStatus) {
            const sourceName = this.track.getSourceName();

            this.track._setTrackStreamingStatus(newStatus);

            logger.debug(`Emit track streaming status(${Date.now()}) ${sourceName}: ${newStatus}`);

            // It's common for the event listeners to access the JitsiRemoteTrack. Thus pass it as a parameter here.
            this.track.emit(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED, this.track, newStatus);
        }
    }

    /**
     * Reset the postponed "streaming interrupted" event which was previously scheduled as a timeout on RTC 'onmute'
     * event.
     */
    clearTimeout(): void {
        if (this.trackTimer) {
            window.clearTimeout(this.trackTimer);
            this.trackTimer = null;
        }
    }

    /**
     * Clears the timestamp of the RTC muted event for remote video track.
     */
    clearRtcMutedTimestamp(): void {
        this.rtcMutedTimestamp = null;
    }

    /**
     * Checks if track is considered frozen.
     * @return <tt>true</tt> if the video has frozen or <tt>false</tt> when it's either not considered frozen
     * (yet) or if freeze detection is not supported by the current browser.
     *
     * FIXME merge this logic with NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the event to
     * the remote track and allowing to set different timeout for local and remote tracks.
     */
    isVideoTrackFrozen(): boolean {
        if (!browser.supportsVideoMuteOnConnInterrupted()) {
            return false;
        }

        const isVideoRTCMuted = this.track.isWebRTCTrackMuted();
        const rtcMutedTimestamp = this.rtcMutedTimestamp;
        const timeout = this._getVideoFrozenTimeout();

        // Frozen only when the WebRTC track is muted AND it has stayed muted at least `timeout` ms.
        return isVideoRTCMuted && typeof rtcMutedTimestamp === 'number' && (Date.now() - rtcMutedTimestamp) >= timeout;
    }

    /**
     * Figures out (and updates) the current streaming status for the track identified by the source name.
     */
    figureOutStreamingStatus(): void {
        const sourceName = this.track.getSourceName();
        const inP2PMode = this.conference.isP2PActive();
        const isRestoringTimedOut = this._isRestoringTimedout();
        const audioOnlyMode = this.conference.getLastN() === 0;

        // NOTE Overriding videoMuted to true for audioOnlyMode should disable any detection based on video playback
        // or forwarded sources.
        const isVideoMuted = this.track.isMuted() || audioOnlyMode;
        const isVideoTrackFrozen = this.isVideoTrackFrozen();
        const isInForwardedSources = this.rtc.isInForwardedSources(sourceName);
        const newState
            = inP2PMode
                ? TrackStreamingStatusImpl._getNewStateForP2PMode(
                    isVideoMuted,
                    isVideoTrackFrozen)
                : TrackStreamingStatusImpl._getNewStateForJvbMode(
                    isInForwardedSources,
                    isRestoringTimedOut,
                    isVideoMuted,
                    isVideoTrackFrozen);

        // If the new state is not restoring, clear timers and timestamps that we use to track the restoring state.
        if (newState !== TrackStreamingStatus.RESTORING) {
            this._clearRestoringTimer();
        }

        logger.debug(
            `Figure out conn status for ${sourceName}, is video muted: ${
                isVideoMuted} video track frozen: ${
                isVideoTrackFrozen} p2p mode: ${
                inP2PMode} is in forwarded sources: ${
                isInForwardedSources} currentStatus => newStatus: ${
                this.track.getTrackStreamingStatus()} => ${newState}`);

        const oldStreamingStatus = this.streamingStatusMap || {};

        // Send an analytics event (guard on either the p2p flag or the streaming status has changed since the last
        // time this code block run).
        if (!('p2p' in oldStreamingStatus)
            || !('streamingStatus' in oldStreamingStatus)
            || oldStreamingStatus.p2p !== inP2PMode
            || oldStreamingStatus.streamingStatus !== newState) {
            const nowMs = Date.now();

            this.maybeSendTrackStreamingStatusEvent(nowMs);

            // Start a new status period from now, preserving any previously recorded fields (e.g. videoType).
            this.streamingStatusMap = {
                ...oldStreamingStatus,
                streamingStatus: newState,
                p2p: inP2PMode,
                startedMs: nowMs
            };

            // sometimes (always?) we're late to hook the TRACK_VIDEOTYPE_CHANGED event and the video type is not in
            // oldStreamingStatus.
            if (!('videoType' in this.streamingStatusMap)) {
                this.streamingStatusMap.videoType = this.track.getVideoType();
            }
        }
        this._changeStreamingStatus(newState);
    }

    /**
     * Computes the duration of the current streaming status for the track (i.e. 15 seconds in the INTERRUPTED state)
     * and sends a track streaming status event.
     * @param nowMs - The current time (in millis).
     */
    maybeSendTrackStreamingStatusEvent(nowMs: number): void {
        const trackStreamingStatus = this.streamingStatusMap;

        // Only send the event when the record is complete; partially filled records are skipped.
        if (trackStreamingStatus
            && 'startedMs' in trackStreamingStatus
            && 'videoType' in trackStreamingStatus
            && 'streamingStatus' in trackStreamingStatus
            && 'p2p' in trackStreamingStatus) {
            trackStreamingStatus.value = nowMs - trackStreamingStatus.startedMs;
            Statistics.sendAnalytics(createTrackStreamingStatusEvent(trackStreamingStatus));
        }
    }

    /**
     * On change in forwarded sources set check all leaving and entering tracks to change their corresponding statuses.
     *
     * @param leavingForwardedSources - The array of sourceName leaving forwarded sources.
     * @param enteringForwardedSources - The array of sourceName entering forwarded sources.
     * @param timestamp - The time in millis
     * @private
     */
    onForwardedSourcesChanged(
            leavingForwardedSources: string[] = [],
            enteringForwardedSources: string[] = [],
            timestamp: number): void {
        const sourceName = this.track.getSourceName();

        // If the browser doesn't fire the mute/onmute events when the remote peer stops/starts sending media,
        // calculate the streaming status for all the tracks since it won't get triggered automatically on the track
        // that has started/stopped receiving media.
        if (!browser.supportsVideoMuteOnConnInterrupted()) {
            this.figureOutStreamingStatus();
        }

        if (leavingForwardedSources.includes(sourceName)) {
            this.track._clearEnteredForwardedSourcesTimestamp();
            this._clearRestoringTimer();
            browser.supportsVideoMuteOnConnInterrupted() && this.figureOutStreamingStatus();
        }

        if (enteringForwardedSources.includes(sourceName)) {
            // store the timestamp this track is entering forwarded sources
            this.track._setEnteredForwardedSourcesTimestamp(timestamp);
            browser.supportsVideoMuteOnConnInterrupted() && this.figureOutStreamingStatus();
        }
    }

    /**
     * Clears the restoring timer for video track and the timestamp for entering forwarded sources.
     */
    _clearRestoringTimer(): void {
        const rTimer = this.restoringTimer;

        if (rTimer) {
            clearTimeout(rTimer);
            this.restoringTimer = null;
        }
    }

    /**
     * Checks whether a track had stayed enough in restoring state, compares current time and the time the track
     * entered in forwarded sources. If it hasn't timed out and there is no timer added, add new timer in order to give
     * it more time to become active or mark it as interrupted on next check.
     *
     * @returns <tt>true</tt> if the track was in restoring state more than the timeout
     * ({@link DEFAULT_RESTORING_TIMEOUT}.) in order to set its status to interrupted.
     * @private
     */
    _isRestoringTimedout(): boolean {
        const enteredForwardedSourcesTimestamp = this.track._getEnteredForwardedSourcesTimestamp();

        if (enteredForwardedSourcesTimestamp
            && (Date.now() - enteredForwardedSourcesTimestamp) >= DEFAULT_RESTORING_TIMEOUT) {
            return true;
        }

        // still haven't reached timeout, if there is no timer scheduled, schedule one so we can track the restoring
        // state and change it after reaching the timeout
        const rTimer = this.restoringTimer;

        if (!rTimer) {
            this.restoringTimer = setTimeout(() => this.figureOutStreamingStatus(), DEFAULT_RESTORING_TIMEOUT);
        }

        return false;
    }

    /**
     * Checks whether a track is the current track, by comparing source names.
     */
    _isCurrentTrack(track: JitsiRemoteTrack): boolean {
        return track.getSourceName() === this.track.getSourceName();
    }

    /**
     * Sends a last/final track streaming status event for the track of the user that left the conference.
     * @param id - The id of the participant that left the conference.
     */
    onUserLeft(id: string): void {
        if (this.track.getParticipantId() === id) {
            this.maybeSendTrackStreamingStatusEvent(Date.now());
            this.streamingStatusMap = {};
        }
    }

    /**
     * Handles RTC 'onmute' event for the video track.
     *
     * @param track - The video track for which 'onmute' event will be processed.
     */
    onTrackRtcMuted(track: JitsiRemoteTrack): void {
        if (!this._isCurrentTrack(track)) {
            return;
        }

        const sourceName = track.getSourceName();

        logger.debug(`Detector track RTC muted: ${sourceName}`, Date.now());

        this.rtcMutedTimestamp = Date.now();
        if (!track.isMuted()) {
            // If the user is not muted according to the signalling we'll give it some time, before the streaming
            // interrupted event is triggered.
            this.clearTimeout();

            // The timeout is reduced when track is not in the forwarded sources
            const timeout = this._getVideoFrozenTimeout();

            this.trackTimer = window.setTimeout(() => {
                logger.debug(`Set track RTC muted for: ${sourceName} after the timeout of ${timeout} ms`);
                this.clearTimeout();
                this.figureOutStreamingStatus();
            }, timeout);
        }
    }

    /**
     * Handles RTC 'onunmute' event for the video track.
     *
     * @param track - The video track for which 'onunmute' event will be processed.
     */
    onTrackRtcUnmuted(track: JitsiRemoteTrack): void {
        if (!this._isCurrentTrack(track)) {
            return;
        }

        const sourceName = this.track.getSourceName();

        logger.debug(`Detector track RTC unmuted: ${sourceName}`, Date.now());

        // Cancel the pending "interrupted" timer and forget the muted timestamp — video is flowing again.
        this.clearTimeout();
        this.clearRtcMutedTimestamp();
        this.figureOutStreamingStatus();
    }

    /**
     * Here the signalling "mute"/"unmute" events are processed.
     *
     * @param track - The remote video track for which the signalling mute/unmute event will be
     * processed.
     */
    onSignallingMuteChanged(track: JitsiRemoteTrack): void {
        if (!this._isCurrentTrack(track)) {
            return;
        }

        this.figureOutStreamingStatus();
    }

    /**
     * Sends a track streaming status event as a result of the video type changing.
     * @deprecated this will go away with full multiple streams support
     * @param type - The video type.
     */
    onTrackVideoTypeChanged(type: VideoType): void {
        const nowMs = Date.now();

        // Flush the previous period, then start a new one with the updated video type.
        this.maybeSendTrackStreamingStatusEvent(nowMs);
        this.streamingStatusMap = {
            ...this.streamingStatusMap || {},
            videoType: type,
            startedMs: nowMs
        };
    }
}

export default TrackStreamingStatusImpl;