
TrackStreamingStatus.ts

import { getLogger } from '@jitsi/logger';

import { JitsiConferenceEvents } from '../../JitsiConferenceEvents';
import * as JitsiTrackEvents from '../../JitsiTrackEvents';
import RTCEvents from '../../service/RTC/RTCEvents';
import { createTrackStreamingStatusEvent } from '../../service/statistics/AnalyticsEvents';
import JitsiConference from '../../types/hand-crafted/JitsiConference';
import JitsiRemoteTrack from '../../types/hand-crafted/modules/RTC/JitsiRemoteTrack';
import RTC from '../../types/hand-crafted/modules/RTC/RTC';
import { VideoType } from '../../types/hand-crafted/service/RTC/VideoType';
import browser from '../browser';
import Statistics from '../statistics/statistics';

/** Track streaming statuses. */
export enum TrackStreamingStatus {
    /**
     * Status indicating that streaming is currently active.
     */
    ACTIVE = 'active',

    /**
     * Status indicating that streaming is currently inactive.
     * Inactive means the streaming was stopped on purpose by the bridge, e.g. the track left the forwarded sources
     * set or adaptivity decided to drop the video because there is not enough bandwidth.
     */
    INACTIVE = 'inactive',

    /**
     * Status indicating that streaming is currently interrupted.
     */
    INTERRUPTED = 'interrupted',

    /**
     * Status indicating that streaming is currently restoring.
     */
    RESTORING = 'restoring',
}

type StreamingStatusMap = {
    // TODO: Replace this hand crafted VideoType when we convert VideoType.js to Typescript.
    videoType?: VideoType,
    startedMs?: number,
    p2p?: boolean,
    streamingStatus?: string,
    value?: number
};

const logger = getLogger(__filename);

/**
 * Default value of 500 milliseconds for {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
 */
const DEFAULT_NOT_IN_FORWARDED_SOURCES_TIMEOUT = 500;

/**
 * Default value of 2500 milliseconds for {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
 */
const DEFAULT_P2P_RTC_MUTE_TIMEOUT = 2500;

/**
 * Default value of 10000 milliseconds for {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
 */
const DEFAULT_RTC_MUTE_TIMEOUT = 10000;

/**
 * The time to wait for a track to be restored. A track that has fallen out of forwarded sources is considered
 * inactive; when it re-enters forwarded sources it becomes restoring, and once data is received from the bridge it
 * becomes active. If no data is received within this timeout, the track's streaming status is set to interrupted.
 */
const DEFAULT_RESTORING_TIMEOUT = 10000;

/**
 * This class is responsible for emitting JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED events.
 */
export class TrackStreamingStatusImpl {
    rtc: RTC;
    conference: JitsiConference;
    track: JitsiRemoteTrack;

    /** This holds the timeout callback ID scheduled using window.setTimeout. */
    trackTimer: number | null;

    /**
     * If video track frozen detection through the RTC mute event is supported, we wait some time before the video
     * track is considered frozen. But because the video is expected to freeze when the track falls out of forwarded
     * sources, this timeout is significantly reduced in the "out of forwarded sources" case.
     *
     * Basically this value is used instead of {@link rtcMuteTimeout} when the track is not in forwarded sources.
     */
    outOfForwardedSourcesTimeout: number;

    /**
     * How long we are going to wait for the corresponding signalling mute event after the RTC video track muted
     * event is fired on the media stream, before the connection interrupted event is fired. The default value is
     * {@link DEFAULT_P2P_RTC_MUTE_TIMEOUT}.
     */
    p2pRtcMuteTimeout: number;

    /**
     * How long we are going to wait after the RTC video track muted event for the corresponding signalling mute
     * event, before the connection interrupted event is fired. The default value is {@link DEFAULT_RTC_MUTE_TIMEOUT}.
     * The value is the amount of time in milliseconds.
     */
    rtcMuteTimeout: number;

    /**
     * This holds a timestamp indicating when the remote video track was RTC muted. The purpose of storing the
     * timestamp is to avoid the transition to the disconnected status in case of a legitimate video mute operation
     * where the signalling video muted event can arrive shortly after the RTC muted event.
     *
     * The timestamp is measured in milliseconds obtained with <tt>Date.now()</tt>.
     *
     * FIXME merge this logic with the NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the
     * event to the remote track and allowing to set different timeouts for local and remote tracks.
     */
    rtcMutedTimestamp: number | null;

    /** This holds the restoring timeout callback ID scheduled using window.setTimeout. */
    restoringTimer: ReturnType<typeof setTimeout> | null;

    /**
     * This holds the current streaming status (along with all the internal events that happen while in that
     * state).
     *
     * The goal is to send this information to the analytics backend for post-mortem analysis.
     */
    streamingStatusMap: StreamingStatusMap;

    _onP2PStatus: () => void;
    _onUserLeft: () => void;
    _onTrackRtcMuted: () => void;
    _onTrackRtcUnmuted: () => void;
    _onSignallingMuteChanged: () => void;
    _onTrackVideoTypeChanged: () => void;
    _onLastNValueChanged: () => void;
    _onForwardedSourcesChanged: () => void;

    /* eslint-disable max-params*/
    /**
     * Calculates the new {@link TrackStreamingStatus} based on the values given for some specific remote track. It
     * is assumed that the conference is currently in the JVB mode (as opposed to the P2P mode).
     * @param isInForwardedSources - indicates whether the track is in the forwarded sources set. When set to
     * false it means that the JVB is not sending any video for the track.
     * @param isRestoringTimedout - if true it means that the track has been outside of forwarded sources for too
     * long to be considered {@link TrackStreamingStatus.RESTORING}.
     * @param isVideoMuted - true if the track is video muted and we should not expect to receive any video.
     * @param isVideoTrackFrozen - if the current browser supports video frozen detection then it will be set to
     * true when the video track is frozen. If the current browser does not support frozen detection then it's
     * always false.
     * @return {TrackStreamingStatus} the new streaming status for the track for which the values above were provided.
     * @private
     */
    static _getNewStateForJvbMode(
            isInForwardedSources: boolean,
            isRestoringTimedout: boolean,
            isVideoMuted: boolean,
            isVideoTrackFrozen: boolean): TrackStreamingStatus {
        // We are currently not checking the endpoint connection status received from the JVB.
        if (isVideoMuted) {
            // If the connection is active according to the JVB and the track is video muted there is no way for the
            // connection to be inactive, because the detection logic below only makes sense for video.
            return TrackStreamingStatus.ACTIVE;
        }

        // Logic when isVideoTrackFrozen is supported
        if (browser.supportsVideoMuteOnConnInterrupted()) {
            if (!isVideoTrackFrozen) {
                // If the video is playing we're good
                return TrackStreamingStatus.ACTIVE;
            } else if (isInForwardedSources) {
                return isRestoringTimedout ? TrackStreamingStatus.INTERRUPTED : TrackStreamingStatus.RESTORING;
            }

            return TrackStreamingStatus.INACTIVE;
        }

        // Because this browser is incapable of detecting frozen video we must rely on the forwarded sources value
        return isInForwardedSources ? TrackStreamingStatus.ACTIVE : TrackStreamingStatus.INACTIVE;
    }
    /* eslint-enable max-params*/
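
    // Illustrative summary of the JVB-mode decision above (derived from the logic in _getNewStateForJvbMode, not
    // part of the original file):
    //   video muted                                     -> ACTIVE
    //   frozen detection supported:
    //     track not frozen                              -> ACTIVE
    //     frozen, in forwarded sources, not timed out   -> RESTORING
    //     frozen, in forwarded sources, timed out       -> INTERRUPTED
    //     frozen, not in forwarded sources              -> INACTIVE
    //   frozen detection not supported:
    //     in forwarded sources                          -> ACTIVE, otherwise INACTIVE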

    /**
     * In P2P mode we don't care about any values coming from the JVB and the streaming status can be only active or
     * interrupted.
     * @param isVideoMuted - true if video muted
     * @param isVideoTrackFrozen - true if the video track for the remote track is currently frozen. If the
     * current browser does not support video frozen detection then it's always false.
     * @return {TrackStreamingStatus}
     * @private
     */
    static _getNewStateForP2PMode(isVideoMuted: boolean, isVideoTrackFrozen: boolean): TrackStreamingStatus {
        if (!browser.supportsVideoMuteOnConnInterrupted()) {
            // There's no way to detect problems in P2P when there's no video track frozen detection...
            return TrackStreamingStatus.ACTIVE;
        }

        return isVideoMuted || !isVideoTrackFrozen
            ? TrackStreamingStatus.ACTIVE : TrackStreamingStatus.INTERRUPTED;
    }

    /**
     * Creates a new instance of <tt>TrackStreamingStatus</tt>.
     *
     * @constructor
     * @param rtc - the RTC service instance
     * @param conference - parent conference instance
     * @param track - the remote track for which the streaming status is tracked
     * @param {Object} options
     * @param {number} [options.p2pRtcMuteTimeout=2500] custom value for
     * {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
     * @param {number} [options.rtcMuteTimeout=10000] custom value for
     * {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
     * @param {number} [options.outOfForwardedSourcesTimeout=500] custom value for
     * {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
     */
    constructor(rtc: RTC, conference: JitsiConference, track: JitsiRemoteTrack, options: {
        outOfForwardedSourcesTimeout: number,
        p2pRtcMuteTimeout: number,
        rtcMuteTimeout: number
    }) {
        this.rtc = rtc;
        this.conference = conference;
        this.track = track;
        this.restoringTimer = null;
        this.rtcMutedTimestamp = null;
        this.streamingStatusMap = {};
        this.trackTimer = null;
        this.outOfForwardedSourcesTimeout = typeof options.outOfForwardedSourcesTimeout === 'number'
            ? options.outOfForwardedSourcesTimeout : DEFAULT_NOT_IN_FORWARDED_SOURCES_TIMEOUT;
        this.p2pRtcMuteTimeout = typeof options.p2pRtcMuteTimeout === 'number'
            ? options.p2pRtcMuteTimeout : DEFAULT_P2P_RTC_MUTE_TIMEOUT;
        this.rtcMuteTimeout = typeof options.rtcMuteTimeout === 'number'
            ? options.rtcMuteTimeout : DEFAULT_RTC_MUTE_TIMEOUT;
        logger.info(`RtcMuteTimeout set to: ${this.rtcMuteTimeout}`);
    }

    /**
     * Gets the video frozen timeout for the given source name.
     * @return how long we are going to wait after the RTC video muted event before the video track is considered
     * frozen.
     * @private
     */
    _getVideoFrozenTimeout(): number {
        const sourceName = this.track.getSourceName();

        return this.rtc.isInForwardedSources(sourceName)
            ? this.rtcMuteTimeout
            : this.conference.isP2PActive() ? this.p2pRtcMuteTimeout : this.outOfForwardedSourcesTimeout;
    }
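
    // Note (derived from the ternary above, not part of the original file): the full rtcMuteTimeout applies while
    // the track is in forwarded sources; otherwise p2pRtcMuteTimeout applies when P2P is active, and the much
    // shorter outOfForwardedSourcesTimeout applies when the bridge has simply stopped forwarding the source.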

    /**
     * Initializes <tt>TrackStreamingStatus</tt> and binds required event listeners.
     */
    init(): void {
        // Handles P2P status changes
        this._onP2PStatus = this.figureOutStreamingStatus.bind(this);
        this.conference.on(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);

        // Used to send analytics events for the participant that left the call.
        this._onUserLeft = this.onUserLeft.bind(this);
        this.conference.on(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);

        // On some browsers the MediaStreamTrack fires "onmute"/"onunmute" events for video tracks when they stop
        // receiving data, which is often a sign that the remote user is having connectivity issues.
        if (browser.supportsVideoMuteOnConnInterrupted()) {
            this._onTrackRtcMuted = this.onTrackRtcMuted.bind(this);
            this.rtc.addListener(RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);

            this._onTrackRtcUnmuted = this.onTrackRtcUnmuted.bind(this);
            this.rtc.addListener(RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);

            // Listener which will be bound to the JitsiRemoteTrack to listen for signalling mute/unmute events.
            this._onSignallingMuteChanged = this.onSignallingMuteChanged.bind(this);
            this.track.on(JitsiTrackEvents.TRACK_MUTE_CHANGED, this._onSignallingMuteChanged);

            // Used to send an analytics event when the video type changes.
            this._onTrackVideoTypeChanged = this.onTrackVideoTypeChanged.bind(this);
            this.track.on(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, this._onTrackVideoTypeChanged);
        }

        this._onForwardedSourcesChanged = this.onForwardedSourcesChanged.bind(this);
        this.conference.on(JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED, this._onForwardedSourcesChanged);

        this._onLastNValueChanged = this.figureOutStreamingStatus.bind(this);
        this.rtc.on(RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);
    }

    /**
     * Removes all event listeners and disposes of all resources held by this instance.
     */
    dispose(): void {
        if (browser.supportsVideoMuteOnConnInterrupted()) {
            this.rtc.removeListener(RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);
            this.rtc.removeListener(RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);
            this.track.off(JitsiTrackEvents.TRACK_MUTE_CHANGED, this._onSignallingMuteChanged);
        }

        this.conference.off(JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED, this._onForwardedSourcesChanged);
        this.conference.off(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);
        this.conference.off(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);
        this.rtc.removeListener(RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);

        this.clearTimeout();
        this.clearRtcMutedTimestamp();

        this.maybeSendTrackStreamingStatusEvent(Date.now());

        this.figureOutStreamingStatus();
    }

    /**
     * Changes streaming status.
     * @param newStatus
     */
    _changeStreamingStatus(newStatus: TrackStreamingStatus): void {
        if (this.track.getTrackStreamingStatus() !== newStatus) {
            const sourceName = this.track.getSourceName();

            this.track._setTrackStreamingStatus(newStatus);

            logger.debug(`Emit track streaming status(${Date.now()}) ${sourceName}: ${newStatus}`);

            // Log the event on CallStats
            Statistics.sendLog(
                JSON.stringify({
                    id: 'track.streaming.status',
                    track: sourceName,
                    status: newStatus
                }));

            // It's common for the event listeners to access the JitsiRemoteTrack. Thus pass it as a parameter here.
            this.track.emit(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED, this.track, newStatus);
        }
    }

    /**
     * Reset the postponed "streaming interrupted" event which was previously scheduled as a timeout on RTC 'onmute'
     * event.
     */
    clearTimeout(): void {
        if (this.trackTimer) {
            window.clearTimeout(this.trackTimer);
            this.trackTimer = null;
        }
    }

    /**
     * Clears the timestamp of the RTC muted event for remote video track.
     */
    clearRtcMutedTimestamp(): void {
        this.rtcMutedTimestamp = null;
    }

    /**
     * Checks if the track is considered frozen.
     * @return <tt>true</tt> if the video has frozen or <tt>false</tt> when it's either not considered frozen
     * (yet) or if freeze detection is not supported by the current browser.
     *
     * FIXME merge this logic with the NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the
     * event to the remote track and allowing to set different timeouts for local and remote tracks.
     */
    isVideoTrackFrozen(): boolean {
        if (!browser.supportsVideoMuteOnConnInterrupted()) {
            return false;
        }

        const isVideoRTCMuted = this.track.isWebRTCTrackMuted();
        const rtcMutedTimestamp = this.rtcMutedTimestamp;
        const timeout = this._getVideoFrozenTimeout();

        return isVideoRTCMuted && typeof rtcMutedTimestamp === 'number' && (Date.now() - rtcMutedTimestamp) >= timeout;
    }

    /**
     * Figures out (and updates) the current streaming status for the track identified by the source name.
     */
    figureOutStreamingStatus(): void {
        const sourceName = this.track.getSourceName();
        const inP2PMode = this.conference.isP2PActive();
        const isRestoringTimedOut = this._isRestoringTimedout();
        const audioOnlyMode = this.conference.getLastN() === 0;

        // NOTE Overriding videoMuted to true for audioOnlyMode should disable any detection based on video playback
        // or forwarded sources.
        const isVideoMuted = this.track.isMuted() || audioOnlyMode;
        const isVideoTrackFrozen = this.isVideoTrackFrozen();
        const isInForwardedSources = this.rtc.isInForwardedSources(sourceName);
        const newState
            = inP2PMode
                ? TrackStreamingStatusImpl._getNewStateForP2PMode(
                    isVideoMuted,
                    isVideoTrackFrozen)
                : TrackStreamingStatusImpl._getNewStateForJvbMode(
                    isInForwardedSources,
                    isRestoringTimedOut,
                    isVideoMuted,
                    isVideoTrackFrozen);

        // If the new state is not restoring, clear the timers and timestamps that we use to track the restoring
        // state.
        if (newState !== TrackStreamingStatus.RESTORING) {
            this._clearRestoringTimer();
        }

        logger.debug(
            `Figure out conn status for ${sourceName}, is video muted: ${
                isVideoMuted} video track frozen: ${
                isVideoTrackFrozen} p2p mode: ${
                inP2PMode} is in forwarded sources: ${
                isInForwardedSources} currentStatus => newStatus: ${
                this.track.getTrackStreamingStatus()} => ${newState}`);

        const oldStreamingStatus = this.streamingStatusMap || {};

        // Send an analytics event only when either the p2p flag or the streaming status has changed since the last
        // time this code block ran.
        if (!('p2p' in oldStreamingStatus)
            || !('streamingStatus' in oldStreamingStatus)
            || oldStreamingStatus.p2p !== inP2PMode
            || oldStreamingStatus.streamingStatus !== newState) {
            const nowMs = Date.now();

            this.maybeSendTrackStreamingStatusEvent(nowMs);

            this.streamingStatusMap = {
                ...oldStreamingStatus,
                streamingStatus: newState,
                p2p: inP2PMode,
                startedMs: nowMs
            };

            // Sometimes (always?) we're late to hook the TRACK_VIDEOTYPE_CHANGED event and the video type is not in
            // oldStreamingStatus.
            if (!('videoType' in this.streamingStatusMap)) {
                this.streamingStatusMap.videoType = this.track.getVideoType();
            }
        }
        this._changeStreamingStatus(newState);
    }

    /**
     * Computes the duration of the current streaming status for the track (e.g. 15 seconds in the INTERRUPTED
     * state) and sends a track streaming status event.
     * @param nowMs - The current time (in millis).
     */
    maybeSendTrackStreamingStatusEvent(nowMs: number): void {
        const trackStreamingStatus = this.streamingStatusMap;

        if (trackStreamingStatus
            && 'startedMs' in trackStreamingStatus
            && 'videoType' in trackStreamingStatus
            && 'streamingStatus' in trackStreamingStatus
            && 'p2p' in trackStreamingStatus) {
            trackStreamingStatus.value = nowMs - trackStreamingStatus.startedMs;
            Statistics.sendAnalytics(createTrackStreamingStatusEvent(trackStreamingStatus));
        }
    }

    /**
     * On a change in the forwarded sources set, checks all leaving and entering tracks in order to change their
     * corresponding statuses.
     *
     * @param leavingForwardedSources - The array of sourceName leaving forwarded sources.
     * @param enteringForwardedSources - The array of sourceName entering forwarded sources.
     * @param timestamp - The time in millis
     * @private
     */
    onForwardedSourcesChanged(
            leavingForwardedSources: string[] = [],
            enteringForwardedSources: string[] = [],
            timestamp: number): void {
        const sourceName = this.track.getSourceName();

        logger.debug(`Forwarded sources changed leaving=${leavingForwardedSources}, entering=${
            enteringForwardedSources} at ${timestamp}`);

        // If the browser doesn't fire the mute/onmute events when the remote peer stops/starts sending media,
        // calculate the streaming status for all the tracks since it won't get triggered automatically on the track
        // that has started/stopped receiving media.
        if (!browser.supportsVideoMuteOnConnInterrupted()) {
            this.figureOutStreamingStatus();
        }

        if (leavingForwardedSources.includes(sourceName)) {
            this.track._clearEnteredForwardedSourcesTimestamp();
            this._clearRestoringTimer();
            browser.supportsVideoMuteOnConnInterrupted() && this.figureOutStreamingStatus();
        }

        if (enteringForwardedSources.includes(sourceName)) {
            // Store the timestamp at which this track entered forwarded sources.
            this.track._setEnteredForwardedSourcesTimestamp(timestamp);
            browser.supportsVideoMuteOnConnInterrupted() && this.figureOutStreamingStatus();
        }
    }

    /**
     * Clears the restoring timer for the video track.
     */
    _clearRestoringTimer(): void {
        const rTimer = this.restoringTimer;

        if (rTimer) {
            clearTimeout(rTimer);
            this.restoringTimer = null;
        }
    }

    /**
     * Checks whether a track has stayed long enough in the restoring state by comparing the current time with the
     * time the track entered forwarded sources. If it hasn't timed out and there is no timer scheduled, a new timer
     * is added in order to give it more time to become active, or to mark it as interrupted on the next check.
     *
     * @returns <tt>true</tt> if the track has been in the restoring state for longer than the timeout
     * ({@link DEFAULT_RESTORING_TIMEOUT}) and its status should be set to interrupted.
     * @private
     */
    _isRestoringTimedout(): boolean {
        const enteredForwardedSourcesTimestamp = this.track._getEnteredForwardedSourcesTimestamp();

        if (enteredForwardedSourcesTimestamp
            && (Date.now() - enteredForwardedSourcesTimestamp) >= DEFAULT_RESTORING_TIMEOUT) {
            return true;
        }

        // We still haven't reached the timeout. If there is no timer scheduled, schedule one so we can track the
        // restoring state and change it after reaching the timeout.
        const rTimer = this.restoringTimer;

        if (!rTimer) {
            this.restoringTimer = setTimeout(() => this.figureOutStreamingStatus(), DEFAULT_RESTORING_TIMEOUT);
        }

        return false;
    }

    /** Checks whether a track is the current track. */
    _isCurrentTrack(track: JitsiRemoteTrack): boolean {
        return track.getSourceName() === this.track.getSourceName();
    }

    /**
     * Sends a last/final track streaming status event for the track of the user that left the conference.
     * @param id - The id of the participant that left the conference.
     */
    onUserLeft(id: string): void {
        if (this.track.getParticipantId() === id) {
            this.maybeSendTrackStreamingStatusEvent(Date.now());
            this.streamingStatusMap = {};
        }
    }

    /**
     * Handles the RTC 'onmute' event for the video track.
     *
     * @param track - The video track for which the 'onmute' event will be processed.
     */
    onTrackRtcMuted(track: JitsiRemoteTrack): void {
        if (!this._isCurrentTrack(track)) {
            return;
        }

        const sourceName = track.getSourceName();

        logger.debug(`Detector track RTC muted: ${sourceName}`, Date.now());

        this.rtcMutedTimestamp = Date.now();
        if (!track.isMuted()) {
            // If the user is not muted according to the signalling we'll give it some time before the streaming
            // interrupted event is triggered.
            this.clearTimeout();

            // The timeout is reduced when the track is not in the forwarded sources.
            const timeout = this._getVideoFrozenTimeout();

            this.trackTimer = window.setTimeout(() => {
                logger.debug(`Set RTC mute timeout for: ${sourceName} of ${timeout} ms`);
                this.clearTimeout();
                this.figureOutStreamingStatus();
            }, timeout);
        }
    }

    /**
     * Handles RTC 'onunmute' event for the video track.
     *
     * @param track - The video track for which 'onunmute' event will be processed.
     */
    onTrackRtcUnmuted(track: JitsiRemoteTrack): void {
        if (!this._isCurrentTrack(track)) {
            return;
        }

        const sourceName = this.track.getSourceName();

        logger.debug(`Detector track RTC unmuted: ${sourceName}`, Date.now());

        this.clearTimeout();
        this.clearRtcMutedTimestamp();

        this.figureOutStreamingStatus();
    }

    /**
     * Here the signalling "mute"/"unmute" events are processed.
     *
     * @param track - The remote video track for which the signalling mute/unmute event will be
     * processed.
     */
    onSignallingMuteChanged(track: JitsiRemoteTrack): void {
        if (!this._isCurrentTrack(track)) {
            return;
        }

        const sourceName = this.track.getSourceName();

        logger.debug(`Detector on track signalling mute changed: ${sourceName}`, track.isMuted());

        this.figureOutStreamingStatus();
    }

    /**
     * Sends a track streaming status event as a result of the video type changing.
     * @deprecated this will go away with full multiple streams support
     * @param type - The video type.
     */
    onTrackVideoTypeChanged(type: VideoType): void {
        const nowMs = Date.now();

        this.maybeSendTrackStreamingStatusEvent(nowMs);

        this.streamingStatusMap = {
            ...this.streamingStatusMap || {},
            videoType: type,
            startedMs: nowMs
        };
    }
}

export default TrackStreamingStatusImpl;
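
// Illustrative usage sketch (not part of the original module): how an owner such as a remote track or conference
// code might wire this detector up and consume the status it reports. The `rtc`, `conference` and `track` arguments
// are assumed to be obtained elsewhere; the option values shown are simply the documented defaults, and the helper
// name itself is hypothetical.
// eslint-disable-next-line no-unused-vars
function exampleTrackStreamingStatusUsage(
        rtc: RTC,
        conference: JitsiConference,
        track: JitsiRemoteTrack): TrackStreamingStatusImpl {
    const detector = new TrackStreamingStatusImpl(rtc, conference, track, {
        outOfForwardedSourcesTimeout: 500,
        p2pRtcMuteTimeout: 2500,
        rtcMuteTimeout: 10000
    });

    // Bind the event listeners and start tracking the status.
    detector.init();

    // Status changes are emitted on the track itself; the track stores the latest value as well.
    track.on(
        JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
        () => logger.debug(
            `Streaming status of ${track.getSourceName()} is now ${track.getTrackStreamingStatus()}`));

    // Call detector.dispose() when the track is removed to release the listeners and timers.
    return detector;
}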