modified lib-jitsi-meet dev repo

TrackStreamingStatus.ts 26KB

import { getLogger } from '@jitsi/logger';

import { JitsiConferenceEvents } from '../../JitsiConferenceEvents';
import * as JitsiTrackEvents from '../../JitsiTrackEvents';
import RTCEvents from '../../service/RTC/RTCEvents';
import { createTrackStreamingStatusEvent } from '../../service/statistics/AnalyticsEvents';
import JitsiConference from '../../types/hand-crafted/JitsiConference';
import JitsiRemoteTrack from '../../types/hand-crafted/modules/RTC/JitsiRemoteTrack';
import RTC from '../../types/hand-crafted/modules/RTC/RTC';
import { VideoType } from '../../types/hand-crafted/service/RTC/VideoType';
import browser from '../browser';
import Statistics from '../statistics/statistics';
/** Track streaming statuses. */
export enum TrackStreamingStatus {
    /**
     * Status indicating that streaming is currently active.
     */
    ACTIVE = 'active',

    /**
     * Status indicating that streaming is currently inactive.
     * Inactive means the streaming was stopped on purpose from the bridge, e.g. the track left forwarded sources
     * or adaptivity decided to drop the video because there was not enough bandwidth.
     */
    INACTIVE = 'inactive',

    /**
     * Status indicating that streaming is currently interrupted.
     */
    INTERRUPTED = 'interrupted',

    /**
     * Status indicating that streaming is currently restoring.
     */
    RESTORING = 'restoring',
}
type StreamingStatusMap = {
    // TODO: Replace this hand-crafted VideoType when we convert VideoType.js to TypeScript.
    videoType?: VideoType,
    startedMs?: number,
    p2p?: boolean,
    streamingStatus?: string,
    value?: number
};
const logger = getLogger(__filename);

/**
 * Default value of 500 milliseconds for {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
 */
const DEFAULT_NOT_IN_FORWARDED_SOURCES_TIMEOUT = 500;

/**
 * Default value of 2500 milliseconds for {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
 */
const DEFAULT_P2P_RTC_MUTE_TIMEOUT = 2500;

/**
 * Default value of 10000 milliseconds for {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
 */
const DEFAULT_RTC_MUTE_TIMEOUT = 10000;
/**
 * The time to wait for a track to be restored. A track which was out of forwarded sources should be inactive; when
 * it enters forwarded sources it becomes restoring, and once data is received from the bridge it becomes active.
 * If no data is received for some time, the track's streaming status is set to interrupted.
 */
const DEFAULT_RESTORING_TIMEOUT = 10000;
/**
 * Class responsible for emitting JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED events.
 */
export class TrackStreamingStatusImpl {
    rtc: RTC;
    conference: JitsiConference;
    track: JitsiRemoteTrack;

    /** This holds the timeout callback ID scheduled using window.setTimeout. */
    trackTimer: number | null;

    /**
     * If video track frozen detection through the RTC mute event is supported, we wait some time until the video
     * track is considered frozen. But because the video is expected to freeze when the track falls out of
     * forwarded sources, this timeout must be significantly reduced in the "out of forwarded sources" case.
     *
     * Basically this value is used instead of {@link rtcMuteTimeout} when the track is not in forwarded sources.
     */
    outOfForwardedSourcesTimeout: number;

    /**
     * How long we are going to wait for the corresponding signalling mute event after the RTC video track muted
     * event is fired on the media stream, before the connection interrupted is fired. The default value is
     * {@link DEFAULT_P2P_RTC_MUTE_TIMEOUT}.
     */
    p2pRtcMuteTimeout: number;

    /**
     * How long we're going to wait after the RTC video track muted event for the corresponding signalling mute
     * event, before the connection interrupted is fired. The default value is {@link DEFAULT_RTC_MUTE_TIMEOUT}.
     *
     * @returns amount of time in milliseconds
     */
    rtcMuteTimeout: number;

    /**
     * This holds a timestamp indicating when the remote video track was RTC muted. The purpose of storing the
     * timestamp is to avoid the transition to the disconnected status in case of a legitimate video mute
     * operation, where the signalling video muted event can arrive shortly after the RTC muted event.
     *
     * The timestamp is measured in milliseconds obtained with <tt>Date.now()</tt>.
     *
     * FIXME merge this logic with the NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the
     * event to the remote track and allowing to set a different timeout for local and remote tracks.
     */
    rtcMutedTimestamp: number | null;

    /** This holds the restoring timeout callback ID scheduled using window.setTimeout. */
    restoringTimer: ReturnType<typeof setTimeout> | null;

    /**
     * This holds the current streaming status (along with all the internal events that happen while in that
     * state).
     *
     * The goal is to send this information to the analytics backend for post-mortem analysis.
     */
    streamingStatusMap: StreamingStatusMap;

    _onP2PStatus: () => void;
    _onUserLeft: () => void;
    _onTrackRtcMuted: () => void;
    _onTrackRtcUnmuted: () => void;
    _onSignallingMuteChanged: () => void;
    _onTrackVideoTypeChanged: () => void;
    _onLastNValueChanged: () => void;
    _onForwardedSourcesChanged: () => void;

    /* eslint-disable max-params*/
    /**
     * Calculates the new {@link TrackStreamingStatus} based on the values given for some specific remote track.
     * It is assumed that the conference is currently in JVB mode (as opposed to P2P mode).
     * @param isInForwardedSources - indicates whether the track is in the forwarded sources set. When set to
     * false it means that JVB is not sending any video for the track.
     * @param isRestoringTimedout - if true it means that the track has been outside of forwarded sources for too
     * long to be considered {@link TrackStreamingStatus.RESTORING}.
     * @param isVideoMuted - true if the track is video muted and we should not expect to receive any video.
     * @param isVideoTrackFrozen - if the current browser supports video frozen detection then it will be set to
     * true when the video track is frozen. If the current browser does not support frozen detection then it's
     * always false.
     * @return {TrackStreamingStatus} the new streaming status for the track for which the values above were
     * provided.
     * @private
     */
    static _getNewStateForJvbMode(
            isInForwardedSources: boolean,
            isRestoringTimedout: boolean,
            isVideoMuted: boolean,
            isVideoTrackFrozen: boolean): TrackStreamingStatus {
        // We are currently not checking the endpoint connection status received from the JVB.
        if (isVideoMuted) {
            // If the connection is active according to JVB and the track is video muted there is no way for the
            // connection to be inactive, because the detection logic below only makes sense for video.
            return TrackStreamingStatus.ACTIVE;
        }

        // Logic when isVideoTrackFrozen is supported.
        if (browser.supportsVideoMuteOnConnInterrupted()) {
            if (!isVideoTrackFrozen) {
                // If the video is playing we're good.
                return TrackStreamingStatus.ACTIVE;
            } else if (isInForwardedSources) {
                return isRestoringTimedout ? TrackStreamingStatus.INTERRUPTED : TrackStreamingStatus.RESTORING;
            }

            return TrackStreamingStatus.INACTIVE;
        }

        // Because this browser is incapable of detecting frozen video we must rely on the forwarded sources value.
        return isInForwardedSources ? TrackStreamingStatus.ACTIVE : TrackStreamingStatus.INACTIVE;
    }
    /* eslint-enable max-params*/
    /**
     * In P2P mode we don't care about any values coming from the JVB and the streaming status can only be active
     * or interrupted.
     * @param isVideoMuted - true if video muted
     * @param isVideoTrackFrozen - true if the video track for the remote track is currently frozen. If the
     * current browser does not support video frozen detection then it's always false.
     * @return {TrackStreamingStatus}
     * @private
     */
    static _getNewStateForP2PMode(isVideoMuted: boolean, isVideoTrackFrozen: boolean): TrackStreamingStatus {
        if (!browser.supportsVideoMuteOnConnInterrupted()) {
            // There's no way to detect problems in P2P when there's no video track frozen detection...
            return TrackStreamingStatus.ACTIVE;
        }

        return isVideoMuted || !isVideoTrackFrozen
            ? TrackStreamingStatus.ACTIVE : TrackStreamingStatus.INTERRUPTED;
    }
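
    /*
     * Summary of the JVB-mode decision above (when frozen detection is supported):
     *
     *   isVideoMuted | isVideoTrackFrozen | isInForwardedSources | isRestoringTimedout || result
     *   -------------+--------------------+----------------------+---------------------++------------
     *   true         | any                | any                  | any                 || ACTIVE
     *   false        | false              | any                  | any                 || ACTIVE
     *   false        | true               | true                 | false               || RESTORING
     *   false        | true               | true                 | true                || INTERRUPTED
     *   false        | true               | false                | any                 || INACTIVE
     *
     * Without frozen detection support only the forwarded sources membership matters: in forwarded sources
     * => ACTIVE, otherwise INACTIVE. In P2P mode the status is always either ACTIVE or INTERRUPTED.
     */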
    /**
     * Creates a new instance of <tt>TrackStreamingStatus</tt>.
     *
     * @constructor
     * @param rtc - the RTC service instance
     * @param conference - parent conference instance
     * @param track - the remote track whose streaming status is monitored
     * @param {Object} options
     * @param {number} [options.p2pRtcMuteTimeout=2500] custom value for
     * {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
     * @param {number} [options.rtcMuteTimeout=10000] custom value for
     * {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
     * @param {number} [options.outOfForwardedSourcesTimeout=500] custom value for
     * {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
     */
    constructor(rtc: RTC, conference: JitsiConference, track: JitsiRemoteTrack, options: {
        outOfForwardedSourcesTimeout: number,
        p2pRtcMuteTimeout: number,
        rtcMuteTimeout: number
    }) {
        this.rtc = rtc;
        this.conference = conference;
        this.track = track;
        this.restoringTimer = null;
        this.rtcMutedTimestamp = null;
        this.streamingStatusMap = {};
        this.trackTimer = null;
        this.outOfForwardedSourcesTimeout = typeof options.outOfForwardedSourcesTimeout === 'number'
            ? options.outOfForwardedSourcesTimeout : DEFAULT_NOT_IN_FORWARDED_SOURCES_TIMEOUT;
        this.p2pRtcMuteTimeout = typeof options.p2pRtcMuteTimeout === 'number'
            ? options.p2pRtcMuteTimeout : DEFAULT_P2P_RTC_MUTE_TIMEOUT;
        this.rtcMuteTimeout = typeof options.rtcMuteTimeout === 'number'
            ? options.rtcMuteTimeout : DEFAULT_RTC_MUTE_TIMEOUT;

        logger.info(`RtcMuteTimeout set to: ${this.rtcMuteTimeout}`);
    }
    /**
     * Gets the video frozen timeout for the given source name.
     * @return how long we are going to wait, since the RTC video muted event, before a video track is considered
     * frozen.
     * @private
     */
    _getVideoFrozenTimeout(): number {
        const sourceName = this.track.getSourceName();

        return this.rtc.isInForwardedSources(sourceName)
            ? this.rtcMuteTimeout
            : this.conference.isP2PActive() ? this.p2pRtcMuteTimeout : this.outOfForwardedSourcesTimeout;
    }
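
    // Timeout selection in _getVideoFrozenTimeout(): a track that is in forwarded sources gets the full
    // rtcMuteTimeout; for a track that is not in forwarded sources the much shorter
    // outOfForwardedSourcesTimeout applies, unless the conference is in P2P mode, in which case
    // p2pRtcMuteTimeout is used.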
    /**
     * Initializes <tt>TrackStreamingStatus</tt> and binds required event listeners.
     */
    init(): void {
        // Handles P2P status changes.
        this._onP2PStatus = this.figureOutStreamingStatus.bind(this);
        this.conference.on(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);

        // Used to send analytics events for the participant that left the call.
        this._onUserLeft = this.onUserLeft.bind(this);
        this.conference.on(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);

        // On some browsers MediaStreamTrack fires "onmute"/"onunmute" events for video tracks when they stop
        // receiving data, which is often a sign that the remote user is having connectivity issues.
        if (browser.supportsVideoMuteOnConnInterrupted()) {
            this._onTrackRtcMuted = this.onTrackRtcMuted.bind(this);
            this.rtc.addListener(RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);

            this._onTrackRtcUnmuted = this.onTrackRtcUnmuted.bind(this);
            this.rtc.addListener(RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);

            // Listener which will be bound to the JitsiRemoteTrack to listen for signalling mute/unmute events.
            this._onSignallingMuteChanged = this.onSignallingMuteChanged.bind(this);
            this.track.on(JitsiTrackEvents.TRACK_MUTE_CHANGED, this._onSignallingMuteChanged);

            // Used to send an analytics event when the video type changes.
            this._onTrackVideoTypeChanged = this.onTrackVideoTypeChanged.bind(this);
            this.track.on(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, this._onTrackVideoTypeChanged);
        }

        this._onForwardedSourcesChanged = this.onForwardedSourcesChanged.bind(this);
        this.conference.on(JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED, this._onForwardedSourcesChanged);

        this._onLastNValueChanged = this.figureOutStreamingStatus.bind(this);
        this.rtc.on(RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);
    }
    /**
     * Removes all event listeners and disposes of all resources held by this instance.
     */
    dispose(): void {
        if (browser.supportsVideoMuteOnConnInterrupted()) {
            this.rtc.removeListener(RTCEvents.REMOTE_TRACK_MUTE, this._onTrackRtcMuted);
            this.rtc.removeListener(RTCEvents.REMOTE_TRACK_UNMUTE, this._onTrackRtcUnmuted);
            this.track.off(JitsiTrackEvents.TRACK_MUTE_CHANGED, this._onSignallingMuteChanged);
        }

        this.conference.off(JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED, this._onForwardedSourcesChanged);
        this.conference.off(JitsiConferenceEvents.P2P_STATUS, this._onP2PStatus);
        this.conference.off(JitsiConferenceEvents.USER_LEFT, this._onUserLeft);
        this.rtc.removeListener(RTCEvents.LASTN_VALUE_CHANGED, this._onLastNValueChanged);

        this.clearTimeout();
        this.clearRtcMutedTimestamp();

        this.maybeSendTrackStreamingStatusEvent(Date.now());

        this.figureOutStreamingStatus();
    }
    /**
     * Changes streaming status.
     * @param newStatus
     */
    _changeStreamingStatus(newStatus: TrackStreamingStatus): void {
        if (this.track.getTrackStreamingStatus() !== newStatus) {
            const sourceName = this.track.getSourceName();

            this.track._setTrackStreamingStatus(newStatus);

            logger.debug(`Emit track streaming status(${Date.now()}) ${sourceName}: ${newStatus}`);

            // Log the event on CallStats.
            Statistics.sendLog(
                JSON.stringify({
                    id: 'track.streaming.status',
                    track: sourceName,
                    status: newStatus
                }));

            this.track.emit(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED, newStatus);
        }
    }

    /**
     * Resets the postponed "streaming interrupted" event which was previously scheduled as a timeout on the RTC
     * 'onmute' event.
     */
    clearTimeout(): void {
        if (this.trackTimer) {
            window.clearTimeout(this.trackTimer);
            this.trackTimer = null;
        }
    }
    /**
     * Clears the timestamp of the RTC muted event for the remote video track.
     */
    clearRtcMutedTimestamp(): void {
        this.rtcMutedTimestamp = null;
    }

    /**
     * Checks if the track is considered frozen.
     * @return <tt>true</tt> if the video has frozen or <tt>false</tt> when it's either not considered frozen
     * (yet) or if freeze detection is not supported by the current browser.
     *
     * FIXME merge this logic with the NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the
     * event to the remote track and allowing to set a different timeout for local and remote tracks.
     */
    isVideoTrackFrozen(): boolean {
        if (!browser.supportsVideoMuteOnConnInterrupted()) {
            return false;
        }

        const isVideoRTCMuted = this.track.isWebRTCTrackMuted();
        const rtcMutedTimestamp = this.rtcMutedTimestamp;
        const timeout = this._getVideoFrozenTimeout();

        return isVideoRTCMuted && typeof rtcMutedTimestamp === 'number' && (Date.now() - rtcMutedTimestamp) >= timeout;
    }
    /**
     * Figures out (and updates) the current streaming status for the track identified by the source name.
     */
    figureOutStreamingStatus(): void {
        const sourceName = this.track.getSourceName();
        const inP2PMode = this.conference.isP2PActive();
        const isRestoringTimedOut = this._isRestoringTimedout();
        const audioOnlyMode = this.conference.getLastN() === 0;

        // NOTE: overriding videoMuted to true for audioOnlyMode should disable any detection based on video
        // playback or forwarded sources.
        const isVideoMuted = this.track.isMuted() || audioOnlyMode;
        const isVideoTrackFrozen = this.isVideoTrackFrozen();
        const isInForwardedSources = this.rtc.isInForwardedSources(sourceName);

        const newState
            = inP2PMode
                ? TrackStreamingStatusImpl._getNewStateForP2PMode(
                    isVideoMuted,
                    isVideoTrackFrozen)
                : TrackStreamingStatusImpl._getNewStateForJvbMode(
                    isInForwardedSources,
                    isRestoringTimedOut,
                    isVideoMuted,
                    isVideoTrackFrozen);

        // If the new state is not restoring, clear the timers and timestamps that we use to track the restoring
        // state.
        if (newState !== TrackStreamingStatus.RESTORING) {
            this._clearRestoringTimer();
        }

        logger.debug(
            `Figure out conn status for ${sourceName}, is video muted: ${
                isVideoMuted} video track frozen: ${
                isVideoTrackFrozen} p2p mode: ${
                inP2PMode} is in forwarded sources: ${
                isInForwardedSources} currentStatus => newStatus: ${
                this.track.getTrackStreamingStatus()} => ${newState}`);

        const oldStreamingStatus = this.streamingStatusMap || {};

        // Send an analytics event (guarded on whether the p2p flag or the streaming status has changed since the
        // last time this code block ran).
        if (!('p2p' in oldStreamingStatus)
            || !('streamingStatus' in oldStreamingStatus)
            || oldStreamingStatus.p2p !== inP2PMode
            || oldStreamingStatus.streamingStatus !== newState) {
            const nowMs = Date.now();

            this.maybeSendTrackStreamingStatusEvent(nowMs);

            this.streamingStatusMap = {
                ...oldStreamingStatus,
                streamingStatus: newState,
                p2p: inP2PMode,
                startedMs: nowMs
            };

            // Sometimes (always?) we're late to hook the TRACK_VIDEOTYPE_CHANGED event and the video type is not
            // in oldStreamingStatus.
            if (!('videoType' in this.streamingStatusMap)) {
                this.streamingStatusMap.videoType = this.track.getVideoType();
            }
        }
        this._changeStreamingStatus(newState);
    }
    /**
     * Computes the duration of the current streaming status for the track (e.g. 15 seconds in the INTERRUPTED
     * state) and sends a track streaming status event.
     * @param nowMs - The current time (in millis).
     */
    maybeSendTrackStreamingStatusEvent(nowMs: number): void {
        const trackStreamingStatus = this.streamingStatusMap;

        if (trackStreamingStatus
            && 'startedMs' in trackStreamingStatus
            && 'videoType' in trackStreamingStatus
            && 'streamingStatus' in trackStreamingStatus
            && 'p2p' in trackStreamingStatus) {
            trackStreamingStatus.value = nowMs - trackStreamingStatus.startedMs;
            Statistics.sendAnalytics(createTrackStreamingStatusEvent(trackStreamingStatus));
        }
    }
    /**
     * On a change in the forwarded sources set, checks all leaving and entering tracks in order to change their
     * corresponding statuses.
     *
     * @param leavingForwardedSources - The array of source names leaving forwarded sources.
     * @param enteringForwardedSources - The array of source names entering forwarded sources.
     * @param timestamp - The time in millis.
     * @private
     */
    onForwardedSourcesChanged(
            leavingForwardedSources: string[] = [],
            enteringForwardedSources: string[] = [],
            timestamp: number): void {
        const sourceName = this.track.getSourceName();

        logger.debug(`Forwarded sources changed leaving=${leavingForwardedSources}, entering=${
            enteringForwardedSources} at ${timestamp}`);

        // If the browser doesn't fire the mute/onmute events when the remote peer stops/starts sending media,
        // calculate the streaming status for all the tracks since it won't get triggered automatically on the
        // track that has started/stopped receiving media.
        if (!browser.supportsVideoMuteOnConnInterrupted()) {
            this.figureOutStreamingStatus();
        }

        if (leavingForwardedSources.includes(sourceName)) {
            this.track._clearEnteredForwardedSourcesTimestamp();
            this._clearRestoringTimer();
            browser.supportsVideoMuteOnConnInterrupted() && this.figureOutStreamingStatus();
        }

        if (enteringForwardedSources.includes(sourceName)) {
            // Store the timestamp at which this track entered forwarded sources.
            this.track._setEnteredForwardedSourcesTimestamp(timestamp);
            browser.supportsVideoMuteOnConnInterrupted() && this.figureOutStreamingStatus();
        }
    }
    /**
     * Clears the restoring timer for the video track and the timestamp for entering forwarded sources.
     */
    _clearRestoringTimer(): void {
        const rTimer = this.restoringTimer;

        if (rTimer) {
            clearTimeout(rTimer);
            this.restoringTimer = null;
        }
    }

    /**
     * Checks whether a track has stayed long enough in the restoring state, comparing the current time with the
     * time the track entered forwarded sources. If it hasn't timed out and there is no timer added, adds a new
     * timer in order to give it more time to become active, or to mark it as interrupted on the next check.
     *
     * @returns <tt>true</tt> if the track was in the restoring state for more than the timeout
     * ({@link DEFAULT_RESTORING_TIMEOUT}), in which case its status should be set to interrupted.
     * @private
     */
    _isRestoringTimedout(): boolean {
        const enteredForwardedSourcesTimestamp = this.track._getEnteredForwardedSourcesTimestamp();

        if (enteredForwardedSourcesTimestamp
            && (Date.now() - enteredForwardedSourcesTimestamp) >= DEFAULT_RESTORING_TIMEOUT) {
            return true;
        }

        // Still haven't reached the timeout: if there is no timer scheduled, schedule one so we can track the
        // restoring state and change it after reaching the timeout.
        const rTimer = this.restoringTimer;

        if (!rTimer) {
            this.restoringTimer = setTimeout(() => this.figureOutStreamingStatus(), DEFAULT_RESTORING_TIMEOUT);
        }

        return false;
    }
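
    // Restoring lifecycle: onForwardedSourcesChanged() records the timestamp at which the track (re)enters
    // forwarded sources; while the computed state is RESTORING, _isRestoringTimedout() keeps a one-shot
    // re-evaluation scheduled after DEFAULT_RESTORING_TIMEOUT so the status can flip to INTERRUPTED if the video
    // stays frozen, and figureOutStreamingStatus() clears that timer as soon as the state is no longer RESTORING.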
    /** Checks whether a track is the current track. */
    _isCurrentTrack(track: JitsiRemoteTrack): boolean {
        return track.getSourceName() === this.track.getSourceName();
    }

    /**
     * Sends a last/final track streaming status event for the track of the user that left the conference.
     * @param id - The id of the participant that left the conference.
     */
    onUserLeft(id: string): void {
        if (this.track.getParticipantId() === id) {
            this.maybeSendTrackStreamingStatusEvent(Date.now());
            this.streamingStatusMap = {};
        }
    }

    /**
     * Handles the RTC 'onmute' event for the video track.
     *
     * @param track - The video track for which the 'onmute' event will be processed.
     */
    onTrackRtcMuted(track: JitsiRemoteTrack): void {
        if (!this._isCurrentTrack(track)) {
            return;
        }

        const sourceName = track.getSourceName();

        logger.debug(`Detector track RTC muted: ${sourceName}`, Date.now());

        this.rtcMutedTimestamp = Date.now();
        if (!track.isMuted()) {
            // If the user is not muted according to the signalling, we'll give it some time before the streaming
            // interrupted event is triggered.
            this.clearTimeout();

            // The timeout is reduced when the track is not in the forwarded sources.
            const timeout = this._getVideoFrozenTimeout();

            this.trackTimer = window.setTimeout(() => {
                logger.debug(`Set RTC mute timeout for: ${sourceName} of ${timeout} ms`);
                this.clearTimeout();
                this.figureOutStreamingStatus();
            }, timeout);
        }
    }
    /**
     * Handles the RTC 'onunmute' event for the video track.
     *
     * @param track - The video track for which the 'onunmute' event will be processed.
     */
    onTrackRtcUnmuted(track: JitsiRemoteTrack): void {
        if (!this._isCurrentTrack(track)) {
            return;
        }

        const sourceName = this.track.getSourceName();

        logger.debug(`Detector track RTC unmuted: ${sourceName}`, Date.now());

        this.clearTimeout();
        this.clearRtcMutedTimestamp();
        this.figureOutStreamingStatus();
    }

    /**
     * Here the signalling "mute"/"unmute" events are processed.
     *
     * @param track - The remote video track for which the signalling mute/unmute event will be processed.
     */
    onSignallingMuteChanged(track: JitsiRemoteTrack): void {
        if (!this._isCurrentTrack(track)) {
            return;
        }

        const sourceName = this.track.getSourceName();

        logger.debug(`Detector on track signalling mute changed: ${sourceName}`, track.isMuted());

        this.figureOutStreamingStatus();
    }

    /**
     * Sends a track streaming status event as a result of the video type changing.
     * @deprecated this will go away with full multiple streams support
     * @param type - The video type.
     */
    onTrackVideoTypeChanged(type: VideoType): void {
        const nowMs = Date.now();

        this.maybeSendTrackStreamingStatusEvent(nowMs);

        this.streamingStatusMap = {
            ...this.streamingStatusMap || {},
            videoType: type,
            startedMs: nowMs
        };
    }
}

export default TrackStreamingStatusImpl;
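
Usage note: a minimal sketch (not part of the file above) of how this class can be wired up for a remote track.
The `rtc`, `conference` and `remoteTrack` objects are assumed to come from an existing lib-jitsi-meet conference,
the import paths are illustrative, and the option values shown are just the defaults defined in this file; only
the TrackStreamingStatusImpl API itself comes from the code above.

import * as JitsiTrackEvents from 'lib-jitsi-meet/JitsiTrackEvents';                                  // illustrative path
import { TrackStreamingStatus, TrackStreamingStatusImpl } from 'lib-jitsi-meet/modules/connectivity/TrackStreamingStatus';  // illustrative path

// `rtc`, `conference` and `remoteTrack` are assumed to be live RTC / JitsiConference / JitsiRemoteTrack
// instances obtained elsewhere (e.g. from TRACK_ADDED handling); they are typed loosely here on purpose.
function watchRemoteTrack(rtc: any, conference: any, remoteTrack: any): TrackStreamingStatusImpl {
    // The option values below mirror the defaults declared in TrackStreamingStatus.ts.
    const trackStatus = new TrackStreamingStatusImpl(rtc, conference, remoteTrack, {
        outOfForwardedSourcesTimeout: 500,
        p2pRtcMuteTimeout: 2500,
        rtcMuteTimeout: 10000
    });

    trackStatus.init();

    // The event is emitted on the JitsiRemoteTrack itself with the new status as the payload.
    remoteTrack.on(
        JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
        (status: TrackStreamingStatus) => console.log(`${remoteTrack.getSourceName()} is now ${status}`));

    // Call trackStatus.dispose() when the track is removed to detach all listeners and timers.
    return trackStatus;
}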