
JitsiRemoteTrack.js

import * as JitsiTrackEvents from '../../JitsiTrackEvents';
import { VideoType } from '../../service/RTC/VideoType';
import { createTtfmEvent } from '../../service/statistics/AnalyticsEvents';
import TrackStreamingStatusImpl, { TrackStreamingStatus } from '../connectivity/TrackStreamingStatus';
import Statistics from '../statistics/statistics';
import JitsiTrack from './JitsiTrack';

const logger = require('@jitsi/logger').getLogger(__filename);
const RTCEvents = require('../../service/RTC/RTCEvents');

let ttfmTrackerAudioAttached = false;
let ttfmTrackerVideoAttached = false;

/**
 * List of container events that we are going to process. _onContainerEventHandler will be added as listener to the
 * container for every event in the list.
 */
const containerEvents = [ 'abort', 'canplaythrough', 'ended', 'error', 'stalled', 'suspend', 'waiting' ];

/* eslint-disable max-params */
/**
 * Represents a single media track (either audio or video).
 */
export default class JitsiRemoteTrack extends JitsiTrack {
    /**
     * Creates new JitsiRemoteTrack instance.
     * @param {RTC} rtc the RTC service instance.
     * @param {JitsiConference} conference the conference to which this track
     * belongs
     * @param {string} ownerEndpointId the endpoint ID of the track owner
     * @param {MediaStream} stream WebRTC MediaStream, parent of the track
     * @param {MediaStreamTrack} track underlying WebRTC MediaStreamTrack for
     * the new JitsiRemoteTrack
     * @param {MediaType} mediaType the type of the media
     * @param {VideoType} videoType the type of the video if applicable
     * @param {number} ssrc the SSRC number of the Media Stream
     * @param {boolean} muted the initial muted state
     * @param {boolean} isP2P indicates whether or not this track belongs to a
     * P2P session
     * @param {String} sourceName the source name signaled for the track
     * @throws {TypeError} if <tt>ssrc</tt> is not a number.
     * @constructor
     */
    constructor(
            rtc,
            conference,
            ownerEndpointId,
            stream,
            track,
            mediaType,
            videoType,
            ssrc,
            muted,
            isP2P,
            sourceName) {
        super(
            conference,
            stream,
            track,
            () => {
                // Nothing to do if the track is inactive.
            },
            mediaType,
            videoType);
        this.rtc = rtc;

        // Prevent mixing up the type of SSRC, which should be a number.
        if (typeof ssrc !== 'number') {
            throw new TypeError(`SSRC ${ssrc} is not a number`);
        }
        this.ssrc = ssrc;
        this.ownerEndpointId = ownerEndpointId;
        this.muted = muted;
        this.isP2P = isP2P;
        this._sourceName = sourceName;
        this._trackStreamingStatus = null;
        this._trackStreamingStatusImpl = null;

        /**
         * This holds the timestamp of when the remote video track entered the forwarded sources set. A track
         * entering forwardedSources gets the "restoring" streaming status and becomes "active" once video starts
         * being received, but if no video is received within {@link DEFAULT_RESTORING_TIMEOUT} the track's
         * streaming status becomes "interrupted".
         */
        this._enteredForwardedSourcesTimestamp = null;

        this.addEventListener = this.on = this._addEventListener.bind(this);
        this.removeEventListener = this.off = this._removeEventListener.bind(this);
        logger.debug(`New remote track created: ${this}`);

        // We want to mark whether the track has ever been muted in order to detect TTFM events for start-muted
        // conferences, as being muted can significantly increase TTFM values.
        this.hasBeenMuted = muted;

        // Bind 'onmute' and 'onunmute' event handlers.
        if (this.rtc && this.track) {
            this._bindTrackHandlers();
        }
        this._containerHandlers = {};
        containerEvents.forEach(event => {
            this._containerHandlers[event] = this._containerEventHandler.bind(this, event);
        });
    }

    /* eslint-enable max-params */

    /**
     * Attaches the track handlers.
     *
     * @returns {void}
     */
    _bindTrackHandlers() {
        this.track.addEventListener('mute', () => this._onTrackMute());
        this.track.addEventListener('unmute', () => this._onTrackUnmute());
        this.track.addEventListener('ended', () => {
            logger.debug(`"onended" event(${Date.now()}): ${this}`);
        });
    }

    /**
     * Overrides the addEventListener method to init a TrackStreamingStatus instance when there are listeners for the
     * {@link JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED} event.
     *
     * @param {string} event - event name
     * @param {function} handler - event handler
     */
    _addEventListener(event, handler) {
        super.addListener(event, handler);
        if (event === JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED
            && this.listenerCount(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED)
            && !this._trackStreamingStatusImpl
        ) {
            this._initTrackStreamingStatus();
            logger.debug(`Initializing track streaming status: ${this._sourceName}`);
        }
    }
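
    /*
     * Illustrative sketch, not part of this file's API surface: because `on` is aliased to `_addEventListener` in
     * the constructor, subscribing to the streaming-status event is what lazily creates the TrackStreamingStatus
     * implementation, and removing the last such listener (via `off`) disposes it again in `_removeEventListener`.
     * The `remoteTrack` variable below is hypothetical.
     *
     *     remoteTrack.on(
     *         JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
     *         () => console.log(remoteTrack.getTrackStreamingStatus()));
     */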

    /**
     * Overrides the removeEventListener method to dispose of the TrackStreamingStatus instance.
     *
     * @param {string} event - event name
     * @param {function} handler - event handler
     */
    _removeEventListener(event, handler) {
        super.removeListener(event, handler);
        if (event === JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED
            && !this.listenerCount(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED)
        ) {
            this._disposeTrackStreamingStatus();
            logger.debug(`Disposing track streaming status: ${this._sourceName}`);
        }
    }

    /**
     * Callback invoked when the track is muted. Emits an event notifying
     * listeners of the mute event.
     *
     * @private
     * @returns {void}
     */
    _onTrackMute() {
        logger.debug(`"onmute" event(${Date.now()}): ${this}`);

        // Ignore mute events that get fired on desktop tracks because of 0Hz screensharing introduced in Chromium.
        // The sender stops sending frames if the content of the captured window doesn't change, resulting in the
        // receiver showing an avatar instead of the shared content.
        if (this.videoType === VideoType.DESKTOP) {
            logger.debug('Ignoring mute event on desktop tracks.');

            return;
        }
        this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_MUTE, this);
    }

    /**
     * Callback invoked when the track is unmuted. Emits an event notifying
     * listeners of the unmute event.
     *
     * @private
     * @returns {void}
     */
    _onTrackUnmute() {
        logger.debug(`"onunmute" event(${Date.now()}): ${this}`);
        this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_UNMUTE, this);
    }

    /**
     * Removes attached event listeners and disposes of the TrackStreamingStatus instance.
     *
     * @returns {Promise}
     */
    dispose() {
        if (this.disposed) {
            return;
        }
        this._disposeTrackStreamingStatus();

        return super.dispose();
    }

    /**
     * Sets the current muted status and fires an event for the change.
     * @param value the muted status.
     */
    setMute(value) {
        if (this.muted === value) {
            return;
        }
        if (value) {
            this.hasBeenMuted = true;
        }

        // We can have a fake video stream.
        if (this.stream) {
            this.stream.muted = value;
        }
        this.muted = value;
        logger.info(`Mute ${this}: ${value}`);
        this.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
    }

    /**
     * Returns the current muted status of the track.
     * @returns {boolean|*|JitsiRemoteTrack.muted} <tt>true</tt> if the track is
     * muted and <tt>false</tt> otherwise.
     */
    isMuted() {
        return this.muted;
    }

    /**
     * Returns the participant id which owns the track.
     *
     * @returns {string} the id of the participant. It corresponds to the
     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
     */
    getParticipantId() {
        return this.ownerEndpointId;
    }

    /**
     * Returns <tt>false</tt>, since this is a remote track, never a local one.
     */
    isLocal() {
        return false;
    }

    /**
     * Returns the synchronization source identifier (SSRC) of this remote
     * track.
     *
     * @returns {number} the SSRC of this remote track.
     */
    getSSRC() {
        return this.ssrc;
    }

    /**
     * Returns the track's source name.
     *
     * @returns {string} the track's source name
     */
    getSourceName() {
        return this._sourceName;
    }

    /**
     * Updates the properties when the track is remapped to another source.
     *
     * @param {string} owner The endpoint ID of the new owner.
     */
    setOwner(owner) {
        this.ownerEndpointId = owner;
    }

    /**
     * Sets the name of the source associated with the remote track.
     *
     * @param {string} name - The source name to be associated with the track.
     */
    setSourceName(name) {
        this._sourceName = name;
    }

    /**
     * Changes the video type of the track.
     *
     * @param {string} type - The new video type ("camera", "desktop").
     */
    _setVideoType(type) {
        if (this.videoType === type) {
            return;
        }
        this.videoType = type;
        this.emit(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, type);
    }
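
    /*
     * Illustrative sketch: consumers can react to the TRACK_VIDEOTYPE_CHANGED event emitted above, e.g. to switch
     * between a camera and a screenshare layout. The `remoteTrack` variable and the `updateLayoutForVideoType`
     * helper are hypothetical.
     *
     *     remoteTrack.on(
     *         JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED,
     *         videoType => updateLayoutForVideoType(videoType));
     */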

    /**
     * Handles track play events.
     */
    _playCallback() {
        if (!this.conference.room) {
            return;
        }
        const type = this.isVideoTrack() ? 'video' : 'audio';
        const now = window.performance.now();

        logger.info(`(TIME) Render ${type}:\t`, now);
        this.conference.getConnectionTimes()[`${type}.render`] = now;

        // The conference can be started without calling GUM.
        // FIXME if there were a module for connection times, this kind
        // of logic (gumDuration or ttfm) should end up there.
        const gumStart = window.connectionTimes['obtainPermissions.start'];
        const gumEnd = window.connectionTimes['obtainPermissions.end'];
        const gumDuration
            = !isNaN(gumEnd) && !isNaN(gumStart) ? gumEnd - gumStart : 0;

        // Subtract the muc.joined-to-session-initiate duration because jicofo
        // waits until there are 2 participants to start Jingle sessions.
        const ttfm = now
            - (this.conference.getConnectionTimes()['session.initiate']
                - this.conference.getConnectionTimes()['muc.joined'])
            - gumDuration;

        this.conference.getConnectionTimes()[`${type}.ttfm`] = ttfm;
        logger.info(`(TIME) TTFM ${type}:\t`, ttfm);
        Statistics.sendAnalytics(createTtfmEvent(
            {
                'media_type': type,
                muted: this.hasBeenMuted,
                value: ttfm
            }));
    }
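
    /*
     * Worked example of the TTFM computation above, with illustrative numbers only: if media renders at
     * now = 20000 ms, GUM took gumEnd - gumStart = 11500 - 11000 = 500 ms, and the gap between 'muc.joined'
     * (12000 ms) and 'session.initiate' (15000 ms) is 3000 ms, then
     * ttfm = 20000 - 3000 - 500 = 16500 ms.
     */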

    /**
     * Attaches the time-to-first-media tracker only if there is a conference and only
     * for the first element.
     * @param container the HTML container which can be a 'video' or 'audio'
     * element.
     * @private
     */
    _attachTTFMTracker(container) {
        if ((ttfmTrackerAudioAttached && this.isAudioTrack())
            || (ttfmTrackerVideoAttached && this.isVideoTrack())) {
            return;
        }
        if (this.isAudioTrack()) {
            ttfmTrackerAudioAttached = true;
        }
        if (this.isVideoTrack()) {
            ttfmTrackerVideoAttached = true;
        }
        container.addEventListener('canplay', this._playCallback.bind(this));
    }

    /**
     * Called when the track has been attached to a new container.
     *
     * @param {HTMLElement} container the HTML container which can be a 'video' or 'audio' element.
     * @private
     */
    _onTrackAttach(container) {
        containerEvents.forEach(event => {
            container.addEventListener(event, this._containerHandlers[event]);
        });
    }

    /**
     * Called when the track has been detached from a container.
     *
     * @param {HTMLElement} container the HTML container which can be a 'video' or 'audio' element.
     * @private
     */
    _onTrackDetach(container) {
        containerEvents.forEach(event => {
            container.removeEventListener(event, this._containerHandlers[event]);
        });
    }

    /**
     * An event handler for events triggered by the attached container.
     *
     * @param {string} type - The type of the event.
     */
    _containerEventHandler(type) {
        logger.debug(`${type} handler was called for a container with attached ${this}`);
    }

    /**
     * Returns a string with a description of the current status of the track.
     *
     * @returns {string}
     */
    _getStatus() {
        const { enabled, muted, readyState } = this.track;

        return `readyState: ${readyState}, muted: ${muted}, enabled: ${enabled}`;
    }

    /**
     * Initializes trackStreamingStatusImpl.
     */
    _initTrackStreamingStatus() {
        const config = this.conference.options.config;

        this._trackStreamingStatus = TrackStreamingStatus.ACTIVE;
        this._trackStreamingStatusImpl = new TrackStreamingStatusImpl(
            this.rtc,
            this.conference,
            this,
            {
                // These options are not public API, leaving them here only as an entry point through config for
                // tuning purposes. Default values should be adjusted as soon as optimal values are discovered.
                p2pRtcMuteTimeout: config._p2pConnStatusRtcMuteTimeout,
                rtcMuteTimeout: config._peerConnStatusRtcMuteTimeout,
                outOfForwardedSourcesTimeout: config._peerConnStatusOutOfLastNTimeout
            });
        this._trackStreamingStatusImpl.init();

        // In some edge cases, both the browser's 'unmute' and the bridge's forwarded-sources events are received
        // before a LargeVideoUpdate is scheduled for auto-pinning a new screenshare track. If there are no layout
        // changes and no further track events are received for the SS track, a black tile will be displayed for
        // screenshare on stage. Fire a TRACK_STREAMING_STATUS_CHANGED event if the media is already being received
        // for the remote track to prevent this from happening.
        !this._trackStreamingStatusImpl.isVideoTrackFrozen()
            && this.rtc.eventEmitter.emit(
                JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
                this,
                this._trackStreamingStatus);
    }

    /**
     * Disposes trackStreamingStatusImpl and clears trackStreamingStatus.
     */
    _disposeTrackStreamingStatus() {
        if (this._trackStreamingStatusImpl) {
            this._trackStreamingStatusImpl.dispose();
            this._trackStreamingStatusImpl = null;
            this._trackStreamingStatus = null;
        }
    }

    /**
     * Updates the track's streaming status.
     *
     * @param {string} status the current track streaming state. {@link TrackStreamingStatus}.
     */
    _setTrackStreamingStatus(status) {
        this._trackStreamingStatus = status;
    }

    /**
     * Returns the track's streaming status.
     *
     * @returns {string} the streaming status <tt>TrackStreamingStatus</tt> of the track. Returns null
     * if trackStreamingStatusImpl hasn't been initialized.
     *
     * {@link TrackStreamingStatus}.
     */
    getTrackStreamingStatus() {
        return this._trackStreamingStatus;
    }

    /**
     * Clears the timestamp of when the track entered forwarded sources.
     */
    _clearEnteredForwardedSourcesTimestamp() {
        this._enteredForwardedSourcesTimestamp = null;
    }

    /**
     * Updates the timestamp of when the track entered forwarded sources.
     *
     * @param {number} timestamp the time in millis
     */
    _setEnteredForwardedSourcesTimestamp(timestamp) {
        this._enteredForwardedSourcesTimestamp = timestamp;
    }

    /**
     * Returns the timestamp of when the track entered forwarded sources.
     *
     * @returns {number} the time in millis
     */
    _getEnteredForwardedSourcesTimestamp() {
        return this._enteredForwardedSourcesTimestamp;
    }

    /**
     * Creates a text representation of this remote track instance.
     * @return {string}
     */
    toString() {
        return `RemoteTrack[userID: ${this.getParticipantId()}, type: ${this.getType()}, ssrc: ${
            this.getSSRC()}, p2p: ${this.isP2P}, sourceName: ${this._sourceName}, status: {${this._getStatus()}}]`;
    }
}
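
/*
 * Minimal consumption sketch, under assumptions: a joined JitsiConference named `conference`, the library's
 * standard JitsiConferenceEvents, and a hypothetical 'remoteVideo' element. Remote tracks are created by the RTC
 * layer and surfaced to applications via TRACK_ADDED, after which they can be attached to a media element:
 *
 *     conference.on(JitsiConferenceEvents.TRACK_ADDED, track => {
 *         if (!track.isLocal() && track.isVideoTrack()) {
 *             track.attach(document.getElementById('remoteVideo'));
 *         }
 *     });
 */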