modified lib-jitsi-meet dev repo
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

JitsiRemoteTrack.js 16KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497
  1. import * as JitsiTrackEvents from '../../JitsiTrackEvents';
  2. import { createTtfmEvent } from '../../service/statistics/AnalyticsEvents';
  3. import TrackStreamingStatusImpl, { TrackStreamingStatus } from '../connectivity/TrackStreamingStatus';
  4. import Statistics from '../statistics/statistics';
  5. import JitsiTrack from './JitsiTrack';
  6. const logger = require('@jitsi/logger').getLogger(__filename);
  7. const RTCEvents = require('../../service/RTC/RTCEvents');
  8. let ttfmTrackerAudioAttached = false;
  9. let ttfmTrackerVideoAttached = false;
  10. /**
  11. * List of container events that we are going to process. _onContainerEventHandler will be added as listener to the
  12. * container for every event in the list.
  13. */
  14. const containerEvents = [ 'abort', 'canplaythrough', 'ended', 'error' ];
  15. /* eslint-disable max-params */
  16. /**
  17. * Represents a single media track (either audio or video).
  18. */
  19. export default class JitsiRemoteTrack extends JitsiTrack {
  20. /**
  21. * Creates new JitsiRemoteTrack instance.
  22. * @param {RTC} rtc the RTC service instance.
  23. * @param {JitsiConference} conference the conference to which this track
  24. * belongs to
  25. * @param {string} ownerEndpointId the endpoint ID of the track owner
  26. * @param {MediaStream} stream WebRTC MediaStream, parent of the track
  27. * @param {MediaStreamTrack} track underlying WebRTC MediaStreamTrack for
  28. * the new JitsiRemoteTrack
  29. * @param {MediaType} mediaType the type of the media
  30. * @param {VideoType} videoType the type of the video if applicable
  31. * @param {number} ssrc the SSRC number of the Media Stream
  32. * @param {boolean} muted the initial muted state
  33. * @param {boolean} isP2P indicates whether or not this track belongs to a
  34. * P2P session
  35. * @param {String} sourceName the source name signaled for the track
  36. * @throws {TypeError} if <tt>ssrc</tt> is not a number.
  37. * @constructor
  38. */
  39. constructor(
  40. rtc,
  41. conference,
  42. ownerEndpointId,
  43. stream,
  44. track,
  45. mediaType,
  46. videoType,
  47. ssrc,
  48. muted,
  49. isP2P,
  50. sourceName) {
  51. super(
  52. conference,
  53. stream,
  54. track,
  55. () => {
  56. // Nothing to do if the track is inactive.
  57. },
  58. mediaType,
  59. videoType);
  60. this.rtc = rtc;
  61. // Prevent from mixing up type of SSRC which should be a number
  62. if (typeof ssrc !== 'number') {
  63. throw new TypeError(`SSRC ${ssrc} is not a number`);
  64. }
  65. this.ssrc = ssrc;
  66. this.ownerEndpointId = ownerEndpointId;
  67. this.muted = muted;
  68. this.isP2P = isP2P;
  69. this._sourceName = sourceName;
  70. this._trackStreamingStatus = null;
  71. this._trackStreamingStatusImpl = null;
  72. /**
  73. * This holds the timestamp indicating when remote video track entered forwarded sources set. Track entering
  74. * forwardedSources will have streaming status restoring and when we start receiving video will become active,
  75. * but if video is not received for certain time {@link DEFAULT_RESTORING_TIMEOUT} that track streaming status
  76. * will become interrupted.
  77. */
  78. this._enteredForwardedSourcesTimestamp = null;
  79. this.addEventListener = this.on = this._addEventListener.bind(this);
  80. this.removeEventListener = this.off = this._removeEventListener.bind(this);
  81. logger.debug(`New remote track created: ${this}`);
  82. // we want to mark whether the track has been ever muted
  83. // to detect ttfm events for startmuted conferences, as it can
  84. // significantly increase ttfm values
  85. this.hasBeenMuted = muted;
  86. // Bind 'onmute' and 'onunmute' event handlers
  87. if (this.rtc && this.track) {
  88. this._bindTrackHandlers();
  89. }
  90. this._containerHandlers = {};
  91. containerEvents.forEach(event => {
  92. this._containerHandlers[event] = this._containerEventHandler.bind(this, event);
  93. });
  94. }
  95. /* eslint-enable max-params */
  96. /**
  97. * Attaches the track handlers.
  98. *
  99. * @returns {void}
  100. */
  101. _bindTrackHandlers() {
  102. this.track.addEventListener('mute', () => this._onTrackMute());
  103. this.track.addEventListener('unmute', () => this._onTrackUnmute());
  104. this.track.addEventListener('ended', () => {
  105. logger.debug(`"onended" event(${Date.now()}): ${this}`);
  106. });
  107. }
  108. /**
  109. * Overrides addEventListener method to init TrackStreamingStatus instance when there are listeners for the
  110. * {@link JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED} event.
  111. *
  112. * @param {string} event - event name
  113. * @param {function} handler - event handler
  114. */
  115. _addEventListener(event, handler) {
  116. super.addListener(event, handler);
  117. if (event === JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED
  118. && this.listenerCount(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED)
  119. && !this._trackStreamingStatusImpl
  120. ) {
  121. this._initTrackStreamingStatus();
  122. logger.debug(`Initializing track streaming status: ${this._sourceName}`);
  123. }
  124. }
  125. /**
  126. * Overrides removeEventListener method to dispose TrackStreamingStatus instance.
  127. *
  128. * @param {string} event - event name
  129. * @param {function} handler - event handler
  130. */
  131. _removeEventListener(event, handler) {
  132. super.removeListener(event, handler);
  133. if (event === JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED
  134. && !this.listenerCount(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED)
  135. ) {
  136. this._disposeTrackStreamingStatus();
  137. logger.debug(`Disposing track streaming status: ${this._sourceName}`);
  138. }
  139. }
  140. /**
  141. * Callback invoked when the track is muted. Emits an event notifying
  142. * listeners of the mute event.
  143. *
  144. * @private
  145. * @returns {void}
  146. */
  147. _onTrackMute() {
  148. logger.debug(`"onmute" event(${Date.now()}): ${this}`);
  149. this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_MUTE, this);
  150. }
  151. /**
  152. * Callback invoked when the track is unmuted. Emits an event notifying
  153. * listeners of the mute event.
  154. *
  155. * @private
  156. * @returns {void}
  157. */
  158. _onTrackUnmute() {
  159. logger.debug(`"onunmute" event(${Date.now()}): ${this}`);
  160. this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_UNMUTE, this);
  161. }
  162. /**
  163. * Removes attached event listeners and dispose TrackStreamingStatus .
  164. *
  165. * @returns {Promise}
  166. */
  167. dispose() {
  168. this._disposeTrackStreamingStatus();
  169. return super.dispose();
  170. }
  171. /**
  172. * Sets current muted status and fires an events for the change.
  173. * @param value the muted status.
  174. */
  175. setMute(value) {
  176. if (this.muted === value) {
  177. return;
  178. }
  179. if (value) {
  180. this.hasBeenMuted = true;
  181. }
  182. // we can have a fake video stream
  183. if (this.stream) {
  184. this.stream.muted = value;
  185. }
  186. this.muted = value;
  187. this.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
  188. }
  189. /**
  190. * Returns the current muted status of the track.
  191. * @returns {boolean|*|JitsiRemoteTrack.muted} <tt>true</tt> if the track is
  192. * muted and <tt>false</tt> otherwise.
  193. */
  194. isMuted() {
  195. return this.muted;
  196. }
  197. /**
  198. * Returns the participant id which owns the track.
  199. *
  200. * @returns {string} the id of the participants. It corresponds to the
  201. * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
  202. */
  203. getParticipantId() {
  204. return this.ownerEndpointId;
  205. }
  206. /**
  207. * Return false;
  208. */
  209. isLocal() {
  210. return false;
  211. }
  212. /**
  213. * Returns the synchronization source identifier (SSRC) of this remote
  214. * track.
  215. *
  216. * @returns {number} the SSRC of this remote track.
  217. */
  218. getSSRC() {
  219. return this.ssrc;
  220. }
  221. /**
  222. * Returns the tracks source name
  223. *
  224. * @returns {string} the track's source name
  225. */
  226. getSourceName() {
  227. return this._sourceName;
  228. }
  229. /**
  230. * Update the properties when the track is remapped to another source.
  231. *
  232. * @param {string} owner The endpoint ID of the new owner.
  233. * @param {string} name The new source name.
  234. */
  235. setNewSource(owner, name) {
  236. this.ownerEndpointId = owner;
  237. this._sourceName = name;
  238. this.emit(JitsiTrackEvents.TRACK_OWNER_CHANGED, owner);
  239. }
  240. /**
  241. * Changes the video type of the track.
  242. *
  243. * @param {string} type - The new video type("camera", "desktop").
  244. */
  245. _setVideoType(type) {
  246. if (this.videoType === type) {
  247. return;
  248. }
  249. this.videoType = type;
  250. this.emit(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, type);
  251. }
  252. /**
  253. * Handles track play events.
  254. */
  255. _playCallback() {
  256. if (!this.conference.room) {
  257. return;
  258. }
  259. const type = this.isVideoTrack() ? 'video' : 'audio';
  260. const now = window.performance.now();
  261. console.log(`(TIME) Render ${type}:\t`, now);
  262. this.conference.getConnectionTimes()[`${type}.render`] = now;
  263. // The conference can be started without calling GUM
  264. // FIXME if there would be a module for connection times this kind
  265. // of logic (gumDuration or ttfm) should end up there
  266. const gumStart = window.connectionTimes['obtainPermissions.start'];
  267. const gumEnd = window.connectionTimes['obtainPermissions.end'];
  268. const gumDuration
  269. = !isNaN(gumEnd) && !isNaN(gumStart) ? gumEnd - gumStart : 0;
  270. // Subtract the muc.joined-to-session-initiate duration because jicofo
  271. // waits until there are 2 participants to start Jingle sessions.
  272. const ttfm = now
  273. - (this.conference.getConnectionTimes()['session.initiate']
  274. - this.conference.getConnectionTimes()['muc.joined'])
  275. - gumDuration;
  276. this.conference.getConnectionTimes()[`${type}.ttfm`] = ttfm;
  277. console.log(`(TIME) TTFM ${type}:\t`, ttfm);
  278. Statistics.sendAnalytics(createTtfmEvent(
  279. {
  280. 'media_type': type,
  281. muted: this.hasBeenMuted,
  282. value: ttfm
  283. }));
  284. }
  285. /**
  286. * Attach time to first media tracker only if there is conference and only
  287. * for the first element.
  288. * @param container the HTML container which can be 'video' or 'audio'
  289. * element.
  290. * @private
  291. */
  292. _attachTTFMTracker(container) {
  293. if ((ttfmTrackerAudioAttached && this.isAudioTrack())
  294. || (ttfmTrackerVideoAttached && this.isVideoTrack())) {
  295. return;
  296. }
  297. if (this.isAudioTrack()) {
  298. ttfmTrackerAudioAttached = true;
  299. }
  300. if (this.isVideoTrack()) {
  301. ttfmTrackerVideoAttached = true;
  302. }
  303. container.addEventListener('canplay', this._playCallback.bind(this));
  304. }
  305. /**
  306. * Called when the track has been attached to a new container.
  307. *
  308. * @param {HTMLElement} container the HTML container which can be 'video' or 'audio' element.
  309. * @private
  310. */
  311. _onTrackAttach(container) {
  312. containerEvents.forEach(event => {
  313. container.addEventListener(event, this._containerHandlers[event]);
  314. });
  315. }
  316. /**
  317. * Called when the track has been detached from a container.
  318. *
  319. * @param {HTMLElement} container the HTML container which can be 'video' or 'audio' element.
  320. * @private
  321. */
  322. _onTrackDetach(container) {
  323. containerEvents.forEach(event => {
  324. container.removeEventListener(event, this._containerHandlers[event]);
  325. });
  326. }
  327. /**
  328. * An event handler for events triggered by the attached container.
  329. *
  330. * @param {string} type - The type of the event.
  331. */
  332. _containerEventHandler(type) {
  333. logger.debug(`${type} handler was called for a container with attached ${this}`);
  334. }
  335. /**
  336. * Returns a string with a description of the current status of the track.
  337. *
  338. * @returns {string}
  339. */
  340. _getStatus() {
  341. const { enabled, muted, readyState } = this.track;
  342. return `readyState: ${readyState}, muted: ${muted}, enabled: ${enabled}`;
  343. }
  344. /**
  345. * Initializes trackStreamingStatusImpl.
  346. */
  347. _initTrackStreamingStatus() {
  348. const config = this.conference.options.config;
  349. this._trackStreamingStatus = TrackStreamingStatus.ACTIVE;
  350. this._trackStreamingStatusImpl = new TrackStreamingStatusImpl(
  351. this.rtc,
  352. this.conference,
  353. this,
  354. {
  355. // These options are not public API, leaving it here only as an entry point through config for
  356. // tuning up purposes. Default values should be adjusted as soon as optimal values are discovered.
  357. p2pRtcMuteTimeout: config._p2pConnStatusRtcMuteTimeout,
  358. rtcMuteTimeout: config._peerConnStatusRtcMuteTimeout,
  359. outOfForwardedSourcesTimeout: config._peerConnStatusOutOfLastNTimeout
  360. });
  361. this._trackStreamingStatusImpl.init();
  362. // In some edge cases, both browser 'unmute' and bridge's forwarded sources events are received before a
  363. // LargeVideoUpdate is scheduled for auto-pinning a new screenshare track. If there are no layout changes and
  364. // no further track events are received for the SS track, a black tile will be displayed for screenshare on
  365. // stage. Fire a TRACK_STREAMING_STATUS_CHANGED event if the media is already being received for the remote
  366. // track to prevent this from happening.
  367. !this._trackStreamingStatusImpl.isVideoTrackFrozen()
  368. && this.rtc.eventEmitter.emit(
  369. JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
  370. this,
  371. this._trackStreamingStatus);
  372. }
  373. /**
  374. * Disposes trackStreamingStatusImpl and clears trackStreamingStatus.
  375. */
  376. _disposeTrackStreamingStatus() {
  377. if (this._trackStreamingStatusImpl) {
  378. this._trackStreamingStatusImpl.dispose();
  379. this._trackStreamingStatusImpl = null;
  380. this._trackStreamingStatus = null;
  381. }
  382. }
  383. /**
  384. * Updates track's streaming status.
  385. *
  386. * @param {string} state the current track streaming state. {@link TrackStreamingStatus}.
  387. */
  388. _setTrackStreamingStatus(status) {
  389. this._trackStreamingStatus = status;
  390. }
  391. /**
  392. * Returns track's streaming status.
  393. *
  394. * @returns {string} the streaming status <tt>TrackStreamingStatus</tt> of the track. Returns null
  395. * if trackStreamingStatusImpl hasn't been initialized.
  396. *
  397. * {@link TrackStreamingStatus}.
  398. */
  399. getTrackStreamingStatus() {
  400. return this._trackStreamingStatus;
  401. }
  402. /**
  403. * Clears the timestamp of when the track entered forwarded sources.
  404. */
  405. _clearEnteredForwardedSourcesTimestamp() {
  406. this._enteredForwardedSourcesTimestamp = null;
  407. }
  408. /**
  409. * Updates the timestamp of when the track entered forwarded sources.
  410. *
  411. * @param {number} timestamp the time in millis
  412. */
  413. _setEnteredForwardedSourcesTimestamp(timestamp) {
  414. this._enteredForwardedSourcesTimestamp = timestamp;
  415. }
  416. /**
  417. * Returns the timestamp of when the track entered forwarded sources.
  418. *
  419. * @returns {number} the time in millis
  420. */
  421. _getEnteredForwardedSourcesTimestamp() {
  422. return this._enteredForwardedSourcesTimestamp;
  423. }
  424. /**
  425. * Creates a text representation of this remote track instance.
  426. * @return {string}
  427. */
  428. toString() {
  429. return `RemoteTrack[userID: ${this.getParticipantId()}, type: ${this.getType()}, ssrc: ${
  430. this.getSSRC()}, p2p: ${this.isP2P}, sourceName: ${this._sourceName}, status: {${this._getStatus()}}]`;
  431. }
  432. }