
JitsiRemoteTrack.js

import * as JitsiTrackEvents from '../../JitsiTrackEvents';
import { createTtfmEvent } from '../../service/statistics/AnalyticsEvents';
import Statistics from '../statistics/statistics';
import JitsiTrack from './JitsiTrack';

const logger = require('@jitsi/logger').getLogger(__filename);
const RTCEvents = require('../../service/RTC/RTCEvents');

let ttfmTrackerAudioAttached = false;
let ttfmTrackerVideoAttached = false;

/**
 * List of container events that we are going to process. _containerEventHandler will be added as a listener to the
 * container for every event in the list.
 */
const containerEvents = [ 'abort', 'canplaythrough', 'ended', 'error' ];

/* eslint-disable max-params */
/**
 * Represents a single media track (either audio or video).
 */
export default class JitsiRemoteTrack extends JitsiTrack {
    /**
     * Creates a new JitsiRemoteTrack instance.
     * @param {RTC} rtc the RTC service instance.
     * @param {JitsiConference} conference the conference to which this track
     * belongs
     * @param {string} ownerEndpointId the endpoint ID of the track owner
     * @param {MediaStream} stream the WebRTC MediaStream, parent of the track
     * @param {MediaStreamTrack} track the underlying WebRTC MediaStreamTrack for
     * the new JitsiRemoteTrack
     * @param {MediaType} mediaType the type of the media
     * @param {VideoType} videoType the type of the video, if applicable
     * @param {number} ssrc the SSRC number of the media stream
     * @param {boolean} muted the initial muted state
     * @param {boolean} isP2P indicates whether or not this track belongs to a
     * P2P session
     * @param {String} sourceName the source name signaled for the track
     * @throws {TypeError} if <tt>ssrc</tt> is not a number.
     * @constructor
     */
    constructor(
            rtc,
            conference,
            ownerEndpointId,
            stream,
            track,
            mediaType,
            videoType,
            ssrc,
            muted,
            isP2P,
            sourceName) {
        super(
            conference,
            stream,
            track,
            () => {
                // Nothing to do if the track is inactive.
            },
            mediaType,
            videoType);
        this.rtc = rtc;

        // Prevent mixing up the type of SSRC, which should be a number.
        if (typeof ssrc !== 'number') {
            throw new TypeError(`SSRC ${ssrc} is not a number`);
        }
        this.ssrc = ssrc;
        this.ownerEndpointId = ownerEndpointId;
        this.muted = muted;
        this.isP2P = isP2P;
        this._sourceName = sourceName;

        logger.debug(`New remote track added: ${this}`);

        // Mark whether the track has ever been muted, in order to detect TTFM
        // events for start-muted conferences, as muting can significantly
        // increase TTFM values.
        this.hasBeenMuted = muted;

        // Bind 'onmute' and 'onunmute' event handlers.
        if (this.rtc && this.track) {
            this._bindTrackHandlers();
        }

        this._containerHandlers = {};
        containerEvents.forEach(event => {
            this._containerHandlers[event] = this._containerEventHandler.bind(this, event);
        });
    }

    /* eslint-enable max-params */

    /**
     * Attaches the track handlers.
     *
     * @returns {void}
     */
    _bindTrackHandlers() {
        this.track.addEventListener('mute', () => this._onTrackMute());
        this.track.addEventListener('unmute', () => this._onTrackUnmute());
        this.track.addEventListener('ended', () => {
            logger.debug(`"onended" event(${Date.now()}): ${this}`);
        });
    }

    /**
     * Callback invoked when the track is muted. Emits an event notifying
     * listeners of the mute event.
     *
     * @private
     * @returns {void}
     */
    _onTrackMute() {
        logger.debug(`"onmute" event(${Date.now()}): ${this}`);
        this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_MUTE, this);
    }

    /**
     * Callback invoked when the track is unmuted. Emits an event notifying
     * listeners of the unmute event.
     *
     * @private
     * @returns {void}
     */
    _onTrackUnmute() {
        logger.debug(`"onunmute" event(${Date.now()}): ${this}`);
        this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_UNMUTE, this);
    }

    /**
     * Sets the current muted status and fires an event for the change.
     * @param value the muted status.
     */
    setMute(value) {
        if (this.muted === value) {
            return;
        }

        if (value) {
            this.hasBeenMuted = true;
        }

        // We can have a fake video stream.
        if (this.stream) {
            this.stream.muted = value;
        }

        this.muted = value;
        this.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
    }

    /**
     * Returns the current muted status of the track.
     * @returns {boolean} <tt>true</tt> if the track is muted and <tt>false</tt>
     * otherwise.
     */
    isMuted() {
        return this.muted;
    }

    /**
     * Returns the participant id which owns the track.
     *
     * @returns {string} the id of the participant. It corresponds to the
     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
     */
    getParticipantId() {
        return this.ownerEndpointId;
    }

    /**
     * Returns false, since this is a remote track.
     * @returns {boolean}
     */
    isLocal() {
        return false;
    }

    /**
     * Returns the synchronization source identifier (SSRC) of this remote
     * track.
     *
     * @returns {number} the SSRC of this remote track.
     */
    getSSRC() {
        return this.ssrc;
    }

    /**
     * Returns the track's source name.
     *
     * @returns {string} the track's source name.
     */
    getSourceName() {
        return this._sourceName;
    }

    /**
     * Changes the video type of the track.
     *
     * @param {string} type - The new video type ('camera', 'desktop').
     */
    _setVideoType(type) {
        if (this.videoType === type) {
            return;
        }
        this.videoType = type;
        this.emit(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, type);
    }

    /**
     * Handles track play events.
     */
    _playCallback() {
        const type = this.isVideoTrack() ? 'video' : 'audio';
        const now = window.performance.now();

        console.log(`(TIME) Render ${type}:\t`, now);
        this.conference.getConnectionTimes()[`${type}.render`] = now;

        // The conference can be started without calling GUM.
        // FIXME if there were a module for connection times, this kind
        // of logic (gumDuration or ttfm) should end up there.
        const gumStart = window.connectionTimes['obtainPermissions.start'];
        const gumEnd = window.connectionTimes['obtainPermissions.end'];
        const gumDuration
            = !isNaN(gumEnd) && !isNaN(gumStart) ? gumEnd - gumStart : 0;

        // Subtract the muc.joined-to-session-initiate duration, because jicofo
        // waits until there are 2 participants to start Jingle sessions.
        const ttfm = now
            - (this.conference.getConnectionTimes()['session.initiate']
                - this.conference.getConnectionTimes()['muc.joined'])
            - gumDuration;

        this.conference.getConnectionTimes()[`${type}.ttfm`] = ttfm;
        console.log(`(TIME) TTFM ${type}:\t`, ttfm);
        Statistics.sendAnalytics(createTtfmEvent(
            {
                'media_type': type,
                muted: this.hasBeenMuted,
                value: ttfm
            }));
    }

    /**
     * Attaches the time-to-first-media tracker, only if there is a conference
     * and only for the first element.
     * @param container the HTML container which can be a 'video' or 'audio'
     * element.
     * @private
     */
    _attachTTFMTracker(container) {
        if ((ttfmTrackerAudioAttached && this.isAudioTrack())
                || (ttfmTrackerVideoAttached && this.isVideoTrack())) {
            return;
        }

        if (this.isAudioTrack()) {
            ttfmTrackerAudioAttached = true;
        }
        if (this.isVideoTrack()) {
            ttfmTrackerVideoAttached = true;
        }

        container.addEventListener('canplay', this._playCallback.bind(this));
    }

    /**
     * Called when the track has been attached to a new container.
     *
     * @param {HTMLElement} container the HTML container which can be a 'video' or 'audio' element.
     * @private
     */
    _onTrackAttach(container) {
        containerEvents.forEach(event => {
            container.addEventListener(event, this._containerHandlers[event]);
        });
    }

    /**
     * Called when the track has been detached from a container.
     *
     * @param {HTMLElement} container the HTML container which can be a 'video' or 'audio' element.
     * @private
     */
    _onTrackDetach(container) {
        containerEvents.forEach(event => {
            container.removeEventListener(event, this._containerHandlers[event]);
        });
    }

    /**
     * An event handler for events triggered by the attached container.
     *
     * @param {string} type - The type of the event.
     */
    _containerEventHandler(type) {
        logger.debug(`${type} handler was called for a container with attached ${this}`);
    }

    /**
     * Returns a string with a description of the current status of the track.
     *
     * @returns {string}
     */
    _getStatus() {
        const { enabled, muted, readyState } = this.track;

        return `readyState: ${readyState}, muted: ${muted}, enabled: ${enabled}`;
    }

    /**
     * Creates a text representation of this remote track instance.
     * @return {string}
     */
    toString() {
        return `RemoteTrack[userID: ${this.getParticipantId()}, type: ${this.getType()}, ssrc: ${
            this.getSSRC()}, p2p: ${this.isP2P}, sourceName: ${this._sourceName}, status: ${this._getStatus()}]`;
    }
}
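
For context, a minimal consumer-side sketch of how an application might render a JitsiRemoteTrack and react to its mute changes follows. It is not part of this file: the already-joined `conference` object, the `JitsiMeetJS.events` constants and `track.attach()` are assumed from the rest of lib-jitsi-meet's public API.

// Minimal consumer-side sketch (assumptions: an already-joined JitsiConference
// instance `conference` and the JitsiMeetJS entry point; neither is defined in
// JitsiRemoteTrack.js).
conference.on(JitsiMeetJS.events.conference.TRACK_ADDED, track => {
    // TRACK_ADDED also fires for local tracks; JitsiRemoteTrack.isLocal() is false.
    if (track.isLocal()) {
        return;
    }

    // Create a matching media element. attach() is expected to invoke
    // _onTrackAttach() (container event handlers) and _attachTTFMTracker()
    // ('canplay' hook for the first audio/video element) via JitsiTrack.
    const element = document.createElement(track.getType());

    element.autoplay = true;
    document.body.appendChild(element);
    track.attach(element);

    // setMute(), driven by signaling, emits TRACK_MUTE_CHANGED with the track.
    track.addEventListener(JitsiMeetJS.events.track.TRACK_MUTE_CHANGED, t => {
        console.log(`Remote track ${t.getSourceName()} muted: ${t.isMuted()}`);
    });
});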