
JitsiRemoteTrack.js

import JitsiTrack from './JitsiTrack';
import * as JitsiTrackEvents from '../../JitsiTrackEvents';
import RTCBrowserType from './RTCBrowserType';
import Statistics from '../statistics/statistics';

const logger = require('jitsi-meet-logger').getLogger(__filename);
const RTCEvents = require('../../service/RTC/RTCEvents');

let ttfmTrackerAudioAttached = false;
let ttfmTrackerVideoAttached = false;

/* eslint-disable max-params */

/**
 * Represents a single media track (either audio or video).
 */
export default class JitsiRemoteTrack extends JitsiTrack {
    /**
     * Creates a new JitsiRemoteTrack instance.
     * @param {RTC} rtc the RTC service instance.
     * @param {JitsiConference} conference the conference to which this track
     * belongs
     * @param {string} ownerEndpointId the endpoint ID of the track owner
     * @param {MediaStream} stream WebRTC MediaStream, parent of the track
     * @param {MediaStreamTrack} track underlying WebRTC MediaStreamTrack for
     * the new JitsiRemoteTrack
     * @param {MediaType} mediaType the type of the media
     * @param {VideoType} videoType the type of the video, if applicable
     * @param {number} ssrc the SSRC number of the media stream
     * @param {boolean} muted the initial muted state
     * @param {boolean} isP2P indicates whether or not this track belongs to a
     * P2P session
     * @throws {TypeError} if <tt>ssrc</tt> is not a number.
     * @constructor
     */
    constructor(
            rtc,
            conference,
            ownerEndpointId,
            stream,
            track,
            mediaType,
            videoType,
            ssrc,
            muted,
            isP2P) {
        super(
            conference,
            stream,
            track,
            () => {
                // Nothing to do if the track is inactive.
            },
            mediaType,
            videoType);
        this.rtc = rtc;

        // Validate the SSRC type early; it must always be a number.
        if (typeof ssrc !== 'number') {
            throw new TypeError(`SSRC ${ssrc} is not a number`);
        }
        this.ssrc = ssrc;
        this.ownerEndpointId = ownerEndpointId;
        this.muted = muted;
        this.isP2P = isP2P;

        // Remember whether the track has ever been muted, in order to detect
        // TTFM events for "start muted" conferences, since starting muted can
        // significantly increase TTFM values.
        this.hasBeenMuted = muted;

        // Bind 'onmute' and 'onunmute' event handlers.
        if (this.rtc && this.track) {
            this._bindMuteHandlers();
        }
    }

    /* eslint-enable max-params */

    /**
     * Attaches the track muted handlers.
     *
     * @returns {void}
     */
    _bindMuteHandlers() {
        // Use feature detection to find out which event handling function is
        // supported. On Internet Explorer, which uses the Temasys/Firebreath
        // plugin, the track will have attachEvent instead of addEventListener.
        //
        // FIXME it would be better to use the recently added '_setHandler'
        // method, but:
        // 1. It does not allow setting more than one handler for an event.
        // 2. It mixes MediaStream ('inactive') with MediaStreamTrack events.
        // 3. Allowing more than one event handler to be bound requires too
        //    much refactoring around the camera issues detection.
        if (this.track.addEventListener) {
            this.track.addEventListener('mute', () => this._onTrackMute());
            this.track.addEventListener('unmute', () => this._onTrackUnmute());
        } else if (this.track.attachEvent) {
            // FIXME Internet Explorer does not emit mute/unmute events.
            this.track.attachEvent('onmute', () => this._onTrackMute());
            this.track.attachEvent('onunmute', () => this._onTrackUnmute());
        }
    }

    /**
     * Callback invoked when the track is muted. Emits an event notifying
     * listeners of the mute event.
     *
     * @private
     * @returns {void}
     */
    _onTrackMute() {
        logger.debug(
            `"onmute" event(${Date.now()}): `,
            this.getParticipantId(), this.getType(), this.getSSRC());

        this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_MUTE, this);
    }

    /**
     * Callback invoked when the track is unmuted. Emits an event notifying
     * listeners of the unmute event.
     *
     * @private
     * @returns {void}
     */
    _onTrackUnmute() {
        logger.debug(
            `"onunmute" event(${Date.now()}): `,
            this.getParticipantId(), this.getType(), this.getSSRC());

        this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_UNMUTE, this);
    }

    /**
     * Sets the current muted status and fires an event for the change.
     * @param value the muted status.
     */
    setMute(value) {
        if (this.muted === value) {
            return;
        }

        if (value) {
            this.hasBeenMuted = true;
        }

        // We can have a fake video stream.
        if (this.stream) {
            this.stream.muted = value;
        }

        this.muted = value;
        this.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
    }

    /**
     * Returns the current muted status of the track.
     * @returns {boolean} <tt>true</tt> if the track is muted and
     * <tt>false</tt> otherwise.
     */
    isMuted() {
        return this.muted;
    }

    /**
     * Returns the id of the participant who owns the track.
     *
     * @returns {string} the id of the participant. It corresponds to the
     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
     */
    getParticipantId() {
        return this.ownerEndpointId;
    }

    /**
     * Returns false: a remote track is never local.
     * @returns {boolean} <tt>false</tt>
     */
    isLocal() {
        return false;
    }

    /**
     * Returns the synchronization source identifier (SSRC) of this remote
     * track.
     *
     * @returns {number} the SSRC of this remote track.
     */
    getSSRC() {
        return this.ssrc;
    }

    /**
     * Changes the video type of the track.
     *
     * @param {string} type - The new video type ("camera", "desktop").
     */
    _setVideoType(type) {
        if (this.videoType === type) {
            return;
        }

        this.videoType = type;
        this.emit(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, type);
    }

    /**
     * Handles track play events.
     */
    _playCallback() {
        const type = this.isVideoTrack() ? 'video' : 'audio';

        const now = window.performance.now();

        console.log(`(TIME) Render ${type}:\t`, now);
        this.conference.getConnectionTimes()[`${type}.render`] = now;

        // Time to first media: the time elapsed since page load, reduced by
        // the difference between the 'session.initiate' and 'muc.joined'
        // timestamps and by the time spent obtaining media permissions (gUM).
        const ttfm = now
            - (this.conference.getConnectionTimes()['session.initiate']
                - this.conference.getConnectionTimes()['muc.joined'])
            - (window.connectionTimes['obtainPermissions.end']
                - window.connectionTimes['obtainPermissions.start']);

        this.conference.getConnectionTimes()[`${type}.ttfm`] = ttfm;
        console.log(`(TIME) TTFM ${type}:\t`, ttfm);

        let eventName = `${type}.ttfm`;

        // Tag the analytics event if the track has ever been muted, because
        // "start muted" conferences can significantly inflate TTFM values.
        if (this.hasBeenMuted) {
            eventName += '.muted';
        }
        Statistics.analytics.sendEvent(eventName, { value: ttfm });
    }

    /**
     * Attaches the time-to-first-media tracker only if there is a conference
     * and only for the first element.
     * @param container the HTML container which can be a 'video' or 'audio'
     * element. It can also be an 'object' element if the Temasys plugin is in
     * use and this method has been called previously on a video or audio HTML
     * element.
     * @private
     */
    _attachTTFMTracker(container) {
        if ((ttfmTrackerAudioAttached && this.isAudioTrack())
            || (ttfmTrackerVideoAttached && this.isVideoTrack())) {
            return;
        }

        if (this.isAudioTrack()) {
            ttfmTrackerAudioAttached = true;
        }
        if (this.isVideoTrack()) {
            ttfmTrackerVideoAttached = true;
        }

        if (RTCBrowserType.isTemasysPluginUsed()) {
            // XXX Don't require Temasys unless it's to be used because it
            // doesn't run on React Native, for example.
            const AdapterJS = require('./adapter.screenshare');

            // FIXME: this is not working for IE11.
            AdapterJS.addEvent(
                container,
                'play',
                this._playCallback.bind(this));
        } else {
            container.addEventListener(
                'canplay',
                this._playCallback.bind(this));
        }
    }

    /**
     * Creates a text representation of this remote track instance.
     * @return {string}
     */
    toString() {
        return `RemoteTrack[${this.ownerEndpointId}, ${this.getType()
        }, p2p: ${this.isP2P}]`;
    }
}
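
For context, here is a minimal sketch of how code consuming lib-jitsi-meet might react to the events this class emits. It is illustrative only: the `conference` object (an already-joined JitsiConference), the `remoteVideo` element, and the import paths are assumptions, while the event names and the track methods (isLocal, getType, getParticipantId, isMuted, attach) come from this class and its related modules.

import * as JitsiTrackEvents from './JitsiTrackEvents';
import * as JitsiConferenceEvents from './JitsiConferenceEvents';

// Assumed to exist elsewhere in the application (hypothetical):
// - `conference`: a JitsiConference instance that has already been joined.
// - `remoteVideo`: a <video> element in the page.
conference.addEventListener(JitsiConferenceEvents.TRACK_ADDED, track => {
    // TRACK_ADDED also fires for local tracks; only remote ones matter here.
    if (track.isLocal()) {
        return;
    }

    console.log(
        `remote ${track.getType()} track from ${track.getParticipantId()}`);

    // React whenever the remote participant mutes or unmutes the track.
    track.addEventListener(
        JitsiTrackEvents.TRACK_MUTE_CHANGED,
        t => console.log(`track muted: ${t.isMuted()}`));

    // Attach the track to a media element so it starts rendering, which is
    // also what triggers the TTFM measurement in _attachTTFMTracker above.
    if (track.getType() === 'video') {
        track.attach(remoteVideo);
    }
});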