
JitsiRemoteTrack.js

/* global */
const JitsiTrack = require('./JitsiTrack');
import * as JitsiTrackEvents from '../../JitsiTrackEvents';
const logger = require('jitsi-meet-logger').getLogger(__filename);
const RTCBrowserType = require('./RTCBrowserType');
const RTCEvents = require('../../service/RTC/RTCEvents');
const Statistics = require('../statistics/statistics');

let ttfmTrackerAudioAttached = false;
let ttfmTrackerVideoAttached = false;

/**
 * Represents a single remote media track (either audio or video).
 * @param {RTC} rtc the RTC service instance.
 * @param {JitsiConference} conference the conference to which this track
 * belongs
 * @param {string} ownerEndpointId the endpoint ID of the track owner
 * @param {MediaStream} stream the WebRTC MediaStream, parent of the track
 * @param {MediaStreamTrack} track the underlying WebRTC MediaStreamTrack for
 * the new JitsiRemoteTrack
 * @param {MediaType} mediaType the type of the media
 * @param {VideoType} videoType the type of the video, if applicable
 * @param {string} ssrc the SSRC number of the media stream
 * @param {boolean} muted the initial muted state
 * @constructor
 */
function JitsiRemoteTrack(rtc, conference, ownerEndpointId, stream, track,
        mediaType, videoType, ssrc, muted) {
    JitsiTrack.call(
        this,
        conference,
        stream,
        track,
        () => {
            // Nothing to do if the track is inactive.
        },
        mediaType,
        videoType,
        ssrc);
    this.rtc = rtc;
    this.ownerEndpointId = ownerEndpointId;
    this.muted = muted;

    // Remember whether the track has ever been muted. This is needed to
    // detect TTFM events in "start muted" conferences, where muting can
    // significantly increase the TTFM values.
    this.hasBeenMuted = muted;

    // Bind 'onmute' and 'onunmute' event handlers.
    if (this.rtc && this.track) {
        this._bindMuteHandlers();
    }
}

JitsiRemoteTrack.prototype = Object.create(JitsiTrack.prototype);
JitsiRemoteTrack.prototype.constructor = JitsiRemoteTrack;
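
// Illustrative sketch (comment only, not executed): JitsiRemoteTrack
// instances are created by the library's RTC layer, not by application code.
// Assuming hypothetical `rtc`, `conference`, `stream` and `track` objects
// obtained from that layer, a remote camera track would be constructed
// roughly like this:
//
//     const remoteTrack = new JitsiRemoteTrack(
//         rtc, conference, 'endpoint1', stream, track,
//         'video', 'camera', '12345678', /* muted */ false);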

JitsiRemoteTrack.prototype._bindMuteHandlers = function() {
    // Bind 'onmute'.
    // FIXME it would be better to use the recently added '_setHandler'
    // method, but:
    // 1. It does not allow setting more than one handler for an event.
    // 2. It mixes MediaStream ('inactive') with MediaStreamTrack events.
    // 3. Allowing more than one event handler to be bound requires too much
    //    refactoring around camera issue detection.
    this.track.addEventListener('mute', () => {
        logger.debug(
            `"onmute" event(${Date.now()}): `,
            this.getParticipantId(), this.getType(), this.getSSRC());
        this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_MUTE, this);
    });

    // Bind 'onunmute'.
    this.track.addEventListener('unmute', () => {
        logger.debug(
            `"onunmute" event(${Date.now()}): `,
            this.getParticipantId(), this.getType(), this.getSSRC());
        this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_UNMUTE, this);
    });
};
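
// Illustrative sketch (comment only): the RTCEvents.REMOTE_TRACK_MUTE /
// REMOTE_TRACK_UNMUTE events emitted above are consumed elsewhere in the
// library. Assuming access to the same `rtc` instance, a hypothetical
// listener could keep the JitsiRemoteTrack state in sync like this:
//
//     rtc.eventEmitter.on(RTCEvents.REMOTE_TRACK_MUTE,
//         track => track.setMute(true));
//     rtc.eventEmitter.on(RTCEvents.REMOTE_TRACK_UNMUTE,
//         track => track.setMute(false));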

/**
 * Sets the current muted status and fires an event for the change.
 * @param value the muted status.
 */
JitsiRemoteTrack.prototype.setMute = function(value) {
    if (this.muted === value) {
        return;
    }

    if (value) {
        this.hasBeenMuted = true;
    }

    // We can have a fake video stream.
    if (this.stream) {
        this.stream.muted = value;
    }

    this.muted = value;
    this.eventEmitter.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
};
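
// Illustrative sketch (comment only): code holding a remote track -- here a
// hypothetical `remoteTrack` variable -- can observe the mute-state changes
// emitted above through the track's event emitter (the same emitter used by
// setMute above):
//
//     remoteTrack.eventEmitter.on(
//         JitsiTrackEvents.TRACK_MUTE_CHANGED,
//         track => console.log(
//             `${track.getParticipantId()} muted: ${track.isMuted()}`));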

/**
 * Returns the current muted status of the track.
 * @returns {boolean|*|JitsiRemoteTrack.muted} <tt>true</tt> if the track is
 * muted and <tt>false</tt> otherwise.
 */
JitsiRemoteTrack.prototype.isMuted = function() {
    return this.muted;
};

/**
 * Returns the participant id which owns the track.
 * @returns {string} the id of the participant. It corresponds to the Colibri
 * endpoint id/MUC nickname in case of Jitsi Meet.
 */
JitsiRemoteTrack.prototype.getParticipantId = function() {
    return this.ownerEndpointId;
};

/**
 * Returns <tt>false</tt>, since a remote track is never a local track.
 * @returns {boolean} <tt>false</tt>
 */
JitsiRemoteTrack.prototype.isLocal = function() {
    return false;
};

/**
 * Returns the synchronization source identifier (SSRC) of this remote track.
 * @returns {string} the SSRC of this remote track
 */
JitsiRemoteTrack.prototype.getSSRC = function() {
    return this.ssrc;
};

/**
 * Changes the video type of the track.
 * @param type the new video type ('camera', 'desktop')
 */
JitsiRemoteTrack.prototype._setVideoType = function(type) {
    if (this.videoType === type) {
        return;
    }
    this.videoType = type;
    this.eventEmitter.emit(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, type);
};

/**
 * Handles the first playback event on the HTML element to which this track is
 * attached, records the render time and the time-to-first-media (TTFM) in the
 * conference connection times, and reports the TTFM via analytics.
 * @private
 */
JitsiRemoteTrack.prototype._playCallback = function() {
    const type = this.isVideoTrack() ? 'video' : 'audio';

    const now = window.performance.now();

    console.log(`(TIME) Render ${type}:\t`, now);
    this.conference.getConnectionTimes()[`${type}.render`] = now;

    const ttfm = now
        - (this.conference.getConnectionTimes()['session.initiate']
            - this.conference.getConnectionTimes()['muc.joined'])
        - (window.connectionTimes['obtainPermissions.end']
            - window.connectionTimes['obtainPermissions.start']);

    this.conference.getConnectionTimes()[`${type}.ttfm`] = ttfm;
    console.log(`(TIME) TTFM ${type}:\t`, ttfm);

    let eventName = `${type}.ttfm`;

    if (this.hasBeenMuted) {
        eventName += '.muted';
    }
    Statistics.analytics.sendEvent(eventName, {value: ttfm});
};
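
// Worked example (comment only, hypothetical numbers): for a video track with
// now = 12000, 'session.initiate' = 4000, 'muc.joined' = 3500 and an
// 'obtainPermissions' span of 300 ms, the formula above yields
// ttfm = 12000 - (4000 - 3500) - 300 = 11200 ms, reported under the analytics
// event name 'video.ttfm' (or 'video.ttfm.muted' if the track has ever been
// muted).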

/**
 * Attaches the time-to-first-media tracker only if there is a conference and
 * only for the first element.
 * @param container the HTML container which can be a 'video' or 'audio'
 * element. It can also be an 'object' element if the Temasys plugin is in use
 * and this method has been called previously on a video or audio HTML element.
 * @private
 */
JitsiRemoteTrack.prototype._attachTTFMTracker = function(container) {
    if ((ttfmTrackerAudioAttached && this.isAudioTrack())
            || (ttfmTrackerVideoAttached && this.isVideoTrack())) {
        return;
    }

    if (this.isAudioTrack()) {
        ttfmTrackerAudioAttached = true;
    }
    if (this.isVideoTrack()) {
        ttfmTrackerVideoAttached = true;
    }

    if (RTCBrowserType.isTemasysPluginUsed()) {
        // XXX Don't require Temasys unless it's to be used because it doesn't
        // run on React Native, for example.
        const AdapterJS = require('./adapter.screenshare');

        // FIXME: this is not working for IE11
        AdapterJS.addEvent(container, 'play', this._playCallback.bind(this));
    } else {
        container.addEventListener('canplay', this._playCallback.bind(this));
    }
};

module.exports = JitsiRemoteTrack;
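
// Illustrative usage sketch (comment only): application code normally does
// not import this module directly; remote tracks arrive through the public
// conference API. Assuming a hypothetical `conference` object and the
// JitsiMeetJS event constants, a typical consumer would do roughly:
//
//     conference.on(JitsiMeetJS.events.conference.TRACK_ADDED, track => {
//         if (!track.isLocal()) {
//             // Attaching starts playback and, for the first element per
//             // media type, arms the TTFM tracker defined above.
//             track.attach(
//                 document.querySelector(`#remote-${track.getType()}`));
//         }
//     });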