// ---- paste-header residue from the file viewer, kept as comments ----
// "You can select up to 25 topics. A topic must start with a letter or
// number, and may contain up to 35 characters using letters, numbers and
// dashes ('-')."  (viewer UI message, originally in Japanese)
// JitsiRemoteTrack.js 9.3KB
  1. import { createTtfmEvent } from '../../service/statistics/AnalyticsEvents';
  2. import JitsiTrack from './JitsiTrack';
  3. import * as JitsiTrackEvents from '../../JitsiTrackEvents';
  4. import RTCBrowserType from './RTCBrowserType';
  5. import Statistics from '../statistics/statistics';
const logger = require('jitsi-meet-logger').getLogger(__filename);
const RTCEvents = require('../../service/RTC/RTCEvents');

// Module-level (not per-instance) flags: the time-to-first-media tracker is
// attached at most once per media kind across ALL JitsiRemoteTrack instances
// — see _attachTTFMTracker below.
let ttfmTrackerAudioAttached = false;
let ttfmTrackerVideoAttached = false;
  10. /* eslint-disable max-params */
  11. /**
  12. * Represents a single media track (either audio or video).
  13. */
  14. export default class JitsiRemoteTrack extends JitsiTrack {
  15. /**
  16. * Creates new JitsiRemoteTrack instance.
  17. * @param {RTC} rtc the RTC service instance.
  18. * @param {JitsiConference} conference the conference to which this track
  19. * belongs to
  20. * @param {string} ownerEndpointId the endpoint ID of the track owner
  21. * @param {MediaStream} stream WebRTC MediaStream, parent of the track
  22. * @param {MediaStreamTrack} track underlying WebRTC MediaStreamTrack for
  23. * the new JitsiRemoteTrack
  24. * @param {MediaType} mediaType the type of the media
  25. * @param {VideoType} videoType the type of the video if applicable
  26. * @param {number} ssrc the SSRC number of the Media Stream
  27. * @param {boolean} muted the initial muted state
  28. * @param {boolean} isP2P indicates whether or not this track belongs to a
  29. * P2P session
  30. * @throws {TypeError} if <tt>ssrc</tt> is not a number.
  31. * @constructor
  32. */
  33. constructor(
  34. rtc,
  35. conference,
  36. ownerEndpointId,
  37. stream,
  38. track,
  39. mediaType,
  40. videoType,
  41. ssrc,
  42. muted,
  43. isP2P) {
  44. super(
  45. conference,
  46. stream,
  47. track,
  48. () => {
  49. // Nothing to do if the track is inactive.
  50. },
  51. mediaType,
  52. videoType);
  53. this.rtc = rtc;
  54. // Prevent from mixing up type of SSRC which should be a number
  55. if (typeof ssrc !== 'number') {
  56. throw new TypeError(`SSRC ${ssrc} is not a number`);
  57. }
  58. this.ssrc = ssrc;
  59. this.ownerEndpointId = ownerEndpointId;
  60. this.muted = muted;
  61. this.isP2P = isP2P;
  62. // we want to mark whether the track has been ever muted
  63. // to detect ttfm events for startmuted conferences, as it can
  64. // significantly increase ttfm values
  65. this.hasBeenMuted = muted;
  66. // Bind 'onmute' and 'onunmute' event handlers
  67. if (this.rtc && this.track) {
  68. this._bindMuteHandlers();
  69. }
  70. }
  71. /* eslint-enable max-params */
  72. /**
  73. * Attaches the track muted handlers.
  74. *
  75. * @returns {void}
  76. */
  77. _bindMuteHandlers() {
  78. // Use feature detection for finding what event handling function is
  79. // supported. On Internet Explorer, which uses uses temasys/firebreath,
  80. // the track will have attachEvent instead of addEventListener.
  81. //
  82. // FIXME it would be better to use recently added '_setHandler' method,
  83. // but:
  84. // 1. It does not allow to set more than one handler to the event
  85. // 2. It does mix MediaStream('inactive') with MediaStreamTrack events
  86. // 3. Allowing to bind more than one event handler requires too much
  87. // refactoring around camera issues detection.
  88. if (this.track.addEventListener) {
  89. this.track.addEventListener('mute', () => this._onTrackMute());
  90. this.track.addEventListener('unmute', () => this._onTrackUnmute());
  91. } else if (this.track.attachEvent) {
  92. // FIXME Internet Explorer is not emitting out mute/unmute events.
  93. this.track.attachEvent('onmute', () => this._onTrackMute());
  94. this.track.attachEvent('onunmute', () => this._onTrackUnmute());
  95. }
  96. }
  97. /**
  98. * Callback invoked when the track is muted. Emits an event notifying
  99. * listeners of the mute event.
  100. *
  101. * @private
  102. * @returns {void}
  103. */
  104. _onTrackMute() {
  105. logger.debug(
  106. `"onmute" event(${Date.now()}): `,
  107. this.getParticipantId(), this.getType(), this.getSSRC());
  108. this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_MUTE, this);
  109. }
  110. /**
  111. * Callback invoked when the track is unmuted. Emits an event notifying
  112. * listeners of the mute event.
  113. *
  114. * @private
  115. * @returns {void}
  116. */
  117. _onTrackUnmute() {
  118. logger.debug(
  119. `"onunmute" event(${Date.now()}): `,
  120. this.getParticipantId(), this.getType(), this.getSSRC());
  121. this.rtc.eventEmitter.emit(RTCEvents.REMOTE_TRACK_UNMUTE, this);
  122. }
  123. /**
  124. * Sets current muted status and fires an events for the change.
  125. * @param value the muted status.
  126. */
  127. setMute(value) {
  128. if (this.muted === value) {
  129. return;
  130. }
  131. if (value) {
  132. this.hasBeenMuted = true;
  133. }
  134. // we can have a fake video stream
  135. if (this.stream) {
  136. this.stream.muted = value;
  137. }
  138. this.muted = value;
  139. this.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
  140. }
  141. /**
  142. * Returns the current muted status of the track.
  143. * @returns {boolean|*|JitsiRemoteTrack.muted} <tt>true</tt> if the track is
  144. * muted and <tt>false</tt> otherwise.
  145. */
  146. isMuted() {
  147. return this.muted;
  148. }
  149. /**
  150. * Returns the participant id which owns the track.
  151. *
  152. * @returns {string} the id of the participants. It corresponds to the
  153. * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
  154. */
  155. getParticipantId() {
  156. return this.ownerEndpointId;
  157. }
  158. /**
  159. * Return false;
  160. */
  161. isLocal() {
  162. return false;
  163. }
  164. /**
  165. * Returns the synchronization source identifier (SSRC) of this remote
  166. * track.
  167. *
  168. * @returns {number} the SSRC of this remote track.
  169. */
  170. getSSRC() {
  171. return this.ssrc;
  172. }
  173. /**
  174. * Changes the video type of the track.
  175. *
  176. * @param {string} type - The new video type("camera", "desktop").
  177. */
  178. _setVideoType(type) {
  179. if (this.videoType === type) {
  180. return;
  181. }
  182. this.videoType = type;
  183. this.emit(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED, type);
  184. }
  185. /**
  186. * Handles track play events.
  187. */
  188. _playCallback() {
  189. const type = this.isVideoTrack() ? 'video' : 'audio';
  190. const now = window.performance.now();
  191. console.log(`(TIME) Render ${type}:\t`, now);
  192. this.conference.getConnectionTimes()[`${type}.render`] = now;
  193. // The conference can be started without calling GUM
  194. // FIXME if there would be a module for connection times this kind
  195. // of logic (gumDuration or ttfm) should end up there
  196. const gumStart = window.connectionTimes['obtainPermissions.start'];
  197. const gumEnd = window.connectionTimes['obtainPermissions.end'];
  198. const gumDuration
  199. = !isNaN(gumEnd) && !isNaN(gumStart) ? gumEnd - gumStart : 0;
  200. // Subtract the muc.joined-to-session-initiate duration because jicofo
  201. // waits until there are 2 participants to start Jingle sessions.
  202. const ttfm = now
  203. - (this.conference.getConnectionTimes()['session.initiate']
  204. - this.conference.getConnectionTimes()['muc.joined'])
  205. - gumDuration;
  206. this.conference.getConnectionTimes()[`${type}.ttfm`] = ttfm;
  207. console.log(`(TIME) TTFM ${type}:\t`, ttfm);
  208. Statistics.sendAnalytics(createTtfmEvent(
  209. {
  210. 'media_type': type,
  211. muted: this.hasBeenMuted,
  212. value: ttfm
  213. }));
  214. }
  215. /**
  216. * Attach time to first media tracker only if there is conference and only
  217. * for the first element.
  218. * @param container the HTML container which can be 'video' or 'audio'
  219. * element. It can also be 'object' element if Temasys plugin is in use and
  220. * this method has been called previously on video or audio HTML element.
  221. * @private
  222. */
  223. _attachTTFMTracker(container) {
  224. if ((ttfmTrackerAudioAttached && this.isAudioTrack())
  225. || (ttfmTrackerVideoAttached && this.isVideoTrack())) {
  226. return;
  227. }
  228. if (this.isAudioTrack()) {
  229. ttfmTrackerAudioAttached = true;
  230. }
  231. if (this.isVideoTrack()) {
  232. ttfmTrackerVideoAttached = true;
  233. }
  234. if (RTCBrowserType.isTemasysPluginUsed()) {
  235. // XXX Don't require Temasys unless it's to be used because it
  236. // doesn't run on React Native, for example.
  237. const AdapterJS = require('./adapter.screenshare');
  238. // FIXME: this is not working for IE11
  239. AdapterJS.addEvent(
  240. container,
  241. 'play',
  242. this._playCallback.bind(this));
  243. } else {
  244. container.addEventListener(
  245. 'canplay',
  246. this._playCallback.bind(this));
  247. }
  248. }
  249. /**
  250. * Creates a text representation of this remote track instance.
  251. * @return {string}
  252. */
  253. toString() {
  254. return `RemoteTrack[${
  255. this.ownerEndpointId}, ${
  256. this.getType()}, p2p: ${
  257. this.isP2P}]`;
  258. }
  259. }