
JitsiTrack.js

/* global __filename, module */

import EventEmitter from 'events';
import { getLogger } from 'jitsi-meet-logger';

import * as JitsiTrackEvents from '../../JitsiTrackEvents';
import * as MediaType from '../../service/RTC/MediaType';
import RTCBrowserType from './RTCBrowserType';
import RTCUtils from './RTCUtils';

const logger = getLogger(__filename);

/**
 * Maps our handler types to MediaStreamTrack properties.
 */
const trackHandler2Prop = {
    'track_mute': 'onmute', // Not supported on FF
    'track_unmute': 'onunmute',
    'track_ended': 'onended'
};

/**
 * This implements 'onended' callback normally fired by WebRTC after the stream
 * is stopped. There is no such behaviour yet in FF, so we have to add it.
 * @param jitsiTrack our track object holding the original WebRTC stream object
 * to which 'onended' handling will be added.
 */
function implementOnEndedHandling(jitsiTrack) {
    const stream = jitsiTrack.getOriginalStream();

    if (!stream) {
        return;
    }

    const originalStop = stream.stop;

    stream.stop = function() {
        originalStop.apply(stream);
        if (jitsiTrack.isActive()) {
            stream.onended();
        }
    };
}

/**
 * Adds onended/oninactive handler to a MediaStream.
 * @param mediaStream a MediaStream to attach onended/oninactive handler
 * @param handler the handler
 */
function addMediaStreamInactiveHandler(mediaStream, handler) {
    // Temasys will use onended
    if (typeof mediaStream.active === 'undefined') {
        mediaStream.onended = handler;
    } else {
        mediaStream.oninactive = handler;
    }
}

/**
 * Represents a single media track (either audio or video).
 */
export default class JitsiTrack extends EventEmitter {
    /* eslint-disable max-params */
    /**
     * Represents a single media track (either audio or video).
     * @constructor
     * @param conference the JitsiConference instance to which this track
     * belongs
     * @param stream the WebRTC MediaStream instance
     * @param track the WebRTC MediaStreamTrack instance, must be part of
     * the given <tt>stream</tt>.
     * @param streamInactiveHandler the function that will handle
     * onended/oninactive events of the stream.
     * @param trackMediaType the media type of the JitsiTrack
     * @param videoType the VideoType for this track if any
     */
    constructor(
            conference,
            stream,
            track,
            streamInactiveHandler,
            trackMediaType,
            videoType) {
        super();

        // aliases for addListener/removeListener
        this.addEventListener = this.addListener;
        this.removeEventListener = this.off = this.removeListener;

        /**
         * Array with the HTML elements that are displaying the streams.
         * @type {Array}
         */
        this.containers = [];
        this.conference = conference;
        this.stream = stream;
        this.audioLevel = -1;
        this.type = trackMediaType;
        this.track = track;
        this.videoType = videoType;
        this.handlers = new Map();

        /**
         * Indicates whether this JitsiTrack has been disposed. If true, this
         * JitsiTrack is to be considered unusable and operations involving it
         * are to fail (e.g. {@link JitsiConference#addTrack(JitsiTrack)},
         * {@link JitsiConference#removeTrack(JitsiTrack)}).
         * @type {boolean}
         */
        this.disposed = false;

        /**
         * The inactive handler which will be triggered when the underlying
         * media stream ends.
         * @type {Function}
         */
        this._streamInactiveHandler = streamInactiveHandler;

        this._bindInactiveHandler(streamInactiveHandler);
    }
    /* eslint-enable max-params */

    /**
     * Binds the inactive handler.
     * @param {Function} streamInactiveHandler
     * @private
     */
    _bindInactiveHandler(streamInactiveHandler) {
        if (RTCBrowserType.isFirefox()) {
            implementOnEndedHandling(this);
        }
        addMediaStreamInactiveHandler(this.stream, streamInactiveHandler);
    }

    /**
     * Sets handler to the WebRTC MediaStream or MediaStreamTrack object
     * depending on the passed type.
     * @param {string} type the type of the handler that is going to be set
     * @param {Function} handler the handler.
     */
    _setHandler(type, handler) {
        if (!trackHandler2Prop.hasOwnProperty(type)) {
            logger.error(`Invalid handler type ${type}`);

            return;
        }
        if (handler) {
            this.handlers.set(type, handler);
        } else {
            this.handlers.delete(type);
        }

        if (this.stream) {
            // FIXME why only video tracks?
            for (const track of this.stream.getVideoTracks()) {
                track[trackHandler2Prop[type]] = handler;
            }
        }
    }

    /**
     * Unregisters all event handlers bound to the underlying media
     * stream/track.
     * @private
     */
    _unregisterHandlers() {
        if (!this.stream) {
            logger.warn(
                `${this}: unable to unregister handlers - no stream object`);

            return;
        }

        for (const type of this.handlers.keys()) {
            // FIXME why only video tracks?
            for (const videoTrack of this.stream.getVideoTracks()) {
                videoTrack[trackHandler2Prop[type]] = undefined;
            }
        }
        if (this._streamInactiveHandler) {
            addMediaStreamInactiveHandler(this.stream, undefined);
        }
    }

    /**
     * Sets the stream property of the JitsiTrack object and sets all stored
     * handlers to it.
     * @param {MediaStream} stream the new stream.
     */
    _setStream(stream) {
        if (this.stream === stream) {
            logger.warn(`Attempt to set the same stream twice on ${this}`);

            return;
        }

        this.stream = stream;
        for (const type of this.handlers.keys()) {
            this._setHandler(type, this.handlers.get(type));
        }
        if (this._streamInactiveHandler && this.stream) {
            this._bindInactiveHandler(this._streamInactiveHandler);
        }
    }

    /**
     * Returns the type (audio or video) of this track.
     */
    getType() {
        return this.type;
    }

    /**
     * Checks if this is an audio track.
     */
    isAudioTrack() {
        return this.getType() === MediaType.AUDIO;
    }

    /**
     * Checks whether the underlying WebRTC <tt>MediaStreamTrack</tt> is muted
     * according to its 'muted' field status.
     * @return {boolean} <tt>true</tt> if the underlying
     * <tt>MediaStreamTrack</tt> is muted or <tt>false</tt> otherwise.
     */
    isWebRTCTrackMuted() {
        return this.track && this.track.muted;
    }

    /**
     * Checks if this is a video track.
     */
    isVideoTrack() {
        return this.getType() === MediaType.VIDEO;
    }

    /**
     * Checks whether this is a local track.
     * @abstract
     * @return {boolean} 'true' if it's a local track or 'false' otherwise.
     */
    isLocal() {
        throw new Error('Not implemented by subclass');
    }

    /**
     * Returns the WebRTC MediaStream instance.
     */
    getOriginalStream() {
        return this.stream;
    }

    /**
     * Returns the ID of the underlying WebRTC MediaStream (if any).
     * @returns {String|null}
     */
    getStreamId() {
        return this.stream ? this.stream.id : null;
    }

    /**
     * Returns the underlying WebRTC MediaStreamTrack.
     * @returns {MediaStreamTrack}
     */
    getTrack() {
        return this.track;
    }

    /**
     * Returns the ID of the underlying WebRTC MediaStreamTrack (if any).
     * @returns {String|null}
     */
    getTrackId() {
        return this.track ? this.track.id : null;
    }

    /**
     * Returns a meaningful usage label for this track depending on its media
     * and eventual video type.
     * @returns {string}
     */
    getUsageLabel() {
        if (this.isAudioTrack()) {
            return 'mic';
        }

        return this.videoType ? this.videoType : 'default';
    }

    /**
     * Eventually will trigger the RTCEvents.TRACK_ATTACHED event.
     * @param container the video/audio container to which this stream is
     * attached and for which the event will be fired.
     * @private
     */
    _maybeFireTrackAttached(container) {
        if (this.conference && container) {
            this.conference._onTrackAttach(this, container);
        }
    }

    /**
     * Attaches the MediaStream of this track to an HTML container.
     * Adds the container to the list of containers that are displaying the
     * track. Note that the Temasys plugin will replace the original
     * audio/video element with an 'object' element when the stream is
     * attached to the container for the first time.
     * * NOTE * if the given container element is not visible when the stream
     * is being attached, it will be shown again, provided that the Temasys
     * plugin is currently in use.
     *
     * @param container the HTML container which can be a 'video' or 'audio'
     * element. It can also be an 'object' element if the Temasys plugin is in
     * use and this method has been called previously on a video or audio HTML
     * element.
     *
     * @returns potentially a new instance of the container if it was replaced
     * by the library. That's the case when the Temasys plugin is in use.
     */
    attach(container) {
        let c = container;

        if (this.stream) {
            c = RTCUtils.attachMediaStream(container, this.stream);
        }
        this.containers.push(c);
        this._maybeFireTrackAttached(c);
        this._attachTTFMTracker(c);

        return c;
    }

    /**
     * Removes this JitsiTrack from the passed HTML container.
     *
     * @param container the HTML container to detach from this JitsiTrack. If
     * <tt>null</tt> or <tt>undefined</tt>, all containers are removed. A
     * container can be a 'video', 'audio' or 'object' HTML element instance
     * to which this JitsiTrack is currently attached.
     */
    detach(container) {
        for (let cs = this.containers, i = cs.length - 1; i >= 0; --i) {
            const c = cs[i];

            if (!container) {
                RTCUtils.attachMediaStream(c, null);
            }
            if (!container || c === container) {
                cs.splice(i, 1);
            }
        }

        if (container) {
            RTCUtils.attachMediaStream(container, null);
        }
    }

    /**
     * Attaches the time-to-first-media tracker, only if there is a conference
     * and only for the first element.
     *
     * @param {HTMLElement} container the HTML container which can be a 'video'
     * or 'audio' element. It can also be an 'object' element if the Temasys
     * plugin is in use and this method has been called previously on a video
     * or audio HTML element.
     * @private
     */
    _attachTTFMTracker(container) { // eslint-disable-line no-unused-vars
        // Should be defined by the classes that are extending JitsiTrack
    }

    /**
     * Removes attached event listeners.
     *
     * @returns {Promise}
     */
    dispose() {
        this.removeAllListeners();

        this.disposed = true;

        return Promise.resolve();
    }

    /**
     * Returns true if this is a video track and the source of the video is a
     * screen capture as opposed to a camera.
     */
    isScreenSharing() {
        // FIXME: Should be fixed or removed.
    }

    /**
     * Returns the id of the track.
     * @returns {string|null} the id of the track or null if this is a fake
     * track.
     */
    getId() {
        if (this.stream) {
            return RTCUtils.getStreamID(this.stream);
        }

        return null;
    }

    /**
     * Checks whether the MediaStream is active/not ended.
     * When there is no check for active we don't have the information and so
     * will report that the stream is active (in the case of FF).
     * @returns {boolean} whether the MediaStream is active.
     */
    isActive() {
        if (typeof this.stream.active !== 'undefined') {
            return this.stream.active;
        }

        return true;
    }

    /**
     * Sets the audio level for the stream.
     * @param {number} audioLevel value between 0 and 1
     * @param {TraceablePeerConnection} [tpc] the peerconnection instance which
     * is the source for the audio level. It can be <tt>undefined</tt> for
     * a local track if the audio level was measured outside of the
     * peerconnection (see /modules/statistics/LocalStatsCollector.js).
     */
    setAudioLevel(audioLevel, tpc) {
        if (this.audioLevel !== audioLevel) {
            this.audioLevel = audioLevel;
            this.emit(
                JitsiTrackEvents.TRACK_AUDIO_LEVEL_CHANGED,
                audioLevel,
                tpc);
        }
    }

    /**
     * Returns the msid of the stream attached to the JitsiTrack object or null
     * if no stream is attached.
     */
    getMSID() {
        const streamId = this.getStreamId();
        const trackId = this.getTrackId();

        return streamId && trackId ? `${streamId} ${trackId}` : null;
    }

    /**
     * Sets a new audio output device for the track's DOM elements. Video
     * tracks are ignored.
     * @param {string} audioOutputDeviceId - id of an 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices(), '' for the default device
     * @emits JitsiTrackEvents.TRACK_AUDIO_OUTPUT_CHANGED
     * @returns {Promise}
     */
    setAudioOutput(audioOutputDeviceId) {
        if (!RTCUtils.isDeviceChangeAvailable('output')) {
            return Promise.reject(
                new Error('Audio output device change is not supported'));
        }

        // All audio communication is done through audio tracks, so ignore
        // changing the audio output for video tracks altogether.
        if (this.isVideoTrack()) {
            return Promise.resolve();
        }

        return (
            Promise.all(
                this.containers.map(
                    element =>
                        element.setSinkId(audioOutputDeviceId)
                            .catch(error => {
                                logger.warn(
                                    'Failed to change audio output device'
                                        + ' on element. Default or'
                                        + ' previously set audio output'
                                        + ' device will be used.',
                                    element,
                                    error);

                                throw error;
                            })))
                .then(() => {
                    this.emit(
                        JitsiTrackEvents.TRACK_AUDIO_OUTPUT_CHANGED,
                        audioOutputDeviceId);
                }));
    }
}
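
For reference, here is a minimal consumer-side sketch of how the API above is typically driven. It assumes lib-jitsi-meet's public entry point (a global JitsiMeetJS object, already initialized, with createLocalTracks() and the events.track constants) and hypothetical 'localVideo'/'localAudio' elements in the page; it is an illustration, not part of JitsiTrack.js.

// Usage sketch (not part of JitsiTrack.js). Assumes JitsiMeetJS.init() has
// already been called and that 'localVideo'/'localAudio' elements exist.
const TrackEvents = JitsiMeetJS.events.track;

JitsiMeetJS.createLocalTracks({ devices: [ 'audio', 'video' ] })
    .then(tracks => {
        for (const track of tracks) {
            // attach() may return a replacement element (Temasys case),
            // so keep the element it returns for the later detach() call.
            const elementId
                = track.getType() === 'video' ? 'localVideo' : 'localAudio';
            const container = track.attach(document.getElementById(elementId));

            // Audio levels arrive through TRACK_AUDIO_LEVEL_CHANGED.
            track.addEventListener(
                TrackEvents.TRACK_AUDIO_LEVEL_CHANGED,
                level => console.log(`${track.getUsageLabel()} level`, level));

            // Route audio to the default output device; resolves immediately
            // for video tracks and rejects where sink selection is
            // unsupported.
            track.setAudioOutput('')
                .catch(error =>
                    console.warn('Audio output device not changed', error));

            // Later: track.detach(container); track.dispose();
        }
    });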