
JitsiLocalTrack.js 33KB

import { getLogger } from '@jitsi/logger';

import JitsiTrackError from '../../JitsiTrackError';
import {
    TRACK_IS_DISPOSED,
    TRACK_NO_STREAM_FOUND
} from '../../JitsiTrackErrors';
import {
    LOCAL_TRACK_STOPPED,
    NO_DATA_FROM_SOURCE,
    TRACK_MUTE_CHANGED
} from '../../JitsiTrackEvents';
import CameraFacingMode from '../../service/RTC/CameraFacingMode';
import { MediaType } from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import { VideoType } from '../../service/RTC/VideoType';
import {
    NO_BYTES_SENT,
    TRACK_UNMUTED,
    createNoDataFromSourceEvent
} from '../../service/statistics/AnalyticsEvents';
import browser from '../browser';
import FeatureFlags from '../flags/FeatureFlags';
import Statistics from '../statistics/statistics';

import JitsiTrack from './JitsiTrack';
import RTCUtils from './RTCUtils';

const logger = getLogger(__filename);
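
// Usage sketch (illustrative only, not part of this module): applications
// normally obtain JitsiLocalTrack instances through the public lib-jitsi-meet
// entry point rather than by calling the constructor directly, roughly:
//
//   JitsiMeetJS.createLocalTracks({ devices: [ 'audio', 'video' ] })
//       .then(tracks => tracks.forEach(track => track.unmute()));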

/**
 * Represents a single media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 */
export default class JitsiLocalTrack extends JitsiTrack {
    /**
     * Constructs a new JitsiLocalTrack instance.
     *
     * @constructor
     * @param {Object} trackInfo
     * @param {number} trackInfo.rtcId - The ID assigned by the RTC module.
     * @param {Object} trackInfo.stream - The WebRTC MediaStream, parent of the track.
     * @param {Object} trackInfo.track - The underlying WebRTC MediaStreamTrack for new JitsiLocalTrack.
     * @param {string} trackInfo.mediaType - The MediaType of the JitsiLocalTrack.
     * @param {string} trackInfo.videoType - The VideoType of the JitsiLocalTrack.
     * @param {Array<Object>} trackInfo.effects - The effects to be applied to the JitsiLocalTrack.
     * @param {number} trackInfo.resolution - The video resolution if it's a video track.
     * @param {string} trackInfo.deviceId - The ID of the local device for this track.
     * @param {string} trackInfo.facingMode - The camera facing mode used in the getUserMedia call (for mobile only).
     * @param {string} trackInfo.sourceId - The id of the desktop sharing source. NOTE: defined for desktop sharing
     * tracks only.
     * @param {string} trackInfo.sourceType - The type of source the track originates from.
     */
    constructor({
        deviceId,
        facingMode,
        mediaType,
        resolution,
        rtcId,
        sourceId,
        sourceType,
        stream,
        track,
        videoType,
        effects = []
    }) {
        super(
            /* conference */ null,
            stream,
            track,
            /* streamInactiveHandler */ () => this.emit(LOCAL_TRACK_STOPPED, this),
            mediaType,
            videoType);

        this._setEffectInProgress = false;
        const effect = effects.find(e => e.isEnabled(this));

        if (effect) {
            this._startStreamEffect(effect);
        }

        const displaySurface = videoType === VideoType.DESKTOP
            ? track.getSettings().displaySurface
            : null;

        /**
         * Track metadata.
         */
        this.metadata = {
            timestamp: Date.now(),
            ...displaySurface ? { displaySurface } : {}
        };

        /**
         * The ID assigned by the RTC module on instance creation.
         *
         * @type {number}
         */
        this.rtcId = rtcId;
        this.sourceId = sourceId;
        this.sourceType = sourceType ?? displaySurface;

        // Get the resolution from the track itself because it cannot be
        // certain which resolution webrtc has fallen back to using.
        this.resolution = track.getSettings().height;
        this.maxEnabledResolution = resolution;

        // Cache the constraints of the track in case this track model needs
        // to call getUserMedia again, such as when unmuting.
        this._constraints = track.getConstraints();

        // Safari returns an empty constraints object, construct the constraints using getSettings.
        if (!Object.keys(this._constraints).length && videoType === VideoType.CAMERA) {
            this._constraints = {
                height: track.getSettings().height,
                width: track.getSettings().width
            };
        }

        this.deviceId = deviceId;

        /**
         * The <tt>Promise</tt> which represents the progress of a previously
         * queued/scheduled {@link _setMuted} (from the point of view of
         * {@link _queueSetMuted}).
         *
         * @private
         * @type {Promise}
         */
        this._prevSetMuted = Promise.resolve();

        /**
         * The facing mode of the camera from which this JitsiLocalTrack
         * instance was obtained.
         *
         * @private
         * @type {CameraFacingMode|undefined}
         */
        this._facingMode = facingMode;

        // Currently there is no way to know that the MediaStreamTrack ended
        // due to a device disconnect in Firefox through e.g. the "readyState"
        // property. Instead we will compare the current track's label with
        // the device labels from the enumerateDevices() list.
        this._trackEnded = false;

        /**
         * Indicates whether data has been sent or not.
         */
        this._hasSentData = false;

        /**
         * Used only for detection of audio problems. We want to check only once
         * whether the track is sending data or not. This flag is set to false
         * after the check.
         */
        this._testDataSent = true;

        // Currently there is no way to determine with what device the track was
        // created (until getConstraints() support), however we can associate
        // tracks with real devices obtained from the enumerateDevices() call as
        // soon as it's called.
        // NOTE: this.deviceId corresponds to the device id specified in GUM constraints and this._realDeviceId seems
        // to correspond to the id of a matching device from the available device list.
        this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

        // The source name that will be signaled for this track.
        this._sourceName = null;

        this._trackMutedTS = 0;

        this._onDeviceListWillChange = devices => {
            const oldRealDeviceId = this._realDeviceId;

            this._setRealDeviceIdFromDeviceList(devices);

            if (
                // Mark the track as ended for those browsers that do not support
                // the "readyState" property. We do not touch tracks created with
                // the default device ID "".
                (typeof this.getTrack().readyState === 'undefined'
                    && typeof this._realDeviceId !== 'undefined'
                    && !devices.find(d => d.deviceId === this._realDeviceId))

                // If there was an associated realDeviceID and after the device change the realDeviceId is undefined
                // then the associated device has been disconnected and the _trackEnded flag needs to be set. In
                // addition on some Chrome versions the readyState property is set after the device change event is
                // triggered which causes issues in jitsi-meet with the selection of a new device because we don't
                // detect that the old one was removed.
                || (typeof oldRealDeviceId !== 'undefined' && typeof this._realDeviceId === 'undefined')
            ) {
                this._trackEnded = true;
            }
        };

        // Subscribe each created local audio track to the
        // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
        // handling this event for remote tracks (which are handled in RTC.js),
        // because there might be local tracks not attached to a conference.
        if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
            this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);

            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        RTCUtils.addListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);

        this._initNoDataFromSourceHandlers();
    }

    /**
     * Adds the stream to the conference and marks it as an "unmute" operation.
     *
     * @private
     * @returns {Promise}
     */
    _addStreamToConferenceAsUnmute() {
        if (!this.conference) {
            return Promise.resolve();
        }

        // FIXME it would be good to not include the conference as part of this process. Only TraceablePeerConnections
        // to which the track is attached should care about this action. The TPCs to which the track is not attached
        // can sync up when the track is re-attached. A problem with that is that the "modify sources" queue is part
        // of the JingleSessionPC and it would be excluded from the process. One solution would be to extract a class
        // between TPC and JingleSessionPC which would contain the queue and would notify the signaling layer when
        // local SSRCs are changed. This would help to separate XMPP from the RTC module.
        return new Promise((resolve, reject) => {
            this.conference._addLocalTrackToPc(this)
                .then(resolve, error => reject(new Error(error)));
        });
    }

    /**
     * Fires the NO_DATA_FROM_SOURCE event and logs it to analytics and callstats.
     *
     * @private
     * @returns {void}
     */
    _fireNoDataFromSourceEvent() {
        const value = !this.isReceivingData();

        this.emit(NO_DATA_FROM_SOURCE, value);

        // FIXME: Should we report all of those events
        Statistics.sendAnalytics(createNoDataFromSourceEvent(this.getType(), value));
        Statistics.sendLog(JSON.stringify({
            name: NO_DATA_FROM_SOURCE,
            log: value
        }));
    }

    /**
     * Sets handlers on the MediaStreamTrack object that will detect camera issues.
     *
     * @private
     * @returns {void}
     */
    _initNoDataFromSourceHandlers() {
        if (!this._isNoDataFromSourceEventsEnabled()) {
            return;
        }

        this._setHandler('track_mute', () => {
            this._trackMutedTS = window.performance.now();
            this._fireNoDataFromSourceEvent();
        });

        this._setHandler('track_unmute', () => {
            this._fireNoDataFromSourceEvent();
            Statistics.sendAnalyticsAndLog(
                TRACK_UNMUTED,
                {
                    'media_type': this.getType(),
                    'track_type': 'local',
                    value: window.performance.now() - this._trackMutedTS
                });
        });

        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            this._setHandler('track_ended', () => {
                if (!this.isReceivingData()) {
                    this._fireNoDataFromSourceEvent();
                }
            });
        }
    }

    /**
     * Returns true if "no data from source" events are enabled for this JitsiLocalTrack and false otherwise.
     *
     * @private
     * @returns {boolean} - True if the events are enabled for this JitsiLocalTrack and false otherwise.
     */
    _isNoDataFromSourceEventsEnabled() {
        // Disable the events for screen sharing.
        return !this.isVideoTrack() || this.videoType !== VideoType.DESKTOP;
    }

    /**
     * Initializes a new Promise to execute {@link #_setMuted}. May be called multiple times in a row and the
     * invocations of {@link #_setMuted} and, consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
     * serialized fashion.
     *
     * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
     * @private
     * @returns {Promise}
     */
    _queueSetMuted(muted) {
        const setMuted = this._setMuted.bind(this, muted);

        this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);

        return this._prevSetMuted;
    }
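
// Because every invocation chains on this._prevSetMuted (on both resolution
// and rejection), back-to-back calls settle strictly in order. For example:
//
//   track.mute();    // queued first
//   track.unmute();  // _setMuted(false) runs only after the mute settles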

    /**
     * Removes the stream from the conference and marks it as a "mute" operation.
     *
     * @param {Function} successCallback - Callback that will be called when the operation is successful.
     * @param {Function} errorCallback - Callback that will be called when the operation fails.
     * @private
     * @returns {void}
     */
    _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
        if (!this.conference) {
            successCallback();

            return;
        }

        this.conference._removeLocalTrackFromPc(this).then(
            successCallback,
            error => errorCallback(new Error(error)));
    }

    /**
     * Sends the mute status of the track to the conference, if one is set.
     *
     * @param {boolean} mute - Whether the track is muted.
     * @private
     * @returns {void}
     */
    _sendMuteStatus(mute) {
        if (this.conference) {
            this.conference._setTrackMuteStatus(this.getType(), this, mute) && this.conference.room.sendPresence();
        }
    }

    /**
     * Mutes / unmutes this track.
     *
     * @param {boolean} muted - If <tt>true</tt>, this track will be muted; otherwise, this track will be unmuted.
     * @private
     * @returns {Promise}
     */
    _setMuted(muted) {
        if (this.isMuted() === muted
                && !(this.videoType === VideoType.DESKTOP && FeatureFlags.isMultiStreamSendSupportEnabled())) {
            return Promise.resolve();
        }

        if (this.disposed) {
            return Promise.reject(new JitsiTrackError(TRACK_IS_DISPOSED));
        }

        let promise = Promise.resolve();

        // A function that will print info about the muted status transition
        const logMuteInfo = () => logger.info(`Mute ${this}: ${muted}`);

        // In React Native we mute the camera by setting track.enabled but that doesn't
        // work for screen-share tracks, so do the remove-as-mute for those.
        const doesVideoMuteByStreamRemove
            = browser.isReactNative() ? this.videoType === VideoType.DESKTOP : browser.doesVideoMuteByStreamRemove();

        // In the multi-stream mode, desktop tracks are muted from jitsi-meet instead of being removed from the
        // conference. This is needed because we don't want the client to signal a source-remove to the remote peer
        // for the desktop track when screenshare is stopped. Later when screenshare is started again, the same sender
        // will be re-used without the need for signaling a new ssrc through source-add.
        if (this.isAudioTrack()
                || (this.videoType === VideoType.DESKTOP && FeatureFlags.isMultiStreamSendSupportEnabled())
                || !doesVideoMuteByStreamRemove) {
            logMuteInfo();

            // If we have a stream effect that implements its own mute functionality, prioritize it before
            // normal mute e.g. the stream effect that implements system audio sharing has a custom
            // mute state in which if the user mutes, system audio still has to go through.
            if (this._streamEffect && this._streamEffect.setMuted) {
                this._streamEffect.setMuted(muted);
            } else if (this.track) {
                this.track.enabled = !muted;
            }
        } else if (muted) {
            promise = new Promise((resolve, reject) => {
                logMuteInfo();
                this._removeStreamFromConferenceAsMute(
                    () => {
                        if (this._streamEffect) {
                            this._stopStreamEffect();
                        }

                        // FIXME: Maybe here we should set the SRC for the
                        // containers to something
                        // We don't want any events to be fired on this stream
                        this._unregisterHandlers();
                        this.stopStream();
                        this._setStream(null);
                        resolve();
                    },
                    reject);
            });
        } else {
            logMuteInfo();

            // This path is only for camera.
            const streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                effects: this._streamEffect ? [ this._streamEffect ] : [],
                facingMode: this.getCameraFacingMode()
            };

            promise
                = RTCUtils.obtainAudioAndVideoPermissions(Object.assign(
                    {},
                    streamOptions,
                    { constraints: { video: this._constraints } }));

            promise = promise.then(streamsInfo => {
                const streamInfo = streamsInfo.find(info => info.track.kind === this.getType());

                if (streamInfo) {
                    this._setStream(streamInfo.stream);
                    this.track = streamInfo.track;

                    // This is not good when the video type changes after
                    // unmute, but let's not crash here
                    if (this.videoType !== streamInfo.videoType) {
                        logger.warn(
                            `${this}: video type has changed after unmute!`,
                            this.videoType, streamInfo.videoType);
                        this.videoType = streamInfo.videoType;
                    }
                } else {
                    throw new JitsiTrackError(TRACK_NO_STREAM_FOUND);
                }

                if (this._streamEffect) {
                    this._startStreamEffect(this._streamEffect);
                }

                this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));

                return this._addStreamToConferenceAsUnmute();
            });
        }

        return promise
            .then(() => {
                this._sendMuteStatus(muted);

                // Send the videoType message to the bridge.
                this.isVideoTrack() && this.conference && this.conference._sendBridgeVideoTypeMessage(this);
                this.emit(TRACK_MUTE_CHANGED, this);
            });
    }
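
// A summary of the three mute strategies implemented above:
//   1. Audio tracks, multi-stream desktop tracks, and browsers where video
//      does not mute by stream removal: toggle track.enabled (or defer to
//      the stream effect's own setMuted() when it provides one).
//   2. Muting with mute-by-stream-remove: remove the stream from the
//      peerconnection, then stop it and set a null stream.
//   3. Unmuting with mute-by-stream-remove (camera only): obtain a fresh
//      stream via getUserMedia and re-add it as an "unmute".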

    /**
     * Sets the real device ID by comparing track information with device information. This is a temporary solution
     * until the getConstraints() method is implemented in browsers.
     *
     * @param {MediaDeviceInfo[]} devices - The list of devices obtained from an enumerateDevices() call.
     * @private
     * @returns {void}
     */
    _setRealDeviceIdFromDeviceList(devices) {
        const track = this.getTrack();
        const kind = `${track.kind}input`;

        // We need to match by deviceId as well, in case of multiple devices with the same label.
        let device = devices.find(d => d.kind === kind && d.label === track.label && d.deviceId === this.deviceId);

        if (!device && this._realDeviceId === 'default') { // the default device has been changed.
            // If the default device was 'A' and the default device is changed to 'B' the label for the track will
            // remain 'Default - A' but the label for the device in the device list will be updated to 'A'. That's
            // why in order to match it we need to remove the 'Default - ' part.
            const label = (track.label || '').replace('Default - ', '');

            device = devices.find(d => d.kind === kind && d.label === label);
        }

        if (device) {
            this._realDeviceId = device.deviceId;
        } else {
            this._realDeviceId = undefined;
        }
    }
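
// Illustration of the 'default' branch above: if the OS default device
// changes from mic 'A' to mic 'B', the track's label stays 'Default - A'
// while enumerateDevices() now lists 'A' under its own entry. Stripping the
// 'Default - ' prefix yields 'A', which matches that entry and resolves
// _realDeviceId to the real id of device 'A'.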

    /**
     * Sets the stream property of the JitsiLocalTrack object and sets all stored handlers on it.
     *
     * @param {MediaStream} stream - The new MediaStream.
     * @private
     * @returns {void}
     */
    _setStream(stream) {
        super._setStream(stream);

        if (stream) {
            // Store the MSID for video mute/unmute purposes.
            this.storedMSID = this.getMSID();
            logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
        } else {
            logger.debug(`Setting 'null' stream on ${this}`);
        }
    }

    /**
     * Starts the effect process and sets the modified stream on the track.
     *
     * @param {Object} effect - Represents the effect instance.
     * @private
     * @returns {void}
     */
    _startStreamEffect(effect) {
        this._streamEffect = effect;
        this._originalStream = this.stream;
        this._setStream(this._streamEffect.startEffect(this._originalStream));
        this.track = this.stream.getTracks()[0];
    }

    /**
     * Stops the effect process and restores the original stream.
     *
     * @private
     * @returns {void}
     */
    _stopStreamEffect() {
        if (this._streamEffect) {
            this._streamEffect.stopEffect();
            this._setStream(this._originalStream);
            this._originalStream = null;
            this.track = this.stream ? this.stream.getTracks()[0] : null;
        }
    }

    /**
     * Switches the camera facing mode if the WebRTC implementation supports the custom MediaStreamTrack._switchCamera
     * method. Currently, the method in question is implemented in react-native-webrtc only. When such a WebRTC
     * implementation is executing, the method is the preferred way to switch between the front/user-facing and the
     * back/environment-facing cameras because it will likely be (as is the case of react-native-webrtc) noticeably
     * faster than creating a new MediaStreamTrack via a new getUserMedia call with the switched facingMode constraint
     * value. Moreover, the approach with a new getUserMedia call may not even work: WebRTC on Android and iOS is
     * either very slow to open the camera a second time or plainly freezes attempting to do that.
     *
     * @returns {void}
     */
    _switchCamera() {
        if (this.isVideoTrack()
                && this.videoType === VideoType.CAMERA
                && typeof this.track._switchCamera === 'function') {
            this.track._switchCamera();
            this._facingMode
                = this._facingMode === CameraFacingMode.ENVIRONMENT
                    ? CameraFacingMode.USER
                    : CameraFacingMode.ENVIRONMENT;
        }
    }

    /**
     * Stops the currently used effect (if there is one) and starts the passed effect (if there is one).
     *
     * @param {Object|undefined} effect - The new effect to be set.
     * @private
     * @returns {void}
     */
    _switchStreamEffect(effect) {
        if (this._streamEffect) {
            this._stopStreamEffect();
            this._streamEffect = undefined;
        }

        if (effect) {
            this._startStreamEffect(effect);
        }
    }

    /**
     * @inheritdoc
     *
     * Stops sending the media track and removes it from the HTML. NOTE: Works for local tracks only.
     *
     * @extends JitsiTrack#dispose
     * @returns {Promise}
     */
    async dispose() {
        // Remove the effect instead of stopping it so that the original stream is restored
        // on both the local track and on the peerconnection.
        if (this._streamEffect) {
            await this.setEffect();
        }

        if (this.conference) {
            await this.conference.removeTrack(this);
        }

        if (this.stream) {
            this.stopStream();
            this.detach();
        }

        RTCUtils.removeListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);

        if (this._onAudioOutputDeviceChanged) {
            RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        return super.dispose();
    }

    /**
     * Returns the facing mode for a video track obtained from the camera. For other cases (e.g. an audio track or a
     * 'desktop' video track) returns undefined.
     *
     * @returns {CameraFacingMode|undefined}
     */
    getCameraFacingMode() {
        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            // MediaStreamTrack#getSettings() is not implemented in many
            // browsers, so we need feature checking here. Progress on the
            // respective browser's implementation can be tracked at
            // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
            // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
            // for Firefox. Even if a browser implements getSettings() already,
            // it might still not return anything for 'facingMode'.
            const trackSettings = this.track.getSettings?.();

            if (trackSettings && 'facingMode' in trackSettings) {
                return trackSettings.facingMode;
            }

            if (typeof this._facingMode !== 'undefined') {
                return this._facingMode;
            }

            // In most cases we are showing a webcam. So if we've gotten here,
            // it should be relatively safe to assume that we are probably
            // showing the user-facing camera.
            return CameraFacingMode.USER;
        }

        return undefined;
    }

    /**
     * Returns the device id associated with the track.
     *
     * @returns {string}
     */
    getDeviceId() {
        return this._realDeviceId || this.deviceId;
    }

    /**
     * Gets the duration of the track.
     *
     * @returns {Number} the duration of the track in seconds
     */
    getDuration() {
        return (Date.now() / 1000) - (this.metadata.timestamp / 1000);
    }

    /**
     * Returns the participant id which owns the track.
     *
     * @returns {string} the id of the participant. It corresponds to the
     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
     */
    getParticipantId() {
        return this.conference && this.conference.myUserId();
    }

    /**
     * Returns the source name associated with the jitsi track.
     *
     * @returns {string | null} source name
     */
    getSourceName() {
        return this._sourceName;
    }

    /**
     * Returns whether the associated MediaStreamTrack is in the 'ended' state.
     *
     * @returns {boolean}
     */
    isEnded() {
        if (this.isVideoTrack() && this.isMuted()) {
            // If a video track is muted the readyState will be ended, that's why we need to rely only on the
            // _trackEnded flag.
            return this._trackEnded;
        }

        return this.getTrack().readyState === 'ended' || this._trackEnded;
    }

    /**
     * Returns <tt>true</tt>.
     *
     * @returns {boolean} <tt>true</tt>
     */
    isLocal() {
        return true;
    }

    /**
     * Returns <tt>true</tt> if the stream is muted and <tt>false</tt> otherwise.
     *
     * @returns {boolean} <tt>true</tt> if the stream is muted and <tt>false</tt> otherwise.
     */
    isMuted() {
        // this.stream will be null when we mute local video on Chrome
        if (!this.stream) {
            return true;
        }

        if (this.isVideoTrack() && !this.isActive()) {
            return true;
        }

        // If the currently used stream effect has its own muted state, use that.
        if (this._streamEffect && this._streamEffect.isMuted) {
            return this._streamEffect.isMuted();
        }

        return !this.track || !this.track.enabled;
    }

    /**
     * Checks whether the attached MediaStream is receiving data from its source or not. If the stream property is
     * null (because of a mute or another reason) this method will return false.
     * NOTE: This method doesn't directly indicate a problem with the stream. For example, a muted video track is
     * still reported as receiving data, while a disposed track is not.
     *
     * @returns {boolean} true if the stream is receiving data and false otherwise.
     */
    isReceivingData() {
        if (this.isVideoTrack()
                && (this.isMuted() || this._stopStreamInProgress || this.videoType === VideoType.DESKTOP)) {
            return true;
        }

        if (!this.stream) {
            return false;
        }

        // In an older version of the spec there is no muted property and readyState can have the value "muted". In
        // the latest versions readyState can have the values "live" and "ended" and there is a muted boolean
        // property. If the stream is muted that means that we aren't receiving any data from the source. We want to
        // notify the users of an error if the stream is muted or ended on its creation.
        // For video blur enabled use the original video stream
        const stream = this._effectEnabled ? this._originalStream : this.stream;

        return stream.getTracks().some(track =>
            (!('readyState' in track) || track.readyState === 'live')
                && (!('muted' in track) || track.muted !== true));
    }
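
// Per the spec note above: a track exposing readyState === 'live' with
// muted === false counts as receiving data, while an implementation of the
// older spec that reports readyState 'muted' (and has no muted boolean)
// fails the readyState predicate and is treated as producing no data.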

    /**
     * Asynchronously mutes this track.
     *
     * @returns {Promise}
     */
    mute() {
        return this._queueSetMuted(true);
    }

    /**
     * Handles "bytes sent" statistics. NOTE: used only for audio tracks to detect audio issues.
     *
     * @param {TraceablePeerConnection} tpc - The peerconnection that is reporting the bytes sent stat.
     * @param {number} bytesSent - The new value.
     * @returns {void}
     */
    onByteSentStatsReceived(tpc, bytesSent) {
        if (bytesSent > 0) {
            this._hasSentData = true;
        }

        const iceConnectionState = tpc.getConnectionState();

        if (this._testDataSent && iceConnectionState === 'connected') {
            setTimeout(() => {
                if (!this._hasSentData) {
                    logger.warn(`${this} 'bytes sent' <= 0: ${bytesSent}`);

                    Statistics.analytics.sendEvent(NO_BYTES_SENT, { 'media_type': this.getType() });
                }
            }, 3000);
            this._testDataSent = false;
        }
    }

    /**
     * Sets the JitsiConference object associated with the track. This is a temporary solution.
     *
     * @param conference - JitsiConference object.
     * @returns {void}
     */
    setConference(conference) {
        this.conference = conference;

        // We want to keep up with postponed events which should have been fired
        // on the "attach" call, but for local tracks we do not always have the
        // conference before attaching. However, this may result in duplicated
        // events if they have already been triggered on "attach".
        for (let i = 0; i < this.containers.length; i++) {
            this._maybeFireTrackAttached(this.containers[i]);
        }
    }

    /**
     * Sets the effect and switches between the modified stream and the original one.
     *
     * @param {Object} effect - Represents the effect instance to be used.
     * @returns {Promise}
     */
    setEffect(effect) {
        if (typeof this._streamEffect === 'undefined' && typeof effect === 'undefined') {
            return Promise.resolve();
        }

        if (typeof effect !== 'undefined' && !effect.isEnabled(this)) {
            return Promise.reject(new Error('Incompatible effect instance!'));
        }

        if (this._setEffectInProgress === true) {
            return Promise.reject(new Error('setEffect already in progress!'));
        }

        // In case we have an audio track that is being enhanced with an effect, we still want it to be applied,
        // even if the track is muted. Whereas for video the actual track doesn't exist if it's muted.
        if (this.isMuted() && !this.isAudioTrack()) {
            this._streamEffect = effect;

            return Promise.resolve();
        }

        const conference = this.conference;

        if (!conference) {
            this._switchStreamEffect(effect);
            if (this.isVideoTrack()) {
                this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));
            }

            return Promise.resolve();
        }

        this._setEffectInProgress = true;

        return conference._removeLocalTrackFromPc(this)
            .then(() => {
                this._switchStreamEffect(effect);
                if (this.isVideoTrack()) {
                    this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));
                }

                return conference._addLocalTrackToPc(this);
            })
            .then(() => {
                this._setEffectInProgress = false;
            })
            .catch(error => {
                // Any error will not be recoverable and will trigger a CONFERENCE_FAILED event. But let's try to
                // clean up everything related to the effect functionality.
                this._setEffectInProgress = false;
                this._switchStreamEffect();
                logger.error('Failed to switch to the new stream!', error);
                throw error;
            });
    }
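
// Usage sketch (illustrative): effect instances are assumed to implement the
// stream-effect interface used above, i.e. isEnabled(), startEffect() and
// stopEffect(), optionally setMuted()/isMuted(). A hypothetical blur effect
// would be applied and later removed like so:
//
//   await localVideoTrack.setEffect(blurEffect); // switch to modified stream
//   await localVideoTrack.setEffect(undefined);  // restore original stream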

    /**
     * Sets the source name to be used for signaling the jitsi track.
     *
     * @param {string} name The source name.
     */
    setSourceName(name) {
        this._sourceName = name;
    }

    /**
     * Stops the associated MediaStream.
     *
     * @returns {void}
     */
    stopStream() {
        /**
         * Indicates that we are executing {@link #stopStream} i.e.
         * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
         * associated with this <tt>JitsiTrack</tt> instance.
         *
         * @private
         * @type {boolean}
         */
        this._stopStreamInProgress = true;

        try {
            RTCUtils.stopMediaStream(this.stream);
        } finally {
            this._stopStreamInProgress = false;
        }
    }

    /**
     * Creates a text representation of this local track instance.
     *
     * @return {string}
     */
    toString() {
        return `LocalTrack[${this.rtcId},${this.getType()}]`;
    }

    /**
     * Asynchronously unmutes this track.
     *
     * @returns {Promise}
     */
    unmute() {
        return this._queueSetMuted(false);
    }
}