Nevar pievienot vairāk kā 25 tēmas Tēmai ir jāsākas ar burtu vai ciparu, tā var saturēt domu zīmes ('-') un var būt līdz 35 simboliem gara.

JitsiLocalTrack.js 32KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911
  1. import { getLogger } from '@jitsi/logger';
  2. import JitsiTrackError from '../../JitsiTrackError';
  3. import {
  4. TRACK_IS_DISPOSED,
  5. TRACK_NO_STREAM_FOUND
  6. } from '../../JitsiTrackErrors';
  7. import {
  8. LOCAL_TRACK_STOPPED,
  9. NO_DATA_FROM_SOURCE,
  10. TRACK_MUTE_CHANGED
  11. } from '../../JitsiTrackEvents';
  12. import CameraFacingMode from '../../service/RTC/CameraFacingMode';
  13. import * as MediaType from '../../service/RTC/MediaType';
  14. import RTCEvents from '../../service/RTC/RTCEvents';
  15. import VideoType from '../../service/RTC/VideoType';
  16. import {
  17. NO_BYTES_SENT,
  18. TRACK_UNMUTED,
  19. createNoDataFromSourceEvent
  20. } from '../../service/statistics/AnalyticsEvents';
  21. import browser from '../browser';
  22. import Statistics from '../statistics/statistics';
  23. import JitsiTrack from './JitsiTrack';
  24. import RTCUtils from './RTCUtils';
// Module-scoped logger tagged with this file's name.
const logger = getLogger(__filename);
/**
 * Represents a single local media track (either audio or video). One
 * <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack
 * obtained from a local device or a desktop-sharing source.
 */
export default class JitsiLocalTrack extends JitsiTrack {
    /**
     * Constructs a new JitsiLocalTrack instance.
     *
     * @constructor
     * @param {Object} trackInfo
     * @param {number} trackInfo.rtcId - The ID assigned by the RTC module.
     * @param trackInfo.stream - The WebRTC MediaStream, parent of the track.
     * @param trackInfo.track - The underlying WebRTC MediaStreamTrack of this
     * local track.
     * @param trackInfo.mediaType - The MediaType of this local track.
     * @param trackInfo.videoType - The VideoType of this local track.
     * @param trackInfo.effects - The effects array; the first effect whose
     * isEnabled() returns true for this track is started immediately.
     * @param trackInfo.resolution - The video resolution if it's a video track.
     * @param trackInfo.deviceId - The ID of the local device for this track.
     * @param trackInfo.facingMode - The camera facing mode used in the
     * getUserMedia call.
     * @param {sourceId} trackInfo.sourceId - The id of the desktop sharing
     * source. NOTE: defined for desktop sharing tracks only.
     */
    constructor({
        deviceId,
        facingMode,
        mediaType,
        resolution,
        rtcId,
        sourceId,
        sourceType,
        stream,
        track,
        videoType,
        effects = []
    }) {
        super(
            /* conference */ null,
            stream,
            track,
            /* streamInactiveHandler */ () => this.emit(LOCAL_TRACK_STOPPED),
            mediaType,
            videoType);

        // Guards against concurrent setEffect() invocations.
        this._setEffectInProgress = false;

        // Start the first effect that reports itself enabled for this track.
        const effect = effects.find(e => e.isEnabled(this));

        if (effect) {
            this._startStreamEffect(effect);
        }

        const displaySurface = videoType === VideoType.DESKTOP
            ? track.getSettings().displaySurface
            : null;

        /**
         * Track metadata: the creation timestamp (used by {@link getDuration})
         * and, for desktop tracks, the captured display surface.
         */
        this.metadata = {
            timestamp: Date.now(),
            ...displaySurface ? { displaySurface } : {}
        };

        /**
         * The ID assigned by the RTC module on instance creation.
         *
         * @type {number}
         */
        this.rtcId = rtcId;
        this.sourceId = sourceId;
        this.sourceType = sourceType;

        // Get the resolution from the track itself because it cannot be
        // certain which resolution webrtc has fallen back to using.
        this.resolution = track.getSettings().height;
        this.maxEnabledResolution = resolution;

        // Cache the constraints of the track in case this track model needs
        // to call getUserMedia again, such as when unmuting.
        this._constraints = track.getConstraints();

        // Safari returns an empty constraints object, construct the constraints using getSettings.
        if (!Object.keys(this._constraints).length && videoType === VideoType.CAMERA) {
            this._constraints = {
                height: track.getSettings().height,
                width: track.getSettings().width
            };
        }

        this.deviceId = deviceId;

        /**
         * The <tt>Promise</tt> which represents the progress of a previously
         * queued/scheduled {@link _setMuted} (from the point of view of
         * {@link _queueSetMuted}).
         *
         * @private
         * @type {Promise}
         */
        this._prevSetMuted = Promise.resolve();

        /**
         * The facing mode of the camera from which this JitsiLocalTrack
         * instance was obtained.
         *
         * @private
         * @type {CameraFacingMode|undefined}
         */
        this._facingMode = facingMode;

        // Currently there is no way to know the MediaStreamTrack ended due to
        // device disconnect in Firefox through e.g. "readyState" property.
        // Instead we will compare current track's label with device labels from
        // enumerateDevices() list.
        this._trackEnded = false;

        /**
         * Indicates whether data has been sent or not.
         */
        this._hasSentData = false;

        /**
         * Used only for detection of audio problems. We want to check only once
         * whether the track is sending data or not. This flag is set to false
         * after the check.
         */
        this._testDataSent = true;

        // Currently there is no way to determine with what device track was
        // created (until getConstraints() support), however we can associate
        // tracks with real devices obtained from enumerateDevices() call as
        // soon as it's called.
        // NOTE: this.deviceId corresponds to the device id specified in GUM constraints and this._realDeviceId seems to
        // correspond to the id of a matching device from the available device list.
        this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

        // performance.now() timestamp of the last 'track_mute' event; used to
        // report how long the track stayed muted once it unmutes.
        this._trackMutedTS = 0;

        // Marks this track as ended when its real device disappears from the
        // device list (covers browsers with no usable "readyState").
        this._onDeviceListWillChange = devices => {
            const oldRealDeviceId = this._realDeviceId;

            this._setRealDeviceIdFromDeviceList(devices);

            if (

                // Mark track as ended for those browsers that do not support
                // "readyState" property. We do not touch tracks created with
                // default device ID "".
                (typeof this.getTrack().readyState === 'undefined'
                    && typeof this._realDeviceId !== 'undefined'
                    && !devices.find(d => d.deviceId === this._realDeviceId))

                // If there was an associated realDeviceID and after the device change the realDeviceId is undefined
                // then the associated device has been disconnected and the _trackEnded flag needs to be set. In
                // addition on some Chrome versions the readyState property is set after the device change event is
                // triggered which causes issues in jitsi-meet with the selection of a new device because we don't
                // detect that the old one was removed.
                || (typeof oldRealDeviceId !== 'undefined' && typeof this._realDeviceId === 'undefined')
            ) {
                this._trackEnded = true;
            }
        };

        // Subscribe each created local audio track to
        // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
        // handling this event for remote tracks (which are handled in RTC.js),
        // because there might be local tracks not attached to a conference.
        if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
            this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);

            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        RTCUtils.addListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);

        this._initNoDataFromSourceHandlers();
    }
  181. /**
  182. * Get the duration of the track.
  183. *
  184. * @returns {Number} the duration of the track in seconds
  185. */
  186. getDuration() {
  187. return (Date.now() / 1000) - (this.metadata.timestamp / 1000);
  188. }
  189. /**
  190. * Returns if associated MediaStreamTrack is in the 'ended' state
  191. *
  192. * @returns {boolean}
  193. */
  194. isEnded() {
  195. if (this.isVideoTrack() && this.isMuted()) {
  196. // If a video track is muted the readyState will be ended, that's why we need to rely only on the
  197. // _trackEnded flag.
  198. return this._trackEnded;
  199. }
  200. return this.getTrack().readyState === 'ended' || this._trackEnded;
  201. }
  202. /**
  203. * Sets handlers to the MediaStreamTrack object that will detect camera
  204. * issues.
  205. */
  206. _initNoDataFromSourceHandlers() {
  207. if (!this._isNoDataFromSourceEventsEnabled()) {
  208. return;
  209. }
  210. this._setHandler('track_mute', () => {
  211. this._trackMutedTS = window.performance.now();
  212. this._fireNoDataFromSourceEvent();
  213. });
  214. this._setHandler('track_unmute', () => {
  215. this._fireNoDataFromSourceEvent();
  216. Statistics.sendAnalyticsAndLog(
  217. TRACK_UNMUTED,
  218. {
  219. 'media_type': this.getType(),
  220. 'track_type': 'local',
  221. value: window.performance.now() - this._trackMutedTS
  222. });
  223. });
  224. if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
  225. this._setHandler('track_ended', () => {
  226. if (!this.isReceivingData()) {
  227. this._fireNoDataFromSourceEvent();
  228. }
  229. });
  230. }
  231. }
  232. /**
  233. * Returns true if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
  234. *
  235. * @returns {boolean} - True if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
  236. */
  237. _isNoDataFromSourceEventsEnabled() {
  238. // Disable the events for screen sharing.
  239. return !this.isVideoTrack() || this.videoType !== VideoType.DESKTOP;
  240. }
  241. /**
  242. * Fires NO_DATA_FROM_SOURCE event and logs it to analytics and callstats.
  243. */
  244. _fireNoDataFromSourceEvent() {
  245. const value = !this.isReceivingData();
  246. this.emit(NO_DATA_FROM_SOURCE, value);
  247. // FIXME: Should we report all of those events
  248. Statistics.sendAnalytics(createNoDataFromSourceEvent(this.getType(), value));
  249. Statistics.sendLog(JSON.stringify({
  250. name: NO_DATA_FROM_SOURCE,
  251. log: value
  252. }));
  253. }
  254. /**
  255. * Sets real device ID by comparing track information with device
  256. * information. This is temporary solution until getConstraints() method
  257. * will be implemented in browsers.
  258. *
  259. * @param {MediaDeviceInfo[]} devices - list of devices obtained from
  260. * enumerateDevices() call
  261. */
  262. _setRealDeviceIdFromDeviceList(devices) {
  263. const track = this.getTrack();
  264. const kind = `${track.kind}input`;
  265. let device = devices.find(d => d.kind === kind && d.label === track.label);
  266. if (!device && this._realDeviceId === 'default') { // the default device has been changed.
  267. // If the default device was 'A' and the default device is changed to 'B' the label for the track will
  268. // remain 'Default - A' but the label for the device in the device list will be updated to 'A'. That's
  269. // why in order to match it we need to remove the 'Default - ' part.
  270. const label = (track.label || '').replace('Default - ', '');
  271. device = devices.find(d => d.kind === kind && d.label === label);
  272. }
  273. if (device) {
  274. this._realDeviceId = device.deviceId;
  275. } else {
  276. this._realDeviceId = undefined;
  277. }
  278. }
  279. /**
  280. * Sets the stream property of JitsiLocalTrack object and sets all stored
  281. * handlers to it.
  282. *
  283. * @param {MediaStream} stream the new stream.
  284. * @protected
  285. */
  286. _setStream(stream) {
  287. super._setStream(stream);
  288. if (stream) {
  289. // Store the MSID for video mute/unmute purposes.
  290. this.storedMSID = this.getMSID();
  291. logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
  292. } else {
  293. logger.debug(`Setting 'null' stream on ${this}`);
  294. }
  295. }
  296. /**
  297. * Starts the effect process and returns the modified stream.
  298. *
  299. * @private
  300. * @param {*} effect - Represents effect instance
  301. * @returns {void}
  302. */
  303. _startStreamEffect(effect) {
  304. this._streamEffect = effect;
  305. this._originalStream = this.stream;
  306. this._setStream(this._streamEffect.startEffect(this._originalStream));
  307. this.track = this.stream.getTracks()[0];
  308. }
  309. /**
  310. * Stops the effect process and returns the original stream.
  311. *
  312. * @private
  313. * @returns {void}
  314. */
  315. _stopStreamEffect() {
  316. if (this._streamEffect) {
  317. this._streamEffect.stopEffect();
  318. this._setStream(this._originalStream);
  319. this._originalStream = null;
  320. this.track = this.stream ? this.stream.getTracks()[0] : null;
  321. }
  322. }
  323. /**
  324. * Stops the currently used effect (if there is one) and starts the passed effect (if there is one).
  325. *
  326. * @param {Object|undefined} effect - The new effect to be set.
  327. */
  328. _switchStreamEffect(effect) {
  329. if (this._streamEffect) {
  330. this._stopStreamEffect();
  331. this._streamEffect = undefined;
  332. }
  333. if (effect) {
  334. this._startStreamEffect(effect);
  335. }
  336. }
    /**
     * Sets the effect and switches between the modified stream and original one.
     *
     * @param {Object} effect - Represents the effect instance to be used.
     * @returns {Promise}
     */
    setEffect(effect) {
        // Nothing to do: no effect is active and none is requested.
        if (typeof this._streamEffect === 'undefined' && typeof effect === 'undefined') {
            return Promise.resolve();
        }

        if (typeof effect !== 'undefined' && !effect.isEnabled(this)) {
            return Promise.reject(new Error('Incompatible effect instance!'));
        }

        if (this._setEffectInProgress === true) {
            return Promise.reject(new Error('setEffect already in progress!'));
        }

        // In case we have an audio track that is being enhanced with an effect, we still want it to be applied,
        // even if the track is muted. Where as for video the actual track doesn't exist if it's muted.
        if (this.isMuted() && !this.isAudioTrack()) {
            // Only remember the effect here; it is started on unmute
            // (see _setMuted).
            this._streamEffect = effect;

            return Promise.resolve();
        }

        const conference = this.conference;

        // Not attached to a conference: swap the streams locally and
        // re-attach the resulting stream to any rendering containers.
        if (!conference) {
            this._switchStreamEffect(effect);
            if (this.isVideoTrack()) {
                this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));
            }

            return Promise.resolve();
        }

        this._setEffectInProgress = true;

        // In a conference the track has to be re-negotiated: remove it, swap
        // the stream, then add it back.
        // TODO: Create new JingleSessionPC method for replacing a stream in JitsiLocalTrack without offer answer.
        return conference.removeTrack(this)
            .then(() => {
                this._switchStreamEffect(effect);
                if (this.isVideoTrack()) {
                    this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));
                }

                return conference.addTrack(this);
            })
            .then(() => {
                this._setEffectInProgress = false;
            })
            .catch(error => {
                // Any error will be not recoverable and will trigger CONFERENCE_FAILED event. But let's try to cleanup
                // everything related to the effect functionality.
                this._setEffectInProgress = false;
                this._switchStreamEffect();
                logger.error('Failed to switch to the new stream!', error);
                throw error;
            });
    }
  389. /**
  390. * Asynchronously mutes this track.
  391. *
  392. * @returns {Promise}
  393. */
  394. mute() {
  395. return this._queueSetMuted(true);
  396. }
  397. /**
  398. * Asynchronously unmutes this track.
  399. *
  400. * @returns {Promise}
  401. */
  402. unmute() {
  403. return this._queueSetMuted(false);
  404. }
  405. /**
  406. * Initializes a new Promise to execute {@link #_setMuted}. May be called
  407. * multiple times in a row and the invocations of {@link #_setMuted} and,
  408. * consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
  409. * serialized fashion.
  410. *
  411. * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
  412. * @returns {Promise}
  413. */
  414. _queueSetMuted(muted) {
  415. const setMuted = this._setMuted.bind(this, muted);
  416. this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);
  417. return this._prevSetMuted;
  418. }
    /**
     * Mutes / unmutes this track.
     *
     * @param {boolean} muted - If <tt>true</tt>, this track will be muted;
     * otherwise, this track will be unmuted.
     * @private
     * @returns {Promise}
     */
    _setMuted(muted) {
        // Already in the requested state.
        if (this.isMuted() === muted) {
            return Promise.resolve();
        }

        if (this.disposed) {
            return Promise.reject(new JitsiTrackError(TRACK_IS_DISPOSED));
        }

        let promise = Promise.resolve();

        // A function that will print info about muted status transition
        const logMuteInfo = () => logger.info(`Mute ${this}: ${muted}`);

        // Audio, desktop video, and browsers that don't mute video by removing
        // the stream can simply toggle the track's "enabled" flag.
        if (this.isAudioTrack()
                || this.videoType === VideoType.DESKTOP
                || !browser.doesVideoMuteByStreamRemove()) {
            logMuteInfo();

            // If we have a stream effect that implements its own mute functionality, prioritize it before
            // normal mute e.g. the stream effect that implements system audio sharing has a custom
            // mute state in which if the user mutes, system audio still has to go through.
            if (this._streamEffect && this._streamEffect.setMuted) {
                this._streamEffect.setMuted(muted);
            } else if (this.track) {
                this.track.enabled = !muted;
            }
        } else if (muted) {
            // Camera mute on browsers that mute by removing the stream:
            // detach, stop and drop the underlying stream entirely.
            promise = new Promise((resolve, reject) => {
                logMuteInfo();
                this._removeStreamFromConferenceAsMute(
                    () => {
                        if (this._streamEffect) {
                            this._stopStreamEffect();
                        }

                        // FIXME: Maybe here we should set the SRC for the
                        // containers to something
                        // We don't want any events to be fired on this stream
                        this._unregisterHandlers();
                        this.stopStream();
                        this._setStream(null);
                        resolve();
                    },
                    reject);
            });
        } else {
            logMuteInfo();

            // This path is only for camera.
            const streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                effects: this._streamEffect ? [ this._streamEffect ] : [],
                facingMode: this.getCameraFacingMode()
            };

            // Unmute by re-acquiring the camera with the cached constraints.
            promise
                = RTCUtils.obtainAudioAndVideoPermissions(Object.assign(
                    {},
                    streamOptions,
                    { constraints: { video: this._constraints } }));

            promise = promise.then(streamsInfo => {
                // The track kind for presenter track is video as well.
                const mediaType = this.getType() === MediaType.PRESENTER ? MediaType.VIDEO : this.getType();
                const streamInfo = streamsInfo.find(info => info.track.kind === mediaType);

                if (streamInfo) {
                    this._setStream(streamInfo.stream);
                    this.track = streamInfo.track;

                    // This is not good when video type changes after
                    // unmute, but let's not crash here
                    if (this.videoType !== streamInfo.videoType) {
                        logger.warn(
                            `${this}: video type has changed after unmute!`,
                            this.videoType, streamInfo.videoType);
                        this.videoType = streamInfo.videoType;
                    }
                } else {
                    throw new JitsiTrackError(TRACK_NO_STREAM_FOUND);
                }

                // Restart the remembered effect on the fresh stream.
                if (this._streamEffect) {
                    this._startStreamEffect(this._streamEffect);
                }

                this.containers.map(
                    cont => RTCUtils.attachMediaStream(cont, this.stream));

                return this._addStreamToConferenceAsUnmute();
            });
        }

        // Regardless of the path taken, report the new status and notify
        // listeners once the operation finishes.
        return promise
            .then(() => {
                this._sendMuteStatus(muted);
                this.emit(TRACK_MUTE_CHANGED, this);
            });
    }
  513. /**
  514. * Adds stream to conference and marks it as "unmute" operation.
  515. *
  516. * @private
  517. * @returns {Promise}
  518. */
  519. _addStreamToConferenceAsUnmute() {
  520. if (!this.conference) {
  521. return Promise.resolve();
  522. }
  523. // FIXME it would be good to not included conference as part of this
  524. // process. Only TraceablePeerConnections to which the track is attached
  525. // should care about this action. The TPCs to which the track is not
  526. // attached can sync up when track is re-attached.
  527. // A problem with that is that the "modify sources" queue is part of
  528. // the JingleSessionPC and it would be excluded from the process. One
  529. // solution would be to extract class between TPC and JingleSessionPC
  530. // which would contain the queue and would notify the signaling layer
  531. // when local SSRCs are changed. This would help to separate XMPP from
  532. // the RTC module.
  533. return new Promise((resolve, reject) => {
  534. this.conference._addLocalTrackAsUnmute(this)
  535. .then(resolve, error => reject(new Error(error)));
  536. });
  537. }
  538. /**
  539. * Removes stream from conference and marks it as "mute" operation.
  540. *
  541. * @param {Function} successCallback will be called on success
  542. * @param {Function} errorCallback will be called on error
  543. * @private
  544. */
  545. _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
  546. if (!this.conference) {
  547. successCallback();
  548. return;
  549. }
  550. this.conference._removeLocalTrackAsMute(this).then(
  551. successCallback,
  552. error => errorCallback(new Error(error)));
  553. }
  554. /**
  555. * Sends mute status for a track to conference if any.
  556. *
  557. * @param {boolean} mute - If track is muted.
  558. * @private
  559. * @returns {void}
  560. */
  561. _sendMuteStatus(mute) {
  562. if (this.conference) {
  563. this.conference._setTrackMuteStatus(this, mute) && this.conference.room.sendPresence();
  564. }
  565. }
    /**
     * @inheritdoc
     *
     * Stops sending the media track. And removes it from the HTML.
     * NOTE: Works for local tracks only.
     *
     * @extends JitsiTrack#dispose
     * @returns {Promise}
     */
    dispose() {
        let promise = Promise.resolve();

        // Remove the effect instead of stopping it so that the original stream is restored
        // on both the local track and on the peerconnection.
        if (this._streamEffect) {
            promise = this.setEffect();
        }

        if (this.conference) {
            promise = promise.then(() => this.conference.removeTrack(this));
        }

        if (this.stream) {
            this.stopStream();
            this.detach();
        }

        // Unsubscribe the listeners that were registered in the constructor.
        RTCUtils.removeListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);

        if (this._onAudioOutputDeviceChanged) {
            RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        return promise.then(() => super.dispose());
    }
  596. /**
  597. * Returns <tt>true</tt> - if the stream is muted and <tt>false</tt>
  598. * otherwise.
  599. *
  600. * @returns {boolean} <tt>true</tt> - if the stream is muted and
  601. * <tt>false</tt> otherwise.
  602. */
  603. isMuted() {
  604. // this.stream will be null when we mute local video on Chrome
  605. if (!this.stream) {
  606. return true;
  607. }
  608. if (this.isVideoTrack() && !this.isActive()) {
  609. return true;
  610. }
  611. // If currently used stream effect has its own muted state, use that.
  612. if (this._streamEffect && this._streamEffect.isMuted) {
  613. return this._streamEffect.isMuted();
  614. }
  615. return !this.track || !this.track.enabled;
  616. }
  617. /**
  618. * Sets the JitsiConference object associated with the track. This is temp
  619. * solution.
  620. *
  621. * @param conference the JitsiConference object
  622. */
  623. _setConference(conference) {
  624. this.conference = conference;
  625. // We want to keep up with postponed events which should have been fired
  626. // on "attach" call, but for local track we not always have the
  627. // conference before attaching. However this may result in duplicated
  628. // events if they have been triggered on "attach" already.
  629. for (let i = 0; i < this.containers.length; i++) {
  630. this._maybeFireTrackAttached(this.containers[i]);
  631. }
  632. }
  633. /**
  634. * Returns <tt>true</tt>.
  635. *
  636. * @returns {boolean} <tt>true</tt>
  637. */
  638. isLocal() {
  639. return true;
  640. }
  641. /**
  642. * Returns device id associated with track.
  643. *
  644. * @returns {string}
  645. */
  646. getDeviceId() {
  647. return this._realDeviceId || this.deviceId;
  648. }
  649. /**
  650. * Returns the participant id which owns the track.
  651. *
  652. * @returns {string} the id of the participants. It corresponds to the
  653. * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
  654. */
  655. getParticipantId() {
  656. return this.conference && this.conference.myUserId();
  657. }
  658. /**
  659. * Handles bytes sent statistics.
  660. *
  661. * @param {TraceablePeerConnection} tpc the source of the "bytes sent" stat
  662. * @param {number} bytesSent the new value
  663. * NOTE: used only for audio tracks to detect audio issues.
  664. */
  665. _onByteSentStatsReceived(tpc, bytesSent) {
  666. if (bytesSent > 0) {
  667. this._hasSentData = true;
  668. }
  669. const iceConnectionState = tpc.getConnectionState();
  670. if (this._testDataSent && iceConnectionState === 'connected') {
  671. setTimeout(() => {
  672. if (!this._hasSentData) {
  673. logger.warn(`${this} 'bytes sent' <= 0: \
  674. ${bytesSent}`);
  675. Statistics.analytics.sendEvent(NO_BYTES_SENT, { 'media_type': this.getType() });
  676. }
  677. }, 3000);
  678. this._testDataSent = false;
  679. }
  680. }
  681. /**
  682. * Returns facing mode for video track from camera. For other cases (e.g.
  683. * audio track or 'desktop' video track) returns undefined.
  684. *
  685. * @returns {CameraFacingMode|undefined}
  686. */
  687. getCameraFacingMode() {
  688. if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
  689. // MediaStreamTrack#getSettings() is not implemented in many
  690. // browsers, so we need feature checking here. Progress on the
  691. // respective browser's implementation can be tracked at
  692. // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
  693. // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
  694. // for Firefox. Even if a browser implements getSettings() already,
  695. // it might still not return anything for 'facingMode'.
  696. const trackSettings = this.track.getSettings?.();
  697. if (trackSettings && 'facingMode' in trackSettings) {
  698. return trackSettings.facingMode;
  699. }
  700. if (typeof this._facingMode !== 'undefined') {
  701. return this._facingMode;
  702. }
  703. // In most cases we are showing a webcam. So if we've gotten here,
  704. // it should be relatively safe to assume that we are probably
  705. // showing the user-facing camera.
  706. return CameraFacingMode.USER;
  707. }
  708. return undefined;
  709. }
    /**
     * Stops the associated MediaStream.
     */
    stopStream() {
        /**
         * Indicates that we are executing {@link #stopStream} i.e.
         * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
         * associated with this <tt>JitsiTrack</tt> instance.
         *
         * @private
         * @type {boolean}
         */
        this._stopStreamInProgress = true;

        try {
            RTCUtils.stopMediaStream(this.stream);
        } finally {
            // Always clear the flag, even when stopping the stream throws.
            this._stopStreamInProgress = false;
        }
    }
  729. /**
  730. * Switches the camera facing mode if the WebRTC implementation supports the
  731. * custom MediaStreamTrack._switchCamera method. Currently, the method in
  732. * question is implemented in react-native-webrtc only. When such a WebRTC
  733. * implementation is executing, the method is the preferred way to switch
  734. * between the front/user-facing and the back/environment-facing cameras
  735. * because it will likely be (as is the case of react-native-webrtc)
  736. * noticeably faster that creating a new MediaStreamTrack via a new
  737. * getUserMedia call with the switched facingMode constraint value.
  738. * Moreover, the approach with a new getUserMedia call may not even work:
  739. * WebRTC on Android and iOS is either very slow to open the camera a second
  740. * time or plainly freezes attempting to do that.
  741. */
  742. _switchCamera() {
  743. if (this.isVideoTrack()
  744. && this.videoType === VideoType.CAMERA
  745. && typeof this.track._switchCamera === 'function') {
  746. this.track._switchCamera();
  747. this._facingMode
  748. = this._facingMode === CameraFacingMode.ENVIRONMENT
  749. ? CameraFacingMode.USER
  750. : CameraFacingMode.ENVIRONMENT;
  751. }
  752. }
  753. /**
  754. * Checks whether the attached MediaStream is receiving data from source or
  755. * not. If the stream property is null(because of mute or another reason)
  756. * this method will return false.
  757. * NOTE: This method doesn't indicate problem with the streams directly.
  758. * For example in case of video mute the method will return false or if the
  759. * user has disposed the track.
  760. *
  761. * @returns {boolean} true if the stream is receiving data and false
  762. * this otherwise.
  763. */
  764. isReceivingData() {
  765. if (this.isVideoTrack()
  766. && (this.isMuted() || this._stopStreamInProgress || this.videoType === VideoType.DESKTOP)) {
  767. return true;
  768. }
  769. if (!this.stream) {
  770. return false;
  771. }
  772. // In older version of the spec there is no muted property and
  773. // readyState can have value muted. In the latest versions
  774. // readyState can have values "live" and "ended" and there is
  775. // muted boolean property. If the stream is muted that means that
  776. // we aren't receiving any data from the source. We want to notify
  777. // the users for error if the stream is muted or ended on it's
  778. // creation.
  779. // For video blur enabled use the original video stream
  780. const stream = this._effectEnabled ? this._originalStream : this.stream;
  781. return stream.getTracks().some(track =>
  782. (!('readyState' in track) || track.readyState === 'live')
  783. && (!('muted' in track) || track.muted !== true));
  784. }
  785. /**
  786. * Creates a text representation of this local track instance.
  787. *
  788. * @return {string}
  789. */
  790. toString() {
  791. return `LocalTrack[${this.rtcId},${this.getType()}]`;
  792. }
  793. }