
JitsiLocalTrack.js 24KB

/* global __filename, Promise */
import CameraFacingMode from '../../service/RTC/CameraFacingMode';
import { getLogger } from 'jitsi-meet-logger';
import JitsiTrack from './JitsiTrack';
import JitsiTrackError from '../../JitsiTrackError';
import * as JitsiTrackErrors from '../../JitsiTrackErrors';
import * as JitsiTrackEvents from '../../JitsiTrackEvents';
import * as MediaType from '../../service/RTC/MediaType';
import RTCBrowserType from './RTCBrowserType';
import RTCEvents from '../../service/RTC/RTCEvents';
import RTCUtils from './RTCUtils';
import Statistics from '../statistics/statistics';
import VideoType from '../../service/RTC/VideoType';

const logger = getLogger(__filename);

/* eslint-disable max-params */
/**
 * Represents a single media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 * @param {number} rtcId the ID assigned by the RTC module
 * @param stream WebRTC MediaStream, parent of the track
 * @param track underlying WebRTC MediaStreamTrack for the new JitsiLocalTrack
 * @param mediaType the MediaType of the JitsiLocalTrack
 * @param videoType the VideoType of the JitsiLocalTrack
 * @param resolution the video resolution if it's a video track
 * @param deviceId the ID of the local device for this track
 * @param facingMode the camera facing mode used in the getUserMedia call
 * @constructor
 */
function JitsiLocalTrack(
        rtcId,
        stream,
        track,
        mediaType,
        videoType,
        resolution,
        deviceId,
        facingMode) {
    /**
     * The ID assigned by the RTC module on instance creation.
     * @type {number}
     */
    this.rtcId = rtcId;
    JitsiTrack.call(
        this,
        null /* RTC */,
        stream,
        track,
        () => {
            if (!this.dontFireRemoveEvent) {
                this.eventEmitter.emit(JitsiTrackEvents.LOCAL_TRACK_STOPPED);
            }
            this.dontFireRemoveEvent = false;
        } /* inactiveHandler */,
        mediaType,
        videoType);
    this.dontFireRemoveEvent = false;
    this.resolution = resolution;

    // FIXME: currently Firefox is ignoring our constraints about resolutions,
    // so we do not store it, to avoid wrong reporting of the local track
    // resolution.
    if (RTCBrowserType.isFirefox()) {
        this.resolution = null;
    }
    this.deviceId = deviceId;
    this.startMuted = false;
    this.storedMSID = this.getMSID();
    this.inMuteOrUnmuteProgress = false;

    /**
     * A set which stores the peer connections to which this local track is
     * currently attached. See {@link TraceablePeerConnection.attachTrack}.
     * @type {Set<TraceablePeerConnection>}
     */
    this.peerConnections = new Set();

    /**
     * The facing mode of the camera from which this JitsiLocalTrack instance
     * was obtained.
     */
    this._facingMode = facingMode;

    // Currently there is no way to know that a MediaStreamTrack ended due to
    // a device disconnect in Firefox through e.g. the "readyState" property.
    // Instead we will compare the current track's label with the device
    // labels from the enumerateDevices() list.
    this._trackEnded = false;

    /**
     * The value of bytes sent received from the statistics module.
     */
    this._bytesSent = null;

    /**
     * Used only for detection of audio problems. We want to check only once
     * whether the track is sending bytes or not. This flag is set to false
     * after the check.
     */
    this._testByteSent = true;

    // Currently there is no way to determine with which device a track was
    // created (until getConstraints() is supported), however we can associate
    // tracks with real devices obtained from the enumerateDevices() call as
    // soon as it's called.
    this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

    /**
     * Indicates that we have called RTCUtils.stopMediaStream for the
     * MediaStream related to this JitsiTrack object.
     */
    this.stopStreamInProgress = false;

    /**
     * On mute event we are waiting for 3s to check if the stream is going to
     * be still muted before firing the event for camera issue detected
     * (NO_DATA_FROM_SOURCE).
     */
    this._noDataFromSourceTimeout = null;

    this._onDeviceListChanged = devices => {
        this._setRealDeviceIdFromDeviceList(devices);

        // Mark the track as ended for those browsers that do not support the
        // "readyState" property. We do not touch tracks created with the
        // default device ID "".
        if (typeof this.getTrack().readyState === 'undefined'
                && typeof this._realDeviceId !== 'undefined'
                && !devices.find(d => d.deviceId === this._realDeviceId)) {
            this._trackEnded = true;
        }
    };

    // Subscribe each created local audio track to the
    // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
    // handling this event for remote tracks (which are handled in RTC.js),
    // because there might be local tracks not attached to a conference.
    if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
        this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);
        RTCUtils.addListener(
            RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
            this._onAudioOutputDeviceChanged);
    }
    RTCUtils.addListener(
        RTCEvents.DEVICE_LIST_CHANGED,
        this._onDeviceListChanged);
    this._initNoDataFromSourceHandlers();
}
/* eslint-enable max-params */

JitsiLocalTrack.prototype = Object.create(JitsiTrack.prototype);
JitsiLocalTrack.prototype.constructor = JitsiLocalTrack;
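
// Illustrative sketch (not part of this module): JitsiLocalTrack instances
// are normally created by the RTC module rather than by applications, but
// the constructor above would be invoked roughly as follows. All concrete
// values below are made up for illustration.
//
//     const localVideoTrack = new JitsiLocalTrack(
//         1 /* rtcId assigned by the RTC module */,
//         mediaStream /* MediaStream obtained from getUserMedia */,
//         mediaStream.getVideoTracks()[0],
//         MediaType.VIDEO,
//         VideoType.CAMERA,
//         720 /* resolution */,
//         'default' /* deviceId */,
//         CameraFacingMode.USER);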

/**
 * Associates this track with a <tt>TraceablePeerConnection</tt> to which it
 * has been attached.
 * @param {TraceablePeerConnection} tpc the peer connection to add
 */
JitsiLocalTrack.prototype._addPeerConnection = function(tpc) {
    if (this._isAttachedToPC(tpc)) {
        logger.error(`${tpc} has been associated with ${this} already !`);
    } else {
        this.peerConnections.add(tpc);
    }
};

/**
 * Removes the association between this track and a
 * <tt>TraceablePeerConnection</tt>.
 * @param {TraceablePeerConnection} tpc the peer connection to remove
 */
JitsiLocalTrack.prototype._removePeerConnection = function(tpc) {
    if (this._isAttachedToPC(tpc)) {
        this.peerConnections.delete(tpc);
    } else {
        logger.error(`${tpc} is not associated with ${this}`);
    }
};

/**
 * Checks whether or not this instance is attached to the given
 * <tt>TraceablePeerConnection</tt>. See
 * {@link TraceablePeerConnection.attachTrack} for more info.
 * @param {TraceablePeerConnection} tpc
 * @return {boolean} <tt>true</tt> if this track is currently attached to the
 * given peer connection or <tt>false</tt> otherwise.
 */
JitsiLocalTrack.prototype._isAttachedToPC = function(tpc) {
    return this.peerConnections.has(tpc);
};

/**
 * Returns whether the associated MediaStreamTrack is in the 'ended' state.
 * @returns {boolean}
 */
JitsiLocalTrack.prototype.isEnded = function() {
    return this.getTrack().readyState === 'ended' || this._trackEnded;
};

/**
 * Sets handlers on the MediaStreamTrack object that will detect camera
 * issues.
 */
JitsiLocalTrack.prototype._initNoDataFromSourceHandlers = function() {
    if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
        const _onNoDataFromSourceError
            = this._onNoDataFromSourceError.bind(this);

        this._setHandler('track_mute', () => {
            if (this._checkForCameraIssues()) {
                const now = window.performance.now();

                this._noDataFromSourceTimeout
                    = setTimeout(_onNoDataFromSourceError, 3000);
                this._setHandler('track_unmute', () => {
                    this._clearNoDataFromSourceMuteResources();
                    Statistics.sendEventToAll(
                        `${this.getType()}.track_unmute`,
                        { value: window.performance.now() - now });
                });
            }
        });
        this._setHandler('track_ended', _onNoDataFromSourceError);
    }
};

/**
 * Clears all timeouts and handlers set on the MediaStreamTrack mute event.
 * FIXME: Change the name of the method to a better one.
 */
JitsiLocalTrack.prototype._clearNoDataFromSourceMuteResources = function() {
    if (this._noDataFromSourceTimeout) {
        clearTimeout(this._noDataFromSourceTimeout);
        this._noDataFromSourceTimeout = null;
    }
    this._setHandler('track_unmute', undefined);
};

/**
 * Called when a potential camera issue is detected. Clears the handlers and
 * timeouts set on the MediaStreamTrack mute event. Verifies that the camera
 * issue persists and fires the NO_DATA_FROM_SOURCE event.
 */
JitsiLocalTrack.prototype._onNoDataFromSourceError = function() {
    this._clearNoDataFromSourceMuteResources();
    if (this._checkForCameraIssues()) {
        this._fireNoDataFromSourceEvent();
    }
};

/**
 * Fires JitsiTrackEvents.NO_DATA_FROM_SOURCE and logs it to analytics and
 * callstats.
 */
JitsiLocalTrack.prototype._fireNoDataFromSourceEvent = function() {
    this.eventEmitter.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
    const eventName = `${this.getType()}.no_data_from_source`;

    Statistics.analytics.sendEvent(eventName);
    const log = { name: eventName };

    if (this.isAudioTrack()) {
        log.isReceivingData = this._isReceivingData();
    }
    Statistics.sendLog(JSON.stringify(log));
};
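
// Usage sketch (illustrative, not part of this module): consumers can react
// to the NO_DATA_FROM_SOURCE event emitted above, assuming a `track`
// variable holding a JitsiLocalTrack and the addEventListener helper
// inherited from JitsiTrack.
//
//     track.addEventListener(
//         JitsiTrackEvents.NO_DATA_FROM_SOURCE,
//         () => {
//             // The device stopped delivering media; prompt the user to
//             // check the camera/microphone or pick another device.
//             logger.warn(`${track} is not receiving data from its source`);
//         });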

/**
 * Sets real device ID by comparing track information with device information.
 * This is temporary solution until getConstraints() method will be
 * implemented in browsers.
 * @param {MediaDeviceInfo[]} devices - list of devices obtained from
 * enumerateDevices() call
 */
JitsiLocalTrack.prototype._setRealDeviceIdFromDeviceList = function(devices) {
    const track = this.getTrack();
    const device
        = devices.find(
            d => d.kind === `${track.kind}input` && d.label === track.label);

    if (device) {
        this._realDeviceId = device.deviceId;
    }
};

/**
 * Sets the stream property of JitsiLocalTrack object and sets all stored
 * handlers to it.
 * @param {MediaStream} stream the new stream.
 */
JitsiLocalTrack.prototype._setStream = function(stream) {
    JitsiTrack.prototype._setStream.call(this, stream);

    // Store the MSID for video mute/unmute purposes
    if (stream) {
        this.storedMSID = this.getMSID();
        logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
    } else {
        logger.debug(`Setting 'null' stream on ${this}`);
    }
};

/**
 * Mutes the track. Will reject the Promise if there is mute/unmute operation
 * in progress.
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.mute = function() {
    return createMuteUnmutePromise(this, true);
};

/**
 * Unmutes the track. Will reject the Promise if there is mute/unmute
 * operation in progress.
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.unmute = function() {
    return createMuteUnmutePromise(this, false);
};

/**
 * Creates Promise for mute/unmute operation.
 *
 * @param {JitsiLocalTrack} track - The track that will be muted/unmuted.
 * @param {boolean} mute - Whether to mute or unmute the track.
 * @returns {Promise}
 */
function createMuteUnmutePromise(track, mute) {
    if (track.inMuteOrUnmuteProgress) {
        return Promise.reject(
            new JitsiTrackError(JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS)
        );
    }

    track.inMuteOrUnmuteProgress = true;

    return track._setMute(mute)
        .then(() => {
            track.inMuteOrUnmuteProgress = false;
        })
        .catch(status => {
            track.inMuteOrUnmuteProgress = false;
            throw status;
        });
}
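
// Usage sketch (illustrative, not part of this module): mute() and unmute()
// reject while another mute/unmute operation is still in progress, so
// callers should handle TRACK_MUTE_UNMUTE_IN_PROGRESS. Assumes a `track`
// variable holding a JitsiLocalTrack and that JitsiTrackError exposes the
// error constant via its `name` property.
//
//     track.mute()
//         .then(() => logger.info(`${track} muted`))
//         .catch(error => {
//             if (error.name
//                     === JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS) {
//                 logger.warn('A mute/unmute operation is already running');
//             } else {
//                 logger.error(`Failed to mute ${track}`, error);
//             }
//         });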

/**
 * Mutes / unmutes the track.
 *
 * @param {boolean} mute - If true the track will be muted. Otherwise the
 * track will be unmuted.
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._setMute = function(mute) {
    if (this.isMuted() === mute) {
        return Promise.resolve();
    }

    let promise = Promise.resolve();
    const self = this;

    // A local track can be used out of conference, so we need to handle that
    // case and mark whether the track should start muted or not when added
    // to a conference.
    // Pawel: the track's muted status should be taken into account when the
    // track is being added to the conference/JingleSessionPC/
    // TraceablePeerConnection. There's no need to add such fields. It is
    // logical that when a muted track is being added to a conference it
    // "starts muted"...
    if (!this.conference || !this.conference.room) {
        this.startMuted = mute;
    }
    this.dontFireRemoveEvent = false;

    // A function that will print info about the muted status transition
    const logMuteInfo = () => logger.info(`Mute ${this}: ${mute}`);

    if (this.isAudioTrack()
            || this.videoType === VideoType.DESKTOP
            || !RTCBrowserType.doesVideoMuteByStreamRemove()) {
        logMuteInfo();
        if (this.track) {
            this.track.enabled = !mute;
        }
    } else if (mute) {
        this.dontFireRemoveEvent = true;
        promise = new Promise((resolve, reject) => {
            logMuteInfo();
            this._removeStreamFromConferenceAsMute(() => {
                // FIXME: Maybe here we should set the SRC for the containers
                // to something
                this._stopMediaStream();
                this._setStream(null);
                resolve();
            }, err => {
                reject(err);
            });
        });
    } else {
        logMuteInfo();

        // This path is only for camera.
        const streamOptions = {
            cameraDeviceId: this.getDeviceId(),
            devices: [ MediaType.VIDEO ],
            facingMode: this.getCameraFacingMode()
        };

        if (this.resolution) {
            streamOptions.resolution = this.resolution;
        }
        promise = RTCUtils.obtainAudioAndVideoPermissions(streamOptions)
            .then(streamsInfo => {
                const mediaType = self.getType();
                const streamInfo
                    = streamsInfo.find(info => info.mediaType === mediaType);

                if (streamInfo) {
                    self._setStream(streamInfo.stream);
                    self.track = streamInfo.track;

                    // This is not good when the video type changes after
                    // unmute, but let's not crash here
                    if (self.videoType !== streamInfo.videoType) {
                        logger.warn(
                            `${this}: video type has changed after unmute!`,
                            self.videoType, streamInfo.videoType);
                        self.videoType = streamInfo.videoType;
                    }
                } else {
                    throw new JitsiTrackError(
                        JitsiTrackErrors.TRACK_NO_STREAM_FOUND);
                }

                self.containers
                    = self.containers.map(
                        cont => RTCUtils.attachMediaStream(cont, self.stream));

                return self._addStreamToConferenceAsUnmute();
            });
    }

    return promise
        .then(() => this._sendMuteStatus(mute))
        .then(() => {
            this.eventEmitter.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
        });
};

/**
 * Adds the stream to the conference and marks it as an "unmute" operation.
 *
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._addStreamToConferenceAsUnmute = function() {
    if (!this.conference) {
        return Promise.resolve();
    }

    // FIXME it would be good to not include the conference as part of this
    // process. Only TraceablePeerConnections to which the track is attached
    // should care about this action. The TPCs to which the track is not
    // attached can sync up when the track is re-attached.
    // A problem with that is that the "modify sources" queue is part of
    // the JingleSessionPC and it would be excluded from the process. One
    // solution would be to extract a class between TPC and JingleSessionPC
    // which would contain the queue and would notify the signaling layer
    // when local SSRCs are changed. This would help to separate XMPP from
    // the RTC module.
    return new Promise((resolve, reject) => {
        this.conference._addLocalTrackAsUnmute(this)
            .then(resolve, error => reject(new Error(error)));
    });
};

/**
 * Removes the stream from the conference and marks it as a "mute" operation.
 * @param {Function} successCallback will be called on success
 * @param {Function} errorCallback will be called on error
 * @private
 */
JitsiLocalTrack.prototype._removeStreamFromConferenceAsMute
    = function(successCallback, errorCallback) {
        if (!this.conference) {
            successCallback();

            return;
        }
        this.conference._removeLocalTrackAsMute(this).then(
            successCallback,
            error => errorCallback(new Error(error)));
    };

/**
 * Sends the mute status for the track to the conference, if any.
 *
 * @param {boolean} mute - If the track is muted.
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._sendMuteStatus = function(mute) {
    if (!this.conference || !this.conference.room) {
        return Promise.resolve();
    }

    return new Promise(resolve => {
        this.conference.room[
            this.isAudioTrack()
                ? 'setAudioMute'
                : 'setVideoMute'](mute, resolve);
    });
};

/**
 * @inheritdoc
 *
 * Stops sending the media track and removes it from the HTML.
 * NOTE: Works for local tracks only.
 *
 * @extends JitsiTrack#dispose
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.dispose = function() {
    const self = this;
    let promise = Promise.resolve();

    if (this.conference) {
        promise = this.conference.removeTrack(this);
    }

    if (this.stream) {
        this._stopMediaStream();
        this.detach();
    }

    RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED,
        this._onDeviceListChanged);

    if (this._onAudioOutputDeviceChanged) {
        RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
            this._onAudioOutputDeviceChanged);
    }

    return promise
        .then(() => JitsiTrack.prototype.dispose.call(self) // super.dispose();
        );
};
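
// Usage sketch (illustrative, not part of this module): a local track should
// be disposed once it is no longer needed; dispose() removes it from the
// conference (if any), stops the underlying MediaStream and detaches it from
// its HTML containers. Assumes a `track` variable holding a JitsiLocalTrack.
//
//     track.dispose()
//         .then(() => logger.info(`${track} disposed`))
//         .catch(error => logger.error(`Failed to dispose ${track}`, error));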

/**
 * Returns <tt>true</tt> if the stream is muted and <tt>false</tt> otherwise.
 * @returns {boolean} <tt>true</tt> if the stream is muted and <tt>false</tt>
 * otherwise.
 */
JitsiLocalTrack.prototype.isMuted = function() {
    // this.stream will be null when we mute local video on Chrome
    if (!this.stream) {
        return true;
    }
    if (this.isVideoTrack() && !this.isActive()) {
        return true;
    }

    return !this.track || !this.track.enabled;
};

/**
 * Sets the JitsiConference object associated with the track. This is a
 * temporary solution.
 * @param conference the JitsiConference object
 */
JitsiLocalTrack.prototype._setConference = function(conference) {
    this.conference = conference;

    // We want to keep up with postponed events which should have been fired
    // on the "attach" call, but for a local track we do not always have the
    // conference before attaching. However this may result in duplicated
    // events if they have been triggered on "attach" already.
    for (let i = 0; i < this.containers.length; i++) {
        this._maybeFireTrackAttached(this.containers[i]);
    }
};

/**
 * Returns <tt>true</tt>.
 * @returns {boolean} <tt>true</tt>
 */
JitsiLocalTrack.prototype.isLocal = function() {
    return true;
};

/**
 * Returns the device id associated with the track.
 * @returns {string}
 */
JitsiLocalTrack.prototype.getDeviceId = function() {
    return this._realDeviceId || this.deviceId;
};

/**
 * Returns the participant id which owns the track.
 * @returns {string} the id of the participant. It corresponds to the Colibri
 * endpoint id/MUC nickname in case of Jitsi-meet.
 */
JitsiLocalTrack.prototype.getParticipantId = function() {
    return this.conference && this.conference.myUserId();
};

/**
 * Sets the value of the bytes sent statistic.
 * @param {TraceablePeerConnection} tpc the source of the "bytes sent" stat
 * @param {number} bytesSent the new value
 * NOTE: used only for audio tracks to detect audio issues.
 */
JitsiLocalTrack.prototype._setByteSent = function(tpc, bytesSent) {
    this._bytesSent = bytesSent;
    const iceConnectionState = tpc.getConnectionState();

    if (this._testByteSent && iceConnectionState === 'connected') {
        setTimeout(() => {
            if (this._bytesSent <= 0) {
                logger.warn(`${this} 'bytes sent' <= 0: ${this._bytesSent}`);

                // we are not receiving anything from the microphone
                this._fireNoDataFromSourceEvent();
            }
        }, 3000);
        this._testByteSent = false;
    }
};

/**
 * Returns the facing mode for a video track from the camera. For other cases
 * (e.g. an audio track or a 'desktop' video track) returns undefined.
 *
 * @returns {CameraFacingMode|undefined}
 */
JitsiLocalTrack.prototype.getCameraFacingMode = function() {
    if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
        // MediaStreamTrack#getSettings() is not implemented in many browsers,
        // so we need feature checking here. Progress on the respective
        // browser's implementation can be tracked at
        // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
        // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
        // for Firefox. Even if a browser implements getSettings() already, it
        // might still not return anything for 'facingMode'.
        let trackSettings;

        try {
            trackSettings = this.track.getSettings();
        } catch (e) {
            // XXX React-native-webrtc, for example, defines
            // MediaStreamTrack#getSettings() but the implementation throws a
            // "Not implemented" Error.
        }
        if (trackSettings && 'facingMode' in trackSettings) {
            return trackSettings.facingMode;
        }

        if (typeof this._facingMode !== 'undefined') {
            return this._facingMode;
        }

        // In most cases we are showing a webcam. So if we've gotten here, it
        // should be relatively safe to assume that we are probably showing
        // the user-facing camera.
        return CameraFacingMode.USER;
    }

    return undefined;
};
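
// Usage sketch (illustrative, not part of this module): the facing mode can
// be used, for example, to decide whether a local video preview should be
// mirrored (user-facing cameras usually are). Assumes a `track` variable
// holding a camera JitsiLocalTrack.
//
//     const facingMode = track.getCameraFacingMode();
//     const shouldMirrorPreview = facingMode === CameraFacingMode.USER;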

/**
 * Stops the associated MediaStream.
 */
JitsiLocalTrack.prototype._stopMediaStream = function() {
    this.stopStreamInProgress = true;
    RTCUtils.stopMediaStream(this.stream);
    this.stopStreamInProgress = false;
};

/**
 * Detects camera issues on ended and mute events from the MediaStreamTrack.
 * @returns {boolean} true if an issue is detected and false otherwise
 */
JitsiLocalTrack.prototype._checkForCameraIssues = function() {
    if (!this.isVideoTrack() || this.stopStreamInProgress
            || this.videoType === VideoType.DESKTOP) {
        return false;
    }

    return !this._isReceivingData();
};

/**
 * Checks whether the attached MediaStream is receiving data from its source
 * or not. If the stream property is null (because of mute or another reason)
 * this method will return false.
 * NOTE: This method doesn't indicate a problem with the streams directly.
 * For example, in case of video mute, or if the user has disposed the track,
 * the method will return false.
 * @returns {boolean} true if the stream is receiving data and false
 * otherwise.
 */
JitsiLocalTrack.prototype._isReceivingData = function() {
    if (!this.stream) {
        return false;
    }

    // In an older version of the spec there is no muted property and
    // readyState can have the value "muted". In the latest versions
    // readyState can have the values "live" and "ended" and there is a
    // muted boolean property. If the stream is muted that means that we
    // aren't receiving any data from the source. We want to notify the
    // users of an error if the stream is muted or ended on its creation.
    return this.stream.getTracks().some(track =>
        (!('readyState' in track) || track.readyState === 'live')
            && (!('muted' in track) || track.muted !== true));
};

/**
 * Creates a text representation of this local track instance.
 * @return {string}
 */
JitsiLocalTrack.prototype.toString = function() {
    return `LocalTrack[${this.rtcId},${this.getType()}]`;
};

module.exports = JitsiLocalTrack;