modified lib-jitsi-meet dev repo
Ви не можете вибрати більше 25 тем Теми мають розпочинатися з літери або цифри, можуть містити дефіси (-) і не повинні перевищувати 35 символів.

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720
  1. /* global __filename, Promise */
  2. import CameraFacingMode from '../../service/RTC/CameraFacingMode';
  3. import { getLogger } from 'jitsi-meet-logger';
  4. import JitsiTrack from './JitsiTrack';
  5. import JitsiTrackError from '../../JitsiTrackError';
  6. import * as JitsiTrackErrors from '../../JitsiTrackErrors';
  7. import * as JitsiTrackEvents from '../../JitsiTrackEvents';
  8. import * as MediaType from '../../service/RTC/MediaType';
  9. import RTCBrowserType from './RTCBrowserType';
  10. import RTCEvents from '../../service/RTC/RTCEvents';
  11. import RTCUtils from './RTCUtils';
  12. import Statistics from '../statistics/statistics';
  13. import VideoType from '../../service/RTC/VideoType';
  14. const logger = getLogger(__filename);
  15. /**
  16. * Creates Promise for mute/unmute operation.
  17. *
  18. * @param {JitsiLocalTrack} track - The track that will be muted/unmuted.
  19. * @param {boolean} mute - Whether to mute or unmute the track.
  20. * @returns {Promise}
  21. */
  22. function createMuteUnmutePromise(track, mute) {
  23. if (track.inMuteOrUnmuteProgress) {
  24. return Promise.reject(
  25. new JitsiTrackError(JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS)
  26. );
  27. }
  28. track.inMuteOrUnmuteProgress = true;
  29. return track._setMute(mute)
  30. .then(() => {
  31. track.inMuteOrUnmuteProgress = false;
  32. })
  33. .catch(status => {
  34. track.inMuteOrUnmuteProgress = false;
  35. throw status;
  36. });
  37. }
/**
 * Represents a single local media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 */
export default class JitsiLocalTrack extends JitsiTrack {
    /**
     * Constructs a new JitsiLocalTrack instance.
     * @param {Object} trackInfo
     * @param {number} trackInfo.rtcId the ID assigned by the RTC module
     * @param trackInfo.stream WebRTC MediaStream, parent of the track
     * @param trackInfo.track underlying WebRTC MediaStreamTrack for new
     * JitsiRemoteTrack
     * @param trackInfo.mediaType the MediaType of the JitsiRemoteTrack
     * @param trackInfo.videoType the VideoType of the JitsiRemoteTrack
     * @param trackInfo.resolution the video resolution if it's a video track
     * @param trackInfo.deviceId the ID of the local device for this track
     * @param trackInfo.facingMode the camera facing mode used in getUserMedia
     * call
     * @param {sourceId} trackInfo.sourceId - The id of the desktop sharing
     * source. NOTE: defined for desktop sharing tracks only.
     * @constructor
     */
    constructor(trackInfo) {
        const {
            rtcId,
            stream,
            track,
            mediaType,
            videoType,
            resolution,
            deviceId,
            facingMode,
            sourceId,
            sourceType
        } = trackInfo;

        super(
            null /* RTC */,
            stream,
            track,
            () => {
                // The "inactive" handler fires when the underlying stream
                // ends. Suppress the event when the stop was initiated by us
                // (e.g. video mute via stream removal), signalled through
                // dontFireRemoveEvent.
                if (!this.dontFireRemoveEvent) {
                    this.emit(JitsiTrackEvents.LOCAL_TRACK_STOPPED);
                }
                this.dontFireRemoveEvent = false;
            } /* inactiveHandler */,
            mediaType,
            videoType);

        /**
         * The ID assigned by the RTC module on instance creation.
         * @type {number}
         */
        this.rtcId = rtcId;

        // When true, the next "inactive" event on the stream will not emit
        // LOCAL_TRACK_STOPPED (used during mute-by-stream-removal).
        this.dontFireRemoveEvent = false;
        this.resolution = resolution;

        // Desktop-sharing source info (defined for desktop tracks only).
        this.sourceId = sourceId;
        this.sourceType = sourceType;

        // FIXME: currently firefox is ignoring our constraints about
        // resolutions so we do not store it, to avoid wrong reporting of local
        // track resolution
        if (RTCBrowserType.isFirefox()) {
            this.resolution = null;
        }
        this.deviceId = deviceId;

        // Store the MSID for video mute/unmute purposes (the stream may be
        // replaced with null on mute, see _setMute / _setStream).
        this.storedMSID = this.getMSID();
        this.inMuteOrUnmuteProgress = false;

        /**
         * The facing mode of the camera from which this JitsiLocalTrack
         * instance was obtained.
         *
         * @private
         * @type {CameraFacingMode|undefined}
         */
        this._facingMode = facingMode;

        // Currently there is no way to know if the MediaStreamTrack ended due
        // to device disconnect in Firefox through e.g. "readyState" property.
        // Instead we will compare the current track's label with device labels
        // from the enumerateDevices() list.
        this._trackEnded = false;

        /**
         * The value of bytes sent received from the statistics module.
         */
        this._bytesSent = null;

        /**
         * Used only for detection of audio problems. We want to check only
         * once whether the track is sending bytes or not. This flag is set to
         * false after the check.
         */
        this._testByteSent = true;

        // Currently there is no way to determine with what device the track
        // was created (until getConstraints() support), however we can
        // associate tracks with real devices obtained from the
        // enumerateDevices() call as soon as it's called.
        this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

        /**
         * Indicates that we have called RTCUtils.stopMediaStream for the
         * MediaStream related to this JitsiTrack object.
         */
        this.stopStreamInProgress = false;

        /**
         * On mute event we are waiting for 3s to check if the stream is going
         * to be still muted before firing the event for camera issue detected
         * (NO_DATA_FROM_SOURCE).
         */
        this._noDataFromSourceTimeout = null;

        this._onDeviceListChanged = devices => {
            this._setRealDeviceIdFromDeviceList(devices);

            // Mark track as ended for those browsers that do not support
            // the "readyState" property. We do not touch tracks created with
            // the default device ID "".
            if (typeof this.getTrack().readyState === 'undefined'
                    && typeof this._realDeviceId !== 'undefined'
                    && !devices.find(d => d.deviceId === this._realDeviceId)) {
                this._trackEnded = true;
            }
        };

        // Subscribe each created local audio track to
        // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
        // handling this event for remote tracks (which are handled in RTC.js),
        // because there might be local tracks not attached to a conference.
        if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
            this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);
            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }
        RTCUtils.addListener(
            RTCEvents.DEVICE_LIST_CHANGED,
            this._onDeviceListChanged);
        this._initNoDataFromSourceHandlers();
    }

    /**
     * Returns if the associated MediaStreamTrack is in the 'ended' state.
     * Also true when the device was detected as disconnected via the
     * enumerateDevices() fallback (see _onDeviceListChanged).
     * @returns {boolean}
     */
    isEnded() {
        return this.getTrack().readyState === 'ended' || this._trackEnded;
    }

    /**
     * Sets handlers to the MediaStreamTrack object that will detect camera
     * issues. Only applies to camera video tracks: a "mute" event starts a 3s
     * timer after which a NO_DATA_FROM_SOURCE check fires; an "unmute" within
     * that window cancels it and reports the mute duration to analytics.
     */
    _initNoDataFromSourceHandlers() {
        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            const _onNoDataFromSourceError
                = this._onNoDataFromSourceError.bind(this);

            this._setHandler('track_mute', () => {
                if (this._checkForCameraIssues()) {
                    const now = window.performance.now();

                    this._noDataFromSourceTimeout
                        = setTimeout(_onNoDataFromSourceError, 3000);
                    this._setHandler('track_unmute', () => {
                        this._clearNoDataFromSourceMuteResources();
                        Statistics.sendEventToAll(
                            `${this.getType()}.track_unmute`,
                            { value: window.performance.now() - now });
                    });
                }
            });
            this._setHandler('track_ended', _onNoDataFromSourceError);
        }
    }

    /**
     * Clears all timeouts and handlers set on MediaStreamTrack mute event.
     * FIXME: Change the name of the method with a better one.
     */
    _clearNoDataFromSourceMuteResources() {
        if (this._noDataFromSourceTimeout) {
            clearTimeout(this._noDataFromSourceTimeout);
            this._noDataFromSourceTimeout = null;
        }
        this._setHandler('track_unmute', undefined);
    }

    /**
     * Called when a potential camera issue is detected. Clears the handlers
     * and timeouts set on the MediaStreamTrack muted event. Verifies that the
     * camera issue persists and fires NO_DATA_FROM_SOURCE event.
     */
    _onNoDataFromSourceError() {
        this._clearNoDataFromSourceMuteResources();
        if (this._checkForCameraIssues()) {
            this._fireNoDataFromSourceEvent();
        }
    }

    /**
     * Fires JitsiTrackEvents.NO_DATA_FROM_SOURCE and logs it to analytics and
     * callstats.
     */
    _fireNoDataFromSourceEvent() {
        this.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
        const eventName = `${this.getType()}.no_data_from_source`;

        Statistics.analytics.sendEvent(eventName);
        const log = { name: eventName };

        if (this.isAudioTrack()) {
            log.isReceivingData = this._isReceivingData();
        }
        Statistics.sendLog(JSON.stringify(log));
    }

    /**
     * Sets the real device ID by comparing track information with device
     * information. This is a temporary solution until the getConstraints()
     * method will be implemented in browsers.
     * @param {MediaDeviceInfo[]} devices - list of devices obtained from
     * enumerateDevices() call
     */
    _setRealDeviceIdFromDeviceList(devices) {
        const track = this.getTrack();

        // FIXME for temasys video track, label refers to id not the actual
        // device
        const device = devices.find(
            d => d.kind === `${track.kind}input` && d.label === track.label);

        if (device) {
            this._realDeviceId = device.deviceId;
        }
    }

    /**
     * Sets the stream property of the JitsiLocalTrack object and sets all
     * stored handlers to it.
     * @param {MediaStream} stream the new stream.
     */
    _setStream(stream) {
        super._setStream(stream);

        // Store the MSID for video mute/unmute purposes
        if (stream) {
            this.storedMSID = this.getMSID();
            logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
        } else {
            logger.debug(`Setting 'null' stream on ${this}`);
        }
    }

    /**
     * Mutes the track. Will reject the Promise if there is a mute/unmute
     * operation in progress.
     * @returns {Promise}
     */
    mute() {
        return createMuteUnmutePromise(this, true);
    }

    /**
     * Unmutes the track. Will reject the Promise if there is a mute/unmute
     * operation in progress.
     * @returns {Promise}
     */
    unmute() {
        return createMuteUnmutePromise(this, false);
    }

    /**
     * Mutes / unmutes the track.
     *
     * Three paths: (1) audio, desktop video, or browsers that don't mute
     * video by removing the stream — just toggle track.enabled; (2) camera
     * mute on stream-removal browsers — remove the stream from the
     * conference, stop it and set the stream to null; (3) camera unmute —
     * re-acquire a stream via getUserMedia, re-attach containers and add it
     * back to the conference.
     *
     * @param {boolean} mute - If true the track will be muted. Otherwise the
     * track will be unmuted.
     * @private
     * @returns {Promise}
     */
    _setMute(mute) {
        if (this.isMuted() === mute) {
            return Promise.resolve();
        }
        let promise = Promise.resolve();

        this.dontFireRemoveEvent = false;

        // A function that will print info about muted status transition
        const logMuteInfo = () => logger.info(`Mute ${this}: ${mute}`);

        if (this.isAudioTrack()
                || this.videoType === VideoType.DESKTOP
                || !RTCBrowserType.doesVideoMuteByStreamRemove()) {
            logMuteInfo();
            if (this.track) {
                this.track.enabled = !mute;
            }
        } else if (mute) {
            // Stopping the stream will fire the "inactive" handler; suppress
            // LOCAL_TRACK_STOPPED since this is an intentional mute.
            this.dontFireRemoveEvent = true;
            promise = new Promise((resolve, reject) => {
                logMuteInfo();
                this._removeStreamFromConferenceAsMute(() => {
                    // FIXME: Maybe here we should set the SRC for the
                    // containers to something
                    this._stopMediaStream();
                    this._setStream(null);
                    resolve();
                }, err => {
                    reject(err);
                });
            });
        } else {
            logMuteInfo();

            // This path is only for camera.
            const streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                facingMode: this.getCameraFacingMode()
            };

            if (this.resolution) {
                streamOptions.resolution = this.resolution;
            }
            promise = RTCUtils.obtainAudioAndVideoPermissions(streamOptions)
                .then(streamsInfo => {
                    const mediaType = this.getType();
                    const streamInfo = streamsInfo.find(
                        info => info.mediaType === mediaType);

                    if (streamInfo) {
                        this._setStream(streamInfo.stream);
                        this.track = streamInfo.track;

                        // This is not good when video type changes after
                        // unmute, but let's not crash here
                        if (this.videoType !== streamInfo.videoType) {
                            logger.warn(
                                `${this}: video type has changed after unmute!`,
                                this.videoType, streamInfo.videoType);
                            this.videoType = streamInfo.videoType;
                        }
                    } else {
                        throw new JitsiTrackError(
                            JitsiTrackErrors.TRACK_NO_STREAM_FOUND);
                    }

                    // Re-attach the new stream to every container the old
                    // one was attached to.
                    this.containers = this.containers.map(
                        cont => RTCUtils.attachMediaStream(cont, this.stream));

                    return this._addStreamToConferenceAsUnmute();
                });
        }

        return promise
            .then(() => this._sendMuteStatus(mute))
            .then(() => {
                this.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
            });
    }

    /**
     * Adds stream to conference and marks it as "unmute" operation.
     *
     * @private
     * @returns {Promise} Resolves immediately when not attached to a
     * conference; rejections from the conference are wrapped in an Error.
     */
    _addStreamToConferenceAsUnmute() {
        if (!this.conference) {
            return Promise.resolve();
        }

        // FIXME it would be good to not include conference as part of this
        // process. Only TraceablePeerConnections to which the track is
        // attached should care about this action. The TPCs to which the track
        // is not attached can sync up when the track is re-attached.
        // A problem with that is that the "modify sources" queue is part of
        // the JingleSessionPC and it would be excluded from the process. One
        // solution would be to extract a class between TPC and JingleSessionPC
        // which would contain the queue and would notify the signaling layer
        // when local SSRCs are changed. This would help to separate XMPP from
        // the RTC module.
        return new Promise((resolve, reject) => {
            this.conference._addLocalTrackAsUnmute(this)
                .then(resolve, error => reject(new Error(error)));
        });
    }

    /**
     * Removes stream from conference and marks it as "mute" operation.
     * @param {Function} successCallback will be called on success
     * @param {Function} errorCallback will be called on error
     * @private
     */
    _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
        if (!this.conference) {
            successCallback();

            return;
        }
        this.conference._removeLocalTrackAsMute(this).then(
            successCallback,
            error => errorCallback(new Error(error)));
    }

    /**
     * Sends mute status for a track to conference if any.
     *
     * @param {boolean} mute - If track is muted.
     * @private
     * @returns {Promise} Resolves once the mute status has been signalled via
     * the conference room (or immediately when there is no conference/room).
     */
    _sendMuteStatus(mute) {
        if (!this.conference || !this.conference.room) {
            return Promise.resolve();
        }

        return new Promise(resolve => {
            this.conference.room[
                this.isAudioTrack()
                    ? 'setAudioMute'
                    : 'setVideoMute'](mute, resolve);
        });
    }

    /**
     * @inheritdoc
     *
     * Stops sending the media track. And removes it from the HTML.
     * Also unregisters the device-list and audio-output listeners added in
     * the constructor. NOTE: Works for local tracks only.
     *
     * @extends JitsiTrack#dispose
     * @returns {Promise}
     */
    dispose() {
        let promise = Promise.resolve();

        if (this.conference) {
            promise = this.conference.removeTrack(this);
        }

        if (this.stream) {
            this._stopMediaStream();
            this.detach();
        }

        RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED,
            this._onDeviceListChanged);

        if (this._onAudioOutputDeviceChanged) {
            RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        return promise.then(() => super.dispose());
    }

    /**
     * Returns <tt>true</tt> - if the stream is muted
     * and <tt>false</tt> otherwise.
     * @returns {boolean} <tt>true</tt> - if the stream is muted
     * and <tt>false</tt> otherwise.
     */
    isMuted() {
        // this.stream will be null when we mute local video on Chrome
        if (!this.stream) {
            return true;
        }
        if (this.isVideoTrack() && !this.isActive()) {
            return true;
        }

        return !this.track || !this.track.enabled;
    }

    /**
     * Sets the JitsiConference object associated with the track. This is a
     * temp solution.
     * @param conference the JitsiConference object
     */
    _setConference(conference) {
        this.conference = conference;

        // We want to keep up with postponed events which should have been
        // fired on "attach" call, but for a local track we don't always have
        // the conference before attaching. However this may result in
        // duplicated events if they have been triggered on "attach" already.
        for (let i = 0; i < this.containers.length; i++) {
            this._maybeFireTrackAttached(this.containers[i]);
        }
    }

    /**
     * Returns <tt>true</tt>.
     * @returns {boolean} <tt>true</tt>
     */
    isLocal() {
        return true;
    }

    /**
     * Returns device id associated with track.
     * @returns {string} the real device ID when known (resolved from
     * enumerateDevices), otherwise the ID passed at creation.
     */
    getDeviceId() {
        return this._realDeviceId || this.deviceId;
    }

    /**
     * Returns the participant id which owns the track.
     * @returns {string} the id of the participants. It corresponds to the
     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
     */
    getParticipantId() {
        return this.conference && this.conference.myUserId();
    }

    /**
     * Sets the value of the bytes sent statistic. On the first update with an
     * established connection, schedules a one-shot 3s check: if no bytes have
     * been sent by then, fires NO_DATA_FROM_SOURCE.
     * @param {TraceablePeerConnection} tpc the source of the "bytes sent" stat
     * @param {number} bytesSent the new value
     * NOTE: used only for audio tracks to detect audio issues.
     */
    _setByteSent(tpc, bytesSent) {
        this._bytesSent = bytesSent;
        const iceConnectionState = tpc.getConnectionState();

        if (this._testByteSent && iceConnectionState === 'connected') {
            setTimeout(() => {
                if (this._bytesSent <= 0) {
                    // FIXME: Remove ${this}
                    logger.warn(`${this} 'bytes sent' <= 0: \
${this._bytesSent}`);

                    // we are not receiving anything from the microphone
                    this._fireNoDataFromSourceEvent();
                }
            }, 3000);
            this._testByteSent = false;
        }
    }

    /**
     * Returns facing mode for video track from camera. For other cases (e.g.
     * audio track or 'desktop' video track) returns undefined.
     *
     * @returns {CameraFacingMode|undefined}
     */
    getCameraFacingMode() {
        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            // MediaStreamTrack#getSettings() is not implemented in many
            // browsers, so we need feature checking here. Progress on the
            // respective browser's implementation can be tracked at
            // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
            // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
            // for Firefox. Even if a browser implements getSettings() already,
            // it might still not return anything for 'facingMode'.
            let trackSettings;

            try {
                trackSettings = this.track.getSettings();
            } catch (e) {
                // XXX React-native-webrtc, for example, defines
                // MediaStreamTrack#getSettings() but the implementation throws
                // a "Not implemented" Error.
            }
            if (trackSettings && 'facingMode' in trackSettings) {
                return trackSettings.facingMode;
            }

            // Fall back to the facing mode requested at getUserMedia time.
            if (typeof this._facingMode !== 'undefined') {
                return this._facingMode;
            }

            // In most cases we are showing a webcam. So if we've gotten here,
            // it should be relatively safe to assume that we are probably
            // showing the user-facing camera.
            return CameraFacingMode.USER;
        }

        return undefined;
    }

    /**
     * Stops the associated MediaStream. The stopStreamInProgress flag lets
     * _checkForCameraIssues distinguish an intentional stop from a real
     * camera problem.
     */
    _stopMediaStream() {
        this.stopStreamInProgress = true;
        RTCUtils.stopMediaStream(this.stream);
        this.stopStreamInProgress = false;
    }

    /**
     * Switches the camera facing mode if the WebRTC implementation supports
     * the custom MediaStreamTrack._switchCamera method. Currently, the method
     * in question is implemented in react-native-webrtc only. When such a
     * WebRTC implementation is executing, the method is the preferred way to
     * switch between the front/user-facing and the back/environment-facing
     * cameras because it will likely be (as is the case of
     * react-native-webrtc) noticeably faster than creating a new
     * MediaStreamTrack via a new getUserMedia call with the switched
     * facingMode constraint value. Moreover, the approach with a new
     * getUserMedia call may not even work: WebRTC on Android and iOS is
     * either very slow to open the camera a second time or plainly freezes
     * attempting to do that.
     */
    _switchCamera() {
        if (this.isVideoTrack()
                && this.videoType === VideoType.CAMERA
                && typeof this.track._switchCamera === 'function') {
            this.track._switchCamera();

            // Keep the cached facing mode in sync with the toggle.
            this._facingMode
                = this._facingMode === CameraFacingMode.ENVIRONMENT
                    ? CameraFacingMode.USER
                    : CameraFacingMode.ENVIRONMENT;
        }
    }

    /**
     * Detects camera issues on ended and mute events from MediaStreamTrack.
     * @returns {boolean} true if an issue is detected and false otherwise
     */
    _checkForCameraIssues() {
        if (!this.isVideoTrack() || this.stopStreamInProgress
                || this.videoType === VideoType.DESKTOP) {
            return false;
        }

        return !this._isReceivingData();
    }

    /**
     * Checks whether the attached MediaStream is receiving data from source or
     * not. If the stream property is null (because of mute or another reason)
     * this method will return false.
     * NOTE: This method doesn't indicate a problem with the streams directly.
     * For example in case of video mute the method will return false or if the
     * user has disposed the track.
     * @returns {boolean} true if the stream is receiving data and false
     * otherwise.
     */
    _isReceivingData() {
        if (!this.stream) {
            return false;
        }

        // In an older version of the spec there is no muted property and
        // readyState can have value muted. In the latest versions
        // readyState can have values "live" and "ended" and there is
        // muted boolean property. If the stream is muted that means that
        // we aren't receiving any data from the source. We want to notify
        // the users for error if the stream is muted or ended on it's
        // creation.
        return this.stream.getTracks().some(track =>
            (!('readyState' in track) || track.readyState === 'live')
                && (!('muted' in track) || track.muted !== true));
    }

    /**
     * Creates a text representation of this local track instance.
     * @return {string}
     */
    toString() {
        return `LocalTrack[${this.rtcId},${this.getType()}]`;
    }
}