// JitsiLocalTrack.js
  1. /* global __filename, Promise */
  2. import CameraFacingMode from '../../service/RTC/CameraFacingMode';
  3. import { getLogger } from 'jitsi-meet-logger';
  4. import JitsiTrack from './JitsiTrack';
  5. import JitsiTrackError from '../../JitsiTrackError';
  6. import * as JitsiTrackErrors from '../../JitsiTrackErrors';
  7. import * as JitsiTrackEvents from '../../JitsiTrackEvents';
  8. import * as MediaType from '../../service/RTC/MediaType';
  9. import RTCBrowserType from './RTCBrowserType';
  10. import RTCEvents from '../../service/RTC/RTCEvents';
  11. import RTCUtils from './RTCUtils';
  12. import Statistics from '../statistics/statistics';
  13. import VideoType from '../../service/RTC/VideoType';
  14. const logger = getLogger(__filename);
  15. /**
  16. * Creates Promise for mute/unmute operation.
  17. *
  18. * @param {JitsiLocalTrack} track - The track that will be muted/unmuted.
  19. * @param {boolean} mute - Whether to mute or unmute the track.
  20. * @returns {Promise}
  21. */
  22. function createMuteUnmutePromise(track, mute) {
  23. if (track.inMuteOrUnmuteProgress) {
  24. return Promise.reject(
  25. new JitsiTrackError(JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS)
  26. );
  27. }
  28. track.inMuteOrUnmuteProgress = true;
  29. return track._setMute(mute)
  30. .then(() => {
  31. track.inMuteOrUnmuteProgress = false;
  32. })
  33. .catch(status => {
  34. track.inMuteOrUnmuteProgress = false;
  35. throw status;
  36. });
  37. }
  38. /**
  39. * Represents a single media track(either audio or video).
  40. * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
  41. */
  42. export default class JitsiLocalTrack extends JitsiTrack {
  43. /**
  44. * Constructs new JitsiLocalTrack instanse.
  45. * @param {Object} trackInfo
  46. * @param {number} trackInfo.rtcId the ID assigned by the RTC module
  47. * @param trackInfo.stream WebRTC MediaStream, parent of the track
  48. * @param trackInfo.track underlying WebRTC MediaStreamTrack for new
  49. * JitsiRemoteTrack
  50. * @param trackInfo.mediaType the MediaType of the JitsiRemoteTrack
  51. * @param trackInfo.videoType the VideoType of the JitsiRemoteTrack
  52. * @param trackInfo.resolution the video resolution if it's a video track
  53. * @param trackInfo.deviceId the ID of the local device for this track
  54. * @param trackInfo.facingMode the camera facing mode used in getUserMedia
  55. * call
  56. * @param {sourceId} trackInfo.sourceId - The id of the desktop sharing
  57. * source. NOTE: defined for desktop sharing tracks only.
  58. * @constructor
  59. */
  60. constructor(trackInfo) {
  61. const {
  62. rtcId,
  63. stream,
  64. track,
  65. mediaType,
  66. videoType,
  67. resolution,
  68. deviceId,
  69. facingMode,
  70. sourceId
  71. } = trackInfo;
  72. super(
  73. null /* RTC */,
  74. stream,
  75. track,
  76. () => {
  77. if (!this.dontFireRemoveEvent) {
  78. this.emit(JitsiTrackEvents.LOCAL_TRACK_STOPPED);
  79. }
  80. this.dontFireRemoveEvent = false;
  81. } /* inactiveHandler */,
  82. mediaType,
  83. videoType);
  84. /**
  85. * The ID assigned by the RTC module on instance creation.
  86. * @type {number}
  87. */
  88. this.rtcId = rtcId;
  89. this.dontFireRemoveEvent = false;
  90. this.resolution = resolution;
  91. this.sourceId = sourceId;
  92. // FIXME: currently firefox is ignoring our constraints about
  93. // resolutions so we do not store it, to avoid wrong reporting of local
  94. // track resolution
  95. if (RTCBrowserType.isFirefox()) {
  96. this.resolution = null;
  97. }
  98. this.deviceId = deviceId;
  99. this.storedMSID = this.getMSID();
  100. this.inMuteOrUnmuteProgress = false;
  101. /**
  102. * The facing mode of the camera from which this JitsiLocalTrack
  103. * instance was obtained.
  104. *
  105. * @private
  106. * @type {CameraFacingMode|undefined}
  107. */
  108. this._facingMode = facingMode;
  109. // Currently there is no way to know the MediaStreamTrack ended due to
  110. // to device disconnect in Firefox through e.g. "readyState" property.
  111. // Instead we will compare current track's label with device labels from
  112. // enumerateDevices() list.
  113. this._trackEnded = false;
  114. /**
  115. * The value of bytes sent received from the statistics module.
  116. */
  117. this._bytesSent = null;
  118. /**
  119. * Used only for detection of audio problems. We want to check only once
  120. * whether the track is sending bytes ot not. This flag is set to false
  121. * after the check.
  122. */
  123. this._testByteSent = true;
  124. // Currently there is no way to determine with what device track was
  125. // created (until getConstraints() support), however we can associate
  126. // tracks with real devices obtained from enumerateDevices() call as
  127. // soon as it's called.
  128. this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;
  129. /**
  130. * Indicates that we have called RTCUtils.stopMediaStream for the
  131. * MediaStream related to this JitsiTrack object.
  132. */
  133. this.stopStreamInProgress = false;
  134. /**
  135. * On mute event we are waiting for 3s to check if the stream is going
  136. * to be still muted before firing the event for camera issue detected
  137. * (NO_DATA_FROM_SOURCE).
  138. */
  139. this._noDataFromSourceTimeout = null;
  140. this._onDeviceListChanged = devices => {
  141. this._setRealDeviceIdFromDeviceList(devices);
  142. // Mark track as ended for those browsers that do not support
  143. // "readyState" property. We do not touch tracks created with
  144. // default device ID "".
  145. if (typeof this.getTrack().readyState === 'undefined'
  146. && typeof this._realDeviceId !== 'undefined'
  147. && !devices.find(d => d.deviceId === this._realDeviceId)) {
  148. this._trackEnded = true;
  149. }
  150. };
  151. // Subscribe each created local audio track to
  152. // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
  153. // handling this event for remote tracks (which are handled in RTC.js),
  154. // because there might be local tracks not attached to a conference.
  155. if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
  156. this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);
  157. RTCUtils.addListener(
  158. RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
  159. this._onAudioOutputDeviceChanged);
  160. }
  161. RTCUtils.addListener(
  162. RTCEvents.DEVICE_LIST_CHANGED,
  163. this._onDeviceListChanged);
  164. this._initNoDataFromSourceHandlers();
  165. }
  166. /**
  167. * Returns if associated MediaStreamTrack is in the 'ended' state
  168. * @returns {boolean}
  169. */
  170. isEnded() {
  171. return this.getTrack().readyState === 'ended' || this._trackEnded;
  172. }
  173. /**
  174. * Sets handlers to the MediaStreamTrack object that will detect camera
  175. * issues.
  176. */
  177. _initNoDataFromSourceHandlers() {
  178. if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
  179. const _onNoDataFromSourceError
  180. = this._onNoDataFromSourceError.bind(this);
  181. this._setHandler('track_mute', () => {
  182. if (this._checkForCameraIssues()) {
  183. const now = window.performance.now();
  184. this._noDataFromSourceTimeout
  185. = setTimeout(_onNoDataFromSourceError, 3000);
  186. this._setHandler('track_unmute', () => {
  187. this._clearNoDataFromSourceMuteResources();
  188. Statistics.sendEventToAll(
  189. `${this.getType()}.track_unmute`,
  190. { value: window.performance.now() - now });
  191. });
  192. }
  193. });
  194. this._setHandler('track_ended', _onNoDataFromSourceError);
  195. }
  196. }
  197. /**
  198. * Clears all timeouts and handlers set on MediaStreamTrack mute event.
  199. * FIXME: Change the name of the method with better one.
  200. */
  201. _clearNoDataFromSourceMuteResources() {
  202. if (this._noDataFromSourceTimeout) {
  203. clearTimeout(this._noDataFromSourceTimeout);
  204. this._noDataFromSourceTimeout = null;
  205. }
  206. this._setHandler('track_unmute', undefined);
  207. }
  208. /**
  209. * Called when potential camera issue is detected. Clears the handlers and
  210. * timeouts set on MediaStreamTrack muted event. Verifies that the camera
  211. * issue persists and fires NO_DATA_FROM_SOURCE event.
  212. */
  213. _onNoDataFromSourceError() {
  214. this._clearNoDataFromSourceMuteResources();
  215. if (this._checkForCameraIssues()) {
  216. this._fireNoDataFromSourceEvent();
  217. }
  218. }
  219. /**
  220. * Fires JitsiTrackEvents.NO_DATA_FROM_SOURCE and logs it to analytics and
  221. * callstats.
  222. */
  223. _fireNoDataFromSourceEvent() {
  224. this.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
  225. const eventName = `${this.getType()}.no_data_from_source`;
  226. Statistics.analytics.sendEvent(eventName);
  227. const log = { name: eventName };
  228. if (this.isAudioTrack()) {
  229. log.isReceivingData = this._isReceivingData();
  230. }
  231. Statistics.sendLog(JSON.stringify(log));
  232. }
  233. /**
  234. * Sets real device ID by comparing track information with device
  235. * information. This is temporary solution until getConstraints() method
  236. * will be implemented in browsers.
  237. * @param {MediaDeviceInfo[]} devices - list of devices obtained from
  238. * enumerateDevices() call
  239. */
  240. _setRealDeviceIdFromDeviceList(devices) {
  241. const track = this.getTrack();
  242. // FIXME for temasys video track, label refers to id not the actual
  243. // device
  244. const device = devices.find(
  245. d => d.kind === `${track.kind}input` && d.label === track.label);
  246. if (device) {
  247. this._realDeviceId = device.deviceId;
  248. }
  249. }
  250. /**
  251. * Sets the stream property of JitsiLocalTrack object and sets all stored
  252. * handlers to it.
  253. * @param {MediaStream} stream the new stream.
  254. */
  255. _setStream(stream) {
  256. super._setStream(stream);
  257. // Store the MSID for video mute/unmute purposes
  258. if (stream) {
  259. this.storedMSID = this.getMSID();
  260. logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
  261. } else {
  262. logger.debug(`Setting 'null' stream on ${this}`);
  263. }
  264. }
  265. /**
  266. * Mutes the track. Will reject the Promise if there is mute/unmute
  267. * operation in progress.
  268. * @returns {Promise}
  269. */
  270. mute() {
  271. return createMuteUnmutePromise(this, true);
  272. }
  273. /**
  274. * Unmutes the track. Will reject the Promise if there is mute/unmute
  275. * operation in progress.
  276. * @returns {Promise}
  277. */
  278. unmute() {
  279. return createMuteUnmutePromise(this, false);
  280. }
  281. /**
  282. * Mutes / unmutes the track.
  283. *
  284. * @param {boolean} mute - If true the track will be muted. Otherwise the
  285. * track will be unmuted.
  286. * @private
  287. * @returns {Promise}
  288. */
  289. _setMute(mute) {
  290. if (this.isMuted() === mute) {
  291. return Promise.resolve();
  292. }
  293. let promise = Promise.resolve();
  294. this.dontFireRemoveEvent = false;
  295. // A function that will print info about muted status transition
  296. const logMuteInfo = () => logger.info(`Mute ${this}: ${mute}`);
  297. if (this.isAudioTrack()
  298. || this.videoType === VideoType.DESKTOP
  299. || !RTCBrowserType.doesVideoMuteByStreamRemove()) {
  300. logMuteInfo();
  301. if (this.track) {
  302. this.track.enabled = !mute;
  303. }
  304. } else if (mute) {
  305. this.dontFireRemoveEvent = true;
  306. promise = new Promise((resolve, reject) => {
  307. logMuteInfo();
  308. this._removeStreamFromConferenceAsMute(() => {
  309. // FIXME: Maybe here we should set the SRC for the
  310. // containers to something
  311. this._stopMediaStream();
  312. this._setStream(null);
  313. resolve();
  314. }, err => {
  315. reject(err);
  316. });
  317. });
  318. } else {
  319. logMuteInfo();
  320. // This path is only for camera.
  321. const streamOptions = {
  322. cameraDeviceId: this.getDeviceId(),
  323. devices: [ MediaType.VIDEO ],
  324. facingMode: this.getCameraFacingMode()
  325. };
  326. if (this.resolution) {
  327. streamOptions.resolution = this.resolution;
  328. }
  329. promise = RTCUtils.obtainAudioAndVideoPermissions(streamOptions)
  330. .then(streamsInfo => {
  331. const mediaType = this.getType();
  332. const streamInfo = streamsInfo.find(
  333. info => info.mediaType === mediaType);
  334. if (streamInfo) {
  335. this._setStream(streamInfo.stream);
  336. this.track = streamInfo.track;
  337. // This is not good when video type changes after
  338. // unmute, but let's not crash here
  339. if (this.videoType !== streamInfo.videoType) {
  340. logger.warn(
  341. `${this}: video type has changed after unmute!`,
  342. this.videoType, streamInfo.videoType);
  343. this.videoType = streamInfo.videoType;
  344. }
  345. } else {
  346. throw new JitsiTrackError(
  347. JitsiTrackErrors.TRACK_NO_STREAM_FOUND);
  348. }
  349. this.containers = this.containers.map(
  350. cont => RTCUtils.attachMediaStream(cont, this.stream));
  351. return this._addStreamToConferenceAsUnmute();
  352. });
  353. }
  354. return promise
  355. .then(() => this._sendMuteStatus(mute))
  356. .then(() => {
  357. this.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
  358. });
  359. }
  360. /**
  361. * Adds stream to conference and marks it as "unmute" operation.
  362. *
  363. * @private
  364. * @returns {Promise}
  365. */
  366. _addStreamToConferenceAsUnmute() {
  367. if (!this.conference) {
  368. return Promise.resolve();
  369. }
  370. // FIXME it would be good to not included conference as part of this
  371. // process. Only TraceablePeerConnections to which the track is attached
  372. // should care about this action. The TPCs to which the track is not
  373. // attached can sync up when track is re-attached.
  374. // A problem with that is that the "modify sources" queue is part of
  375. // the JingleSessionPC and it would be excluded from the process. One
  376. // solution would be to extract class between TPC and JingleSessionPC
  377. // which would contain the queue and would notify the signaling layer
  378. // when local SSRCs are changed. This would help to separate XMPP from
  379. // the RTC module.
  380. return new Promise((resolve, reject) => {
  381. this.conference._addLocalTrackAsUnmute(this)
  382. .then(resolve, error => reject(new Error(error)));
  383. });
  384. }
  385. /**
  386. * Removes stream from conference and marks it as "mute" operation.
  387. * @param {Function} successCallback will be called on success
  388. * @param {Function} errorCallback will be called on error
  389. * @private
  390. */
  391. _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
  392. if (!this.conference) {
  393. successCallback();
  394. return;
  395. }
  396. this.conference._removeLocalTrackAsMute(this).then(
  397. successCallback,
  398. error => errorCallback(new Error(error)));
  399. }
  400. /**
  401. * Sends mute status for a track to conference if any.
  402. *
  403. * @param {boolean} mute - If track is muted.
  404. * @private
  405. * @returns {Promise}
  406. */
  407. _sendMuteStatus(mute) {
  408. if (!this.conference || !this.conference.room) {
  409. return Promise.resolve();
  410. }
  411. return new Promise(resolve => {
  412. this.conference.room[
  413. this.isAudioTrack()
  414. ? 'setAudioMute'
  415. : 'setVideoMute'](mute, resolve);
  416. });
  417. }
  418. /**
  419. * @inheritdoc
  420. *
  421. * Stops sending the media track. And removes it from the HTML.
  422. * NOTE: Works for local tracks only.
  423. *
  424. * @extends JitsiTrack#dispose
  425. * @returns {Promise}
  426. */
  427. dispose() {
  428. let promise = Promise.resolve();
  429. if (this.conference) {
  430. promise = this.conference.removeTrack(this);
  431. }
  432. if (this.stream) {
  433. this._stopMediaStream();
  434. this.detach();
  435. }
  436. RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED,
  437. this._onDeviceListChanged);
  438. if (this._onAudioOutputDeviceChanged) {
  439. RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
  440. this._onAudioOutputDeviceChanged);
  441. }
  442. return promise.then(() => super.dispose());
  443. }
  444. /**
  445. * Returns <tt>true</tt> - if the stream is muted
  446. * and <tt>false</tt> otherwise.
  447. * @returns {boolean} <tt>true</tt> - if the stream is muted
  448. * and <tt>false</tt> otherwise.
  449. */
  450. isMuted() {
  451. // this.stream will be null when we mute local video on Chrome
  452. if (!this.stream) {
  453. return true;
  454. }
  455. if (this.isVideoTrack() && !this.isActive()) {
  456. return true;
  457. }
  458. return !this.track || !this.track.enabled;
  459. }
  460. /**
  461. * Sets the JitsiConference object associated with the track. This is temp
  462. * solution.
  463. * @param conference the JitsiConference object
  464. */
  465. _setConference(conference) {
  466. this.conference = conference;
  467. // We want to keep up with postponed events which should have been fired
  468. // on "attach" call, but for local track we not always have the
  469. // conference before attaching. However this may result in duplicated
  470. // events if they have been triggered on "attach" already.
  471. for (let i = 0; i < this.containers.length; i++) {
  472. this._maybeFireTrackAttached(this.containers[i]);
  473. }
  474. }
  475. /**
  476. * Returns <tt>true</tt>.
  477. * @returns {boolean} <tt>true</tt>
  478. */
  479. isLocal() {
  480. return true;
  481. }
  482. /**
  483. * Returns device id associated with track.
  484. * @returns {string}
  485. */
  486. getDeviceId() {
  487. return this._realDeviceId || this.deviceId;
  488. }
  489. /**
  490. * Returns the participant id which owns the track.
  491. * @returns {string} the id of the participants. It corresponds to the
  492. * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
  493. */
  494. getParticipantId() {
  495. return this.conference && this.conference.myUserId();
  496. }
  497. /**
  498. * Sets the value of bytes sent statistic.
  499. * @param {TraceablePeerConnection} tpc the source of the "bytes sent" stat
  500. * @param {number} bytesSent the new value
  501. * NOTE: used only for audio tracks to detect audio issues.
  502. */
  503. _setByteSent(tpc, bytesSent) {
  504. this._bytesSent = bytesSent;
  505. const iceConnectionState = tpc.getConnectionState();
  506. if (this._testByteSent && iceConnectionState === 'connected') {
  507. setTimeout(() => {
  508. if (this._bytesSent <= 0) {
  509. // FIXME: Remove ${this}
  510. logger.warn(`${this} 'bytes sent' <= 0: \
  511. ${this._bytesSent}`);
  512. // we are not receiving anything from the microphone
  513. this._fireNoDataFromSourceEvent();
  514. }
  515. }, 3000);
  516. this._testByteSent = false;
  517. }
  518. }
  519. /**
  520. * Returns facing mode for video track from camera. For other cases (e.g.
  521. * audio track or 'desktop' video track) returns undefined.
  522. *
  523. * @returns {CameraFacingMode|undefined}
  524. */
  525. getCameraFacingMode() {
  526. if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
  527. // MediaStreamTrack#getSettings() is not implemented in many
  528. // browsers, so we need feature checking here. Progress on the
  529. // respective browser's implementation can be tracked at
  530. // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
  531. // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
  532. // for Firefox. Even if a browser implements getSettings() already,
  533. // it might still not return anything for 'facingMode'.
  534. let trackSettings;
  535. try {
  536. trackSettings = this.track.getSettings();
  537. } catch (e) {
  538. // XXX React-native-webrtc, for example, defines
  539. // MediaStreamTrack#getSettings() but the implementation throws
  540. // a "Not implemented" Error.
  541. }
  542. if (trackSettings && 'facingMode' in trackSettings) {
  543. return trackSettings.facingMode;
  544. }
  545. if (typeof this._facingMode !== 'undefined') {
  546. return this._facingMode;
  547. }
  548. // In most cases we are showing a webcam. So if we've gotten here,
  549. // it should be relatively safe to assume that we are probably
  550. // showing the user-facing camera.
  551. return CameraFacingMode.USER;
  552. }
  553. return undefined;
  554. }
  555. /**
  556. * Stops the associated MediaStream.
  557. */
  558. _stopMediaStream() {
  559. this.stopStreamInProgress = true;
  560. RTCUtils.stopMediaStream(this.stream);
  561. this.stopStreamInProgress = false;
  562. }
  563. /**
  564. * Switches the camera facing mode if the WebRTC implementation supports the
  565. * custom MediaStreamTrack._switchCamera method. Currently, the method in
  566. * question is implemented in react-native-webrtc only. When such a WebRTC
  567. * implementation is executing, the method is the preferred way to switch
  568. * between the front/user-facing and the back/environment-facing cameras
  569. * because it will likely be (as is the case of react-native-webrtc)
  570. * noticeably faster that creating a new MediaStreamTrack via a new
  571. * getUserMedia call with the switched facingMode constraint value.
  572. * Moreover, the approach with a new getUserMedia call may not even work:
  573. * WebRTC on Android and iOS is either very slow to open the camera a second
  574. * time or plainly freezes attempting to do that.
  575. */
  576. _switchCamera() {
  577. if (this.isVideoTrack()
  578. && this.videoType === VideoType.CAMERA
  579. && typeof this.track._switchCamera === 'function') {
  580. this.track._switchCamera();
  581. this._facingMode
  582. = this._facingMode === CameraFacingMode.ENVIRONMENT
  583. ? CameraFacingMode.USER
  584. : CameraFacingMode.ENVIRONMENT;
  585. }
  586. }
  587. /**
  588. * Detects camera issues on ended and mute events from MediaStreamTrack.
  589. * @returns {boolean} true if an issue is detected and false otherwise
  590. */
  591. _checkForCameraIssues() {
  592. if (!this.isVideoTrack() || this.stopStreamInProgress
  593. || this.videoType === VideoType.DESKTOP) {
  594. return false;
  595. }
  596. return !this._isReceivingData();
  597. }
  598. /**
  599. * Checks whether the attached MediaStream is receiving data from source or
  600. * not. If the stream property is null(because of mute or another reason)
  601. * this method will return false.
  602. * NOTE: This method doesn't indicate problem with the streams directly.
  603. * For example in case of video mute the method will return false or if the
  604. * user has disposed the track.
  605. * @returns {boolean} true if the stream is receiving data and false
  606. * this otherwise.
  607. */
  608. _isReceivingData() {
  609. if (!this.stream) {
  610. return false;
  611. }
  612. // In older version of the spec there is no muted property and
  613. // readyState can have value muted. In the latest versions
  614. // readyState can have values "live" and "ended" and there is
  615. // muted boolean property. If the stream is muted that means that
  616. // we aren't receiving any data from the source. We want to notify
  617. // the users for error if the stream is muted or ended on it's
  618. // creation.
  619. return this.stream.getTracks().some(track =>
  620. (!('readyState' in track) || track.readyState === 'live')
  621. && (!('muted' in track) || track.muted !== true));
  622. }
  623. /**
  624. * Creates a text representation of this local track instance.
  625. * @return {string}
  626. */
  627. toString() {
  628. return `LocalTrack[${this.rtcId},${this.getType()}]`;
  629. }
  630. }