modified lib-jitsi-meet dev repo
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

JitsiLocalTrack.js 26KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743
  1. /* global __filename, Promise */
  2. import CameraFacingMode from '../../service/RTC/CameraFacingMode';
  3. import { getLogger } from 'jitsi-meet-logger';
  4. import JitsiTrack from './JitsiTrack';
  5. import JitsiTrackError from '../../JitsiTrackError';
  6. import { TRACK_NO_STREAM_FOUND } from '../../JitsiTrackErrors';
  7. import {
  8. LOCAL_TRACK_STOPPED,
  9. NO_DATA_FROM_SOURCE,
  10. TRACK_MUTE_CHANGED
  11. } from '../../JitsiTrackEvents';
  12. import * as MediaType from '../../service/RTC/MediaType';
  13. import RTCBrowserType from './RTCBrowserType';
  14. import RTCEvents from '../../service/RTC/RTCEvents';
  15. import RTCUtils from './RTCUtils';
  16. import Statistics from '../statistics/statistics';
  17. import VideoType from '../../service/RTC/VideoType';
  18. const logger = getLogger(__filename);
  19. /**
  20. * Represents a single media track(either audio or video).
  21. * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
  22. */
  23. export default class JitsiLocalTrack extends JitsiTrack {
  24. /**
  25. * Constructs new JitsiLocalTrack instanse.
  26. * @param {Object} trackInfo
  27. * @param {number} trackInfo.rtcId the ID assigned by the RTC module
  28. * @param trackInfo.stream WebRTC MediaStream, parent of the track
  29. * @param trackInfo.track underlying WebRTC MediaStreamTrack for new
  30. * JitsiRemoteTrack
  31. * @param trackInfo.mediaType the MediaType of the JitsiRemoteTrack
  32. * @param trackInfo.videoType the VideoType of the JitsiRemoteTrack
  33. * @param trackInfo.resolution the video resolution if it's a video track
  34. * @param trackInfo.deviceId the ID of the local device for this track
  35. * @param trackInfo.facingMode the camera facing mode used in getUserMedia
  36. * call
  37. * @param {sourceId} trackInfo.sourceId - The id of the desktop sharing
  38. * source. NOTE: defined for desktop sharing tracks only.
  39. * @constructor
  40. */
  41. constructor({
  42. deviceId,
  43. facingMode,
  44. mediaType,
  45. resolution,
  46. rtcId,
  47. sourceId,
  48. sourceType,
  49. stream,
  50. track,
  51. videoType
  52. }) {
  53. super(
  54. /* conference */ null,
  55. stream,
  56. track,
  57. /* streamInactiveHandler */ () => this.emit(LOCAL_TRACK_STOPPED),
  58. mediaType,
  59. videoType);
  60. /**
  61. * The ID assigned by the RTC module on instance creation.
  62. * @type {number}
  63. */
  64. this.rtcId = rtcId;
  65. this.sourceId = sourceId;
  66. this.sourceType = sourceType;
  67. if (RTCBrowserType.usesNewGumFlow()) {
  68. // Get the resolution from the track itself because it cannot be
  69. // certain which resolution webrtc has fallen back to using.
  70. this.resolution = track.getSettings().height;
  71. // Cache the constraints of the track in case of any this track
  72. // model needs to call getUserMedia again, such as when unmuting.
  73. this._constraints = track.getConstraints();
  74. } else {
  75. // FIXME Currently, Firefox is ignoring our constraints about
  76. // resolutions so we do not store it, to avoid wrong reporting of
  77. // local track resolution.
  78. this.resolution = RTCBrowserType.isFirefox() ? null : resolution;
  79. }
  80. this.deviceId = deviceId;
  81. /**
  82. * The <tt>Promise</tt> which represents the progress of a previously
  83. * queued/scheduled {@link _setMuted} (from the point of view of
  84. * {@link _queueSetMuted}).
  85. *
  86. * @private
  87. * @type {Promise}
  88. */
  89. this._prevSetMuted = Promise.resolve();
  90. /**
  91. * The facing mode of the camera from which this JitsiLocalTrack
  92. * instance was obtained.
  93. *
  94. * @private
  95. * @type {CameraFacingMode|undefined}
  96. */
  97. this._facingMode = facingMode;
  98. // Currently there is no way to know the MediaStreamTrack ended due to
  99. // to device disconnect in Firefox through e.g. "readyState" property.
  100. // Instead we will compare current track's label with device labels from
  101. // enumerateDevices() list.
  102. this._trackEnded = false;
  103. /**
  104. * Indicates whether data has been sent or not.
  105. */
  106. this._hasSentData = false;
  107. /**
  108. * Used only for detection of audio problems. We want to check only once
  109. * whether the track is sending data ot not. This flag is set to false
  110. * after the check.
  111. */
  112. this._testDataSent = true;
  113. // Currently there is no way to determine with what device track was
  114. // created (until getConstraints() support), however we can associate
  115. // tracks with real devices obtained from enumerateDevices() call as
  116. // soon as it's called.
  117. this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;
  118. /**
  119. * On mute event we are waiting for 3s to check if the stream is going
  120. * to be still muted before firing the event for camera issue detected
  121. * (NO_DATA_FROM_SOURCE).
  122. */
  123. this._noDataFromSourceTimeout = null;
  124. this._onDeviceListChanged = devices => {
  125. this._setRealDeviceIdFromDeviceList(devices);
  126. // Mark track as ended for those browsers that do not support
  127. // "readyState" property. We do not touch tracks created with
  128. // default device ID "".
  129. if (typeof this.getTrack().readyState === 'undefined'
  130. && typeof this._realDeviceId !== 'undefined'
  131. && !devices.find(d => d.deviceId === this._realDeviceId)) {
  132. this._trackEnded = true;
  133. }
  134. };
  135. // Subscribe each created local audio track to
  136. // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
  137. // handling this event for remote tracks (which are handled in RTC.js),
  138. // because there might be local tracks not attached to a conference.
  139. if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
  140. this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);
  141. RTCUtils.addListener(
  142. RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
  143. this._onAudioOutputDeviceChanged);
  144. }
  145. RTCUtils.addListener(
  146. RTCEvents.DEVICE_LIST_CHANGED,
  147. this._onDeviceListChanged);
  148. this._initNoDataFromSourceHandlers();
  149. }
  150. /**
  151. * Returns if associated MediaStreamTrack is in the 'ended' state
  152. * @returns {boolean}
  153. */
  154. isEnded() {
  155. return this.getTrack().readyState === 'ended' || this._trackEnded;
  156. }
  157. /**
  158. * Sets handlers to the MediaStreamTrack object that will detect camera
  159. * issues.
  160. */
  161. _initNoDataFromSourceHandlers() {
  162. if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
  163. const _onNoDataFromSourceError
  164. = this._onNoDataFromSourceError.bind(this);
  165. this._setHandler('track_mute', () => {
  166. if (this._checkForCameraIssues()) {
  167. const now = window.performance.now();
  168. this._noDataFromSourceTimeout
  169. = setTimeout(_onNoDataFromSourceError, 3000);
  170. this._setHandler('track_unmute', () => {
  171. this._clearNoDataFromSourceMuteResources();
  172. Statistics.sendEventToAll(
  173. `${this.getType()}.track_unmute`,
  174. { value: window.performance.now() - now });
  175. });
  176. }
  177. });
  178. this._setHandler('track_ended', _onNoDataFromSourceError);
  179. }
  180. }
  181. /**
  182. * Clears all timeouts and handlers set on MediaStreamTrack mute event.
  183. * FIXME: Change the name of the method with better one.
  184. */
  185. _clearNoDataFromSourceMuteResources() {
  186. if (this._noDataFromSourceTimeout) {
  187. clearTimeout(this._noDataFromSourceTimeout);
  188. this._noDataFromSourceTimeout = null;
  189. }
  190. this._setHandler('track_unmute', undefined);
  191. }
  192. /**
  193. * Called when potential camera issue is detected. Clears the handlers and
  194. * timeouts set on MediaStreamTrack muted event. Verifies that the camera
  195. * issue persists and fires NO_DATA_FROM_SOURCE event.
  196. */
  197. _onNoDataFromSourceError() {
  198. this._clearNoDataFromSourceMuteResources();
  199. if (this._checkForCameraIssues()) {
  200. this._fireNoDataFromSourceEvent();
  201. }
  202. }
  203. /**
  204. * Fires NO_DATA_FROM_SOURCE event and logs it to analytics and callstats.
  205. */
  206. _fireNoDataFromSourceEvent() {
  207. this.emit(NO_DATA_FROM_SOURCE);
  208. const eventName = `${this.getType()}.no_data_from_source`;
  209. Statistics.analytics.sendEvent(eventName);
  210. const log = { name: eventName };
  211. if (this.isAudioTrack()) {
  212. log.isReceivingData = this._isReceivingData();
  213. }
  214. Statistics.sendLog(JSON.stringify(log));
  215. }
  216. /**
  217. * Sets real device ID by comparing track information with device
  218. * information. This is temporary solution until getConstraints() method
  219. * will be implemented in browsers.
  220. * @param {MediaDeviceInfo[]} devices - list of devices obtained from
  221. * enumerateDevices() call
  222. */
  223. _setRealDeviceIdFromDeviceList(devices) {
  224. const track = this.getTrack();
  225. // FIXME for temasys video track, label refers to id not the actual
  226. // device
  227. const device = devices.find(
  228. d => d.kind === `${track.kind}input` && d.label === track.label);
  229. if (device) {
  230. this._realDeviceId = device.deviceId;
  231. }
  232. }
  233. /**
  234. * Sets the stream property of JitsiLocalTrack object and sets all stored
  235. * handlers to it.
  236. *
  237. * @param {MediaStream} stream the new stream.
  238. * @protected
  239. */
  240. _setStream(stream) {
  241. super._setStream(stream);
  242. if (stream) {
  243. // Store the MSID for video mute/unmute purposes.
  244. this.storedMSID = this.getMSID();
  245. logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
  246. } else {
  247. logger.debug(`Setting 'null' stream on ${this}`);
  248. }
  249. }
  250. /**
  251. * Asynchronously mutes this track.
  252. *
  253. * @returns {Promise}
  254. */
  255. mute() {
  256. return this._queueSetMuted(true);
  257. }
  258. /**
  259. * Asynchronously unmutes this track.
  260. *
  261. * @returns {Promise}
  262. */
  263. unmute() {
  264. return this._queueSetMuted(false);
  265. }
  266. /**
  267. * Initializes a new Promise to execute {@link #_setMuted}. May be called
  268. * multiple times in a row and the invocations of {@link #_setMuted} and,
  269. * consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
  270. * serialized fashion.
  271. *
  272. * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
  273. * @returns {Promise}
  274. */
  275. _queueSetMuted(muted) {
  276. const setMuted = this._setMuted.bind(this, muted);
  277. this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);
  278. return this._prevSetMuted;
  279. }
  280. /**
  281. * Mutes / unmutes this track.
  282. *
  283. * @param {boolean} muted - If <tt>true</tt>, this track will be muted;
  284. * otherwise, this track will be unmuted.
  285. * @private
  286. * @returns {Promise}
  287. */
  288. _setMuted(muted) {
  289. if (this.isMuted() === muted) {
  290. return Promise.resolve();
  291. }
  292. let promise = Promise.resolve();
  293. // A function that will print info about muted status transition
  294. const logMuteInfo = () => logger.info(`Mute ${this}: ${muted}`);
  295. if (this.isAudioTrack()
  296. || this.videoType === VideoType.DESKTOP
  297. || !RTCBrowserType.doesVideoMuteByStreamRemove()) {
  298. logMuteInfo();
  299. if (this.track) {
  300. this.track.enabled = !muted;
  301. }
  302. } else if (muted) {
  303. promise = new Promise((resolve, reject) => {
  304. logMuteInfo();
  305. this._removeStreamFromConferenceAsMute(
  306. () => {
  307. // FIXME: Maybe here we should set the SRC for the
  308. // containers to something
  309. // We don't want any events to be fired on this stream
  310. this._unregisterHandlers();
  311. this._stopStream();
  312. this._setStream(null);
  313. resolve();
  314. },
  315. reject);
  316. });
  317. } else {
  318. logMuteInfo();
  319. // This path is only for camera.
  320. const streamOptions = {
  321. cameraDeviceId: this.getDeviceId(),
  322. devices: [ MediaType.VIDEO ],
  323. facingMode: this.getCameraFacingMode()
  324. };
  325. if (RTCBrowserType.usesNewGumFlow()) {
  326. promise
  327. = RTCUtils.newObtainAudioAndVideoPermissions(Object.assign(
  328. {},
  329. streamOptions,
  330. { constraints: { video: this._constraints } }));
  331. } else {
  332. if (this.resolution) {
  333. streamOptions.resolution = this.resolution;
  334. }
  335. promise
  336. = RTCUtils.obtainAudioAndVideoPermissions(streamOptions);
  337. }
  338. promise.then(streamsInfo => {
  339. const mediaType = this.getType();
  340. const streamInfo
  341. = RTCBrowserType.usesNewGumFlow()
  342. ? streamsInfo.find(
  343. info => info.track.kind === mediaType)
  344. : streamsInfo.find(
  345. info => info.mediaType === mediaType);
  346. if (streamInfo) {
  347. this._setStream(streamInfo.stream);
  348. this.track = streamInfo.track;
  349. // This is not good when video type changes after
  350. // unmute, but let's not crash here
  351. if (this.videoType !== streamInfo.videoType) {
  352. logger.warn(
  353. `${this}: video type has changed after unmute!`,
  354. this.videoType, streamInfo.videoType);
  355. this.videoType = streamInfo.videoType;
  356. }
  357. } else {
  358. throw new JitsiTrackError(TRACK_NO_STREAM_FOUND);
  359. }
  360. this.containers = this.containers.map(
  361. cont => RTCUtils.attachMediaStream(cont, this.stream));
  362. return this._addStreamToConferenceAsUnmute();
  363. });
  364. }
  365. return promise
  366. .then(() => this._sendMuteStatus(muted))
  367. .then(() => this.emit(TRACK_MUTE_CHANGED, this));
  368. }
  369. /**
  370. * Adds stream to conference and marks it as "unmute" operation.
  371. *
  372. * @private
  373. * @returns {Promise}
  374. */
  375. _addStreamToConferenceAsUnmute() {
  376. if (!this.conference) {
  377. return Promise.resolve();
  378. }
  379. // FIXME it would be good to not included conference as part of this
  380. // process. Only TraceablePeerConnections to which the track is attached
  381. // should care about this action. The TPCs to which the track is not
  382. // attached can sync up when track is re-attached.
  383. // A problem with that is that the "modify sources" queue is part of
  384. // the JingleSessionPC and it would be excluded from the process. One
  385. // solution would be to extract class between TPC and JingleSessionPC
  386. // which would contain the queue and would notify the signaling layer
  387. // when local SSRCs are changed. This would help to separate XMPP from
  388. // the RTC module.
  389. return new Promise((resolve, reject) => {
  390. this.conference._addLocalTrackAsUnmute(this)
  391. .then(resolve, error => reject(new Error(error)));
  392. });
  393. }
  394. /**
  395. * Removes stream from conference and marks it as "mute" operation.
  396. * @param {Function} successCallback will be called on success
  397. * @param {Function} errorCallback will be called on error
  398. * @private
  399. */
  400. _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
  401. if (!this.conference) {
  402. successCallback();
  403. return;
  404. }
  405. this.conference._removeLocalTrackAsMute(this).then(
  406. successCallback,
  407. error => errorCallback(new Error(error)));
  408. }
  409. /**
  410. * Sends mute status for a track to conference if any.
  411. *
  412. * @param {boolean} mute - If track is muted.
  413. * @private
  414. * @returns {Promise}
  415. */
  416. _sendMuteStatus(mute) {
  417. if (!this.conference || !this.conference.room) {
  418. return Promise.resolve();
  419. }
  420. return new Promise(resolve => {
  421. this.conference.room[
  422. this.isAudioTrack()
  423. ? 'setAudioMute'
  424. : 'setVideoMute'](mute, resolve);
  425. });
  426. }
  427. /**
  428. * @inheritdoc
  429. *
  430. * Stops sending the media track. And removes it from the HTML.
  431. * NOTE: Works for local tracks only.
  432. *
  433. * @extends JitsiTrack#dispose
  434. * @returns {Promise}
  435. */
  436. dispose() {
  437. let promise = Promise.resolve();
  438. if (this.conference) {
  439. promise = this.conference.removeTrack(this);
  440. }
  441. if (this.stream) {
  442. this._stopStream();
  443. this.detach();
  444. }
  445. RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED,
  446. this._onDeviceListChanged);
  447. if (this._onAudioOutputDeviceChanged) {
  448. RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
  449. this._onAudioOutputDeviceChanged);
  450. }
  451. return promise.then(() => super.dispose());
  452. }
  453. /**
  454. * Returns <tt>true</tt> - if the stream is muted
  455. * and <tt>false</tt> otherwise.
  456. * @returns {boolean} <tt>true</tt> - if the stream is muted
  457. * and <tt>false</tt> otherwise.
  458. */
  459. isMuted() {
  460. // this.stream will be null when we mute local video on Chrome
  461. if (!this.stream) {
  462. return true;
  463. }
  464. if (this.isVideoTrack() && !this.isActive()) {
  465. return true;
  466. }
  467. return !this.track || !this.track.enabled;
  468. }
  469. /**
  470. * Sets the JitsiConference object associated with the track. This is temp
  471. * solution.
  472. * @param conference the JitsiConference object
  473. */
  474. _setConference(conference) {
  475. this.conference = conference;
  476. // We want to keep up with postponed events which should have been fired
  477. // on "attach" call, but for local track we not always have the
  478. // conference before attaching. However this may result in duplicated
  479. // events if they have been triggered on "attach" already.
  480. for (let i = 0; i < this.containers.length; i++) {
  481. this._maybeFireTrackAttached(this.containers[i]);
  482. }
  483. }
  484. /**
  485. * Returns <tt>true</tt>.
  486. * @returns {boolean} <tt>true</tt>
  487. */
  488. isLocal() {
  489. return true;
  490. }
  491. /**
  492. * Returns device id associated with track.
  493. * @returns {string}
  494. */
  495. getDeviceId() {
  496. return this._realDeviceId || this.deviceId;
  497. }
  498. /**
  499. * Returns the participant id which owns the track.
  500. * @returns {string} the id of the participants. It corresponds to the
  501. * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
  502. */
  503. getParticipantId() {
  504. return this.conference && this.conference.myUserId();
  505. }
  506. /**
  507. * Handles bytes sent statistics.
  508. * @param {TraceablePeerConnection} tpc the source of the "bytes sent" stat
  509. * @param {number} bytesSent the new value
  510. * NOTE: used only for audio tracks to detect audio issues.
  511. */
  512. _onByteSentStatsReceived(tpc, bytesSent) {
  513. if (bytesSent > 0) {
  514. this._hasSentData = true;
  515. }
  516. const iceConnectionState = tpc.getConnectionState();
  517. if (this._testDataSent && iceConnectionState === 'connected') {
  518. setTimeout(() => {
  519. if (!this._hasSentData) {
  520. logger.warn(`${this} 'bytes sent' <= 0: \
  521. ${this._bytesSent}`);
  522. // we are not receiving anything from the microphone
  523. this._fireNoDataFromSourceEvent();
  524. }
  525. }, 3000);
  526. this._testDataSent = false;
  527. }
  528. }
  529. /**
  530. * Returns facing mode for video track from camera. For other cases (e.g.
  531. * audio track or 'desktop' video track) returns undefined.
  532. *
  533. * @returns {CameraFacingMode|undefined}
  534. */
  535. getCameraFacingMode() {
  536. if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
  537. // MediaStreamTrack#getSettings() is not implemented in many
  538. // browsers, so we need feature checking here. Progress on the
  539. // respective browser's implementation can be tracked at
  540. // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
  541. // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
  542. // for Firefox. Even if a browser implements getSettings() already,
  543. // it might still not return anything for 'facingMode'.
  544. let trackSettings;
  545. try {
  546. trackSettings = this.track.getSettings();
  547. } catch (e) {
  548. // XXX React-native-webrtc, for example, defines
  549. // MediaStreamTrack#getSettings() but the implementation throws
  550. // a "Not implemented" Error.
  551. }
  552. if (trackSettings && 'facingMode' in trackSettings) {
  553. return trackSettings.facingMode;
  554. }
  555. if (typeof this._facingMode !== 'undefined') {
  556. return this._facingMode;
  557. }
  558. // In most cases we are showing a webcam. So if we've gotten here,
  559. // it should be relatively safe to assume that we are probably
  560. // showing the user-facing camera.
  561. return CameraFacingMode.USER;
  562. }
  563. return undefined;
  564. }
  565. /**
  566. * Stops the associated MediaStream.
  567. */
  568. _stopStream() {
  569. /**
  570. * Indicates that we are executing {@link #_stopStream} i.e.
  571. * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
  572. * associated with this <tt>JitsiTrack</tt> instance.
  573. *
  574. * @private
  575. * @type {boolean}
  576. */
  577. this._stopStreamInProgress = true;
  578. try {
  579. RTCUtils.stopMediaStream(this.stream);
  580. } finally {
  581. this._stopStreamInProgress = false;
  582. }
  583. }
  584. /**
  585. * Switches the camera facing mode if the WebRTC implementation supports the
  586. * custom MediaStreamTrack._switchCamera method. Currently, the method in
  587. * question is implemented in react-native-webrtc only. When such a WebRTC
  588. * implementation is executing, the method is the preferred way to switch
  589. * between the front/user-facing and the back/environment-facing cameras
  590. * because it will likely be (as is the case of react-native-webrtc)
  591. * noticeably faster that creating a new MediaStreamTrack via a new
  592. * getUserMedia call with the switched facingMode constraint value.
  593. * Moreover, the approach with a new getUserMedia call may not even work:
  594. * WebRTC on Android and iOS is either very slow to open the camera a second
  595. * time or plainly freezes attempting to do that.
  596. */
  597. _switchCamera() {
  598. if (this.isVideoTrack()
  599. && this.videoType === VideoType.CAMERA
  600. && typeof this.track._switchCamera === 'function') {
  601. this.track._switchCamera();
  602. this._facingMode
  603. = this._facingMode === CameraFacingMode.ENVIRONMENT
  604. ? CameraFacingMode.USER
  605. : CameraFacingMode.ENVIRONMENT;
  606. }
  607. }
  608. /**
  609. * Detects camera issues on ended and mute events from MediaStreamTrack.
  610. * @returns {boolean} true if an issue is detected and false otherwise
  611. */
  612. _checkForCameraIssues() {
  613. if (!this.isVideoTrack()
  614. || this._stopStreamInProgress
  615. || this.videoType === VideoType.DESKTOP) {
  616. return false;
  617. }
  618. return !this._isReceivingData();
  619. }
  620. /**
  621. * Checks whether the attached MediaStream is receiving data from source or
  622. * not. If the stream property is null(because of mute or another reason)
  623. * this method will return false.
  624. * NOTE: This method doesn't indicate problem with the streams directly.
  625. * For example in case of video mute the method will return false or if the
  626. * user has disposed the track.
  627. * @returns {boolean} true if the stream is receiving data and false
  628. * this otherwise.
  629. */
  630. _isReceivingData() {
  631. if (!this.stream) {
  632. return false;
  633. }
  634. // In older version of the spec there is no muted property and
  635. // readyState can have value muted. In the latest versions
  636. // readyState can have values "live" and "ended" and there is
  637. // muted boolean property. If the stream is muted that means that
  638. // we aren't receiving any data from the source. We want to notify
  639. // the users for error if the stream is muted or ended on it's
  640. // creation.
  641. return this.stream.getTracks().some(track =>
  642. (!('readyState' in track) || track.readyState === 'live')
  643. && (!('muted' in track) || track.muted !== true));
  644. }
  645. /**
  646. * Creates a text representation of this local track instance.
  647. * @return {string}
  648. */
  649. toString() {
  650. return `LocalTrack[${this.rtcId},${this.getType()}]`;
  651. }
  652. }