JitsiLocalTrack.js

/* global __filename, Promise */

import { getLogger } from 'jitsi-meet-logger';

import JitsiTrack from './JitsiTrack';
import JitsiTrackError from '../../JitsiTrackError';
import {
    TRACK_IS_DISPOSED,
    TRACK_NO_STREAM_FOUND
} from '../../JitsiTrackErrors';
import {
    LOCAL_TRACK_STOPPED,
    NO_DATA_FROM_SOURCE,
    TRACK_MUTE_CHANGED
} from '../../JitsiTrackEvents';
import browser from '../browser';
import RTCUtils from './RTCUtils';
import CameraFacingMode from '../../service/RTC/CameraFacingMode';
import * as MediaType from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import VideoType from '../../service/RTC/VideoType';
import {
    TRACK_UNMUTED,
    createNoDataFromSourceEvent
} from '../../service/statistics/AnalyticsEvents';
import Statistics from '../statistics/statistics';

const logger = getLogger(__filename);

/**
 * Represents a single media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 */
export default class JitsiLocalTrack extends JitsiTrack {
    /**
     * Constructs a new JitsiLocalTrack instance.
     *
     * @constructor
     * @param {Object} trackInfo
     * @param {number} trackInfo.rtcId - The ID assigned by the RTC module.
     * @param trackInfo.stream - The WebRTC MediaStream, parent of the track.
     * @param trackInfo.track - The underlying WebRTC MediaStreamTrack for the
     * new JitsiLocalTrack.
     * @param trackInfo.mediaType - The MediaType of the JitsiLocalTrack.
     * @param trackInfo.videoType - The VideoType of the JitsiLocalTrack.
     * @param trackInfo.resolution - The video resolution if it's a video
     * track.
     * @param trackInfo.deviceId - The ID of the local device for this track.
     * @param trackInfo.facingMode - The camera facing mode used in the
     * getUserMedia call.
     * @param {string} trackInfo.sourceId - The ID of the desktop sharing
     * source. NOTE: defined for desktop sharing tracks only.
     * @param {string} trackInfo.sourceType - The type of the desktop sharing
     * source. NOTE: defined for desktop sharing tracks only.
     */
    constructor({
        deviceId,
        facingMode,
        mediaType,
        resolution,
        rtcId,
        sourceId,
        sourceType,
        stream,
        track,
        videoType
    }) {
        super(
            /* conference */ null,
            stream,
            track,
            /* streamInactiveHandler */ () => this.emit(LOCAL_TRACK_STOPPED),
            mediaType,
            videoType);

        /**
         * The ID assigned by the RTC module on instance creation.
         *
         * @type {number}
         */
        this.rtcId = rtcId;
        this.sourceId = sourceId;
        this.sourceType = sourceType;

        if (browser.usesNewGumFlow()) {
            // Get the resolution from the track itself because it cannot be
            // certain which resolution webrtc has fallen back to using.
            this.resolution = track.getSettings().height;

            // Cache the constraints of the track in case this track model
            // needs to call getUserMedia again, such as when unmuting.
            this._constraints = track.getConstraints();
        } else {
            // FIXME Currently, Firefox is ignoring our constraints about
            // resolutions so we do not store it, to avoid wrong reporting of
            // local track resolution.
            this.resolution = browser.isFirefox() ? null : resolution;
        }

        this.deviceId = deviceId;

        /**
         * The <tt>Promise</tt> which represents the progress of a previously
         * queued/scheduled {@link _setMuted} (from the point of view of
         * {@link _queueSetMuted}).
         *
         * @private
         * @type {Promise}
         */
        this._prevSetMuted = Promise.resolve();

        /**
         * The facing mode of the camera from which this JitsiLocalTrack
         * instance was obtained.
         *
         * @private
         * @type {CameraFacingMode|undefined}
         */
        this._facingMode = facingMode;

        // Currently there is no way to know that a MediaStreamTrack ended due
        // to a device disconnect in Firefox through e.g. the "readyState"
        // property. Instead we will compare the current track's label with
        // the device labels from the enumerateDevices() list.
        this._trackEnded = false;

        /**
         * Indicates whether data has been sent or not.
         */
        this._hasSentData = false;

        /**
         * Used only for detection of audio problems. We want to check only
         * once whether the track is sending data or not. This flag is set to
         * false after the check.
         */
        this._testDataSent = true;

        // Currently there is no way to determine with what device the track
        // was created (until getConstraints() support), however we can
        // associate tracks with real devices obtained from the
        // enumerateDevices() call as soon as it's called.
        // NOTE: this.deviceId corresponds to the device id specified in the
        // GUM constraints, while this._realDeviceId is the id of a matching
        // device from the available device list.
        this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

        /**
         * On a mute event we wait for 3s to check if the stream is still
         * muted before firing the event for camera issue detected
         * (NO_DATA_FROM_SOURCE).
         */
        this._noDataFromSourceTimeout = null;

        this._onDeviceListChanged = devices => {
            this._setRealDeviceIdFromDeviceList(devices);

            // Mark the track as ended for those browsers that do not support
            // the "readyState" property. We do not touch tracks created with
            // the default device ID "".
            if (typeof this.getTrack().readyState === 'undefined'
                    && typeof this._realDeviceId !== 'undefined'
                    && !devices.find(d => d.deviceId === this._realDeviceId)) {
                this._trackEnded = true;
            }
        };

        // Subscribe each created local audio track to
        // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
        // handling this event for remote tracks (which are handled in RTC.js),
        // because there might be local tracks not attached to a conference.
        if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
            this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);

            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        RTCUtils.addListener(
            RTCEvents.DEVICE_LIST_WILL_CHANGE,
            this._onDeviceListChanged);

        this._initNoDataFromSourceHandlers();
    }
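
    // Usage sketch (illustrative, not part of the original file): instances
    // of this class are normally created by the RTC module from a
    // getUserMedia result rather than constructed directly, roughly like:
    //
    //   const [ videoTrack ] = stream.getVideoTracks();
    //   const localTrack = new JitsiLocalTrack({
    //       deviceId: '',
    //       mediaType: 'video',
    //       rtcId: 1,
    //       stream,
    //       track: videoTrack,
    //       videoType: 'camera'
    //   });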

    /**
     * Returns whether the associated MediaStreamTrack is in the 'ended'
     * state.
     *
     * @returns {boolean}
     */
    isEnded() {
        return this.getTrack().readyState === 'ended' || this._trackEnded;
    }

    /**
     * Sets handlers to the MediaStreamTrack object that will detect camera
     * issues.
     */
    _initNoDataFromSourceHandlers() {
        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            const _onNoDataFromSourceError
                = this._onNoDataFromSourceError.bind(this);

            this._setHandler('track_mute', () => {
                if (this._checkForCameraIssues()) {
                    const now = window.performance.now();

                    this._noDataFromSourceTimeout
                        = setTimeout(_onNoDataFromSourceError, 3000);
                    this._setHandler('track_unmute', () => {
                        this._clearNoDataFromSourceMuteResources();
                        Statistics.sendAnalyticsAndLog(
                            TRACK_UNMUTED,
                            {
                                'media_type': this.getType(),
                                'track_type': 'local',
                                value: window.performance.now() - now
                            });
                    });
                }
            });
            this._setHandler('track_ended', _onNoDataFromSourceError);
        }
    }

    /**
     * Clears all timeouts and handlers set on the MediaStreamTrack mute
     * event.
     * FIXME: Change the name of the method to a better one.
     */
    _clearNoDataFromSourceMuteResources() {
        if (this._noDataFromSourceTimeout) {
            clearTimeout(this._noDataFromSourceTimeout);
            this._noDataFromSourceTimeout = null;
        }
        this._setHandler('track_unmute', undefined);
    }

    /**
     * Called when a potential camera issue is detected. Clears the handlers
     * and timeouts set on the MediaStreamTrack muted event. Verifies that the
     * camera issue persists and fires NO_DATA_FROM_SOURCE event.
     */
    _onNoDataFromSourceError() {
        this._clearNoDataFromSourceMuteResources();
        if (this._checkForCameraIssues()) {
            this._fireNoDataFromSourceEvent();
        }
    }

    /**
     * Fires NO_DATA_FROM_SOURCE event and logs it to analytics and callstats.
     */
    _fireNoDataFromSourceEvent() {
        this.emit(NO_DATA_FROM_SOURCE);
        Statistics.sendAnalytics(createNoDataFromSourceEvent(this.getType()));
        const log = { name: NO_DATA_FROM_SOURCE };

        if (this.isAudioTrack()) {
            log.isReceivingData = this._isReceivingData();
        }
        Statistics.sendLog(JSON.stringify(log));
    }

    /**
     * Sets the real device ID by comparing track information with device
     * information. This is a temporary solution until the getConstraints()
     * method is implemented in browsers.
     *
     * @param {MediaDeviceInfo[]} devices - The list of devices obtained from
     * an enumerateDevices() call.
     */
    _setRealDeviceIdFromDeviceList(devices) {
        const track = this.getTrack();
        const kind = `${track.kind}input`;
        let device
            = devices.find(d => d.kind === kind && d.label === track.label);

        if (!device && this._realDeviceId === 'default') {
            // The default device has been changed.
            const label = (track.label || '').replace('Default - ', '');

            device = devices.find(d => d.kind === kind && d.label === label);
        }
        if (device) {
            this._realDeviceId = device.deviceId;
        }
    }

    /**
     * Sets the stream property of JitsiLocalTrack object and sets all stored
     * handlers to it.
     *
     * @param {MediaStream} stream - The new stream.
     * @protected
     */
    _setStream(stream) {
        super._setStream(stream);

        if (stream) {
            // Store the MSID for video mute/unmute purposes.
            this.storedMSID = this.getMSID();
            logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
        } else {
            logger.debug(`Setting 'null' stream on ${this}`);
        }
    }

    /**
     * Asynchronously mutes this track.
     *
     * @returns {Promise}
     */
    mute() {
        return this._queueSetMuted(true);
    }

    /**
     * Asynchronously unmutes this track.
     *
     * @returns {Promise}
     */
    unmute() {
        return this._queueSetMuted(false);
    }

    /**
     * Initializes a new Promise to execute {@link #_setMuted}. May be called
     * multiple times in a row and the invocations of {@link #_setMuted} and,
     * consequently, {@link #mute} and/or {@link #unmute} will be resolved in
     * a serialized fashion.
     *
     * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
     * @returns {Promise}
     */
    _queueSetMuted(muted) {
        const setMuted = this._setMuted.bind(this, muted);

        this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);

        return this._prevSetMuted;
    }
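
    // Illustrative note: because every request is chained onto
    // this._prevSetMuted (on both resolution and rejection), overlapping
    // mute/unmute calls run strictly one after another, e.g.:
    //
    //   track.mute();
    //   track.unmute();
    //   track.mute().then(() => {
    //       // The three _setMuted() invocations have executed in order;
    //       // the track ends up muted.
    //   });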

    /**
     * Mutes / unmutes this track.
     *
     * @param {boolean} muted - If <tt>true</tt>, this track will be muted;
     * otherwise, this track will be unmuted.
     * @private
     * @returns {Promise}
     */
    _setMuted(muted) {
        if (this.isMuted() === muted) {
            return Promise.resolve();
        }

        if (this.disposed) {
            return Promise.reject(new JitsiTrackError(TRACK_IS_DISPOSED));
        }

        let promise = Promise.resolve();

        // A function that will print info about the muted status transition.
        const logMuteInfo = () => logger.info(`Mute ${this}: ${muted}`);

        if (this.isAudioTrack()
                || this.videoType === VideoType.DESKTOP
                || !browser.doesVideoMuteByStreamRemove()) {
            logMuteInfo();
            if (this.track) {
                this.track.enabled = !muted;
            }
        } else if (muted) {
            promise = new Promise((resolve, reject) => {
                logMuteInfo();
                this._removeStreamFromConferenceAsMute(
                    () => {
                        // FIXME: Maybe here we should set the SRC for the
                        // containers to something.
                        // We don't want any events to be fired on this stream.
                        this._unregisterHandlers();
                        this.stopStream();
                        this._setStream(null);
                        resolve();
                    },
                    reject);
            });
        } else {
            logMuteInfo();

            // This path is only for camera.
            const streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                facingMode: this.getCameraFacingMode()
            };

            if (browser.usesNewGumFlow()) {
                promise
                    = RTCUtils.newObtainAudioAndVideoPermissions(Object.assign(
                        {},
                        streamOptions,
                        { constraints: { video: this._constraints } }));
            } else {
                if (this.resolution) {
                    streamOptions.resolution = this.resolution;
                }

                promise
                    = RTCUtils.obtainAudioAndVideoPermissions(streamOptions);
            }

            // Chain the post-getUserMedia steps onto the promise so that the
            // mute status is not sent before the new stream is in place.
            promise = promise.then(streamsInfo => {
                const mediaType = this.getType();
                const streamInfo
                    = browser.usesNewGumFlow()
                        ? streamsInfo.find(
                            info => info.track.kind === mediaType)
                        : streamsInfo.find(
                            info => info.mediaType === mediaType);

                if (streamInfo) {
                    this._setStream(streamInfo.stream);
                    this.track = streamInfo.track;

                    // This is not good when the video type changes after
                    // unmute, but let's not crash here.
                    if (this.videoType !== streamInfo.videoType) {
                        logger.warn(
                            `${this}: video type has changed after unmute!`,
                            this.videoType, streamInfo.videoType);
                        this.videoType = streamInfo.videoType;
                    }
                } else {
                    throw new JitsiTrackError(TRACK_NO_STREAM_FOUND);
                }

                this.containers.map(
                    cont => RTCUtils.attachMediaStream(cont, this.stream));

                return this._addStreamToConferenceAsUnmute();
            });
        }

        return promise
            .then(() => this._sendMuteStatus(muted))
            .then(() => this.emit(TRACK_MUTE_CHANGED, this));
    }
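
    // Editorial note: two mute strategies coexist in _setMuted() above. For
    // audio tracks, desktop tracks, and browsers where
    // doesVideoMuteByStreamRemove() returns false, muting only toggles
    // track.enabled. For camera video elsewhere, muting stops and releases
    // the stream entirely, and unmuting acquires a brand new one via
    // getUserMedia, which is why unmute can reject and why the video type is
    // re-checked afterwards.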

    /**
     * Adds stream to conference and marks it as "unmute" operation.
     *
     * @private
     * @returns {Promise}
     */
    _addStreamToConferenceAsUnmute() {
        if (!this.conference) {
            return Promise.resolve();
        }

        // FIXME it would be good to not include the conference as part of
        // this process. Only TraceablePeerConnections to which the track is
        // attached should care about this action. The TPCs to which the track
        // is not attached can sync up when the track is re-attached.
        // A problem with that is that the "modify sources" queue is part of
        // the JingleSessionPC and it would be excluded from the process. One
        // solution would be to extract a class between TPC and JingleSessionPC
        // which would contain the queue and would notify the signaling layer
        // when local SSRCs are changed. This would help to separate XMPP from
        // the RTC module.
        return new Promise((resolve, reject) => {
            this.conference._addLocalTrackAsUnmute(this)
                .then(resolve, error => reject(new Error(error)));
        });
    }

    /**
     * Removes stream from conference and marks it as "mute" operation.
     *
     * @param {Function} successCallback - Will be called on success.
     * @param {Function} errorCallback - Will be called on error.
     * @private
     */
    _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
        if (!this.conference) {
            successCallback();

            return;
        }
        this.conference._removeLocalTrackAsMute(this).then(
            successCallback,
            error => errorCallback(new Error(error)));
    }

    /**
     * Sends mute status for a track to conference if any.
     *
     * @param {boolean} mute - If the track is muted.
     * @private
     * @returns {Promise}
     */
    _sendMuteStatus(mute) {
        if (!this.conference || !this.conference.room) {
            return Promise.resolve();
        }

        return new Promise(resolve => {
            this.conference.room[
                this.isAudioTrack()
                    ? 'setAudioMute'
                    : 'setVideoMute'](mute, resolve);
        });
    }

    /**
     * @inheritdoc
     *
     * Stops sending the media track and removes it from the HTML.
     * NOTE: Works for local tracks only.
     *
     * @extends JitsiTrack#dispose
     * @returns {Promise}
     */
    dispose() {
        let promise = Promise.resolve();

        if (this.conference) {
            promise = this.conference.removeTrack(this);
        }

        if (this.stream) {
            this.stopStream();
            this.detach();
        }

        RTCUtils.removeListener(
            RTCEvents.DEVICE_LIST_WILL_CHANGE,
            this._onDeviceListChanged);

        if (this._onAudioOutputDeviceChanged) {
            RTCUtils.removeListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        return promise.then(() => super.dispose());
    }
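
    // Usage sketch (illustrative): disposal is asynchronous because the
    // track may first have to be removed from a conference, e.g.:
    //
    //   localTrack.dispose()
    //       .then(() => {
    //           // The underlying MediaStream is stopped and the listeners
    //           // registered by this instance are removed.
    //       });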

    /**
     * Returns <tt>true</tt> if the stream is muted and <tt>false</tt>
     * otherwise.
     *
     * @returns {boolean} <tt>true</tt> if the stream is muted and
     * <tt>false</tt> otherwise.
     */
    isMuted() {
        // this.stream will be null when we mute local video on Chrome.
        if (!this.stream) {
            return true;
        }
        if (this.isVideoTrack() && !this.isActive()) {
            return true;
        }

        return !this.track || !this.track.enabled;
    }

    /**
     * Sets the JitsiConference object associated with the track. This is a
     * temporary solution.
     *
     * @param conference - The JitsiConference object.
     */
    _setConference(conference) {
        this.conference = conference;

        // We want to keep up with postponed events which should have been
        // fired on the "attach" call, but for a local track we do not always
        // have the conference before attaching. However, this may result in
        // duplicated events if they have already been triggered on "attach".
        for (let i = 0; i < this.containers.length; i++) {
            this._maybeFireTrackAttached(this.containers[i]);
        }
    }

    /**
     * Returns <tt>true</tt>.
     *
     * @returns {boolean} <tt>true</tt>
     */
    isLocal() {
        return true;
    }

    /**
     * Returns device id associated with track.
     *
     * @returns {string}
     */
    getDeviceId() {
        return this._realDeviceId || this.deviceId;
    }

    /**
     * Returns the participant id which owns the track.
     *
     * @returns {string} The id of the participant. It corresponds to the
     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
     */
    getParticipantId() {
        return this.conference && this.conference.myUserId();
    }

    /**
     * Handles bytes sent statistics.
     * NOTE: Used only for audio tracks to detect audio issues.
     *
     * @param {TraceablePeerConnection} tpc - The source of the "bytes sent"
     * stat.
     * @param {number} bytesSent - The new value.
     */
    _onByteSentStatsReceived(tpc, bytesSent) {
        if (bytesSent > 0) {
            this._hasSentData = true;
        }
        const iceConnectionState = tpc.getConnectionState();

        if (this._testDataSent && iceConnectionState === 'connected') {
            setTimeout(() => {
                if (!this._hasSentData) {
                    logger.warn(`${this} 'bytes sent' <= 0: ${bytesSent}`);

                    // We are not receiving anything from the microphone.
                    this._fireNoDataFromSourceEvent();
                }
            }, 3000);
            this._testDataSent = false;
        }
    }

    /**
     * Returns facing mode for video track from camera. For other cases (e.g.
     * audio track or 'desktop' video track) returns undefined.
     *
     * @returns {CameraFacingMode|undefined}
     */
    getCameraFacingMode() {
        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            // MediaStreamTrack#getSettings() is not implemented in many
            // browsers, so we need feature checking here. Progress on the
            // respective browser's implementation can be tracked at
            // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
            // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
            // for Firefox. Even if a browser implements getSettings() already,
            // it might still not return anything for 'facingMode'.
            let trackSettings;

            try {
                trackSettings = this.track.getSettings();
            } catch (e) {
                // XXX React-native-webrtc, for example, defines
                // MediaStreamTrack#getSettings() but the implementation throws
                // a "Not implemented" Error.
            }
            if (trackSettings && 'facingMode' in trackSettings) {
                return trackSettings.facingMode;
            }

            if (typeof this._facingMode !== 'undefined') {
                return this._facingMode;
            }

            // In most cases we are showing a webcam. So if we've gotten here,
            // it should be relatively safe to assume that we are probably
            // showing the user-facing camera.
            return CameraFacingMode.USER;
        }

        return undefined;
    }
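
    // Illustrative note: on react-native-webrtc the facing mode pairs with
    // _switchCamera() below, e.g.:
    //
    //   if (localTrack.getCameraFacingMode() === CameraFacingMode.USER) {
    //       // Flips to the environment-facing camera without a new
    //       // getUserMedia call.
    //       localTrack._switchCamera();
    //   }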

    /**
     * Stops the associated MediaStream.
     */
    stopStream() {
        /**
         * Indicates that we are executing {@link #stopStream} i.e.
         * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
         * associated with this <tt>JitsiTrack</tt> instance.
         *
         * @private
         * @type {boolean}
         */
        this._stopStreamInProgress = true;

        try {
            RTCUtils.stopMediaStream(this.stream);
        } finally {
            this._stopStreamInProgress = false;
        }
    }

    /**
     * Switches the camera facing mode if the WebRTC implementation supports
     * the custom MediaStreamTrack._switchCamera method. Currently, the method
     * in question is implemented in react-native-webrtc only. When such a
     * WebRTC implementation is executing, the method is the preferred way to
     * switch between the front/user-facing and the back/environment-facing
     * cameras because it will likely be (as is the case of
     * react-native-webrtc) noticeably faster than creating a new
     * MediaStreamTrack via a new getUserMedia call with the switched
     * facingMode constraint value. Moreover, the approach with a new
     * getUserMedia call may not even work: WebRTC on Android and iOS is
     * either very slow to open the camera a second time or plainly freezes
     * attempting to do that.
     */
    _switchCamera() {
        if (this.isVideoTrack()
                && this.videoType === VideoType.CAMERA
                && typeof this.track._switchCamera === 'function') {
            this.track._switchCamera();

            this._facingMode
                = this._facingMode === CameraFacingMode.ENVIRONMENT
                    ? CameraFacingMode.USER
                    : CameraFacingMode.ENVIRONMENT;
        }
    }

    /**
     * Detects camera issues, i.e. returns true if we expect this track to be
     * receiving data from its source, but it isn't receiving data.
     *
     * @returns {boolean} true if an issue is detected and false otherwise
     */
    _checkForCameraIssues() {
        if (!this.isVideoTrack()
                || this._stopStreamInProgress
                || this.videoType === VideoType.DESKTOP) {
            return false;
        }

        return !this._isReceivingData();
    }

    /**
     * Checks whether the attached MediaStream is receiving data from its
     * source or not. If the stream property is null (because of mute or
     * another reason), this method will return false.
     * NOTE: This method doesn't indicate a problem with the streams directly.
     * For example, the method will return false in case of video mute or if
     * the user has disposed the track.
     *
     * @returns {boolean} true if the stream is receiving data and false
     * otherwise.
     */
    _isReceivingData() {
        if (!this.stream) {
            return false;
        }

        // In older versions of the spec there is no "muted" property and
        // "readyState" can have the value "muted". In the latest versions
        // "readyState" can have the values "live" and "ended" and there is a
        // "muted" boolean property. If the stream is muted, that means we
        // aren't receiving any data from the source. We want to notify the
        // users of an error if the stream is muted or ended on its creation.
        return this.stream.getTracks().some(track =>
            (!('readyState' in track) || track.readyState === 'live')
                && (!('muted' in track) || track.muted !== true));
    }

    /**
     * Creates a text representation of this local track instance.
     *
     * @return {string}
     */
    toString() {
        return `LocalTrack[${this.rtcId},${this.getType()}]`;
    }
}