
JitsiLocalTrack.js

/* global __filename, Promise */

import CameraFacingMode from '../../service/RTC/CameraFacingMode';
import { getLogger } from 'jitsi-meet-logger';
import JitsiTrack from './JitsiTrack';
import JitsiTrackError from '../../JitsiTrackError';
import * as JitsiTrackErrors from '../../JitsiTrackErrors';
import * as JitsiTrackEvents from '../../JitsiTrackEvents';
import * as MediaType from '../../service/RTC/MediaType';
import RTCBrowserType from './RTCBrowserType';
import RTCEvents from '../../service/RTC/RTCEvents';
import RTCUtils from './RTCUtils';
import Statistics from '../statistics/statistics';
import VideoType from '../../service/RTC/VideoType';

const logger = getLogger(__filename);

/* eslint-disable max-params */
/**
 * Represents a single media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 * @param {number} rtcId the ID assigned by the RTC module
 * @param stream WebRTC MediaStream, parent of the track
 * @param track underlying WebRTC MediaStreamTrack for this JitsiLocalTrack
 * @param mediaType the MediaType of this JitsiLocalTrack
 * @param videoType the VideoType of this JitsiLocalTrack
 * @param resolution the video resolution if it's a video track
 * @param deviceId the ID of the local device for this track
 * @param facingMode the camera facing mode used in the getUserMedia call
 * @constructor
 */
function JitsiLocalTrack(
        rtcId,
        stream,
        track,
        mediaType,
        videoType,
        resolution,
        deviceId,
        facingMode) {
    /**
     * The ID assigned by the RTC module on instance creation.
     * @type {number}
     */
    this.rtcId = rtcId;
    JitsiTrack.call(
        this,
        null /* RTC */,
        stream,
        track,
        () => {
            if (!this.dontFireRemoveEvent) {
                this.eventEmitter.emit(JitsiTrackEvents.LOCAL_TRACK_STOPPED);
            }
            this.dontFireRemoveEvent = false;
        } /* inactiveHandler */,
        mediaType,
        videoType);
    this.dontFireRemoveEvent = false;
    this.resolution = resolution;

    // FIXME: currently firefox is ignoring our constraints about resolutions
    // so we do not store it, to avoid wrong reporting of local track
    // resolution
    if (RTCBrowserType.isFirefox()) {
        this.resolution = null;
    }
    this.deviceId = deviceId;
    this.startMuted = false;
    this.storedMSID = this.getMSID();
    this.inMuteOrUnmuteProgress = false;

    /**
     * The facing mode of the camera from which this JitsiLocalTrack instance
     * was obtained.
     */
    this._facingMode = facingMode;

    // Currently there is no way to know that the MediaStreamTrack ended due
    // to a device disconnect in Firefox through e.g. the "readyState"
    // property. Instead we will compare the current track's label with the
    // device labels from the enumerateDevices() list.
    this._trackEnded = false;

    /**
     * The value of the 'bytes sent' statistic received from the statistics
     * module.
     */
    this._bytesSent = null;

    /**
     * Used only for detection of audio problems. We want to check only once
     * whether the track is sending bytes or not. This flag is set to false
     * after the check.
     */
    this._testByteSent = true;

    // Currently there is no way to determine with what device the track was
    // created (until getConstraints() support), however we can associate
    // tracks with real devices obtained from the enumerateDevices() call as
    // soon as it's called.
    this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

    /**
     * Indicates that we have called RTCUtils.stopMediaStream for the
     * MediaStream related to this JitsiTrack object.
     */
    this.stopStreamInProgress = false;

    /**
     * On mute event we are waiting for 3s to check if the stream is going to
     * be still muted before firing the event for camera issue detected
     * (NO_DATA_FROM_SOURCE).
     */
    this._noDataFromSourceTimeout = null;

    this._onDeviceListChanged = devices => {
        this._setRealDeviceIdFromDeviceList(devices);

        // Mark the track as ended for those browsers that do not support the
        // "readyState" property. We do not touch tracks created with the
        // default device ID "".
        if (typeof this.getTrack().readyState === 'undefined'
                && typeof this._realDeviceId !== 'undefined'
                && !devices.find(d => d.deviceId === this._realDeviceId)) {
            this._trackEnded = true;
        }
    };

    // Subscribe each created local audio track to the
    // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
    // handling this event for remote tracks (which are handled in RTC.js),
    // because there might be local tracks not attached to a conference.
    if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
        this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);
        RTCUtils.addListener(
            RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
            this._onAudioOutputDeviceChanged);
    }
    RTCUtils.addListener(
        RTCEvents.DEVICE_LIST_CHANGED,
        this._onDeviceListChanged);
    this._initNoDataFromSourceHandlers();
}

/* eslint-enable max-params */

JitsiLocalTrack.prototype = Object.create(JitsiTrack.prototype);
JitsiLocalTrack.prototype.constructor = JitsiLocalTrack;
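
/*
 * Illustrative usage sketch (not part of this module): consumers normally do
 * not call the JitsiLocalTrack constructor directly; instances are created by
 * the RTC layer and are typically obtained through the public
 * JitsiMeetJS.createLocalTracks() entry point. The option names below reflect
 * common lib-jitsi-meet usage and may differ between versions.
 *
 *     JitsiMeetJS.createLocalTracks({
 *         devices: [ 'audio', 'video' ],
 *         facingMode: 'user'
 *     }).then(tracks => {
 *         // Each element is a JitsiLocalTrack (audio or video).
 *         tracks.forEach(track => console.log(track.toString()));
 *     }).catch(error => {
 *         // error is a JitsiTrackError (e.g. permission denied).
 *         console.error('Failed to create local tracks', error);
 *     });
 */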

/**
 * Returns whether the associated MediaStreamTrack is in the 'ended' state.
 * @returns {boolean}
 */
JitsiLocalTrack.prototype.isEnded = function() {
    return this.getTrack().readyState === 'ended' || this._trackEnded;
};

/**
 * Sets handlers on the MediaStreamTrack object that will detect camera
 * issues.
 */
JitsiLocalTrack.prototype._initNoDataFromSourceHandlers = function() {
    if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
        const _onNoDataFromSourceError
            = this._onNoDataFromSourceError.bind(this);

        this._setHandler('track_mute', () => {
            if (this._checkForCameraIssues()) {
                const now = window.performance.now();

                this._noDataFromSourceTimeout
                    = setTimeout(_onNoDataFromSourceError, 3000);
                this._setHandler('track_unmute', () => {
                    this._clearNoDataFromSourceMuteResources();
                    Statistics.sendEventToAll(
                        `${this.getType()}.track_unmute`,
                        { value: window.performance.now() - now });
                });
            }
        });
        this._setHandler('track_ended', _onNoDataFromSourceError);
    }
};

/**
 * Clears all timeouts and handlers set on the MediaStreamTrack mute event.
 * FIXME: Change the name of the method to a better one.
 */
JitsiLocalTrack.prototype._clearNoDataFromSourceMuteResources = function() {
    if (this._noDataFromSourceTimeout) {
        clearTimeout(this._noDataFromSourceTimeout);
        this._noDataFromSourceTimeout = null;
    }
    this._setHandler('track_unmute', undefined);
};

/**
 * Called when a potential camera issue is detected. Clears the handlers and
 * timeouts set on the MediaStreamTrack muted event. Verifies that the camera
 * issue persists and fires the NO_DATA_FROM_SOURCE event.
 */
JitsiLocalTrack.prototype._onNoDataFromSourceError = function() {
    this._clearNoDataFromSourceMuteResources();
    if (this._checkForCameraIssues()) {
        this._fireNoDataFromSourceEvent();
    }
};

/**
 * Fires JitsiTrackEvents.NO_DATA_FROM_SOURCE and logs it to analytics and
 * callstats.
 */
JitsiLocalTrack.prototype._fireNoDataFromSourceEvent = function() {
    this.eventEmitter.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
    const eventName = `${this.getType()}.no_data_from_source`;

    Statistics.analytics.sendEvent(eventName);
    const log = { name: eventName };

    if (this.isAudioTrack()) {
        log.isReceivingData = this._isReceivingData();
    }
    Statistics.sendLog(JSON.stringify(log));
};

/**
 * Sets the real device ID by comparing track information with device
 * information. This is a temporary solution until the getConstraints() method
 * is implemented in browsers.
 * @param {MediaDeviceInfo[]} devices - list of devices obtained from the
 * enumerateDevices() call
 */
JitsiLocalTrack.prototype._setRealDeviceIdFromDeviceList = function(devices) {
    const track = this.getTrack();

    // FIXME for Temasys video track, label refers to the id, not the actual
    // device
    const device
        = devices.find(
            d => d.kind === `${track.kind}input` && d.label === track.label);

    if (device) {
        this._realDeviceId = device.deviceId;
    }
};

/**
 * Sets the stream property of the JitsiLocalTrack object and sets all stored
 * handlers on it.
 * @param {MediaStream} stream the new stream.
 */
JitsiLocalTrack.prototype._setStream = function(stream) {
    JitsiTrack.prototype._setStream.call(this, stream);

    // Store the MSID for video mute/unmute purposes
    if (stream) {
        this.storedMSID = this.getMSID();
        logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
    } else {
        logger.debug(`Setting 'null' stream on ${this}`);
    }
};

/**
 * Mutes the track. Will reject the Promise if there is a mute/unmute
 * operation in progress.
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.mute = function() {
    return createMuteUnmutePromise(this, true);
};

/**
 * Unmutes the track. Will reject the Promise if there is a mute/unmute
 * operation in progress.
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.unmute = function() {
    return createMuteUnmutePromise(this, false);
};
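
/*
 * Illustrative usage sketch (not part of this module): mute() and unmute()
 * return Promises and reject with a JitsiTrackError carrying the
 * TRACK_MUTE_UNMUTE_IN_PROGRESS error if a previous mute/unmute operation has
 * not finished yet. The localVideoTrack variable is a placeholder for an
 * application-held JitsiLocalTrack instance.
 *
 *     localVideoTrack.mute()
 *         .then(() => console.log('muted:', localVideoTrack.isMuted()))
 *         .catch(error => {
 *             if (error.name
 *                     === JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS) {
 *                 console.warn('Previous mute/unmute still in progress');
 *             }
 *         });
 */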

/**
 * Creates a Promise for the mute/unmute operation.
 *
 * @param {JitsiLocalTrack} track - The track that will be muted/unmuted.
 * @param {boolean} mute - Whether to mute or unmute the track.
 * @returns {Promise}
 */
function createMuteUnmutePromise(track, mute) {
    if (track.inMuteOrUnmuteProgress) {
        return Promise.reject(
            new JitsiTrackError(JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS)
        );
    }
    track.inMuteOrUnmuteProgress = true;

    return track._setMute(mute)
        .then(() => {
            track.inMuteOrUnmuteProgress = false;
        })
        .catch(status => {
            track.inMuteOrUnmuteProgress = false;
            throw status;
        });
}

/**
 * Mutes / unmutes the track.
 *
 * @param {boolean} mute - If true the track will be muted. Otherwise the track
 * will be unmuted.
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._setMute = function(mute) {
    if (this.isMuted() === mute) {
        return Promise.resolve();
    }

    let promise = Promise.resolve();
    const self = this;

    // A local track can be used out of a conference, so we need to handle
    // that case and mark whether the track should start muted or not when
    // added to a conference.
    // Pawel: track's muted status should be taken into account when track is
    // being added to the conference/JingleSessionPC/TraceablePeerConnection.
    // There's no need to add such fields. It is logical that when muted track
    // is being added to a conference it "starts muted"...
    if (!this.conference || !this.conference.room) {
        this.startMuted = mute;
    }
    this.dontFireRemoveEvent = false;

    // A function that will print info about the muted status transition
    const logMuteInfo = () => logger.info(`Mute ${this}: ${mute}`);

    if (this.isAudioTrack()
            || this.videoType === VideoType.DESKTOP
            || !RTCBrowserType.doesVideoMuteByStreamRemove()) {
        logMuteInfo();
        if (this.track) {
            this.track.enabled = !mute;
        }
    } else if (mute) {
        this.dontFireRemoveEvent = true;
        promise = new Promise((resolve, reject) => {
            logMuteInfo();
            this._removeStreamFromConferenceAsMute(() => {
                // FIXME: Maybe here we should set the SRC for the containers
                // to something
                this._stopMediaStream();
                this._setStream(null);
                resolve();
            }, err => {
                reject(err);
            });
        });
    } else {
        logMuteInfo();

        // This path is only for camera.
        const streamOptions = {
            cameraDeviceId: this.getDeviceId(),
            devices: [ MediaType.VIDEO ],
            facingMode: this.getCameraFacingMode()
        };

        if (this.resolution) {
            streamOptions.resolution = this.resolution;
        }
        promise = RTCUtils.obtainAudioAndVideoPermissions(streamOptions)
            .then(streamsInfo => {
                const mediaType = self.getType();
                const streamInfo
                    = streamsInfo.find(info => info.mediaType === mediaType);

                if (streamInfo) {
                    self._setStream(streamInfo.stream);
                    self.track = streamInfo.track;

                    // This is not good when the video type changes after
                    // unmute, but let's not crash here
                    if (self.videoType !== streamInfo.videoType) {
                        logger.warn(
                            `${this}: video type has changed after unmute!`,
                            self.videoType, streamInfo.videoType);
                        self.videoType = streamInfo.videoType;
                    }
                } else {
                    throw new JitsiTrackError(
                        JitsiTrackErrors.TRACK_NO_STREAM_FOUND);
                }

                self.containers
                    = self.containers.map(
                        cont => RTCUtils.attachMediaStream(cont, self.stream));

                return self._addStreamToConferenceAsUnmute();
            });
    }

    return promise
        .then(() => this._sendMuteStatus(mute))
        .then(() => {
            this.eventEmitter.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
        });
};

/**
 * Adds the stream to the conference and marks it as an "unmute" operation.
 *
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._addStreamToConferenceAsUnmute = function() {
    if (!this.conference) {
        return Promise.resolve();
    }

    // FIXME it would be good to not include the conference as part of this
    // process. Only TraceablePeerConnections to which the track is attached
    // should care about this action. The TPCs to which the track is not
    // attached can sync up when the track is re-attached.
    // A problem with that is that the "modify sources" queue is part of
    // the JingleSessionPC and it would be excluded from the process. One
    // solution would be to extract a class between TPC and JingleSessionPC
    // which would contain the queue and would notify the signaling layer when
    // local SSRCs are changed. This would help to separate XMPP from the RTC
    // module.
    return new Promise((resolve, reject) => {
        this.conference._addLocalTrackAsUnmute(this)
            .then(resolve, error => reject(new Error(error)));
    });
};

/**
 * Removes the stream from the conference and marks it as a "mute" operation.
 * @param {Function} successCallback will be called on success
 * @param {Function} errorCallback will be called on error
 * @private
 */
JitsiLocalTrack.prototype._removeStreamFromConferenceAsMute
    = function(successCallback, errorCallback) {
        if (!this.conference) {
            successCallback();

            return;
        }
        this.conference._removeLocalTrackAsMute(this).then(
            successCallback,
            error => errorCallback(new Error(error)));
    };

/**
 * Sends the mute status of the track to the conference, if any.
 *
 * @param {boolean} mute - If the track is muted.
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._sendMuteStatus = function(mute) {
    if (!this.conference || !this.conference.room) {
        return Promise.resolve();
    }

    return new Promise(resolve => {
        this.conference.room[
            this.isAudioTrack()
                ? 'setAudioMute'
                : 'setVideoMute'](mute, resolve);
    });
};

/**
 * @inheritdoc
 *
 * Stops sending the media track and removes it from the HTML.
 * NOTE: Works for local tracks only.
 *
 * @extends JitsiTrack#dispose
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.dispose = function() {
    const self = this;
    let promise = Promise.resolve();

    if (this.conference) {
        promise = this.conference.removeTrack(this);
    }

    if (this.stream) {
        this._stopMediaStream();
        this.detach();
    }

    RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED,
        this._onDeviceListChanged);

    if (this._onAudioOutputDeviceChanged) {
        RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
            this._onAudioOutputDeviceChanged);
    }

    return promise
        .then(() => JitsiTrack.prototype.dispose.call(self) // super.dispose();
        );
};
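
/*
 * Illustrative usage sketch (not part of this module): a typical hang-up path
 * disposes every local track after leaving the conference. The conference and
 * localTracks variables are placeholders for the application's own state.
 *
 *     conference.leave()
 *         .then(() => Promise.all(localTracks.map(track => track.dispose())))
 *         .then(() => console.log('All local tracks disposed'));
 */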

/**
 * Returns <tt>true</tt> if the stream is muted and <tt>false</tt> otherwise.
 * @returns {boolean} <tt>true</tt> if the stream is muted and <tt>false</tt>
 * otherwise.
 */
JitsiLocalTrack.prototype.isMuted = function() {
    // this.stream will be null when we mute local video on Chrome
    if (!this.stream) {
        return true;
    }
    if (this.isVideoTrack() && !this.isActive()) {
        return true;
    }

    return !this.track || !this.track.enabled;
};

/**
 * Sets the JitsiConference object associated with the track. This is a
 * temporary solution.
 * @param conference the JitsiConference object
 */
JitsiLocalTrack.prototype._setConference = function(conference) {
    this.conference = conference;

    // We want to keep up with postponed events which should have been fired
    // on the "attach" call, but for a local track we do not always have the
    // conference before attaching. However this may result in duplicated
    // events if they have been triggered on "attach" already.
    for (let i = 0; i < this.containers.length; i++) {
        this._maybeFireTrackAttached(this.containers[i]);
    }
};

/**
 * Returns <tt>true</tt>.
 * @returns {boolean} <tt>true</tt>
 */
JitsiLocalTrack.prototype.isLocal = function() {
    return true;
};

/**
 * Returns the device id associated with the track.
 * @returns {string}
 */
JitsiLocalTrack.prototype.getDeviceId = function() {
    return this._realDeviceId || this.deviceId;
};

/**
 * Returns the participant id which owns the track.
 * @returns {string} the id of the participant. It corresponds to the Colibri
 * endpoint id/MUC nickname in case of Jitsi-meet.
 */
JitsiLocalTrack.prototype.getParticipantId = function() {
    return this.conference && this.conference.myUserId();
};

/**
 * Sets the value of the 'bytes sent' statistic.
 * @param {TraceablePeerConnection} tpc the source of the "bytes sent" stat
 * @param {number} bytesSent the new value
 * NOTE: used only for audio tracks to detect audio issues.
 */
JitsiLocalTrack.prototype._setByteSent = function(tpc, bytesSent) {
    this._bytesSent = bytesSent;
    const iceConnectionState = tpc.getConnectionState();

    if (this._testByteSent && iceConnectionState === 'connected') {
        setTimeout(() => {
            if (this._bytesSent <= 0) {
                logger.warn(`${this} 'bytes sent' <= 0: ${this._bytesSent}`);

                // we are not receiving anything from the microphone
                this._fireNoDataFromSourceEvent();
            }
        }, 3000);
        this._testByteSent = false;
    }
};

/**
 * Returns the facing mode for a video track obtained from a camera. For other
 * cases (e.g. an audio track or a 'desktop' video track) returns undefined.
 *
 * @returns {CameraFacingMode|undefined}
 */
JitsiLocalTrack.prototype.getCameraFacingMode = function() {
    if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
        // MediaStreamTrack#getSettings() is not implemented in many browsers,
        // so we need feature checking here. Progress on the respective
        // browser's implementation can be tracked at
        // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
        // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
        // for Firefox. Even if a browser implements getSettings() already, it
        // might still not return anything for 'facingMode'.
        let trackSettings;

        try {
            trackSettings = this.track.getSettings();
        } catch (e) {
            // XXX React-native-webrtc, for example, defines
            // MediaStreamTrack#getSettings() but the implementation throws a
            // "Not implemented" Error.
        }
        if (trackSettings && 'facingMode' in trackSettings) {
            return trackSettings.facingMode;
        }

        if (typeof this._facingMode !== 'undefined') {
            return this._facingMode;
        }

        // In most cases we are showing a webcam. So if we've gotten here, it
        // should be relatively safe to assume that we are probably showing
        // the user-facing camera.
        return CameraFacingMode.USER;
    }

    return undefined;
};
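
/*
 * Illustrative usage sketch (not part of this module): an application can use
 * the reported facing mode to decide whether to mirror the local self-view.
 * The localVideoTrack and videoElement variables are placeholders for the
 * application's own track and <video> element.
 *
 *     const facingMode = localVideoTrack.getCameraFacingMode();
 *
 *     // Mirror only the user-facing camera; environment-facing cameras and
 *     // non-camera tracks (facingMode === undefined) are rendered as-is.
 *     videoElement.style.transform
 *         = facingMode === CameraFacingMode.USER ? 'scaleX(-1)' : '';
 */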

/**
 * Stops the associated MediaStream.
 */
JitsiLocalTrack.prototype._stopMediaStream = function() {
    this.stopStreamInProgress = true;
    RTCUtils.stopMediaStream(this.stream);
    this.stopStreamInProgress = false;
};

/**
 * Detects camera issues on ended and mute events from the MediaStreamTrack.
 * @returns {boolean} true if an issue is detected and false otherwise
 */
JitsiLocalTrack.prototype._checkForCameraIssues = function() {
    if (!this.isVideoTrack() || this.stopStreamInProgress
            || this.videoType === VideoType.DESKTOP) {
        return false;
    }

    return !this._isReceivingData();
};

/**
 * Checks whether the attached MediaStream is receiving data from its source
 * or not. If the stream property is null (because of mute or another reason),
 * this method will return false.
 * NOTE: This method doesn't indicate a problem with the streams directly. For
 * example, in the case of video mute, or if the user has disposed the track,
 * the method will return false.
 * @returns {boolean} true if the stream is receiving data and false otherwise.
 */
JitsiLocalTrack.prototype._isReceivingData = function() {
    if (!this.stream) {
        return false;
    }

    // In older versions of the spec there is no muted property and
    // readyState can have the value "muted". In the latest versions
    // readyState can have the values "live" and "ended" and there is a
    // muted boolean property. If the stream is muted that means that
    // we aren't receiving any data from the source. We want to notify
    // the users of an error if the stream is muted or ended on its
    // creation.
    return this.stream.getTracks().some(track =>
        (!('readyState' in track) || track.readyState === 'live')
            && (!('muted' in track) || track.muted !== true));
};

/**
 * Creates a text representation of this local track instance.
 * @return {string}
 */
JitsiLocalTrack.prototype.toString = function() {
    return `LocalTrack[${this.rtcId},${this.getType()}]`;
};

module.exports = JitsiLocalTrack;