You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

JitsiLocalTrack.js 24KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711
  1. /* global __filename, Promise */
  2. import CameraFacingMode from '../../service/RTC/CameraFacingMode';
  3. import { getLogger } from 'jitsi-meet-logger';
  4. import JitsiTrack from './JitsiTrack';
  5. import JitsiTrackError from '../../JitsiTrackError';
  6. import * as JitsiTrackErrors from '../../JitsiTrackErrors';
  7. import * as JitsiTrackEvents from '../../JitsiTrackEvents';
  8. import * as MediaType from '../../service/RTC/MediaType';
  9. import RTCBrowserType from './RTCBrowserType';
  10. import RTCEvents from '../../service/RTC/RTCEvents';
  11. import RTCUtils from './RTCUtils';
  12. import Statistics from '../statistics/statistics';
  13. import VideoType from '../../service/RTC/VideoType';
  14. const logger = getLogger(__filename);
  15. /**
  16. * Creates Promise for mute/unmute operation.
  17. *
  18. * @param {JitsiLocalTrack} track - The track that will be muted/unmuted.
  19. * @param {boolean} mute - Whether to mute or unmute the track.
  20. * @returns {Promise}
  21. */
  22. function createMuteUnmutePromise(track, mute) {
  23. if (track.inMuteOrUnmuteProgress) {
  24. return Promise.reject(
  25. new JitsiTrackError(JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS)
  26. );
  27. }
  28. track.inMuteOrUnmuteProgress = true;
  29. return track._setMute(mute)
  30. .then(() => {
  31. track.inMuteOrUnmuteProgress = false;
  32. })
  33. .catch(status => {
  34. track.inMuteOrUnmuteProgress = false;
  35. throw status;
  36. });
  37. }
/**
 * Represents a single local media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 */
export default class JitsiLocalTrack extends JitsiTrack {
    /* eslint-disable max-params */

    /**
     * Constructs a new JitsiLocalTrack instance.
     * @param {number} rtcId the ID assigned by the RTC module
     * @param stream WebRTC MediaStream, parent of the track
     * @param track underlying WebRTC MediaStreamTrack for new JitsiRemoteTrack
     * @param mediaType the MediaType of the JitsiRemoteTrack
     * @param videoType the VideoType of the JitsiRemoteTrack
     * @param resolution the video resolution if it's a video track
     * @param deviceId the ID of the local device for this track
     * @param facingMode the camera facing mode used in getUserMedia call
     * @constructor
     */
    constructor(
            rtcId,
            stream,
            track,
            mediaType,
            videoType,
            resolution,
            deviceId,
            facingMode) {
        super(
            null /* RTC */,
            stream,
            track,
            // inactiveHandler: fired when the underlying MediaStreamTrack
            // ends. The LOCAL_TRACK_STOPPED event is suppressed exactly once
            // when dontFireRemoveEvent is set (i.e. during a deliberate
            // mute-by-stream-removal, see _setMute).
            () => {
                if (!this.dontFireRemoveEvent) {
                    this.emit(JitsiTrackEvents.LOCAL_TRACK_STOPPED);
                }
                this.dontFireRemoveEvent = false;
            } /* inactiveHandler */,
            mediaType,
            videoType);

        /**
         * The ID assigned by the RTC module on instance creation.
         * @type {number}
         */
        this.rtcId = rtcId;

        // When true the next "inactive"/ended event will not emit
        // LOCAL_TRACK_STOPPED (see the inactiveHandler above).
        this.dontFireRemoveEvent = false;
        this.resolution = resolution;

        // FIXME: currently firefox is ignoring our constraints about
        // resolutions so we do not store it, to avoid wrong reporting of local
        // track resolution
        if (RTCBrowserType.isFirefox()) {
            this.resolution = null;
        }
        this.deviceId = deviceId;

        // MSID remembered for video mute/unmute purposes (the stream is
        // replaced on unmute, see _setStream).
        this.storedMSID = this.getMSID();

        // Guards against concurrent mute/unmute transitions
        // (see createMuteUnmutePromise).
        this.inMuteOrUnmuteProgress = false;

        /**
         * The facing mode of the camera from which this JitsiLocalTrack
         * instance was obtained.
         *
         * @private
         * @type {CameraFacingMode|undefined}
         */
        this._facingMode = facingMode;

        // Currently there is no way to know the MediaStreamTrack ended due to
        // to device disconnect in Firefox through e.g. "readyState" property.
        // Instead we will compare current track's label with device labels from
        // enumerateDevices() list.
        this._trackEnded = false;

        /**
         * The value of bytes sent received from the statistics module.
         */
        this._bytesSent = null;

        /**
         * Used only for detection of audio problems. We want to check only once
         * whether the track is sending bytes ot not. This flag is set to false
         * after the check.
         */
        this._testByteSent = true;

        // Currently there is no way to determine with what device track was
        // created (until getConstraints() support), however we can associate
        // tracks with real devices obtained from enumerateDevices() call as
        // soon as it's called.
        this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

        /**
         * Indicates that we have called RTCUtils.stopMediaStream for the
         * MediaStream related to this JitsiTrack object.
         */
        this.stopStreamInProgress = false;

        /**
         * On mute event we are waiting for 3s to check if the stream is going
         * to be still muted before firing the event for camera issue detected
         * (NO_DATA_FROM_SOURCE).
         */
        this._noDataFromSourceTimeout = null;

        this._onDeviceListChanged = devices => {
            this._setRealDeviceIdFromDeviceList(devices);

            // Mark track as ended for those browsers that do not support
            // "readyState" property. We do not touch tracks created with
            // default device ID "".
            if (typeof this.getTrack().readyState === 'undefined'
                    && typeof this._realDeviceId !== 'undefined'
                    && !devices.find(d => d.deviceId === this._realDeviceId)) {
                this._trackEnded = true;
            }
        };

        // Subscribe each created local audio track to
        // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
        // handling this event for remote tracks (which are handled in RTC.js),
        // because there might be local tracks not attached to a conference.
        if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
            this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);
            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }
        RTCUtils.addListener(
            RTCEvents.DEVICE_LIST_CHANGED,
            this._onDeviceListChanged);
        this._initNoDataFromSourceHandlers();
    }

    /* eslint-enable max-params */
  159. /**
  160. * Returns if associated MediaStreamTrack is in the 'ended' state
  161. * @returns {boolean}
  162. */
  163. isEnded() {
  164. return this.getTrack().readyState === 'ended' || this._trackEnded;
  165. }
  166. /**
  167. * Sets handlers to the MediaStreamTrack object that will detect camera
  168. * issues.
  169. */
  170. _initNoDataFromSourceHandlers() {
  171. if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
  172. const _onNoDataFromSourceError
  173. = this._onNoDataFromSourceError.bind(this);
  174. this._setHandler('track_mute', () => {
  175. if (this._checkForCameraIssues()) {
  176. const now = window.performance.now();
  177. this._noDataFromSourceTimeout
  178. = setTimeout(_onNoDataFromSourceError, 3000);
  179. this._setHandler('track_unmute', () => {
  180. this._clearNoDataFromSourceMuteResources();
  181. Statistics.sendEventToAll(
  182. `${this.getType()}.track_unmute`,
  183. { value: window.performance.now() - now });
  184. });
  185. }
  186. });
  187. this._setHandler('track_ended', _onNoDataFromSourceError);
  188. }
  189. }
  190. /**
  191. * Clears all timeouts and handlers set on MediaStreamTrack mute event.
  192. * FIXME: Change the name of the method with better one.
  193. */
  194. _clearNoDataFromSourceMuteResources() {
  195. if (this._noDataFromSourceTimeout) {
  196. clearTimeout(this._noDataFromSourceTimeout);
  197. this._noDataFromSourceTimeout = null;
  198. }
  199. this._setHandler('track_unmute', undefined);
  200. }
  201. /**
  202. * Called when potential camera issue is detected. Clears the handlers and
  203. * timeouts set on MediaStreamTrack muted event. Verifies that the camera
  204. * issue persists and fires NO_DATA_FROM_SOURCE event.
  205. */
  206. _onNoDataFromSourceError() {
  207. this._clearNoDataFromSourceMuteResources();
  208. if (this._checkForCameraIssues()) {
  209. this._fireNoDataFromSourceEvent();
  210. }
  211. }
  212. /**
  213. * Fires JitsiTrackEvents.NO_DATA_FROM_SOURCE and logs it to analytics and
  214. * callstats.
  215. */
  216. _fireNoDataFromSourceEvent() {
  217. this.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
  218. const eventName = `${this.getType()}.no_data_from_source`;
  219. Statistics.analytics.sendEvent(eventName);
  220. const log = { name: eventName };
  221. if (this.isAudioTrack()) {
  222. log.isReceivingData = this._isReceivingData();
  223. }
  224. Statistics.sendLog(JSON.stringify(log));
  225. }
  226. /**
  227. * Sets real device ID by comparing track information with device
  228. * information. This is temporary solution until getConstraints() method
  229. * will be implemented in browsers.
  230. * @param {MediaDeviceInfo[]} devices - list of devices obtained from
  231. * enumerateDevices() call
  232. */
  233. _setRealDeviceIdFromDeviceList(devices) {
  234. const track = this.getTrack();
  235. // FIXME for temasys video track, label refers to id not the actual
  236. // device
  237. const device = devices.find(
  238. d => d.kind === `${track.kind}input` && d.label === track.label);
  239. if (device) {
  240. this._realDeviceId = device.deviceId;
  241. }
  242. }
  243. /**
  244. * Sets the stream property of JitsiLocalTrack object and sets all stored
  245. * handlers to it.
  246. * @param {MediaStream} stream the new stream.
  247. */
  248. _setStream(stream) {
  249. super._setStream(stream);
  250. // Store the MSID for video mute/unmute purposes
  251. if (stream) {
  252. this.storedMSID = this.getMSID();
  253. logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
  254. } else {
  255. logger.debug(`Setting 'null' stream on ${this}`);
  256. }
  257. }
  258. /**
  259. * Mutes the track. Will reject the Promise if there is mute/unmute
  260. * operation in progress.
  261. * @returns {Promise}
  262. */
  263. mute() {
  264. return createMuteUnmutePromise(this, true);
  265. }
  266. /**
  267. * Unmutes the track. Will reject the Promise if there is mute/unmute
  268. * operation in progress.
  269. * @returns {Promise}
  270. */
  271. unmute() {
  272. return createMuteUnmutePromise(this, false);
  273. }
    /**
     * Mutes / unmutes the track.
     *
     * Three strategies are used depending on the track:
     * - audio, desktop video, or browsers that do not mute video by stream
     *   removal: toggle MediaStreamTrack.enabled;
     * - camera mute on stream-removal browsers: remove the stream from the
     *   conference and stop it;
     * - camera unmute on stream-removal browsers: obtain a brand new stream
     *   via getUserMedia and add it back to the conference.
     *
     * @param {boolean} mute - If true the track will be muted. Otherwise the
     * track will be unmuted.
     * @private
     * @returns {Promise}
     */
    _setMute(mute) {
        // Already in the requested state — nothing to do.
        if (this.isMuted() === mute) {
            return Promise.resolve();
        }

        let promise = Promise.resolve();

        this.dontFireRemoveEvent = false;

        // A function that will print info about muted status transition
        const logMuteInfo = () => logger.info(`Mute ${this}: ${mute}`);

        if (this.isAudioTrack()
                || this.videoType === VideoType.DESKTOP
                || !RTCBrowserType.doesVideoMuteByStreamRemove()) {
            // Strategy 1: simply toggle the enabled flag.
            logMuteInfo();
            if (this.track) {
                this.track.enabled = !mute;
            }
        } else if (mute) {
            // Strategy 2: mute camera by removing and stopping the stream.
            // Suppress LOCAL_TRACK_STOPPED for this deliberate stop (see the
            // inactiveHandler installed by the constructor).
            this.dontFireRemoveEvent = true;
            promise = new Promise((resolve, reject) => {
                logMuteInfo();
                this._removeStreamFromConferenceAsMute(() => {
                    // FIXME: Maybe here we should set the SRC for the
                    // containers to something
                    this._stopMediaStream();
                    this._setStream(null);
                    resolve();
                }, err => {
                    reject(err);
                });
            });
        } else {
            logMuteInfo();

            // Strategy 3 — this path is only for camera: re-acquire a stream.
            const streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                facingMode: this.getCameraFacingMode()
            };

            if (this.resolution) {
                streamOptions.resolution = this.resolution;
            }
            promise = RTCUtils.obtainAudioAndVideoPermissions(streamOptions)
                .then(streamsInfo => {
                    const mediaType = this.getType();
                    const streamInfo = streamsInfo.find(
                        info => info.mediaType === mediaType);

                    if (streamInfo) {
                        this._setStream(streamInfo.stream);
                        this.track = streamInfo.track;

                        // This is not good when video type changes after
                        // unmute, but let's not crash here
                        if (this.videoType !== streamInfo.videoType) {
                            logger.warn(
                                `${this}: video type has changed after unmute!`,
                                this.videoType, streamInfo.videoType);
                            this.videoType = streamInfo.videoType;
                        }
                    } else {
                        throw new JitsiTrackError(
                            JitsiTrackErrors.TRACK_NO_STREAM_FOUND);
                    }

                    // Re-attach the new stream to every container the old one
                    // was attached to.
                    this.containers = this.containers.map(
                        cont => RTCUtils.attachMediaStream(cont, this.stream));

                    return this._addStreamToConferenceAsUnmute();
                });
        }

        // Regardless of strategy: signal the mute status to the conference
        // and notify listeners.
        return promise
            .then(() => this._sendMuteStatus(mute))
            .then(() => {
                this.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED, this);
            });
    }
  353. /**
  354. * Adds stream to conference and marks it as "unmute" operation.
  355. *
  356. * @private
  357. * @returns {Promise}
  358. */
  359. _addStreamToConferenceAsUnmute() {
  360. if (!this.conference) {
  361. return Promise.resolve();
  362. }
  363. // FIXME it would be good to not included conference as part of this
  364. // process. Only TraceablePeerConnections to which the track is attached
  365. // should care about this action. The TPCs to which the track is not
  366. // attached can sync up when track is re-attached.
  367. // A problem with that is that the "modify sources" queue is part of
  368. // the JingleSessionPC and it would be excluded from the process. One
  369. // solution would be to extract class between TPC and JingleSessionPC
  370. // which would contain the queue and would notify the signaling layer
  371. // when local SSRCs are changed. This would help to separate XMPP from
  372. // the RTC module.
  373. return new Promise((resolve, reject) => {
  374. this.conference._addLocalTrackAsUnmute(this)
  375. .then(resolve, error => reject(new Error(error)));
  376. });
  377. }
  378. /**
  379. * Removes stream from conference and marks it as "mute" operation.
  380. * @param {Function} successCallback will be called on success
  381. * @param {Function} errorCallback will be called on error
  382. * @private
  383. */
  384. _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
  385. if (!this.conference) {
  386. successCallback();
  387. return;
  388. }
  389. this.conference._removeLocalTrackAsMute(this).then(
  390. successCallback,
  391. error => errorCallback(new Error(error)));
  392. }
  393. /**
  394. * Sends mute status for a track to conference if any.
  395. *
  396. * @param {boolean} mute - If track is muted.
  397. * @private
  398. * @returns {Promise}
  399. */
  400. _sendMuteStatus(mute) {
  401. if (!this.conference || !this.conference.room) {
  402. return Promise.resolve();
  403. }
  404. return new Promise(resolve => {
  405. this.conference.room[
  406. this.isAudioTrack()
  407. ? 'setAudioMute'
  408. : 'setVideoMute'](mute, resolve);
  409. });
  410. }
  411. /**
  412. * @inheritdoc
  413. *
  414. * Stops sending the media track. And removes it from the HTML.
  415. * NOTE: Works for local tracks only.
  416. *
  417. * @extends JitsiTrack#dispose
  418. * @returns {Promise}
  419. */
  420. dispose() {
  421. let promise = Promise.resolve();
  422. if (this.conference) {
  423. promise = this.conference.removeTrack(this);
  424. }
  425. if (this.stream) {
  426. this._stopMediaStream();
  427. this.detach();
  428. }
  429. RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED,
  430. this._onDeviceListChanged);
  431. if (this._onAudioOutputDeviceChanged) {
  432. RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
  433. this._onAudioOutputDeviceChanged);
  434. }
  435. return promise.then(() => super.dispose());
  436. }
  437. /**
  438. * Returns <tt>true</tt> - if the stream is muted
  439. * and <tt>false</tt> otherwise.
  440. * @returns {boolean} <tt>true</tt> - if the stream is muted
  441. * and <tt>false</tt> otherwise.
  442. */
  443. isMuted() {
  444. // this.stream will be null when we mute local video on Chrome
  445. if (!this.stream) {
  446. return true;
  447. }
  448. if (this.isVideoTrack() && !this.isActive()) {
  449. return true;
  450. }
  451. return !this.track || !this.track.enabled;
  452. }
  453. /**
  454. * Sets the JitsiConference object associated with the track. This is temp
  455. * solution.
  456. * @param conference the JitsiConference object
  457. */
  458. _setConference(conference) {
  459. this.conference = conference;
  460. // We want to keep up with postponed events which should have been fired
  461. // on "attach" call, but for local track we not always have the
  462. // conference before attaching. However this may result in duplicated
  463. // events if they have been triggered on "attach" already.
  464. for (let i = 0; i < this.containers.length; i++) {
  465. this._maybeFireTrackAttached(this.containers[i]);
  466. }
  467. }
  468. /**
  469. * Returns <tt>true</tt>.
  470. * @returns {boolean} <tt>true</tt>
  471. */
  472. isLocal() {
  473. return true;
  474. }
  475. /**
  476. * Returns device id associated with track.
  477. * @returns {string}
  478. */
  479. getDeviceId() {
  480. return this._realDeviceId || this.deviceId;
  481. }
  482. /**
  483. * Returns the participant id which owns the track.
  484. * @returns {string} the id of the participants. It corresponds to the
  485. * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
  486. */
  487. getParticipantId() {
  488. return this.conference && this.conference.myUserId();
  489. }
  490. /**
  491. * Sets the value of bytes sent statistic.
  492. * @param {TraceablePeerConnection} tpc the source of the "bytes sent" stat
  493. * @param {number} bytesSent the new value
  494. * NOTE: used only for audio tracks to detect audio issues.
  495. */
  496. _setByteSent(tpc, bytesSent) {
  497. this._bytesSent = bytesSent;
  498. const iceConnectionState = tpc.getConnectionState();
  499. if (this._testByteSent && iceConnectionState === 'connected') {
  500. setTimeout(() => {
  501. if (this._bytesSent <= 0) {
  502. // FIXME: Remove ${this}
  503. logger.warn(`${this} 'bytes sent' <= 0: \
  504. ${this._bytesSent}`);
  505. // we are not receiving anything from the microphone
  506. this._fireNoDataFromSourceEvent();
  507. }
  508. }, 3000);
  509. this._testByteSent = false;
  510. }
  511. }
  512. /**
  513. * Returns facing mode for video track from camera. For other cases (e.g.
  514. * audio track or 'desktop' video track) returns undefined.
  515. *
  516. * @returns {CameraFacingMode|undefined}
  517. */
  518. getCameraFacingMode() {
  519. if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
  520. // MediaStreamTrack#getSettings() is not implemented in many
  521. // browsers, so we need feature checking here. Progress on the
  522. // respective browser's implementation can be tracked at
  523. // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
  524. // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
  525. // for Firefox. Even if a browser implements getSettings() already,
  526. // it might still not return anything for 'facingMode'.
  527. let trackSettings;
  528. try {
  529. trackSettings = this.track.getSettings();
  530. } catch (e) {
  531. // XXX React-native-webrtc, for example, defines
  532. // MediaStreamTrack#getSettings() but the implementation throws
  533. // a "Not implemented" Error.
  534. }
  535. if (trackSettings && 'facingMode' in trackSettings) {
  536. return trackSettings.facingMode;
  537. }
  538. if (typeof this._facingMode !== 'undefined') {
  539. return this._facingMode;
  540. }
  541. // In most cases we are showing a webcam. So if we've gotten here,
  542. // it should be relatively safe to assume that we are probably
  543. // showing the user-facing camera.
  544. return CameraFacingMode.USER;
  545. }
  546. return undefined;
  547. }
  548. /**
  549. * Stops the associated MediaStream.
  550. */
  551. _stopMediaStream() {
  552. this.stopStreamInProgress = true;
  553. RTCUtils.stopMediaStream(this.stream);
  554. this.stopStreamInProgress = false;
  555. }
  556. /**
  557. * Switches the camera facing mode if the WebRTC implementation supports the
  558. * custom MediaStreamTrack._switchCamera method. Currently, the method in
  559. * question is implemented in react-native-webrtc only. When such a WebRTC
  560. * implementation is executing, the method is the preferred way to switch
  561. * between the front/user-facing and the back/environment-facing cameras
  562. * because it will likely be (as is the case of react-native-webrtc)
  563. * noticeably faster that creating a new MediaStreamTrack via a new
  564. * getUserMedia call with the switched facingMode constraint value.
  565. * Moreover, the approach with a new getUserMedia call may not even work:
  566. * WebRTC on Android and iOS is either very slow to open the camera a second
  567. * time or plainly freezes attempting to do that.
  568. */
  569. _switchCamera() {
  570. if (this.isVideoTrack()
  571. && this.videoType === VideoType.CAMERA
  572. && typeof this.track._switchCamera === 'function') {
  573. this.track._switchCamera();
  574. this._facingMode
  575. = this._facingMode === CameraFacingMode.ENVIRONMENT
  576. ? CameraFacingMode.USER
  577. : CameraFacingMode.ENVIRONMENT;
  578. }
  579. }
  580. /**
  581. * Detects camera issues on ended and mute events from MediaStreamTrack.
  582. * @returns {boolean} true if an issue is detected and false otherwise
  583. */
  584. _checkForCameraIssues() {
  585. if (!this.isVideoTrack() || this.stopStreamInProgress
  586. || this.videoType === VideoType.DESKTOP) {
  587. return false;
  588. }
  589. return !this._isReceivingData();
  590. }
  591. /**
  592. * Checks whether the attached MediaStream is receiving data from source or
  593. * not. If the stream property is null(because of mute or another reason)
  594. * this method will return false.
  595. * NOTE: This method doesn't indicate problem with the streams directly.
  596. * For example in case of video mute the method will return false or if the
  597. * user has disposed the track.
  598. * @returns {boolean} true if the stream is receiving data and false
  599. * this otherwise.
  600. */
  601. _isReceivingData() {
  602. if (!this.stream) {
  603. return false;
  604. }
  605. // In older version of the spec there is no muted property and
  606. // readyState can have value muted. In the latest versions
  607. // readyState can have values "live" and "ended" and there is
  608. // muted boolean property. If the stream is muted that means that
  609. // we aren't receiving any data from the source. We want to notify
  610. // the users for error if the stream is muted or ended on it's
  611. // creation.
  612. return this.stream.getTracks().some(track =>
  613. (!('readyState' in track) || track.readyState === 'live')
  614. && (!('muted' in track) || track.muted !== true));
  615. }
  616. /**
  617. * Creates a text representation of this local track instance.
  618. * @return {string}
  619. */
  620. toString() {
  621. return `LocalTrack[${this.rtcId},${this.getType()}]`;
  622. }
  623. }