modified lib-jitsi-meet dev repo
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

JitsiLocalTrack.js 27KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767
  1. /* global __filename, Promise */
  2. import { getLogger } from 'jitsi-meet-logger';
  3. import JitsiTrack from './JitsiTrack';
  4. import JitsiTrackError from '../../JitsiTrackError';
  5. import {
  6. TRACK_IS_DISPOSED,
  7. TRACK_NO_STREAM_FOUND
  8. } from '../../JitsiTrackErrors';
  9. import {
  10. LOCAL_TRACK_STOPPED,
  11. NO_DATA_FROM_SOURCE,
  12. TRACK_MUTE_CHANGED
  13. } from '../../JitsiTrackEvents';
  14. import browser from '../browser';
  15. import RTCUtils from './RTCUtils';
  16. import CameraFacingMode from '../../service/RTC/CameraFacingMode';
  17. import * as MediaType from '../../service/RTC/MediaType';
  18. import RTCEvents from '../../service/RTC/RTCEvents';
  19. import VideoType from '../../service/RTC/VideoType';
  20. import {
  21. NO_BYTES_SENT,
  22. TRACK_UNMUTED,
  23. createNoDataFromSourceEvent
  24. } from '../../service/statistics/AnalyticsEvents';
  25. import Statistics from '../statistics/statistics';
  26. const logger = getLogger(__filename);
/**
 * Represents a single media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 */
export default class JitsiLocalTrack extends JitsiTrack {
    /**
     * Constructs new JitsiLocalTrack instance.
     *
     * @constructor
     * @param {Object} trackInfo
     * @param {number} trackInfo.rtcId the ID assigned by the RTC module
     * @param trackInfo.stream WebRTC MediaStream, parent of the track
     * @param trackInfo.track underlying WebRTC MediaStreamTrack for new
     * JitsiRemoteTrack
     * @param trackInfo.mediaType the MediaType of the JitsiRemoteTrack
     * @param trackInfo.videoType the VideoType of the JitsiRemoteTrack
     * @param trackInfo.resolution the video resolution if it's a video track
     * @param trackInfo.deviceId the ID of the local device for this track
     * @param trackInfo.facingMode the camera facing mode used in getUserMedia
     * call
     * @param {sourceId} trackInfo.sourceId - The id of the desktop sharing
     * source. NOTE: defined for desktop sharing tracks only.
     * @param trackInfo.sourceType - The type of the desktop sharing source.
     * NOTE(review): stored but otherwise unused here; presumably
     * 'screen'/'window' — confirm against the desktop capture code.
     */
    constructor({
        deviceId,
        facingMode,
        mediaType,
        resolution,
        rtcId,
        sourceId,
        sourceType,
        stream,
        track,
        videoType
    }) {
        super(
            /* conference */ null,
            stream,
            track,
            /* streamInactiveHandler */ () => this.emit(LOCAL_TRACK_STOPPED),
            mediaType,
            videoType);

        /**
         * The ID assigned by the RTC module on instance creation.
         *
         * @type {number}
         */
        this.rtcId = rtcId;

        // Desktop sharing source id/type; defined for desktop tracks only.
        this.sourceId = sourceId;
        this.sourceType = sourceType;

        if (browser.usesNewGumFlow()) {
            // Get the resolution from the track itself because it cannot be
            // certain which resolution webrtc has fallen back to using.
            this.resolution = track.getSettings().height;

            // Cache the constraints of the track in case of any this track
            // model needs to call getUserMedia again, such as when unmuting.
            this._constraints = track.getConstraints();
        } else {
            // FIXME Currently, Firefox is ignoring our constraints about
            // resolutions so we do not store it, to avoid wrong reporting of
            // local track resolution.
            this.resolution = browser.isFirefox() ? null : resolution;
        }

        this.deviceId = deviceId;

        /**
         * The <tt>Promise</tt> which represents the progress of a previously
         * queued/scheduled {@link _setMuted} (from the point of view of
         * {@link _queueSetMuted}).
         *
         * @private
         * @type {Promise}
         */
        this._prevSetMuted = Promise.resolve();

        /**
         * The facing mode of the camera from which this JitsiLocalTrack
         * instance was obtained.
         *
         * @private
         * @type {CameraFacingMode|undefined}
         */
        this._facingMode = facingMode;

        // Currently there is no way to know the MediaStreamTrack ended due
        // to device disconnect in Firefox through e.g. "readyState" property.
        // Instead we will compare current track's label with device labels
        // from enumerateDevices() list.
        this._trackEnded = false;

        /**
         * Indicates whether data has been sent or not.
         */
        this._hasSentData = false;

        /**
         * Used only for detection of audio problems. We want to check only
         * once whether the track is sending data or not. This flag is set to
         * false after the check.
         */
        this._testDataSent = true;

        // Currently there is no way to determine with what device track was
        // created (until getConstraints() support), however we can associate
        // tracks with real devices obtained from enumerateDevices() call as
        // soon as it's called.
        // NOTE: this.deviceId corresponds to the device id specified in GUM
        // constraints and this._realDeviceId seems to correspond to the id of
        // a matching device from the available device list.
        this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

        // Timestamp (window.performance.now()) of the last 'track_mute'
        // event; used to report how long the track stayed muted.
        this._trackMutedTS = 0;

        this._onDeviceListWillChange = devices => {
            const oldRealDeviceId = this._realDeviceId;

            this._setRealDeviceIdFromDeviceList(devices);

            if (

                // Mark track as ended for those browsers that do not support
                // "readyState" property. We do not touch tracks created with
                // default device ID "".
                (typeof this.getTrack().readyState === 'undefined'
                    && typeof this._realDeviceId !== 'undefined'
                    && !devices.find(d => d.deviceId === this._realDeviceId))

                // If there was an associated realDeviceID and after the
                // device change the realDeviceId is undefined then the
                // associated device has been disconnected and the _trackEnded
                // flag needs to be set. In addition on some Chrome versions
                // the readyState property is set after the device change
                // event is triggered which causes issues in jitsi-meet with
                // the selection of a new device because we don't detect that
                // the old one was removed.
                || (typeof oldRealDeviceId !== 'undefined' && typeof this._realDeviceId === 'undefined')
            ) {
                this._trackEnded = true;
            }
        };

        // Subscribe each created local audio track to
        // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
        // handling this event for remote tracks (which are handled in RTC.js),
        // because there might be local tracks not attached to a conference.
        if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
            this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);

            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        RTCUtils.addListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);

        this._initNoDataFromSourceHandlers();
    }

    /**
     * Returns if associated MediaStreamTrack is in the 'ended' state
     *
     * @returns {boolean}
     */
    isEnded() {
        if (this.isVideoTrack() && this.isMuted()) {
            // If a video track is muted the readyState will be ended, that's
            // why we need to rely only on the _trackEnded flag.
            return this._trackEnded;
        }

        return this.getTrack().readyState === 'ended' || this._trackEnded;
    }

    /**
     * Sets handlers to the MediaStreamTrack object that will detect camera
     * issues.
     */
    _initNoDataFromSourceHandlers() {
        if (!this._isNoDataFromSourceEventsEnabled()) {
            return;
        }

        this._setHandler('track_mute', () => {
            // Record when the source went silent so 'track_unmute' can report
            // the outage duration to analytics.
            this._trackMutedTS = window.performance.now();
            this._fireNoDataFromSourceEvent();
        });

        this._setHandler('track_unmute', () => {
            this._fireNoDataFromSourceEvent();
            Statistics.sendAnalyticsAndLog(
                TRACK_UNMUTED,
                {
                    'media_type': this.getType(),
                    'track_type': 'local',
                    value: window.performance.now() - this._trackMutedTS
                });
        });

        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            this._setHandler('track_ended', () => {
                if (!this.isReceivingData()) {
                    this._fireNoDataFromSourceEvent();
                }
            });
        }
    }

    /**
     * Returns true if no data from source events are enabled for this
     * JitsiLocalTrack and false otherwise.
     *
     * @returns {boolean} - True if no data from source events are enabled for
     * this JitsiLocalTrack and false otherwise.
     */
    _isNoDataFromSourceEventsEnabled() {
        // Disable the events for screen sharing.
        return !this.isVideoTrack() || this.videoType !== VideoType.DESKTOP;
    }

    /**
     * Fires NO_DATA_FROM_SOURCE event and logs it to analytics and callstats.
     */
    _fireNoDataFromSourceEvent() {
        const value = !this.isReceivingData();

        this.emit(NO_DATA_FROM_SOURCE, value);

        // FIXME: Should we report all of those events
        Statistics.sendAnalytics(createNoDataFromSourceEvent(this.getType(), value));
        Statistics.sendLog(JSON.stringify({
            name: NO_DATA_FROM_SOURCE,
            log: value
        }));
    }

    /**
     * Sets real device ID by comparing track information with device
     * information. This is temporary solution until getConstraints() method
     * will be implemented in browsers.
     *
     * @param {MediaDeviceInfo[]} devices - list of devices obtained from
     * enumerateDevices() call
     */
    _setRealDeviceIdFromDeviceList(devices) {
        const track = this.getTrack();

        // MediaDeviceInfo.kind is 'audioinput'/'videoinput' while
        // MediaStreamTrack.kind is 'audio'/'video', hence the suffix.
        const kind = `${track.kind}input`;
        let device = devices.find(d => d.kind === kind && d.label === track.label);

        if (!device && this._realDeviceId === 'default') { // the default device has been changed.
            // If the default device was 'A' and the default device is changed
            // to 'B' the label for the track will remain 'Default - A' but
            // the label for the device in the device list will be updated to
            // 'A'. That's why in order to match it we need to remove the
            // 'Default - ' part.
            const label = (track.label || '').replace('Default - ', '');

            device = devices.find(d => d.kind === kind && d.label === label);
        }

        if (device) {
            this._realDeviceId = device.deviceId;
        } else {
            // No matching device: the underlying device was likely removed.
            this._realDeviceId = undefined;
        }
    }

    /**
     * Sets the stream property of JitsiLocalTrack object and sets all stored
     * handlers to it.
     *
     * @param {MediaStream} stream the new stream.
     * @protected
     */
    _setStream(stream) {
        super._setStream(stream);

        if (stream) {
            // Store the MSID for video mute/unmute purposes.
            this.storedMSID = this.getMSID();
            logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
        } else {
            logger.debug(`Setting 'null' stream on ${this}`);
        }
    }

    /**
     * Asynchronously mutes this track.
     *
     * @returns {Promise}
     */
    mute() {
        return this._queueSetMuted(true);
    }

    /**
     * Asynchronously unmutes this track.
     *
     * @returns {Promise}
     */
    unmute() {
        return this._queueSetMuted(false);
    }

    /**
     * Initializes a new Promise to execute {@link #_setMuted}. May be called
     * multiple times in a row and the invocations of {@link #_setMuted} and,
     * consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
     * serialized fashion.
     *
     * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
     * @returns {Promise}
     */
    _queueSetMuted(muted) {
        const setMuted = this._setMuted.bind(this, muted);

        // Chain onto the previous operation regardless of whether it resolved
        // or rejected, so mute/unmute calls are always serialized.
        this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);

        return this._prevSetMuted;
    }

    /**
     * Mutes / unmutes this track.
     *
     * @param {boolean} muted - If <tt>true</tt>, this track will be muted;
     * otherwise, this track will be unmuted.
     * @private
     * @returns {Promise}
     */
    _setMuted(muted) {
        if (this.isMuted() === muted) {
            return Promise.resolve();
        }

        if (this.disposed) {
            return Promise.reject(new JitsiTrackError(TRACK_IS_DISPOSED));
        }

        let promise = Promise.resolve();

        // A function that will print info about muted status transition
        const logMuteInfo = () => logger.info(`Mute ${this}: ${muted}`);

        // Audio (and desktop video, and browsers which don't mute by removing
        // the stream) are muted by simply toggling the track's enabled flag.
        if (this.isAudioTrack()
                || this.videoType === VideoType.DESKTOP
                || !browser.doesVideoMuteByStreamRemove()) {
            logMuteInfo();
            if (this.track) {
                this.track.enabled = !muted;
            }
        } else if (muted) {
            // Camera mute on stream-remove browsers: stop and drop the stream.
            promise = new Promise((resolve, reject) => {
                logMuteInfo();
                this._removeStreamFromConferenceAsMute(
                    () => {
                        // FIXME: Maybe here we should set the SRC for the
                        // containers to something
                        // We don't want any events to be fired on this stream
                        this._unregisterHandlers();
                        this.stopStream();
                        this._setStream(null);
                        resolve();
                    },
                    reject);
            });
        } else {
            logMuteInfo();

            // This path is only for camera.
            const streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                facingMode: this.getCameraFacingMode()
            };

            // Re-acquire the camera via getUserMedia to unmute.
            if (browser.usesNewGumFlow()) {
                promise
                    = RTCUtils.newObtainAudioAndVideoPermissions(Object.assign(
                        {},
                        streamOptions,
                        { constraints: { video: this._constraints } }));
            } else {
                if (this.resolution) {
                    streamOptions.resolution = this.resolution;
                }

                promise
                    = RTCUtils.obtainAudioAndVideoPermissions(streamOptions);
            }

            promise.then(streamsInfo => {
                const mediaType = this.getType();

                // The two GUM flows return differently-shaped stream info
                // entries, so the lookup key differs.
                const streamInfo
                    = browser.usesNewGumFlow()
                        ? streamsInfo.find(
                            info => info.track.kind === mediaType)
                        : streamsInfo.find(
                            info => info.mediaType === mediaType);

                if (streamInfo) {
                    this._setStream(streamInfo.stream);
                    this.track = streamInfo.track;

                    // This is not good when video type changes after
                    // unmute, but let's not crash here
                    if (this.videoType !== streamInfo.videoType) {
                        logger.warn(
                            `${this}: video type has changed after unmute!`,
                            this.videoType, streamInfo.videoType);
                        this.videoType = streamInfo.videoType;
                    }
                } else {
                    throw new JitsiTrackError(TRACK_NO_STREAM_FOUND);
                }

                // NOTE(review): map() used purely for side effects here;
                // forEach would express the intent better.
                this.containers.map(
                    cont => RTCUtils.attachMediaStream(cont, this.stream));

                return this._addStreamToConferenceAsUnmute();
            });
        }

        return promise
            .then(() => this._sendMuteStatus(muted))
            .then(() => this.emit(TRACK_MUTE_CHANGED, this));
    }

    /**
     * Adds stream to conference and marks it as "unmute" operation.
     *
     * @private
     * @returns {Promise}
     */
    _addStreamToConferenceAsUnmute() {
        if (!this.conference) {
            return Promise.resolve();
        }

        // FIXME it would be good to not include conference as part of this
        // process. Only TraceablePeerConnections to which the track is
        // attached should care about this action. The TPCs to which the track
        // is not attached can sync up when track is re-attached.
        // A problem with that is that the "modify sources" queue is part of
        // the JingleSessionPC and it would be excluded from the process. One
        // solution would be to extract class between TPC and JingleSessionPC
        // which would contain the queue and would notify the signaling layer
        // when local SSRCs are changed. This would help to separate XMPP from
        // the RTC module.
        return new Promise((resolve, reject) => {
            this.conference._addLocalTrackAsUnmute(this)
                .then(resolve, error => reject(new Error(error)));
        });
    }

    /**
     * Removes stream from conference and marks it as "mute" operation.
     *
     * @param {Function} successCallback will be called on success
     * @param {Function} errorCallback will be called on error
     * @private
     */
    _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
        if (!this.conference) {
            // No conference attached - nothing to remove from.
            successCallback();

            return;
        }

        this.conference._removeLocalTrackAsMute(this).then(
            successCallback,
            error => errorCallback(new Error(error)));
    }

    /**
     * Sends mute status for a track to conference if any.
     *
     * @param {boolean} mute - If track is muted.
     * @private
     * @returns {Promise}
     */
    _sendMuteStatus(mute) {
        if (!this.conference || !this.conference.room) {
            return Promise.resolve();
        }

        return new Promise(resolve => {
            this.conference.room[
                this.isAudioTrack()
                    ? 'setAudioMute'
                    : 'setVideoMute'](mute, resolve);
        });
    }

    /**
     * @inheritdoc
     *
     * Stops sending the media track. And removes it from the HTML.
     * NOTE: Works for local tracks only.
     *
     * @extends JitsiTrack#dispose
     * @returns {Promise}
     */
    dispose() {
        let promise = Promise.resolve();

        if (this.conference) {
            promise = this.conference.removeTrack(this);
        }

        if (this.stream) {
            this.stopStream();
            this.detach();
        }

        // Unsubscribe from the global RTCUtils events registered in the
        // constructor so this instance can be garbage collected.
        RTCUtils.removeListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);

        if (this._onAudioOutputDeviceChanged) {
            RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        return promise.then(() => super.dispose());
    }

    /**
     * Returns <tt>true</tt> - if the stream is muted and <tt>false</tt>
     * otherwise.
     *
     * @returns {boolean} <tt>true</tt> - if the stream is muted and
     * <tt>false</tt> otherwise.
     */
    isMuted() {
        // this.stream will be null when we mute local video on Chrome
        if (!this.stream) {
            return true;
        }
        if (this.isVideoTrack() && !this.isActive()) {
            return true;
        }

        return !this.track || !this.track.enabled;
    }

    /**
     * Sets the JitsiConference object associated with the track. This is temp
     * solution.
     *
     * @param conference the JitsiConference object
     */
    _setConference(conference) {
        this.conference = conference;

        // We want to keep up with postponed events which should have been
        // fired on "attach" call, but for local track we not always have the
        // conference before attaching. However this may result in duplicated
        // events if they have been triggered on "attach" already.
        for (let i = 0; i < this.containers.length; i++) {
            this._maybeFireTrackAttached(this.containers[i]);
        }
    }

    /**
     * Returns <tt>true</tt>.
     *
     * @returns {boolean} <tt>true</tt>
     */
    isLocal() {
        return true;
    }

    /**
     * Returns device id associated with track.
     *
     * @returns {string}
     */
    getDeviceId() {
        // Prefer the real device id resolved from enumerateDevices(); fall
        // back to the id that was passed in the GUM constraints.
        return this._realDeviceId || this.deviceId;
    }

    /**
     * Returns the participant id which owns the track.
     *
     * @returns {string} the id of the participants. It corresponds to the
     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
     */
    getParticipantId() {
        return this.conference && this.conference.myUserId();
    }

    /**
     * Handles bytes sent statistics.
     *
     * @param {TraceablePeerConnection} tpc the source of the "bytes sent" stat
     * @param {number} bytesSent the new value
     * NOTE: used only for audio tracks to detect audio issues.
     */
    _onByteSentStatsReceived(tpc, bytesSent) {
        if (bytesSent > 0) {
            this._hasSentData = true;
        }
        const iceConnectionState = tpc.getConnectionState();

        if (this._testDataSent && iceConnectionState === 'connected') {
            // One-shot check: 3 seconds after the connection is established,
            // report to analytics if no bytes have been sent at all.
            setTimeout(() => {
                if (!this._hasSentData) {
                    logger.warn(`${this} 'bytes sent' <= 0: \
${bytesSent}`);

                    Statistics.analytics.sendEvent(NO_BYTES_SENT, { 'media_type': this.getType() });
                }
            }, 3000);
            this._testDataSent = false;
        }
    }

    /**
     * Returns facing mode for video track from camera. For other cases (e.g.
     * audio track or 'desktop' video track) returns undefined.
     *
     * @returns {CameraFacingMode|undefined}
     */
    getCameraFacingMode() {
        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            // MediaStreamTrack#getSettings() is not implemented in many
            // browsers, so we need feature checking here. Progress on the
            // respective browser's implementation can be tracked at
            // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
            // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
            // for Firefox. Even if a browser implements getSettings() already,
            // it might still not return anything for 'facingMode'.
            let trackSettings;

            try {
                trackSettings = this.track.getSettings();
            } catch (e) {
                // XXX React-native-webrtc, for example, defines
                // MediaStreamTrack#getSettings() but the implementation throws
                // a "Not implemented" Error.
            }
            if (trackSettings && 'facingMode' in trackSettings) {
                return trackSettings.facingMode;
            }

            if (typeof this._facingMode !== 'undefined') {
                return this._facingMode;
            }

            // In most cases we are showing a webcam. So if we've gotten here,
            // it should be relatively safe to assume that we are probably
            // showing the user-facing camera.
            return CameraFacingMode.USER;
        }

        return undefined;
    }

    /**
     * Stops the associated MediaStream.
     */
    stopStream() {
        /**
         * Indicates that we are executing {@link #stopStream} i.e.
         * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
         * associated with this <tt>JitsiTrack</tt> instance.
         *
         * @private
         * @type {boolean}
         */
        this._stopStreamInProgress = true;

        try {
            RTCUtils.stopMediaStream(this.stream);
        } finally {
            // Always clear the flag, even if stopMediaStream throws.
            this._stopStreamInProgress = false;
        }
    }

    /**
     * Switches the camera facing mode if the WebRTC implementation supports the
     * custom MediaStreamTrack._switchCamera method. Currently, the method in
     * question is implemented in react-native-webrtc only. When such a WebRTC
     * implementation is executing, the method is the preferred way to switch
     * between the front/user-facing and the back/environment-facing cameras
     * because it will likely be (as is the case of react-native-webrtc)
     * noticeably faster that creating a new MediaStreamTrack via a new
     * getUserMedia call with the switched facingMode constraint value.
     * Moreover, the approach with a new getUserMedia call may not even work:
     * WebRTC on Android and iOS is either very slow to open the camera a second
     * time or plainly freezes attempting to do that.
     */
    _switchCamera() {
        if (this.isVideoTrack()
                && this.videoType === VideoType.CAMERA
                && typeof this.track._switchCamera === 'function') {
            this.track._switchCamera();

            // Keep the cached facing mode in sync with the switch.
            this._facingMode
                = this._facingMode === CameraFacingMode.ENVIRONMENT
                    ? CameraFacingMode.USER
                    : CameraFacingMode.ENVIRONMENT;
        }
    }

    /**
     * Checks whether the attached MediaStream is receiving data from source or
     * not. If the stream property is null(because of mute or another reason)
     * this method will return false.
     * NOTE: This method doesn't indicate problem with the streams directly.
     * For example in case of video mute the method will return false or if the
     * user has disposed the track.
     *
     * @returns {boolean} true if the stream is receiving data and false
     * this otherwise.
     */
    isReceivingData() {
        if (this.isVideoTrack()
            && (this.isMuted() || this._stopStreamInProgress || this.videoType === VideoType.DESKTOP)) {
            // For muted/stopping/desktop video we cannot meaningfully check
            // for incoming data, so report true to avoid false alarms.
            return true;
        }

        if (!this.stream) {
            return false;
        }

        // In older version of the spec there is no muted property and
        // readyState can have value muted. In the latest versions
        // readyState can have values "live" and "ended" and there is
        // muted boolean property. If the stream is muted that means that
        // we aren't receiving any data from the source. We want to notify
        // the users for error if the stream is muted or ended on it's
        // creation.
        return this.stream.getTracks().some(track =>
            (!('readyState' in track) || track.readyState === 'live')
                && (!('muted' in track) || track.muted !== true));
    }

    /**
     * Creates a text representation of this local track instance.
     *
     * @return {string}
     */
    toString() {
        return `LocalTrack[${this.rtcId},${this.getType()}]`;
    }
}