
JitsiLocalTrack.js

/* global __filename, Promise */

import { getLogger } from 'jitsi-meet-logger';

import JitsiTrack from './JitsiTrack';
import JitsiTrackError from '../../JitsiTrackError';
import {
    TRACK_IS_DISPOSED,
    TRACK_NO_STREAM_FOUND
} from '../../JitsiTrackErrors';
import {
    LOCAL_TRACK_STOPPED,
    NO_DATA_FROM_SOURCE,
    TRACK_MUTE_CHANGED
} from '../../JitsiTrackEvents';
import browser from '../browser';
import RTCUtils from './RTCUtils';
import CameraFacingMode from '../../service/RTC/CameraFacingMode';
import * as MediaType from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import VideoType from '../../service/RTC/VideoType';
import {
    NO_BYTES_SENT,
    TRACK_UNMUTED,
    createNoDataFromSourceEvent
} from '../../service/statistics/AnalyticsEvents';
import Statistics from '../statistics/statistics';

const logger = getLogger(__filename);

/**
 * Represents a single media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 */
export default class JitsiLocalTrack extends JitsiTrack {
    /**
     * Constructs a new JitsiLocalTrack instance.
     *
     * @constructor
     * @param {Object} trackInfo
     * @param {number} trackInfo.rtcId - The ID assigned by the RTC module.
     * @param trackInfo.stream - The WebRTC MediaStream, parent of the track.
     * @param trackInfo.track - The underlying WebRTC MediaStreamTrack for this
     * JitsiLocalTrack.
     * @param trackInfo.mediaType - The MediaType of the JitsiLocalTrack.
     * @param trackInfo.videoType - The VideoType of the JitsiLocalTrack.
     * @param trackInfo.effects - The array of effect instances to use.
     * @param trackInfo.resolution - The video resolution if it's a video
     * track.
     * @param trackInfo.deviceId - The ID of the local device for this track.
     * @param trackInfo.facingMode - The camera facing mode used in the
     * getUserMedia call.
     * @param {string} trackInfo.sourceId - The id of the desktop sharing
     * source. NOTE: defined for desktop sharing tracks only.
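     *
     * @example
     * // Illustrative only: local tracks are normally obtained through
     * // JitsiMeetJS.createLocalTracks() rather than via this constructor.
     * // JitsiMeetJS.createLocalTracks({ devices: [ 'audio', 'video' ] })
     * //     .then(tracks => tracks.forEach(t => console.log(t.toString())));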
     */
    constructor({
        deviceId,
        facingMode,
        mediaType,
        resolution,
        rtcId,
        sourceId,
        sourceType,
        stream,
        track,
        videoType,
        effects = []
    }) {
        super(
            /* conference */ null,
            stream,
            track,
            /* streamInactiveHandler */ () => this.emit(LOCAL_TRACK_STOPPED),
            mediaType,
            videoType);

        this._setEffectInProgress = false;
        const effect = effects.find(e => e.isEnabled(this));

        if (effect) {
            this._startStreamEffect(effect);
        }

        /**
         * The ID assigned by the RTC module on instance creation.
         *
         * @type {number}
         */
        this.rtcId = rtcId;
        this.sourceId = sourceId;
        this.sourceType = sourceType;

        if (browser.usesNewGumFlow()) {
            // Get the resolution from the track itself, because we cannot be
            // certain which resolution WebRTC has fallen back to using.
            this.resolution = track.getSettings().height;

            // Cache the constraints of the track in case this track model
            // needs to call getUserMedia again, such as when unmuting.
            this._constraints = track.getConstraints();
        } else {
            // FIXME Currently, Firefox is ignoring our constraints about
            // resolutions so we do not store it, to avoid wrong reporting of
            // the local track resolution.
            this.resolution = browser.isFirefox() ? null : resolution;
        }

        this.deviceId = deviceId;

        /**
         * The <tt>Promise</tt> which represents the progress of a previously
         * queued/scheduled {@link _setMuted} (from the point of view of
         * {@link _queueSetMuted}).
         *
         * @private
         * @type {Promise}
         */
        this._prevSetMuted = Promise.resolve();

        /**
         * The facing mode of the camera from which this JitsiLocalTrack
         * instance was obtained.
         *
         * @private
         * @type {CameraFacingMode|undefined}
         */
        this._facingMode = facingMode;

        // Currently there is no way to know that a MediaStreamTrack ended due
        // to a device disconnect in Firefox through e.g. the "readyState"
        // property. Instead we will compare the current track's label with
        // the device labels from the enumerateDevices() list.
        this._trackEnded = false;

        /**
         * Indicates whether data has been sent or not.
         */
        this._hasSentData = false;

        /**
         * Used only for detection of audio problems. We want to check only
         * once whether the track is sending data or not. This flag is set to
         * false after the check.
         */
        this._testDataSent = true;

        // Currently there is no way to determine with what device the track
        // was created (until getConstraints() is supported), however we can
        // associate tracks with real devices obtained from the
        // enumerateDevices() call as soon as it's called.
        // NOTE: this.deviceId corresponds to the device id specified in the
        // GUM constraints, while this._realDeviceId corresponds to the id of
        // a matching device from the available device list.
        this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

        this._trackMutedTS = 0;

        this._onDeviceListWillChange = devices => {
            const oldRealDeviceId = this._realDeviceId;

            this._setRealDeviceIdFromDeviceList(devices);

            if (

                // Mark the track as ended for those browsers that do not
                // support the "readyState" property. We do not touch tracks
                // created with the default device ID "".
                (typeof this.getTrack().readyState === 'undefined'
                    && typeof this._realDeviceId !== 'undefined'
                    && !devices.find(d => d.deviceId === this._realDeviceId))

                // If there was an associated realDeviceId and after the
                // device change the realDeviceId is undefined, then the
                // associated device has been disconnected and the _trackEnded
                // flag needs to be set. In addition, on some Chrome versions
                // the readyState property is set after the device change
                // event is triggered, which causes issues in jitsi-meet with
                // the selection of a new device because we don't detect that
                // the old one was removed.
                || (typeof oldRealDeviceId !== 'undefined'
                    && typeof this._realDeviceId === 'undefined')
            ) {
                this._trackEnded = true;
            }
        };

        // Subscribe each created local audio track to the
        // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
        // handling this event for remote tracks (which are handled in RTC.js),
        // because there might be local tracks not attached to a conference.
        if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
            this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);

            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        RTCUtils.addListener(
            RTCEvents.DEVICE_LIST_WILL_CHANGE,
            this._onDeviceListWillChange);

        this._initNoDataFromSourceHandlers();
    }

    /**
     * Returns whether the associated MediaStreamTrack is in the 'ended'
     * state.
     *
     * @returns {boolean}
     */
    isEnded() {
        if (this.isVideoTrack() && this.isMuted()) {
            // If a video track is muted the readyState will be 'ended', which
            // is why we need to rely only on the _trackEnded flag.
            return this._trackEnded;
        }

        return this.getTrack().readyState === 'ended' || this._trackEnded;
    }

    /**
     * Sets handlers on the MediaStreamTrack object that will detect camera
     * issues.
     */
    _initNoDataFromSourceHandlers() {
        if (!this._isNoDataFromSourceEventsEnabled()) {
            return;
        }

        this._setHandler('track_mute', () => {
            this._trackMutedTS = window.performance.now();
            this._fireNoDataFromSourceEvent();
        });

        this._setHandler('track_unmute', () => {
            this._fireNoDataFromSourceEvent();
            Statistics.sendAnalyticsAndLog(
                TRACK_UNMUTED,
                {
                    'media_type': this.getType(),
                    'track_type': 'local',
                    value: window.performance.now() - this._trackMutedTS
                });
        });

        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            this._setHandler('track_ended', () => {
                if (!this.isReceivingData()) {
                    this._fireNoDataFromSourceEvent();
                }
            });
        }
    }

    /**
     * Returns true if "no data from source" events are enabled for this
     * JitsiLocalTrack and false otherwise.
     *
     * @returns {boolean} - True if "no data from source" events are enabled
     * for this JitsiLocalTrack and false otherwise.
     */
    _isNoDataFromSourceEventsEnabled() {
        // Disable the events for screen sharing.
        return !this.isVideoTrack() || this.videoType !== VideoType.DESKTOP;
    }

    /**
     * Fires a NO_DATA_FROM_SOURCE event and logs it to analytics and
     * callstats.
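     *
     * @example
     * // Illustrative only: consumers typically listen for the event on the
     * // track itself (JitsiTrack extends EventEmitter), e.g.:
     * // track.on(NO_DATA_FROM_SOURCE, hasNoData =>
     * //     console.warn(`no data from source: ${hasNoData}`));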
     */
    _fireNoDataFromSourceEvent() {
        const value = !this.isReceivingData();

        this.emit(NO_DATA_FROM_SOURCE, value);

        // FIXME: Should we report all of those events?
        Statistics.sendAnalytics(
            createNoDataFromSourceEvent(this.getType(), value));
        Statistics.sendLog(JSON.stringify({
            name: NO_DATA_FROM_SOURCE,
            log: value
        }));
    }

    /**
     * Sets the real device ID by comparing track information with device
     * information. This is a temporary solution until the getConstraints()
     * method is implemented in browsers.
     *
     * @param {MediaDeviceInfo[]} devices - The list of devices obtained from
     * an enumerateDevices() call.
     */
    _setRealDeviceIdFromDeviceList(devices) {
        const track = this.getTrack();
        const kind = `${track.kind}input`;
        let device
            = devices.find(d => d.kind === kind && d.label === track.label);

        if (!device && this._realDeviceId === 'default') {
            // The default device has been changed.
            // If the default device was 'A' and the default device is changed
            // to 'B', the label for the track will remain 'Default - A' but
            // the label for the device in the device list will be updated to
            // 'A'. That's why, in order to match it, we need to remove the
            // 'Default - ' part.
            const label = (track.label || '').replace('Default - ', '');

            device = devices.find(d => d.kind === kind && d.label === label);
        }

        if (device) {
            this._realDeviceId = device.deviceId;
        } else {
            this._realDeviceId = undefined;
        }
    }

    /**
     * Sets the stream property of the JitsiLocalTrack object and sets all
     * stored handlers on it.
     *
     * @param {MediaStream} stream - The new stream.
     * @protected
     */
    _setStream(stream) {
        super._setStream(stream);

        if (stream) {
            // Store the MSID for video mute/unmute purposes.
            this.storedMSID = this.getMSID();
            logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
        } else {
            logger.debug(`Setting 'null' stream on ${this}`);
        }
    }

    /**
     * Starts the effect process and sets the modified stream on this track.
     *
     * @private
     * @param {Object} effect - The effect instance to start.
     * @returns {void}
     */
    _startStreamEffect(effect) {
        this._streamEffect = effect;
        this._originalStream = this.stream;
        this._setStream(this._streamEffect.startEffect(this._originalStream));
    }

    /**
     * Stops the effect process and restores the original stream.
     *
     * @private
     * @returns {void}
     */
    _stopStreamEffect() {
        if (this._streamEffect) {
            this._streamEffect.stopEffect();
            this._setStream(this._originalStream);
            this._originalStream = undefined;
        }
    }

    /**
     * Stops the currently used effect (if there is one) and starts the passed
     * effect (if there is one).
     *
     * @param {Object|undefined} effect - The new effect to be set.
     */
    _switchStreamEffect(effect) {
        if (this._streamEffect) {
            this._stopStreamEffect();
            this._streamEffect = undefined;
        }
        if (effect) {
            this._startStreamEffect(effect);
        }
    }

    /**
     * Sets the effect and switches between the modified stream and the
     * original one.
     *
     * @param {Object} effect - The effect instance to be used.
     * @returns {Promise}
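     *
     * @example
     * // Illustrative only: an effect instance is expected to implement
     * // isEnabled(track), startEffect(stream) and stopEffect(), as used by
     * // _startStreamEffect/_stopStreamEffect above. 'blurEffect' is a
     * // hypothetical such instance.
     * // localVideoTrack.setEffect(blurEffect)
     * //     .then(() => console.log('effect applied'))
     * //     .catch(error => console.error('setEffect failed', error));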
     */
    setEffect(effect) {
        if (typeof this._streamEffect === 'undefined'
                && typeof effect === 'undefined') {
            return Promise.resolve();
        }

        if (typeof effect !== 'undefined' && !effect.isEnabled(this)) {
            return Promise.reject(new Error('Incompatible effect instance!'));
        }

        if (this._setEffectInProgress === true) {
            return Promise.reject(new Error('setEffect already in progress!'));
        }

        if (this.isMuted()) {
            this._streamEffect = effect;

            return Promise.resolve();
        }

        const conference = this.conference;

        if (!conference) {
            this._switchStreamEffect(effect);

            return Promise.resolve();
        }

        this._setEffectInProgress = true;

        // TODO: Create a new JingleSessionPC method for replacing a stream in
        // JitsiLocalTrack without offer/answer.
        return conference.removeTrack(this)
            .then(() => {
                this._switchStreamEffect(effect);
                if (this.isVideoTrack()) {
                    this.containers.forEach(
                        cont => RTCUtils.attachMediaStream(cont, this.stream));
                }

                return conference.addTrack(this);
            })
            .then(() => {
                this._setEffectInProgress = false;
            })
            .catch(error => {
                // Any error is not recoverable and will trigger a
                // CONFERENCE_FAILED event, but let's try to clean up
                // everything related to the effect functionality.
                this._setEffectInProgress = false;
                this._switchStreamEffect();
                logger.error('Failed to switch to the new stream!', error);
                throw error;
            });
    }

    /**
     * Asynchronously mutes this track.
     *
     * @returns {Promise}
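     *
     * @example
     * // Illustrative only: mute() and unmute() return Promises and are
     * // serialized internally via {@link _queueSetMuted}, so they can be
     * // called back to back.
     * // localTrack.mute()
     * //     .then(() => localTrack.unmute())
     * //     .catch(error => console.error('mute toggle failed', error));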
     */
    mute() {
        return this._queueSetMuted(true);
    }

    /**
     * Asynchronously unmutes this track.
     *
     * @returns {Promise}
     */
    unmute() {
        return this._queueSetMuted(false);
    }

    /**
     * Initializes a new Promise to execute {@link #_setMuted}. May be called
     * multiple times in a row and the invocations of {@link #_setMuted} and,
     * consequently, {@link #mute} and/or {@link #unmute} will be resolved in
     * a serialized fashion.
     *
     * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
     * @returns {Promise}
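     *
     * @example
     * // Illustrative only: both calls below are chained onto
     * // this._prevSetMuted, so the second runs only after the first settles
     * // (whether it resolved or rejected).
     * // track._queueSetMuted(true);
     * // track._queueSetMuted(false);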
     */
    _queueSetMuted(muted) {
        const setMuted = this._setMuted.bind(this, muted);

        this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);

        return this._prevSetMuted;
    }

    /**
     * Mutes / unmutes this track.
     *
     * @param {boolean} muted - If <tt>true</tt>, this track will be muted;
     * otherwise, this track will be unmuted.
     * @private
     * @returns {Promise}
     */
    _setMuted(muted) {
        if (this.isMuted() === muted) {
            return Promise.resolve();
        }

        if (this.disposed) {
            return Promise.reject(new JitsiTrackError(TRACK_IS_DISPOSED));
        }

        let promise = Promise.resolve();

        // A function that will log info about the muted status transition.
        const logMuteInfo = () => logger.info(`Mute ${this}: ${muted}`);

        if (this.isAudioTrack()
                || this.videoType === VideoType.DESKTOP
                || !browser.doesVideoMuteByStreamRemove()) {
            logMuteInfo();
            if (this.track) {
                this.track.enabled = !muted;
            }
        } else if (muted) {
            promise = new Promise((resolve, reject) => {
                logMuteInfo();
                this._removeStreamFromConferenceAsMute(
                    () => {
                        if (this._streamEffect) {
                            this._stopStreamEffect();
                        }

                        // FIXME: Maybe here we should set the SRC for the
                        // containers to something.
                        // We don't want any events to be fired on this stream.
                        this._unregisterHandlers();
                        this.stopStream();
                        this._setStream(null);
                        resolve();
                    },
                    reject);
            });
        } else {
            logMuteInfo();

            // This path is only for camera.
            const streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                effects: this._streamEffect ? [ this._streamEffect ] : [],
                facingMode: this.getCameraFacingMode()
            };

            if (browser.usesNewGumFlow()) {
                promise
                    = RTCUtils.newObtainAudioAndVideoPermissions(Object.assign(
                        {},
                        streamOptions,
                        { constraints: { video: this._constraints } }));
            } else {
                if (this.resolution) {
                    streamOptions.resolution = this.resolution;
                }

                promise
                    = RTCUtils.obtainAudioAndVideoPermissions(streamOptions);
            }

            // Reassign so that the mute status is sent only after the new
            // stream has been obtained and added to the conference.
            promise = promise.then(streamsInfo => {
                // The track kind for a presenter track is video as well.
                const mediaType
                    = this.getType() === MediaType.PRESENTER
                        ? MediaType.VIDEO
                        : this.getType();
                const streamInfo
                    = browser.usesNewGumFlow()
                        ? streamsInfo.find(
                            info => info.track.kind === mediaType)
                        : streamsInfo.find(
                            info => info.mediaType === mediaType);

                if (streamInfo) {
                    this._setStream(streamInfo.stream);
                    this.track = streamInfo.track;

                    // This is not good when the video type changes after
                    // unmute, but let's not crash here.
                    if (this.videoType !== streamInfo.videoType) {
                        logger.warn(
                            `${this}: video type has changed after unmute!`,
                            this.videoType, streamInfo.videoType);
                        this.videoType = streamInfo.videoType;
                    }
                } else {
                    throw new JitsiTrackError(TRACK_NO_STREAM_FOUND);
                }

                if (this._streamEffect) {
                    this._startStreamEffect(this._streamEffect);
                }

                this.containers.map(
                    cont => RTCUtils.attachMediaStream(cont, this.stream));

                return this._addStreamToConferenceAsUnmute();
            });
        }

        return promise
            .then(() => this._sendMuteStatus(muted))
            .then(() => this.emit(TRACK_MUTE_CHANGED, this));
    }

    /**
     * Adds the stream to the conference and marks it as an "unmute"
     * operation.
     *
     * @private
     * @returns {Promise}
     */
    _addStreamToConferenceAsUnmute() {
        if (!this.conference) {
            return Promise.resolve();
        }

        // FIXME it would be good to not include the conference as part of
        // this process. Only the TraceablePeerConnections to which the track
        // is attached should care about this action. The TPCs to which the
        // track is not attached can sync up when the track is re-attached.
        // A problem with that is that the "modify sources" queue is part of
        // the JingleSessionPC and it would be excluded from the process. One
        // solution would be to extract a class between TPC and
        // JingleSessionPC which would contain the queue and would notify the
        // signaling layer when local SSRCs are changed. This would help to
        // separate XMPP from the RTC module.
        return new Promise((resolve, reject) => {
            this.conference._addLocalTrackAsUnmute(this)
                .then(resolve, error => reject(new Error(error)));
        });
    }

    /**
     * Removes the stream from the conference and marks it as a "mute"
     * operation.
     *
     * @param {Function} successCallback - Will be called on success.
     * @param {Function} errorCallback - Will be called on error.
     * @private
     */
    _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
        if (!this.conference) {
            successCallback();

            return;
        }

        this.conference._removeLocalTrackAsMute(this).then(
            successCallback,
            error => errorCallback(new Error(error)));
    }

    /**
     * Sends the mute status for the track to the conference, if any.
     *
     * @param {boolean} mute - Whether the track is muted.
     * @private
     * @returns {Promise}
     */
    _sendMuteStatus(mute) {
        if (!this.conference || !this.conference.room) {
            return Promise.resolve();
        }

        return new Promise(resolve => {
            this.conference.room[
                this.isAudioTrack()
                    ? 'setAudioMute'
                    : 'setVideoMute'](mute, resolve);
        });
    }

    /**
     * @inheritdoc
     *
     * Stops sending the media track and removes it from the HTML.
     * NOTE: Works for local tracks only.
     *
     * @extends JitsiTrack#dispose
     * @returns {Promise}
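     *
     * @example
     * // Illustrative only: dispose local tracks when done with them,
     * // otherwise the camera/microphone may remain in use.
     * // localTrack.dispose().then(() => console.log('track disposed'));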
     */
    dispose() {
        this._switchStreamEffect();

        let promise = Promise.resolve();

        if (this.conference) {
            promise = this.conference.removeTrack(this);
        }

        if (this.stream) {
            this.stopStream();
            this.detach();
        }

        RTCUtils.removeListener(
            RTCEvents.DEVICE_LIST_WILL_CHANGE,
            this._onDeviceListWillChange);

        if (this._onAudioOutputDeviceChanged) {
            RTCUtils.removeListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        return promise.then(() => super.dispose());
    }

    /**
     * Returns <tt>true</tt> if the stream is muted and <tt>false</tt>
     * otherwise.
     *
     * @returns {boolean} <tt>true</tt> if the stream is muted and
     * <tt>false</tt> otherwise.
     */
    isMuted() {
        // this.stream will be null when we mute local video on Chrome.
        if (!this.stream) {
            return true;
        }

        if (this.isVideoTrack() && !this.isActive()) {
            return true;
        }

        return !this.track || !this.track.enabled;
    }

    /**
     * Sets the JitsiConference object associated with the track. This is a
     * temporary solution.
     *
     * @param conference - The JitsiConference object.
     */
    _setConference(conference) {
        this.conference = conference;

        // We want to keep up with postponed events which should have been
        // fired on the "attach" call, but for local tracks we do not always
        // have the conference before attaching. However, this may result in
        // duplicated events if they have been triggered on "attach" already.
        for (let i = 0; i < this.containers.length; i++) {
            this._maybeFireTrackAttached(this.containers[i]);
        }
    }

    /**
     * Returns <tt>true</tt>.
     *
     * @returns {boolean} <tt>true</tt>
     */
    isLocal() {
        return true;
    }

    /**
     * Returns the device id associated with the track.
     *
     * @returns {string}
     */
    getDeviceId() {
        return this._realDeviceId || this.deviceId;
    }

    /**
     * Returns the participant id which owns the track.
     *
     * @returns {string} The id of the participant. It corresponds to the
     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
     */
    getParticipantId() {
        return this.conference && this.conference.myUserId();
    }

    /**
     * Handles bytes sent statistics.
     * NOTE: Used only for audio tracks to detect audio issues.
     *
     * @param {TraceablePeerConnection} tpc - The source of the "bytes sent"
     * stat.
     * @param {number} bytesSent - The new value.
     */
    _onByteSentStatsReceived(tpc, bytesSent) {
        if (bytesSent > 0) {
            this._hasSentData = true;
        }

        const iceConnectionState = tpc.getConnectionState();

        if (this._testDataSent && iceConnectionState === 'connected') {
            setTimeout(() => {
                if (!this._hasSentData) {
                    logger.warn(`${this} 'bytes sent' <= 0: ${bytesSent}`);

                    Statistics.analytics.sendEvent(
                        NO_BYTES_SENT,
                        { 'media_type': this.getType() });
                }
            }, 3000);
            this._testDataSent = false;
        }
    }

    /**
     * Returns the facing mode for a video track from the camera. For other
     * cases (e.g. an audio track or a 'desktop' video track) it returns
     * undefined.
     *
     * @returns {CameraFacingMode|undefined}
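     *
     * @example
     * // Illustrative only, assuming 'localVideoTrack' is a camera track:
     * // if (localVideoTrack.getCameraFacingMode()
     * //         === CameraFacingMode.ENVIRONMENT) {
     * //     console.log('using the back/environment-facing camera');
     * // }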
     */
    getCameraFacingMode() {
        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            // MediaStreamTrack#getSettings() is not implemented in many
            // browsers, so we need feature checking here. Progress on the
            // respective browser's implementation can be tracked at
            // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
            // Chromium and
            // https://bugzilla.mozilla.org/show_bug.cgi?id=1213517 for
            // Firefox. Even if a browser implements getSettings() already,
            // it might still not return anything for 'facingMode'.
            let trackSettings;

            try {
                trackSettings = this.track.getSettings();
            } catch (e) {
                // XXX React-native-webrtc, for example, defines
                // MediaStreamTrack#getSettings() but the implementation
                // throws a "Not implemented" Error.
            }
            if (trackSettings && 'facingMode' in trackSettings) {
                return trackSettings.facingMode;
            }

            if (typeof this._facingMode !== 'undefined') {
                return this._facingMode;
            }

            // In most cases we are showing a webcam. So if we've gotten
            // here, it should be relatively safe to assume that we are
            // probably showing the user-facing camera.
            return CameraFacingMode.USER;
        }

        return undefined;
    }

    /**
     * Stops the associated MediaStream.
     */
    stopStream() {
        /**
         * Indicates that we are executing {@link #stopStream} i.e.
         * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
         * associated with this <tt>JitsiTrack</tt> instance.
         *
         * @private
         * @type {boolean}
         */
        this._stopStreamInProgress = true;

        try {
            RTCUtils.stopMediaStream(this.stream);
        } finally {
            this._stopStreamInProgress = false;
        }
    }

    /**
     * Switches the camera facing mode if the WebRTC implementation supports
     * the custom MediaStreamTrack._switchCamera method. Currently, the method
     * in question is implemented in react-native-webrtc only. When such a
     * WebRTC implementation is executing, the method is the preferred way to
     * switch between the front/user-facing and the back/environment-facing
     * cameras because it will likely be (as is the case of
     * react-native-webrtc) noticeably faster than creating a new
     * MediaStreamTrack via a new getUserMedia call with the switched
     * facingMode constraint value. Moreover, the approach with a new
     * getUserMedia call may not even work: WebRTC on Android and iOS is
     * either very slow to open the camera a second time or plainly freezes
     * attempting to do that.
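     *
     * @example
     * // Illustrative only: on react-native-webrtc this flips between the
     * // front and back cameras without a new getUserMedia call.
     * // localVideoTrack._switchCamera();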
     */
    _switchCamera() {
        if (this.isVideoTrack()
                && this.videoType === VideoType.CAMERA
                && typeof this.track._switchCamera === 'function') {
            this.track._switchCamera();

            this._facingMode
                = this._facingMode === CameraFacingMode.ENVIRONMENT
                    ? CameraFacingMode.USER
                    : CameraFacingMode.ENVIRONMENT;
        }
    }

    /**
     * Checks whether the attached MediaStream is receiving data from its
     * source or not. If the stream property is null (because of a mute or
     * another reason) this method will return false.
     * NOTE: This method doesn't indicate a problem with the stream directly.
     * For example, a muted video track will report true here, while a
     * disposed track (whose stream property is null) will report false.
     *
     * @returns {boolean} True if the stream is receiving data and false
     * otherwise.
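     *
     * @example
     * // Illustrative only: a simple health check after track creation.
     * // if (!localTrack.isReceivingData()) {
     * //     console.warn('no media is flowing from the source');
     * // }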
     */
    isReceivingData() {
        if (this.isVideoTrack()
            && (this.isMuted()
                || this._stopStreamInProgress
                || this.videoType === VideoType.DESKTOP)) {
            return true;
        }

        if (!this.stream) {
            return false;
        }

        // In older versions of the spec there is no muted property and
        // readyState can have the value 'muted'. In the latest versions
        // readyState can have the values 'live' and 'ended' and there is a
        // muted boolean property. If the stream is muted that means that we
        // aren't receiving any data from the source. We want to notify the
        // users of an error if the stream is muted or ended on its creation.
        // When a stream effect (e.g. video blur) is enabled, check the
        // original video stream.
        const stream = this._streamEffect ? this._originalStream : this.stream;

        return stream.getTracks().some(track =>
            (!('readyState' in track) || track.readyState === 'live')
                && (!('muted' in track) || track.muted !== true));
    }

    /**
     * Creates a text representation of this local track instance.
     *
     * @return {string}
     */
    toString() {
        return `LocalTrack[${this.rtcId},${this.getType()}]`;
    }
}