您最多选择25个主题 主题必须以字母或数字开头,可以包含连字符 (-),并且长度不得超过35个字符

JitsiLocalTrack.js 32KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904
  1. import { getLogger } from '@jitsi/logger';
  2. import JitsiTrackError from '../../JitsiTrackError';
  3. import {
  4. TRACK_IS_DISPOSED,
  5. TRACK_NO_STREAM_FOUND
  6. } from '../../JitsiTrackErrors';
  7. import {
  8. LOCAL_TRACK_STOPPED,
  9. NO_DATA_FROM_SOURCE,
  10. TRACK_MUTE_CHANGED
  11. } from '../../JitsiTrackEvents';
  12. import CameraFacingMode from '../../service/RTC/CameraFacingMode';
  13. import * as MediaType from '../../service/RTC/MediaType';
  14. import RTCEvents from '../../service/RTC/RTCEvents';
  15. import VideoType from '../../service/RTC/VideoType';
  16. import {
  17. NO_BYTES_SENT,
  18. TRACK_UNMUTED,
  19. createNoDataFromSourceEvent
  20. } from '../../service/statistics/AnalyticsEvents';
  21. import browser from '../browser';
  22. import Statistics from '../statistics/statistics';
  23. import JitsiTrack from './JitsiTrack';
  24. import RTCUtils from './RTCUtils';
// Module-scoped logger tagged with this file's path.
const logger = getLogger(__filename);
  26. /**
  27. * Represents a single media track(either audio or video).
  28. * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
  29. */
  30. export default class JitsiLocalTrack extends JitsiTrack {
    /**
     * Constructs a new JitsiLocalTrack instance.
     *
     * @constructor
     * @param {Object} trackInfo
     * @param {number} trackInfo.rtcId - The ID assigned by the RTC module.
     * @param {Object} trackInfo.stream - The WebRTC MediaStream, parent of the track.
     * @param {Object} trackInfo.track - The underlying WebRTC MediaStreamTrack for new JitsiLocalTrack.
     * @param {string} trackInfo.mediaType - The MediaType of the JitsiLocalTrack.
     * @param {string} trackInfo.videoType - The VideoType of the JitsiLocalTrack.
     * @param {Array<Object>} trackInfo.effects - The effects to be applied to the JitsiLocalTrack.
     * @param {number} trackInfo.resolution - The video resolution if it's a video track.
     * @param {string} trackInfo.deviceId - The ID of the local device for this track.
     * @param {string} trackInfo.facingMode - The camera facing mode used in getUserMedia call (for mobile only).
     * @param {string} trackInfo.sourceId - The id of the desktop sharing source. NOTE: defined for desktop sharing
     * tracks only.
     */
    constructor({
        deviceId,
        facingMode,
        mediaType,
        resolution,
        rtcId,
        sourceId,
        sourceType,
        stream,
        track,
        videoType,
        effects = []
    }) {
        super(
            /* conference */ null,
            stream,
            track,
            /* streamInactiveHandler */ () => this.emit(LOCAL_TRACK_STOPPED),
            mediaType,
            videoType);
        this._setEffectInProgress = false;

        // Apply the first effect (if any) that declares itself compatible with this track.
        const effect = effects.find(e => e.isEnabled(this));

        if (effect) {
            this._startStreamEffect(effect);
        }

        // displaySurface is only meaningful for desktop-capture tracks.
        const displaySurface = videoType === VideoType.DESKTOP
            ? track.getSettings().displaySurface
            : null;

        /**
         * Track metadata.
         */
        this.metadata = {
            timestamp: Date.now(),
            ...displaySurface ? { displaySurface } : {}
        };

        /**
         * The ID assigned by the RTC module on instance creation.
         *
         * @type {number}
         */
        this.rtcId = rtcId;
        this.sourceId = sourceId;
        this.sourceType = sourceType;

        // Get the resolution from the track itself because it cannot be
        // certain which resolution webrtc has fallen back to using.
        this.resolution = track.getSettings().height;
        this.maxEnabledResolution = resolution;

        // Cache the constraints of the track in case of any this track
        // model needs to call getUserMedia again, such as when unmuting.
        this._constraints = track.getConstraints();

        // Safari returns an empty constraints object, construct the constraints using getSettings.
        if (!Object.keys(this._constraints).length && videoType === VideoType.CAMERA) {
            this._constraints = {
                height: track.getSettings().height,
                width: track.getSettings().width
            };
        }
        this.deviceId = deviceId;

        /**
         * The <tt>Promise</tt> which represents the progress of a previously
         * queued/scheduled {@link _setMuted} (from the point of view of
         * {@link _queueSetMuted}).
         *
         * @private
         * @type {Promise}
         */
        this._prevSetMuted = Promise.resolve();

        /**
         * The facing mode of the camera from which this JitsiLocalTrack
         * instance was obtained.
         *
         * @private
         * @type {CameraFacingMode|undefined}
         */
        this._facingMode = facingMode;

        // Currently there is no way to know the MediaStreamTrack ended due
        // to device disconnect in Firefox through e.g. "readyState" property.
        // Instead we will compare current track's label with device labels from
        // enumerateDevices() list.
        this._trackEnded = false;

        /**
         * Indicates whether data has been sent or not.
         */
        this._hasSentData = false;

        /**
         * Used only for detection of audio problems. We want to check only once
         * whether the track is sending data or not. This flag is set to false
         * after the check.
         */
        this._testDataSent = true;

        // Currently there is no way to determine with what device track was
        // created (until getConstraints() support), however we can associate
        // tracks with real devices obtained from enumerateDevices() call as
        // soon as it's called.
        // NOTE: this.deviceId corresponds to the device id specified in GUM constraints and this._realDeviceId seems to
        // correspond to the id of a matching device from the available device list.
        this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;
        this._trackMutedTS = 0;
        this._onDeviceListWillChange = devices => {
            const oldRealDeviceId = this._realDeviceId;

            this._setRealDeviceIdFromDeviceList(devices);
            if (
                // Mark track as ended for those browsers that do not support
                // "readyState" property. We do not touch tracks created with
                // default device ID "".
                (typeof this.getTrack().readyState === 'undefined'
                    && typeof this._realDeviceId !== 'undefined'
                    && !devices.find(d => d.deviceId === this._realDeviceId))

                // If there was an associated realDeviceID and after the device change the realDeviceId is undefined
                // then the associated device has been disconnected and the _trackEnded flag needs to be set. In
                // addition on some Chrome versions the readyState property is set after the device change event is
                // triggered which causes issues in jitsi-meet with the selection of a new device because we don't
                // detect that the old one was removed.
                || (typeof oldRealDeviceId !== 'undefined' && typeof this._realDeviceId === 'undefined')
            ) {
                this._trackEnded = true;
            }
        };

        // Subscribe each created local audio track to
        // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
        // handling this event for remote tracks (which are handled in RTC.js),
        // because there might be local tracks not attached to a conference.
        if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
            this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);
            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }
        RTCUtils.addListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);
        this._initNoDataFromSourceHandlers();
    }
  179. /**
  180. * Adds stream to conference and marks it as "unmute" operation.
  181. *
  182. * @private
  183. * @returns {Promise}
  184. */
  185. _addStreamToConferenceAsUnmute() {
  186. if (!this.conference) {
  187. return Promise.resolve();
  188. }
  189. // FIXME it would be good to not included conference as part of this process. Only TraceablePeerConnections to
  190. // which the track is attached should care about this action. The TPCs to which the track is not attached can
  191. // sync up when track is re-attached. A problem with that is that the "modify sources" queue is part of the
  192. // JingleSessionPC and it would be excluded from the process. One solution would be to extract class between
  193. // TPC and JingleSessionPC which would contain the queue and would notify the signaling layer when local SSRCs
  194. // are changed. This would help to separate XMPP from the RTC module.
  195. return new Promise((resolve, reject) => {
  196. this.conference._addLocalTrackAsUnmute(this)
  197. .then(resolve, error => reject(new Error(error)));
  198. });
  199. }
  200. /**
  201. * Fires NO_DATA_FROM_SOURCE event and logs it to analytics and callstats.
  202. *
  203. * @private
  204. * @returns {void}
  205. */
  206. _fireNoDataFromSourceEvent() {
  207. const value = !this.isReceivingData();
  208. this.emit(NO_DATA_FROM_SOURCE, value);
  209. // FIXME: Should we report all of those events
  210. Statistics.sendAnalytics(createNoDataFromSourceEvent(this.getType(), value));
  211. Statistics.sendLog(JSON.stringify({
  212. name: NO_DATA_FROM_SOURCE,
  213. log: value
  214. }));
  215. }
    /**
     * Sets handlers to the MediaStreamTrack object that will detect camera issues.
     *
     * Registers 'track_mute'/'track_unmute' handlers that fire NO_DATA_FROM_SOURCE and report
     * mute-duration analytics; for camera video tracks also watches 'track_ended'.
     *
     * @private
     * @returns {void}
     */
    _initNoDataFromSourceHandlers() {
        // Desktop (screen-share) tracks are excluded — see _isNoDataFromSourceEventsEnabled.
        if (!this._isNoDataFromSourceEventsEnabled()) {
            return;
        }
        this._setHandler('track_mute', () => {
            // Remember when the track went silent so the unmute handler can report the gap length.
            this._trackMutedTS = window.performance.now();
            this._fireNoDataFromSourceEvent();
        });
        this._setHandler('track_unmute', () => {
            this._fireNoDataFromSourceEvent();
            Statistics.sendAnalyticsAndLog(
                TRACK_UNMUTED,
                {
                    'media_type': this.getType(),
                    'track_type': 'local',
                    value: window.performance.now() - this._trackMutedTS
                });
        });
        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            this._setHandler('track_ended', () => {
                if (!this.isReceivingData()) {
                    this._fireNoDataFromSourceEvent();
                }
            });
        }
    }
  248. /**
  249. * Returns true if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
  250. *
  251. * @private
  252. * @returns {boolean} - True if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
  253. */
  254. _isNoDataFromSourceEventsEnabled() {
  255. // Disable the events for screen sharing.
  256. return !this.isVideoTrack() || this.videoType !== VideoType.DESKTOP;
  257. }
  258. /**
  259. * Initializes a new Promise to execute {@link #_setMuted}. May be called multiple times in a row and the
  260. * invocations of {@link #_setMuted} and, consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
  261. * serialized fashion.
  262. *
  263. * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
  264. * @private
  265. * @returns {Promise}
  266. */
  267. _queueSetMuted(muted) {
  268. const setMuted = this._setMuted.bind(this, muted);
  269. this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);
  270. return this._prevSetMuted;
  271. }
  272. /**
  273. * Removes stream from conference and marks it as "mute" operation.
  274. *
  275. * @param {Function} successCallback - Callback that will be called when the operation is successful.
  276. * @param {Function} errorCallback - Callback that will be called when the operation fails.
  277. * @private
  278. * @returns {Promise}
  279. */
  280. _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
  281. if (!this.conference) {
  282. successCallback();
  283. return;
  284. }
  285. this.conference._removeLocalTrackAsMute(this).then(
  286. successCallback,
  287. error => errorCallback(new Error(error)));
  288. }
  289. /**
  290. * Sends mute status for a track to conference if any.
  291. *
  292. * @param {boolean} mute - If track is muted.
  293. * @private
  294. * @returns {void}
  295. */
  296. _sendMuteStatus(mute) {
  297. if (this.conference) {
  298. this.conference._setTrackMuteStatus(this.getType(), this, mute) && this.conference.room.sendPresence();
  299. }
  300. }
    /**
     * Mutes / unmutes this track.
     *
     * Audio tracks, desktop tracks and browsers that do not mute video by removing the stream are
     * handled by toggling <tt>track.enabled</tt> (or a stream effect's own mute). Otherwise a camera
     * video mute removes the stream from the conference and stops it, and an unmute re-acquires the
     * camera via getUserMedia and re-adds it.
     *
     * @param {boolean} muted - If <tt>true</tt>, this track will be muted; otherwise, this track will be unmuted.
     * @private
     * @returns {Promise} - Resolves once the mute state change has been applied and signaled.
     */
    _setMuted(muted) {
        if (this.isMuted() === muted) {
            return Promise.resolve();
        }
        if (this.disposed) {
            return Promise.reject(new JitsiTrackError(TRACK_IS_DISPOSED));
        }

        let promise = Promise.resolve();

        // A function that will print info about muted status transition
        const logMuteInfo = () => logger.info(`Mute ${this}: ${muted}`);

        if (this.isAudioTrack()
                || this.videoType === VideoType.DESKTOP
                || !browser.doesVideoMuteByStreamRemove()) {
            logMuteInfo();

            // If we have a stream effect that implements its own mute functionality, prioritize it before
            // normal mute e.g. the stream effect that implements system audio sharing has a custom
            // mute state in which if the user mutes, system audio still has to go through.
            if (this._streamEffect && this._streamEffect.setMuted) {
                this._streamEffect.setMuted(muted);
            } else if (this.track) {
                this.track.enabled = !muted;
            }
        } else if (muted) {
            // Camera mute on browsers that mute by removing the stream: tear the stream down.
            promise = new Promise((resolve, reject) => {
                logMuteInfo();
                this._removeStreamFromConferenceAsMute(
                    () => {
                        if (this._streamEffect) {
                            this._stopStreamEffect();
                        }

                        // FIXME: Maybe here we should set the SRC for the
                        // containers to something
                        // We don't want any events to be fired on this stream
                        this._unregisterHandlers();
                        this.stopStream();
                        this._setStream(null);
                        resolve();
                    },
                    reject);
            });
        } else {
            logMuteInfo();

            // This path is only for camera.
            const streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                effects: this._streamEffect ? [ this._streamEffect ] : [],
                facingMode: this.getCameraFacingMode()
            };

            // Re-acquire the camera using the constraints cached at construction time.
            promise
                = RTCUtils.obtainAudioAndVideoPermissions(Object.assign(
                    {},
                    streamOptions,
                    { constraints: { video: this._constraints } }));

            promise = promise.then(streamsInfo => {
                // The track kind for presenter track is video as well.
                const mediaType = this.getType() === MediaType.PRESENTER ? MediaType.VIDEO : this.getType();
                const streamInfo = streamsInfo.find(info => info.track.kind === mediaType);

                if (streamInfo) {
                    this._setStream(streamInfo.stream);
                    this.track = streamInfo.track;

                    // This is not good when video type changes after
                    // unmute, but let's not crash here
                    if (this.videoType !== streamInfo.videoType) {
                        logger.warn(
                            `${this}: video type has changed after unmute!`,
                            this.videoType, streamInfo.videoType);
                        this.videoType = streamInfo.videoType;
                    }
                } else {
                    throw new JitsiTrackError(TRACK_NO_STREAM_FOUND);
                }

                // Re-apply the previously active stream effect to the new stream.
                if (this._streamEffect) {
                    this._startStreamEffect(this._streamEffect);
                }

                // Re-attach the new stream to every container the old one was rendered in.
                this.containers.map(
                    cont => RTCUtils.attachMediaStream(cont, this.stream));

                return this._addStreamToConferenceAsUnmute();
            });
        }

        return promise
            .then(() => {
                this._sendMuteStatus(muted);
                this.emit(TRACK_MUTE_CHANGED, this);
            });
    }
  394. /**
  395. * Sets real device ID by comparing track information with device information. This is temporary solution until
  396. * getConstraints() method will be implemented in browsers.
  397. *
  398. * @param {MediaDeviceInfo[]} devices - The list of devices obtained from enumerateDevices() call.
  399. * @private
  400. * @returns {void}
  401. */
  402. _setRealDeviceIdFromDeviceList(devices) {
  403. const track = this.getTrack();
  404. const kind = `${track.kind}input`;
  405. let device = devices.find(d => d.kind === kind && d.label === track.label);
  406. if (!device && this._realDeviceId === 'default') { // the default device has been changed.
  407. // If the default device was 'A' and the default device is changed to 'B' the label for the track will
  408. // remain 'Default - A' but the label for the device in the device list will be updated to 'A'. That's
  409. // why in order to match it we need to remove the 'Default - ' part.
  410. const label = (track.label || '').replace('Default - ', '');
  411. device = devices.find(d => d.kind === kind && d.label === label);
  412. }
  413. if (device) {
  414. this._realDeviceId = device.deviceId;
  415. } else {
  416. this._realDeviceId = undefined;
  417. }
  418. }
  419. /**
  420. * Sets the stream property of JitsiLocalTrack object and sets all stored handlers to it.
  421. *
  422. * @param {MediaStream} stream - The new MediaStream.
  423. * @private
  424. * @returns {void}
  425. */
  426. _setStream(stream) {
  427. super._setStream(stream);
  428. if (stream) {
  429. // Store the MSID for video mute/unmute purposes.
  430. this.storedMSID = this.getMSID();
  431. logger.debug(`Setting new MSID: ${this.storedMSID} on ${this}`);
  432. } else {
  433. logger.debug(`Setting 'null' stream on ${this}`);
  434. }
  435. }
  436. /**
  437. * Starts the effect process and returns the modified stream.
  438. *
  439. * @param {Object} effect - Represents effect instance
  440. * @private
  441. * @returns {void}
  442. */
  443. _startStreamEffect(effect) {
  444. this._streamEffect = effect;
  445. this._originalStream = this.stream;
  446. this._setStream(this._streamEffect.startEffect(this._originalStream));
  447. this.track = this.stream.getTracks()[0];
  448. }
  449. /**
  450. * Stops the effect process and returns the original stream.
  451. *
  452. * @private
  453. * @returns {void}
  454. */
  455. _stopStreamEffect() {
  456. if (this._streamEffect) {
  457. this._streamEffect.stopEffect();
  458. this._setStream(this._originalStream);
  459. this._originalStream = null;
  460. this.track = this.stream ? this.stream.getTracks()[0] : null;
  461. }
  462. }
  463. /**
  464. * Switches the camera facing mode if the WebRTC implementation supports the custom MediaStreamTrack._switchCamera
  465. * method. Currently, the method in question is implemented in react-native-webrtc only. When such a WebRTC
  466. * implementation is executing, the method is the preferred way to switch between the front/user-facing and the
  467. * back/environment-facing cameras because it will likely be (as is the case of react-native-webrtc) noticeably
  468. * faster that creating a new MediaStreamTrack via a new getUserMedia call with the switched facingMode constraint
  469. * value. Moreover, the approach with a new getUserMedia call may not even work: WebRTC on Android and iOS is
  470. * either very slow to open the camera a second time or plainly freezes attempting to do that.
  471. *
  472. * @returns {void}
  473. */
  474. _switchCamera() {
  475. if (this.isVideoTrack()
  476. && this.videoType === VideoType.CAMERA
  477. && typeof this.track._switchCamera === 'function') {
  478. this.track._switchCamera();
  479. this._facingMode
  480. = this._facingMode === CameraFacingMode.ENVIRONMENT
  481. ? CameraFacingMode.USER
  482. : CameraFacingMode.ENVIRONMENT;
  483. }
  484. }
  485. /**
  486. * Stops the currently used effect (if there is one) and starts the passed effect (if there is one).
  487. *
  488. * @param {Object|undefined} effect - The new effect to be set.
  489. * @private
  490. * @returns {void}
  491. */
  492. _switchStreamEffect(effect) {
  493. if (this._streamEffect) {
  494. this._stopStreamEffect();
  495. this._streamEffect = undefined;
  496. }
  497. if (effect) {
  498. this._startStreamEffect(effect);
  499. }
  500. }
    /**
     * @inheritdoc
     *
     * Stops sending the media track. And removes it from the HTML. NOTE: Works for local tracks only.
     *
     * @extends JitsiTrack#dispose
     * @returns {Promise} - Resolves once the track has been removed from the conference (if any)
     * and the parent class cleanup has completed.
     */
    dispose() {
        let promise = Promise.resolve();

        // Remove the effect instead of stopping it so that the original stream is restored
        // on both the local track and on the peerconnection.
        if (this._streamEffect) {
            promise = this.setEffect();
        }
        if (this.conference) {
            promise = promise.then(() => this.conference.removeTrack(this));
        }
        if (this.stream) {
            this.stopStream();
            this.detach();
        }

        // Undo the listeners registered in the constructor.
        RTCUtils.removeListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);
        if (this._onAudioOutputDeviceChanged) {
            RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        return promise.then(() => super.dispose());
    }
  530. /**
  531. * Returns facing mode for video track from camera. For other cases (e.g. audio track or 'desktop' video track)
  532. * returns undefined.
  533. *
  534. * @returns {CameraFacingMode|undefined}
  535. */
  536. getCameraFacingMode() {
  537. if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
  538. // MediaStreamTrack#getSettings() is not implemented in many
  539. // browsers, so we need feature checking here. Progress on the
  540. // respective browser's implementation can be tracked at
  541. // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
  542. // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
  543. // for Firefox. Even if a browser implements getSettings() already,
  544. // it might still not return anything for 'facingMode'.
  545. const trackSettings = this.track.getSettings?.();
  546. if (trackSettings && 'facingMode' in trackSettings) {
  547. return trackSettings.facingMode;
  548. }
  549. if (typeof this._facingMode !== 'undefined') {
  550. return this._facingMode;
  551. }
  552. // In most cases we are showing a webcam. So if we've gotten here,
  553. // it should be relatively safe to assume that we are probably
  554. // showing the user-facing camera.
  555. return CameraFacingMode.USER;
  556. }
  557. return undefined;
  558. }
  559. /**
  560. * Returns device id associated with track.
  561. *
  562. * @returns {string}
  563. */
  564. getDeviceId() {
  565. return this._realDeviceId || this.deviceId;
  566. }
  567. /**
  568. * Get the duration of the track.
  569. *
  570. * @returns {Number} the duration of the track in seconds
  571. */
  572. getDuration() {
  573. return (Date.now() / 1000) - (this.metadata.timestamp / 1000);
  574. }
  575. /**
  576. * Returns the participant id which owns the track.
  577. *
  578. * @returns {string} the id of the participants. It corresponds to the
  579. * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
  580. */
  581. getParticipantId() {
  582. return this.conference && this.conference.myUserId();
  583. }
  584. /**
  585. * Returns if associated MediaStreamTrack is in the 'ended' state
  586. *
  587. * @returns {boolean}
  588. */
  589. isEnded() {
  590. if (this.isVideoTrack() && this.isMuted()) {
  591. // If a video track is muted the readyState will be ended, that's why we need to rely only on the
  592. // _trackEnded flag.
  593. return this._trackEnded;
  594. }
  595. return this.getTrack().readyState === 'ended' || this._trackEnded;
  596. }
  597. /**
  598. * Returns <tt>true</tt>.
  599. *
  600. * @returns {boolean} <tt>true</tt>
  601. */
  602. isLocal() {
  603. return true;
  604. }
  605. /**
  606. * Returns <tt>true</tt> - if the stream is muted and <tt>false</tt> otherwise.
  607. *
  608. * @returns {boolean} <tt>true</tt> - if the stream is muted and <tt>false</tt> otherwise.
  609. */
  610. isMuted() {
  611. // this.stream will be null when we mute local video on Chrome
  612. if (!this.stream) {
  613. return true;
  614. }
  615. if (this.isVideoTrack() && !this.isActive()) {
  616. return true;
  617. }
  618. // If currently used stream effect has its own muted state, use that.
  619. if (this._streamEffect && this._streamEffect.isMuted) {
  620. return this._streamEffect.isMuted();
  621. }
  622. return !this.track || !this.track.enabled;
  623. }
    /**
     * Checks whether the attached MediaStream is receiving data from source or not. If the stream property is null
     * (because of mute or another reason) this method will return false.
     * NOTE: This method doesn't indicate problem with the streams directly. For example in case of video mute the
     * method will return false or if the user has disposed the track.
     *
     * @returns {boolean} true if the stream is receiving data and false this otherwise.
     */
    isReceivingData() {
        // Muted/stopping video and desktop tracks are reported as "receiving" so that no
        // spurious NO_DATA_FROM_SOURCE events are raised for them.
        if (this.isVideoTrack()
            && (this.isMuted() || this._stopStreamInProgress || this.videoType === VideoType.DESKTOP)) {
            return true;
        }
        if (!this.stream) {
            return false;
        }

        // In older version of the spec there is no muted property and readyState can have value muted. In the latest
        // versions readyState can have values "live" and "ended" and there is muted boolean property. If the stream is
        // muted that means that we aren't receiving any data from the source. We want to notify the users for error if
        // the stream is muted or ended on it's creation.
        // For video blur enabled use the original video stream
        const stream = this._effectEnabled ? this._originalStream : this.stream;

        return stream.getTracks().some(track =>
            (!('readyState' in track) || track.readyState === 'live')
                && (!('muted' in track) || track.muted !== true));
    }
  650. /**
  651. * Asynchronously mutes this track.
  652. *
  653. * @returns {Promise}
  654. */
  655. mute() {
  656. return this._queueSetMuted(true);
  657. }
    /**
     * Handles bytes sent statistics. NOTE: used only for audio tracks to detect audio issues.
     *
     * Once the peer connection reaches 'connected', a one-shot check is scheduled: if no bytes
     * have been reported sent within 3 seconds, a NO_BYTES_SENT analytics event is emitted.
     *
     * @param {TraceablePeerConnection} tpc - The peerconnection that is reporting the bytes sent stat.
     * @param {number} bytesSent - The new value.
     * @returns {void}
     */
    onByteSentStatsReceived(tpc, bytesSent) {
        if (bytesSent > 0) {
            this._hasSentData = true;
        }
        const iceConnectionState = tpc.getConnectionState();

        if (this._testDataSent && iceConnectionState === 'connected') {
            setTimeout(() => {
                if (!this._hasSentData) {
                    logger.warn(`${this} 'bytes sent' <= 0: \
${bytesSent}`);
                    Statistics.analytics.sendEvent(NO_BYTES_SENT, { 'media_type': this.getType() });
                }
            }, 3000);

            // Perform the check only once per track.
            this._testDataSent = false;
        }
    }
  681. /**
  682. * Sets the JitsiConference object associated with the track. This is temp solution.
  683. *
  684. * @param conference - JitsiConference object.
  685. * @returns {void}
  686. */
  687. setConference(conference) {
  688. this.conference = conference;
  689. // We want to keep up with postponed events which should have been fired
  690. // on "attach" call, but for local track we not always have the
  691. // conference before attaching. However this may result in duplicated
  692. // events if they have been triggered on "attach" already.
  693. for (let i = 0; i < this.containers.length; i++) {
  694. this._maybeFireTrackAttached(this.containers[i]);
  695. }
  696. }
    /**
     * Sets the effect and switches between the modified stream and original one.
     *
     * @param {Object} effect - Represents the effect instance to be used; <tt>undefined</tt> removes
     * the currently active effect.
     * @returns {Promise} - Resolves once the effect switch (and conference renegotiation, if needed)
     * has completed; rejects on an incompatible effect or if a switch is already in progress.
     */
    setEffect(effect) {
        // No current effect and nothing requested — nothing to do.
        if (typeof this._streamEffect === 'undefined' && typeof effect === 'undefined') {
            return Promise.resolve();
        }
        if (typeof effect !== 'undefined' && !effect.isEnabled(this)) {
            return Promise.reject(new Error('Incompatible effect instance!'));
        }
        if (this._setEffectInProgress === true) {
            return Promise.reject(new Error('setEffect already in progress!'));
        }

        // In case we have an audio track that is being enhanced with an effect, we still want it to be applied,
        // even if the track is muted. Whereas for video the actual track doesn't exist if it's muted.
        if (this.isMuted() && !this.isAudioTrack()) {
            // Remember the effect; it will be started on unmute (see _setMuted).
            this._streamEffect = effect;

            return Promise.resolve();
        }

        const conference = this.conference;

        if (!conference) {
            // Not in a conference — swap the effect locally and re-attach to containers.
            this._switchStreamEffect(effect);
            if (this.isVideoTrack()) {
                this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));
            }

            return Promise.resolve();
        }

        this._setEffectInProgress = true;

        // TODO: Create new JingleSessionPC method for replacing a stream in JitsiLocalTrack without offer answer.
        return conference.removeTrack(this)
            .then(() => {
                this._switchStreamEffect(effect);
                if (this.isVideoTrack()) {
                    this.containers.forEach(cont => RTCUtils.attachMediaStream(cont, this.stream));
                }

                return conference.addTrack(this);
            })
            .then(() => {
                this._setEffectInProgress = false;
            })
            .catch(error => {
                // Any error will be not recoverable and will trigger CONFERENCE_FAILED event. But let's try to cleanup
                // everything related to the effect functionality.
                this._setEffectInProgress = false;
                this._switchStreamEffect();
                logger.error('Failed to switch to the new stream!', error);
                throw error;
            });
    }
    /**
     * Stops the associated MediaStream.
     *
     * @returns {void}
     */
    stopStream() {
        /**
         * Indicates that we are executing {@link #stopStream} i.e.
         * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
         * associated with this <tt>JitsiTrack</tt> instance.
         *
         * @private
         * @type {boolean}
         */
        this._stopStreamInProgress = true;
        try {
            RTCUtils.stopMediaStream(this.stream);
        } finally {
            // Always clear the flag, even if stopping the stream throws.
            this._stopStreamInProgress = false;
        }
    }
  770. /**
  771. * Creates a text representation of this local track instance.
  772. *
  773. * @return {string}
  774. */
  775. toString() {
  776. return `LocalTrack[${this.rtcId},${this.getType()}]`;
  777. }
  778. /**
  779. * Asynchronously unmutes this track.
  780. *
  781. * @returns {Promise}
  782. */
  783. unmute() {
  784. return this._queueSetMuted(false);
  785. }
  786. }