JitsiLocalTrack.js

import { getLogger } from '@jitsi/logger';

import JitsiTrackError from '../../JitsiTrackError';
import {
    TRACK_IS_DISPOSED,
    TRACK_NO_STREAM_FOUND
} from '../../JitsiTrackErrors';
import {
    LOCAL_TRACK_STOPPED,
    NO_DATA_FROM_SOURCE,
    TRACK_MUTE_CHANGED
} from '../../JitsiTrackEvents';
import { CameraFacingMode } from '../../service/RTC/CameraFacingMode';
import { MediaType } from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import { VideoType } from '../../service/RTC/VideoType';
import {
    NO_BYTES_SENT,
    TRACK_UNMUTED,
    createNoDataFromSourceEvent
} from '../../service/statistics/AnalyticsEvents';
import browser from '../browser';
import Statistics from '../statistics/statistics';
import { isValidNumber } from '../util/MathUtil';
import JitsiTrack from './JitsiTrack';
import RTCUtils from './RTCUtils';

const logger = getLogger('modules/RTC/JitsiLocalTrack');

/**
 * Represents a single media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 */
export default class JitsiLocalTrack extends JitsiTrack {
    /**
     * Constructs a new JitsiLocalTrack instance.
     *
     * @constructor
     * @param {Object} trackInfo
     * @param {Object} trackInfo.constraints - The constraints used for creating the track.
     * @param {number} trackInfo.rtcId - The ID assigned by the RTC module.
     * @param {Object} trackInfo.stream - The WebRTC MediaStream, parent of the track.
     * @param {Object} trackInfo.track - The underlying WebRTC MediaStreamTrack for new JitsiLocalTrack.
     * @param {string} trackInfo.mediaType - The MediaType of the JitsiLocalTrack.
     * @param {string} trackInfo.videoType - The VideoType of the JitsiLocalTrack.
     * @param {Array<Object>} trackInfo.effects - The effects to be applied to the JitsiLocalTrack.
     * @param {string} trackInfo.deviceId - The ID of the local device for this track.
     * @param {string} trackInfo.facingMode - The camera facing mode used in the getUserMedia call (mobile only).
     * @param {string} trackInfo.sourceId - The id of the desktop sharing source, which is the Chrome media source ID,
     * returned by Desktop Picker on Electron. NOTE: defined for desktop sharing tracks only.
     * @param {string} trackInfo.sourceType - The type of source the track originates from.
     */
    constructor({
        constraints,
        deviceId,
        facingMode,
        mediaType,
        rtcId,
        sourceId,
        sourceType,
        stream,
        track,
        videoType,
        effects = []
    }) {
        super(
            /* conference */ null,
            stream,
            track,
            /* streamInactiveHandler */ () => this.emit(LOCAL_TRACK_STOPPED, this),
            mediaType,
            videoType);

        this._setEffectInProgress = false;
        const effect = effects.find(e => e.isEnabled(this));

        if (effect) {
            this._startStreamEffect(effect);
        }

        const displaySurface = videoType === VideoType.DESKTOP
            ? track.getSettings().displaySurface
            : null;

        /**
         * Track metadata.
         */
        this.metadata = {
            timestamp: Date.now(),
            ...displaySurface ? { displaySurface } : {}
        };

        /**
         * The ID assigned by the RTC module on instance creation.
         *
         * @type {number}
         */
        this.rtcId = rtcId;
        this.sourceId = sourceId;
        this.sourceType = sourceType ?? displaySurface;

        // Cache the constraints of the track in case this track model needs to call getUserMedia again, such as
        // when unmuting.
        this._constraints = track.getConstraints();
        if (mediaType === MediaType.VIDEO) {
            if (videoType === VideoType.CAMERA) {
                // Safari returns an empty constraints object, construct the constraints using getSettings.
                // Firefox in "fingerprint resistance mode" does a similar thing, except a `mediaSource` key is set.
                if (!this._constraints.height || !this._constraints.width) {
                    this._constraints = {
                        height: { ideal: this.getHeight() },
                        width: { ideal: this.getWidth() }
                    };
                }

                // If the constraints are still empty, fall back to the constraints used for the initial gUM call.
                if (!isValidNumber(this._constraints.height.ideal) && !isValidNumber(this._constraints.width.ideal)) {
                    this._constraints.height = { ideal: constraints.height.ideal };
                    this._constraints.width = { ideal: constraints.width.ideal };
                }
            }

            // Get the resolution from the track itself since we do not know what camera capability the browser has
            // picked for the given constraints; fall back to the constraints if MediaStreamTrack.getSettings()
            // doesn't return the height.
            this.resolution = this.getHeight();
            if (!isValidNumber(this.resolution) && this._constraints.height?.ideal) {
                this.resolution = this._constraints.height.ideal;
            }
            this.maxEnabledResolution = this.resolution;
        }
        this.deviceId = deviceId;

        /**
         * The <tt>Promise</tt> which represents the progress of a previously
         * queued/scheduled {@link _setMuted} (from the point of view of
         * {@link _queueSetMuted}).
         *
         * @private
         * @type {Promise}
         */
        this._prevSetMuted = Promise.resolve();

        /**
         * The facing mode of the camera from which this JitsiLocalTrack
         * instance was obtained.
         *
         * @private
         * @type {CameraFacingMode|undefined}
         */
        this._facingMode = facingMode;

        // Currently there is no way to know that a MediaStreamTrack ended due
        // to a device disconnect in Firefox through e.g. the "readyState" property.
        // Instead we will compare the current track's label with device labels from
        // the enumerateDevices() list.
        this._trackEnded = false;

        /**
         * Indicates whether data has been sent or not.
         */
        this._hasSentData = false;

        /**
         * Used only for detection of audio problems. We want to check only once
         * whether the track is sending data or not. This flag is set to false
         * after the check.
         */
        this._testDataSent = true;

        // Currently there is no way to determine with which device the track was
        // created (until getConstraints() is supported), however we can associate
        // tracks with real devices obtained from the enumerateDevices() call as
        // soon as it's called.
        // NOTE: this.deviceId corresponds to the device id specified in the GUM constraints and this._realDeviceId
        // seems to correspond to the id of a matching device from the available device list.
        this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

        // The source name that will be signaled for this track.
        this._sourceName = null;

        // The primary SSRC associated with the local media track. This will be set after the local description
        // is processed, once the track is added to the peerconnection.
        this._ssrc = null;

        this._trackMutedTS = 0;

        this._onDeviceListWillChange = devices => {
            const oldRealDeviceId = this._realDeviceId;

            this._setRealDeviceIdFromDeviceList(devices);

            if (

                // Mark the track as ended for those browsers that do not support
                // the "readyState" property. We do not touch tracks created with
                // the default device ID "".
                (typeof this.getTrack().readyState === 'undefined'
                    && typeof this._realDeviceId !== 'undefined'
                    && !devices.find(d => d.deviceId === this._realDeviceId))

                // If there was an associated realDeviceID and after the device change the realDeviceId is undefined
                // then the associated device has been disconnected and the _trackEnded flag needs to be set. In
                // addition, on some Chrome versions the readyState property is set after the device change event is
                // triggered, which causes issues in jitsi-meet with the selection of a new device because we don't
                // detect that the old one was removed.
                || (typeof oldRealDeviceId !== 'undefined' && typeof this._realDeviceId === 'undefined')
            ) {
                this._trackEnded = true;
            }
        };

        // Subscribe each created local audio track to the
        // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
        // handling this event for remote tracks (which are handled in RTC.js),
        // because there might be local tracks not attached to a conference.
        if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
            this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);

            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        RTCUtils.addListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);

        this._initNoDataFromSourceHandlers();
    }

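    /*
     * A minimal construction sketch (hypothetical values; applications normally
     * obtain instances via JitsiMeetJS.createLocalTracks() rather than calling
     * this constructor directly):
     *
     *   const track = new JitsiLocalTrack({
     *       constraints: { height: { ideal: 720 }, width: { ideal: 1280 } },
     *       rtcId: 1,
     *       stream,                            // MediaStream from getUserMedia
     *       track: stream.getVideoTracks()[0],
     *       mediaType: MediaType.VIDEO,
     *       videoType: VideoType.CAMERA,
     *       effects: [],
     *       deviceId: 'default',
     *       facingMode: CameraFacingMode.USER
     *   });
     */
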
    /**
     * Adds the stream to the conference and marks it as an "unmute" operation.
     *
     * @private
     * @returns {Promise}
     */
    _addStreamToConferenceAsUnmute() {
        if (!this.conference) {
            return Promise.resolve();
        }

        // FIXME it would be good not to include the conference as part of this process. Only TraceablePeerConnections
        // to which the track is attached should care about this action. The TPCs to which the track is not attached
        // can sync up when the track is re-attached. A problem with that is that the "modify sources" queue is part
        // of the JingleSessionPC and it would be excluded from the process. One solution would be to extract a class
        // between TPC and JingleSessionPC which would contain the queue and would notify the signaling layer when
        // local SSRCs are changed. This would help to separate XMPP from the RTC module.
        return new Promise((resolve, reject) => {
            this.conference._addLocalTrackToPc(this)
                .then(resolve, error => reject(new Error(error)));
        });
    }

    /**
     * Fires the NO_DATA_FROM_SOURCE event and logs it to analytics.
     *
     * @private
     * @returns {void}
     */
    _fireNoDataFromSourceEvent() {
        const value = !this.isReceivingData();

        this.emit(NO_DATA_FROM_SOURCE, value);
        logger.debug(`NO_DATA_FROM_SOURCE event with value ${value} detected for track: ${this}`);

        // FIXME: Should we report all of those events?
        Statistics.sendAnalytics(createNoDataFromSourceEvent(this.getType(), value));
    }

    /**
     * Sets handlers to the MediaStreamTrack object that will detect camera issues.
     *
     * @private
     * @returns {void}
     */
    _initNoDataFromSourceHandlers() {
        if (!this._isNoDataFromSourceEventsEnabled()) {
            return;
        }

        this._setHandler('track_mute', () => {
            this._trackMutedTS = window.performance.now();
            this._fireNoDataFromSourceEvent();
        });

        this._setHandler('track_unmute', () => {
            this._fireNoDataFromSourceEvent();
            Statistics.sendAnalyticsAndLog(
                TRACK_UNMUTED,
                {
                    'media_type': this.getType(),
                    'track_type': 'local',
                    value: window.performance.now() - this._trackMutedTS
                });
        });

        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            this._setHandler('track_ended', () => {
                if (!this.isReceivingData()) {
                    this._fireNoDataFromSourceEvent();
                }
            });
        }
    }

    /**
     * Returns true if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
     *
     * @private
     * @returns {boolean} - True if no data from source events are enabled for this JitsiLocalTrack and false
     * otherwise.
     */
    _isNoDataFromSourceEventsEnabled() {
        // Disable the events for screen sharing.
        return !this.isVideoTrack() || this.videoType !== VideoType.DESKTOP;
    }

    /**
     * Initializes a new Promise to execute {@link #_setMuted}. May be called multiple times in a row and the
     * invocations of {@link #_setMuted} and, consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
     * serialized fashion.
     *
     * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
     * @private
     * @returns {Promise}
     */
    _queueSetMuted(muted) {
        const setMuted = this._setMuted.bind(this, muted);

        this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);

        return this._prevSetMuted;
    }

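    /*
     * A sketch of why the chaining above serializes mute operations: every call
     * appends to the previous promise on both the fulfillment and the rejection
     * path, so rapid mute()/unmute() calls never interleave:
     *
     *   track.mute();    // queued first
     *   track.unmute();  // runs only after the mute attempt has settled
     */
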
    /**
     * Removes the stream from the conference and marks it as a "mute" operation.
     *
     * @param {Function} successCallback - Callback that will be called when the operation is successful.
     * @param {Function} errorCallback - Callback that will be called when the operation fails.
     * @private
     * @returns {void}
     */
    _removeStreamFromConferenceAsMute(successCallback, errorCallback) {
        if (!this.conference) {
            successCallback();

            return;
        }

        this.conference._removeLocalTrackFromPc(this).then(
            successCallback,
            error => errorCallback(new Error(error)));
    }

    /**
     * Sends the mute status for the track to the conference, if any.
     *
     * @param {boolean} mute - Whether the track is muted.
     * @private
     * @returns {void}
     */
    _sendMuteStatus(mute) {
        if (this.conference) {
            this.conference._setTrackMuteStatus(this.getType(), this, mute) && this.conference.room.sendPresence();
        }
    }

    /**
     * Mutes / unmutes this track.
     *
     * @param {boolean} muted - If <tt>true</tt>, this track will be muted; otherwise, this track will be unmuted.
     * @private
     * @returns {Promise}
     */
    _setMuted(muted) {
        if (this.isMuted() === muted && this.videoType !== VideoType.DESKTOP) {
            return Promise.resolve();
        }

        if (this.disposed) {
            return Promise.reject(new JitsiTrackError(TRACK_IS_DISPOSED));
        }

        let promise = Promise.resolve();

        // A function that will print info about the muted status transition.
        const logMuteInfo = () => logger.info(`Mute ${this}: ${muted}`);

        // In React Native we mute the camera by setting track.enabled but that doesn't
        // work for screen-share tracks, so do the remove-as-mute for those.
        const doesVideoMuteByStreamRemove
            = browser.isReactNative() ? this.videoType === VideoType.DESKTOP : browser.doesVideoMuteByStreamRemove();

        // In the multi-stream mode, desktop tracks are muted from jitsi-meet instead of being removed from the
        // conference. This is needed because we don't want the client to signal a source-remove to the remote peer
        // for the desktop track when screenshare is stopped. Later when screenshare is started again, the same
        // sender will be re-used without the need for signaling a new ssrc through source-add.
        if (this.isAudioTrack() || !doesVideoMuteByStreamRemove) {
            logMuteInfo();

            // If we have a stream effect that implements its own mute functionality, prioritize it before
            // normal mute e.g. the stream effect that implements system audio sharing has a custom
            // mute state in which if the user mutes, system audio still has to go through.
            if (this._streamEffect && this._streamEffect.setMuted) {
                this._streamEffect.setMuted(muted);
            } else if (this.track) {
                this.track.enabled = !muted;
            }
        } else if (muted) {
            promise = new Promise((resolve, reject) => {
                logMuteInfo();
                this._removeStreamFromConferenceAsMute(
                    () => {
                        if (this._streamEffect) {
                            this._stopStreamEffect();
                        }

                        // FIXME: Maybe here we should set the SRC for the containers to something.
                        // We don't want any events to be fired on this stream.
                        this._unregisterHandlers();
                        this.stopStream();
                        this._setStream(null);
                        resolve();
                    },
                    reject);
            });
        } else {
            logMuteInfo();

            // This path is only for camera.
            const streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                effects: this._streamEffect ? [ this._streamEffect ] : [],
                facingMode: this.getCameraFacingMode()
            };

            promise
                = RTCUtils.obtainAudioAndVideoPermissions({
                    ...streamOptions,
                    constraints: { video: this._constraints } });

            promise = promise.then(streamsInfo => {
                const streamInfo = streamsInfo.find(info => info.track.kind === this.getType());

                if (streamInfo) {
                    this._setStream(streamInfo.stream);
                    this.track = streamInfo.track;

                    // This is not good when the video type changes after unmute, but let's not crash here.
                    if (this.videoType !== streamInfo.videoType) {
                        logger.warn(
                            `${this}: video type has changed after unmute!`,
                            this.videoType, streamInfo.videoType);
                        this.videoType = streamInfo.videoType;
                    }
                } else {
                    throw new JitsiTrackError(TRACK_NO_STREAM_FOUND);
                }

                if (this._streamEffect) {
                    this._startStreamEffect(this._streamEffect);
                }

                this.containers.map(cont => RTCUtils.attachMediaStream(cont, this.stream).catch(() => {
                    logger.error(`Attach media failed for ${this} on video unmute!`);
                }));

                return this._addStreamToConferenceAsUnmute();
            });
        }

        return promise
            .then(() => {
                this._sendMuteStatus(muted);

                // Send the videoType message to the bridge.
                this.isVideoTrack() && this.conference && this.conference._sendBridgeVideoTypeMessage(this);
                this.emit(TRACK_MUTE_CHANGED, this);
            });
    }

    /**
     * Sets the real device ID by comparing track information with device information. This is a temporary solution
     * until the getConstraints() method is implemented in browsers.
     *
     * @param {MediaDeviceInfo[]} devices - The list of devices obtained from the enumerateDevices() call.
     * @private
     * @returns {void}
     */
    _setRealDeviceIdFromDeviceList(devices) {
        const track = this.getTrack();
        const kind = `${track.kind}input`;

        // We need to match by deviceId as well, in case of multiple devices with the same label.
        let device = devices.find(d => d.kind === kind && d.label === track.label && d.deviceId === this.deviceId);

        if (!device && this._realDeviceId === 'default') { // the default device has been changed
            // If the default device was 'A' and the default device is changed to 'B' the label for the track will
            // remain 'Default - A' but the label for the device in the device list will be updated to 'A'. That's
            // why in order to match it we need to remove the 'Default - ' part.
            const label = (track.label || '').replace('Default - ', '');

            device = devices.find(d => d.kind === kind && d.label === label);
        }

        if (device) {
            this._realDeviceId = device.deviceId;
        } else {
            this._realDeviceId = undefined;
        }
    }

    /**
     * Sets the stream property of the JitsiLocalTrack object and sets all stored handlers on it.
     *
     * @param {MediaStream} stream - The new MediaStream.
     * @private
     * @returns {void}
     */
    _setStream(stream) {
        super._setStream(stream);
    }

    /**
     * Starts the effect process and sets the modified stream on this track.
     *
     * @param {Object} effect - Represents the effect instance.
     * @private
     * @returns {void}
     */
    _startStreamEffect(effect) {
        this._streamEffect = effect;
        this._originalStream = this.stream;
        this._setStream(this._streamEffect.startEffect(this._originalStream));
        this.track = this.stream.getTracks()[0];
    }

    /**
     * Stops the effect process and restores the original stream.
     *
     * @private
     * @returns {void}
     */
    _stopStreamEffect() {
        if (this._streamEffect) {
            this._streamEffect.stopEffect();
            this._setStream(this._originalStream);
            this._originalStream = null;
            this.track = this.stream ? this.stream.getTracks()[0] : null;
        }
    }

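    /*
     * A minimal sketch of the effect interface assumed by the two methods above
     * (shape inferred from how the effect object is used in this file, not an
     * official interface definition):
     *
     *   const passThroughEffect = {
     *       isEnabled: track => track.isVideoTrack(),
     *       startEffect: originalStream => originalStream, // may return a new MediaStream
     *       stopEffect: () => {} // release any resources held by the effect
     *   };
     */
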
    /**
     * Switches the camera facing mode if the WebRTC implementation supports the custom MediaStreamTrack._switchCamera
     * method. Currently, the method in question is implemented in react-native-webrtc only. When such a WebRTC
     * implementation is executing, the method is the preferred way to switch between the front/user-facing and the
     * back/environment-facing cameras because it will likely be (as is the case of react-native-webrtc) noticeably
     * faster than creating a new MediaStreamTrack via a new getUserMedia call with the switched facingMode constraint
     * value. Moreover, the approach with a new getUserMedia call may not even work: WebRTC on Android and iOS is
     * either very slow to open the camera a second time or plainly freezes attempting to do that.
     *
     * @returns {void}
     */
    _switchCamera() {
        if (this.isVideoTrack()
                && this.videoType === VideoType.CAMERA
                && typeof this.track._switchCamera === 'function') {
            this.track._switchCamera();

            this._facingMode
                = this._facingMode === CameraFacingMode.ENVIRONMENT
                    ? CameraFacingMode.USER
                    : CameraFacingMode.ENVIRONMENT;
        }
    }

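    /*
     * Usage sketch (react-native-webrtc only; elsewhere this is a no-op because
     * MediaStreamTrack._switchCamera does not exist):
     *
     *   localVideoTrack._switchCamera(); // toggles USER <-> ENVIRONMENT
     */
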
    /**
     * Stops the currently used effect (if there is one) and starts the passed effect (if there is one).
     *
     * @param {Object|undefined} effect - The new effect to be set.
     * @private
     * @returns {void}
     */
    _switchStreamEffect(effect) {
        if (this._streamEffect) {
            this._stopStreamEffect();
            this._streamEffect = undefined;
        }

        if (effect) {
            this._startStreamEffect(effect);
        }
    }

    /**
     * @inheritdoc
     *
     * Stops sending the media track and removes it from the HTML. NOTE: Works for local tracks only.
     *
     * @extends JitsiTrack#dispose
     * @returns {Promise}
     */
    async dispose() {
        if (this.disposed) {
            return;
        }

        // Remove the effect instead of stopping it so that the original stream is restored
        // on both the local track and on the peerconnection.
        if (this._streamEffect) {
            await this.setEffect();
        }

        if (this.conference) {
            await this.conference.removeTrack(this);
        }

        if (this.stream) {
            this.stopStream();
        }

        RTCUtils.removeListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);

        if (this._onAudioOutputDeviceChanged) {
            RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        return super.dispose();
    }

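    /*
     * Disposal sketch: callers are expected to await dispose() so the device is
     * released before a replacement track is acquired:
     *
     *   await oldTrack.dispose();
     *   // ...then create a new track, e.g. via JitsiMeetJS.createLocalTracks()
     */
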
    /**
     * Returns the facing mode for a video track from the camera. For other cases (e.g. audio track or 'desktop'
     * video track) returns undefined.
     *
     * @returns {CameraFacingMode|undefined}
     */
    getCameraFacingMode() {
        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            // MediaStreamTrack#getSettings() is not implemented in many
            // browsers, so we need feature checking here. Progress on the
            // respective browser's implementation can be tracked at
            // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
            // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
            // for Firefox. Even if a browser implements getSettings() already,
            // it might still not return anything for 'facingMode'.
            const trackSettings = this.track.getSettings?.();

            if (trackSettings && 'facingMode' in trackSettings) {
                return trackSettings.facingMode;
            }

            if (typeof this._facingMode !== 'undefined') {
                return this._facingMode;
            }

            // In most cases we are showing a webcam. So if we've gotten here,
            // it should be relatively safe to assume that we are probably
            // showing the user-facing camera.
            return CameraFacingMode.USER;
        }

        return undefined;
    }

    /**
     * Returns the capture resolution of the video track.
     *
     * @returns {Number}
     */
    getCaptureResolution() {
        if (this.videoType === VideoType.CAMERA || !browser.isWebKitBased()) {
            return this.resolution;
        }

        return this.getHeight();
    }

    /**
     * Returns the device id associated with the track.
     *
     * @returns {string}
     */
    getDeviceId() {
        return this._realDeviceId || this.deviceId;
    }

    /**
     * Gets the duration of the track.
     *
     * @returns {Number} the duration of the track in seconds
     */
    getDuration() {
        return (Date.now() / 1000) - (this.metadata.timestamp / 1000);
    }

    /**
     * Returns the participant id which owns the track.
     *
     * @returns {string} the id of the participant. It corresponds to the
     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
     */
    getParticipantId() {
        return this.conference && this.conference.myUserId();
    }

    /**
     * Returns the source name associated with the jitsi track.
     *
     * @returns {string | null} source name
     */
    getSourceName() {
        return this._sourceName;
    }

    /**
     * Returns the primary SSRC associated with the track.
     *
     * @returns {number}
     */
    getSsrc() {
        return this._ssrc;
    }

    /**
     * Returns whether the associated MediaStreamTrack is in the 'ended' state.
     *
     * @returns {boolean}
     */
    isEnded() {
        if (this.isVideoTrack() && this.isMuted()) {
            // If a video track is muted the readyState will be ended, that's why we need to rely only on the
            // _trackEnded flag.
            return this._trackEnded;
        }

        return this.getTrack().readyState === 'ended' || this._trackEnded;
    }

    /**
     * Returns <tt>true</tt>.
     *
     * @returns {boolean} <tt>true</tt>
     */
    isLocal() {
        return true;
    }

    /**
     * Returns <tt>true</tt> if the stream is muted and <tt>false</tt> otherwise.
     *
     * @returns {boolean} <tt>true</tt> if the stream is muted and <tt>false</tt> otherwise.
     */
    isMuted() {
        // this.stream will be null when we mute local video on Chrome.
        if (!this.stream) {
            return true;
        }

        if (this.isVideoTrack() && !this.isActive()) {
            return true;
        }

        // If the currently used stream effect has its own muted state, use that.
        if (this._streamEffect && this._streamEffect.isMuted) {
            return this._streamEffect.isMuted();
        }

        return !this.track || !this.track.enabled;
    }

    /**
     * Checks whether the attached MediaStream is receiving data from the source or not. If the stream property is
     * null (because of mute or another reason) this method will return false.
     * NOTE: This method doesn't indicate a problem with the streams directly. For example, in case of video mute or
     * when the user has disposed the track, the method will return false.
     *
     * @returns {boolean} true if the stream is receiving data and false otherwise.
     */
    isReceivingData() {
        if (this.isVideoTrack()
            && (this.isMuted() || this._stopStreamInProgress || this.videoType === VideoType.DESKTOP)) {
            return true;
        }

        if (!this.stream) {
            return false;
        }

        // In older versions of the spec there is no muted property and readyState can have the value 'muted'. In the
        // latest versions readyState can have the values "live" and "ended" and there is a muted boolean property.
        // If the stream is muted that means that we aren't receiving any data from the source. We want to notify the
        // users of an error if the stream is muted or ended on its creation.
        // For video blur enabled use the original video stream.
        const stream = this._effectEnabled ? this._originalStream : this.stream;

        return stream.getTracks().some(track =>
            (!('readyState' in track) || track.readyState === 'live')
                && (!('muted' in track) || track.muted !== true));
    }

    /**
     * Asynchronously mutes this track.
     *
     * @returns {Promise}
     */
    mute() {
        return this._queueSetMuted(true);
    }

    /**
     * Handles bytes sent statistics. NOTE: used only for audio tracks to detect audio issues.
     *
     * @param {TraceablePeerConnection} tpc - The peerconnection that is reporting the bytes sent stat.
     * @param {number} bytesSent - The new value.
     * @returns {void}
     */
    onByteSentStatsReceived(tpc, bytesSent) {
        if (bytesSent > 0) {
            this._hasSentData = true;
        }

        const iceConnectionState = tpc.getConnectionState();

        if (this._testDataSent && iceConnectionState === 'connected') {
            setTimeout(() => {
                if (!this._hasSentData) {
                    logger.warn(`${this} 'bytes sent' <= 0: ${bytesSent}`);
                    Statistics.analytics.sendEvent(NO_BYTES_SENT, { 'media_type': this.getType() });
                }
            }, 3000);
            this._testDataSent = false;
        }
    }

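    /*
     * Call-site sketch (hypothetical): a stats reporter would feed audio sender
     * stats into this handler periodically, e.g.:
     *
     *   audioTrack.onByteSentStatsReceived(tpc, report.bytesSent);
     */
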
    /**
     * Sets the JitsiConference object associated with the track. This is a temporary solution.
     *
     * @param conference - JitsiConference object.
     * @returns {void}
     */
    setConference(conference) {
        this.conference = conference;
    }

    /**
     * Sets the effect and switches between the modified stream and the original one.
     *
     * @param {Object} effect - Represents the effect instance to be used.
     * @returns {Promise}
     */
    setEffect(effect) {
        if (typeof this._streamEffect === 'undefined' && typeof effect === 'undefined') {
            return Promise.resolve();
        }

        if (typeof effect !== 'undefined' && !effect.isEnabled(this)) {
            return Promise.reject(new Error('Incompatible effect instance!'));
        }

        if (this._setEffectInProgress === true) {
            return Promise.reject(new Error('setEffect already in progress!'));
        }

        // In case we have an audio track that is being enhanced with an effect, we still want it to be applied,
        // even if the track is muted, whereas for video the actual track doesn't exist if it's muted.
        if (this.isMuted() && !this.isAudioTrack()) {
            this._streamEffect = effect;

            return Promise.resolve();
        }

        const conference = this.conference;

        if (!conference) {
            this._switchStreamEffect(effect);
            if (this.isVideoTrack()) {
                this.containers.forEach(cont => {
                    RTCUtils.attachMediaStream(cont, this.stream).catch(() => {
                        logger.error(`Attach media failed for ${this} when trying to set effect.`);
                    });
                });
            }

            return Promise.resolve();
        }

        this._setEffectInProgress = true;

        return conference._removeLocalTrackFromPc(this)
            .then(() => {
                this._switchStreamEffect(effect);
                if (this.isVideoTrack()) {
                    this.containers.forEach(cont => {
                        RTCUtils.attachMediaStream(cont, this.stream).catch(() => {
                            logger.error(`Attach media failed for ${this} when trying to set effect.`);
                        });
                    });
                }

                return conference._addLocalTrackToPc(this);
            })
            .then(() => {
                this._setEffectInProgress = false;
            })
            .catch(error => {
                // Any error will not be recoverable and will trigger a CONFERENCE_FAILED event. But let's try to
                // clean up everything related to the effect functionality.
                this._setEffectInProgress = false;
                this._switchStreamEffect();
                logger.error('Failed to switch to the new stream!', error);

                throw error;
            });
    }

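    /*
     * Usage sketch: applying and removing an effect (the effect object is
     * assumed to implement the interface sketched near _startStreamEffect):
     *
     *   await track.setEffect(someEffect);  // switch to the modified stream
     *   await track.setEffect(undefined);   // restore the original stream
     */
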
    /**
     * Sets the source name to be used for signaling the jitsi track.
     *
     * @param {string} name The source name.
     */
    setSourceName(name) {
        this._sourceName = name;
    }

    /**
     * Sets the primary SSRC for the track.
     *
     * @param {number} ssrc The SSRC.
     */
    setSsrc(ssrc) {
        if (isValidNumber(ssrc)) {
            this._ssrc = ssrc;
        }
    }

    /**
     * Stops the associated MediaStream.
     *
     * @returns {void}
     */
    stopStream() {
        /**
         * Indicates that we are executing {@link #stopStream} i.e.
         * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
         * associated with this <tt>JitsiTrack</tt> instance.
         *
         * @private
         * @type {boolean}
         */
        this._stopStreamInProgress = true;

        try {
            RTCUtils.stopMediaStream(this.stream);
        } finally {
            this._stopStreamInProgress = false;
        }
    }

    /**
     * Creates a text representation of this local track instance.
     *
     * @return {string}
     */
    toString() {
        return `LocalTrack[${this.rtcId},${this.getType()}]`;
    }

    /**
     * Asynchronously unmutes this track.
     *
     * @returns {Promise}
     */
    unmute() {
        return this._queueSetMuted(false);
    }
}