// actions.web.ts — web-specific base/tracks actions.
  1. // @ts-expect-error
  2. import { AUDIO_ONLY_SCREEN_SHARE_NO_TRACK } from '../../../../modules/UI/UIErrors';
  3. import { IReduxState, IStore } from '../../app/types';
  4. import { showModeratedNotification } from '../../av-moderation/actions';
  5. import { shouldShowModeratedNotification } from '../../av-moderation/functions';
  6. import { setNoiseSuppressionEnabled } from '../../noise-suppression/actions';
  7. import { showErrorNotification, showNotification } from '../../notifications/actions';
  8. import { NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
  9. import { stopReceiver } from '../../remote-control/actions';
  10. import { setScreenAudioShareState, setScreenshareAudioTrack } from '../../screen-share/actions';
  11. import { isAudioOnlySharing, isScreenVideoShared } from '../../screen-share/functions';
  12. import { toggleScreenshotCaptureSummary } from '../../screenshot-capture/actions';
  13. import { isScreenshotCaptureEnabled } from '../../screenshot-capture/functions';
  14. import { AudioMixerEffect } from '../../stream-effects/audio-mixer/AudioMixerEffect';
  15. import { getCurrentConference } from '../conference/functions';
  16. import { notifyCameraError, notifyMicError } from '../devices/actions.web';
  17. import { openDialog } from '../dialog/actions';
  18. import { JitsiTrackErrors, JitsiTrackEvents, browser } from '../lib-jitsi-meet';
  19. import { gumPending, setScreenshareMuted } from '../media/actions';
  20. import { MEDIA_TYPE, MediaType, VIDEO_TYPE } from '../media/constants';
  21. import { IGUMPendingState } from '../media/types';
  22. import {
  23. addLocalTrack,
  24. replaceLocalTrack,
  25. toggleCamera
  26. } from './actions.any';
  27. import AllowToggleCameraDialog from './components/web/AllowToggleCameraDialog';
  28. import {
  29. createLocalTracksF,
  30. getLocalDesktopTrack,
  31. getLocalJitsiAudioTrack,
  32. getLocalVideoTrack,
  33. isToggleCameraEnabled
  34. } from './functions';
  35. import logger from './logger';
  36. import { ICreateInitialTracksOptions, IInitialTracksErrors, IShareOptions, IToggleScreenSharingOptions } from './types';
  37. export * from './actions.any';
  38. /**
  39. * Signals that the local participant is ending screensharing or beginning the screensharing flow.
  40. *
  41. * @param {boolean} enabled - The state to toggle screen sharing to.
  42. * @param {boolean} audioOnly - Only share system audio.
  43. * @param {Object} shareOptions - The options to be passed for capturing screenshare.
  44. * @returns {Function}
  45. */
  46. export function toggleScreensharing(
  47. enabled?: boolean,
  48. audioOnly = false,
  49. shareOptions: IShareOptions = {}) {
  50. return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
  51. // check for A/V Moderation when trying to start screen sharing
  52. if ((enabled || enabled === undefined) && shouldShowModeratedNotification(MEDIA_TYPE.VIDEO, getState())) {
  53. dispatch(showModeratedNotification(MEDIA_TYPE.SCREENSHARE));
  54. return Promise.reject();
  55. }
  56. return _toggleScreenSharing({
  57. enabled,
  58. audioOnly,
  59. shareOptions
  60. }, {
  61. dispatch,
  62. getState
  63. });
  64. };
  65. }
  66. /**
  67. * Displays a UI notification for screensharing failure based on the error passed.
  68. *
  69. * @private
  70. * @param {Object} error - The error.
  71. * @param {Object} store - The redux store.
  72. * @returns {void}
  73. */
  74. /**
  75. * Applies the AudioMixer effect on the local audio track if applicable. If there is no local audio track, the desktop
  76. * audio track is added to the conference.
  77. *
  78. * @private
  79. * @param {JitsiLocalTrack} desktopAudioTrack - The audio track to be added to the conference.
  80. * @param {*} state - The redux state.
  81. * @returns {void}
  82. */
  83. async function _maybeApplyAudioMixerEffect(desktopAudioTrack: any, state: IReduxState): Promise<void> {
  84. const localAudio = getLocalJitsiAudioTrack(state);
  85. const conference = getCurrentConference(state);
  86. if (localAudio) {
  87. // If there is a localAudio stream, mix in the desktop audio stream captured by the screen sharing API.
  88. const mixerEffect = new AudioMixerEffect(desktopAudioTrack);
  89. await localAudio.setEffect(mixerEffect);
  90. } else {
  91. // If no local stream is present ( i.e. no input audio devices) we use the screen share audio
  92. // stream as we would use a regular stream.
  93. await conference?.replaceTrack(null, desktopAudioTrack);
  94. }
  95. }
/**
 * Toggles screen sharing, either regular screenshare or audio-only share, handling both the
 * start and stop flows including the screenshare audio track.
 *
 * @private
 * @param {IToggleScreenSharingOptions} options - The enabled/audioOnly/shareOptions values.
 * @param {Store} store - The redux store.
 * @returns {void}
 */
async function _toggleScreenSharing(
        {
            enabled,
            audioOnly = false,
            shareOptions = {}
        }: IToggleScreenSharingOptions,
        store: IStore
): Promise<void> {
    const { dispatch, getState } = store;
    const state = getState();
    const audioOnlySharing = isAudioOnlySharing(state);
    const screenSharing = isScreenVideoShared(state);
    const conference = getCurrentConference(state);
    const localAudio = getLocalJitsiAudioTrack(state);
    const localScreenshare = getLocalDesktopTrack(state['features/base/tracks']);

    // Toggle screenshare or audio-only share if the new state is not passed. Happens in the following two cases.
    // 1. ShareAudioDialog passes undefined when the user hits continue in the share audio demo modal.
    // 2. Toggle screenshare called from the external API.
    const enable = audioOnly
        ? enabled ?? !audioOnlySharing
        : enabled ?? !screenSharing;
    const screensharingDetails: { sourceType?: string; } = {};

    if (enable) {
        let tracks;

        // Spot proxy stream: a pre-captured desktop stream was handed in, skip gUM entirely.
        if (shareOptions.desktopStream) {
            tracks = [ shareOptions.desktopStream ];
        } else {
            const { _desktopSharingSourceDevice } = state['features/base/config'];

            // A configured capture device takes effect only when no explicit sources were requested.
            if (!shareOptions.desktopSharingSources && _desktopSharingSourceDevice) {
                shareOptions.desktopSharingSourceDevice = _desktopSharingSourceDevice;
            }
            const options = {
                devices: [ VIDEO_TYPE.DESKTOP ],
                ...shareOptions
            };

            try {
                tracks = await createLocalTracksF(options) as any[];
            } catch (error) {
                // Notify the user about the failure, then propagate so callers can react.
                dispatch(handleScreenSharingError(error, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
                throw error;
            }
        }

        const desktopAudioTrack = tracks.find(track => track.getType() === MEDIA_TYPE.AUDIO);
        const desktopVideoTrack = tracks.find(track => track.getType() === MEDIA_TYPE.VIDEO);

        if (audioOnly) {
            // Dispose the desktop track for audio-only screensharing.
            // NOTE(review): dispose() returns a promise that is deliberately not awaited here,
            // and desktopVideoTrack is assumed to always exist on this path - confirm the
            // capture API always returns a video track.
            desktopVideoTrack.dispose();

            if (!desktopAudioTrack) {
                // The user did not tick "share audio" - audio-only share has nothing to do.
                dispatch(handleScreenSharingError(AUDIO_ONLY_SCREEN_SHARE_NO_TRACK, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
                throw new Error(AUDIO_ONLY_SCREEN_SHARE_NO_TRACK);
            }
        } else if (desktopVideoTrack) {
            // Re-use the existing screenshare sender when possible, otherwise add a new track.
            if (localScreenshare) {
                await dispatch(replaceLocalTrack(localScreenshare.jitsiTrack, desktopVideoTrack, conference));
            } else {
                await dispatch(addLocalTrack(desktopVideoTrack));
            }
            if (isScreenshotCaptureEnabled(state, false, true)) {
                dispatch(toggleScreenshotCaptureSummary(true));
            }
            screensharingDetails.sourceType = desktopVideoTrack.sourceType;
        }

        // Apply the AudioMixer effect if there is a local audio track, add the desktop track to the conference
        // otherwise without unmuting the microphone.
        if (desktopAudioTrack) {
            // Noise suppression doesn't work with desktop audio because we can't chain track effects yet, disable it
            // first. We need to wait for the effect to clear first or it might interfere with the audio mixer.
            await dispatch(setNoiseSuppressionEnabled(false));

            // NOTE(review): this async call is fire-and-forget - a rejection here is silently
            // dropped and the mixer effect may still be applying when the next dispatch runs.
            // Confirm whether it should be awaited.
            _maybeApplyAudioMixerEffect(desktopAudioTrack, state);
            dispatch(setScreenshareAudioTrack(desktopAudioTrack));

            // Handle the case where screen share was stopped from the browsers 'screen share in progress' window.
            if (audioOnly) {
                desktopAudioTrack?.on(
                    JitsiTrackEvents.LOCAL_TRACK_STOPPED,
                    () => dispatch(toggleScreensharing(undefined, true)));
            }
        }

        // Show notification about more bandwidth usage in audio-only mode if the user starts screensharing. This
        // doesn't apply to audio-only screensharing.
        const { enabled: bestPerformanceMode } = state['features/base/audio-only'];

        if (bestPerformanceMode && !audioOnly) {
            dispatch(showNotification({
                titleKey: 'notify.screenSharingAudioOnlyTitle',
                descriptionKey: 'notify.screenSharingAudioOnlyDescription'
            }, NOTIFICATION_TIMEOUT_TYPE.LONG));
        }
    } else {
        const { desktopAudioTrack } = state['features/screen-share'];

        dispatch(stopReceiver());
        dispatch(toggleScreenshotCaptureSummary(false));

        // Mute the desktop track instead of removing it from the conference since we don't want the client to signal
        // a source-remove to the remote peer for the screenshare track. Later when screenshare is enabled again, the
        // same sender will be re-used without the need for signaling a new ssrc through source-add.
        dispatch(setScreenshareMuted(true));
        if (desktopAudioTrack) {
            if (localAudio) {
                // Remove the mixer effect so the mic track reverts to plain microphone audio.
                // NOTE(review): setEffect returns a promise that is not awaited here.
                localAudio.setEffect(undefined);
            } else {
                // The desktop audio was used as the participant's audio track - remove it.
                await conference?.replaceTrack(desktopAudioTrack, null);
            }

            // NOTE(review): dispose() not awaited - presumably intentional best-effort cleanup.
            desktopAudioTrack.dispose();
            dispatch(setScreenshareAudioTrack(null));
        }
    }

    if (audioOnly) {
        dispatch(setScreenAudioShareState(enable));
    } else {
        // Notify the external API.
        APP.API.notifyScreenSharingStatusChanged(enable, screensharingDetails);
    }
}
  216. /**
  217. * Sets the camera facing mode(environment/user). If facing mode not provided, it will do a toggle.
  218. *
  219. * @param {string | undefined} facingMode - The selected facing mode.
  220. * @returns {void}
  221. */
  222. export function setCameraFacingMode(facingMode: string | undefined) {
  223. return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
  224. const state = getState();
  225. if (!isToggleCameraEnabled(state)) {
  226. return;
  227. }
  228. if (!facingMode) {
  229. dispatch(toggleCamera());
  230. return;
  231. }
  232. const tracks = state['features/base/tracks'];
  233. const localVideoTrack = getLocalVideoTrack(tracks)?.jitsiTrack;
  234. if (!tracks || !localVideoTrack) {
  235. return;
  236. }
  237. const currentFacingMode = localVideoTrack.getCameraFacingMode();
  238. if (currentFacingMode !== facingMode) {
  239. dispatch(toggleCamera());
  240. }
  241. };
  242. }
  243. /**
  244. * Signals to open the permission dialog for toggling camera remotely.
  245. *
  246. * @param {Function} onAllow - Callback to be executed if permission to toggle camera was granted.
  247. * @param {string} initiatorId - The participant id of the requester.
  248. * @returns {Object} - The open dialog action.
  249. */
  250. export function openAllowToggleCameraDialog(onAllow: Function, initiatorId: string) {
  251. return openDialog(AllowToggleCameraDialog, {
  252. onAllow,
  253. initiatorId
  254. });
  255. }
  256. /**
  257. * Sets the GUM pending state for the tracks that have failed.
  258. *
  259. * NOTE: Some of the track that we will be setting to GUM pending state NONE may not have failed but they may have
  260. * been requested. This won't be a problem because their current GUM pending state will be NONE anyway.
  261. *
  262. * @param {JitsiLocalTrack} tracks - The tracks that have been created.
  263. * @param {Function} dispatch - The redux dispatch function.
  264. * @returns {void}
  265. */
  266. export function setGUMPendingStateOnFailedTracks(tracks: Array<any>, dispatch: IStore['dispatch']) {
  267. const tracksTypes = tracks.map(track => {
  268. if (track.getVideoType() === VIDEO_TYPE.DESKTOP) {
  269. return MEDIA_TYPE.SCREENSHARE;
  270. }
  271. return track.getType();
  272. });
  273. const nonPendingTracks = [ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ].filter(type => !tracksTypes.includes(type));
  274. dispatch(gumPending(nonPendingTracks, IGUMPendingState.NONE));
  275. }
  276. /**
  277. * Creates and adds to the conference the initial audio/video tracks.
  278. *
  279. * @param {Array<MediaType>} devices - Array with devices (audio/video) that will be used.
  280. * @returns {Function}
  281. */
  282. export function createAndAddInitialAVTracks(devices: Array<MediaType>) {
  283. return async (dispatch: IStore['dispatch']) => {
  284. dispatch(gumPending(devices, IGUMPendingState.PENDING_UNMUTE));
  285. const { tracks, errors } = await dispatch(createInitialAVTracks({ devices }));
  286. setGUMPendingStateOnFailedTracks(tracks, dispatch);
  287. dispatch(displayErrorsForCreateInitialLocalTracks(errors));
  288. await Promise.allSettled(tracks.map((track: any) => {
  289. const legacyConferenceObject = APP.conference;
  290. if (track.isAudioTrack()) {
  291. return legacyConferenceObject.useAudioStream(track);
  292. }
  293. if (track.isVideoTrack()) {
  294. return legacyConferenceObject.useVideoStream(track);
  295. }
  296. return Promise.resolve();
  297. }));
  298. dispatch(gumPending(devices, IGUMPendingState.NONE));
  299. };
  300. }
  301. /**
  302. * Creates the initial audio/video tracks.
  303. *
  304. * @param {ICreateInitialTracksOptions} options - Options for creating the audio/video tracks.
  305. * @param {boolean} recordTimeMetrics - If true time metrics will be recorded.
  306. * @returns {Function}
  307. */
  308. export function createInitialAVTracks(options: ICreateInitialTracksOptions, recordTimeMetrics = false) {
  309. return (dispatch: IStore['dispatch'], _getState: IStore['getState']) => {
  310. const {
  311. devices,
  312. timeout
  313. } = options;
  314. dispatch(gumPending(devices, IGUMPendingState.PENDING_UNMUTE));
  315. return createLocalTracksF(options, undefined, recordTimeMetrics).then(tracks => {
  316. return {
  317. errors: {} as IInitialTracksErrors,
  318. tracks
  319. };
  320. })
  321. .catch(async error => {
  322. const errors = {} as IInitialTracksErrors;
  323. if (error.name === JitsiTrackErrors.TIMEOUT && !browser.isElectron()) {
  324. if (devices.includes(MEDIA_TYPE.AUDIO)) {
  325. errors.audioOnlyError = error;
  326. }
  327. if (devices.includes(MEDIA_TYPE.VIDEO)) {
  328. errors.videoOnlyError = error;
  329. }
  330. if (errors.audioOnlyError && errors.videoOnlyError) {
  331. errors.audioAndVideoError = error;
  332. }
  333. return {
  334. errors,
  335. tracks: []
  336. };
  337. }
  338. // Retry with separate gUM calls.
  339. const gUMPromises = [];
  340. const tracks: any[] | PromiseLike<any[]> = [];
  341. if (devices.includes(MEDIA_TYPE.AUDIO)) {
  342. gUMPromises.push(createLocalTracksF({
  343. devices: [ MEDIA_TYPE.AUDIO ],
  344. timeout
  345. }));
  346. }
  347. if (devices.includes(MEDIA_TYPE.VIDEO)) {
  348. gUMPromises.push(createLocalTracksF({
  349. devices: [ MEDIA_TYPE.VIDEO ],
  350. timeout
  351. }));
  352. }
  353. const results = await Promise.allSettled(gUMPromises);
  354. let errorMsg;
  355. results.forEach((result, idx) => {
  356. if (result.status === 'fulfilled') {
  357. tracks.push(result.value[0]);
  358. } else {
  359. errorMsg = result.reason;
  360. const isAudio = idx === 0;
  361. logger.error(`${isAudio ? 'Audio' : 'Video'} track creation failed with error ${errorMsg}`);
  362. if (isAudio) {
  363. errors.audioOnlyError = errorMsg;
  364. } else {
  365. errors.videoOnlyError = errorMsg;
  366. }
  367. }
  368. });
  369. if (errors.audioOnlyError && errors.videoOnlyError) {
  370. errors.audioAndVideoError = errorMsg;
  371. }
  372. return {
  373. tracks,
  374. errors
  375. };
  376. });
  377. };
  378. }
  379. /**
  380. * Displays error notifications according to the state carried by the passed {@code errors} object.
  381. *
  382. * @param {InitialTracksErrors} errors - The errors (if any).
  383. * @returns {Function}
  384. * @private
  385. */
  386. export function displayErrorsForCreateInitialLocalTracks(errors: IInitialTracksErrors) {
  387. return (dispatch: IStore['dispatch']) => {
  388. const {
  389. audioOnlyError,
  390. screenSharingError,
  391. videoOnlyError
  392. } = errors;
  393. if (screenSharingError) {
  394. dispatch(handleScreenSharingError(screenSharingError, NOTIFICATION_TIMEOUT_TYPE.LONG));
  395. }
  396. if (audioOnlyError || videoOnlyError) {
  397. if (audioOnlyError) {
  398. dispatch(notifyMicError(audioOnlyError));
  399. }
  400. if (videoOnlyError) {
  401. dispatch(notifyCameraError(videoOnlyError));
  402. }
  403. }
  404. };
  405. }
  406. /**
  407. * Displays a UI notification for screensharing failure based on the error passed.
  408. *
  409. * @private
  410. * @param {Error | AUDIO_ONLY_SCREEN_SHARE_NO_TRACK} error - The error.
  411. * @param {NOTIFICATION_TIMEOUT_TYPE} timeout - The time for showing the notification.
  412. * @returns {Function}
  413. */
  414. export function handleScreenSharingError(
  415. error: Error | AUDIO_ONLY_SCREEN_SHARE_NO_TRACK,
  416. timeout: NOTIFICATION_TIMEOUT_TYPE) {
  417. return (dispatch: IStore['dispatch']) => {
  418. logger.error('failed to share local desktop', error);
  419. let descriptionKey;
  420. let titleKey;
  421. if (error.name === JitsiTrackErrors.PERMISSION_DENIED) {
  422. descriptionKey = 'dialog.screenSharingPermissionDeniedError';
  423. titleKey = 'dialog.screenSharingFailedTitle';
  424. } else if (error.name === JitsiTrackErrors.CONSTRAINT_FAILED) {
  425. descriptionKey = 'dialog.cameraConstraintFailedError';
  426. titleKey = 'deviceError.cameraError';
  427. } else if (error.name === JitsiTrackErrors.SCREENSHARING_GENERIC_ERROR) {
  428. descriptionKey = 'dialog.screenSharingFailed';
  429. titleKey = 'dialog.screenSharingFailedTitle';
  430. } else if (error === AUDIO_ONLY_SCREEN_SHARE_NO_TRACK) {
  431. descriptionKey = 'notify.screenShareNoAudio';
  432. titleKey = 'notify.screenShareNoAudioTitle';
  433. } else { // safeguard for not showing notification with empty text. This will also include
  434. // error.name === JitsiTrackErrors.SCREENSHARING_USER_CANCELED
  435. return;
  436. }
  437. dispatch(showErrorNotification({
  438. descriptionKey,
  439. titleKey
  440. }, timeout));
  441. };
  442. }