
actions.ts

import { createTrackMutedEvent } from '../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../analytics/functions';
import { IStore } from '../../app/types';
import { showErrorNotification, showNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT, NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import { getCurrentConference } from '../conference/functions';
import { IJitsiConference } from '../conference/reducer';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
// eslint-disable-next-line lines-around-comment
// @ts-ignore
import { createLocalTrack } from '../lib-jitsi-meet/functions';
import { setAudioMuted, setScreenshareMuted, setVideoMuted } from '../media/actions';
import {
    CAMERA_FACING_MODE,
    MEDIA_TYPE,
    MediaType,
    VIDEO_MUTISM_AUTHORITY,
    VIDEO_TYPE,
    VideoType
} from '../media/constants';
import { getLocalParticipant } from '../participants/functions';
import { updateSettings } from '../settings/actions';
import {
    SET_NO_SRC_DATA_NOTIFICATION_UID,
    TOGGLE_SCREENSHARING,
    TRACK_ADDED,
    TRACK_CREATE_CANCELED,
    TRACK_CREATE_ERROR,
    TRACK_MUTE_UNMUTE_FAILED,
    TRACK_NO_DATA_FROM_SOURCE,
    TRACK_REMOVED,
    TRACK_STOPPED,
    TRACK_UPDATED,
    TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT,
    TRACK_WILL_CREATE
} from './actionTypes';
import {
    createLocalTracksF,
    getLocalTrack,
    getLocalTracks,
    getLocalVideoTrack,
    getTrackByJitsiTrack
} from './functions';
import logger from './logger';
import { TrackOptions } from './types';

/**
 * Add a given local track to the conference.
 *
 * @param {JitsiLocalTrack} newTrack - The local track to be added to the conference.
 * @returns {Function}
 */
export function addLocalTrack(newTrack: any) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const conference = getCurrentConference(getState());

        if (conference) {
            await conference.addTrack(newTrack);
        }

        const setMuted = newTrack.isVideoTrack()
            ? getMultipleVideoSendingSupportFeatureFlag(getState())
                && newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
                ? setScreenshareMuted
                : setVideoMuted
            : setAudioMuted;
        const isMuted = newTrack.isMuted();

        logger.log(`Adding ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);
        await dispatch(setMuted(isMuted));

        return dispatch(_addTracks([ newTrack ]));
    };
}
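
// Usage sketch (illustrative, not part of the original module): with a connected
// redux store and an already created JitsiLocalTrack, the thunk above would
// typically be dispatched as
//
//     store.dispatch(addLocalTrack(desktopTrack));
//
// where `store` and `desktopTrack` are assumed to exist in the calling code.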

/**
 * Requests the creation of the desired media type tracks. Desire is expressed
 * by base/media unless the function caller specifies desired media types
 * explicitly and thus overrides base/media. Dispatches a
 * {@code createLocalTracksA} action for the desired media types for which there
 * are no existing tracks yet.
 *
 * @returns {Function}
 */
export function createDesiredLocalTracks(...desiredTypes: any) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const state = getState();

        dispatch(destroyLocalDesktopTrackIfExists());

        if (desiredTypes.length === 0) {
            const { video } = state['features/base/media'];

            // XXX: Always create the audio track early, even if it will be muted.
            // This fixes a timing issue when adding the track to the conference which
            // manifests primarily on iOS 15.
            desiredTypes.push(MEDIA_TYPE.AUDIO);

            // XXX When the app is coming into the foreground from the
            // background in order to handle a URL, it may realize the new
            // background state soon after it has tried to create the local
            // tracks requested by the URL. Ignore
            // VIDEO_MUTISM_AUTHORITY.BACKGROUND and create the local video
            // track if no other VIDEO_MUTISM_AUTHORITY has muted it. The local
            // video track will be muted until the app realizes the new
            // background state.
            // eslint-disable-next-line no-bitwise
            (video.muted & ~VIDEO_MUTISM_AUTHORITY.BACKGROUND)
                || desiredTypes.push(MEDIA_TYPE.VIDEO);
        }

        const availableTypes
            = getLocalTracks(
                    state['features/base/tracks'],
                    /* includePending */ true)
                .map(t => t.mediaType);

        // We need to create the desired tracks which are not already available.
        const createTypes
            = desiredTypes.filter((type: MediaType) => availableTypes.indexOf(type) === -1);

        createTypes.length
            && dispatch(createLocalTracksA({ devices: createTypes }));
    };
}
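
// Usage sketch (illustrative, not part of the original module): callers either
// let base/media decide which tracks to create, or name the media types
// explicitly, e.g.
//
//     store.dispatch(createDesiredLocalTracks());
//     store.dispatch(createDesiredLocalTracks(MEDIA_TYPE.AUDIO));
//
// where `store` is assumed to be the application's redux store.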

/**
 * Request to start capturing local audio and/or video. By default, the user
 * facing camera will be selected.
 *
 * @param {Object} [options] - For info @see JitsiMeetJS.createLocalTracks.
 * @returns {Function}
 */
export function createLocalTracksA(options: TrackOptions = {}) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const devices
            = options.devices || [ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ];
        const store = {
            dispatch,
            getState
        };

        // The following executes on React Native only at the time of this
        // writing. The effort to port Web's createInitialLocalTracksAndConnect
        // is significant and that's where the function createLocalTracksF was
        // born. I started with the idea of porting it so that we could inherit
        // the ability to getUserMedia for audio only or video only if
        // getUserMedia for audio and video fails. Eventually though, I realized
        // that on mobile we do not have combined permission prompts implemented
        // anyway (either because there are no such prompts or it does not make
        // sense to implement them) and the right thing to do is to ask for each
        // device separately.
        for (const device of devices) {
            if (getLocalTrack(
                    getState()['features/base/tracks'],
                    device as MediaType,
                    /* includePending */ true)) {
                throw new Error(`Local track for ${device} already exists`);
            }

            const gumProcess
                = createLocalTracksF(
                    {
                        cameraDeviceId: options.cameraDeviceId,
                        devices: [ device ],
                        facingMode:
                            options.facingMode || CAMERA_FACING_MODE.USER,
                        micDeviceId: options.micDeviceId
                    },
                    store)
                .then(
                    (localTracks: any[]) => {
                        // Because GUM is called for 1 device (which is actually
                        // a media type 'audio', 'video', 'screen', etc.) we
                        // should not get more than one JitsiTrack.
                        if (localTracks.length !== 1) {
                            throw new Error(
                                `Expected exactly 1 track, but was given ${
                                    localTracks.length} tracks for device: ${
                                    device}.`);
                        }

                        if (gumProcess.canceled) {
                            return _disposeTracks(localTracks)
                                .then(() =>
                                    dispatch(_trackCreateCanceled(device as MediaType)));
                        }

                        return dispatch(trackAdded(localTracks[0]));
                    },
                    (reason: Error) =>
                        dispatch(
                            gumProcess.canceled
                                ? _trackCreateCanceled(device as MediaType)
                                : _onCreateLocalTracksRejected(
                                    reason,
                                    device)));

            /**
             * Cancels the {@code getUserMedia} process represented by this
             * {@code Promise}.
             *
             * @returns {Promise} This {@code Promise} i.e. {@code gumProcess}.
             */
            gumProcess.cancel = () => {
                gumProcess.canceled = true;

                return gumProcess;
            };

            dispatch({
                type: TRACK_WILL_CREATE,
                track: {
                    gumProcess,
                    local: true,
                    mediaType: device
                }
            });
        }
    };
}
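
// Usage sketch (illustrative, not part of the original module): to request only
// a microphone track, a caller could dispatch
//
//     store.dispatch(createLocalTracksA({ devices: [ MEDIA_TYPE.AUDIO ] }));
//
// where `store` is assumed to be the application's redux store.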

/**
 * Calls JitsiLocalTrack#dispose() on the given track, or on all local tracks if none is passed, ignoring errors
 * if a track is already disposed. After that, signals the tracks to be removed.
 *
 * @param {JitsiLocalTrack|null} [track] - The local track that needs to be destroyed.
 * @returns {Function}
 */
export function destroyLocalTracks(track = null) {
    if (track) {
        return (dispatch: IStore['dispatch']) => {
            dispatch(_disposeAndRemoveTracks([ track ]));
        };
    }

    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        // First wait until any getUserMedia in progress is settled and then get
        // rid of all local tracks.
        _cancelGUMProcesses(getState)
            .then(() =>
                dispatch(
                    _disposeAndRemoveTracks(
                        getState()['features/base/tracks']
                            .filter(t => t.local)
                            .map(t => t.jitsiTrack))));
    };
}

/**
 * Signals that the passed JitsiLocalTrack has triggered a no data from source event.
 *
 * @param {JitsiLocalTrack} track - The track.
 * @returns {{
 *     type: TRACK_NO_DATA_FROM_SOURCE,
 *     track: Track
 * }}
 */
export function noDataFromSource(track: any) {
    return {
        type: TRACK_NO_DATA_FROM_SOURCE,
        track
    };
}

/**
 * Displays a no data from source video error if needed.
 *
 * @param {JitsiLocalTrack} jitsiTrack - The track.
 * @returns {Function}
 */
export function showNoDataFromSourceVideoError(jitsiTrack: any) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        let notificationInfo;

        const track = getTrackByJitsiTrack(getState()['features/base/tracks'], jitsiTrack);

        if (!track) {
            return;
        }

        if (track.isReceivingData) {
            notificationInfo = undefined;
        } else {
            const notificationAction = await dispatch(showErrorNotification({
                descriptionKey: 'dialog.cameraNotSendingData',
                titleKey: 'dialog.cameraNotSendingDataTitle'
            }, NOTIFICATION_TIMEOUT_TYPE.LONG));

            notificationInfo = {
                uid: notificationAction?.uid
            };
        }
        dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, notificationInfo));
    };
}

/**
 * Signals that the local participant is ending screensharing or beginning the screensharing flow.
 *
 * @param {boolean} enabled - The state to toggle screen sharing to.
 * @param {boolean} audioOnly - Only share system audio.
 * @param {boolean} ignoreDidHaveVideo - Whether or not to ignore if video was on when sharing started.
 * @param {Object} shareOptions - The options to be passed for capturing screenshare.
 * @returns {{
 *     type: TOGGLE_SCREENSHARING,
 *     enabled: boolean,
 *     audioOnly: boolean,
 *     ignoreDidHaveVideo: boolean,
 *     shareOptions: Object
 * }}
 */
export function toggleScreensharing(enabled: boolean, audioOnly = false,
        ignoreDidHaveVideo = false, shareOptions = {}) {
    return {
        type: TOGGLE_SCREENSHARING,
        enabled,
        audioOnly,
        ignoreDidHaveVideo,
        shareOptions
    };
}
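
// Usage sketch (illustrative, not part of the original module): starting and
// stopping screen sharing from UI code would typically look like
//
//     store.dispatch(toggleScreensharing(true));
//     store.dispatch(toggleScreensharing(false));
//
// where `store` is assumed to be the application's redux store.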

/**
 * Replaces one track with another for one renegotiation instead of invoking
 * two renegotiations with a separate removeTrack and addTrack. Disposes the
 * removed track as well.
 *
 * @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
 * @param {JitsiLocalTrack|null} newTrack - The track to use instead.
 * @param {JitsiConference} [conference] - The conference from which to remove
 * and add the tracks. If one is not provided, the conference in the redux store
 * will be used.
 * @returns {Function}
 */
export function replaceLocalTrack(oldTrack: any, newTrack: any, conference?: IJitsiConference) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        conference

            // eslint-disable-next-line no-param-reassign
            || (conference = getState()['features/base/conference'].conference);

        if (conference) {
            await conference.replaceTrack(oldTrack, newTrack);
        }

        return dispatch(replaceStoredTracks(oldTrack, newTrack));
    };
}
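
// Usage sketch (illustrative, not part of the original module): switching the
// camera to a newly created JitsiLocalTrack in a single renegotiation might
// look like
//
//     store.dispatch(replaceLocalTrack(oldCameraTrack, newCameraTrack));
//
// where `store`, `oldCameraTrack` and `newCameraTrack` are assumed to exist in
// the calling code.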

/**
 * Replaces a stored track with another.
 *
 * @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
 * @param {JitsiLocalTrack|null} newTrack - The track to use instead.
 * @returns {Function}
 */
function replaceStoredTracks(oldTrack: any, newTrack: any) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        // We call dispose after doing the replace because dispose will
        // try and do a new o/a after the track removes itself. Doing it
        // after means the JitsiLocalTrack.conference is already
        // cleared, so it won't try and do the o/a.
        if (oldTrack) {
            await dispatch(_disposeAndRemoveTracks([ oldTrack ]));
        }

        if (newTrack) {
            // The mute state of the new track should be reflected in the app's mute state. For example, if the
            // app is currently muted and changing to a new track that is not muted, the app's mute state
            // should be falsey. As such, emit a mute event here to set up the app to reflect the track's mute
            // state. If this is not done, the current mute state of the app will be reflected on the track,
            // not vice-versa.
            const setMuted = newTrack.isVideoTrack()
                ? getMultipleVideoSendingSupportFeatureFlag(getState())
                    && newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
                    ? setScreenshareMuted
                    : setVideoMuted
                : setAudioMuted;
            const isMuted = newTrack.isMuted();

            sendAnalytics(createTrackMutedEvent(newTrack.getType(), 'track.replaced', isMuted));
            logger.log(`Replace ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);

            await dispatch(setMuted(isMuted));
            await dispatch(_addTracks([ newTrack ]));
        }
    };
}

/**
 * Create an action for when a new track has been signaled to be added to the
 * conference.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{ type: TRACK_ADDED, track: Track }}
 */
export function trackAdded(track: any) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        track.on(
            JitsiTrackEvents.TRACK_MUTE_CHANGED,
            () => dispatch(trackMutedChanged(track)));
        track.on(
            JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED,
            (type: VideoType) => dispatch(trackVideoTypeChanged(track, type)));

        // participantId
        const local = track.isLocal();
        const mediaType = getMultipleVideoSendingSupportFeatureFlag(getState())
            && track.getVideoType() === VIDEO_TYPE.DESKTOP
            ? MEDIA_TYPE.SCREENSHARE
            : track.getType();
        let isReceivingData, noDataFromSourceNotificationInfo, participantId;

        if (local) {
            // Reset the no data from src notification state when we change the track, as its context is set
            // on a per-device basis.
            dispatch(setNoSrcDataNotificationUid());
            const participant = getLocalParticipant(getState);

            if (participant) {
                participantId = participant.id;
            }

            isReceivingData = track.isReceivingData();
            track.on(JitsiTrackEvents.NO_DATA_FROM_SOURCE, () => dispatch(noDataFromSource({ jitsiTrack: track })));

            if (!isReceivingData) {
                if (mediaType === MEDIA_TYPE.AUDIO) {
                    const notificationAction = await dispatch(showNotification({
                        descriptionKey: 'dialog.micNotSendingData',
                        titleKey: 'dialog.micNotSendingDataTitle'
                    }, NOTIFICATION_TIMEOUT_TYPE.LONG));

                    // Set the notification ID so that other parts of the application know that this was
                    // displayed in the context of the current device.
                    // I.E. The no-audio-signal notification shouldn't be displayed if this was already shown.
                    dispatch(setNoSrcDataNotificationUid(notificationAction?.uid));

                    noDataFromSourceNotificationInfo = { uid: notificationAction?.uid };
                } else {
                    const timeout = setTimeout(() => dispatch(
                        showNoDataFromSourceVideoError(track)),
                        NOTIFICATION_TIMEOUT.MEDIUM);

                    noDataFromSourceNotificationInfo = { timeout };
                }
            }

            track.on(JitsiTrackEvents.LOCAL_TRACK_STOPPED,
                () => dispatch({
                    type: TRACK_STOPPED,
                    track: {
                        jitsiTrack: track
                    }
                }));
        } else {
            participantId = track.getParticipantId();
            isReceivingData = true;
        }

        return dispatch({
            type: TRACK_ADDED,
            track: {
                jitsiTrack: track,
                isReceivingData,
                local,
                mediaType,
                mirror: _shouldMirror(track),
                muted: track.isMuted(),
                noDataFromSourceNotificationInfo,
                participantId,
                videoStarted: false,
                videoType: track.videoType
            }
        });
    };
}

/**
 * Create an action for when a track's muted state has been signaled to be
 * changed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackMutedChanged(track: any) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            muted: track.isMuted()
        }
    };
}

/**
 * Create an action for when a track's muted state change action has failed. This could happen because of
 * {@code getUserMedia} errors during unmute or replace track errors at the peerconnection level.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {boolean} wasMuting - Whether the operation that failed was a mute operation (as opposed to an unmute
 * operation).
 * @returns {{
 *     type: TRACK_MUTE_UNMUTE_FAILED,
 *     track: Track
 * }}
 */
export function trackMuteUnmuteFailed(track: any, wasMuting: boolean) {
    return {
        type: TRACK_MUTE_UNMUTE_FAILED,
        track,
        wasMuting
    };
}

/**
 * Create an action for when a track's no data from source notification information changes.
 *
 * @param {JitsiLocalTrack} track - JitsiTrack instance.
 * @param {Object} noDataFromSourceNotificationInfo - Information about no data from source notification.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackNoDataFromSourceNotificationInfoChanged(track: any, noDataFromSourceNotificationInfo?: Object) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            noDataFromSourceNotificationInfo
        }
    };
}

/**
 * Create an action for when a track has been signaled for removal from the
 * conference.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_REMOVED,
 *     track: Track
 * }}
 */
export function trackRemoved(track: any) {
    track.removeAllListeners(JitsiTrackEvents.TRACK_MUTE_CHANGED);
    track.removeAllListeners(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED);
    track.removeAllListeners(JitsiTrackEvents.NO_DATA_FROM_SOURCE);

    return {
        type: TRACK_REMOVED,
        track: {
            jitsiTrack: track
        }
    };
}

/**
 * Signal that a track's video started to play.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackVideoStarted(track: any) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            videoStarted: true
        }
    };
}

/**
 * Create an action for when a participant's video type changes.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {VIDEO_TYPE|undefined} videoType - Video type.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackVideoTypeChanged(track: any, videoType: VideoType) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            videoType
        }
    };
}

/**
 * Create an action for when a track's streaming status changes.
 *
 * @param {(JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {string} streamingStatus - The new streaming status of the track.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackStreamingStatusChanged(track: any, streamingStatus: string) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            streamingStatus
        }
    };
}

/**
 * Signals passed tracks to be added.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @private
 * @returns {Function}
 */
function _addTracks(tracks: any[]) {
    return (dispatch: IStore['dispatch']) => Promise.all(tracks.map(t => dispatch(trackAdded(t))));
}

/**
 * Cancels and waits for any {@code getUserMedia} processes currently in
 * progress to complete/settle.
 *
 * @param {Function} getState - The redux store {@code getState} function used
 * to obtain the state.
 * @private
 * @returns {Promise} - A {@code Promise} resolved once all
 * {@code gumProcess.cancel()} {@code Promise}s are settled because all we care
 * about here is to be sure that the {@code getUserMedia} callbacks have
 * completed (i.e. Returned from the native side).
 */
function _cancelGUMProcesses(getState: IStore['getState']) {
    const logError
        = (error: Error) =>
            logger.error('gumProcess.cancel failed', JSON.stringify(error));

    return Promise.all(
        getState()['features/base/tracks']
            .filter(t => t.local)
            .map(({ gumProcess }: any) =>
                gumProcess?.cancel().catch(logError)));
}

/**
 * Disposes passed tracks and signals them to be removed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @protected
 * @returns {Function}
 */
export function _disposeAndRemoveTracks(tracks: any[]) {
    return (dispatch: IStore['dispatch']) =>
        _disposeTracks(tracks)
            .then(() =>
                Promise.all(tracks.map(t => dispatch(trackRemoved(t)))));
}

/**
 * Disposes passed tracks.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @private
 * @returns {Promise} - A Promise resolved once {@link JitsiTrack.dispose()} is
 * done for every track from the list.
 */
function _disposeTracks(tracks: any) {
    return Promise.all(
        tracks.map((t: any) =>
            t.dispose()
                .catch((err: Error) => {
                    // Track might be already disposed so ignore such an error.
                    // Of course, re-throw any other error(s).
                    if (err.name !== JitsiTrackErrors.TRACK_IS_DISPOSED) {
                        throw err;
                    }
                })));
}

/**
 * Implements the {@code Promise} rejection handler of
 * {@code createLocalTracksA} and {@code createLocalTracksF}.
 *
 * @param {Object} error - The {@code Promise} rejection reason.
 * @param {string} device - The device/{@code MEDIA_TYPE} associated with the
 * rejection.
 * @private
 * @returns {Function}
 */
function _onCreateLocalTracksRejected(error: Error, device: string) {
    return (dispatch: IStore['dispatch']) => {
        // If permissions are not allowed, alert the user.
        dispatch({
            type: TRACK_CREATE_ERROR,
            permissionDenied: error?.name === 'SecurityError',
            trackType: device
        });
    };
}

/**
 * Returns true if the provided {@code JitsiTrack} should be rendered as a
 * mirror.
 *
 * We only want to show a video in mirrored mode when:
 * 1) The video source is local, and not remote.
 * 2) The video source is a camera, not a desktop (capture).
 * 3) The camera is capturing the user, not the environment.
 *
 * TODO Similar functionality is part of lib-jitsi-meet. This function should be
 * removed after https://github.com/jitsi/lib-jitsi-meet/pull/187 is merged.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @private
 * @returns {boolean}
 */
function _shouldMirror(track: any) {
    return (
        track?.isLocal()
            && track?.isVideoTrack()

            // XXX The type of the return value of JitsiLocalTrack's
            // getCameraFacingMode happens to be named CAMERA_FACING_MODE as
            // well, it's defined by lib-jitsi-meet. Note though that the type
            // of the value on the right side of the equality check is defined
            // by jitsi-meet. The type definitions are surely compatible today
            // but that may not be the case tomorrow.
            && track?.getCameraFacingMode() === CAMERA_FACING_MODE.USER);
}

/**
 * Signals that the track create operation for the given media type has been canceled.
 * Will clean up the local track stub from the redux state which holds the
 * {@code gumProcess} reference.
 *
 * @param {MEDIA_TYPE} mediaType - The type of the media for which the track was
 * being created.
 * @private
 * @returns {{
 *     type,
 *     trackType: MEDIA_TYPE
 * }}
 */
function _trackCreateCanceled(mediaType: MediaType) {
    return {
        type: TRACK_CREATE_CANCELED,
        trackType: mediaType
    };
}

/**
 * If the local video track is of type Desktop, calls _disposeAndRemoveTracks() on it.
 *
 * @returns {Function}
 */
export function destroyLocalDesktopTrackIfExists() {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const videoTrack = getLocalVideoTrack(getState()['features/base/tracks']);
        const isDesktopTrack = videoTrack && videoTrack.videoType === VIDEO_TYPE.DESKTOP;

        if (isDesktopTrack) {
            dispatch(_disposeAndRemoveTracks([ videoTrack.jitsiTrack ]));
        }
    };
}

/**
 * Sets the UID of the displayed no data from source notification. Used to track
 * whether the notification was previously displayed in this context.
 *
 * @param {string} [uid] - Notification UID.
 * @returns {{
 *     type: SET_NO_SRC_DATA_NOTIFICATION_UID,
 *     uid: string
 * }}
 */
export function setNoSrcDataNotificationUid(uid?: string) {
    return {
        type: SET_NO_SRC_DATA_NOTIFICATION_UID,
        uid
    };
}

/**
 * Updates the last media event received for a video track.
 *
 * @param {JitsiRemoteTrack} track - JitsiTrack instance.
 * @param {string} name - The current media event name for the video.
 * @returns {{
 *     type: TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT,
 *     track: Track,
 *     name: string
 * }}
 */
export function updateLastTrackVideoMediaEvent(track: any, name: string) {
    return {
        type: TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT,
        track,
        name
    };
}

/**
 * Toggles the facingMode constraint on the video stream.
 *
 * @returns {Function}
 */
export function toggleCamera() {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const state = getState();
        const tracks = state['features/base/tracks'];
        const localVideoTrack = getLocalVideoTrack(tracks)?.jitsiTrack;
        const currentFacingMode = localVideoTrack.getCameraFacingMode();

        /**
         * FIXME: Ideally, we should be dispatching {@code replaceLocalTrack} here,
         * but it seems to not trigger the re-rendering of the local video on Chrome;
         * could be due to a plan B vs unified plan issue. Therefore, we use the legacy
         * method defined in conference.js that manually takes care of updating the local
         * video as well.
         */
        await APP.conference.useVideoStream(null);

        const targetFacingMode = currentFacingMode === CAMERA_FACING_MODE.USER
            ? CAMERA_FACING_MODE.ENVIRONMENT
            : CAMERA_FACING_MODE.USER;

        // Update the flipX value so the environment facing camera is not flipped, before the new track is created.
        dispatch(updateSettings({ localFlipX: targetFacingMode === CAMERA_FACING_MODE.USER }));

        const newVideoTrack = await createLocalTrack('video', null, null, { facingMode: targetFacingMode });

        // FIXME: See above.
        await APP.conference.useVideoStream(newVideoTrack);
    };
}
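
// Usage sketch (illustrative, not part of the original module): on a device with
// both a front and a rear camera, flipping between them is simply
//
//     store.dispatch(toggleCamera());
//
// where `store` is assumed to be the application's redux store and a local
// camera track is assumed to already exist.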