actions.any.ts

import { createTrackMutedEvent } from '../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../analytics/functions';
import { IStore } from '../../app/types';
import { showErrorNotification, showNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT, NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import { getCurrentConference } from '../conference/functions';
import { IJitsiConference } from '../conference/reducer';
import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
import { createLocalTrack } from '../lib-jitsi-meet/functions.any';
import { setAudioMuted, setScreenshareMuted, setVideoMuted } from '../media/actions';
import {
    CAMERA_FACING_MODE,
    MEDIA_TYPE,
    MediaType,
    VIDEO_MUTISM_AUTHORITY,
    VIDEO_TYPE,
    VideoType
} from '../media/constants';
import { getLocalParticipant } from '../participants/functions';
import { updateSettings } from '../settings/actions';
import {
    SET_NO_SRC_DATA_NOTIFICATION_UID,
    TRACK_ADDED,
    TRACK_CREATE_CANCELED,
    TRACK_CREATE_ERROR,
    TRACK_MUTE_UNMUTE_FAILED,
    TRACK_NO_DATA_FROM_SOURCE,
    TRACK_REMOVED,
    TRACK_STOPPED,
    TRACK_UPDATED,
    TRACK_WILL_CREATE
} from './actionTypes';
import {
    createLocalTracksF,
    getCameraFacingMode,
    getLocalTrack,
    getLocalTracks,
    getLocalVideoTrack,
    getTrackByJitsiTrack
} from './functions';
import logger from './logger';
import { ITrack, ITrackOptions } from './types';

/**
 * Add a given local track to the conference.
 *
 * @param {JitsiLocalTrack} newTrack - The local track to be added to the conference.
 * @returns {Function}
 */
export function addLocalTrack(newTrack: any) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const conference = getCurrentConference(getState());

        if (conference) {
            await conference.addTrack(newTrack);
        }

        const setMuted = newTrack.isVideoTrack()
            ? newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
                ? setScreenshareMuted
                : setVideoMuted
            : setAudioMuted;
        const isMuted = newTrack.isMuted();

        logger.log(`Adding ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);
        dispatch(setMuted(isMuted));

        return dispatch(_addTracks([ newTrack ]));
    };
}

/**
 * Requests the creation of the desired media type tracks. Desire is expressed
 * by base/media unless the function caller specifies desired media types
 * explicitly and thus overrides base/media. Dispatches a
 * {@code createLocalTracksA} action for the desired media types for which there
 * are no existing tracks yet.
 *
 * @returns {Function}
 */
export function createDesiredLocalTracks(...desiredTypes: any) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const state = getState();

        dispatch(destroyLocalDesktopTrackIfExists());

        if (desiredTypes.length === 0) {
            const { video } = state['features/base/media'];

            // XXX: Always create the audio track early, even if it will be muted.
            // This fixes a timing issue when adding the track to the conference which
            // manifests primarily on iOS 15.
            desiredTypes.push(MEDIA_TYPE.AUDIO);

            // XXX When the app is coming into the foreground from the
            // background in order to handle a URL, it may realize the new
            // background state soon after it has tried to create the local
            // tracks requested by the URL. Ignore
            // VIDEO_MUTISM_AUTHORITY.BACKGROUND and create the local video
            // track if no other VIDEO_MUTISM_AUTHORITY has muted it. The local
            // video track will be muted until the app realizes the new
            // background state.
            // eslint-disable-next-line no-bitwise
            (video.muted & ~VIDEO_MUTISM_AUTHORITY.BACKGROUND)
                || desiredTypes.push(MEDIA_TYPE.VIDEO);
        }

        const availableTypes
            = getLocalTracks(
                state['features/base/tracks'],
                /* includePending */ true)
            .map(t => t.mediaType);

        // We need to create the desired tracks which are not already available.
        const createTypes
            = desiredTypes.filter((type: MediaType) => availableTypes.indexOf(type) === -1);

        createTypes.length
            && dispatch(createLocalTracksA({ devices: createTypes }));
    };
}

/**
 * Requests to start capturing local audio and/or video. By default, the user
 * facing camera will be selected.
 *
 * @param {Object} [options] - For info @see JitsiMeetJS.createLocalTracks.
 * @returns {Function}
 */
export function createLocalTracksA(options: ITrackOptions = {}) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const devices
            = options.devices || [ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ];
        const store = {
            dispatch,
            getState
        };
        const promises = [];
        const state = getState();

        // The following executes on React Native only at the time of this
        // writing. The effort to port Web's createInitialLocalTracks is
        // significant and that's where the function createLocalTracksF was
        // born. I started with the idea of porting it so that we could inherit
        // the ability to getUserMedia for audio only or video only if
        // getUserMedia for audio and video fails. Eventually though, I realized
        // that on mobile we do not have combined permission prompts implemented
        // anyway (either because there are no such prompts or it does not make
        // sense to implement them) and the right thing to do is to ask for each
        // device separately.
        for (const device of devices) {
            if (getLocalTrack(
                    state['features/base/tracks'],
                    device as MediaType,
                    /* includePending */ true)) {
                throw new Error(`Local track for ${device} already exists`);
            }

            const gumProcess: any
                = createLocalTracksF(
                    {
                        cameraDeviceId: options.cameraDeviceId,
                        devices: [ device ],
                        facingMode:
                            options.facingMode || getCameraFacingMode(state),
                        micDeviceId: options.micDeviceId
                    },
                    store)
                .then( // @ts-ignore
                    (localTracks: any[]) => {
                        // Because GUM is called for 1 device (which is actually
                        // a media type 'audio', 'video', 'screen', etc.) we
                        // should not get more than one JitsiTrack.
                        if (localTracks.length !== 1) {
                            throw new Error(
                                `Expected exactly 1 track, but was given ${
                                    localTracks.length} tracks for device: ${
                                    device}.`);
                        }

                        if (gumProcess.canceled) {
                            return _disposeTracks(localTracks)
                                .then(() =>
                                    dispatch(_trackCreateCanceled(device as MediaType)));
                        }

                        return dispatch(trackAdded(localTracks[0]));
                    },
                    (reason: Error) =>
                        dispatch(
                            gumProcess.canceled
                                ? _trackCreateCanceled(device as MediaType)
                                : _onCreateLocalTracksRejected(
                                    reason,
                                    device)));

            promises.push(gumProcess.catch(() => undefined));

            /**
             * Cancels the {@code getUserMedia} process represented by this
             * {@code Promise}.
             *
             * @returns {Promise} This {@code Promise} i.e. {@code gumProcess}.
             */
            gumProcess.cancel = () => {
                gumProcess.canceled = true;

                return gumProcess;
            };

            dispatch({
                type: TRACK_WILL_CREATE,
                track: {
                    gumProcess,
                    local: true,
                    mediaType: device
                }
            });
        }

        return Promise.all(promises);
    };
}
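
// Note on cancellation: the gumProcess promise stored with the TRACK_WILL_CREATE stub above is
// the handle that _cancelGUMProcesses() (defined further below) later reads back from the redux
// state and cancels. That is how destroyLocalTracks() can wait for pending getUserMedia calls to
// settle before disposing tracks. A minimal, hypothetical call site:
//
//     dispatch(createLocalTracksA({ devices: [ MEDIA_TYPE.AUDIO ] }));
//     // ...later, e.g. when leaving the conference:
//     dispatch(destroyLocalTracks());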

/**
 * Calls JitsiLocalTrack#dispose() on the given track or on all local tracks (if none are passed),
 * ignoring errors if a track is already disposed. After that, signals the tracks to be removed.
 *
 * @param {JitsiLocalTrack|null} [track] - The local track that needs to be destroyed.
 * @returns {Function}
 */
export function destroyLocalTracks(track: any = null) {
    if (track) {
        return (dispatch: IStore['dispatch']) => dispatch(_disposeAndRemoveTracks([ track ]));
    }

    return (dispatch: IStore['dispatch'], getState: IStore['getState']) =>

        // First wait until any getUserMedia in progress is settled and then get
        // rid of all local tracks.
        _cancelGUMProcesses(getState)
            .then(() =>
                dispatch(
                    _disposeAndRemoveTracks(
                        getState()['features/base/tracks']
                            .filter(t => t.local)
                            .map(t => t.jitsiTrack))));
}

/**
 * Signals that the passed JitsiLocalTrack has triggered a no data from source event.
 *
 * @param {JitsiLocalTrack} track - The track.
 * @returns {{
 *     type: TRACK_NO_DATA_FROM_SOURCE,
 *     track: Track
 * }}
 */
export function noDataFromSource(track: any) {
    return {
        type: TRACK_NO_DATA_FROM_SOURCE,
        track
    };
}

/**
 * Displays a no data from source video error if needed.
 *
 * @param {JitsiLocalTrack} jitsiTrack - The track.
 * @returns {Function}
 */
export function showNoDataFromSourceVideoError(jitsiTrack: any) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        let notificationInfo;

        const track = getTrackByJitsiTrack(getState()['features/base/tracks'], jitsiTrack);

        if (!track) {
            return;
        }

        if (track.isReceivingData) {
            notificationInfo = undefined;
        } else {
            const notificationAction = dispatch(showErrorNotification({
                descriptionKey: 'dialog.cameraNotSendingData',
                titleKey: 'dialog.cameraNotSendingDataTitle'
            }, NOTIFICATION_TIMEOUT_TYPE.LONG));

            notificationInfo = {
                uid: notificationAction?.uid
            };
        }

        dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, notificationInfo));
    };
}

/**
 * Replaces one track with another for one renegotiation instead of invoking
 * two renegotiations with a separate removeTrack and addTrack. Disposes the
 * removed track as well.
 *
 * @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
 * @param {JitsiLocalTrack|null} newTrack - The track to use instead.
 * @param {JitsiConference} [conference] - The conference from which to remove
 * and add the tracks. If one is not provided, the conference in the redux store
 * will be used.
 * @returns {Function}
 */
export function replaceLocalTrack(oldTrack: any, newTrack: any, conference?: IJitsiConference) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        conference

            // eslint-disable-next-line no-param-reassign
            || (conference = getState()['features/base/conference'].conference);

        if (conference) {
            await conference.replaceTrack(oldTrack, newTrack);
        }

        return dispatch(replaceStoredTracks(oldTrack, newTrack));
    };
}
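
// Illustrative usage (the variable names here are hypothetical): a camera switch typically
// creates the replacement track first and then swaps it in with a single renegotiation,
// along the lines of
//
//     const [ newCameraTrack ] = await createLocalTracksF({ devices: [ MEDIA_TYPE.VIDEO ] }, store);
//     dispatch(replaceLocalTrack(oldCameraTrack, newCameraTrack));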

/**
 * Replaces a stored track with another.
 *
 * @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
 * @param {JitsiLocalTrack|null} newTrack - The track to use instead.
 * @returns {Function}
 */
function replaceStoredTracks(oldTrack: any, newTrack: any) {
    return async (dispatch: IStore['dispatch']) => {
        // We call dispose after doing the replace because dispose will
        // try and do a new o/a after the track removes itself. Doing it
        // after means the JitsiLocalTrack.conference is already
        // cleared, so it won't try and do the o/a.
        if (oldTrack) {
            await dispatch(_disposeAndRemoveTracks([ oldTrack ]));
        }

        if (newTrack) {
            // The mute state of the new track should be reflected in the app's mute state. For example, if the
            // app is currently muted and changing to a new track that is not muted, the app's mute state
            // should be falsey. As such, emit a mute event here to set up the app to reflect the track's mute
            // state. If this is not done, the current mute state of the app will be reflected on the track,
            // not vice-versa.
            const setMuted = newTrack.isVideoTrack()
                ? newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
                    ? setScreenshareMuted
                    : setVideoMuted
                : setAudioMuted;
            const isMuted = newTrack.isMuted();

            sendAnalytics(createTrackMutedEvent(newTrack.getType(), 'track.replaced', isMuted));
            logger.log(`Replace ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);

            dispatch(setMuted(isMuted));
            await dispatch(_addTracks([ newTrack ]));
        }
    };
}

/**
 * Create an action for when a new track has been signaled to be added to the
 * conference.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {Function}
 */
export function trackAdded(track: any) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        track.on(
            JitsiTrackEvents.TRACK_MUTE_CHANGED,
            () => dispatch(trackMutedChanged(track)));
        track.on(
            JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED,
            (type: VideoType) => dispatch(trackVideoTypeChanged(track, type)));

        const local = track.isLocal();
        const mediaType = track.getVideoType() === VIDEO_TYPE.DESKTOP
            ? MEDIA_TYPE.SCREENSHARE
            : track.getType();
        let isReceivingData, noDataFromSourceNotificationInfo, participantId;

        if (local) {
            // Reset the no data from src notification state when we change the track, as its context is set
            // on a per-device basis.
            dispatch(setNoSrcDataNotificationUid());
            const participant = getLocalParticipant(getState);

            if (participant) {
                participantId = participant.id;
            }

            isReceivingData = track.isReceivingData();
            track.on(JitsiTrackEvents.NO_DATA_FROM_SOURCE, () => dispatch(noDataFromSource({ jitsiTrack: track })));

            if (!isReceivingData) {
                if (mediaType === MEDIA_TYPE.AUDIO) {
                    const notificationAction = dispatch(showNotification({
                        descriptionKey: 'dialog.micNotSendingData',
                        titleKey: 'dialog.micNotSendingDataTitle'
                    }, NOTIFICATION_TIMEOUT_TYPE.LONG));

                    // Set the notification ID so that other parts of the application know that this was
                    // displayed in the context of the current device.
                    // I.e. the no-audio-signal notification shouldn't be displayed if this was already shown.
                    dispatch(setNoSrcDataNotificationUid(notificationAction?.uid));

                    noDataFromSourceNotificationInfo = { uid: notificationAction?.uid };
                } else {
                    const timeout = setTimeout(() => dispatch(
                        showNoDataFromSourceVideoError(track)),
                        NOTIFICATION_TIMEOUT.MEDIUM);

                    noDataFromSourceNotificationInfo = { timeout };
                }
            }

            track.on(JitsiTrackEvents.LOCAL_TRACK_STOPPED,
                () => dispatch({
                    type: TRACK_STOPPED,
                    track: {
                        jitsiTrack: track
                    }
                }));
        } else {
            participantId = track.getParticipantId();
            isReceivingData = true;
        }

        return dispatch({
            type: TRACK_ADDED,
            track: {
                jitsiTrack: track,
                isReceivingData,
                local,
                mediaType,
                mirror: _shouldMirror(track),
                muted: track.isMuted(),
                noDataFromSourceNotificationInfo,
                participantId,
                videoStarted: false,
                videoType: track.videoType
            }
        });
    };
}

/**
 * Create an action for when a track's codec has been signaled to have changed.
 *
 * @param {JitsiLocalTrack} track - JitsiLocalTrack instance.
 * @param {string} codec - The video codec.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackCodecChanged(track: ITrack, codec: string): {
    track: {
        codec: string;
        jitsiTrack: any;
    };
    type: 'TRACK_UPDATED';
} {
    return {
        type: TRACK_UPDATED,
        track: {
            codec,
            jitsiTrack: track
        }
    };
}

/**
 * Create an action for when a track's muted state has been signaled to be
 * changed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackMutedChanged(track: any): {
    track: {
        jitsiTrack: any;
        muted: boolean;
    };
    type: 'TRACK_UPDATED';
} {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            muted: track.isMuted()
        }
    };
}

/**
 * Create an action for when a track's muted state change action has failed. This could happen because of
 * {@code getUserMedia} errors during unmute or replace track errors at the peerconnection level.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {boolean} wasMuting - Whether the operation that failed was a mute operation or an unmute operation.
 * @returns {{
 *     type: TRACK_MUTE_UNMUTE_FAILED,
 *     track: Track
 * }}
 */
export function trackMuteUnmuteFailed(track: any, wasMuting: boolean): {
    track: any;
    type: 'TRACK_MUTE_UNMUTE_FAILED';
    wasMuting: boolean;
} {
    return {
        type: TRACK_MUTE_UNMUTE_FAILED,
        track,
        wasMuting
    };
}

/**
 * Create an action for when a track's no data from source notification information changes.
 *
 * @param {JitsiLocalTrack} track - JitsiTrack instance.
 * @param {Object} noDataFromSourceNotificationInfo - Information about no data from source notification.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackNoDataFromSourceNotificationInfoChanged(track: any, noDataFromSourceNotificationInfo?: Object) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            noDataFromSourceNotificationInfo
        }
    };
}

/**
 * Create an action for when a track has been signaled for removal from the
 * conference.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_REMOVED,
 *     track: Track
 * }}
 */
export function trackRemoved(track: any): {
    track: {
        jitsiTrack: any;
    };
    type: 'TRACK_REMOVED';
} {
    track.removeAllListeners(JitsiTrackEvents.TRACK_MUTE_CHANGED);
    track.removeAllListeners(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED);
    track.removeAllListeners(JitsiTrackEvents.NO_DATA_FROM_SOURCE);

    return {
        type: TRACK_REMOVED,
        track: {
            jitsiTrack: track
        }
    };
}

/**
 * Signal that track's video started to play.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackVideoStarted(track: any): {
    track: {
        jitsiTrack: any;
        videoStarted: true;
    };
    type: 'TRACK_UPDATED';
} {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            videoStarted: true
        }
    };
}

/**
 * Create an action for when participant video type changes.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {VIDEO_TYPE|undefined} videoType - Video type.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackVideoTypeChanged(track: any, videoType: VideoType) {
    const mediaType = videoType === VIDEO_TYPE.CAMERA ? MEDIA_TYPE.VIDEO : MEDIA_TYPE.SCREENSHARE;

    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            videoType,
            mediaType
        }
    };
}

/**
 * Create an action for when track streaming status changes.
 *
 * @param {(JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {string} streamingStatus - The new streaming status of the track.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackStreamingStatusChanged(track: any, streamingStatus: string): {
    track: {
        jitsiTrack: any;
        streamingStatus: string;
    };
    type: 'TRACK_UPDATED';
} {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            streamingStatus
        }
    };
}

/**
 * Signals passed tracks to be added.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @private
 * @returns {Function}
 */
function _addTracks(tracks: any[]) {
    return (dispatch: IStore['dispatch']) => Promise.all(tracks.map(t => dispatch(trackAdded(t))));
}

/**
 * Cancels and waits for any {@code getUserMedia} process currently in progress
 * to complete/settle.
 *
 * @param {Function} getState - The redux store {@code getState} function used
 * to obtain the state.
 * @private
 * @returns {Promise} - A {@code Promise} resolved once all
 * {@code gumProcess.cancel()} {@code Promise}s are settled because all we care
 * about here is to be sure that the {@code getUserMedia} callbacks have
 * completed (i.e. returned from the native side).
 */
function _cancelGUMProcesses(getState: IStore['getState']): Promise<any> {
    const logError
        = (error: Error) =>
            logger.error('gumProcess.cancel failed', JSON.stringify(error));

    return Promise.all(
        getState()['features/base/tracks']
            .filter(t => t.local)
            .map(({ gumProcess }: any) =>
                gumProcess?.cancel().catch(logError)));
}

/**
 * Disposes passed tracks and signals them to be removed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @protected
 * @returns {Function}
 */
export function _disposeAndRemoveTracks(tracks: any[]) {
    return (dispatch: IStore['dispatch']) =>
        _disposeTracks(tracks)
            .then(() =>
                Promise.all(tracks.map(t => dispatch(trackRemoved(t)))));
}

/**
 * Disposes passed tracks.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @private
 * @returns {Promise} - A Promise resolved once {@link JitsiTrack.dispose()} is
 * done for every track from the list.
 */
function _disposeTracks(tracks: any[]): Promise<any> {
    return Promise.all(
        tracks.map(t =>
            t.dispose()
                .catch((err: Error) => {
                    // Track might be already disposed so ignore such an error.
                    // Of course, re-throw any other error(s).
                    if (err.name !== JitsiTrackErrors.TRACK_IS_DISPOSED) {
                        throw err;
                    }
                })));
}

/**
 * Implements the {@code Promise} rejection handler of
 * {@code createLocalTracksA} and {@code createLocalTracksF}.
 *
 * @param {Object} error - The {@code Promise} rejection reason.
 * @param {string} device - The device/{@code MEDIA_TYPE} associated with the
 * rejection.
 * @private
 * @returns {Function}
 */
function _onCreateLocalTracksRejected(error?: Error, device?: string) {
    return (dispatch: IStore['dispatch']) => {
        // If permissions are not allowed, alert the user.
        dispatch({
            type: TRACK_CREATE_ERROR,
            permissionDenied: error?.name === 'SecurityError',
            trackType: device
        });
    };
}

/**
 * Returns true if the provided {@code JitsiTrack} should be rendered as a
 * mirror.
 *
 * We only want to show a video in mirrored mode when:
 * 1) The video source is local, and not remote.
 * 2) The video source is a camera, not a desktop (capture).
 * 3) The camera is capturing the user, not the environment.
 *
 * TODO Similar functionality is part of lib-jitsi-meet. This function should be
 * removed after https://github.com/jitsi/lib-jitsi-meet/pull/187 is merged.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @private
 * @returns {boolean}
 */
function _shouldMirror(track: any): boolean {
    return (
        track?.isLocal()
            && track?.isVideoTrack()

            // XXX The type of the return value of JitsiLocalTrack's
            // getCameraFacingMode happens to be named CAMERA_FACING_MODE as
            // well, it's defined by lib-jitsi-meet. Note though that the type
            // of the value on the right side of the equality check is defined
            // by jitsi-meet. The type definitions are surely compatible today
            // but that may not be the case tomorrow.
            && track?.getCameraFacingMode() === CAMERA_FACING_MODE.USER);
}

/**
 * Signals that track create operation for given media track has been canceled.
 * Will clean up local track stub from the redux state which holds the
 * {@code gumProcess} reference.
 *
 * @param {MEDIA_TYPE} mediaType - The type of the media for which the track was
 * being created.
 * @private
 * @returns {{
 *     type,
 *     trackType: MEDIA_TYPE
 * }}
 */
function _trackCreateCanceled(mediaType: MediaType): {
    trackType: MediaType;
    type: 'TRACK_CREATE_CANCELED';
} {
    return {
        type: TRACK_CREATE_CANCELED,
        trackType: mediaType
    };
}

/**
 * If the local track is of type Desktop, calls {@code _disposeAndRemoveTracks} on it.
 *
 * @returns {Function}
 */
export function destroyLocalDesktopTrackIfExists() {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const videoTrack = getLocalVideoTrack(getState()['features/base/tracks']);
        const isDesktopTrack = videoTrack && videoTrack.videoType === VIDEO_TYPE.DESKTOP;

        if (isDesktopTrack) {
            dispatch(_disposeAndRemoveTracks([ videoTrack.jitsiTrack ]));
        }
    };
}

/**
 * Sets the UID of the displayed no data from source notification. Used to track
 * if the notification was previously displayed in this context.
 *
 * @param {string} [uid] - Notification UID.
 * @returns {{
 *     type: SET_NO_SRC_DATA_NOTIFICATION_UID,
 *     uid: string
 * }}
 */
export function setNoSrcDataNotificationUid(uid?: string) {
    return {
        type: SET_NO_SRC_DATA_NOTIFICATION_UID,
        uid
    };
}

/**
 * Toggles the facingMode constraint on the video stream.
 *
 * @returns {Function}
 */
export function toggleCamera() {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const state = getState();
        const tracks = state['features/base/tracks'];
        const localVideoTrack = getLocalVideoTrack(tracks)?.jitsiTrack;
        const currentFacingMode = localVideoTrack.getCameraFacingMode();
        const { localFlipX } = state['features/base/settings'];

        /**
         * FIXME: Ideally, we should be dispatching {@code replaceLocalTrack} here,
         * but it seems to not trigger the re-rendering of the local video on Chrome;
         * could be due to a plan B vs unified plan issue. Therefore, we use the legacy
         * method defined in conference.js that manually takes care of updating the local
         * video as well.
         */
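        // APP is the Web app's global application object and useVideoStream is the legacy
        // helper from conference.js referenced in the FIXME above, so this action is only
        // usable in the Web build, where that global exists.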
        await APP.conference.useVideoStream(null);

        const targetFacingMode = currentFacingMode === CAMERA_FACING_MODE.USER
            ? CAMERA_FACING_MODE.ENVIRONMENT
            : CAMERA_FACING_MODE.USER;

        // Update the flipX value so the environment facing camera is not flipped, before the new track is created.
        dispatch(updateSettings({ localFlipX: targetFacingMode === CAMERA_FACING_MODE.USER ? localFlipX : false }));

        const newVideoTrack = await createLocalTrack('video', null, null, { facingMode: targetFacingMode });

        // FIXME: See above.
        await APP.conference.useVideoStream(newVideoTrack);
    };
}