
actions.any.ts

import { createTrackMutedEvent } from '../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../analytics/functions';
import { IStore } from '../../app/types';
import { showErrorNotification, showNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT, NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import { getCurrentConference } from '../conference/functions';
import { IJitsiConference } from '../conference/reducer';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
import { createLocalTrack } from '../lib-jitsi-meet/functions.any';
import { setAudioMuted, setScreenshareMuted, setVideoMuted } from '../media/actions';
import {
    CAMERA_FACING_MODE,
    MEDIA_TYPE,
    MediaType,
    VIDEO_MUTISM_AUTHORITY,
    VIDEO_TYPE,
    VideoType
} from '../media/constants';
import { getLocalParticipant } from '../participants/functions';
import { updateSettings } from '../settings/actions';
import {
    SET_NO_SRC_DATA_NOTIFICATION_UID,
    TRACK_ADDED,
    TRACK_CREATE_CANCELED,
    TRACK_CREATE_ERROR,
    TRACK_MUTE_UNMUTE_FAILED,
    TRACK_NO_DATA_FROM_SOURCE,
    TRACK_OWNER_CHANGED,
    TRACK_REMOVED,
    TRACK_STOPPED,
    TRACK_UPDATED,
    TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT,
    TRACK_WILL_CREATE
} from './actionTypes';
import {
    createLocalTracksF,
    getLocalTrack,
    getLocalTracks,
    getLocalVideoTrack,
    getTrackByJitsiTrack
} from './functions';
import logger from './logger';
import { ITrackOptions } from './types';

/**
 * Add a given local track to the conference.
 *
 * @param {JitsiLocalTrack} newTrack - The local track to be added to the conference.
 * @returns {Function}
 */
export function addLocalTrack(newTrack: any) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const conference = getCurrentConference(getState());

        if (conference) {
            await conference.addTrack(newTrack);
        }

        const setMuted = newTrack.isVideoTrack()
            ? getMultipleVideoSendingSupportFeatureFlag(getState())
                && newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
                ? setScreenshareMuted
                : setVideoMuted
            : setAudioMuted;
        const isMuted = newTrack.isMuted();

        logger.log(`Adding ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);

        await dispatch(setMuted(isMuted));

        return dispatch(_addTracks([ newTrack ]));
    };
}
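
// Usage sketch (illustrative addition, not part of the original file): once a
// JitsiLocalTrack has been created elsewhere (e.g. a screen-share track), it can be
// attached to the current conference and registered in the redux store with a single
// dispatch. `desktopTrack` is a hypothetical, previously created JitsiLocalTrack.
//
//     dispatch(addLocalTrack(desktopTrack));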

/**
 * Requests the creation of the desired media type tracks. Desire is expressed
 * by base/media unless the function caller specifies desired media types
 * explicitly and thus overrides base/media. Dispatches a
 * {@code createLocalTracksA} action for the desired media types for which there
 * are no existing tracks yet.
 *
 * @returns {Function}
 */
export function createDesiredLocalTracks(...desiredTypes: any) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const state = getState();

        dispatch(destroyLocalDesktopTrackIfExists());

        if (desiredTypes.length === 0) {
            const { video } = state['features/base/media'];

            // XXX: Always create the audio track early, even if it will be muted.
            // This fixes a timing issue when adding the track to the conference which
            // manifests primarily on iOS 15.
            desiredTypes.push(MEDIA_TYPE.AUDIO);

            // XXX When the app is coming into the foreground from the
            // background in order to handle a URL, it may realize the new
            // background state soon after it has tried to create the local
            // tracks requested by the URL. Ignore
            // VIDEO_MUTISM_AUTHORITY.BACKGROUND and create the local video
            // track if no other VIDEO_MUTISM_AUTHORITY has muted it. The local
            // video track will be muted until the app realizes the new
            // background state.
            // eslint-disable-next-line no-bitwise
            (video.muted & ~VIDEO_MUTISM_AUTHORITY.BACKGROUND)
                || desiredTypes.push(MEDIA_TYPE.VIDEO);
        }

        const availableTypes
            = getLocalTracks(
                    state['features/base/tracks'],
                    /* includePending */ true)
                .map(t => t.mediaType);

        // We need to create the desired tracks which are not already available.
        const createTypes
            = desiredTypes.filter((type: MediaType) => availableTypes.indexOf(type) === -1);

        createTypes.length
            && dispatch(createLocalTracksA({ devices: createTypes }));
    };
}
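
// Usage sketch (illustrative, not part of the original file): an entry point could
// request whatever base/media currently desires, or force an audio-only setup explicitly.
//
//     dispatch(createDesiredLocalTracks());                  // derive types from base/media
//     dispatch(createDesiredLocalTracks(MEDIA_TYPE.AUDIO));  // audio only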

/**
 * Request to start capturing local audio and/or video. By default, the user
 * facing camera will be selected.
 *
 * @param {Object} [options] - For info @see JitsiMeetJS.createLocalTracks.
 * @returns {Function}
 */
export function createLocalTracksA(options: ITrackOptions = {}) {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const devices
            = options.devices || [ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ];
        const store = {
            dispatch,
            getState
        };

        // The following executes on React Native only at the time of this
        // writing. The effort to port Web's createInitialLocalTracksAndConnect
        // is significant and that's where the function createLocalTracksF got
        // born. I started with the idea of porting so that we could inherit the
        // ability to getUserMedia for audio only or video only if getUserMedia
        // for audio and video fails. Eventually though, I realized that on
        // mobile we do not have combined permission prompts implemented anyway
        // (either because there are no such prompts or it does not make sense
        // to implement them) and the right thing to do is to ask for each
        // device separately.
        for (const device of devices) {
            if (getLocalTrack(
                    getState()['features/base/tracks'],
                    device as MediaType,
                    /* includePending */ true)) {
                throw new Error(`Local track for ${device} already exists`);
            }

            const gumProcess: any
                = createLocalTracksF(
                    {
                        cameraDeviceId: options.cameraDeviceId,
                        devices: [ device ],
                        facingMode:
                            options.facingMode || CAMERA_FACING_MODE.USER,
                        micDeviceId: options.micDeviceId
                    },
                    store)
                .then( // @ts-ignore
                    (localTracks: any[]) => {
                        // Because GUM is called for 1 device (which is actually
                        // a media type 'audio', 'video', 'screen', etc.) we
                        // should not get more than one JitsiTrack.
                        if (localTracks.length !== 1) {
                            throw new Error(
                                `Expected exactly 1 track, but was given ${
                                    localTracks.length} tracks for device: ${
                                    device}.`);
                        }

                        if (gumProcess.canceled) {
                            return _disposeTracks(localTracks)
                                .then(() =>
                                    dispatch(_trackCreateCanceled(device as MediaType)));
                        }

                        return dispatch(trackAdded(localTracks[0]));
                    },
                    (reason: Error) =>
                        dispatch(
                            gumProcess.canceled
                                ? _trackCreateCanceled(device as MediaType)
                                : _onCreateLocalTracksRejected(
                                    reason,
                                    device)));

            /**
             * Cancels the {@code getUserMedia} process represented by this
             * {@code Promise}.
             *
             * @returns {Promise} This {@code Promise} i.e. {@code gumProcess}.
             */
            gumProcess.cancel = () => {
                gumProcess.canceled = true;

                return gumProcess;
            };

            dispatch({
                type: TRACK_WILL_CREATE,
                track: {
                    gumProcess,
                    local: true,
                    mediaType: device
                }
            });
        }
    };
}
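
// Usage sketch (illustrative, not part of the original file): start capture for specific
// devices with a preferred camera. The device id below is hypothetical.
//
//     dispatch(createLocalTracksA({
//         devices: [ MEDIA_TYPE.VIDEO ],
//         cameraDeviceId: 'some-camera-device-id',
//         facingMode: CAMERA_FACING_MODE.ENVIRONMENT
//     }));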

/**
 * Calls JitsiLocalTrack#dispose() on the given track or on all local tracks (if none are passed) ignoring errors if
 * the track is already disposed. After that signals tracks to be removed.
 *
 * @param {JitsiLocalTrack|null} [track] - The local track that needs to be destroyed.
 * @returns {Function}
 */
export function destroyLocalTracks(track: any = null) {
    if (track) {
        return (dispatch: IStore['dispatch']) => {
            dispatch(_disposeAndRemoveTracks([ track ]));
        };
    }

    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        // First wait until any getUserMedia in progress is settled and then get
        // rid of all local tracks.
        _cancelGUMProcesses(getState)
            .then(() =>
                dispatch(
                    _disposeAndRemoveTracks(
                        getState()['features/base/tracks']
                            .filter(t => t.local)
                            .map(t => t.jitsiTrack))));
    };
}
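
// Usage sketch (illustrative, not part of the original file): typically dispatched while
// tearing down a conference to release all local capture devices.
//
//     dispatch(destroyLocalTracks());           // dispose every local track
//     dispatch(destroyLocalTracks(audioTrack)); // or dispose just one (hypothetical track)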

/**
 * Signals that the passed JitsiLocalTrack has triggered a no data from source event.
 *
 * @param {JitsiLocalTrack} track - The track.
 * @returns {{
 *     type: TRACK_NO_DATA_FROM_SOURCE,
 *     track: Track
 * }}
 */
export function noDataFromSource(track: any) {
    return {
        type: TRACK_NO_DATA_FROM_SOURCE,
        track
    };
}

/**
 * Displays a no data from source video error if needed.
 *
 * @param {JitsiLocalTrack} jitsiTrack - The track.
 * @returns {Function}
 */
export function showNoDataFromSourceVideoError(jitsiTrack: any) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        let notificationInfo;

        const track = getTrackByJitsiTrack(getState()['features/base/tracks'], jitsiTrack);

        if (!track) {
            return;
        }

        if (track.isReceivingData) {
            notificationInfo = undefined;
        } else {
            const notificationAction = await dispatch(showErrorNotification({
                descriptionKey: 'dialog.cameraNotSendingData',
                titleKey: 'dialog.cameraNotSendingDataTitle'
            }, NOTIFICATION_TIMEOUT_TYPE.LONG));

            notificationInfo = {
                uid: notificationAction?.uid
            };
        }
        dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, notificationInfo));
    };
}

/**
 * Replaces one track with another for one renegotiation instead of invoking
 * two renegotiations with a separate removeTrack and addTrack. Disposes the
 * removed track as well.
 *
 * @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
 * @param {JitsiLocalTrack|null} newTrack - The track to use instead.
 * @param {JitsiConference} [conference] - The conference from which to remove
 * and add the tracks. If one is not provided, the conference in the redux store
 * will be used.
 * @returns {Function}
 */
export function replaceLocalTrack(oldTrack: any, newTrack: any, conference?: IJitsiConference) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        conference
            // eslint-disable-next-line no-param-reassign
            || (conference = getState()['features/base/conference'].conference);

        if (conference) {
            await conference.replaceTrack(oldTrack, newTrack);
        }

        return dispatch(replaceStoredTracks(oldTrack, newTrack));
    };
}
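
// Usage sketch (illustrative, not part of the original file): swap the active camera track
// for another one in a single renegotiation; `oldCameraTrack` and `newCameraTrack` are
// hypothetical JitsiLocalTrack instances created elsewhere.
//
//     await dispatch(replaceLocalTrack(oldCameraTrack, newCameraTrack));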

/**
 * Replaces a stored track with another.
 *
 * @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
 * @param {JitsiLocalTrack|null} newTrack - The track to use instead.
 * @returns {Function}
 */
function replaceStoredTracks(oldTrack: any, newTrack: any) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        // We call dispose after doing the replace because dispose will
        // try and do a new o/a after the track removes itself. Doing it
        // after means the JitsiLocalTrack.conference is already
        // cleared, so it won't try and do the o/a.
        if (oldTrack) {
            await dispatch(_disposeAndRemoveTracks([ oldTrack ]));
        }

        if (newTrack) {
            // The mute state of the new track should be reflected in the app's mute state. For example, if the
            // app is currently muted and changing to a new track that is not muted, the app's mute state
            // should be falsy. As such, emit a mute event here to set up the app to reflect the track's mute
            // state. If this is not done, the current mute state of the app will be reflected on the track,
            // not vice-versa.
            const setMuted = newTrack.isVideoTrack()
                ? getMultipleVideoSendingSupportFeatureFlag(getState())
                    && newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
                    ? setScreenshareMuted
                    : setVideoMuted
                : setAudioMuted;
            const isMuted = newTrack.isMuted();

            sendAnalytics(createTrackMutedEvent(newTrack.getType(), 'track.replaced', isMuted));
            logger.log(`Replace ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);

            await dispatch(setMuted(isMuted));
            await dispatch(_addTracks([ newTrack ]));
        }
    };
}

/**
 * Create an action for when a new track has been signaled to be added to the
 * conference.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {Function}
 */
export function trackAdded(track: any) {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        track.on(
            JitsiTrackEvents.TRACK_MUTE_CHANGED,
            () => dispatch(trackMutedChanged(track)));
        track.on(
            JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED,
            (type: VideoType) => dispatch(trackVideoTypeChanged(track, type)));
        track.on(
            JitsiTrackEvents.TRACK_OWNER_CHANGED,
            (owner: string) => dispatch(trackOwnerChanged(track, owner)));

        const local = track.isLocal();
        const isVirtualScreenshareParticipantCreated = !local || getMultipleVideoSendingSupportFeatureFlag(getState());
        const mediaType = track.getVideoType() === VIDEO_TYPE.DESKTOP && isVirtualScreenshareParticipantCreated
            ? MEDIA_TYPE.SCREENSHARE
            : track.getType();
        let isReceivingData, noDataFromSourceNotificationInfo, participantId;

        if (local) {
            // Reset the no data from src notification state when we change the track, as its context is set
            // on a per-device basis.
            dispatch(setNoSrcDataNotificationUid());
            const participant = getLocalParticipant(getState);

            if (participant) {
                participantId = participant.id;
            }

            isReceivingData = track.isReceivingData();
            track.on(JitsiTrackEvents.NO_DATA_FROM_SOURCE, () => dispatch(noDataFromSource({ jitsiTrack: track })));

            if (!isReceivingData) {
                if (mediaType === MEDIA_TYPE.AUDIO) {
                    const notificationAction = await dispatch(showNotification({
                        descriptionKey: 'dialog.micNotSendingData',
                        titleKey: 'dialog.micNotSendingDataTitle'
                    }, NOTIFICATION_TIMEOUT_TYPE.LONG));

                    // Set the notification ID so that other parts of the application know that this was
                    // displayed in the context of the current device.
                    // I.e. the no-audio-signal notification shouldn't be displayed if this was already shown.
                    dispatch(setNoSrcDataNotificationUid(notificationAction?.uid));

                    noDataFromSourceNotificationInfo = { uid: notificationAction?.uid };
                } else {
                    const timeout = setTimeout(() => dispatch(
                        showNoDataFromSourceVideoError(track)),
                        NOTIFICATION_TIMEOUT.MEDIUM);

                    noDataFromSourceNotificationInfo = { timeout };
                }
            }

            track.on(JitsiTrackEvents.LOCAL_TRACK_STOPPED,
                () => dispatch({
                    type: TRACK_STOPPED,
                    track: {
                        jitsiTrack: track
                    }
                }));
        } else {
            participantId = track.getParticipantId();
            isReceivingData = true;
        }

        return dispatch({
            type: TRACK_ADDED,
            track: {
                jitsiTrack: track,
                isReceivingData,
                local,
                mediaType,
                mirror: _shouldMirror(track),
                muted: track.isMuted(),
                noDataFromSourceNotificationInfo,
                participantId,
                videoStarted: false,
                videoType: track.videoType
            }
        });
    };
}

/**
 * Create an action for when a track's muted state has been signaled to be
 * changed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackMutedChanged(track: any): {
    track: {
        jitsiTrack: any;
        muted: boolean;
    };
    type: 'TRACK_UPDATED';
} {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            muted: track.isMuted()
        }
    };
}

/**
 * Create an action for when a track's muted state change action has failed. This could happen because of
 * {@code getUserMedia} errors during unmute or replace track errors at the peerconnection level.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {boolean} wasMuting - If the operation that failed was a mute operation or an unmute operation.
 * @returns {{
 *     type: TRACK_MUTE_UNMUTE_FAILED,
 *     track: Track
 * }}
 */
export function trackMuteUnmuteFailed(track: any, wasMuting: boolean): {
    track: any;
    type: 'TRACK_MUTE_UNMUTE_FAILED';
    wasMuting: boolean;
} {
    return {
        type: TRACK_MUTE_UNMUTE_FAILED,
        track,
        wasMuting
    };
}

/**
 * Create an action for when a track's no data from source notification information changes.
 *
 * @param {JitsiLocalTrack} track - JitsiTrack instance.
 * @param {Object} noDataFromSourceNotificationInfo - Information about no data from source notification.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackNoDataFromSourceNotificationInfoChanged(track: any, noDataFromSourceNotificationInfo?: Object) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            noDataFromSourceNotificationInfo
        }
    };
}

/**
 * Create an action for when a track has been signaled for removal from the
 * conference.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_REMOVED,
 *     track: Track
 * }}
 */
export function trackRemoved(track: any): {
    track: {
        jitsiTrack: any;
    };
    type: 'TRACK_REMOVED';
} {
    track.removeAllListeners(JitsiTrackEvents.TRACK_MUTE_CHANGED);
    track.removeAllListeners(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED);
    track.removeAllListeners(JitsiTrackEvents.NO_DATA_FROM_SOURCE);

    return {
        type: TRACK_REMOVED,
        track: {
            jitsiTrack: track
        }
    };
}

/**
 * Signal that track's video started to play.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackVideoStarted(track: any): {
    track: {
        jitsiTrack: any;
        videoStarted: true;
    };
    type: 'TRACK_UPDATED';
} {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            videoStarted: true
        }
    };
}

/**
 * Create an action for when participant video type changes.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {VIDEO_TYPE|undefined} videoType - Video type.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackVideoTypeChanged(track: any, videoType: VideoType) {
    const mediaType = videoType === VIDEO_TYPE.CAMERA ? MEDIA_TYPE.VIDEO : MEDIA_TYPE.SCREENSHARE;

    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            videoType,
            mediaType
        }
    };
}

/**
 * Create an action for when track streaming status changes.
 *
 * @param {(JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {string} streamingStatus - The new streaming status of the track.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackStreamingStatusChanged(track: any, streamingStatus: string): {
    track: {
        jitsiTrack: any;
        streamingStatus: string;
    };
    type: 'TRACK_UPDATED';
} {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            streamingStatus
        }
    };
}

/**
 * Create an action for when the owner of the track changes due to ssrc remapping.
 *
 * @param {(JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {string} participantId - New owner's participant ID.
 * @returns {{
 *     type: TRACK_OWNER_CHANGED,
 *     track: Track
 * }}
 */
export function trackOwnerChanged(track: any, participantId: string): {
    track: {
        jitsiTrack: any;
        participantId: string;
    };
    type: 'TRACK_OWNER_CHANGED';
} {
    return {
        type: TRACK_OWNER_CHANGED,
        track: {
            jitsiTrack: track,
            participantId
        }
    };
}

/**
 * Signals passed tracks to be added.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @private
 * @returns {Function}
 */
function _addTracks(tracks: any[]) {
    return (dispatch: IStore['dispatch']) => Promise.all(tracks.map(t => dispatch(trackAdded(t))));
}

/**
 * Cancels and waits for any {@code getUserMedia} processes currently in progress
 * to complete/settle.
 *
 * @param {Function} getState - The redux store {@code getState} function used
 * to obtain the state.
 * @private
 * @returns {Promise} - A {@code Promise} resolved once all
 * {@code gumProcess.cancel()} {@code Promise}s are settled because all we care
 * about here is to be sure that the {@code getUserMedia} callbacks have
 * completed (i.e. returned from the native side).
 */
function _cancelGUMProcesses(getState: IStore['getState']): Promise<any> {
    const logError
        = (error: Error) =>
            logger.error('gumProcess.cancel failed', JSON.stringify(error));

    return Promise.all(
        getState()['features/base/tracks']
            .filter(t => t.local)
            .map(({ gumProcess }: any) =>
                gumProcess?.cancel().catch(logError)));
}

/**
 * Disposes passed tracks and signals them to be removed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @protected
 * @returns {Function}
 */
export function _disposeAndRemoveTracks(tracks: any[]) {
    return (dispatch: IStore['dispatch']) =>
        _disposeTracks(tracks)
            .then(() =>
                Promise.all(tracks.map(t => dispatch(trackRemoved(t)))));
}

/**
 * Disposes passed tracks.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @private
 * @returns {Promise} - A Promise resolved once {@link JitsiTrack.dispose()} is
 * done for every track from the list.
 */
function _disposeTracks(tracks: any[]): Promise<any> {
    return Promise.all(
        tracks.map(t =>
            t.dispose()
                .catch((err: Error) => {
                    // Track might be already disposed so ignore such an error.
                    // Of course, re-throw any other error(s).
                    if (err.name !== JitsiTrackErrors.TRACK_IS_DISPOSED) {
                        throw err;
                    }
                })));
}

/**
 * Implements the {@code Promise} rejection handler of
 * {@code createLocalTracksA} and {@code createLocalTracksF}.
 *
 * @param {Object} error - The {@code Promise} rejection reason.
 * @param {string} device - The device/{@code MEDIA_TYPE} associated with the
 * rejection.
 * @private
 * @returns {Function}
 */
function _onCreateLocalTracksRejected(error?: Error, device?: string) {
    return (dispatch: IStore['dispatch']) => {
        // If permissions are not allowed, alert the user.
        dispatch({
            type: TRACK_CREATE_ERROR,
            permissionDenied: error?.name === 'SecurityError',
            trackType: device
        });
    };
}

/**
 * Returns true if the provided {@code JitsiTrack} should be rendered as a
 * mirror.
 *
 * We only want to show a video in mirrored mode when:
 * 1) The video source is local, and not remote.
 * 2) The video source is a camera, not a desktop (capture).
 * 3) The camera is capturing the user, not the environment.
 *
 * TODO Similar functionality is part of lib-jitsi-meet. This function should be
 * removed after https://github.com/jitsi/lib-jitsi-meet/pull/187 is merged.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @private
 * @returns {boolean}
 */
function _shouldMirror(track: any): boolean {
    return (
        track?.isLocal()
            && track?.isVideoTrack()

            // XXX The type of the return value of JitsiLocalTrack's
            // getCameraFacingMode happens to be named CAMERA_FACING_MODE as
            // well, it's defined by lib-jitsi-meet. Note though that the type
            // of the value on the right side of the equality check is defined
            // by jitsi-meet. The type definitions are surely compatible today
            // but that may not be the case tomorrow.
            && track?.getCameraFacingMode() === CAMERA_FACING_MODE.USER);
}

/**
 * Signals that track create operation for given media track has been canceled.
 * Will clean up local track stub from the redux state which holds the
 * {@code gumProcess} reference.
 *
 * @param {MEDIA_TYPE} mediaType - The type of the media for which the track was
 * being created.
 * @private
 * @returns {{
 *     type,
 *     trackType: MEDIA_TYPE
 * }}
 */
function _trackCreateCanceled(mediaType: MediaType): {
    trackType: MediaType;
    type: 'TRACK_CREATE_CANCELED';
} {
    return {
        type: TRACK_CREATE_CANCELED,
        trackType: mediaType
    };
}

/**
 * If the local track is of type Desktop, it calls _disposeAndRemoveTracks() on it.
 *
 * @returns {Function}
 */
export function destroyLocalDesktopTrackIfExists() {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const videoTrack = getLocalVideoTrack(getState()['features/base/tracks']);
        const isDesktopTrack = videoTrack && videoTrack.videoType === VIDEO_TYPE.DESKTOP;

        if (isDesktopTrack) {
            dispatch(_disposeAndRemoveTracks([ videoTrack.jitsiTrack ]));
        }
    };
}

/**
 * Sets UID of the displayed no data from source notification. Used to track
 * if the notification was previously displayed in this context.
 *
 * @param {string} uid - Notification UID.
 * @returns {{
 *     type: SET_NO_SRC_DATA_NOTIFICATION_UID,
 *     uid: string
 * }}
 */
export function setNoSrcDataNotificationUid(uid?: string) {
    return {
        type: SET_NO_SRC_DATA_NOTIFICATION_UID,
        uid
    };
}

/**
 * Updates the last media event received for a video track.
 *
 * @param {JitsiRemoteTrack} track - JitsiTrack instance.
 * @param {string} name - The current media event name for the video.
 * @returns {{
 *     type: TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT,
 *     track: Track,
 *     name: string
 * }}
 */
export function updateLastTrackVideoMediaEvent(track: any, name: string): {
    name: string;
    track: any;
    type: 'TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT';
} {
    return {
        type: TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT,
        track,
        name
    };
}

/**
 * Toggles the facingMode constraint on the video stream.
 *
 * @returns {Function}
 */
export function toggleCamera() {
    return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const state = getState();
        const tracks = state['features/base/tracks'];
        const localVideoTrack = getLocalVideoTrack(tracks)?.jitsiTrack;
        const currentFacingMode = localVideoTrack.getCameraFacingMode();

        /**
         * FIXME: Ideally, we should be dispatching {@code replaceLocalTrack} here,
         * but it seems to not trigger the re-rendering of the local video on Chrome;
         * could be due to a plan B vs unified plan issue. Therefore, we use the legacy
         * method defined in conference.js that manually takes care of updating the local
         * video as well.
         */
        await APP.conference.useVideoStream(null);

        const targetFacingMode = currentFacingMode === CAMERA_FACING_MODE.USER
            ? CAMERA_FACING_MODE.ENVIRONMENT
            : CAMERA_FACING_MODE.USER;

        // Update the flipX value so the environment facing camera is not flipped, before the new track is created.
        dispatch(updateSettings({ localFlipX: targetFacingMode === CAMERA_FACING_MODE.USER }));

        const newVideoTrack = await createLocalTrack('video', null, null, { facingMode: targetFacingMode });

        // FIXME: See above.
        await APP.conference.useVideoStream(newVideoTrack);
    };
}
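
// Usage sketch (illustrative, not part of the original file): wiring the thunk to a UI
// control such as a "flip camera" button. `store` and `flipCameraButton` are hypothetical
// stand-ins for the app's redux store and a DOM element.
//
//     flipCameraButton.addEventListener('click', () => store.dispatch(toggleCamera()));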