
actions.js 28KB

/* global APP */
import {
    createTrackMutedEvent,
    sendAnalytics
} from '../../analytics';
import { NOTIFICATION_TIMEOUT_TYPE, showErrorNotification, showNotification } from '../../notifications';
import { getCurrentConference } from '../conference';
import { getMultipleVideoSupportFeatureFlag, getSourceNameSignalingFeatureFlag } from '../config';
import { JitsiTrackErrors, JitsiTrackEvents, createLocalTrack } from '../lib-jitsi-meet';
import {
    CAMERA_FACING_MODE,
    MEDIA_TYPE,
    setAudioMuted,
    setScreenshareMuted,
    setVideoMuted,
    VIDEO_MUTISM_AUTHORITY,
    VIDEO_TYPE
} from '../media';
import { getLocalParticipant } from '../participants';
import { updateSettings } from '../settings';

import {
    SCREENSHARE_TRACK_MUTED_UPDATED,
    SET_NO_SRC_DATA_NOTIFICATION_UID,
    TOGGLE_SCREENSHARING,
    TRACK_ADDED,
    TRACK_CREATE_CANCELED,
    TRACK_CREATE_ERROR,
    TRACK_MUTE_UNMUTE_FAILED,
    TRACK_NO_DATA_FROM_SOURCE,
    TRACK_REMOVED,
    TRACK_STOPPED,
    TRACK_UPDATED,
    TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT,
    TRACK_WILL_CREATE
} from './actionTypes';
import {
    createLocalTracksF,
    getLocalTrack,
    getLocalTracks,
    getLocalVideoTrack,
    getTrackByJitsiTrack
} from './functions';
import logger from './logger';

/**
 * Add a given local track to the conference.
 *
 * @param {JitsiLocalTrack} newTrack - The local track to be added to the conference.
 * @returns {Function}
 */
export function addLocalTrack(newTrack) {
    return async (dispatch, getState) => {
        const conference = getCurrentConference(getState());

        if (conference) {
            await conference.addTrack(newTrack);
        }

        const setMuted = newTrack.isVideoTrack()
            ? getMultipleVideoSupportFeatureFlag(getState())
                && newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
                ? setScreenshareMuted
                : setVideoMuted
            : setAudioMuted;
        const isMuted = newTrack.isMuted();

        logger.log(`Adding ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);
        await dispatch(setMuted(isMuted));

        return dispatch(_addTracks([ newTrack ]));
    };
}
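// Usage sketch (illustrative only, assuming a connected redux store named
// `store` with redux-thunk; the names below are not defined in this module):
// a track obtained from createLocalTracksF can be handed to addLocalTrack so
// the conference and the app's mute state pick it up.
//
//     createLocalTracksF({ devices: [ 'desktop' ] }, store)
//         .then(([ desktopTrack ]) => store.dispatch(addLocalTrack(desktopTrack)));
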
/**
 * Requests the creation of the desired media type tracks. Desire is expressed
 * by base/media unless the function caller specifies the desired media types
 * explicitly and thus overrides base/media. Dispatches a
 * {@code createLocalTracksA} action for the desired media types for which there
 * are no existing tracks yet.
 *
 * @returns {Function}
 */
export function createDesiredLocalTracks(...desiredTypes) {
    return (dispatch, getState) => {
        const state = getState();

        dispatch(destroyLocalDesktopTrackIfExists());

        if (desiredTypes.length === 0) {
            const { video } = state['features/base/media'];

            // XXX: Always create the audio track early, even if it will be muted.
            // This fixes a timing issue when adding the track to the conference which
            // manifests primarily on iOS 15.
            desiredTypes.push(MEDIA_TYPE.AUDIO);

            // XXX When the app is coming into the foreground from the
            // background in order to handle a URL, it may realize the new
            // background state soon after it has tried to create the local
            // tracks requested by the URL. Ignore
            // VIDEO_MUTISM_AUTHORITY.BACKGROUND and create the local video
            // track if no other VIDEO_MUTISM_AUTHORITY has muted it. The local
            // video track will be muted until the app realizes the new
            // background state.
            // eslint-disable-next-line no-bitwise
            (video.muted & ~VIDEO_MUTISM_AUTHORITY.BACKGROUND)
                || desiredTypes.push(MEDIA_TYPE.VIDEO);
        }

        const availableTypes
            = getLocalTracks(
                    state['features/base/tracks'],
                    /* includePending */ true)
                .map(t => t.mediaType);

        // We need to create the desired tracks which are not already available.
        const createTypes
            = desiredTypes.filter(type => availableTypes.indexOf(type) === -1);

        createTypes.length
            && dispatch(createLocalTracksA({ devices: createTypes }));
    };
}
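// Usage sketch (illustrative only; `store` is an assumed connected redux store
// with redux-thunk): with no arguments the desired types are derived from the
// base/media state, while explicit types override it.
//
//     store.dispatch(createDesiredLocalTracks());                 // audio, plus video unless muted
//     store.dispatch(createDesiredLocalTracks(MEDIA_TYPE.AUDIO)); // audio only
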
/**
 * Request to start capturing local audio and/or video. By default, the user
 * facing camera will be selected.
 *
 * @param {Object} [options] - For info @see JitsiMeetJS.createLocalTracks.
 * @returns {Function}
 */
export function createLocalTracksA(options = {}) {
    return (dispatch, getState) => {
        const devices
            = options.devices || [ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ];
        const store = {
            dispatch,
            getState
        };

        // The following executes on React Native only at the time of this
        // writing. The effort to port Web's createInitialLocalTracksAndConnect
        // is significant and that's where the function createLocalTracksF was
        // born. I started with the idea of porting it so that we could inherit
        // the ability to getUserMedia for audio only or video only if
        // getUserMedia for audio and video fails. Eventually though, I realized
        // that on mobile we do not have combined permission prompts implemented
        // anyway (either because there are no such prompts or it does not make
        // sense to implement them) and the right thing to do is to ask for each
        // device separately.
        for (const device of devices) {
            if (getLocalTrack(
                    getState()['features/base/tracks'],
                    device,
                    /* includePending */ true)) {
                throw new Error(`Local track for ${device} already exists`);
            }

            const gumProcess
                = createLocalTracksF(
                    {
                        cameraDeviceId: options.cameraDeviceId,
                        devices: [ device ],
                        facingMode:
                            options.facingMode || CAMERA_FACING_MODE.USER,
                        micDeviceId: options.micDeviceId
                    },
                    store)
                .then(
                    localTracks => {
                        // Because GUM is called for 1 device (which is actually
                        // a media type 'audio', 'video', 'screen', etc.) we
                        // should not get more than one JitsiTrack.
                        if (localTracks.length !== 1) {
                            throw new Error(
                                `Expected exactly 1 track, but was given ${
                                    localTracks.length} tracks for device: ${
                                    device}.`);
                        }

                        if (gumProcess.canceled) {
                            return _disposeTracks(localTracks)
                                .then(() =>
                                    dispatch(_trackCreateCanceled(device)));
                        }

                        return dispatch(trackAdded(localTracks[0]));
                    },
                    reason =>
                        dispatch(
                            gumProcess.canceled
                                ? _trackCreateCanceled(device)
                                : _onCreateLocalTracksRejected(
                                    reason,
                                    device)));

            /**
             * Cancels the {@code getUserMedia} process represented by this
             * {@code Promise}.
             *
             * @returns {Promise} This {@code Promise} i.e. {@code gumProcess}.
             */
            gumProcess.cancel = () => {
                gumProcess.canceled = true;

                return gumProcess;
            };

            dispatch({
                type: TRACK_WILL_CREATE,
                track: {
                    gumProcess,
                    local: true,
                    mediaType: device
                }
            });
        }
    };
}
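// Usage sketch (illustrative only; `store` and the device ids are assumed
// example values): each requested device gets its own gumProcess, which the
// TRACK_WILL_CREATE stub exposes so a pending getUserMedia can be canceled.
//
//     store.dispatch(createLocalTracksA({
//         devices: [ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ],
//         cameraDeviceId: 'example-camera-id',
//         micDeviceId: 'example-mic-id'
//     }));
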
/**
 * Calls JitsiLocalTrack#dispose() on the given track or on all local tracks (if none are passed), ignoring errors
 * if a track is already disposed. After that, signals the tracks to be removed.
 *
 * @param {JitsiLocalTrack|null} [track] - The local track that needs to be destroyed.
 * @returns {Function}
 */
export function destroyLocalTracks(track = null) {
    if (track) {
        return dispatch => {
            dispatch(_disposeAndRemoveTracks([ track ]));
        };
    }

    return (dispatch, getState) => {
        // First wait until any getUserMedia in progress is settled and then get
        // rid of all local tracks.
        _cancelGUMProcesses(getState)
            .then(() =>
                dispatch(
                    _disposeAndRemoveTracks(
                        getState()['features/base/tracks']
                            .filter(t => t.local)
                            .map(t => t.jitsiTrack))));
    };
}
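// Usage sketch (illustrative only; `store` is an assumed connected redux
// store): typically dispatched during hangup/cleanup so that pending
// getUserMedia calls settle before every local track is disposed and removed.
//
//     store.dispatch(destroyLocalTracks());
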
/**
 * Signals that the passed JitsiLocalTrack has triggered a no data from source event.
 *
 * @param {JitsiLocalTrack} track - The track.
 * @returns {{
 *     type: TRACK_NO_DATA_FROM_SOURCE,
 *     track: Track
 * }}
 */
export function noDataFromSource(track) {
    return {
        type: TRACK_NO_DATA_FROM_SOURCE,
        track
    };
}

/**
 * Displays a no data from source video error if needed.
 *
 * @param {JitsiLocalTrack} jitsiTrack - The track.
 * @returns {Function}
 */
export function showNoDataFromSourceVideoError(jitsiTrack) {
    return async (dispatch, getState) => {
        let notificationInfo;

        const track = getTrackByJitsiTrack(getState()['features/base/tracks'], jitsiTrack);

        if (!track) {
            return;
        }

        if (track.isReceivingData) {
            notificationInfo = undefined;
        } else {
            const notificationAction = await dispatch(showErrorNotification({
                descriptionKey: 'dialog.cameraNotSendingData',
                titleKey: 'dialog.cameraNotSendingDataTitle'
            }, NOTIFICATION_TIMEOUT_TYPE.LONG));

            notificationInfo = {
                uid: notificationAction.uid
            };
        }

        dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, notificationInfo));
    };
}
/**
 * Signals that the local participant is ending screensharing or beginning the
 * screensharing flow.
 *
 * @param {boolean} enabled - The state to toggle screen sharing to.
 * @param {boolean} audioOnly - Only share system audio.
 * @param {boolean} ignoreDidHaveVideo - Whether or not to ignore if video was on when sharing started.
 * @returns {{
 *     type: TOGGLE_SCREENSHARING,
 *     enabled: boolean,
 *     audioOnly: boolean,
 *     ignoreDidHaveVideo: boolean
 * }}
 */
export function toggleScreensharing(enabled, audioOnly = false, ignoreDidHaveVideo = false) {
    return {
        type: TOGGLE_SCREENSHARING,
        enabled,
        audioOnly,
        ignoreDidHaveVideo
    };
}
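// Usage sketch (illustrative only; `store` is an assumed connected redux
// store): the action itself is plain data; the actual capture start/stop is
// expected to be handled by whatever reacts to TOGGLE_SCREENSHARING.
//
//     store.dispatch(toggleScreensharing(true));                       // start sharing
//     store.dispatch(toggleScreensharing(true, /* audioOnly */ true)); // share system audio only
//     store.dispatch(toggleScreensharing(false));                      // stop sharing
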
/**
 * Replaces one track with another for one renegotiation instead of invoking
 * two renegotiations with a separate removeTrack and addTrack. Disposes the
 * removed track as well.
 *
 * @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
 * @param {JitsiLocalTrack|null} newTrack - The track to use instead.
 * @param {JitsiConference} [conference] - The conference from which to remove
 * and add the tracks. If one is not provided, the conference in the redux store
 * will be used.
 * @returns {Function}
 */
export function replaceLocalTrack(oldTrack, newTrack, conference) {
    return async (dispatch, getState) => {
        conference

            // eslint-disable-next-line no-param-reassign
            || (conference = getState()['features/base/conference'].conference);

        if (conference) {
            await conference.replaceTrack(oldTrack, newTrack);
        }

        return dispatch(replaceStoredTracks(oldTrack, newTrack));
    };
}
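// Usage sketch (illustrative only; `store` is an assumed connected redux store
// and 'new-camera-id' a hypothetical device id): switching cameras with a
// single renegotiation instead of a removeTrack followed by an addTrack.
//
//     const oldTrack = getLocalVideoTrack(store.getState()['features/base/tracks']).jitsiTrack;
//
//     createLocalTracksF({ devices: [ 'video' ], cameraDeviceId: 'new-camera-id' }, store)
//         .then(([ newTrack ]) => store.dispatch(replaceLocalTrack(oldTrack, newTrack)));
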
/**
 * Replaces a stored track with another.
 *
 * @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
 * @param {JitsiLocalTrack|null} newTrack - The track to use instead.
 * @returns {Function}
 */
function replaceStoredTracks(oldTrack, newTrack) {
    return async (dispatch, getState) => {
        // We call dispose after doing the replace because dispose will
        // try and do a new o/a after the track removes itself. Doing it
        // after means the JitsiLocalTrack.conference is already
        // cleared, so it won't try and do the o/a.
        if (oldTrack) {
            await dispatch(_disposeAndRemoveTracks([ oldTrack ]));
        }

        if (newTrack) {
            // The mute state of the new track should be reflected in the app's mute state. For example, if the
            // app is currently muted and changing to a new track that is not muted, the app's mute state
            // should be falsey. As such, emit a mute event here to set up the app to reflect the track's mute
            // state. If this is not done, the current mute state of the app will be reflected on the track,
            // not vice-versa.
            const setMuted = newTrack.isVideoTrack()
                ? getMultipleVideoSupportFeatureFlag(getState()) && newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
                    ? setScreenshareMuted
                    : setVideoMuted
                : setAudioMuted;
            const isMuted = newTrack.isMuted();

            sendAnalytics(createTrackMutedEvent(newTrack.getType(), 'track.replaced', isMuted));
            logger.log(`Replace ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);

            await dispatch(setMuted(isMuted));
            await dispatch(_addTracks([ newTrack ]));
        }
    };
}

/**
 * Create an action for when a new track has been signaled to be added to the
 * conference.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {Function}
 */
export function trackAdded(track) {
    return async (dispatch, getState) => {
        track.on(
            JitsiTrackEvents.TRACK_MUTE_CHANGED,
            () => {
                if (getSourceNameSignalingFeatureFlag(getState()) && track.getVideoType() === VIDEO_TYPE.DESKTOP) {
                    dispatch(screenshareTrackMutedChanged(track));
                }

                dispatch(trackMutedChanged(track));
            });
        track.on(
            JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED,
            type => dispatch(trackVideoTypeChanged(track, type)));

        // participantId
        const local = track.isLocal();
        const mediaType = getMultipleVideoSupportFeatureFlag(getState()) && track.getVideoType() === VIDEO_TYPE.DESKTOP
            ? MEDIA_TYPE.SCREENSHARE
            : track.getType();
        let isReceivingData, noDataFromSourceNotificationInfo, participantId;

        if (local) {
            // Reset the no data from src notification state when we change the track, as its context is set
            // on a per-device basis.
            dispatch(setNoSrcDataNotificationUid());
            const participant = getLocalParticipant(getState);

            if (participant) {
                participantId = participant.id;
            }

            isReceivingData = track.isReceivingData();
            track.on(JitsiTrackEvents.NO_DATA_FROM_SOURCE, () => dispatch(noDataFromSource({ jitsiTrack: track })));

            if (!isReceivingData) {
                if (mediaType === MEDIA_TYPE.AUDIO) {
                    const notificationAction = await dispatch(showNotification({
                        descriptionKey: 'dialog.micNotSendingData',
                        titleKey: 'dialog.micNotSendingDataTitle'
                    }, NOTIFICATION_TIMEOUT_TYPE.LONG));

                    // Set the notification ID so that other parts of the application know that this was
                    // displayed in the context of the current device.
                    // I.e. the no-audio-signal notification shouldn't be displayed if this was already shown.
                    dispatch(setNoSrcDataNotificationUid(notificationAction.uid));

                    noDataFromSourceNotificationInfo = { uid: notificationAction.uid };
                } else {
                    const timeout = setTimeout(
                        () => dispatch(showNoDataFromSourceVideoError(track)),
                        NOTIFICATION_TIMEOUT_TYPE.MEDIUM);

                    noDataFromSourceNotificationInfo = { timeout };
                }
            }

            track.on(JitsiTrackEvents.LOCAL_TRACK_STOPPED,
                () => dispatch({
                    type: TRACK_STOPPED,
                    track: {
                        jitsiTrack: track
                    }
                }));
        } else {
            participantId = track.getParticipantId();
            isReceivingData = true;
        }

        return dispatch({
            type: TRACK_ADDED,
            track: {
                jitsiTrack: track,
                isReceivingData,
                local,
                mediaType,
                mirror: _shouldMirror(track),
                muted: track.isMuted(),
                noDataFromSourceNotificationInfo,
                participantId,
                videoStarted: false,
                videoType: track.videoType
            }
        });
    };
}
/**
 * Create an action for when a track's muted state has been signaled to be
 * changed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackMutedChanged(track) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            muted: track.isMuted()
        }
    };
}

/**
 * Create an action for when a screenshare track's muted state has been signaled to be changed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: SCREENSHARE_TRACK_MUTED_UPDATED,
 *     track: Track,
 *     muted: boolean
 * }}
 */
export function screenshareTrackMutedChanged(track) {
    return {
        type: SCREENSHARE_TRACK_MUTED_UPDATED,
        track: { jitsiTrack: track },
        muted: track.isMuted()
    };
}

/**
 * Create an action for when a track's muted state change action has failed. This could happen because of
 * {@code getUserMedia} errors during unmute or replace track errors at the peerconnection level.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {boolean} wasMuting - Whether the operation that failed was a mute or an unmute operation.
 * @returns {{
 *     type: TRACK_MUTE_UNMUTE_FAILED,
 *     track: Track,
 *     wasMuting: boolean
 * }}
 */
export function trackMuteUnmuteFailed(track, wasMuting) {
    return {
        type: TRACK_MUTE_UNMUTE_FAILED,
        track,
        wasMuting
    };
}
/**
 * Create an action for when a track's no data from source notification information changes.
 *
 * @param {JitsiLocalTrack} track - JitsiTrack instance.
 * @param {Object} noDataFromSourceNotificationInfo - Information about no data from source notification.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackNoDataFromSourceNotificationInfoChanged(track, noDataFromSourceNotificationInfo) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            noDataFromSourceNotificationInfo
        }
    };
}

/**
 * Create an action for when a track has been signaled for removal from the
 * conference.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_REMOVED,
 *     track: Track
 * }}
 */
export function trackRemoved(track) {
    track.removeAllListeners(JitsiTrackEvents.TRACK_MUTE_CHANGED);
    track.removeAllListeners(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED);
    track.removeAllListeners(JitsiTrackEvents.NO_DATA_FROM_SOURCE);

    return {
        type: TRACK_REMOVED,
        track: {
            jitsiTrack: track
        }
    };
}

/**
 * Signal that track's video started to play.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackVideoStarted(track) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            videoStarted: true
        }
    };
}

/**
 * Create an action for when participant video type changes.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {VIDEO_TYPE|undefined} videoType - Video type.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackVideoTypeChanged(track, videoType) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            videoType
        }
    };
}

/**
 * Create an action for when track streaming status changes.
 *
 * @param {(JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {string} streamingStatus - The new streaming status of the track.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackStreamingStatusChanged(track, streamingStatus) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            streamingStatus
        }
    };
}
/**
 * Signals passed tracks to be added.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @private
 * @returns {Function}
 */
function _addTracks(tracks) {
    return dispatch => Promise.all(tracks.map(t => dispatch(trackAdded(t))));
}

/**
 * Cancels and waits for any {@code getUserMedia} processes currently in
 * progress to complete/settle.
 *
 * @param {Function} getState - The redux store {@code getState} function used
 * to obtain the state.
 * @private
 * @returns {Promise} - A {@code Promise} resolved once all
 * {@code gumProcess.cancel()} {@code Promise}s are settled because all we care
 * about here is to be sure that the {@code getUserMedia} callbacks have
 * completed (i.e. returned from the native side).
 */
function _cancelGUMProcesses(getState) {
    const logError
        = error =>
            logger.error('gumProcess.cancel failed', JSON.stringify(error));

    return Promise.all(
        getState()['features/base/tracks']
            .filter(t => t.local)
            .map(({ gumProcess }) =>
                gumProcess && gumProcess.cancel().catch(logError)));
}

/**
 * Disposes passed tracks and signals them to be removed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @protected
 * @returns {Function}
 */
export function _disposeAndRemoveTracks(tracks) {
    return dispatch =>
        _disposeTracks(tracks)
            .then(() =>
                Promise.all(tracks.map(t => dispatch(trackRemoved(t)))));
}

/**
 * Disposes passed tracks.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @private
 * @returns {Promise} - A Promise resolved once {@link JitsiTrack.dispose()} is
 * done for every track from the list.
 */
function _disposeTracks(tracks) {
    return Promise.all(
        tracks.map(t =>
            t.dispose()
                .catch(err => {
                    // Track might be already disposed so ignore such an error.
                    // Of course, re-throw any other error(s).
                    if (err.name !== JitsiTrackErrors.TRACK_IS_DISPOSED) {
                        throw err;
                    }
                })));
}
/**
 * Implements the {@code Promise} rejection handler of
 * {@code createLocalTracksA} and {@code createLocalTracksF}.
 *
 * @param {Object} error - The {@code Promise} rejection reason.
 * @param {string} device - The device/{@code MEDIA_TYPE} associated with the
 * rejection.
 * @private
 * @returns {Function}
 */
function _onCreateLocalTracksRejected(error, device) {
    return dispatch => {
        // If permissions are not allowed, alert the user.
        dispatch({
            type: TRACK_CREATE_ERROR,
            permissionDenied: error?.name === 'SecurityError',
            trackType: device
        });
    };
}

/**
 * Returns true if the provided {@code JitsiTrack} should be rendered as a
 * mirror.
 *
 * We only want to show a video in mirrored mode when:
 * 1) The video source is local, and not remote.
 * 2) The video source is a camera, not a desktop (capture).
 * 3) The camera is capturing the user, not the environment.
 *
 * TODO Similar functionality is part of lib-jitsi-meet. This function should be
 * removed after https://github.com/jitsi/lib-jitsi-meet/pull/187 is merged.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @private
 * @returns {boolean}
 */
function _shouldMirror(track) {
    return (
        track
            && track.isLocal()
            && track.isVideoTrack()

            // XXX The type of the return value of JitsiLocalTrack's
            // getCameraFacingMode happens to be named CAMERA_FACING_MODE as
            // well, it's defined by lib-jitsi-meet. Note though that the type
            // of the value on the right side of the equality check is defined
            // by jitsi-meet. The type definitions are surely compatible today
            // but that may not be the case tomorrow.
            && track.getCameraFacingMode() === CAMERA_FACING_MODE.USER);
}

/**
 * Signals that the track create operation for the given media track has been
 * canceled. Will clean up the local track stub from the redux state which holds
 * the {@code gumProcess} reference.
 *
 * @param {MEDIA_TYPE} mediaType - The type of the media for which the track was
 * being created.
 * @private
 * @returns {{
 *     type: TRACK_CREATE_CANCELED,
 *     trackType: MEDIA_TYPE
 * }}
 */
function _trackCreateCanceled(mediaType) {
    return {
        type: TRACK_CREATE_CANCELED,
        trackType: mediaType
    };
}
/**
 * If the local video track is of type Desktop, dispatches
 * {@code _disposeAndRemoveTracks} on it.
 *
 * @returns {Function}
 */
export function destroyLocalDesktopTrackIfExists() {
    return (dispatch, getState) => {
        const videoTrack = getLocalVideoTrack(getState()['features/base/tracks']);
        const isDesktopTrack = videoTrack && videoTrack.videoType === VIDEO_TYPE.DESKTOP;

        if (isDesktopTrack) {
            dispatch(_disposeAndRemoveTracks([ videoTrack.jitsiTrack ]));
        }
    };
}

/**
 * Sets the UID of the displayed no data from source notification. Used to track
 * if the notification was previously displayed in this context.
 *
 * @param {number} uid - Notification UID.
 * @returns {{
 *     type: SET_NO_SRC_DATA_NOTIFICATION_UID,
 *     uid: number
 * }}
 */
export function setNoSrcDataNotificationUid(uid) {
    return {
        type: SET_NO_SRC_DATA_NOTIFICATION_UID,
        uid
    };
}
/**
 * Updates the last media event received for a video track.
 *
 * @param {JitsiRemoteTrack} track - JitsiTrack instance.
 * @param {string} name - The current media event name for the video.
 * @returns {{
 *     type: TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT,
 *     track: Track,
 *     name: string
 * }}
 */
export function updateLastTrackVideoMediaEvent(track, name) {
    return {
        type: TRACK_UPDATE_LAST_VIDEO_MEDIA_EVENT,
        track,
        name
    };
}
/**
 * Toggles the facingMode constraint on the video stream.
 *
 * @returns {Function}
 */
export function toggleCamera() {
    return async (dispatch, getState) => {
        const state = getState();
        const tracks = state['features/base/tracks'];
        const localVideoTrack = getLocalVideoTrack(tracks).jitsiTrack;
        const currentFacingMode = localVideoTrack.getCameraFacingMode();

        /**
         * FIXME: Ideally, we should be dispatching {@code replaceLocalTrack} here,
         * but it seems to not trigger the re-rendering of the local video on Chrome;
         * could be due to a plan B vs unified plan issue. Therefore, we use the legacy
         * method defined in conference.js that manually takes care of updating the local
         * video as well.
         */
        await APP.conference.useVideoStream(null);

        const targetFacingMode = currentFacingMode === CAMERA_FACING_MODE.USER
            ? CAMERA_FACING_MODE.ENVIRONMENT
            : CAMERA_FACING_MODE.USER;

        // Update the flipX value so the environment facing camera is not flipped, before the new track is created.
        dispatch(updateSettings({ localFlipX: targetFacingMode === CAMERA_FACING_MODE.USER }));

        const newVideoTrack = await createLocalTrack('video', null, null, { facingMode: targetFacingMode });

        // FIXME: See above.
        await APP.conference.useVideoStream(newVideoTrack);
    };
}
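// Usage sketch (illustrative only; `store` is an assumed connected redux
// store): flipping between the user-facing and environment-facing camera,
// e.g. from a toolbar button handler where APP.conference is available.
//
//     store.dispatch(toggleCamera());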