
actions.js

import {
    createTrackMutedEvent,
    sendAnalytics
} from '../../analytics';
import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
import {
    CAMERA_FACING_MODE,
    MEDIA_TYPE,
    setAudioMuted,
    setVideoMuted,
    VIDEO_MUTISM_AUTHORITY
} from '../media';
import { getLocalParticipant } from '../participants';
import {
    TOGGLE_SCREENSHARING,
    TRACK_ADDED,
    TRACK_CREATE_CANCELED,
    TRACK_CREATE_ERROR,
    TRACK_REMOVED,
    TRACK_UPDATED,
    TRACK_WILL_CREATE
} from './actionTypes';
import { createLocalTracksF, getLocalTrack, getLocalTracks } from './functions';

const logger = require('jitsi-meet-logger').getLogger(__filename);

/**
 * Requests the creation of the desired media type tracks. The desired types
 * are taken from base/media unless the caller specifies them explicitly, thus
 * overriding base/media. Dispatches a {@code createLocalTracksA} action for
 * the desired media types for which there are no existing tracks yet.
 *
 * @returns {Function}
 */
export function createDesiredLocalTracks(...desiredTypes) {
    return (dispatch, getState) => {
        const state = getState();

        if (desiredTypes.length === 0) {
            const { audio, video } = state['features/base/media'];

            audio.muted || desiredTypes.push(MEDIA_TYPE.AUDIO);

            // XXX When the app is coming into the foreground from the
            // background in order to handle a URL, it may realize the new
            // background state soon after it has tried to create the local
            // tracks requested by the URL. Ignore
            // VIDEO_MUTISM_AUTHORITY.BACKGROUND and create the local video
            // track if no other VIDEO_MUTISM_AUTHORITY has muted it. The local
            // video track will be muted until the app realizes the new
            // background state.

            // eslint-disable-next-line no-bitwise
            (video.muted & ~VIDEO_MUTISM_AUTHORITY.BACKGROUND)
                || desiredTypes.push(MEDIA_TYPE.VIDEO);
        }

        const availableTypes
            = getLocalTracks(
                    state['features/base/tracks'],
                    /* includePending */ true)
                .map(t => t.mediaType);

        // We need to create the desired tracks which are not already available.
        const createTypes
            = desiredTypes.filter(type => availableTypes.indexOf(type) === -1);

        createTypes.length
            && dispatch(createLocalTracksA({ devices: createTypes }));
    };
}
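
// Illustrative usage (not part of the original file): callers dispatch this
// thunk to request local media, either letting base/media decide the desired
// types or naming them explicitly. The store variable below stands for the
// app's redux store and is assumed for illustration only.
//
//     store.dispatch(createDesiredLocalTracks());
//     store.dispatch(createDesiredLocalTracks(MEDIA_TYPE.AUDIO));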

/**
 * Request to start capturing local audio and/or video. By default, the user
 * facing camera will be selected.
 *
 * @param {Object} [options] - For info @see JitsiMeetJS.createLocalTracks.
 * @returns {Function}
 */
export function createLocalTracksA(options = {}) {
    return (dispatch, getState) => {
        const devices
            = options.devices || [ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ];
        const store = {
            dispatch,
            getState
        };

        // The following executes on React Native only at the time of this
        // writing. The effort to port Web's createInitialLocalTracksAndConnect
        // is significant and that's where the function createLocalTracksF was
        // born. I started with the idea of porting it so that we could inherit
        // the ability to getUserMedia for audio only or video only if
        // getUserMedia for audio and video fails. Eventually though, I realized
        // that on mobile we do not have combined permission prompts implemented
        // anyway (either because there are no such prompts or it does not make
        // sense to implement them) and the right thing to do is to ask for each
        // device separately.
        for (const device of devices) {
            if (getLocalTrack(
                    getState()['features/base/tracks'],
                    device,
                    /* includePending */ true)) {
                throw new Error(`Local track for ${device} already exists`);
            }

            const gumProcess
                = createLocalTracksF(
                    {
                        cameraDeviceId: options.cameraDeviceId,
                        devices: [ device ],
                        facingMode:
                            options.facingMode || CAMERA_FACING_MODE.USER,
                        micDeviceId: options.micDeviceId
                    },
                    /* firePermissionPromptIsShownEvent */ false,
                    store)
                .then(
                    localTracks => {
                        // Because GUM is called for 1 device (which is actually
                        // a media type 'audio', 'video', 'screen', etc.) we
                        // should not get more than one JitsiTrack.
                        if (localTracks.length !== 1) {
                            throw new Error(
                                `Expected exactly 1 track, but was given ${
                                    localTracks.length} tracks for device: ${
                                    device}.`);
                        }

                        if (gumProcess.canceled) {
                            return _disposeTracks(localTracks)
                                .then(() =>
                                    dispatch(_trackCreateCanceled(device)));
                        }

                        return dispatch(trackAdded(localTracks[0]));
                    },
                    reason =>
                        dispatch(
                            gumProcess.canceled
                                ? _trackCreateCanceled(device)
                                : _onCreateLocalTracksRejected(
                                    reason,
                                    device)));

            /**
             * Cancels the {@code getUserMedia} process represented by this
             * {@code Promise}.
             *
             * @returns {Promise} This {@code Promise} i.e. {@code gumProcess}.
             */
            gumProcess.cancel = () => {
                gumProcess.canceled = true;

                return gumProcess;
            };

            dispatch({
                type: TRACK_WILL_CREATE,
                track: {
                    gumProcess,
                    local: true,
                    mediaType: device
                }
            });
        }
    };
}
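
// Illustrative sketch (not part of the original file): each pending track is
// represented in redux by the TRACK_WILL_CREATE stub above, which carries its
// gumProcess so that an in-flight getUserMedia request can later be canceled,
// e.g. by _cancelGUMProcesses below. Given a hypothetical local track stub t
// taken from features/base/tracks:
//
//     t.gumProcess && t.gumProcess.cancel().then(() => {
//         // The getUserMedia callbacks have settled; the stub is cleaned up
//         // via TRACK_CREATE_CANCELED.
//     });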

/**
 * Calls JitsiLocalTrack#dispose() on all local tracks, ignoring errors when a
 * track is already disposed. After that, signals the tracks to be removed.
 *
 * @returns {Function}
 */
export function destroyLocalTracks() {
    return (dispatch, getState) => {
        // First wait until any getUserMedia in progress is settled and then get
        // rid of all local tracks.
        _cancelGUMProcesses(getState)
            .then(() =>
                dispatch(
                    _disposeAndRemoveTracks(
                        getState()['features/base/tracks']
                            .filter(t => t.local)
                            .map(t => t.jitsiTrack))));
    };
}

/**
 * Signals that the local participant is ending screensharing or beginning the
 * screensharing flow.
 *
 * @returns {{
 *     type: TOGGLE_SCREENSHARING
 * }}
 */
export function toggleScreensharing() {
    return {
        type: TOGGLE_SCREENSHARING
    };
}

/**
 * Replaces one track with another for one renegotiation instead of invoking
 * two renegotiations with a separate removeTrack and addTrack. Disposes the
 * removed track as well.
 *
 * @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
 * @param {JitsiLocalTrack|null} newTrack - The track to use instead.
 * @param {JitsiConference} [conference] - The conference from which to remove
 * and add the tracks. If one is not provided, the conference in the redux store
 * will be used.
 * @returns {Function}
 */
export function replaceLocalTrack(oldTrack, newTrack, conference) {
    return (dispatch, getState) => {
        conference
            // eslint-disable-next-line no-param-reassign
            || (conference = getState()['features/base/conference'].conference);

        return conference.replaceTrack(oldTrack, newTrack)
            .then(() => {
                // We call dispose after doing the replace because dispose will
                // try and do a new o/a after the track removes itself. Doing it
                // after means the JitsiLocalTrack.conference is already
                // cleared, so it won't try and do the o/a.
                const disposePromise
                    = oldTrack
                        ? dispatch(_disposeAndRemoveTracks([ oldTrack ]))
                        : Promise.resolve();

                return disposePromise
                    .then(() => {
                        if (newTrack) {
                            // The mute state of the new track should be
                            // reflected in the app's mute state. For example,
                            // if the app is currently muted and changing to a
                            // new track that is not muted, the app's mute
                            // state should be falsey. As such, emit a mute
                            // event here to set up the app to reflect the
                            // track's mute state. If this is not done, the
                            // current mute state of the app will be reflected
                            // on the track, not vice-versa.
                            const setMuted
                                = newTrack.isVideoTrack()
                                    ? setVideoMuted
                                    : setAudioMuted;
                            const isMuted = newTrack.isMuted();

                            sendAnalytics(createTrackMutedEvent(
                                newTrack.getType(),
                                'track.replaced',
                                isMuted));
                            logger.log(`Replace ${newTrack.getType()} track - ${
                                isMuted ? 'muted' : 'unmuted'}`);

                            return dispatch(setMuted(isMuted));
                        }
                    })
                    .then(() => {
                        if (newTrack) {
                            return dispatch(_addTracks([ newTrack ]));
                        }
                    });
            });
    };
}
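
// Illustrative usage (not part of the original file): switching cameras could
// create a replacement video track via createLocalTracksF and then swap it in
// with a single renegotiation. oldVideoTrack and newVideoTrack are hypothetical
// names used only for this example.
//
//     dispatch(replaceLocalTrack(oldVideoTrack, newVideoTrack));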

/**
 * Create an action for when a new track has been signaled to be added to the
 * conference.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{ type: TRACK_ADDED, track: Track }}
 */
export function trackAdded(track) {
    return (dispatch, getState) => {
        track.on(
            JitsiTrackEvents.TRACK_MUTE_CHANGED,
            () => dispatch(trackMutedChanged(track)));
        track.on(
            JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED,
            type => dispatch(trackVideoTypeChanged(track, type)));

        // participantId
        const local = track.isLocal();
        let participantId;

        if (local) {
            const participant = getLocalParticipant(getState);

            if (participant) {
                participantId = participant.id;
            }
        } else {
            participantId = track.getParticipantId();
        }

        return dispatch({
            type: TRACK_ADDED,
            track: {
                jitsiTrack: track,
                local,
                mediaType: track.getType(),
                mirror: _shouldMirror(track),
                muted: track.isMuted(),
                participantId,
                videoStarted: false,
                videoType: track.videoType
            }
        });
    };
}

/**
 * Create an action for when a track's muted state has been signaled to be
 * changed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackMutedChanged(track) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            muted: track.isMuted()
        }
    };
}

/**
 * Create an action for when a track has been signaled for removal from the
 * conference.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_REMOVED,
 *     track: Track
 * }}
 */
export function trackRemoved(track) {
    track.removeAllListeners(JitsiTrackEvents.TRACK_MUTE_CHANGED);
    track.removeAllListeners(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED);

    return {
        type: TRACK_REMOVED,
        track: {
            jitsiTrack: track
        }
    };
}

/**
 * Signals that a track's video started to play.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackVideoStarted(track) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            videoStarted: true
        }
    };
}

/**
 * Create an action for when participant video type changes.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @param {VIDEO_TYPE|undefined} videoType - Video type.
 * @returns {{
 *     type: TRACK_UPDATED,
 *     track: Track
 * }}
 */
export function trackVideoTypeChanged(track, videoType) {
    return {
        type: TRACK_UPDATED,
        track: {
            jitsiTrack: track,
            videoType
        }
    };
}

/**
 * Signals passed tracks to be added.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @private
 * @returns {Function}
 */
function _addTracks(tracks) {
    return dispatch => Promise.all(tracks.map(t => dispatch(trackAdded(t))));
}

/**
 * Cancels and waits for any {@code getUserMedia} processes currently in
 * progress to complete/settle.
 *
 * @param {Function} getState - The redux store {@code getState} function used
 * to obtain the state.
 * @private
 * @returns {Promise} - A {@code Promise} resolved once all
 * {@code gumProcess.cancel()} {@code Promise}s are settled because all we care
 * about here is to be sure that the {@code getUserMedia} callbacks have
 * completed (i.e. returned from the native side).
 */
function _cancelGUMProcesses(getState) {
    const logError
        = error =>
            logger.error('gumProcess.cancel failed', JSON.stringify(error));

    return Promise.all(
        getState()['features/base/tracks']
            .filter(t => t.local)
            .map(({ gumProcess }) =>
                gumProcess && gumProcess.cancel().catch(logError)));
}

/**
 * Disposes passed tracks and signals them to be removed.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @protected
 * @returns {Function}
 */
export function _disposeAndRemoveTracks(tracks) {
    return dispatch =>
        _disposeTracks(tracks)
            .then(() =>
                Promise.all(tracks.map(t => dispatch(trackRemoved(t)))));
}

/**
 * Disposes passed tracks.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
 * @private
 * @returns {Promise} - A Promise resolved once {@link JitsiTrack.dispose()} is
 * done for every track from the list.
 */
function _disposeTracks(tracks) {
    return Promise.all(
        tracks.map(t =>
            t.dispose()
                .catch(err => {
                    // Track might be already disposed so ignore such an error.
                    // Of course, re-throw any other error(s).
                    if (err.name !== JitsiTrackErrors.TRACK_IS_DISPOSED) {
                        throw err;
                    }
                })));
}

/**
 * Implements the {@code Promise} rejection handler of
 * {@code createLocalTracksA} and {@code createLocalTracksF}.
 *
 * @param {Object} reason - The {@code Promise} rejection reason.
 * @param {string} device - The device/{@code MEDIA_TYPE} associated with the
 * rejection.
 * @private
 * @returns {Function}
 */
function _onCreateLocalTracksRejected({ gum }, device) {
    return dispatch => {
        // If permissions are not allowed, alert the user.
        if (gum) {
            const { error } = gum;

            if (error) {
                // FIXME For whatever reason (which is probably an
                // implementation fault), react-native-webrtc will give the
                // error in one of the following formats depending on whether it
                // is attached to a remote debugger or not. (The remote debugger
                // scenario suggests that react-native-webrtc is at fault
                // because the remote debugger is Google Chrome and then its
                // JavaScript engine will define DOMException. I suspect I wrote
                // react-native-webrtc to return the error in the alternative
                // format if DOMException is not defined.)
                let trackPermissionError;

                switch (error.name) {
                case 'DOMException':
                    trackPermissionError = error.message === 'NotAllowedError';
                    break;

                case 'NotAllowedError':
                    trackPermissionError = error instanceof DOMException;
                    break;
                }

                dispatch({
                    type: TRACK_CREATE_ERROR,
                    permissionDenied: trackPermissionError,
                    trackType: device
                });
            }
        }
    };
}
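
// Illustrative shapes of the two rejection formats distinguished above,
// derived from the switch statement rather than from react-native-webrtc
// documentation:
//
//     { gum: { error: { name: 'DOMException', message: 'NotAllowedError' } } }
//     { gum: { error: someDOMException /* name === 'NotAllowedError' */ } }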

/**
 * Returns true if the provided {@code JitsiTrack} should be rendered as a
 * mirror.
 *
 * We only want to show a video in mirrored mode when:
 * 1) The video source is local, and not remote.
 * 2) The video source is a camera, not a desktop (capture).
 * 3) The camera is capturing the user, not the environment.
 *
 * TODO Similar functionality is part of lib-jitsi-meet. This function should be
 * removed after https://github.com/jitsi/lib-jitsi-meet/pull/187 is merged.
 *
 * @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
 * @private
 * @returns {boolean}
 */
function _shouldMirror(track) {
    return (
        track
            && track.isLocal()
            && track.isVideoTrack()

            // XXX The type of the return value of JitsiLocalTrack's
            // getCameraFacingMode happens to be named CAMERA_FACING_MODE as
            // well, it's defined by lib-jitsi-meet. Note though that the type
            // of the value on the right side of the equality check is defined
            // by jitsi-meet. The type definitions are surely compatible today
            // but that may not be the case tomorrow.
            && track.getCameraFacingMode() === CAMERA_FACING_MODE.USER);
}
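
// Illustrative examples (not part of the original file), assuming hypothetical
// tracks: a local, user-facing camera track is mirrored, while a remote track
// or a desktop capture is not.
//
//     _shouldMirror(localUserFacingCameraTrack); // true
//     _shouldMirror(remoteParticipantTrack);     // false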

/**
 * Signals that the track create operation for the given media type has been
 * canceled. Will clean up the local track stub from the redux state which
 * holds the {@code gumProcess} reference.
 *
 * @param {MEDIA_TYPE} mediaType - The type of the media for which the track was
 * being created.
 * @private
 * @returns {{
 *     type,
 *     trackType: MEDIA_TYPE
 * }}
 */
function _trackCreateCanceled(mediaType) {
    return {
        type: TRACK_CREATE_CANCELED,
        trackType: mediaType
    };
}