
feat(presenter): add Presenter Mode

- Add the ability to share video as a "PiP" when screenshare is in progress.
- Add a method for creating a local presenter track.
- Make sure isLocalVideoTrackMuted returns the correct mute state when only screenshare is present.
- Make sure we get the updated size of the window being shared before painting it on the canvas.
- Make sure we check whether the shared window has been resized.
Jaya Allamsetty, 5 years ago
parent commit 0a64bf2068

conference.js (+202, -44)
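The conference.js changes below wire the feature end to end: when the camera is unmuted while a screenshare is active, a small camera track is created, wrapped in a presenter effect, and applied on top of the existing desktop track. A condensed sketch of that flow, using the helpers the diff imports (createLocalPresenterTrack, createPresenterEffect, JitsiLocalTrack.setEffect); the showPresenterOverlay wrapper and the desktopTrack argument are illustrative, not part of the commit:

// Sketch only: mirrors the unmute path of mutePresenterVideo() in the diff below.
// `desktopTrack` stands for the local screenshare track (this.localVideo in the diff).
import { createLocalPresenterTrack } from './react/features/base/tracks';
import { createPresenterEffect } from './react/features/stream-effects/presenter';

async function showPresenterOverlay(desktopTrack, cameraDeviceId) {
    // The desktop height drives the resolution chosen for the camera PiP.
    const { height } = desktopTrack.track.getSettings();

    // 1. Camera track whose constraints are derived from the desktop height.
    const presenterTrack = await createLocalPresenterTrack({ cameraDeviceId }, height);

    // 2. Effect that paints the camera frames onto the desktop frames.
    const effect = await createPresenterEffect(presenterTrack.stream);

    // 3. Applying the effect swaps what the desktop track actually sends.
    await desktopTrack.setEffect(effect);

    return presenterTrack;
}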

@@ ... @@
     participantRoleChanged,
     participantUpdated
 } from './react/features/base/participants';
-import { updateSettings } from './react/features/base/settings';
 import {
+    getUserSelectedCameraDeviceId,
+    updateSettings
+} from './react/features/base/settings';
+import {
+    createLocalPresenterTrack,
     createLocalTracksF,
     destroyLocalTracks,
+    isLocalVideoTrackMuted,
     isLocalTrackMuted,
     isUserInteractionRequiredForUnmute,
     replaceLocalTrack,
@@ ... @@
 import { mediaPermissionPromptVisibilityChanged } from './react/features/overlay';
 import { suspendDetected } from './react/features/power-monitor';
 import { setSharedVideoStatus } from './react/features/shared-video';
+import { createPresenterEffect } from './react/features/stream-effects/presenter';
 import { endpointMessageReceived } from './react/features/subtitles';
 
 const logger = require('jitsi-meet-logger').getLogger(__filename);
@@ ... @@
      */
     localAudio: null,
 
+    /**
+     * The local presenter video track (if any).
+     */
+    localPresenterVideo: null,
+
     /**
      * The local video track (if any).
      * FIXME tracks from redux store should be the single source of truth, but
@@ ... @@
     isLocalVideoMuted() {
         // If the tracks are not ready, read from base/media state
         return this._localTracksInitialized
-            ? isLocalTrackMuted(
-                APP.store.getState()['features/base/tracks'],
-                MEDIA_TYPE.VIDEO)
+            ? isLocalVideoTrackMuted(
+                APP.store.getState()['features/base/tracks'])
             : isVideoMutedByUser(APP.store);
     },
 
@@ ... @@
         this.muteAudio(!this.isLocalAudioMuted(), showUI);
     },
 
+    /**
+     * Simulates toolbar button click for presenter video mute. Used by
+     * shortcuts and API.
+     * @param mute true for mute and false for unmute.
+     * @param {boolean} [showUI] when set to false will not display any error
+     * dialogs in case of media permissions error.
+     */
+    async mutePresenterVideo(mute, showUI = true) {
+        const maybeShowErrorDialog = error => {
+            showUI && APP.store.dispatch(notifyCameraError(error));
+        };
+
+        if (mute) {
+            try {
+                await this.localVideo.setEffect(undefined);
+                APP.store.dispatch(
+                    setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
+                this._untoggleScreenSharing
+                    = this._turnScreenSharingOff.bind(this, false);
+            } catch (err) {
+                logger.error('Failed to mute the Presenter video');
+            }
+
+            return;
+        }
+        const { height } = this.localVideo.track.getSettings();
+        const defaultCamera
+            = getUserSelectedCameraDeviceId(APP.store.getState());
+        let effect;
+
+        try {
+            effect = await this._createPresenterStreamEffect(height,
+                defaultCamera);
+        } catch (err) {
+            logger.error('Failed to unmute Presenter Video');
+            maybeShowErrorDialog(err);
+
+            return;
+        }
+        try {
+            await this.localVideo.setEffect(effect);
+            APP.store.dispatch(setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
+            this._untoggleScreenSharing
+                = this._turnScreenSharingOff.bind(this, true);
+        } catch (err) {
+            logger.error('Failed to apply the Presenter effect', err);
+        }
+    },
+
     /**
      * Simulates toolbar button click for video mute. Used by shortcuts and API.
      * @param mute true for mute and false for unmute.
@@ ... @@
             return;
         }
 
+        if (this.isSharingScreen) {
+            return this.mutePresenterVideo(mute);
+        }
+
         // If not ready to modify track's state yet adjust the base/media
         if (!this._localTracksInitialized) {
             // This will only modify base/media.video.muted which is then synced
@@ ... @@
      * in case it fails.
      * @private
      */
-    _turnScreenSharingOff(didHaveVideo, wasVideoMuted) {
+    _turnScreenSharingOff(didHaveVideo) {
         this._untoggleScreenSharing = null;
         this.videoSwitchInProgress = true;
         const { receiver } = APP.remoteControl;
@@ ... @@
                 .then(([ stream ]) => this.useVideoStream(stream))
                 .then(() => {
                     sendAnalytics(createScreenSharingEvent('stopped'));
-                    logger.log('Screen sharing stopped, switching to video.');
-
-                    if (!this.localVideo && wasVideoMuted) {
-                        return Promise.reject('No local video to be muted!');
-                    } else if (wasVideoMuted && this.localVideo) {
-                        return this.localVideo.mute();
-                    }
+                    logger.log('Screen sharing stopped.');
                 })
                 .catch(error => {
                     logger.error('failed to switch back to local video', error);
@@ ... @@
             promise = this.useVideoStream(null);
         }
 
+        // mute the presenter track if it exists.
+        if (this.localPresenterVideo) {
+            APP.store.dispatch(
+                setVideoMuted(true, MEDIA_TYPE.PRESENTER));
+            this.localPresenterVideo.dispose();
+            APP.store.dispatch(
+                trackRemoved(this.localPresenterVideo));
+            this.localPresenterVideo = null;
+        }
+
         return promise.then(
             () => {
                 this.videoSwitchInProgress = false;
@@ ... @@
      * 'window', etc.).
      * @return {Promise.<T>}
      */
-    toggleScreenSharing(toggle = !this._untoggleScreenSharing, options = {}) {
+    async toggleScreenSharing(toggle = !this._untoggleScreenSharing, options = {}) {
         if (this.videoSwitchInProgress) {
             return Promise.reject('Switch in progress.');
         }
@@ ... @@
         }
 
         if (toggle) {
-            return this._switchToScreenSharing(options);
+            const wasVideoMuted = this.isLocalVideoMuted();
+
+            try {
+                await this._switchToScreenSharing(options);
+            } catch (err) {
+                logger.error('Failed to switch to screensharing', err);
+
+                return;
+            }
+            if (wasVideoMuted) {
+                return;
+            }
+            const { height } = this.localVideo.track.getSettings();
+            const defaultCamera
+                = getUserSelectedCameraDeviceId(APP.store.getState());
+            let effect;
+
+            try {
+                effect = await this._createPresenterStreamEffect(
+                    height, defaultCamera);
+            } catch (err) {
+                logger.error('Failed to create the presenter effect');
+
+                return;
+            }
+            try {
+                await this.localVideo.setEffect(effect);
+                muteLocalVideo(false);
+
+                return;
+            } catch (err) {
+                logger.error('Failed to create the presenter effect', err);
+
+                return;
+            }
         }
 
         return this._untoggleScreenSharing
@@ ... @@
         let externalInstallation = false;
         let DSExternalInstallationInProgress = false;
         const didHaveVideo = Boolean(this.localVideo);
-        const wasVideoMuted = this.isLocalVideoMuted();
 
         const getDesktopStreamPromise = options.desktopStream
             ? Promise.resolve([ options.desktopStream ])
@@ ... @@
             // Stores the "untoggle" handler which remembers whether was
             // there any video before and whether was it muted.
             this._untoggleScreenSharing
-                = this._turnScreenSharingOff
-                      .bind(this, didHaveVideo, wasVideoMuted);
+                = this._turnScreenSharingOff.bind(this, didHaveVideo);
             desktopStream.on(
                 JitsiTrackEvents.LOCAL_TRACK_STOPPED,
                 () => {
@@ ... @@
         });
     },
 
+    /**
+     * Creates a new instance of presenter effect. A new video track is created
+     * using the new set of constraints that are calculated based on
+     * the height of the desktop that is being currently shared.
+     *
+     * @param {number} height - The height of the desktop stream that is being
+     * currently shared.
+     * @param {string} cameraDeviceId - The device id of the camera to be used.
+     * @return {Promise<JitsiStreamPresenterEffect>} - A promise resolved with
+     * {@link JitsiStreamPresenterEffect} if it succeeds.
+     */
+    async _createPresenterStreamEffect(height, cameraDeviceId = null) {
+        let presenterTrack;
+
+        try {
+            presenterTrack = await createLocalPresenterTrack({
+                cameraDeviceId
+            },
+            height);
+        } catch (err) {
+            logger.error('Failed to create a camera track for presenter', err);
+
+            return;
+        }
+        this.localPresenterVideo = presenterTrack;
+        try {
+            const effect = await createPresenterEffect(presenterTrack.stream);
+
+            APP.store.dispatch(trackAdded(this.localPresenterVideo));
+
+            return effect;
+        } catch (err) {
+            logger.error('Failed to create the presenter effect', err);
+            APP.store.dispatch(
+                setVideoMuted(true, MEDIA_TYPE.PRESENTER));
+            APP.store.dispatch(notifyCameraError(err));
+        }
+    },
+
     /**
      * Tries to switch to the screensharing mode by disposing camera stream and
      * replacing it with a desktop one.
@@ ... @@
                 const videoWasMuted = this.isLocalVideoMuted();
 
                 sendAnalytics(createDeviceChangedEvent('video', 'input'));
-                createLocalTracksF({
-                    devices: [ 'video' ],
-                    cameraDeviceId,
-                    micDeviceId: null
-                })
-                .then(([ stream ]) => {
-                    // if we are in audio only mode or video was muted before
-                    // changing device, then mute
-                    if (this.isAudioOnly() || videoWasMuted) {
-                        return stream.mute()
-                            .then(() => stream);
-                    }
-
-                    return stream;
-                })
-                .then(stream => {
-                    // if we are screen sharing we do not want to stop it
-                    if (this.isSharingScreen) {
-                        return Promise.resolve();
-                    }
 
-                    return this.useVideoStream(stream);
-                })
-                .then(() => {
+                // If both screenshare and video are in progress, restart the
+                // presenter mode with the new camera device.
+                if (this.isSharingScreen && !videoWasMuted) {
+                    const { height } = this.localVideo.track.getSettings();
+
+                    // dispose the existing presenter track and create a new
+                    // camera track.
+                    APP.store.dispatch(setVideoMuted(true, MEDIA_TYPE.PRESENTER));
+
+                    return this._createPresenterStreamEffect(height, cameraDeviceId)
+                        .then(effect => this.localVideo.setEffect(effect))
+                        .then(() => {
+                            muteLocalVideo(false);
+                            this.setVideoMuteStatus(false);
+                            logger.log('switched local video device');
+                            this._updateVideoDeviceId();
+                        })
+                        .catch(err => APP.store.dispatch(notifyCameraError(err)));
+
+                // If screenshare is in progress but video is muted,
+                // update the default device id for video.
+                } else if (this.isSharingScreen && videoWasMuted) {
                     logger.log('switched local video device');
                     this._updateVideoDeviceId();
-                })
-                .catch(err => {
-                    APP.store.dispatch(notifyCameraError(err));
-                });
+
+                // if there is only video, switch to the new camera stream.
+                } else {
+                    createLocalTracksF({
+                        devices: [ 'video' ],
+                        cameraDeviceId,
+                        micDeviceId: null
+                    })
+                    .then(([ stream ]) => {
+                        // if we are in audio only mode or video was muted before
+                        // changing device, then mute
+                        if (this.isAudioOnly() || videoWasMuted) {
+                            return stream.mute()
+                                .then(() => stream);
+                        }
+
+                        return stream;
+                    })
+                    .then(stream => this.useVideoStream(stream))
+                    .then(() => {
+                        logger.log('switched local video device');
+                        this._updateVideoDeviceId();
+                    })
+                    .catch(err => APP.store.dispatch(notifyCameraError(err)));
+                }
             }
         );
 

react/features/analytics/middleware.js (+2, -1)

@@ ... @@
         const state = getState();
         const { localTracksDuration } = state['features/analytics'];
 
-        if (localTracksDuration.conference.startedTime === -1) {
+        if (localTracksDuration.conference.startedTime === -1 || action.mediaType === 'presenter') {
             // We don't want to track the media duration if the conference is not joined yet because otherwise we won't
             // be able to compare them with the conference duration (from conference join to conference will leave).
+            // Also, do not track media duration for presenter tracks.
             break;
         }
         dispatch({

react/features/base/conference/middleware.js (+5, -1)

@@ ... @@
     getCurrentConference
 } from './functions';
 import logger from './logger';
+import { MEDIA_TYPE } from '../media';
 
 declare var APP: Object;
 
@@ ... @@
 function _trackAddedOrRemoved(store, next, action) {
     const track = action.track;
 
-    if (track && track.local) {
+    // TODO All track swapping should happen here instead of conference.js.
+    // Since we swap the tracks for the web client in conference.js, ignore
+    // presenter tracks here and do not add/remove them to/from the conference.
+    if (track && track.local && track.mediaType !== MEDIA_TYPE.PRESENTER) {
         return (
             _syncConferenceLocalTracksWithState(store, action)
                 .then(() => next(action)));

react/features/base/media/actions.js (+9, -1)

@@ ... @@
     STORE_VIDEO_TRANSFORM,
     TOGGLE_CAMERA_FACING_MODE
 } from './actionTypes';
-import { CAMERA_FACING_MODE, VIDEO_MUTISM_AUTHORITY } from './constants';
+import {
+    CAMERA_FACING_MODE,
+    MEDIA_TYPE,
+    VIDEO_MUTISM_AUTHORITY
+} from './constants';
 
 /**
  * Action to adjust the availability of the local audio.
@@ ... @@
  *
  * @param {boolean} muted - True if the local video is to be muted or false if
  * the local video is to be unmuted.
+ * @param {MEDIA_TYPE} mediaType - The type of media.
  * @param {number} authority - The {@link VIDEO_MUTISM_AUTHORITY} which is
  * muting/unmuting the local video.
  * @param {boolean} ensureTrack - True if we want to ensure that a new track is
@@ ... @@
  */
 export function setVideoMuted(
         muted: boolean,
+        mediaType: MEDIA_TYPE = MEDIA_TYPE.VIDEO,
         authority: number = VIDEO_MUTISM_AUTHORITY.USER,
         ensureTrack: boolean = false) {
     return (dispatch: Dispatch<any>, getState: Function) => {
@@ ... @@
 
         return dispatch({
             type: SET_VIDEO_MUTED,
+            authority,
+            mediaType,
             ensureTrack,
             muted: newValue
         });
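Note that setVideoMuted now takes the media type as its second argument, ahead of the authority, so callers that previously passed only an authority need updating. A small illustrative dispatch (APP.store is the web app's global store, as used in conference.js; the argument comments are annotations, not part of the commit):

import { MEDIA_TYPE, VIDEO_MUTISM_AUTHORITY, setVideoMuted } from './react/features/base/media';

// Mute only the presenter (PiP camera) track on behalf of the user and
// ensure a track exists so the mute is applied once the track is created.
APP.store.dispatch(setVideoMuted(
    /* muted */ true,
    MEDIA_TYPE.PRESENTER,
    VIDEO_MUTISM_AUTHORITY.USER,
    /* ensureTrack */ true));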

react/features/base/media/constants.js (+1, -0)

@@ ... @@
  */
 export const MEDIA_TYPE = {
     AUDIO: 'audio',
+    PRESENTER: 'presenter',
     VIDEO: 'video'
 };
 

react/features/base/media/middleware.js (+18, -5)

@@ ... @@
 import { setTrackMuted, TRACK_ADDED } from '../tracks';
 
 import { setAudioMuted, setCameraFacingMode, setVideoMuted } from './actions';
-import { CAMERA_FACING_MODE, VIDEO_MUTISM_AUTHORITY } from './constants';
+import {
+    CAMERA_FACING_MODE,
+    MEDIA_TYPE,
+    VIDEO_MUTISM_AUTHORITY
+} from './constants';
 import logger from './logger';
 import {
     _AUDIO_INITIAL_MEDIA_STATE,
@@ ... @@
         const result = next(action);
         const { track } = action;
 
-        track.local && _syncTrackMutedState(store, track);
+        // Don't sync track mute state with the redux store for screenshare
+        // since video mute state represents local camera mute state only.
+        track.local && track.videoType !== 'desktop'
+            && _syncTrackMutedState(store, track);
 
         return result;
     }
@@ ... @@
 
     sendAnalytics(createTrackMutedEvent('video', 'background mode', mute));
 
-    dispatch(setVideoMuted(mute, VIDEO_MUTISM_AUTHORITY.BACKGROUND));
+    dispatch(setVideoMuted(mute, MEDIA_TYPE.VIDEO, VIDEO_MUTISM_AUTHORITY.BACKGROUND));
 
     return next(action);
 }
@@ ... @@
 
     sendAnalytics(createTrackMutedEvent('video', 'audio-only mode', audioOnly));
 
-    dispatch(setVideoMuted(audioOnly, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY, ensureVideoTrack));
+    // Make sure we mute both the desktop and video tracks.
+    dispatch(setVideoMuted(
+        audioOnly, MEDIA_TYPE.VIDEO, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY, ensureVideoTrack));
+    dispatch(setVideoMuted(
+        audioOnly, MEDIA_TYPE.PRESENTER, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY, ensureVideoTrack));
 
     return next(action);
 }
@@ ... @@
  */
 function _syncTrackMutedState({ getState }, track) {
     const state = getState()['features/base/media'];
-    const muted = Boolean(state[track.mediaType].muted);
+    const mediaType = track.mediaType === MEDIA_TYPE.PRESENTER
+        ? MEDIA_TYPE.VIDEO : track.mediaType;
+    const muted = Boolean(state[mediaType].muted);
 
     // XXX If muted state of track when it was added is different from our media
     // muted state, we need to mute track and explicitly modify 'muted' property

react/features/base/tracks/functions.js (+81, -1)

@@ ... @@
 
 import logger from './logger';
 
+/**
+ * Creates a local video track for presenter. The constraints are computed based
+ * on the height of the desktop that is being shared.
+ *
+ * @param {Object} options - The options with which the local presenter track
+ * is to be created.
+ * @param {string|null} [options.cameraDeviceId] - Camera device id or
+ * {@code undefined} to use app's settings.
+ * @param {number} desktopHeight - The height of the desktop that is being
+ * shared.
+ * @returns {Promise<JitsiLocalTrack>}
+ */
+export async function createLocalPresenterTrack(options, desktopHeight) {
+    const { cameraDeviceId } = options;
+
+    // compute the constraints of the camera track based on the resolution
+    // of the desktop screen that is being shared.
+    const cameraHeights = [ 180, 270, 360, 540, 720 ];
+    const proportion = 4;
+    const result = cameraHeights.find(
+            height => (desktopHeight / proportion) < height);
+    const constraints = {
+        video: {
+            aspectRatio: 4 / 3,
+            height: {
+                exact: result
+            }
+        }
+    };
+    const [ videoTrack ] = await JitsiMeetJS.createLocalTracks(
+        {
+            cameraDeviceId,
+            constraints,
+            devices: [ 'video' ]
+        });
+
+    videoTrack.type = MEDIA_TYPE.PRESENTER;
+
+    return videoTrack;
+}
+
 /**
  * Create local tracks of specific types.
  *
@@ ... @@
 
     const state = store.getState();
     const {
-        constraints,
         desktopSharingFrameRate,
         firefox_fake_device, // eslint-disable-line camelcase
         resolution
     } = state['features/base/config'];
+    const constraints = options.constraints
+        ?? state['features/base/config'].constraints;
+
+    // Do not load blur effect if option for ignoring effects is present.
+    // This is needed when we are creating a video track for presenter mode.
     const loadEffectsPromise = state['features/blur'].blurEnabled
         ? getBlurEffect()
             .then(blurEffect => [ blurEffect ])
@@ ... @@
     return getLocalTrack(tracks, MEDIA_TYPE.VIDEO);
 }
 
+/**
+ * Returns the media type of the local video, presenter or video.
+ *
+ * @param {Track[]} tracks - List of all tracks.
+ * @returns {MEDIA_TYPE}
+ */
+export function getLocalVideoType(tracks) {
+    const presenterTrack = getLocalTrack(tracks, MEDIA_TYPE.PRESENTER);
+
+    return presenterTrack ? MEDIA_TYPE.PRESENTER : MEDIA_TYPE.VIDEO;
+}
+
 /**
  * Returns track of specified media type for specified participant id.
  *
@@ ... @@
     return tracks.filter(t => t.mediaType === mediaType);
 }
 
+/**
+ * Checks if the local video track in the given set of tracks is muted.
+ *
+ * @param {Track[]} tracks - List of all tracks.
+ * @returns {Track[]}
+ */
+export function isLocalVideoTrackMuted(tracks) {
+    const presenterTrack = getLocalTrack(tracks, MEDIA_TYPE.PRESENTER);
+    const videoTrack = getLocalTrack(tracks, MEDIA_TYPE.VIDEO);
+
+    // Make sure we check the mute status of only camera tracks, i.e.,
+    // presenter track when it exists, camera track when the presenter
+    // track doesn't exist.
+    if (presenterTrack) {
+        return isLocalTrackMuted(tracks, MEDIA_TYPE.PRESENTER);
+    } else if (videoTrack) {
+        return videoTrack.videoType === 'camera'
+            ? isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO) : true;
+    }
+
+    return true;
+}
+
 /**
  * Checks if the first local track in the given tracks set is muted.
  *
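For reference, createLocalPresenterTrack above picks the smallest preset camera height that is still larger than a quarter of the shared desktop's height. A standalone sketch of just that calculation (pickPresenterHeight is a hypothetical name, not part of the commit):

// Smallest preset strictly greater than desktopHeight / 4, mirroring the
// cameraHeights/proportion logic inside createLocalPresenterTrack.
function pickPresenterHeight(desktopHeight) {
    const cameraHeights = [ 180, 270, 360, 540, 720 ];
    const proportion = 4;

    return cameraHeights.find(height => (desktopHeight / proportion) < height);
}

pickPresenterHeight(720);  // 270  (720 / 4 = 180)
pickPresenterHeight(1080); // 360  (1080 / 4 = 270)
pickPresenterHeight(2160); // 720  (2160 / 4 = 540)
// For desktops where height / 4 >= 720, find() returns undefined.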

react/features/base/tracks/middleware.js (+9, -4)

@@ ... @@
     SET_AUDIO_MUTED,
     SET_CAMERA_FACING_MODE,
     SET_VIDEO_MUTED,
+    VIDEO_MUTISM_AUTHORITY,
     TOGGLE_CAMERA_FACING_MODE,
     toggleCameraFacingMode
 } from '../media';
@@ ... @@
             return;
         }
 
-        _setMuted(store, action, MEDIA_TYPE.VIDEO);
+        _setMuted(store, action, action.mediaType);
         break;
 
     case TOGGLE_CAMERA_FACING_MODE: {
@@ ... @@
             const { jitsiTrack } = action.track;
             const muted = jitsiTrack.isMuted();
             const participantID = jitsiTrack.getParticipantId();
-            const isVideoTrack = jitsiTrack.isVideoTrack();
+            const isVideoTrack = jitsiTrack.type !== MEDIA_TYPE.AUDIO;
 
             if (isVideoTrack) {
                 if (jitsiTrack.isLocal()) {
@@ ... @@
  * @private
  * @returns {void}
  */
-function _setMuted(store, { ensureTrack, muted }, mediaType: MEDIA_TYPE) {
+function _setMuted(store, { ensureTrack, authority, muted }, mediaType: MEDIA_TYPE) {
     const localTrack
         = _getLocalTrack(store, mediaType, /* includePending */ true);
 
@@ ... @@
         // `jitsiTrack`, then the `muted` state will be applied once the
         // `jitsiTrack` is created.
         const { jitsiTrack } = localTrack;
+        const isAudioOnly = authority === VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY;
 
-        jitsiTrack && setTrackMuted(jitsiTrack, muted);
+        // screenshare cannot be muted or unmuted using the video mute button
+        // anymore, unless it is muted by audioOnly.
+        jitsiTrack && (jitsiTrack.videoType !== 'desktop' || isAudioOnly)
+            && setTrackMuted(jitsiTrack, muted);
     } else if (!muted && ensureTrack && typeof APP === 'undefined') {
         // FIXME: This only runs on mobile now because web has its own way of
         // creating local tracks. Adjust the check once they are unified.

react/features/stream-effects/presenter/JitsiStreamPresenterEffect.js (+161, -0)

@@ ... @@
+// @flow
+
+import {
+    CLEAR_INTERVAL,
+    INTERVAL_TIMEOUT,
+    SET_INTERVAL,
+    timerWorkerScript
+} from './TimeWorker';
+
+/**
+ * Represents a modified MediaStream that adds video as pip on a desktop stream.
+ * <tt>JitsiStreamPresenterEffect</tt> does the processing of the original
+ * desktop stream.
+ */
+export default class JitsiStreamPresenterEffect {
+    _canvas: HTMLCanvasElement;
+    _ctx: CanvasRenderingContext2D;
+    _desktopElement: HTMLVideoElement;
+    _desktopStream: MediaStream;
+    _frameRate: number;
+    _onVideoFrameTimer: Function;
+    _onVideoFrameTimerWorker: Function;
+    _renderVideo: Function;
+    _videoFrameTimerWorker: Worker;
+    _videoElement: HTMLVideoElement;
+    isEnabled: Function;
+    startEffect: Function;
+    stopEffect: Function;
+
+    /**
+     * Represents a modified MediaStream that adds a camera track at the
+     * bottom right corner of the desktop track using a HTML canvas.
+     * <tt>JitsiStreamPresenterEffect</tt> does the processing of the original
+     * video stream.
+     *
+     * @param {MediaStream} videoStream - The video stream which is used for
+     * creating the canvas.
+     */
+    constructor(videoStream: MediaStream) {
+        const videoDiv = document.createElement('div');
+        const firstVideoTrack = videoStream.getVideoTracks()[0];
+        const { height, width, frameRate } = firstVideoTrack.getSettings() ?? firstVideoTrack.getConstraints();
+
+        this._canvas = document.createElement('canvas');
+        this._ctx = this._canvas.getContext('2d');
+
+        if (document.body !== null) {
+            document.body.appendChild(this._canvas);
+        }
+        this._desktopElement = document.createElement('video');
+        this._videoElement = document.createElement('video');
+        videoDiv.appendChild(this._videoElement);
+        videoDiv.appendChild(this._desktopElement);
+        if (document.body !== null) {
+            document.body.appendChild(videoDiv);
+        }
+
+        // Set the video element properties
+        this._frameRate = parseInt(frameRate, 10);
+        this._videoElement.width = parseInt(width, 10);
+        this._videoElement.height = parseInt(height, 10);
+        this._videoElement.autoplay = true;
+        this._videoElement.srcObject = videoStream;
+
+        // set the style attribute of the div to make it invisible
+        videoDiv.style.display = 'none';
+
+        // Bind event handler so it is only bound once for every instance.
+        this._onVideoFrameTimer = this._onVideoFrameTimer.bind(this);
+        this._videoFrameTimerWorker = new Worker(timerWorkerScript);
+        this._videoFrameTimerWorker.onmessage = this._onVideoFrameTimer;
+    }
+
+    /**
+     * EventHandler onmessage for the videoFrameTimerWorker WebWorker.
+     *
+     * @private
+     * @param {EventHandler} response - The onmessage EventHandler parameter.
+     * @returns {void}
+     */
+    _onVideoFrameTimer(response) {
+        if (response.data.id === INTERVAL_TIMEOUT) {
+            this._renderVideo();
+        }
+    }
+
+    /**
+     * Loop function to render the video frame input and draw presenter effect.
+     *
+     * @private
+     * @returns {void}
+     */
+    _renderVideo() {
+        // adjust the canvas width/height on every frame in case the window has been resized.
+        const [ track ] = this._desktopStream.getVideoTracks();
+        const { height, width } = track.getSettings() ?? track.getConstraints();
+
+        this._canvas.width = parseInt(width, 10);
+        this._canvas.height = parseInt(height, 10);
+        this._ctx.drawImage(this._desktopElement, 0, 0, this._canvas.width, this._canvas.height);
+        this._ctx.drawImage(this._videoElement, this._canvas.width - this._videoElement.width, this._canvas.height
+            - this._videoElement.height, this._videoElement.width, this._videoElement.height);
+
+        // draw a border around the video element.
+        this._ctx.beginPath();
+        this._ctx.lineWidth = 2;
+        this._ctx.strokeStyle = '#A9A9A9'; // dark grey
+        this._ctx.rect(this._canvas.width - this._videoElement.width, this._canvas.height - this._videoElement.height,
+            this._videoElement.width, this._videoElement.height);
+        this._ctx.stroke();
+    }
+
+    /**
+     * Checks if the local track supports this effect.
+     *
+     * @param {JitsiLocalTrack} jitsiLocalTrack - Track to apply effect.
+     * @returns {boolean} - Returns true if this effect can run on the
+     * specified track, false otherwise.
+     */
+    isEnabled(jitsiLocalTrack: Object) {
+        return jitsiLocalTrack.isVideoTrack() && jitsiLocalTrack.videoType === 'desktop';
+    }
+
+    /**
+     * Starts loop to capture video frame and render presenter effect.
+     *
+     * @param {MediaStream} desktopStream - Stream to be used for processing.
+     * @returns {MediaStream} - The stream with the applied effect.
+     */
+    startEffect(desktopStream: MediaStream) {
+        const firstVideoTrack = desktopStream.getVideoTracks()[0];
+        const { height, width } = firstVideoTrack.getSettings() ?? firstVideoTrack.getConstraints();
+
+        // set the desktop element properties.
+        this._desktopStream = desktopStream;
+        this._desktopElement.width = parseInt(width, 10);
+        this._desktopElement.height = parseInt(height, 10);
+        this._desktopElement.autoplay = true;
+        this._desktopElement.srcObject = desktopStream;
+        this._canvas.width = parseInt(width, 10);
+        this._canvas.height = parseInt(height, 10);
+        this._videoFrameTimerWorker.postMessage({
+            id: SET_INTERVAL,
+            timeMs: 1000 / this._frameRate
+        });
+
+        return this._canvas.captureStream(this._frameRate);
+    }
+
+    /**
+     * Stops the capture and render loop.
+     *
+     * @returns {void}
+     */
+    stopEffect() {
+        this._videoFrameTimerWorker.postMessage({
+            id: CLEAR_INTERVAL
+        });
+    }
+
+}
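A minimal usage sketch of the effect class above, assuming a camera MediaStream and a desktop MediaStream obtained elsewhere (for example via getUserMedia and getDisplayMedia); the composePresenterStream wrapper is illustrative, not part of the commit:

import { createPresenterEffect } from './react/features/stream-effects/presenter';

// Sketch only: compose a camera PiP onto a desktop stream.
async function composePresenterStream(cameraStream, desktopStream) {
    const effect = await createPresenterEffect(cameraStream);

    // startEffect() starts the worker-driven canvas render loop and returns
    // a canvas.captureStream() carrying desktop frames with the camera PiP.
    const mixedStream = effect.startEffect(desktopStream);

    return { effect, mixedStream };
}

// Later, effect.stopEffect() halts the render loop.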

react/features/stream-effects/presenter/TimeWorker.js (+62, -0)

@@ ... @@
+// @flow
+
+/**
+ * SET_INTERVAL constant is used to set interval and it is set in
+ * the id property of the request.data property. timeMs property must
+ * also be set. request.data example:
+ *
+ * {
+ *      id: SET_INTERVAL,
+ *      timeMs: 33
+ * }
+ */
+export const SET_INTERVAL = 1;
+
+/**
+ * CLEAR_INTERVAL constant is used to clear the interval and it is set in
+ * the id property of the request.data property.
+ *
+ * {
+ *      id: CLEAR_INTERVAL
+ * }
+ */
+export const CLEAR_INTERVAL = 2;
+
+/**
+ * INTERVAL_TIMEOUT constant is used as response and it is set in the id
+ * property.
+ *
+ * {
+ *      id: INTERVAL_TIMEOUT
+ * }
+ */
+export const INTERVAL_TIMEOUT = 3;
+
+/**
+ * The following code is needed as string to create a URL from a Blob.
+ * The URL is then passed to a WebWorker. Reason for this is to enable
+ * use of setInterval that is not throttled when tab is inactive.
+ */
+const code = `
+    var timer;
+
+    onmessage = function(request) {
+        switch (request.data.id) {
+        case ${SET_INTERVAL}: {
+            timer = setInterval(() => {
+                postMessage({ id: ${INTERVAL_TIMEOUT} });
+            }, request.data.timeMs);
+            break;
+        }
+        case ${CLEAR_INTERVAL}: {
+            if (timer) {
+                clearInterval(timer);
+            }
+            break;
+        }
+        }
+    };
+`;
+
+export const timerWorkerScript
+    = URL.createObjectURL(new Blob([ code ], { type: 'application/javascript' }));
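As the comment above notes, a page-level setInterval is throttled when the tab is in the background, so the render loop is driven from a dedicated Worker created from this Blob URL. A small standalone sketch of the message protocol (the ~30 fps interval and the five-second stop are arbitrary example values, not from the commit):

import {
    CLEAR_INTERVAL,
    INTERVAL_TIMEOUT,
    SET_INTERVAL,
    timerWorkerScript
} from './react/features/stream-effects/presenter/TimeWorker';

const worker = new Worker(timerWorkerScript);

// The worker answers every scheduled tick with an INTERVAL_TIMEOUT message.
worker.onmessage = ({ data }) => {
    if (data.id === INTERVAL_TIMEOUT) {
        console.log('tick');
    }
};

// Start ticking roughly every 33 ms (~30 fps), then stop after five seconds.
worker.postMessage({ id: SET_INTERVAL, timeMs: 33 });
setTimeout(() => worker.postMessage({ id: CLEAR_INTERVAL }), 5000);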

react/features/stream-effects/presenter/index.js (+19, -0)

@@ ... @@
+// @flow
+
+import JitsiStreamPresenterEffect from './JitsiStreamPresenterEffect';
+
+/**
+ * Creates a new instance of JitsiStreamPresenterEffect.
+ *
+ * @param {MediaStream} stream - The video stream which will be used for
+ * creating the presenter effect.
+ * @returns {Promise<JitsiStreamPresenterEffect>}
+ */
+export function createPresenterEffect(stream: MediaStream) {
+    if (!MediaStreamTrack.prototype.getSettings
+        && !MediaStreamTrack.prototype.getConstraints) {
+        return Promise.reject(new Error('JitsiStreamPresenterEffect not supported!'));
+    }
+
+    return Promise.resolve(new JitsiStreamPresenterEffect(stream));
+}

react/features/toolbox/components/VideoMuteButton.js (+10, -3)

@@ ... @@
 import { setAudioOnly } from '../../base/audio-only';
 import { translate } from '../../base/i18n';
 import {
-    MEDIA_TYPE,
     VIDEO_MUTISM_AUTHORITY,
     setVideoMuted
 } from '../../base/media';
 import { connect } from '../../base/redux';
 import { AbstractVideoMuteButton } from '../../base/toolbox';
 import type { AbstractButtonProps } from '../../base/toolbox';
-import { isLocalTrackMuted } from '../../base/tracks';
+import { getLocalVideoType, isLocalVideoTrackMuted } from '../../base/tracks';
 import UIEvents from '../../../../service/UI/UIEvents';
 
 declare var APP: Object;
@@ ... @@
      */
     _audioOnly: boolean,
 
+    /**
+     * MEDIA_TYPE of the local video.
+     */
+    _videoMediaType: string,
+
     /**
      * Whether video is currently muted or not.
      */
@@ ... @@
             this.props.dispatch(
                 setAudioOnly(false, /* ensureTrack */ true));
         }
+        const mediaType = this.props._videoMediaType;
 
         this.props.dispatch(
             setVideoMuted(
                 videoMuted,
+                mediaType,
                 VIDEO_MUTISM_AUTHORITY.USER,
                 /* ensureTrack */ true));
 
@@ ... @@
 
     return {
         _audioOnly: Boolean(audioOnly),
-        _videoMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO)
+        _videoMediaType: getLocalVideoType(tracks),
+        _videoMuted: isLocalVideoTrackMuted(tracks)
     };
 }
 
