feat(conference, toolbox, API): get rid of '{audio,video}Muted' flags

* ref: video muted state

Get rid of 'videoMuted' flag in conference.js

* ref: audio muted state

Get rid of 'audioMuted' flag in conference.js

* fix(conference.js|API): early audio/video muted updates

* ref(conference.js): rename isVideoMuted

Rename isVideoMuted to isLocalVideoMuted to be consistent with
isLocalAudioMuted.

* doc|style(conference.js): comments and space after if

* ref: move 'setTrackMuted' to functions

* fix(tracks/middleware): no-lonely-if

* ref(features/toolbox): get rid of last argument

* ref(defaultToolbarButtons): rename var
Paweł Domas, 7 years ago
Commit 99ce46cfa8

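In short, the muted state is no longer cached on the conference object; it is derived from the redux store on demand. A minimal sketch of the new read path (illustrative only: the wrapper function below is made up, while isLocalTrackMuted and MEDIA_TYPE are the helpers added/used by the diffs that follow):

// Sketch: how the muted state is read after this change. The name
// isLocalAudioMutedSketch is invented for illustration.
import { MEDIA_TYPE } from './react/features/base/media';
import { isLocalTrackMuted } from './react/features/base/tracks';

function isLocalAudioMutedSketch(store) {
    // true when there is no local audio track or when that track is muted
    return isLocalTrackMuted(
        store.getState()['features/base/tracks'],
        MEDIA_TYPE.AUDIO);
}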
conference.js  (+122, -128)

@@ -38,8 +38,12 @@ import {
     isFatalJitsiConnectionError
 } from './react/features/base/lib-jitsi-meet';
 import {
+    isVideoMutedByUser,
+    MEDIA_TYPE,
     setAudioAvailable,
-    setVideoAvailable
+    setAudioMuted,
+    setVideoAvailable,
+    setVideoMuted
 } from './react/features/base/media';
 import {
     localParticipantConnectionStatusChanged,
@@ -54,6 +58,7 @@ import {
 } from './react/features/base/participants';
 import {
     createLocalTracks,
+    isLocalTrackMuted,
     replaceLocalTrack,
     trackAdded,
     trackRemoved
@@ -87,7 +92,6 @@ const eventEmitter = new EventEmitter();
 
 let room;
 let connection;
-let localAudio, localVideo;
 
 /*
  * Logic to open a desktop picker put on the window global for
@@ -134,7 +138,7 @@ function connect(roomName) {
  * @param {string} value new value
  */
 function sendData(command, value) {
-    if(!room) {
+    if (!room) {
         return;
     }
 
@@ -184,47 +188,18 @@ function getDisplayName(id) {
 /**
  * Mute or unmute local audio stream if it exists.
  * @param {boolean} muted - if audio stream should be muted or unmuted.
- *
- * @returns {Promise} resolved in case mute/unmute operations succeeds or
- * rejected with an error if something goes wrong. It is expected that often
- * the error will be of the {@link JitsiTrackError} type, but it's not
- * guaranteed.
  */
 function muteLocalAudio(muted) {
-    return muteLocalMedia(localAudio, muted);
-}
-
-/**
- * Mute or unmute local media stream if it exists.
- * @param {JitsiLocalTrack} localTrack
- * @param {boolean} muted
- *
- * @returns {Promise} resolved in case mute/unmute operations succeeds or
- * rejected with an error if something goes wrong. It is expected that often
- * the error will be of the {@link JitsiTrackError} type, but it's not
- * guaranteed.
- */
-function muteLocalMedia(localTrack, muted) {
-    if (!localTrack) {
-        return Promise.resolve();
-    }
-
-    const method = muted ? 'mute' : 'unmute';
-
-    return localTrack[method]();
+    APP.store.dispatch(setAudioMuted(muted));
 }
 
 /**
  * Mute or unmute local video stream if it exists.
  * @param {boolean} muted if video stream should be muted or unmuted.
  *
- * @returns {Promise} resolved in case mute/unmute operations succeeds or
- * rejected with an error if something goes wrong. It is expected that often
- * the error will be of the {@link JitsiTrackError} type, but it's not
- * guaranteed.
  */
 function muteLocalVideo(muted) {
-    return muteLocalMedia(localVideo, muted);
+    APP.store.dispatch(setVideoMuted(muted));
 }
 
 /**
@@ -458,8 +433,6 @@ export default {
      */
     _localTracksInitialized: false,
     isModerator: false,
-    audioMuted: false,
-    videoMuted: false,
     isSharingScreen: false,
     /**
      * Indicates if the desktop sharing functionality has been enabled.
@@ -491,6 +464,21 @@ export default {
      */
     isDominantSpeaker: false,
 
+    /**
+     * The local audio track (if any).
+     * FIXME tracks from redux store should be the single source of truth
+     * @type {JitsiLocalTrack|null}
+     */
+    localAudio: null,
+
+    /**
+     * The local video track (if any).
+     * FIXME tracks from redux store should be the single source of truth, but
+     * more refactoring is required around screen sharing ('localVideo' usages).
+     * @type {JitsiLocalTrack|null}
+     */
+    localVideo: null,
+
     /**
      * Creates local media tracks and connects to a room. Will show error
     * dialogs in case accessing the local microphone and/or camera failed. Will
@@ -655,13 +643,13 @@ export default {
     init(options) {
         this.roomName = options.roomName;
         // attaches global error handler, if there is already one, respect it
-        if(JitsiMeetJS.getGlobalOnErrorHandler){
+        if (JitsiMeetJS.getGlobalOnErrorHandler){
             var oldOnErrorHandler = window.onerror;
             window.onerror = function (message, source, lineno, colno, error) {
                 JitsiMeetJS.getGlobalOnErrorHandler(
                     message, source, lineno, colno, error);
 
-                if(oldOnErrorHandler)
+                if (oldOnErrorHandler)
                     oldOnErrorHandler(message, source, lineno, colno, error);
             };
 
@@ -671,7 +659,7 @@ export default {
             JitsiMeetJS.getGlobalOnErrorHandler(
                     null, null, null, null, event.reason);
 
-                if(oldOnUnhandledRejection)
+                if (oldOnUnhandledRejection)
                     oldOnUnhandledRejection(event);
             };
         }
@@ -690,9 +678,10 @@ export default {
                     });
             }).then(([tracks, con]) => {
                 tracks.forEach(track => {
-                    if (track.isAudioTrack() && this.audioMuted) {
+                    if (track.isAudioTrack() && this.isLocalAudioMuted()) {
                         track.mute();
-                    } else if (track.isVideoTrack() && this.videoMuted) {
+                    } else if (track.isVideoTrack()
+                                    && this.isLocalVideoMuted()) {
                         track.mute();
                     }
                 });
@@ -731,12 +720,10 @@ export default {
                 // to the conference
                 if (!tracks.find((t) => t.isAudioTrack())) {
                     this.setAudioMuteStatus(true);
-                    APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
                 }
 
                 if (!tracks.find((t) => t.isVideoTrack())) {
                     this.setVideoMuteStatus(true);
-                    APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
                 }
 
                 this._initDeviceList();
@@ -759,6 +746,20 @@ export default {
     isLocalId(id) {
         return this.getMyUserId() === id;
     },
+
+    /**
+     * Tells whether the local video is muted or not.
+     * @return {boolean}
+     */
+    isLocalVideoMuted() {
+        // If the tracks are not ready, read from base/media state
+        return this._localTracksInitialized
+            ? isLocalTrackMuted(
+                APP.store.getState()['features/base/tracks'],
+                MEDIA_TYPE.VIDEO)
+            : isVideoMutedByUser(APP.store);
+    },
+
     /**
      * Simulates toolbar button click for audio mute. Used by shortcuts and API.
     * @param {boolean} mute true for mute and false for unmute.
@@ -768,38 +769,31 @@ export default {
     muteAudio(mute, showUI = true) {
         // Not ready to modify track's state yet
         if (!this._localTracksInitialized) {
+            // This will only modify base/media.audio.muted which is then synced
+            // up with the track at the end of local tracks initialization.
+            muteLocalAudio(mute);
             this.setAudioMuteStatus(mute);
+
             return;
-        } else if (localAudio && localAudio.isMuted() === mute) {
+        } else if (this.isLocalAudioMuted() === mute) {
             // NO-OP
             return;
         }
 
-        const maybeShowErrorDialog = (error) => {
-            if (showUI) {
-                APP.UI.showMicErrorNotification(error);
-            }
-        };
-
-        if (!localAudio && this.audioMuted && !mute) {
+        if (!this.localAudio && !mute) {
             createLocalTracks({ devices: ['audio'] }, false)
                 .then(([audioTrack]) => audioTrack)
                 .catch(error => {
-                    maybeShowErrorDialog(error);
+                    if (showUI) {
+                        APP.UI.showMicErrorNotification(error);
+                    }
 
                     // Rollback the audio muted status by using null track
                     return null;
                 })
                 .then(audioTrack => this.useAudioStream(audioTrack));
         } else {
-            const oldMutedStatus = this.audioMuted;
-
-            muteLocalAudio(mute)
-                .catch(error => {
-                    maybeShowErrorDialog(error);
-                    this.setAudioMuteStatus(oldMutedStatus);
-                    APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
-                });
+            muteLocalAudio(mute);
         }
     },
     /**
@@ -807,7 +801,13 @@ export default {
     * @returns {boolean}
     */
     isLocalAudioMuted() {
-        return this.audioMuted;
+        // If the tracks are not ready, read from base/media state
+        return this._localTracksInitialized
+            ? isLocalTrackMuted(
+                APP.store.getState()['features/base/tracks'],
+                MEDIA_TYPE.AUDIO)
+            : Boolean(
+                APP.store.getState()['features/base/media'].audio.muted);
     },
     /**
     * Simulates toolbar button click for audio mute. Used by shortcuts
@@ -816,7 +816,7 @@ export default {
     * dialogs in case of media permissions error.
     */
     toggleAudioMuted(showUI = true) {
-        this.muteAudio(!this.audioMuted, showUI);
+        this.muteAudio(!this.isLocalAudioMuted(), showUI);
     },
     /**
     * Simulates toolbar button click for video mute. Used by shortcuts and API.
@@ -825,12 +825,15 @@ export default {
     * dialogs in case of media permissions error.
     */
     muteVideo(mute, showUI = true) {
-        // Not ready to modify track's state yet
+        // If not ready to modify track's state yet adjust the base/media
        if (!this._localTracksInitialized) {
+            // This will only modify base/media.video.muted which is then synced
+            // up with the track at the end of local tracks initialization.
+            muteLocalVideo(mute);
             this.setVideoMuteStatus(mute);
 
             return;
-        } else if (localVideo && localVideo.isMuted() === mute) {
+        } else if (this.isLocalVideoMuted() === mute) {
             // NO-OP
             return;
         }
@@ -841,7 +844,10 @@ export default {
             }
         };
 
-        if (!localVideo && this.videoMuted && !mute) {
+        // FIXME it is possible to queue this task twice, but it's not causing
+        // any issues. Specifically this can happen when the previous
+        // get user media call is blocked on "ask user for permissions" dialog.
+        if (!this.localVideo && !mute) {
             // Try to create local video if there wasn't any.
             // This handles the case when user joined with no video
             // (dismissed screen sharing screen or in audio only mode), but
@@ -861,14 +867,8 @@ export default {
                 })
                 .then(videoTrack => this.useVideoStream(videoTrack));
         } else {
-            const oldMutedStatus = this.videoMuted;
-
-            muteLocalVideo(mute)
-                .catch(error => {
-                    maybeShowErrorDialog(error);
-                    this.setVideoMuteStatus(oldMutedStatus);
-                    APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
-                });
+            // FIXME show error dialog if it fails (should be handled by react)
+            muteLocalVideo(mute);
         }
     },
     /**
@@ -877,7 +877,7 @@ export default {
     * dialogs in case of media permissions error.
     */
     toggleVideoMuted(showUI = true) {
-        this.muteVideo(!this.videoMuted, showUI);
+        this.muteVideo(!this.isLocalVideoMuted(), showUI);
     },
     /**
     * Retrieve list of conference participants (without local user).
@@ -1202,7 +1202,7 @@ export default {
 
     _getConferenceOptions() {
         let options = config;
-        if(config.enableRecording && !config.recordingType) {
+        if (config.enableRecording && !config.recordingType) {
             options.recordingType = (config.hosts &&
                 (typeof config.hosts.jirecon != "undefined"))?
                 "jirecon" : "colibri";
@@ -1219,20 +1219,18 @@ export default {
     */
     useVideoStream(newStream) {
         return APP.store.dispatch(
-            replaceLocalTrack(localVideo, newStream, room))
+            replaceLocalTrack(this.localVideo, newStream, room))
             .then(() => {
-                localVideo = newStream;
+                this.localVideo = newStream;
+
                 if (newStream) {
-                    this.setVideoMuteStatus(newStream.isMuted());
                     this.isSharingScreen = newStream.videoType === 'desktop';
 
                     APP.UI.addLocalStream(newStream);
                 } else {
-                    // No video is treated the same way as being video muted
-                    this.setVideoMuteStatus(true);
                     this.isSharingScreen = false;
                 }
-                APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
+                this.setVideoMuteStatus(this.isLocalVideoMuted());
                 APP.UI.updateDesktopSharingButtons();
             });
     },
@@ -1245,18 +1243,13 @@ export default {
     */
     useAudioStream(newStream) {
         return APP.store.dispatch(
-            replaceLocalTrack(localAudio, newStream, room))
+            replaceLocalTrack(this.localAudio, newStream, room))
             .then(() => {
-                localAudio = newStream;
-
+                this.localAudio = newStream;
                 if (newStream) {
-                    this.setAudioMuteStatus(newStream.isMuted());
                     APP.UI.addLocalStream(newStream);
-                } else {
-                    // No audio is treated the same way as being audio muted
-                    this.setAudioMuteStatus(true);
                 }
-                APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
+                this.setAudioMuteStatus(this.isLocalAudioMuted());
             });
     },
 
@@ -1339,10 +1332,10 @@ export default {
                     JitsiMeetJS.analytics.sendEvent(
                         'conference.sharingDesktop.stop');
                     logger.log('switched back to local video');
-                    if (!localVideo && wasVideoMuted) {
+                    if (!this.localVideo && wasVideoMuted) {
                         return Promise.reject('No local video to be muted!');
-                    } else if (wasVideoMuted && localVideo) {
-                        return localVideo.mute();
+                    } else if (wasVideoMuted && this.localVideo) {
+                        return this.localVideo.mute();
                     }
                 })
                 .catch(error => {
@@ -1416,8 +1409,8 @@ export default {
     _createDesktopTrack(options = {}) {
         let externalInstallation = false;
         let DSExternalInstallationInProgress = false;
-        const didHaveVideo = Boolean(localVideo);
-        const wasVideoMuted = this.videoMuted;
+        const didHaveVideo = Boolean(this.localVideo);
+        const wasVideoMuted = this.isLocalVideoMuted();
 
         return createLocalTracks({
             desktopSharingSources: options.desktopSharingSources,
@@ -1671,28 +1664,28 @@ export default {
         });
 
         room.on(ConferenceEvents.TRACK_ADDED, (track) => {
-            if(!track || track.isLocal())
+            if (!track || track.isLocal())
                 return;
 
             APP.store.dispatch(trackAdded(track));
         });
 
         room.on(ConferenceEvents.TRACK_REMOVED, (track) => {
-            if(!track || track.isLocal())
+            if (!track || track.isLocal())
                 return;
 
             APP.store.dispatch(trackRemoved(track));
         });
 
         room.on(ConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED, (id, lvl) => {
-            if(this.isLocalId(id) && localAudio && localAudio.isMuted()) {
+            if (this.isLocalId(id)
+                && this.localAudio && this.localAudio.isMuted()) {
                 lvl = 0;
             }
 
-            if(config.debug)
-            {
+            if (config.debug) {
                 this.audioLevelsMap[id] = lvl;
-                if(config.debugAudioLevels)
+                if (config.debugAudioLevels)
                     logger.log("AudioLevel:" + id + "/" + lvl);
             }
 
@@ -1866,12 +1859,14 @@ export default {
                     this.deviceChangeListener);
 
             // stop local video
-            if (localVideo) {
-                localVideo.dispose();
+            if (this.localVideo) {
+                this.localVideo.dispose();
+                this.localVideo = null;
             }
             // stop local audio
-            if (localAudio) {
-                localAudio.dispose();
+            if (this.localAudio) {
+                this.localAudio.dispose();
+                this.localAudio = null;
             }
         });
 
@@ -2215,14 +2210,14 @@ export default {
                         // storage and settings menu. This is a workaround until
                         // getConstraints() method will be implemented
                         // in browsers.
-                        if (localAudio) {
+                        if (this.localAudio) {
                             APP.settings.setMicDeviceId(
-                                localAudio.getDeviceId(), false);
+                                this.localAudio.getDeviceId(), false);
                         }
 
-                        if (localVideo) {
+                        if (this.localVideo) {
                             APP.settings.setCameraDeviceId(
-                                localVideo.getDeviceId(), false);
+                                this.localVideo.getDeviceId(), false);
                         }
 
                         mediaDeviceHelper.setCurrentMediaDevices(devices);
@@ -2263,10 +2258,13 @@ export default {
 
         let newDevices =
             mediaDeviceHelper.getNewMediaDevicesAfterDeviceListChanged(
-                devices, this.isSharingScreen, localVideo, localAudio);
+                devices,
+                this.isSharingScreen,
+                this.localVideo,
+                this.localAudio);
         let promises = [];
-        let audioWasMuted = this.audioMuted;
-        let videoWasMuted = this.videoMuted;
+        let audioWasMuted = this.isLocalAudioMuted();
+        let videoWasMuted = this.isLocalVideoMuted();
         let availableAudioInputDevices =
             mediaDeviceHelper.getDevicesFromListByKind(devices, 'audioinput');
         let availableVideoInputDevices =
@@ -2323,11 +2321,11 @@ export default {
 
         // The audio functionality is considered available if there are any
        // audio devices detected or if the local audio stream already exists.
-        const available = audioDeviceCount > 0 || Boolean(localAudio);
+        const available = audioDeviceCount > 0 || Boolean(this.localAudio);
 
         logger.debug(
             'Microphone button enabled: ' + available,
-            'local audio: ' + localAudio,
+            'local audio: ' + this.localAudio,
             'audio devices: ' + audioMediaDevices,
             'device count: ' + audioDeviceCount);
 
@@ -2348,11 +2346,11 @@ export default {
         // active which could be either screensharing stream or a video track
         // created before the permissions were rejected (through browser
         // config).
-        const available = videoDeviceCount > 0 || Boolean(localVideo);
+        const available = videoDeviceCount > 0 || Boolean(this.localVideo);
 
         logger.debug(
             'Camera button enabled: ' + available,
-            'local video: ' + localVideo,
+            'local video: ' + this.localVideo,
             'video devices: ' + videoMediaDevices,
             'device count: ' + videoDeviceCount);
 
@@ -2393,10 +2391,10 @@ export default {
     * NOTE: Should be used after conference.init
     */
     logEvent(name, value, label) {
-        if(JitsiMeetJS.analytics) {
+        if (JitsiMeetJS.analytics) {
             JitsiMeetJS.analytics.sendEvent(name, {value, label});
         }
-        if(room) {
+        if (room) {
             room.sendApplicationLog(JSON.stringify({name, value, label}));
         }
     },
@@ -2553,7 +2551,7 @@ export default {
     * track or the source id is not available, undefined will be returned.
     */
     getDesktopSharingSourceId() {
-        return localVideo.sourceId;
+        return this.localVideo.sourceId;
     },
 
     /**
@@ -2565,7 +2563,7 @@ export default {
     * returned.
     */
     getDesktopSharingSourceType() {
-        return localVideo.sourceType;
+        return this.localVideo.sourceType;
     },
 
     /**
@@ -2574,10 +2572,8 @@ export default {
     * @param {boolean} muted - New muted status.
     */
     setVideoMuteStatus(muted) {
-        if (this.videoMuted !== muted) {
-            this.videoMuted = muted;
-            APP.API.notifyVideoMutedStatusChanged(muted);
-        }
+        APP.UI.setVideoMuted(this.getMyUserId(), muted);
+        APP.API.notifyVideoMutedStatusChanged(muted);
     },
 
     /**
@@ -2586,9 +2582,7 @@ export default {
     * @param {boolean} muted - New muted status.
     */
     setAudioMuteStatus(muted) {
-        if (this.audioMuted !== muted) {
-            this.audioMuted = muted;
-            APP.API.notifyAudioMutedStatusChanged(muted);
-        }
-    },
+        APP.UI.setAudioMuted(this.getMyUserId(), muted);
+        APP.API.notifyAudioMutedStatusChanged(muted);
+    }
 };

modules/UI/UI.js  (+0, -3)

@@ -24,7 +24,6 @@ import Settings from "./../settings/Settings";
 import { debounce } from "../util/helpers";
 
 import { updateDeviceList } from '../../react/features/base/devices';
-import { setAudioMuted, setVideoMuted } from '../../react/features/base/media';
 import {
     openDeviceSelectionDialog
 } from '../../react/features/device-selection';
@@ -669,7 +668,6 @@ UI.askForNickname = function () {
 UI.setAudioMuted = function (id, muted) {
     VideoLayout.onAudioMute(id, muted);
     if (APP.conference.isLocalId(id)) {
-        APP.store.dispatch(setAudioMuted(muted));
         APP.conference.updateAudioIconEnabled();
     }
 };
@@ -680,7 +678,6 @@ UI.setAudioMuted = function (id, muted) {
 UI.setVideoMuted = function (id, muted) {
     VideoLayout.onVideoMute(id, muted);
     if (APP.conference.isLocalId(id)) {
-        APP.store.dispatch(setVideoMuted(muted));
         APP.conference.updateVideoIconEnabled();
     }
 };

react/features/base/media/functions.js  (+14, -0)

@@ -1,3 +1,5 @@
+import { VIDEO_MUTISM_AUTHORITY } from './constants';
+
 /**
  * Determines whether a specific videoTrack should be rendered.
  *
@@ -14,3 +16,15 @@ export function shouldRenderVideoTrack(videoTrack, waitForVideoStarted) {
             && !videoTrack.muted
             && (!waitForVideoStarted || videoTrack.videoStarted));
 }
+
+/**
+ * Checks if video is currently muted by the user authority.
+ *
+ * @param {Object} store - The redux store instance.
+ * @returns {boolean}
+ */
+export function isVideoMutedByUser({ getState }) {
+    return Boolean(
+        getState()['features/base/media'] // eslint-disable-line no-bitwise
+            .video.muted & VIDEO_MUTISM_AUTHORITY.USER);
+}
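A note on the bitwise check in isVideoMutedByUser above: base/media keeps video.muted as a bit field of mutism "authorities" (user action, audio-only mode, and so on), so the helper masks the value with VIDEO_MUTISM_AUTHORITY.USER instead of coercing the whole field to a boolean. A sketch of why the mask matters; the constant values below are assumptions for illustration only, the real ones live in react/features/base/media/constants.js:

// Assumed values for illustration only (see constants.js for the real ones).
const VIDEO_MUTISM_AUTHORITY = {
    AUDIO_ONLY: 1 << 0, // video muted because the app is in audio-only mode
    USER: 1 << 2        // video muted because the user chose to mute
};

let videoMuted = 0;

videoMuted |= VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY;

// Boolean(videoMuted) is true here, yet the user never muted; masking with the
// USER bit isolates the user's intent, which is what conference.js needs
// before the local tracks are initialized.
console.log(Boolean(videoMuted & VIDEO_MUTISM_AUTHORITY.USER)); // false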

react/features/base/media/middleware.js  (+2, -2)

@@ -93,7 +93,7 @@ function _setRoom({ dispatch, getState }, next, action) {
  * @private
  * @returns {void}
  */
-function _syncTrackMutedState({ dispatch, getState }, track) {
+function _syncTrackMutedState({ getState }, track) {
     const state = getState()['features/base/media'];
     const muted = Boolean(state[track.mediaType].muted);
 
@@ -104,6 +104,6 @@ function _syncTrackMutedState({ dispatch, getState }, track) {
     // fired before track gets to state.
     if (track.muted !== muted) {
         track.muted = muted;
-        dispatch(setTrackMuted(track.jitsiTrack, muted));
+        setTrackMuted(track.jitsiTrack, muted);
     }
 }

react/features/base/tracks/actions.js  (+0, -47)

@@ -348,53 +348,6 @@ function _getLocalTracksToChange(currentTracks, newTracks) {
     };
 }
 
-/**
- * Mutes or unmutes a specific <tt>JitsiLocalTrack</tt>. If the muted state of
- * the specified <tt>track</tt> is already in accord with the specified
- * <tt>muted</tt> value, then does nothing. In case the actual muting/unmuting
- * fails, a rollback action will be dispatched to undo the muting/unmuting.
- *
- * @param {JitsiLocalTrack} track - The <tt>JitsiLocalTrack</tt> to mute or
- * unmute.
- * @param {boolean} muted - If the specified <tt>track</tt> is to be muted, then
- * <tt>true</tt>; otherwise, <tt>false</tt>.
- * @returns {Function}
- */
-export function setTrackMuted(track, muted) {
-    return dispatch => {
-        muted = Boolean(muted); // eslint-disable-line no-param-reassign
-
-        if (track.isMuted() === muted) {
-            return Promise.resolve();
-        }
-
-        const f = muted ? 'mute' : 'unmute';
-
-        return track[f]().catch(error => {
-            console.error(`set track ${f} failed`, error);
-
-            if (navigator.product === 'ReactNative') {
-                // Synchronizing the state of base/tracks into the state of
-                // base/media is not required in React (and, respectively, React
-                // Native) because base/media expresses the app's and the user's
-                // desires/expectations/intents and base/tracks expresses
-                // practice/reality. Unfortunately, the old Web does not comply
-                // and/or does the opposite.
-                return;
-            }
-
-            const setMuted
-                = track.mediaType === MEDIA_TYPE.AUDIO
-                    ? setAudioMuted
-                    : setVideoMuted;
-
-            // FIXME The following disregards VIDEO_MUTISM_AUTHORITY (in the
-            // case of setVideoMuted, of course).
-            dispatch(setMuted(!muted));
-        });
-    };
-}
-
 /**
  * Returns true if the provided JitsiTrack should be rendered as a mirror.
  *

react/features/base/tracks/functions.js  (+42, -0)

@@ -155,3 +155,45 @@ export function getTrackByJitsiTrack(tracks, jitsiTrack) {
 export function getTracksByMediaType(tracks, mediaType) {
     return tracks.filter(t => t.mediaType === mediaType);
 }
+
+/**
+ * Checks if the first local track in the given tracks set is muted.
+ *
+ * @param {Track[]} tracks - List of all tracks.
+ * @param {MEDIA_TYPE} mediaType - The media type of tracks to be checked.
+ * @returns {boolean} True if local track is muted or false if the track is
+ * unmuted or if there are no local tracks of the given media type in the given
+ * set of tracks.
+ */
+export function isLocalTrackMuted(tracks, mediaType) {
+    const track = getLocalTrack(tracks, mediaType);
+
+    return !track || track.muted;
+}
+
+/**
+ * Mutes or unmutes a specific <tt>JitsiLocalTrack</tt>. If the muted state of
+ * the specified <tt>track</tt> is already in accord with the specified
+ * <tt>muted</tt> value, then does nothing.
+ *
+ * @param {JitsiLocalTrack} track - The <tt>JitsiLocalTrack</tt> to mute or
+ * unmute.
+ * @param {boolean} muted - If the specified <tt>track</tt> is to be muted, then
+ * <tt>true</tt>; otherwise, <tt>false</tt>.
+ * @returns {Promise}
+ */
+export function setTrackMuted(track, muted) {
+    muted = Boolean(muted); // eslint-disable-line no-param-reassign
+
+    if (track.isMuted() === muted) {
+        return Promise.resolve();
+    }
+
+    const f = muted ? 'mute' : 'unmute';
+
+    return track[f]().catch(error => {
+
+        // FIXME emit mute failed, so that the app can show error dialog
+        console.error(`set track ${f} failed`, error);
+    });
+}
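The two helpers above split the read path from the write path: callers check the redux copy of the track state with isLocalTrackMuted and drive the JitsiLocalTrack itself with setTrackMuted. A hedged usage sketch (assumed caller code, not part of the diff, mirroring how base/tracks/middleware.js uses them below):

// Assumed caller, for illustration only.
import { MEDIA_TYPE } from '../media';

import { getLocalTrack, isLocalTrackMuted, setTrackMuted } from './functions';

function syncLocalAudioMute(store, shouldBeMuted) {
    const tracks = store.getState()['features/base/tracks'];

    if (isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO) === shouldBeMuted) {
        return Promise.resolve(); // already in the desired state
    }

    const localTrack = getLocalTrack(tracks, MEDIA_TYPE.AUDIO);

    // setTrackMuted resolves even if mute/unmute fails (the error is logged).
    return localTrack
        ? setTrackMuted(localTrack.jitsiTrack, shouldBeMuted)
        : Promise.resolve();
}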

react/features/base/tracks/middleware.js  (+7, -81)

@@ -6,16 +6,13 @@ import {
     SET_AUDIO_MUTED,
     SET_CAMERA_FACING_MODE,
     SET_VIDEO_MUTED,
-    setAudioMuted,
-    setVideoMuted,
     TOGGLE_CAMERA_FACING_MODE,
     toggleCameraFacingMode
 } from '../media';
 import { MiddlewareRegistry } from '../redux';
 
-import { setTrackMuted } from './actions';
 import { TRACK_ADDED, TRACK_REMOVED, TRACK_UPDATED } from './actionTypes';
-import { getLocalTrack } from './functions';
+import { getLocalTrack, setTrackMuted } from './functions';
 
 declare var APP: Object;
 
@@ -108,30 +105,20 @@ MiddlewareRegistry.register(store => next => action => {
             const participantID = jitsiTrack.getParticipantId();
             const isVideoTrack = jitsiTrack.isVideoTrack();
 
-            if (jitsiTrack.isLocal()) {
-                if (isVideoTrack) {
+            if (isVideoTrack) {
+                if (jitsiTrack.isLocal()) {
                     APP.conference.setVideoMuteStatus(muted);
                 } else {
-                    APP.conference.setAudioMuteStatus(muted);
+                    APP.UI.setVideoMuted(participantID, muted);
                 }
-            }
-
-            if (isVideoTrack) {
-                APP.UI.setVideoMuted(participantID, muted);
                 APP.UI.onPeerVideoTypeChanged(
                     participantID,
                     jitsiTrack.videoType);
+            } else if (jitsiTrack.isLocal()) {
+                APP.conference.setAudioMuteStatus(muted);
             } else {
                 APP.UI.setAudioMuted(participantID, muted);
             }
-
-            // XXX The following synchronizes the state of base/tracks into the
-            // state of base/media. Which is not required in React (and,
-            // respectively, React Native) because base/media expresses the
-            // app's and the user's desires/expectations/intents and base/tracks
-            // expresses practice/reality. Unfortunately, the old Web does not
-            // comply and/or does the opposite. Hence, the following:
-            return _trackUpdated(store, next, action);
         }
 
     }
@@ -169,66 +156,5 @@ function _getLocalTrack({ getState }, mediaType: MEDIA_TYPE) {
 function _setMuted(store, { muted }, mediaType: MEDIA_TYPE) {
     const localTrack = _getLocalTrack(store, mediaType);
 
-    localTrack && store.dispatch(setTrackMuted(localTrack.jitsiTrack, muted));
-}
-
-/**
- * Intercepts the action <tt>TRACK_UPDATED</tt> in order to synchronize the
- * muted states of the local tracks of features/base/tracks with the muted
- * states of features/base/media.
- *
- * @param {Store} store - The redux store in which the specified <tt>action</tt>
- * is being dispatched.
- * @param {Dispatch} next - The redux dispatch function to dispatch the
- * specified <tt>action</tt> to the specified <tt>store</tt>.
- * @param {Action} action - The redux action <tt>TRACK_UPDATED</tt> which is
- * being dispatched in the specified <tt>store</tt>.
- * @private
- * @returns {Object} The new state that is the result of the reduction of the
- * specified <tt>action</tt>.
- */
-function _trackUpdated(store, next, action) {
-    // Determine the muted state of the local track before the update.
-    const track = action.track;
-    let mediaType;
-    let oldMuted;
-
-    if ('muted' in track) {
-        // XXX The return value of JitsiTrack.getType() is of type MEDIA_TYPE
-        // that happens to be compatible with the type MEDIA_TYPE defined by
-        // jitsi-meet.
-        mediaType = track.jitsiTrack.getType();
-
-        const localTrack = _getLocalTrack(store, mediaType);
-
-        if (localTrack) {
-            oldMuted = localTrack.muted;
-        }
-    }
-
-    const result = next(action);
-
-    if (typeof oldMuted !== 'undefined') {
-        // Determine the muted state of the local track after the update. If the
-        // muted states before and after the update differ, then the respective
-        // media state should by synchronized.
-        const localTrack = _getLocalTrack(store, mediaType);
-
-        if (localTrack) {
-            const newMuted = localTrack.muted;
-
-            if (oldMuted !== newMuted) {
-                switch (mediaType) {
-                case MEDIA_TYPE.AUDIO:
-                    store.dispatch(setAudioMuted(newMuted));
-                    break;
-                case MEDIA_TYPE.VIDEO:
-                    store.dispatch(setVideoMuted(newMuted));
-                    break;
-                }
-            }
-        }
-    }
-
-    return result;
+    localTrack && setTrackMuted(localTrack.jitsiTrack, muted);
 }

react/features/toolbox/defaultToolbarButtons.js  (+5, -4)

@@ -40,13 +40,14 @@ const buttons: Object = {
         isDisplayed: () => true,
         id: 'toolbar_button_camera',
         onClick() {
-            if (APP.conference.videoMuted) {
+            const newVideoMutedState = !APP.conference.isLocalVideoMuted();
+
+            if (newVideoMutedState) {
                 JitsiMeetJS.analytics.sendEvent('toolbar.video.enabled');
-                APP.UI.emitEvent(UIEvents.VIDEO_MUTED, false);
             } else {
                 JitsiMeetJS.analytics.sendEvent('toolbar.video.disabled');
-                APP.UI.emitEvent(UIEvents.VIDEO_MUTED, true);
             }
+            APP.UI.emitEvent(UIEvents.VIDEO_MUTED, newVideoMutedState);
         },
         popups: [
             {
@@ -290,7 +291,7 @@ const buttons: Object = {
         onClick() {
             const sharedVideoManager = APP.UI.getSharedVideoManager();
 
-            if (APP.conference.audioMuted) {
+            if (APP.conference.isLocalAudioMuted()) {
                 // If there's a shared video with the volume "on" and we aren't
                 // the video owner, we warn the user
                 // that currently it's not possible to unmute.

react/features/toolbox/functions.native.js  (+4, -6)

@@ -3,7 +3,8 @@
 import type { Dispatch } from 'redux';
 
 import { appNavigate } from '../app';
-import { getLocalAudioTrack, getLocalVideoTrack } from '../base/tracks';
+import { MEDIA_TYPE } from '../base/media';
+import { isLocalTrackMuted } from '../base/tracks';
 
 /**
  * Maps redux actions to {@link Toolbox} (React {@code Component}) props.
@@ -58,9 +59,6 @@ export function abstractMapStateToProps(state: Object): Object {
     const tracks = state['features/base/tracks'];
     const { visible } = state['features/toolbox'];
 
-    const audioTrack = getLocalAudioTrack(tracks);
-    const videoTrack = getLocalVideoTrack(tracks);
-
     return {
         /**
         * Flag showing whether audio is muted.
@@ -68,7 +66,7 @@ export function abstractMapStateToProps(state: Object): Object {
         * @protected
         * @type {boolean}
        */
-        _audioMuted: !audioTrack || audioTrack.muted,
+        _audioMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO),
 
         /**
         * Flag showing whether video is muted.
@@ -76,7 +74,7 @@ export function abstractMapStateToProps(state: Object): Object {
        * @protected
        * @type {boolean}
        */
-        _videoMuted: !videoTrack || videoTrack.muted,
+        _videoMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO),
 
         /**
        * Flag showing whether toolbox is visible.

react/features/toolbox/middleware.js  (+59, -41)

@@ -1,11 +1,11 @@
 /* @flow */
 
 import {
+    MEDIA_TYPE,
     SET_AUDIO_AVAILABLE,
-    SET_AUDIO_MUTED,
-    SET_VIDEO_AVAILABLE,
-    SET_VIDEO_MUTED } from '../base/media';
+    SET_VIDEO_AVAILABLE } from '../base/media';
 import { MiddlewareRegistry } from '../base/redux';
+import { isLocalTrackMuted, TRACK_UPDATED } from '../base/tracks';
 
 import { setToolbarButton } from './actions';
 import { CLEAR_TOOLBOX_TIMEOUT, SET_TOOLBOX_TIMEOUT } from './actionTypes';
@@ -37,66 +37,84 @@ MiddlewareRegistry.register(store => next => action => {
         break;
     }
 
-    case SET_AUDIO_AVAILABLE:
-    case SET_AUDIO_MUTED: {
-        return _setAudioAvailableOrMuted(store, next, action);
+    case SET_AUDIO_AVAILABLE: {
+        return _setMediaAvailableOrMuted(store, next, action);
+    }
+
+    case SET_VIDEO_AVAILABLE: {
+        return _setMediaAvailableOrMuted(store, next, action);
+    }
+
+    case TRACK_UPDATED: {
+        if (action.track.jitsiTrack.isLocal()) {
+            return _setMediaAvailableOrMuted(store, next, action);
+        }
+        break;
     }
 
-    case SET_VIDEO_AVAILABLE:
-    case SET_VIDEO_MUTED:
-        return _setVideoAvailableOrMuted(store, next, action);
     }
 
     return next(action);
 });
 
 /**
- * Adjusts the state of toolbar's microphone button.
+ * Adjusts the state of toolbar's microphone or camera button.
 *
 * @param {Store} store - The Redux store instance.
 * @param {Function} next - The redux function to continue dispatching the
 * specified {@code action} in the specified {@code store}.
- * @param {Object} action - Either SET_AUDIO_AVAILABLE or SET_AUDIO_MUTED.
+ * @param {Object} action - SET_AUDIO_AVAILABLE, SET_VIDEO_AVAILABLE or
+ * TRACK_UPDATED.
 *
 * @returns {*}
 */
-function _setAudioAvailableOrMuted({ dispatch, getState }, next, action) {
+function _setMediaAvailableOrMuted({ dispatch, getState }, next, action) {
     const result = next(action);
 
-    const { available, muted } = getState()['features/base/media'].audio;
-    const i18nKey = available ? 'mute' : 'micDisabled';
+    let mediaType;
 
-    dispatch(setToolbarButton('microphone', {
-        enabled: available,
-        i18n: `[content]toolbar.${i18nKey}`,
-        toggled: available ? muted : true
-    }));
+    switch (action.type) {
+    case SET_AUDIO_AVAILABLE: {
+        mediaType = MEDIA_TYPE.AUDIO;
+        break;
+    }
 
-    return result;
-}
+    case SET_VIDEO_AVAILABLE: {
+        mediaType = MEDIA_TYPE.VIDEO;
+        break;
+    }
 
-/**
- * Adjusts the state of toolbar's camera button.
- *
- * @param {Store} store - The redux store.
- * @param {Function} next - The redux function to continue dispatching the
- * specified {@code action} in the specified {@code store}.
- * @param {Object} action - Either {@link SET_VIDEO_AVAILABLE} or
- * {@link SET_VIDEO_MUTED}.
- * @returns {Object} The new state that is the result of the reduction of the
- * specified {@code action}.
- */
-function _setVideoAvailableOrMuted({ dispatch, getState }, next, action) {
-    const result = next(action);
+    case TRACK_UPDATED: {
+        mediaType
+            = action.track.jitsiTrack.isAudioTrack()
+                ? MEDIA_TYPE.AUDIO : MEDIA_TYPE.VIDEO;
+        break;
+    }
 
-    const { available, muted } = getState()['features/base/media'].video;
-    const i18nKey = available ? 'videomute' : 'cameraDisabled';
+    default: {
+        throw new Error(`Unsupported action ${action}`);
+    }
+
+    }
 
-    dispatch(setToolbarButton('camera', {
-        enabled: available,
-        i18n: `[content]toolbar.${i18nKey}`,
-        toggled: available ? muted : true
-    }));
+    const mediaState = getState()['features/base/media'];
+    const { available }
+        = mediaType === MEDIA_TYPE.AUDIO
+            ? mediaState.audio : mediaState.video;
+    const i18nKey
+        = mediaType === MEDIA_TYPE.AUDIO
+            ? available ? 'mute' : 'micDisabled'
+            : available ? 'videomute' : 'cameraDisabled';
+
+    const tracks = getState()['features/base/tracks'];
+    const muted = isLocalTrackMuted(tracks, mediaType);
+
+    dispatch(setToolbarButton(
+        mediaType === MEDIA_TYPE.AUDIO ? 'microphone' : 'camera', {
+            enabled: available,
+            i18n: `[content]toolbar.${i18nKey}`,
+            toggled: available ? muted : true
+        }));
 
     return result;
 }
