
feat(conference, toolbox, API): get rid of '{audio,video}Muted' flags

* ref: video muted state

Get rid of 'videoMuted' flag in conference.js

* ref: audio muted state

Get rid of 'audioMuted' flag in conference.js

* fix(conference.js|API): early audio/video muted updates

* ref(conference.js): rename isVideoMuted

Rename isVideoMuted to isLocalVideoMuted to be consistent with
isLocalAudioMuted.

* doc|style(conference.js): comments and space after if

* ref: move 'setTrackMuted' to functions

* fix(tracks/middleware): no-lonely-if

* ref(features/toolbox): get rid of last argument

* ref(defaultToolbarButtons): rename var
j8 · Paweł Domas · 7 years ago
parent commit 99ce46cfa8
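In summary, the local audio/video muted state is no longer tracked by ad-hoc flags on the conference object: 'features/base/media' in redux holds what the user asked for, 'features/base/tracks' holds the actual track state, and conference.js only reads them back via isLocalAudioMuted()/isLocalVideoMuted(). A minimal sketch of the resulting mute path, pieced together from the diffs below (comments mark what is inferred rather than shown):

    // Toolbar shortcut or external API entry point (conference.js):
    APP.conference.toggleAudioMuted(/* showUI */ true);

    // conference.js no longer flips an 'audioMuted' flag; it dispatches to redux:
    function muteLocalAudio(muted) {
        APP.store.dispatch(setAudioMuted(muted));
    }

    // base/tracks middleware picks up SET_AUDIO_MUTED and mutes the actual track:
    localTrack && setTrackMuted(localTrack.jitsiTrack, muted);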

conference.js  (+122, -128)

@@ -38 +38 @@
     isFatalJitsiConnectionError
 } from './react/features/base/lib-jitsi-meet';
 import {
+    isVideoMutedByUser,
+    MEDIA_TYPE,
     setAudioAvailable,
-    setVideoAvailable
+    setAudioMuted,
+    setVideoAvailable,
+    setVideoMuted
 } from './react/features/base/media';
 import {
     localParticipantConnectionStatusChanged,
@@ -54 +58 @@
 } from './react/features/base/participants';
 import {
     createLocalTracks,
+    isLocalTrackMuted,
     replaceLocalTrack,
     trackAdded,
     trackRemoved
@@ -87 +92 @@

 let room;
 let connection;
-let localAudio, localVideo;

 /*
  * Logic to open a desktop picker put on the window global for
@@ -134 +138 @@
  * @param {string} value new value
  */
 function sendData(command, value) {
-    if(!room) {
+    if (!room) {
         return;
     }

@@ -184 +188 @@
 /**
  * Mute or unmute local audio stream if it exists.
  * @param {boolean} muted - if audio stream should be muted or unmuted.
- *
- * @returns {Promise} resolved in case mute/unmute operations succeeds or
- * rejected with an error if something goes wrong. It is expected that often
- * the error will be of the {@link JitsiTrackError} type, but it's not
- * guaranteed.
  */
 function muteLocalAudio(muted) {
-    return muteLocalMedia(localAudio, muted);
-}
-
-/**
- * Mute or unmute local media stream if it exists.
- * @param {JitsiLocalTrack} localTrack
- * @param {boolean} muted
- *
- * @returns {Promise} resolved in case mute/unmute operations succeeds or
- * rejected with an error if something goes wrong. It is expected that often
- * the error will be of the {@link JitsiTrackError} type, but it's not
- * guaranteed.
- */
-function muteLocalMedia(localTrack, muted) {
-    if (!localTrack) {
-        return Promise.resolve();
-    }
-
-    const method = muted ? 'mute' : 'unmute';
-
-    return localTrack[method]();
+    APP.store.dispatch(setAudioMuted(muted));
 }

 /**
  * Mute or unmute local video stream if it exists.
  * @param {boolean} muted if video stream should be muted or unmuted.
  *
- * @returns {Promise} resolved in case mute/unmute operations succeeds or
- * rejected with an error if something goes wrong. It is expected that often
- * the error will be of the {@link JitsiTrackError} type, but it's not
- * guaranteed.
  */
 function muteLocalVideo(muted) {
-    return muteLocalMedia(localVideo, muted);
+    APP.store.dispatch(setVideoMuted(muted));
 }

 /**
@@ -458 +433 @@
      */
     _localTracksInitialized: false,
     isModerator: false,
-    audioMuted: false,
-    videoMuted: false,
     isSharingScreen: false,
     /**
      * Indicates if the desktop sharing functionality has been enabled.
@@ -491 +464 @@
      */
    isDominantSpeaker: false,

+    /**
+     * The local audio track (if any).
+     * FIXME tracks from redux store should be the single source of truth
+     * @type {JitsiLocalTrack|null}
+     */
+    localAudio: null,
+
+    /**
+     * The local video track (if any).
+     * FIXME tracks from redux store should be the single source of truth, but
+     * more refactoring is required around screen sharing ('localVideo' usages).
+     * @type {JitsiLocalTrack|null}
+     */
+    localVideo: null,
+
     /**
      * Creates local media tracks and connects to a room. Will show error
      * dialogs in case accessing the local microphone and/or camera failed. Will
@@ -655 +643 @@
     init(options) {
         this.roomName = options.roomName;
         // attaches global error handler, if there is already one, respect it
-        if(JitsiMeetJS.getGlobalOnErrorHandler){
+        if (JitsiMeetJS.getGlobalOnErrorHandler){
             var oldOnErrorHandler = window.onerror;
             window.onerror = function (message, source, lineno, colno, error) {
                 JitsiMeetJS.getGlobalOnErrorHandler(
                     message, source, lineno, colno, error);

-                if(oldOnErrorHandler)
+                if (oldOnErrorHandler)
                     oldOnErrorHandler(message, source, lineno, colno, error);
             };

@@ -671 +659 @@
             JitsiMeetJS.getGlobalOnErrorHandler(
                     null, null, null, null, event.reason);

-                if(oldOnUnhandledRejection)
+                if (oldOnUnhandledRejection)
                     oldOnUnhandledRejection(event);
             };
         }
@@ -690 +678 @@
                     });
             }).then(([tracks, con]) => {
                 tracks.forEach(track => {
-                    if (track.isAudioTrack() && this.audioMuted) {
+                    if (track.isAudioTrack() && this.isLocalAudioMuted()) {
                         track.mute();
-                    } else if (track.isVideoTrack() && this.videoMuted) {
+                    } else if (track.isVideoTrack()
+                                    && this.isLocalVideoMuted()) {
                         track.mute();
                     }
                 });
@@ -731 +720 @@
                 // to the conference
                 if (!tracks.find((t) => t.isAudioTrack())) {
                     this.setAudioMuteStatus(true);
-                    APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
                 }

                 if (!tracks.find((t) => t.isVideoTrack())) {
                     this.setVideoMuteStatus(true);
-                    APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
                 }

                 this._initDeviceList();
@@ -759 +746 @@
     isLocalId(id) {
         return this.getMyUserId() === id;
     },
+
+    /**
+     * Tells whether the local video is muted or not.
+     * @return {boolean}
+     */
+    isLocalVideoMuted() {
+        // If the tracks are not ready, read from base/media state
+        return this._localTracksInitialized
+            ? isLocalTrackMuted(
+                APP.store.getState()['features/base/tracks'],
+                MEDIA_TYPE.VIDEO)
+            : isVideoMutedByUser(APP.store);
+    },
+
     /**
      * Simulates toolbar button click for audio mute. Used by shortcuts and API.
      * @param {boolean} mute true for mute and false for unmute.
@@ -768 +769 @@
     muteAudio(mute, showUI = true) {
         // Not ready to modify track's state yet
         if (!this._localTracksInitialized) {
+            // This will only modify base/media.audio.muted which is then synced
+            // up with the track at the end of local tracks initialization.
+            muteLocalAudio(mute);
             this.setAudioMuteStatus(mute);
+
             return;
-        } else if (localAudio && localAudio.isMuted() === mute) {
+        } else if (this.isLocalAudioMuted() === mute) {
             // NO-OP
             return;
         }

-        const maybeShowErrorDialog = (error) => {
-            if (showUI) {
-                APP.UI.showMicErrorNotification(error);
-            }
-        };
-
-        if (!localAudio && this.audioMuted && !mute) {
+        if (!this.localAudio && !mute) {
             createLocalTracks({ devices: ['audio'] }, false)
                 .then(([audioTrack]) => audioTrack)
                 .catch(error => {
-                    maybeShowErrorDialog(error);
+                    if (showUI) {
+                        APP.UI.showMicErrorNotification(error);
+                    }

                     // Rollback the audio muted status by using null track
                     return null;
                 })
                 .then(audioTrack => this.useAudioStream(audioTrack));
         } else {
-            const oldMutedStatus = this.audioMuted;
-
-            muteLocalAudio(mute)
-                .catch(error => {
-                    maybeShowErrorDialog(error);
-                    this.setAudioMuteStatus(oldMutedStatus);
-                    APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
-                });
+            muteLocalAudio(mute);
         }
     },
     /**
@@ -807 +801 @@
      * @returns {boolean}
      */
     isLocalAudioMuted() {
-        return this.audioMuted;
+        // If the tracks are not ready, read from base/media state
+        return this._localTracksInitialized
+            ? isLocalTrackMuted(
+                APP.store.getState()['features/base/tracks'],
+                MEDIA_TYPE.AUDIO)
+            : Boolean(
+                APP.store.getState()['features/base/media'].audio.muted);
     },
     /**
      * Simulates toolbar button click for audio mute. Used by shortcuts
@@ -816 +816 @@
      * dialogs in case of media permissions error.
      */
     toggleAudioMuted(showUI = true) {
-        this.muteAudio(!this.audioMuted, showUI);
+        this.muteAudio(!this.isLocalAudioMuted(), showUI);
     },
     /**
      * Simulates toolbar button click for video mute. Used by shortcuts and API.
@@ -825 +825 @@
      * dialogs in case of media permissions error.
      */
     muteVideo(mute, showUI = true) {
-        // Not ready to modify track's state yet
+        // If not ready to modify track's state yet adjust the base/media
         if (!this._localTracksInitialized) {
+            // This will only modify base/media.video.muted which is then synced
+            // up with the track at the end of local tracks initialization.
+            muteLocalVideo(mute);
             this.setVideoMuteStatus(mute);

             return;
-        } else if (localVideo && localVideo.isMuted() === mute) {
+        } else if (this.isLocalVideoMuted() === mute) {
             // NO-OP
             return;
         }
@@ -841 +844 @@
             }
         };

-        if (!localVideo && this.videoMuted && !mute) {
+        // FIXME it is possible to queue this task twice, but it's not causing
+        // any issues. Specifically this can happen when the previous
+        // get user media call is blocked on "ask user for permissions" dialog.
+        if (!this.localVideo && !mute) {
             // Try to create local video if there wasn't any.
             // This handles the case when user joined with no video
             // (dismissed screen sharing screen or in audio only mode), but
@@ -861 +867 @@
                 })
                 .then(videoTrack => this.useVideoStream(videoTrack));
         } else {
-            const oldMutedStatus = this.videoMuted;
-
-            muteLocalVideo(mute)
-                .catch(error => {
-                    maybeShowErrorDialog(error);
-                    this.setVideoMuteStatus(oldMutedStatus);
-                    APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
-                });
+            // FIXME show error dialog if it fails (should be handled by react)
+            muteLocalVideo(mute);
         }
     },
     /**
@@ -877 +877 @@
      * dialogs in case of media permissions error.
      */
     toggleVideoMuted(showUI = true) {
-        this.muteVideo(!this.videoMuted, showUI);
+        this.muteVideo(!this.isLocalVideoMuted(), showUI);
     },
     /**
      * Retrieve list of conference participants (without local user).
@@ -1202 +1202 @@

     _getConferenceOptions() {
         let options = config;
-        if(config.enableRecording && !config.recordingType) {
+        if (config.enableRecording && !config.recordingType) {
             options.recordingType = (config.hosts &&
                 (typeof config.hosts.jirecon != "undefined"))?
                 "jirecon" : "colibri";
@@ -1219 +1219 @@
      */
     useVideoStream(newStream) {
         return APP.store.dispatch(
-            replaceLocalTrack(localVideo, newStream, room))
+            replaceLocalTrack(this.localVideo, newStream, room))
             .then(() => {
-                localVideo = newStream;
+                this.localVideo = newStream;
+
                 if (newStream) {
-                    this.setVideoMuteStatus(newStream.isMuted());
                     this.isSharingScreen = newStream.videoType === 'desktop';

                     APP.UI.addLocalStream(newStream);
                 } else {
-                    // No video is treated the same way as being video muted
-                    this.setVideoMuteStatus(true);
                     this.isSharingScreen = false;
                 }
-                APP.UI.setVideoMuted(this.getMyUserId(), this.videoMuted);
+                this.setVideoMuteStatus(this.isLocalVideoMuted());
                 APP.UI.updateDesktopSharingButtons();
             });
     },
@@ -1245 +1243 @@
      */
     useAudioStream(newStream) {
         return APP.store.dispatch(
-            replaceLocalTrack(localAudio, newStream, room))
+            replaceLocalTrack(this.localAudio, newStream, room))
             .then(() => {
-                localAudio = newStream;
-
+                this.localAudio = newStream;
                 if (newStream) {
-                    this.setAudioMuteStatus(newStream.isMuted());
                     APP.UI.addLocalStream(newStream);
-                } else {
-                    // No audio is treated the same way as being audio muted
-                    this.setAudioMuteStatus(true);
                 }
-                APP.UI.setAudioMuted(this.getMyUserId(), this.audioMuted);
+                this.setAudioMuteStatus(this.isLocalAudioMuted());
             });
     },

@@ -1339 +1332 @@
                     JitsiMeetJS.analytics.sendEvent(
                         'conference.sharingDesktop.stop');
                     logger.log('switched back to local video');
-                    if (!localVideo && wasVideoMuted) {
+                    if (!this.localVideo && wasVideoMuted) {
                         return Promise.reject('No local video to be muted!');
-                    } else if (wasVideoMuted && localVideo) {
-                        return localVideo.mute();
+                    } else if (wasVideoMuted && this.localVideo) {
+                        return this.localVideo.mute();
                     }
                 })
                 .catch(error => {
@@ -1416 +1409 @@
     _createDesktopTrack(options = {}) {
         let externalInstallation = false;
         let DSExternalInstallationInProgress = false;
-        const didHaveVideo = Boolean(localVideo);
-        const wasVideoMuted = this.videoMuted;
+        const didHaveVideo = Boolean(this.localVideo);
+        const wasVideoMuted = this.isLocalVideoMuted();

         return createLocalTracks({
             desktopSharingSources: options.desktopSharingSources,
@@ -1671 +1664 @@
         });

         room.on(ConferenceEvents.TRACK_ADDED, (track) => {
-            if(!track || track.isLocal())
+            if (!track || track.isLocal())
                 return;

             APP.store.dispatch(trackAdded(track));
         });

         room.on(ConferenceEvents.TRACK_REMOVED, (track) => {
-            if(!track || track.isLocal())
+            if (!track || track.isLocal())
                 return;

             APP.store.dispatch(trackRemoved(track));
         });

         room.on(ConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED, (id, lvl) => {
-            if(this.isLocalId(id) && localAudio && localAudio.isMuted()) {
+            if (this.isLocalId(id)
+                && this.localAudio && this.localAudio.isMuted()) {
                 lvl = 0;
             }

-            if(config.debug)
-            {
+            if (config.debug) {
                 this.audioLevelsMap[id] = lvl;
-                if(config.debugAudioLevels)
+                if (config.debugAudioLevels)
                     logger.log("AudioLevel:" + id + "/" + lvl);
             }

@@ -1866 +1859 @@
                     this.deviceChangeListener);

             // stop local video
-            if (localVideo) {
-                localVideo.dispose();
+            if (this.localVideo) {
+                this.localVideo.dispose();
+                this.localVideo = null;
             }
             // stop local audio
-            if (localAudio) {
-                localAudio.dispose();
+            if (this.localAudio) {
+                this.localAudio.dispose();
+                this.localAudio = null;
             }
         });

@@ -2215 +2210 @@
                         // storage and settings menu. This is a workaround until
                         // getConstraints() method will be implemented
                         // in browsers.
-                        if (localAudio) {
+                        if (this.localAudio) {
                             APP.settings.setMicDeviceId(
-                                localAudio.getDeviceId(), false);
+                                this.localAudio.getDeviceId(), false);
                         }

-                        if (localVideo) {
+                        if (this.localVideo) {
                             APP.settings.setCameraDeviceId(
-                                localVideo.getDeviceId(), false);
+                                this.localVideo.getDeviceId(), false);
                         }

                         mediaDeviceHelper.setCurrentMediaDevices(devices);
@@ -2263 +2258 @@

         let newDevices =
             mediaDeviceHelper.getNewMediaDevicesAfterDeviceListChanged(
-                devices, this.isSharingScreen, localVideo, localAudio);
+                devices,
+                this.isSharingScreen,
+                this.localVideo,
+                this.localAudio);
         let promises = [];
-        let audioWasMuted = this.audioMuted;
-        let videoWasMuted = this.videoMuted;
+        let audioWasMuted = this.isLocalAudioMuted();
+        let videoWasMuted = this.isLocalVideoMuted();
         let availableAudioInputDevices =
             mediaDeviceHelper.getDevicesFromListByKind(devices, 'audioinput');
         let availableVideoInputDevices =
@@ -2323 +2321 @@

         // The audio functionality is considered available if there are any
         // audio devices detected or if the local audio stream already exists.
-        const available = audioDeviceCount > 0 || Boolean(localAudio);
+        const available = audioDeviceCount > 0 || Boolean(this.localAudio);

         logger.debug(
             'Microphone button enabled: ' + available,
-            'local audio: ' + localAudio,
+            'local audio: ' + this.localAudio,
             'audio devices: ' + audioMediaDevices,
             'device count: ' + audioDeviceCount);

@@ -2348 +2346 @@
         // active which could be either screensharing stream or a video track
         // created before the permissions were rejected (through browser
         // config).
-        const available = videoDeviceCount > 0 || Boolean(localVideo);
+        const available = videoDeviceCount > 0 || Boolean(this.localVideo);

         logger.debug(
             'Camera button enabled: ' + available,
-            'local video: ' + localVideo,
+            'local video: ' + this.localVideo,
             'video devices: ' + videoMediaDevices,
             'device count: ' + videoDeviceCount);

@@ -2393 +2391 @@
      * NOTE: Should be used after conference.init
      */
     logEvent(name, value, label) {
-        if(JitsiMeetJS.analytics) {
+        if (JitsiMeetJS.analytics) {
             JitsiMeetJS.analytics.sendEvent(name, {value, label});
         }
-        if(room) {
+        if (room) {
             room.sendApplicationLog(JSON.stringify({name, value, label}));
         }
     },
@@ -2553 +2551 @@
      * track or the source id is not available, undefined will be returned.
      */
     getDesktopSharingSourceId() {
-        return localVideo.sourceId;
+        return this.localVideo.sourceId;
     },

     /**
@@ -2565 +2563 @@
      * returned.
      */
     getDesktopSharingSourceType() {
-        return localVideo.sourceType;
+        return this.localVideo.sourceType;
     },

     /**
@@ -2574 +2572 @@
      * @param {boolean} muted - New muted status.
      */
     setVideoMuteStatus(muted) {
-        if (this.videoMuted !== muted) {
-            this.videoMuted = muted;
-            APP.API.notifyVideoMutedStatusChanged(muted);
-        }
+        APP.UI.setVideoMuted(this.getMyUserId(), muted);
+        APP.API.notifyVideoMutedStatusChanged(muted);
     },

     /**
@@ -2586 +2582 @@
      * @param {boolean} muted - New muted status.
      */
     setAudioMuteStatus(muted) {
-        if (this.audioMuted !== muted) {
-            this.audioMuted = muted;
-            APP.API.notifyAudioMutedStatusChanged(muted);
-        }
-    },
+        APP.UI.setAudioMuted(this.getMyUserId(), muted);
+        APP.API.notifyAudioMutedStatusChanged(muted);
+    }
 };

modules/UI/UI.js  (+0, -3)

@@ -24 +24 @@
 import { debounce } from "../util/helpers";

 import { updateDeviceList } from '../../react/features/base/devices';
-import { setAudioMuted, setVideoMuted } from '../../react/features/base/media';
 import {
     openDeviceSelectionDialog
 } from '../../react/features/device-selection';
@@ -669 +668 @@
 UI.setAudioMuted = function (id, muted) {
     VideoLayout.onAudioMute(id, muted);
     if (APP.conference.isLocalId(id)) {
-        APP.store.dispatch(setAudioMuted(muted));
         APP.conference.updateAudioIconEnabled();
     }
 };
@@ -680 +678 @@
 UI.setVideoMuted = function (id, muted) {
     VideoLayout.onVideoMute(id, muted);
     if (APP.conference.isLocalId(id)) {
-        APP.store.dispatch(setVideoMuted(muted));
         APP.conference.updateVideoIconEnabled();
     }
 };

react/features/base/media/functions.js  (+14, -0)

@@ -1 +1 @@
+import { VIDEO_MUTISM_AUTHORITY } from './constants';
+
 /**
  * Determines whether a specific videoTrack should be rendered.
  *
@@ -14 +16 @@
             && !videoTrack.muted
             && (!waitForVideoStarted || videoTrack.videoStarted));
 }
+
+/**
+ * Checks if video is currently muted by the user authority.
+ *
+ * @param {Object} store - The redux store instance.
+ * @returns {boolean}
+ */
+export function isVideoMutedByUser({ getState }) {
+    return Boolean(
+        getState()['features/base/media'] // eslint-disable-line no-bitwise
+            .video.muted & VIDEO_MUTISM_AUTHORITY.USER);
+}
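isVideoMutedByUser() treats video.muted as a bit field of mute "authorities", so it reports only a mute requested by the user and ignores, for example, an audio-only mute. A hedged illustration with made-up bit values (the real ones live in VIDEO_MUTISM_AUTHORITY in './constants', which this diff does not show):

    // Hypothetical authority bits, for illustration only.
    const USER = 0x1;
    const AUDIO_ONLY = 0x2;

    let muted = AUDIO_ONLY;        // video muted, but not by the user
    Boolean(muted & USER);         // false -> isVideoMutedByUser() would be false

    muted |= USER;                 // the user mutes as well
    Boolean(muted & USER);         // true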

react/features/base/media/middleware.js  (+2, -2)

@@ -93 +93 @@
  * @private
  * @returns {void}
  */
-function _syncTrackMutedState({ dispatch, getState }, track) {
+function _syncTrackMutedState({ getState }, track) {
     const state = getState()['features/base/media'];
     const muted = Boolean(state[track.mediaType].muted);

@@ -104 +104 @@
     // fired before track gets to state.
     if (track.muted !== muted) {
         track.muted = muted;
-        dispatch(setTrackMuted(track.jitsiTrack, muted));
+        setTrackMuted(track.jitsiTrack, muted);
     }
 }

react/features/base/tracks/actions.js  (+0, -47)

@@ -348 +348 @@
     };
 }

-/**
- * Mutes or unmutes a specific <tt>JitsiLocalTrack</tt>. If the muted state of
- * the specified <tt>track</tt> is already in accord with the specified
- * <tt>muted</tt> value, then does nothing. In case the actual muting/unmuting
- * fails, a rollback action will be dispatched to undo the muting/unmuting.
- *
- * @param {JitsiLocalTrack} track - The <tt>JitsiLocalTrack</tt> to mute or
- * unmute.
- * @param {boolean} muted - If the specified <tt>track</tt> is to be muted, then
- * <tt>true</tt>; otherwise, <tt>false</tt>.
- * @returns {Function}
- */
-export function setTrackMuted(track, muted) {
-    return dispatch => {
-        muted = Boolean(muted); // eslint-disable-line no-param-reassign
-
-        if (track.isMuted() === muted) {
-            return Promise.resolve();
-        }
-
-        const f = muted ? 'mute' : 'unmute';
-
-        return track[f]().catch(error => {
-            console.error(`set track ${f} failed`, error);
-
-            if (navigator.product === 'ReactNative') {
-                // Synchronizing the state of base/tracks into the state of
-                // base/media is not required in React (and, respectively, React
-                // Native) because base/media expresses the app's and the user's
-                // desires/expectations/intents and base/tracks expresses
-                // practice/reality. Unfortunately, the old Web does not comply
-                // and/or does the opposite.
-                return;
-            }
-
-            const setMuted
-                = track.mediaType === MEDIA_TYPE.AUDIO
-                    ? setAudioMuted
-                    : setVideoMuted;
-
-            // FIXME The following disregards VIDEO_MUTISM_AUTHORITY (in the
-            // case of setVideoMuted, of course).
-            dispatch(setMuted(!muted));
-        });
-    };
-}
-
 /**
  * Returns true if the provided JitsiTrack should be rendered as a mirror.
  *

react/features/base/tracks/functions.js  (+42, -0)

@@ -155 +155 @@
 export function getTracksByMediaType(tracks, mediaType) {
     return tracks.filter(t => t.mediaType === mediaType);
 }
+
+/**
+ * Checks if the first local track in the given tracks set is muted.
+ *
+ * @param {Track[]} tracks - List of all tracks.
+ * @param {MEDIA_TYPE} mediaType - The media type of tracks to be checked.
+ * @returns {boolean} True if local track is muted or false if the track is
+ * unmuted or if there are no local tracks of the given media type in the given
+ * set of tracks.
+ */
+export function isLocalTrackMuted(tracks, mediaType) {
+    const track = getLocalTrack(tracks, mediaType);
+
+    return !track || track.muted;
+}
+
+/**
+ * Mutes or unmutes a specific <tt>JitsiLocalTrack</tt>. If the muted state of
+ * the specified <tt>track</tt> is already in accord with the specified
+ * <tt>muted</tt> value, then does nothing.
+ *
+ * @param {JitsiLocalTrack} track - The <tt>JitsiLocalTrack</tt> to mute or
+ * unmute.
+ * @param {boolean} muted - If the specified <tt>track</tt> is to be muted, then
+ * <tt>true</tt>; otherwise, <tt>false</tt>.
+ * @returns {Promise}
+ */
+export function setTrackMuted(track, muted) {
+    muted = Boolean(muted); // eslint-disable-line no-param-reassign
+
+    if (track.isMuted() === muted) {
+        return Promise.resolve();
+    }
+
+    const f = muted ? 'mute' : 'unmute';
+
+    return track[f]().catch(error => {
+
+        // FIXME emit mute failed, so that the app can show error dialog
+        console.error(`set track ${f} failed`, error);
+    });
+}
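The two helpers added above are what conference.js and the middlewares now share. A small usage sketch (the 'tracks' state shape is the same one used throughout this commit; 'someLocalTrack' is a placeholder name for a track object taken from that state):

    const tracks = APP.store.getState()['features/base/tracks'];

    // True when there is no local audio track at all, or when it is muted.
    const audioMuted = isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO);

    // Applies the desired state to a JitsiLocalTrack; note that the returned
    // promise resolves even if mute/unmute fails (the error is only logged).
    setTrackMuted(someLocalTrack.jitsiTrack, true).then(() => {
        // the UI and toolbar catch up via TRACK_UPDATED
    });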

react/features/base/tracks/middleware.js  (+7, -81)

@@ -6 +6 @@
     SET_AUDIO_MUTED,
     SET_CAMERA_FACING_MODE,
     SET_VIDEO_MUTED,
-    setAudioMuted,
-    setVideoMuted,
     TOGGLE_CAMERA_FACING_MODE,
     toggleCameraFacingMode
 } from '../media';
 import { MiddlewareRegistry } from '../redux';

-import { setTrackMuted } from './actions';
 import { TRACK_ADDED, TRACK_REMOVED, TRACK_UPDATED } from './actionTypes';
-import { getLocalTrack } from './functions';
+import { getLocalTrack, setTrackMuted } from './functions';

 declare var APP: Object;

@@ -108 +105 @@
             const participantID = jitsiTrack.getParticipantId();
             const isVideoTrack = jitsiTrack.isVideoTrack();

-            if (jitsiTrack.isLocal()) {
-                if (isVideoTrack) {
+            if (isVideoTrack) {
+                if (jitsiTrack.isLocal()) {
                     APP.conference.setVideoMuteStatus(muted);
                 } else {
-                    APP.conference.setAudioMuteStatus(muted);
+                    APP.UI.setVideoMuted(participantID, muted);
                 }
-            }
-
-            if (isVideoTrack) {
-                APP.UI.setVideoMuted(participantID, muted);
                 APP.UI.onPeerVideoTypeChanged(
                     participantID,
                     jitsiTrack.videoType);
+            } else if (jitsiTrack.isLocal()) {
+                APP.conference.setAudioMuteStatus(muted);
             } else {
                 APP.UI.setAudioMuted(participantID, muted);
             }
-
-            // XXX The following synchronizes the state of base/tracks into the
-            // state of base/media. Which is not required in React (and,
-            // respectively, React Native) because base/media expresses the
-            // app's and the user's desires/expectations/intents and base/tracks
-            // expresses practice/reality. Unfortunately, the old Web does not
-            // comply and/or does the opposite. Hence, the following:
-            return _trackUpdated(store, next, action);
         }

     }
@@ -169 +156 @@
 function _setMuted(store, { muted }, mediaType: MEDIA_TYPE) {
     const localTrack = _getLocalTrack(store, mediaType);

-    localTrack && store.dispatch(setTrackMuted(localTrack.jitsiTrack, muted));
-}
-
-/**
- * Intercepts the action <tt>TRACK_UPDATED</tt> in order to synchronize the
- * muted states of the local tracks of features/base/tracks with the muted
- * states of features/base/media.
- *
- * @param {Store} store - The redux store in which the specified <tt>action</tt>
- * is being dispatched.
- * @param {Dispatch} next - The redux dispatch function to dispatch the
- * specified <tt>action</tt> to the specified <tt>store</tt>.
- * @param {Action} action - The redux action <tt>TRACK_UPDATED</tt> which is
- * being dispatched in the specified <tt>store</tt>.
- * @private
- * @returns {Object} The new state that is the result of the reduction of the
- * specified <tt>action</tt>.
- */
-function _trackUpdated(store, next, action) {
-    // Determine the muted state of the local track before the update.
-    const track = action.track;
-    let mediaType;
-    let oldMuted;
-
-    if ('muted' in track) {
-        // XXX The return value of JitsiTrack.getType() is of type MEDIA_TYPE
-        // that happens to be compatible with the type MEDIA_TYPE defined by
-        // jitsi-meet.
-        mediaType = track.jitsiTrack.getType();
-
-        const localTrack = _getLocalTrack(store, mediaType);
-
-        if (localTrack) {
-            oldMuted = localTrack.muted;
-        }
-    }
-
-    const result = next(action);
-
-    if (typeof oldMuted !== 'undefined') {
-        // Determine the muted state of the local track after the update. If the
-        // muted states before and after the update differ, then the respective
-        // media state should by synchronized.
-        const localTrack = _getLocalTrack(store, mediaType);
-
-        if (localTrack) {
-            const newMuted = localTrack.muted;
-
-            if (oldMuted !== newMuted) {
-                switch (mediaType) {
-                case MEDIA_TYPE.AUDIO:
-                    store.dispatch(setAudioMuted(newMuted));
-                    break;
-                case MEDIA_TYPE.VIDEO:
-                    store.dispatch(setVideoMuted(newMuted));
-                    break;
-                }
-            }
-        }
-    }
-
-    return result;
+    localTrack && setTrackMuted(localTrack.jitsiTrack, muted);
 }
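With _trackUpdated() removed, the web app no longer mirrors base/tracks changes back into base/media here; the sync is one-way (base/media expresses intent, base/tracks reality), and a failed mute is merely logged instead of being rolled back. A condensed sketch of what SET_AUDIO_MUTED now does in this middleware; the case statement routing the action into _setMuted() is pre-existing code not shown in the diff, so treat that routing as an assumption:

    // Assumed routing (not part of this diff):
    //   case SET_AUDIO_MUTED: _setMuted(store, action, MEDIA_TYPE.AUDIO); break;

    const localTrack
        = getLocalTrack(store.getState()['features/base/tracks'], MEDIA_TYPE.AUDIO);

    if (localTrack) {
        // Mutes the JitsiLocalTrack directly; no rollback dispatch on failure.
        setTrackMuted(localTrack.jitsiTrack, action.muted);
    }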

react/features/toolbox/defaultToolbarButtons.js  (+5, -4)

@@ -40 +40 @@
         isDisplayed: () => true,
         id: 'toolbar_button_camera',
         onClick() {
-            if (APP.conference.videoMuted) {
+            const newVideoMutedState = !APP.conference.isLocalVideoMuted();
+
+            if (newVideoMutedState) {
                 JitsiMeetJS.analytics.sendEvent('toolbar.video.enabled');
-                APP.UI.emitEvent(UIEvents.VIDEO_MUTED, false);
             } else {
                 JitsiMeetJS.analytics.sendEvent('toolbar.video.disabled');
-                APP.UI.emitEvent(UIEvents.VIDEO_MUTED, true);
             }
+            APP.UI.emitEvent(UIEvents.VIDEO_MUTED, newVideoMutedState);
         },
         popups: [
             {
@@ -290 +291 @@
         onClick() {
             const sharedVideoManager = APP.UI.getSharedVideoManager();

-            if (APP.conference.audioMuted) {
+            if (APP.conference.isLocalAudioMuted()) {
                 // If there's a shared video with the volume "on" and we aren't
                 // the video owner, we warn the user
                 // that currently it's not possible to unmute.

react/features/toolbox/functions.native.js  (+4, -6)

@@ -3 +3 @@
 import type { Dispatch } from 'redux';

 import { appNavigate } from '../app';
-import { getLocalAudioTrack, getLocalVideoTrack } from '../base/tracks';
+import { MEDIA_TYPE } from '../base/media';
+import { isLocalTrackMuted } from '../base/tracks';

 /**
  * Maps redux actions to {@link Toolbox} (React {@code Component}) props.
@@ -58 +59 @@
     const tracks = state['features/base/tracks'];
     const { visible } = state['features/toolbox'];

-    const audioTrack = getLocalAudioTrack(tracks);
-    const videoTrack = getLocalVideoTrack(tracks);
-
     return {
         /**
          * Flag showing whether audio is muted.
@@ -68 +66 @@
          * @protected
          * @type {boolean}
          */
-        _audioMuted: !audioTrack || audioTrack.muted,
+        _audioMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO),

         /**
          * Flag showing whether video is muted.
@@ -76 +74 @@
          * @protected
          * @type {boolean}
          */
-        _videoMuted: !videoTrack || videoTrack.muted,
+        _videoMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO),

         /**
          * Flag showing whether toolbox is visible.

react/features/toolbox/middleware.js  (+59, -41)

@@ -1 +1 @@
 /* @flow */

 import {
+    MEDIA_TYPE,
     SET_AUDIO_AVAILABLE,
-    SET_AUDIO_MUTED,
-    SET_VIDEO_AVAILABLE,
-    SET_VIDEO_MUTED } from '../base/media';
+    SET_VIDEO_AVAILABLE } from '../base/media';
 import { MiddlewareRegistry } from '../base/redux';
+import { isLocalTrackMuted, TRACK_UPDATED } from '../base/tracks';

 import { setToolbarButton } from './actions';
 import { CLEAR_TOOLBOX_TIMEOUT, SET_TOOLBOX_TIMEOUT } from './actionTypes';
@@ -37 +37 @@
         break;
     }

-    case SET_AUDIO_AVAILABLE:
-    case SET_AUDIO_MUTED: {
-        return _setAudioAvailableOrMuted(store, next, action);
+    case SET_AUDIO_AVAILABLE: {
+        return _setMediaAvailableOrMuted(store, next, action);
+    }
+
+    case SET_VIDEO_AVAILABLE: {
+        return _setMediaAvailableOrMuted(store, next, action);
+    }
+
+    case TRACK_UPDATED: {
+        if (action.track.jitsiTrack.isLocal()) {
+            return _setMediaAvailableOrMuted(store, next, action);
+        }
+        break;
     }

-    case SET_VIDEO_AVAILABLE:
-    case SET_VIDEO_MUTED:
-        return _setVideoAvailableOrMuted(store, next, action);
     }

     return next(action);
 });

 /**
- * Adjusts the state of toolbar's microphone button.
+ * Adjusts the state of toolbar's microphone or camera button.
  *
  * @param {Store} store - The Redux store instance.
  * @param {Function} next - The redux function to continue dispatching the
  * specified {@code action} in the specified {@code store}.
- * @param {Object} action - Either SET_AUDIO_AVAILABLE or SET_AUDIO_MUTED.
+ * @param {Object} action - SET_AUDIO_AVAILABLE, SET_VIDEO_AVAILABLE or
+ * TRACK_UPDATED.
  *
  * @returns {*}
  */
-function _setAudioAvailableOrMuted({ dispatch, getState }, next, action) {
+function _setMediaAvailableOrMuted({ dispatch, getState }, next, action) {
     const result = next(action);

-    const { available, muted } = getState()['features/base/media'].audio;
-    const i18nKey = available ? 'mute' : 'micDisabled';
+    let mediaType;

-    dispatch(setToolbarButton('microphone', {
-        enabled: available,
-        i18n: `[content]toolbar.${i18nKey}`,
-        toggled: available ? muted : true
-    }));
+    switch (action.type) {
+    case SET_AUDIO_AVAILABLE: {
+        mediaType = MEDIA_TYPE.AUDIO;
+        break;
+    }

-    return result;
-}
+    case SET_VIDEO_AVAILABLE: {
+        mediaType = MEDIA_TYPE.VIDEO;
+        break;
+    }

-/**
- * Adjusts the state of toolbar's camera button.
- *
- * @param {Store} store - The redux store.
- * @param {Function} next - The redux function to continue dispatching the
- * specified {@code action} in the specified {@code store}.
- * @param {Object} action - Either {@link SET_VIDEO_AVAILABLE} or
- * {@link SET_VIDEO_MUTED}.
- * @returns {Object} The new state that is the result of the reduction of the
- * specified {@code action}.
- */
-function _setVideoAvailableOrMuted({ dispatch, getState }, next, action) {
-    const result = next(action);
+    case TRACK_UPDATED: {
+        mediaType
+            = action.track.jitsiTrack.isAudioTrack()
+                ? MEDIA_TYPE.AUDIO : MEDIA_TYPE.VIDEO;
+        break;
+    }

-    const { available, muted } = getState()['features/base/media'].video;
-    const i18nKey = available ? 'videomute' : 'cameraDisabled';
+    default: {
+        throw new Error(`Unsupported action ${action}`);
+    }
+
+    }

-    dispatch(setToolbarButton('camera', {
-        enabled: available,
-        i18n: `[content]toolbar.${i18nKey}`,
-        toggled: available ? muted : true
-    }));
+    const mediaState = getState()['features/base/media'];
+    const { available }
+        = mediaType === MEDIA_TYPE.AUDIO
+            ? mediaState.audio : mediaState.video;
+    const i18nKey
+        = mediaType === MEDIA_TYPE.AUDIO
+            ? available ? 'mute' : 'micDisabled'
+            : available ? 'videomute' : 'cameraDisabled';
+
+    const tracks = getState()['features/base/tracks'];
+    const muted = isLocalTrackMuted(tracks, mediaType);
+
+    dispatch(setToolbarButton(
+        mediaType === MEDIA_TYPE.AUDIO ? 'microphone' : 'camera', {
+            enabled: available,
+            i18n: `[content]toolbar.${i18nKey}`,
+            toggled: available ? muted : true
+        }));

     return result;
 }
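Design note: the toolbar no longer listens to SET_AUDIO_MUTED/SET_VIDEO_MUTED at all; the buttons are refreshed on TRACK_UPDATED for local tracks, so 'toggled' reflects the actual track state rather than the requested base/media value. For a local audio track update, the dispatch above effectively reduces to (same values as computed in the new function, specialized to audio for readability):

    dispatch(setToolbarButton('microphone', {
        enabled: available,                 // features/base/media.audio.available
        i18n: `[content]toolbar.${available ? 'mute' : 'micDisabled'}`,
        toggled: available
            ? isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO)
            : true
    }));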
