浏览代码

feat(mute): Add analytics and console logs for audio/video mutes

j8
hristoterezov 7 年前
父节点
当前提交
0eddef4d62

+ 8
- 0
conference.js 查看文件

@@ -674,9 +674,13 @@ export default {
674 674
             }).then(([tracks, con]) => {
675 675
                 tracks.forEach(track => {
676 676
                     if (track.isAudioTrack() && this.isLocalAudioMuted()) {
677
+                        sendEvent('conference.audio.initiallyMuted');
678
+                        logger.log('Audio mute: initially muted');
677 679
                         track.mute();
678 680
                     } else if (track.isVideoTrack()
679 681
                                     && this.isLocalVideoMuted()) {
682
+                        sendEvent('conference.video.initiallyMuted');
683
+                        logger.log('Video mute: initially muted');
680 684
                         track.mute();
681 685
                     }
682 686
                 });
@@ -2260,6 +2264,8 @@ export default {
2260 2264
                     if (audioWasMuted ||
2261 2265
                         currentDevices.audioinput.length >
2262 2266
                         availableAudioInputDevices.length) {
2267
+                        sendEvent('deviceListChanged.audio.muted');
2268
+                        logger.log('Audio mute: device list changed');
2263 2269
                         muteLocalAudio(true);
2264 2270
                     }
2265 2271
 
@@ -2269,6 +2275,8 @@ export default {
2269 2275
                         (videoWasMuted ||
2270 2276
                             currentDevices.videoinput.length >
2271 2277
                                 availableVideoInputDevices.length)) {
2278
+                        sendEvent('deviceListChanged.video.muted');
2279
+                        logger.log('Video mute: device list changed');
2272 2280
                         muteLocalVideo(true);
2273 2281
                     }
2274 2282
                 }));

+ 7
- 0
modules/API/API.js 查看文件

@@ -2,10 +2,13 @@
2 2
 
3 3
 import * as JitsiMeetConferenceEvents from '../../ConferenceEvents';
4 4
 import { parseJWTFromURLParams } from '../../react/features/base/jwt';
5
+import { sendEvent } from '../../react/features/analytics';
5 6
 import { getJitsiMeetTransport } from '../transport';
6 7
 
7 8
 import { API_ID } from './constants';
8 9
 
10
+const logger = require('jitsi-meet-logger').getLogger(__filename);
11
+
9 12
 declare var APP: Object;
10 13
 
11 14
 /**
@@ -52,9 +55,13 @@ function initCommands() {
52 55
         'display-name':
53 56
             APP.conference.changeLocalDisplayName.bind(APP.conference),
54 57
         'toggle-audio': () => {
58
+            sendEvent('api.toggle.audio');
59
+            logger.log('Audio toggle: API command received');
55 60
             APP.conference.toggleAudioMuted(false /* no UI */);
56 61
         },
57 62
         'toggle-video': () => {
63
+            sendEvent('api.toggle.video');
64
+            logger.log('Video toggle: API command received');
58 65
             APP.conference.toggleVideoMuted(false /* no UI */);
59 66
         },
60 67
         'toggle-film-strip': APP.UI.toggleFilmstrip,

+ 4
- 2
modules/UI/shared_video/SharedVideo.js 查看文件

@@ -533,7 +533,8 @@ export default class SharedVideoManager {
533 533
         if (APP.conference.isLocalAudioMuted()
534 534
             && !this.mutedWithUserInteraction
535 535
             && !this.isSharedVideoVolumeOn()) {
536
-
536
+            sendEvent("sharedvideo.audio.unmuted");
537
+            logger.log('Shared video: audio unmuted');
537 538
             this.emitter.emit(UIEvents.AUDIO_MUTED, false, false);
538 539
             this.showMicMutedPopup(false);
539 540
         }
@@ -546,7 +547,8 @@ export default class SharedVideoManager {
546 547
     smartAudioMute() {
547 548
         if (!APP.conference.isLocalAudioMuted()
548 549
             && this.isSharedVideoVolumeOn()) {
549
-
550
+            sendEvent("sharedvideo.audio.muted");
551
+            logger.log('Shared video: audio muted');
550 552
             this.emitter.emit(UIEvents.AUDIO_MUTED, true, false);
551 553
             this.showMicMutedPopup(true);
552 554
         }

+ 7
- 1
modules/keyboardshortcut/keyboardshortcut.js 查看文件

@@ -4,6 +4,8 @@ import { toggleDialog } from '../../react/features/base/dialog';
4 4
 import { sendEvent } from '../../react/features/analytics';
5 5
 import { SpeakerStats } from '../../react/features/speaker-stats';
6 6
 
7
+const logger = require('jitsi-meet-logger').getLogger(__filename);
8
+
7 9
 /**
8 10
  * The reference to the shortcut dialogs when opened.
9 11
  */
@@ -28,6 +30,7 @@ function initGlobalShortcuts() {
28 30
     // register SPACE shortcut in two steps to insure visibility of help message
29 31
     KeyboardShortcut.registerShortcut(" ", null, function() {
30 32
         sendEvent("shortcut.talk.clicked");
33
+        logger.log('Talk shortcut pressed');
31 34
         APP.conference.muteAudio(true);
32 35
     });
33 36
     KeyboardShortcut._addShortcutToHelp("SPACE","keyboardShortcuts.pushToTalk");
@@ -119,8 +122,11 @@ const KeyboardShortcut = {
119 122
                 $(":focus").is("textarea"))) {
120 123
                 var key = self._getKeyboardKey(e).toUpperCase();
121 124
                 if(key === " ") {
122
-                    if(APP.conference.isLocalAudioMuted())
125
+                    if(APP.conference.isLocalAudioMuted()) {
126
+                        sendEvent("shortcut.talk.released");
127
+                        logger.log('Talk shortcut released');
123 128
                         APP.conference.muteAudio(false);
129
+                    }
124 130
                 }
125 131
             }
126 132
         };

+ 15
- 2
react/features/base/conference/actions.js 查看文件

@@ -1,5 +1,6 @@
1 1
 // @flow
2 2
 
3
+import { sendEvent } from '../../analytics';
3 4
 import { JitsiConferenceEvents } from '../lib-jitsi-meet';
4 5
 import { setAudioMuted, setVideoMuted } from '../media';
5 6
 import {
@@ -39,6 +40,8 @@ import { _addLocalTracksToConference } from './functions';
39 40
 
40 41
 import type { Dispatch } from 'redux';
41 42
 
43
+const logger = require('jitsi-meet-logger').getLogger(__filename);
44
+
42 45
 /**
43 46
  * Adds conference (event) listeners.
44 47
  *
@@ -69,6 +72,16 @@ function _addConferenceListeners(conference, dispatch) {
69 72
     conference.on(
70 73
         JitsiConferenceEvents.STARTED_MUTED,
71 74
         () => {
75
+            const audioMuted = Boolean(conference.startAudioMuted);
76
+            const videoMuted = Boolean(conference.startVideoMuted);
77
+
78
+            sendEvent(
79
+                `startmuted.server.audio.${audioMuted ? 'muted' : 'unmuted'}`);
80
+            sendEvent(
81
+                `startmuted.server.video.${videoMuted ? 'muted' : 'unmuted'}`);
82
+            logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${
83
+                videoMuted ? 'video' : ''}`);
84
+
72 85
             // XXX Jicofo tells lib-jitsi-meet to start with audio and/or video
73 86
             // muted i.e. Jicofo expresses an intent. Lib-jitsi-meet has turned
74 87
             // Jicofo's intent into reality by actually muting the respective
@@ -77,8 +90,8 @@ function _addConferenceListeners(conference, dispatch) {
77 90
             // TODO Maybe the app needs to learn about Jicofo's intent and
78 91
             // transfer that intent to lib-jitsi-meet instead of lib-jitsi-meet
79 92
             // acting on Jicofo's intent without the app's knowledge.
80
-            dispatch(setAudioMuted(Boolean(conference.startAudioMuted)));
81
-            dispatch(setVideoMuted(Boolean(conference.startVideoMuted)));
93
+            dispatch(setAudioMuted(audioMuted));
94
+            dispatch(setVideoMuted(videoMuted));
82 95
         });
83 96
 
84 97
     // Dispatches into features/base/tracks follow:

+ 7
- 2
react/features/base/conference/middleware.js 查看文件

@@ -35,6 +35,8 @@ import {
35 35
     _removeLocalTracksFromConference
36 36
 } from './functions';
37 37
 
38
+const logger = require('jitsi-meet-logger').getLogger(__filename);
39
+
38 40
 declare var APP: Object;
39 41
 
40 42
 /**
@@ -121,8 +123,11 @@ function _connectionEstablished(store, next, action) {
121 123
 function _conferenceFailedOrLeft({ dispatch, getState }, next, action) {
122 124
     const result = next(action);
123 125
 
124
-    getState()['features/base/conference'].audioOnly
125
-        && dispatch(setAudioOnly(false));
126
+    if (getState()['features/base/conference'].audioOnly) {
127
+        sendEvent('audioonly.disabled');
128
+        logger.log('Audio only disabled');
129
+        dispatch(setAudioOnly(false));
130
+    }
126 131
 
127 132
     return result;
128 133
 }

+ 22
- 3
react/features/base/media/middleware.js 查看文件

@@ -1,5 +1,6 @@
1 1
 /* @flow */
2 2
 
3
+import { sendEvent } from '../../analytics';
3 4
 import { SET_ROOM, setAudioOnly } from '../conference';
4 5
 import { parseURLParams } from '../config';
5 6
 import { MiddlewareRegistry } from '../redux';
@@ -8,6 +9,8 @@ import { setTrackMuted, TRACK_ADDED } from '../tracks';
8 9
 import { setAudioMuted, setCameraFacingMode, setVideoMuted } from './actions';
9 10
 import { CAMERA_FACING_MODE } from './constants';
10 11
 
12
+const logger = require('jitsi-meet-logger').getLogger(__filename);
13
+
11 14
 /**
12 15
  * Implements the entry point of the middleware of the feature base/media.
13 16
  *
@@ -77,14 +80,23 @@ function _setRoom({ dispatch, getState }, next, action) {
77 80
     typeof videoMuted === 'undefined'
78 81
         && (videoMuted = config.startWithVideoMuted);
79 82
 
83
+    audioMuted = Boolean(audioMuted);
84
+    videoMuted = Boolean(videoMuted);
85
+
80 86
     // Apply the config.
81 87
 
88
+    sendEvent(`startmuted.client.audio.${audioMuted ? 'muted' : 'unmuted'}`);
89
+    sendEvent(`startmuted.client.video.${videoMuted ? 'muted' : 'unmuted'}`);
90
+
91
+    logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${
92
+        videoMuted ? 'video' : ''}`);
93
+
82 94
     // Unconditionally express the desires/expectations/intents of the app and
83 95
     // the user i.e. the state of base/media. Eventually, practice/reality i.e.
84 96
     // the state of base/tracks will or will not agree with the desires.
85
-    dispatch(setAudioMuted(Boolean(audioMuted)));
97
+    dispatch(setAudioMuted(audioMuted));
86 98
     dispatch(setCameraFacingMode(CAMERA_FACING_MODE.USER));
87
-    dispatch(setVideoMuted(Boolean(videoMuted)));
99
+    dispatch(setVideoMuted(videoMuted));
88 100
 
89 101
     // config.startAudioOnly
90 102
     //
@@ -97,7 +109,10 @@ function _setRoom({ dispatch, getState }, next, action) {
97 109
         let audioOnly = urlParams && urlParams['config.startAudioOnly'];
98 110
 
99 111
         typeof audioOnly === 'undefined' && (audioOnly = config.startAudioOnly);
100
-        dispatch(setAudioOnly(Boolean(audioOnly)));
112
+        audioOnly = Boolean(audioOnly);
113
+        sendEvent(`startaudioonly.${audioOnly ? 'enabled' : 'disabled'}`);
114
+        logger.log(`Start audio only set to ${audioOnly.toString()}`);
115
+        dispatch(setAudioOnly(audioOnly));
101 116
     }
102 117
 
103 118
     return next(action);
@@ -121,6 +136,10 @@ function _syncTrackMutedState({ getState }, track) {
121 136
     // not yet in redux state and JitsiTrackEvents.TRACK_MUTE_CHANGED may be
122 137
     // fired before track gets to state.
123 138
     if (track.muted !== muted) {
139
+        sendEvent(
140
+            `synctrackstate.${track.mediaType}.${muted ? 'muted' : 'unmuted'}`);
141
+        logger.log(`Sync ${track.mediaType} track muted state to ${
142
+            muted ? 'muted' : 'unmuted'}`);
124 143
         track.muted = muted;
125 144
         setTrackMuted(track.jitsiTrack, muted);
126 145
     }

+ 10
- 1
react/features/base/tracks/actions.js 查看文件

@@ -1,3 +1,4 @@
1
+import { sendEvent } from '../../analytics';
1 2
 import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
2 3
 import {
3 4
     CAMERA_FACING_MODE,
@@ -15,6 +16,8 @@ import {
15 16
 } from './actionTypes';
16 17
 import { createLocalTracksF } from './functions';
17 18
 
19
+const logger = require('jitsi-meet-logger').getLogger(__filename);
20
+
18 21
 /**
19 22
  * Requests the creating of the desired media type tracks. Desire is expressed
20 23
  * by base/media unless the function caller specifies desired media types
@@ -154,8 +157,14 @@ export function replaceLocalTrack(oldTrack, newTrack, conference) {
154 157
                                 = newTrack.isVideoTrack()
155 158
                                     ? setVideoMuted
156 159
                                     : setAudioMuted;
160
+                            const isMuted = newTrack.isMuted();
161
+
162
+                            sendEvent(`replacetrack.${newTrack.getType()}.${
163
+                                isMuted ? 'muted' : 'unmuted'}`);
164
+                            logger.log(`Replace ${newTrack.getType()} track - ${
165
+                                isMuted ? 'muted' : 'unmuted'}`);
157 166
 
158
-                            return dispatch(setMuted(newTrack.isMuted()));
167
+                            return dispatch(setMuted(isMuted));
159 168
                         }
160 169
                     })
161 170
                     .then(() => {

+ 4
- 0
react/features/mobile/background/actions.js 查看文件

@@ -1,5 +1,6 @@
1 1
 /* @flow */
2 2
 
3
+import { sendEvent } from '../../analytics';
3 4
 import { setLastN } from '../../base/conference';
4 5
 import { setVideoMuted, VIDEO_MUTISM_AUTHORITY } from '../../base/media';
5 6
 
@@ -41,6 +42,9 @@ export function _setBackgroundVideoMuted(muted: boolean) {
41 42
         const { audioOnly } = getState()['features/base/conference'];
42 43
 
43 44
         audioOnly || dispatch(setLastN(muted ? 0 : undefined));
45
+
46
+        sendEvent('callkit.background.video.muted');
47
+
44 48
         dispatch(setVideoMuted(muted, VIDEO_MUTISM_AUTHORITY.BACKGROUND));
45 49
     };
46 50
 }

+ 5
- 1
react/features/mobile/callkit/middleware.js 查看文件

@@ -3,6 +3,7 @@
3 3
 import { NativeModules } from 'react-native';
4 4
 import uuid from 'uuid';
5 5
 
6
+import { sendEvent } from '../../analytics';
6 7
 import { APP_WILL_MOUNT, APP_WILL_UNMOUNT, appNavigate } from '../../app';
7 8
 import {
8 9
     CONFERENCE_FAILED,
@@ -268,7 +269,10 @@ function _onPerformSetMutedCallAction({ callUUID, muted: newValue }) {
268 269
         const { muted: oldValue } = getState()['features/base/media'].audio;
269 270
 
270 271
         if (oldValue !== newValue) {
271
-            dispatch(setAudioMuted(Boolean(newValue)));
272
+            const value = Boolean(newValue);
273
+
274
+            sendEvent(`callkit.audio.${value ? 'muted' : 'unmuted'}`);
275
+            dispatch(setAudioMuted(value));
272 276
         }
273 277
     }
274 278
 }

+ 1
- 1
react/features/remote-video-menu/components/MuteButton.js 查看文件

@@ -98,7 +98,7 @@ class MuteButton extends Component {
98 98
         const { dispatch, onClick, participantID } = this.props;
99 99
 
100 100
         sendEvent(
101
-            'remotevideomenu.mute',
101
+            'remotevideomenu.mute.clicked',
102 102
             {
103 103
                 value: 1,
104 104
                 label: participantID

+ 10
- 1
react/features/toolbox/components/Toolbox.native.js 查看文件

@@ -3,6 +3,7 @@ import React, { Component } from 'react';
3 3
 import { View } from 'react-native';
4 4
 import { connect } from 'react-redux';
5 5
 
6
+import { sendEvent } from '../../analytics';
6 7
 import { toggleAudioOnly } from '../../base/conference';
7 8
 import {
8 9
     MEDIA_TYPE,
@@ -174,6 +175,10 @@ class Toolbox extends Component {
174 175
      * @returns {void}
175 176
      */
176 177
     _onToggleAudio() {
178
+        const mute = !this.props._audioMuted;
179
+
180
+        sendEvent(`toolbar.audio.${mute ? 'muted' : 'unmuted'}`);
181
+
177 182
         // The user sees the reality i.e. the state of base/tracks and intends
178 183
         // to change reality by tapping on the respective button i.e. the user
179 184
         // sets the state of base/media. Whether the user's intention will turn
@@ -181,7 +186,7 @@ class Toolbox extends Component {
181 186
         // tapping.
182 187
         this.props.dispatch(
183 188
             setAudioMuted(
184
-                !this.props._audioMuted,
189
+                mute,
185 190
                 VIDEO_MUTISM_AUTHORITY.USER,
186 191
                 /* ensureTrack */ true));
187 192
     }
@@ -193,6 +198,10 @@ class Toolbox extends Component {
193 198
      * @returns {void}
194 199
      */
195 200
     _onToggleVideo() {
201
+        const mute = !this.props._videoMuted;
202
+
203
+        sendEvent(`toolbar.video.${mute ? 'muted' : 'unmuted'}`);
204
+
196 205
         // The user sees the reality i.e. the state of base/tracks and intends
197 206
         // to change reality by tapping on the respective button i.e. the user
198 207
         // sets the state of base/media. Whether the user's intention will turn

+ 11
- 1
react/features/video-quality/components/VideoQualityDialog.web.js 查看文件

@@ -3,6 +3,7 @@ import PropTypes from 'prop-types';
3 3
 import React, { Component } from 'react';
4 4
 import { connect } from 'react-redux';
5 5
 
6
+import { sendEvent } from '../../analytics';
6 7
 import {
7 8
     setAudioOnly,
8 9
     setReceiveVideoQuality,
@@ -10,6 +11,8 @@ import {
10 11
 } from '../../base/conference';
11 12
 import { translate } from '../../base/i18n';
12 13
 
14
+const logger = require('jitsi-meet-logger').getLogger(__filename);
15
+
13 16
 const {
14 17
     HIGH,
15 18
     STANDARD,
@@ -211,6 +214,8 @@ class VideoQualityDialog extends Component {
211 214
      * @returns {void}
212 215
      */
213 216
     _enableAudioOnly() {
217
+        sendEvent('toolbar.audioonly.enabled');
218
+        logger.log('Video quality: audio only enabled');
214 219
         this.props.dispatch(setAudioOnly(true));
215 220
     }
216 221
 
@@ -222,6 +227,8 @@ class VideoQualityDialog extends Component {
222 227
      * @returns {void}
223 228
      */
224 229
     _enableHighDefinition() {
230
+        sendEvent('toolbar.videoquality.high');
231
+        logger.log('Video quality: high enabled');
225 232
         this.props.dispatch(setReceiveVideoQuality(HIGH));
226 233
     }
227 234
 
@@ -233,6 +240,8 @@ class VideoQualityDialog extends Component {
233 240
      * @returns {void}
234 241
      */
235 242
     _enableLowDefinition() {
243
+        sendEvent('toolbar.videoquality.low');
244
+        logger.log('Video quality: low enabled');
236 245
         this.props.dispatch(setReceiveVideoQuality(LOW));
237 246
     }
238 247
 
@@ -244,6 +253,8 @@ class VideoQualityDialog extends Component {
244 253
      * @returns {void}
245 254
      */
246 255
     _enableStandardDefinition() {
256
+        sendEvent('toolbar.videoquality.standard');
257
+        logger.log('Video quality: standard enabled');
247 258
         this.props.dispatch(setReceiveVideoQuality(STANDARD));
248 259
     }
249 260
 
@@ -324,4 +335,3 @@ function _mapStateToProps(state) {
324 335
 }
325 336
 
326 337
 export default translate(connect(_mapStateToProps)(VideoQualityDialog));
327
-

正在加载...
取消
保存