Browse source

Merge pull request #3223 from ztl8702/local-recording

Feature: Local recording (Ready for review)

bgrozev committed 6 years ago
parent · commit 25aaa74edc
No account is linked to the committer's email.

36 files changed, with 3520 additions and 5 deletions
 1. Makefile (+11, -2)
 2. config.js (+19, -0)
 3. css/main.scss (+1, -0)
 4. css/modals/local-recording/_local-recording.scss (+92, -0)
 5. css/modals/video-quality/_video-quality.scss (+4, -0)
 6. lang/main.json (+32, -1)
 7. package-lock.json (+4, -0)
 8. package.json (+1, -0)
 9. react/features/large-video/components/AbstractLabels.js (+13, -0)
10. react/features/large-video/components/Labels.web.js (+5, -0)
11. react/features/local-recording/actionTypes.js (+32, -0)
12. react/features/local-recording/actions.js (+59, -0)
13. react/features/local-recording/components/LocalRecordingButton.js (+86, -0)
14. react/features/local-recording/components/LocalRecordingInfoDialog.js (+403, -0)
15. react/features/local-recording/components/LocalRecordingLabel.native.js (+0, -0)
16. react/features/local-recording/components/LocalRecordingLabel.web.js (+75, -0)
17. react/features/local-recording/components/index.js (+5, -0)
18. react/features/local-recording/controller/RecordingController.js (+687, -0)
19. react/features/local-recording/controller/index.js (+1, -0)
20. react/features/local-recording/index.js (+7, -0)
21. react/features/local-recording/middleware.js (+92, -0)
22. react/features/local-recording/recording/AbstractAudioContextAdapter.js (+129, -0)
23. react/features/local-recording/recording/OggAdapter.js (+143, -0)
24. react/features/local-recording/recording/RecordingAdapter.js (+85, -0)
25. react/features/local-recording/recording/Utils.js (+20, -0)
26. react/features/local-recording/recording/WavAdapter.js (+290, -0)
27. react/features/local-recording/recording/flac/FlacAdapter.js (+262, -0)
28. react/features/local-recording/recording/flac/flacEncodeWorker.js (+397, -0)
29. react/features/local-recording/recording/flac/index.js (+1, -0)
30. react/features/local-recording/recording/flac/messageTypes.js (+44, -0)
31. react/features/local-recording/recording/index.js (+5, -0)
32. react/features/local-recording/reducer.js (+35, -0)
33. react/features/local-recording/session/SessionManager.js (+439, -0)
34. react/features/local-recording/session/index.js (+1, -0)
35. react/features/toolbox/components/web/Toolbox.js (+35, -1)
36. webpack.config.js (+5, -1)

Makefile (+11, -2)  View File

@@ -2,6 +2,7 @@ BUILD_DIR = build
 CLEANCSS = ./node_modules/.bin/cleancss
 DEPLOY_DIR = libs
 LIBJITSIMEET_DIR = node_modules/lib-jitsi-meet/
+LIBFLAC_DIR = node_modules/libflacjs/dist/min/
 NODE_SASS = ./node_modules/.bin/node-sass
 NPM = npm
 OUTPUT_DIR = .
@@ -19,7 +20,7 @@ compile:
 clean:
 	rm -fr $(BUILD_DIR)
 
-deploy: deploy-init deploy-appbundle deploy-lib-jitsi-meet deploy-css deploy-local
+deploy: deploy-init deploy-appbundle deploy-lib-jitsi-meet deploy-libflac deploy-css deploy-local
 
 deploy-init:
 	rm -fr $(DEPLOY_DIR)
@@ -33,6 +34,8 @@ deploy-appbundle:
 		$(BUILD_DIR)/do_external_connect.min.map \
 		$(BUILD_DIR)/external_api.min.js \
 		$(BUILD_DIR)/external_api.min.map \
+		$(BUILD_DIR)/flacEncodeWorker.min.js \
+		$(BUILD_DIR)/flacEncodeWorker.min.map \
 		$(BUILD_DIR)/device_selection_popup_bundle.min.js \
 		$(BUILD_DIR)/device_selection_popup_bundle.min.map \
 		$(BUILD_DIR)/dial_in_info_bundle.min.js \
@@ -50,6 +53,12 @@ deploy-lib-jitsi-meet:
 		$(LIBJITSIMEET_DIR)/modules/browser/capabilities.json \
 		$(DEPLOY_DIR)
 
+deploy-libflac:
+	cp \
+		$(LIBFLAC_DIR)/libflac4-1.3.2.min.js \
+		$(LIBFLAC_DIR)/libflac4-1.3.2.min.js.mem \
+		$(DEPLOY_DIR)
+
 deploy-css:
 	$(NODE_SASS) $(STYLES_MAIN) $(STYLES_BUNDLE) && \
 	$(CLEANCSS) $(STYLES_BUNDLE) > $(STYLES_DESTINATION) ; \
@@ -58,7 +67,7 @@ deploy-css:
 deploy-local:
 	([ ! -x deploy-local.sh ] || ./deploy-local.sh)
 
-dev: deploy-init deploy-css deploy-lib-jitsi-meet
+dev: deploy-init deploy-css deploy-lib-jitsi-meet deploy-libflac
 	$(WEBPACK_DEV_SERVER)
 
 source-package:

config.js (+19, -0)  View File

@@ -347,6 +347,24 @@ var config = {
         // userRegion: "asia"
     }
 
+    // Local Recording
+    //
+
+    // localRecording: {
+    // Enables local recording.
+    // Additionally, 'localrecording' (all lowercase) needs to be added to
+    // TOOLBAR_BUTTONS in interface_config.js for the Local Recording
+    // button to show up on the toolbar.
+    //
+    //     enabled: true,
+    //
+
+    // The recording format, can be one of 'ogg', 'flac' or 'wav'.
+    //     format: 'flac'
+    //
+
+    // }
+
     // Options related to end-to-end (participant to participant) ping.
     // e2eping: {
     //   // The interval in milliseconds at which pings will be sent.
@@ -408,6 +426,7 @@ var config = {
     nick
     startBitrate
     */
+
 };
 
 /* eslint-enable no-unused-vars, no-var */
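
For readers skimming the diff, the commented-out block above corresponds to roughly the following once enabled. This is only a sketch assembled from the comments themselves; note that the toolbar button additionally requires 'localrecording' to be added to TOOLBAR_BUTTONS in interface_config.js, as the comment says.

    // Hypothetical uncommented form of the block documented above.
    localRecording: {
        // Turns the feature on for this deployment.
        enabled: true,

        // One of 'ogg', 'flac' or 'wav'; 'flac' is also the default used by
        // RecordingController.js further down in this diff.
        format: 'flac'
    }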

css/main.scss (+1, -0)  View File

@@ -45,6 +45,7 @@
 @import 'modals/settings/settings';
 @import 'modals/speaker_stats/speaker_stats';
 @import 'modals/video-quality/video-quality';
+@import 'modals/local-recording/local-recording';
 @import 'videolayout_default';
 @import 'notice';
 @import 'popup_menu';

css/modals/local-recording/_local-recording.scss (+92, -0)  View File

@@ -0,0 +1,92 @@
+.localrec-participant-stats {
+    list-style: none;
+    padding: 0;
+    width: 100%;
+    font-weight: 500;
+
+    .localrec-participant-stats-item__status-dot {
+        position: relative;
+        display: block;
+        width: 9px;
+        height: 9px;
+        border-radius: 50%;
+        margin: 0 auto;
+
+        &.status-on {
+            background: green;
+        }
+
+        &.status-off {
+            background: gray;
+        }
+
+        &.status-unknown {
+            background: darkgoldenrod;
+        }
+
+        &.status-error {
+            background: darkred;
+        }
+    }
+
+    .localrec-participant-stats-item__status,
+    .localrec-participant-stats-item__name,
+    .localrec-participant-stats-item__sessionid {
+        display: inline-block;
+        margin: 5px 0;
+        vertical-align: middle;
+    }
+    .localrec-participant-stats-item__status {
+        width: 5%;
+    }
+    .localrec-participant-stats-item__name {
+        width: 40%;
+    }
+    .localrec-participant-stats-item__sessionid {
+        width: 55%;
+    }
+
+    .localrec-participant-stats-item__name,
+    .localrec-participant-stats-item__sessionid {
+        overflow: hidden;
+        text-overflow: ellipsis;
+        white-space: nowrap;
+    }
+}
+
+.localrec-control-info-label {
+    font-weight: bold;
+}
+
+.localrec-control-info-label:after {
+    content: ' ';
+}
+
+.localrec-control-action-link {
+    display: inline-block;
+    line-height: 1.5em;
+
+    a {
+        cursor: pointer;
+        vertical-align: middle;
+    }
+}
+
+.localrec-control-action-link:before {
+    color: $linkFontColor;
+    content: '\2022';
+    font-size: 1.5em;
+    padding: 0 10px;
+    vertical-align: middle;
+}
+
+.localrec-control-action-link:first-child:before {
+    content: '';
+    padding: 0;
+}
+
+.localrec-control-action-links {
+    font-weight: bold;
+    margin-top: 10px;
+    white-space: nowrap;
+}

css/modals/video-quality/_video-quality.scss (+4, -0)  View File

@@ -168,6 +168,10 @@
         background: #FF5630;
     }
 
+    .circular-label.local-rec {
+        background: #FF5630;
+    }
+
     .circular-label.stream {
         background: #0065FF;
     }

lang/main.json (+32, -1)  View File

@@ -43,7 +43,8 @@
         "mute": "Mute or unmute your microphone",
         "fullScreen": "View or exit full screen",
         "videoMute": "Start or stop your camera",
-        "showSpeakerStats": "Show speaker stats"
+        "showSpeakerStats": "Show speaker stats",
+        "localRecording": "Show or hide local recording controls"
     },
     "welcomepage":{
         "accessibilityLabel": {
@@ -87,6 +88,7 @@
             "fullScreen": "Toggle full screen",
             "hangup": "Leave the call",
             "invite": "Invite people",
+            "localRecording": "Toggle local recording controls",
             "lockRoom": "Toggle room lock",
             "moreActions": "Toggle more actions menu",
             "moreActionsMenu": "More actions menu",
@@ -668,5 +670,34 @@
         "decline": "Dismiss",
         "productLabel": "from Jitsi Meet",
         "videoCallTitle": "Incoming video call"
+    },
+    "localRecording": {
+        "localRecording": "Local Recording",
+        "dialogTitle": "Local Recording Controls",
+        "start": "Start Recording",
+        "stop": "Stop Recording",
+        "moderator": "Moderator",
+        "me": "Me",
+        "duration": "Duration",
+        "durationNA": "N/A",
+        "encoding": "Encoding",
+        "participantStats": "Participant Stats",
+        "participant": "Participant",
+        "sessionToken": "Session Token",
+        "clientState": {
+            "on": "On",
+            "off": "Off",
+            "unknown": "Unknown"
+        },
+        "messages": {
+            "engaged": "Local recording engaged.",
+            "finished": "Recording session __token__ finished. Please send the recorded file to the moderator.",
+            "finishedModerator": "Recording session __token__ finished. The recording of the local track has been saved. Please ask the other participants to submit their recordings.",
+            "notModerator": "You are not the moderator. You cannot start or stop local recording."
+        },
+        "yes": "Yes",
+        "no": "No",
+        "label": "LOR",
+        "labelToolTip": "Local recording is engaged"
     }
 }
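
The __token__ markers in the two "finished" messages are interpolation placeholders: RecordingController.js below passes { token } as the message parameters, and middleware.js renders the text via i18next.t. A minimal sketch of the substitution, assuming the project's existing i18next setup (which uses the legacy __key__ interpolation style these strings are written in):

    import { i18next } from '../../base/i18n';

    // 91110743 is just an example session token.
    i18next.t('localRecording.messages.finished', { token: 91110743 });
    // -> "Recording session 91110743 finished. Please send the recorded
    //     file to the moderator."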

package-lock.json (+4, -0)  View File

@@ -9736,6 +9736,10 @@
         "yaeti": "1.0.1"
       }
     },
+    "libflacjs": {
+      "version": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
+      "from": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d"
+    },
     "load-json-file": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz",

package.json (+1, -0)  View File

@@ -48,6 +48,7 @@
     "jsc-android": "224109.1.0",
     "jwt-decode": "2.2.0",
     "lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#4a28a196160411d657518022de8bded7c02ad679",
+    "libflacjs": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
     "lodash": "4.17.4",
     "moment": "2.19.4",
     "moment-duration-format": "2.2.2",

react/features/large-video/components/AbstractLabels.js (+13, -0)  View File

@@ -3,6 +3,7 @@
 import React, { Component } from 'react';
 
 import { isFilmstripVisible } from '../../filmstrip';
+import { LocalRecordingLabel } from '../../local-recording';
 import { RecordingLabel } from '../../recording';
 import { shouldDisplayTileView } from '../../video-layout';
 import { VideoQualityLabel } from '../../video-quality';
@@ -69,6 +70,18 @@ export default class AbstractLabels<P: Props, S> extends Component<P, S> {
             <TranscribingLabel />
         );
     }
+
+    /**
+     * Renders the {@code LocalRecordingLabel}.
+     *
+     * @returns {React$Element}
+     * @protected
+     */
+    _renderLocalRecordingLabel() {
+        return (
+            <LocalRecordingLabel />
+        );
+    }
 }
 
 /**

react/features/large-video/components/Labels.web.js (+5, -0)  View File

@@ -85,6 +85,9 @@ class Labels extends AbstractLabels<Props, State> {
                     this._renderRecordingLabel(
                         JitsiRecordingConstants.mode.STREAM)
                 }
+                {
+                    this._renderLocalRecordingLabel()
+                }
                 {
                     this._renderTranscribingLabel()
                 }
@@ -101,6 +104,8 @@ class Labels extends AbstractLabels<Props, State> {
     _renderVideoQualityLabel: () => React$Element<*>
 
     _renderTranscribingLabel: () => React$Element<*>
+
+    _renderLocalRecordingLabel: () => React$Element<*>
 }
 
 export default connect(_mapStateToProps)(Labels);

react/features/local-recording/actionTypes.js (+32, -0)  View File

@@ -0,0 +1,32 @@
+/**
+ * Action to signal that the local client has started to perform recording,
+ * (as in: {@code RecordingAdapter} is actively collecting audio data).
+ *
+ * {
+ *     type: LOCAL_RECORDING_ENGAGED,
+ *     recordingEngagedAt: Date
+ * }
+ */
+export const LOCAL_RECORDING_ENGAGED = Symbol('LOCAL_RECORDING_ENGAGED');
+
+/**
+ * Action to signal that the local client has stopped recording,
+ * (as in: {@code RecordingAdapter} is no longer collecting audio data).
+ *
+ * {
+ *     type: LOCAL_RECORDING_UNENGAGED
+ * }
+ */
+export const LOCAL_RECORDING_UNENGAGED = Symbol('LOCAL_RECORDING_UNENGAGED');
+
+/**
+ * Action to update {@code LocalRecordingInfoDialog} with stats from all
+ * clients.
+ *
+ * {
+ *     type: LOCAL_RECORDING_STATS_UPDATE,
+ *     stats: Object
+ * }
+ */
+export const LOCAL_RECORDING_STATS_UPDATE
+    = Symbol('LOCAL_RECORDING_STATS_UPDATE');

react/features/local-recording/actions.js (+59, -0)  View File

@@ -0,0 +1,59 @@
+/* @flow */
+
+import {
+    LOCAL_RECORDING_ENGAGED,
+    LOCAL_RECORDING_UNENGAGED,
+    LOCAL_RECORDING_STATS_UPDATE
+} from './actionTypes';
+
+// The following two actions signal state changes in local recording engagement.
+// In other words, the events of the local WebWorker / MediaRecorder starting to
+// record and finishing recording.
+// Note that this is not the event fired when the users tries to start the
+// recording in the UI.
+
+/**
+ * Signals that local recording has been engaged.
+ *
+ * @param {Date} startTime - Time when the recording is engaged.
+ * @returns {{
+ *     type: LOCAL_RECORDING_ENGAGED,
+ *     recordingEngagedAt: Date
+ * }}
+ */
+export function localRecordingEngaged(startTime: Date) {
+    return {
+        type: LOCAL_RECORDING_ENGAGED,
+        recordingEngagedAt: startTime
+    };
+}
+
+/**
+ * Signals that local recording has finished.
+ *
+ * @returns {{
+ *     type: LOCAL_RECORDING_UNENGAGED
+ * }}
+ */
+export function localRecordingUnengaged() {
+    return {
+        type: LOCAL_RECORDING_UNENGAGED
+    };
+}
+
+/**
+ * Updates the the local recording stats from each client,
+ * to be displayed on {@code LocalRecordingInfoDialog}.
+ *
+ * @param {*} stats - The stats object.
+ * @returns {{
+ *     type: LOCAL_RECORDING_STATS_UPDATE,
+ *     stats: Object
+ * }}
+ */
+export function statsUpdate(stats: Object) {
+    return {
+        type: LOCAL_RECORDING_STATS_UPDATE,
+        stats
+    };
+}
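
The matching reducer (react/features/local-recording/reducer.js, +35 lines, not expanded on this page) consumes these three actions. The following is only a sketch, inferred from the fields that _mapStateToProps reads out of state['features/local-recording'] in the components further down; the actual file may differ in detail.

    // Hypothetical sketch of reducer.js.
    import { ReducerRegistry } from '../base/redux';

    import {
        LOCAL_RECORDING_ENGAGED,
        LOCAL_RECORDING_STATS_UPDATE,
        LOCAL_RECORDING_UNENGAGED
    } from './actionTypes';

    ReducerRegistry.register('features/local-recording', (state = {
        isEngaged: false,
        recordingEngagedAt: null
    }, action) => {
        switch (action.type) {
        case LOCAL_RECORDING_ENGAGED:
            // Remember when recording started so the dialog can show a duration.
            return {
                ...state,
                isEngaged: true,
                recordingEngagedAt: action.recordingEngagedAt
            };
        case LOCAL_RECORDING_UNENGAGED:
            return {
                ...state,
                isEngaged: false,
                recordingEngagedAt: null
            };
        case LOCAL_RECORDING_STATS_UPDATE:
            // Per-participant stats polled by LocalRecordingInfoDialog.
            return {
                ...state,
                stats: action.stats
            };
        default:
            return state;
        }
    });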

react/features/local-recording/components/LocalRecordingButton.js (+86, -0)  View File

@@ -0,0 +1,86 @@
1
+/* @flow */
2
+
3
+import React, { Component } from 'react';
4
+
5
+import { translate } from '../../base/i18n';
6
+import { ToolbarButton } from '../../toolbox';
7
+
8
+/**
9
+ * The type of the React {@code Component} state of
10
+ * {@link LocalRecordingButton}.
11
+ */
12
+type Props = {
13
+
14
+    /**
15
+     * Whether or not {@link LocalRecordingInfoDialog} should be displayed.
16
+     */
17
+    isDialogShown: boolean,
18
+
19
+    /**
20
+     * Callback function called when {@link LocalRecordingButton} is clicked.
21
+     */
22
+    onClick: Function,
23
+
24
+    /**
25
+     * Invoked to obtain translated strings.
26
+     */
27
+    t: Function
28
+}
29
+
30
+/**
31
+ * A React {@code Component} for opening or closing the
32
+ * {@code LocalRecordingInfoDialog}.
33
+ *
34
+ * @extends Component
35
+ */
36
+class LocalRecordingButton extends Component<Props> {
37
+
38
+    /**
39
+     * Initializes a new {@code LocalRecordingButton} instance.
40
+     *
41
+     * @param {Object} props - The read-only properties with which the new
42
+     * instance is to be initialized.
43
+     */
44
+    constructor(props: Props) {
45
+        super(props);
46
+
47
+        // Bind event handlers so they are only bound once per instance.
48
+        this._onClick = this._onClick.bind(this);
49
+    }
50
+
51
+    /**
52
+     * Implements React's {@link Component#render()}.
53
+     *
54
+     * @inheritdoc
55
+     * @returns {ReactElement}
56
+     */
57
+    render() {
58
+        const { isDialogShown, t } = this.props;
59
+        const iconClasses
60
+            = `icon-thumb-menu ${isDialogShown
61
+                ? 'icon-rec toggled' : 'icon-rec'}`;
62
+
63
+        return (
64
+            <ToolbarButton
65
+                accessibilityLabel
66
+                    = { t('toolbar.accessibilityLabel.localRecording') }
67
+                iconName = { iconClasses }
68
+                onClick = { this._onClick }
69
+                tooltip = { t('localRecording.dialogTitle') } />
70
+        );
71
+    }
72
+
73
+    _onClick: () => void;
74
+
75
+    /**
76
+     * Callback invoked when the Toolbar button is clicked.
77
+     *
78
+     * @private
79
+     * @returns {void}
80
+     */
81
+    _onClick() {
82
+        this.props.onClick();
83
+    }
84
+}
85
+
86
+export default translate(LocalRecordingButton);
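
This button is rendered by the web Toolbox (react/features/toolbox/components/web/Toolbox.js, +35 -1, not expanded on this page). A hypothetical wiring sketch, built only from pieces that appear elsewhere in this diff (toggleDialog from base/dialog and LocalRecordingInfoDialog); the real Toolbox change may differ:

    // Sketch only: clicking the button toggles the info dialog.
    <LocalRecordingButton
        isDialogShown = { localRecordingDialogShown /* hypothetical flag */ }
        onClick = { () => dispatch(toggleDialog(LocalRecordingInfoDialog)) } />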

react/features/local-recording/components/LocalRecordingInfoDialog.js (+403, -0)  View File

@@ -0,0 +1,403 @@
1
+/* @flow */
2
+
3
+import moment from 'moment';
4
+import React, { Component } from 'react';
5
+import { connect } from 'react-redux';
6
+
7
+import { Dialog } from '../../base/dialog';
8
+import { translate } from '../../base/i18n';
9
+import {
10
+    PARTICIPANT_ROLE,
11
+    getLocalParticipant
12
+} from '../../base/participants';
13
+
14
+import { statsUpdate } from '../actions';
15
+import { recordingController } from '../controller';
16
+
17
+
18
+/**
19
+ * The type of the React {@code Component} props of
20
+ * {@link LocalRecordingInfoDialog}.
21
+ */
22
+type Props = {
23
+
24
+    /**
25
+     * Redux store dispatch function.
26
+     */
27
+    dispatch: Dispatch<*>,
28
+
29
+    /**
30
+     * Current encoding format.
31
+     */
32
+    encodingFormat: string,
33
+
34
+    /**
35
+     * Whether the local user is the moderator.
36
+     */
37
+    isModerator: boolean,
38
+
39
+    /**
40
+     * Whether local recording is engaged.
41
+     */
42
+    isEngaged: boolean,
43
+
44
+    /**
45
+     * The start time of the current local recording session.
46
+     * Used to calculate the duration of recording.
47
+     */
48
+    recordingEngagedAt: Date,
49
+
50
+    /**
51
+     * Stats of all the participant.
52
+     */
53
+    stats: Object,
54
+
55
+    /**
56
+     * Invoked to obtain translated strings.
57
+     */
58
+    t: Function
59
+}
60
+
61
+/**
62
+ * The type of the React {@code Component} state of
63
+ * {@link LocalRecordingInfoDialog}.
64
+ */
65
+type State = {
66
+
67
+    /**
68
+     * The recording duration string to be displayed on the UI.
69
+     */
70
+    durationString: string
71
+}
72
+
73
+/**
74
+ * A React Component with the contents for a dialog that shows information about
75
+ * local recording. For users with moderator rights, this is also the "control
76
+ * panel" for starting/stopping local recording on all clients.
77
+ *
78
+ * @extends Component
79
+ */
80
+class LocalRecordingInfoDialog extends Component<Props, State> {
81
+
82
+    /**
83
+     * Saves a handle to the timer for UI updates,
84
+     * so that it can be cancelled when the component unmounts.
85
+     */
86
+    _timer: ?IntervalID;
87
+
88
+    /**
89
+     * Initializes a new {@code LocalRecordingInfoDialog} instance.
90
+     *
91
+     * @param {Props} props - The React {@code Component} props to initialize
92
+     * the new {@code LocalRecordingInfoDialog} instance with.
93
+     */
94
+    constructor(props: Props) {
95
+        super(props);
96
+        this.state = {
97
+            durationString: ''
98
+        };
99
+    }
100
+
101
+    /**
102
+     * Implements React's {@link Component#componentDidMount()}.
103
+     *
104
+     * @returns {void}
105
+     */
106
+    componentDidMount() {
107
+        this._timer = setInterval(
108
+            () => {
109
+                this.setState((_prevState, props) => {
110
+                    const nowTime = new Date();
111
+
112
+                    return {
113
+                        durationString: this._getDuration(nowTime,
114
+                            props.recordingEngagedAt)
115
+                    };
116
+                });
117
+                try {
118
+                    this.props.dispatch(
119
+                        statsUpdate(recordingController
120
+                            .getParticipantsStats()));
121
+                } catch (e) {
122
+                    // do nothing
123
+                }
124
+            },
125
+            1000
126
+        );
127
+    }
128
+
129
+    /**
130
+     * Implements React's {@link Component#componentWillUnmount()}.
131
+     *
132
+     * @returns {void}
133
+     */
134
+    componentWillUnmount() {
135
+        if (this._timer) {
136
+            clearInterval(this._timer);
137
+            this._timer = null;
138
+        }
139
+    }
140
+
141
+    /**
142
+     * Implements React's {@link Component#render()}.
143
+     *
144
+     * @inheritdoc
145
+     * @returns {ReactElement}
146
+     */
147
+    render() {
148
+        const { isModerator, t } = this.props;
149
+
150
+        return (
151
+            <Dialog
152
+                cancelTitleKey = { 'dialog.close' }
153
+                submitDisabled = { true }
154
+                titleKey = 'localRecording.dialogTitle'>
155
+                <div className = 'localrec-control'>
156
+                    <span className = 'localrec-control-info-label'>
157
+                        {`${t('localRecording.moderator')}:`}
158
+                    </span>
159
+                    <span className = 'info-value'>
160
+                        { isModerator
161
+                            ? t('localRecording.yes')
162
+                            : t('localRecording.no') }
163
+                    </span>
164
+                </div>
165
+                { this._renderModeratorControls() }
166
+                { this._renderDurationAndFormat() }
167
+            </Dialog>
168
+        );
169
+    }
170
+
171
+    /**
172
+     * Renders the recording duration and encoding format. Only shown if local
173
+     * recording is engaged.
174
+     *
175
+     * @private
176
+     * @returns {ReactElement|null}
177
+     */
178
+    _renderDurationAndFormat() {
179
+        const { encodingFormat, isEngaged, t } = this.props;
180
+        const { durationString } = this.state;
181
+
182
+        if (!isEngaged) {
183
+            return null;
184
+        }
185
+
186
+        return (
187
+            <div>
188
+                <div>
189
+                    <span className = 'localrec-control-info-label'>
190
+                        {`${t('localRecording.duration')}:`}
191
+                    </span>
192
+                    <span className = 'info-value'>
193
+                        { durationString === ''
194
+                            ? t('localRecording.durationNA')
195
+                            : durationString }
196
+                    </span>
197
+                </div>
198
+                <div>
199
+                    <span className = 'localrec-control-info-label'>
200
+                        {`${t('localRecording.encoding')}:`}
201
+                    </span>
202
+                    <span className = 'info-value'>
203
+                        { encodingFormat }
204
+                    </span>
205
+                </div>
206
+            </div>
207
+        );
208
+    }
209
+
210
+    /**
211
+     * Returns React elements for displaying the local recording stats of
212
+     * each participant.
213
+     *
214
+     * @private
215
+     * @returns {ReactElement|null}
216
+     */
217
+    _renderStats() {
218
+        const { stats } = this.props;
219
+
220
+        if (stats === undefined) {
221
+            return null;
222
+        }
223
+        const ids = Object.keys(stats);
224
+
225
+        return (
226
+            <div className = 'localrec-participant-stats' >
227
+                { this._renderStatsHeader() }
228
+                { ids.map((id, i) => this._renderStatsLine(i, id)) }
229
+            </div>
230
+        );
231
+    }
232
+
233
+    /**
234
+     * Renders the stats for one participant.
235
+     *
236
+     * @private
237
+     * @param {*} lineKey - The key required by React for elements in lists.
238
+     * @param {*} id - The ID of the participant.
239
+     * @returns {ReactElement}
240
+     */
241
+    _renderStatsLine(lineKey, id) {
242
+        const { stats } = this.props;
243
+        let statusClass = 'localrec-participant-stats-item__status-dot ';
244
+
245
+        statusClass += stats[id].recordingStats
246
+            ? stats[id].recordingStats.isRecording
247
+                ? 'status-on'
248
+                : 'status-off'
249
+            : 'status-unknown';
250
+
251
+        return (
252
+            <div
253
+                className = 'localrec-participant-stats-item'
254
+                key = { lineKey } >
255
+                <div className = 'localrec-participant-stats-item__status'>
256
+                    <span className = { statusClass } />
257
+                </div>
258
+                <div className = 'localrec-participant-stats-item__name'>
259
+                    { stats[id].displayName || id }
260
+                </div>
261
+                <div className = 'localrec-participant-stats-item__sessionid'>
262
+                    { stats[id].recordingStats.currentSessionToken }
263
+                </div>
264
+            </div>
265
+        );
266
+    }
267
+
268
+    /**
269
+     * Renders the participant stats header line.
270
+     *
271
+     * @private
272
+     * @returns {ReactElement}
273
+     */
274
+    _renderStatsHeader() {
275
+        const { t } = this.props;
276
+
277
+        return (
278
+            <div className = 'localrec-participant-stats-item'>
279
+                <div className = 'localrec-participant-stats-item__status' />
280
+                <div className = 'localrec-participant-stats-item__name'>
281
+                    { t('localRecording.participant') }
282
+                </div>
283
+                <div className = 'localrec-participant-stats-item__sessionid'>
284
+                    { t('localRecording.sessionToken') }
285
+                </div>
286
+            </div>
287
+        );
288
+    }
289
+
290
+    /**
291
+     * Renders the moderator-only controls, i.e. stats of all users and the
292
+     * action links.
293
+     *
294
+     * @private
295
+     * @returns {ReactElement|null}
296
+     */
297
+    _renderModeratorControls() {
298
+        const { isModerator, isEngaged, t } = this.props;
299
+
300
+        if (!isModerator) {
301
+            return null;
302
+        }
303
+
304
+        return (
305
+            <div>
306
+                <div className = 'localrec-control-action-links'>
307
+                    <div className = 'localrec-control-action-link'>
308
+                        { isEngaged ? <a
309
+                            onClick = { this._onStop }>
310
+                            { t('localRecording.stop') }
311
+                        </a>
312
+                            : <a
313
+                                onClick = { this._onStart }>
314
+                                { t('localRecording.start') }
315
+                            </a>
316
+                        }
317
+                    </div>
318
+                </div>
319
+                <div>
320
+                    <span className = 'localrec-control-info-label'>
321
+                        {`${t('localRecording.participantStats')}:`}
322
+                    </span>
323
+                </div>
324
+                { this._renderStats() }
325
+            </div>
326
+        );
327
+    }
328
+
329
+    /**
330
+     * Creates a duration string "HH:MM:SS" from two Date objects.
331
+     *
332
+     * @param {Date} now - Current time.
333
+     * @param {Date} prev - Previous time, the time to be subtracted.
334
+     * @returns {string}
335
+     */
336
+    _getDuration(now, prev) {
337
+        if (prev === null || prev === undefined) {
338
+            return '';
339
+        }
340
+
341
+        // Still a hack, as moment.js does not support formatting of duration
342
+        // (i.e. TimeDelta). Only works if total duration < 24 hours.
343
+        // But who is going to have a 24-hour long conference?
344
+        return moment(now - prev).utc()
345
+            .format('HH:mm:ss');
346
+    }
347
+
348
+    /**
349
+     * Callback function for the Start UI action.
350
+     *
351
+     * @private
352
+     * @returns {void}
353
+     */
354
+    _onStart() {
355
+        recordingController.startRecording();
356
+    }
357
+
358
+    /**
359
+     * Callback function for the Stop UI action.
360
+     *
361
+     * @private
362
+     * @returns {void}
363
+     */
364
+    _onStop() {
365
+        recordingController.stopRecording();
366
+    }
367
+
368
+}
369
+
370
+/**
371
+ * Maps (parts of) the Redux state to the associated props for the
372
+ * {@code LocalRecordingInfoDialog} component.
373
+ *
374
+ * @param {Object} state - The Redux state.
375
+ * @private
376
+ * @returns {{
377
+ *     encodingFormat: string,
378
+ *     isModerator: boolean,
379
+ *     isEngaged: boolean,
380
+ *     recordingEngagedAt: Date,
381
+ *     stats: Object
382
+ * }}
383
+ */
384
+function _mapStateToProps(state) {
385
+    const {
386
+        encodingFormat,
387
+        isEngaged,
388
+        recordingEngagedAt,
389
+        stats
390
+    } = state['features/local-recording'];
391
+    const isModerator
392
+        = getLocalParticipant(state).role === PARTICIPANT_ROLE.MODERATOR;
393
+
394
+    return {
395
+        encodingFormat,
396
+        isModerator,
397
+        isEngaged,
398
+        recordingEngagedAt,
399
+        stats
400
+    };
401
+}
402
+
403
+export default translate(connect(_mapStateToProps)(LocalRecordingInfoDialog));

react/features/local-recording/components/LocalRecordingLabel.native.js (+0, -0)  View File


react/features/local-recording/components/LocalRecordingLabel.web.js (+75, -0)  View File

@@ -0,0 +1,75 @@
1
+// @flow
2
+
3
+import Tooltip from '@atlaskit/tooltip';
4
+import React, { Component } from 'react';
5
+import { connect } from 'react-redux';
6
+
7
+import { translate } from '../../base/i18n/index';
8
+import { CircularLabel } from '../../base/label/index';
9
+
10
+
11
+/**
12
+ * The type of the React {@code Component} props of {@link LocalRecordingLabel}.
13
+ */
14
+type Props = {
15
+
16
+    /**
17
+     * Invoked to obtain translated strings.
18
+     */
19
+    t: Function,
20
+
21
+    /**
22
+     * Whether local recording is engaged or not.
23
+     */
24
+    isEngaged: boolean
25
+};
26
+
27
+/**
28
+ * React Component for displaying a label when local recording is engaged.
29
+ *
30
+ * @extends Component
31
+ */
32
+class LocalRecordingLabel extends Component<Props> {
33
+
34
+    /**
35
+     * Implements React's {@link Component#render()}.
36
+     *
37
+     * @inheritdoc
38
+     * @returns {ReactElement}
39
+     */
40
+    render() {
41
+        if (!this.props.isEngaged) {
42
+            return null;
43
+        }
44
+
45
+        return (
46
+            <Tooltip
47
+                content = { this.props.t('localRecording.labelToolTip') }
48
+                position = { 'left' }>
49
+                <CircularLabel
50
+                    className = 'local-rec'
51
+                    label = { this.props.t('localRecording.label') } />
52
+            </Tooltip>
53
+        );
54
+    }
55
+
56
+}
57
+
58
+/**
59
+ * Maps (parts of) the Redux state to the associated props for the
60
+ * {@code LocalRecordingLabel} component.
61
+ *
62
+ * @param {Object} state - The Redux state.
63
+ * @private
64
+ * @returns {{
65
+ * }}
66
+ */
67
+function _mapStateToProps(state) {
68
+    const { isEngaged } = state['features/local-recording'];
69
+
70
+    return {
71
+        isEngaged
72
+    };
73
+}
74
+
75
+export default translate(connect(_mapStateToProps)(LocalRecordingLabel));

react/features/local-recording/components/index.js (+5, -0)  View File

@@ -0,0 +1,5 @@
+export { default as LocalRecordingButton } from './LocalRecordingButton';
+export { default as LocalRecordingLabel } from './LocalRecordingLabel';
+export {
+    default as LocalRecordingInfoDialog
+} from './LocalRecordingInfoDialog';

react/features/local-recording/controller/RecordingController.js (+687, -0)  View File

@@ -0,0 +1,687 @@
1
+/* @flow */
2
+
3
+import { i18next } from '../../base/i18n';
4
+
5
+import {
6
+    FlacAdapter,
7
+    OggAdapter,
8
+    WavAdapter,
9
+    downloadBlob
10
+} from '../recording';
11
+import { sessionManager } from '../session';
12
+
13
+const logger = require('jitsi-meet-logger').getLogger(__filename);
14
+
15
+/**
16
+ * XMPP command for signaling the start of local recording to all clients.
17
+ * Should be sent by the moderator only.
18
+ */
19
+const COMMAND_START = 'localRecStart';
20
+
21
+/**
22
+ * XMPP command for signaling the stop of local recording to all clients.
23
+ * Should be sent by the moderator only.
24
+ */
25
+const COMMAND_STOP = 'localRecStop';
26
+
27
+/**
28
+ * One-time command used to trigger the moderator to resend the commands.
29
+ * This is a workaround for newly-joined clients to receive remote presence.
30
+ */
31
+const COMMAND_PING = 'localRecPing';
32
+
33
+/**
34
+ * One-time command sent upon receiving a {@code COMMAND_PING}.
35
+ * Only the moderator sends this command.
36
+ * This command does not carry any information itself, but rather forces the
37
+ * XMPP server to resend the remote presence.
38
+ */
39
+const COMMAND_PONG = 'localRecPong';
40
+
41
+/**
42
+ * Participant property key for local recording stats.
43
+ */
44
+const PROPERTY_STATS = 'localRecStats';
45
+
46
+/**
47
+ * Supported recording formats.
48
+ */
49
+const RECORDING_FORMATS = new Set([ 'flac', 'wav', 'ogg' ]);
50
+
51
+/**
52
+ * Default recording format.
53
+ */
54
+const DEFAULT_RECORDING_FORMAT = 'flac';
55
+
56
+/**
57
+ * States of the {@code RecordingController}.
58
+ */
59
+const ControllerState = Object.freeze({
60
+    /**
61
+     * Idle (not recording).
62
+     */
63
+    IDLE: Symbol('IDLE'),
64
+
65
+    /**
66
+     * Starting.
67
+     */
68
+    STARTING: Symbol('STARTING'),
69
+
70
+    /**
71
+     * Engaged (recording).
72
+     */
73
+    RECORDING: Symbol('RECORDING'),
74
+
75
+    /**
76
+     * Stopping.
77
+     */
78
+    STOPPING: Symbol('STOPPING'),
79
+
80
+    /**
81
+     * Failed, due to error during starting / stopping process.
82
+     */
83
+    FAILED: Symbol('FAILED')
84
+});
85
+
86
+/**
87
+ * Type of the stats reported by each participant (client).
88
+ */
89
+type RecordingStats = {
90
+
91
+    /**
92
+     * Current local recording session token used by the participant.
93
+     */
94
+    currentSessionToken: number,
95
+
96
+    /**
97
+     * Whether local recording is engaged on the participant's device.
98
+     */
99
+    isRecording: boolean,
100
+
101
+    /**
102
+     * Total recorded bytes. (Reserved for future use.)
103
+     */
104
+    recordedBytes: number,
105
+
106
+    /**
107
+     * Total recording duration. (Reserved for future use.)
108
+     */
109
+    recordedLength: number
110
+}
111
+
112
+/**
113
+ * The component responsible for the coordination of local recording, across
114
+ * multiple participants.
115
+ * Current implementation requires that there is only one moderator in a room.
116
+ */
117
+class RecordingController {
118
+
119
+    /**
120
+     * For each recording session, there is a separate @{code RecordingAdapter}
121
+     * instance so that encoded bits from the previous sessions can still be
122
+     * retrieved after they ended.
123
+     *
124
+     * @private
125
+     */
126
+    _adapters = {};
127
+
128
+    /**
129
+     * The {@code JitsiConference} instance.
130
+     *
131
+     * @private
132
+     */
133
+    _conference: * = null;
134
+
135
+    /**
136
+     * Current recording session token.
137
+     * Session token is a number generated by the moderator, to ensure every
138
+     * client is in the same recording state.
139
+     *
140
+     * @private
141
+     */
142
+    _currentSessionToken: number = -1;
143
+
144
+    /**
145
+     * Current state of {@code RecordingController}.
146
+     *
147
+     * @private
148
+     */
149
+    _state = ControllerState.IDLE;
150
+
151
+    /**
152
+     * Whether or not the audio is muted in the UI. This is stored as internal
153
+     * state of {@code RecordingController} because we might have recording
154
+     * sessions that start muted.
155
+     */
156
+    _isMuted = false;
157
+
158
+    /**
159
+     * The ID of the active microphone.
160
+     *
161
+     * @private
162
+     */
163
+    _micDeviceId = 'default';
164
+
165
+    /**
166
+     * Current recording format. This will be in effect from the next
167
+     * recording session, i.e., if this value is changed during an on-going
168
+     * recording session, that on-going session will not use the new format.
169
+     *
170
+     * @private
171
+     */
172
+    _format = DEFAULT_RECORDING_FORMAT;
173
+
174
+    /**
175
+     * Whether or not the {@code RecordingController} has registered for
176
+     * XMPP events. Prevents initialization from happening multiple times.
177
+     *
178
+     * @private
179
+     */
180
+    _registered = false;
181
+
182
+    /**
183
+     * FIXME: callback function for the {@code RecordingController} to notify
184
+     * UI it wants to display a notice. Keeps {@code RecordingController}
185
+     * decoupled from UI.
186
+     */
187
+    _onNotify: ?(messageKey: string, messageParams?: Object) => void;
188
+
189
+    /**
190
+     * FIXME: callback function for the {@code RecordingController} to notify
191
+     * UI it wants to display a warning. Keeps {@code RecordingController}
192
+     * decoupled from UI.
193
+     */
194
+    _onWarning: ?(messageKey: string, messageParams?: Object) => void;
195
+
196
+    /**
197
+     * FIXME: callback function for the {@code RecordingController} to notify
198
+     * UI that the local recording state has changed.
199
+     */
200
+    _onStateChanged: ?(boolean) => void;
201
+
202
+    /**
203
+     * Constructor.
204
+     *
205
+     * @returns {void}
206
+     */
207
+    constructor() {
208
+        this.registerEvents = this.registerEvents.bind(this);
209
+        this.getParticipantsStats = this.getParticipantsStats.bind(this);
210
+        this._onStartCommand = this._onStartCommand.bind(this);
211
+        this._onStopCommand = this._onStopCommand.bind(this);
212
+        this._onPingCommand = this._onPingCommand.bind(this);
213
+        this._doStartRecording = this._doStartRecording.bind(this);
214
+        this._doStopRecording = this._doStopRecording.bind(this);
215
+        this._updateStats = this._updateStats.bind(this);
216
+        this._switchToNewSession = this._switchToNewSession.bind(this);
217
+    }
218
+
219
+    registerEvents: () => void;
220
+
221
+    /**
222
+     * Registers listeners for XMPP events.
223
+     *
224
+     * @param {JitsiConference} conference - {@code JitsiConference} instance.
225
+     * @returns {void}
226
+     */
227
+    registerEvents(conference: Object) {
228
+        if (!this._registered) {
229
+            this._conference = conference;
230
+            if (this._conference) {
231
+                this._conference
232
+                    .addCommandListener(COMMAND_STOP, this._onStopCommand);
233
+                this._conference
234
+                    .addCommandListener(COMMAND_START, this._onStartCommand);
235
+                this._conference
236
+                    .addCommandListener(COMMAND_PING, this._onPingCommand);
237
+                this._registered = true;
238
+            }
239
+            if (!this._conference.isModerator()) {
240
+                this._conference.sendCommandOnce(COMMAND_PING, {});
241
+            }
242
+        }
243
+    }
244
+
245
+    /**
246
+     * Sets the event handler for {@code onStateChanged}.
247
+     *
248
+     * @param {Function} delegate - The event handler.
249
+     * @returns {void}
250
+     */
251
+    set onStateChanged(delegate: Function) {
252
+        this._onStateChanged = delegate;
253
+    }
254
+
255
+    /**
256
+     * Sets the event handler for {@code onNotify}.
257
+     *
258
+     * @param {Function} delegate - The event handler.
259
+     * @returns {void}
260
+     */
261
+    set onNotify(delegate: Function) {
262
+        this._onNotify = delegate;
263
+    }
264
+
265
+    /**
266
+     * Sets the event handler for {@code onWarning}.
267
+     *
268
+     * @param {Function} delegate - The event handler.
269
+     * @returns {void}
270
+     */
271
+    set onWarning(delegate: Function) {
272
+        this._onWarning = delegate;
273
+    }
274
+
275
+    /**
276
+     * Signals the participants to start local recording.
277
+     *
278
+     * @returns {void}
279
+     */
280
+    startRecording() {
281
+        this.registerEvents();
282
+        if (this._conference && this._conference.isModerator()) {
283
+            this._conference.removeCommand(COMMAND_STOP);
284
+            this._conference.sendCommand(COMMAND_START, {
285
+                attributes: {
286
+                    sessionToken: this._getRandomToken(),
287
+                    format: this._format
288
+                }
289
+            });
290
+        } else if (this._onWarning) {
291
+            this._onWarning('localRecording.messages.notModerator');
292
+        }
293
+    }
294
+
295
+    /**
296
+     * Signals the participants to stop local recording.
297
+     *
298
+     * @returns {void}
299
+     */
300
+    stopRecording() {
301
+        if (this._conference) {
302
+            if (this._conference.isModerator()) {
303
+                this._conference.removeCommand(COMMAND_START);
304
+                this._conference.sendCommand(COMMAND_STOP, {
305
+                    attributes: {
306
+                        sessionToken: this._currentSessionToken
307
+                    }
308
+                });
309
+            } else if (this._onWarning) {
310
+                this._onWarning('localRecording.messages.notModerator');
311
+            }
312
+        }
313
+    }
314
+
315
+    /**
316
+     * Triggers the download of recorded data.
317
+     * Browser only.
318
+     *
319
+     * @param {number} sessionToken - The token of the session to download.
320
+     * @returns {void}
321
+     */
322
+    downloadRecordedData(sessionToken: number) {
323
+        if (this._adapters[sessionToken]) {
324
+            this._adapters[sessionToken].exportRecordedData()
325
+                .then(args => {
326
+                    const { data, format } = args;
327
+
328
+                    const filename = `session_${sessionToken}`
329
+                        + `_${this._conference.myUserId()}.${format}`;
330
+
331
+                    downloadBlob(data, filename);
332
+                })
333
+                .catch(error => {
334
+                    logger.error('Failed to download audio for'
335
+                        + ` session ${sessionToken}. Error: ${error}`);
336
+                });
337
+        } else {
338
+            logger.error(`Invalid session token for download ${sessionToken}`);
339
+        }
340
+    }
341
+
342
+    /**
343
+     * Changes the current microphone.
344
+     *
345
+     * @param {string} micDeviceId - The new microphone device ID.
346
+     * @returns {void}
347
+     */
348
+    setMicDevice(micDeviceId: string) {
349
+        if (micDeviceId !== this._micDeviceId) {
350
+            this._micDeviceId = String(micDeviceId);
351
+
352
+            if (this._state === ControllerState.RECORDING) {
353
+                // sessionManager.endSegment(this._currentSessionToken);
354
+                logger.log('Before switching microphone...');
355
+                this._adapters[this._currentSessionToken]
356
+                    .setMicDevice(this._micDeviceId)
357
+                    .then(() => {
358
+                        logger.log('Finished switching microphone.');
359
+
360
+                        // sessionManager.beginSegment(this._currentSesoken);
361
+                    })
362
+                    .catch(() => {
363
+                        logger.error('Failed to switch microphone');
364
+                    });
365
+            }
366
+            logger.log(`Switch microphone to ${this._micDeviceId}`);
367
+        }
368
+    }
369
+
370
+    /**
371
+     * Mute or unmute audio. When muted, the ongoing local recording should
372
+     * produce silence.
373
+     *
374
+     * @param {boolean} muted - If the audio should be muted.
375
+     * @returns {void}
376
+     */
377
+    setMuted(muted: boolean) {
378
+        this._isMuted = Boolean(muted);
379
+
380
+        if (this._state === ControllerState.RECORDING) {
381
+            this._adapters[this._currentSessionToken].setMuted(this._isMuted);
382
+        }
383
+    }
384
+
385
+    /**
386
+     * Switches the recording format.
387
+     *
388
+     * @param {string} newFormat - The new format.
389
+     * @returns {void}
390
+     */
391
+    switchFormat(newFormat: string) {
392
+        if (!RECORDING_FORMATS.has(newFormat)) {
393
+            logger.log(`Unknown format ${newFormat}. Ignoring...`);
394
+
395
+            return;
396
+        }
397
+        this._format = newFormat;
398
+        logger.log(`Recording format switched to ${newFormat}`);
399
+
400
+        // the new format will be used in the next recording session
401
+    }
402
+
403
+    /**
404
+     * Returns the local recording stats.
405
+     *
406
+     * @returns {RecordingStats}
407
+     */
408
+    getLocalStats(): RecordingStats {
409
+        return {
410
+            currentSessionToken: this._currentSessionToken,
411
+            isRecording: this._state === ControllerState.RECORDING,
412
+            recordedBytes: 0,
413
+            recordedLength: 0
414
+        };
415
+    }
416
+
417
+    getParticipantsStats: () => *;
418
+
419
+    /**
420
+     * Returns the remote participants' local recording stats.
421
+     *
422
+     * @returns {*}
423
+     */
424
+    getParticipantsStats() {
425
+        const members
426
+            = this._conference.getParticipants()
427
+            .map(member => {
428
+                return {
429
+                    id: member.getId(),
430
+                    displayName: member.getDisplayName(),
431
+                    recordingStats:
432
+                        JSON.parse(member.getProperty(PROPERTY_STATS) || '{}'),
433
+                    isSelf: false
434
+                };
435
+            });
436
+
437
+        // transform into a dictionary for consistent ordering
438
+        const result = {};
439
+
440
+        for (let i = 0; i < members.length; ++i) {
441
+            result[members[i].id] = members[i];
442
+        }
443
+        const localId = this._conference.myUserId();
444
+
445
+        result[localId] = {
446
+            id: localId,
447
+            displayName: i18next.t('localRecording.me'),
448
+            recordingStats: this.getLocalStats(),
449
+            isSelf: true
450
+        };
451
+
452
+        return result;
453
+    }
454
+
455
+    _changeState: (Symbol) => void;
456
+
457
+    /**
458
+     * Changes the current state of {@code RecordingController}.
459
+     *
460
+     * @private
461
+     * @param {Symbol} newState - The new state.
462
+     * @returns {void}
463
+     */
464
+    _changeState(newState: Symbol) {
465
+        if (this._state !== newState) {
466
+            logger.log(`state change: ${this._state.toString()} -> `
467
+                + `${newState.toString()}`);
468
+            this._state = newState;
469
+        }
470
+    }
471
+
472
+    _updateStats: () => void;
473
+
474
+    /**
475
+     * Sends out updates about the local recording stats via XMPP.
476
+     *
477
+     * @private
478
+     * @returns {void}
479
+     */
480
+    _updateStats() {
481
+        if (this._conference) {
482
+            this._conference.setLocalParticipantProperty(PROPERTY_STATS,
483
+                JSON.stringify(this.getLocalStats()));
484
+        }
485
+    }
486
+
487
+    _onStartCommand: (*) => void;
488
+
489
+    /**
490
+     * Callback function for XMPP event.
491
+     *
492
+     * @private
493
+     * @param {*} value - The event args.
494
+     * @returns {void}
495
+     */
496
+    _onStartCommand(value) {
497
+        const { sessionToken, format } = value.attributes;
498
+
499
+        if (this._state === ControllerState.IDLE) {
500
+            this._changeState(ControllerState.STARTING);
501
+            this._switchToNewSession(sessionToken, format);
502
+            this._doStartRecording();
503
+        } else if (this._state === ControllerState.RECORDING
504
+            && this._currentSessionToken !== sessionToken) {
505
+            // There is local recording going on, but not for the same session.
506
+            // This means the current state might be out-of-sync with the
507
+            // moderator's, so we need to restart the recording.
508
+            this._changeState(ControllerState.STOPPING);
509
+            this._doStopRecording().then(() => {
510
+                this._changeState(ControllerState.STARTING);
511
+                this._switchToNewSession(sessionToken, format);
512
+                this._doStartRecording();
513
+            });
514
+        }
515
+    }
516
+
517
+    _onStopCommand: (*) => void;
518
+
519
+    /**
520
+     * Callback function for XMPP event.
521
+     *
522
+     * @private
523
+     * @param {*} value - The event args.
524
+     * @returns {void}
525
+     */
526
+    _onStopCommand(value) {
527
+        if (this._state === ControllerState.RECORDING
528
+            && this._currentSessionToken === value.attributes.sessionToken) {
529
+            this._changeState(ControllerState.STOPPING);
530
+            this._doStopRecording();
531
+        }
532
+    }
533
+
534
+    _onPingCommand: () => void;
535
+
536
+    /**
537
+     * Callback function for XMPP event.
538
+     *
539
+     * @private
540
+     * @returns {void}
541
+     */
542
+    _onPingCommand() {
543
+        if (this._conference.isModerator()) {
544
+            logger.log('Received ping, sending pong.');
545
+            this._conference.sendCommandOnce(COMMAND_PONG, {});
546
+        }
547
+    }
548
+
549
+    /**
550
+     * Generates a token that can be used to distinguish each local recording
551
+     * session.
552
+     *
553
+     * @returns {number}
554
+     */
555
+    _getRandomToken() {
556
+        return Math.floor(Math.random() * 100000000) + 1;
557
+    }
558
+
559
+    _doStartRecording: () => void;
560
+
561
+    /**
562
+     * Starts the recording locally.
563
+     *
564
+     * @private
565
+     * @returns {void}
566
+     */
567
+    _doStartRecording() {
568
+        if (this._state === ControllerState.STARTING) {
569
+            const delegate = this._adapters[this._currentSessionToken];
570
+
571
+            delegate.start(this._micDeviceId)
572
+            .then(() => {
573
+                this._changeState(ControllerState.RECORDING);
574
+                sessionManager.beginSegment(this._currentSessionToken);
575
+                logger.log('Local recording engaged.');
576
+
577
+                if (this._onNotify) {
578
+                    this._onNotify('localRecording.messages.engaged');
579
+                }
580
+                if (this._onStateChanged) {
581
+                    this._onStateChanged(true);
582
+                }
583
+
584
+                delegate.setMuted(this._isMuted);
585
+                this._updateStats();
586
+            })
587
+            .catch(err => {
588
+                logger.error('Failed to start local recording.', err);
589
+            });
590
+        }
591
+
592
+    }
593
+
594
+    _doStopRecording: () => Promise<void>;
595
+
596
+    /**
597
+     * Stops the recording locally.
598
+     *
599
+     * @private
600
+     * @returns {Promise<void>}
601
+     */
602
+    _doStopRecording() {
603
+        if (this._state === ControllerState.STOPPING) {
604
+            const token = this._currentSessionToken;
605
+
606
+            return this._adapters[this._currentSessionToken]
607
+                .stop()
608
+                .then(() => {
609
+                    this._changeState(ControllerState.IDLE);
610
+                    sessionManager.endSegment(this._currentSessionToken);
611
+                    logger.log('Local recording unengaged.');
612
+                    this.downloadRecordedData(token);
613
+
614
+                    const messageKey
615
+                        = this._conference.isModerator()
616
+                            ? 'localRecording.messages.finishedModerator'
617
+                            : 'localRecording.messages.finished';
618
+                    const messageParams = {
619
+                        token
620
+                    };
621
+
622
+                    if (this._onNotify) {
623
+                        this._onNotify(messageKey, messageParams);
624
+                    }
625
+                    if (this._onStateChanged) {
626
+                        this._onStateChanged(false);
627
+                    }
628
+                    this._updateStats();
629
+                })
630
+                .catch(err => {
631
+                    logger.error('Failed to stop local recording.', err);
632
+                });
633
+        }
634
+
635
+        /* eslint-disable */
636
+        return (Promise.resolve(): Promise<void>); 
637
+        // FIXME: better ways to satisfy flow and ESLint at the same time?
638
+        /* eslint-enable */
639
+
640
+    }
641
+
642
+    _switchToNewSession: (string, string) => void;
643
+
644
+    /**
645
+     * Switches to a new local recording session.
646
+     *
647
+     * @param {string} sessionToken - The session Token.
648
+     * @param {string} format - The recording format for the session.
649
+     * @returns {void}
650
+     */
651
+    _switchToNewSession(sessionToken, format) {
652
+        this._format = format;
653
+        this._currentSessionToken = sessionToken;
654
+        logger.log(`New session: ${this._currentSessionToken}, `
655
+            + `format: ${this._format}`);
656
+        this._adapters[sessionToken]
657
+             = this._createRecordingAdapter();
658
+        sessionManager.createSession(sessionToken, this._format);
659
+    }
660
+
661
+    /**
662
+     * Creates a recording adapter according to the current recording format.
663
+     *
664
+     * @private
665
+     * @returns {RecordingAdapter}
666
+     */
667
+    _createRecordingAdapter() {
668
+        logger.debug('[RecordingController] creating recording'
669
+            + ` adapter for ${this._format} format.`);
670
+
671
+        switch (this._format) {
672
+        case 'ogg':
673
+            return new OggAdapter();
674
+        case 'flac':
675
+            return new FlacAdapter();
676
+        case 'wav':
677
+            return new WavAdapter();
678
+        default:
679
+            throw new Error(`Unknown format: ${this._format}`);
680
+        }
681
+    }
682
+}
683
+
684
+/**
685
+ * Global singleton of {@code RecordingController}.
686
+ */
687
+export const recordingController = new RecordingController();
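Note on the download step above: `downloadRecordedData(token)` is defined earlier in this file, outside this hunk. A minimal sketch of how that step plausibly combines the adapter API with the `downloadBlob()` helper from recording/Utils.js further down in this diff — illustrative only, the PR's actual implementation may differ:

    // Hypothetical sketch, not the exact code from the PR.
    downloadRecordedData(sessionToken) {
        if (this._adapters[sessionToken]) {
            this._adapters[sessionToken]
                .exportRecordedData()
                .then(({ data, format }) =>
                    downloadBlob(data, `session_${sessionToken}.${format}`))
                .catch(error =>
                    logger.error('Failed to download audio for'
                        + ` session ${sessionToken}.`, error));
        } else {
            logger.error(`Invalid session token ${sessionToken}`);
        }
    }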

+ 1
- 0
react/features/local-recording/controller/index.js View file

@@ -0,0 +1 @@
1
+export * from './RecordingController';

+ 7
- 0
react/features/local-recording/index.js View file

@@ -0,0 +1,7 @@
1
+export * from './actions';
2
+export * from './actionTypes';
3
+export * from './components';
4
+export * from './controller';
5
+
6
+import './middleware';
7
+import './reducer';

+ 92
- 0
react/features/local-recording/middleware.js View file

@@ -0,0 +1,92 @@
1
+/* @flow */
2
+
3
+import { createShortcutEvent, sendAnalytics } from '../analytics';
4
+import { APP_WILL_MOUNT, APP_WILL_UNMOUNT } from '../base/app';
5
+import { CONFERENCE_JOINED } from '../base/conference';
6
+import { toggleDialog } from '../base/dialog';
7
+import { i18next } from '../base/i18n';
8
+import { SET_AUDIO_MUTED } from '../base/media';
9
+import { MiddlewareRegistry } from '../base/redux';
10
+import { SETTINGS_UPDATED } from '../base/settings/actionTypes';
11
+import { showNotification } from '../notifications';
12
+
13
+import { localRecordingEngaged, localRecordingUnengaged } from './actions';
14
+import { LocalRecordingInfoDialog } from './components';
15
+import { recordingController } from './controller';
16
+
17
+declare var APP: Object;
18
+declare var config: Object;
19
+
20
+const isFeatureEnabled = typeof config === 'object' && config.localRecording
21
+    && config.localRecording.enabled === true;
22
+
23
+isFeatureEnabled
24
+&& MiddlewareRegistry.register(({ getState, dispatch }) => next => action => {
25
+    const result = next(action);
26
+
27
+    switch (action.type) {
28
+    case CONFERENCE_JOINED: {
29
+        const { conference } = getState()['features/base/conference'];
30
+        const { localRecording } = getState()['features/base/config'];
31
+
32
+        if (localRecording && localRecording.format) {
33
+            recordingController.switchFormat(localRecording.format);
34
+        }
35
+
36
+        recordingController.registerEvents(conference);
37
+        break;
38
+    }
39
+    case APP_WILL_MOUNT:
40
+
41
+        // Set up the delegates on recordingController, allowing the UI to
42
+        // react to state changes in recordingController.
43
+        recordingController.onStateChanged = isEngaged => {
44
+            if (isEngaged) {
45
+                const nowTime = new Date();
46
+
47
+                dispatch(localRecordingEngaged(nowTime));
48
+            } else {
49
+                dispatch(localRecordingUnengaged());
50
+            }
51
+        };
52
+
53
+        recordingController.onWarning = (messageKey, messageParams) => {
54
+            dispatch(showNotification({
55
+                title: i18next.t('localRecording.localRecording'),
56
+                description: i18next.t(messageKey, messageParams)
57
+            }, 10000));
58
+        };
59
+
60
+        recordingController.onNotify = (messageKey, messageParams) => {
61
+            dispatch(showNotification({
62
+                title: i18next.t('localRecording.localRecording'),
63
+                description: i18next.t(messageKey, messageParams)
64
+            }, 10000));
65
+        };
66
+
67
+        typeof APP === 'object' && typeof APP.keyboardshortcut === 'object'
68
+            && APP.keyboardshortcut.registerShortcut('L', null, () => {
69
+                sendAnalytics(createShortcutEvent('local.recording'));
70
+                dispatch(toggleDialog(LocalRecordingInfoDialog));
71
+            }, 'keyboardShortcuts.localRecording');
72
+        break;
73
+    case APP_WILL_UNMOUNT:
74
+        recordingController.onStateChanged = null;
75
+        recordingController.onNotify = null;
76
+        recordingController.onWarning = null;
77
+        break;
78
+    case SET_AUDIO_MUTED:
79
+        recordingController.setMuted(action.muted);
80
+        break;
81
+    case SETTINGS_UPDATED: {
82
+        const { micDeviceId } = getState()['features/base/settings'];
83
+
84
+        if (micDeviceId) {
85
+            recordingController.setMicDevice(micDeviceId);
86
+        }
87
+        break;
88
+    }
89
+    }
90
+
91
+    return result;
92
+});
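The middleware is only registered when the deployment opts in via config.js (a config.js hunk is part of this PR but not shown in this portion of the diff). A minimal sketch of the relevant block, using only the two keys the middleware above actually reads:

    // config.js (sketch) — `enabled` gates registration of the middleware,
    // `format` is forwarded to recordingController.switchFormat() on CONFERENCE_JOINED.
    localRecording: {
        enabled: true,
        format: 'flac' // one of 'flac', 'wav' or 'ogg'
    }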

+ 129
- 0
react/features/local-recording/recording/AbstractAudioContextAdapter.js View file

@@ -0,0 +1,129 @@
1
+import { RecordingAdapter } from './RecordingAdapter';
2
+
3
+const logger = require('jitsi-meet-logger').getLogger(__filename);
4
+
5
+/**
6
+ * Base class for {@code AudioContext}-based recording adapters.
7
+ */
8
+export class AbstractAudioContextAdapter extends RecordingAdapter {
9
+    /**
10
+     * The {@code AudioContext} instance.
11
+     */
12
+    _audioContext = null;
13
+
14
+    /**
15
+     * The {@code ScriptProcessorNode} instance.
16
+     */
17
+    _audioProcessingNode = null;
18
+
19
+    /**
20
+     * The {@code MediaStreamAudioSourceNode} instance.
21
+     */
22
+    _audioSource = null;
23
+
24
+    /**
25
+     * The {@code MediaStream} instance, representing the current audio device.
26
+     */
27
+    _stream = null;
28
+
29
+    /**
30
+     * Sample rate.
31
+     */
32
+    _sampleRate = 44100;
33
+
34
+    /**
35
+     * Constructor.
36
+     */
37
+    constructor() {
38
+        super();
39
+
40
+        // sampleRate is browser and OS dependent.
41
+        // Setting sampleRate explicitly is in the specs but not implemented
42
+        // by browsers.
43
+        // See: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/
44
+        //    AudioContext#Browser_compatibility
45
+        // And https://bugs.chromium.org/p/chromium/issues/detail?id=432248
46
+
47
+        this._audioContext = new AudioContext();
48
+        this._sampleRate = this._audioContext.sampleRate;
49
+        logger.log(`Current sampleRate ${this._sampleRate}.`);
50
+    }
51
+
52
+    /**
53
+     * Sets up the audio graph in the AudioContext.
54
+     *
55
+     * @protected
56
+     * @param {string} micDeviceId - The current microphone device ID.
57
+     * @param {Function} callback - Callback function to
58
+     * handle AudioProcessingEvents.
59
+     * @returns {Promise}
60
+     */
61
+    _initializeAudioContext(micDeviceId, callback) {
62
+        if (typeof callback !== 'function') {
63
+            return Promise.reject('a callback function is required.');
64
+        }
65
+
66
+        return this._getAudioStream(micDeviceId)
67
+        .then(stream => {
68
+            this._stream = stream;
69
+            this._audioSource
70
+                = this._audioContext.createMediaStreamSource(stream);
71
+            this._audioProcessingNode
72
+                = this._audioContext.createScriptProcessor(4096, 1, 1);
73
+            this._audioProcessingNode.onaudioprocess = callback;
74
+            logger.debug('AudioContext is set up.');
75
+        })
76
+        .catch(err => {
77
+            logger.error(`Error calling getUserMedia(): ${err}`);
78
+
79
+            return Promise.reject(err);
80
+        });
81
+    }
82
+
83
+    /**
84
+     * Connects the nodes in the {@code AudioContext} to start the flow of
85
+     * audio data.
86
+     *
87
+     * @protected
88
+     * @returns {void}
89
+     */
90
+    _connectAudioGraph() {
91
+        this._audioSource.connect(this._audioProcessingNode);
92
+        this._audioProcessingNode.connect(this._audioContext.destination);
93
+    }
94
+
95
+    /**
96
+     * Disconnects the nodes in the {@code AudioContext}.
97
+     *
98
+     * @protected
99
+     * @returns {void}
100
+     */
101
+    _disconnectAudioGraph() {
102
+        this._audioProcessingNode.onaudioprocess = undefined;
103
+        this._audioProcessingNode.disconnect();
104
+        this._audioSource.disconnect();
105
+    }
106
+
107
+    /**
108
+     * Replaces the current microphone MediaStream.
109
+     *
110
+     * @protected
111
+     * @param {string} micDeviceId - New microphone ID.
112
+     * @returns {Promise}
113
+     */
114
+    _replaceMic(micDeviceId) {
115
+        if (this._audioContext && this._audioProcessingNode) {
116
+            return this._getAudioStream(micDeviceId).then(newStream => {
117
+                const newSource = this._audioContext
118
+                    .createMediaStreamSource(newStream);
119
+
120
+                this._audioSource.disconnect();
121
+                newSource.connect(this._audioProcessingNode);
122
+                this._stream = newStream;
123
+                this._audioSource = newSource;
124
+            });
125
+        }
126
+
127
+        return Promise.resolve();
128
+    }
129
+}
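A concrete adapter built on this base class only has to provide an AudioProcessingEvent callback and decide when to connect and disconnect the graph. A hypothetical minimal subclass, for illustration only (WavAdapter and FlacAdapter below are the real consumers):

    // Illustrative sketch; not part of the PR.
    class PcmDumpAdapter extends AbstractAudioContextAdapter {
        constructor() {
            super();
            this._chunks = [];
            this._onAudioProcess = this._onAudioProcess.bind(this);
        }

        start(micDeviceId) {
            return this._initializeAudioContext(micDeviceId, this._onAudioProcess)
                .then(() => this._connectAudioGraph());
        }

        stop() {
            this._disconnectAudioGraph();

            return Promise.resolve();
        }

        _onAudioProcess(e) {
            // One mono Float32Array of 4096 samples per callback.
            this._chunks.push(new Float32Array(e.inputBuffer.getChannelData(0)));
        }
    }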

+ 143
- 0
react/features/local-recording/recording/OggAdapter.js View file

@@ -0,0 +1,143 @@
1
+import { RecordingAdapter } from './RecordingAdapter';
2
+
3
+const logger = require('jitsi-meet-logger').getLogger(__filename);
4
+
5
+/**
6
+ * Recording adapter that uses {@code MediaRecorder} (default browser encoding
7
+ * with Opus codec).
8
+ */
9
+export class OggAdapter extends RecordingAdapter {
10
+
11
+    /**
12
+     * Instance of MediaRecorder.
13
+     * @private
14
+     */
15
+    _mediaRecorder = null;
16
+
17
+    /**
18
+     * Initialization promise.
19
+     * @private
20
+     */
21
+    _initPromise = null;
22
+
23
+    /**
24
+     * The recorded audio file.
25
+     * @private
26
+     */
27
+    _recordedData = null;
28
+
29
+    /**
30
+     * Implements {@link RecordingAdapter#start()}.
31
+     *
32
+     * @inheritdoc
33
+     */
34
+    start(micDeviceId) {
35
+        if (!this._initPromise) {
36
+            this._initPromise = this._initialize(micDeviceId);
37
+        }
38
+
39
+        return this._initPromise.then(() =>
40
+            new Promise(resolve => {
41
+                this._mediaRecorder.start();
42
+                resolve();
43
+            })
44
+        );
45
+    }
46
+
47
+    /**
48
+     * Implements {@link RecordingAdapter#stop()}.
49
+     *
50
+     * @inheritdoc
51
+     */
52
+    stop() {
53
+        return new Promise(
54
+            resolve => {
55
+                this._mediaRecorder.onstop = () => resolve();
56
+                this._mediaRecorder.stop();
57
+            }
58
+        );
59
+    }
60
+
61
+    /**
62
+     * Implements {@link RecordingAdapter#exportRecordedData()}.
63
+     *
64
+     * @inheritdoc
65
+     */
66
+    exportRecordedData() {
67
+        if (this._recordedData !== null) {
68
+            return Promise.resolve({
69
+                data: this._recordedData,
70
+                format: 'ogg'
71
+            });
72
+        }
73
+
74
+        return Promise.reject('No audio data recorded.');
75
+    }
76
+
77
+    /**
78
+     * Implements {@link RecordingAdapter#setMuted()}.
79
+     *
80
+     * @inheritdoc
81
+     */
82
+    setMuted(muted) {
83
+        const shouldEnable = !muted;
84
+
85
+        if (!this._stream) {
86
+            return Promise.resolve();
87
+        }
88
+
89
+        const track = this._stream.getAudioTracks()[0];
90
+
91
+        if (!track) {
92
+            logger.error('Cannot mute/unmute. Track not found!');
93
+
94
+            return Promise.resolve();
95
+        }
96
+
97
+        if (track.enabled !== shouldEnable) {
98
+            track.enabled = shouldEnable;
99
+            logger.log(muted ? 'Mute' : 'Unmute');
100
+        }
101
+
102
+        return Promise.resolve();
103
+    }
104
+
105
+    /**
106
+     * Initialize the adapter.
107
+     *
108
+     * @private
109
+     * @param {string} micDeviceId - The current microphone device ID.
110
+     * @returns {Promise}
111
+     */
112
+    _initialize(micDeviceId) {
113
+        if (this._mediaRecorder) {
114
+            return Promise.resolve();
115
+        }
116
+
117
+        return new Promise((resolve, error) => {
118
+            this._getAudioStream(micDeviceId)
119
+            .then(stream => {
120
+                this._stream = stream;
121
+                this._mediaRecorder = new MediaRecorder(stream);
122
+                this._mediaRecorder.ondataavailable
123
+                    = e => this._saveMediaData(e.data);
124
+                resolve();
125
+            })
126
+            .catch(err => {
127
+                logger.error(`Error calling getUserMedia(): ${err}`);
128
+                error();
129
+            });
130
+        });
131
+    }
132
+
133
+    /**
134
+     * Callback for storing the encoded data.
135
+     *
136
+     * @private
137
+     * @param {Blob} data - Encoded data.
138
+     * @returns {void}
139
+     */
140
+    _saveMediaData(data) {
141
+        this._recordedData = data;
142
+    }
143
+}
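From the caller's point of view the adapter lifecycle is start → stop → export. A sketch, assuming a `micDeviceId` is already known; `waitUntilMeetingEnds()` is a placeholder, and `downloadBlob()` is the helper from Utils.js below (in practice the RecordingController drives these calls):

    const adapter = new OggAdapter();

    adapter.start(micDeviceId)                    // lazily creates the MediaRecorder
        .then(() => waitUntilMeetingEnds())       // placeholder for 'some time later'
        .then(() => adapter.stop())               // resolves once MediaRecorder fires onstop
        .then(() => adapter.exportRecordedData())
        .then(({ data, format }) => downloadBlob(data, `recording.${format}`));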

+ 85
- 0
react/features/local-recording/recording/RecordingAdapter.js View file

@@ -0,0 +1,85 @@
1
+import JitsiMeetJS from '../../base/lib-jitsi-meet';
2
+
3
+/**
4
+ * Base class for recording backends.
5
+ */
6
+export class RecordingAdapter {
7
+
8
+    /**
9
+     * Starts recording.
10
+     *
11
+     * @param {string} micDeviceId - The microphone to record on.
12
+     * @returns {Promise}
13
+     */
14
+    start(/* eslint-disable no-unused-vars */
15
+            micDeviceId/* eslint-enable no-unused-vars */) {
16
+        throw new Error('Not implemented');
17
+    }
18
+
19
+    /**
20
+     * Stops recording.
21
+     *
22
+     * @returns {Promise}
23
+     */
24
+    stop() {
25
+        throw new Error('Not implemented');
26
+    }
27
+
28
+    /**
29
+     * Export the recorded and encoded audio file.
30
+     *
31
+     * @returns {Promise<Object>}
32
+     */
33
+    exportRecordedData() {
34
+        throw new Error('Not implemented');
35
+    }
36
+
37
+    /**
38
+     * Mutes or unmutes the current recording.
39
+     *
40
+     * @param {boolean} muted - Whether to mute or to unmute.
41
+     * @returns {Promise}
42
+     */
43
+    setMuted(/* eslint-disable no-unused-vars */
44
+            muted/* eslint-enable no-unused-vars */) {
45
+        throw new Error('Not implemented');
46
+    }
47
+
48
+    /**
49
+     * Changes the current microphone.
50
+     *
51
+     * @param {string} micDeviceId - The new microphone device ID.
52
+     * @returns {Promise}
53
+     */
54
+    setMicDevice(/* eslint-disable no-unused-vars */
55
+            micDeviceId/* eslint-enable no-unused-vars */) {
56
+        throw new Error('Not implemented');
57
+    }
58
+
59
+    /**
60
+     * Helper method for getting an audio {@code MediaStream}. Use this instead
61
+     * of calling browser APIs directly.
62
+     *
63
+     * @protected
64
+     * @param {number} micDeviceId - The ID of the current audio device.
65
+     * @returns {Promise}
66
+     */
67
+    _getAudioStream(micDeviceId) {
68
+        return JitsiMeetJS.createLocalTracks({
69
+            devices: [ 'audio' ],
70
+            micDeviceId
71
+        }).then(result => {
72
+            if (result.length !== 1) {
73
+                throw new Error('Unexpected number of streams '
74
+                    + 'from createLocalTracks.');
75
+            }
76
+            const mediaStream = result[0].stream;
77
+
78
+            if (mediaStream === undefined) {
79
+                throw new Error('Failed to create local track.');
80
+            }
81
+
82
+            return mediaStream;
83
+        });
84
+    }
85
+}
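Concrete backends are expected to obtain their MediaStream through `_getAudioStream()` (which goes through lib-jitsi-meet) rather than calling `navigator.mediaDevices.getUserMedia()` directly. A sketch of the typical pattern inside a hypothetical subclass's initialization code:

    // Inside a hypothetical adapter subclass:
    _initialize(micDeviceId) {
        return this._getAudioStream(micDeviceId).then(stream => {
            this._stream = stream;
            // ...hook the stream into MediaRecorder or an AudioContext graph...
        });
    }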

+ 20
- 0
react/features/local-recording/recording/Utils.js View file

@@ -0,0 +1,20 @@
1
+/**
2
+ * Force download of Blob in browser by faking an <a> tag.
3
+ *
4
+ * @param {Blob} blob - The {@code Blob} to be downloaded.
5
+ * @param {string} fileName - The filename to appear in the download dialog.
6
+ * @returns {void}
7
+ */
8
+export function downloadBlob(blob, fileName = 'recording.ogg') {
9
+    const base64Url = window.URL.createObjectURL(blob);
10
+
11
+    // fake an anchor tag
12
+    const a = document.createElement('a');
13
+
14
+    a.style = 'display: none';
15
+    a.href = base64Url;
16
+    a.download = fileName;
17
+    document.body.appendChild(a);
18
+    a.click();
19
+    document.body.removeChild(a);
20
+}
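The caller is responsible for picking a file name whose extension matches the Blob's actual format, since the default 'recording.ogg' only suits the MediaRecorder/Opus path. For example (illustrative file name):

    // `blob` holds e.g. the FlacAdapter output; choose an extension matching its format.
    downloadBlob(blob, 'session_1234567890.flac');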

+ 290
- 0
react/features/local-recording/recording/WavAdapter.js View file

@@ -0,0 +1,290 @@
1
+import { AbstractAudioContextAdapter } from './AbstractAudioContextAdapter';
2
+
3
+const logger = require('jitsi-meet-logger').getLogger(__filename);
4
+
5
+const WAV_BITS_PER_SAMPLE = 16;
6
+
7
+/**
8
+ * Recording adapter for raw WAVE format.
9
+ */
10
+export class WavAdapter extends AbstractAudioContextAdapter {
11
+
12
+    /**
13
+     * Length of the WAVE file, in number of samples.
14
+     */
15
+    _wavLength = 0;
16
+
17
+    /**
18
+     * The {@code Float32Array}s that store the raw PCM bits.
19
+     */
20
+    _wavBuffers = [];
21
+
22
+    /**
23
+     * Whether or not the {@code WavAdapter} is in a ready state.
24
+     */
25
+    _isInitialized = false;
26
+
27
+    /**
28
+     * Initialization promise.
29
+     */
30
+    _initPromise = null;
31
+
32
+    /**
33
+     * Constructor.
34
+     */
35
+    constructor() {
36
+        super();
37
+        this._onAudioProcess = this._onAudioProcess.bind(this);
38
+    }
39
+
40
+    /**
41
+     * Implements {@link RecordingAdapter#start()}.
42
+     *
43
+     * @inheritdoc
44
+     */
45
+    start(micDeviceId) {
46
+        if (!this._initPromise) {
47
+            this._initPromise = this._initialize(micDeviceId);
48
+        }
49
+
50
+        return this._initPromise.then(() => {
51
+            this._wavBuffers = [];
52
+            this._wavLength = 0;
53
+
54
+            this._connectAudioGraph();
55
+        });
56
+    }
57
+
58
+    /**
59
+     * Implements {@link RecordingAdapter#stop()}.
60
+     *
61
+     * @inheritdoc
62
+     */
63
+    stop() {
64
+        this._disconnectAudioGraph();
65
+        this._data = this._exportMonoWAV(this._wavBuffers, this._wavLength);
66
+        this._audioProcessingNode = null;
67
+        this._audioSource = null;
68
+        this._isInitialized = false;
69
+
70
+        return Promise.resolve();
71
+    }
72
+
73
+    /**
74
+     * Implements {@link RecordingAdapter#exportRecordedData()}.
75
+     *
76
+     * @inheritdoc
77
+     */
78
+    exportRecordedData() {
79
+        if (this._data !== null) {
80
+            return Promise.resolve({
81
+                data: this._data,
82
+                format: 'wav'
83
+            });
84
+        }
85
+
86
+        return Promise.reject('No audio data recorded.');
87
+    }
88
+
89
+    /**
90
+     * Implements {@link RecordingAdapter#setMuted()}.
91
+     *
92
+     * @inheritdoc
93
+     */
94
+    setMuted(muted) {
95
+        const shouldEnable = !muted;
96
+
97
+        if (!this._stream) {
98
+            return Promise.resolve();
99
+        }
100
+
101
+        const track = this._stream.getAudioTracks()[0];
102
+
103
+        if (!track) {
104
+            logger.error('Cannot mute/unmute. Track not found!');
105
+
106
+            return Promise.resolve();
107
+        }
108
+
109
+        if (track.enabled !== shouldEnable) {
110
+            track.enabled = shouldEnable;
111
+            logger.log(muted ? 'Mute' : 'Unmute');
112
+        }
113
+
114
+        return Promise.resolve();
115
+    }
116
+
117
+    /**
118
+     * Implements {@link RecordingAdapter#setMicDevice()}.
119
+     *
120
+     * @inheritdoc
121
+     */
122
+    setMicDevice(micDeviceId) {
123
+        return this._replaceMic(micDeviceId);
124
+    }
125
+
126
+    /**
127
+     * Creates a WAVE file header.
128
+     *
129
+     * @private
130
+     * @param {number} dataLength - Length of the payload (PCM data), in bytes.
131
+     * @returns {Uint8Array}
132
+     */
133
+    _createWavHeader(dataLength) {
134
+        // adapted from
135
+        // https://github.com/mmig/speech-to-flac/blob/master/encoder.js
136
+
137
+        // ref: http://soundfile.sapp.org/doc/WaveFormat/
138
+
139
+        // create our WAVE file header
140
+        const buffer = new ArrayBuffer(44);
141
+        const view = new DataView(buffer);
142
+
143
+        // RIFF chunk descriptor
144
+        writeUTFBytes(view, 0, 'RIFF');
145
+
146
+        // set file size at the end
147
+        writeUTFBytes(view, 8, 'WAVE');
148
+
149
+        // FMT sub-chunk
150
+        writeUTFBytes(view, 12, 'fmt ');
151
+        view.setUint32(16, 16, true);
152
+        view.setUint16(20, 1, true);
153
+
154
+        // NumChannels
155
+        view.setUint16(22, 1, true);
156
+
157
+        // SampleRate
158
+        view.setUint32(24, this._sampleRate, true);
159
+
160
+        // ByteRate
161
+        view.setUint32(28,
162
+            Number(this._sampleRate) * 1 * WAV_BITS_PER_SAMPLE / 8, true);
163
+
164
+        // BlockAlign
165
+        view.setUint16(32, 1 * Number(WAV_BITS_PER_SAMPLE) / 8, true);
166
+
167
+        view.setUint16(34, WAV_BITS_PER_SAMPLE, true);
168
+
169
+        // data sub-chunk
170
+        writeUTFBytes(view, 36, 'data');
171
+
172
+        // file length (RIFF chunk size = 36 + data length)
173
+        view.setUint32(4, 36 + dataLength, true);
174
+
175
+        // data chunk length
176
+        view.setUint32(40, dataLength, true);
177
+
178
+        return new Uint8Array(buffer);
179
+    }
180
+
181
+    /**
182
+     * Initialize the adapter.
183
+     *
184
+     * @private
185
+     * @param {string} micDeviceId - The current microphone device ID.
186
+     * @returns {Promise}
187
+     */
188
+    _initialize(micDeviceId) {
189
+        if (this._isInitialized) {
190
+            return Promise.resolve();
191
+        }
192
+
193
+        return this._initializeAudioContext(micDeviceId, this._onAudioProcess)
194
+            .then(() => {
195
+                this._isInitialized = true;
196
+            });
197
+    }
198
+
199
+    /**
200
+     * Callback function for handling AudioProcessingEvents.
201
+     *
202
+     * @private
203
+     * @param {AudioProcessingEvent} e - The event containing the raw PCM.
204
+     * @returns {void}
205
+     */
206
+    _onAudioProcess(e) {
207
+        // See: https://developer.mozilla.org/en-US/docs/Web/API/
208
+        //      AudioBuffer/getChannelData
209
+        // The returned value is a Float32Array.
210
+        const channelLeft = e.inputBuffer.getChannelData(0);
211
+
212
+        // Need to copy the Float32Array:
213
+        // unlike passing to WebWorker, this data is passed by reference,
214
+        // so we need to copy it, otherwise the resulting audio file will be
215
+        // just repeating the last segment.
216
+        this._wavBuffers.push(new Float32Array(channelLeft));
217
+        this._wavLength += channelLeft.length;
218
+    }
219
+
220
+    /**
221
+     * Combines buffers and export to a wav file.
222
+     *
223
+     * @private
224
+     * @param {Float32Array[]} buffers - The stored buffers.
225
+     * @param {number} length - Total length (number of samples).
226
+     * @returns {Blob}
227
+     */
228
+    _exportMonoWAV(buffers, length) {
229
+        const dataLength = length * 2; // each sample = 16 bit = 2 bytes
230
+        const buffer = new ArrayBuffer(44 + dataLength);
231
+        const view = new DataView(buffer);
232
+
233
+        // copy WAV header data into the array buffer
234
+        const header = this._createWavHeader(dataLength);
235
+        const len = header.length;
236
+
237
+        for (let i = 0; i < len; ++i) {
238
+            view.setUint8(i, header[i]);
239
+        }
240
+
241
+        // write audio data
242
+        floatTo16BitPCM(view, 44, buffers);
243
+
244
+        return new Blob([ view ], { type: 'audio/wav' });
245
+    }
246
+}
247
+
248
+
249
+/**
250
+ * Helper function. Writes a UTF string to memory
251
+ * using big endianness. Required by WAVE headers.
252
+ *
253
+ * @param {DataView} view - The target {@code DataView}.
254
+ * @param {number} offset - Offset.
255
+ * @param {string} string - The string to be written.
256
+ * @returns {void}
257
+ */
258
+function writeUTFBytes(view, offset, string) {
259
+    const lng = string.length;
260
+
261
+    // convert to big endianness
262
+    for (let i = 0; i < lng; ++i) {
263
+        view.setUint8(offset + i, string.charCodeAt(i));
264
+    }
265
+}
266
+
267
+/**
268
+ * Helper function for converting Float32Array to Int16Array.
269
+ *
270
+ * @param {DataView} output - View to the output buffer.
271
+ * @param {number} offset - The offset in output buffer to write from.
272
+ * @param {Float32Array[]} inputBuffers - The input buffers.
273
+ * @returns {void}
274
+ */
275
+function floatTo16BitPCM(output, offset, inputBuffers) {
276
+
277
+    let i, j;
278
+    let input, s, sampleCount;
279
+    const bufferCount = inputBuffers.length;
280
+    let o = offset;
281
+
282
+    for (i = 0; i < bufferCount; ++i) {
283
+        input = inputBuffers[i];
284
+        sampleCount = input.length;
285
+        for (j = 0; j < sampleCount; ++j, o += 2) {
286
+            s = Math.max(-1, Math.min(1, input[j]));
287
+            output.setInt16(o, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
288
+        }
289
+    }
290
+}
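Worked numbers for the header math above, at the default 44100 Hz mono, 16-bit configuration, plus the sample conversion performed by floatTo16BitPCM() (illustrative, mirroring the code above):

    const sampleRate = 44100;
    const numChannels = 1;
    const bitsPerSample = 16;

    const byteRate = sampleRate * numChannels * bitsPerSample / 8;  // 88200
    const blockAlign = numChannels * bitsPerSample / 8;             // 2

    // A Float32 sample in [-1, 1] is clamped and scaled onto the int16 range:
    const toInt16 = s => {
        const clamped = Math.max(-1, Math.min(1, s));

        return clamped < 0 ? clamped * 0x8000 : clamped * 0x7FFF;
    };

    toInt16(-1);   // -32768
    toInt16(1);    //  32767
    toInt16(0.5);  //  16383.5 (truncated to 16383 by DataView.setInt16)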

+ 262
- 0
react/features/local-recording/recording/flac/FlacAdapter.js View file

@@ -0,0 +1,262 @@
1
+import {
2
+    DEBUG,
3
+    MAIN_THREAD_FINISH,
4
+    MAIN_THREAD_INIT,
5
+    MAIN_THREAD_NEW_DATA_ARRIVED,
6
+    WORKER_BLOB_READY,
7
+    WORKER_LIBFLAC_READY
8
+} from './messageTypes';
9
+
10
+import { AbstractAudioContextAdapter } from '../AbstractAudioContextAdapter';
11
+
12
+const logger = require('jitsi-meet-logger').getLogger(__filename);
13
+
14
+/**
15
+ * Recording adapter that uses libflac.js in the background.
16
+ */
17
+export class FlacAdapter extends AbstractAudioContextAdapter {
18
+
19
+    /**
20
+     * Instance of WebWorker (flacEncodeWorker).
21
+     */
22
+    _encoder = null;
23
+
24
+    /**
25
+     * Resolve function of the Promise returned by {@code stop()}.
26
+     * This is called after the WebWorker sends back {@code WORKER_BLOB_READY}.
27
+     */
28
+    _stopPromiseResolver = null;
29
+
30
+    /**
31
+     * Resolve function of the Promise that initializes the flacEncodeWorker.
32
+     */
33
+    _initWorkerPromiseResolver = null;
34
+
35
+    /**
36
+     * Initialization promise.
37
+     */
38
+    _initPromise = null;
39
+
40
+    /**
41
+     * Constructor.
42
+     */
43
+    constructor() {
44
+        super();
45
+        this._onAudioProcess = this._onAudioProcess.bind(this);
46
+        this._onWorkerMessage = this._onWorkerMessage.bind(this);
47
+    }
48
+
49
+    /**
50
+     * Implements {@link RecordingAdapter#start()}.
51
+     *
52
+     * @inheritdoc
53
+     */
54
+    start(micDeviceId) {
55
+        if (!this._initPromise) {
56
+            this._initPromise = this._initialize(micDeviceId);
57
+        }
58
+
59
+        return this._initPromise.then(() => {
60
+            this._connectAudioGraph();
61
+        });
62
+    }
63
+
64
+    /**
65
+     * Implements {@link RecordingAdapter#stop()}.
66
+     *
67
+     * @inheritdoc
68
+     */
69
+    stop() {
70
+        if (!this._encoder) {
71
+            logger.error('Attempting to stop, but there is nothing to stop.');
72
+
73
+            return Promise.reject();
74
+        }
75
+
76
+        return new Promise(resolve => {
77
+            this._initPromise = null;
78
+            this._disconnectAudioGraph();
79
+            this._stopPromiseResolver = resolve;
80
+            this._encoder.postMessage({
81
+                command: MAIN_THREAD_FINISH
82
+            });
83
+        });
84
+    }
85
+
86
+    /**
87
+     * Implements {@link RecordingAdapter#exportRecordedData()}.
88
+     *
89
+     * @inheritdoc
90
+     */
91
+    exportRecordedData() {
92
+        if (this._data !== null) {
93
+            return Promise.resolve({
94
+                data: this._data,
95
+                format: 'flac'
96
+            });
97
+        }
98
+
99
+        return Promise.reject('No audio data recorded.');
100
+    }
101
+
102
+    /**
103
+     * Implements {@link RecordingAdapter#setMuted()}.
104
+     *
105
+     * @inheritdoc
106
+     */
107
+    setMuted(muted) {
108
+        const shouldEnable = !muted;
109
+
110
+        if (!this._stream) {
111
+            return Promise.resolve();
112
+        }
113
+
114
+        const track = this._stream.getAudioTracks()[0];
115
+
116
+        if (!track) {
117
+            logger.error('Cannot mute/unmute. Track not found!');
118
+
119
+            return Promise.resolve();
120
+        }
121
+
122
+        if (track.enabled !== shouldEnable) {
123
+            track.enabled = shouldEnable;
124
+            logger.log(muted ? 'Mute' : 'Unmute');
125
+        }
126
+
127
+        return Promise.resolve();
128
+    }
129
+
130
+    /**
131
+     * Implements {@link RecordingAdapter#setMicDevice()}.
132
+     *
133
+     * @inheritdoc
134
+     */
135
+    setMicDevice(micDeviceId) {
136
+        return this._replaceMic(micDeviceId);
137
+    }
138
+
139
+    /**
140
+     * Initialize the adapter.
141
+     *
142
+     * @private
143
+     * @param {string} micDeviceId - The current microphone device ID.
144
+     * @returns {Promise}
145
+     */
146
+    _initialize(micDeviceId) {
147
+        if (this._encoder !== null) {
148
+            return Promise.resolve();
149
+        }
150
+
151
+        const promiseInitWorker = new Promise((resolve, reject) => {
152
+            try {
153
+                this._loadWebWorker();
154
+            } catch (e) {
155
+                reject();
156
+            }
157
+
158
+            // Save the Promise's resolver to resolve it later.
159
+            // This Promise is only resolved in _onWorkerMessage when we
160
+            // receive WORKER_LIBFLAC_READY from the WebWorker.
161
+            this._initWorkerPromiseResolver = resolve;
162
+
163
+            // set up listener for messages from the WebWorker
164
+            this._encoder.onmessage = this._onWorkerMessage;
165
+
166
+            this._encoder.postMessage({
167
+                command: MAIN_THREAD_INIT,
168
+                config: {
169
+                    sampleRate: this._sampleRate,
170
+                    bps: 16
171
+                }
172
+            });
173
+        });
174
+
175
+        // Arrow function is used here because we want AudioContext to be
176
+        // initialized only **after** promiseInitWorker is resolved.
177
+        return promiseInitWorker
178
+            .then(() =>
179
+                this._initializeAudioContext(
180
+                    micDeviceId,
181
+                    this._onAudioProcess
182
+                ));
183
+    }
184
+
185
+    /**
186
+     * Callback function for handling AudioProcessingEvents.
187
+     *
188
+     * @private
189
+     * @param {AudioProcessingEvent} e - The event containing the raw PCM.
190
+     * @returns {void}
191
+     */
192
+    _onAudioProcess(e) {
193
+        // Delegates to the WebWorker to do the encoding.
194
+        // The return of getChannelData() is a Float32Array,
195
+        // each element representing one sample.
196
+        const channelLeft = e.inputBuffer.getChannelData(0);
197
+
198
+        this._encoder.postMessage({
199
+            command: MAIN_THREAD_NEW_DATA_ARRIVED,
200
+            buf: channelLeft
201
+        });
202
+    }
203
+
204
+    /**
205
+     * Handler for messages from flacEncodeWorker.
206
+     *
207
+     * @private
208
+     * @param {MessageEvent} e - The event sent by the WebWorker.
209
+     * @returns {void}
210
+     */
211
+    _onWorkerMessage(e) {
212
+        switch (e.data.command) {
213
+        case WORKER_BLOB_READY:
214
+            // Received a Blob representing an encoded FLAC file.
215
+            this._data = e.data.buf;
216
+            if (this._stopPromiseResolver !== null) {
217
+                this._stopPromiseResolver();
218
+                this._stopPromiseResolver = null;
219
+                this._encoder.terminate();
220
+                this._encoder = null;
221
+            }
222
+            break;
223
+        case DEBUG:
224
+            logger.log(e.data);
225
+            break;
226
+        case WORKER_LIBFLAC_READY:
227
+            logger.log('libflac is ready.');
228
+            this._initWorkerPromiseResolver();
229
+            break;
230
+        default:
231
+            logger.error(
232
+                `Unknown event
233
+                from encoder (WebWorker): "${e.data.command}"!`);
234
+            break;
235
+        }
236
+    }
237
+
238
+    /**
239
+     * Loads the WebWorker.
240
+     *
241
+     * @private
242
+     * @returns {void}
243
+     */
244
+    _loadWebWorker() {
245
+        // FIXME: Workaround for different file names in development/
246
+        // production environments.
247
+        // We cannot import flacEncodeWorker as a webpack module,
248
+        // because it is in a different bundle and should be lazy-loaded
249
+        // only when flac recording is in use.
250
+        try {
251
+            // try load the minified version first
252
+            this._encoder = new Worker('/libs/flacEncodeWorker.min.js');
253
+        } catch (exception1) {
254
+            // if failed, try unminified version
255
+            try {
256
+                this._encoder = new Worker('/libs/flacEncodeWorker.js');
257
+            } catch (exception2) {
258
+                throw new Error('Failed to load flacEncodeWorker.');
259
+            }
260
+        }
261
+    }
262
+}

+ 397
- 0
react/features/local-recording/recording/flac/flacEncodeWorker.js View file

@@ -0,0 +1,397 @@
1
+import {
2
+    MAIN_THREAD_FINISH,
3
+    MAIN_THREAD_INIT,
4
+    MAIN_THREAD_NEW_DATA_ARRIVED,
5
+    WORKER_BLOB_READY,
6
+    WORKER_LIBFLAC_READY
7
+} from './messageTypes';
8
+
9
+const logger = require('jitsi-meet-logger').getLogger(__filename);
10
+
11
+/**
12
+ * WebWorker that does FLAC encoding using libflac.js
13
+ */
14
+
15
+self.FLAC_SCRIPT_LOCATION = '/libs/';
16
+/* eslint-disable */
17
+importScripts('/libs/libflac4-1.3.2.min.js');
18
+/* eslint-enable */
19
+
20
+// There are a number of API calls to libflac.js that do not conform
21
+// to the camelCase naming convention, but we cannot change them.
22
+// So we disable the ESLint rule `new-cap` in this file.
23
+/* eslint-disable new-cap */
24
+
25
+// Flow will complain about the number keys in `FLAC_ERRORS`,
26
+// ESLint will complain about the `declare` statement.
27
+// As the current workaround, add an exception for eslint.
28
+/* eslint-disable flowtype/no-types-missing-file-annotation */
29
+declare var Flac: Object;
30
+
31
+const FLAC_ERRORS = {
32
+    // The encoder is in the normal OK state and samples can be processed.
33
+    0: 'FLAC__STREAM_ENCODER_OK',
34
+
35
+    // The encoder is in the uninitialized state; one of the
36
+    // FLAC__stream_encoder_init_*() functions must be called before samples can
37
+    // be processed.
38
+    1: 'FLAC__STREAM_ENCODER_UNINITIALIZED',
39
+
40
+    // An error occurred in the underlying Ogg layer.
41
+    2: 'FLAC__STREAM_ENCODER_OGG_ERROR',
42
+
43
+    // An error occurred in the underlying verify stream decoder; check
44
+    // FLAC__stream_encoder_get_verify_decoder_state().
45
+    3: 'FLAC__STREAM_ENCODER_VERIFY_DECODER_ERROR',
46
+
47
+    // The verify decoder detected a mismatch between the original audio signal
48
+    // and the decoded audio signal.
49
+    4: 'FLAC__STREAM_ENCODER_VERIFY_MISMATCH_IN_AUDIO_DATA',
50
+
51
+    // One of the callbacks returned a fatal error.
52
+    5: 'FLAC__STREAM_ENCODER_CLIENT_ERROR',
53
+
54
+    // An I/O error occurred while opening/reading/writing a file. Check errno.
55
+    6: 'FLAC__STREAM_ENCODER_IO_ERROR',
56
+
57
+    // An error occurred while writing the stream; usually, the write_callback
58
+    // returned an error.
59
+    7: 'FLAC__STREAM_ENCODER_FRAMING_ERROR',
60
+
61
+    // Memory allocation failed.
62
+    8: 'FLAC__STREAM_ENCODER_MEMORY_ALLOCATION_ERROR'
63
+};
64
+
65
+/**
66
+ * States of the {@code Encoder}.
67
+ */
68
+const EncoderState = Object.freeze({
69
+    /**
70
+     * Initial state, when libflac.js is not initialized.
71
+     */
72
+    UNINITIALIZED: Symbol('uninitialized'),
73
+
74
+    /**
75
+     * Actively encoding new audio bits.
76
+     */
77
+    WORKING: Symbol('working'),
78
+
79
+    /**
80
+     * Encoding has finished and encoded bits are available.
81
+     */
82
+    FINISHED: Symbol('finished')
83
+});
84
+
85
+/**
86
+ * Default FLAC compression level.
87
+ */
88
+const FLAC_COMPRESSION_LEVEL = 5;
89
+
90
+/**
91
+ * Concat multiple Uint8Arrays into one.
92
+ *
93
+ * @param {Uint8Array[]} arrays - Array of Uint8 arrays.
94
+ * @param {number} totalLength - Total length of all Uint8Arrays.
95
+ * @returns {Uint8Array}
96
+ */
97
+function mergeUint8Arrays(arrays, totalLength) {
98
+    const result = new Uint8Array(totalLength);
99
+    let offset = 0;
100
+    const len = arrays.length;
101
+
102
+    for (let i = 0; i < len; i++) {
103
+        const buffer = arrays[i];
104
+
105
+        result.set(buffer, offset);
106
+        offset += buffer.length;
107
+    }
108
+
109
+    return result;
110
+}
111
+
112
+/**
113
+ * Wrapper class around libflac API.
114
+ */
115
+class Encoder {
116
+
117
+    /**
118
+     * Flac encoder instance ID. (As per libflac.js API).
119
+     * @private
120
+     */
121
+    _encoderId = 0;
122
+
123
+    /**
124
+     * Sample rate.
125
+     * @private
126
+     */
127
+    _sampleRate;
128
+
129
+    /**
130
+     * Bit depth (bits per sample).
131
+     * @private
132
+     */
133
+    _bitDepth;
134
+
135
+    /**
136
+     * Buffer size.
137
+     * @private
138
+     */
139
+    _bufferSize;
140
+
141
+    /**
142
+     * Buffers to store encoded bits temporarily.
143
+     */
144
+    _flacBuffers = [];
145
+
146
+    /**
147
+     * Length of encoded FLAC bits.
148
+     */
149
+    _flacLength = 0;
150
+
151
+    /**
152
+     * The current state of the {@code Encoder}.
153
+     */
154
+    _state = EncoderState.UNINITIALIZED;
155
+
156
+    /**
157
+     * The ready-for-grab downloadable Blob.
158
+     */
159
+    _data = null;
160
+
161
+
162
+    /**
163
+     * Constructor.
164
+     * Note: only create instance when Flac.isReady() returns true.
165
+     *
166
+     * @param {number} sampleRate - Sample rate of the raw audio data.
167
+     * @param {number} bitDepth - Bit depth (bit per sample).
168
+     * @param {number} bufferSize - The size of each batch.
169
+     */
170
+    constructor(sampleRate, bitDepth = 16, bufferSize = 4096) {
171
+        if (!Flac.isReady()) {
172
+            throw new Error('libflac is not ready yet!');
173
+        }
174
+
175
+        this._sampleRate = sampleRate;
176
+        this._bitDepth = bitDepth;
177
+        this._bufferSize = bufferSize;
178
+
179
+        // create the encoder
180
+        this._encoderId = Flac.init_libflac_encoder(
181
+            this._sampleRate,
182
+
183
+            // Mono channel
184
+            1,
185
+            this._bitDepth,
186
+
187
+            FLAC_COMPRESSION_LEVEL,
188
+
189
+            // Pass 0 because the total number of samples is unknown.
190
+            0,
191
+
192
+            // checksum, FIXME: double-check whether this is necessary
193
+            true,
194
+
195
+            // Auto-determine block size (samples per frame)
196
+            0
197
+        );
198
+
199
+        if (this._encoderId === 0) {
200
+            throw new Error('Failed to create libflac encoder.');
201
+        }
202
+
203
+        // initialize the encoder
204
+        const initResult = Flac.init_encoder_stream(
205
+            this._encoderId,
206
+            this._onEncodedData.bind(this),
207
+            this._onMetadataAvailable.bind(this)
208
+        );
209
+
210
+        if (initResult !== 0) {
211
+            throw new Error('Failed to initialize libflac encoder.');
212
+        }
213
+
214
+        this._state = EncoderState.WORKING;
215
+    }
216
+
217
+    /**
218
+     * Receive and encode new data.
219
+     *
220
+     * @param {Float32Array} audioData - Raw audio data.
221
+     * @returns {void}
222
+     */
223
+    encode(audioData) {
224
+        if (this._state !== EncoderState.WORKING) {
225
+            throw new Error('Encoder is not ready or has finished.');
226
+        }
227
+
228
+        if (!Flac.isReady()) {
229
+            throw new Error('Flac not ready');
230
+        }
231
+        const bufferLength = audioData.length;
232
+
233
+        // Convert sample to signed 32-bit integers.
234
+        // According to libflac documentation:
235
+        // each sample in the buffers should be a signed integer,
236
+        // right-justified to the resolution set by
237
+        // FLAC__stream_encoder_set_bits_per_sample().
238
+
239
+        // Here we are using 16 bits per sample, the samples should all be in
240
+        // the range [-32768,32767]. This is achieved by multipling Float32
241
+        // numbers with 0x7FFF.
242
+
243
+        const bufferI32 = new Int32Array(bufferLength);
244
+        const view = new DataView(bufferI32.buffer);
245
+        const volume = 1;
246
+        let index = 0;
247
+
248
+        for (let i = 0; i < bufferLength; i++) {
249
+            view.setInt32(index, audioData[i] * (0x7FFF * volume), true);
250
+            index += 4; // 4 bytes (32-bit)
251
+        }
252
+
253
+        // pass it to libflac
254
+        const status = Flac.FLAC__stream_encoder_process_interleaved(
255
+            this._encoderId,
256
+            bufferI32,
257
+            bufferI32.length
258
+        );
259
+
260
+        if (status !== 1) {
261
+            // gets error number
262
+
263
+            const errorNo
264
+                = Flac.FLAC__stream_encoder_get_state(this._encoderId);
265
+
266
+            logger.error('Error during encoding', FLAC_ERRORS[errorNo]);
267
+        }
268
+    }
269
+
270
+    /**
271
+     * Signals the termination of encoding.
272
+     *
273
+     * @returns {void}
274
+     */
275
+    finish() {
276
+        if (this._state === EncoderState.WORKING) {
277
+            this._state = EncoderState.FINISHED;
278
+
279
+            const status = Flac.FLAC__stream_encoder_finish(this._encoderId);
280
+
281
+            logger.log('Flac encoding finished: ', status);
282
+
283
+            // free up resources
284
+            Flac.FLAC__stream_encoder_delete(this._encoderId);
285
+
286
+            this._data = this._exportFlacBlob();
287
+        }
288
+    }
289
+
290
+    /**
291
+     * Gets the encoded flac file.
292
+     *
293
+     * @returns {Blob} - The encoded flac file.
294
+     */
295
+    getBlob() {
296
+        if (this._state === EncoderState.FINISHED) {
297
+            return this._data;
298
+        }
299
+
300
+        return null;
301
+    }
302
+
303
+    /**
304
+     * Converts flac buffer to a Blob.
305
+     *
306
+     * @private
307
+     * @returns {Blob}
308
+     */
309
+    _exportFlacBlob() {
310
+        const samples = mergeUint8Arrays(this._flacBuffers, this._flacLength);
311
+
312
+        const blob = new Blob([ samples ], { type: 'audio/flac' });
313
+
314
+        return blob;
315
+    }
316
+
317
+    /* eslint-disable no-unused-vars */
318
+    /**
319
+     * Callback function for saving encoded Flac data.
320
+     * This is invoked by libflac.
321
+     *
322
+     * @private
323
+     * @param {Uint8Array} buffer - The encoded Flac data.
324
+     * @param {number} bytes - Number of bytes in the data.
325
+     * @returns {void}
326
+     */
327
+    _onEncodedData(buffer, bytes) {
328
+        this._flacBuffers.push(buffer);
329
+        this._flacLength += buffer.byteLength;
330
+    }
331
+    /* eslint-enable no-unused-vars */
332
+
333
+    /**
334
+     * Callback function for receiving metadata.
335
+     *
336
+     * @private
337
+     * @returns {void}
338
+     */
339
+    _onMetadataAvailable = () => {
340
+        // reserved for future use
341
+    }
342
+}
343
+
344
+
345
+let encoder = null;
346
+
347
+self.onmessage = function(e) {
348
+
349
+    switch (e.data.command) {
350
+    case MAIN_THREAD_INIT:
351
+    {
352
+        const bps = e.data.config.bps;
353
+        const sampleRate = e.data.config.sampleRate;
354
+
355
+        if (Flac.isReady()) {
356
+            encoder = new Encoder(sampleRate, bps);
357
+            self.postMessage({
358
+                command: WORKER_LIBFLAC_READY
359
+            });
360
+        } else {
361
+            Flac.onready = function() {
362
+                setTimeout(() => {
363
+                    encoder = new Encoder(sampleRate, bps);
364
+                    self.postMessage({
365
+                        command: WORKER_LIBFLAC_READY
366
+                    });
367
+                }, 0);
368
+            };
369
+        }
370
+        break;
371
+    }
372
+
373
+    case MAIN_THREAD_NEW_DATA_ARRIVED:
374
+        if (encoder === null) {
375
+            logger.error('flacEncodeWorker received data when the encoder is'
376
+                + ' not ready.');
377
+        } else {
378
+            encoder.encode(e.data.buf);
379
+        }
380
+        break;
381
+
382
+    case MAIN_THREAD_FINISH:
383
+        if (encoder !== null) {
384
+            encoder.finish();
385
+            const data = encoder.getBlob();
386
+
387
+            self.postMessage(
388
+                {
389
+                    command: WORKER_BLOB_READY,
390
+                    buf: data
391
+                }
392
+            );
393
+            encoder = null;
394
+        }
395
+        break;
396
+    }
397
+};
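What `encode()` does to each incoming buffer, in isolation: every Float32 sample is scaled by 0x7FFF and written as a little-endian 32-bit word, which is the layout FLAC__stream_encoder_process_interleaved() expects for mono 16-bit input. A small illustrative run (assumes a little-endian platform, as the code above does):

    const samples = new Float32Array([ 0, 0.5, -1 ]);
    const bufferI32 = new Int32Array(samples.length);
    const view = new DataView(bufferI32.buffer);

    samples.forEach((s, i) => view.setInt32(i * 4, s * 0x7FFF, true));

    // bufferI32 is now Int32Array [ 0, 16383, -32767 ]:
    // 16-bit-range values stored in 32-bit words.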

+ 1
- 0
react/features/local-recording/recording/flac/index.js View file

@@ -0,0 +1 @@
1
+export * from './FlacAdapter';

+ 44
- 0
react/features/local-recording/recording/flac/messageTypes.js View file

@@ -0,0 +1,44 @@
1
+/**
2
+ * Types of messages that are passed between the main thread and the WebWorker
3
+ * ({@code flacEncodeWorker})
4
+ */
5
+
6
+// Messages sent by the main thread
7
+
8
+/**
9
+ * Message type that signals the termination of encoding,
10
+ * after which no new audio bits should be sent to the
11
+ * WebWorker.
12
+ */
13
+export const MAIN_THREAD_FINISH = 'MAIN_THREAD_FINISH';
14
+
15
+/**
16
+ * Message type that carries initial parameters for
17
+ * the WebWorker.
18
+ */
19
+export const MAIN_THREAD_INIT = 'MAIN_THREAD_INIT';
20
+
21
+/**
22
+ * Message type that carries the newly received raw audio bits
23
+ * for the WebWorker to encode.
24
+ */
25
+export const MAIN_THREAD_NEW_DATA_ARRIVED = 'MAIN_THREAD_NEW_DATA_ARRIVED';
26
+
27
+// Messages sent by the WebWorker
28
+
29
+/**
30
+ * Message type that signals libflac is ready to receive audio bits.
31
+ */
32
+export const WORKER_LIBFLAC_READY = 'WORKER_LIBFLAC_READY';
33
+
34
+/**
35
+ * Message type that carries the encoded FLAC file as a Blob.
36
+ */
37
+export const WORKER_BLOB_READY = 'WORKER_BLOB_READY';
38
+
39
+// Messages sent by either the main thread or the WebWorker
40
+
41
+/**
42
+ * Debug messages.
43
+ */
44
+export const DEBUG = 'DEBUG';
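Putting the constants together, the complete exchange between FlacAdapter (main thread) and flacEncodeWorker looks like this; the payload shapes are the ones used in the two files above, and the sampleRate value is just an example:

    // main   -> worker : { command: MAIN_THREAD_INIT,
    //                      config: { sampleRate: 44100, bps: 16 } }
    // worker -> main   : { command: WORKER_LIBFLAC_READY }
    // main   -> worker : { command: MAIN_THREAD_NEW_DATA_ARRIVED, buf: <Float32Array> }
    //                    ...repeated for every ScriptProcessorNode callback...
    // main   -> worker : { command: MAIN_THREAD_FINISH }
    // worker -> main   : { command: WORKER_BLOB_READY, buf: <Blob> }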

+ 5
- 0
react/features/local-recording/recording/index.js View file

@@ -0,0 +1,5 @@
1
+export * from './OggAdapter';
2
+export * from './RecordingAdapter';
3
+export * from './Utils';
4
+export * from './WavAdapter';
5
+export * from './flac';

+ 35
- 0
react/features/local-recording/reducer.js View file

@@ -0,0 +1,35 @@
1
+/* @flow */
2
+
3
+import { ReducerRegistry } from '../base/redux';
4
+import {
5
+    LOCAL_RECORDING_ENGAGED,
6
+    LOCAL_RECORDING_STATS_UPDATE,
7
+    LOCAL_RECORDING_UNENGAGED
8
+} from './actionTypes';
9
+import { recordingController } from './controller';
10
+
11
+ReducerRegistry.register('features/local-recording', (state = {}, action) => {
12
+    switch (action.type) {
13
+    case LOCAL_RECORDING_ENGAGED: {
14
+        return {
15
+            ...state,
16
+            isEngaged: true,
17
+            recordingEngagedAt: action.recordingEngagedAt,
18
+            encodingFormat: recordingController._format
19
+        };
20
+    }
21
+    case LOCAL_RECORDING_UNENGAGED:
22
+        return {
23
+            ...state,
24
+            isEngaged: false,
25
+            recordingEngagedAt: null
26
+        };
27
+    case LOCAL_RECORDING_STATS_UPDATE:
28
+        return {
29
+            ...state,
30
+            stats: action.stats
31
+        };
32
+    default:
33
+        return state;
34
+    }
35
+});
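The resulting 'features/local-recording' state slice therefore holds isEngaged, recordingEngagedAt, encodingFormat and stats. An illustrative selector of the kind the connected components in this PR use (not their exact code):

    function _mapStateToProps(state) {
        const {
            isEngaged,
            recordingEngagedAt,
            encodingFormat,
            stats
        } = state['features/local-recording'];

        return {
            isEngaged,
            recordingEngagedAt,
            encodingFormat,
            stats
        };
    }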

+ 439
- 0
react/features/local-recording/session/SessionManager.js View file

@@ -0,0 +1,439 @@
1
+/* @flow */
2
+
3
+import jitsiLocalStorage from '../../../../modules/util/JitsiLocalStorage';
4
+
5
+const logger = require('jitsi-meet-logger').getLogger(__filename);
6
+
7
+/**
8
+ * Gets high precision system time.
9
+ *
10
+ * @returns {number}
11
+ */
12
+function highPrecisionTime(): number {
13
+    return window.performance
14
+        && window.performance.now
15
+        && window.performance.timing
16
+        && window.performance.timing.navigationStart
17
+        ? window.performance.now() + window.performance.timing.navigationStart
18
+        : Date.now();
19
+}
20
+
21
+// Have to use string literals here, instead of Symbols,
22
+// because these values need to be JSON-serializable.
23
+
24
+/**
25
+ * Types of SessionEvents.
26
+ */
27
+const SessionEventType = Object.freeze({
28
+    /**
29
+     * Start of local recording session. This is recorded when the
30
+     * {@code RecordingController} receives the signal to start local recording,
31
+     * before the actual adapter is engaged.
32
+     */
33
+    SESSION_STARTED: 'SESSION_STARTED',
34
+
35
+    /**
36
+     * Start of a continuous segment. This is recorded when the adapter is
37
+     * engaged. Can happen multiple times in a local recording session,
38
+     * due to browser reloads or switching of recording device.
39
+     */
40
+    SEGMENT_STARTED: 'SEGMENT_STARTED',
41
+
42
+    /**
43
+     * End of a continuous segment. This is recorded when the adapter unengages.
44
+     */
45
+    SEGMENT_ENDED: 'SEGMENT_ENDED'
46
+});
47
+
48
+/**
49
+ * Represents an event during a local recording session.
50
+ * The event can be either that the adapter started recording, or stopped
51
+ * recording.
52
+ */
53
+type SessionEvent = {
54
+
55
+    /**
56
+     * The type of the event.
57
+     * Should be one of the values in {@code SessionEventType}.
58
+     */
59
+    type: string,
60
+
61
+    /**
62
+     * The timestamp of the event.
63
+     */
64
+    timestamp: number
65
+};
66
+
67
+/**
68
+ * Representation of the metadata of a segment.
69
+ */
70
+type SegmentInfo = {
71
+
72
+    /**
73
+     * The length of the gap before this segment, in milliseconds.
74
+     * null if unknown.
75
+     */
76
+    gapBefore?: ?number,
77
+
78
+    /**
79
+     * The duration of this segment, in milliseconds.
80
+     * null if unknown or the segment is not finished.
81
+     */
82
+    duration?: ?number,
83
+
84
+    /**
85
+     * The start time, in milliseconds.
86
+     */
87
+    start?: ?number,
88
+
89
+    /**
90
+     * The end time, in milliseconds.
91
+     * null if unknown, the segment is not finished, or the recording is
92
+     * interrupted (e.g. browser reload).
93
+     */
94
+    end?: ?number
95
+};
96
+
97
+/**
98
+ * Representation of metadata of a local recording session.
99
+ */
100
+type SessionInfo = {
101
+
102
+    /**
103
+     * The session token.
104
+     */
105
+    sessionToken: string,
106
+
107
+    /**
108
+     * The start time of the session.
109
+     */
110
+    start: ?number,
111
+
112
+    /**
113
+     * The recording format.
114
+     */
115
+    format: string,
116
+
117
+    /**
118
+     * Array of segments in the session.
119
+     */
120
+    segments: SegmentInfo[]
121
+}
122
+
123
+/**
124
+ * {@code localStorage} key.
125
+ */
126
+const LOCAL_STORAGE_KEY = 'localRecordingMetadataVersion1';
127
+
128
+/**
129
+ * SessionManager manages the metadata of each segment during each local
130
+ * recording session.
131
+ *
132
+ * A segment is a continuous portion of recording done using the same adapter
133
+ * on the same microphone device.
134
+ *
135
+ * Browser refreshes and switching of the microphone will cause new segments to be
136
+ * created.
137
+ *
138
+ * A recording session can consist of one or more segments.
139
+ */
140
+class SessionManager {
141
+
142
+    /**
143
+     * The metadata.
144
+     */
145
+    _sessionsMetadata = {
146
+    };
147
+
148
+    /**
149
+     * Constructor.
150
+     */
151
+    constructor() {
152
+        this._loadMetadata();
153
+    }
154
+
155
+    /**
156
+     * Loads metadata from localStorage.
157
+     *
158
+     * @private
159
+     * @returns {void}
160
+     */
161
+    _loadMetadata() {
162
+        const dataStr = jitsiLocalStorage.getItem(LOCAL_STORAGE_KEY);
163
+
164
+        if (dataStr !== null) {
165
+            try {
166
+                const dataObject = JSON.parse(dataStr);
167
+
168
+                this._sessionsMetadata = dataObject;
169
+            } catch (e) {
170
+                logger.warn('Failed to parse localStorage item.');
171
+
172
+                return;
173
+            }
174
+        }
175
+    }
176
+
177
+    /**
178
+     * Persists metadata to localStorage.
179
+     *
180
+     * @private
181
+     * @returns {void}
182
+     */
183
+    _saveMetadata() {
184
+        jitsiLocalStorage.setItem(LOCAL_STORAGE_KEY,
185
+            JSON.stringify(this._sessionsMetadata));
186
+    }
187
+
188
+    /**
189
+     * Creates a session if one does not exist yet.
190
+     *
191
+     * @param {string} sessionToken - The local recording session token.
192
+     * @param {string} format - The local recording format.
193
+     * @returns {void}
194
+     */
195
+    createSession(sessionToken: string, format: string) {
196
+        if (this._sessionsMetadata[sessionToken] === undefined) {
197
+            this._sessionsMetadata[sessionToken] = {
198
+                format,
199
+                events: []
200
+            };
201
+            this._sessionsMetadata[sessionToken].events.push({
202
+                type: SessionEventType.SESSION_STARTED,
203
+                timestamp: highPrecisionTime()
204
+            });
205
+            this._saveMetadata();
206
+        } else {
207
+            logger.warn(`Session ${sessionToken} already exists`);
208
+        }
209
+    }
210
+
211
+    /**
212
+     * Gets all the Sessions.
213
+     *
214
+     * @returns {SessionInfo[]}
215
+     */
216
+    getSessions(): SessionInfo[] {
217
+        const sessionTokens = Object.keys(this._sessionsMetadata);
218
+        const output = [];
219
+
220
+        for (let i = 0; i < sessionTokens.length; ++i) {
221
+            const thisSession = this._sessionsMetadata[sessionTokens[i]];
222
+            const newSessionInfo : SessionInfo = {
223
+                start: thisSession.events[0].timestamp,
224
+                format: thisSession.format,
225
+                sessionToken: sessionTokens[i],
226
+                segments: this.getSegments(sessionTokens[i])
227
+            };
228
+
229
+            output.push(newSessionInfo);
230
+        }
231
+
232
+        output.sort((a, b) => (a.start || 0) - (b.start || 0));
233
+
234
+        return output;
235
+    }
236
+
237
+    /**
238
+     * Removes session metadata.
239
+     *
240
+     * @param {string} sessionToken - The session token.
241
+     * @returns {void}
242
+     */
243
+    removeSession(sessionToken: string) {
244
+        delete this._sessionsMetadata[sessionToken];
245
+        this._saveMetadata();
246
+    }
247
+
248
+    /**
249
+     * Gets the segments of a given session.
250
+     *
251
+     * @param {string} sessionToken - The session token.
252
+     * @returns {SegmentInfo[]}
253
+     */
254
+    getSegments(sessionToken: string): SegmentInfo[] {
255
+        const thisSession = this._sessionsMetadata[sessionToken];
256
+
257
+        if (thisSession) {
258
+            return this._constructSegments(thisSession.events);
259
+        }
260
+
261
+        return [];
262
+    }
263
+
264
+    /**
265
+     * Marks the start of a new segment.
266
+     * This should be invoked by {@code RecordingAdapter}s when they need to
267
+     * start asynchronous operations (such as switching tracks) that interrupt
268
+     * recording.
269
+     *
270
+     * @param {string} sessionToken - The token of the session to start a new
271
+     * segment in.
272
+     * @returns {number} - Current segment index.
273
+     */
274
+    beginSegment(sessionToken: string): number {
275
+        if (this._sessionsMetadata[sessionToken] === undefined) {
276
+            logger.warn('Attempting to add segments to nonexistent'
277
+                + ` session ${sessionToken}`);
278
+
279
+            return -1;
280
+        }
281
+        this._sessionsMetadata[sessionToken].events.push({
282
+            type: SessionEventType.SEGMENT_STARTED,
283
+            timestamp: highPrecisionTime()
284
+        });
285
+        this._saveMetadata();
286
+
287
+        return this.getSegments(sessionToken).length - 1;
288
+    }
289
+
290
+    /**
291
+     * Gets the current segment index. Starting from 0 for the first
292
+     * segment.
293
+     *
294
+     * @param {string} sessionToken - The session token.
295
+     * @returns {number}
296
+     */
297
+    getCurrentSegmentIndex(sessionToken: string): number {
298
+        if (this._sessionsMetadata[sessionToken] === undefined) {
299
+            return -1;
300
+        }
301
+        const segments = this.getSegments(sessionToken);
302
+
303
+        if (segments.length === 0) {
304
+            return -1;
305
+        }
306
+
307
+        const lastSegment = segments[segments.length - 1];
308
+
309
+        if (lastSegment.end) {
310
+            // last segment is already ended
311
+            return -1;
312
+        }
313
+
314
+        return segments.length - 1;
315
+    }
316
+
317
+    /**
318
+     * Marks the end of the last segment in a session.
319
+     *
320
+     * @param {string} sessionToken - The session token.
321
+     * @returns {void}
322
+     */
323
+    endSegment(sessionToken: string) {
324
+        if (this._sessionsMetadata[sessionToken] === undefined) {
325
+            logger.warn('Attempting to end a segment in nonexistent'
326
+                + ` session ${sessionToken}`);
327
+        } else {
328
+            this._sessionsMetadata[sessionToken].events.push({
329
+                type: SessionEventType.SEGMENT_ENDED,
330
+                timestamp: highPrecisionTime()
331
+            });
332
+            this._saveMetadata();
333
+        }
334
+    }
335
+
336
+    /**
337
+     * Constructs an array of {@code SegmentInfo} from an array of
338
+     * {@code SessionEvent}s.
339
+     *
340
+     * @private
341
+     * @param {SessionEvent[]} events - The array of {@code SessionEvent}s.
342
+     * @returns {SegmentInfo[]}
343
+     */
344
+    _constructSegments(events: SessionEvent[]): SegmentInfo[] {
345
+        if (events.length === 0) {
346
+            return [];
347
+        }
348
+
349
+        const output = [];
350
+        let sessionStartTime = null;
351
+        let currentSegment : SegmentInfo = {
352
+        };
353
+
354
+        /**
355
+         * Helper function for adding a new {@code SegmentInfo} object to the
356
+         * output.
357
+         *
358
+         * @returns {void}
359
+         */
360
+        function commit() {
361
+            if (currentSegment.gapBefore === undefined
362
+                || currentSegment.gapBefore === null) {
363
+                if (output.length > 0 && output[output.length - 1].end) {
364
+                    const lastSegment = output[output.length - 1];
365
+
366
+                    if (currentSegment.start && lastSegment.end) {
367
+                        currentSegment.gapBefore = currentSegment.start
368
+                            - lastSegment.end;
369
+                    } else {
370
+                        currentSegment.gapBefore = null;
371
+                    }
372
+                } else if (sessionStartTime !== null && output.length === 0) {
373
+                    currentSegment.gapBefore = currentSegment.start
374
+                        ? currentSegment.start - sessionStartTime
375
+                        : null;
376
+                } else {
377
+                    currentSegment.gapBefore = null;
378
+                }
379
+            }
380
+            currentSegment.duration = currentSegment.end && currentSegment.start
381
+                ? currentSegment.end - currentSegment.start
382
+                : null;
383
+            output.push(currentSegment);
384
+            currentSegment = {};
385
+        }
386
+
387
+        for (let i = 0; i < events.length; ++i) {
388
+            const currentEvent = events[i];
389
+
390
+            switch (currentEvent.type) {
391
+            case SessionEventType.SESSION_STARTED:
392
+                if (sessionStartTime === null) {
393
+                    sessionStartTime = currentEvent.timestamp;
394
+                } else {
395
+                    logger.warn('Unexpected SESSION_STARTED event.'
396
+                        , currentEvent);
397
+                }
398
+                break;
399
+            case SessionEventType.SEGMENT_STARTED:
400
+                if (currentSegment.start === undefined
401
+                    || currentSegment.start === null) {
402
+                    currentSegment.start = currentEvent.timestamp;
403
+                } else {
404
+                    commit();
405
+                    currentSegment.start = currentEvent.timestamp;
406
+                }
407
+                break;
408
+
409
+            case SessionEventType.SEGMENT_ENDED:
410
+                if (currentSegment.start === undefined
411
+                    || currentSegment.start === null) {
412
+                    logger.warn('Unexpected SEGMENT_ENDED event', currentEvent);
413
+                } else {
414
+                    currentSegment.end = currentEvent.timestamp;
415
+                    commit();
416
+                }
417
+                break;
418
+
419
+            default:
420
+                logger.warn('Unexpected error during _constructSegments');
421
+                break;
422
+            }
423
+        }
424
+        if (currentSegment.start) {
425
+            commit();
426
+        }
427
+
428
+        return output;
429
+    }
430
+
431
+}
432
+
433
+/**
434
+ * Global singleton of {@code SessionManager}.
435
+ */
436
+export const sessionManager = new SessionManager();
437
+
438
+// For debug only. To remove later.
439
+window.sessionManager = sessionManager;

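For orientation, here is a minimal usage sketch of the SessionManager API added above. The session token and format values are hypothetical; in this PR the callers are presumably the RecordingController and the RecordingAdapter implementations.

    // Minimal sketch only; the token and format below are hypothetical.
    import { sessionManager } from './SessionManager';

    // Register a session before recording starts.
    sessionManager.createSession('session-abc123', 'flac');

    // Each uninterrupted stretch of recording is one segment.
    sessionManager.beginSegment('session-abc123');

    // ...recording runs, then is interrupted (e.g. by a track switch)...
    sessionManager.endSegment('session-abc123');

    // Start time, format and per-segment durations/gaps are
    // reconstructed from the event log persisted in localStorage.
    console.log(sessionManager.getSessions());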
+ 1
- 0
react/features/local-recording/session/index.js View file

@@ -0,0 +1 @@
+export * from './SessionManager';

+ 35
- 1
react/features/toolbox/components/web/Toolbox.js View file

@@ -28,6 +28,10 @@ import {
     isDialOutEnabled
 } from '../../../invite';
 import { openKeyboardShortcutsDialog } from '../../../keyboard-shortcuts';
+import {
+    LocalRecordingButton,
+    LocalRecordingInfoDialog
+} from '../../../local-recording';
 import {
     LiveStreamButton,
     RecordButton
@@ -129,6 +133,11 @@ type Props = {
      */
     _localParticipantID: String,
 
+    /**
+     * The subsection of Redux state for local recording.
+     */
+    _localRecState: Object,
+
     /**
      * Whether or not the overflow menu is visible.
      */
@@ -159,6 +168,7 @@ type Props = {
      */
     _visible: boolean,
 
+
     /**
      * Set with the buttons which this Toolbox should display.
      */
@@ -228,6 +238,8 @@ class Toolbox extends Component<Props> {
             = this._onToolbarToggleScreenshare.bind(this);
         this._onToolbarToggleSharedVideo
             = this._onToolbarToggleSharedVideo.bind(this);
+        this._onToolbarOpenLocalRecordingInfoDialog
+            = this._onToolbarOpenLocalRecordingInfoDialog.bind(this);
     }
 
     /**
@@ -370,6 +382,12 @@ class Toolbox extends Component<Props> {
                         visible = { this._shouldShowButton('camera') } />
                 </div>
                 <div className = 'button-group-right'>
+                    { this._shouldShowButton('localrecording')
+                        && <LocalRecordingButton
+                            onClick = {
+                                this._onToolbarOpenLocalRecordingInfoDialog
+                            } />
+                    }
                     { this._shouldShowButton('tileview')
                         && <TileViewButton /> }
                     { this._shouldShowButton('invite')
@@ -842,6 +860,20 @@ class Toolbox extends Component<Props> {
         this._doToggleSharedVideo();
     }
 
+    _onToolbarOpenLocalRecordingInfoDialog: () => void;
+
+    /**
+     * Opens the {@code LocalRecordingInfoDialog}.
+     *
+     * @private
+     * @returns {void}
+     */
+    _onToolbarOpenLocalRecordingInfoDialog() {
+        sendAnalytics(createToolbarEvent('local.recording'));
+
+        this.props.dispatch(openDialog(LocalRecordingInfoDialog));
+    }
+
     /**
      * Renders a button for toggling screen sharing.
      *
@@ -984,7 +1016,7 @@ class Toolbox extends Component<Props> {
      * Returns if a button name has been explicitly configured to be displayed.
      *
      * @param {string} buttonName - The name of the button, as expected in
-     * {@link intefaceConfig}.
+     * {@link interfaceConfig}.
      * @private
      * @returns {boolean} True if the button should be displayed.
      */
@@ -1021,6 +1053,7 @@ function _mapStateToProps(state) {
         visible
     } = state['features/toolbox'];
     const localParticipant = getLocalParticipant(state);
+    const localRecordingStates = state['features/local-recording'];
     const localVideo = getLocalVideoTrack(state['features/base/tracks']);
     const addPeopleEnabled = isAddPeopleEnabled(state);
     const dialOutEnabled = isDialOutEnabled(state);
@@ -1061,6 +1094,7 @@ function _mapStateToProps(state) {
         _isGuest: state['features/base/jwt'].isGuest,
         _fullScreen: fullScreen,
         _localParticipantID: localParticipant.id,
+        _localRecState: localRecordingStates,
         _overflowMenuVisible: overflowMenuVisible,
         _raisedHand: localParticipant.raisedHand,
         _screensharing: localVideo && localVideo.videoType === 'desktop',

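Note that the new button only renders when this._shouldShowButton('localrecording') returns true, i.e. when the button has been configured for display via interfaceConfig. A rough sketch of what that could look like; the surrounding button list here is illustrative and the actual defaults are not part of this diff:

    // interface_config.js (sketch; trimmed to the relevant part)
    var interfaceConfig = {
        // ...
        TOOLBAR_BUTTONS: [
            'microphone', 'camera', 'hangup',

            // Makes the Toolbox render <LocalRecordingButton />.
            'localrecording'
        ]
        // ...
    };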
+ 5
- 1
webpack.config.js View file

@@ -149,7 +149,11 @@ module.exports = [
             ],
 
             'do_external_connect':
-                './connection_optimization/do_external_connect.js'
+                './connection_optimization/do_external_connect.js',
+
+            'flacEncodeWorker':
+                './react/features/local-recording/'
+                    + 'recording/flac/flacEncodeWorker.js'
         }
     }),
 

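This entry makes webpack emit the FLAC encoder as its own bundle, so the encoding work can run off the UI thread as a Web Worker. A rough sketch of how such a worker bundle is typically instantiated; the actual path and message protocol used by FlacAdapter.js and messageTypes.js are not shown in this hunk, and the field names below are hypothetical:

    // Sketch only; the real wiring lives in FlacAdapter.js.
    const encoder = new Worker('/libs/flacEncodeWorker.min.js');

    encoder.onmessage = e => {
        // The worker posts back encoded FLAC data and status messages
        // (message names are defined in recording/flac/messageTypes.js).
        console.log('flacEncodeWorker:', e.data);
    };

    // PCM buffers captured from the AudioContext would be posted in, e.g.
    // encoder.postMessage({ command: 'put', buffers: pcmData });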