Browse Source

refactor: AbstractAudioContextAdapter

move duplicate code from WavAdapter and FlacAdapter to a base class
j8
Radium Zheng 6 years ago
parent
commit
df6df1c6c3

+ 129
- 0
react/features/local-recording/recording/AbstractAudioContextAdapter.js View File

@@ -0,0 +1,129 @@
1
+import { RecordingAdapter } from './RecordingAdapter';
2
+
3
+const logger = require('jitsi-meet-logger').getLogger(__filename);
4
+
5
+/**
6
+ * Base class for {@code AudioContext}-based recording adapters.
7
+ */
8
/**
 * Base class for {@code AudioContext}-based recording adapters.
 *
 * Encapsulates the audio graph shared by WavAdapter and FlacAdapter:
 * microphone MediaStream -> MediaStreamAudioSourceNode ->
 * ScriptProcessorNode -> AudioContext destination.
 */
export class AbstractAudioContextAdapter extends RecordingAdapter {
    /**
     * The {@code AudioContext} instance.
     */
    _audioContext = null;

    /**
     * The {@code ScriptProcessorNode} instance.
     */
    _audioProcessingNode = null;

    /**
     * The {@code MediaStreamAudioSourceNode} instance.
     */
    _audioSource = null;

    /**
     * The {@code MediaStream} instance, representing the current audio device.
     */
    _stream = null;

    /**
     * Sample rate. Overwritten in the constructor with the actual rate
     * reported by the {@code AudioContext}.
     */
    _sampleRate = 44100;

    /**
     * Constructor.
     */
    constructor() {
        super();

        // sampleRate is browser and OS dependent.
        // Setting sampleRate explicitly is in the specs but not implemented
        // by browsers.
        // See: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/
        //    AudioContext#Browser_compatibility
        // And https://bugs.chromium.org/p/chromium/issues/detail?id=432248

        this._audioContext = new AudioContext();
        this._sampleRate = this._audioContext.sampleRate;
        logger.log(`Current sampleRate ${this._sampleRate}.`);
    }

    /**
     * Sets up the audio graph in the AudioContext.
     *
     * @protected
     * @param {string} micDeviceId - The current microphone device ID.
     * @param {Function} callback - Callback function to
     * handle AudioProcessingEvents.
     * @returns {Promise} Resolves when the graph is set up; rejects with an
     * {@code Error} if callback is not a function or getUserMedia fails.
     */
    _initializeAudioContext(micDeviceId, callback) {
        if (typeof callback !== 'function') {
            // Reject with an Error (not a bare string) so callers get a
            // stack trace and consistent rejection semantics.
            return Promise.reject(
                new Error('a callback function is required.'));
        }

        return this._getAudioStream(micDeviceId)
        .then(stream => {
            this._stream = stream;
            this._audioSource
                = this._audioContext.createMediaStreamSource(stream);

            // Buffer size 4096, mono in / mono out.
            this._audioProcessingNode
                = this._audioContext.createScriptProcessor(4096, 1, 1);
            this._audioProcessingNode.onaudioprocess = callback;
            logger.debug('AudioContext is set up.');
        })
        .catch(err => {
            logger.error(`Error calling getUserMedia(): ${err}`);

            // Re-throw so the rejection propagates to the caller.
            throw err;
        });
    }

    /**
     * Connects the nodes in the {@code AudioContext} to start the flow of
     * audio data.
     *
     * @protected
     * @returns {void}
     */
    _connectAudioGraph() {
        this._audioSource.connect(this._audioProcessingNode);
        this._audioProcessingNode.connect(this._audioContext.destination);
    }

    /**
     * Disconnects the nodes in the {@code AudioContext}.
     *
     * @protected
     * @returns {void}
     */
    _disconnectAudioGraph() {
        // Clear the handler first so no further AudioProcessingEvents fire
        // while tearing down the graph.
        this._audioProcessingNode.onaudioprocess = undefined;
        this._audioProcessingNode.disconnect();
        this._audioSource.disconnect();
    }

    /**
     * Replaces the current microphone MediaStream.
     *
     * @protected
     * @param {string} micDeviceId - New microphone ID.
     * @returns {Promise} Resolves when the new stream is connected; resolves
     * immediately (no-op) if the audio graph has not been initialized yet.
     */
    _replaceMic(micDeviceId) {
        if (this._audioContext && this._audioProcessingNode) {
            return this._getAudioStream(micDeviceId).then(newStream => {
                const newSource = this._audioContext
                    .createMediaStreamSource(newStream);

                // Swap the source node feeding the processing node; the
                // processing node and its callback are left untouched.
                this._audioSource.disconnect();
                newSource.connect(this._audioProcessingNode);
                this._stream = newStream;
                this._audioSource = newSource;
            });
        }

        return Promise.resolve();
    }
}

+ 19
- 93
react/features/local-recording/recording/WavAdapter.js View File

@@ -1,34 +1,13 @@
1
-import { RecordingAdapter } from './RecordingAdapter';
1
+import { AbstractAudioContextAdapter } from './AbstractAudioContextAdapter';
2 2
 
3 3
 const logger = require('jitsi-meet-logger').getLogger(__filename);
4 4
 
5 5
 const WAV_BITS_PER_SAMPLE = 16;
6
-const WAV_SAMPLE_RATE = 44100;
7 6
 
8 7
 /**
9 8
  * Recording adapter for raw WAVE format.
10 9
  */
11
-export class WavAdapter extends RecordingAdapter {
12
-
13
-    /**
14
-     * The current {@code MediaStream} instance.
15
-     */
16
-    _stream = null;
17
-
18
-    /**
19
-     * {@code AudioContext} instance.
20
-     */
21
-    _audioContext = null;
22
-
23
-    /**
24
-     * {@code ScriptProcessorNode} instance, which receives the raw PCM bits.
25
-     */
26
-    _audioProcessingNode = null;
27
-
28
-    /**
29
-     * {@code MediaStreamAudioSourceNode} instance, which represents the mic.
30
-     */
31
-    _audioSource = null;
10
+export class WavAdapter extends AbstractAudioContextAdapter {
32 11
 
33 12
     /**
34 13
      * Length of the WAVE file, in number of samples.
@@ -55,8 +34,7 @@ export class WavAdapter extends RecordingAdapter {
55 34
      */
56 35
     constructor() {
57 36
         super();
58
-
59
-        this._onReceivePCM = this._onReceivePCM.bind(this);
37
+        this._onAudioProcess = this._onAudioProcess.bind(this);
60 38
     }
61 39
 
62 40
     /**
@@ -73,9 +51,7 @@ export class WavAdapter extends RecordingAdapter {
73 51
             this._wavBuffers = [];
74 52
             this._wavLength = 0;
75 53
 
76
-            this._audioSource.connect(this._audioProcessingNode);
77
-            this._audioProcessingNode
78
-                .connect(this._audioContext.destination);
54
+            this._connectAudioGraph();
79 55
         });
80 56
     }
81 57
 
@@ -85,10 +61,8 @@ export class WavAdapter extends RecordingAdapter {
85 61
      * @inheritdoc
86 62
      */
87 63
     stop() {
88
-        this._audioProcessingNode.disconnect();
89
-        this._audioSource.disconnect();
64
+        this._disconnectAudioGraph();
90 65
         this._data = this._exportMonoWAV(this._wavBuffers, this._wavLength);
91
-        this._audioContext = null;
92 66
         this._audioProcessingNode = null;
93 67
         this._audioSource = null;
94 68
         this._isInitialized = false;
@@ -149,34 +123,6 @@ export class WavAdapter extends RecordingAdapter {
149 123
         return this._replaceMic(micDeviceId);
150 124
     }
151 125
 
152
-    /**
153
-     * Replaces the current microphone MediaStream.
154
-     *
155
-     * @param {*} micDeviceId - New microphone ID.
156
-     * @returns {Promise}
157
-     */
158
-    _replaceMic(micDeviceId) {
159
-        if (this._audioContext && this._audioProcessingNode) {
160
-            return new Promise((resolve, reject) => {
161
-                this._getAudioStream(micDeviceId).then(newStream => {
162
-                    const newSource = this._audioContext
163
-                        .createMediaStreamSource(newStream);
164
-
165
-                    this._audioSource.disconnect();
166
-                    newSource.connect(this._audioProcessingNode);
167
-                    this._stream = newStream;
168
-                    this._audioSource = newSource;
169
-                    resolve();
170
-                })
171
-                .catch(() => {
172
-                    reject();
173
-                });
174
-            });
175
-        }
176
-
177
-        return Promise.resolve();
178
-    }
179
-
180 126
     /**
181 127
      * Creates a WAVE file header.
182 128
      *
@@ -209,11 +155,11 @@ export class WavAdapter extends RecordingAdapter {
209 155
         view.setUint16(22, 1, true);
210 156
 
211 157
         // SampleRate
212
-        view.setUint32(24, WAV_SAMPLE_RATE, true);
158
+        view.setUint32(24, this._sampleRate, true);
213 159
 
214 160
         // ByteRate
215 161
         view.setUint32(28,
216
-            Number(WAV_SAMPLE_RATE) * 1 * WAV_BITS_PER_SAMPLE / 8, true);
162
+            Number(this._sampleRate) * 1 * WAV_BITS_PER_SAMPLE / 8, true);
217 163
 
218 164
         // BlockAlign
219 165
         view.setUint16(32, 1 * Number(WAV_BITS_PER_SAMPLE) / 8, true);
@@ -244,51 +190,31 @@ export class WavAdapter extends RecordingAdapter {
244 190
             return Promise.resolve();
245 191
         }
246 192
 
247
-        const p = new Promise((resolve, reject) => {
248
-            this._getAudioStream(micDeviceId)
249
-            .then(stream => {
250
-                this._stream = stream;
251
-                this._audioContext = new AudioContext({
252
-                    sampleRate: WAV_SAMPLE_RATE
253
-                });
254
-                this._audioSource
255
-                    = this._audioContext.createMediaStreamSource(stream);
256
-                this._audioProcessingNode
257
-                    = this._audioContext.createScriptProcessor(4096, 1, 1);
258
-                this._audioProcessingNode.onaudioprocess = e => {
259
-                    const channelLeft = e.inputBuffer.getChannelData(0);
260
-
261
-                    // See: https://developer.mozilla.org/en-US/docs/Web/API/
262
-                    //      AudioBuffer/getChannelData
263
-                    // The returned value is an Float32Array.
264
-                    this._onReceivePCM(channelLeft);
265
-                };
193
+        return this._initializeAudioContext(micDeviceId, this._onAudioProcess)
194
+            .then(() => {
266 195
                 this._isInitialized = true;
267
-                resolve();
268
-            })
269
-            .catch(err => {
270
-                logger.error(`Error calling getUserMedia(): ${err}`);
271
-                reject();
272 196
             });
273
-        });
274
-
275
-        return p;
276 197
     }
277 198
 
278 199
     /**
279
-     * Callback function that saves the PCM bits.
200
+     * Callback function for handling AudioProcessingEvents.
280 201
      *
281 202
      * @private
282
-     * @param {Float32Array} data - The audio PCM data.
203
+     * @param {AudioProcessingEvent} e - The event containing the raw PCM.
283 204
      * @returns {void}
284 205
      */
285
-    _onReceivePCM(data) {
206
+    _onAudioProcess(e) {
207
+        // See: https://developer.mozilla.org/en-US/docs/Web/API/
208
+        //      AudioBuffer/getChannelData
209
+        // The returned value is an Float32Array.
210
+        const channelLeft = e.inputBuffer.getChannelData(0);
211
+
286 212
         // Need to copy the Float32Array:
287 213
         // unlike passing to WebWorker, this data is passed by reference,
288 214
         // so we need to copy it, otherwise the resulting audio file will be
289 215
         // just repeating the last segment.
290
-        this._wavBuffers.push(new Float32Array(data));
291
-        this._wavLength += data.length;
216
+        this._wavBuffers.push(new Float32Array(channelLeft));
217
+        this._wavLength += channelLeft.length;
292 218
     }
293 219
 
294 220
     /**

+ 80
- 115
react/features/local-recording/recording/flac/FlacAdapter.js View File

@@ -1,4 +1,3 @@
1
-import { RecordingAdapter } from '../RecordingAdapter';
2 1
 import {
3 2
     DEBUG,
4 3
     MAIN_THREAD_FINISH,
@@ -8,50 +7,41 @@ import {
8 7
     WORKER_LIBFLAC_READY
9 8
 } from './messageTypes';
10 9
 
10
+import { AbstractAudioContextAdapter } from '../AbstractAudioContextAdapter';
11
+
11 12
 const logger = require('jitsi-meet-logger').getLogger(__filename);
12 13
 
13 14
 /**
14 15
  * Recording adapter that uses libflac.js in the background.
15 16
  */
16
-export class FlacAdapter extends RecordingAdapter {
17
+export class FlacAdapter extends AbstractAudioContextAdapter {
17 18
 
18 19
     /**
19 20
      * Instance of flacEncodeWorker.
20 21
      */
21 22
     _encoder = null;
22 23
 
23
-    /**
24
-     * The {@code AudioContext} instance.
25
-     */
26
-    _audioContext = null;
27
-
28
-    /**
29
-     * The {@code ScriptProcessorNode} instance.
30
-     */
31
-    _audioProcessingNode = null;
32
-
33
-    /**
34
-     * The {@code MediaStreamAudioSourceNode} instance.
35
-     */
36
-    _audioSource = null;
37
-
38
-    /**
39
-     * The {@code MediaStream} instance, representing the current audio device.
40
-     */
41
-    _stream = null;
42
-
43 24
     /**
44 25
      * Resolve function of the promise returned by {@code stop()}.
45 26
      * This is called after the WebWorker sends back {@code WORKER_BLOB_READY}.
46 27
      */
47 28
     _stopPromiseResolver = null;
48 29
 
30
+    _initPromiseResolver = null;
31
+
49 32
     /**
50 33
      * Initialization promise.
51 34
      */
52 35
     _initPromise = null;
53 36
 
54
-    _sampleRate = 44100;
37
+    /**
38
+     * Constructor.
39
+     */
40
+    constructor() {
41
+        super();
42
+        this._onAudioProcess = this._onAudioProcess.bind(this);
43
+        this._onWorkerMessage = this._onWorkerMessage.bind(this);
44
+    }
55 45
 
56 46
     /**
57 47
      * Implements {@link RecordingAdapter#start()}.
@@ -64,8 +54,7 @@ export class FlacAdapter extends RecordingAdapter {
64 54
         }
65 55
 
66 56
         return this._initPromise.then(() => {
67
-            this._audioSource.connect(this._audioProcessingNode);
68
-            this._audioProcessingNode.connect(this._audioContext.destination);
57
+            this._connectAudioGraph();
69 58
         });
70 59
     }
71 60
 
@@ -83,9 +72,7 @@ export class FlacAdapter extends RecordingAdapter {
83 72
 
84 73
         return new Promise(resolve => {
85 74
             this._initPromise = null;
86
-            this._audioProcessingNode.onaudioprocess = undefined;
87
-            this._audioProcessingNode.disconnect();
88
-            this._audioSource.disconnect();
75
+            this._disconnectAudioGraph();
89 76
             this._stopPromiseResolver = resolve;
90 77
             this._encoder.postMessage({
91 78
                 command: MAIN_THREAD_FINISH
@@ -146,29 +133,6 @@ export class FlacAdapter extends RecordingAdapter {
146 133
         return this._replaceMic(micDeviceId);
147 134
     }
148 135
 
149
-    /**
150
-     * Replaces the current microphone MediaStream.
151
-     *
152
-     * @param {string} micDeviceId - New microphone ID.
153
-     * @returns {Promise}
154
-     */
155
-    _replaceMic(micDeviceId) {
156
-        if (this._audioContext && this._audioProcessingNode) {
157
-            return this._getAudioStream(micDeviceId).then(newStream => {
158
-                const newSource = this._audioContext
159
-                    .createMediaStreamSource(newStream);
160
-
161
-                this._audioSource.disconnect();
162
-                newSource.connect(this._audioProcessingNode);
163
-                this._stream = newStream;
164
-                this._audioSource = newSource;
165
-
166
-            });
167
-        }
168
-
169
-        return Promise.resolve();
170
-    }
171
-
172 136
     /**
173 137
      * Initialize the adapter.
174 138
      *
@@ -181,17 +145,6 @@ export class FlacAdapter extends RecordingAdapter {
181 145
             return Promise.resolve();
182 146
         }
183 147
 
184
-        // sampleRate is browser and OS dependent.
185
-        // Setting sampleRate explicitly is in the specs but not implemented
186
-        // by browsers.
187
-        // See: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/
188
-        //    AudioContext#Browser_compatibility
189
-        // And https://bugs.chromium.org/p/chromium/issues/detail?id=432248
190
-
191
-        this._audioContext = new AudioContext();
192
-        this._sampleRate = this._audioContext.sampleRate;
193
-        logger.log(`Current sampleRate ${this._sampleRate}.`);
194
-
195 148
         const promiseInitWorker = new Promise((resolve, reject) => {
196 149
             try {
197 150
                 this._loadWebWorker();
@@ -199,28 +152,11 @@ export class FlacAdapter extends RecordingAdapter {
199 152
                 reject();
200 153
             }
201 154
 
202
-            // set up listen for messages from the WebWorker
203
-            this._encoder.onmessage = e => {
204
-                if (e.data.command === WORKER_BLOB_READY) {
205
-                    // Received a Blob representing an encoded FLAC file.
206
-                    this._data = e.data.buf;
207
-                    if (this._stopPromiseResolver !== null) {
208
-                        this._stopPromiseResolver();
209
-                        this._stopPromiseResolver = null;
210
-                        this._encoder.terminate();
211
-                        this._encoder = null;
212
-                    }
213
-                } else if (e.data.command === DEBUG) {
214
-                    logger.log(e.data);
215
-                } else if (e.data.command === WORKER_LIBFLAC_READY) {
216
-                    logger.log('libflac is ready.');
217
-                    resolve();
218
-                } else {
219
-                    logger.error(
220
-                        `Unknown event
221
-                        from encoder (WebWorker): "${e.data.command}"!`);
222
-                }
223
-            };
155
+            // save the Promise's resolver to resolve it later.
156
+            this._initPromiseResolver = resolve;
157
+
158
+            // set up listener for messages from the WebWorker
159
+            this._encoder.onmessage = this._onWorkerMessage;
224 160
 
225 161
             this._encoder.postMessage({
226 162
                 command: MAIN_THREAD_INIT,
@@ -231,38 +167,67 @@ export class FlacAdapter extends RecordingAdapter {
231 167
             });
232 168
         });
233 169
 
234
-        const callbackInitAudioContext = () =>
235
-            this._getAudioStream(micDeviceId)
236
-            .then(stream => {
237
-                this._stream = stream;
238
-                this._audioSource
239
-                    = this._audioContext.createMediaStreamSource(stream);
240
-                this._audioProcessingNode
241
-                    = this._audioContext.createScriptProcessor(4096, 1, 1);
242
-                this._audioProcessingNode.onaudioprocess = e => {
243
-                    // Delegates to the WebWorker to do the encoding.
244
-                    // The return of getChannelData() is a Float32Array,
245
-                    // each element representing one sample.
246
-                    const channelLeft = e.inputBuffer.getChannelData(0);
247
-
248
-                    this._encoder.postMessage({
249
-                        command: MAIN_THREAD_NEW_DATA_ARRIVED,
250
-                        buf: channelLeft
251
-                    });
252
-                };
253
-                logger.debug('AudioContext is set up.');
254
-            })
255
-            .catch(err => {
256
-                logger.error(`Error calling getUserMedia(): ${err}`);
257
-
258
-                return Promise.reject(err);
259
-            });
260
-
261
-        // Because Promise constructor immediately executes the executor
262
-        // function. This is undesirable, we want callbackInitAudioContext to be
263
-        // executed only **after** promiseInitWorker is resolved.
170
+        // Arrow function is used here because we want AudioContext to be
171
+        // initialized only **after** promiseInitWorker is resolved.
264 172
         return promiseInitWorker
265
-            .then(callbackInitAudioContext);
173
+            .then(() =>
174
+                this._initializeAudioContext(
175
+                    micDeviceId,
176
+                    this._onAudioProcess
177
+                ));
178
+    }
179
+
180
+    /**
181
+     * Callback function for handling AudioProcessingEvents.
182
+     *
183
+     * @private
184
+     * @param {AudioProcessingEvent} e - The event containing the raw PCM.
185
+     * @returns {void}
186
+     */
187
+    _onAudioProcess(e) {
188
+        // Delegates to the WebWorker to do the encoding.
189
+        // The return of getChannelData() is a Float32Array,
190
+        // each element representing one sample.
191
+        const channelLeft = e.inputBuffer.getChannelData(0);
192
+
193
+        this._encoder.postMessage({
194
+            command: MAIN_THREAD_NEW_DATA_ARRIVED,
195
+            buf: channelLeft
196
+        });
197
+    }
198
+
199
+    /**
200
+     * Handler for messages from flacEncodeWorker.
201
+     *
202
+     * @private
203
+     * @param {MessageEvent} e - The event sent by the WebWorker.
204
+     * @returns {void}
205
+     */
206
+    _onWorkerMessage(e) {
207
+        switch (e.data.command) {
208
+        case WORKER_BLOB_READY:
209
+            // Received a Blob representing an encoded FLAC file.
210
+            this._data = e.data.buf;
211
+            if (this._stopPromiseResolver !== null) {
212
+                this._stopPromiseResolver();
213
+                this._stopPromiseResolver = null;
214
+                this._encoder.terminate();
215
+                this._encoder = null;
216
+            }
217
+            break;
218
+        case DEBUG:
219
+            logger.log(e.data);
220
+            break;
221
+        case WORKER_LIBFLAC_READY:
222
+            logger.log('libflac is ready.');
223
+            this._initPromiseResolver();
224
+            break;
225
+        default:
226
+            logger.error(
227
+                `Unknown event
228
+                from encoder (WebWorker): "${e.data.command}"!`);
229
+            break;
230
+        }
266 231
     }
267 232
 
268 233
     /**

+ 6
- 5
react/features/local-recording/recording/flac/flacEncodeWorker.js View File

@@ -6,6 +6,8 @@ import {
6 6
     WORKER_LIBFLAC_READY
7 7
 } from './messageTypes';
8 8
 
9
+const logger = require('jitsi-meet-logger').getLogger(__filename);
10
+
9 11
 /**
10 12
  * WebWorker that does FLAC encoding using libflac.js
11 13
  */
@@ -261,7 +263,7 @@ class Encoder {
261 263
             const errorNo
262 264
                 = Flac.FLAC__stream_encoder_get_state(this._encoderId);
263 265
 
264
-            console.error('Error during encoding', FLAC_ERRORS[errorNo]);
266
+            logger.error('Error during encoding', FLAC_ERRORS[errorNo]);
265 267
         }
266 268
     }
267 269
 
@@ -276,7 +278,7 @@ class Encoder {
276 278
 
277 279
             const status = Flac.FLAC__stream_encoder_finish(this._encoderId);
278 280
 
279
-            console.log('flac encoding finish: ', status);
281
+            logger.log('Flac encoding finished: ', status);
280 282
 
281 283
             // free up resources
282 284
             Flac.FLAC__stream_encoder_delete(this._encoderId);
@@ -370,9 +372,8 @@ self.onmessage = function(e) {
370 372
 
371 373
     case MAIN_THREAD_NEW_DATA_ARRIVED:
372 374
         if (encoder === null) {
373
-            console
374
-                .error('flacEncoderWorker:'
375
-                + 'received data when the encoder is not ready.');
375
+            logger.error('flacEncoderWorker received data when the encoder is'
376
+                + 'not ready.');
376 377
         } else {
377 378
             encoder.encode(e.data.buf);
378 379
         }

Loading…
Cancel
Save