@@ -2,6 +2,8 @@
  * Provides statistics for the local stream.
  */
 
+const logger = require('@jitsi/logger').getLogger(__filename);
+
 /**
  * Size of the webaudio analyzer buffer.
  * @type {number}
@@ -16,21 +18,12 @@ const WEBAUDIO_ANALYZER_SMOOTING_TIME = 0.8;
 
 window.AudioContext = window.AudioContext || window.webkitAudioContext;
 
+/**
+ * The audio context.
+ * @type {AudioContext}
+ */
 let context = null;
 
-if (window.AudioContext) {
-    context = new AudioContext();
-
-    // XXX Not all browsers define a suspend method on AudioContext. As the
-    // invocation is at the (ES6 module) global execution level, it breaks the
-    // loading of the lib-jitsi-meet library in such browsers and, consequently,
-    // the loading of the very Web app that uses the lib-jitsi-meet library. For
-    // example, Google Chrome 40 on Android does not define the method but we
-    // still want to be able to load the lib-jitsi-meet library there and
-    // display a page which notifies the user that the Web app is not supported
-    // there.
-    context.suspend && context.suspend();
-}
 
 /**
  * Converts time domain data array to audio level.
@@ -88,6 +81,8 @@ export default function LocalStatsCollector(stream, interval, callback) {
     this.intervalMilis = interval;
     this.audioLevel = 0;
     this.callback = callback;
+    this.source = null;
+    this.analyser = null;
 }
 
 /**
@@ -97,21 +92,22 @@ LocalStatsCollector.prototype.start = function() {
     if (!LocalStatsCollector.isLocalStatsSupported()) {
         return;
     }
+
     context.resume();
-    const analyser = context.createAnalyser();
+    this.analyser = context.createAnalyser();
 
-    analyser.smoothingTimeConstant = WEBAUDIO_ANALYZER_SMOOTING_TIME;
-    analyser.fftSize = WEBAUDIO_ANALYZER_FFT_SIZE;
+    this.analyser.smoothingTimeConstant = WEBAUDIO_ANALYZER_SMOOTING_TIME;
+    this.analyser.fftSize = WEBAUDIO_ANALYZER_FFT_SIZE;
 
-    const source = context.createMediaStreamSource(this.stream);
+    this.source = context.createMediaStreamSource(this.stream);
 
-    source.connect(analyser);
+    this.source.connect(this.analyser);
 
     this.intervalId = setInterval(
         () => {
-            const array = new Uint8Array(analyser.frequencyBinCount);
+            const array = new Uint8Array(this.analyser.frequencyBinCount);
 
-            analyser.getByteTimeDomainData(array);
+            this.analyser.getByteTimeDomainData(array);
             const audioLevel = timeDomainDataToAudioLevel(array);
 
             // Set the audio levels always as NoAudioSignalDetection now
@@ -133,6 +129,11 @@ LocalStatsCollector.prototype.stop = function() {
         clearInterval(this.intervalId);
         this.intervalId = null;
     }
+
+    this.analyser?.disconnect();
+    this.analyser = null;
+    this.source?.disconnect();
+    this.source = null;
 };
 
 /**
@@ -142,5 +143,35 @@ LocalStatsCollector.prototype.stop = function() {
  * @returns {boolean}
  */
 LocalStatsCollector.isLocalStatsSupported = function() {
-    return Boolean(context);
+    return Boolean(window.AudioContext);
+};
+
+/**
+ * Disconnects the audio context.
+ */
+LocalStatsCollector.disconnectAudioContext = async function() {
+    if (context) {
+        logger.info('Disconnecting audio context');
+        await context.close();
+        context = null;
+    }
 };
+
+/**
+ * Connects the audio context.
+ */
+LocalStatsCollector.connectAudioContext = function() {
+    if (!LocalStatsCollector.isLocalStatsSupported()) {
+        return;
+    }
+
+    logger.info('Connecting audio context');
+    context = new AudioContext();
+
+    context.suspend();
+};
+
+/**
+ * Initialize the audio context on startup.
+ */
+LocalStatsCollector.connectAudioContext();
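
For reference, a minimal consumer-side sketch of the lifecycle this change introduces: the shared AudioContext is now created by connectAudioContext() (called once at module load) and released by disconnectAudioContext(), isLocalStatsSupported() checks window.AudioContext instead of whether a context instance currently exists, and stop() additionally disconnects the analyser and media-stream source nodes. The import path, the 200 ms polling interval and the helper function names below are illustrative assumptions, not part of the diff.

// Illustrative only: not part of this change.
import LocalStatsCollector from './modules/statistics/LocalStatsCollector';

function startLocalAudioLevels(stream) {
    if (!LocalStatsCollector.isLocalStatsSupported()) {
        return null;
    }

    // Poll the local audio level every 200 ms and hand it to the callback.
    const collector = new LocalStatsCollector(stream, 200, audioLevel => {
        console.log(`local audio level: ${audioLevel}`);
    });

    collector.start();

    return collector;
}

async function stopLocalAudioLevels(collector) {
    // stop() now also disconnects the analyser and source nodes it created.
    collector.stop();

    // Close the shared AudioContext; connectAudioContext() must be called
    // again before another collector can be started.
    await LocalStatsCollector.disconnectAudioContext();
}

Note that connectAudioContext() creates a fresh context unconditionally, so this sketch assumes callers re-invoke it only after a prior disconnectAudioContext().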