
fix(blur-effect) enable blur effect on all platforms supporting canvas filters

That means all browsers except Safari, for now.

In addition, use the 96p model (instead of the 144p one) on browsers without SIMD support.

Branch: master
Tudor D. Pop committed 4 years ago
Commit: dd1f8339b1
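
For orientation, a minimal sketch of the two capability checks this commit combines. It is illustrative only: the real implementations appear in the diff below, the helper name supportsCanvasFilters is invented for the example, and wasmCheck.feature.simd, the ctx.filter probe and the model paths are taken from the changes themselves.

import * as wasmCheck from 'wasm-check';

// 1. Canvas filter support decides whether blur is offered at all.
//    Safari's 2D context does not expose ctx.filter, so it is excluded for now.
function supportsCanvasFilters() {
    const canvas = document.createElement('canvas');
    const ctx = canvas.getContext('2d');
    const supported = typeof ctx.filter !== 'undefined';

    canvas.remove();

    return supported;
}

// 2. WebAssembly SIMD support decides which segmentation model gets loaded.
const blurAvailable = supportsCanvasFilters();
const modelPath = wasmCheck.feature.simd
    ? 'libs/segm_full_v679.tflite' // 144p model
    : 'libs/segm_lite_v681.tflite'; // 96p model for browsers without SIMD

console.log({ blurAvailable, modelPath });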

+1 -15  react/features/blur/components/VideoBlurButton.js

@@ ... @@
 // @flow
 
-import React from 'react';
-
 import { createVideoBlurEvent, sendAnalytics } from '../../analytics';
 import { translate } from '../../base/i18n';
 import { IconBlurBackground } from '../../base/icons';
 import { connect } from '../../base/redux';
-import { AbstractButton, BetaTag } from '../../base/toolbox/components';
+import { AbstractButton } from '../../base/toolbox/components';
 import type { AbstractButtonProps } from '../../base/toolbox/components';
 import { toggleBlurEffect } from '../actions';
 
@@ ... @@
     tooltip = 'toolbar.startvideoblur';
     toggledLabel = 'toolbar.stopvideoblur';
 
-    /**
-     * Helper function to be implemented by subclasses, which returns
-     * a React Element to display (a beta tag) at the end of the button.
-     *
-     * @override
-     * @protected
-     * @returns {ReactElement}
-     */
-    _getElementAfter() {
-        return <BetaTag />;
-    }
-
     /**
      * Handles clicking / pressing the button, and toggles the blur effect
      * state accordingly.

+20 -0  react/features/blur/functions.js

@@ ... @@
 
 import { getJitsiMeetGlobalNS, loadScript } from '../base/util';
 
+let filterSupport;
+
 /**
  * Returns promise that resolves with the blur effect instance.
  *
@@ ... @@
 
     return loadScript('libs/video-blur-effect.min.js').then(() => ns.effects.createBlurEffect());
 }
+
+/**
+ * Checks context filter support.
+ *
+ * @returns {boolean} True if the filter is supported and false if the filter is not supported by the browser.
+ */
+export function checkBlurSupport() {
+    if (typeof filterSupport === 'undefined') {
+        const canvas = document.createElement('canvas');
+        const ctx = canvas.getContext('2d');
+
+        filterSupport = typeof ctx.filter !== 'undefined';
+
+        canvas.remove();
+    }
+
+    return filterSupport;
+}
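
Note that the result is memoized in the module-level filterSupport variable, so the probe canvas is created at most once per page load. The probe relies on supporting browsers initialising ctx.filter to the string 'none', while Safari (at the time of this commit) leaves it undefined; a quick console check, for illustration only:

const ctx = document.createElement('canvas').getContext('2d');

// Chromium / Firefox: 'string' (the default filter is 'none');
// Safari, at the time of this commit: 'undefined'.
console.log(typeof ctx.filter);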

+20 -18  react/features/stream-effects/blur/JitsiStreamBlurEffect.js

@@ ... @@
     timerWorkerScript
 } from './TimerWorker';
 
-const segmentationWidth = 256;
-const segmentationHeight = 144;
-const segmentationPixelCount = segmentationWidth * segmentationHeight;
 const blurValue = '25px';
 
 /**
@@ ... @@
  */
 export default class JitsiStreamBlurEffect {
     _model: Object;
+    _options: Object;
+    _segmentationPixelCount: number;
     _inputVideoElement: HTMLVideoElement;
     _onMaskFrameTimer: Function;
     _maskFrameTimerWorker: Worker;
@@ ... @@
      * Represents a modified video MediaStream track.
      *
      * @class
-     * @param {BodyPix} bpModel - BodyPix model.
+     * @param {Object} model - Meet model.
+     * @param {Object} options - Segmentation dimensions.
      */
-    constructor(bpModel: Object) {
-        this._model = bpModel;
+    constructor(model: Object, options: Object) {
+        this._model = model;
+        this._options = options;
+        this._segmentationPixelCount = this._options.width * this._options.height;
 
         // Bind event handler so it is only bound once for every instance.
         this._onMaskFrameTimer = this._onMaskFrameTimer.bind(this);
@@ ... @@
             this._segmentationMaskCanvas,
             0,
             0,
-            segmentationWidth,
-            segmentationHeight,
+            this._options.width,
+            this._options.height,
             0,
             0,
             this._inputVideoElement.width,
@@ ... @@
         this._outputCanvasCtx.drawImage(this._inputVideoElement, 0, 0);
 
         this._outputCanvasCtx.globalCompositeOperation = 'destination-over';
-        this._outputCanvasCtx.filter = `blur(${blurValue})`; // FIXME Does not work on Safari.
+        this._outputCanvasCtx.filter = `blur(${blurValue})`;
         this._outputCanvasCtx.drawImage(this._inputVideoElement, 0, 0);
     }
 
@@ ... @@
         this._model._runInference();
         const outputMemoryOffset = this._model._getOutputMemoryOffset() / 4;
 
-        for (let i = 0; i < segmentationPixelCount; i++) {
+        for (let i = 0; i < this._segmentationPixelCount; i++) {
             const background = this._model.HEAPF32[outputMemoryOffset + (i * 2)];
             const person = this._model.HEAPF32[outputMemoryOffset + (i * 2) + 1];
             const shift = Math.max(background, person);
@@ ... @@
             this._inputVideoElement.height,
             0,
             0,
-            segmentationWidth,
-            segmentationHeight
+            this._options.width,
+            this._options.height
         );
 
         const imageData = this._segmentationMaskCtx.getImageData(
             0,
             0,
-            segmentationWidth,
-            segmentationHeight
+            this._options.width,
+            this._options.height
         );
         const inputMemoryOffset = this._model._getInputMemoryOffset() / 4;
 
-        for (let i = 0; i < segmentationPixelCount; i++) {
+        for (let i = 0; i < this._segmentationPixelCount; i++) {
             this._model.HEAPF32[inputMemoryOffset + (i * 3)] = imageData.data[i * 4] / 255;
             this._model.HEAPF32[inputMemoryOffset + (i * 3) + 1] = imageData.data[(i * 4) + 1] / 255;
             this._model.HEAPF32[inputMemoryOffset + (i * 3) + 2] = imageData.data[(i * 4) + 2] / 255;
@@ ... @@
         const { height, frameRate, width }
             = firstVideoTrack.getSettings ? firstVideoTrack.getSettings() : firstVideoTrack.getConstraints();
 
-        this._segmentationMask = new ImageData(segmentationWidth, segmentationHeight);
+        this._segmentationMask = new ImageData(this._options.width, this._options.height);
         this._segmentationMaskCanvas = document.createElement('canvas');
-        this._segmentationMaskCanvas.width = segmentationWidth;
-        this._segmentationMaskCanvas.height = segmentationHeight;
+        this._segmentationMaskCanvas.width = this._options.width;
+        this._segmentationMaskCanvas.height = this._options.height;
         this._segmentationMaskCtx = this._segmentationMaskCanvas.getContext('2d');
         this._outputCanvasElement.width = parseInt(width, 10);
         this._outputCanvasElement.height = parseInt(height, 10);
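
With the hard-coded 256x144 constants removed, the constructor now receives the segmentation dimensions together with the model. A hedged usage sketch, with the dimensions taken from the segmentationDimensions table added in index.js below:

import JitsiStreamBlurEffect from './JitsiStreamBlurEffect';

// `tflite` is assumed to be the TFLite module with the segmentation model
// already loaded, as done in react/features/stream-effects/blur/index.js.
const effect = new JitsiStreamBlurEffect(tflite, {
    height: 96, // the 96p model; the 144p model uses 256x144
    width: 160
});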

+17 -4  react/features/stream-effects/blur/index.js

@@ ... @@
 import createTFLiteSIMDModule from './vendor/tflite/tflite-simd';
 
 const models = {
-    '96': 'libs/segm_lite_v681.tflite',
-    '144': 'libs/segm_full_v679.tflite'
+    'model96': 'libs/segm_lite_v681.tflite',
+    'model144': 'libs/segm_full_v679.tflite'
+};
+
+const segmentationDimensions = {
+    'model96': {
+        'height': 96,
+        'width': 160
+    },
+    'model144': {
+        'height': 144,
+        'width': 256
+    }
 };
 
 /**
@@ ... @@
 
     const modelBufferOffset = tflite._getModelBufferMemoryOffset();
     const modelResponse = await fetch(
-        models['144']
+        wasmCheck.feature.simd ? models.model144 : models.model96
     );
 
     if (!modelResponse.ok) {
@@ ... @@
 
     tflite._loadModel(model.byteLength);
 
-    return new JitsiStreamBlurEffect(tflite);
+    const options = wasmCheck.feature.simd ? segmentationDimensions.model144 : segmentationDimensions.model96;
+
+    return new JitsiStreamBlurEffect(tflite, options);
 }
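
For scale, the 144p model segments 256 x 144 = 36,864 pixels per frame, while the 96p model segments 160 x 96 = 15,360, roughly 2.4 times fewer, which is presumably what keeps inference affordable on browsers without WebAssembly SIMD.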

+2 -2  react/features/toolbox/components/web/Toolbox.js

@@ ... @@
 // @flow
 
 import React, { Component } from 'react';
-import * as wasmCheck from 'wasm-check';
 
 import {
     ACTION_SHORTCUT_TRIGGERED,
@@ ... @@
 import { getLocalVideoTrack, toggleScreensharing } from '../../../base/tracks';
 import { isVpaasMeeting } from '../../../billing-counter/functions';
 import { VideoBlurButton } from '../../../blur';
+import { checkBlurSupport } from '../../../blur/functions';
 import { CHAT_SIZE, ChatCounter, toggleChat } from '../../../chat';
 import { EmbedMeetingDialog } from '../../../embed-meeting';
 import { SharedDocumentButton } from '../../../etherpad';
@@ ... @@
                 && <VideoBlurButton
                     key = 'videobackgroundblur'
                     showLabel = { true }
-                    visible = { !_screensharing && wasmCheck.feature.simd } />,
+                    visible = { !_screensharing && checkBlurSupport() } />,
             this._shouldShowButton('settings')
                 && <SettingsButton
                     key = 'settings'
