
feat(face-landmarks): integrate human library

It replaces the face-api library with @vladmandic/human.

* feat(face-landmarks): integrate human library

* feat(face-landmarks): rewrite worker in typescript

* fix(face-landmarks): allow worker bundle size up to 2 MiB

* fix: remove unwanted comment

* code review
master · Gabriel Borlea · 2 years ago · commit 0c021868b5
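
For orientation before the file-by-file diff: all detection now funnels through a single Human instance configured once, instead of face-api's standalone detector calls. A minimal standalone sketch of that flow, assuming the models are served from /libs/ (that path and the detectOnce helper are illustrative, not part of this commit):

import { Config, Human } from '@vladmandic/human';

// Sketch of the new detection flow; only the face and emotion models
// are enabled, mirroring the worker configuration in this commit.
const config: Partial<Config> = {
    backend: 'humangl',
    modelBasePath: '/libs/', // assumed deploy location of blazeface-front.* and emotion.*
    face: {
        enabled: true,
        emotion: { enabled: true },
        mesh: { enabled: false },
        iris: { enabled: false },
        description: { enabled: false }
    },
    body: { enabled: false },
    hand: { enabled: false },
    gesture: { enabled: false }
};

const human = new Human(config);

async function detectOnce(image: ImageBitmap | ImageData) {
    await human.load(); // downloads the models on first call, cached afterwards

    const { face } = await human.detect(image, config);

    // boxRaw is [ x, y, width, height ], normalized to the 0..1 range.
    return face.map(f => ({ box: f.boxRaw, topEmotion: f.emotion?.[0] }));
}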

Makefile (+5 −5)

@@ -7,7 +7,7 @@ TF_WASM_DIR = node_modules/@tensorflow/tfjs-backend-wasm/dist/
 RNNOISE_WASM_DIR = node_modules/rnnoise-wasm/dist
 TFLITE_WASM = react/features/stream-effects/virtual-background/vendor/tflite
 MEET_MODELS_DIR  = react/features/stream-effects/virtual-background/vendor/models
-FACE_MODELS_DIR = node_modules/@vladmandic/face-api/model
+FACE_MODELS_DIR = node_modules/@vladmandic/human-models/models
 NODE_SASS = ./node_modules/.bin/sass
 NPM = npm
 OUTPUT_DIR = .
@@ -91,10 +91,10 @@ deploy-meet-models:
 
 deploy-face-landmarks:
 	cp \
-		$(FACE_MODELS_DIR)/tiny_face_detector_model-weights_manifest.json \
-		$(FACE_MODELS_DIR)/tiny_face_detector_model.bin \
-		$(FACE_MODELS_DIR)/face_expression_model-weights_manifest.json \
-		$(FACE_MODELS_DIR)/face_expression_model.bin \
+		$(FACE_MODELS_DIR)/blazeface-front.bin \
+		$(FACE_MODELS_DIR)/blazeface-front.json \
+		$(FACE_MODELS_DIR)/emotion.bin \
+		$(FACE_MODELS_DIR)/emotion.json \
 		$(DEPLOY_DIR)
 
 deploy-css:

package-lock.json (+26 −9)

@@ -53,7 +53,8 @@
         "@svgr/webpack": "4.3.2",
         "@tensorflow/tfjs-backend-wasm": "3.13.0",
         "@tensorflow/tfjs-core": "3.13.0",
-        "@vladmandic/face-api": "1.6.4",
+        "@vladmandic/human": "2.6.5",
+        "@vladmandic/human-models": "2.5.9",
         "@xmldom/xmldom": "0.7.5",
         "amplitude-js": "8.2.1",
         "base64-js": "1.3.1",
@@ -5587,14 +5588,22 @@
       "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-20.2.1.tgz",
       "integrity": "sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw=="
     },
-    "node_modules/@vladmandic/face-api": {
-      "version": "1.6.4",
-      "resolved": "https://registry.npmjs.org/@vladmandic/face-api/-/face-api-1.6.4.tgz",
-      "integrity": "sha512-tVx8lCL1mKb44qeN5EEypJNXqxRYXh+7BcSzfY4iMaZIoF5Y+Jev20UiIn9JvxwGV2caWkdFIjpvw+OxsL/kdg==",
+    "node_modules/@vladmandic/human": {
+      "version": "2.6.5",
+      "resolved": "https://registry.npmjs.org/@vladmandic/human/-/human-2.6.5.tgz",
+      "integrity": "sha512-5fG5lICkJw17d8fT9ZHtioAPLmOKsQlNN4rbuA1t21HlV7KL3M2EMbYeO+ZE0t6VLSioZZH/KoOFvW5A0JP0Hg==",
       "engines": {
         "node": ">=14.0.0"
       }
     },
+    "node_modules/@vladmandic/human-models": {
+      "version": "2.5.9",
+      "resolved": "https://registry.npmjs.org/@vladmandic/human-models/-/human-models-2.5.9.tgz",
+      "integrity": "sha512-WhV0RIeELsA73LGMpemYJvpIEuG0nkh6rP1x7JctKDQkNaARgWolTq9r1RUvWw++rQfzfZ82bYcAauJcaGW6bw==",
+      "dependencies": {
+        "@vladmandic/human": "^2.5.8"
+      }
+    },
     "node_modules/@webassemblyjs/ast": {
       "version": "1.11.1",
       "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz",
@@ -24180,10 +24189,18 @@
       "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-20.2.1.tgz",
       "integrity": "sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw=="
     },
-    "@vladmandic/face-api": {
-      "version": "1.6.4",
-      "resolved": "https://registry.npmjs.org/@vladmandic/face-api/-/face-api-1.6.4.tgz",
-      "integrity": "sha512-tVx8lCL1mKb44qeN5EEypJNXqxRYXh+7BcSzfY4iMaZIoF5Y+Jev20UiIn9JvxwGV2caWkdFIjpvw+OxsL/kdg=="
+    "@vladmandic/human": {
+      "version": "2.6.5",
+      "resolved": "https://registry.npmjs.org/@vladmandic/human/-/human-2.6.5.tgz",
+      "integrity": "sha512-5fG5lICkJw17d8fT9ZHtioAPLmOKsQlNN4rbuA1t21HlV7KL3M2EMbYeO+ZE0t6VLSioZZH/KoOFvW5A0JP0Hg=="
+    },
+    "@vladmandic/human-models": {
+      "version": "2.5.9",
+      "resolved": "https://registry.npmjs.org/@vladmandic/human-models/-/human-models-2.5.9.tgz",
+      "integrity": "sha512-WhV0RIeELsA73LGMpemYJvpIEuG0nkh6rP1x7JctKDQkNaARgWolTq9r1RUvWw++rQfzfZ82bYcAauJcaGW6bw==",
+      "requires": {
+        "@vladmandic/human": "^2.5.8"
+      }
     },
     "@webassemblyjs/ast": {
       "version": "1.11.1",

package.json (+2 −1)

@@ -58,7 +58,8 @@
     "@svgr/webpack": "4.3.2",
     "@tensorflow/tfjs-backend-wasm": "3.13.0",
     "@tensorflow/tfjs-core": "3.13.0",
-    "@vladmandic/face-api": "1.6.4",
+    "@vladmandic/human": "2.6.5",
+    "@vladmandic/human-models": "2.5.9",
     "@xmldom/xmldom": "0.7.5",
     "amplitude-js": "8.2.1",
     "base64-js": "1.3.1",

react/features/face-landmarks/constants.js → react/features/face-landmarks/constants.ts

@@ -1,5 +1,3 @@
-// @flow
-
 export const FACE_EXPRESSIONS_EMOJIS = {
     happy: '😊',
     neutral: '😐',
@@ -13,6 +11,16 @@ export const FACE_EXPRESSIONS_EMOJIS = {
 
 export const FACE_EXPRESSIONS = [ 'happy', 'neutral', 'sad', 'surprised', 'angry', 'fearful' ];
 
+export const FACE_EXPRESSIONS_NAMING_MAPPING = {
+    happy: 'happy',
+    neutral: 'neutral',
+    surprise: 'surprised',
+    angry: 'angry',
+    fear: 'fearful',
+    disgust: 'disgusted',
+    sad: 'sad'
+};
+
 /**
  * Time is ms used for sending expression.
  */
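
The new FACE_EXPRESSIONS_NAMING_MAPPING exists because human's emotion model reports labels such as surprise, fear and disgust, while FACE_EXPRESSIONS (and the speaker-stats UI) use surprised, fearful and disgusted. A small sketch of the translation the worker performs (the toAppExpression helper is invented for illustration; FaceResult comes from @vladmandic/human):

import type { FaceResult } from '@vladmandic/human';

import { FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';

// Sketch: translate human's top-ranked emotion for the first detected
// face into the label the rest of the app expects.
function toAppExpression(faces: FaceResult[]): string | undefined {
    // human sorts emotions by score, so [0] is the most confident one.
    const topEmotion = faces[0]?.emotion?.[0]?.emotion;

    // e.g. 'surprise' -> 'surprised', 'fear' -> 'fearful'
    return topEmotion
        ? FACE_EXPRESSIONS_NAMING_MAPPING[topEmotion as keyof typeof FACE_EXPRESSIONS_NAMING_MAPPING]
        : undefined;
}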

react/features/face-landmarks/faceApiPatch.js (deleted, +0 −108)

@@ -1,108 +0,0 @@
-/* eslint-disable */
-// From: https://github.com/justadudewhohacks/face-api.js/issues/47
-// This is needed because face-api.js does not support working in a WebWorker natively
-// Updated Dec 1 2020 to work on latest Chrome (tested in WebWorkers on Chrome Mobile on Android / Google Pixel 3 as well)
-self.useWasm = false;
-if(!self.OffscreenCanvas) {
-	self.useWasm = true;
-	self.OffscreenCanvas = class OffscreenCanvas {
-		constructor() {
-
-		}
-	}
-}
-
-if(!self.OffscreenCanvasRenderingContext2D) {
-	self.OffscreenCanvasRenderingContext2D = class OffscreenCanvasRenderingContext2D {
-		constructor() {
-
-		}
-	}
-}
-
-self.Canvas = self.HTMLCanvasElement = OffscreenCanvas;
-// self.HTMLCanvasElement.name = 'HTMLCanvasElement';
-// self.Canvas.name = 'Canvas';
-
-self.CanvasRenderingContext2D = OffscreenCanvasRenderingContext2D;
-
-function HTMLImageElement(){}
-function HTMLVideoElement(){}
-
-self.Image = HTMLImageElement;
-self.Video = HTMLVideoElement;
-
-function Storage () {
-	let _data = {};
-	this.clear = function(){ return _data = {}; };
-	this.getItem = function(id){ return _data.hasOwnProperty(id) ? _data[id] : undefined; };
-	this.removeItem = function(id){ return delete _data[id]; };
-	this.setItem = function(id, val){ return _data[id] = String(val); };
-}
-class Document extends EventTarget {}
-
-self.document = new Document();
-
-self.window = self.Window = self;
-self.localStorage = new Storage();
-
-function createElement(element) {
-	switch(element) {
-		case 'canvas':
-			let canvas = new Canvas(1,1);
-			canvas.localName = 'canvas';
-			canvas.nodeName = 'CANVAS';
-			canvas.tagName = 'CANVAS';
-			canvas.nodeType = 1;
-			canvas.innerHTML = '';
-			canvas.remove = () => { console.log('nope'); };
-			return canvas;
-		default:
-			console.log('arg', element);
-			break;
-	}
-}
-
-document.createElement = createElement;
-document.location = self.location;
-
-// These are the same checks face-api.js/isBrowser does
-if(!typeof window == 'object') {
-	console.warn("Check failed: window");
-}
-if(typeof document === 'undefined') {
-	console.warn("Check failed: document");
-}
-if(typeof HTMLImageElement === 'undefined') {
-	console.warn("Check failed: HTMLImageElement");
-}
-if(typeof HTMLCanvasElement === 'undefined') {
-	console.warn("Check failed: HTMLCanvasElement");
-}
-if(typeof HTMLVideoElement === 'undefined') {
-	console.warn("Check failed: HTMLVideoElement");
-}
-if(typeof ImageData === 'undefined') {
-	console.warn("Check failed: ImageData");
-}
-if(typeof CanvasRenderingContext2D === 'undefined') {
-	console.warn("Check failed: CanvasRenderingContext2D");
-}
-
-self.window = window;
-self.document = document;
-self.HTMLImageElement = HTMLImageElement;
-self.HTMLVideoElement = HTMLVideoElement;
-
-// These are the same checks face-api.js/isBrowser does
-const isBrowserCheck = typeof window === 'object'
-	&& typeof document !== 'undefined'
-	&& typeof HTMLImageElement !== 'undefined'
-	&& typeof HTMLCanvasElement !== 'undefined'
-	&& typeof HTMLVideoElement !== 'undefined'
-	&& typeof ImageData !== 'undefined'
-	&& typeof CanvasRenderingContext2D !== 'undefined';
-;
-if(!isBrowserCheck) {
-	throw new Error("Failed to monkey patch for face-api, face-api will fail");
-}

react/features/face-landmarks/faceLandmarksWorker.js (deleted, +0 −159)

@@ -1,159 +0,0 @@
-import './faceApiPatch';
-
-import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
-import * as faceapi from '@vladmandic/face-api';
-
-import { DETECTION_TYPES, DETECT_FACE, INIT_WORKER } from './constants';
-
-/**
- * Detection types to be applied.
- */
-let faceDetectionTypes = [];
-
-/**
- * Indicates whether an init error occured.
- */
-let initError = false;
-
-/**
- * A flag that indicates whether the models are loaded or not.
- */
-let modelsLoaded = false;
-
-/**
- * A flag that indicates whether the tensorflow backend is set or not.
- */
-let backendSet = false;
-
-/**
- * Flag for indicating whether a face detection flow is in progress or not.
- */
-let detectionInProgress = false;
-
-/**
- * Contains the last valid face bounding box (passes threshold validation) which was sent to the main process.
- */
-let lastValidFaceBox;
-
-const detectFaceBox = async ({ detections, threshold }) => {
-    if (!detections.length) {
-        return null;
-    }
-
-    const faceBox = {
-        // normalize to percentage based
-        left: Math.round(Math.min(...detections.map(d => d.relativeBox.left)) * 100),
-        right: Math.round(Math.max(...detections.map(d => d.relativeBox.right)) * 100)
-    };
-
-    faceBox.width = Math.round(faceBox.right - faceBox.left);
-
-    if (lastValidFaceBox && Math.abs(lastValidFaceBox.left - faceBox.left) < threshold) {
-        return null;
-    }
-
-    lastValidFaceBox = faceBox;
-
-    return faceBox;
-};
-
-const detectFaceExpression = async ({ detections }) =>
-    detections[0]?.expressions.asSortedArray()[0].expression;
-
-const detect = async ({ image, threshold }) => {
-    let detections;
-    let faceExpression;
-    let faceBox;
-
-    detectionInProgress = true;
-    faceapi.tf.engine().startScope();
-
-    const imageTensor = faceapi.tf.browser.fromPixels(image);
-
-    if (faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
-        detections = await faceapi.detectAllFaces(
-            imageTensor,
-            new faceapi.TinyFaceDetectorOptions()
-        ).withFaceExpressions();
-
-        faceExpression = await detectFaceExpression({ detections });
-    }
-
-    if (faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
-        detections = detections
-            ? detections.map(d => d.detection)
-            : await faceapi.detectAllFaces(imageTensor, new faceapi.TinyFaceDetectorOptions());
-
-        faceBox = await detectFaceBox({
-            detections,
-            threshold
-        });
-    }
-
-    faceapi.tf.engine().endScope();
-
-    if (faceBox || faceExpression) {
-        self.postMessage({
-            faceBox,
-            faceExpression
-        });
-    }
-
-    detectionInProgress = false;
-};
-
-const init = async ({ baseUrl, detectionTypes }) => {
-    faceDetectionTypes = detectionTypes;
-
-    if (!backendSet) {
-        try {
-            if (self.useWasm) {
-                setWasmPaths(baseUrl);
-                await faceapi.tf.setBackend('wasm');
-            } else {
-                await faceapi.tf.setBackend('webgl');
-            }
-            backendSet = true;
-        } catch (err) {
-            initError = true;
-
-            return;
-        }
-    }
-
-    // load face detection model
-    if (!modelsLoaded) {
-        try {
-            await faceapi.loadTinyFaceDetectorModel(baseUrl);
-
-            if (detectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
-                await faceapi.loadFaceExpressionModel(baseUrl);
-            }
-
-            modelsLoaded = true;
-        } catch (err) {
-            initError = true;
-
-            return;
-        }
-    }
-};
-
-onmessage = function(message) {
-    switch (message.data.type) {
-    case DETECT_FACE: {
-        if (!backendSet || !modelsLoaded || initError || detectionInProgress) {
-            return;
-        }
-
-        detect(message.data);
-
-        break;
-    }
-
-    case INIT_WORKER: {
-        init(message.data);
-        break;
-    }
-    }
-};

react/features/face-landmarks/faceLandmarksWorker.ts (added, +181 −0)

@@ -0,0 +1,181 @@
+import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
+import { Human, Config, FaceResult } from '@vladmandic/human';
+
+import { DETECTION_TYPES, DETECT_FACE, INIT_WORKER, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';
+
+type Detection = {
+    detections: Array<FaceResult>,
+    threshold?: number
+};
+
+type DetectInput = {
+    image: ImageBitmap | ImageData,
+    threshold: number
+};
+
+type FaceBox = {
+    left: number,
+    right: number,
+    width?: number
+};
+
+type InitInput = {
+    baseUrl: string,
+    detectionTypes: string[]
+}
+
+/**
+ * An object that is used for using human.
+ */
+let human: Human;
+
+/**
+ * Detection types to be applied.
+ */
+let faceDetectionTypes: string[] = [];
+
+/**
+ * Flag for indicating whether a face detection flow is in progress or not.
+ */
+let detectionInProgress = false;
+
+/**
+ * Contains the last valid face bounding box (passes threshold validation) which was sent to the main process.
+ */
+let lastValidFaceBox: FaceBox;
+
+/**
+ * Configuration for human.
+ */
+const config: Partial<Config> = {
+    backend: 'humangl',
+    async: true,
+    warmup: 'none',
+    cacheModels: true,
+    cacheSensitivity: 0,
+    debug: false,
+    deallocate: true,
+    filter: { enabled: false },
+    face: {
+        enabled: true,
+        detector: {
+            enabled: true,
+            rotation: false
+        },
+        mesh: { enabled: false },
+        iris: { enabled: false },
+        emotion: { enabled: false },
+        description: { enabled: false }
+    },
+    hand: { enabled: false },
+    gesture: { enabled: false },
+    body: { enabled: false },
+    segmentation: { enabled: false }
+};
+
+const detectFaceBox = async ({ detections, threshold }: Detection) => {
+    if (!detections.length) {
+        return null;
+    }
+
+    const faceBox: FaceBox = {
+        // normalize to percentage based
+        left: Math.round(Math.min(...detections.map(d => d.boxRaw[0])) * 100),
+        right: Math.round(Math.max(...detections.map(d => d.boxRaw[0] + d.boxRaw[2])) * 100)
+    };
+
+    faceBox.width = Math.round(faceBox.right - faceBox.left);
+
+    if (lastValidFaceBox && threshold && Math.abs(lastValidFaceBox.left - faceBox.left) < threshold) {
+        return null;
+    }
+
+    lastValidFaceBox = faceBox;
+
+    return faceBox;
+};
+
+const detectFaceExpression = async ({ detections }: Detection) =>
+    detections[0]?.emotion && FACE_EXPRESSIONS_NAMING_MAPPING[detections[0]?.emotion[0].emotion];
+
+const detect = async ({ image, threshold } : DetectInput) => {
+    let detections;
+    let faceExpression;
+    let faceBox;
+
+    detectionInProgress = true;
+
+    const imageTensor = human.tf.browser.fromPixels(image);
+
+    if (faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
+        const { face } = await human.detect(imageTensor, config);
+
+        detections = face;
+        faceExpression = await detectFaceExpression({ detections });
+    }
+
+    if (faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
+        if (!detections) {
+            const { face } = await human.detect(imageTensor, config);
+
+            detections = face;
+        }
+
+        faceBox = await detectFaceBox({
+            detections,
+            threshold
+        });
+    }
+
+    if (faceBox || faceExpression) {
+        self.postMessage({
+            faceBox,
+            faceExpression
+        });
+    }
+
+    detectionInProgress = false;
+};
+
+const init = async ({ baseUrl, detectionTypes }: InitInput) => {
+    faceDetectionTypes = detectionTypes;
+
+    if (!human) {
+        config.modelBasePath = baseUrl;
+        if (!self.OffscreenCanvas) {
+            config.backend = 'wasm';
+            config.wasmPath = baseUrl;
+            setWasmPaths(baseUrl);
+        }
+        if (detectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS) && config.face) {
+            config.face.emotion = { enabled: true };
+        }
+        const initialHuman = new Human(config);
+        try {
+            await initialHuman.load();
+        } catch (err) {
+            console.error(err);
+        }
+
+        human = initialHuman;
+    }
+};
+
+onmessage = function(message: MessageEvent<any>) {
+    switch (message.data.type) {
+    case DETECT_FACE: {
+        if (!human || detectionInProgress) {
+            return;
+        }
+
+        detect(message.data);
+
+        break;
+    }
+
+    case INIT_WORKER: {
+        init(message.data);
+        break;
+    }
+    }
+};
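
The worker above only reacts to messages; its main-thread counterpart is not part of this diff. A hypothetical driver matching the InitInput/DetectInput shapes above (the bundle path and the sendFrame helper are illustrative):

import { DETECTION_TYPES, DETECT_FACE, INIT_WORKER } from './constants';

// Hypothetical wiring for the worker above; not part of this commit.
const worker = new Worker('libs/face-landmarks-worker.min.js');

// Matches InitInput: model base URL plus the detection types to enable.
worker.postMessage({
    type: INIT_WORKER,
    baseUrl: 'libs/', // models (and the wasm backend, when OffscreenCanvas is missing) live here
    detectionTypes: [ DETECTION_TYPES.FACE_BOX, DETECTION_TYPES.FACE_EXPRESSIONS ]
});

// detect() posts back { faceBox, faceExpression }; either may be undefined.
worker.onmessage = (e: MessageEvent<{
    faceBox?: { left: number, right: number, width?: number },
    faceExpression?: string
}>) => {
    console.log(e.data.faceBox, e.data.faceExpression);
};

// Matches DetectInput: threshold is how far (in percent) the face box
// must move horizontally before a new box is reported.
function sendFrame(image: ImageBitmap) {
    worker.postMessage({ type: DETECT_FACE, image, threshold: 10 }, [ image ]);
}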

react/features/speaker-stats/components/web/SpeakerStatsItem.js (+1 −1)

@@ -5,7 +5,7 @@ import React from 'react';
 import { Avatar, StatelessAvatar } from '../../../base/avatar';
 import { getInitials } from '../../../base/avatar/functions';
 import BaseTheme from '../../../base/ui/components/BaseTheme';
-import { FACE_EXPRESSIONS } from '../../../face-landmarks/constants.js';
+import { FACE_EXPRESSIONS } from '../../../face-landmarks/constants';
 
 import TimeElapsed from './TimeElapsed';
 

react/features/speaker-stats/components/web/SpeakerStatsLabels.js (+1 −1)

@@ -4,7 +4,7 @@ import React from 'react';
 import { useTranslation } from 'react-i18next';
 
 import { Tooltip } from '../../../base/tooltip';
-import { FACE_EXPRESSIONS_EMOJIS } from '../../../face-landmarks/constants.js';
+import { FACE_EXPRESSIONS_EMOJIS } from '../../../face-landmarks/constants';
 
 const useStyles = makeStyles(theme => {
     return {

tsconfig.json (+1 −1)

@@ -5,7 +5,7 @@
         "module": "es6",
         "target": "es6",
         "jsx": "react",
-        "lib": [ "ES2020", "DOM" ],
+        "lib": [ "webworker", "ES2020", "DOM" ],
         "noEmit": false,
         "moduleResolution": "Node",
         "strict": true,

webpack.config.js (+2 −2)

@@ -384,13 +384,13 @@ module.exports = (_env, argv) => {
         }),
         Object.assign({}, config, {
             entry: {
-                'face-landmarks-worker': './react/features/face-landmarks/faceLandmarksWorker.js'
+                'face-landmarks-worker': './react/features/face-landmarks/faceLandmarksWorker.ts'
             },
             plugins: [
                 ...config.plugins,
                 ...getBundleAnalyzerPlugin(analyzeBundle, 'face-landmarks-worker')
             ],
-            performance: getPerformanceHints(perfHintOptions, 1024 * 1024 * 1.5)
+            performance: getPerformanceHints(perfHintOptions, 1024 * 1024 * 2)
         })
     ];
 };
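
getPerformanceHints is defined earlier in this webpack.config.js (outside the hunk), so the visible change is just a raised size budget: the human-based worker bundle is larger than the face-api one, hence 2 MiB instead of 1.5 MiB. Assuming the helper maps its second argument onto webpack's standard performance options, the new budget plausibly expands to something like:

// Assumption: getPerformanceHints(options, size) produces webpack's
// standard performance object with `size` as the budget.
const performance = {
    hints: 'error' as const, // fail the build when a bundle exceeds the budget
    maxAssetSize: 1024 * 1024 * 2, // 2 MiB, up from 1.5 MiB
    maxEntrypointSize: 1024 * 1024 * 2
};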
