Procházet zdrojové kódy

feat(face-centering) implement centering of faces in a video

Config options:

faceCoordinatesSharing.enabled
faceCoordinatesSharing.threshold
faceCoordinatesSharing.captureInterval
master
Tudor-Ovidiu Avram před 3 roky
rodič
revize
d718d9d8fb

+ 10
- 2
Makefile Zobrazit soubor

@@ -4,6 +4,7 @@ DEPLOY_DIR = libs
4 4
 LIBJITSIMEET_DIR = node_modules/lib-jitsi-meet
5 5
 LIBFLAC_DIR = node_modules/libflacjs/dist/min
6 6
 OLM_DIR = node_modules/@matrix-org/olm
7
+TF_WASM_DIR = node_modules/@tensorflow/tfjs-backend-wasm/dist/
7 8
 RNNOISE_WASM_DIR = node_modules/rnnoise-wasm/dist
8 9
 TFLITE_WASM = react/features/stream-effects/virtual-background/vendor/tflite
9 10
 MEET_MODELS_DIR  = react/features/stream-effects/virtual-background/vendor/models
@@ -29,7 +30,7 @@ clean:
29 30
 	rm -fr $(BUILD_DIR)
30 31
 
31 32
 .NOTPARALLEL:
32
-deploy: deploy-init deploy-appbundle deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-libflac deploy-olm deploy-css deploy-local deploy-facial-expressions
33
+deploy: deploy-init deploy-appbundle deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-libflac deploy-olm deploy-tf-wasm deploy-css deploy-local deploy-facial-expressions
33 34
 
34 35
 deploy-init:
35 36
 	rm -fr $(DEPLOY_DIR)
@@ -52,6 +53,8 @@ deploy-appbundle:
52 53
 		$(OUTPUT_DIR)/analytics-ga.js \
53 54
 		$(BUILD_DIR)/analytics-ga.min.js \
54 55
 		$(BUILD_DIR)/analytics-ga.min.js.map \
56
+		$(BUILD_DIR)/face-centering-worker.min.js \
57
+		$(BUILD_DIR)/face-centering-worker.min.js.map \
55 58
 		$(BUILD_DIR)/facial-expressions-worker.min.js \
56 59
 		$(BUILD_DIR)/facial-expressions-worker.min.js.map \
57 60
 		$(DEPLOY_DIR)
@@ -80,6 +83,11 @@ deploy-olm:
80 83
 		$(OLM_DIR)/olm.wasm \
81 84
 		$(DEPLOY_DIR)
82 85
 
86
+deploy-tf-wasm:
87
+	cp \
88
+		$(TF_WASM_DIR)/*.wasm \
89
+		$(DEPLOY_DIR)
90
+
83 91
 deploy-rnnoise-binary:
84 92
 	cp \
85 93
 		$(RNNOISE_WASM_DIR)/rnnoise.wasm \
@@ -109,7 +117,7 @@ deploy-local:
109 117
 	([ ! -x deploy-local.sh ] || ./deploy-local.sh)
110 118
 
111 119
 .NOTPARALLEL:
112
-dev: deploy-init deploy-css deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-libflac deploy-olm deploy-facial-expressions
120
+dev: deploy-init deploy-css deploy-rnnoise-binary deploy-tflite deploy-meet-models deploy-lib-jitsi-meet deploy-libflac deploy-olm deploy-tf-wasm deploy-facial-expressions
113 121
 	$(WEBPACK_DEV_SERVER)
114 122
 
115 123
 source-package:

+ 12
- 0
config.js Zobrazit soubor

@@ -1,3 +1,4 @@
1
+
1 2
 /* eslint-disable no-unused-vars, no-var */
2 3
 
3 4
 var config = {
@@ -749,6 +750,17 @@ var config = {
749 750
     // Enables displaying facial expressions in speaker stats
750 751
     // enableDisplayFacialExpressions: true,
751 752
 
753
+    // faceCoordinatesSharing: {
754
+    //     // Enables sharing your face coordinates. Used for centering faces within a video.
755
+    //     enabled: false,
756
+
757
+    //     // Minimum required face movement percentage threshold for sending new face coordinates data.
758
+    //     threshold: 10,
759
+
760
+    //     // Milliseconds for processing a new image capture in order to detect face coordinates if they exist.
761
+    //     captureInterval: 100
762
+    // },
763
+
752 764
     // Controls the percentage of automatic feedback shown to participants when callstats is enabled.
753 765
     // The default value is 100%. If set to 0, no automatic feedback will be requested
754 766
     // feedbackPercentage: 100,

+ 1
- 0
css/_aui_reset.scss Zobrazit soubor

@@ -48,6 +48,7 @@ canvas,
48 48
 progress,
49 49
 video {
50 50
   display: inline-block;
51
+  transition: object-position 0.5s ease 0s;
51 52
   vertical-align: baseline;
52 53
 }
53 54
 audio:not([controls]) {

+ 0
- 1
css/filmstrip/_tile_view.scss Zobrazit soubor

@@ -2,7 +2,6 @@
2 2
  * CSS styles that are specific to the filmstrip that shows the thumbnail tiles.
3 3
  */
4 4
 .tile-view {
5
-
6 5
     .remote-videos {
7 6
         align-items: center;
8 7
         box-sizing: border-box;

+ 177
- 0
package-lock.json Zobrazit soubor

@@ -48,6 +48,10 @@
48 48
         "@react-navigation/native": "6.0.6",
49 49
         "@react-navigation/stack": "6.0.11",
50 50
         "@svgr/webpack": "4.3.2",
51
+        "@tensorflow-models/blazeface": "0.0.7",
52
+        "@tensorflow/tfjs-backend-wasm": "3.13.0",
53
+        "@tensorflow/tfjs-converter": "3.13.0",
54
+        "@tensorflow/tfjs-core": "3.13.0",
51 55
         "@vladmandic/face-api": "1.6.4",
52 56
         "@xmldom/xmldom": "0.7.5",
53 57
         "amplitude-js": "8.2.1",
@@ -4834,6 +4838,67 @@
4834 4838
         "node": ">=8"
4835 4839
       }
4836 4840
     },
4841
+    "node_modules/@tensorflow-models/blazeface": {
4842
+      "version": "0.0.7",
4843
+      "resolved": "https://registry.npmjs.org/@tensorflow-models/blazeface/-/blazeface-0.0.7.tgz",
4844
+      "integrity": "sha512-+hInPkvHJoubfiXlmNuF3SCucZvU6W1PMC25IV99NSAftJUpKvLokfF93iX8UkOFQCXkPFbnLKacGfGlbjgvMw==",
4845
+      "peerDependencies": {
4846
+        "@tensorflow/tfjs-converter": "^3.1.0",
4847
+        "@tensorflow/tfjs-core": "^3.1.0"
4848
+      }
4849
+    },
4850
+    "node_modules/@tensorflow/tfjs-backend-cpu": {
4851
+      "version": "3.13.0",
4852
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-3.13.0.tgz",
4853
+      "integrity": "sha512-POmzUoAP8HooYYTZ72O1ZYkpVZB0f+8PeAkbTxIG0oahcJccj6a0Vovp1A6xWKfljUoPlJb3jWVC++S603ZL8w==",
4854
+      "dependencies": {
4855
+        "@types/seedrandom": "2.4.27",
4856
+        "seedrandom": "2.4.3"
4857
+      },
4858
+      "engines": {
4859
+        "yarn": ">= 1.3.2"
4860
+      },
4861
+      "peerDependencies": {
4862
+        "@tensorflow/tfjs-core": "3.13.0"
4863
+      }
4864
+    },
4865
+    "node_modules/@tensorflow/tfjs-backend-wasm": {
4866
+      "version": "3.13.0",
4867
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-wasm/-/tfjs-backend-wasm-3.13.0.tgz",
4868
+      "integrity": "sha512-h5kNS4xvljoySzfcFwqbdFB6QZGR06IA9/Xq/PjBeZt18XEoJGqKHbOCYupmUlr5pxo/gnXzPhAC2h4SfZXPXw==",
4869
+      "dependencies": {
4870
+        "@tensorflow/tfjs-backend-cpu": "3.13.0",
4871
+        "@types/emscripten": "~0.0.34"
4872
+      },
4873
+      "peerDependencies": {
4874
+        "@tensorflow/tfjs-core": "3.13.0"
4875
+      }
4876
+    },
4877
+    "node_modules/@tensorflow/tfjs-converter": {
4878
+      "version": "3.13.0",
4879
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-3.13.0.tgz",
4880
+      "integrity": "sha512-H2VpDTv9Ve0HBt7ttzz46DmnsPaiT0B+yJjVH3NebGZbgY9C8boBgJIsdyqfiqEWBS3WxF8h4rh58Hv5XXMgaQ==",
4881
+      "peerDependencies": {
4882
+        "@tensorflow/tfjs-core": "3.13.0"
4883
+      }
4884
+    },
4885
+    "node_modules/@tensorflow/tfjs-core": {
4886
+      "version": "3.13.0",
4887
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-3.13.0.tgz",
4888
+      "integrity": "sha512-18qBEVIB/4u2OUK9nA5P1XT3e3LyarElD1UKNSNDpnMLxhLTUVZaCR71eHJcpl9wP2Q0cciaTJCTpJdPv1tNDQ==",
4889
+      "dependencies": {
4890
+        "@types/long": "^4.0.1",
4891
+        "@types/offscreencanvas": "~2019.3.0",
4892
+        "@types/seedrandom": "2.4.27",
4893
+        "@types/webgl-ext": "0.0.30",
4894
+        "long": "4.0.0",
4895
+        "node-fetch": "~2.6.1",
4896
+        "seedrandom": "2.4.3"
4897
+      },
4898
+      "engines": {
4899
+        "yarn": ">= 1.3.2"
4900
+      }
4901
+    },
4837 4902
     "node_modules/@trysound/sax": {
4838 4903
       "version": "0.2.0",
4839 4904
       "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz",
@@ -4880,6 +4945,11 @@
4880 4945
         "@types/node": "*"
4881 4946
       }
4882 4947
     },
4948
+    "node_modules/@types/emscripten": {
4949
+      "version": "0.0.34",
4950
+      "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-0.0.34.tgz",
4951
+      "integrity": "sha512-QSb9ojDincskc+uKMI0KXp8e1NALFINCrMlp8VGKGcTSxeEyRTTKyjWw75NYrCZHUsVEEEpr1tYHpbtaC++/sQ=="
4952
+    },
4883 4953
     "node_modules/@types/eslint": {
4884 4954
       "version": "8.4.1",
4885 4955
       "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz",
@@ -4984,6 +5054,11 @@
4984 5054
       "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=",
4985 5055
       "dev": true
4986 5056
     },
5057
+    "node_modules/@types/long": {
5058
+      "version": "4.0.1",
5059
+      "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz",
5060
+      "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w=="
5061
+    },
4987 5062
     "node_modules/@types/mime": {
4988 5063
       "version": "1.3.2",
4989 5064
       "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz",
@@ -4995,6 +5070,11 @@
4995 5070
       "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.19.tgz",
4996 5071
       "integrity": "sha512-PfeQhvcMR4cPFVuYfBN4ifG7p9c+Dlh3yUZR6k+5yQK7wX3gDgVxBly4/WkBRs9x4dmcy1TVl08SY67wwtEvmA=="
4997 5072
     },
5073
+    "node_modules/@types/offscreencanvas": {
5074
+      "version": "2019.3.0",
5075
+      "resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz",
5076
+      "integrity": "sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q=="
5077
+    },
4998 5078
     "node_modules/@types/parse-json": {
4999 5079
       "version": "4.0.0",
5000 5080
       "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz",
@@ -5051,6 +5131,11 @@
5051 5131
       "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz",
5052 5132
       "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew=="
5053 5133
     },
5134
+    "node_modules/@types/seedrandom": {
5135
+      "version": "2.4.27",
5136
+      "resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.27.tgz",
5137
+      "integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE="
5138
+    },
5054 5139
     "node_modules/@types/serve-index": {
5055 5140
       "version": "1.9.1",
5056 5141
       "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz",
@@ -5079,6 +5164,11 @@
5079 5164
         "@types/node": "*"
5080 5165
       }
5081 5166
     },
5167
+    "node_modules/@types/webgl-ext": {
5168
+      "version": "0.0.30",
5169
+      "resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.30.tgz",
5170
+      "integrity": "sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg=="
5171
+    },
5082 5172
     "node_modules/@types/ws": {
5083 5173
       "version": "8.2.3",
5084 5174
       "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.2.3.tgz",
@@ -11694,6 +11784,11 @@
11694 11784
         "logkitty": "bin/logkitty.js"
11695 11785
       }
11696 11786
     },
11787
+    "node_modules/long": {
11788
+      "version": "4.0.0",
11789
+      "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
11790
+      "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA=="
11791
+    },
11697 11792
     "node_modules/loose-envify": {
11698 11793
       "version": "1.4.0",
11699 11794
       "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
@@ -16270,6 +16365,11 @@
16270 16365
         "sdp-verify": "checker.js"
16271 16366
       }
16272 16367
     },
16368
+    "node_modules/seedrandom": {
16369
+      "version": "2.4.3",
16370
+      "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz",
16371
+      "integrity": "sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw="
16372
+    },
16273 16373
     "node_modules/select-hose": {
16274 16374
       "version": "2.0.0",
16275 16375
       "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz",
@@ -22857,6 +22957,48 @@
22857 22957
         "loader-utils": "^1.2.3"
22858 22958
       }
22859 22959
     },
22960
+    "@tensorflow-models/blazeface": {
22961
+      "version": "0.0.7",
22962
+      "resolved": "https://registry.npmjs.org/@tensorflow-models/blazeface/-/blazeface-0.0.7.tgz",
22963
+      "integrity": "sha512-+hInPkvHJoubfiXlmNuF3SCucZvU6W1PMC25IV99NSAftJUpKvLokfF93iX8UkOFQCXkPFbnLKacGfGlbjgvMw=="
22964
+    },
22965
+    "@tensorflow/tfjs-backend-cpu": {
22966
+      "version": "3.13.0",
22967
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-3.13.0.tgz",
22968
+      "integrity": "sha512-POmzUoAP8HooYYTZ72O1ZYkpVZB0f+8PeAkbTxIG0oahcJccj6a0Vovp1A6xWKfljUoPlJb3jWVC++S603ZL8w==",
22969
+      "requires": {
22970
+        "@types/seedrandom": "2.4.27",
22971
+        "seedrandom": "2.4.3"
22972
+      }
22973
+    },
22974
+    "@tensorflow/tfjs-backend-wasm": {
22975
+      "version": "3.13.0",
22976
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-wasm/-/tfjs-backend-wasm-3.13.0.tgz",
22977
+      "integrity": "sha512-h5kNS4xvljoySzfcFwqbdFB6QZGR06IA9/Xq/PjBeZt18XEoJGqKHbOCYupmUlr5pxo/gnXzPhAC2h4SfZXPXw==",
22978
+      "requires": {
22979
+        "@tensorflow/tfjs-backend-cpu": "3.13.0",
22980
+        "@types/emscripten": "~0.0.34"
22981
+      }
22982
+    },
22983
+    "@tensorflow/tfjs-converter": {
22984
+      "version": "3.13.0",
22985
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-3.13.0.tgz",
22986
+      "integrity": "sha512-H2VpDTv9Ve0HBt7ttzz46DmnsPaiT0B+yJjVH3NebGZbgY9C8boBgJIsdyqfiqEWBS3WxF8h4rh58Hv5XXMgaQ=="
22987
+    },
22988
+    "@tensorflow/tfjs-core": {
22989
+      "version": "3.13.0",
22990
+      "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-3.13.0.tgz",
22991
+      "integrity": "sha512-18qBEVIB/4u2OUK9nA5P1XT3e3LyarElD1UKNSNDpnMLxhLTUVZaCR71eHJcpl9wP2Q0cciaTJCTpJdPv1tNDQ==",
22992
+      "requires": {
22993
+        "@types/long": "^4.0.1",
22994
+        "@types/offscreencanvas": "~2019.3.0",
22995
+        "@types/seedrandom": "2.4.27",
22996
+        "@types/webgl-ext": "0.0.30",
22997
+        "long": "4.0.0",
22998
+        "node-fetch": "~2.6.1",
22999
+        "seedrandom": "2.4.3"
23000
+      }
23001
+    },
22860 23002
     "@trysound/sax": {
22861 23003
       "version": "0.2.0",
22862 23004
       "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz",
@@ -22900,6 +23042,11 @@
22900 23042
         "@types/node": "*"
22901 23043
       }
22902 23044
     },
23045
+    "@types/emscripten": {
23046
+      "version": "0.0.34",
23047
+      "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-0.0.34.tgz",
23048
+      "integrity": "sha512-QSb9ojDincskc+uKMI0KXp8e1NALFINCrMlp8VGKGcTSxeEyRTTKyjWw75NYrCZHUsVEEEpr1tYHpbtaC++/sQ=="
23049
+    },
22903 23050
     "@types/eslint": {
22904 23051
       "version": "8.4.1",
22905 23052
       "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz",
@@ -23004,6 +23151,11 @@
23004 23151
       "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=",
23005 23152
       "dev": true
23006 23153
     },
23154
+    "@types/long": {
23155
+      "version": "4.0.1",
23156
+      "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz",
23157
+      "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w=="
23158
+    },
23007 23159
     "@types/mime": {
23008 23160
       "version": "1.3.2",
23009 23161
       "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz",
@@ -23015,6 +23167,11 @@
23015 23167
       "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.19.tgz",
23016 23168
       "integrity": "sha512-PfeQhvcMR4cPFVuYfBN4ifG7p9c+Dlh3yUZR6k+5yQK7wX3gDgVxBly4/WkBRs9x4dmcy1TVl08SY67wwtEvmA=="
23017 23169
     },
23170
+    "@types/offscreencanvas": {
23171
+      "version": "2019.3.0",
23172
+      "resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz",
23173
+      "integrity": "sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q=="
23174
+    },
23018 23175
     "@types/parse-json": {
23019 23176
       "version": "4.0.0",
23020 23177
       "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz",
@@ -23071,6 +23228,11 @@
23071 23228
       "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz",
23072 23229
       "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew=="
23073 23230
     },
23231
+    "@types/seedrandom": {
23232
+      "version": "2.4.27",
23233
+      "resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.27.tgz",
23234
+      "integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE="
23235
+    },
23074 23236
     "@types/serve-index": {
23075 23237
       "version": "1.9.1",
23076 23238
       "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz",
@@ -23099,6 +23261,11 @@
23099 23261
         "@types/node": "*"
23100 23262
       }
23101 23263
     },
23264
+    "@types/webgl-ext": {
23265
+      "version": "0.0.30",
23266
+      "resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.30.tgz",
23267
+      "integrity": "sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg=="
23268
+    },
23102 23269
     "@types/ws": {
23103 23270
       "version": "8.2.3",
23104 23271
       "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.2.3.tgz",
@@ -28240,6 +28407,11 @@
28240 28407
         "yargs": "^15.1.0"
28241 28408
       }
28242 28409
     },
28410
+    "long": {
28411
+      "version": "4.0.0",
28412
+      "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
28413
+      "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA=="
28414
+    },
28243 28415
     "loose-envify": {
28244 28416
       "version": "1.4.0",
28245 28417
       "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
@@ -31698,6 +31870,11 @@
31698 31870
       "resolved": "https://registry.npmjs.org/sdp-transform/-/sdp-transform-2.3.0.tgz",
31699 31871
       "integrity": "sha1-V6lXWUIEHYV3qGnXx01MOgvYiPY="
31700 31872
     },
31873
+    "seedrandom": {
31874
+      "version": "2.4.3",
31875
+      "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz",
31876
+      "integrity": "sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw="
31877
+    },
31701 31878
     "select-hose": {
31702 31879
       "version": "2.0.0",
31703 31880
       "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz",

+ 4
- 0
package.json Zobrazit soubor

@@ -53,6 +53,10 @@
53 53
     "@react-navigation/native": "6.0.6",
54 54
     "@react-navigation/stack": "6.0.11",
55 55
     "@svgr/webpack": "4.3.2",
56
+    "@tensorflow-models/blazeface": "0.0.7",
57
+    "@tensorflow/tfjs-backend-wasm": "3.13.0",
58
+    "@tensorflow/tfjs-converter": "3.13.0",
59
+    "@tensorflow/tfjs-core": "3.13.0",
56 60
     "@vladmandic/face-api": "1.6.4",
57 61
     "@xmldom/xmldom": "0.7.5",
58 62
     "amplitude-js": "8.2.1",

+ 2
- 0
react/.eslintrc.js Zobrazit soubor

@@ -7,6 +7,8 @@ module.exports = {
7 7
         '.eslintrc-react-native.js'
8 8
     ],
9 9
     'rules': {
10
+        'flowtype/no-types-missing-file-annotation': 0,
11
+
10 12
         // XXX remove this eventually.
11 13
         'react/jsx-indent-props': 0
12 14
     },

+ 1
- 0
react/features/app/middlewares.web.js Zobrazit soubor

@@ -20,6 +20,7 @@ import '../shared-video/middleware';
20 20
 import '../settings/middleware';
21 21
 import '../talk-while-muted/middleware';
22 22
 import '../virtual-background/middleware';
23
+import '../face-centering/middleware';
23 24
 import '../facial-recognition/middleware';
24 25
 
25 26
 import './middlewares.any';

+ 1
- 0
react/features/app/reducers.web.js Zobrazit soubor

@@ -2,6 +2,7 @@
2 2
 
3 3
 import '../base/devices/reducer';
4 4
 import '../e2ee/reducer';
5
+import '../face-centering/reducer';
5 6
 import '../facial-recognition/reducer';
6 7
 import '../feedback/reducer';
7 8
 import '../local-recording/reducer';

+ 1
- 0
react/features/base/config/configWhitelist.js Zobrazit soubor

@@ -153,6 +153,7 @@ export default [
153 153
     'enableTcc',
154 154
     'enableAutomaticUrlCopy',
155 155
     'etherpad_base',
156
+    'faceCoordinatesSharing',
156 157
     'failICE',
157 158
     'feedbackPercentage',
158 159
     'fileRecordingsEnabled',

+ 39
- 0
react/features/face-centering/actionTypes.js Zobrazit soubor

@@ -0,0 +1,39 @@
1
+/**
2
+ * Redux action type dispatched in order to set the time interval in which
3
+ * the message to the face centering worker will be sent.
4
+ *
5
+ * {
6
+ *      type: SET_DETECTION_TIME_INTERVAL,
7
+ *      time: number
8
+ * }
9
+ */
10
+export const SET_DETECTION_TIME_INTERVAL = 'SET_DETECTION_TIME_INTERVAL';
11
+
12
+/**
13
+ * Redux action type dispatched in order to set recognition active in the state.
14
+ *
15
+ * {
16
+ *      type: START_FACE_RECOGNITION
17
+ * }
18
+ */
19
+export const START_FACE_RECOGNITION = 'START_FACE_RECOGNITION';
20
+
21
+/**
22
+ * Redux action type dispatched in order to set recognition inactive in the state.
23
+ *
24
+ * {
25
+ *      type: STOP_FACE_RECOGNITION
26
+ * }
27
+ */
28
+export const STOP_FACE_RECOGNITION = 'STOP_FACE_RECOGNITION';
29
+
30
+/**
31
+ * Redux action type dispatched in order to update coordinates of a detected face.
32
+ *
33
+ * {
34
+ *      type: UPDATE_FACE_COORDINATES,
35
+ *      faceBox: Object({ left, bottom, right, top }),
36
+ *      participantId: string
37
+ * }
38
+ */
39
+ export const UPDATE_FACE_COORDINATES = 'UPDATE_FACE_COORDINATES';

+ 139
- 0
react/features/face-centering/actions.js Zobrazit soubor

@@ -0,0 +1,139 @@
1
+import 'image-capture';
2
+
3
+import { getCurrentConference } from '../base/conference';
4
+import { getLocalParticipant, getParticipantCount } from '../base/participants';
5
+import { getLocalVideoTrack } from '../base/tracks';
6
+import { getBaseUrl } from '../base/util';
7
+import '../facial-recognition/createImageBitmap';
8
+
9
+import {
10
+    START_FACE_RECOGNITION,
11
+    STOP_FACE_RECOGNITION,
12
+    UPDATE_FACE_COORDINATES
13
+} from './actionTypes';
14
+import {
15
+    FACE_BOX_MESSAGE,
16
+    SEND_IMAGE_INTERVAL_MS
17
+} from './constants';
18
+import { sendDataToWorker, sendFaceBoxToParticipants } from './functions';
19
+import logger from './logger';
20
+
21
+/**
22
+ * Interval object for sending new image data to worker.
23
+ */
24
+let interval;
25
+
26
+/**
27
+ * Object containing an image capture of the local track.
28
+ */
29
+let imageCapture;
30
+
31
+/**
32
+ * Object where the face centering worker is stored.
33
+ */
34
+let worker;
35
+
36
+/**
37
+ * Loads the worker.
38
+ *
39
+ * @returns {Function}
40
+ */
41
+export function loadWorker() {
42
+    return async function(dispatch: Function, getState: Function) {
43
+        if (navigator.product === 'ReactNative') {
44
+            logger.warn('Unsupported environment for face centering');
45
+
46
+            return;
47
+        }
48
+
49
+        const baseUrl = getBaseUrl();
50
+        let workerUrl = `${baseUrl}libs/face-centering-worker.min.js`;
51
+
52
+        const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });
53
+
54
+        workerUrl = window.URL.createObjectURL(workerBlob);
55
+        worker = new Worker(workerUrl, { name: 'Face Centering Worker' });
56
+        worker.onmessage = function(e: Object) {
57
+            const { type, value } = e.data;
58
+
59
+            // receives a message with the face(s) bounding box.
60
+            if (type === FACE_BOX_MESSAGE) {
61
+                const state = getState();
62
+                const conference = getCurrentConference(state);
63
+                const localParticipant = getLocalParticipant(state);
64
+
65
+                if (getParticipantCount(state) > 1) {
66
+                    sendFaceBoxToParticipants(conference, value);
67
+                }
68
+
69
+                dispatch({
70
+                    type: UPDATE_FACE_COORDINATES,
71
+                    faceBox: value,
72
+                    id: localParticipant.id
73
+                });
74
+            }
75
+        };
76
+
77
+        dispatch(startFaceRecognition());
78
+    };
79
+}
80
+
81
+/**
82
+ * Starts the recognition and detection of face position.
83
+ *
84
+ * @param {Track | undefined} track - Track for which to start detecting faces.
85
+ *
86
+ * @returns {Function}
87
+ */
88
+export function startFaceRecognition(track) {
89
+    return async function(dispatch: Function, getState: Function) {
90
+        if (!worker) {
91
+            return;
92
+        }
93
+        const state = getState();
94
+        const { recognitionActive } = state['features/face-centering'];
95
+
96
+        if (recognitionActive) {
97
+            logger.log('Face centering already active.');
98
+
99
+            return;
100
+        }
101
+
102
+        const localVideoTrack = track || getLocalVideoTrack(state['features/base/tracks']);
103
+
104
+        if (!localVideoTrack) {
105
+            logger.warn('Face centering is disabled due to missing local track.');
106
+
107
+            return;
108
+        }
109
+
110
+        dispatch({ type: START_FACE_RECOGNITION });
111
+        logger.log('Start face recognition');
112
+
113
+        const stream = localVideoTrack.jitsiTrack.getOriginalStream();
114
+        const firstVideoTrack = stream.getVideoTracks()[0];
115
+
116
+        imageCapture = new ImageCapture(firstVideoTrack);
117
+        const { disableLocalVideoFlip, faceCoordinatesSharing } = state['features/base/config'];
118
+
119
+        interval = setInterval(() => {
120
+            sendDataToWorker(worker, imageCapture, faceCoordinatesSharing?.threshold, !disableLocalVideoFlip);
121
+        }, faceCoordinatesSharing?.captureInterval || SEND_IMAGE_INTERVAL_MS);
122
+    };
123
+}
124
+
125
+/**
126
+ * Stops the recognition and detection of face position.
127
+ *
128
+ * @returns {Function}
129
+ */
130
+export function stopFaceRecognition() {
131
+    return function(dispatch: Function) {
132
+        clearInterval(interval);
133
+        interval = null;
134
+        imageCapture = null;
135
+
136
+        dispatch({ type: STOP_FACE_RECOGNITION });
137
+        logger.log('Stop face recognition');
138
+    };
139
+}

+ 20
- 0
react/features/face-centering/constants.js Zobrazit soubor

@@ -0,0 +1,20 @@
1
+/**
2
+ * Type of message sent from main thread to worker that contain image data and
3
+ * will trigger a response message from the worker containing the detected face(s) bounding box if any.
4
+ */
5
+export const DETECT_FACE_BOX = 'DETECT_FACE_BOX';
6
+
7
+/**
8
+ * Type of event sent on the data channel.
9
+ */
10
+export const FACE_BOX_EVENT_TYPE = 'face-box';
11
+
12
+/**
13
+ * Type of message sent from the worker to main thread that contains a face box or undefined.
14
+ */
15
+export const FACE_BOX_MESSAGE = 'face-box';
16
+
17
+/**
18
+ * Miliseconds interval value for sending new image data to the worker.
19
+ */
20
+export const SEND_IMAGE_INTERVAL_MS = 100;

+ 107
- 0
react/features/face-centering/faceCenteringWorker.js Zobrazit soubor

@@ -0,0 +1,107 @@
1
+import * as blazeface from '@tensorflow-models/blazeface';
2
+import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
3
+import * as tf from '@tensorflow/tfjs-core';
4
+
5
+import { FACE_BOX_MESSAGE, DETECT_FACE_BOX } from './constants';
6
+
7
+/**
8
+ * Indicates whether an init error occurred.
9
+ */
10
+let initError = false;
11
+
12
+/**
13
+ * The blazeface model.
14
+ */
15
+let model;
16
+
17
+/**
18
+ * A flag that indicates whether the tensorflow backend is set or not.
19
+ */
20
+let backendSet = false;
21
+
22
+/**
23
+ * Flag for indicating whether an init operation (e.g setting tf backend) is in progress.
24
+ */
25
+let initInProgress = false;
26
+
27
+/**
28
+ * Callbacks queue for avoiding overlapping executions of face detection.
29
+ */
30
+const queue = [];
31
+
32
+/**
33
+ * Contains the last valid face bounding box (passes threshold validation) which was sent to the main process.
34
+ */
35
+let lastValidFaceBox;
36
+
37
+const detect = async message => {
38
+    const { baseUrl, imageBitmap, isHorizontallyFlipped, threshold } = message.data;
39
+
40
+    if (initInProgress || initError) {
41
+        return;
42
+    }
43
+
44
+    if (!backendSet) {
45
+        initInProgress = true;
46
+        setWasmPaths(`${baseUrl}libs/`);
47
+
48
+        try {
49
+            await tf.setBackend('wasm');
50
+        } catch (err) {
51
+            initError = true;
52
+
53
+            return;
54
+        }
55
+
56
+        backendSet = true;
57
+        initInProgress = false;
58
+    }
59
+
60
+    // load face detection model
61
+    if (!model) {
62
+        try {
63
+            model = await blazeface.load();
64
+        } catch (err) {
65
+            initError = true;
66
+
67
+            return;
68
+        }
69
+    }
70
+
71
+    tf.engine().startScope();
72
+
73
+    const image = tf.browser.fromPixels(imageBitmap);
74
+    const detections = await model.estimateFaces(image, false, isHorizontallyFlipped, false);
75
+
76
+    tf.engine().endScope();
77
+
78
+    let faceBox;
79
+
80
+    if (detections.length) {
81
+        faceBox = {
82
+            // normalize to percentage based
83
+            left: Math.round(Math.min(...detections.map(d => d.topLeft[0])) * 100 / imageBitmap.width),
84
+            right: Math.round(Math.max(...detections.map(d => d.bottomRight[0])) * 100 / imageBitmap.width),
85
+            top: Math.round(Math.min(...detections.map(d => d.topLeft[1])) * 100 / imageBitmap.height),
86
+            bottom: Math.round(Math.max(...detections.map(d => d.bottomRight[1])) * 100 / imageBitmap.height)
87
+        };
88
+
89
+        if (lastValidFaceBox && Math.abs(lastValidFaceBox.left - faceBox.left) < threshold) {
90
+            return;
91
+        }
92
+
93
+        lastValidFaceBox = faceBox;
94
+
95
+        self.postMessage({
96
+            type: FACE_BOX_MESSAGE,
97
+            value: faceBox
98
+        });
99
+    }
100
+};
101
+
102
+onmessage = function(message) {
103
+    if (message.data.id === DETECT_FACE_BOX) {
104
+        queue.push(() => detect(message));
105
+        queue.shift()();
106
+    }
107
+};

+ 96
- 0
react/features/face-centering/functions.js Zobrazit soubor

@@ -0,0 +1,96 @@
1
+import { getBaseUrl } from '../base/util';
2
+
3
+import { FACE_BOX_EVENT_TYPE, DETECT_FACE_BOX } from './constants';
4
+import logger from './logger';
5
+
6
+/**
7
+ * Sends the face box to all the other participants.
8
+ *
9
+ * @param {Object} conference - The current conference.
10
+ * @param  {Object} faceBox - Face box to be sent.
11
+ * @returns {void}
12
+ */
13
+export function sendFaceBoxToParticipants(
14
+        conference: Object,
15
+        faceBox: Object
16
+): void {
17
+    try {
18
+        conference.sendEndpointMessage('', {
19
+            type: FACE_BOX_EVENT_TYPE,
20
+            faceBox
21
+        });
22
+    } catch (err) {
23
+        logger.warn('Could not broadcast the face box to the other participants', err);
24
+    }
25
+}
26
+
27
+/**
28
+ * Sends the image data grabbed from the track in the image capture to the face centering worker.
29
+ *
30
+ * @param {Worker} worker - Face centering worker.
31
+ * @param {Object} imageCapture - Image capture that contains the current track.
32
+ * @param {number} threshold - Movement threshold as percentage for sharing face coordinates.
33
+ * @param {boolean} isHorizontallyFlipped - Indicates whether the image is horizontally flipped.
34
+ * @returns {Promise<void>}
35
+ */
36
+export async function sendDataToWorker(
37
+        worker: Worker,
38
+        imageCapture: Object,
39
+        threshold: number = 10,
40
+        isHorizontallyFlipped = true
41
+): Promise<void> {
42
+    if (imageCapture === null || imageCapture === undefined) {
43
+        return;
44
+    }
45
+
46
+    let imageBitmap;
47
+
48
+    try {
49
+        imageBitmap = await imageCapture.grabFrame();
50
+    } catch (err) {
51
+        logger.warn(err);
52
+
53
+        return;
54
+    }
55
+
56
+    worker.postMessage({
57
+        id: DETECT_FACE_BOX,
58
+        baseUrl: getBaseUrl(),
59
+        imageBitmap,
60
+        threshold,
61
+        isHorizontallyFlipped
62
+    });
63
+}
64
+
65
+/**
66
+ * Gets face box for a participant id.
67
+ *
68
+ * @param {string} id - The participant id.
69
+ * @param {Object} state - The redux state.
70
+ * @returns {Object}
71
+ */
72
+export function getFaceBoxForId(id: string, state: Object) {
73
+    return state['features/face-centering'].faceBoxes[id];
74
+}
75
+
76
+/**
77
+ * Gets the video object position for a participant id.
78
+ *
79
+ * @param {Object} state - The redux state.
80
+ * @param {string} id - The participant id.
81
+ * @returns {string} - CSS object-position in the shape of '{horizontalPercentage}% {verticalPercentage}%'.
82
+ */
83
+export function getVideoObjectPosition(state: Object, id: string) {
84
+    const faceBox = getFaceBoxForId(id, state);
85
+
86
+    if (faceBox) {
87
+        const { left, right, top, bottom } = faceBox;
88
+
89
+        const horizontalPos = 100 - Math.round((left + right) / 2, 100);
90
+        const verticalPos = 100 - Math.round((top + bottom) / 2, 100);
91
+
92
+        return `${horizontalPos}% ${verticalPos}%`;
93
+    }
94
+
95
+    return '50% 50%';
96
+}

+ 3
- 0
react/features/face-centering/logger.js Zobrazit soubor

@@ -0,0 +1,3 @@
1
+import { getLogger } from '../base/logging/functions';
2
+
3
+export default getLogger('features/face-centering');

+ 103
- 0
react/features/face-centering/middleware.js Zobrazit soubor

@@ -0,0 +1,103 @@
1
+import {
2
+    CONFERENCE_JOINED,
3
+    CONFERENCE_WILL_LEAVE,
4
+    getCurrentConference
5
+} from '../base/conference';
6
+import { JitsiConferenceEvents } from '../base/lib-jitsi-meet';
7
+import { MiddlewareRegistry } from '../base/redux';
8
+import { TRACK_UPDATED, TRACK_REMOVED, TRACK_ADDED } from '../base/tracks';
9
+
10
+import { UPDATE_FACE_COORDINATES } from './actionTypes';
11
+import {
12
+    loadWorker,
13
+    stopFaceRecognition,
14
+    startFaceRecognition
15
+} from './actions';
16
+import { FACE_BOX_EVENT_TYPE } from './constants';
17
+
18
+MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
19
+    const state = getState();
20
+    const { faceCoordinatesSharing } = state['features/base/config'];
21
+
22
+    if (!getCurrentConference(state)) {
23
+        return next(action);
24
+    }
25
+
26
+    if (action.type === CONFERENCE_JOINED) {
27
+        if (faceCoordinatesSharing?.enabled) {
28
+            dispatch(loadWorker());
29
+        }
30
+
31
+        // allow using remote face centering data when local face centering is not enabled
32
+        action.conference.on(
33
+            JitsiConferenceEvents.ENDPOINT_MESSAGE_RECEIVED,
34
+            (participant, eventData) => {
35
+                if (!participant || !eventData) {
36
+                    return;
37
+                }
38
+
39
+                if (eventData.type === FACE_BOX_EVENT_TYPE) {
40
+                    dispatch({
41
+                        type: UPDATE_FACE_COORDINATES,
42
+                        faceBox: eventData.faceBox,
43
+                        id: participant.getId()
44
+                    });
45
+                }
46
+            });
47
+
48
+        return next(action);
49
+    }
50
+
51
+    if (!faceCoordinatesSharing?.enabled) {
52
+        return next(action);
53
+    }
54
+
55
+    switch (action.type) {
56
+    case CONFERENCE_WILL_LEAVE : {
57
+        dispatch(stopFaceRecognition());
58
+
59
+        return next(action);
60
+    }
61
+    case TRACK_ADDED: {
62
+        const { jitsiTrack: { isLocal, videoType } } = action.track;
63
+
64
+        if (videoType === 'camera' && isLocal()) {
65
+            // need to pass this since the track is not yet added in the store
66
+            dispatch(startFaceRecognition(action.track));
67
+        }
68
+
69
+        return next(action);
70
+    }
71
+    case TRACK_UPDATED: {
72
+        const { jitsiTrack: { isLocal, videoType } } = action.track;
73
+
74
+        if (videoType !== 'camera' || !isLocal()) {
75
+            return next(action);
76
+        }
77
+
78
+        const { muted } = action.track;
79
+
80
+        if (muted !== undefined) {
81
+            // addresses video mute state changes
82
+            if (muted) {
83
+                dispatch(stopFaceRecognition());
84
+            } else {
85
+                dispatch(startFaceRecognition());
86
+            }
87
+        }
88
+
89
+        return next(action);
90
+    }
91
+    case TRACK_REMOVED: {
92
+        const { jitsiTrack: { isLocal, videoType } } = action.track;
93
+
94
+        if (videoType === 'camera' && isLocal()) {
95
+            dispatch(stopFaceRecognition());
96
+        }
97
+
98
+        return next(action);
99
+    }
100
+    }
101
+
102
+    return next(action);
103
+});

+ 55
- 0
react/features/face-centering/reducer.js Zobrazit soubor

@@ -0,0 +1,55 @@
1
+import { ReducerRegistry } from '../base/redux';
2
+
3
+import {
4
+    START_FACE_RECOGNITION,
5
+    STOP_FACE_RECOGNITION,
6
+    UPDATE_FACE_COORDINATES
7
+} from './actionTypes';
8
+
9
+/**
10
+ * The default state object.
11
+ */
12
+const defaultState = {
13
+    /**
14
+     * Map of participant ids containing their respective face box in the shape of left, right, bottom, top percentages.
15
+     * The percentages indicate the distance of the detected face starting edge (top or left) to the corresponding edge.
16
+     *
17
+     * Examples:
18
+     * 70% left indicates a 70% distance from the left edge of the video to the left edge of the detected face.
19
+     * 70% right indicates a 70% distance from the right edge of the video to the left edge of the detected face.
20
+     * 30% top indicates a 30% distance from the top edge of the video to the top edge of the detected face.
21
+     * 30% bottom indicates a 30% distance from the bottom edge of the video to the top edge of the detected face.
22
+     */
23
+    faceBoxes: {},
24
+
25
+    /**
26
+     * Flag indicating whether face recognition is currently running.
27
+     */
28
+    recognitionActive: false
29
+};
30
+
31
+ReducerRegistry.register('features/face-centering', (state = defaultState, action) => {
32
+    switch (action.type) {
33
+    case UPDATE_FACE_COORDINATES: {
34
+        return {
35
+            ...state,
36
+            faceBoxes: {
37
+                ...state.faceBoxes,
38
+                [action.id]: action.faceBox
39
+            }
40
+        };
41
+    }
42
+    case START_FACE_RECOGNITION: {
43
+        return {
44
+            ...state,
45
+            recognitionActive: true
46
+        };
47
+    }
48
+
49
+    case STOP_FACE_RECOGNITION: {
50
+        return defaultState;
51
+    }
52
+    }
53
+
54
+    return state;
55
+});

+ 7
- 11
react/features/facial-recognition/actions.js Zobrazit soubor

@@ -1,9 +1,10 @@
1 1
 // @flow
2
-import { getLocalVideoTrack } from '../base/tracks';
3
-
4 2
 import 'image-capture';
5 3
 import './createImageBitmap';
6 4
 
5
+import { getLocalVideoTrack } from '../base/tracks';
6
+import { getBaseUrl } from '../base/util';
7
+
7 8
 import {
8 9
     ADD_FACIAL_EXPRESSION,
9 10
     ADD_TO_FACIAL_EXPRESSIONS_BUFFER,
@@ -65,15 +66,9 @@ export function loadWorker() {
65 66
 
66 67
             return;
67 68
         }
68
-        let baseUrl = '';
69
-        const app: Object = document.querySelector('script[src*="app.bundle.min.js"]');
70 69
 
71
-        if (app) {
72
-            const idx = app.src.lastIndexOf('/');
73
-
74
-            baseUrl = `${app.src.substring(0, idx)}/`;
75
-        }
76
-        let workerUrl = `${baseUrl}facial-expressions-worker.min.js`;
70
+        const baseUrl = getBaseUrl();
71
+        let workerUrl = `${baseUrl}libs/facial-expressions-worker.min.js`;
77 72
 
78 73
         const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });
79 74
 
@@ -132,9 +127,10 @@ export function loadWorker() {
132 127
  */
133 128
 export function startFacialRecognition() {
134 129
     return async function(dispatch: Function, getState: Function) {
135
-        if (worker === undefined || worker === null) {
130
+        if (!worker) {
136 131
             return;
137 132
         }
133
+
138 134
         const state = getState();
139 135
         const { recognitionActive } = state['features/facial-recognition'];
140 136
 

+ 12
- 0
react/features/filmstrip/components/web/Thumbnail.js Zobrazit soubor

@@ -23,6 +23,7 @@ import {
23 23
     getTrackByMediaTypeAndParticipant,
24 24
     updateLastTrackVideoMediaEvent
25 25
 } from '../../../base/tracks';
26
+import { getVideoObjectPosition } from '../../../face-centering/functions';
26 27
 import { PresenceLabel } from '../../../presence-status';
27 28
 import { getCurrentLayout, LAYOUTS } from '../../../video-layout';
28 29
 import {
@@ -165,6 +166,11 @@ export type Props = {|
165 166
      */
166 167
     _raisedHand: boolean,
167 168
 
169
+    /**
170
+     * The video object position for the participant.
171
+     */
172
+    _videoObjectPosition: string,
173
+
168 174
     /**
169 175
      * The video track that will be displayed in the thumbnail.
170 176
      */
@@ -479,6 +485,7 @@ class Thumbnail extends Component<Props, State> {
479 485
             _isHidden,
480 486
             _isScreenSharing,
481 487
             _participant,
488
+            _videoObjectPosition,
482 489
             _videoTrack,
483 490
             _width,
484 491
             horizontalOffset,
@@ -522,6 +529,10 @@ class Thumbnail extends Component<Props, State> {
522 529
             };
523 530
         }
524 531
 
532
+        if (videoStyles.objectFit === 'cover') {
533
+            videoStyles.objectPosition = _videoObjectPosition;
534
+        }
535
+
525 536
         styles = {
526 537
             thumbnail: {
527 538
                 ...style,
@@ -1010,6 +1021,7 @@ function _mapStateToProps(state, ownProps): Object {
1010 1021
         _localFlipX: Boolean(localFlipX),
1011 1022
         _participant: participant,
1012 1023
         _raisedHand: hasRaisedHand(participant),
1024
+        _videoObjectPosition: getVideoObjectPosition(state, participant.id),
1013 1025
         _videoTrack,
1014 1026
         ...size
1015 1027
     };

+ 10
- 0
webpack.config.js Zobrazit soubor

@@ -384,6 +384,16 @@ module.exports = (_env, argv) => {
384 384
             ],
385 385
             performance: getPerformanceHints(perfHintOptions, 35 * 1024)
386 386
         }),
387
+        Object.assign({}, config, {
388
+            entry: {
389
+                'face-centering-worker': './react/features/face-centering/faceCenteringWorker.js'
390
+            },
391
+            plugins: [
392
+                ...config.plugins,
393
+                ...getBundleAnalyzerPlugin(analyzeBundle, 'face-centering-worker')
394
+            ],
395
+            performance: getPerformanceHints(perfHintOptions, 500 * 1024)
396
+        }),
387 397
         Object.assign({}, config, {
388 398
             entry: {
389 399
                 'facial-expressions-worker': './react/features/facial-recognition/facialExpressionsWorker.js'

Načítá se…
Zrušit
Uložit