
FaceLandmarksDetector.ts

import 'image-capture';
import './createImageBitmap';

import { IStore } from '../app/types';
import { getLocalVideoTrack } from '../base/tracks/functions';
import { getBaseUrl } from '../base/util/helpers';

import {
    addFaceExpression,
    clearFaceExpressionBuffer,
    faceLandmarkDetectionStopped,
    newFaceBox
} from './actions';
import {
    DETECTION_TYPES,
    DETECT_FACE,
    FACE_LANDMARK_DETECTION_ERROR_THRESHOLD,
    INIT_WORKER,
    WEBHOOK_SEND_TIME_INTERVAL
} from './constants';
import {
    getDetectionInterval,
    getFaceExpressionDuration,
    sendFaceExpressionsWebhook
} from './functions';
import logger from './logger';

declare const APP: any;

/**
 * Class for face landmarks detection.
 */
class FaceLandmarksDetector {
    private static instance: FaceLandmarksDetector;
    private initialized = false;
    private imageCapture: ImageCapture | null = null;
    private worker: Worker | null = null;
    private lastFaceExpression: string | null = null;
    private lastFaceExpressionTimestamp: number | null = null;
    private duplicateConsecutiveExpressions = 0;
    private webhookSendInterval: number | null = null;
    private detectionInterval: number | null = null;
    private recognitionActive = false;
    private canvas?: HTMLCanvasElement;
    private context?: CanvasRenderingContext2D | null;
    private errorCount = 0;

    /**
     * Constructor for the class; checks if the environment supports OffscreenCanvas
     * and falls back to a regular canvas element when it does not.
     */
    private constructor() {
        if (typeof OffscreenCanvas === 'undefined') {
            this.canvas = document.createElement('canvas');
            this.context = this.canvas.getContext('2d');
        }
    }

    /**
     * Function for retrieving the FaceLandmarksDetector instance.
     *
     * @returns {FaceLandmarksDetector} - FaceLandmarksDetector instance.
     */
    public static getInstance(): FaceLandmarksDetector {
        if (!FaceLandmarksDetector.instance) {
            FaceLandmarksDetector.instance = new FaceLandmarksDetector();
        }

        return FaceLandmarksDetector.instance;
    }

    /**
     * Returns whether the detector has been initialized.
     *
     * @returns {boolean}
     */
    isInitialized(): boolean {
        return this.initialized;
    }

    /**
     * Initialization function: the worker is loaded and initialized, and then, if possible, detection starts.
     *
     * @param {IStore} store - Redux store with dispatch and getState methods.
     * @returns {void}
     */
    init({ dispatch, getState }: IStore) {
        if (this.isInitialized()) {
            logger.info('Worker has already been initialized');

            return;
        }

        if (navigator.product === 'ReactNative') {
            logger.warn('Unsupported environment for face detection');

            return;
        }

        const baseUrl = `${getBaseUrl()}libs/`;
        let workerUrl = `${baseUrl}face-landmarks-worker.min.js`;

        // Load the worker through a blob URL that simply calls importScripts(), so the
        // worker bundle can be fetched from the deployment's base URL even if it is
        // served from another origin.
        // @ts-ignore
        const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });

        // @ts-ignore
        workerUrl = window.URL.createObjectURL(workerBlob);
        this.worker = new Worker(workerUrl, { name: 'Face Recognition Worker' });

        this.worker.onmessage = ({ data }: MessageEvent<any>) => {
            const { faceExpression, faceBox } = data;

            if (faceExpression) {
                if (faceExpression === this.lastFaceExpression) {
                    this.duplicateConsecutiveExpressions++;
                } else {
                    if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
                        dispatch(addFaceExpression(
                            this.lastFaceExpression,
                            getFaceExpressionDuration(getState(), this.duplicateConsecutiveExpressions + 1),
                            this.lastFaceExpressionTimestamp
                        ));
                    }

                    this.lastFaceExpression = faceExpression;
                    this.lastFaceExpressionTimestamp = Date.now();
                    this.duplicateConsecutiveExpressions = 0;
                }
            }

            if (faceBox) {
                dispatch(newFaceBox(faceBox));
            }

            APP.API.notifyFaceLandmarkDetected(faceBox, faceExpression);
        };

        const { faceLandmarks } = getState()['features/base/config'];
        const detectionTypes = [
            faceLandmarks?.enableFaceCentering && DETECTION_TYPES.FACE_BOX,
            faceLandmarks?.enableFaceExpressionsDetection && DETECTION_TYPES.FACE_EXPRESSIONS
        ].filter(Boolean);

        this.worker.postMessage({
            type: INIT_WORKER,
            baseUrl,
            detectionTypes
        });
        this.initialized = true;

        this.startDetection({
            dispatch,
            getState
        });
    }

    /**
     * The function which starts the detection process.
     *
     * @param {IStore} store - Redux store with dispatch and getState methods.
     * @param {any} track - Track from middleware; can be undefined.
     * @returns {void}
     */
    startDetection({ dispatch, getState }: IStore, track?: any) {
        if (!this.isInitialized()) {
            logger.info('Worker has not been initialized');

            return;
        }

        if (this.recognitionActive) {
            logger.log('Face detection already active.');

            return;
        }

        const state = getState();
        const localVideoTrack = track || getLocalVideoTrack(state['features/base/tracks']);

        if (localVideoTrack === undefined) {
            logger.warn('Face landmarks detection is disabled due to missing local track.');

            return;
        }

        const stream = localVideoTrack.jitsiTrack.getOriginalStream();
        const firstVideoTrack = stream.getVideoTracks()[0];

        this.imageCapture = new ImageCapture(firstVideoTrack);
        this.recognitionActive = true;
        logger.log('Start face detection');

        const { faceLandmarks } = state['features/base/config'];

        this.detectionInterval = window.setInterval(() => {
            if (this.worker && this.imageCapture) {
                this.sendDataToWorker(
                    faceLandmarks?.faceCenteringThreshold
                ).then(status => {
                    if (status) {
                        this.errorCount = 0;
                    } else if (++this.errorCount > FACE_LANDMARK_DETECTION_ERROR_THRESHOLD) {
                        /* Only stop the detection after several consecutive errors. Right after
                         * detection starts, the short detection interval can produce a few errors
                         * because the track is not ready yet, and those should not stop it.
                         */
                        this.stopDetection({
                            dispatch,
                            getState
                        });
                    }
                });
            }
        }, getDetectionInterval(state));

        const { webhookProxyUrl } = state['features/base/config'];

        if (faceLandmarks?.enableFaceExpressionsDetection && webhookProxyUrl) {
            this.webhookSendInterval = window.setInterval(async () => {
                const result = await sendFaceExpressionsWebhook(getState());

                if (result) {
                    dispatch(clearFaceExpressionBuffer());
                }
            }, WEBHOOK_SEND_TIME_INTERVAL);
        }
    }

    /**
     * The function which stops the detection process.
     *
     * @param {IStore} store - Redux store with dispatch and getState methods.
     * @returns {void}
     */
    stopDetection({ dispatch, getState }: IStore) {
        if (!this.recognitionActive || !this.isInitialized()) {
            return;
        }

        if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
            dispatch(
                addFaceExpression(
                    this.lastFaceExpression,
                    getFaceExpressionDuration(getState(), this.duplicateConsecutiveExpressions + 1),
                    this.lastFaceExpressionTimestamp
                )
            );
            this.duplicateConsecutiveExpressions = 0;
            this.lastFaceExpression = null;
            this.lastFaceExpressionTimestamp = null;
        }

        this.webhookSendInterval && window.clearInterval(this.webhookSendInterval);
        this.detectionInterval && window.clearInterval(this.detectionInterval);
        this.webhookSendInterval = null;
        this.detectionInterval = null;
        this.imageCapture = null;
        this.recognitionActive = false;
        dispatch(faceLandmarkDetectionStopped(Date.now()));
        logger.log('Stop face detection');
    }

    /**
     * Grabs a frame from the image capture, draws it onto a canvas if needed and sends
     * the resulting image data to the face detection worker.
     *
     * @param {number} faceCenteringThreshold - Movement threshold as percentage for sharing face coordinates.
     * @returns {Promise<boolean>} - True if sent, false otherwise.
     */
    private async sendDataToWorker(faceCenteringThreshold = 10): Promise<boolean> {
        if (!this.imageCapture
            || !this.worker
            || !this.imageCapture?.track
            || this.imageCapture?.track.readyState !== 'live') {
            logger.log('Environment not ready! Could not send data to worker');

            return false;
        }

        let imageBitmap;
        let image;

        try {
            imageBitmap = await this.imageCapture.grabFrame();
        } catch (err) {
            logger.log('Could not send data to worker');

            return false;
        }

        // When OffscreenCanvas is not available, convert the bitmap to ImageData on the
        // fallback canvas before posting it to the worker; otherwise the ImageBitmap is
        // posted as-is.
        if (typeof OffscreenCanvas === 'undefined' && this.canvas && this.context) {
            this.canvas.width = imageBitmap.width;
            this.canvas.height = imageBitmap.height;
            this.context.drawImage(imageBitmap, 0, 0);
            image = this.context.getImageData(0, 0, imageBitmap.width, imageBitmap.height);
        } else {
            image = imageBitmap;
        }

        this.worker.postMessage({
            type: DETECT_FACE,
            image,
            threshold: faceCenteringThreshold
        });

        imageBitmap.close();

        return true;
    }
}

export default FaceLandmarksDetector.getInstance();
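
// Example usage (illustrative sketch, not part of this module): the singleton is
// normally driven from the face-landmarks Redux middleware. The exact call sites
// below are assumptions, shown only to outline the lifecycle.
//
//   import FaceLandmarksDetector from './FaceLandmarksDetector';
//
//   FaceLandmarksDetector.init(store);                  // load the worker; starts detection if a track exists
//   FaceLandmarksDetector.startDetection(store, track); // (re)start when the local video track changes
//   FaceLandmarksDetector.stopDetection(store);         // flush the last expression and clear the intervals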