
FaceLandmarksDetector.ts 10KB

import 'image-capture';
import './createImageBitmap';

import { IStore } from '../app/types';
import { getLocalVideoTrack } from '../base/tracks/functions';
import { getBaseUrl } from '../base/util/helpers';

import {
    addFaceExpression,
    clearFaceExpressionBuffer,
    newFaceBox
} from './actions';
import {
    DETECTION_TYPES,
    DETECT_FACE,
    FACE_LANDMARK_DETECTION_ERROR_THRESHOLD,
    INIT_WORKER,
    WEBHOOK_SEND_TIME_INTERVAL
} from './constants';
import {
    getDetectionInterval,
    getFaceExpressionDuration,
    sendFaceExpressionsWebhook
} from './functions';
import logger from './logger';

/**
 * Class for face landmarks detection.
 */
class FaceLandmarksDetector {
    private static instance: FaceLandmarksDetector;
    private initialized = false;
    private imageCapture: ImageCapture | null = null;
    private worker: Worker | null = null;
    private lastFaceExpression: string | null = null;
    private lastFaceExpressionTimestamp: number | null = null;
    private duplicateConsecutiveExpressions = 0;
    private webhookSendInterval: number | null = null;
    private detectionInterval: number | null = null;
    private recognitionActive = false;
    private canvas?: HTMLCanvasElement;
    private context?: CanvasRenderingContext2D | null;
    private errorCount = 0;

    /**
     * Constructor for class, checks if the environment supports OffscreenCanvas.
     */
    private constructor() {
        if (typeof OffscreenCanvas === 'undefined') {
            this.canvas = document.createElement('canvas');
            this.context = this.canvas.getContext('2d');
        }
    }

    /**
     * Function for retrieving the FaceLandmarksDetector instance.
     *
     * @returns {FaceLandmarksDetector} - FaceLandmarksDetector instance.
     */
    public static getInstance(): FaceLandmarksDetector {
        if (!FaceLandmarksDetector.instance) {
            FaceLandmarksDetector.instance = new FaceLandmarksDetector();
        }

        return FaceLandmarksDetector.instance;
    }

    /**
     * Returns whether the face detection environment has been initialized.
     *
     * @returns {boolean}
     */
    isInitialized(): boolean {
        return this.initialized;
    }

    /**
     * Initialization function: the worker is loaded and initialized, and then, if possible, the detection starts.
     *
     * @param {IStore} store - Redux store with dispatch and getState methods.
     * @returns {void}
     */
    init({ dispatch, getState }: IStore) {
        if (this.isInitialized()) {
            logger.info('Worker has already been initialized');

            return;
        }

        if (navigator.product === 'ReactNative') {
            logger.warn('Unsupported environment for face detection');

            return;
        }

        const baseUrl = `${getBaseUrl()}libs/`;
        let workerUrl = `${baseUrl}face-landmarks-worker.min.js`;

        // @ts-ignore
        const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });

        // @ts-ignore
        workerUrl = window.URL.createObjectURL(workerBlob);

        this.worker = new Worker(workerUrl, { name: 'Face Recognition Worker' });
        this.worker.onmessage = ({ data }: MessageEvent<any>) => {
            const { faceExpression, faceBox } = data;

            if (faceExpression) {
                if (faceExpression === this.lastFaceExpression) {
                    this.duplicateConsecutiveExpressions++;
                } else {
                    if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
                        dispatch(addFaceExpression(
                            this.lastFaceExpression,
                            getFaceExpressionDuration(getState(), this.duplicateConsecutiveExpressions + 1),
                            this.lastFaceExpressionTimestamp
                        ));
                    }

                    this.lastFaceExpression = faceExpression;
                    this.lastFaceExpressionTimestamp = Date.now();
                    this.duplicateConsecutiveExpressions = 0;
                }
            }

            if (faceBox) {
                dispatch(newFaceBox(faceBox));
            }

            APP.API.notifyFaceLandmarkDetected(faceBox, faceExpression);
        };

        const { faceLandmarks } = getState()['features/base/config'];
        const detectionTypes = [
            faceLandmarks?.enableFaceCentering && DETECTION_TYPES.FACE_BOX,
            faceLandmarks?.enableFaceExpressionsDetection && DETECTION_TYPES.FACE_EXPRESSIONS
        ].filter(Boolean);

        this.worker.postMessage({
            type: INIT_WORKER,
            baseUrl,
            detectionTypes
        });

        this.initialized = true;

        this.startDetection({
            dispatch,
            getState
        });
    }
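
    // Summary of the worker protocol as it is used in this file (inferred from this code only, not from the
    // worker's own source): the main thread posts one { type: INIT_WORKER, baseUrl, detectionTypes } message,
    // then repeated { type: DETECT_FACE, image, threshold } messages from sendDataToWorker(), and the worker
    // replies with { faceExpression, faceBox } payloads consumed by the onmessage handler above.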

    /**
     * The function which starts the detection process.
     *
     * @param {IStore} store - Redux store with dispatch and getState methods.
     * @param {any} track - Track from middleware; can be undefined.
     * @returns {void}
     */
    startDetection({ dispatch, getState }: IStore, track?: any) {
        if (!this.isInitialized()) {
            logger.info('Worker has not been initialized');

            return;
        }

        if (this.recognitionActive) {
            logger.log('Face detection already active.');

            return;
        }

        const state = getState();
        const localVideoTrack = track || getLocalVideoTrack(state['features/base/tracks']);

        if (localVideoTrack === undefined) {
            logger.warn('Face landmarks detection is disabled due to missing local track.');

            return;
        }

        const stream = localVideoTrack.jitsiTrack.getOriginalStream();
        const firstVideoTrack = stream.getVideoTracks()[0];

        this.imageCapture = new ImageCapture(firstVideoTrack);
        this.recognitionActive = true;
        logger.log('Start face detection');

        const { faceLandmarks } = state['features/base/config'];

        this.detectionInterval = window.setInterval(() => {
            if (this.worker && this.imageCapture) {
                this.sendDataToWorker(
                    faceLandmarks?.faceCenteringThreshold
                ).then(status => {
                    if (status) {
                        this.errorCount = 0;
                    } else if (++this.errorCount > FACE_LANDMARK_DETECTION_ERROR_THRESHOLD) {
                        /* This prevents the detection from stopping immediately after a single error.
                         * Because of the small detection interval, a few errors can occur right after the
                         * detection starts, while the track is not yet ready.
                         */
                        this.stopDetection({
                            dispatch,
                            getState
                        });
                    }
                });
            }
        }, getDetectionInterval(state));

        const { webhookProxyUrl } = state['features/base/config'];

        if (faceLandmarks?.enableFaceExpressionsDetection && webhookProxyUrl) {
            this.webhookSendInterval = window.setInterval(async () => {
                const result = await sendFaceExpressionsWebhook(getState());

                if (result) {
                    dispatch(clearFaceExpressionBuffer());
                }
            }, WEBHOOK_SEND_TIME_INTERVAL);
        }
    }
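
    // For reference, the config values this class reads from state['features/base/config'] (field names are
    // taken from the usages in this file; the values below are made-up examples):
    //
    //     faceLandmarks: {
    //         enableFaceCentering: true,
    //         enableFaceExpressionsDetection: true,
    //         faceCenteringThreshold: 10
    //     },
    //     webhookProxyUrl: 'https://example.com/webhook-proxy'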

    /**
     * The function which stops the detection process.
     *
     * @param {IStore} store - Redux store with dispatch and getState methods.
     * @returns {void}
     */
    stopDetection({ dispatch, getState }: IStore) {
        if (!this.recognitionActive || !this.isInitialized()) {
            return;
        }

        if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
            dispatch(
                addFaceExpression(
                    this.lastFaceExpression,
                    getFaceExpressionDuration(getState(), this.duplicateConsecutiveExpressions + 1),
                    this.lastFaceExpressionTimestamp
                )
            );
            this.duplicateConsecutiveExpressions = 0;
            this.lastFaceExpression = null;
            this.lastFaceExpressionTimestamp = null;
        }

        this.webhookSendInterval && window.clearInterval(this.webhookSendInterval);
        this.detectionInterval && window.clearInterval(this.detectionInterval);
        this.webhookSendInterval = null;
        this.detectionInterval = null;
        this.imageCapture = null;
        this.recognitionActive = false;
        logger.log('Stop face detection');
    }

    /**
     * Sends the image data from the track in the image capture (converted via a canvas when needed) to the
     * face detection worker.
     *
     * @param {number} faceCenteringThreshold - Movement threshold as percentage for sharing face coordinates.
     * @returns {Promise<boolean>} - True if sent, false otherwise.
     */
    private async sendDataToWorker(faceCenteringThreshold = 10): Promise<boolean> {
        if (!this.imageCapture
            || !this.worker
            || !this.imageCapture?.track
            || this.imageCapture?.track.readyState !== 'live') {
            logger.log('Environment not ready! Could not send data to worker');

            return false;
        }

        let imageBitmap;
        let image;

        try {
            imageBitmap = await this.imageCapture.grabFrame();
        } catch (err) {
            logger.log('Could not send data to worker');

            return false;
        }
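
        // Fallback path: when OffscreenCanvas is unavailable, the frame is rasterized into ImageData on the
        // hidden 2D canvas created in the constructor; otherwise the ImageBitmap is posted as-is and the
        // worker is assumed to rasterize it itself (the worker code is not part of this file).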
        if (typeof OffscreenCanvas === 'undefined' && this.canvas && this.context) {
            this.canvas.width = imageBitmap.width;
            this.canvas.height = imageBitmap.height;
            this.context.drawImage(imageBitmap, 0, 0);
            image = this.context.getImageData(0, 0, imageBitmap.width, imageBitmap.height);
        } else {
            image = imageBitmap;
        }

        this.worker.postMessage({
            type: DETECT_FACE,
            image,
            threshold: faceCenteringThreshold
        });

        imageBitmap.close();

        return true;
    }
}

export default FaceLandmarksDetector.getInstance();
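
For context, a minimal sketch of how this singleton might be driven from application code (illustrative only: the helper names and the call site are assumptions, not part of this file; only init(), startDetection() and stopDetection() come from the class above):

import { IStore } from '../app/types';

import faceLandmarksDetector from './FaceLandmarksDetector';

// Hypothetical hook-up point, e.g. invoked from a middleware when a conference starts or ends.
export function startFaceLandmarks(store: IStore) {
    // Loads and initializes the worker once, then starts the detection interval if a local video track exists.
    faceLandmarksDetector.init(store);
}

export function stopFaceLandmarks(store: IStore) {
    // Flushes the pending face expression and clears the detection/webhook intervals.
    faceLandmarksDetector.stopDetection(store);
}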