You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

FaceLandmarksDetector.ts 12KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355
  1. import 'image-capture';
  2. import './createImageBitmap';
  3. import { IStore } from '../app/types';
  4. import { getLocalVideoTrack } from '../base/tracks/functions';
  5. import { getBaseUrl } from '../base/util/helpers';
  6. import {
  7. addFaceLandmarks,
  8. clearFaceExpressionBuffer,
  9. newFaceBox
  10. } from './actions';
  11. import {
  12. DETECTION_TYPES,
  13. DETECT_FACE,
  14. FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD,
  15. INIT_WORKER,
  16. NO_DETECTION,
  17. NO_FACE_DETECTION_THRESHOLD,
  18. WEBHOOK_SEND_TIME_INTERVAL
  19. } from './constants';
  20. import {
  21. getDetectionInterval,
  22. sendFaceExpressionsWebhook
  23. } from './functions';
  24. import logger from './logger';
/**
 * Class for face landmarks detection.
 */
  28. class FaceLandmarksDetector {
  29. private static instance: FaceLandmarksDetector;
  30. private initialized = false;
  31. private imageCapture: ImageCapture | null = null;
  32. private worker: Worker | null = null;
  33. private lastFaceExpression: string | null = null;
  34. private lastFaceExpressionTimestamp: number | null = null;
  35. private webhookSendInterval: number | null = null;
  36. private detectionInterval: number | null = null;
  37. private recognitionActive = false;
  38. private canvas?: HTMLCanvasElement;
  39. private context?: CanvasRenderingContext2D | null;
  40. private errorCount = 0;
  41. private noDetectionCount = 0;
  42. private noDetectionStartTimestamp: number | null = null;
  43. /**
  44. * Constructor for class, checks if the environment supports OffscreenCanvas.
  45. */
  46. private constructor() {
  47. if (typeof OffscreenCanvas === 'undefined') {
  48. this.canvas = document.createElement('canvas');
  49. this.context = this.canvas.getContext('2d');
  50. }
  51. }
  52. /**
  53. * Function for retrieving the FaceLandmarksDetector instance.
  54. *
  55. * @returns {FaceLandmarksDetector} - FaceLandmarksDetector instance.
  56. */
  57. public static getInstance(): FaceLandmarksDetector {
  58. if (!FaceLandmarksDetector.instance) {
  59. FaceLandmarksDetector.instance = new FaceLandmarksDetector();
  60. }
  61. return FaceLandmarksDetector.instance;
  62. }
  63. /**
  64. * Returns if the detected environment is initialized.
  65. *
  66. * @returns {boolean}
  67. */
  68. isInitialized(): boolean {
  69. return this.initialized;
  70. }
  71. /**
  72. * Initialization function: the worker is loaded and initialized, and then if possible the detection stats.
  73. *
  74. * @param {IStore} store - Redux store with dispatch and getState methods.
  75. * @returns {void}
  76. */
  77. init({ dispatch, getState }: IStore) {
  78. if (this.isInitialized()) {
  79. logger.info('Worker has already been initialized');
  80. return;
  81. }
  82. if (navigator.product === 'ReactNative') {
  83. logger.warn('Unsupported environment for face detection');
  84. return;
  85. }
  86. const baseUrl = `${getBaseUrl()}libs/`;
  87. let workerUrl = `${baseUrl}face-landmarks-worker.min.js`;
  88. // @ts-ignore
  89. const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });
  90. const state = getState();
  91. const addToBuffer = Boolean(state['features/base/config'].webhookProxyUrl);
  92. // @ts-ignore
  93. workerUrl = window.URL.createObjectURL(workerBlob);
  94. this.worker = new Worker(workerUrl, { name: 'Face Landmarks Worker' });
  95. this.worker.onmessage = ({ data }: MessageEvent<any>) => {
  96. const { faceExpression, faceBox, faceCount } = data;
  97. const messageTimestamp = Date.now();
  98. // if the number of faces detected is different from 1 we do not take into consideration that detection
  99. if (faceCount !== 1) {
  100. if (this.noDetectionCount === 0) {
  101. this.noDetectionStartTimestamp = messageTimestamp;
  102. }
  103. this.noDetectionCount++;
  104. if (this.noDetectionCount === NO_FACE_DETECTION_THRESHOLD && this.noDetectionStartTimestamp) {
  105. this.addFaceLandmarks(
  106. dispatch,
  107. this.noDetectionStartTimestamp,
  108. NO_DETECTION,
  109. addToBuffer
  110. );
  111. }
  112. return;
  113. } else if (this.noDetectionCount > 0) {
  114. this.noDetectionCount = 0;
  115. this.noDetectionStartTimestamp = null;
  116. }
  117. if (faceExpression?.expression) {
  118. const { expression } = faceExpression;
  119. if (expression !== this.lastFaceExpression) {
  120. this.addFaceLandmarks(
  121. dispatch,
  122. messageTimestamp,
  123. expression,
  124. addToBuffer
  125. );
  126. }
  127. }
  128. if (faceBox) {
  129. dispatch(newFaceBox(faceBox));
  130. }
  131. APP.API.notifyFaceLandmarkDetected(faceBox, faceExpression);
  132. };
  133. const { faceLandmarks } = state['features/base/config'];
  134. const detectionTypes = [
  135. faceLandmarks?.enableFaceCentering && DETECTION_TYPES.FACE_BOX,
  136. faceLandmarks?.enableFaceExpressionsDetection && DETECTION_TYPES.FACE_EXPRESSIONS
  137. ].filter(Boolean);
  138. this.worker.postMessage({
  139. type: INIT_WORKER,
  140. baseUrl,
  141. detectionTypes
  142. });
  143. this.initialized = true;
  144. this.startDetection({
  145. dispatch,
  146. getState
  147. });
  148. }
  149. /**
  150. * The function which starts the detection process.
  151. *
  152. * @param {IStore} store - Redux store with dispatch and getState methods.
  153. * @param {any} track - Track from middleware; can be undefined.
  154. * @returns {void}
  155. */
  156. startDetection({ dispatch, getState }: IStore, track?: any) {
  157. if (!this.isInitialized()) {
  158. logger.info('Worker has not been initialized');
  159. return;
  160. }
  161. if (this.recognitionActive) {
  162. logger.log('Face landmarks detection already active.');
  163. return;
  164. }
  165. const state = getState();
  166. const localVideoTrack = track || getLocalVideoTrack(state['features/base/tracks']);
  167. if (localVideoTrack === undefined) {
  168. logger.warn('Face landmarks detection is disabled due to missing local track.');
  169. return;
  170. }
  171. const stream = localVideoTrack.jitsiTrack.getOriginalStream();
  172. const firstVideoTrack = stream.getVideoTracks()[0];
  173. this.imageCapture = new ImageCapture(firstVideoTrack);
  174. this.recognitionActive = true;
  175. logger.log('Start face landmarks detection');
  176. const { faceLandmarks } = state['features/base/config'];
  177. this.detectionInterval = window.setInterval(() => {
  178. if (this.worker && this.imageCapture) {
  179. this.sendDataToWorker(
  180. faceLandmarks?.faceCenteringThreshold
  181. ).then(status => {
  182. if (status) {
  183. this.errorCount = 0;
  184. } else if (++this.errorCount > FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD) {
  185. /* this prevents the detection from stopping immediately after occurring an error
  186. * sometimes due to the small detection interval when starting the detection some errors
  187. * might occur due to the track not being ready
  188. */
  189. this.stopDetection({
  190. dispatch,
  191. getState
  192. });
  193. }
  194. });
  195. }
  196. }, getDetectionInterval(state));
  197. const { webhookProxyUrl } = state['features/base/config'];
  198. if (faceLandmarks?.enableFaceExpressionsDetection && webhookProxyUrl) {
  199. this.webhookSendInterval = window.setInterval(async () => {
  200. const result = await sendFaceExpressionsWebhook(getState());
  201. if (result) {
  202. dispatch(clearFaceExpressionBuffer());
  203. }
  204. }, WEBHOOK_SEND_TIME_INTERVAL);
  205. }
  206. }
  207. /**
  208. * The function which stops the detection process.
  209. *
  210. * @param {IStore} store - Redux store with dispatch and getState methods.
  211. * @returns {void}
  212. */
  213. stopDetection({ dispatch, getState }: IStore) {
  214. if (!this.recognitionActive || !this.isInitialized()) {
  215. return;
  216. }
  217. const stopTimestamp = Date.now();
  218. const addToBuffer = Boolean(getState()['features/base/config'].webhookProxyUrl);
  219. if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
  220. this.addFaceLandmarks(dispatch, stopTimestamp, null, addToBuffer);
  221. }
  222. this.webhookSendInterval && window.clearInterval(this.webhookSendInterval);
  223. this.detectionInterval && window.clearInterval(this.detectionInterval);
  224. this.webhookSendInterval = null;
  225. this.detectionInterval = null;
  226. this.imageCapture = null;
  227. this.recognitionActive = false;
  228. logger.log('Stop face landmarks detection');
  229. }
  230. /**
  231. * Dispatches the action for adding new face landmarks and changes the state of the class.
  232. *
  233. * @param {IStore.dispatch} dispatch - The redux dispatch function.
  234. * @param {number} endTimestamp - The timestamp when the face landmarks ended.
  235. * @param {string} newFaceExpression - The new face expression.
  236. * @param {boolean} addToBuffer - Flag for adding the face landmarks to the buffer.
  237. * @returns {void}
  238. */
  239. private addFaceLandmarks(
  240. dispatch: IStore['dispatch'],
  241. endTimestamp: number,
  242. newFaceExpression: string | null,
  243. addToBuffer = false) {
  244. if (this.lastFaceExpression && this.lastFaceExpressionTimestamp) {
  245. dispatch(addFaceLandmarks(
  246. {
  247. duration: endTimestamp - this.lastFaceExpressionTimestamp,
  248. faceExpression: this.lastFaceExpression,
  249. timestamp: this.lastFaceExpressionTimestamp
  250. },
  251. addToBuffer
  252. ));
  253. }
  254. this.lastFaceExpression = newFaceExpression;
  255. this.lastFaceExpressionTimestamp = endTimestamp;
  256. }
  257. /**
  258. * Sends the image data a canvas from the track in the image capture to the face detection worker.
  259. *
  260. * @param {number} faceCenteringThreshold - Movement threshold as percentage for sharing face coordinates.
  261. * @returns {Promise<boolean>} - True if sent, false otherwise.
  262. */
  263. private async sendDataToWorker(faceCenteringThreshold = 10): Promise<boolean> {
  264. if (!this.imageCapture
  265. || !this.worker
  266. || !this.imageCapture) {
  267. logger.log('Environment not ready! Could not send data to worker');
  268. return false;
  269. }
  270. // if ImageCapture is polyfilled then it would not have the track,
  271. // so there would be no point in checking for its readyState
  272. if (this.imageCapture.track && this.imageCapture.track.readyState !== 'live') {
  273. logger.log('Track not ready! Could not send data to worker');
  274. return false;
  275. }
  276. let imageBitmap;
  277. let image;
  278. try {
  279. imageBitmap = await this.imageCapture.grabFrame();
  280. } catch (err) {
  281. logger.log('Could not send data to worker');
  282. return false;
  283. }
  284. if (typeof OffscreenCanvas === 'undefined' && this.canvas && this.context) {
  285. this.canvas.width = imageBitmap.width;
  286. this.canvas.height = imageBitmap.height;
  287. this.context.drawImage(imageBitmap, 0, 0);
  288. image = this.context.getImageData(0, 0, imageBitmap.width, imageBitmap.height);
  289. } else {
  290. image = imageBitmap;
  291. }
  292. this.worker.postMessage({
  293. type: DETECT_FACE,
  294. image,
  295. threshold: faceCenteringThreshold
  296. });
  297. imageBitmap.close();
  298. return true;
  299. }
  300. }
// Export the process-wide singleton instance (not the class): all callers share
// one detector, matching the private-constructor/getInstance design above.
export default FaceLandmarksDetector.getInstance();