FaceLandmarksHelper.ts

import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
import { Config, FaceResult, Human } from '@vladmandic/human';

import { DETECTION_TYPES, FACE_DETECTION_SCORE_THRESHOLD, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';
import { DetectInput, DetectOutput, FaceBox, FaceExpression, InitInput } from './types';

export interface IFaceLandmarksHelper {
    detect: ({ image, threshold }: DetectInput) => Promise<DetectOutput>;
    getDetectionInProgress: () => boolean;
    getDetections: (image: ImageBitmap | ImageData) => Promise<Array<FaceResult>>;
    getFaceBox: (detections: Array<FaceResult>, threshold: number) => FaceBox | undefined;
    getFaceCount: (detections: Array<FaceResult>) => number;
    getFaceExpression: (detections: Array<FaceResult>) => FaceExpression | undefined;
    init: () => Promise<void>;
}
/**
 * Helper class for the human library.
 */
export class HumanHelper implements IFaceLandmarksHelper {
    protected human: Human | undefined;
    protected faceDetectionTypes: string[];
    protected baseUrl: string;
    private detectionInProgress = false;
    private lastValidFaceBox: FaceBox | undefined;

    /**
     * Configuration for human. Every model is disabled here; init() selectively
     * enables the face detector and/or the emotion model based on the requested
     * detection types.
     */
    private config: Partial<Config> = {
        backend: 'humangl',
        async: true,
        warmup: 'none',
        cacheModels: true,
        cacheSensitivity: 0,
        debug: false,
        deallocate: true,
        filter: { enabled: false },
        face: {
            enabled: false,
            detector: {
                enabled: false,
                rotation: false,
                modelPath: 'blazeface-front.json',
                maxDetected: 20
            },
            mesh: { enabled: false },
            iris: { enabled: false },
            emotion: {
                enabled: false,
                modelPath: 'emotion.json'
            },
            description: { enabled: false }
        },
        hand: { enabled: false },
        gesture: { enabled: false },
        body: { enabled: false },
        segmentation: { enabled: false }
    };
    /**
     * Constructor for the helper; kicks off initialization.
     *
     * @param {InitInput} input - The input for the helper.
     * @returns {void}
     */
    constructor({ baseUrl, detectionTypes }: InitInput) {
        this.faceDetectionTypes = detectionTypes;
        this.baseUrl = baseUrl;

        // Intentionally not awaited; getDetections() bails out until the models are ready.
        this.init();
    }

    /**
     * Initializes the human helper with the available tfjs backend for the given detection types.
     *
     * @returns {Promise<void>}
     */
    async init(): Promise<void> {
        if (!this.human) {
            this.config.modelBasePath = this.baseUrl;

            // Without OffscreenCanvas the humangl (WebGL) backend is not usable
            // in the worker, so fall back to the wasm backend.
            if (!self.OffscreenCanvas) {
                this.config.backend = 'wasm';
                this.config.wasmPath = this.baseUrl;
                setWasmPaths(this.baseUrl);
            }

            if (this.faceDetectionTypes.length > 0 && this.config.face) {
                this.config.face.enabled = true;
            }

            if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX) && this.config.face?.detector) {
                this.config.face.detector.enabled = true;
            }

            if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS) && this.config.face?.emotion) {
                this.config.face.emotion.enabled = true;
            }

            const initialHuman = new Human(this.config);

            try {
                await initialHuman.load();
            } catch (err) {
                console.error(err);
            }

            this.human = initialHuman;
        }
    }
    /**
     * Gets the face box from the detections; returns undefined when there is not
     * exactly one valid detection or the position change is below the threshold.
     *
     * @param {Array<FaceResult>} detections - The array with the detections.
     * @param {number} threshold - Face box position change threshold.
     * @returns {FaceBox | undefined}
     */
    getFaceBox(detections: Array<FaceResult>, threshold: number): FaceBox | undefined {
        if (this.getFaceCount(detections) !== 1) {
            return;
        }

        const faceBox: FaceBox = {
            // Normalize to percentages of the frame width.
            left: Math.round(detections[0].boxRaw[0] * 100),
            right: Math.round((detections[0].boxRaw[0] + detections[0].boxRaw[2]) * 100)
        };

        faceBox.width = Math.round(faceBox.right - faceBox.left);

        // Ignore movements smaller than the threshold to avoid jittery re-centering.
        if (this.lastValidFaceBox && threshold && Math.abs(this.lastValidFaceBox.left - faceBox.left) < threshold) {
            return;
        }

        this.lastValidFaceBox = faceBox;

        return faceBox;
    }
    /**
     * Gets the face expression from the detections; returns undefined when there is
     * not exactly one valid detection.
     *
     * @param {Array<FaceResult>} detections - The array with the detections.
     * @returns {FaceExpression | undefined}
     */
    getFaceExpression(detections: Array<FaceResult>): FaceExpression | undefined {
        if (this.getFaceCount(detections) !== 1) {
            return;
        }

        const detection = detections[0];

        if (detection.emotion) {
            // The emotion results are sorted by score, so the first entry is the dominant one.
            return {
                expression: FACE_EXPRESSIONS_NAMING_MAPPING[detection.emotion[0].emotion],
                score: detection.emotion[0].score
            };
        }
    }
    /**
     * Gets the face count, i.e. the number of detections.
     *
     * @param {Array<FaceResult>} detections - The array with the detections.
     * @returns {number}
     */
    getFaceCount(detections: Array<FaceResult> | undefined): number {
        if (detections) {
            return detections.length;
        }

        return 0;
    }
    /**
     * Gets the detections from the image captured from the track.
     *
     * @param {ImageBitmap | ImageData} image - The image captured from the track;
     * an ImageBitmap when OffscreenCanvas is available, otherwise ImageData.
     * @returns {Promise<Array<FaceResult>>}
     */
    async getDetections(image: ImageBitmap | ImageData): Promise<Array<FaceResult>> {
        if (!this.human || !this.faceDetectionTypes.length) {
            return [];
        }

        // Run the detection inside a tf scope so that intermediate tensors
        // (including the input tensor) are disposed automatically.
        this.human.tf.engine().startScope();

        const imageTensor = this.human.tf.browser.fromPixels(image);
        const { face: detections } = await this.human.detect(imageTensor, this.config);

        this.human.tf.engine().endScope();

        return detections.filter(detection => detection.score > FACE_DETECTION_SCORE_THRESHOLD);
    }
    /**
     * Gathers all the data from the detections; this is the entry point called from
     * the worker.
     *
     * @param {DetectInput} input - The input for the detections.
     * @returns {Promise<DetectOutput>}
     */
    public async detect({ image, threshold }: DetectInput): Promise<DetectOutput> {
        let faceExpression;
        let faceBox;

        this.detectionInProgress = true;

        const detections = await this.getDetections(image);

        if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
            faceExpression = this.getFaceExpression(detections);
        }

        if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
            // If more than one face is detected, the face centering will be disabled.
            if (this.getFaceCount(detections) > 1) {
                this.faceDetectionTypes.splice(this.faceDetectionTypes.indexOf(DETECTION_TYPES.FACE_BOX), 1);

                // Face box covering the full frame, used to re-center the view.
                faceBox = {
                    left: 0,
                    right: 100,
                    width: 100
                };
            } else {
                faceBox = this.getFaceBox(detections, threshold);
            }
        }

        this.detectionInProgress = false;

        return {
            faceExpression,
            faceBox,
            faceCount: this.getFaceCount(detections)
        };
    }
    /**
     * Returns the detection state.
     *
     * @returns {boolean}
     */
    public getDetectionInProgress(): boolean {
        return this.detectionInProgress;
    }
}
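
For context, a minimal sketch of how a web worker might drive this helper. The message shape (type, baseUrl, detectionTypes, image, threshold) is an assumption for illustration only; the real DetectInput/DetectOutput types live in ./types, which is not shown here.

import { HumanHelper, IFaceLandmarksHelper } from './FaceLandmarksHelper';

let helper: IFaceLandmarksHelper | undefined;

// Hypothetical message protocol: an INIT message carrying baseUrl and
// detectionTypes, then DETECT messages carrying the captured frame and the
// face box movement threshold.
self.onmessage = async (event: MessageEvent) => {
    const { type, baseUrl, detectionTypes, image, threshold } = event.data;

    if (type === 'INIT') {
        helper = new HumanHelper({ baseUrl, detectionTypes });
    } else if (type === 'DETECT' && helper && !helper.getDetectionInProgress()) {
        // Drop frames that arrive while a detection is still running
        // instead of queueing them up.
        const output = await helper.detect({ image, threshold });

        self.postMessage(output);
    }
};

Guarding on getDetectionInProgress() mirrors how the helper exposes its state: detect() flips the flag for the duration of a run, so the caller can skip frames rather than pile up detection requests behind a slow backend.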