You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

FaceLandmarksHelper.ts 5.5KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195
  1. import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
  2. import { Human, Config, FaceResult } from '@vladmandic/human';
  3. import { DETECTION_TYPES, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';
  4. type Detection = {
  5. detections: Array<FaceResult>,
  6. threshold?: number
  7. };
  8. type DetectInput = {
  9. image: ImageBitmap | ImageData,
  10. threshold: number
  11. };
  12. type FaceBox = {
  13. left: number,
  14. right: number,
  15. width?: number
  16. };
  17. type InitInput = {
  18. baseUrl: string,
  19. detectionTypes: string[]
  20. }
  21. type DetectOutput = {
  22. faceExpression?: string,
  23. faceBox?: FaceBox
  24. };
  25. export interface FaceLandmarksHelper {
  26. getFaceBox({ detections, threshold }: Detection): FaceBox | undefined;
  27. getFaceExpression({ detections }: Detection): string | undefined;
  28. init(): Promise<void>;
  29. detect({ image, threshold } : DetectInput): Promise<DetectOutput | undefined>;
  30. getDetectionInProgress(): boolean;
  31. }
  32. /**
  33. * Helper class for human library
  34. */
  35. export class HumanHelper implements FaceLandmarksHelper {
  36. protected human: Human | undefined;
  37. protected faceDetectionTypes: string[];
  38. protected baseUrl: string;
  39. private detectionInProgress = false;
  40. private lastValidFaceBox: FaceBox | undefined;
  41. /**
  42. * Configuration for human.
  43. */
  44. private config: Partial<Config> = {
  45. backend: 'humangl',
  46. async: true,
  47. warmup: 'none',
  48. cacheModels: true,
  49. cacheSensitivity: 0,
  50. debug: false,
  51. deallocate: true,
  52. filter: { enabled: false },
  53. face: {
  54. enabled: true,
  55. detector: {
  56. enabled: false,
  57. rotation: false,
  58. modelPath: 'blazeface-front.json'
  59. },
  60. mesh: { enabled: false },
  61. iris: { enabled: false },
  62. emotion: {
  63. enabled: false,
  64. modelPath: 'emotion.json'
  65. },
  66. description: { enabled: false }
  67. },
  68. hand: { enabled: false },
  69. gesture: { enabled: false },
  70. body: { enabled: false },
  71. segmentation: { enabled: false }
  72. };
  73. constructor({ baseUrl, detectionTypes }: InitInput) {
  74. this.faceDetectionTypes = detectionTypes;
  75. this.baseUrl = baseUrl;
  76. this.init();
  77. }
  78. async init(): Promise<void> {
  79. if (!this.human) {
  80. this.config.modelBasePath = this.baseUrl;
  81. if (!self.OffscreenCanvas) {
  82. this.config.backend = 'wasm';
  83. this.config.wasmPath = this.baseUrl;
  84. setWasmPaths(this.baseUrl);
  85. }
  86. if (this.faceDetectionTypes.length > 0 && this.config.face) {
  87. this.config.face.enabled = true
  88. }
  89. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX) && this.config.face?.detector) {
  90. this.config.face.detector.enabled = true;
  91. }
  92. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS) && this.config.face?.emotion) {
  93. this.config.face.emotion.enabled = true;
  94. }
  95. const initialHuman = new Human(this.config);
  96. try {
  97. await initialHuman.load();
  98. } catch (err) {
  99. console.error(err);
  100. }
  101. this.human = initialHuman;
  102. }
  103. }
  104. getFaceBox({ detections, threshold }: Detection): FaceBox | undefined {
  105. if (!detections.length) {
  106. return;
  107. }
  108. const faceBox: FaceBox = {
  109. // normalize to percentage based
  110. left: Math.round(Math.min(...detections.map(d => d.boxRaw[0])) * 100),
  111. right: Math.round(Math.max(...detections.map(d => d.boxRaw[0] + d.boxRaw[2])) * 100)
  112. };
  113. faceBox.width = Math.round(faceBox.right - faceBox.left);
  114. if (this.lastValidFaceBox && threshold && Math.abs(this.lastValidFaceBox.left - faceBox.left) < threshold) {
  115. return;
  116. }
  117. this.lastValidFaceBox = faceBox;
  118. return faceBox;
  119. }
  120. getFaceExpression({ detections }: Detection): string | undefined {
  121. if (detections[0]?.emotion) {
  122. return FACE_EXPRESSIONS_NAMING_MAPPING[detections[0]?.emotion[0].emotion];
  123. }
  124. }
  125. public async detect({ image, threshold } : DetectInput): Promise<DetectOutput | undefined> {
  126. let detections;
  127. let faceExpression;
  128. let faceBox;
  129. if (!this.human){
  130. return;
  131. }
  132. this.detectionInProgress = true;
  133. this.human.tf.engine().startScope();
  134. const imageTensor = this.human.tf.browser.fromPixels(image);
  135. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
  136. const { face } = await this.human.detect(imageTensor, this.config);
  137. detections = face;
  138. faceExpression = this.getFaceExpression({ detections });
  139. }
  140. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
  141. if (!detections) {
  142. const { face } = await this.human.detect(imageTensor, this.config);
  143. detections = face;
  144. }
  145. faceBox = this.getFaceBox({
  146. detections,
  147. threshold
  148. });
  149. }
  150. this.human.tf.engine().endScope();
  151. this.detectionInProgress = false;
  152. return {
  153. faceExpression,
  154. faceBox
  155. }
  156. }
  157. public getDetectionInProgress(): boolean {
  158. return this.detectionInProgress;
  159. }
  160. }