You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

FaceLandmarksHelper.ts 6.1KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211
  1. import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
  2. import { Human, Config, FaceResult } from '@vladmandic/human';
  3. import { DETECTION_TYPES, FACE_DETECTION_SCORE_THRESHOLD, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';
  4. type Detection = {
  5. detections: Array<FaceResult>,
  6. threshold?: number
  7. };
  8. type DetectInput = {
  9. image: ImageBitmap | ImageData,
  10. threshold: number
  11. };
  12. type FaceBox = {
  13. left: number,
  14. right: number,
  15. width?: number
  16. };
  17. type InitInput = {
  18. baseUrl: string,
  19. detectionTypes: string[],
  20. maxFacesDetected?: number
  21. }
  22. type DetectOutput = {
  23. faceExpression?: string,
  24. faceBox?: FaceBox
  25. };
  26. export interface FaceLandmarksHelper {
  27. getFaceBox({ detections, threshold }: Detection): FaceBox | undefined;
  28. getFaceExpression({ detections }: Detection): string | undefined;
  29. init(): Promise<void>;
  30. detect({ image, threshold } : DetectInput): Promise<DetectOutput | undefined>;
  31. getDetectionInProgress(): boolean;
  32. }
  33. /**
  34. * Helper class for human library
  35. */
  36. export class HumanHelper implements FaceLandmarksHelper {
  37. protected human: Human | undefined;
  38. protected faceDetectionTypes: string[];
  39. protected baseUrl: string;
  40. protected maxFacesDetected?: number;
  41. private detectionInProgress = false;
  42. private lastValidFaceBox: FaceBox | undefined;
  43. /**
  44. * Configuration for human.
  45. */
  46. private config: Partial<Config> = {
  47. backend: 'humangl',
  48. async: true,
  49. warmup: 'none',
  50. cacheModels: true,
  51. cacheSensitivity: 0,
  52. debug: false,
  53. deallocate: true,
  54. filter: { enabled: false },
  55. face: {
  56. enabled: false,
  57. detector: {
  58. enabled: false,
  59. rotation: false,
  60. modelPath: 'blazeface-front.json',
  61. maxDetected: 4
  62. },
  63. mesh: { enabled: false },
  64. iris: { enabled: false },
  65. emotion: {
  66. enabled: false,
  67. modelPath: 'emotion.json'
  68. },
  69. description: { enabled: false }
  70. },
  71. hand: { enabled: false },
  72. gesture: { enabled: false },
  73. body: { enabled: false },
  74. segmentation: { enabled: false }
  75. };
  76. constructor({ baseUrl, detectionTypes, maxFacesDetected }: InitInput) {
  77. this.faceDetectionTypes = detectionTypes;
  78. this.baseUrl = baseUrl;
  79. this.maxFacesDetected = maxFacesDetected;
  80. this.init();
  81. }
  82. async init(): Promise<void> {
  83. if (!this.human) {
  84. this.config.modelBasePath = this.baseUrl;
  85. if (!self.OffscreenCanvas) {
  86. this.config.backend = 'wasm';
  87. this.config.wasmPath = this.baseUrl;
  88. setWasmPaths(this.baseUrl);
  89. }
  90. if (this.faceDetectionTypes.length > 0 && this.config.face) {
  91. this.config.face.enabled = true
  92. }
  93. if (this.maxFacesDetected && this.config.face?.detector) {
  94. this.config.face.detector.maxDetected = this.maxFacesDetected;
  95. }
  96. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX) && this.config.face?.detector) {
  97. this.config.face.detector.enabled = true;
  98. }
  99. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS) && this.config.face?.emotion) {
  100. this.config.face.emotion.enabled = true;
  101. }
  102. const initialHuman = new Human(this.config);
  103. try {
  104. await initialHuman.load();
  105. } catch (err) {
  106. console.error(err);
  107. }
  108. this.human = initialHuman;
  109. }
  110. }
  111. getFaceBox({ detections, threshold }: Detection): FaceBox | undefined {
  112. if (!detections.length) {
  113. return;
  114. }
  115. const faceBox: FaceBox = {
  116. // normalize to percentage based
  117. left: Math.round(Math.min(...detections.map(d => d.boxRaw[0])) * 100),
  118. right: Math.round(Math.max(...detections.map(d => d.boxRaw[0] + d.boxRaw[2])) * 100)
  119. };
  120. faceBox.width = Math.round(faceBox.right - faceBox.left);
  121. if (this.lastValidFaceBox && threshold && Math.abs(this.lastValidFaceBox.left - faceBox.left) < threshold) {
  122. return;
  123. }
  124. this.lastValidFaceBox = faceBox;
  125. return faceBox;
  126. }
  127. getFaceExpression({ detections }: Detection): string | undefined {
  128. if (detections[0]?.emotion) {
  129. return FACE_EXPRESSIONS_NAMING_MAPPING[detections[0]?.emotion[0].emotion];
  130. }
  131. }
  132. async getDetections(image: ImageBitmap | ImageData) {
  133. if (!this.human) {
  134. return;
  135. }
  136. this.human.tf.engine().startScope();
  137. const imageTensor = this.human.tf.browser.fromPixels(image);
  138. const { face: detections } = await this.human.detect(imageTensor, this.config);
  139. this.human.tf.engine().endScope();
  140. return detections.filter(detection => detection.score > FACE_DETECTION_SCORE_THRESHOLD);
  141. }
  142. public async detect({ image, threshold } : DetectInput): Promise<DetectOutput | undefined> {
  143. let detections;
  144. let faceExpression;
  145. let faceBox;
  146. this.detectionInProgress = true;
  147. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
  148. detections = await this.getDetections(image);
  149. if (detections) {
  150. faceExpression = this.getFaceExpression({ detections });
  151. }
  152. }
  153. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
  154. if (!detections) {
  155. detections = await this.getDetections(image);
  156. }
  157. if(detections) {
  158. faceBox = this.getFaceBox({
  159. detections,
  160. threshold
  161. });
  162. }
  163. }
  164. this.detectionInProgress = false;
  165. return {
  166. faceExpression,
  167. faceBox
  168. }
  169. }
  170. public getDetectionInProgress(): boolean {
  171. return this.detectionInProgress;
  172. }
  173. }