Вы не можете выбрать более 25 тем. Темы должны начинаться с буквы или цифры, могут содержать дефисы (-) и должны содержать не более 35 символов.

FaceLandmarksHelper.ts 7.8KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245
  1. import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
  2. import { Config, FaceResult, Human } from '@vladmandic/human';
  3. import { DETECTION_TYPES, FACE_DETECTION_SCORE_THRESHOLD, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';
  4. import { DetectInput, DetectOutput, FaceBox, InitInput } from './types';
  5. export interface IFaceLandmarksHelper {
  6. detect: ({ image, threshold }: DetectInput) => Promise<DetectOutput>;
  7. getDetectionInProgress: () => boolean;
  8. getDetections: (image: ImageBitmap | ImageData) => Promise<Array<FaceResult>>;
  9. getFaceBox: (detections: Array<FaceResult>, threshold: number) => FaceBox | undefined;
  10. getFaceCount: (detections: Array<FaceResult>) => number;
  11. getFaceExpression: (detections: Array<FaceResult>) => string | undefined;
  12. init: () => Promise<void>;
  13. }
  14. /**
  15. * Helper class for human library.
  16. */
  17. export class HumanHelper implements IFaceLandmarksHelper {
  18. protected human: Human | undefined;
  19. protected faceDetectionTypes: string[];
  20. protected baseUrl: string;
  21. private detectionInProgress = false;
  22. private lastValidFaceBox: FaceBox | undefined;
  23. /**
  24. * Configuration for human.
  25. */
  26. private config: Partial<Config> = {
  27. backend: 'humangl',
  28. async: true,
  29. warmup: 'none',
  30. cacheModels: true,
  31. cacheSensitivity: 0,
  32. debug: false,
  33. deallocate: true,
  34. filter: { enabled: false },
  35. face: {
  36. enabled: false,
  37. detector: {
  38. enabled: false,
  39. rotation: false,
  40. modelPath: 'blazeface-front.json',
  41. maxDetected: 20
  42. },
  43. mesh: { enabled: false },
  44. iris: { enabled: false },
  45. emotion: {
  46. enabled: false,
  47. modelPath: 'emotion.json'
  48. },
  49. description: { enabled: false }
  50. },
  51. hand: { enabled: false },
  52. gesture: { enabled: false },
  53. body: { enabled: false },
  54. segmentation: { enabled: false }
  55. };
  56. /**
  57. * Constructor function for the helper which initialize the helper.
  58. *
  59. * @param {InitInput} input - The input for the helper.
  60. * @returns {void}
  61. */
  62. constructor({ baseUrl, detectionTypes }: InitInput) {
  63. this.faceDetectionTypes = detectionTypes;
  64. this.baseUrl = baseUrl;
  65. this.init();
  66. }
  67. /**
  68. * Initializes the human helper with the available tfjs backend for the given detection types.
  69. *
  70. * @returns {Promise<void>}
  71. */
  72. async init(): Promise<void> {
  73. if (!this.human) {
  74. this.config.modelBasePath = this.baseUrl;
  75. if (!self.OffscreenCanvas) {
  76. this.config.backend = 'wasm';
  77. this.config.wasmPath = this.baseUrl;
  78. setWasmPaths(this.baseUrl);
  79. }
  80. if (this.faceDetectionTypes.length > 0 && this.config.face) {
  81. this.config.face.enabled = true;
  82. }
  83. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX) && this.config.face?.detector) {
  84. this.config.face.detector.enabled = true;
  85. }
  86. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS) && this.config.face?.emotion) {
  87. this.config.face.emotion.enabled = true;
  88. }
  89. const initialHuman = new Human(this.config);
  90. try {
  91. await initialHuman.load();
  92. } catch (err) {
  93. console.error(err);
  94. }
  95. this.human = initialHuman;
  96. }
  97. }
  98. /**
  99. * Gets the face box from the detections, if there is no valid detections it will return undefined..
  100. *
  101. * @param {Array<FaceResult>} detections - The array with the detections.
  102. * @param {number} threshold - Face box position change threshold.
  103. * @returns {FaceBox | undefined}
  104. */
  105. getFaceBox(detections: Array<FaceResult>, threshold: number): FaceBox | undefined {
  106. if (this.getFaceCount(detections) !== 1) {
  107. return;
  108. }
  109. const faceBox: FaceBox = {
  110. // normalize to percentage based
  111. left: Math.round(detections[0].boxRaw[0] * 100),
  112. right: Math.round((detections[0].boxRaw[0] + detections[0].boxRaw[2]) * 100)
  113. };
  114. faceBox.width = Math.round(faceBox.right - faceBox.left);
  115. if (this.lastValidFaceBox && threshold && Math.abs(this.lastValidFaceBox.left - faceBox.left) < threshold) {
  116. return;
  117. }
  118. this.lastValidFaceBox = faceBox;
  119. return faceBox;
  120. }
  121. /**
  122. * Gets the face expression from the detections, if there is no valid detections it will return undefined.
  123. *
  124. * @param {Array<FaceResult>} detections - The array with the detections.
  125. * @returns {string | undefined}
  126. */
  127. getFaceExpression(detections: Array<FaceResult>): string | undefined {
  128. if (this.getFaceCount(detections) !== 1) {
  129. return;
  130. }
  131. if (detections[0].emotion) {
  132. return FACE_EXPRESSIONS_NAMING_MAPPING[detections[0].emotion[0].emotion];
  133. }
  134. }
  135. /**
  136. * Gets the face count from the detections, which is the number of detections.
  137. *
  138. * @param {Array<FaceResult>} detections - The array with the detections.
  139. * @returns {number}
  140. */
  141. getFaceCount(detections: Array<FaceResult> | undefined): number {
  142. if (detections) {
  143. return detections.length;
  144. }
  145. return 0;
  146. }
  147. /**
  148. * Gets the detections from the image captured from the track.
  149. *
  150. * @param {ImageBitmap | ImageData} image - The image captured from the track,
  151. * if OffscreenCanvas available it will be ImageBitmap, otherwise it will be ImageData.
  152. * @returns {Promise<Array<FaceResult>>}
  153. */
  154. async getDetections(image: ImageBitmap | ImageData): Promise<Array<FaceResult>> {
  155. if (!this.human || !this.faceDetectionTypes.length) {
  156. return [];
  157. }
  158. this.human.tf.engine().startScope();
  159. const imageTensor = this.human.tf.browser.fromPixels(image);
  160. const { face: detections } = await this.human.detect(imageTensor, this.config);
  161. this.human.tf.engine().endScope();
  162. return detections.filter(detection => detection.score > FACE_DETECTION_SCORE_THRESHOLD);
  163. }
  164. /**
  165. * Gathers together all the data from the detections, it's the function that will be called in the worker.
  166. *
  167. * @param {DetectInput} input - The input for the detections.
  168. * @returns {Promise<DetectOutput>}
  169. */
  170. public async detect({ image, threshold }: DetectInput): Promise<DetectOutput> {
  171. let faceExpression;
  172. let faceBox;
  173. this.detectionInProgress = true;
  174. const detections = await this.getDetections(image);
  175. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
  176. faceExpression = this.getFaceExpression(detections);
  177. }
  178. if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
  179. // if more than one face is detected the face centering will be disabled.
  180. if (this.getFaceCount(detections) > 1) {
  181. this.faceDetectionTypes.splice(this.faceDetectionTypes.indexOf(DETECTION_TYPES.FACE_BOX), 1);
  182. // face-box for re-centering
  183. faceBox = {
  184. left: 0,
  185. right: 100,
  186. width: 100
  187. };
  188. } else {
  189. faceBox = this.getFaceBox(detections, threshold);
  190. }
  191. }
  192. this.detectionInProgress = false;
  193. return {
  194. faceExpression,
  195. faceBox,
  196. faceCount: this.getFaceCount(detections)
  197. };
  198. }
  199. /**
  200. * Returns the detection state.
  201. *
  202. * @returns {boolean}
  203. */
  204. public getDetectionInProgress(): boolean {
  205. return this.detectionInProgress;
  206. }
  207. }