
faceLandmarksWorker.js

import './faceApiPatch';

import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
import * as faceapi from '@vladmandic/face-api';

import { DETECTION_TYPES, DETECT_FACE, INIT_WORKER } from './constants';
/**
 * Detection types to be applied.
 */
let faceDetectionTypes = [];
/**
 * Indicates whether an init error occurred.
 */
let initError = false;
/**
 * A flag that indicates whether the models are loaded or not.
 */
let modelsLoaded = false;

/**
 * A flag that indicates whether the tensorflow backend is set or not.
 */
let backendSet = false;

/**
 * Flag for indicating whether a face detection flow is in progress or not.
 */
let detectionInProgress = false;

/**
 * Contains the last valid face bounding box (passes threshold validation) which was sent to the main process.
 */
let lastValidFaceBox;
/**
 * Computes a percentage-based bounding box for the detected faces and returns it
 * only if it moved more than the given threshold since the last reported box.
 */
const detectFaceBox = async ({ detections, threshold }) => {
    if (!detections.length) {
        return null;
    }

    const faceBox = {
        // normalize to percentage based
        left: Math.round(Math.min(...detections.map(d => d.relativeBox.left)) * 100),
        right: Math.round(Math.max(...detections.map(d => d.relativeBox.right)) * 100)
    };

    faceBox.width = Math.round(faceBox.right - faceBox.left);

    if (lastValidFaceBox && Math.abs(lastValidFaceBox.left - faceBox.left) < threshold) {
        return null;
    }

    lastValidFaceBox = faceBox;

    return faceBox;
};
// Returns the most probable expression of the first detected face, if any.
const detectFaceExpression = async ({ detections }) =>
    detections[0]?.expressions.asSortedArray()[0].expression;
/**
 * Runs the requested detections on the received image and posts the results
 * back to the main thread.
 */
const detect = async ({ image, threshold }) => {
    let detections;
    let faceExpression;
    let faceBox;

    detectionInProgress = true;
    faceapi.tf.engine().startScope();

    const imageTensor = faceapi.tf.browser.fromPixels(image);

    if (faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
        detections = await faceapi.detectAllFaces(
            imageTensor,
            new faceapi.TinyFaceDetectorOptions()
        ).withFaceExpressions();

        faceExpression = await detectFaceExpression({ detections });
    }

    if (faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
        detections = detections
            ? detections.map(d => d.detection)
            : await faceapi.detectAllFaces(imageTensor, new faceapi.TinyFaceDetectorOptions());

        faceBox = await detectFaceBox({
            detections,
            threshold
        });
    }

    faceapi.tf.engine().endScope();

    if (faceBox || faceExpression) {
        self.postMessage({
            faceBox,
            faceExpression
        });
    }

    detectionInProgress = false;
};
/**
 * Sets the tensorflow backend (wasm or webgl) and loads the face detection models.
 */
const init = async ({ baseUrl, detectionTypes }) => {
    faceDetectionTypes = detectionTypes;

    if (!backendSet) {
        try {
            if (self.useWasm) {
                setWasmPaths(baseUrl);
                await faceapi.tf.setBackend('wasm');
            } else {
                await faceapi.tf.setBackend('webgl');
            }
            backendSet = true;
        } catch (err) {
            initError = true;

            return;
        }
    }

    // load face detection model
    if (!modelsLoaded) {
        try {
            await faceapi.loadTinyFaceDetectorModel(baseUrl);

            if (detectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
                await faceapi.loadFaceExpressionModel(baseUrl);
            }

            modelsLoaded = true;
        } catch (err) {
            initError = true;

            return;
        }
    }
};
/**
 * Handles messages coming from the main thread.
 */
onmessage = function(message) {
    switch (message.data.type) {
    case DETECT_FACE: {
        if (!backendSet || !modelsLoaded || initError || detectionInProgress) {
            return;
        }

        detect(message.data);
        break;
    }

    case INIT_WORKER: {
        init(message.data);
        break;
    }
    }
};
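
Below is a minimal sketch of how the main thread could drive this worker, inferred from the message handling above. The worker URL, the baseUrl value, the threshold value and the DETECTION_TYPES members used here are illustrative assumptions, not taken from this file.

// Main-thread sketch; names marked as placeholders are assumptions.
import { DETECTION_TYPES, DETECT_FACE, INIT_WORKER } from './constants';

const worker = new Worker('faceLandmarksWorker.js'); // placeholder path, normally resolved by the bundler

// The worker posts { faceBox, faceExpression } whenever either value is available.
worker.onmessage = ({ data: { faceBox, faceExpression } }) => {
    if (faceBox) {
        console.log('Face box (percentages):', faceBox);
    }
    if (faceExpression) {
        console.log('Dominant expression:', faceExpression);
    }
};

// Set the backend and load the models once.
worker.postMessage({
    type: INIT_WORKER,
    baseUrl: '/models/', // assumed location of the model weights and wasm binaries
    detectionTypes: [ DETECTION_TYPES.FACE_BOX, DETECTION_TYPES.FACE_EXPRESSIONS ]
});

// Send a frame for detection; anything accepted by tf.browser.fromPixels (e.g. ImageData) works.
const sendFrame = imageData => {
    worker.postMessage({
        type: DETECT_FACE,
        image: imageData,
        threshold: 10 // minimum horizontal movement, in percent, before a new face box is reported
    });
};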