You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

faceCenteringWorker.js 2.6KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107
  1. import * as blazeface from '@tensorflow-models/blazeface';
  2. import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
  3. import * as tf from '@tensorflow/tfjs-core';
  4. import { FACE_BOX_MESSAGE, DETECT_FACE_BOX } from './constants';
  5. /**
  6. * Indicates whether an init error occured.
  7. */
  8. let initError = false;
  9. /**
  10. * The blazeface model.
  11. */
  12. let model;
  13. /**
  14. * A flag that indicates whether the tensorflow backend is set or not.
  15. */
  16. let backendSet = false;
  17. /**
  18. * Flag for indicating whether an init operation (e.g setting tf backend) is in progress.
  19. */
  20. let initInProgress = false;
  21. /**
  22. * Callbacks queue for avoiding overlapping executions of face detection.
  23. */
  24. const queue = [];
  25. /**
  26. * Contains the last valid face bounding box (passes threshold validation) which was sent to the main process.
  27. */
  28. let lastValidFaceBox;
  29. const detect = async message => {
  30. const { baseUrl, imageBitmap, isHorizontallyFlipped, threshold } = message.data;
  31. if (initInProgress || initError) {
  32. return;
  33. }
  34. if (!backendSet) {
  35. initInProgress = true;
  36. setWasmPaths(`${baseUrl}libs/`);
  37. try {
  38. await tf.setBackend('wasm');
  39. } catch (err) {
  40. initError = true;
  41. return;
  42. }
  43. backendSet = true;
  44. initInProgress = false;
  45. }
  46. // load face detection model
  47. if (!model) {
  48. try {
  49. model = await blazeface.load();
  50. } catch (err) {
  51. initError = true;
  52. return;
  53. }
  54. }
  55. tf.engine().startScope();
  56. const image = tf.browser.fromPixels(imageBitmap);
  57. const detections = await model.estimateFaces(image, false, isHorizontallyFlipped, false);
  58. tf.engine().endScope();
  59. let faceBox;
  60. if (detections.length) {
  61. faceBox = {
  62. // normalize to percentage based
  63. left: Math.round(Math.min(...detections.map(d => d.topLeft[0])) * 100 / imageBitmap.width),
  64. right: Math.round(Math.max(...detections.map(d => d.bottomRight[0])) * 100 / imageBitmap.width),
  65. top: Math.round(Math.min(...detections.map(d => d.topLeft[1])) * 100 / imageBitmap.height),
  66. bottom: Math.round(Math.max(...detections.map(d => d.bottomRight[1])) * 100 / imageBitmap.height)
  67. };
  68. if (lastValidFaceBox && Math.abs(lastValidFaceBox.left - faceBox.left) < threshold) {
  69. return;
  70. }
  71. lastValidFaceBox = faceBox;
  72. self.postMessage({
  73. type: FACE_BOX_MESSAGE,
  74. value: faceBox
  75. });
  76. }
  77. };
  78. onmessage = function(message) {
  79. if (message.data.id === DETECT_FACE_BOX) {
  80. queue.push(() => detect(message));
  81. queue.shift()();
  82. }
  83. };