
actions.js

// @flow

import 'image-capture';
import './createImageBitmap';

import { getLocalVideoTrack } from '../base/tracks';
import { getBaseUrl } from '../base/util';

import {
    ADD_FACIAL_EXPRESSION,
    ADD_TO_FACIAL_EXPRESSIONS_BUFFER,
    CLEAR_FACIAL_EXPRESSIONS_BUFFER,
    SET_DETECTION_TIME_INTERVAL,
    START_FACIAL_RECOGNITION,
    STOP_FACIAL_RECOGNITION
} from './actionTypes';
import {
    CLEAR_TIMEOUT,
    FACIAL_EXPRESSION_MESSAGE,
    INIT_WORKER,
    INTERVAL_MESSAGE,
    WEBHOOK_SEND_TIME_INTERVAL
} from './constants';
import { sendDataToWorker, sendFacialExpressionsWebhook } from './functions';
import logger from './logger';
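
// Redux actions for the facial-recognition feature. The heavy lifting
// (running the expression-detection model) happens in a Web Worker; the
// actions below load that worker, feed it frames from the local video
// track and store the expressions it reports.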
/**
 * Object containing an image capture of the local track.
 */
let imageCapture;

/**
 * Object where the facial expression worker is stored.
 */
let worker;

/**
 * The last facial expression received from the worker.
 */
let lastFacialExpression;

/**
 * The timestamp of the last facial expression received from the worker.
 */
let lastFacialExpressionTimestamp;

/**
 * How many consecutive occurrences of the same expression have been received.
 * Reset to 0 when an expression different from the last one arrives.
 */
let duplicateConsecutiveExpressions = 0;

/**
 * Variable that keeps the interval for sending expressions to the webhook.
 */
let sendInterval;

/**
 * Loads the worker that predicts the facial expression.
 *
 * @returns {Function}
 */
export function loadWorker() {
    return function(dispatch: Function) {
        if (!window.Worker) {
            logger.warn('Browser does not support web workers');

            return;
        }

        const baseUrl = getBaseUrl();
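
        // Workers may only be loaded from the page's own origin, but the
        // script can live on a different base URL (e.g. a CDN). Wrapping it
        // in a same-origin Blob URL that just calls importScripts() sidesteps
        // that restriction.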
        let workerUrl = `${baseUrl}libs/facial-expressions-worker.min.js`;
        const workerBlob = new Blob([ `importScripts("${workerUrl}");` ], { type: 'application/javascript' });

        workerUrl = window.URL.createObjectURL(workerBlob);
        worker = new Worker(workerUrl, { name: 'Facial Expression Worker' });
        worker.onmessage = function(e: Object) {
            const { type, value } = e.data;

            // Receives the detection time interval (derived from the type of
            // backend tfjs decided to use); it arrives as a response to the
            // first message sent to the worker.
            if (type === INTERVAL_MESSAGE) {
                value && dispatch(setDetectionTimeInterval(value));
            }

            // Receives a message with the predicted facial expression.
            if (type === FACIAL_EXPRESSION_MESSAGE) {
                sendDataToWorker(worker, imageCapture);
                if (!value) {
                    return;
                }
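
                // Consecutive identical predictions are run-length encoded:
                // the previous expression is flushed to the store only when a
                // different one arrives, with its occurrence count standing
                // in for its duration.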
                if (value === lastFacialExpression) {
                    duplicateConsecutiveExpressions++;
                } else {
                    if (lastFacialExpression && lastFacialExpressionTimestamp) {
                        dispatch(
                            addFacialExpression(
                                lastFacialExpression,
                                duplicateConsecutiveExpressions + 1,
                                lastFacialExpressionTimestamp
                            )
                        );
                    }

                    lastFacialExpression = value;
                    lastFacialExpressionTimestamp = Date.now();
                    duplicateConsecutiveExpressions = 0;
                }
            }
        };
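
        // Initializes the worker; it answers with the INTERVAL_MESSAGE
        // handled above. The screen size is passed along, presumably so the
        // worker can adapt its detection to the device.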
        worker.postMessage({
            type: INIT_WORKER,
            url: baseUrl,
            windowScreenSize: window.screen ? {
                width: window.screen.width,
                height: window.screen.height
            } : undefined
        });
        dispatch(startFacialRecognition());
    };
}

/**
 * Starts the recognition and detection of face expressions.
 *
 * @returns {Function}
 */
export function startFacialRecognition() {
    return async function(dispatch: Function, getState: Function) {
        if (!worker) {
            return;
        }

        const state = getState();
        const { recognitionActive } = state['features/facial-recognition'];

        if (recognitionActive) {
            return;
        }

        const localVideoTrack = getLocalVideoTrack(state['features/base/tracks']);

        if (localVideoTrack === undefined) {
            return;
        }

        const stream = localVideoTrack.jitsiTrack.getOriginalStream();

        if (stream === null) {
            return;
        }

        dispatch({ type: START_FACIAL_RECOGNITION });
        logger.log('Start face recognition');
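
        // ImageCapture reads frames straight off the MediaStreamTrack; the
        // 'image-capture' import at the top of the file is a polyfill for
        // browsers that lack the API. The frames themselves are grabbed
        // inside sendDataToWorker().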
        const firstVideoTrack = stream.getVideoTracks()[0];

        // $FlowFixMe
        imageCapture = new ImageCapture(firstVideoTrack);
        sendDataToWorker(worker, imageCapture);
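
        // Periodically flush the buffered expressions to the webhook; the
        // buffer is cleared only when the send succeeds, so failed sends are
        // retried on the next tick.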
        sendInterval = setInterval(async () => {
            const result = await sendFacialExpressionsWebhook(getState());

            if (result) {
                dispatch(clearFacialExpressionBuffer());
            }
        }, WEBHOOK_SEND_TIME_INTERVAL);
    };
}

/**
 * Stops the recognition and detection of face expressions.
 *
 * @returns {Function}
 */
export function stopFacialRecognition() {
    return function(dispatch: Function, getState: Function) {
        const state = getState();
        const { recognitionActive } = state['features/facial-recognition'];

        if (!recognitionActive) {
            imageCapture = null;

            return;
        }

        imageCapture = null;
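
        // Tell the worker to cancel its scheduled detection, then flush the
        // expression that was still being counted so its duration makes it
        // into the store.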
        worker.postMessage({
            type: CLEAR_TIMEOUT
        });

        if (lastFacialExpression && lastFacialExpressionTimestamp) {
            dispatch(
                addFacialExpression(
                    lastFacialExpression,
                    duplicateConsecutiveExpressions + 1,
                    lastFacialExpressionTimestamp
                )
            );
        }
        duplicateConsecutiveExpressions = 0;

        if (sendInterval) {
            clearInterval(sendInterval);
            sendInterval = null;
        }
        dispatch({ type: STOP_FACIAL_RECOGNITION });
        logger.log('Stop face recognition');
    };
}

/**
 * Resets the track in the image capture to the current local video track.
 *
 * @returns {Function}
 */
export function resetTrack() {
    return function(dispatch: Function, getState: Function) {
        const state = getState();
        const { jitsiTrack: localVideoTrack } = getLocalVideoTrack(state['features/base/tracks']);
        const stream = localVideoTrack.getOriginalStream();
        const firstVideoTrack = stream.getVideoTracks()[0];

        // $FlowFixMe
        imageCapture = new ImageCapture(firstVideoTrack);
    };
}

/**
 * Replaces the track in the image capture with the given one.
 *
 * Note: unlike the other actions this is not a thunk; it mutates the
 * module-level image capture directly.
 *
 * @param {Object} track - The track that will be in the new image capture.
 * @returns {void}
 */
export function changeTrack(track: Object) {
    const { jitsiTrack } = track;
    const stream = jitsiTrack.getOriginalStream();
    const firstVideoTrack = stream.getVideoTracks()[0];

    // $FlowFixMe
    imageCapture = new ImageCapture(firstVideoTrack);
}

/**
 * Adds a new facial expression and its duration.
 *
 * @param {string} facialExpression - Facial expression to be added.
 * @param {number} duration - The number of consecutive detections of the
 * expression (converted to seconds below).
 * @param {number} timestamp - The timestamp when the expression was first
 * detected.
 * @returns {Function}
 */
function addFacialExpression(facialExpression: string, duration: number, timestamp: number) {
    return function(dispatch: Function, getState: Function) {
        const { detectionTimeInterval } = getState()['features/facial-recognition'];
        let finalDuration = duration;

        // Convert the detection count into seconds, e.g. 5 detections at a
        // 1000 ms interval amount to a 5 second long expression.
        if (detectionTimeInterval !== -1) {
            finalDuration *= detectionTimeInterval / 1000;
        }
        dispatch({
            type: ADD_FACIAL_EXPRESSION,
            facialExpression,
            duration: finalDuration,
            timestamp
        });
    };
}

/**
 * Sets the time interval for the detection worker post message.
 *
 * @param {number} time - The time interval.
 * @returns {Object}
 */
function setDetectionTimeInterval(time: number) {
    return {
        type: SET_DETECTION_TIME_INTERVAL,
        time
    };
}

/**
 * Adds a facial expression with its timestamp to the facial expression buffer.
 *
 * @param {Object} facialExpression - Object containing the facial expression string and its timestamp.
 * @returns {Object}
 */
export function addToFacialExpressionsBuffer(facialExpression: Object) {
    return {
        type: ADD_TO_FACIAL_EXPRESSIONS_BUFFER,
        facialExpression
    };
}

/**
 * Clears the facial expressions array in the state.
 *
 * @returns {Object}
 */
function clearFacialExpressionBuffer() {
    return {
        type: CLEAR_FACIAL_EXPRESSIONS_BUFFER
    };
}