You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245
  1. /* eslint-disable lines-around-comment */
  2. import { IState } from '../app/types';
  3. import { getLocalParticipant } from '../base/participants/functions';
  4. // @ts-ignore
  5. import { extractFqnFromPath } from '../dynamic-branding/functions.any';
  6. import { DETECT_FACE, FACE_BOX_EVENT_TYPE, SEND_IMAGE_INTERVAL_MS } from './constants';
  7. import logger from './logger';
  8. import { FaceBox } from './types';
// Fallback rendering surface used when the browser lacks OffscreenCanvas:
// sendDataToWorker() draws grabbed video frames into it to extract ImageData
// for the face-detection worker.
let canvas: HTMLCanvasElement;
let context: CanvasRenderingContext2D | null;

// Only create the DOM canvas when OffscreenCanvas is unavailable; otherwise
// the grabbed ImageBitmap is posted to the worker directly and no canvas is
// needed on this thread.
if (typeof OffscreenCanvas === 'undefined') {
    canvas = document.createElement('canvas');
    context = canvas.getContext('2d');
}
  15. /**
  16. * Sends the face expression with its duration to all the other participants.
  17. *
  18. * @param {any} conference - The current conference.
  19. * @param {string} faceExpression - Face expression to be sent.
  20. * @param {number} duration - The duration of the face expression in seconds.
  21. * @returns {void}
  22. */
  23. export function sendFaceExpressionToParticipants(
  24. conference: any,
  25. faceExpression: string,
  26. duration: number
  27. ): void {
  28. try {
  29. conference.sendEndpointMessage('', {
  30. type: 'face_landmark',
  31. faceExpression,
  32. duration
  33. });
  34. } catch (err) {
  35. logger.warn('Could not broadcast the face expression to the other participants', err);
  36. }
  37. }
  38. /**
  39. * Sends the face box to all the other participants.
  40. *
  41. * @param {any} conference - The current conference.
  42. * @param {FaceBox} faceBox - Face box to be sent.
  43. * @returns {void}
  44. */
  45. export function sendFaceBoxToParticipants(
  46. conference: any,
  47. faceBox: FaceBox
  48. ): void {
  49. try {
  50. conference.sendEndpointMessage('', {
  51. type: FACE_BOX_EVENT_TYPE,
  52. faceBox
  53. });
  54. } catch (err) {
  55. logger.warn('Could not broadcast the face box to the other participants', err);
  56. }
  57. }
  58. /**
  59. * Sends the face expression with its duration to xmpp server.
  60. *
  61. * @param {any} conference - The current conference.
  62. * @param {string} faceExpression - Face expression to be sent.
  63. * @param {number} duration - The duration of the face expression in seconds.
  64. * @returns {void}
  65. */
  66. export function sendFaceExpressionToServer(
  67. conference: any,
  68. faceExpression: string,
  69. duration: number
  70. ): void {
  71. try {
  72. conference.sendFaceLandmarks({
  73. faceExpression,
  74. duration
  75. });
  76. } catch (err) {
  77. logger.warn('Could not send the face expression to xmpp server', err);
  78. }
  79. }
  80. /**
  81. * Sends face expression to backend.
  82. *
  83. * @param {Object} state - Redux state.
  84. * @returns {boolean} - True if sent, false otherwise.
  85. */
  86. export async function sendFaceExpressionsWebhook(state: IState) {
  87. const { webhookProxyUrl: url } = state['features/base/config'];
  88. const { conference } = state['features/base/conference'];
  89. const { jwt } = state['features/base/jwt'];
  90. const { connection } = state['features/base/connection'];
  91. const jid = connection?.getJid();
  92. const localParticipant = getLocalParticipant(state);
  93. const { faceExpressionsBuffer } = state['features/face-landmarks'];
  94. if (faceExpressionsBuffer.length === 0) {
  95. return false;
  96. }
  97. const headers = {
  98. ...jwt ? { 'Authorization': `Bearer ${jwt}` } : {},
  99. 'Content-Type': 'application/json'
  100. };
  101. const reqBody = {
  102. meetingFqn: extractFqnFromPath(),
  103. sessionId: conference?.sessionId,
  104. submitted: Date.now(),
  105. emotions: faceExpressionsBuffer,
  106. participantId: localParticipant?.jwtId,
  107. participantName: localParticipant?.name,
  108. participantJid: jid
  109. };
  110. if (url) {
  111. try {
  112. const res = await fetch(`${url}/emotions`, {
  113. method: 'POST',
  114. headers,
  115. body: JSON.stringify(reqBody)
  116. });
  117. if (res.ok) {
  118. return true;
  119. }
  120. logger.error('Status error:', res.status);
  121. } catch (err) {
  122. logger.error('Could not send request', err);
  123. }
  124. }
  125. return false;
  126. }
  127. /**
  128. * Sends the image data a canvas from the track in the image capture to the face recognition worker.
  129. *
  130. * @param {Worker} worker - Face recognition worker.
  131. * @param {Object} imageCapture - Image capture that contains the current track.
  132. * @param {number} threshold - Movement threshold as percentage for sharing face coordinates.
  133. * @returns {Promise<boolean>} - True if sent, false otherwise.
  134. */
  135. export async function sendDataToWorker(
  136. worker: Worker,
  137. imageCapture: ImageCapture,
  138. threshold = 10
  139. ): Promise<boolean> {
  140. if (imageCapture === null || imageCapture === undefined) {
  141. return false;
  142. }
  143. let imageBitmap;
  144. let image;
  145. try {
  146. imageBitmap = await imageCapture.grabFrame();
  147. } catch (err) {
  148. logger.warn(err);
  149. return false;
  150. }
  151. if (typeof OffscreenCanvas === 'undefined') {
  152. canvas.width = imageBitmap.width;
  153. canvas.height = imageBitmap.height;
  154. context?.drawImage(imageBitmap, 0, 0);
  155. image = context?.getImageData(0, 0, imageBitmap.width, imageBitmap.height);
  156. } else {
  157. image = imageBitmap;
  158. }
  159. worker.postMessage({
  160. type: DETECT_FACE,
  161. image,
  162. threshold
  163. });
  164. imageBitmap.close();
  165. return true;
  166. }
  167. /**
  168. * Gets face box for a participant id.
  169. *
  170. * @param {string} id - The participant id.
  171. * @param {IState} state - The redux state.
  172. * @returns {Object}
  173. */
  174. function getFaceBoxForId(id: string, state: IState) {
  175. return state['features/face-landmarks'].faceBoxes[id];
  176. }
  177. /**
  178. * Gets the video object position for a participant id.
  179. *
  180. * @param {IState} state - The redux state.
  181. * @param {string} id - The participant id.
  182. * @returns {string} - CSS object-position in the shape of '{horizontalPercentage}% {verticalPercentage}%'.
  183. */
  184. export function getVideoObjectPosition(state: IState, id?: string) {
  185. const faceBox = id && getFaceBoxForId(id, state);
  186. if (faceBox) {
  187. const { right, width } = faceBox;
  188. if (right && width) {
  189. return `${right - (width / 2)}% 50%`;
  190. }
  191. }
  192. return '50% 50%';
  193. }
  194. /**
  195. * Gets the video object position for a participant id.
  196. *
  197. * @param {IState} state - The redux state.
  198. * @returns {number} - Number of milliseconds for doing face detection.
  199. */
  200. export function getDetectionInterval(state: IState) {
  201. const { faceLandmarks } = state['features/base/config'];
  202. return Math.max(faceLandmarks?.captureInterval || SEND_IMAGE_INTERVAL_MS);
  203. }
  204. /**
  205. * Returns the duration in seconds of a face expression.
  206. *
  207. * @param {IState} state - The redux state.
  208. * @param {number} faceExpressionCount - The number of consecutive face expressions.
  209. * @returns {number} - Duration of face expression in seconds.
  210. */
  211. export function getFaceExpressionDuration(state: IState, faceExpressionCount: number) {
  212. return faceExpressionCount * (getDetectionInterval(state) / 1000);
  213. }