Вы не можете выбрать более 25 тем. Темы должны начинаться с буквы или цифры, могут содержать дефисы (-) и должны содержать не более 35 символов.

functions.ts 6.8KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243
  1. import { IReduxState } from '../app/types';
  2. import { getLocalParticipant } from '../base/participants/functions';
  3. import { extractFqnFromPath } from '../dynamic-branding/functions.any';
  4. import { DETECT_FACE, FACE_BOX_EVENT_TYPE, SEND_IMAGE_INTERVAL_MS } from './constants';
  5. import logger from './logger';
  6. import { FaceBox } from './types';
// Module-level fallback canvas used by sendDataToWorker to extract pixel data
// from a grabbed video frame when OffscreenCanvas is not supported by the
// environment. Only created in that fallback case.
let canvas: HTMLCanvasElement;
let context: CanvasRenderingContext2D | null;

if (typeof OffscreenCanvas === 'undefined') {
    canvas = document.createElement('canvas');
    context = canvas.getContext('2d');
}
  13. /**
  14. * Sends the face expression with its duration to all the other participants.
  15. *
  16. * @param {any} conference - The current conference.
  17. * @param {string} faceExpression - Face expression to be sent.
  18. * @param {number} duration - The duration of the face expression in seconds.
  19. * @returns {void}
  20. */
  21. export function sendFaceExpressionToParticipants(
  22. conference: any,
  23. faceExpression: string,
  24. duration: number
  25. ): void {
  26. try {
  27. conference.sendEndpointMessage('', {
  28. type: 'face_landmark',
  29. faceExpression,
  30. duration
  31. });
  32. } catch (err) {
  33. logger.warn('Could not broadcast the face expression to the other participants', err);
  34. }
  35. }
  36. /**
  37. * Sends the face box to all the other participants.
  38. *
  39. * @param {any} conference - The current conference.
  40. * @param {FaceBox} faceBox - Face box to be sent.
  41. * @returns {void}
  42. */
  43. export function sendFaceBoxToParticipants(
  44. conference: any,
  45. faceBox: FaceBox
  46. ): void {
  47. try {
  48. conference.sendEndpointMessage('', {
  49. type: FACE_BOX_EVENT_TYPE,
  50. faceBox
  51. });
  52. } catch (err) {
  53. logger.warn('Could not broadcast the face box to the other participants', err);
  54. }
  55. }
  56. /**
  57. * Sends the face expression with its duration to xmpp server.
  58. *
  59. * @param {any} conference - The current conference.
  60. * @param {string} faceExpression - Face expression to be sent.
  61. * @param {number} duration - The duration of the face expression in seconds.
  62. * @returns {void}
  63. */
  64. export function sendFaceExpressionToServer(
  65. conference: any,
  66. faceExpression: string,
  67. duration: number
  68. ): void {
  69. try {
  70. conference.sendFaceLandmarks({
  71. faceExpression,
  72. duration
  73. });
  74. } catch (err) {
  75. logger.warn('Could not send the face expression to xmpp server', err);
  76. }
  77. }
  78. /**
  79. * Sends face expression to backend.
  80. *
  81. * @param {Object} state - Redux state.
  82. * @returns {boolean} - True if sent, false otherwise.
  83. */
  84. export async function sendFaceExpressionsWebhook(state: IReduxState) {
  85. const { webhookProxyUrl: url } = state['features/base/config'];
  86. const { conference } = state['features/base/conference'];
  87. const { jwt } = state['features/base/jwt'];
  88. const { connection } = state['features/base/connection'];
  89. const jid = connection?.getJid();
  90. const localParticipant = getLocalParticipant(state);
  91. const { faceExpressionsBuffer } = state['features/face-landmarks'];
  92. if (faceExpressionsBuffer.length === 0) {
  93. return false;
  94. }
  95. const headers = {
  96. ...jwt ? { 'Authorization': `Bearer ${jwt}` } : {},
  97. 'Content-Type': 'application/json'
  98. };
  99. const reqBody = {
  100. meetingFqn: extractFqnFromPath(),
  101. sessionId: conference?.sessionId,
  102. submitted: Date.now(),
  103. emotions: faceExpressionsBuffer,
  104. participantId: localParticipant?.jwtId,
  105. participantName: localParticipant?.name,
  106. participantJid: jid
  107. };
  108. if (url) {
  109. try {
  110. const res = await fetch(`${url}/emotions`, {
  111. method: 'POST',
  112. headers,
  113. body: JSON.stringify(reqBody)
  114. });
  115. if (res.ok) {
  116. return true;
  117. }
  118. logger.error('Status error:', res.status);
  119. } catch (err) {
  120. logger.error('Could not send request', err);
  121. }
  122. }
  123. return false;
  124. }
  125. /**
  126. * Sends the image data a canvas from the track in the image capture to the face recognition worker.
  127. *
  128. * @param {Worker} worker - Face recognition worker.
  129. * @param {Object} imageCapture - Image capture that contains the current track.
  130. * @param {number} threshold - Movement threshold as percentage for sharing face coordinates.
  131. * @returns {Promise<boolean>} - True if sent, false otherwise.
  132. */
  133. export async function sendDataToWorker(
  134. worker: Worker,
  135. imageCapture: ImageCapture,
  136. threshold = 10
  137. ): Promise<boolean> {
  138. if (imageCapture === null || imageCapture === undefined) {
  139. return false;
  140. }
  141. let imageBitmap;
  142. let image;
  143. try {
  144. imageBitmap = await imageCapture.grabFrame();
  145. } catch (err) {
  146. logger.warn(err);
  147. return false;
  148. }
  149. if (typeof OffscreenCanvas === 'undefined') {
  150. canvas.width = imageBitmap.width;
  151. canvas.height = imageBitmap.height;
  152. context?.drawImage(imageBitmap, 0, 0);
  153. image = context?.getImageData(0, 0, imageBitmap.width, imageBitmap.height);
  154. } else {
  155. image = imageBitmap;
  156. }
  157. worker.postMessage({
  158. type: DETECT_FACE,
  159. image,
  160. threshold
  161. });
  162. imageBitmap.close();
  163. return true;
  164. }
  165. /**
  166. * Gets face box for a participant id.
  167. *
  168. * @param {string} id - The participant id.
  169. * @param {IReduxState} state - The redux state.
  170. * @returns {Object}
  171. */
  172. function getFaceBoxForId(id: string, state: IReduxState) {
  173. return state['features/face-landmarks'].faceBoxes[id];
  174. }
  175. /**
  176. * Gets the video object position for a participant id.
  177. *
  178. * @param {IReduxState} state - The redux state.
  179. * @param {string} id - The participant id.
  180. * @returns {string} - CSS object-position in the shape of '{horizontalPercentage}% {verticalPercentage}%'.
  181. */
  182. export function getVideoObjectPosition(state: IReduxState, id?: string) {
  183. const faceBox = id && getFaceBoxForId(id, state);
  184. if (faceBox) {
  185. const { right, width } = faceBox;
  186. if (right && width) {
  187. return `${right - (width / 2)}% 50%`;
  188. }
  189. }
  190. return '50% 50%';
  191. }
  192. /**
  193. * Gets the video object position for a participant id.
  194. *
  195. * @param {IReduxState} state - The redux state.
  196. * @returns {number} - Number of milliseconds for doing face detection.
  197. */
  198. export function getDetectionInterval(state: IReduxState) {
  199. const { faceLandmarks } = state['features/base/config'];
  200. return Math.max(faceLandmarks?.captureInterval || SEND_IMAGE_INTERVAL_MS);
  201. }
  202. /**
  203. * Returns the duration in seconds of a face expression.
  204. *
  205. * @param {IReduxState} state - The redux state.
  206. * @param {number} faceExpressionCount - The number of consecutive face expressions.
  207. * @returns {number} - Duration of face expression in seconds.
  208. */
  209. export function getFaceExpressionDuration(state: IReduxState, faceExpressionCount: number) {
  210. return faceExpressionCount * (getDetectionInterval(state) / 1000);
  211. }