
functions.js

// @flow

import { getLocalParticipant } from '../base/participants';
import { extractFqnFromPath } from '../dynamic-branding/functions.any';
import { DETECT_FACE, FACE_BOX_EVENT_TYPE, SEND_IMAGE_INTERVAL_MS } from './constants';
import logger from './logger';

let canvas;
let context;

// Fall back to a regular canvas on browsers that do not support OffscreenCanvas;
// frames then have to be converted to ImageData on the main thread.
if (typeof OffscreenCanvas === 'undefined') {
    canvas = document.createElement('canvas');
    context = canvas.getContext('2d');
}

/**
 * Sends the face expression with its duration to all the other participants.
 *
 * @param {Object} conference - The current conference.
 * @param {string} faceExpression - Face expression to be sent.
 * @param {number} duration - The duration of the face expression in seconds.
 * @returns {void}
 */
export function sendFaceExpressionToParticipants(
        conference: Object,
        faceExpression: string,
        duration: number
): void {
    try {
        conference.sendEndpointMessage('', {
            type: 'face_landmark',
            faceExpression,
            duration
        });
    } catch (err) {
        logger.warn('Could not broadcast the face expression to the other participants', err);
    }
}

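/*
 * Example (illustrative only, not part of the module): broadcasting a detected
 * expression, assuming `conference` is the current conference object taken
 * from the redux state:
 *
 *     sendFaceExpressionToParticipants(conference, 'happy', 2);
 */
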
/**
 * Sends the face box to all the other participants.
 *
 * @param {Object} conference - The current conference.
 * @param {Object} faceBox - Face box to be sent.
 * @returns {void}
 */
export function sendFaceBoxToParticipants(
        conference: Object,
        faceBox: Object
): void {
    try {
        conference.sendEndpointMessage('', {
            type: FACE_BOX_EVENT_TYPE,
            faceBox
        });
    } catch (err) {
        logger.warn('Could not broadcast the face box to the other participants', err);
    }
}

/**
 * Sends the face expression with its duration to the XMPP server.
 *
 * @param {Object} conference - The current conference.
 * @param {string} faceExpression - Face expression to be sent.
 * @param {number} duration - The duration of the face expression in seconds.
 * @returns {void}
 */
export function sendFaceExpressionToServer(
        conference: Object,
        faceExpression: string,
        duration: number
): void {
    try {
        conference.sendFaceLandmarks({
            faceExpression,
            duration
        });
    } catch (err) {
        logger.warn('Could not send the face expression to the XMPP server', err);
    }
}

/**
 * Sends the face expressions accumulated in the buffer to the backend webhook proxy.
 *
 * @param {Object} state - Redux state.
 * @returns {boolean} - True if sent, false otherwise.
 */
export async function sendFaceExpressionsWebhook(state: Object) {
    const { webhookProxyUrl: url } = state['features/base/config'];
    const { conference } = state['features/base/conference'];
    const { jwt } = state['features/base/jwt'];
    const { connection } = state['features/base/connection'];
    const jid = connection.getJid();
    const localParticipant = getLocalParticipant(state);
    const { faceExpressionsBuffer } = state['features/face-landmarks'];

    if (faceExpressionsBuffer.length === 0) {
        return false;
    }

    const headers = {
        ...jwt ? { 'Authorization': `Bearer ${jwt}` } : {},
        'Content-Type': 'application/json'
    };
    const reqBody = {
        meetingFqn: extractFqnFromPath(),
        sessionId: conference.sessionId,
        submitted: Date.now(),
        emotions: faceExpressionsBuffer,
        participantId: localParticipant.jwtId,
        participantName: localParticipant.name,
        participantJid: jid
    };

    if (url) {
        try {
            const res = await fetch(`${url}/emotions`, {
                method: 'POST',
                headers,
                body: JSON.stringify(reqBody)
            });

            if (res.ok) {
                return true;
            }
            logger.error('Status error:', res.status);
        } catch (err) {
            logger.error('Could not send request', err);
        }
    }

    return false;
}

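/*
 * Example (illustrative only, not part of the module): flushing the buffer from
 * application code, e.g. on a timer or when the conference ends. `store` is
 * assumed to be the redux store:
 *
 *     const sent = await sendFaceExpressionsWebhook(store.getState());
 */
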
/**
 * Sends the image data from the track in the image capture to the face recognition worker.
 *
 * @param {Worker} worker - Face recognition worker.
 * @param {Object} imageCapture - Image capture that contains the current track.
 * @param {number} threshold - Movement threshold as percentage for sharing face coordinates.
 * @returns {Promise<void>}
 */
export async function sendDataToWorker(
        worker: Worker,
        imageCapture: Object,
        threshold: number = 10
): Promise<void> {
    if (imageCapture === null || imageCapture === undefined) {
        return;
    }

    let imageBitmap;
    let image;

    try {
        imageBitmap = await imageCapture.grabFrame();
    } catch (err) {
        logger.warn(err);

        return;
    }

    if (typeof OffscreenCanvas === 'undefined') {
        // Without OffscreenCanvas the worker cannot draw the ImageBitmap itself,
        // so convert the frame to ImageData on the main thread first.
        canvas.width = imageBitmap.width;
        canvas.height = imageBitmap.height;
        context.drawImage(imageBitmap, 0, 0);
        image = context.getImageData(0, 0, imageBitmap.width, imageBitmap.height);
    } else {
        image = imageBitmap;
    }

    worker.postMessage({
        type: DETECT_FACE,
        image,
        threshold
    });

    imageBitmap.close();
}

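/*
 * Example (illustrative only, not part of the module): feeding frames from the
 * local camera track to the detection worker at the configured interval. `track`
 * is assumed to be the camera MediaStreamTrack, `worker` the face-landmarks
 * Worker instance and `state` the redux state:
 *
 *     const imageCapture = new ImageCapture(track);
 *
 *     setInterval(
 *         () => sendDataToWorker(worker, imageCapture),
 *         getDetectionInterval(state));
 */
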
/**
 * Gets face box for a participant id.
 *
 * @param {string} id - The participant id.
 * @param {Object} state - The redux state.
 * @returns {Object}
 */
function getFaceBoxForId(id: string, state: Object) {
    return state['features/face-landmarks'].faceBoxes[id];
}

/**
 * Gets the video object position for a participant id.
 *
 * @param {Object} state - The redux state.
 * @param {string} id - The participant id.
 * @returns {string} - CSS object-position in the shape of '{horizontalPercentage}% {verticalPercentage}%'.
 */
export function getVideoObjectPosition(state: Object, id: string) {
    const faceBox = getFaceBoxForId(id, state);

    if (faceBox) {
        const { right, width } = faceBox;

        // right - (width / 2) is the horizontal middle of the face box,
        // expressed as a percentage of the frame width, so the face stays
        // centered when the video is cropped.
        return `${right - (width / 2)}% 50%`;
    }

    return '50% 50%';
}

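/*
 * Example (illustrative only, not part of the module): applying the result as
 * the CSS object-position of a participant's video element:
 *
 *     videoElement.style.objectPosition = getVideoObjectPosition(state, participantId);
 */
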
/**
 * Gets the face detection interval from the config.
 *
 * @param {Object} state - The redux state.
 * @returns {number} - Number of milliseconds for doing face detection.
 */
export function getDetectionInterval(state: Object) {
    const { faceLandmarks } = state['features/base/config'];

    // Use the configured capture interval when present, otherwise the default.
    return faceLandmarks?.captureInterval || SEND_IMAGE_INTERVAL_MS;
}

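/*
 * Example (illustrative only, not part of the module): with a deployment config
 * override such as
 *
 *     config.faceLandmarks = { captureInterval: 1000 };
 *
 * getDetectionInterval(state) returns 1000; without the override it falls back
 * to SEND_IMAGE_INTERVAL_MS.
 */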