Vous ne pouvez pas sélectionner plus de 25 sujets Les noms de sujets doivent commencer par une lettre ou un nombre, peuvent contenir des tirets ('-') et peuvent comporter jusqu'à 35 caractères.

JitsiStreamBackgroundEffect.js 9.6KB

  1. // @flow
  2. import { VIRTUAL_BACKGROUND_TYPE } from '../../virtual-background/constants';
  3. import {
  4. CLEAR_TIMEOUT,
  5. TIMEOUT_TICK,
  6. SET_TIMEOUT,
  7. timerWorkerScript
  8. } from './TimerWorker';
  9. /**
  10. * Represents a modified MediaStream that adds effects to video background.
  11. * <tt>JitsiStreamBackgroundEffect</tt> does the processing of the original
  12. * video stream.
  13. */
  14. export default class JitsiStreamBackgroundEffect {
  15. _model: Object;
  16. _options: Object;
  17. _desktopShareDimensions: Object;
  18. _segmentationPixelCount: number;
  19. _inputVideoElement: HTMLVideoElement;
  20. _onMaskFrameTimer: Function;
  21. _maskFrameTimerWorker: Worker;
  22. _outputCanvasElement: HTMLCanvasElement;
  23. _outputCanvasCtx: Object;
  24. _segmentationMaskCtx: Object;
  25. _segmentationMask: Object;
  26. _segmentationMaskCanvas: Object;
  27. _renderMask: Function;
  28. _virtualImage: HTMLImageElement;
  29. _virtualVideo: HTMLVideoElement;
  30. isEnabled: Function;
  31. startEffect: Function;
  32. stopEffect: Function;
  33. /**
  34. * Represents a modified video MediaStream track.
  35. *
  36. * @class
  37. * @param {Object} model - Meet model.
  38. * @param {Object} options - Segmentation dimensions.
  39. */
  40. constructor(model: Object, options: Object) {
  41. this._options = options;
  42. if (this._options.virtualBackground.backgroundType === VIRTUAL_BACKGROUND_TYPE.IMAGE) {
  43. this._virtualImage = document.createElement('img');
  44. this._virtualImage.crossOrigin = 'anonymous';
  45. this._virtualImage.src = this._options.virtualBackground.virtualSource;
  46. }
  47. if (this._options.virtualBackground.backgroundType === VIRTUAL_BACKGROUND_TYPE.DESKTOP_SHARE) {
  48. this._virtualVideo = document.createElement('video');
  49. this._virtualVideo.autoplay = true;
  50. this._virtualVideo.srcObject = this._options?.virtualBackground?.virtualSource?.stream;
  51. }
  52. this._model = model;
  53. this._segmentationPixelCount = this._options.width * this._options.height;
  54. // Bind event handler so it is only bound once for every instance.
  55. this._onMaskFrameTimer = this._onMaskFrameTimer.bind(this);
  56. // Workaround for FF issue https://bugzilla.mozilla.org/show_bug.cgi?id=1388974
  57. this._outputCanvasElement = document.createElement('canvas');
  58. this._outputCanvasElement.getContext('2d');
  59. this._inputVideoElement = document.createElement('video');
  60. }
  61. /**
  62. * EventHandler onmessage for the maskFrameTimerWorker WebWorker.
  63. *
  64. * @private
  65. * @param {EventHandler} response - The onmessage EventHandler parameter.
  66. * @returns {void}
  67. */
  68. _onMaskFrameTimer(response: Object) {
  69. if (response.data.id === TIMEOUT_TICK) {
  70. this._renderMask();
  71. }
  72. }
  73. /**
  74. * Represents the run post processing.
  75. *
  76. * @returns {void}
  77. */
  78. runPostProcessing() {
  79. this._outputCanvasCtx.globalCompositeOperation = 'copy';
  80. // Draw segmentation mask.
  81. //
  82. // Smooth out the edges.
  83. if (this._options.virtualBackground.backgroundType === VIRTUAL_BACKGROUND_TYPE.IMAGE) {
  84. this._outputCanvasCtx.filter = 'blur(4px)';
  85. } else {
  86. this._outputCanvasCtx.filter = 'blur(8px)';
  87. }
  88. this._outputCanvasCtx.drawImage(
  89. this._segmentationMaskCanvas,
  90. 0,
  91. 0,
  92. this._options.width,
  93. this._options.height,
  94. 0,
  95. 0,
  96. this._inputVideoElement.width,
  97. this._inputVideoElement.height
  98. );
  99. this._outputCanvasCtx.globalCompositeOperation = 'source-in';
  100. this._outputCanvasCtx.filter = 'none';
  101. // Draw the foreground video.
  102. //
  103. this._outputCanvasCtx.drawImage(this._inputVideoElement, 0, 0);
  104. // Draw the background.
  105. //
  106. this._outputCanvasCtx.globalCompositeOperation = 'destination-over';
  107. if (this._options.virtualBackground.backgroundType === VIRTUAL_BACKGROUND_TYPE.IMAGE) {
  108. this._outputCanvasCtx.drawImage(
  109. this._virtualImage,
  110. 0,
  111. 0,
  112. this._inputVideoElement.width,
  113. this._inputVideoElement.height
  114. );
  115. }
  116. if (this._options.virtualBackground.backgroundType === VIRTUAL_BACKGROUND_TYPE.DESKTOP_SHARE) {
  117. this._outputCanvasCtx.drawImage(
  118. this._virtualVideo,
  119. 0,
  120. 0,
  121. this._desktopShareDimensions.width,
  122. this._desktopShareDimensions.height
  123. );
  124. } else {
  125. this._outputCanvasCtx.filter = `blur(${this._options.virtualBackground.blurValue}px)`;
  126. this._outputCanvasCtx.drawImage(this._inputVideoElement, 0, 0);
  127. }
  128. }
  129. /**
  130. * Represents the run Tensorflow Interference.
  131. *
  132. * @returns {void}
  133. */
  134. runInference() {
  135. this._model._runInference();
  136. const outputMemoryOffset = this._model._getOutputMemoryOffset() / 4;
  137. for (let i = 0; i < this._segmentationPixelCount; i++) {
  138. const background = this._model.HEAPF32[outputMemoryOffset + (i * 2)];
  139. const person = this._model.HEAPF32[outputMemoryOffset + (i * 2) + 1];
  140. const shift = Math.max(background, person);
  141. const backgroundExp = Math.exp(background - shift);
  142. const personExp = Math.exp(person - shift);
  143. // Sets only the alpha component of each pixel.
  144. this._segmentationMask.data[(i * 4) + 3] = (255 * personExp) / (backgroundExp + personExp);
  145. }
  146. this._segmentationMaskCtx.putImageData(this._segmentationMask, 0, 0);
  147. }
  148. /**
  149. * Loop function to render the background mask.
  150. *
  151. * @private
  152. * @returns {void}
  153. */
  154. _renderMask() {
  155. const desktopShareTrack = this._options?.virtualBackground?.virtualSource?.track;
  156. if (desktopShareTrack) {
  157. this._desktopShareDimensions = desktopShareTrack.getSettings ? desktopShareTrack.getSettings()
  158. : desktopShareTrack.getConstraints();
  159. }
  160. this.resizeSource();
  161. this.runInference();
  162. this.runPostProcessing();
  163. this._maskFrameTimerWorker.postMessage({
  164. id: SET_TIMEOUT,
  165. timeMs: 1000 / 30
  166. });
  167. }
  168. /**
  169. * Represents the resize source process.
  170. *
  171. * @returns {void}
  172. */
  173. resizeSource() {
  174. this._segmentationMaskCtx.drawImage(
  175. this._inputVideoElement,
  176. 0,
  177. 0,
  178. this._inputVideoElement.width,
  179. this._inputVideoElement.height,
  180. 0,
  181. 0,
  182. this._options.width,
  183. this._options.height
  184. );
  185. const imageData = this._segmentationMaskCtx.getImageData(
  186. 0,
  187. 0,
  188. this._options.width,
  189. this._options.height
  190. );
  191. const inputMemoryOffset = this._model._getInputMemoryOffset() / 4;
  192. for (let i = 0; i < this._segmentationPixelCount; i++) {
  193. this._model.HEAPF32[inputMemoryOffset + (i * 3)] = imageData.data[i * 4] / 255;
  194. this._model.HEAPF32[inputMemoryOffset + (i * 3) + 1] = imageData.data[(i * 4) + 1] / 255;
  195. this._model.HEAPF32[inputMemoryOffset + (i * 3) + 2] = imageData.data[(i * 4) + 2] / 255;
  196. }
  197. }
  198. /**
  199. * Checks if the local track supports this effect.
  200. *
  201. * @param {JitsiLocalTrack} jitsiLocalTrack - Track to apply effect.
  202. * @returns {boolean} - Returns true if this effect can run on the specified track
  203. * false otherwise.
  204. */
  205. isEnabled(jitsiLocalTrack: Object) {
  206. return jitsiLocalTrack.isVideoTrack() && jitsiLocalTrack.videoType === 'camera';
  207. }
  208. /**
  209. * Starts loop to capture video frame and render the segmentation mask.
  210. *
  211. * @param {MediaStream} stream - Stream to be used for processing.
  212. * @returns {MediaStream} - The stream with the applied effect.
  213. */
  214. startEffect(stream: MediaStream) {
  215. this._maskFrameTimerWorker = new Worker(timerWorkerScript, { name: 'Blur effect worker' });
  216. this._maskFrameTimerWorker.onmessage = this._onMaskFrameTimer;
  217. const firstVideoTrack = stream.getVideoTracks()[0];
  218. const { height, frameRate, width }
  219. = firstVideoTrack.getSettings ? firstVideoTrack.getSettings() : firstVideoTrack.getConstraints();
  220. this._segmentationMask = new ImageData(this._options.width, this._options.height);
  221. this._segmentationMaskCanvas = document.createElement('canvas');
  222. this._segmentationMaskCanvas.width = this._options.width;
  223. this._segmentationMaskCanvas.height = this._options.height;
  224. this._segmentationMaskCtx = this._segmentationMaskCanvas.getContext('2d');
  225. this._outputCanvasElement.width = parseInt(width, 10);
  226. this._outputCanvasElement.height = parseInt(height, 10);
  227. this._outputCanvasCtx = this._outputCanvasElement.getContext('2d');
  228. this._inputVideoElement.width = parseInt(width, 10);
  229. this._inputVideoElement.height = parseInt(height, 10);
  230. this._inputVideoElement.autoplay = true;
  231. this._inputVideoElement.srcObject = stream;
  232. this._inputVideoElement.onloadeddata = () => {
  233. this._maskFrameTimerWorker.postMessage({
  234. id: SET_TIMEOUT,
  235. timeMs: 1000 / 30
  236. });
  237. };
  238. return this._outputCanvasElement.captureStream(parseInt(frameRate, 10));
  239. }
  240. /**
  241. * Stops the capture and render loop.
  242. *
  243. * @returns {void}
  244. */
  245. stopEffect() {
  246. this._maskFrameTimerWorker.postMessage({
  247. id: CLEAR_TIMEOUT
  248. });
  249. this._maskFrameTimerWorker.terminate();
  250. }
  251. }