You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

JitsiStreamBackgroundEffect.js 10KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291
  1. // @flow
  2. import { VIRTUAL_BACKGROUND_TYPE } from '../../virtual-background/constants';
  3. import {
  4. CLEAR_TIMEOUT,
  5. TIMEOUT_TICK,
  6. SET_TIMEOUT,
  7. timerWorkerScript
  8. } from './TimerWorker';
  9. /**
  10. * Represents a modified MediaStream that adds effects to video background.
  11. * <tt>JitsiStreamBackgroundEffect</tt> does the processing of the original
  12. * video stream.
  13. */
  14. export default class JitsiStreamBackgroundEffect {
  15. _model: Object;
  16. _options: Object;
  17. _stream: Object;
  18. _segmentationPixelCount: number;
  19. _inputVideoElement: HTMLVideoElement;
  20. _onMaskFrameTimer: Function;
  21. _maskFrameTimerWorker: Worker;
  22. _outputCanvasElement: HTMLCanvasElement;
  23. _outputCanvasCtx: Object;
  24. _segmentationMaskCtx: Object;
  25. _segmentationMask: Object;
  26. _segmentationMaskCanvas: Object;
  27. _renderMask: Function;
  28. _virtualImage: HTMLImageElement;
  29. _virtualVideo: HTMLVideoElement;
  30. isEnabled: Function;
  31. startEffect: Function;
  32. stopEffect: Function;
  33. /**
  34. * Represents a modified video MediaStream track.
  35. *
  36. * @class
  37. * @param {Object} model - Meet model.
  38. * @param {Object} options - Segmentation dimensions.
  39. */
  40. constructor(model: Object, options: Object) {
  41. this._options = options;
  42. if (this._options.virtualBackground.backgroundType === VIRTUAL_BACKGROUND_TYPE.IMAGE) {
  43. this._virtualImage = document.createElement('img');
  44. this._virtualImage.crossOrigin = 'anonymous';
  45. this._virtualImage.src = this._options.virtualBackground.virtualSource;
  46. }
  47. if (this._options.virtualBackground.backgroundType === VIRTUAL_BACKGROUND_TYPE.DESKTOP_SHARE) {
  48. this._virtualVideo = document.createElement('video');
  49. this._virtualVideo.autoplay = true;
  50. this._virtualVideo.srcObject = this._options?.virtualBackground?.virtualSource?.stream;
  51. }
  52. this._model = model;
  53. this._segmentationPixelCount = this._options.width * this._options.height;
  54. // Bind event handler so it is only bound once for every instance.
  55. this._onMaskFrameTimer = this._onMaskFrameTimer.bind(this);
  56. // Workaround for FF issue https://bugzilla.mozilla.org/show_bug.cgi?id=1388974
  57. this._outputCanvasElement = document.createElement('canvas');
  58. this._outputCanvasElement.getContext('2d');
  59. this._inputVideoElement = document.createElement('video');
  60. }
  61. /**
  62. * EventHandler onmessage for the maskFrameTimerWorker WebWorker.
  63. *
  64. * @private
  65. * @param {EventHandler} response - The onmessage EventHandler parameter.
  66. * @returns {void}
  67. */
  68. _onMaskFrameTimer(response: Object) {
  69. if (response.data.id === TIMEOUT_TICK) {
  70. this._renderMask();
  71. }
  72. }
  73. /**
  74. * Represents the run post processing.
  75. *
  76. * @returns {void}
  77. */
  78. runPostProcessing() {
  79. const track = this._stream.getVideoTracks()[0];
  80. const { height, width } = track.getSettings() ?? track.getConstraints();
  81. const { backgroundType } = this._options.virtualBackground;
  82. this._outputCanvasElement.height = height;
  83. this._outputCanvasElement.width = width;
  84. this._outputCanvasCtx.globalCompositeOperation = 'copy';
  85. // Draw segmentation mask.
  86. // Smooth out the edges.
  87. this._outputCanvasCtx.filter = backgroundType === VIRTUAL_BACKGROUND_TYPE.IMAGE ? 'blur(4px)' : 'blur(8px)';
  88. if (backgroundType === VIRTUAL_BACKGROUND_TYPE.DESKTOP_SHARE) {
  89. // Save current context before applying transformations.
  90. this._outputCanvasCtx.save();
  91. // Flip the canvas and prevent mirror behaviour.
  92. this._outputCanvasCtx.scale(-1, 1);
  93. this._outputCanvasCtx.translate(-this._outputCanvasElement.width, 0);
  94. }
  95. this._outputCanvasCtx.drawImage(
  96. this._segmentationMaskCanvas,
  97. 0,
  98. 0,
  99. this._options.width,
  100. this._options.height,
  101. 0,
  102. 0,
  103. this._inputVideoElement.width,
  104. this._inputVideoElement.height
  105. );
  106. if (backgroundType === VIRTUAL_BACKGROUND_TYPE.DESKTOP_SHARE) {
  107. this._outputCanvasCtx.restore();
  108. }
  109. this._outputCanvasCtx.globalCompositeOperation = 'source-in';
  110. this._outputCanvasCtx.filter = 'none';
  111. // Draw the foreground video.
  112. if (backgroundType === VIRTUAL_BACKGROUND_TYPE.DESKTOP_SHARE) {
  113. // Save current context before applying transformations.
  114. this._outputCanvasCtx.save();
  115. // Flip the canvas and prevent mirror behaviour.
  116. this._outputCanvasCtx.scale(-1, 1);
  117. this._outputCanvasCtx.translate(-this._outputCanvasElement.width, 0);
  118. }
  119. this._outputCanvasCtx.drawImage(this._inputVideoElement, 0, 0);
  120. if (backgroundType === VIRTUAL_BACKGROUND_TYPE.DESKTOP_SHARE) {
  121. this._outputCanvasCtx.restore();
  122. }
  123. // Draw the background.
  124. this._outputCanvasCtx.globalCompositeOperation = 'destination-over';
  125. if (backgroundType === VIRTUAL_BACKGROUND_TYPE.IMAGE
  126. || backgroundType === VIRTUAL_BACKGROUND_TYPE.DESKTOP_SHARE) {
  127. this._outputCanvasCtx.drawImage(
  128. backgroundType === VIRTUAL_BACKGROUND_TYPE.IMAGE
  129. ? this._virtualImage : this._virtualVideo,
  130. 0,
  131. 0,
  132. this._outputCanvasElement.width,
  133. this._outputCanvasElement.height
  134. );
  135. } else {
  136. this._outputCanvasCtx.filter = `blur(${this._options.virtualBackground.blurValue}px)`;
  137. this._outputCanvasCtx.drawImage(this._inputVideoElement, 0, 0);
  138. }
  139. }
  140. /**
  141. * Represents the run Tensorflow Interference.
  142. *
  143. * @returns {void}
  144. */
  145. runInference() {
  146. this._model._runInference();
  147. const outputMemoryOffset = this._model._getOutputMemoryOffset() / 4;
  148. for (let i = 0; i < this._segmentationPixelCount; i++) {
  149. const background = this._model.HEAPF32[outputMemoryOffset + (i * 2)];
  150. const person = this._model.HEAPF32[outputMemoryOffset + (i * 2) + 1];
  151. const shift = Math.max(background, person);
  152. const backgroundExp = Math.exp(background - shift);
  153. const personExp = Math.exp(person - shift);
  154. // Sets only the alpha component of each pixel.
  155. this._segmentationMask.data[(i * 4) + 3] = (255 * personExp) / (backgroundExp + personExp);
  156. }
  157. this._segmentationMaskCtx.putImageData(this._segmentationMask, 0, 0);
  158. }
  159. /**
  160. * Loop function to render the background mask.
  161. *
  162. * @private
  163. * @returns {void}
  164. */
  165. _renderMask() {
  166. this.resizeSource();
  167. this.runInference();
  168. this.runPostProcessing();
  169. this._maskFrameTimerWorker.postMessage({
  170. id: SET_TIMEOUT,
  171. timeMs: 1000 / 30
  172. });
  173. }
  174. /**
  175. * Represents the resize source process.
  176. *
  177. * @returns {void}
  178. */
  179. resizeSource() {
  180. this._segmentationMaskCtx.drawImage(
  181. this._inputVideoElement,
  182. 0,
  183. 0,
  184. this._inputVideoElement.width,
  185. this._inputVideoElement.height,
  186. 0,
  187. 0,
  188. this._options.width,
  189. this._options.height
  190. );
  191. const imageData = this._segmentationMaskCtx.getImageData(
  192. 0,
  193. 0,
  194. this._options.width,
  195. this._options.height
  196. );
  197. const inputMemoryOffset = this._model._getInputMemoryOffset() / 4;
  198. for (let i = 0; i < this._segmentationPixelCount; i++) {
  199. this._model.HEAPF32[inputMemoryOffset + (i * 3)] = imageData.data[i * 4] / 255;
  200. this._model.HEAPF32[inputMemoryOffset + (i * 3) + 1] = imageData.data[(i * 4) + 1] / 255;
  201. this._model.HEAPF32[inputMemoryOffset + (i * 3) + 2] = imageData.data[(i * 4) + 2] / 255;
  202. }
  203. }
  204. /**
  205. * Checks if the local track supports this effect.
  206. *
  207. * @param {JitsiLocalTrack} jitsiLocalTrack - Track to apply effect.
  208. * @returns {boolean} - Returns true if this effect can run on the specified track
  209. * false otherwise.
  210. */
  211. isEnabled(jitsiLocalTrack: Object) {
  212. return jitsiLocalTrack.isVideoTrack() && jitsiLocalTrack.videoType === 'camera';
  213. }
  214. /**
  215. * Starts loop to capture video frame and render the segmentation mask.
  216. *
  217. * @param {MediaStream} stream - Stream to be used for processing.
  218. * @returns {MediaStream} - The stream with the applied effect.
  219. */
  220. startEffect(stream: MediaStream) {
  221. this._stream = stream;
  222. this._maskFrameTimerWorker = new Worker(timerWorkerScript, { name: 'Blur effect worker' });
  223. this._maskFrameTimerWorker.onmessage = this._onMaskFrameTimer;
  224. const firstVideoTrack = this._stream.getVideoTracks()[0];
  225. const { height, frameRate, width }
  226. = firstVideoTrack.getSettings ? firstVideoTrack.getSettings() : firstVideoTrack.getConstraints();
  227. this._segmentationMask = new ImageData(this._options.width, this._options.height);
  228. this._segmentationMaskCanvas = document.createElement('canvas');
  229. this._segmentationMaskCanvas.width = this._options.width;
  230. this._segmentationMaskCanvas.height = this._options.height;
  231. this._segmentationMaskCtx = this._segmentationMaskCanvas.getContext('2d');
  232. this._outputCanvasElement.width = parseInt(width, 10);
  233. this._outputCanvasElement.height = parseInt(height, 10);
  234. this._outputCanvasCtx = this._outputCanvasElement.getContext('2d');
  235. this._inputVideoElement.width = parseInt(width, 10);
  236. this._inputVideoElement.height = parseInt(height, 10);
  237. this._inputVideoElement.autoplay = true;
  238. this._inputVideoElement.srcObject = this._stream;
  239. this._inputVideoElement.onloadeddata = () => {
  240. this._maskFrameTimerWorker.postMessage({
  241. id: SET_TIMEOUT,
  242. timeMs: 1000 / 30
  243. });
  244. };
  245. return this._outputCanvasElement.captureStream(parseInt(frameRate, 10));
  246. }
  247. /**
  248. * Stops the capture and render loop.
  249. *
  250. * @returns {void}
  251. */
  252. stopEffect() {
  253. this._maskFrameTimerWorker.postMessage({
  254. id: CLEAR_TIMEOUT
  255. });
  256. this._maskFrameTimerWorker.terminate();
  257. }
  258. }