
JitsiStreamBackgroundEffect.js

// @flow

import { JitsiTrackEvents } from '../../base/lib-jitsi-meet';
import {
    CLEAR_TIMEOUT,
    TIMEOUT_TICK,
    SET_TIMEOUT,
    timerWorkerScript
} from './TimerWorker';

/**
 * Represents a modified MediaStream that adds effects to video background.
 * <tt>JitsiStreamBackgroundEffect</tt> does the processing of the original
 * video stream.
 */
export default class JitsiStreamBackgroundEffect {
    _model: Object;
    _options: Object;
    _screenSharing: Object;
    _segmentationPixelCount: number;
    _inputVideoElement: HTMLVideoElement;
    _onMaskFrameTimer: Function;
    _maskFrameTimerWorker: Worker;
    _outputCanvasElement: HTMLCanvasElement;
    _outputCanvasCtx: Object;
    _segmentationMaskCtx: Object;
    _segmentationMask: Object;
    _segmentationMaskCanvas: Object;
    _renderMask: Function;
    _virtualImage: HTMLImageElement;
    _virtualVideo: HTMLVideoElement;
    isEnabled: Function;
    startEffect: Function;
    stopEffect: Function;

    /**
     * Represents a modified video MediaStream track.
     *
     * @class
     * @param {Object} model - The segmentation model used to compute the person mask.
     * @param {Object} options - Segmentation dimensions and virtual background options.
     * @param {Object} screenSharing - Desktop track for displaying desktop share as virtual background.
     */
    constructor(model: Object, options: Object, screenSharing: Object) {
        this._options = options;
        this._screenSharing = screenSharing;

        if (this._options.virtualBackground.backgroundType === 'image') {
            this._virtualImage = document.createElement('img');
            this._virtualImage.crossOrigin = 'anonymous';
            this._virtualImage.src = this._options.virtualBackground.virtualSource;
        }
        if (this._options.virtualBackground.backgroundType === 'desktop-share' && this._screenSharing) {
            this._virtualVideo = document.createElement('video');
            this._virtualVideo.autoplay = true;
            this._virtualVideo.srcObject = this._screenSharing.stream;
        }
        this._model = model;
        this._segmentationPixelCount = this._options.width * this._options.height;

        // Bind event handler so it is only bound once for every instance.
        this._onMaskFrameTimer = this._onMaskFrameTimer.bind(this);

        // Workaround for FF issue https://bugzilla.mozilla.org/show_bug.cgi?id=1388974
        this._outputCanvasElement = document.createElement('canvas');
        this._outputCanvasElement.getContext('2d');
        this._inputVideoElement = document.createElement('video');
    }

    /**
     * EventHandler onmessage for the maskFrameTimerWorker WebWorker.
     *
     * @private
     * @param {MessageEvent} response - The onmessage event parameter.
     * @returns {void}
     */
    _onMaskFrameTimer(response: Object) {
        if (response.data.id === TIMEOUT_TICK) {
            this._renderMask();
        }
    }

    /**
     * Runs the post-processing of the segmentation mask: composites the person
     * cut-out from the camera frame over the selected virtual background on the
     * output canvas.
     *
     * @returns {void}
     */
    runPostProcessing() {
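        // Compositing overview: the (slightly blurred) segmentation mask is
        // drawn first with 'copy', the camera frame is then drawn with
        // 'source-in' so only the pixels where the mask is opaque (the person)
        // survive, and finally the chosen background is drawn behind the person
        // with 'destination-over'.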
        this._outputCanvasCtx.globalCompositeOperation = 'copy';

        // Draw segmentation mask.
        // Smooth out the edges.
        if (this._options.virtualBackground.backgroundType === 'image') {
            this._outputCanvasCtx.filter = 'blur(4px)';
        } else {
            this._outputCanvasCtx.filter = 'blur(8px)';
        }
        this._outputCanvasCtx.drawImage(
            this._segmentationMaskCanvas,
            0,
            0,
            this._options.width,
            this._options.height,
            0,
            0,
            this._inputVideoElement.width,
            this._inputVideoElement.height
        );
        this._outputCanvasCtx.globalCompositeOperation = 'source-in';
        this._outputCanvasCtx.filter = 'none';

        // Draw the foreground video.
        this._outputCanvasCtx.drawImage(this._inputVideoElement, 0, 0);

        // Draw the background.
        this._outputCanvasCtx.globalCompositeOperation = 'destination-over';
        if (this._options.virtualBackground.backgroundType === 'image') {
            this._outputCanvasCtx.drawImage(
                this._virtualImage,
                0,
                0,
                this._inputVideoElement.width,
                this._inputVideoElement.height
            );
        }
        if (this._options.virtualBackground.backgroundType === 'desktop-share') {
            this._outputCanvasCtx.drawImage(
                this._virtualVideo,
                0,
                0
            );
        } else {
            this._outputCanvasCtx.filter = `blur(${this._options.virtualBackground.blurValue}px)`;
            this._outputCanvasCtx.drawImage(this._inputVideoElement, 0, 0);
        }
    }

    /**
     * Runs the TensorFlow inference on the current input frame.
     *
     * @returns {void}
     */
    runInference() {
        this._model._runInference();
        const outputMemoryOffset = this._model._getOutputMemoryOffset() / 4;
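        // The model outputs two logits per pixel (background, person). A
        // numerically stable two-class softmax (the max logit is subtracted
        // before exponentiation) converts them into a person probability that
        // is written into the alpha channel of the segmentation mask.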
        for (let i = 0; i < this._segmentationPixelCount; i++) {
            const background = this._model.HEAPF32[outputMemoryOffset + (i * 2)];
            const person = this._model.HEAPF32[outputMemoryOffset + (i * 2) + 1];
            const shift = Math.max(background, person);
            const backgroundExp = Math.exp(background - shift);
            const personExp = Math.exp(person - shift);

            // Sets only the alpha component of each pixel.
            this._segmentationMask.data[(i * 4) + 3] = (255 * personExp) / (backgroundExp + personExp);
        }
        this._segmentationMaskCtx.putImageData(this._segmentationMask, 0, 0);
    }

    /**
     * Loop function to render the background mask.
     *
     * @private
     * @returns {void}
     */
    _renderMask() {
        this.resizeSource();
        this.runInference();
        this.runPostProcessing();

        this._maskFrameTimerWorker.postMessage({
            id: SET_TIMEOUT,
            timeMs: 1000 / 30
        });
    }

    /**
     * Downscales the current video frame to the segmentation dimensions and
     * feeds it to the model's input buffer.
     *
     * @returns {void}
     */
    resizeSource() {
        this._segmentationMaskCtx.drawImage(
            this._inputVideoElement,
            0,
            0,
            this._inputVideoElement.width,
            this._inputVideoElement.height,
            0,
            0,
            this._options.width,
            this._options.height
        );

        const imageData = this._segmentationMaskCtx.getImageData(
            0,
            0,
            this._options.width,
            this._options.height
        );
        const inputMemoryOffset = this._model._getInputMemoryOffset() / 4;
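        // Copy the downscaled frame into the model's input buffer as three
        // floats per pixel (R, G, B scaled to [0, 1]); the alpha channel is
        // dropped.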
        for (let i = 0; i < this._segmentationPixelCount; i++) {
            this._model.HEAPF32[inputMemoryOffset + (i * 3)] = imageData.data[i * 4] / 255;
            this._model.HEAPF32[inputMemoryOffset + (i * 3) + 1] = imageData.data[(i * 4) + 1] / 255;
            this._model.HEAPF32[inputMemoryOffset + (i * 3) + 2] = imageData.data[(i * 4) + 2] / 255;
        }
    }

    /**
     * Checks if the local track supports this effect.
     *
     * @param {JitsiLocalTrack} jitsiLocalTrack - Track to apply the effect to.
     * @returns {boolean} - Returns true if this effect can run on the specified track,
     * false otherwise.
     */
    isEnabled(jitsiLocalTrack: Object) {
        return jitsiLocalTrack.isVideoTrack() && jitsiLocalTrack.videoType === 'camera';
    }

    /**
     * Starts loop to capture video frame and render the segmentation mask.
     *
     * @param {MediaStream} stream - Stream to be used for processing.
     * @returns {MediaStream} - The stream with the applied effect.
     */
    startEffect(stream: MediaStream) {
        this._maskFrameTimerWorker = new Worker(timerWorkerScript, { name: 'Blur effect worker' });
        this._maskFrameTimerWorker.onmessage = this._onMaskFrameTimer;

        const firstVideoTrack = stream.getVideoTracks()[0];
        const { height, frameRate, width }
            = firstVideoTrack.getSettings ? firstVideoTrack.getSettings() : firstVideoTrack.getConstraints();

        this._segmentationMask = new ImageData(this._options.width, this._options.height);
        this._segmentationMaskCanvas = document.createElement('canvas');
        this._segmentationMaskCanvas.width = this._options.width;
        this._segmentationMaskCanvas.height = this._options.height;
        this._segmentationMaskCtx = this._segmentationMaskCanvas.getContext('2d');

        this._outputCanvasElement.width = parseInt(width, 10);
        this._outputCanvasElement.height = parseInt(height, 10);
        this._outputCanvasCtx = this._outputCanvasElement.getContext('2d');
        this._inputVideoElement.width = parseInt(width, 10);
        this._inputVideoElement.height = parseInt(height, 10);
        this._inputVideoElement.autoplay = true;
        this._inputVideoElement.srcObject = stream;

        // Reset the virtual background options when the shared desktop track stops.
        this._screenSharing && this._screenSharing.on(
            JitsiTrackEvents.LOCAL_TRACK_STOPPED,
            () => {
                this._options.virtualBackground.enabled = false;
                this._options.virtualBackground.backgroundType = 'none';
                this._options.virtualBackground.selectedThumbnail = 'none';
                this._options.virtualBackground.backgroundEffectEnabled = false;
            });
        this._inputVideoElement.onloadeddata = () => {
            this._maskFrameTimerWorker.postMessage({
                id: SET_TIMEOUT,
                timeMs: 1000 / 30
            });
        };

        return this._outputCanvasElement.captureStream(parseInt(frameRate, 10));
    }

    /**
     * Stops the capture and render loop.
     *
     * @returns {void}
     */
    stopEffect() {
        this._maskFrameTimerWorker.postMessage({
            id: CLEAR_TIMEOUT
        });
        this._maskFrameTimerWorker.terminate();
        this._screenSharing && this._screenSharing.dispose();
    }
}
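
Below is a minimal usage sketch, not part of the file above. It assumes a loaded segmentation model object exposing the _runInference()/_getInputMemoryOffset()/_getOutputMemoryOffset()/HEAPF32 interface this class relies on, and the option values (segmentation dimensions, the 'blur' background type, blurValue) are illustrative: any backgroundType other than 'image' or 'desktop-share' falls through to the blurred-camera branch in runPostProcessing.

import JitsiStreamBackgroundEffect from './JitsiStreamBackgroundEffect';

/**
 * Applies a background-blur effect to a local camera stream.
 *
 * @param {Object} model - A loaded segmentation model (hypothetical loader not shown).
 * @returns {Promise<MediaStream>} - The processed stream captured from the output canvas.
 */
async function startBlurredCamera(model) {
    const stream = await navigator.mediaDevices.getUserMedia({ video: true });

    const effect = new JitsiStreamBackgroundEffect(model, {
        // Segmentation input dimensions (example values).
        width: 256,
        height: 144,
        virtualBackground: {
            backgroundType: 'blur',
            blurValue: 8,
            enabled: true
        }
    }, /* screenSharing */ null);

    // startEffect() wires the camera stream into the hidden input video element,
    // starts the worker-driven render loop and returns the canvas capture stream.
    return effect.startEffect(stream);
}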