feat(screenSharing): Add system audio screen sharing

Branch: master
Author: Andrei Gavrilescu, 4 years ago
Commit: f502e13edc
4 changed files with 145 additions and 23 deletions:

  1. conference.js (+70 -18)
  2. package-lock.json (+4 -4)
  3. package.json (+1 -1)
  4. react/features/stream-effects/audio-mixer/AudioMixerEffect.js (+70 -0)

conference.js (+70 -18)

@@ -120,6 +120,7 @@ import {
 import { mediaPermissionPromptVisibilityChanged } from './react/features/overlay';
 import { suspendDetected } from './react/features/power-monitor';
 import { setSharedVideoStatus } from './react/features/shared-video';
+import { AudioMixerEffect } from './react/features/stream-effects/audio-mixer/AudioMixerEffect';
 import { createPresenterEffect } from './react/features/stream-effects/presenter';
 import { endpointMessageReceived } from './react/features/subtitles';
 import { createRnnoiseProcessorPromise } from './react/features/rnnoise';
@@ -659,10 +660,10 @@ export default {
                     startAudioOnly: config.startAudioOnly,
                     startScreenSharing: config.startScreenSharing,
                     startWithAudioMuted: config.startWithAudioMuted
-                        || config.startSilent
-                        || isUserInteractionRequiredForUnmute(APP.store.getState()),
+                    || config.startSilent
+                    || isUserInteractionRequiredForUnmute(APP.store.getState()),
                     startWithVideoMuted: config.startWithVideoMuted
-                        || isUserInteractionRequiredForUnmute(APP.store.getState())
+                    || isUserInteractionRequiredForUnmute(APP.store.getState())
                 }))
             .then(([ tracks, con ]) => {
                 tracks.forEach(track => {
@@ -1417,7 +1418,7 @@ export default {
      * in case it fails.
      * @private
      */
-    _turnScreenSharingOff(didHaveVideo) {
+    async _turnScreenSharingOff(didHaveVideo) {
         this._untoggleScreenSharing = null;
         this.videoSwitchInProgress = true;
         const { receiver } = APP.remoteControl;
@@ -1446,6 +1447,20 @@ export default {
             }
         });
 
+        // If system audio was also shared stop the AudioMixerEffect and dispose of the desktop audio track.
+        if (this._mixerEffect) {
+            await this.localAudio.setEffect(undefined);
+            await this._desktopAudioStream.dispose();
+            this._mixerEffect = undefined;
+            this._desktopAudioStream = undefined;
+
+        // In case there was no local audio when screen sharing was started the fact that we set the audio stream to
+        // null will take care of the desktop audio stream cleanup.
+        } else if (this._desktopAudioStream) {
+            await this.useAudioStream(null);
+            this._desktopAudioStream = undefined;
+        }
+
         if (didHaveVideo) {
             promise = promise.then(() => createLocalTracksF({ devices: [ 'video' ] }))
                 .then(([ stream ]) => this.useVideoStream(stream))
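The cleanup above first detaches the effect from the local audio track and only then disposes of the desktop audio track; when no microphone track exists, replacing the conference audio stream with null is enough. A minimal standalone sketch of that teardown, assuming the same JitsiLocalTrack setEffect()/dispose() API used in the hunk (stopSystemAudio and its parameters are hypothetical names for illustration only):

    // Hypothetical helper mirroring the cleanup in _turnScreenSharingOff; not part of the actual change.
    async function stopSystemAudio(localAudio, mixerEffect, desktopAudioStream, useAudioStream) {
        if (mixerEffect) {
            // Detach the mixer from the microphone track first...
            await localAudio.setEffect(undefined);

            // ...then release the desktop audio capture.
            await desktopAudioStream.dispose();
        } else if (desktopAudioStream) {
            // No microphone was in use, so the desktop audio track itself was the conference
            // audio stream; replacing it with null takes care of its cleanup.
            await useAudioStream(null);
        }
    }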
@@ -1585,26 +1600,31 @@ export default {
                 }
             });
 
-        return getDesktopStreamPromise.then(([ desktopStream ]) => {
+        return getDesktopStreamPromise.then(desktopStreams => {
             // Stores the "untoggle" handler which remembers whether was
             // there any video before and whether was it muted.
             this._untoggleScreenSharing
                 = this._turnScreenSharingOff.bind(this, didHaveVideo);
-            desktopStream.on(
-                JitsiTrackEvents.LOCAL_TRACK_STOPPED,
-                () => {
-                    // If the stream was stopped during screen sharing
-                    // session then we should switch back to video.
-                    this.isSharingScreen
-                        && this._untoggleScreenSharing
-                        && this._untoggleScreenSharing();
-                }
-            );
+
+            const desktopVideoStream = desktopStreams.find(stream => stream.getType() === MEDIA_TYPE.VIDEO);
+
+            if (desktopVideoStream) {
+                desktopVideoStream.on(
+                    JitsiTrackEvents.LOCAL_TRACK_STOPPED,
+                    () => {
+                        // If the stream was stopped during screen sharing
+                        // session then we should switch back to video.
+                        this.isSharingScreen
+                            && this._untoggleScreenSharing
+                            && this._untoggleScreenSharing();
+                    }
+                );
+            }
 
             // close external installation dialog on success.
             externalInstallation && $.prompt.close();
 
-            return desktopStream;
+            return desktopStreams;
         }, error => {
             DSExternalInstallationInProgress = false;
 
@@ -1755,7 +1775,29 @@ export default {
         this.videoSwitchInProgress = true;
 
         return this._createDesktopTrack(options)
-            .then(stream => this.useVideoStream(stream))
+            .then(async streams => {
+                const desktopVideoStream = streams.find(stream => stream.getType() === MEDIA_TYPE.VIDEO);
+
+                if (desktopVideoStream) {
+                    this.useVideoStream(desktopVideoStream);
+                }
+
+                this._desktopAudioStream = streams.find(stream => stream.getType() === MEDIA_TYPE.AUDIO);
+
+                if (this._desktopAudioStream) {
+                    // If there is a localAudio stream, mix in the desktop audio stream captured by the screen sharing
+                    // api.
+                    if (this.localAudio) {
+                        this._mixerEffect = new AudioMixerEffect(this._desktopAudioStream);
+
+                        await this.localAudio.setEffect(this._mixerEffect);
+                    } else {
+                        // If no local stream is present ( i.e. no input audio devices) we use the screen share audio
+                        // stream as we would use a regular stream.
+                        await this.useAudioStream(this._desktopAudioStream);
+                    }
+                }
+            })
             .then(() => {
                 this.videoSwitchInProgress = false;
                 if (config.enableScreenshotCapture) {
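When the screen-sharing API also returns an audio track, the hunk above either mixes it into the existing microphone track through the new AudioMixerEffect or, if no microphone track exists, uses it directly as the conference audio stream. A minimal standalone sketch of that decision, assuming the same JitsiLocalTrack API used in the hunk (applySystemAudio and its parameters are hypothetical names for illustration only):

    import { AudioMixerEffect } from './react/features/stream-effects/audio-mixer/AudioMixerEffect';
    import { MEDIA_TYPE } from './react/features/base/media';

    // Hypothetical helper mirroring the hunk above; not a drop-in replacement for it.
    async function applySystemAudio(streams, localAudio, useAudioStream) {
        const desktopAudioStream = streams.find(stream => stream.getType() === MEDIA_TYPE.AUDIO);

        if (!desktopAudioStream) {
            return undefined;
        }

        if (localAudio) {
            // Mix the system audio into the existing microphone track.
            const mixerEffect = new AudioMixerEffect(desktopAudioStream);

            await localAudio.setEffect(mixerEffect);

            return mixerEffect;
        }

        // No microphone available: send the desktop audio track as the regular audio stream.
        await useAudioStream(desktopAudioStream);

        return undefined;
    }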
@@ -2288,7 +2330,17 @@ export default {
 
                     return stream;
                 })
-                .then(stream => this.useAudioStream(stream))
+                .then(async stream => {
+                    // In case screen sharing audio is also shared we mix it with new input stream. The old _mixerEffect
+                    // will be cleaned up when the existing track is replaced.
+                    if (this._mixerEffect) {
+                        this._mixerEffect = new AudioMixerEffect(this._desktopAudioStream);
+
+                        await stream.setEffect(this._mixerEffect);
+                    }
+
+                    return this.useAudioStream(stream);
+                })
                 .then(() => {
                     logger.log(`switched local audio device: ${this.localAudio?.getDeviceId()}`);
 

package-lock.json (+4 -4)

@@ -10653,8 +10653,8 @@
       "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls="
     },
     "js-utils": {
-      "version": "github:jitsi/js-utils#7a2be83d17dc4a3d0fac4a742ab999478f326f2e",
-      "from": "github:jitsi/js-utils#7a2be83d17dc4a3d0fac4a742ab999478f326f2e",
+      "version": "github:jitsi/js-utils#91c5e53ca5fa42907c88d56bc78254e6e56e058d",
+      "from": "github:jitsi/js-utils#91c5e53ca5fa42907c88d56bc78254e6e56e058d",
       "requires": {
         "bowser": "2.7.0",
         "js-md5": "0.7.3",
@@ -10883,8 +10883,8 @@
       }
     },
     "lib-jitsi-meet": {
-      "version": "github:jitsi/lib-jitsi-meet#5466c9d08a2c262ebb5889e3bb0cbbe6f08dc0c3",
-      "from": "github:jitsi/lib-jitsi-meet#5466c9d08a2c262ebb5889e3bb0cbbe6f08dc0c3",
+      "version": "github:jitsi/lib-jitsi-meet#a7950f8ebb489225c2e8bf41fe65f330b3de0874",
+      "from": "github:jitsi/lib-jitsi-meet#a7950f8ebb489225c2e8bf41fe65f330b3de0874",
       "requires": {
         "@jitsi/sdp-interop": "0.1.14",
         "@jitsi/sdp-simulcast": "0.2.2",

package.json (+1 -1)

@@ -56,7 +56,7 @@
     "js-utils": "github:jitsi/js-utils#91c5e53ca5fa42907c88d56bc78254e6e56e058d",
     "jsrsasign": "8.0.12",
     "jwt-decode": "2.2.0",
-    "lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#5466c9d08a2c262ebb5889e3bb0cbbe6f08dc0c3",
+    "lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#a7950f8ebb489225c2e8bf41fe65f330b3de0874",
     "libflacjs": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d",
     "lodash": "4.17.13",
     "moment": "2.19.4",

react/features/stream-effects/audio-mixer/AudioMixerEffect.js (+70 -0)

@@ -0,0 +1,70 @@
+// @flow
+
+import JitsiMeetJS from '../../base/lib-jitsi-meet';
+import { MEDIA_TYPE } from '../../base/media';
+
+/**
+ * Class implementing the effect interface expected by a JitsiLocalTrack.
+ * The AudioMixerEffect, as the name implies, mixes two JitsiLocalTracks containing an audio track. The first track is
+ * provided at the moment of creation, the second is provided through the effect interface.
+ */
+export class AudioMixerEffect {
+    /**
+     * JitsiLocalTrack that is going to be mixed into the track that uses this effect.
+     */
+    _mixAudio: Object;
+
+    /**
+     * lib-jitsi-meet AudioMixer.
+     */
+    _audioMixer: Object;
+
+    /**
+     * Creates AudioMixerEffect.
+     *
+     * @param {JitsiLocalTrack} mixAudio - JitsiLocalTrack which will be mixed with the original track.
+     */
+    constructor(mixAudio: Object) {
+        if (mixAudio.getType() !== MEDIA_TYPE.AUDIO) {
+            throw new Error('AudioMixerEffect only supports audio JitsiLocalTracks; effect will not work!');
+        }
+
+        this._mixAudio = mixAudio;
+    }
+
+    /**
+     * Checks if the JitsiLocalTrack supports this effect.
+     *
+     * @param {JitsiLocalTrack} sourceLocalTrack - Track to which the effect will be applied.
+     * @returns {boolean} - Returns true if this effect can run on the specified track, false otherwise.
+     */
+    isEnabled(sourceLocalTrack: Object) {
+        // Both JitsiLocalTracks need to be audio, i.e. contain an audio MediaStreamTrack
+        return sourceLocalTrack.isAudioTrack() && this._mixAudio.isAudioTrack();
+    }
+
+    /**
+     * Effect interface called by the source JitsiLocalTrack. At this point a WebAudio ChannelMergerNode is created
+     * and the two associated MediaStreams are connected to it; the resulting mixed MediaStream is returned.
+     *
+     * @param {MediaStream} audioStream - Audio stream which will be mixed with _mixAudio.
+     * @returns {MediaStream} - MediaStream containing both audio tracks mixed together.
+     */
+    startEffect(audioStream: MediaStream) {
+        this._audioMixer = JitsiMeetJS.createAudioMixer();
+        this._audioMixer.addMediaStream(this._mixAudio.getOriginalStream());
+        this._audioMixer.addMediaStream(audioStream);
+
+        return this._audioMixer.start();
+    }
+
+    /**
+     * Reset the AudioMixer, stopping it in the process.
+     *
+     * @returns {void}
+     */
+    stopEffect() {
+        this._audioMixer.reset();
+    }
+
+}
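
A short usage sketch of the new effect: setEffect() on a JitsiLocalTrack is what drives isEnabled(), startEffect() and stopEffect(), as the conference.js changes above do with the microphone and desktop audio tracks (mixDesktopAudio/unmixDesktopAudio are hypothetical helper names for illustration only):

    import { AudioMixerEffect } from './react/features/stream-effects/audio-mixer/AudioMixerEffect';

    // micTrack and desktopAudioTrack are assumed to be audio JitsiLocalTracks.
    async function mixDesktopAudio(micTrack, desktopAudioTrack) {
        const effect = new AudioMixerEffect(desktopAudioTrack);

        // The track checks isEnabled() and then calls startEffect() with its own MediaStream,
        // sending the mixed MediaStream returned by the AudioMixer instead.
        await micTrack.setEffect(effect);

        return effect;
    }

    // Passing undefined removes the effect, which in turn calls stopEffect() and resets the mixer.
    async function unmixDesktopAudio(micTrack) {
        await micTrack.setEffect(undefined);
    }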
