
Use redux for local tracks instead of conference.js (#9920)

* do not use this.localVideo

* move tracks initialized flag around

* do not use this.localAudio

* untangle the useAudioStream/useVideoStream methods

It should be safe to call setVideoMuteStatus and
setAudioMuteStatus regardless of the prejoin page
visibility state.

* add a NO-OP path to the useAudioStream/useVideoStream methods and fix a crash
in _setLocalAudioVideoStreams caused by a non-promise return value

* use Promise.allSettled
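Illustration (not part of the commit): a minimal, runnable sketch of the pattern this change moves to. Instead of caching JitsiLocalTrack instances on this.localAudio / this.localVideo, the current track is looked up in the redux store on demand via getLocalJitsiAudioTrack / getLocalJitsiVideoTrack; the state shape and selector body below are simplified assumptions, not the real implementation.

// Simplified stand-in for the 'features/base/tracks' state slice.
const state = {
    'features/base/tracks': [
        { local: true, mediaType: 'audio', jitsiTrack: { name: 'mic' } },
        { local: true, mediaType: 'video', jitsiTrack: { name: 'camera' } }
    ]
};

// Assumed selector shape: find the local track of the requested media type.
const getLocalTrack = (tracksState, mediaType) =>
    tracksState.find(t => t.local && t.mediaType === mediaType);

const getLocalJitsiVideoTrack = s =>
    getLocalTrack(s['features/base/tracks'], 'video')?.jitsiTrack ?? null;

console.log(getLocalJitsiVideoTrack(state)); // { name: 'camera' } - read fresh from the store, nothing cached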
Branch: master
Paweł Domas, 4 years ago
Parent
Commit 1db52354fb
1 changed file with 78 additions and 91 deletions
conference.js

@@ -454,27 +454,12 @@ export default {
 
     isSharingScreen: false,
 
-    /**
-     * The local audio track (if any).
-     * FIXME tracks from redux store should be the single source of truth
-     * @type {JitsiLocalTrack|null}
-     */
-    localAudio: null,
-
     /**
      * The local presenter video track (if any).
      * @type {JitsiLocalTrack|null}
      */
     localPresenterVideo: null,
 
-    /**
-     * The local video track (if any).
-     * FIXME tracks from redux store should be the single source of truth, but
-     * more refactoring is required around screen sharing ('localVideo' usages).
-     * @type {JitsiLocalTrack|null}
-     */
-    localVideo: null,
-
     /**
      * Returns an object containing a promise which resolves with the created tracks &
      * the errors resulting from that process.
@@ -728,9 +713,7 @@ export default {
                 track.mute();
             }
         });
-        logger.log(`Initialized with ${tracks.length} local tracks`);
 
-        this._localTracksInitialized = true;
         con.addEventListener(JitsiConnectionEvents.CONNECTION_FAILED, _connectionFailedHandler);
         APP.connection = connection = con;
 
@@ -907,7 +890,9 @@ export default {
             return;
         }
 
-        if (!this.localAudio && !mute) {
+        const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
+
+        if (!localAudio && !mute) {
             const maybeShowErrorDialog = error => {
                 showUI && APP.store.dispatch(notifyMicError(error));
             };
@@ -961,17 +946,18 @@ export default {
         const maybeShowErrorDialog = error => {
             showUI && APP.store.dispatch(notifyCameraError(error));
         };
+        const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
 
         if (mute) {
             try {
-                await this.localVideo.setEffect(undefined);
+                await localVideo.setEffect(undefined);
             } catch (err) {
                 logger.error('Failed to remove the presenter effect', err);
                 maybeShowErrorDialog(err);
             }
         } else {
             try {
-                await this.localVideo.setEffect(await this._createPresenterStreamEffect());
+                await localVideo.setEffect(await this._createPresenterStreamEffect());
             } catch (err) {
                 logger.error('Failed to apply the presenter effect', err);
                 maybeShowErrorDialog(err);
@@ -1013,7 +999,9 @@ export default {
             return;
         }
 
-        if (!this.localVideo && !mute) {
+        const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
+
+        if (!localVideo && !mute) {
             const maybeShowErrorDialog = error => {
                 showUI && APP.store.dispatch(notifyCameraError(error));
             };
@@ -1347,7 +1335,7 @@ export default {
      * @private
      */
     _setLocalAudioVideoStreams(tracks = []) {
-        return tracks.map(track => {
+        const promises = tracks.map(track => {
             if (track.isAudioTrack()) {
                 return this.useAudioStream(track);
             } else if (track.isVideoTrack()) {
@@ -1356,12 +1344,16 @@ export default {
                 return this.useVideoStream(track);
             }
 
-            logger.error(
-                    'Ignored not an audio nor a video track: ', track);
+            logger.error('Ignored not an audio nor a video track: ', track);
 
             return Promise.resolve();
 
         });
+
+        return Promise.allSettled(promises).then(() => {
+            this._localTracksInitialized = true;
+            logger.log(`Initialized with ${tracks.length} local tracks`);
+        });
     },
 
     _getConferenceOptions() {
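Illustration (not part of the commit): why Promise.allSettled rather than Promise.all in _setLocalAudioVideoStreams. The _localTracksInitialized flag and the log line should still run when one of the per-track promises rejects; useTrack below is a hypothetical stand-in for useAudioStream / useVideoStream.

// One of the two simulated track setups fails.
const useTrack = track =>
    track.ok ? Promise.resolve(track.id) : Promise.reject(new Error(`${track.id} failed`));

const tracks = [ { id: 'audio', ok: true }, { id: 'video', ok: false } ];

Promise.allSettled(tracks.map(useTrack)).then(results => {
    // Runs even though one promise rejected; Promise.all would have skipped this callback.
    console.log(results.map(r => r.status)); // [ 'fulfilled', 'rejected' ]
    console.log(`Initialized with ${tracks.length} local tracks`);
});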
@@ -1383,29 +1375,20 @@ export default {
 
         return new Promise((resolve, reject) => {
             _replaceLocalVideoTrackQueue.enqueue(onFinish => {
-                const state = APP.store.getState();
+                const oldTrack = getLocalJitsiVideoTrack(APP.store.getState());
 
-                // When the prejoin page is displayed localVideo is not set
-                // so just replace the video track from the store with the new one.
-                if (isPrejoinPageVisible(state)) {
-                    const oldTrack = getLocalJitsiVideoTrack(state);
+                logger.debug(`useVideoStream: Replacing ${oldTrack} with ${newTrack}`);
 
-                    logger.debug(`useVideoStream on the prejoin screen: Replacing ${oldTrack} with ${newTrack}`);
+                if (oldTrack === newTrack) {
+                    resolve();
+                    onFinish();
 
-                    return APP.store.dispatch(replaceLocalTrack(oldTrack, newTrack))
-                        .then(resolve)
-                        .catch(error => {
-                            logger.error(`useVideoStream failed on the prejoin screen: ${error}`);
-                            reject(error);
-                        })
-                        .then(onFinish);
+                    return;
                 }
 
-                logger.debug(`useVideoStream: Replacing ${this.localVideo} with ${newTrack}`);
                 APP.store.dispatch(
-                    replaceLocalTrack(this.localVideo, newTrack, room))
+                    replaceLocalTrack(oldTrack, newTrack, room))
                     .then(() => {
-                        this.localVideo = newTrack;
                         this._setSharingScreen(newTrack);
                         this.setVideoMuteStatus();
                     })
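Illustration (not part of the commit): the NO-OP path added to useVideoStream / useAudioStream. When the store already holds the requested track, the promise resolves and the queue slot is released without dispatching a replacement. The queue below is a trivial promise-chain stand-in for _replaceLocalVideoTrackQueue, and replaceTrack is a hypothetical async callback.

const makeQueue = () => {
    let tail = Promise.resolve();

    return {
        enqueue(task) {
            // Each task runs after the previous one calls its onFinish callback.
            tail = tail.then(() => new Promise(done => task(done)));
        }
    };
};

const _replaceQueue = makeQueue();

function useVideoStream(oldTrack, newTrack, replaceTrack) {
    return new Promise((resolve, reject) => {
        _replaceQueue.enqueue(onFinish => {
            if (oldTrack === newTrack) {
                // NO-OP: nothing to replace, but the queue slot must still be released.
                resolve();
                onFinish();

                return;
            }

            replaceTrack(oldTrack, newTrack)
                .then(resolve, reject)
                .then(onFinish);
        });
    });
}

// Replacing a track with itself resolves immediately instead of hanging the queue.
useVideoStream('camera-1', 'camera-1', () => Promise.resolve())
    .then(() => console.log('no-op resolved'));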
@@ -1455,23 +1438,18 @@ export default {
     useAudioStream(newTrack) {
         return new Promise((resolve, reject) => {
             _replaceLocalAudioTrackQueue.enqueue(onFinish => {
-                const state = APP.store.getState();
+                const oldTrack = getLocalJitsiAudioTrack(APP.store.getState());
 
-                // When the prejoin page is displayed localAudio is not set
-                // so just replace the audio track from the store with the new one.
-                if (isPrejoinPageVisible(state)) {
-                    const oldTrack = getLocalJitsiAudioTrack(state);
+                if (oldTrack === newTrack) {
+                    resolve();
+                    onFinish();
 
-                    return APP.store.dispatch(replaceLocalTrack(oldTrack, newTrack))
-                        .then(resolve)
-                        .catch(reject)
-                        .then(onFinish);
+                    return;
                 }
 
                 APP.store.dispatch(
-                replaceLocalTrack(this.localAudio, newTrack, room))
+                replaceLocalTrack(oldTrack, newTrack, room))
                     .then(() => {
-                        this.localAudio = newTrack;
                         this.setAudioMuteStatus(this.isLocalAudioMuted());
                     })
                     .then(resolve)
@@ -1546,7 +1524,9 @@ export default {
 
         // If system audio was also shared stop the AudioMixerEffect and dispose of the desktop audio track.
         if (this._mixerEffect) {
-            await this.localAudio.setEffect(undefined);
+            const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
+
+            await localAudio.setEffect(undefined);
             await this._desktopAudioStream.dispose();
             this._mixerEffect = undefined;
             this._desktopAudioStream = undefined;
@@ -1772,7 +1752,8 @@ export default {
 
         // Create a new presenter track and apply the presenter effect.
         if (!this.localPresenterVideo && !mute) {
-            const { height, width } = this.localVideo.track.getSettings() ?? this.localVideo.track.getConstraints();
+            const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
+            const { height, width } = localVideo.track.getSettings() ?? localVideo.track.getConstraints();
             const isPortrait = height >= width;
             const DESKTOP_STREAM_CAP = 720;
 
@@ -1801,7 +1782,7 @@ export default {
 
                 // Apply the constraints on the desktop track.
                 try {
-                    await this.localVideo.track.applyConstraints(desktopResizeConstraints);
+                    await localVideo.track.applyConstraints(desktopResizeConstraints);
                 } catch (err) {
                     logger.error('Failed to apply constraints on the desktop stream for presenter mode', err);
 
@@ -1809,7 +1790,7 @@ export default {
                 }
             }
             const trackHeight = resizeDesktopStream
-                ? this.localVideo.track.getSettings().height ?? DESKTOP_STREAM_CAP
+                ? localVideo.track.getSettings().height ?? DESKTOP_STREAM_CAP
                 : height;
             let effect;
 
@@ -1824,7 +1805,7 @@ export default {
 
             // Replace the desktop track on the peerconnection.
             try {
-                await this.localVideo.setEffect(effect);
+                await localVideo.setEffect(effect);
                 APP.store.dispatch(setVideoMuted(mute, MEDIA_TYPE.PRESENTER));
                 this.setVideoMuteStatus();
             } catch (err) {
@@ -1880,12 +1861,14 @@ export default {
                 }
 
                 if (this._desktopAudioStream) {
+                    const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
+
                     // If there is a localAudio stream, mix in the desktop audio stream captured by the screen sharing
                     // api.
-                    if (this.localAudio) {
+                    if (localAudio) {
                         this._mixerEffect = new AudioMixerEffect(this._desktopAudioStream);
 
-                        await this.localAudio.setEffect(this._mixerEffect);
+                        await localAudio.setEffect(this._mixerEffect);
                     } else {
                         // If no local stream is present ( i.e. no input audio devices) we use the screen share audio
                         // stream as we would use a regular stream.
@@ -2066,10 +2049,10 @@ export default {
         });
 
         room.on(JitsiConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED, (id, lvl) => {
+            const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
             let newLvl = lvl;
 
-            if (this.isLocalId(id)
-                && this.localAudio && this.localAudio.isMuted()) {
+            if (this.isLocalId(id) && localAudio?.isMuted()) {
                 newLvl = 0;
             }
 
@@ -2311,6 +2294,7 @@ export default {
         APP.UI.addListener(
             UIEvents.VIDEO_DEVICE_CHANGED,
             cameraDeviceId => {
+                const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
                 const videoWasMuted = this.isLocalVideoMuted();
 
                 sendAnalytics(createDeviceChangedEvent('video', 'input'));
@@ -2318,7 +2302,7 @@ export default {
                 // If both screenshare and video are in progress, restart the
                 // presenter mode with the new camera device.
                 if (this.isSharingScreen && !videoWasMuted) {
-                    const { height } = this.localVideo.track.getSettings();
+                    const { height } = localVideo.track.getSettings();
 
                     // dispose the existing presenter track and create a new
                     // camera track.
@@ -2327,7 +2311,7 @@ export default {
                     this.localPresenterVideo = null;
 
                     return this._createPresenterStreamEffect(height, cameraDeviceId)
-                        .then(effect => this.localVideo.setEffect(effect))
+                        .then(effect => localVideo.setEffect(effect))
                         .then(() => {
                             this.setVideoMuteStatus();
                             logger.log('Switched local video device while screen sharing and the video is unmuted');
@@ -2340,7 +2324,7 @@ export default {
                 // that can be applied on un-mute.
                 } else if (this.isSharingScreen && videoWasMuted) {
                     logger.log('Switched local video device: while screen sharing and the video is muted');
-                    const { height } = this.localVideo.track.getSettings();
+                    const { height } = localVideo.track.getSettings();
 
                     this._updateVideoDeviceId();
 
@@ -2426,13 +2410,15 @@ export default {
                     return this.useAudioStream(stream);
                 })
                 .then(() => {
-                    if (this.localAudio && hasDefaultMicChanged) {
+                    const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
+
+                    if (localAudio && hasDefaultMicChanged) {
                         // workaround for the default device to be shown as selected in the
                         // settings even when the real device id was passed to gUM because of the
                         // above mentioned chrome bug.
-                        this.localAudio._realDeviceId = this.localAudio.deviceId = 'default';
+                        localAudio._realDeviceId = localAudio.deviceId = 'default';
                     }
-                    logger.log(`switched local audio device: ${this.localAudio?.getDeviceId()}`);
+                    logger.log(`switched local audio device: ${localAudio?.getDeviceId()}`);
 
                     this._updateAudioDeviceId();
                 })
@@ -2498,9 +2484,6 @@ export default {
                 JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED,
                 this.deviceChangeListener);
         }
-
-        this.localVideo = null;
-        this.localAudio = null;
     },
 
     /**
@@ -2563,10 +2546,11 @@ export default {
      * @private
      */
     _updateVideoDeviceId() {
-        if (this.localVideo
-            && this.localVideo.videoType === 'camera') {
+        const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
+
+        if (localVideo && localVideo.videoType === 'camera') {
             APP.store.dispatch(updateSettings({
-                cameraDeviceId: this.localVideo.getDeviceId()
+                cameraDeviceId: localVideo.getDeviceId()
             }));
         }
 
@@ -2584,9 +2568,11 @@ export default {
      * @private
      */
     _updateAudioDeviceId() {
-        if (this.localAudio) {
+        const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
+
+        if (localAudio) {
             APP.store.dispatch(updateSettings({
-                micDeviceId: this.localAudio.getDeviceId()
+                micDeviceId: localAudio.getDeviceId()
             }));
         }
     },
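Illustration (not part of the commit): how _updateAudioDeviceId now derives the device id from the store-held track and writes it back through updateSettings. The store object, selector body, and action creator below are simplified stand-ins for illustration only.

// Minimal store stub: one local audio track plus a settings bag.
const store = {
    state: {
        'features/base/tracks': [
            { local: true, mediaType: 'audio', jitsiTrack: { getDeviceId: () => 'mic-42' } }
        ],
        settings: {}
    },
    getState() { return this.state; },
    dispatch(action) { Object.assign(this.state.settings, action.settings); }
};

// Assumed selector and action-creator shapes.
const getLocalJitsiAudioTrack = s =>
    s['features/base/tracks'].find(t => t.local && t.mediaType === 'audio')?.jitsiTrack ?? null;
const updateSettings = settings => ({ type: 'SETTINGS_UPDATED', settings });

function _updateAudioDeviceId() {
    const localAudio = getLocalJitsiAudioTrack(store.getState());

    if (localAudio) {
        store.dispatch(updateSettings({ micDeviceId: localAudio.getDeviceId() }));
    }
}

_updateAudioDeviceId();
console.log(store.getState().settings); // { micDeviceId: 'mic-42' }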
@@ -2600,6 +2586,8 @@ export default {
      */
     _onDeviceListChanged(devices) {
         const oldDevices = APP.store.getState()['features/base/devices'].availableDevices;
+        const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
+        const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
 
         APP.store.dispatch(updateDeviceList(devices));
 
@@ -2607,8 +2595,8 @@ export default {
             = mediaDeviceHelper.getNewMediaDevicesAfterDeviceListChanged(
                 devices,
                 this.isSharingScreen,
-                this.localVideo,
-                this.localAudio);
+                localVideo,
+                localAudio);
         const promises = [];
         const audioWasMuted = this.isLocalAudioMuted();
         const videoWasMuted = this.isLocalVideoMuted();
@@ -2631,12 +2619,12 @@ export default {
         // simpler):
         // If the default device is changed we need to first stop the local streams and then call GUM. Otherwise GUM
         // will return a stream using the old default device.
-        if (requestedInput.audio && this.localAudio) {
-            this.localAudio.stopStream();
+        if (requestedInput.audio && localAudio) {
+            localAudio.stopStream();
         }
 
-        if (requestedInput.video && this.localVideo) {
-            this.localVideo.stopStream();
+        if (requestedInput.video && localVideo) {
+            localVideo.stopStream();
         }
 
         // Let's handle unknown/non-preferred devices
@@ -2716,15 +2704,16 @@ export default {
                                         = mediaType === 'audio'
                                             ? this.useAudioStream.bind(this)
                                             : this.useVideoStream.bind(this);
+                                    const track = tracks.find(t => t.getType() === mediaType) || null;
 
                                     // Use the new stream or null if we failed to obtain it.
-                                    return useStream(tracks.find(track => track.getType() === mediaType) || null)
+                                    return useStream(track)
                                         .then(() => {
-                                            if (this.localAudio && hasDefaultMicChanged) {
+                                            if (track?.isAudioTrack() && hasDefaultMicChanged) {
                                                 // workaround for the default device to be shown as selected in the
                                                 // settings even when the real device id was passed to gUM because of
                                                 // the above mentioned chrome bug.
-                                                this.localAudio._realDeviceId = this.localAudio.deviceId = 'default';
+                                                track._realDeviceId = track.deviceId = 'default';
                                             }
                                             mediaType === 'audio'
                                                 ? this._updateAudioDeviceId()
@@ -2764,14 +2753,13 @@ export default {
      * Determines whether or not the audio button should be enabled.
      */
     updateAudioIconEnabled() {
-        const audioMediaDevices
-            = APP.store.getState()['features/base/devices'].availableDevices.audioInput;
-        const audioDeviceCount
-            = audioMediaDevices ? audioMediaDevices.length : 0;
+        const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
+        const audioMediaDevices = APP.store.getState()['features/base/devices'].availableDevices.audioInput;
+        const audioDeviceCount = audioMediaDevices ? audioMediaDevices.length : 0;
 
         // The audio functionality is considered available if there are any
         // audio devices detected or if the local audio stream already exists.
-        const available = audioDeviceCount > 0 || Boolean(this.localAudio);
+        const available = audioDeviceCount > 0 || Boolean(localAudio);
 
         APP.store.dispatch(setAudioAvailable(available));
         APP.API.notifyAudioAvailabilityChanged(available);
@@ -2785,13 +2773,14 @@ export default {
             = APP.store.getState()['features/base/devices'].availableDevices.videoInput;
         const videoDeviceCount
             = videoMediaDevices ? videoMediaDevices.length : 0;
+        const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
 
         // The video functionality is considered available if there are any
         // video devices detected or if there is local video stream already
         // active which could be either screensharing stream or a video track
        // created before the permissions were rejected (through browser
         // config).
-        const available = videoDeviceCount > 0 || Boolean(this.localVideo);
+        const available = videoDeviceCount > 0 || Boolean(localVideo);
 
         APP.store.dispatch(setVideoAvailable(available));
         APP.API.notifyVideoAvailabilityChanged(available);
@@ -2809,8 +2798,6 @@ export default {
 
         APP.store.dispatch(destroyLocalTracks());
         this._localTracksInitialized = false;
-        this.localVideo = null;
-        this.localAudio = null;
 
         // Remove unnecessary event listeners from firing callbacks.
         if (this.deviceChangeListener) {
