Browse files

fix(RTC) drop the "old gUM" flow

dev1
Saúl Ibarra Corretgé 4 years ago
parent
commit
ad5692d6aa

+ 2
- 76
JitsiMeetJS.js View full file

@@ -34,7 +34,6 @@ import ScriptUtil from './modules/util/ScriptUtil';
34 34
 import * as VideoSIPGWConstants from './modules/videosipgw/VideoSIPGWConstants';
35 35
 import AudioMixer from './modules/webaudio/AudioMixer';
36 36
 import * as MediaType from './service/RTC/MediaType';
37
-import Resolutions from './service/RTC/Resolutions';
38 37
 import * as ConnectionQualityEvents
39 38
     from './service/connectivity/ConnectionQualityEvents';
40 39
 import * as E2ePingEvents from './service/e2eping/E2ePingEvents';
@@ -48,38 +47,6 @@ const logger = Logger.getLogger(__filename);
48 47
  */
49 48
 const USER_MEDIA_SLOW_PROMISE_TIMEOUT = 1000;
50 49
 
51
-/**
52
- * Gets the next lowest desirable resolution to try for a camera. If the given
53
- * resolution is already the lowest acceptable resolution, returns {@code null}.
54
- *
55
- * @param resolution the current resolution
56
- * @return the next lowest resolution from the given one, or {@code null} if it
57
- * is already the lowest acceptable resolution.
58
- */
59
-function getLowerResolution(resolution) {
60
-    if (!Resolutions[resolution]) {
61
-        return null;
62
-    }
63
-    const order = Resolutions[resolution].order;
64
-    let res = null;
65
-    let resName = null;
66
-
67
-    Object.keys(Resolutions).forEach(r => {
68
-        const value = Resolutions[r];
69
-
70
-        if (!res || (res.order < value.order && value.order < order)) {
71
-            resName = r;
72
-            res = value;
73
-        }
74
-    });
75
-
76
-    if (resName === resolution) {
77
-        resName = null;
78
-    }
79
-
80
-    return resName;
81
-}
82
-
83 50
 /**
84 51
 * Extracts from an 'options' object with a specific format (TODO what IS the
85 52
  * format?) the attributes which are to be logged in analytics events.
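
The stepwise fallback removed above is no longer needed because the remaining gUM flow asks for resolutions as non-mandatory "ideal" constraints, so the browser picks the closest mode the camera supports instead of rejecting the call. A minimal illustration using the plain browser API (not lib-jitsi-meet code):

navigator.mediaDevices
    .getUserMedia({ video: { width: { ideal: 1280 }, height: { ideal: 720 } } })
    .then(stream => {
        // The browser settles on the nearest supported mode rather than failing.
        console.log(stream.getVideoTracks()[0].getSettings().height);
    });
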
@@ -327,13 +294,11 @@ export default _mergeNamespaceAndModule({
327 294
      * will finish the execution with rejected Promise.
328 295
      *
329 296
      * @deprecated old firePermissionPromptIsShownEvent
330
-     * @param originalOptions - internal use only, to be able to store the
331
-     * originally requested options.
332 297
      * @returns {Promise.<{Array.<JitsiTrack>}, JitsiConferenceError>} A promise
333 298
      * that returns an array of created JitsiTracks if resolved, or a
334 299
      * JitsiConferenceError if rejected.
335 300
      */
336
-    createLocalTracks(options = {}, oldfirePermissionPromptIsShownEvent, originalOptions) {
301
+    createLocalTracks(options = {}, oldfirePermissionPromptIsShownEvent) {
337 302
         let promiseFulfilled = false;
338 303
 
339 304
         const { firePermissionPromptIsShownEvent, fireSlowPromiseEvent, ...restOptions } = options;
@@ -413,46 +378,7 @@ export default _mergeNamespaceAndModule({
413 378
             .catch(error => {
414 379
                 promiseFulfilled = true;
415 380
 
416
-                if (error.name === JitsiTrackErrors.UNSUPPORTED_RESOLUTION
417
-                    && !browser.usesNewGumFlow()) {
418
-                    const oldResolution = restOptions.resolution || '720';
419
-                    const newResolution = getLowerResolution(oldResolution);
420
-
421
-                    if (newResolution !== null) {
422
-                        restOptions.resolution = newResolution;
423
-
424
-                        logger.debug(
425
-                            'Retry createLocalTracks with resolution',
426
-                            newResolution);
427
-
428
-                        Statistics.sendAnalytics(createGetUserMediaEvent(
429
-                            'warning',
430
-                            {
431
-                                'old_resolution': oldResolution,
432
-                                'new_resolution': newResolution,
433
-                                reason: 'unsupported resolution'
434
-                            }));
435
-
436
-                        return this.createLocalTracks(
437
-                            restOptions,
438
-                            originalOptions || Object.assign({}, restOptions));
439
-                    }
440
-
441
-                    // We tried everything. If there is a mandatory device id,
442
-                    // remove it and let gum find a device to use.
443
-                    if (originalOptions
444
-                        && error.gum.constraints
445
-                        && error.gum.constraints.video
446
-                        && error.gum.constraints.video.mandatory
447
-                        && error.gum.constraints.video.mandatory.sourceId) {
448
-                        originalOptions.cameraDeviceId = undefined;
449
-
450
-                        return this.createLocalTracks(originalOptions);
451
-                    }
452
-                }
453
-
454
-                if (error.name
455
-                        === JitsiTrackErrors.SCREENSHARING_USER_CANCELED) {
381
+                if (error.name === JitsiTrackErrors.SCREENSHARING_USER_CANCELED) {
456 382
                     // User cancelled action is not really an error, so only
457 383
                     // log it as an event to avoid having conference classified
458 384
                     // as partially failed
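
Because the library no longer retries at a lower resolution on UNSUPPORTED_RESOLUTION, an application that still wants that behaviour can implement it around createLocalTracks. A hedged caller-side sketch; FALLBACK_RESOLUTIONS and the import path are application-side assumptions, not part of this change:

import JitsiMeetJS from 'lib-jitsi-meet';

const FALLBACK_RESOLUTIONS = [ '720', '480', '360' ]; // hypothetical, app-defined ordering

function createCameraTracks(index = 0) {
    return JitsiMeetJS.createLocalTracks({
        devices: [ 'video' ],
        resolution: FALLBACK_RESOLUTIONS[index]
    }).catch(error => {
        if (error.name === JitsiMeetJS.errors.track.UNSUPPORTED_RESOLUTION
                && index < FALLBACK_RESOLUTIONS.length - 1) {
            // Retry one step down, mirroring the logic removed from the library above.
            return createCameraTracks(index + 1);
        }

        throw error;
    });
}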

+ 23
- 52
modules/RTC/JitsiLocalTrack.js View full file

@@ -90,29 +90,21 @@ export default class JitsiLocalTrack extends JitsiTrack {
90 90
         this.sourceId = sourceId;
91 91
         this.sourceType = sourceType;
92 92
 
93
-        if (browser.usesNewGumFlow()) {
94
-            // Get the resolution from the track itself because it cannot be
95
-            // certain which resolution webrtc has fallen back to using.
96
-            this.resolution = track.getSettings().height;
97
-            this.maxEnabledResolution = resolution;
98
-
99
-            // Cache the constraints of the track in case of any this track
100
-            // model needs to call getUserMedia again, such as when unmuting.
101
-            this._constraints = track.getConstraints();
102
-
103
-            // Safari returns an empty constraints object, construct the constraints using getSettings.
104
-            if (!Object.keys(this._constraints).length && videoType === VideoType.CAMERA) {
105
-                this._constraints = {
106
-                    height: track.getSettings().height,
107
-                    width: track.getSettings().width
108
-                };
109
-            }
110
-        } else {
111
-            // FIXME Currently, Firefox is ignoring our constraints about
112
-            // resolutions so we do not store it, to avoid wrong reporting of
113
-            // local track resolution.
114
-            this.resolution = browser.isFirefox() ? null : resolution;
115
-            this.maxEnabledResolution = this.resolution;
93
+        // Get the resolution from the track itself because it cannot be
94
+        // certain which resolution webrtc has fallen back to using.
95
+        this.resolution = track.getSettings().height;
96
+        this.maxEnabledResolution = resolution;
97
+
98
+        // Cache the constraints of the track in case this track
99
+        // model needs to call getUserMedia again, such as when unmuting.
100
+        this._constraints = track.getConstraints();
101
+
102
+        // Safari returns an empty constraints object, construct the constraints using getSettings.
103
+        if (!Object.keys(this._constraints).length && videoType === VideoType.CAMERA) {
104
+            this._constraints = {
105
+                height: track.getSettings().height,
106
+                width: track.getSettings().width
107
+            };
116 108
         }
117 109
 
118 110
         this.deviceId = deviceId;
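
The constructor above now always reads the effective capture settings straight off the native MediaStreamTrack. For reference, that is standard browser behaviour rather than anything library-specific (illustrative snippet, not part of the diff):

async function logCameraSettings() {
    const stream = await navigator.mediaDevices.getUserMedia({ video: { height: { ideal: 720 } } });
    const [ videoTrack ] = stream.getVideoTracks();

    console.log(videoTrack.getSettings().height); // the height the browser actually settled on
    console.log(videoTrack.getConstraints());     // may be {} on Safari, hence the getSettings() fallback above
}
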
@@ -552,30 +544,16 @@ export default class JitsiLocalTrack extends JitsiTrack {
552 544
                 facingMode: this.getCameraFacingMode()
553 545
             };
554 546
 
555
-            if (browser.usesNewGumFlow()) {
556
-                promise
557
-                    = RTCUtils.newObtainAudioAndVideoPermissions(Object.assign(
558
-                        {},
559
-                        streamOptions,
560
-                        { constraints: { video: this._constraints } }));
561
-            } else {
562
-                if (this.resolution) {
563
-                    streamOptions.resolution = this.resolution;
564
-                }
565
-
566
-                promise
567
-                    = RTCUtils.obtainAudioAndVideoPermissions(streamOptions);
568
-            }
547
+            promise
548
+                = RTCUtils.obtainAudioAndVideoPermissions(Object.assign(
549
+                    {},
550
+                    streamOptions,
551
+                    { constraints: { video: this._constraints } }));
569 552
 
570 553
             promise = promise.then(streamsInfo => {
571 554
                 // The track kind for presenter track is video as well.
572 555
                 const mediaType = this.getType() === MediaType.PRESENTER ? MediaType.VIDEO : this.getType();
573
-                const streamInfo
574
-                    = browser.usesNewGumFlow()
575
-                        ? streamsInfo.find(
576
-                            info => info.track.kind === mediaType)
577
-                        : streamsInfo.find(
578
-                            info => info.mediaType === mediaType);
556
+                const streamInfo = streamsInfo.find(info => info.track.kind === mediaType);
579 557
 
580 558
                 if (streamInfo) {
581 559
                     this._setStream(streamInfo.stream);
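
The lookup above keys off the native track kind, which is always either 'audio' or 'video'; that is why a presenter track is mapped to MediaType.VIDEO before matching. Illustrative only:

navigator.mediaDevices.getUserMedia({ audio: true, video: true }).then(stream => {
    console.log(stream.getAudioTracks()[0].kind); // 'audio'
    console.log(stream.getVideoTracks()[0].kind); // 'video' (there is no separate 'presenter' kind)
});
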
@@ -822,15 +800,8 @@ export default class JitsiLocalTrack extends JitsiTrack {
822 800
             // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
823 801
             // for Firefox. Even if a browser implements getSettings() already,
824 802
             // it might still not return anything for 'facingMode'.
825
-            let trackSettings;
826
-
827
-            try {
828
-                trackSettings = this.track.getSettings();
829
-            } catch (e) {
830
-                // XXX React-native-webrtc, for example, defines
831
-                // MediaStreamTrack#getSettings() but the implementation throws
832
-                // a "Not implemented" Error.
833
-            }
803
+            const trackSettings = this.track.getSettings?.();
804
+
834 805
             if (trackSettings && 'facingMode' in trackSettings) {
835 806
                 return trackSettings.facingMode;
836 807
             }
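
Note that the optional-chaining call above only guards against getSettings being absent; unlike the removed try/catch it does not swallow an implementation that throws, such as the react-native-webrtc case mentioned in the removed comment. A small sketch of the difference, assuming a MediaStreamTrack named track:

// Presence check only: yields undefined when the method does not exist...
const settings = track.getSettings?.();

// ...whereas an implementation that throws still needs an explicit guard:
let safeSettings;

try {
    safeSettings = track.getSettings?.();
} catch (e) {
    // e.g. a "Not implemented" error from an environment that defines the method but does not implement it
}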

+ 5
- 44
modules/RTC/RTC.js View full file

@@ -33,36 +33,6 @@ let peerConnectionIdCounter = 0;
33 33
  */
34 34
 let rtcTrackIdCounter = 0;
35 35
 
36
-/**
37
- *
38
- * @param tracksInfo
39
- * @param options
40
- */
41
-function createLocalTracks(tracksInfo, options) {
42
-    const newTracks = [];
43
-    let deviceId = null;
44
-
45
-    tracksInfo.forEach(trackInfo => {
46
-        if (trackInfo.mediaType === MediaType.AUDIO) {
47
-            deviceId = options.micDeviceId;
48
-        } else if (trackInfo.videoType === VideoType.CAMERA) {
49
-            deviceId = options.cameraDeviceId;
50
-        }
51
-        rtcTrackIdCounter = safeCounterIncrement(rtcTrackIdCounter);
52
-        const localTrack = new JitsiLocalTrack({
53
-            ...trackInfo,
54
-            deviceId,
55
-            facingMode: options.facingMode,
56
-            rtcId: rtcTrackIdCounter,
57
-            effects: options.effects
58
-        });
59
-
60
-        newTracks.push(localTrack);
61
-    });
62
-
63
-    return newTracks;
64
-}
65
-
66 36
 /**
67 37
  * Creates {@code JitsiLocalTrack} instances from the passed in meta information
68 38
 * about MediaTracks.
@@ -78,7 +48,7 @@ function createLocalTracks(tracksInfo, options) {
78 48
  *     effects: Array of effect types
79 49
  * }}
80 50
  */
81
-function _newCreateLocalTracks(mediaStreamMetaData = []) {
51
+function _createLocalTracks(mediaStreamMetaData = []) {
82 52
     return mediaStreamMetaData.map(metaData => {
83 53
         const {
84 54
             sourceId,
@@ -223,8 +193,8 @@ export default class RTC extends Listenable {
223 193
      * @param {Array<Object>} tracksInfo
224 194
      * @returns {Array<JitsiLocalTrack>}
225 195
      */
226
-    static newCreateLocalTracks(tracksInfo) {
227
-        return _newCreateLocalTracks(tracksInfo);
196
+    static createLocalTracks(tracksInfo) {
197
+        return _createLocalTracks(tracksInfo);
228 198
     }
229 199
 
230 200
     /**
@@ -237,18 +207,9 @@ export default class RTC extends Listenable {
237 207
      * @returns {*} Promise object that will receive the new JitsiTracks
238 208
      */
239 209
     static obtainAudioAndVideoPermissions(options) {
240
-        const usesNewGumFlow = browser.usesNewGumFlow();
241
-        const obtainMediaPromise = usesNewGumFlow
242
-            ? RTCUtils.newObtainAudioAndVideoPermissions(options)
243
-            : RTCUtils.obtainAudioAndVideoPermissions(options);
244
-
245
-        return obtainMediaPromise.then(tracksInfo => {
246
-            if (usesNewGumFlow) {
247
-                return _newCreateLocalTracks(tracksInfo);
248
-            }
210
+        return RTCUtils.obtainAudioAndVideoPermissions(options)
211
+            .then(tracksInfo => _createLocalTracks(tracksInfo));
249 212
 
250
-            return createLocalTracks(tracksInfo, options);
251
-        });
252 213
     }
253 214
 
254 215
     /**
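
With the branching removed, RTC.obtainAudioAndVideoPermissions has a single path on every platform: RTCUtils produces the stream metadata and _createLocalTracks wraps each entry in a JitsiLocalTrack. A hedged usage sketch:

RTC.obtainAudioAndVideoPermissions({ devices: [ 'audio', 'video' ] })
    .then(localTracks => {
        // localTracks is an Array<JitsiLocalTrack> built by _createLocalTracks above.
        localTracks.forEach(track => console.log(track.getType(), track.getDeviceId()));
    });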

+ 32
- 603
modules/RTC/RTCUtils.js View full file

@@ -13,7 +13,6 @@ import clonedeep from 'lodash.clonedeep';
13 13
 import JitsiTrackError from '../../JitsiTrackError';
14 14
 import * as JitsiTrackErrors from '../../JitsiTrackErrors';
15 15
 import CameraFacingMode from '../../service/RTC/CameraFacingMode';
16
-import * as MediaType from '../../service/RTC/MediaType';
17 16
 import RTCEvents from '../../service/RTC/RTCEvents';
18 17
 import Resolutions from '../../service/RTC/Resolutions';
19 18
 import VideoType from '../../service/RTC/VideoType';
@@ -39,19 +38,6 @@ const eventEmitter = new EventEmitter();
39 38
 
40 39
 const AVAILABLE_DEVICES_POLL_INTERVAL_TIME = 3000; // ms
41 40
 
42
-/**
43
- * Default resolution to obtain for video tracks if no resolution is specified.
44
- * This default is used for old gum flow only, as new gum flow uses
45
- * {@link DEFAULT_CONSTRAINTS}.
46
- */
47
-const OLD_GUM_DEFAULT_RESOLUTION = 720;
48
-
49
-/**
50
- * Default devices to obtain when no specific devices are specified. This
51
- * default is used for old gum flow only.
52
- */
53
-const OLD_GUM_DEFAULT_DEVICES = [ 'audio', 'video' ];
54
-
55 41
 /**
56 42
  * Default MediaStreamConstraints to use for calls to getUserMedia.
57 43
  *
@@ -72,11 +58,6 @@ const DEFAULT_CONSTRAINTS = {
72 58
     }
73 59
 };
74 60
 
75
-/**
76
- * The default frame rate for Screen Sharing.
77
- */
78
-export const SS_DEFAULT_FRAME_RATE = 5;
79
-
80 61
 // Currently audio output device change is supported only in Chrome and
81 62
 // default output always has 'default' device ID
82 63
 let audioOutputDeviceId = 'default'; // default device
@@ -95,9 +76,6 @@ let disableNS = false;
95 76
 // Disables Automatic Gain Control
96 77
 let disableAGC = false;
97 78
 
98
-// Disables Highpass Filter
99
-let disableHPF = false;
100
-
101 79
 // Enables stereo.
102 80
 let stereo = null;
103 81
 
@@ -115,246 +93,6 @@ function emptyFuncton() {
115 93
     // no-op
116 94
 }
117 95
 
118
-/**
119
- *
120
- * @param constraints
121
- * @param isNewStyleConstraintsSupported
122
- * @param resolution
123
- */
124
-function setResolutionConstraints(
125
-        constraints,
126
-        isNewStyleConstraintsSupported,
127
-        resolution) {
128
-    if (Resolutions[resolution]) {
129
-        if (isNewStyleConstraintsSupported) {
130
-            constraints.video.width = {
131
-                ideal: Resolutions[resolution].width
132
-            };
133
-            constraints.video.height = {
134
-                ideal: Resolutions[resolution].height
135
-            };
136
-        }
137
-
138
-        constraints.video.mandatory.minWidth = Resolutions[resolution].width;
139
-        constraints.video.mandatory.minHeight = Resolutions[resolution].height;
140
-    }
141
-
142
-    if (constraints.video.mandatory.minWidth) {
143
-        constraints.video.mandatory.maxWidth
144
-            = constraints.video.mandatory.minWidth;
145
-    }
146
-
147
-    if (constraints.video.mandatory.minHeight) {
148
-        constraints.video.mandatory.maxHeight
149
-            = constraints.video.mandatory.minHeight;
150
-    }
151
-}
152
-
153
-/**
154
- * @param {string[]} um required user media types
155
- *
156
- * @param {Object} [options={}] optional parameters
157
- * @param {string} options.resolution
158
- * @param {number} options.bandwidth
159
- * @param {number} options.fps
160
- * @param {string} options.desktopStream
161
- * @param {string} options.cameraDeviceId
162
- * @param {string} options.micDeviceId
163
- * @param {CameraFacingMode} options.facingMode
164
- * @param {bool} firefox_fake_device
165
- * @param {Object} options.frameRate - used only for dekstop sharing.
166
- * @param {Object} options.frameRate.min - Minimum fps
167
- * @param {Object} options.frameRate.max - Maximum fps
168
- * @param {bool}   options.screenShareAudio - Used by electron clients to
169
- * enable system audio screen sharing.
170
- */
171
-function getConstraints(um, options = {}) {
172
-    const constraints = {
173
-        audio: false,
174
-        video: false
175
-    };
176
-
177
-    // Don't mix new and old style settings for Chromium as this leads
178
-    // to TypeError in new Chromium versions. @see
179
-    // https://bugs.chromium.org/p/chromium/issues/detail?id=614716
180
-    // This is a temporary solution, in future we will fully split old and
181
-    // new style constraints when new versions of Chromium and Firefox will
182
-    // have stable support of new constraints format. For more information
183
-    // @see https://github.com/jitsi/lib-jitsi-meet/pull/136
184
-    const isNewStyleConstraintsSupported
185
-        = browser.isFirefox()
186
-            || browser.isWebKitBased()
187
-            || browser.isReactNative();
188
-
189
-    if (um.indexOf('video') >= 0) {
190
-        // same behaviour as true
191
-        constraints.video = { mandatory: {},
192
-            optional: [] };
193
-
194
-        if (options.cameraDeviceId) {
195
-            if (isNewStyleConstraintsSupported) {
196
-                // New style of setting device id.
197
-                constraints.video.deviceId = options.cameraDeviceId;
198
-            }
199
-
200
-            // Old style.
201
-            constraints.video.mandatory.sourceId = options.cameraDeviceId;
202
-        } else {
203
-            // Prefer the front i.e. user-facing camera (to the back i.e.
204
-            // environment-facing camera, for example).
205
-            // TODO: Maybe use "exact" syntax if options.facingMode is defined,
206
-            // but this probably needs to be decided when updating other
207
-            // constraints, as we currently don't use "exact" syntax anywhere.
208
-            const facingMode = options.facingMode || CameraFacingMode.USER;
209
-
210
-            if (isNewStyleConstraintsSupported) {
211
-                constraints.video.facingMode = facingMode;
212
-            }
213
-            constraints.video.optional.push({
214
-                facingMode
215
-            });
216
-        }
217
-
218
-        if (options.minFps || options.maxFps || options.fps) {
219
-            // for some cameras it might be necessary to request 30fps
220
-            // so they choose 30fps mjpg over 10fps yuy2
221
-            if (options.minFps || options.fps) {
222
-                // Fall back to options.fps for backwards compatibility
223
-                options.minFps = options.minFps || options.fps;
224
-                constraints.video.mandatory.minFrameRate = options.minFps;
225
-            }
226
-            if (options.maxFps) {
227
-                constraints.video.mandatory.maxFrameRate = options.maxFps;
228
-            }
229
-        }
230
-
231
-        setResolutionConstraints(
232
-            constraints, isNewStyleConstraintsSupported, options.resolution);
233
-    }
234
-    if (um.indexOf('audio') >= 0) {
235
-        if (browser.isReactNative()) {
236
-            // The react-native-webrtc project that we're currently using
237
-            // expects the audio constraint to be a boolean.
238
-            constraints.audio = true;
239
-        } else if (browser.isFirefox()) {
240
-            if (options.micDeviceId) {
241
-                constraints.audio = {
242
-                    mandatory: {},
243
-                    deviceId: options.micDeviceId, // new style
244
-                    optional: [ {
245
-                        sourceId: options.micDeviceId // old style
246
-                    } ] };
247
-            } else {
248
-                constraints.audio = true;
249
-            }
250
-        } else {
251
-            // same behaviour as true
252
-            constraints.audio = { mandatory: {},
253
-                optional: [] };
254
-            if (options.micDeviceId) {
255
-                if (isNewStyleConstraintsSupported) {
256
-                    // New style of setting device id.
257
-                    constraints.audio.deviceId = options.micDeviceId;
258
-                }
259
-
260
-                // Old style.
261
-                constraints.audio.optional.push({
262
-                    sourceId: options.micDeviceId
263
-                });
264
-            }
265
-
266
-            // if it is good enough for hangouts...
267
-            constraints.audio.optional.push(
268
-                { echoCancellation: !disableAEC && !disableAP },
269
-                { googEchoCancellation: !disableAEC && !disableAP },
270
-                { googAutoGainControl: !disableAGC && !disableAP },
271
-                { googNoiseSuppression: !disableNS && !disableAP },
272
-                { googHighpassFilter: !disableHPF && !disableAP },
273
-                { googNoiseSuppression2: !disableNS && !disableAP },
274
-                { googEchoCancellation2: !disableAEC && !disableAP },
275
-                { googAutoGainControl2: !disableAGC && !disableAP }
276
-            );
277
-        }
278
-    }
279
-    if (um.indexOf('screen') >= 0) {
280
-        if (browser.isChrome()) {
281
-            constraints.video = {
282
-                mandatory: getSSConstraints({
283
-                    ...options,
284
-                    source: 'screen'
285
-                }),
286
-                optional: []
287
-            };
288
-
289
-        } else if (browser.isFirefox()) {
290
-            constraints.video = {
291
-                mozMediaSource: 'window',
292
-                mediaSource: 'window',
293
-                frameRate: options.frameRate || {
294
-                    min: SS_DEFAULT_FRAME_RATE,
295
-                    max: SS_DEFAULT_FRAME_RATE
296
-                }
297
-            };
298
-
299
-        } else {
300
-            const errmsg
301
-                = '\'screen\' WebRTC media source is supported only in Chrome'
302
-                    + ' and Firefox';
303
-
304
-            GlobalOnErrorHandler.callErrorHandler(new Error(errmsg));
305
-            logger.error(errmsg);
306
-        }
307
-    }
308
-    if (um.indexOf('desktop') >= 0) {
309
-        constraints.video = {
310
-            mandatory: getSSConstraints({
311
-                ...options,
312
-                source: 'desktop'
313
-            }),
314
-            optional: []
315
-        };
316
-
317
-        // Audio screen sharing for electron only works for screen type devices.
318
-        // i.e. when the user shares the whole desktop.
319
-        if (browser.isElectron() && options.screenShareAudio
320
-            && (options.desktopStream.indexOf('screen') >= 0)) {
321
-
322
-            // Provide constraints as described by the electron desktop capturer
323
-            // documentation here:
324
-            // https://www.electronjs.org/docs/api/desktop-capturer
325
-            // Note. The documentation specifies that chromeMediaSourceId should not be present
326
-            // which, in the case a users has multiple monitors, leads to them being shared all
327
-            // at once. However we tested with chromeMediaSourceId present and it seems to be
328
-            // working properly and also takes care of the previously mentioned issue.
329
-            constraints.audio = { mandatory: {
330
-                chromeMediaSource: constraints.video.mandatory.chromeMediaSource
331
-            } };
332
-        }
333
-    }
334
-
335
-    if (options.bandwidth) {
336
-        if (!constraints.video) {
337
-            // same behaviour as true
338
-            constraints.video = { mandatory: {},
339
-                optional: [] };
340
-        }
341
-        constraints.video.optional.push({ bandwidth: options.bandwidth });
342
-    }
343
-
344
-    // we turn audio for both audio and video tracks, the fake audio & video
345
-    // seems to work only when enabled in one getUserMedia call, we cannot get
346
-    // fake audio separate by fake video this later can be a problem with some
347
-    // of the tests
348
-    if (browser.isFirefox() && options.firefox_fake_device) {
349
-        // seems to be fixed now, removing this experimental fix, as having
350
-        // multiple audio tracks brake the tests
351
-        // constraints.audio = true;
352
-        constraints.fake = true;
353
-    }
354
-
355
-    return constraints;
356
-}
357
-
358 96
 /**
359 97
  * Creates a constraints object to be passed into a call to getUserMedia.
360 98
  *
@@ -377,12 +115,22 @@ function getConstraints(um, options = {}) {
377 115
  * @private
378 116
  * @returns {Object}
379 117
  */
380
-function newGetConstraints(um = [], options = {}) {
118
+function getConstraints(um = [], options = {}) {
381 119
     // Create a deep copy of the constraints to avoid any modification of
382 120
     // the passed in constraints object.
383 121
     const constraints = clonedeep(options.constraints || DEFAULT_CONSTRAINTS);
384 122
 
385 123
     if (um.indexOf('video') >= 0) {
124
+        // The "resolution" option is a shortcut and takes precendence.
125
+        if (Resolutions[options.resolution]) {
126
+            const r = Resolutions[options.resolution];
127
+
128
+            constraints.video = {
129
+                height: { ideal: r.height },
130
+                width: { ideal: r.width }
131
+            };
132
+        }
133
+
386 134
         if (!constraints.video) {
387 135
             constraints.video = {};
388 136
         }
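
A worked example of the new shortcut, assuming the usual Resolutions table entry where '1080' maps to 1920x1080:

const constraints = getConstraints([ 'video' ], { resolution: '1080' });

// At this point in the function constraints.video is
// { height: { ideal: 1080 }, width: { ideal: 1920 } },
// replacing whatever video entry DEFAULT_CONSTRAINTS or options.constraints carried.
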
@@ -393,12 +141,12 @@ function newGetConstraints(um = [], options = {}) {
393 141
         // TODO: remove this hack when the bug fix is available on Mojave, Sierra and High Sierra.
394 142
         if (browser.isWebKitBased()) {
395 143
             if (constraints.video.height && constraints.video.height.ideal) {
396
-                constraints.video.height = { ideal: clonedeep(constraints.video.height.ideal) };
144
+                constraints.video.height = { ideal: constraints.video.height.ideal };
397 145
             } else {
398 146
                 logger.warn('Ideal camera height missing, camera may not start properly');
399 147
             }
400 148
             if (constraints.video.width && constraints.video.width.ideal) {
401
-                constraints.video.width = { ideal: clonedeep(constraints.video.width.ideal) };
149
+                constraints.video.width = { ideal: constraints.video.width.ideal };
402 150
             } else {
403 151
                 logger.warn('Ideal camera width missing, camera may not start properly');
404 152
             }
@@ -433,60 +181,6 @@ function newGetConstraints(um = [], options = {}) {
433 181
         constraints.audio = false;
434 182
     }
435 183
 
436
-    if (um.indexOf('desktop') >= 0) {
437
-        if (!constraints.video || typeof constraints.video === 'boolean') {
438
-            constraints.video = {};
439
-        }
440
-
441
-        constraints.video = {
442
-            mandatory: getSSConstraints({
443
-                ...options,
444
-                source: 'desktop'
445
-            })
446
-        };
447
-    }
448
-
449
-    return constraints;
450
-}
451
-
452
-/**
453
- * Generates GUM constraints for screen sharing.
454
- *
455
- * @param {Object} options - The options passed to
456
- * <tt>obtainAudioAndVideoPermissions</tt>.
457
- * @returns {Object} - GUM constraints.
458
- *
459
- * TODO: Currently only the new GUM flow and Chrome is using the method. We
460
- * should make it work for all use cases.
461
- */
462
-function getSSConstraints(options = {}) {
463
-    const {
464
-        desktopStream,
465
-        frameRate = {
466
-            min: SS_DEFAULT_FRAME_RATE,
467
-            max: SS_DEFAULT_FRAME_RATE
468
-        }
469
-    } = options;
470
-    const { max, min } = frameRate;
471
-
472
-    const constraints = {
473
-        chromeMediaSource: options.source,
474
-        maxWidth: window.screen.width,
475
-        maxHeight: window.screen.height
476
-    };
477
-
478
-    if (typeof min === 'number') {
479
-        constraints.minFrameRate = min;
480
-    }
481
-
482
-    if (typeof max === 'number') {
483
-        constraints.maxFrameRate = max;
484
-    }
485
-
486
-    if (typeof desktopStream !== 'undefined') {
487
-        constraints.chromeMediaSourceId = desktopStream;
488
-    }
489
-
490 184
     return constraints;
491 185
 }
492 186
 
@@ -615,92 +309,6 @@ function onMediaDevicesListChanged(devicesReceived) {
615 309
     eventEmitter.emit(RTCEvents.DEVICE_LIST_CHANGED, availableDevices);
616 310
 }
617 311
 
618
-/**
619
- * Handles the newly created Media Streams.
620
- * @param streams the new Media Streams
621
- * @param resolution the resolution of the video streams
622
- * @returns {*[]} object that describes the new streams
623
- */
624
-function handleLocalStream(streams, resolution) {
625
-    let audioStream, desktopStream, videoStream;
626
-    const res = [];
627
-
628
-    // XXX The function obtainAudioAndVideoPermissions has examined the type of
629
-    // the browser, its capabilities, etc. and has taken the decision whether to
630
-    // invoke getUserMedia per device (e.g. Firefox) or once for both audio and
631
-    // video (e.g. Chrome). In order to not duplicate the logic here, examine
632
-    // the specified streams and figure out what we've received based on
633
-    // obtainAudioAndVideoPermissions' decision.
634
-    if (streams) {
635
-        // As mentioned above, certian types of browser (e.g. Chrome) support
636
-        // (with a result which meets our requirements expressed bellow) calling
637
-        // getUserMedia once for both audio and video.
638
-        const audioVideo = streams.audioVideo;
639
-
640
-        if (audioVideo) {
641
-            const audioTracks = audioVideo.getAudioTracks();
642
-
643
-            if (audioTracks.length) {
644
-                audioStream = new MediaStream();
645
-                for (let i = 0; i < audioTracks.length; i++) {
646
-                    audioStream.addTrack(audioTracks[i]);
647
-                }
648
-            }
649
-
650
-            const videoTracks = audioVideo.getVideoTracks();
651
-
652
-            if (videoTracks.length) {
653
-                videoStream = new MediaStream();
654
-                for (let j = 0; j < videoTracks.length; j++) {
655
-                    videoStream.addTrack(videoTracks[j]);
656
-                }
657
-            }
658
-
659
-            audioVideo.release && audioVideo.release(false);
660
-        } else {
661
-            // On other types of browser (e.g. Firefox) we choose (namely,
662
-            // obtainAudioAndVideoPermissions) to call getUserMedia per device
663
-            // (type).
664
-            audioStream = streams.audio;
665
-            videoStream = streams.video;
666
-        }
667
-
668
-        desktopStream = streams.desktop;
669
-    }
670
-
671
-    if (desktopStream) {
672
-        const { stream, sourceId, sourceType } = desktopStream;
673
-
674
-        res.push({
675
-            stream,
676
-            sourceId,
677
-            sourceType,
678
-            track: stream.getVideoTracks()[0],
679
-            mediaType: MediaType.VIDEO,
680
-            videoType: VideoType.DESKTOP
681
-        });
682
-    }
683
-    if (audioStream) {
684
-        res.push({
685
-            stream: audioStream,
686
-            track: audioStream.getAudioTracks()[0],
687
-            mediaType: MediaType.AUDIO,
688
-            videoType: null
689
-        });
690
-    }
691
-    if (videoStream) {
692
-        res.push({
693
-            stream: videoStream,
694
-            track: videoStream.getVideoTracks()[0],
695
-            mediaType: MediaType.VIDEO,
696
-            videoType: VideoType.CAMERA,
697
-            resolution
698
-        });
699
-    }
700
-
701
-    return res;
702
-}
703
-
704 312
 /**
705 313
  *
706 314
  */
@@ -738,10 +346,6 @@ class RTCUtils extends Listenable {
738 346
             disableAGC = options.disableAGC;
739 347
             logger.info(`Disable AGC: ${disableAGC}`);
740 348
         }
741
-        if (typeof options.disableHPF === 'boolean') {
742
-            disableHPF = options.disableHPF;
743
-            logger.info(`Disable HPF: ${disableHPF}`);
744
-        }
745 349
         if (typeof options.audioQuality?.stereo === 'boolean') {
746 350
             stereo = options.audioQuality.stereo;
747 351
             logger.info(`Stereo: ${stereo}`);
@@ -750,19 +354,7 @@ class RTCUtils extends Listenable {
750 354
         window.clearInterval(availableDevicesPollTimer);
751 355
         availableDevicesPollTimer = undefined;
752 356
 
753
-        if (browser.usesNewGumFlow()) {
754
-            this.RTCPeerConnectionType = RTCPeerConnection;
755
-
756
-            this.attachMediaStream
757
-                = wrapAttachMediaStream((element, stream) => {
758
-                    if (element) {
759
-                        element.srcObject = stream;
760
-                    }
761
-                });
762
-
763
-            this.getStreamID = ({ id }) => id;
764
-            this.getTrackID = ({ id }) => id;
765
-        } else if (browser.isReactNative()) {
357
+        if (browser.isReactNative()) {
766 358
             this.RTCPeerConnectionType = RTCPeerConnection;
767 359
 
768 360
             this.attachMediaStream = undefined; // Unused on React Native.
@@ -779,10 +371,17 @@ class RTCUtils extends Listenable {
779 371
             };
780 372
             this.getTrackID = ({ id }) => id;
781 373
         } else {
782
-            const message = 'Endpoint does not appear to be WebRTC-capable';
374
+            this.RTCPeerConnectionType = RTCPeerConnection;
783 375
 
784
-            logger.error(message);
785
-            throw new Error(message);
376
+            this.attachMediaStream
377
+                = wrapAttachMediaStream((element, stream) => {
378
+                    if (element) {
379
+                        element.srcObject = stream;
380
+                    }
381
+                });
382
+
383
+            this.getStreamID = ({ id }) => id;
384
+            this.getTrackID = ({ id }) => id;
786 385
         }
787 386
 
788 387
         this.pcConstraints = browser.isChromiumBased() || browser.isReactNative()
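
With the "old gUM" branch gone, the former "Endpoint does not appear to be WebRTC-capable" error path disappears as well: every non-React Native environment now takes the standard srcObject attach shown above. For reference, that is the plain DOM pattern (illustrative only, outside the wrapAttachMediaStream wrapper):

function attach(videoElement, mediaStream) {
    if (videoElement) {
        videoElement.srcObject = mediaStream; // modern replacement for the legacy createObjectURL approach
    }
}
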
@@ -792,9 +391,7 @@ class RTCUtils extends Listenable {
792 391
             ] }
793 392
             : {};
794 393
 
795
-        screenObtainer.init(
796
-            options,
797
-            this.getUserMediaWithConstraints.bind(this));
394
+        screenObtainer.init(options);
798 395
 
799 396
         if (this.isDeviceListAvailable()) {
800 397
             this.enumerateDevices(ds => {
@@ -839,38 +436,6 @@ class RTCUtils extends Listenable {
839 436
             });
840 437
     }
841 438
 
842
-    /* eslint-disable max-params */
843
-
844
-    /**
845
-    * @param {string[]} um required user media types
846
-    * @param {Object} [options] optional parameters
847
-    * @param {string} options.resolution
848
-    * @param {number} options.bandwidth
849
-    * @param {number} options.fps
850
-    * @param {string} options.desktopStream
851
-    * @param {string} options.cameraDeviceId
852
-    * @param {string} options.micDeviceId
853
-    * @param {Object} options.frameRate - used only for dekstop sharing.
854
-    * @param {Object} options.frameRate.min - Minimum fps
855
-    * @param {Object} options.frameRate.max - Maximum fps
856
-    * @param {bool}   options.screenShareAudio - Used by electron clients to
857
-    * enable system audio screen sharing.
858
-    * @param {number} options.timeout - The timeout in ms for GUM.
859
-    * @returns {Promise} Returns a media stream on success or a JitsiTrackError
860
-    * on failure.
861
-    **/
862
-    getUserMediaWithConstraints(um, options = {}) {
863
-        const {
864
-            timeout,
865
-            ...otherOptions
866
-        } = options;
867
-        const constraints = getConstraints(um, otherOptions);
868
-
869
-        logger.info('Get media constraints', JSON.stringify(constraints));
870
-
871
-        return this._getUserMedia(um, constraints, timeout);
872
-    }
873
-
874 439
     /**
875 440
      * Acquires a media stream via getUserMedia that
876 441
      * matches the given constraints
@@ -930,23 +495,17 @@ class RTCUtils extends Listenable {
930 495
      * logic compared to use screenObtainer versus normal device capture logic
931 496
      * in RTCUtils#_getUserMedia.
932 497
      *
933
-     * @param {Object} options
934
-     * @param {string[]} options.desktopSharingSources
935
-     * @param {Object} options.desktopSharingFrameRate
936
-     * @param {Object} options.desktopSharingFrameRate.min - Minimum fps
937
-     * @param {Object} options.desktopSharingFrameRate.max - Maximum fps
938 498
      * @returns {Promise} A promise which will be resolved with an object which
939 499
      * contains the acquired display stream. If desktop sharing is not supported
940 500
      * then a rejected promise will be returned.
941 501
      */
942
-    _newGetDesktopMedia(options) {
502
+    _getDesktopMedia() {
943 503
         if (!screenObtainer.isSupported()) {
944 504
             return Promise.reject(new Error('Desktop sharing is not supported!'));
945 505
         }
946 506
 
947 507
         return new Promise((resolve, reject) => {
948 508
             screenObtainer.obtainStream(
949
-                this._parseDesktopSharingOptions(options),
950 509
                 stream => {
951 510
                     resolve(stream);
952 511
                 },
@@ -956,108 +515,6 @@ class RTCUtils extends Listenable {
956 515
         });
957 516
     }
958 517
 
959
-    /* eslint-enable max-params */
960
-
961
-    /**
962
-     * Creates the local MediaStreams.
963
-     * @param {Object} [options] optional parameters
964
-     * @param {Array} options.devices the devices that will be requested
965
-     * @param {string} options.resolution resolution constraints
966
-     * @param {string} options.cameraDeviceId
967
-     * @param {string} options.micDeviceId
968
-     * @param {Object} options.desktopSharingFrameRate
969
-     * @param {Object} options.desktopSharingFrameRate.min - Minimum fps
970
-     * @param {Object} options.desktopSharingFrameRate.max - Maximum fps
971
-     * @returns {*} Promise object that will receive the new JitsiTracks
972
-     */
973
-    obtainAudioAndVideoPermissions(options = {}) {
974
-        options.devices = options.devices || [ ...OLD_GUM_DEFAULT_DEVICES ];
975
-        options.resolution = options.resolution || OLD_GUM_DEFAULT_RESOLUTION;
976
-
977
-        const requestingDesktop = options.devices.includes('desktop');
978
-
979
-        if (requestingDesktop && !screenObtainer.isSupported()) {
980
-            return Promise.reject(
981
-                new Error('Desktop sharing is not supported!'));
982
-        }
983
-
984
-        return this._getAudioAndVideoStreams(options).then(streams =>
985
-            handleLocalStream(streams, options.resolution));
986
-    }
987
-
988
-    /**
989
-     * Performs one call to getUserMedia for audio and/or video and another call
990
-     * for desktop.
991
-     *
992
-     * @param {Object} options - An object describing how the gUM request should
993
-     * be executed. See {@link obtainAudioAndVideoPermissions} for full options.
994
-     * @returns {*} Promise object that will receive the new JitsiTracks on
995
-     * success or a JitsiTrackError on failure.
996
-     */
997
-    _getAudioAndVideoStreams(options) {
998
-        const requestingDesktop = options.devices.includes('desktop');
999
-
1000
-        options.devices = options.devices.filter(device =>
1001
-            device !== 'desktop');
1002
-
1003
-        const gumPromise = options.devices.length
1004
-            ? this.getUserMediaWithConstraints(options.devices, options)
1005
-            : Promise.resolve(null);
1006
-
1007
-        return gumPromise
1008
-            .then(avStream => {
1009
-                // If any requested devices are missing, call gum again in
1010
-                // an attempt to obtain the actual error. For example, the
1011
-                // requested video device is missing or permission was
1012
-                // denied.
1013
-                const missingTracks
1014
-                    = this._getMissingTracks(options.devices, avStream);
1015
-
1016
-                if (missingTracks.length) {
1017
-                    this.stopMediaStream(avStream);
1018
-
1019
-                    return this.getUserMediaWithConstraints(
1020
-                        missingTracks, options)
1021
-
1022
-                        // GUM has already failed earlier and this success
1023
-                        // handling should not be reached.
1024
-                        .then(() => Promise.reject(new JitsiTrackError(
1025
-                            { name: 'UnknownError' },
1026
-                            getConstraints(options.devices, options),
1027
-                            missingTracks)));
1028
-                }
1029
-
1030
-                return avStream;
1031
-            })
1032
-            .then(audioVideo => {
1033
-                if (!requestingDesktop) {
1034
-                    return { audioVideo };
1035
-                }
1036
-
1037
-                if (options.desktopSharingSourceDevice) {
1038
-                    this.stopMediaStream(audioVideo);
1039
-
1040
-                    throw new Error('Using a camera as screenshare source is'
1041
-                        + 'not supported on this browser.');
1042
-                }
1043
-
1044
-                return new Promise((resolve, reject) => {
1045
-                    screenObtainer.obtainStream(
1046
-                        this._parseDesktopSharingOptions(options),
1047
-                        desktop => resolve({
1048
-                            audioVideo,
1049
-                            desktop
1050
-                        }),
1051
-                        error => {
1052
-                            if (audioVideo) {
1053
-                                this.stopMediaStream(audioVideo);
1054
-                            }
1055
-                            reject(error);
1056
-                        });
1057
-                });
1058
-            });
1059
-    }
1060
-
1061 518
     /**
1062 519
      * Private utility for determining if the passed in MediaStream contains
1063 520
      * tracks of the type(s) specified in the requested devices.
@@ -1091,22 +548,6 @@ class RTCUtils extends Listenable {
1091 548
         return missingDevices;
1092 549
     }
1093 550
 
1094
-    /**
1095
-     * Returns an object formatted for specifying desktop sharing parameters.
1096
-     *
1097
-     * @param {Object} options - Takes in the same options object as
1098
-     * {@link obtainAudioAndVideoPermissions}.
1099
-     * @returns {Object}
1100
-     */
1101
-    _parseDesktopSharingOptions(options) {
1102
-        return {
1103
-            desktopSharingSources: options.desktopSharingSources,
1104
-            gumOptions: {
1105
-                frameRate: options.desktopSharingFrameRate
1106
-            }
1107
-        };
1108
-    }
1109
-
1110 551
     /**
1111 552
      * Gets streams from specified device types. This function intentionally
1112 553
      * ignores errors for upstream to catch and handle instead.
@@ -1125,9 +566,7 @@ class RTCUtils extends Listenable {
1125 566
      * track. If an error occurs, it will be deferred to the caller for
1126 567
      * handling.
1127 568
      */
1128
-    newObtainAudioAndVideoPermissions(options) {
1129
-        logger.info('Using the new gUM flow');
1130
-
569
+    obtainAudioAndVideoPermissions(options) {
1131 570
         const {
1132 571
             timeout,
1133 572
             ...otherOptions
@@ -1155,9 +594,7 @@ class RTCUtils extends Listenable {
1155 594
             }
1156 595
 
1157 596
             const {
1158
-                desktopSharingSourceDevice,
1159
-                desktopSharingSources,
1160
-                desktopSharingFrameRate
597
+                desktopSharingSourceDevice
1161 598
             } = otherOptions;
1162 599
 
1163 600
             // Attempt to use a video input device as a screenshare source if
@@ -1178,15 +615,11 @@ class RTCUtils extends Listenable {
1178 615
                 }
1179 616
 
1180 617
                 const requestedDevices = [ 'video' ];
1181
-
1182
-                // Leverage the helper used by {@link _newGetDesktopMedia} to
1183
-                // get constraints for the desktop stream.
1184
-                const { gumOptions } = this._parseDesktopSharingOptions(otherOptions);
1185
-
1186 618
                 const constraints = {
1187 619
                     video: {
1188
-                        ...gumOptions,
1189 620
                         deviceId: matchingDevice.deviceId
621
+
622
+                        // frameRate is omitted here on purpose since this is a device that we'll pretend is a screen.
1190 623
                     }
1191 624
                 };
1192 625
 
@@ -1199,10 +632,7 @@ class RTCUtils extends Listenable {
1199 632
                     });
1200 633
             }
1201 634
 
1202
-            return this._newGetDesktopMedia({
1203
-                desktopSharingSources,
1204
-                desktopSharingFrameRate
1205
-            });
635
+            return this._getDesktopMedia();
1206 636
         }.bind(this);
1207 637
 
1208 638
         /**
@@ -1264,8 +694,7 @@ class RTCUtils extends Listenable {
1264 694
                 return Promise.resolve();
1265 695
             }
1266 696
 
1267
-            const constraints = newGetConstraints(
1268
-                requestedCaptureDevices, otherOptions);
697
+            const constraints = getConstraints(requestedCaptureDevices, otherOptions);
1269 698
 
1270 699
             logger.info('Got media constraints: ', JSON.stringify(constraints));
1271 700
 

+ 0
- 252
modules/RTC/RTCUtils.spec.js View full file

@@ -1,252 +0,0 @@
1
-import browser from '../browser';
2
-
3
-import RTCUtils from './RTCUtils';
4
-import screenObtainer from './ScreenObtainer';
5
-
6
-// TODO move webrtc mocks/polyfills into a easily accessible file when needed
7
-/**
8
- * A constructor to create a mock for the native MediaStreamTrack.
9
- */
10
-function MediaStreamTrackMock(kind, options = {}) {
11
-    this.kind = kind;
12
-    this._settings = {};
13
-
14
-    if (options.resolution) {
15
-        this._settings.height = options.resolution;
16
-    }
17
-}
18
-
19
-MediaStreamTrackMock.prototype.getSettings = function() {
20
-    return this._settings;
21
-};
22
-
23
-MediaStreamTrackMock.prototype.stop
24
-    = function() { /** intentionally blank **/ };
25
-
26
-/**
27
- * A constructor to create a mock for the native MediaStream.
28
- */
29
-function MediaStreamMock() {
30
-    this.id = Date.now();
31
-    this._audioTracks = [];
32
-    this._videoTracks = [];
33
-}
34
-
35
-MediaStreamMock.prototype.addTrack = function(track) {
36
-    if (track.kind === 'audio') {
37
-        this._audioTracks.push(track);
38
-    } else if (track.kind === 'video') {
39
-        this._videoTracks.push(track);
40
-    }
41
-};
42
-
43
-MediaStreamMock.prototype.getAudioTracks = function() {
44
-    return this._audioTracks;
45
-};
46
-
47
-MediaStreamMock.prototype.getTracks = function() {
48
-    return [
49
-        ...this._audioTracks,
50
-        ...this._videoTracks
51
-    ];
52
-};
53
-
54
-MediaStreamMock.prototype.getVideoTracks = function() {
55
-    return this._videoTracks;
56
-};
57
-
58
-/* eslint-disable max-params */
59
-/**
60
- * A mock function to be used for stubbing out the wrapper around getUserMedia.
61
- *
62
- * @param {String[]} devices - The media devices to obtain. Valid devices are
63
- * 'audio', 'video', and 'desktop'.
64
- * @param {Function} onSuccess - An optional success callback to trigger.
65
- * @param {Function} onError - An optional error callback to trigger. This is
66
- * not used in this function.
67
- * @param {Object} options - An object describing the constraints to pass to
68
- * gum.
69
- * @private
70
- * @returns {Promise} A resolved promise with a MediaStreamMock.
71
- */
72
-function successfulGum(devices, options) {
73
-    /* eslint-enable max-params */
74
-
75
-    const mediaStreamMock = new MediaStreamMock();
76
-
77
-    if (devices.includes('audio')) {
78
-        mediaStreamMock.addTrack(new MediaStreamTrackMock('audio', options));
79
-    }
80
-
81
-    if (devices.includes('video')) {
82
-        mediaStreamMock.addTrack(new MediaStreamTrackMock('video', options));
83
-    }
84
-
85
-    if (devices.includes('desktop')) {
86
-        mediaStreamMock.addTrack(new MediaStreamTrackMock('video', options));
87
-    }
88
-
89
-    return Promise.resolve(mediaStreamMock);
90
-}
91
-
92
-/**
93
- * General error handling for a promise chain that threw an unexpected error.
94
- *
95
- * @param {Error} error - The error object describing what error occurred.
96
- * @param {function} done - Jasmine's done function to trigger a failed test.
97
- * @private
98
- * @returns {void}
99
- */
100
-function unexpectedErrorHandler(error = {}, done) {
101
-    done.fail(`unexpected error occurred: ${error.message}`);
102
-}
103
-
104
-describe('RTCUtils', () => {
105
-    describe('obtainAudioAndVideoPermissions', () => {
106
-        let getUserMediaSpy, isScreenSupportedSpy, oldMediaStream,
107
-            oldMediaStreamTrack, oldWebkitMediaStream;
108
-
109
-        beforeEach(() => {
110
-            // FIXME: To get some kind of initial testing working assume a
111
-            // chrome environment so RTCUtils can actually initialize properly.
112
-            spyOn(browser, 'isChrome').and.returnValue(true);
113
-            spyOn(screenObtainer, '_createObtainStreamMethod')
114
-                .and.returnValue(() => { /** intentional no op */ });
115
-            isScreenSupportedSpy = spyOn(screenObtainer, 'isSupported')
116
-                .and.returnValue(true);
117
-
118
-            oldMediaStreamTrack = window.MediaStreamTrack;
119
-            window.MediaStreamTrack = MediaStreamTrackMock;
120
-
121
-            oldMediaStream = window.MediaStream;
122
-            window.MediaStream = MediaStreamMock;
123
-
124
-            oldWebkitMediaStream = window.webkitMediaStream;
125
-            window.webkitMediaStream = MediaStreamMock;
126
-            RTCUtils.init();
127
-
128
-            getUserMediaSpy = spyOn(RTCUtils, 'getUserMediaWithConstraints');
129
-        });
130
-
131
-        afterEach(() => {
132
-            window.MediaStreamTrack = oldMediaStreamTrack;
133
-            window.MediaStream = oldMediaStream;
134
-            window.webkitMediaStream = oldWebkitMediaStream;
135
-        });
136
-
137
-        it('gets audio and video by default', done => {
138
-            getUserMediaSpy.and.callFake(successfulGum);
139
-
140
-            RTCUtils.obtainAudioAndVideoPermissions()
141
-                .then(streams => {
142
-                    expect(streams.length).toBe(2);
143
-
144
-                    const audioStream = streams.find(stream =>
145
-                        stream.mediaType === 'audio');
146
-
147
-                    expect(audioStream).toBeTruthy();
148
-                    expect(audioStream.stream instanceof MediaStreamMock)
149
-                        .toBe(true);
150
-                    expect(audioStream.stream.getAudioTracks().length).toBe(1);
151
-
152
-                    const videoStream = streams.find(stream =>
153
-                        stream.mediaType === 'video');
154
-
155
-                    expect(videoStream).toBeTruthy();
156
-                    expect(videoStream.stream instanceof MediaStreamMock)
157
-                        .toBe(true);
158
-                    expect(videoStream.stream.getVideoTracks().length).toBe(1);
159
-
160
-                    done();
161
-                })
162
-                .catch(error => unexpectedErrorHandler(error, done));
163
-        });
164
-
165
-        it('can get an audio track', done => {
166
-            getUserMediaSpy.and.callFake(successfulGum);
167
-
168
-            RTCUtils.obtainAudioAndVideoPermissions({ devices: [ 'audio' ] })
169
-                .then(streams => {
170
-                    expect(streams.length).toBe(1);
171
-
172
-                    expect(streams[0].stream instanceof MediaStreamMock)
173
-                        .toBe(true);
174
-                    expect(streams[0].stream.getAudioTracks().length).toBe(1);
175
-
176
-                    done();
177
-                })
178
-                .catch(error => unexpectedErrorHandler(error, done));
179
-
180
-        });
181
-
182
-        it('can get a video track', done => {
183
-            getUserMediaSpy.and.callFake(successfulGum);
184
-
185
-            RTCUtils.obtainAudioAndVideoPermissions({ devices: [ 'video' ] })
186
-                .then(streams => {
187
-                    expect(streams.length).toBe(1);
188
-
189
-                    expect(streams[0].stream instanceof MediaStreamMock)
190
-                        .toBe(true);
191
-                    expect(streams[0].stream.getVideoTracks().length).toBe(1);
192
-
193
-                    done();
194
-                })
195
-                .catch(error => unexpectedErrorHandler(error, done));
196
-        });
197
-
198
-        it('gets 720 videor resolution by default', done => {
199
-            getUserMediaSpy.and.callFake(successfulGum);
200
-
201
-            RTCUtils.obtainAudioAndVideoPermissions({ devices: [ 'video' ] })
202
-                .then(streams => {
203
-                    const videoTrack = streams[0].stream.getVideoTracks()[0];
204
-                    const { height } = videoTrack.getSettings();
205
-
206
-                    expect(height).toBe(720);
207
-
208
-                    done();
209
-                })
210
-                .catch(error => unexpectedErrorHandler(error, done));
211
-        });
212
-
213
-        describe('requesting desktop', () => {
214
-            it('errors if desktop is not supported', done => {
215
-                isScreenSupportedSpy.and.returnValue(false);
216
-
217
-                RTCUtils.obtainAudioAndVideoPermissions({
218
-                    devices: [ 'desktop' ] })
219
-                    .then(() => done.fail(
220
-                        'obtainAudioAndVideoPermissions should not succeed'))
221
-                    .catch(error => {
222
-                        expect(error.message)
223
-                            .toBe('Desktop sharing is not supported!');
224
-
225
-                        done();
226
-                    });
227
-            });
228
-
229
-            it('can obtain a desktop stream', done => {
230
-                spyOn(screenObtainer, 'obtainStream')
231
-                    .and.callFake((options, success) => {
232
-                        const mediaStreamMock = new MediaStreamMock();
233
-
234
-                        mediaStreamMock.addTrack(
235
-                            new MediaStreamTrackMock('video', options));
236
-
237
-                        success({ stream: mediaStreamMock });
238
-                    });
239
-
240
-                RTCUtils.obtainAudioAndVideoPermissions({
241
-                    devices: [ 'desktop' ] })
242
-                    .then(streams => {
243
-                        expect(streams.length).toBe(1);
244
-                        expect(streams[0].videoType).toBe('desktop');
245
-
246
-                        done();
247
-                    })
248
-                    .catch(error => unexpectedErrorHandler(error, done));
249
-            });
250
-        });
251
-    });
252
-});

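For reference, the desktop-sharing case from the removed spec could be re-covered against the new flow roughly as sketched below. The mock and helper names (MediaStreamMock, MediaStreamTrackMock, unexpectedErrorHandler, screenObtainer) are assumptions carried over from the deleted file, and obtainStream is stubbed with its new callbacks-only signature; this is a sketch, not part of the commit.

    it('can obtain a desktop stream', done => {
        // The new-style obtainStream receives only success/failure callbacks.
        spyOn(screenObtainer, 'obtainStream').and.callFake(success => {
            const mediaStreamMock = new MediaStreamMock();

            mediaStreamMock.addTrack(new MediaStreamTrackMock('video'));
            success({ stream: mediaStreamMock });
        });

        RTCUtils.obtainAudioAndVideoPermissions({ devices: [ 'desktop' ] })
            .then(streams => {
                expect(streams.length).toBe(1);
                expect(streams[0].videoType).toBe('desktop');

                done();
            })
            .catch(error => unexpectedErrorHandler(error, done));
    });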
+ 84
- 94
modules/RTC/ScreenObtainer.js Whole file

@@ -5,7 +5,10 @@ import browser from '../browser';
5 5
 
6 6
 const logger = require('jitsi-meet-logger').getLogger(__filename);
7 7
 
8
-let gumFunction = null;
8
+/**
9
+ * The default frame rate for Screen Sharing.
10
+ */
11
+export const SS_DEFAULT_FRAME_RATE = 5;
9 12
 
10 13
 /**
11 14
  * Handles obtaining a stream from a screen capture on different browsers.
@@ -25,12 +28,9 @@ const ScreenObtainer = {
25 28
      * (this.obtainStream).
26 29
      *
27 30
      * @param {object} options
28
-     * @param {Function} gum GUM method
29 31
      */
30
-    init(options = {}, gum) {
32
+    init(options = {}) {
31 33
         this.options = options;
32
-        gumFunction = gum;
33
-
34 34
         this.obtainStream = this._createObtainStreamMethod();
35 35
 
36 36
         if (!this.obtainStream) {
@@ -47,7 +47,7 @@ const ScreenObtainer = {
47 47
      */
48 48
     _createObtainStreamMethod() {
49 49
         if (browser.isNWJS()) {
50
-            return (_, onSuccess, onFailure) => {
50
+            return (onSuccess, onFailure) => {
51 51
                 window.JitsiMeetNW.obtainDesktopStream(
52 52
                     onSuccess,
53 53
                     (error, constraints) => {
@@ -93,6 +93,23 @@ const ScreenObtainer = {
93 93
         return null;
94 94
     },
95 95
 
96
+    /**
97
+     * Gets the appropriate constraints for audio sharing.
98
+     *
99
+     * @returns {Object|boolean}
100
+     */
101
+    _getAudioConstraints() {
102
+        const { audioQuality } = this.options;
103
+        const audio = audioQuality?.stereo ? {
104
+            autoGainControl: false,
105
+            channelCount: 2,
106
+            echoCancellation: false,
107
+            noiseSuppression: false
108
+        } : true;
109
+
110
+        return audio;
111
+    },
112
+
96 113
     /**
97 114
      * Checks whether obtaining a screen capture is supported in the current
98 115
      * environment.
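The new helper centralises the audio constraints that used to be built inline in obtainScreenFromGetDisplayMedia, so the Electron and getDisplayMedia paths now share them. A minimal sketch of the two shapes it can return, assuming options.audioQuality is the only relevant input:

    // Stereo requested at init() time: raw, two-channel capture.
    ScreenObtainer.init({ audioQuality: { stereo: true } });
    ScreenObtainer._getAudioConstraints();
    // -> { autoGainControl: false, channelCount: 2, echoCancellation: false, noiseSuppression: false }

    // No audioQuality configured: fall back to browser defaults.
    ScreenObtainer.init({});
    ScreenObtainer._getAudioConstraints();
    // -> true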
@@ -105,43 +122,76 @@ const ScreenObtainer = {
105 122
     /**
106 123
      * Obtains a screen capture stream on Electron.
107 124
      *
108
-     * @param {Object} [options] - Screen sharing options.
109
-     * @param {Array<string>} [options.desktopSharingSources] - Array with the
110
-     * sources that have to be displayed in the desktop picker window ('screen',
111
-     * 'window', etc.).
112 125
      * @param onSuccess - Success callback.
113 126
      * @param onFailure - Failure callback.
114 127
      */
115
-    obtainScreenOnElectron(options = {}, onSuccess, onFailure) {
116
-        if (window.JitsiMeetScreenObtainer
117
-            && window.JitsiMeetScreenObtainer.openDesktopPicker) {
118
-            const { desktopSharingSources, gumOptions } = options;
128
+    obtainScreenOnElectron(onSuccess, onFailure) {
129
+        if (window.JitsiMeetScreenObtainer && window.JitsiMeetScreenObtainer.openDesktopPicker) {
130
+            const { desktopSharingFrameRate, desktopSharingSources } = this.options;
119 131
 
120 132
             window.JitsiMeetScreenObtainer.openDesktopPicker(
121 133
                 {
122 134
                     desktopSharingSources: desktopSharingSources || [ 'screen', 'window' ]
123 135
                 },
124
-                (streamId, streamType, screenShareAudio = false) =>
125
-                    onGetStreamResponse(
126
-                        {
127
-                            response: {
128
-                                streamId,
129
-                                streamType,
130
-                                screenShareAudio
131
-                            },
132
-                            gumOptions
133
-                        },
134
-                        onSuccess,
135
-                        onFailure
136
-                    ),
136
+                (streamId, streamType, screenShareAudio = false) => {
137
+                    if (streamId) {
138
+                        let audioConstraints = false;
139
+
140
+                        if (screenShareAudio) {
141
+                            audioConstraints = {
142
+                                optional: {
143
+                                    ...this._getAudioConstraints()
144
+                                }
145
+                            };
146
+
147
+                            // Audio screen sharing for Electron only works for screen type devices,
148
+                            // i.e. when the user shares the whole desktop.
149
+                            // Note: The documentation specifies that chromeMediaSourceId should not be present
150
+                            // which, in the case a user has multiple monitors, leads to them being shared all
151
+                            // at once. However, we tested with chromeMediaSourceId present and it seems to be
152
+                            // working properly.
153
+                            if (streamType === 'screen') {
154
+                                audioConstraints.mandatory = {
155
+                                    chromeMediaSource: 'desktop'
156
+                                };
157
+                            }
158
+                        }
159
+
160
+                        const constraints = {
161
+                            audio: audioConstraints,
162
+                            video: {
163
+                                mandatory: {
164
+                                    chromeMediaSource: 'desktop',
165
+                                    chromeMediaSourceId: streamId,
166
+                                    minFrameRate: desktopSharingFrameRate?.min ?? SS_DEFAULT_FRAME_RATE,
167
+                                    maxFrameRate: desktopSharingFrameRate?.max ?? SS_DEFAULT_FRAME_RATE,
168
+                                    maxWidth: window.screen.width,
169
+                                    maxHeight: window.screen.height
170
+                                }
171
+                            }
172
+                        };
173
+
174
+                        // We have to use the old API on Electron to get a desktop stream.
175
+                        navigator.mediaDevices.getUserMedia(constraints)
176
+                            .then(stream => onSuccess({
177
+                                stream,
178
+                                sourceId: streamId,
179
+                                sourceType: streamType
180
+                            }), onFailure);
181
+                    } else {
182
+                        // As noted in Chrome Desktop Capture API:
183
+                        // If the user didn't select any source (i.e. canceled the prompt)
184
+                        // then the callback is called with an empty streamId.
185
+                        onFailure(new JitsiTrackError(JitsiTrackErrors.SCREENSHARING_USER_CANCELED));
186
+                    }
187
+                },
137 188
                 err => onFailure(new JitsiTrackError(
138 189
                     JitsiTrackErrors.ELECTRON_DESKTOP_PICKER_ERROR,
139 190
                     err
140 191
                 ))
141 192
             );
142 193
         } else {
143
-            onFailure(new JitsiTrackError(
144
-                JitsiTrackErrors.ELECTRON_DESKTOP_PICKER_NOT_FOUND));
194
+            onFailure(new JitsiTrackError(JitsiTrackErrors.ELECTRON_DESKTOP_PICKER_NOT_FOUND));
145 195
         }
146 196
     },
147 197
 
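With the GUM call inlined above, the Electron path no longer threads gumOptions through obtainStream; everything it needs is captured by init(). A hedged usage sketch (the option values are purely illustrative):

    ScreenObtainer.init({
        desktopSharingSources: [ 'screen', 'window' ],
        desktopSharingFrameRate: { min: 5, max: 30 },
        audioQuality: { stereo: false }
    });

    // obtainStream now takes only the two callbacks.
    ScreenObtainer.obtainStream(
        ({ stream, sourceId, sourceType }) => {
            console.log(`obtained ${sourceType} stream ${sourceId}`, stream);
        },
        error => console.error('screen sharing failed', error));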
@@ -151,7 +201,7 @@ const ScreenObtainer = {
151 201
      * @param callback - The success callback.
152 202
      * @param errorCallback - The error callback.
153 203
      */
154
-    obtainScreenFromGetDisplayMedia(options, callback, errorCallback) {
204
+    obtainScreenFromGetDisplayMedia(callback, errorCallback) {
155 205
         let getDisplayMedia;
156 206
 
157 207
         if (navigator.getDisplayMedia) {
@@ -161,15 +211,9 @@ const ScreenObtainer = {
161 211
             getDisplayMedia = navigator.mediaDevices.getDisplayMedia.bind(navigator.mediaDevices);
162 212
         }
163 213
 
164
-        const { audioQuality } = this.options;
165
-        const audio = audioQuality?.stereo ? {
166
-            autoGainControl: false,
167
-            channelCount: 2,
168
-            echoCancellation: false,
169
-            noiseSuppression: false
170
-        } : true;
171
-
172
-        const video = Object.keys(options.gumOptions).length > 0 ? options.gumOptions : true;
214
+        const { desktopSharingFrameRate } = this.options;
215
+        const video = typeof desktopSharingFrameRate === 'object' ? { frameRate: desktopSharingFrameRate } : true;
216
+        const audio = this._getAudioConstraints();
173 217
 
174 218
         // At the time of this writing 'min' constraint for fps is not supported by getDisplayMedia.
175 219
         video.frameRate && delete video.frameRate.min;
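The getDisplayMedia path now derives its video constraints from desktopSharingFrameRate and its audio constraints from _getAudioConstraints(). For example, with desktopSharingFrameRate = { min: 5, max: 30 } and stereo audio enabled, the resulting call would look roughly like this (values assumed for illustration):

    navigator.mediaDevices.getDisplayMedia({
        video: {
            // 'min' was deleted above because getDisplayMedia does not support it.
            frameRate: { max: 30 }
        },
        audio: {
            autoGainControl: false,
            channelCount: 2,
            echoCancellation: false,
            noiseSuppression: false
        }
    });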
@@ -216,7 +260,7 @@ const ScreenObtainer = {
216 260
      * @param callback - The success callback.
217 261
      * @param errorCallback - The error callback.
218 262
      */
219
-    obtainScreenFromGetDisplayMediaRN(options, callback, errorCallback) {
263
+    obtainScreenFromGetDisplayMediaRN(callback, errorCallback) {
220 264
         logger.info('Using getDisplayMedia for screen sharing');
221 265
 
222 266
         navigator.mediaDevices.getDisplayMedia({ video: true })
@@ -232,58 +276,4 @@ const ScreenObtainer = {
232 276
     }
233 277
 };
234 278
 
235
-/**
236
- * Handles response from external application / extension and calls GUM to
237
- * receive the desktop streams or reports error.
238
- * @param {object} options
239
- * @param {object} options.response
240
- * @param {string} options.response.streamId - the streamId for the desktop
241
- * stream.
242
- * @param {bool}   options.response.screenShareAudio - Used by electron clients to
243
- * enable system audio screen sharing.
244
- * @param {string} options.response.error - error to be reported.
245
- * @param {object} options.gumOptions - options passed to GUM.
246
- * @param {Function} onSuccess - callback for success.
247
- * @param {Function} onFailure - callback for failure.
248
- * @param {object} gumOptions - options passed to GUM.
249
- */
250
-function onGetStreamResponse(
251
-        options = {
252
-            response: {},
253
-            gumOptions: {}
254
-        },
255
-        onSuccess,
256
-        onFailure) {
257
-    const { streamId, streamType, screenShareAudio, error } = options.response || {};
258
-
259
-    if (streamId) {
260
-        const gumOptions = {
261
-            desktopStream: streamId,
262
-            screenShareAudio,
263
-            ...options.gumOptions
264
-        };
265
-
266
-        gumFunction([ 'desktop' ], gumOptions)
267
-            .then(stream => onSuccess({
268
-                stream,
269
-                sourceId: streamId,
270
-                sourceType: streamType
271
-            }), onFailure);
272
-    } else {
273
-        // As noted in Chrome Desktop Capture API:
274
-        // If user didn't select any source (i.e. canceled the prompt)
275
-        // then the callback is called with an empty streamId.
276
-        if (streamId === '') {
277
-            onFailure(new JitsiTrackError(
278
-                JitsiTrackErrors.SCREENSHARING_USER_CANCELED));
279
-
280
-            return;
281
-        }
282
-
283
-        onFailure(new JitsiTrackError(
284
-            JitsiTrackErrors.SCREENSHARING_GENERIC_ERROR,
285
-            error));
286
-    }
287
-}
288
-
289 279
 export default ScreenObtainer;

+ 2
- 19
modules/browser/BrowserCapabilities.js Whole file

@@ -239,29 +239,12 @@ export default class BrowserCapabilities extends BrowserDetection {
239 239
     }
240 240
 
241 241
     /**
242
-     * Returns whether or not the current browser should be using the new
243
-     * getUserMedia flow, which utilizes the adapter shim. This method should
244
-     * be temporary and used while migrating all browsers to use adapter and
245
-     * the new getUserMedia.
246
-     *
247
-     * @returns {boolean}
248
-     */
249
-    usesNewGumFlow() {
250
-        if (this.isChromiumBased() || this.isFirefox() || this.isWebKitBased()) {
251
-            return true;
252
-        }
253
-
254
-        return false;
255
-    }
256
-
257
-    /**
258
-     * Checks if the browser uses webrtc-adapter. All browsers using the new
259
-     * getUserMedia flow.
242
+     * Checks if the browser uses webrtc-adapter. All browsers except React Native do.
260 243
      *
261 244
      * @returns {boolean}
262 245
      */
263 246
     usesAdapter() {
264
-        return this.usesNewGumFlow();
247
+        return !this.isReactNative();
265 248
     }
266 249
 
267 250
     /**

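usesAdapter() now encodes the only distinction left after dropping usesNewGumFlow(): every supported browser goes through webrtc-adapter except React Native. A quick sanity sketch using the shared browser singleton (import path shown from the package root):

    import browser from './modules/browser';

    // True on Chromium, Firefox and WebKit-based browsers; false only on React Native.
    browser.usesAdapter();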
+ 1
- 1
modules/proxyconnection/ProxyConnectionService.js Whole file

@@ -276,7 +276,7 @@ export default class ProxyConnectionService {
276 276
         // Grab the webrtc media stream and pipe it through the same processing
277 277
         // that would occur for a locally obtained media stream.
278 278
         const mediaStream = jitsiRemoteTrack.getOriginalStream();
279
-        const jitsiLocalTracks = RTC.newCreateLocalTracks(
279
+        const jitsiLocalTracks = RTC.createLocalTracks(
280 280
             [
281 281
                 {
282 282
                     deviceId:

+ 1
- 1
modules/xmpp/JingleSessionPC.js Whole file

@@ -9,7 +9,7 @@ import {
9 9
     ICE_STATE_CHANGED
10 10
 } from '../../service/statistics/AnalyticsEvents';
11 11
 import XMPPEvents from '../../service/xmpp/XMPPEvents';
12
-import { SS_DEFAULT_FRAME_RATE } from '../RTC/RTCUtils';
12
+import { SS_DEFAULT_FRAME_RATE } from '../RTC/ScreenObtainer';
13 13
 import SDP from '../sdp/SDP';
14 14
 import SDPDiffer from '../sdp/SDPDiffer';
15 15
 import SDPUtil from '../sdp/SDPUtil';

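The frame-rate constant only changes homes, so consumers such as JingleSessionPC just need the new import. A sketch of the typical fallback it feeds (the options shape is an assumption based on the ScreenObtainer diff above, not on this file):

    import { SS_DEFAULT_FRAME_RATE } from '../RTC/ScreenObtainer';

    const { desktopSharingFrameRate } = options;
    const maxFps = desktopSharingFrameRate?.max ?? SS_DEFAULT_FRAME_RATE; // 5 unless configured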
+ 24
- 36
service/RTC/Resolutions.js Whole file

@@ -1,67 +1,55 @@
1 1
 const Resolutions = {
2
+    '2160': {
3
+        width: 3840,
4
+        height: 2160
5
+    },
6
+    '4k': {
7
+        width: 3840,
8
+        height: 2160
9
+    },
2 10
     '1080': {
3 11
         width: 1920,
4
-        height: 1080,
5
-        order: 8
12
+        height: 1080
6 13
     },
7 14
     'fullhd': {
8 15
         width: 1920,
9
-        height: 1080,
10
-        order: 8
16
+        height: 1080
11 17
     },
12 18
     '720': {
13 19
         width: 1280,
14
-        height: 720,
15
-        order: 7
20
+        height: 720
16 21
     },
17 22
     'hd': {
18 23
         width: 1280,
19
-        height: 720,
20
-        order: 7
21
-    },
22
-    '960': {
23
-        width: 960,
24
-        height: 720,
25
-        order: 6
24
+        height: 720
26 25
     },
27 26
     '540': {
28 27
         width: 960,
29
-        height: 540,
30
-        order: 5
28
+        height: 540
31 29
     },
32 30
     'qhd': {
33 31
         width: 960,
34
-        height: 540,
35
-        order: 5
32
+        height: 540
36 33
     },
37
-
38
-    // 16:9 resolution first.
39
-    '360': {
34
+    '480': {
40 35
         width: 640,
41
-        height: 360,
42
-        order: 4
36
+        height: 480
43 37
     },
44
-    '640': {
38
+    'vga': {
45 39
         width: 640,
46
-        height: 480,
47
-        order: 3
40
+        height: 480
48 41
     },
49
-    'vga': {
42
+    '360': {
50 43
         width: 640,
51
-        height: 480,
52
-        order: 3
44
+        height: 360
53 45
     },
54
-
55
-    // 16:9 resolution first.
56
-    '180': {
46
+    '240': {
57 47
         width: 320,
58
-        height: 180,
59
-        order: 2
48
+        height: 240
60 49
     },
61
-    '320': {
50
+    '180': {
62 51
         width: 320,
63
-        height: 240,
64
-        order: 1
52
+        height: 180
65 53
     }
66 54
 };
67 55
 

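With the downgrade-and-retry logic removed, the 'order' field had no consumers left, so each entry now maps a human-readable key straight to its dimensions. A small sketch of how an entry can be turned into capture constraints (the 'ideal' wrapping is illustrative, not prescribed by this file):

    import Resolutions from './service/RTC/Resolutions';

    const { width, height } = Resolutions['720'];    // { width: 1280, height: 720 }
    const videoConstraints = {
        width: { ideal: width },
        height: { ideal: height }
    };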