
[eslint] no-use-before-define

dev1
Lyubo Marinov committed 8 years ago
commit 7c0b8d9145

.eslintrc.js (+1, -0)

@@ -152,6 +152,7 @@ module.exports = {
         'no-undef-init': 2,
         'no-undefined': 0,
         'no-unused-vars': 2,
+        'no-use-before-define': [ 'error', { 'functions': false } ],
 
         // Stylistic issues group
         'array-bracket-spacing': [
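For context, a minimal sketch of what this configuration means (hypothetical snippet, not part of the commit): with functions: false, calling a hoisted function declaration above its definition is allowed, while reading a const or let binding above its declaration is still reported.

/* eslint no-use-before-define: ["error", { "functions": false }] */

// Allowed: function declarations are hoisted and exempt under functions: false.
init();
function init() {
    console.log('initialized');
}

// Still reported by the rule (and a TDZ error at runtime), so it stays commented out:
// console.log(config.name);
// const config = { name: 'example' };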

JitsiMediaDevices.js (+14, -15)

@@ -7,21 +7,6 @@ import Statistics from './modules/statistics/statistics';
 
 const eventEmitter = new EventEmitter();
 
-RTC.addListener(
-    RTCEvents.DEVICE_LIST_CHANGED,
-    devices =>
-        eventEmitter.emit(
-            JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED,
-            devices));
-
-RTC.addListener(RTCEvents.DEVICE_LIST_AVAILABLE,
-    devices => {
-        // log output device
-        logOutputDevice(
-            JitsiMediaDevices.getAudioOutputDevice(),
-            devices);
-    });
-
 /**
  * Gathers data and sends it to statistics.
  * @param deviceID the device id to log
@@ -145,4 +130,18 @@ const JitsiMediaDevices = {
     }
 };
 
+
+RTC.addListener(
+    RTCEvents.DEVICE_LIST_CHANGED,
+    devices =>
+        eventEmitter.emit(
+            JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED,
+            devices));
+RTC.addListener(
+    RTCEvents.DEVICE_LIST_AVAILABLE,
+    devices =>
+        logOutputDevice(
+            JitsiMediaDevices.getAudioOutputDevice(),
+            devices));
+
 module.exports = JitsiMediaDevices;

modules/RTC/RTCBrowserType.js (+5, -2)

@@ -1,7 +1,11 @@
 const logger = require('jitsi-meet-logger').getLogger(__filename);
 
+let browserVersion; // eslint-disable-line prefer-const
+
 let currentBrowser;
 
+const isAndroid = navigator.userAgent.indexOf('Android') !== -1;
+
 const RTCBrowserType = {
 
     RTC_BROWSER_CHROME: 'rtc_browser.chrome',
@@ -376,7 +380,6 @@ function detectBrowser() {
     return 1;
 }
 
-const browserVersion = detectBrowser();
-const isAndroid = navigator.userAgent.indexOf('Android') !== -1;
+browserVersion = detectBrowser();
 
 module.exports = RTCBrowserType;
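The RTCBrowserType.js change above illustrates the variable side of the rule: even with functions: false, reading a module-level binding above its declaration is reported, so the declaration moves to the top of the file and only the assignment stays below detectBrowser(). A reduced sketch with hypothetical names (not the library's actual code):

/* eslint no-use-before-define: ["error", { "functions": false }] */

// Declared before any code that reads it; assigned exactly once below.
// The disable comment mirrors the diff, where prefer-const would otherwise fire.
let browserVersion; // eslint-disable-line prefer-const

// Legal: the declaration of browserVersion already sits above this reference.
function getBrowserVersion() {
    return browserVersion;
}

// Stand-in for the real user-agent detection logic.
function detectBrowser() {
    return 1;
}

browserVersion = detectBrowser();

console.log(getBrowserVersion()); // 1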

modules/RTC/RTCUtils.js (+91, -91)

@@ -431,35 +431,6 @@ function onMediaDevicesListChanged(devicesReceived) {
     eventEmitter.emit(RTCEvents.DEVICE_LIST_CHANGED, devicesReceived);
 }
 
-// In case of IE we continue from 'onReady' callback
-// passed to RTCUtils constructor. It will be invoked by Temasys plugin
-// once it is initialized.
-function onReady(options, GUM) {
-    rtcReady = true;
-    eventEmitter.emit(RTCEvents.RTC_READY, true);
-    screenObtainer.init(options, GUM);
-
-    // Initialize rawEnumerateDevicesWithCallback
-    initRawEnumerateDevicesWithCallback();
-
-    if (rtcUtils.isDeviceListAvailable() && rawEnumerateDevicesWithCallback) {
-        rawEnumerateDevicesWithCallback(ds => {
-            currentlyAvailableMediaDevices = ds.splice(0);
-
-            eventEmitter.emit(RTCEvents.DEVICE_LIST_AVAILABLE,
-                currentlyAvailableMediaDevices);
-
-            if (isDeviceChangeEventSupported) {
-                navigator.mediaDevices.addEventListener(
-                    'devicechange',
-                    () => rtcUtils.enumerateDevices(onMediaDevicesListChanged));
-            } else {
-                pollForAvailableMediaDevices();
-            }
-        });
-    }
-}
-
 /**
  * Apply function with arguments if function exists.
  * Do nothing if function not provided.
@@ -558,30 +529,6 @@ function convertMediaStreamTrackSource(source) {
     };
 }
 
-function obtainDevices(options) {
-    if (!options.devices || options.devices.length === 0) {
-        return options.successCallback(options.streams || {});
-    }
-
-    const device = options.devices.splice(0, 1);
-
-    options.deviceGUM[device](
-        stream => {
-            options.streams = options.streams || {};
-            options.streams[device] = stream;
-            obtainDevices(options);
-        },
-        error => {
-            Object.keys(options.streams).forEach(
-                d => rtcUtils.stopMediaStream(options.streams[d]));
-            logger.error(
-                `failed to obtain ${device} stream - stop`, error);
-
-            options.errorCallback(error);
-        });
-}
-
-
 /**
  * Handles the newly created Media Streams.
  * @param streams the new Media Streams
@@ -665,44 +612,6 @@ function handleLocalStream(streams, resolution) {
     return res;
 }
 
-/**
- * Wraps original attachMediaStream function to set current audio output device
- * if this is supported.
- * @param {Function} origAttachMediaStream
- * @returns {Function}
- */
-function wrapAttachMediaStream(origAttachMediaStream) {
-    return function(element, stream) {
-        // eslint-disable-next-line prefer-rest-params
-        const res = origAttachMediaStream.apply(rtcUtils, arguments);
-
-        if (stream
-                && rtcUtils.isDeviceChangeAvailable('output')
-                && stream.getAudioTracks
-                && stream.getAudioTracks().length
-
-                // we skip setting audio output if there was no explicit change
-                && audioOutputChanged) {
-            element.setSinkId(rtcUtils.getAudioOutputDevice())
-                .catch(function(ex) {
-                    const err
-                        = new JitsiTrackError(ex, null, [ 'audiooutput' ]);
-
-                    GlobalOnErrorHandler.callUnhandledRejectionHandler(
-                        { promise: this,
-                            reason: err });
-
-                    logger.warn('Failed to set audio output device for the '
-                        + 'element. Default audio output device will be used '
-                        + 'instead',
-                        element, err);
-                });
-        }
-
-        return res;
-    };
-}
-
 /**
  * Represents a default implementation of setting a <tt>MediaStream</tt> as the
  * source of a video element that tries to be browser-agnostic through feature
@@ -1414,4 +1323,95 @@ function rejectWithWebRTCNotSupported(errorMessage, reject) {
 
 const rtcUtils = new RTCUtils();
 
+function obtainDevices(options) {
+    if (!options.devices || options.devices.length === 0) {
+        return options.successCallback(options.streams || {});
+    }
+
+    const device = options.devices.splice(0, 1);
+
+    options.deviceGUM[device](
+        stream => {
+            options.streams = options.streams || {};
+            options.streams[device] = stream;
+            obtainDevices(options);
+        },
+        error => {
+            Object.keys(options.streams).forEach(
+                d => rtcUtils.stopMediaStream(options.streams[d]));
+            logger.error(
+                `failed to obtain ${device} stream - stop`, error);
+
+            options.errorCallback(error);
+        });
+}
+
+// In case of IE we continue from 'onReady' callback
+// passed to RTCUtils constructor. It will be invoked by Temasys plugin
+// once it is initialized.
+function onReady(options, GUM) {
+    rtcReady = true;
+    eventEmitter.emit(RTCEvents.RTC_READY, true);
+    screenObtainer.init(options, GUM);
+
+    // Initialize rawEnumerateDevicesWithCallback
+    initRawEnumerateDevicesWithCallback();
+
+    if (rtcUtils.isDeviceListAvailable() && rawEnumerateDevicesWithCallback) {
+        rawEnumerateDevicesWithCallback(ds => {
+            currentlyAvailableMediaDevices = ds.splice(0);
+
+            eventEmitter.emit(RTCEvents.DEVICE_LIST_AVAILABLE,
+                currentlyAvailableMediaDevices);
+
+            if (isDeviceChangeEventSupported) {
+                navigator.mediaDevices.addEventListener(
+                    'devicechange',
+                    () => rtcUtils.enumerateDevices(onMediaDevicesListChanged));
+            } else {
+                pollForAvailableMediaDevices();
+            }
+        });
+    }
+}
+
+/**
+ * Wraps original attachMediaStream function to set current audio output device
+ * if this is supported.
+ * @param {Function} origAttachMediaStream
+ * @returns {Function}
+ */
+function wrapAttachMediaStream(origAttachMediaStream) {
+    return function(element, stream) {
+        // eslint-disable-next-line prefer-rest-params
+        const res = origAttachMediaStream.apply(rtcUtils, arguments);
+
+        if (stream
+                && rtcUtils.isDeviceChangeAvailable('output')
+                && stream.getAudioTracks
+                && stream.getAudioTracks().length
+
+                // we skip setting audio output if there was no explicit change
+                && audioOutputChanged) {
+            element.setSinkId(rtcUtils.getAudioOutputDevice())
+                .catch(function(ex) {
+                    const err
+                        = new JitsiTrackError(ex, null, [ 'audiooutput' ]);
+
+                    GlobalOnErrorHandler.callUnhandledRejectionHandler({
+                        promise: this,
+                        reason: err
+                    });
+
+                    logger.warn('Failed to set audio output device for the '
+                        + 'element. Default audio output device will be used '
+                        + 'instead',
+                        element, err);
+                });
+        }
+
+        return res;
+    };
+}
+
 export default rtcUtils;

modules/statistics/CallStats.js (+76, -76)

@@ -55,72 +55,15 @@ let callStats = null;
  */
 const DEFAULT_REMOTE_USER = 'jitsi';
 
-function initCallback(err, msg) {
-    logger.log(`CallStats Status: err=${err} msg=${msg}`);
-
-    CallStats.initializeInProgress = false;
-
-    // there is no lib, nothing to report to
-    if (err !== 'success') {
-        CallStats.initializeFailed = true;
-
-        return;
-    }
-
-    const ret = callStats.addNewFabric(this.peerconnection,
-        DEFAULT_REMOTE_USER,
-        callStats.fabricUsage.multiplex,
-        this.confID,
-        this.pcCallback.bind(this));
-
-    const fabricInitialized = ret.status === 'success';
-
-    if (!fabricInitialized) {
-        CallStats.initializeFailed = true;
-        logger.log('callstats fabric not initilized', ret.message);
-
-        return;
-    }
-
-    CallStats.initializeFailed = false;
-    CallStats.initialized = true;
-    CallStats.feedbackEnabled = true;
-
-    // notify callstats about failures if there were any
-    if (CallStats.reportsQueue.length) {
-        CallStats.reportsQueue.forEach(function(report) {
-            if (report.type === reportType.ERROR) {
-                const error = report.data;
-
-                CallStats._reportError.call(this, error.type, error.error,
-                    error.pc);
-            } else if (report.type === reportType.EVENT
-                && fabricInitialized) {
-                // if we have and event to report and we failed to add fabric
-                // this event will not be reported anyway, returning an error
-                const eventData = report.data;
-
-                callStats.sendFabricEvent(
-                    this.peerconnection,
-                    eventData.event,
-                    this.confID,
-                    eventData.eventData);
-            } else if (report.type === reportType.MST_WITH_USERID) {
-                const data = report.data;
-
-                callStats.associateMstWithUserID(
-                    this.peerconnection,
-                    data.callStatsId,
-                    this.confID,
-                    data.ssrc,
-                    data.usageLabel,
-                    data.containerId
-                );
-            }
-        }, this);
-        CallStats.reportsQueue.length = 0;
-    }
-}
+/**
+ * Type of pending reports, can be event or an error.
+ * @type {{ERROR: string, EVENT: string}}
+ */
+const reportType = {
+    ERROR: 'error',
+    EVENT: 'event',
+    MST_WITH_USERID: 'mstWithUserID'
+};
 
 /**
  * Returns a function which invokes f in a try/catch block, logs any exception
@@ -232,16 +175,6 @@ CallStats._checkInitialize = function() {
         initCallback.bind(callStats));
 };
 
-/**
- * Type of pending reports, can be event or an error.
- * @type {{ERROR: string, EVENT: string}}
- */
-const reportType = {
-    ERROR: 'error',
-    EVENT: 'event',
-    MST_WITH_USERID: 'mstWithUserID'
-};
-
 CallStats.prototype.pcCallback = tryCatch((err, msg) => {
     if (callStats && err !== 'success') {
         logger.error(`Monitoring status: ${err} msg: ${msg}`);
@@ -531,4 +464,71 @@ CallStats.dispose = function() {
     CallStats.initializeInProgress = false;
 };
 
+function initCallback(err, msg) {
+    logger.log(`CallStats Status: err=${err} msg=${msg}`);
+
+    CallStats.initializeInProgress = false;
+
+    // there is no lib, nothing to report to
+    if (err !== 'success') {
+        CallStats.initializeFailed = true;
+
+        return;
+    }
+
+    const ret = callStats.addNewFabric(this.peerconnection,
+        DEFAULT_REMOTE_USER,
+        callStats.fabricUsage.multiplex,
+        this.confID,
+        this.pcCallback.bind(this));
+
+    const fabricInitialized = ret.status === 'success';
+
+    if (!fabricInitialized) {
+        CallStats.initializeFailed = true;
+        logger.log('callstats fabric not initilized', ret.message);
+
+        return;
+    }
+
+    CallStats.initializeFailed = false;
+    CallStats.initialized = true;
+    CallStats.feedbackEnabled = true;
+
+    // notify callstats about failures if there were any
+    if (CallStats.reportsQueue.length) {
+        CallStats.reportsQueue.forEach(function(report) {
+            if (report.type === reportType.ERROR) {
+                const error = report.data;
+
+                CallStats._reportError.call(this, error.type, error.error,
+                    error.pc);
+            } else if (report.type === reportType.EVENT
+                && fabricInitialized) {
+                // if we have and event to report and we failed to add fabric
+                // this event will not be reported anyway, returning an error
+                const eventData = report.data;
+
+                callStats.sendFabricEvent(
+                    this.peerconnection,
+                    eventData.event,
+                    this.confID,
+                    eventData.eventData);
+            } else if (report.type === reportType.MST_WITH_USERID) {
+                const data = report.data;
+
+                callStats.associateMstWithUserID(
+                    this.peerconnection,
+                    data.callStatsId,
+                    this.confID,
+                    data.ssrc,
+                    data.usageLabel,
+                    data.containerId
+                );
+            }
+        }, this);
+        CallStats.reportsQueue.length = 0;
+    }
+}
+
 module.exports = CallStats;

modules/transcription/audioRecorder.js (+32, -35)

@@ -59,38 +59,6 @@ function stopRecorder(trackRecorder) {
     trackRecorder.recorder.stop();
 }
 
-/**
- * Creates a TrackRecorder object. Also creates the MediaRecorder and
- * data array for the trackRecorder.
- * @param track the JitsiTrack holding the audio MediaStream(s)
- */
-function instantiateTrackRecorder(track) {
-    const trackRecorder = new TrackRecorder(track);
-
-    // Create a new stream which only holds the audio track
-    const originalStream = trackRecorder.track.getOriginalStream();
-    const stream = createEmptyStream();
-
-    originalStream.getAudioTracks().forEach(t => stream.addTrack(t));
-
-    // Create the MediaRecorder
-    trackRecorder.recorder = new MediaRecorder(stream,
-        { mimeType: audioRecorder.fileType });
-
-    // array for holding the recorder data. Resets it when
-    // audio already has been recorder once
-    trackRecorder.data = [];
-
-    // function handling a dataEvent, e.g the stream gets new data
-    trackRecorder.recorder.ondataavailable = function(dataEvent) {
-        if (dataEvent.data.size > 0) {
-            trackRecorder.data.push(dataEvent.data);
-        }
-    };
-
-    return trackRecorder;
-}
-
 /**
  * Determines which kind of audio recording the browser supports
  * chrome supports "audio/webm" and firefox supports "audio/ogg"
@@ -101,9 +69,8 @@ function determineCorrectFileType() {
     } else if (MediaRecorder.isTypeSupported(AUDIO_OGG)) {
         return AUDIO_OGG;
     }
-    throw new Error('unable to create a MediaRecorder with the'
-            + 'right mimetype!');
-
+    throw new Error(
+        'unable to create a MediaRecorder with the right mimetype!');
 }
 
 /**
@@ -316,6 +283,36 @@ function createEmptyStream() {
     throw new Error('cannot create a clean mediaStream');
 }
 
+/**
+ * Creates a TrackRecorder object. Also creates the MediaRecorder and
+ * data array for the trackRecorder.
+ * @param track the JitsiTrack holding the audio MediaStream(s)
+ */
+function instantiateTrackRecorder(track) {
+    const trackRecorder = new TrackRecorder(track);
+
+    // Create a new stream which only holds the audio track
+    const originalStream = trackRecorder.track.getOriginalStream();
+    const stream = createEmptyStream();
+
+    originalStream.getAudioTracks().forEach(t => stream.addTrack(t));
+
+    // Create the MediaRecorder
+    trackRecorder.recorder
+        = new MediaRecorder(stream, { mimeType: audioRecorder.fileType });
+
+    // array for holding the recorder data. Resets it when
+    // audio already has been recorder once
+    trackRecorder.data = [];
+
+    // function handling a dataEvent, e.g the stream gets new data
+    trackRecorder.recorder.ondataavailable = ({ data }) => {
+        (data.size > 0) && trackRecorder.data.push(data);
+    };
+
+    return trackRecorder;
+}
+
 /**
  * export the main object audioRecorder
  */

modules/transcription/transcriber.js (+7, -8)

@@ -110,7 +110,7 @@ transcriber.prototype.stop = function stop(callback) {
  * @param {RecordingResult} answer a RecordingResult object with a defined
  * WordArray
  */
-const blobCallBack = function(answer) {
+function blobCallBack(answer) {
     console.log(
         'retrieved an answer from the transcription service. The answer has an'
             + ` array of length: ${answer.wordArray.length}`);
@@ -150,7 +150,7 @@ const blobCallBack = function(answer) {
 
     // and check if all results have been received.
     this.maybeMerge();
-};
+}
 
 /**
  * this method will check if the counter is zero. If it is, it will call
@@ -263,7 +263,7 @@ transcriber.prototype.updateTranscription = function(word, name) {
  * @param {Array<Array>} twoDimensionalArray the array to check
  * @returns {boolean} true if any non-zero arrays inside, otherwise false
  */
-const hasPopulatedArrays = function(twoDimensionalArray) {
+function hasPopulatedArrays(twoDimensionalArray) {
     for (let i = 0; i < twoDimensionalArray.length; i++) {
         if (twoDimensionalArray[i].length === 0) {
             twoDimensionalArray.splice(i, 1);
@@ -271,7 +271,7 @@ const hasPopulatedArrays = function(twoDimensionalArray) {
     }
 
     return twoDimensionalArray.length > 0;
-};
+}
 
 /**
  * Push a word to the right location in a sorted array. The array is sorted
@@ -281,7 +281,7 @@ const hasPopulatedArrays = function(twoDimensionalArray) {
  * @param {Array<Word>} array the sorted array to push to
  * @param {Word} word the word to push into the array
  */
-const pushWordToSortedArray = function(array, word) {
+function pushWordToSortedArray(array, word) {
     if (array.length === 0) {
         array.push(word);
     } else {
@@ -290,9 +290,8 @@ const pushWordToSortedArray = function(array, word) {
 
             return;
         }
-        let i;
 
-        for (i = 0; i < array.length; i++) {
+        for (let i = 0; i < array.length; i++) {
             if (word.begin < array[i].begin) {
                 array.splice(i, 0, word);
 
@@ -301,7 +300,7 @@ const pushWordToSortedArray = function(array, word) {
         }
         array.push(word); // fail safe
     }
-};
+}
 
 /**
  * Gives the transcriber a JitsiTrack holding an audioStream to transcribe.

modules/xmpp/SdpTransformUtil.js (+53, -55)

@@ -32,61 +32,6 @@ function _getSSRCCount(mLine) {
             .map(ssrcInfo => ssrcInfo.id)
             .filter((ssrc, index, array) => array.indexOf(ssrc) === index)
             .length;
-
-}
-
-/**
- * Utility class for SDP manipulation using the 'sdp-transform' library.
- *
- * Typical use usage scenario:
- *
- * const transformer = new SdpTransformWrap(rawSdp);
- * const videoMLine = transformer.selectMedia('video);
- * if (videoMLine) {
- *     videoMLiner.addSSRCAttribute({
- *         id: 2342343,
- *         attribute: "cname",
- *         value: "someCname"
- *     });
- *     rawSdp = transformer.toRawSdp();
- * }
- */
-export class SdpTransformWrap {
-
-    /**
-     * Creates new instance and parses the raw SDP into objects using
-     * 'sdp-transform' lib.
-     * @param {string} rawSDP the SDP in raw text format.
-     */
-    constructor(rawSDP) {
-        this.parsedSDP = transform.parse(rawSDP);
-    }
-
-    /**
-     * Selects the first media SDP of given name.
-     * @param {string} mediaType the name of the media e.g. 'audio', 'video',
-     * 'data'.
-     * @return {MLineWrap|null} return {@link MLineWrap} instance for the media
-     * line or <tt>null</tt> if not found. The object returned references
-     * the underlying SDP state held by this <tt>SdpTransformWrap</tt> instance
-     * (it's not a copy).
-     */
-    selectMedia(mediaType) {
-        const selectedMLine
-            = this.parsedSDP.media.find(mLine => mLine.type === mediaType);
-
-
-        return selectedMLine ? new MLineWrap(selectedMLine) : null;
-    }
-
-    /**
-     * Converts the currently stored SDP state in this instance to raw text SDP
-     * format.
-     * @return {string}
-     */
-    toRawSDP() {
-        return transform.write(this.parsedSDP);
-    }
 }
 
 /**
@@ -399,3 +344,56 @@ class MLineWrap {
         this.ssrcGroups.push(group);
     }
 }
+
+/**
+ * Utility class for SDP manipulation using the 'sdp-transform' library.
+ *
+ * Typical use usage scenario:
+ *
+ * const transformer = new SdpTransformWrap(rawSdp);
+ * const videoMLine = transformer.selectMedia('video);
+ * if (videoMLine) {
+ *     videoMLiner.addSSRCAttribute({
+ *         id: 2342343,
+ *         attribute: "cname",
+ *         value: "someCname"
+ *     });
+ *     rawSdp = transformer.toRawSdp();
+ * }
+ */
+export class SdpTransformWrap {
+
+    /**
+     * Creates new instance and parses the raw SDP into objects using
+     * 'sdp-transform' lib.
+     * @param {string} rawSDP the SDP in raw text format.
+     */
+    constructor(rawSDP) {
+        this.parsedSDP = transform.parse(rawSDP);
+    }
+
+    /**
+     * Selects the first media SDP of given name.
+     * @param {string} mediaType the name of the media e.g. 'audio', 'video',
+     * 'data'.
+     * @return {MLineWrap|null} return {@link MLineWrap} instance for the media
+     * line or <tt>null</tt> if not found. The object returned references
+     * the underlying SDP state held by this <tt>SdpTransformWrap</tt> instance
+     * (it's not a copy).
+     */
+    selectMedia(mediaType) {
+        const selectedMLine
+            = this.parsedSDP.media.find(mLine => mLine.type === mediaType);
+
+        return selectedMLine ? new MLineWrap(selectedMLine) : null;
+    }
+
+    /**
+     * Converts the currently stored SDP state in this instance to raw text SDP
+     * format.
+     * @return {string}
+     */
+    toRawSDP() {
+        return transform.write(this.parsedSDP);
+    }
+}
