
Add MediaType

paweldomas committed 0f3cd3b873 to master, 9 years ago

JitsiMeetJS.js (+2, -1)

@@ -7,6 +7,7 @@ var JitsiConferenceErrors = require("./JitsiConferenceErrors");
 var JitsiTrackEvents = require("./JitsiTrackEvents");
 var JitsiTrackErrors = require("./JitsiTrackErrors");
 var Logger = require("jitsi-meet-logger");
+var MediaType = require("./service/RTC/MediaType");
 var RTC = require("./modules/RTC/RTC");
 var RTCUIHelper = require("./modules/RTC/RTCUIHelper");
 var Statistics = require("./modules/statistics/statistics");
@@ -102,7 +103,7 @@ var LibJitsiMeet = {
                     for(var i = 0; i < tracks.length; i++) {
                         var track = tracks[i];
                         var mStream = track.getOriginalStream();
-                        if(track.getType() === "audio"){
+                        if(track.getType() === MediaType.AUDIO){
                             Statistics.startLocalStats(mStream,
                                 track.setAudioLevel.bind(track));
                             track.addEventListener(

modules/RTC/JitsiLocalTrack.js (+2, -2)

@@ -95,7 +95,7 @@ JitsiLocalTrack.prototype._setMute = function (mute, resolve, reject) {
         resolve();
         return;
     }
-    var isAudio = this.type === JitsiTrack.AUDIO;
+    var isAudio = this.isAudioTrack();
     this.dontFireRemoveEvent = false;

     var setStreamToNull = false;
@@ -220,7 +220,7 @@ JitsiLocalTrack.prototype.isMuted = function () {
     if (!this.stream)
         return true;
     var tracks = [];
-    var isAudio = this.type === JitsiTrack.AUDIO;
+    var isAudio = this.isAudioTrack();
     if (isAudio) {
         tracks = this.stream.getAudioTracks();
     } else {

modules/RTC/JitsiRemoteTrack.js (+2, -2)

@@ -20,8 +20,8 @@ function JitsiRemoteTrack(RTC, data, sid, ssrc) {
     this.videoType = data.videoType;
     this.ssrc = ssrc;
     this.muted = false;
-    if((this.type === JitsiTrack.AUDIO && data.audiomuted)
-      || (this.type === JitsiTrack.VIDEO && data.videomuted)) {
+    if((this.isAudioTrack() && data.audiomuted)
+      || (this.isVideoTrack() && data.videomuted)) {
        this.muted = true;
    }
 }

modules/RTC/JitsiTrack.js (+6, -17)

@@ -5,6 +5,7 @@ var RTCEvents = require("../../service/RTC/RTCEvents");
 var RTCUtils = require("./RTCUtils");
 var JitsiTrackEvents = require("../../JitsiTrackEvents");
 var EventEmitter = require("events");
+var MediaType = require("../../service/RTC/MediaType");

 /**
  * This implements 'onended' callback normally fired by WebRTC after the stream
@@ -63,8 +64,8 @@ function JitsiTrack(rtc, stream, streamInactiveHandler, jitsiTrackType)
     this.eventEmitter = new EventEmitter();
     this.audioLevel = -1;
     this.type = jitsiTrackType || ((this.stream.getVideoTracks().length > 0)?
-        JitsiTrack.VIDEO : JitsiTrack.AUDIO);
-    if(this.type == JitsiTrack.AUDIO) {
+        MediaType.VIDEO : MediaType.AUDIO);
+    if(this.isAudioTrack()) {
         this._getTracks = function () {
             return this.stream? this.stream.getAudioTracks() : [];
         }.bind(this);
@@ -82,18 +83,6 @@ function JitsiTrack(rtc, stream, streamInactiveHandler, jitsiTrackType)
     }
 }

-/**
- * JitsiTrack video type.
- * @type {string}
- */
-JitsiTrack.VIDEO = "video";
-
-/**
- * JitsiTrack audio type.
- * @type {string}
- */
-JitsiTrack.AUDIO = "audio";
-
 /**
  * Returns the type (audio or video) of this track.
  */
@@ -105,14 +94,14 @@ JitsiTrack.prototype.getType = function() {
  * Check if this is audiotrack.
  */
 JitsiTrack.prototype.isAudioTrack = function () {
-    return this.getType() === JitsiTrack.AUDIO;
+    return this.getType() === MediaType.AUDIO;
 };

 /**
  * Check if this is videotrack.
  */
 JitsiTrack.prototype.isVideoTrack = function () {
-    return this.getType() === JitsiTrack.VIDEO;
+    return this.getType() === MediaType.VIDEO;
 };

 /**
@@ -128,7 +117,7 @@ JitsiTrack.prototype.getOriginalStream = function() {
  * @returns {string}
  */
 JitsiTrack.prototype.getUsageLabel = function () {
-    if (this.type == JitsiTrack.AUDIO) {
+    if (this.isAudioTrack()) {
         return "mic";
     } else {
         return this.videoType ? this.videoType : "default";

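For context (not part of the commit), a minimal sketch of how callers are expected to check a track's kind after this change: go through the isAudioTrack()/isVideoTrack() helpers, or compare getType() against the shared MediaType values, instead of the removed JitsiTrack.AUDIO/JitsiTrack.VIDEO constants. The describeTrack helper below is hypothetical; only the MediaType values and the JitsiTrack method names come from the diff.

    // Hypothetical helper, shown only to illustrate the intended call pattern.
    var MediaType = require("./service/RTC/MediaType");

    function describeTrack(track) {
        // Prefer the helpers over raw string comparisons or the removed
        // JitsiTrack.AUDIO / JitsiTrack.VIDEO constants.
        if (track.isAudioTrack()) {
            return "audio track (" + MediaType.AUDIO + ")";
        }
        if (track.isVideoTrack()) {
            return "video track (" + MediaType.VIDEO + ")";
        }
        // getType() still exposes the raw type string when needed.
        return "unknown track type: " + track.getType();
    }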
modules/RTC/RTC.js (+19, -17)

@@ -6,14 +6,14 @@ var JitsiTrack = require("./JitsiTrack");
 var JitsiLocalTrack = require("./JitsiLocalTrack.js");
 var DataChannels = require("./DataChannels");
 var JitsiRemoteTrack = require("./JitsiRemoteTrack.js");
-var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
+var MediaType = require("../../service/RTC/MediaType");
 var RTCEvents = require("../../service/RTC/RTCEvents.js");

 function createLocalTracks(streams, options) {
     var newStreams = []
     var deviceId = null;
     for (var i = 0; i < streams.length; i++) {
-        if (streams[i].type === 'audio') {
+        if (streams[i].type === MediaType.AUDIO) {
           deviceId = options.micDeviceId;
         } else if (streams[i].videoType === 'camera'){
           deviceId = options.cameraDeviceId;
@@ -40,31 +40,31 @@ function RTC(room, options) {
             // If there is no video track, but we receive it is muted,
             // we need to create a dummy track which we will mute, so we can
             // notify interested about the muting
-            if(!self.remoteStreams[from][JitsiTrack.VIDEO]) {
+            if(!self.remoteStreams[from][MediaType.VIDEO]) {
                 var track = self.createRemoteStream(
                     {peerjid:room.roomjid + "/" + from,
                      videoType:"camera",
-                     jitsiTrackType:JitsiTrack.VIDEO},
+                     jitsiTrackType: MediaType.VIDEO},
                     null, null);
                 self.eventEmitter
                     .emit(RTCEvents.FAKE_VIDEO_TRACK_CREATED, track);
             }

-            self.remoteStreams[from][JitsiTrack.VIDEO]
+            self.remoteStreams[from][MediaType.VIDEO]
                 .setMute(values.value == "true");
         }
     });
     room.addPresenceListener("audiomuted", function (values, from) {
         if(self.remoteStreams[from]) {
-            self.remoteStreams[from][JitsiTrack.AUDIO]
+            self.remoteStreams[from][MediaType.AUDIO]
                 .setMute(values.value == "true");
         }
     });
     room.addPresenceListener("videoType", function(data, from) {
         if(!self.remoteStreams[from] ||
-            (!self.remoteStreams[from][JitsiTrack.VIDEO]))
+            (!self.remoteStreams[from][MediaType.VIDEO]))
             return;
-        self.remoteStreams[from][JitsiTrack.VIDEO]._setVideoType(data.value);
+        self.remoteStreams[from][MediaType.VIDEO]._setVideoType(data.value);
     });
 }

@@ -98,7 +98,7 @@ RTC.prototype.onIncommingCall = function(event) {
         {
             var ssrcInfo = null;
             if(this.localStreams[i].isMuted() &&
-                this.localStreams[i].getType() === "video") {
+                this.localStreams[i].getType() === MediaType.VIDEO) {
                 /**
                  * Handles issues when the stream is added before the peerconnection is created.
                  * The peerconnection is created when second participant enters the call. In
@@ -194,7 +194,7 @@ RTC.prototype.setAudioMute = function (value) {
     var mutePromises = [];
     for(var i = 0; i < this.localStreams.length; i++) {
         var stream = this.localStreams[i];
-        if(stream.getType() !== "audio") {
+        if(stream.getType() !== MediaType.AUDIO) {
             continue;
         }
         // this is a Promise
@@ -227,7 +227,7 @@ RTC.prototype.createRemoteStream = function (data, sid, thessrc) {
     if(!this.remoteStreams[resource]) {
         this.remoteStreams[resource] = {};
     }
-    this.remoteStreams[resource][remoteStream.type]= remoteStream;
+    this.remoteStreams[resource][remoteStream.getType()]= remoteStream;
     return remoteStream;
 };

@@ -314,8 +314,10 @@ RTC.prototype.switchVideoStreams = function (newStream) {
 RTC.prototype.setAudioLevel = function (resource, audioLevel) {
     if(!resource)
         return;
-    if(this.remoteStreams[resource] && this.remoteStreams[resource][JitsiTrack.AUDIO])
-        this.remoteStreams[resource][JitsiTrack.AUDIO].setAudioLevel(audioLevel);
+    if(this.remoteStreams[resource] &&
+        this.remoteStreams[resource][MediaType.AUDIO]) {
+        this.remoteStreams[resource][MediaType.AUDIO].setAudioLevel(audioLevel);
+    }
 };

 /**
@@ -331,10 +333,10 @@ RTC.prototype.getResourceBySSRC = function (ssrc) {

     var resultResource = null;
     $.each(this.remoteStreams, function (resource, remoteTracks) {
-        if((remoteTracks[JitsiTrack.AUDIO]
-                && remoteTracks[JitsiTrack.AUDIO].getSSRC() == ssrc)
-            || (remoteTracks[JitsiTrack.VIDEO]
-                && remoteTracks[JitsiTrack.VIDEO].getSSRC() == ssrc))
+        if((remoteTracks[MediaType.AUDIO]
+                && remoteTracks[MediaType.AUDIO].getSSRC() == ssrc)
+            || (remoteTracks[MediaType.VIDEO]
+                && remoteTracks[MediaType.VIDEO].getSSRC() == ssrc))
             resultResource = resource;
     });


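All of the RTC.js changes above touch the per-participant remoteStreams map, which holds at most one remote track per MediaType value. A rough bookkeeping sketch under that assumption (storeRemoteTrack and getRemoteAudioTrack are made-up helpers, not code added by this commit):

    // Illustrative only: mirrors the map shape used by RTC.js, not its actual code.
    var MediaType = require("../../service/RTC/MediaType");

    var remoteStreams = {};

    function storeRemoteTrack(resource, remoteTrack) {
        if (!remoteStreams[resource]) {
            remoteStreams[resource] = {};
        }
        // getType() returns MediaType.AUDIO or MediaType.VIDEO, so the audio
        // and video tracks of one participant occupy separate slots.
        remoteStreams[resource][remoteTrack.getType()] = remoteTrack;
    }

    function getRemoteAudioTrack(resource) {
        var tracks = remoteStreams[resource];
        return tracks ? tracks[MediaType.AUDIO] : undefined;
    }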
modules/RTC/RTCUtils.js (+4, -3)

@@ -14,6 +14,7 @@ var SDPUtil = require("../xmpp/SDPUtil");
 var EventEmitter = require("events");
 var screenObtainer = require("./ScreenObtainer");
 var JitsiTrackErrors = require("../../JitsiTrackErrors");
+var MediaType = require("../../service/RTC/MediaType");

 var eventEmitter = new EventEmitter();

@@ -407,13 +408,13 @@ function handleLocalStream(streams, resolution) {

     if (desktopStream)
         res.push({stream: desktopStream,
-            type: "video", videoType: "desktop"});
+            type: MediaType.VIDEO, videoType: "desktop"});

     if(audioStream)
-        res.push({stream: audioStream, type: "audio", videoType: null});
+        res.push({stream: audioStream, type: MediaType.AUDIO, videoType: null});

     if(videoStream)
-        res.push({stream: videoStream, type: "video", videoType: "camera",
+        res.push({stream: videoStream, type: MediaType.VIDEO, videoType: "camera",
             resolution: resolution});

     return res;

service/RTC/MediaStreamTypes.js (+0, -6)

@@ -1,6 +0,0 @@
-var MediaStreamType = {
-    VIDEO_TYPE: "Video",
-
-    AUDIO_TYPE: "Audio"
-};
-module.exports = MediaStreamType;

service/RTC/MediaType.js (+15, -0)

@@ -0,0 +1,15 @@
+/**
+ * Enumeration of RTC media stream types
+ * @type {{AUDIO: string, VIDEO: string}}
+ */
+var MediaType = {
+    /**
+     * The audio type.
+     */
+    AUDIO: "audio",
+    /**
+     * The video type.
+     */
+    VIDEO: "video"
+};
+module.exports = MediaType;

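Note that the deleted MediaStreamTypes enum used capitalized values ("Audio"/"Video"), while the new MediaType values are the same lowercase strings the code was already comparing against (for example the type field built in RTCUtils), so existing string-typed data keeps matching. A minimal consumption sketch, with a made-up incomingType value:

    // Illustrative snippet; `incomingType` stands in for a type string
    // produced elsewhere in the codebase.
    var MediaType = require("./service/RTC/MediaType");

    var incomingType = "audio";

    console.log(incomingType === MediaType.AUDIO); // true
    console.log(MediaType.VIDEO);                  // "video"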