@@ -6,14 +6,14 @@ var JitsiTrack = require("./JitsiTrack");
 var JitsiLocalTrack = require("./JitsiLocalTrack.js");
 var DataChannels = require("./DataChannels");
 var JitsiRemoteTrack = require("./JitsiRemoteTrack.js");
-var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
+var MediaType = require("../../service/RTC/MediaType");
 var RTCEvents = require("../../service/RTC/RTCEvents.js");
 
 function createLocalTracks(streams, options) {
     var newStreams = []
     var deviceId = null;
     for (var i = 0; i < streams.length; i++) {
-        if (streams[i].type === 'audio') {
+        if (streams[i].type === MediaType.AUDIO) {
             deviceId = options.micDeviceId;
         } else if (streams[i].videoType === 'camera'){
             deviceId = options.cameraDeviceId;
@@ -40,31 +40,31 @@ function RTC(room, options) {
             // If there is no video track, but we receive it is muted,
             // we need to create a dummy track which we will mute, so we can
             // notify interested about the muting
-            if(!self.remoteStreams[from][JitsiTrack.VIDEO]) {
+            if(!self.remoteStreams[from][MediaType.VIDEO]) {
                 var track = self.createRemoteStream(
                     {peerjid:room.roomjid + "/" + from,
                      videoType:"camera",
-                     jitsiTrackType:JitsiTrack.VIDEO},
+                     jitsiTrackType: MediaType.VIDEO},
                     null, null);
                 self.eventEmitter
                     .emit(RTCEvents.FAKE_VIDEO_TRACK_CREATED, track);
             }
 
-            self.remoteStreams[from][JitsiTrack.VIDEO]
+            self.remoteStreams[from][MediaType.VIDEO]
                 .setMute(values.value == "true");
         }
     });
     room.addPresenceListener("audiomuted", function (values, from) {
         if(self.remoteStreams[from]) {
-            self.remoteStreams[from][JitsiTrack.AUDIO]
+            self.remoteStreams[from][MediaType.AUDIO]
                 .setMute(values.value == "true");
         }
     });
     room.addPresenceListener("videoType", function(data, from) {
         if(!self.remoteStreams[from] ||
-            (!self.remoteStreams[from][JitsiTrack.VIDEO]))
+            (!self.remoteStreams[from][MediaType.VIDEO]))
             return;
-        self.remoteStreams[from][JitsiTrack.VIDEO]._setVideoType(data.value);
+        self.remoteStreams[from][MediaType.VIDEO]._setVideoType(data.value);
     });
 }
 
@@ -98,7 +98,7 @@ RTC.prototype.onIncommingCall = function(event) {
     {
         var ssrcInfo = null;
         if(this.localStreams[i].isMuted() &&
-            this.localStreams[i].getType() === "video") {
+            this.localStreams[i].getType() === MediaType.VIDEO) {
             /**
              * Handles issues when the stream is added before the peerconnection is created.
              * The peerconnection is created when second participant enters the call. In
@@ -194,7 +194,7 @@ RTC.prototype.setAudioMute = function (value) {
     var mutePromises = [];
     for(var i = 0; i < this.localStreams.length; i++) {
         var stream = this.localStreams[i];
-        if(stream.getType() !== "audio") {
+        if(stream.getType() !== MediaType.AUDIO) {
             continue;
         }
         // this is a Promise
@@ -227,7 +227,7 @@ RTC.prototype.createRemoteStream = function (data, sid, thessrc) {
     if(!this.remoteStreams[resource]) {
         this.remoteStreams[resource] = {};
     }
-    this.remoteStreams[resource][remoteStream.type]= remoteStream;
+    this.remoteStreams[resource][remoteStream.getType()]= remoteStream;
     return remoteStream;
 };
 
@@ -314,8 +314,10 @@ RTC.prototype.switchVideoStreams = function (newStream) {
 RTC.prototype.setAudioLevel = function (resource, audioLevel) {
     if(!resource)
         return;
-    if(this.remoteStreams[resource] && this.remoteStreams[resource][JitsiTrack.AUDIO])
-        this.remoteStreams[resource][JitsiTrack.AUDIO].setAudioLevel(audioLevel);
+    if(this.remoteStreams[resource] &&
+        this.remoteStreams[resource][MediaType.AUDIO]) {
+        this.remoteStreams[resource][MediaType.AUDIO].setAudioLevel(audioLevel);
+    }
 };
 
 /**
@@ -331,10 +333,10 @@ RTC.prototype.getResourceBySSRC = function (ssrc) {
 
     var resultResource = null;
     $.each(this.remoteStreams, function (resource, remoteTracks) {
-        if((remoteTracks[JitsiTrack.AUDIO]
-            && remoteTracks[JitsiTrack.AUDIO].getSSRC() == ssrc)
-            || (remoteTracks[JitsiTrack.VIDEO]
-            && remoteTracks[JitsiTrack.VIDEO].getSSRC() == ssrc))
+        if((remoteTracks[MediaType.AUDIO]
+            && remoteTracks[MediaType.AUDIO].getSSRC() == ssrc)
+            || (remoteTracks[MediaType.VIDEO]
+            && remoteTracks[MediaType.VIDEO].getSSRC() == ssrc))
             resultResource = resource;
     });
 
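Note (not part of the diff above): a minimal sketch of the MediaType module that this patch newly requires from ../../service/RTC/MediaType. Only the AUDIO and VIDEO constants are used by the changes above; the string values below are assumed to mirror the 'audio' and "video" literals they replace, and the real module may export more.

    // MediaType.js - assumed shape, for illustration only
    module.exports = {
        AUDIO: "audio", // replaces comparisons against the 'audio' literal
        VIDEO: "video"  // replaces comparisons against the "video" literal
    };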