@@ -1,4 +1,4 @@
-(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.APP = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
+!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.APP=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
 /* jshint -W117 */
 /* application specific logic */
 
@@ -578,12 +578,15 @@ module.exports = DataChannels;
 var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
 
 
-function LocalStream(stream, type, eventEmitter, videoType)
+function LocalStream(stream, type, eventEmitter, videoType, isGUMStream)
 {
     this.stream = stream;
     this.eventEmitter = eventEmitter;
     this.type = type;
     this.videoType = videoType;
+    this.isGUMStream = true;
+    if(isGUMStream === false)
+        this.isGUMStream = isGUMStream;
     var self = this;
     if(type == "audio")
     {
@@ -614,26 +617,14 @@ LocalStream.prototype.getOriginalStream = function()
 }
 
 LocalStream.prototype.isAudioStream = function () {
-    return (this.stream.getAudioTracks() && this.stream.getAudioTracks().length > 0);
-};
-
-LocalStream.prototype.mute = function()
-{
-    var ismuted = false;
-    var tracks = this.getTracks();
-
-    for (var idx = 0; idx < tracks.length; idx++) {
-        ismuted = !tracks[idx].enabled;
-        tracks[idx].enabled = ismuted;
-    }
-    return ismuted;
+    return this.type === "audio";
 };
 
 LocalStream.prototype.setMute = function(mute)
 {
 
-    if(window.location.protocol != "https:" ||
-        this.isAudioStream() || this.videoType === "screen")
+    if((window.location.protocol != "https:" && this.isGUMStream) ||
+        (this.isAudioStream() && this.isGUMStream) || this.videoType === "screen")
     {
         var tracks = this.getTracks();
 
@@ -649,9 +640,18 @@ LocalStream.prototype.setMute = function(mute)
     }
     else
     {
-        APP.RTC.rtcUtils.obtainAudioAndVideoPermissions(["video"],
+        var self = this;
+        APP.RTC.rtcUtils.obtainAudioAndVideoPermissions(
+            (this.isAudioStream() ? ["audio"] : ["video"]),
             function (stream) {
-                APP.RTC.changeLocalVideo(stream, false, function () {});
+                if(self.isAudioStream())
+                {
+                    APP.RTC.changeLocalAudio(stream, function () {});
+                }
+                else
+                {
+                    APP.RTC.changeLocalVideo(stream, false, function () {});
+                }
         });
     }
 }
@@ -760,11 +760,41 @@ var UIEvents = require("../../service/UI/UIEvents");
 
 var eventEmitter = new EventEmitter();
 
+
+function getMediaStreamUsage()
+{
+    var result = {
+        audio: 1,
+        video: 1
+    };
+    if( config.startAudioMuted === true)
+        result.audio = 0;
+    if( config.startVideoMuted === true)
+        result.video = 0;
+
+    /** There are some issues with the desktop sharing
+     * when this property is enabled.
+
+    if(result.audio > 0 && result.video > 0)
+        return result;
+    var isSecureConnection = window.location.protocol == "https:";
+
+    if(config.disableEarlyMediaPermissionRequests || !isSecureConnection)
+    {
+        if(result.audio === 0)
+            result.audio = -1;
+        if(result.video === 0)
+            result.video = -1;
+    }**/
+
+    return result;
+}
+
 var RTC = {
     rtcUtils: null,
     devices: {
-        audio: false,
-        video: false
+        audio: true,
+        video: true
     },
     localStreams: [],
     remoteStreams: {},
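For reference, getMediaStreamUsage() above encodes per-device usage as 1 (request the device via getUserMedia and start it normally), 0 (request it but create the local stream muted) and -1 (skip the getUserMedia request entirely; only reachable through the commented-out block). A minimal sketch of the mapping, assuming a config object shaped like the flags the function reads:

    // Assumed config shape; only these flags are consulted above.
    var config = {
        startAudioMuted: true,   // join with the microphone muted
        startVideoMuted: false,  // join with the camera on
        disableEarlyMediaPermissionRequests: false
    };

    // With this config, getMediaStreamUsage() returns { audio: 0, video: 1 }:
    // audio is still requested (so permissions are asked for only once), but
    // the local audio stream is created muted; video is requested and started
    // normally.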
@@ -782,13 +812,15 @@ var RTC = {
 
         eventEmitter.removeListener(eventType, listener);
     },
-    createLocalStream: function (stream, type, change, videoType) {
+    createLocalStream: function (stream, type, change, videoType, isMuted, isGUMStream) {
 
-        var localStream = new LocalStream(stream, type, eventEmitter, videoType);
+        var localStream = new LocalStream(stream, type, eventEmitter, videoType, isGUMStream);
         //in firefox we have only one stream object
         if(this.localStreams.length == 0 ||
             this.localStreams[0].getOriginalStream() != stream)
             this.localStreams.push(localStream);
+        if(isMuted === true)
+            localStream.setMute(false);
 
         if(type == "audio")
         {
@@ -802,7 +834,7 @@ var RTC = {
         if(change)
             eventType = StreamEventTypes.EVENT_TYPE_LOCAL_CHANGED;
 
-        eventEmitter.emit(eventType, localStream);
+        eventEmitter.emit(eventType, localStream, isMuted);
         return localStream;
     },
     removeLocalStream: function (stream) {
@@ -886,7 +918,8 @@ var RTC = {
         APP.UI.addListener(UIEvents.PINNED_ENDPOINT,
             DataChannels.handlePinnedEndpointEvent);
         this.rtcUtils = new RTCUtils(this);
-        this.rtcUtils.obtainAudioAndVideoPermissions();
+        this.rtcUtils.obtainAudioAndVideoPermissions(
+            null, null, getMediaStreamUsage());
     },
     muteRemoteVideoStream: function (jid, value) {
         var stream;
@@ -927,12 +960,20 @@ var RTC = {
                 callback();
             };
         }
-        var videoStream = this.rtcUtils.createVideoStream(stream);
+        var videoStream = this.rtcUtils.createStream(stream, true);
         this.localVideo = this.createLocalStream(videoStream, "video", true, type);
         // Stop the stream to trigger onended event for old stream
         oldStream.stop();
         APP.xmpp.switchStreams(videoStream, oldStream,localCallback);
     },
+    changeLocalAudio: function (stream, callback) {
+        var oldStream = this.localAudio.getOriginalStream();
+        var newStream = this.rtcUtils.createStream(stream);
+        this.localAudio = this.createLocalStream(newStream, "audio", true);
+        // Stop the stream to trigger onended event for old stream
+        oldStream.stop();
+        APP.xmpp.switchStreams(newStream, oldStream, callback, true);
+    },
     /**
      * Checks if video identified by given src is desktop stream.
      * @param videoSrc eg.
@@ -967,7 +1008,7 @@ var RTC = {
         {
             APP.xmpp.sendVideoInfoPresence(mute);
             if(callback)
-                callback();
+                callback(mute);
         }
         else
         {
@@ -1138,6 +1179,8 @@ function RTCUtils(RTCService)
             //
             // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
             // https://github.com/webrtc/samples/issues/302
+            if(!element[0])
+                return;
             element[0].mozSrcObject = stream;
             element[0].play();
         };
@@ -1150,10 +1193,13 @@ function RTCUtils(RTCService)
             return tracks[0].id.replace(/[\{,\}]/g,"");
         };
         this.getVideoSrc = function (element) {
+            if(!element)
+                return null;
             return element.mozSrcObject;
         };
         this.setVideoSrc = function (element, src) {
-            element.mozSrcObject = src;
+            if(element)
+                element.mozSrcObject = src;
         };
         RTCSessionDescription = mozRTCSessionDescription;
         RTCIceCandidate = mozRTCIceCandidate;
@@ -1176,10 +1222,13 @@ function RTCUtils(RTCService)
             return stream.id.replace(/[\{,\}]/g,"");
         };
         this.getVideoSrc = function (element) {
+            if(!element)
+                return null;
             return element.getAttribute("src");
         };
         this.setVideoSrc = function (element, src) {
-            element.setAttribute("src", src);
+            if(element)
+                element.setAttribute("src", src);
         };
         // DTLS should now be enabled by default but..
         this.pc_constraints = {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]};
@@ -1286,20 +1335,43 @@ RTCUtils.prototype.setAvailableDevices = function (um, available) {
  * We ask for audio and video combined stream in order to get permissions and
  * not to ask twice.
  */
-RTCUtils.prototype.obtainAudioAndVideoPermissions = function(devices, callback) {
+RTCUtils.prototype.obtainAudioAndVideoPermissions = function(devices, callback, usageOptions) {
     var self = this;
     // Get AV
 
+    var successCallback = function (stream) {
+        if(callback)
+            callback(stream, usageOptions);
+        else
+            self.successCallback(stream, usageOptions);
+    };
+
     if(!devices)
         devices = ['audio', 'video'];
 
+    var newDevices = [];
+
+
+    if(usageOptions)
+        for(var i = 0; i < devices.length; i++)
+        {
+            var device = devices[i];
+            if(usageOptions[device] !== -1)
+                newDevices.push(device);
+        }
+    else
+        newDevices = devices;
+
+    if(newDevices.length === 0)
+    {
+        successCallback();
+        return;
+    }
+
     this.getUserMediaWithConstraints(
-        devices,
+        newDevices,
         function (stream) {
-            if(callback)
-                callback(stream);
-            else
-                self.successCallback(stream);
+            successCallback(stream);
        },
        function (error) {
            self.errorCallback(error);
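For reference, a minimal usage sketch of the new usageOptions parameter of obtainAudioAndVideoPermissions(): devices whose usage value is -1 are filtered out before the getUserMedia request, and successCallback still fires (without a stream) when nothing is left to request. The rtcUtils handle below is a stand-in for this.rtcUtils / APP.RTC.rtcUtils:

    // Video-only request: audio is marked -1, so it is dropped from the
    // getUserMedia call; the callback receives the stream plus the original
    // usageOptions so the caller can derive the initial mute state.
    rtcUtils.obtainAudioAndVideoPermissions(
        ['audio', 'video'],
        function (stream, usageOptions) {
            console.log('got stream', stream, 'usage', usageOptions);
        },
        { audio: -1, video: 1 });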
@@ -1307,11 +1379,11 @@ RTCUtils.prototype.obtainAudioAndVideoPermissions = function(devices, callback)
         config.resolution || '360');
 }
 
-RTCUtils.prototype.successCallback = function (stream) {
+RTCUtils.prototype.successCallback = function (stream, usageOptions) {
     if(stream)
         console.log('got', stream, stream.getAudioTracks().length,
             stream.getVideoTracks().length);
-    this.handleLocalStream(stream);
+    this.handleLocalStream(stream, usageOptions);
 };
 
 RTCUtils.prototype.errorCallback = function (error) {
@@ -1349,7 +1421,7 @@ RTCUtils.prototype.errorCallback = function (error) {
 
 }
 
-RTCUtils.prototype.handleLocalStream = function(stream)
+RTCUtils.prototype.handleLocalStream = function(stream, usageOptions)
 {
     if(window.webkitMediaStream)
     {
@@ -1369,9 +1441,18 @@ RTCUtils.prototype.handleLocalStream = function(stream)
             }
         }
 
-        this.service.createLocalStream(audioStream, "audio");
+        var audioMuted = (usageOptions && usageOptions.audio != 1),
+            videoMuted = (usageOptions && usageOptions.video != 1);
+
+        var audioGUM = (!usageOptions || usageOptions.audio != -1),
+            videoGUM = (!usageOptions || usageOptions.video != -1);
+
 
-        this.service.createLocalStream(videoStream, "video");
+        this.service.createLocalStream(audioStream, "audio", null, null,
+            audioMuted, audioGUM);
+
+        this.service.createLocalStream(videoStream, "video", null, null,
+            videoMuted, videoGUM);
     }
     else
     {//firefox
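To make the flag derivation above concrete, a small worked example; the values follow directly from the expressions in the hunk, with usageOptions as returned by getMediaStreamUsage():

    // e.g. config.startAudioMuted is set, video is untouched:
    var usageOptions = { audio: 0, video: 1 };

    var audioMuted = (usageOptions && usageOptions.audio != 1),  // true  -> local audio starts muted
        videoMuted = (usageOptions && usageOptions.video != 1);  // false -> local video starts unmuted

    var audioGUM = (!usageOptions || usageOptions.audio != -1),  // true  -> audio was obtained via getUserMedia
        videoGUM = (!usageOptions || usageOptions.video != -1);  // true  -> video was obtained via getUserMedia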
@@ -1380,26 +1461,26 @@ RTCUtils.prototype.handleLocalStream = function(stream)
 
 };
 
-RTCUtils.prototype.createVideoStream = function(stream)
+RTCUtils.prototype.createStream = function(stream, isVideo)
 {
-    var videoStream = null;
+    var newStream = null;
     if(window.webkitMediaStream)
     {
-        videoStream = new webkitMediaStream();
-        if(stream)
+        newStream = new webkitMediaStream();
+        if(newStream)
         {
-            var videoTracks = stream.getVideoTracks();
+            var tracks = (isVideo? stream.getVideoTracks() : stream.getAudioTracks());
 
-            for (i = 0; i < videoTracks.length; i++) {
-                videoStream.addTrack(videoTracks[i]);
+            for (i = 0; i < tracks.length; i++) {
+                newStream.addTrack(tracks[i]);
             }
         }
 
     }
     else
-        videoStream = stream;
+        newStream = stream;
 
-    return videoStream;
+    return newStream;
 };
 
 module.exports = RTCUtils;
@@ -1439,6 +1520,14 @@ var eventEmitter = new EventEmitter();
 var roomName = null;
 
 
+function notifyForInitialMute()
+{
+    if(config.startAudioMuted || config.startVideoMuted)
+    {
+        messageHandler.notify(null, "notify.me", "connected", "notify.muted");
+    }
+}
+
 function setupPrezi()
 {
     $("#reloadPresentationLink").click(function()
@@ -1461,17 +1550,17 @@ function setupToolbars() {
     BottomToolbar.init();
 }
 
-function streamHandler(stream) {
+function streamHandler(stream, isMuted) {
     switch (stream.type)
     {
         case "audio":
-            VideoLayout.changeLocalAudio(stream);
+            VideoLayout.changeLocalAudio(stream, isMuted);
             break;
         case "video":
-            VideoLayout.changeLocalVideo(stream);
+            VideoLayout.changeLocalVideo(stream, isMuted);
             break;
         case "stream":
-            VideoLayout.changeLocalStream(stream);
+            VideoLayout.changeLocalStream(stream, isMuted);
             break;
     }
 }
@@ -1788,6 +1877,8 @@ UI.start = function (init) {
 
     SettingsMenu.init();
 
+    notifyForInitialMute();
+
 };
 
 function chatAddError(errorMessage, originalText)
@@ -1821,6 +1912,9 @@ function onMucJoined(jid, info) {
 
     if (displayName)
         onDisplayNameChanged('localVideoContainer', displayName);
+
+
+    VideoLayout.mucJoined();
 }
 
 function initEtherpad(name) {
@@ -2121,9 +2215,17 @@ UI.toggleAudio = function() {
 /**
  * Sets muted audio state for the local participant.
  */
-UI.setAudioMuted = function (mute) {
-
-    if(!APP.xmpp.setAudioMute(mute, function () {
+UI.setAudioMuted = function (mute, earlyMute) {
+    var audioMute = null;
+    if(earlyMute)
+        audioMute = function (mute, cb) {
+            return APP.xmpp.sendAudioInfoPresence(mute, cb);
+        };
+    else
+        audioMute = function (mute, cb) {
+            return APP.xmpp.setAudioMute(mute, cb);
+        }
+    if(!audioMute(mute, function () {
         VideoLayout.showLocalAudioIndicator(mute);
 
         UIUtil.buttonClick("#mute", "icon-microphone icon-mic-disabled");
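For reference, a sketch of how the new earlyMute flag selects which XMPP call backs the mute: the early path only advertises the muted state in presence (the local stream was already created muted), while the default path toggles the tracks through setAudioMute as before:

    // Local audio was created muted because of config.startAudioMuted,
    // so only presence is updated:
    APP.UI.setAudioMuted(true, true);   // uses APP.xmpp.sendAudioInfoPresence(mute, cb)

    // Regular toolbar toggle once the conference is running:
    APP.UI.setAudioMuted(true);         // uses APP.xmpp.setAudioMute(mute, cb)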
@@ -2171,6 +2273,9 @@ UI.setVideoMuteButtonsState = function (mute) {
     }
 }
 
+
+UI.setVideoMute = setVideoMute;
+
 module.exports = UI;
 
 
@@ -2929,6 +3034,12 @@ function isUserMuted(jid) {
         }
     }
 
+    if(jid && jid == APP.xmpp.myJid())
+    {
+        var localVideo = APP.RTC.localVideo;
+        return (!localVideo || localVideo.isMuted());
+    }
+
     if (!APP.RTC.remoteStreams[jid] || !APP.RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE]) {
         return null;
     }
@@ -3028,8 +3139,9 @@ var Avatar = {
             } else {
                 if (video && video.length > 0) {
                     setVisibility(video, !show);
-                    setVisibility(avatar, show);
                 }
+                setVisibility(avatar, show);
+
             }
         }
     },
@@ -7261,37 +7373,19 @@ var VideoLayout = (function (my) {
             || (lastNEndpointsCache && lastNEndpointsCache.indexOf(resource) !== -1);
     };
 
-    my.changeLocalStream = function (stream) {
-        VideoLayout.changeLocalVideo(stream);
+    my.changeLocalStream = function (stream, isMuted) {
+        VideoLayout.changeLocalVideo(stream, isMuted);
     };
 
-    my.changeLocalAudio = function(stream) {
+    my.changeLocalAudio = function(stream, isMuted) {
+        if(isMuted)
+            APP.UI.setAudioMuted(true, true);
         APP.RTC.attachMediaStream($('#localAudio'), stream.getOriginalStream());
         document.getElementById('localAudio').autoplay = true;
         document.getElementById('localAudio').volume = 0;
-        if (preMuted) {
-            if(!APP.UI.setAudioMuted(true))
-            {
-                preMuted = mute;
-            }
-            preMuted = false;
-        }
     };
 
-    my.changeLocalVideo = function(stream) {
-        var flipX = true;
-        if(stream.videoType == "screen")
-            flipX = false;
-        var localVideo = document.createElement('video');
-        localVideo.id = 'localVideo_' +
-            APP.RTC.getStreamID(stream.getOriginalStream());
-        localVideo.autoplay = true;
-        localVideo.volume = 0; // is it required if audio is separated ?
-        localVideo.oncontextmenu = function () { return false; };
-
-        var localVideoContainer = document.getElementById('localVideoWrapper');
-        localVideoContainer.appendChild(localVideo);
-
+    my.changeLocalVideo = function(stream, isMuted) {
         // Set default display name.
         setDisplayName('localVideoContainer');
 
@@ -7302,7 +7396,7 @@ var VideoLayout = (function (my) {
 
         AudioLevels.updateAudioLevelCanvas(null, VideoLayout);
 
-        var localVideoSelector = $('#' + localVideo.id);
+        var localVideo = null;
 
         function localVideoClick(event) {
             event.stopPropagation();
@@ -7311,9 +7405,7 @@ var VideoLayout = (function (my) {
                 false,
                 APP.xmpp.myResource());
         }
-        // Add click handler to both video and video wrapper elements in case
-        // there's no video.
-        localVideoSelector.click(localVideoClick);
+
         $('#localVideoContainer').click(localVideoClick);
 
         // Add hover handler
@@ -7323,24 +7415,52 @@ var VideoLayout = (function (my) {
             },
             function() {
                 if (!VideoLayout.isLargeVideoVisible()
-                        || APP.RTC.getVideoSrc(localVideo) !== APP.RTC.getVideoSrc($('#largeVideo')[0]))
+                    || APP.RTC.getVideoSrc(localVideo) !== APP.RTC.getVideoSrc($('#largeVideo')[0]))
                     VideoLayout.showDisplayName('localVideoContainer', false);
             }
         );
-        // Add stream ended handler
-        stream.getOriginalStream().onended = function () {
-            localVideoContainer.removeChild(localVideo);
-            VideoLayout.updateRemovedVideo(APP.RTC.getVideoSrc(localVideo));
-        };
+
+        if(isMuted)
+        {
+            APP.UI.setVideoMute(true);
+            return;
+        }
+        var flipX = true;
+        if(stream.videoType == "screen")
+            flipX = false;
+        var localVideo = document.createElement('video');
+        localVideo.id = 'localVideo_' +
+            APP.RTC.getStreamID(stream.getOriginalStream());
+        localVideo.autoplay = true;
+        localVideo.volume = 0; // is it required if audio is separated ?
+        localVideo.oncontextmenu = function () { return false; };
+
+        var localVideoContainer = document.getElementById('localVideoWrapper');
+        localVideoContainer.appendChild(localVideo);
+
+        var localVideoSelector = $('#' + localVideo.id);
+
+        // Add click handler to both video and video wrapper elements in case
+        // there's no video.
+        localVideoSelector.click(localVideoClick);
+
         // Flip video x axis if needed
         flipXLocalVideo = flipX;
         if (flipX) {
             localVideoSelector.addClass("flipVideoX");
         }
+
         // Attach WebRTC stream
         var videoStream = APP.simulcast.getLocalVideoStream();
         APP.RTC.attachMediaStream(localVideoSelector, videoStream);
 
+        // Add stream ended handler
+        stream.getOriginalStream().onended = function () {
+            localVideoContainer.removeChild(localVideo);
+            VideoLayout.updateRemovedVideo(APP.RTC.getVideoSrc(localVideo));
+        };
+
+
         localVideoSrc = APP.RTC.getVideoSrc(localVideo);
 
         var myResourceJid = APP.xmpp.myResource();
@@ -7350,6 +7470,14 @@ var VideoLayout = (function (my) {
 
     };
 
+    my.mucJoined = function () {
+        var myResourceJid = APP.xmpp.myResource();
+
+        if(!largeVideoState.userResourceJid)
+            VideoLayout.updateLargeVideo(localVideoSrc, 0,
+                myResourceJid, true);
+    };
+
     /**
      * Adds or removes icons for not available camera and microphone.
      * @param resourceJid the jid of user
@@ -7417,26 +7545,34 @@ var VideoLayout = (function (my) {
             }
         }
 
+        var src = null, volume = null;
         // mute if localvideo
         if (pick) {
             var container = pick.parentNode;
-            var jid = null;
-            if(container)
-            {
-                if(container.id == "localVideoWrapper")
-                {
-                    jid = APP.xmpp.myResource();
-                }
-                else
-                {
-                    jid = VideoLayout.getPeerContainerResourceJid(container);
-                }
-            }
-
-            VideoLayout.updateLargeVideo(APP.RTC.getVideoSrc(pick), pick.volume, jid);
+            src = APP.RTC.getVideoSrc(pick);
+            volume = pick.volume;
         } else {
             console.warn("Failed to elect large video");
+            container = $('#remoteVideos>span[id!="mixedstream"]:visible:last').get(0);
+
+        }
+
+        var jid = null;
+        if(container)
+        {
+            if(container.id == "localVideoWrapper")
+            {
+                jid = APP.xmpp.myResource();
+            }
+            else
+            {
+                jid = VideoLayout.getPeerContainerResourceJid(container);
+            }
         }
+        else
+            return;
+
+        VideoLayout.updateLargeVideo(src, volume, jid);
     }
 };
 
@@ -7485,11 +7621,10 @@ var VideoLayout = (function (my) {
     /**
      * Updates the large video with the given new video source.
      */
-    my.updateLargeVideo = function(newSrc, vol, resourceJid) {
-        console.log('hover in', newSrc);
-
-        if (APP.RTC.getVideoSrc($('#largeVideo')[0]) !== newSrc) {
+    my.updateLargeVideo = function(newSrc, vol, resourceJid, forceUpdate) {
+        console.log('hover in', newSrc, resourceJid);
 
+        if (APP.RTC.getVideoSrc($('#largeVideo')[0]) !== newSrc || forceUpdate) {
             $('#activeSpeaker').css('visibility', 'hidden');
             // Due to the simulcast the localVideoSrc may have changed when the
             // fadeOut event triggers. In that case the getJidFromVideoSrc and
@@ -7527,7 +7662,6 @@ var VideoLayout = (function (my) {
             largeVideoState.updateInProgress = true;
 
             var doUpdate = function () {
-
                 Avatar.updateActiveSpeakerAvatarSrc(
                     APP.xmpp.findJidFromResource(
                         largeVideoState.userResourceJid));
@@ -8372,10 +8506,9 @@ var VideoLayout = (function (my) {
                 if (videoSpan.classList.contains("dominantspeaker"))
                     videoSpan.classList.remove("dominantspeaker");
             }
-
-            Avatar.showUserAvatar(
-                APP.xmpp.findJidFromResource(resourceJid));
         }
+        Avatar.showUserAvatar(
+            APP.xmpp.findJidFromResource(resourceJid));
     };
 
     /**
@@ -13436,7 +13569,7 @@ JingleSession.prototype._modifySources = function (successCallback, queueCallbac
  * @param oldStream old video stream of this session.
  * @param success_callback callback executed after successful stream switch.
  */
-JingleSession.prototype.switchStreams = function (new_stream, oldStream, success_callback) {
+JingleSession.prototype.switchStreams = function (new_stream, oldStream, success_callback, isAudio) {
 
     var self = this;
 
@@ -13451,7 +13584,8 @@ JingleSession.prototype.switchStreams = function (new_stream, oldStream, success
         self.peerconnection.addStream(new_stream);
     }
 
-    APP.RTC.switchVideoStreams(new_stream, oldStream);
+    if(!isAudio)
+        APP.RTC.switchVideoStreams(new_stream, oldStream);
 
     // Conference is not active
     if(!oldSdp || !self.peerconnection) {
@@ -15877,6 +16011,15 @@ module.exports = function(XMPP, eventEmitter) {
     initPresenceMap: function (myroomjid) {
         this.presMap['to'] = myroomjid;
         this.presMap['xns'] = 'http://jabber.org/protocol/muc';
+        if(APP.RTC.localAudio.isMuted())
+        {
+            this.addAudioInfoToPresence(true);
+        }
+
+        if(APP.RTC.localVideo.isMuted())
+        {
+            this.addVideoInfoToPresence(true);
+        }
     },
     doJoin: function (jid, password) {
         this.myroomjid = jid;
@@ -17328,10 +17471,10 @@ var XMPP = {
     isExternalAuthEnabled: function () {
         return Moderator.isExternalAuthEnabled();
     },
-    switchStreams: function (stream, oldStream, callback) {
+    switchStreams: function (stream, oldStream, callback, isAudio) {
         if (connection && connection.jingle.activecall) {
             // FIXME: will block switchInProgress on true value in case of exception
-            connection.jingle.activecall.switchStreams(stream, oldStream, callback);
+            connection.jingle.activecall.switchStreams(stream, oldStream, callback, isAudio);
         } else {
             // We are done immediately
             console.warn("No conference handler or conference not started yet");
@@ -17339,6 +17482,8 @@ var XMPP = {
         }
     },
     sendVideoInfoPresence: function (mute) {
+        if(!connection)
+            return;
         connection.emuc.addVideoInfoToPresence(mute);
         connection.emuc.sendPresence();
     },
@@ -17382,10 +17527,17 @@ var XMPP = {
         // It is not clear what is the right way to handle multiple tracks.
         // So at least make sure that they are all muted or all unmuted and
        // that we send presence just once.
-        APP.RTC.localAudio.mute();
+        APP.RTC.localAudio.setMute(!mute);
         // isMuted is the opposite of audioEnabled
-        connection.emuc.addAudioInfoToPresence(mute);
-        connection.emuc.sendPresence();
+        this.sendAudioInfoPresence(mute, callback);
+        return true;
+    },
+    sendAudioInfoPresence: function(mute, callback)
+    {
+        if(connection) {
+            connection.emuc.addAudioInfoToPresence(mute);
+            connection.emuc.sendPresence();
+        }
         callback();
         return true;
     },