Kaynağa Gözat

Implements RTC module.

master
hristoterezov 10 yıl önce
ebeveyn
işleme
5b34a66cb6

+ 10
- 186
app.js Dosyayı Görüntüle

@@ -4,14 +4,12 @@ var connection = null;
4 4
 var authenticatedUser = false;
5 5
 var authenticationWindow = null;
6 6
 var activecall = null;
7
-var RTC = null;
8 7
 var nickname = null;
9 8
 var sharedKey = '';
10 9
 var focusMucJid = null;
11 10
 var roomUrl = null;
12 11
 var roomName = null;
13 12
 var ssrc2jid = {};
14
-var mediaStreams = {};
15 13
 var bridgeIsDown = false;
16 14
 //TODO: this array must be removed when firefox implement multistream support
17 15
 var notReceivedSSRCs = [];
@@ -62,49 +60,10 @@ var sessionTerminated = false;
62 60
 
63 61
 function init() {
64 62
     Toolbar.setupButtonsFromConfig();
65
-    RTC = setupRTC();
66
-    if (RTC === null) {
67
-        window.location.href = 'webrtcrequired.html';
68
-        return;
69
-    } else if (RTC.browser !== 'chrome' &&
70
-        config.enableFirefoxSupport !== true) {
71
-        window.location.href = 'chromeonly.html';
72
-        return;
73
-    }
74
-
75
-    obtainAudioAndVideoPermissions(function (stream) {
76
-        var audioStream, videoStream;
77
-        if(window.webkitMediaStream)
78
-        {
79
-            var audioStream = new webkitMediaStream();
80
-            var videoStream = new webkitMediaStream();
81
-            var audioTracks = stream.getAudioTracks();
82
-            var videoTracks = stream.getVideoTracks();
83
-            for (var i = 0; i < audioTracks.length; i++) {
84
-                audioStream.addTrack(audioTracks[i]);
85
-            }
86
-
87
-            for (i = 0; i < videoTracks.length; i++) {
88
-                videoStream.addTrack(videoTracks[i]);
89
-            }
90
-            VideoLayout.changeLocalAudio(audioStream);
91
-            statistics.onStreamCreated(audioStream);
92
-
93 63
 
94
-            VideoLayout.changeLocalVideo(videoStream, true);
95
-        }
96
-        else
97
-        {
98
-            VideoLayout.changeLocalStream(stream);
99
-            statistics.onStreamCreated(stream);
100
-
101
-        }
102
-
103
-
104
-
105
-
106
-        maybeDoJoin();
107
-    });
64
+    RTC.addStreamListener(maybeDoJoin, StreamEventTypes.EVENT_TYPE_LOCAL_CREATED);
65
+    RTC.addStreamListener(VideoLayout.onLocalStreamCreated, StreamEventTypes.EVENT_TYPE_LOCAL_CREATED)
66
+    RTC.start();
108 67
 
109 68
     var jid = document.getElementById('jid').value || config.hosts.anonymousdomain || config.hosts.domain || window.location.hostname;
110 69
     connect(jid);
@@ -132,7 +91,7 @@ function connect(jid, password) {
132 91
     if (connection.disco) {
133 92
         // for chrome, add multistream cap
134 93
     }
135
-    connection.jingle.pc_constraints = RTC.pc_constraints;
94
+    connection.jingle.pc_constraints = RTC.getPCConstraints();
136 95
     if (config.useIPv6) {
137 96
         // https://code.google.com/p/webrtc/issues/detail?id=2828
138 97
         if (!connection.jingle.pc_constraints.optional) connection.jingle.pc_constraints.optional = [];
@@ -175,42 +134,7 @@ function connect(jid, password) {
175 134
     });
176 135
 }
177 136
 
178
-/**
179
- * We ask for audio and video combined stream in order to get permissions and
180
- * not to ask twice.
181
- */
182
-function obtainAudioAndVideoPermissions(callback) {
183
-    // Get AV
184
-    var cb = function (stream) {
185
-        console.log('got', stream, stream.getAudioTracks().length, stream.getVideoTracks().length);
186
-        callback(stream);
187
-        trackUsage('localMedia', {
188
-            audio: stream.getAudioTracks().length,
189
-            video: stream.getVideoTracks().length
190
-        });
191
-    }
192
-    getUserMediaWithConstraints(
193
-        ['audio', 'video'],
194
-        cb,
195
-        function (error) {
196
-            console.error('failed to obtain audio/video stream - trying audio only', error);
197
-            getUserMediaWithConstraints(
198
-                ['audio'],
199
-                cb,
200
-                function (error) {
201
-                    console.error('failed to obtain audio/video stream - stop', error);
202
-                    trackUsage('localMediaError', {
203
-                        media: error.media || 'video',
204
-                        name : error.name
205
-                    });
206
-                    messageHandler.showError("Error",
207
-                        "Failed to obtain permissions to use the local microphone" +
208
-                            "and/or camera.");
209
-                }
210
-            );
211
-        },
212
-        config.resolution || '360');
213
-}
137
+
214 138
 
215 139
 function maybeDoJoin() {
216 140
     if (connection && connection.connected && Strophe.getResourceFromJid(connection.jid) // .connected is true while connecting?
@@ -382,7 +306,7 @@ function waitForPresence(data, sid) {
382 306
     }
383 307
 
384 308
     //TODO: this code should be removed when firefox implement multistream support
385
-    if(RTC.browser == "firefox")
309
+    if(RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_FIREFOX)
386 310
     {
387 311
         if((notReceivedSSRCs.length == 0) ||
388 312
             !ssrc2jid[notReceivedSSRCs[notReceivedSSRCs.length - 1]])
@@ -402,15 +326,7 @@ function waitForPresence(data, sid) {
402 326
         }
403 327
     }
404 328
 
405
-    // NOTE(gp) now that we have simulcast, a media stream can have more than 1
406
-    // ssrc. We should probably take that into account in our MediaStream
407
-    // wrapper.
408
-    var mediaStream = new MediaStream(data, sid, thessrc);
409
-    var jid = data.peerjid || connection.emuc.myroomjid;
410
-    if(!mediaStreams[jid]) {
411
-        mediaStreams[jid] = {};
412
-    }
413
-    mediaStreams[jid][mediaStream.type] = mediaStream;
329
+    RTC.createRemoteStream(data, sid, thessrc);
414 330
 
415 331
     var container;
416 332
     var remotes = document.getElementById('remoteVideos');
@@ -569,13 +485,8 @@ $(document).bind('callincoming.jingle', function (event, sid) {
569 485
     // TODO: do we check activecall == null?
570 486
     activecall = sess;
571 487
 
572
-    statistics.onConfereceCreated(sess);
573
-
574
-    // Bind data channel listener in case we're a regular participant
575
-    if (config.openSctp)
576
-    {
577
-        bindDataChannelListener(sess.peerconnection);
578
-    }
488
+    statistics.onConferenceCreated(sess);
489
+    RTC.onConferenceCreated(sess);
579 490
 
580 491
     // TODO: check affiliation and/or role
581 492
     console.log('emuc data for', sess.peerjid, connection.emuc.members[sess.peerjid]);
@@ -588,15 +499,7 @@ $(document).bind('callincoming.jingle', function (event, sid) {
588 499
 $(document).bind('conferenceCreated.jingle', function (event, focus)
589 500
 {
590 501
     statistics.onConfereceCreated(getConferenceHandler());
591
-});
592
-
593
-$(document).bind('conferenceCreated.jingle', function (event, focus)
594
-{
595
-    // Bind data channel listener in case we're the focus
596
-    if (config.openSctp)
597
-    {
598
-        bindDataChannelListener(focus.peerconnection);
599
-    }
502
+    RTC.onConfereceCreated(focus);
600 503
 });
601 504
 
602 505
 $(document).bind('setLocalDescription.jingle', function (event, sid) {
@@ -1622,29 +1525,9 @@ $(document).on('webkitfullscreenchange mozfullscreenchange fullscreenchange',
1622 1525
             document.mozFullScreen ||
1623 1526
             document.webkitIsFullScreen;
1624 1527
 
1625
-        if (isFullScreen) {
1626
-            setView("fullscreen");
1627
-        }
1628
-        else {
1629
-            setView("default");
1630
-        }
1631 1528
     }
1632 1529
 );
1633 1530
 
1634
-/**
1635
- * Sets the current view.
1636
- */
1637
-function setView(viewName) {
1638
-//    if (viewName == "fullscreen") {
1639
-//        document.getElementById('videolayout_fullscreen').disabled  = false;
1640
-//        document.getElementById('videolayout_default').disabled  = true;
1641
-//    }
1642
-//    else {
1643
-//        document.getElementById('videolayout_default').disabled  = false;
1644
-//        document.getElementById('videolayout_fullscreen').disabled  = true;
1645
-//    }
1646
-}
1647
-
1648 1531
 $(document).bind('error.jingle',
1649 1532
     function (event, session, error)
1650 1533
     {
@@ -1662,54 +1545,6 @@ $(document).bind('fatalError.jingle',
1662 1545
     }
1663 1546
 );
1664 1547
 
1665
-function onSelectedEndpointChanged(userJid)
1666
-{
1667
-    console.log('selected endpoint changed: ', userJid);
1668
-    if (_dataChannels && _dataChannels.length != 0)
1669
-    {
1670
-        _dataChannels.some(function (dataChannel) {
1671
-            if (dataChannel.readyState == 'open')
1672
-            {
1673
-                dataChannel.send(JSON.stringify({
1674
-                    'colibriClass': 'SelectedEndpointChangedEvent',
1675
-                    'selectedEndpoint': (!userJid || userJid == null)
1676
-                        ? null : userJid
1677
-                }));
1678
-
1679
-                return true;
1680
-            }
1681
-        });
1682
-    }
1683
-}
1684
-
1685
-$(document).bind("selectedendpointchanged", function(event, userJid) {
1686
-    onSelectedEndpointChanged(userJid);
1687
-});
1688
-
1689
-function onPinnedEndpointChanged(userJid)
1690
-{
1691
-    console.log('pinned endpoint changed: ', userJid);
1692
-    if (_dataChannels && _dataChannels.length != 0)
1693
-    {
1694
-        _dataChannels.some(function (dataChannel) {
1695
-            if (dataChannel.readyState == 'open')
1696
-            {
1697
-                dataChannel.send(JSON.stringify({
1698
-                    'colibriClass': 'PinnedEndpointChangedEvent',
1699
-                    'pinnedEndpoint': (!userJid || userJid == null)
1700
-                        ? null : Strophe.getResourceFromJid(userJid)
1701
-                }));
1702
-
1703
-                return true;
1704
-            }
1705
-        });
1706
-    }
1707
-}
1708
-
1709
-$(document).bind("pinnedendpointchanged", function(event, userJid) {
1710
-    onPinnedEndpointChanged(userJid);
1711
-});
1712
-
1713 1548
 function callSipButtonClicked()
1714 1549
 {
1715 1550
     var defaultNumber
@@ -1768,14 +1603,3 @@ function hangup() {
1768 1603
     );
1769 1604
 
1770 1605
 }
1771
-
1772
-$(document).on('videomuted.muc', function(event, jid, value) {
1773
-    if(mediaStreams[jid] && mediaStreams[jid][MediaStream.VIDEO_TYPE]) {
1774
-        var stream = mediaStreams[jid][MediaStream.VIDEO_TYPE];
1775
-        var isMuted = (value === "true");
1776
-        if (isMuted != stream.muted) {
1777
-            stream.muted = isMuted;
1778
-            Avatar.showUserAvatar(jid, isMuted);
1779
-        }
1780
-    }
1781
-});

+ 2
- 2
avatar.js Dosyayı Görüntüle

@@ -129,10 +129,10 @@ var Avatar = (function(my) {
129 129
             }
130 130
         }
131 131
 
132
-        if (!mediaStreams[jid] || !mediaStreams[jid][MediaStream.VIDEO_TYPE]) {
132
+        if (!RTC.remoteStreams[jid] || !RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE]) {
133 133
             return null;
134 134
         }
135
-        return mediaStreams[jid][MediaStream.VIDEO_TYPE].muted;
135
+        return RTC.remoteStreams[jid][MediaStream.VIDEO_TYPE].muted;
136 136
     }
137 137
 
138 138
     function getGravatarUrl(id, size) {

+ 0
- 180
data_channels.js Dosyayı Görüntüle

@@ -1,180 +0,0 @@
1
-/* global connection, Strophe, updateLargeVideo, focusedVideoSrc*/
2
-
3
-// cache datachannels to avoid garbage collection
4
-// https://code.google.com/p/chromium/issues/detail?id=405545
5
-var _dataChannels = [];
6
-
7
-/**
8
- * Callback triggered by PeerConnection when new data channel is opened
9
- * on the bridge.
10
- * @param event the event info object.
11
- */
12
-
13
-function onDataChannel(event)
14
-{
15
-    var dataChannel = event.channel;
16
-
17
-    dataChannel.onopen = function ()
18
-    {
19
-        console.info("Data channel opened by the Videobridge!", dataChannel);
20
-
21
-        // Code sample for sending string and/or binary data
22
-        // Sends String message to the bridge
23
-        //dataChannel.send("Hello bridge!");
24
-        // Sends 12 bytes binary message to the bridge
25
-        //dataChannel.send(new ArrayBuffer(12));
26
-
27
-        // when the data channel becomes available, tell the bridge about video
28
-        // selections so that it can do adaptive simulcast,
29
-        var userJid = VideoLayout.getLargeVideoState().userJid;
30
-        // we want the notification to trigger even if userJid is undefined,
31
-        // or null.
32
-        onSelectedEndpointChanged(userJid);
33
-    };
34
-
35
-    dataChannel.onerror = function (error)
36
-    {
37
-        console.error("Data Channel Error:", error, dataChannel);
38
-    };
39
-
40
-    dataChannel.onmessage = function (event)
41
-    {
42
-        var data = event.data;
43
-        // JSON
44
-        var obj;
45
-
46
-        try
47
-        {
48
-            obj = JSON.parse(data);
49
-        }
50
-        catch (e)
51
-        {
52
-            console.error(
53
-                "Failed to parse data channel message as JSON: ",
54
-                data,
55
-                dataChannel);
56
-        }
57
-        if (('undefined' !== typeof(obj)) && (null !== obj))
58
-        {
59
-            var colibriClass = obj.colibriClass;
60
-
61
-            if ("DominantSpeakerEndpointChangeEvent" === colibriClass)
62
-            {
63
-                // Endpoint ID from the Videobridge.
64
-                var dominantSpeakerEndpoint = obj.dominantSpeakerEndpoint;
65
-
66
-                console.info(
67
-                    "Data channel new dominant speaker event: ",
68
-                    dominantSpeakerEndpoint);
69
-                $(document).trigger(
70
-                    'dominantspeakerchanged',
71
-                    [dominantSpeakerEndpoint]);
72
-            }
73
-            else if ("InLastNChangeEvent" === colibriClass)
74
-            {
75
-                var oldValue = obj.oldValue;
76
-                var newValue = obj.newValue;
77
-                // Make sure that oldValue and newValue are of type boolean.
78
-                var type;
79
-
80
-                if ((type = typeof oldValue) !== 'boolean') {
81
-                    if (type === 'string') {
82
-                        oldValue = (oldValue == "true");
83
-                    } else {
84
-                        oldValue = new Boolean(oldValue).valueOf();
85
-                    }
86
-                }
87
-                if ((type = typeof newValue) !== 'boolean') {
88
-                    if (type === 'string') {
89
-                        newValue = (newValue == "true");
90
-                    } else {
91
-                        newValue = new Boolean(newValue).valueOf();
92
-                    }
93
-                }
94
-                $(document).trigger('inlastnchanged', [oldValue, newValue]);
95
-            }
96
-            else if ("LastNEndpointsChangeEvent" === colibriClass)
97
-            {
98
-                // The new/latest list of last-n endpoint IDs.
99
-                var lastNEndpoints = obj.lastNEndpoints;
100
-                // The list of endpoint IDs which are entering the list of
101
-                // last-n at this time i.e. were not in the old list of last-n
102
-                // endpoint IDs.
103
-                var endpointsEnteringLastN = obj.endpointsEnteringLastN;
104
-                var stream = obj.stream;
105
-
106
-                console.log(
107
-                    "Data channel new last-n event: ",
108
-                    lastNEndpoints, endpointsEnteringLastN, obj);
109
-                $(document).trigger(
110
-                    'lastnchanged',
111
-                    [lastNEndpoints, endpointsEnteringLastN, stream]);
112
-            }
113
-            else if ("SimulcastLayersChangedEvent" === colibriClass)
114
-            {
115
-                $(document).trigger(
116
-                    'simulcastlayerschanged',
117
-                    [obj.endpointSimulcastLayers]);
118
-            }
119
-            else if ("SimulcastLayersChangingEvent" === colibriClass)
120
-            {
121
-                $(document).trigger(
122
-                    'simulcastlayerschanging',
123
-                    [obj.endpointSimulcastLayers]);
124
-            }
125
-            else if ("StartSimulcastLayerEvent" === colibriClass)
126
-            {
127
-                $(document).trigger('startsimulcastlayer', obj.simulcastLayer);
128
-            }
129
-            else if ("StopSimulcastLayerEvent" === colibriClass)
130
-            {
131
-                $(document).trigger('stopsimulcastlayer', obj.simulcastLayer);
132
-            }
133
-            else
134
-            {
135
-                console.debug("Data channel JSON-formatted message: ", obj);
136
-            }
137
-        }
138
-    };
139
-
140
-    dataChannel.onclose = function ()
141
-    {
142
-        console.info("The Data Channel closed", dataChannel);
143
-        var idx = _dataChannels.indexOf(dataChannel);
144
-        if (idx > -1) 
145
-            _dataChannels = _dataChannels.splice(idx, 1);
146
-    };
147
-    _dataChannels.push(dataChannel);
148
-}
149
-
150
-/**
151
- * Binds "ondatachannel" event listener to given PeerConnection instance.
152
- * @param peerConnection WebRTC peer connection instance.
153
- */
154
-function bindDataChannelListener(peerConnection)
155
-{
156
-    peerConnection.ondatachannel = onDataChannel;
157
-
158
-    // Sample code for opening new data channel from Jitsi Meet to the bridge.
159
-    // Although it's not a requirement to open separate channels from both bridge
160
-    // and peer as single channel can be used for sending and receiving data.
161
-    // So either channel opened by the bridge or the one opened here is enough
162
-    // for communication with the bridge.
163
-/*
164
-    var dataChannelOptions = { reliable: true };
165
-    var dataChannel
166
-       = peerConnection.createDataChannel("myChannel", dataChannelOptions);
167
-
168
-    // Can be used only when is in open state
169
-    dataChannel.onopen = function ()
170
-    {
171
-        dataChannel.send("My channel !!!");
172
-    };
173
-    dataChannel.onmessage = function (event)
174
-    {
175
-        var msgData = event.data;
176
-        console.info("Got My Data Channel Message:", msgData, dataChannel);
177
-    };
178
-*/
179
-}
180
-

+ 3
- 3
desktopsharing.js Dosyayı Görüntüle

@@ -28,7 +28,7 @@ var _desktopSharingEnabled = null;
28 28
  * Flag 'chrome://flags/#enable-usermedia-screen-capture' must be enabled.
29 29
  */
30 30
 function obtainWebRTCScreen(streamCallback, failCallback) {
31
-    getUserMediaWithConstraints(
31
+    RTC.getUserMediaWithConstraints(
32 32
         ['screen'],
33 33
         streamCallback,
34 34
         failCallback
@@ -135,7 +135,7 @@ function doGetStreamFromExtension(streamCallback, failCallback) {
135 135
             }
136 136
             console.log("Response from extension: " + response);
137 137
             if (response.streamId) {
138
-                getUserMediaWithConstraints(
138
+                RTC.getUserMediaWithConstraints(
139 139
                     ['desktop'],
140 140
                     function (stream) {
141 141
                         streamCallback(stream);
@@ -303,7 +303,7 @@ function toggleScreenSharing() {
303 303
             getSwitchStreamFailed);
304 304
     } else {
305 305
         // Disable screen stream
306
-        getUserMediaWithConstraints(
306
+        RTC.getUserMediaWithConstraints(
307 307
             ['video'],
308 308
             function (stream) {
309 309
                 // We are now using camera stream

+ 5
- 2
index.html Dosyayı Görüntüle

@@ -34,7 +34,6 @@
34 34
     <script src="muc.js?v=17"></script><!-- simple MUC library -->
35 35
     <script src="estos_log.js?v=2"></script><!-- simple stanza logger -->
36 36
     <script src="desktopsharing.js?v=3"></script><!-- desktop sharing -->
37
-    <script src="data_channels.js?v=3"></script><!-- data channels -->
38 37
     <script src="app.js?v=22"></script><!-- application logic -->
39 38
     <script src="commands.js?v=1"></script><!-- application logic -->
40 39
     <script src="chat.js?v=15"></script><!-- chat logic -->
@@ -64,7 +63,11 @@
64 63
     <script src="message_handler.js?v=2"></script>
65 64
     <script src="api_connector.js?v=2"></script>
66 65
     <script src="settings_menu.js?v=1"></script>
67
-    <script src="libs/modules/statistics.bundle.js"></script>
66
+    <script src="service/RTC/RTCBrowserType.js?v=1"></script>
67
+    <script src="service/RTC/StreamEventTypes.js?v=1"></script>
68
+    <script src="service/RTC/MediaStreamTypes.js?v=1"></script>
69
+    <script src="libs/modules/statistics.bundle.js?v=1"></script>
70
+    <script src="libs/modules/RTC.bundle.js?v=1"></script>
68 71
     <script src="avatar.js?v=4"></script><!-- avatars -->
69 72
     <link rel="stylesheet" href="css/font.css?v=6"/>
70 73
     <link rel="stylesheet" href="css/toastr.css?v=1">

+ 1126
- 0
libs/modules/RTC.bundle.js
Dosya farkı çok büyük olduğundan ihmal edildi
Dosyayı Görüntüle


+ 52
- 45
libs/modules/statistics.bundle.js
Dosya farkı çok büyük olduğundan ihmal edildi
Dosyayı Görüntüle


+ 0
- 272
libs/strophe/strophe.jingle.adapter.js Dosyayı Görüntüle

@@ -512,275 +512,3 @@ TraceablePeerConnection.prototype.getStats = function(callback, errback) {
512 512
     }
513 513
 };
514 514
 
515
-// mozilla chrome compat layer -- very similar to adapter.js
516
-function setupRTC() {
517
-    var RTC = null;
518
-    if (navigator.mozGetUserMedia) {
519
-        console.log('This appears to be Firefox');
520
-        var version = parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
521
-        if (version >= 22) {
522
-            RTC = {
523
-                peerconnection: mozRTCPeerConnection,
524
-                browser: 'firefox',
525
-                getUserMedia: navigator.mozGetUserMedia.bind(navigator),
526
-                attachMediaStream: function (element, stream) {
527
-                    element[0].mozSrcObject = stream;
528
-                    element[0].play();
529
-                },
530
-                pc_constraints: {},
531
-                getLocalSSRC: function (session, callback) {
532
-                    // NOTE(gp) latest FF nightlies seem to provide the local
533
-                    // SSRCs in their SDP so there's no longer necessary to
534
-                    // take it from the peer connection stats.
535
-                    /*session.peerconnection.getStats(function (s) {
536
-                            var ssrcs = {};
537
-                            s.forEach(function (item) {
538
-                                if (item.type == "outboundrtp" && !item.isRemote)
539
-                                {
540
-                                    ssrcs[item.id.split('_')[2]] = item.ssrc;
541
-                                }
542
-                            });
543
-                            session.localStreamsSSRC = {
544
-                                "audio": ssrcs.audio,//for stable 0
545
-                                "video": ssrcs.video// for stable 1
546
-                            };
547
-                            callback(session.localStreamsSSRC);
548
-                        },
549
-                        function () {
550
-                            callback(null);
551
-                        });*/
552
-                    callback(null);
553
-                },
554
-                getStreamID: function (stream) {
555
-                    var tracks = stream.getVideoTracks();
556
-                    if(!tracks || tracks.length == 0)
557
-                    {
558
-                        tracks = stream.getAudioTracks();
559
-                    }
560
-                    return tracks[0].id.replace(/[\{,\}]/g,"");
561
-                },
562
-                getVideoSrc: function (element) {
563
-                    return element.mozSrcObject;
564
-                },
565
-                setVideoSrc: function (element, src) {
566
-                    element.mozSrcObject = src;
567
-                }
568
-            };
569
-            if (!MediaStream.prototype.getVideoTracks)
570
-                MediaStream.prototype.getVideoTracks = function () { return []; };
571
-            if (!MediaStream.prototype.getAudioTracks)
572
-                MediaStream.prototype.getAudioTracks = function () { return []; };
573
-            RTCSessionDescription = mozRTCSessionDescription;
574
-            RTCIceCandidate = mozRTCIceCandidate;
575
-        }
576
-    } else if (navigator.webkitGetUserMedia) {
577
-        console.log('This appears to be Chrome');
578
-        RTC = {
579
-            peerconnection: webkitRTCPeerConnection,
580
-            browser: 'chrome',
581
-            getUserMedia: navigator.webkitGetUserMedia.bind(navigator),
582
-            attachMediaStream: function (element, stream) {
583
-                element.attr('src', webkitURL.createObjectURL(stream));
584
-            },
585
-            // DTLS should now be enabled by default but..
586
-            pc_constraints: {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]},
587
-            getLocalSSRC: function (session, callback) {
588
-                callback(null);
589
-            },
590
-            getStreamID: function (stream) {
591
-                // streams from FF endpoints have the characters '{' and '}'
592
-                // that make jQuery choke.
593
-                return stream.id.replace(/[\{,\}]/g,"");
594
-            },
595
-            getVideoSrc: function (element) {
596
-                return element.getAttribute("src");
597
-            },
598
-            setVideoSrc: function (element, src) {
599
-                element.setAttribute("src", src);
600
-            }
601
-        };
602
-        if (navigator.userAgent.indexOf('Android') != -1) {
603
-            RTC.pc_constraints = {}; // disable DTLS on Android
604
-        }
605
-        if (!webkitMediaStream.prototype.getVideoTracks) {
606
-            webkitMediaStream.prototype.getVideoTracks = function () {
607
-                return this.videoTracks;
608
-            };
609
-        }
610
-        if (!webkitMediaStream.prototype.getAudioTracks) {
611
-            webkitMediaStream.prototype.getAudioTracks = function () {
612
-                return this.audioTracks;
613
-            };
614
-        }
615
-    }
616
-    if (RTC === null) {
617
-        try { console.log('Browser does not appear to be WebRTC-capable'); } catch (e) { }
618
-    }
619
-    return RTC;
620
-}
621
-
622
-function getUserMediaWithConstraints(um, success_callback, failure_callback, resolution, bandwidth, fps, desktopStream) {
623
-    var constraints = {audio: false, video: false};
624
-
625
-    if (um.indexOf('video') >= 0) {
626
-        constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
627
-    }
628
-    if (um.indexOf('audio') >= 0) {
629
-        constraints.audio = { mandatory: {}, optional: []};// same behaviour as true
630
-    }
631
-    if (um.indexOf('screen') >= 0) {
632
-        constraints.video = {
633
-            mandatory: {
634
-                chromeMediaSource: "screen",
635
-                googLeakyBucket: true,
636
-                maxWidth: window.screen.width,
637
-                maxHeight: window.screen.height,
638
-                maxFrameRate: 3
639
-            },
640
-            optional: []
641
-        };
642
-    }
643
-    if (um.indexOf('desktop') >= 0) {
644
-        constraints.video = {
645
-            mandatory: {
646
-                chromeMediaSource: "desktop",
647
-                chromeMediaSourceId: desktopStream,
648
-                googLeakyBucket: true,
649
-                maxWidth: window.screen.width,
650
-                maxHeight: window.screen.height,
651
-                maxFrameRate: 3
652
-            },
653
-            optional: []
654
-        }
655
-    }
656
-
657
-    if (constraints.audio) {
658
-        // if it is good enough for hangouts...
659
-        constraints.audio.optional.push(
660
-            {googEchoCancellation: true},
661
-            {googAutoGainControl: true},
662
-            {googNoiseSupression: true},
663
-            {googHighpassFilter: true},
664
-            {googNoisesuppression2: true},
665
-            {googEchoCancellation2: true},
666
-            {googAutoGainControl2: true}
667
-        );
668
-    }
669
-    if (constraints.video) {
670
-        constraints.video.optional.push(
671
-            {googNoiseReduction: false} // chrome 37 workaround for issue 3807, reenable in M38
672
-        );
673
-        if (um.indexOf('video') >= 0) {
674
-            constraints.video.optional.push(
675
-                {googLeakyBucket: true}
676
-            );
677
-        }
678
-    }
679
-
680
-    // Check if we are running on Android device
681
-    var isAndroid = navigator.userAgent.indexOf('Android') != -1;
682
-
683
-    if (resolution && !constraints.video || isAndroid) {
684
-        constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
685
-    }
686
-    // see https://code.google.com/p/chromium/issues/detail?id=143631#c9 for list of supported resolutions
687
-    switch (resolution) {
688
-        // 16:9 first
689
-        case '1080':
690
-        case 'fullhd':
691
-            constraints.video.mandatory.minWidth = 1920;
692
-            constraints.video.mandatory.minHeight = 1080;
693
-            break;
694
-        case '720':
695
-        case 'hd':
696
-            constraints.video.mandatory.minWidth = 1280;
697
-            constraints.video.mandatory.minHeight = 720;
698
-            break;
699
-        case '360':
700
-            constraints.video.mandatory.minWidth = 640;
701
-            constraints.video.mandatory.minHeight = 360;
702
-            break;
703
-        case '180':
704
-            constraints.video.mandatory.minWidth = 320;
705
-            constraints.video.mandatory.minHeight = 180;
706
-            break;
707
-        // 4:3
708
-        case '960':
709
-            constraints.video.mandatory.minWidth = 960;
710
-            constraints.video.mandatory.minHeight = 720;
711
-            break;
712
-        case '640':
713
-        case 'vga':
714
-            constraints.video.mandatory.minWidth = 640;
715
-            constraints.video.mandatory.minHeight = 480;
716
-            break;
717
-        case '320':
718
-            constraints.video.mandatory.minWidth = 320;
719
-            constraints.video.mandatory.minHeight = 240;
720
-            break;
721
-        default:
722
-            if (isAndroid) {
723
-                constraints.video.mandatory.minWidth = 320;
724
-                constraints.video.mandatory.minHeight = 240;
725
-                constraints.video.mandatory.maxFrameRate = 15;
726
-            }
727
-            break;
728
-    }
729
-    if (constraints.video.mandatory.minWidth)
730
-        constraints.video.mandatory.maxWidth = constraints.video.mandatory.minWidth;
731
-    if (constraints.video.mandatory.minHeight)
732
-        constraints.video.mandatory.maxHeight = constraints.video.mandatory.minHeight;
733
-
734
-    if (bandwidth) { // doesn't work currently, see webrtc issue 1846
735
-        if (!constraints.video) constraints.video = {mandatory: {}, optional: []};//same behaviour as true
736
-        constraints.video.optional.push({bandwidth: bandwidth});
737
-    }
738
-    if (fps) { // for some cameras it might be necessary to request 30fps
739
-        // so they choose 30fps mjpg over 10fps yuy2
740
-        if (!constraints.video) constraints.video = {mandatory: {}, optional: []};// same behaviour as true;
741
-        constraints.video.mandatory.minFrameRate = fps;
742
-    }
743
-
744
-    var isFF = navigator.userAgent.toLowerCase().indexOf('firefox') > -1;
745
-
746
-    try {
747
-        if (config.enableSimulcast
748
-            && constraints.video
749
-            && constraints.video.chromeMediaSource !== 'screen'
750
-            && constraints.video.chromeMediaSource !== 'desktop'
751
-            && !isAndroid
752
-
753
-            // We currently do not support FF, as it doesn't have multistream support.
754
-            && !isFF) {
755
-            simulcast.getUserMedia(constraints, function (stream) {
756
-                    console.log('onUserMediaSuccess');
757
-                    success_callback(stream);
758
-                },
759
-                function (error) {
760
-                    console.warn('Failed to get access to local media. Error ', error);
761
-                    if (failure_callback) {
762
-                        failure_callback(error);
763
-                    }
764
-                });
765
-        } else {
766
-
767
-            RTC.getUserMedia(constraints,
768
-                function (stream) {
769
-                    console.log('onUserMediaSuccess');
770
-                    success_callback(stream);
771
-                },
772
-                function (error) {
773
-                    console.warn('Failed to get access to local media. Error ', error);
774
-                    if (failure_callback) {
775
-                        failure_callback(error);
776
-                    }
777
-                });
778
-
779
-        }
780
-    } catch (e) {
781
-        console.error('GUM failed: ', e);
782
-        if(failure_callback) {
783
-            failure_callback(e);
784
-        }
785
-    }
786
-}

+ 5
- 22
libs/strophe/strophe.jingle.session.js Dosyayı Görüntüle

@@ -262,19 +262,7 @@ JingleSession.prototype.sendIceCandidate = function (candidate) {
262 262
                     },
263 263
                     10000);
264 264
             }
265
-
266
-            RTC.getLocalSSRC(this, function (ssrcs) {
267
-                if(ssrcs)
268
-                {
269
-                    sendJingle(ssrcs);
270
-                    $(document).trigger("setLocalDescription.jingle", [self.sid]);
271
-                }
272
-                else
273
-                {
274
-                    sendJingle();
275
-                }
276
-            });
277
-
265
+            sendJingle();
278 266
         }
279 267
         this.lasticecandidate = true;
280 268
         console.log('Have we encountered any srflx candidates? ' + this.hadstuncandidate);
@@ -390,11 +378,8 @@ JingleSession.prototype.createdOffer = function (sdp) {
390 378
         function () {
391 379
             if(this.usetrickle)
392 380
             {
393
-                RTC.getLocalSSRC(function(ssrc)
394
-                {
395
-                    sendJingle(ssrc);
396
-                    $(document).trigger('setLocalDescription.jingle', [self.sid]);
397
-                });
381
+                sendJingle();
382
+                $(document).trigger('setLocalDescription.jingle', [self.sid]);
398 383
             }
399 384
             else
400 385
                 $(document).trigger('setLocalDescription.jingle', [self.sid]);
@@ -631,10 +616,8 @@ JingleSession.prototype.createdAnswer = function (sdp, provisional) {
631 616
 
632 617
             //console.log('setLocalDescription success');
633 618
             if (self.usetrickle && !self.usepranswer) {
634
-                RTC.getLocalSSRC(self, function (ssrc) {
635
-                    sendJingle(ssrc);
636
-                    $(document).trigger('setLocalDescription.jingle', [self.sid]);
637
-                });
619
+                sendJingle();
620
+                $(document).trigger('setLocalDescription.jingle', [self.sid]);
638 621
             }
639 622
             else
640 623
                 $(document).trigger('setLocalDescription.jingle', [self.sid]);

+ 235
- 0
modules/RTC/DataChannels.js Dosyayı Görüntüle

@@ -0,0 +1,235 @@
1
/* global connection, Strophe, updateLargeVideo, focusedVideoSrc*/

// Cache created data channels so they are not garbage collected.
// https://code.google.com/p/chromium/issues/detail?id=405545
var _dataChannels = [];


var DataChannels =
{

    /**
     * Callback triggered by PeerConnection when new data channel is opened
     * on the bridge.
     * @param event the event info object.
     */
    onDataChannel: function (event)
    {
        var dataChannel = event.channel;

        dataChannel.onopen = function () {
            console.info("Data channel opened by the Videobridge!", dataChannel);

            // Code sample for sending string and/or binary data
            // Sends String message to the bridge
            //dataChannel.send("Hello bridge!");
            // Sends 12 bytes binary message to the bridge
            //dataChannel.send(new ArrayBuffer(12));

            // When the data channel becomes available, tell the bridge about
            // the current video selection so that it can do adaptive
            // simulcast. We want the notification to trigger even if userJid
            // is undefined, or null.
            var userJid = VideoLayout.getLargeVideoState().userJid;
            onSelectedEndpointChanged(userJid);
        };

        dataChannel.onerror = function (error) {
            console.error("Data Channel Error:", error, dataChannel);
        };

        dataChannel.onmessage = function (event) {
            var data = event.data;
            // Messages from the bridge are JSON-encoded.
            var obj;

            try {
                obj = JSON.parse(data);
            }
            catch (e) {
                console.error(
                    "Failed to parse data channel message as JSON: ",
                    data,
                    dataChannel);
            }
            if (('undefined' !== typeof(obj)) && (null !== obj)) {
                var colibriClass = obj.colibriClass;

                if ("DominantSpeakerEndpointChangeEvent" === colibriClass) {
                    // Endpoint ID from the Videobridge.
                    var dominantSpeakerEndpoint = obj.dominantSpeakerEndpoint;

                    console.info(
                        "Data channel new dominant speaker event: ",
                        dominantSpeakerEndpoint);
                    $(document).trigger(
                        'dominantspeakerchanged',
                        [dominantSpeakerEndpoint]);
                }
                else if ("InLastNChangeEvent" === colibriClass)
                {
                    var oldValue = obj.oldValue;
                    var newValue = obj.newValue;
                    // Make sure that oldValue and newValue are of type
                    // boolean (the bridge may serialize them as strings).
                    var type;

                    if ((type = typeof oldValue) !== 'boolean') {
                        if (type === 'string') {
                            oldValue = (oldValue == "true");
                        } else {
                            oldValue = Boolean(oldValue);
                        }
                    }
                    if ((type = typeof newValue) !== 'boolean') {
                        if (type === 'string') {
                            newValue = (newValue == "true");
                        } else {
                            newValue = Boolean(newValue);
                        }
                    }
                    $(document).trigger('inlastnchanged', [oldValue, newValue]);
                }
                else if ("LastNEndpointsChangeEvent" === colibriClass)
                {
                    // The new/latest list of last-n endpoint IDs.
                    var lastNEndpoints = obj.lastNEndpoints;
                    // The list of endpoint IDs which are entering the list of
                    // last-n at this time i.e. were not in the old list of last-n
                    // endpoint IDs.
                    var endpointsEnteringLastN = obj.endpointsEnteringLastN;
                    var stream = obj.stream;

                    console.log(
                        "Data channel new last-n event: ",
                        lastNEndpoints, endpointsEnteringLastN, obj);
                    $(document).trigger(
                        'lastnchanged',
                        [lastNEndpoints, endpointsEnteringLastN, stream]);
                }
                else if ("SimulcastLayersChangedEvent" === colibriClass)
                {
                    $(document).trigger(
                        'simulcastlayerschanged',
                        [obj.endpointSimulcastLayers]);
                }
                else if ("SimulcastLayersChangingEvent" === colibriClass)
                {
                    $(document).trigger(
                        'simulcastlayerschanging',
                        [obj.endpointSimulcastLayers]);
                }
                else if ("StartSimulcastLayerEvent" === colibriClass)
                {
                    $(document).trigger('startsimulcastlayer', obj.simulcastLayer);
                }
                else if ("StopSimulcastLayerEvent" === colibriClass)
                {
                    $(document).trigger('stopsimulcastlayer', obj.simulcastLayer);
                }
                else
                {
                    console.debug("Data channel JSON-formatted message: ", obj);
                }
            }
        };

        dataChannel.onclose = function ()
        {
            console.info("The Data Channel closed", dataChannel);
            var idx = _dataChannels.indexOf(dataChannel);
            // Fixed: splice() returns the *removed* elements, so assigning
            // its result back to _dataChannels destroyed the cache; remove
            // the entry in place instead.
            if (idx > -1)
                _dataChannels.splice(idx, 1);
        };
        _dataChannels.push(dataChannel);
    },

    /**
     * Binds "ondatachannel" event listener to given PeerConnection instance.
     * Does nothing unless SCTP data channels are enabled in the config.
     * @param peerConnection WebRTC peer connection instance.
     */
    bindDataChannelListener: function (peerConnection) {
        // Fixed: the original "retrun" typo threw a ReferenceError whenever
        // config.openSctp was disabled.
        if (!config.openSctp)
            return;

        peerConnection.ondatachannel = this.onDataChannel;

        // Sample code for opening new data channel from Jitsi Meet to the bridge.
        // Although it's not a requirement to open separate channels from both bridge
        // and peer as single channel can be used for sending and receiving data.
        // So either channel opened by the bridge or the one opened here is enough
        // for communication with the bridge.
        /*var dataChannelOptions =
         {
         reliable: true
         };
         var dataChannel
         = peerConnection.createDataChannel("myChannel", dataChannelOptions);

         // Can be used only when is in open state
         dataChannel.onopen = function ()
         {
         dataChannel.send("My channel !!!");
         };
         dataChannel.onmessage = function (event)
         {
         var msgData = event.data;
         console.info("Got My Data Channel Message:", msgData, dataChannel);
         };*/
    }

};

/**
 * Notifies the bridge (over the first open data channel) which endpoint is
 * currently selected, so it can do adaptive simulcast.
 * NOTE(review): unlike onPinnedEndpointChanged below, the full JID is sent
 * here rather than Strophe.getResourceFromJid(userJid) — confirm whether
 * this asymmetry is intentional.
 * @param userJid the selected participant's JID, or null/undefined for none.
 */
function onSelectedEndpointChanged(userJid)
{
    console.log('selected endpoint changed: ', userJid);
    if (_dataChannels && _dataChannels.length != 0)
    {
        _dataChannels.some(function (dataChannel) {
            if (dataChannel.readyState == 'open')
            {
                dataChannel.send(JSON.stringify({
                    'colibriClass': 'SelectedEndpointChangedEvent',
                    'selectedEndpoint': (!userJid || userJid == null)
                        ? null : userJid
                }));

                return true;
            }
        });
    }
}

$(document).bind("selectedendpointchanged", function(event, userJid) {
    onSelectedEndpointChanged(userJid);
});

/**
 * Notifies the bridge (over the first open data channel) which endpoint is
 * pinned by the local user.
 * @param userJid the pinned participant's JID, or null/undefined for none.
 */
function onPinnedEndpointChanged(userJid)
{
    console.log('pinned endpoint changed: ', userJid);
    if (_dataChannels && _dataChannels.length != 0)
    {
        _dataChannels.some(function (dataChannel) {
            if (dataChannel.readyState == 'open')
            {
                dataChannel.send(JSON.stringify({
                    'colibriClass': 'PinnedEndpointChangedEvent',
                    'pinnedEndpoint': (!userJid || userJid == null)
                        ? null : Strophe.getResourceFromJid(userJid)
                }));

                return true;
            }
        });
    }
}

$(document).bind("pinnedendpointchanged", function(event, userJid) {
    onPinnedEndpointChanged(userJid);
});

module.exports = DataChannels;

+ 66
- 0
modules/RTC/LocalStream.js Dosyayı Görüntüle

@@ -0,0 +1,66 @@
1
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");

/**
 * Wraps a local MediaStream of the given type ("audio" or "video") and
 * re-emits its lifecycle events on the shared event emitter.
 *
 * @param stream the underlying MediaStream
 * @param type the stream type: "audio" or "video"
 * @param eventEmitter the emitter used to broadcast stream events
 * @constructor
 */
function LocalStream(stream, type, eventEmitter)
{
    this.stream = stream;
    this.eventEmitter = eventEmitter;
    this.type = type;

    var self = this;
    // Forward the underlying stream's "ended" notification.
    this.stream.onended = function()
    {
        self.streamEnded();
    };
}

/**
 * Notifies listeners that this local stream has ended.
 * NOTE(review): StreamEventTypes is expected to be a global until require()
 * works in app.js — see the commented-out import above.
 */
LocalStream.prototype.streamEnded = function () {
    this.eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_ENDED, this);
};

/**
 * @returns the wrapped MediaStream.
 */
LocalStream.prototype.getOriginalStream = function()
{
    return this.stream;
};

/**
 * @returns true if the wrapped stream contains at least one audio track.
 */
LocalStream.prototype.isAudioStream = function () {
    return (this.stream.getAudioTracks() && this.stream.getAudioTracks().length > 0);
};

/**
 * Toggles the enabled state of this stream's tracks.
 * Fixed: the original used assignment (=) instead of comparison (===),
 * which clobbered this.type and always toggled the audio tracks.
 * @returns true if the stream was muted before the toggle.
 */
LocalStream.prototype.mute = function()
{
    var ismuted = false;
    var tracks = [];
    if (this.type === "audio")
    {
        tracks = this.stream.getAudioTracks();
    }
    else
    {
        tracks = this.stream.getVideoTracks();
    }

    for (var idx = 0; idx < tracks.length; idx++) {
        ismuted = !tracks[idx].enabled;
        tracks[idx].enabled = !tracks[idx].enabled;
    }
    return ismuted;
};

/**
 * @returns true when every track of this stream's type is disabled
 * (an empty track list also counts as muted).
 * Fixed: same assignment-instead-of-comparison bug as in mute().
 */
LocalStream.prototype.isMuted = function () {
    var tracks = [];
    if (this.type === "audio")
    {
        tracks = this.stream.getAudioTracks();
    }
    else
    {
        tracks = this.stream.getVideoTracks();
    }
    for (var idx = 0; idx < tracks.length; idx++) {
        if (tracks[idx].enabled)
            return false;
    }
    return true;
};

// Guarded so the module can also be parsed in non-CommonJS environments.
if (typeof module !== "undefined" && module.exports) {
    module.exports = LocalStream;
}

+ 49
- 0
modules/RTC/MediaStream.js Dosyayı Görüntüle

@@ -0,0 +1,49 @@
1
var RTC = require("./RTC.js");
////These lines should be uncommented when require works in app.js
//var RTCBrowserType = require("../../service/RTC/RTCBrowserType.js");
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
//var MediaStreamType = require("../../service/RTC/MediaStreamTypes");

/**
 * Creates a MediaStream object for the given data, session id and ssrc.
 * It is a wrapper class for the MediaStream.
 *
 * Emits EVENT_TYPE_REMOTE_CREATED on the given emitter as a side effect of
 * construction.
 *
 * @param data the data object from which we obtain the stream,
 * the peerjid, etc.
 * @param sid the session id
 * @param ssrc the ssrc corresponding to this MediaStream
 * @param eventEmmiter the emitter used to broadcast stream events
 *
 * @constructor
 */
function MediaStream(data, sid, ssrc, eventEmmiter) {
    this.sid = sid;
    this.stream = data.stream;
    this.peerjid = data.peerjid;
    this.ssrc = ssrc;
    // Classified as video when the wrapped stream carries video tracks,
    // otherwise audio.
    this.type = (this.stream.getVideoTracks().length > 0)?
        MediaStreamType.VIDEO_TYPE : MediaStreamType.AUDIO_TYPE;
    this.muted = false;
    eventEmmiter.emit(StreamEventTypes.EVENT_TYPE_REMOTE_CREATED, this);
}

// NOTE(review): this runs at module load and assumes RTC.start() has already
// set RTC.rtcUtils — confirm the load order. Also, the patch targets *this
// wrapper's* prototype, while the constructor above calls getVideoTracks()
// on the wrapped native stream (this.stream) — confirm the intended target.
if(RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_FIREFOX)
{
    if (!MediaStream.prototype.getVideoTracks)
        MediaStream.prototype.getVideoTracks = function () { return []; };
    if (!MediaStream.prototype.getAudioTracks)
        MediaStream.prototype.getAudioTracks = function () { return []; };
}

/**
 * @returns the wrapped MediaStream.
 */
MediaStream.prototype.getOriginalStream = function()
{
    return this.stream;
}

/**
 * Records the mute state on both the wrapped stream and this wrapper.
 * @param value boolean mute state.
 */
MediaStream.prototype.setMute = function (value)
{
    this.stream.muted = value;
    this.muted = value;
}


module.exports = MediaStream;

+ 122
- 0
modules/RTC/RTC.js Dosyayı Görüntüle

@@ -0,0 +1,122 @@
1
var EventEmitter = require("events");
var RTCUtils = require("./RTCUtils.js");
//These lines should be uncommented when require works in app.js
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
//var XMPPEvents = require("../service/xmpp/XMPPEvents");

// Emitter shared by all stream-related events of this module.
var eventEmitter = new EventEmitter();

/**
 * RTC service module: owns the local/remote stream wrappers and delegates
 * browser-specific WebRTC operations to an RTCUtils instance.
 */
var RTC = {
    rtcUtils: null,
    // LocalStream wrappers for all local streams.
    localStreams: [],
    // Remote MediaStream wrappers, keyed by peer JID and then stream type.
    remoteStreams: {},
    localAudio: null,
    localVideo: null,

    /** Subscribes the listener to the given stream event type. */
    addStreamListener: function (listener, eventType) {
        eventEmitter.on(eventType, listener);
    },

    /** Unsubscribes the listener from the given stream event type. */
    removeStreamListener: function (listener, eventType) {
        // NOTE(review): StreamEventTypes is presumably a plain constants
        // object, so this instanceof check likely never validates an event
        // type as intended — confirm and replace with a membership check.
        if(!(eventType instanceof StreamEventTypes))
            throw "Illegal argument";

        eventEmitter.removeListener(eventType, listener);
    },

    /**
     * Wraps the given raw stream in a LocalStream of the given type
     * ("audio", otherwise treated as video), caches it and emits
     * EVENT_TYPE_LOCAL_CREATED.
     * @returns the created LocalStream wrapper.
     */
    createLocalStream: function (stream, type) {
        var LocalStream = require("./LocalStream.js");
        var localStream = new LocalStream(stream, type, eventEmitter);
        this.localStreams.push(localStream);
        if(type == "audio")
        {
            this.localAudio = localStream;
        }
        else
        {
            this.localVideo = localStream;
        }
        eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_CREATED,
            localStream);
        return localStream;
    },

    /** Removes the wrapper that wraps the given raw stream, if present. */
    removeLocalStream: function (stream) {
        for(var i = 0; i < this.localStreams.length; i++)
        {
            if(this.localStreams[i].getOriginalStream() === stream) {
                // Fixed: `delete` left an undefined hole in the array that
                // would break later iteration; splice removes the entry.
                this.localStreams.splice(i, 1);
                return;
            }
        }
    },

    /**
     * Wraps a remote stream in a MediaStream object and indexes it by the
     * owner's JID (falling back to our own MUC JID) and stream type.
     * @returns the created MediaStream wrapper.
     */
    createRemoteStream: function (data, sid, thessrc) {
        var MediaStream = require("./MediaStream.js");
        var remoteStream = new MediaStream(data, sid, thessrc, eventEmitter);
        var jid = data.peerjid || connection.emuc.myroomjid;
        if(!this.remoteStreams[jid]) {
            this.remoteStreams[jid] = {};
        }
        this.remoteStreams[jid][remoteStream.type]= remoteStream;
        return remoteStream;
    },
    getBrowserType: function () {
        return this.rtcUtils.browser;
    },
    getPCConstraints: function () {
        return this.rtcUtils.pc_constraints;
    },
    /** Delegates getUserMedia to the browser-specific RTCUtils. */
    getUserMediaWithConstraints:function(um, success_callback,
                                         failure_callback, resolution,
                                         bandwidth, fps, desktopStream)
    {
        return this.rtcUtils.getUserMediaWithConstraints(um, success_callback,
            failure_callback, resolution, bandwidth, fps, desktopStream);
    },
    attachMediaStream:  function (element, stream) {
        this.rtcUtils.attachMediaStream(element, stream);
    },
    getStreamID:  function (stream) {
        return this.rtcUtils.getStreamID(stream);
    },
    getVideoSrc: function (element) {
        return this.rtcUtils.getVideoSrc(element);
    },
    setVideoSrc: function (element, src) {
        this.rtcUtils.setVideoSrc(element, src);
    },
    dispose: function() {
        if (this.rtcUtils) {
            this.rtcUtils = null;
        }
    },
    stop:  function () {
        this.dispose();
    },
    /** Initializes RTCUtils and requests mic/camera permissions. */
    start: function () {
        this.rtcUtils = new RTCUtils(this);
        this.rtcUtils.obtainAudioAndVideoPermissions();
    },
    /** Hooks up bridge data channels once the conference is created. */
    onConferenceCreated: function(event) {
        // Fixed: the module file is DataChannels.js; the all-lowercase
        // "./datachannels" fails on case-sensitive file systems.
        var DataChannels = require("./DataChannels.js");
        DataChannels.bindDataChannelListener(event.peerconnection);
    },
    /**
     * Mutes/unmutes the remote video stream of the given participant.
     * @param jid the participant's JID
     * @param value the desired state as the string "true"/"false"
     *        (as received over the wire)
     * @returns true when the mute state actually changed.
     */
    muteRemoteVideoStream: function (jid, value) {
        var stream;

        if(this.remoteStreams[jid] &&
            this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE])
        {
            stream = this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE];
        }

        if(!stream)
            return false;

        var isMuted = (value === "true");
        if (isMuted != stream.muted) {
            stream.setMute(isMuted);
            return true;
        }
        return false;
    }

};

module.exports = RTC;

+ 338
- 0
modules/RTC/RTCUtils.js Dosyayı Görüntüle

@@ -0,0 +1,338 @@
1
//This should be uncommented when app.js supports require
//var RTCBrowserType = require("../../service/RTC/RTCBrowserType.js");

// getUserMedia constraints shared across this module; (re)built on every
// getUserMediaWithConstraints call. NOTE(review): state persists between
// calls and is never reset — confirm that is intended.
var constraints = {audio: false, video: false};

/**
 * Applies the requested resolution preset to constraints.video.mandatory
 * (min/max width and height, plus a frame-rate cap on Android).
 *
 * @param resolution a preset name such as '1080', 'hd', '360', 'vga', …
 * @param isAndroid whether we run on an Android device.
 */
function setResolutionConstraints(resolution, isAndroid)
{
    // NOTE(review): this parses as (resolution && !constraints.video)
    // || isAndroid — confirm the intended grouping.
    if (resolution && !constraints.video || isAndroid) {
        constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
    }
    // see https://code.google.com/p/chromium/issues/detail?id=143631#c9 for list of supported resolutions
    switch (resolution) {
        // 16:9 first
        case '1080':
        case 'fullhd':
            constraints.video.mandatory.minWidth = 1920;
            constraints.video.mandatory.minHeight = 1080;
            break;
        case '720':
        case 'hd':
            constraints.video.mandatory.minWidth = 1280;
            constraints.video.mandatory.minHeight = 720;
            break;
        case '360':
            constraints.video.mandatory.minWidth = 640;
            constraints.video.mandatory.minHeight = 360;
            break;
        case '180':
            constraints.video.mandatory.minWidth = 320;
            constraints.video.mandatory.minHeight = 180;
            break;
        // 4:3
        case '960':
            constraints.video.mandatory.minWidth = 960;
            constraints.video.mandatory.minHeight = 720;
            break;
        case '640':
        case 'vga':
            constraints.video.mandatory.minWidth = 640;
            constraints.video.mandatory.minHeight = 480;
            break;
        case '320':
            constraints.video.mandatory.minWidth = 320;
            constraints.video.mandatory.minHeight = 240;
            break;
        default:
            if (isAndroid) {
                constraints.video.mandatory.minWidth = 320;
                constraints.video.mandatory.minHeight = 240;
                constraints.video.mandatory.maxFrameRate = 15;
            }
            break;
    }
    // Guard added: with no resolution preset on a non-Android device and
    // video disabled, constraints.video is still `false` here; the original
    // code dereferenced it unconditionally and threw a TypeError.
    if (constraints.video) {
        if (constraints.video.mandatory.minWidth)
            constraints.video.mandatory.maxWidth = constraints.video.mandatory.minWidth;
        if (constraints.video.mandatory.minHeight)
            constraints.video.mandatory.maxHeight = constraints.video.mandatory.minHeight;
    }
}
59
+
60
+
61
/**
 * (Re)builds the module-level `constraints` object for a getUserMedia call.
 *
 * @param um array of requested media kinds: 'audio', 'video', 'screen'
 *        (screen capture) and/or 'desktop' (Chrome desktop capture)
 * @param resolution resolution preset forwarded to setResolutionConstraints
 * @param bandwidth optional bandwidth hint (ineffective, see webrtc issue 1846)
 * @param fps optional minimum frame rate
 * @param desktopStream chromeMediaSourceId used with 'desktop'
 * @param isAndroid whether we run on an Android device
 *
 * NOTE(review): `constraints` is shared module state and is not reset here,
 * so options from a previous call can leak into the next one — confirm
 * whether that is intended.
 */
function setConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid)
{
    if (um.indexOf('video') >= 0) {
        constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
    }
    if (um.indexOf('audio') >= 0) {
        constraints.audio = { mandatory: {}, optional: []};// same behaviour as true
    }
    if (um.indexOf('screen') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "screen",
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }
    if (um.indexOf('desktop') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: desktopStream,
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }

    if (constraints.audio) {
        // if it is good enough for hangouts...
        // NOTE(review): "googNoiseSupression" and "googNoisesuppression2"
        // look misspelled (Chrome documents googNoiseSuppression /
        // googNoiseSuppression2). These are runtime constraint keys, so
        // verify against the targeted Chrome versions before renaming.
        constraints.audio.optional.push(
            {googEchoCancellation: true},
            {googAutoGainControl: true},
            {googNoiseSupression: true},
            {googHighpassFilter: true},
            {googNoisesuppression2: true},
            {googEchoCancellation2: true},
            {googAutoGainControl2: true}
        );
    }
    if (constraints.video) {
        constraints.video.optional.push(
            {googNoiseReduction: false} // chrome 37 workaround for issue 3807, reenable in M38
        );
        if (um.indexOf('video') >= 0) {
            constraints.video.optional.push(
                {googLeakyBucket: true}
            );
        }
    }

    setResolutionConstraints(resolution, isAndroid);

    if (bandwidth) { // doesn't work currently, see webrtc issue 1846
        if (!constraints.video) constraints.video = {mandatory: {}, optional: []};//same behaviour as true
        constraints.video.optional.push({bandwidth: bandwidth});
    }
    if (fps) { // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
        if (!constraints.video) constraints.video = {mandatory: {}, optional: []};// same behaviour as true;
        constraints.video.mandatory.minFrameRate = fps;
    }
}
130
+
131
+
132
/**
 * Browser abstraction layer: detects the available WebRTC implementation
 * (Firefox's moz* or Chrome's webkit* APIs) and installs uniform helpers
 * for getUserMedia, attaching streams to elements and reading/writing a
 * video element's source. Redirects to webrtcrequired.html when no WebRTC
 * support is found, or to chromeonly.html when Firefox support is disabled
 * in the config.
 *
 * @param RTCService the owning RTC module instance.
 * @constructor
 */
function RTCUtils(RTCService)
{
    this.service = RTCService;
    if (navigator.mozGetUserMedia) {
        console.log('This appears to be Firefox');
        var version = parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
        // NOTE(review): Firefox < 22 falls through with no adapter functions
        // set up and no redirect — confirm whether that is intended.
        if (version >= 22) {
            this.peerconnection = mozRTCPeerConnection;
            this.browser = RTCBrowserType.RTC_BROWSER_FIREFOX;
            this.getUserMedia = navigator.mozGetUserMedia.bind(navigator);
            this.pc_constraints = {};
            // `element` is expected to be a jQuery wrapper here; Firefox
            // attaches streams via mozSrcObject.
            this.attachMediaStream =  function (element, stream) {
                element[0].mozSrcObject = stream;
                element[0].play();
            };
            this.getStreamID =  function (stream) {
                // Track ids contain '{'/'}' characters that make jQuery
                // selectors choke, so they are stripped.
                var tracks = stream.getVideoTracks();
                if(!tracks || tracks.length == 0)
                {
                    tracks = stream.getAudioTracks();
                }
                return tracks[0].id.replace(/[\{,\}]/g,"");
            };
            this.getVideoSrc = function (element) {
                return element.mozSrcObject;
            };
            this.setVideoSrc = function (element, src) {
                element.mozSrcObject = src;
            };
            // Map the standard global names onto the moz-prefixed
            // implementations (deliberate global assignment).
            RTCSessionDescription = mozRTCSessionDescription;
            RTCIceCandidate = mozRTCIceCandidate;
        }
    } else if (navigator.webkitGetUserMedia) {
        console.log('This appears to be Chrome');
        this.peerconnection = webkitRTCPeerConnection;
        this.browser = RTCBrowserType.RTC_BROWSER_CHROME;
        this.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
        this.attachMediaStream = function (element, stream) {
            element.attr('src', webkitURL.createObjectURL(stream));
        };
        this.getStreamID = function (stream) {
            // streams from FF endpoints have the characters '{' and '}'
            // that make jQuery choke.
            return stream.id.replace(/[\{,\}]/g,"");
        };
        this.getVideoSrc = function (element) {
            return element.getAttribute("src");
        };
        this.setVideoSrc = function (element, src) {
            element.setAttribute("src", src);
        };
        // DTLS should now be enabled by default but..
        this.pc_constraints = {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]};
        if (navigator.userAgent.indexOf('Android') != -1) {
            this.pc_constraints = {}; // disable DTLS on Android
        }
        // Older Chrome exposes tracks as properties rather than methods;
        // polyfill the accessor methods when missing.
        if (!webkitMediaStream.prototype.getVideoTracks) {
            webkitMediaStream.prototype.getVideoTracks = function () {
                return this.videoTracks;
            };
        }
        if (!webkitMediaStream.prototype.getAudioTracks) {
            webkitMediaStream.prototype.getAudioTracks = function () {
                return this.audioTracks;
            };
        }
    }
    else
    {
        try { console.log('Browser does not appear to be WebRTC-capable'); } catch (e) { }

        // NOTE(review): `return` does not stop `new RTCUtils(...)` from
        // yielding a (half-initialized) object, and script execution
        // continues until the navigation takes effect — confirm callers
        // tolerate this.
        window.location.href = 'webrtcrequired.html';
        return;
    }

    if (this.browser !== RTCBrowserType.RTC_BROWSER_CHROME &&
        config.enableFirefoxSupport !== true) {
        window.location.href = 'chromeonly.html';
        return;
    }

}
214
+
215
+
216
/**
 * Requests user media of the given kinds using the module-level constraints.
 *
 * @param um array of media kinds: any of 'audio', 'video', 'screen',
 *        'desktop'
 * @param success_callback invoked with the acquired MediaStream
 * @param failure_callback invoked with the error on failure (optional)
 * @param resolution optional resolution preset (see setResolutionConstraints)
 * @param bandwidth optional bandwidth hint (ineffective, see webrtc
 *        issue 1846)
 * @param fps optional minimum frame rate
 * @param desktopStream optional chromeMediaSourceId for desktop capture
 */
RTCUtils.prototype.getUserMediaWithConstraints = function(
    um, success_callback, failure_callback, resolution,bandwidth, fps,
    desktopStream)
{
    // Check if we are running on Android device
    var isAndroid = navigator.userAgent.indexOf('Android') != -1;

    setConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid);

    var isFF = navigator.userAgent.toLowerCase().indexOf('firefox') > -1;

    try {
        // NOTE(review): setConstraints stores chromeMediaSource under
        // constraints.video.mandatory, so these two checks on
        // constraints.video.chromeMediaSource appear to always pass —
        // confirm and adjust if screen/desktop capture should bypass
        // simulcast.
        if (config.enableSimulcast
            && constraints.video
            && constraints.video.chromeMediaSource !== 'screen'
            && constraints.video.chromeMediaSource !== 'desktop'
            && !isAndroid

            // We currently do not support FF, as it doesn't have multistream support.
            && !isFF) {
            simulcast.getUserMedia(constraints, function (stream) {
                    console.log('onUserMediaSuccess');
                    success_callback(stream);
                },
                function (error) {
                    console.warn('Failed to get access to local media. Error ', error);
                    if (failure_callback) {
                        failure_callback(error);
                    }
                });
        } else {
            // Fixed: getUserMedia is bound on the *instance* by the RTCUtils
            // constructor; it is not a static property of the constructor
            // function, so `RTCUtils.getUserMedia` was undefined and this
            // branch always threw.
            this.getUserMedia(constraints,
                function (stream) {
                    console.log('onUserMediaSuccess');
                    success_callback(stream);
                },
                function (error) {
                    console.warn('Failed to get access to local media. Error ', error);
                    if (failure_callback) {
                        failure_callback(error);
                    }
                });

        }
    } catch (e) {
        console.error('GUM failed: ', e);
        if(failure_callback) {
            failure_callback(e);
        }
    }
};
268
+
269
/**
 * We ask for audio and video combined stream in order to get permissions and
 * not to ask twice. Falls back to an audio-only request when the combined
 * one fails, and reports an error to the user when even that is denied.
 */
RTCUtils.prototype.obtainAudioAndVideoPermissions = function() {
    var self = this;
    // Shared success handler for both the audio+video and the audio-only
    // attempts: hand the stream to the service and record usage stats.
    var cb = function (stream) {
        console.log('got', stream, stream.getAudioTracks().length, stream.getVideoTracks().length);
        self.handleLocalStream(stream);
        trackUsage('localMedia', {
            audio: stream.getAudioTracks().length,
            video: stream.getVideoTracks().length
        });
    };
    // (Removed a duplicate `var self = this;` declaration here.)
    this.getUserMediaWithConstraints(
        ['audio', 'video'],
        cb,
        function (error) {
            console.error('failed to obtain audio/video stream - trying audio only', error);
            self.getUserMediaWithConstraints(
                ['audio'],
                cb,
                function (error) {
                    console.error('failed to obtain audio/video stream - stop', error);
                    trackUsage('localMediaError', {
                        media: error.media || 'video',
                        name : error.name
                    });
                    // Fixed: the concatenated message previously read
                    // "...microphoneand/or camera." (missing space).
                    messageHandler.showError("Error",
                            "Failed to obtain permissions to use the local microphone " +
                            "and/or camera.");
                }
            );
        },
            config.resolution || '360');
};
307
+
308
/**
 * Splits the combined stream obtained from getUserMedia into separate audio
 * and video streams (WebKit only) and registers them with the RTC service.
 * @param stream the combined MediaStream returned by getUserMedia.
 */
RTCUtils.prototype.handleLocalStream = function(stream)
{
    if(window.webkitMediaStream)
    {
        // Copy the tracks into two fresh webkit streams so audio and video
        // can be handled (and muted) independently.
        var audioStream = new webkitMediaStream();
        var videoStream = new webkitMediaStream();
        var audioTracks = stream.getAudioTracks();
        var videoTracks = stream.getVideoTracks();
        for (var i = 0; i < audioTracks.length; i++) {
            audioStream.addTrack(audioTracks[i]);
        }

        this.service.createLocalStream(audioStream, "audio");

        for (i = 0; i < videoTracks.length; i++) {
            videoStream.addTrack(videoTracks[i]);
        }


        this.service.createLocalStream(videoStream, "video");
    }
    else
    {//firefox
        // NOTE(review): the combined stream is registered with type
        // "stream"; RTC.createLocalStream treats any non-"audio" type as
        // video (it becomes RTC.localVideo) — confirm this is the desired
        // Firefox behavior.
        this.service.createLocalStream(stream, "stream");
    }

};



module.exports = RTCUtils;

+ 32
- 32
modules/statistics/RTPStatsCollector.js Dosyayı Görüntüle

@@ -14,10 +14,10 @@ function calculatePacketLoss(lostPackets, totalPackets) {
14 14
 }
15 15
 
16 16
 function getStatValue(item, name) {
17
-    if(!keyMap[RTC.browser][name])
17
+    if(!keyMap[RTC.getBrowserType()][name])
18 18
         throw "The property isn't supported!";
19
-    var key = keyMap[RTC.browser][name];
20
-    return RTC.browser == "chrome"? item.stat(key) : item[key];
19
+    var key = keyMap[RTC.getBrowserType()][name];
20
+    return RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_CHROME? item.stat(key) : item[key];
21 21
 }
22 22
 
23 23
 /**
@@ -357,37 +357,37 @@ StatsCollector.prototype.logStats = function () {
357 357
     this.statsToBeLogged.stats = {};
358 358
     this.statsToBeLogged.timestamps = [];
359 359
 };
360
-var keyMap = {
361
-    "firefox": {
362
-        "ssrc": "ssrc",
363
-        "packetsReceived": "packetsReceived",
364
-        "packetsLost": "packetsLost",
365
-        "packetsSent": "packetsSent",
366
-        "bytesReceived": "bytesReceived",
367
-        "bytesSent": "bytesSent"
368
-    },
369
-    "chrome": {
370
-        "receiveBandwidth": "googAvailableReceiveBandwidth",
371
-        "sendBandwidth": "googAvailableSendBandwidth",
372
-        "remoteAddress": "googRemoteAddress",
373
-        "transportType": "googTransportType",
374
-        "localAddress": "googLocalAddress",
375
-        "activeConnection": "googActiveConnection",
376
-        "ssrc": "ssrc",
377
-        "packetsReceived": "packetsReceived",
378
-        "packetsSent": "packetsSent",
379
-        "packetsLost": "packetsLost",
380
-        "bytesReceived": "bytesReceived",
381
-        "bytesSent": "bytesSent",
382
-        "googFrameHeightReceived": "googFrameHeightReceived",
383
-        "googFrameWidthReceived": "googFrameWidthReceived",
384
-        "googFrameHeightSent": "googFrameHeightSent",
385
-        "googFrameWidthSent": "googFrameWidthSent",
386
-        "audioInputLevel": "audioInputLevel",
387
-        "audioOutputLevel": "audioOutputLevel"
388
-    }
360
+var keyMap = {};
361
+keyMap[RTCBrowserType.RTC_BROWSER_FIREFOX] = {
362
+    "ssrc": "ssrc",
363
+    "packetsReceived": "packetsReceived",
364
+    "packetsLost": "packetsLost",
365
+    "packetsSent": "packetsSent",
366
+    "bytesReceived": "bytesReceived",
367
+    "bytesSent": "bytesSent"
368
+};
369
+keyMap[RTCBrowserType.RTC_BROWSER_CHROME] = {
370
+    "receiveBandwidth": "googAvailableReceiveBandwidth",
371
+    "sendBandwidth": "googAvailableSendBandwidth",
372
+    "remoteAddress": "googRemoteAddress",
373
+    "transportType": "googTransportType",
374
+    "localAddress": "googLocalAddress",
375
+    "activeConnection": "googActiveConnection",
376
+    "ssrc": "ssrc",
377
+    "packetsReceived": "packetsReceived",
378
+    "packetsSent": "packetsSent",
379
+    "packetsLost": "packetsLost",
380
+    "bytesReceived": "bytesReceived",
381
+    "bytesSent": "bytesSent",
382
+    "googFrameHeightReceived": "googFrameHeightReceived",
383
+    "googFrameWidthReceived": "googFrameWidthReceived",
384
+    "googFrameHeightSent": "googFrameHeightSent",
385
+    "googFrameWidthSent": "googFrameWidthSent",
386
+    "audioInputLevel": "audioInputLevel",
387
+    "audioOutputLevel": "audioOutputLevel"
389 388
 };
390 389
 
390
+
391 391
 /**
392 392
  * Stats processing logic.
393 393
  */

+ 18
- 12
modules/statistics/statistics.js Dosyayı Görüntüle

@@ -4,7 +4,9 @@
4 4
 var LocalStats = require("./LocalStatsCollector.js");
5 5
 var RTPStats = require("./RTPStatsCollector.js");
6 6
 var EventEmitter = require("events");
7
-//var StreamEventTypes = require("../service/RTC/StreamEventTypes.js");
7
+//These lines should be uncommented when require works in app.js
8
+//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
9
+//var RTCBrowserType = require("../../service/RTC/RTCBrowserType");
8 10
 //var XMPPEvents = require("../service/xmpp/XMPPEvents");
9 11
 
10 12
 var eventEmitter = new EventEmitter();
@@ -13,8 +15,6 @@ var localStats = null;
13 15
 
14 16
 var rtpStats = null;
15 17
 
16
-var RTCService = null;
17
-
18 18
 function stopLocal()
19 19
 {
20 20
     if(localStats)
@@ -49,6 +49,16 @@ function startRemoteStats (peerconnection) {
49 49
 
50 50
 }
51 51
 
52
+function onStreamCreated(stream)
53
+{
54
+    if(stream.getAudioTracks().length === 0)
55
+        return;
56
+
57
+    localStats = new LocalStats(stream, 100, this,
58
+        eventEmitter);
59
+    localStats.start();
60
+}
61
+
52 62
 
53 63
 var statistics =
54 64
 {
@@ -103,7 +113,7 @@ var statistics =
103 113
         stopRemote();
104 114
     },
105 115
 
106
-    onConfereceCreated: function (event) {
116
+    onConferenceCreated: function (event) {
107 117
         startRemoteStats(event.peerconnection);
108 118
     },
109 119
 
@@ -115,15 +125,11 @@ var statistics =
115 125
         }
116 126
     },
117 127
 
118
-    onStreamCreated: function(stream)
119
-    {
120
-        if(stream.getAudioTracks().length === 0)
121
-            return;
122
-
123
-        localStats = new LocalStats(stream, 100, this,
124
-            eventEmitter);
125
-        localStats.start();
128
+    start: function () {
129
+        RTC.addStreamListener(onStreamCreated,
130
+            StreamEventTypes.EVENT_TYPE_LOCAL_CREATED);
126 131
     }
132
+
127 133
 };
128 134
 
129 135
 

+ 7
- 0
service/RTC/MediaStreamTypes.js Dosyayı Görüntüle

@@ -0,0 +1,7 @@
1
+var MediaStreamType = {
2
+    VIDEO_TYPE: "Video",
3
+
4
+    AUDIO_TYPE: "Audio"
5
+};
6
+////These lines should be uncommented when require works in app.js
7
+//module.exports = MediaStreamType;

+ 7
- 0
service/RTC/RTCBrowserType.js Dosyayı Görüntüle

@@ -0,0 +1,7 @@
1
+var RTCBrowserType = {
2
+    RTC_BROWSER_CHROME: "rtc_browser.chrome",
3
+
4
+    RTC_BROWSER_FIREFOX: "rtc_browser.firefox"
5
+};
6
+
7
+//module.exports = RTCBrowserType;

+ 12
- 0
service/RTC/StreamEventTypes.js Dosyayı Görüntüle

@@ -0,0 +1,12 @@
1
+var StreamEventTypes = {
2
+    EVENT_TYPE_LOCAL_CREATED: "stream.local_created",
3
+
4
+    EVENT_TYPE_LOCAL_ENDED: "stream.local_ended",
5
+
6
+    EVENT_TYPE_REMOTE_CREATED: "stream.remote_created",
7
+
8
+    EVENT_TYPE_REMOTE_ENDED: "stream.remote_ended"
9
+};
10
+
11
+//These lines should be uncommented when require works in app.js
12
+//module.exports = StreamEventTypes;

+ 20
- 1
videolayout.js Dosyayı Görüntüle

@@ -20,6 +20,21 @@ var VideoLayout = (function (my) {
20 20
             || (lastNEndpointsCache && lastNEndpointsCache.indexOf(resource) !== -1);
21 21
     };
22 22
 
23
+    my.onLocalStreamCreated = function (stream) {
24
+        switch(stream.type)
25
+        {
26
+            case "audio":
27
+                VideoLayout.changeLocalAudio(stream.getOriginalStream());
28
+                break;
29
+            case "video":
30
+                VideoLayout.changeLocalVideo(stream.getOriginalStream(), true);
31
+                break;
32
+            case "stream":
33
+                VideoLayout.changeLocalStream(stream.getOriginalStream());
34
+                break;
35
+        }
36
+    };
37
+
23 38
     my.changeLocalStream = function (stream) {
24 39
         connection.jingle.localAudio = stream;
25 40
         VideoLayout.changeLocalVideo(stream, true);
@@ -1495,6 +1510,10 @@ var VideoLayout = (function (my) {
1495 1510
      * On video muted event.
1496 1511
      */
1497 1512
     $(document).bind('videomuted.muc', function (event, jid, isMuted) {
1513
+        if(!RTC.muteRemoteVideoStream(jid, isMuted))
1514
+            return;
1515
+
1516
+        Avatar.showUserAvatar(jid, isMuted);
1498 1517
         var videoSpanId = null;
1499 1518
         if (jid === connection.emuc.myroomjid) {
1500 1519
             videoSpanId = 'localVideoContainer';
@@ -1674,7 +1693,7 @@ var VideoLayout = (function (my) {
1674 1693
                     console.log("Add to last N", resourceJid);
1675 1694
 
1676 1695
                     var jid = connection.emuc.findJidFromResource(resourceJid);
1677
-                    var mediaStream = mediaStreams[jid][MediaStream.VIDEO_TYPE];
1696
+                    var mediaStream = RTC.remoteStreams[jid][MediaStreamType.VIDEO_TYPE];
1678 1697
                     var sel = $('#participant_' + resourceJid + '>video');
1679 1698
 
1680 1699
                     var videoStream = simulcast.getReceivingVideoStream(

Loading…
İptal
Kaydet