@@ -5,6 +5,13 @@ var AdapterJS = require("./adapter.screenshare");
 var SDPUtil = require("../xmpp/SDPUtil");
 
 var currentResolution = null;
+function DummyMediaStream(id) {
+    this.id = id;
+    this.label = id;
+    this.stop = function() { };
+    this.getAudioTracks = function() { return []; };
+    this.getVideoTracks = function() { return []; };
+}
 
 function getPreviousResolution(resolution) {
     if(!Resolutions[resolution])
@@ -135,420 +142,413 @@ function getConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid
 }
 
 //Options parameter is to pass config options. Currently uses only "useIPv6".
-function RTCUtils(RTCService, onTemasysPluginReady)
-{
-    var self = this;
-    this.service = RTCService;
-    if (RTCBrowserType.isFirefox()) {
-        var FFversion = RTCBrowserType.getFirefoxVersion();
-        if (FFversion >= 40) {
-            this.peerconnection = mozRTCPeerConnection;
-            this.getUserMedia = navigator.mozGetUserMedia.bind(navigator);
-            this.pc_constraints = {};
-            this.attachMediaStream = function (element, stream) {
-                // srcObject is being standardized and FF will eventually
-                // support that unprefixed. FF also supports the
-                // "element.src = URL.createObjectURL(...)" combo, but that
-                // will be deprecated in favour of srcObject.
-                //
-                // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
-                // https://github.com/webrtc/samples/issues/302
-                if(!element[0])
-                    return;
-                element[0].mozSrcObject = stream;
-                element[0].play();
-            };
-            this.getStreamID = function (stream) {
-                var id = stream.id;
-                if (!id) {
-                    var tracks = stream.getVideoTracks();
-                    if (!tracks || tracks.length === 0) {
-                        tracks = stream.getAudioTracks();
+var RTCUtils = {
+    init: function (onTemasysPluginReady) {
+        var self = this;
+        if (RTCBrowserType.isFirefox()) {
+            var FFversion = RTCBrowserType.getFirefoxVersion();
+            if (FFversion >= 40) {
+                this.peerconnection = mozRTCPeerConnection;
+                this.getUserMedia = navigator.mozGetUserMedia.bind(navigator);
+                this.pc_constraints = {};
+                this.attachMediaStream = function (element, stream) {
+                    // srcObject is being standardized and FF will eventually
+                    // support that unprefixed. FF also supports the
+                    // "element.src = URL.createObjectURL(...)" combo, but that
+                    // will be deprecated in favour of srcObject.
+                    //
+                    // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
+                    // https://github.com/webrtc/samples/issues/302
+                    if (!element[0])
+                        return;
+                    element[0].mozSrcObject = stream;
+                    element[0].play();
+                };
+                this.getStreamID = function (stream) {
+                    var id = stream.id;
+                    if (!id) {
+                        var tracks = stream.getVideoTracks();
+                        if (!tracks || tracks.length === 0) {
+                            tracks = stream.getAudioTracks();
+                        }
+                        id = tracks[0].id;
                     }
-                    id = tracks[0].id;
-                }
-                return SDPUtil.filter_special_chars(id);
+                    return SDPUtil.filter_special_chars(id);
+                };
+                this.getVideoSrc = function (element) {
+                    if (!element)
+                        return null;
+                    return element.mozSrcObject;
+                };
+                this.setVideoSrc = function (element, src) {
+                    if (element)
+                        element.mozSrcObject = src;
+                };
+                RTCSessionDescription = mozRTCSessionDescription;
+                RTCIceCandidate = mozRTCIceCandidate;
+            } else {
+                console.error(
+                    "Firefox version too old: " + FFversion + ". Required >= 40.");
+                window.location.href = 'unsupported_browser.html';
+                return;
+            }
+
+        } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) {
+            this.peerconnection = webkitRTCPeerConnection;
+            this.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
+            this.attachMediaStream = function (element, stream) {
+                element.attr('src', webkitURL.createObjectURL(stream));
+            };
+            this.getStreamID = function (stream) {
+                // streams from FF endpoints have the characters '{' and '}'
+                // that make jQuery choke.
+                return SDPUtil.filter_special_chars(stream.id);
             };
             this.getVideoSrc = function (element) {
-                if(!element)
+                if (!element)
                     return null;
-                return element.mozSrcObject;
+                return element.getAttribute("src");
             };
             this.setVideoSrc = function (element, src) {
-                if(element)
-                    element.mozSrcObject = src;
-            };
-            RTCSessionDescription = mozRTCSessionDescription;
-            RTCIceCandidate = mozRTCIceCandidate;
-        } else {
-            console.error(
-                "Firefox version too old: " + FFversion + ". Required >= 40.");
-            window.location.href = 'unsupported_browser.html';
-            return;
-        }
-
-    } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) {
-        this.peerconnection = webkitRTCPeerConnection;
-        this.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
-        this.attachMediaStream = function (element, stream) {
-            element.attr('src', webkitURL.createObjectURL(stream));
-        };
-        this.getStreamID = function (stream) {
-            // streams from FF endpoints have the characters '{' and '}'
-            // that make jQuery choke.
-            return SDPUtil.filter_special_chars(stream.id);
-        };
-        this.getVideoSrc = function (element) {
-            if(!element)
-                return null;
-            return element.getAttribute("src");
-        };
-        this.setVideoSrc = function (element, src) {
-            if(element)
-                element.setAttribute("src", src);
-        };
-        // DTLS should now be enabled by default but..
-        this.pc_constraints = {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]};
-        if (this.service.options.useIPv6) {
-            // https://code.google.com/p/webrtc/issues/detail?id=2828
-            this.pc_constraints.optional.push({googIPv6: true});
-        }
-        if (navigator.userAgent.indexOf('Android') != -1) {
-            this.pc_constraints = {}; // disable DTLS on Android
-        }
-        if (!webkitMediaStream.prototype.getVideoTracks) {
-            webkitMediaStream.prototype.getVideoTracks = function () {
-                return this.videoTracks;
-            };
-        }
-        if (!webkitMediaStream.prototype.getAudioTracks) {
-            webkitMediaStream.prototype.getAudioTracks = function () {
-                return this.audioTracks;
+                if (element)
+                    element.setAttribute("src", src);
             };
+            // DTLS should now be enabled by default but..
+            this.pc_constraints = {'optional': [
+                {'DtlsSrtpKeyAgreement': 'true'}
+            ]};
+            if (this.service.options.useIPv6) {
+                // https://code.google.com/p/webrtc/issues/detail?id=2828
+                this.pc_constraints.optional.push({googIPv6: true});
+            }
+            if (navigator.userAgent.indexOf('Android') != -1) {
+                this.pc_constraints = {}; // disable DTLS on Android
+            }
+            if (!webkitMediaStream.prototype.getVideoTracks) {
+                webkitMediaStream.prototype.getVideoTracks = function () {
+                    return this.videoTracks;
+                };
+            }
+            if (!webkitMediaStream.prototype.getAudioTracks) {
+                webkitMediaStream.prototype.getAudioTracks = function () {
+                    return this.audioTracks;
+                };
+            }
             }
-    }
-    // Detect IE/Safari
-    else if (RTCBrowserType.isTemasysPluginUsed()) {
+        // Detect IE/Safari
+        else if (RTCBrowserType.isTemasysPluginUsed()) {

-        //AdapterJS.WebRTCPlugin.setLogLevel(
-        //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
+            //AdapterJS.WebRTCPlugin.setLogLevel(
+            //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);

-        AdapterJS.webRTCReady(function (isPlugin) {
+            AdapterJS.webRTCReady(function (isPlugin) {

-            self.peerconnection = RTCPeerConnection;
-            self.getUserMedia = getUserMedia;
-            self.attachMediaStream = function (elSel, stream) {
+                self.peerconnection = RTCPeerConnection;
+                self.getUserMedia = getUserMedia;
+                self.attachMediaStream = function (elSel, stream) {

-                if (stream.id === "dummyAudio" || stream.id === "dummyVideo") {
-                    return;
-                }
+                    if (stream.id === "dummyAudio" || stream.id === "dummyVideo") {
+                        return;
+                    }

-                attachMediaStream(elSel[0], stream);
-            };
-            self.getStreamID = function (stream) {
-                var id = SDPUtil.filter_special_chars(stream.label);
-                return id;
-            };
-            self.getVideoSrc = function (element) {
-                if (!element) {
-                    console.warn("Attempt to get video SRC of null element");
+                    attachMediaStream(elSel[0], stream);
+                };
+                self.getStreamID = function (stream) {
+                    var id = SDPUtil.filter_special_chars(stream.label);
+                    return id;
+                };
+                self.getVideoSrc = function (element) {
+                    if (!element) {
+                        console.warn("Attempt to get video SRC of null element");
+                        return null;
+                    }
+                    var children = element.children;
+                    for (var i = 0; i !== children.length; ++i) {
+                        if (children[i].name === 'streamId') {
+                            return children[i].value;
+                        }
+                    }
+                    //console.info(element.id + " SRC: " + src);
                     return null;
-                }
-                var children = element.children;
-                for (var i = 0; i !== children.length; ++i) {
-                    if (children[i].name === 'streamId') {
-                        return children[i].value;
+                };
+                self.setVideoSrc = function (element, src) {
+                    //console.info("Set video src: ", element, src);
+                    if (!src) {
+                        console.warn("Not attaching video stream, 'src' is null");
+                        return;
                     }
-                }
-                //console.info(element.id + " SRC: " + src);
-                return null;
-            };
-            self.setVideoSrc = function (element, src) {
-                //console.info("Set video src: ", element, src);
-                if (!src) {
-                    console.warn("Not attaching video stream, 'src' is null");
-                    return;
-                }
-                AdapterJS.WebRTCPlugin.WaitForPluginReady();
-                var stream = AdapterJS.WebRTCPlugin.plugin
-                    .getStreamWithId(AdapterJS.WebRTCPlugin.pageId, src);
-                attachMediaStream(element, stream);
-            };
+                    AdapterJS.WebRTCPlugin.WaitForPluginReady();
+                    var stream = AdapterJS.WebRTCPlugin.plugin
+                        .getStreamWithId(AdapterJS.WebRTCPlugin.pageId, src);
+                    attachMediaStream(element, stream);
+                };

-            onTemasysPluginReady(isPlugin);
-        });
-    } else {
-        try {
-            console.log('Browser does not appear to be WebRTC-capable');
-        } catch (e) { }
-        window.location.href = 'unsupported_browser.html';
-    }
+                onTemasysPluginReady(isPlugin);
+            });
+        } else {
+            try {
+                console.log('Browser does not appear to be WebRTC-capable');
+            } catch (e) {
+            }
+            window.location.href = 'unsupported_browser.html';
+        }

-}
+    },


-RTCUtils.prototype.getUserMediaWithConstraints = function(
-    um, success_callback, failure_callback, resolution,bandwidth, fps,
-    desktopStream) {
-    currentResolution = resolution;
-    // Check if we are running on Android device
-    var isAndroid = navigator.userAgent.indexOf('Android') != -1;
+    getUserMediaWithConstraints: function (RTC, um, success_callback, failure_callback, resolution, bandwidth, fps, desktopStream) {
+        // Check if we are running on Android device
+        var isAndroid = navigator.userAgent.indexOf('Android') != -1;

-    var constraints = getConstraints(
-        um, resolution, bandwidth, fps, desktopStream, isAndroid);
+        var constraints = getConstraints(
+            um, resolution, bandwidth, fps, desktopStream, isAndroid);

-    console.info("Get media constraints", constraints);
+        console.info("Get media constraints", constraints);

-    var self = this;
+        var self = this;

-    try {
-        this.getUserMedia(constraints,
-            function (stream) {
-                console.log('onUserMediaSuccess');
-                self.setAvailableDevices(um, true);
-                success_callback(stream);
-            },
-            function (error) {
-                self.setAvailableDevices(um, false);
-                console.warn('Failed to get access to local media. Error ',
-                    error, constraints);
-                if (failure_callback) {
-                    failure_callback(error);
-                }
-            });
-    } catch (e) {
-        console.error('GUM failed: ', e);
-        if(failure_callback) {
-            failure_callback(e);
+        try {
+            this.getUserMedia(constraints,
+                function (stream) {
+                    console.log('onUserMediaSuccess');
+                    self.setAvailableDevices(RTC, um, true);
+                    success_callback(stream);
+                },
+                function (error) {
+                    self.setAvailableDevices(RTC, um, false);
+                    console.warn('Failed to get access to local media. Error ',
+                        error, constraints);
+                    if (failure_callback) {
+                        failure_callback(error, resolution);
+                    }
+                });
+        } catch (e) {
+            console.error('GUM failed: ', e);
+            if (failure_callback) {
+                failure_callback(e);
+            }
         }
-    }
-};
+    },

-RTCUtils.prototype.setAvailableDevices = function (um, available) {
-    var devices = {};
-    if(um.indexOf("video") != -1) {
-        devices.video = available;
-    }
-    if(um.indexOf("audio") != -1) {
-        devices.audio = available;
-    }
-    this.service.setDeviceAvailability(devices);
-};
-
-/**
- * We ask for audio and video combined stream in order to get permissions and
- * not to ask twice.
- */
-RTCUtils.prototype.obtainAudioAndVideoPermissions =
-    function(devices, usageOptions, resolution)
-{
-    var self = this;
-    // Get AV
+    setAvailableDevices: function (RTC, um, available) {
+        var devices = {};
+        if (um.indexOf("video") != -1) {
+            devices.video = available;
+        }
+        if (um.indexOf("audio") != -1) {
+            devices.audio = available;
+        }
+        RTC.setDeviceAvailability(devices);
+    },
+
+    /**
+     * We ask for audio and video combined stream in order to get permissions and
+     * not to ask twice.
+     */
+    obtainAudioAndVideoPermissions: function (RTC, devices, usageOptions, resolution) {
+        var self = this;
+        // Get AV
+
+        return new Promise(function (resolve, reject) {
+            var successCallback = function (stream) {
+                resolve(self.successCallback(stream, usageOptions));
+            };

-    return new Promise(function(resolve, reject) {
-        var successCallback = function (stream) {
-            resolve(self.successCallback(stream, usageOptions));
-        };
+            if (!devices)
+                devices = ['audio', 'video'];

-        if (!devices)
-            devices = ['audio', 'video'];
+            var newDevices = [];

-        var newDevices = [];

+            if (usageOptions)
+                for (var i = 0; i < devices.length; i++) {
+                    var device = devices[i];
+                    if (usageOptions[device] === true)
+                        newDevices.push(device);
+                }
+            else
+                newDevices = devices;

-        if (usageOptions)
-            for (var i = 0; i < devices.length; i++) {
-                var device = devices[i];
-                if (usageOptions[device] === true)
-                    newDevices.push(device);
+            if (newDevices.length === 0) {
+                successCallback();
+                return;
             }
-        else
-            newDevices = devices;

-        if (newDevices.length === 0) {
-            successCallback();
-            return;
-        }
+            if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) {

-        if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) {
-
-            // With FF/IE we can't split the stream into audio and video because FF
-            // doesn't support media stream constructors. So, we need to get the
-            // audio stream separately from the video stream using two distinct GUM
-            // calls. Not very user friendly :-( but we don't have many other
-            // options neither.
-            //
-            // Note that we pack those 2 streams in a single object and pass it to
-            // the successCallback method.
-            var obtainVideo = function (audioStream) {
-                self.getUserMediaWithConstraints(
-                    ['video'],
-                    function (videoStream) {
-                        return successCallback({
-                            audioStream: audioStream,
-                            videoStream: videoStream
-                        });
+                // With FF/IE we can't split the stream into audio and video because FF
+                // doesn't support media stream constructors. So, we need to get the
+                // audio stream separately from the video stream using two distinct GUM
+                // calls. Not very user friendly :-( but we don't have many other
+                // options neither.
+                //
+                // Note that we pack those 2 streams in a single object and pass it to
+                // the successCallback method.
+                var obtainVideo = function (audioStream) {
+                    self.getUserMediaWithConstraints(
+                        RTC,
+                        ['video'],
+                        function (videoStream) {
+                            return successCallback({
+                                audioStream: audioStream,
+                                videoStream: videoStream
+                            });
+                        },
+                        function (error, resolution) {
+                            console.error(
+                                'failed to obtain video stream - stop', error);
+                            self.errorCallback(error, resolve, RTC, resolution);
+                        },
+                        config.resolution || '360');
+                };
+                var obtainAudio = function () {
+                    self.getUserMediaWithConstraints(
+                        RTC,
+                        ['audio'],
+                        function (audioStream) {
+                            if (newDevices.indexOf('video') !== -1)
+                                obtainVideo(audioStream);
+                        },
+                        function (error) {
+                            console.error(
+                                'failed to obtain audio stream - stop', error);
+                            self.errorCallback(error, resolve, RTC);
+                        }
+                    );
+                };
+                if (newDevices.indexOf('audio') !== -1) {
+                    obtainAudio();
+                } else {
+                    obtainVideo(null);
+                }
+            } else {
+                this.getUserMediaWithConstraints(
+                    RTC,
+                    newDevices,
+                    function (stream) {
+                        successCallback(stream);
                     },
-                    function (error) {
-                        console.error(
-                            'failed to obtain video stream - stop', error);
-                        self.errorCallback(error, resolve);
+                    function (error, resolution) {
+                        self.errorCallback(error, resolve, RTC, resolution);
                     },
-                    config.resolution || '360');
-            };
-            var obtainAudio = function () {
-                self.getUserMediaWithConstraints(
-                    ['audio'],
-                    function (audioStream) {
-                        if (newDevices.indexOf('video') !== -1)
-                            obtainVideo(audioStream);
-                    },
-                    function (error) {
-                        console.error(
-                            'failed to obtain audio stream - stop', error);
-                        self.errorCallback(error, resolve);
-                    }
-                );
-            };
-            if (newDevices.indexOf('audio') !== -1) {
-                obtainAudio();
-            } else {
-                obtainVideo(null);
+                    resolution || '360');
             }
-        } else {
-            this.getUserMediaWithConstraints(
-                newDevices,
+        }.bind(this));
+    },
+
+    successCallback: function (stream, usageOptions) {
+        // If this is FF or IE, the stream parameter is *not* a MediaStream object,
+        // it's an object with two properties: audioStream, videoStream.
+        if (stream && stream.getAudioTracks && stream.getVideoTracks)
+            console.log('got', stream, stream.getAudioTracks().length,
+                stream.getVideoTracks().length);
+        return this.handleLocalStream(stream, usageOptions);
+    },
+
+    errorCallback: function (error, resolve, RTC, currentResolution) {
+        var self = this;
+        console.error('failed to obtain audio/video stream - trying audio only', error);
+        var resolution = getPreviousResolution(currentResolution);
+        if (typeof error == "object" && error.constraintName && error.name
+            && (error.name == "ConstraintNotSatisfiedError" ||
+            error.name == "OverconstrainedError") &&
+            (error.constraintName == "minWidth" || error.constraintName == "maxWidth" ||
+            error.constraintName == "minHeight" || error.constraintName == "maxHeight")
+            && resolution != null) {
+            self.getUserMediaWithConstraints(RTC, ['audio', 'video'],
+                function (stream) {
+                    resolve(self.successCallback(stream));
+                }, function (error, resolution) {
+                    return self.errorCallback(error, resolve, RTC, resolution);
+                }, resolution);
+        }
+        else {
+            self.getUserMediaWithConstraints(
+                RTC,
+                ['audio'],
                 function (stream) {
-                    successCallback(stream);
+                    resolve(self.successCallback(stream));
                 },
                 function (error) {
-                    self.errorCallback(error, resolve);
-                },
-                resolution || '360');
+                    console.error('failed to obtain audio/video stream - stop',
+                        error);
+                    resolve(self.successCallback(null));
+                }
+            );
         }
-    }.bind(this));
-};
-
-RTCUtils.prototype.successCallback = function (stream, usageOptions) {
-    // If this is FF or IE, the stream parameter is *not* a MediaStream object,
-    // it's an object with two properties: audioStream, videoStream.
-    if (stream && stream.getAudioTracks && stream.getVideoTracks)
-        console.log('got', stream, stream.getAudioTracks().length,
-            stream.getVideoTracks().length);
-    return this.handleLocalStream(stream, usageOptions);
-};
-
-RTCUtils.prototype.errorCallback = function (error, resolve) {
-    var self = this;
-    console.error('failed to obtain audio/video stream - trying audio only', error);
-    var resolution = getPreviousResolution(currentResolution);
-    if(typeof error == "object" && error.constraintName && error.name
-        && (error.name == "ConstraintNotSatisfiedError" ||
-        error.name == "OverconstrainedError") &&
-        (error.constraintName == "minWidth" || error.constraintName == "maxWidth" ||
-        error.constraintName == "minHeight" || error.constraintName == "maxHeight")
-        && resolution != null)
-    {
-        self.getUserMediaWithConstraints(['audio', 'video'],
-            function (stream) {
-                resolve(self.successCallback(stream));
-            }, function (error) {
-                return self.errorCallback(error);
-            }, resolution);
-    }
-    else {
-        self.getUserMediaWithConstraints(
-            ['audio'],
-            function (stream) {
-                resolve(self.successCallback(stream));
-            },
-            function (error) {
-                console.error('failed to obtain audio/video stream - stop',
-                    error);
-                resolve(self.successCallback(null));
-            }
-        );
-    }
-};
-
-RTCUtils.prototype.handleLocalStream = function(stream, usageOptions) {
-    // If this is FF, the stream parameter is *not* a MediaStream object, it's
-    // an object with two properties: audioStream, videoStream.
-    if(window.webkitMediaStream)
-    {
-        audioStream = new webkitMediaStream();
-        videoStream = new webkitMediaStream();
-        if(stream) {
-            var audioTracks = stream.getAudioTracks();
-
-            for (var i = 0; i < audioTracks.length; i++) {
-                audioStream.addTrack(audioTracks[i]);
-            }
+    },
+
+    handleLocalStream: function (stream, usageOptions) {
+        // If this is FF, the stream parameter is *not* a MediaStream object, it's
+        // an object with two properties: audioStream, videoStream.
+        if (window.webkitMediaStream) {
+            audioStream = new webkitMediaStream();
+            videoStream = new webkitMediaStream();
+            if (stream) {
+                var audioTracks = stream.getAudioTracks();
+
+                for (var i = 0; i < audioTracks.length; i++) {
+                    audioStream.addTrack(audioTracks[i]);
+                }

-            var videoTracks = stream.getVideoTracks();
+                var videoTracks = stream.getVideoTracks();

-            for (i = 0; i < videoTracks.length; i++) {
-                videoStream.addTrack(videoTracks[i]);
+                for (i = 0; i < videoTracks.length; i++) {
+                    videoStream.addTrack(videoTracks[i]);
+                }
             }
-        }

-    }
-    else if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed())
-    { // Firefox and Temasys plugin
-        if (stream && stream.audioStream)
-            audioStream = stream.audioStream;
-        else
-            audioStream = new DummyMediaStream("dummyAudio");
-
-        if (stream && stream.videoStream)
-            videoStream = stream.videoStream;
-        else
-            videoStream = new DummyMediaStream("dummyVideo");
-    }
+        }
+        else if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) { // Firefox and Temasys plugin
+            if (stream && stream.audioStream)
+                audioStream = stream.audioStream;
+            else
+                audioStream = new DummyMediaStream("dummyAudio");
+
+            if (stream && stream.videoStream)
+                videoStream = stream.videoStream;
+            else
+                videoStream = new DummyMediaStream("dummyVideo");
+        }

-    var audioMuted = (usageOptions && usageOptions.audio === false),
-        videoMuted = (usageOptions && usageOptions.video === false);
+        var audioMuted = (usageOptions && usageOptions.audio === false),
+            videoMuted = (usageOptions && usageOptions.video === false);

-    var audioGUM = (!usageOptions || usageOptions.audio !== false),
-        videoGUM = (!usageOptions || usageOptions.video !== false);
+        var audioGUM = (!usageOptions || usageOptions.audio !== false),
+            videoGUM = (!usageOptions || usageOptions.video !== false);

-    return this.service.createLocalStreams(
-        [{stream: audioStream, type: "audio", isMuted: audioMuted, isGUMStream: audioGUM, videoType: null},
-         {stream: videoStream, type: "video", isMuted: videoMuted, isGUMStream: videoGUM, videoType: "camera"}]);
-};
+        return this.service.createLocalStreams(
+            [
+                {stream: audioStream, type: "audio", isMuted: audioMuted, isGUMStream: audioGUM, videoType: null},
+                {stream: videoStream, type: "video", isMuted: videoMuted, isGUMStream: videoGUM, videoType: "camera"}
+            ]);
+    },

-function DummyMediaStream(id) {
-    this.id = id;
-    this.label = id;
-    this.stop = function() { };
-    this.getAudioTracks = function() { return []; };
-    this.getVideoTracks = function() { return []; };
-}
+    createStream: function (stream, isVideo) {
+        var newStream = null;
+        if (window.webkitMediaStream) {
+            newStream = new webkitMediaStream();
+            if (newStream) {
+                var tracks = (isVideo ? stream.getVideoTracks() : stream.getAudioTracks());

-RTCUtils.prototype.createStream = function(stream, isVideo) {
-    var newStream = null;
-    if (window.webkitMediaStream) {
-        newStream = new webkitMediaStream();
-        if (newStream) {
-            var tracks = (isVideo ? stream.getVideoTracks() : stream.getAudioTracks());
+                for (var i = 0; i < tracks.length; i++) {
+                    newStream.addTrack(tracks[i]);
+                }
+            }

-            for (var i = 0; i < tracks.length; i++) {
-                newStream.addTrack(tracks[i]);
+        }
+        else {
+            // FIXME: this is duplicated with 'handleLocalStream' !!!
+            if (stream) {
+                newStream = stream;
+            } else {
+                newStream =
+                    new DummyMediaStream(isVideo ? "dummyVideo" : "dummyAudio");
             }
         }

+        return newStream;
     }
-    else {
-        // FIXME: this is duplicated with 'handleLocalStream' !!!
-        if (stream) {
-            newStream = stream;
-        } else {
-            newStream =
-                new DummyMediaStream(isVideo ? "dummyVideo" : "dummyAudio");
-        }
-    }
-
-    return newStream;
-};
+}

 module.exports = RTCUtils;
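
For reference, a minimal usage sketch of the refactored module. This is an illustration only, not taken from the diff: the require path, the in-scope RTC service object (assumed here to provide setDeviceAvailability() and createLocalStreams() as the code above expects), and the argument values are all assumptions.

    var RTCUtils = require("./RTCUtils");

    // Wire up the browser-specific getUserMedia/attachMediaStream handlers.
    // The callback is only invoked in the Temasys (IE/Safari) branch, once
    // AdapterJS reports the plugin is ready.
    RTCUtils.init(function (isPlugin) {
        console.log("WebRTC ready, Temasys plugin:", isPlugin);
    });

    // The permission helper now takes the RTC service explicitly and returns
    // a Promise that resolves once the local streams have been created.
    RTCUtils.obtainAudioAndVideoPermissions(RTC, ['audio', 'video'], null, '360')
        .then(function (localStreams) {
            console.log("local streams created", localStreams);
        });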