/*! adapterjs - v0.14.0 - 2016-10-03 */
var console = require("jitsi-meet-logger").getLogger(__filename);
// Adapter's interface.
var AdapterJS = AdapterJS || {};
// Browserify compatibility
if(typeof exports !== 'undefined') {
module.exports = AdapterJS;
}
AdapterJS.options = AdapterJS.options || {};
// uncomment to get virtual webcams
// AdapterJS.options.getAllCams = true;
// uncomment to prevent the install prompt when the plugin is not yet installed
// AdapterJS.options.hidePluginInstallPrompt = true;
// AdapterJS version
AdapterJS.VERSION = '0.14.0';
// This function will be called when the WebRTC API is ready to be used
// Whether it is the native implementation (Chrome, Firefox, Opera) or
// the plugin
// You may override this function to synchronise the start of your application
// with the WebRTC API being ready.
// If you decide not to use this synchronisation, it may result in
// extensive CPU usage on plugin start (once per tab loaded).
// Params:
// - isUsingPlugin: true if the WebRTC plugin is being used, false otherwise
//
AdapterJS.onwebrtcready = AdapterJS.onwebrtcready || function(isUsingPlugin) {
// The WebRTC API is ready.
// Override me and do whatever you want here
};
// New interface to store multiple callbacks, private
AdapterJS._onwebrtcreadies = [];
// Sets a callback function to be called when the WebRTC interface is ready.
// The first argument is the function to call back.
// Throws an error if the first argument is not a function.
AdapterJS.webRTCReady = function (callback) {
if (typeof callback !== 'function') {
throw new Error('Callback provided is not a function');
}
if (true === AdapterJS.onwebrtcreadyDone) {
// All WebRTC interfaces are ready, just call the callback
callback(null !== AdapterJS.WebRTCPlugin.plugin);
} else {
// will be triggered automatically when your browser/plugin is ready.
AdapterJS._onwebrtcreadies.push(callback);
}
};
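// A minimal usage sketch (assuming AdapterJS has been loaded on the page):
//
//   AdapterJS.webRTCReady(function (isUsingPlugin) {
//     // The WebRTC API (native or plugin-backed) is now safe to use;
//     // isUsingPlugin is true when the Temasys plugin provides it.
//   });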
// Plugin namespace
AdapterJS.WebRTCPlugin = AdapterJS.WebRTCPlugin || {};
// The object to store plugin information
/* jshint ignore:start */
AdapterJS.WebRTCPlugin.pluginInfo = AdapterJS.WebRTCPlugin.pluginInfo || {
prefix : 'Tem',
plugName : 'TemWebRTCPlugin',
pluginId : 'plugin0',
type : 'application/x-temwebrtcplugin',
onload : '__TemWebRTCReady0',
portalLink : 'http://skylink.io/plugin/',
downloadLink : null, //set below
companyName: 'Temasys',
downloadLinks : {
mac: 'http://bit.ly/webrtcpluginpkg',
win: 'http://bit.ly/webrtcpluginmsi'
}
};
if(typeof AdapterJS.WebRTCPlugin.pluginInfo.downloadLinks !== "undefined" && AdapterJS.WebRTCPlugin.pluginInfo.downloadLinks !== null) {
if(!!navigator.platform.match(/^Mac/i)) {
AdapterJS.WebRTCPlugin.pluginInfo.downloadLink = AdapterJS.WebRTCPlugin.pluginInfo.downloadLinks.mac;
}
else if(!!navigator.platform.match(/^Win/i)) {
AdapterJS.WebRTCPlugin.pluginInfo.downloadLink = AdapterJS.WebRTCPlugin.pluginInfo.downloadLinks.win;
}
}
/* jshint ignore:end */
AdapterJS.WebRTCPlugin.TAGS = {
NONE : 'none',
AUDIO : 'audio',
VIDEO : 'video'
};
// Unique identifier of each opened page
AdapterJS.WebRTCPlugin.pageId = Math.random().toString(36).slice(2);
// Use this whenever you want to call the plugin.
AdapterJS.WebRTCPlugin.plugin = null;
// Set log level for the plugin once it is ready.
// The accepted values are listed in AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS below.
// This is an asynchronous function that will run when the plugin is ready.
AdapterJS.WebRTCPlugin.setLogLevel = null;
// Defines webrtc's JS interface according to the plugin's implementation.
// Define plugin Browsers as WebRTC Interface.
AdapterJS.WebRTCPlugin.defineWebRTCInterface = null;
// This function detects whether or not a plugin is installed.
// If the browser is not IE (Firefox, for example), one detection path is used;
// if it is IE, another is used. Otherwise the browser is not supported.
AdapterJS.WebRTCPlugin.isPluginInstalled = null;
// Lets adapter.js wait until the document is ready before injecting the plugin
AdapterJS.WebRTCPlugin.pluginInjectionInterval = null;
// Inject the HTML DOM object element into the page.
AdapterJS.WebRTCPlugin.injectPlugin = null;
// States of readiness that the plugin goes through when
// being injected and started
AdapterJS.WebRTCPlugin.PLUGIN_STATES = {
NONE : 0, // no plugin use
INITIALIZING : 1, // Detected need for plugin
INJECTING : 2, // Injecting plugin
INJECTED: 3, // Plugin element injected but not usable yet
READY: 4 // Plugin ready to be used
};
// Current state of the plugin. You cannot use the plugin before this is
// equal to AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY
AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.NONE;
// True if AdapterJS.onwebrtcready was already called, false otherwise
// Used to make sure AdapterJS.onwebrtcready is only called once
AdapterJS.onwebrtcreadyDone = false;
// Log levels for the plugin.
// To be set by calling AdapterJS.WebRTCPlugin.setLogLevel
/*
Log outputs are prefixed in some cases.
INFO: Information reported by the plugin.
ERROR: Errors originating from within the plugin.
WEBRTC: Errors originating from within the libWebRTC library.
*/
// From the least verbose to the most verbose
AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS = {
NONE : 'NONE',
ERROR : 'ERROR',
WARNING : 'WARNING',
INFO: 'INFO',
VERBOSE: 'VERBOSE',
SENSITIVE: 'SENSITIVE'
};
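// Illustrative sketch of raising the plugin log level once it is ready
// (setLogLevel is only assigned on the plugin code path, so the guard
// below is an assumption of this example):
//
//   AdapterJS.webRTCReady(function (isUsingPlugin) {
//     if (isUsingPlugin && AdapterJS.WebRTCPlugin.setLogLevel) {
//       AdapterJS.WebRTCPlugin.setLogLevel(
//         AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.ERROR);
//     }
//   });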
// Does a waiting check before proceeding to load the plugin.
AdapterJS.WebRTCPlugin.WaitForPluginReady = null;
// This method uses an interval to wait for the plugin to be ready.
AdapterJS.WebRTCPlugin.callWhenPluginReady = null;
// !!!! WARNING: DO NOT OVERRIDE THIS FUNCTION. !!!
// This function will be called when plugin is ready. It sends necessary
// details to the plugin.
// The function will wait for the document to be ready and then set the
// plugin state to AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY,
// indicating that it can start being requested.
// This function is not in the IE/Safari condition brackets so that
// the TemPluginLoaded function can also be called on Chrome/Firefox.
// This function is the only private function that is not encapsulated to
// allow the plugin method to be called.
__TemWebRTCReady0 = function () {
if (document.readyState === 'complete') {
AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY;
AdapterJS.maybeThroughWebRTCReady();
} else {
var timer = setInterval(function () {
if (document.readyState === 'complete') {
// TODO: update comments, we wait for the document to be ready
clearInterval(timer);
AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY;
AdapterJS.maybeThroughWebRTCReady();
}
}, 100);
}
};
AdapterJS.maybeThroughWebRTCReady = function() {
if (!AdapterJS.onwebrtcreadyDone) {
AdapterJS.onwebrtcreadyDone = true;
// If new interface for multiple callbacks used
if (AdapterJS._onwebrtcreadies.length) {
AdapterJS._onwebrtcreadies.forEach(function (callback) {
if (typeof(callback) === 'function') {
callback(AdapterJS.WebRTCPlugin.plugin !== null);
}
});
// Else, if no callbacks were registered on the new interface, assume the user used the old (deprecated) way of setting the callback through AdapterJS.onwebrtcready = ...
} else if (typeof(AdapterJS.onwebrtcready) === 'function') {
AdapterJS.onwebrtcready(AdapterJS.WebRTCPlugin.plugin !== null);
}
}
};
// Text namespace
AdapterJS.TEXT = {
PLUGIN: {
REQUIRE_INSTALLATION: 'This website requires you to install a WebRTC-enabling plugin ' +
'to work on this browser.',
NOT_SUPPORTED: 'Your browser does not support WebRTC.',
BUTTON: 'Install Now'
},
REFRESH: {
REQUIRE_REFRESH: 'Please refresh page',
BUTTON: 'Refresh Page'
}
};
// The mapping of ICE connection states.
// - starting: Ice connection is starting.
// - checking: Ice connection is checking.
// - connected: Ice connection is connected.
// - completed: Ice connection is connected.
// - done: Ice connection has been completed.
// - disconnected: Ice connection has been disconnected.
// - failed: Ice connection has failed.
// - closed: Ice connection is closed.
AdapterJS._iceConnectionStates = {
starting : 'starting',
checking : 'checking',
connected : 'connected',
completed : 'connected',
done : 'completed',
disconnected : 'disconnected',
failed : 'failed',
closed : 'closed'
};
// The ICE connection states that have been fired for each peer.
AdapterJS._iceConnectionFiredStates = [];
// Check if WebRTC Interface is defined.
AdapterJS.isDefined = null;
// This function helps to retrieve the webrtc detected browser information.
// This sets:
// - webrtcDetectedBrowser: The browser agent name.
// - webrtcDetectedVersion: The browser version.
// - webrtcMinimumVersion: The minimum browser version still supported by AJS.
// - webrtcDetectedType: The types of webRTC support.
// - 'moz': Mozilla implementation of webRTC.
// - 'webkit': WebKit implementation of webRTC.
// - 'plugin': Using the plugin implementation.
AdapterJS.parseWebrtcDetectedBrowser = function () {
var hasMatch = null;
// Detect Opera (8.0+)
if ((!!window.opr && !!opr.addons) || !!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0) {
hasMatch = navigator.userAgent.match(/OPR\/(\d+)/i) || [];
webrtcDetectedBrowser = 'opera';
webrtcDetectedVersion = parseInt(hasMatch[1] || '0', 10);
webrtcMinimumVersion = 26;
webrtcDetectedType = 'webkit';
webrtcDetectedDCSupport = 'SCTP'; // Opera 20+ uses Chrome 33
// Detect Bowser on iOS
} else if (navigator.userAgent.match(/Bowser\/[0-9.]*/g)) {
hasMatch = navigator.userAgent.match(/Bowser\/[0-9.]*/g) || [];
var chromiumVersion = parseInt((navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./i) || [])[2] || '0', 10);
webrtcDetectedBrowser = 'bowser';
webrtcDetectedVersion = parseFloat((hasMatch[0] || '0/0').split('/')[1], 10);
webrtcMinimumVersion = 0;
webrtcDetectedType = 'webkit';
webrtcDetectedDCSupport = chromiumVersion > 30 ? 'SCTP' : 'RTP';
// Detect Opera on iOS (does not support WebRTC yet)
} else if (navigator.userAgent.indexOf('OPiOS') > 0) {
hasMatch = navigator.userAgent.match(/OPiOS\/([0-9]+)\./);
// This browser does not support WebRTC yet
webrtcDetectedBrowser = 'opera';
webrtcDetectedVersion = parseInt(hasMatch[1] || '0', 10);
webrtcMinimumVersion = 0;
webrtcDetectedType = null;
webrtcDetectedDCSupport = null;
// Detect Chrome on iOS (does not support WebRTC yet)
} else if (navigator.userAgent.indexOf('CriOS') > 0) {
hasMatch = navigator.userAgent.match(/CriOS\/([0-9]+)\./) || [];
webrtcDetectedBrowser = 'chrome';
webrtcDetectedVersion = parseInt(hasMatch[1] || '0', 10);
webrtcMinimumVersion = 0;
webrtcDetectedType = null;
webrtcDetectedDCSupport = null;
// Detect Firefox on iOS (does not support WebRTC yet)
} else if (navigator.userAgent.indexOf('FxiOS') > 0) {
hasMatch = navigator.userAgent.match(/FxiOS\/([0-9]+)\./) || [];
// This browser does not support WebRTC yet
webrtcDetectedBrowser = 'firefox';
webrtcDetectedVersion = parseInt(hasMatch[1] || '0', 10);
webrtcMinimumVersion = 0;
webrtcDetectedType = null;
webrtcDetectedDCSupport = null;
// Detect IE (6-11)
} else if (/*@cc_on!@*/false || !!document.documentMode) {
hasMatch = /\brv[ :]+(\d+)/g.exec(navigator.userAgent) || [];
webrtcDetectedBrowser = 'IE';
webrtcDetectedVersion = parseInt(hasMatch[1], 10);
webrtcMinimumVersion = 9;
webrtcDetectedType = 'plugin';
webrtcDetectedDCSupport = 'SCTP';
if (!webrtcDetectedVersion) {
hasMatch = /\bMSIE[ :]+(\d+)/g.exec(navigator.userAgent) || [];
webrtcDetectedVersion = parseInt(hasMatch[1] || '0', 10);
}
// Detect Edge (20+)
} else if (!!window.StyleMedia || navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)) {
hasMatch = navigator.userAgent.match(/Edge\/(\d+).(\d+)$/) || [];
// Previous webrtc/adapter used 10547 as the minimum version, but according to the Edge
// release history this corresponds to 13.10547, where the ObjectRTC API is fully supported.
webrtcDetectedBrowser = 'edge';
webrtcDetectedVersion = parseFloat((hasMatch[0] || '0/0').split('/')[1], 10);
webrtcMinimumVersion = 13.10547;
webrtcDetectedType = 'ms';
webrtcDetectedDCSupport = null;
// Detect Firefox (1.0+)
// Placed before Safari check to ensure Firefox on Android is detected
} else if (typeof InstallTrigger !== 'undefined' || navigator.userAgent.indexOf('irefox') > 0) {
hasMatch = navigator.userAgent.match(/Firefox\/([0-9]+)\./) || [];
webrtcDetectedBrowser = 'firefox';
webrtcDetectedVersion = parseInt(hasMatch[1] || '0', 10);
webrtcMinimumVersion = 31;
webrtcDetectedType = 'moz';
webrtcDetectedDCSupport = 'SCTP';
// Detect Chrome (1+ and mobile)
// Placed before Safari check to ensure Chrome on Android is detected
} else if ((!!window.chrome && !!window.chrome.webstore) || navigator.userAgent.indexOf('Chrom') > 0) {
hasMatch = navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./i) || [];
webrtcDetectedBrowser = 'chrome';
webrtcDetectedVersion = parseInt(hasMatch[2] || '0', 10);
webrtcMinimumVersion = 38;
webrtcDetectedType = 'webkit';
webrtcDetectedDCSupport = webrtcDetectedVersion > 30 ? 'SCTP' : 'RTP'; // Chrome 31+ supports SCTP without flags
// Detect Safari
} else if (/^((?!chrome|android).)*safari/i.test(navigator.userAgent)) {
hasMatch = navigator.userAgent.match(/version\/(\d+)/i) || [];
var isMobile = navigator.userAgent.match(/(iPhone|iPad)/gi) || [];
webrtcDetectedBrowser = 'safari';
webrtcDetectedVersion = parseInt(hasMatch[1] || '0', 10);
webrtcMinimumVersion = 7;
webrtcDetectedType = isMobile.length === 0 ? 'plugin' : null;
webrtcDetectedDCSupport = isMobile.length === 0 ? 'SCTP' : null;
// Detect WebView on iOS (does not support WebRTC yet)
} else if (/(iPhone|iPod|iPad).*AppleWebKit(?!.*Safari)/i.test(navigator.userAgent)) {
hasMatch = navigator.userAgent.match(/AppleWebKit\/([0-9]+)\./) || [];
webrtcDetectedBrowser = 'safari';
webrtcDetectedVersion = parseInt(hasMatch[1] || '0', 10);
webrtcMinimumVersion = 0;
webrtcDetectedType = null;
webrtcDetectedDCSupport = null;
}
window.webrtcDetectedBrowser = webrtcDetectedBrowser;
window.webrtcDetectedVersion = webrtcDetectedVersion;
window.webrtcMinimumVersion = webrtcMinimumVersion;
window.webrtcDetectedType = webrtcDetectedType; // Scope it to window for better consistency
window.webrtcDetectedDCSupport = webrtcDetectedDCSupport; // Scope it to window for better consistency
};
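// Illustrative sketch of reading the detection results (values shown are
// examples, not guaranteed outputs):
//
//   AdapterJS.parseWebrtcDetectedBrowser();
//   window.webrtcDetectedBrowser; // e.g. 'chrome'
//   window.webrtcDetectedVersion; // e.g. 53
//   window.webrtcDetectedType;    // 'webkit', 'moz', 'plugin' or null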
AdapterJS.addEvent = function(elem, evnt, func) {
if (elem.addEventListener) { // W3C DOM
elem.addEventListener(evnt, func, false);
} else if (elem.attachEvent) {// OLD IE DOM
elem.attachEvent('on'+evnt, func);
} else { // Not much to do
elem[evnt] = func;
}
};
AdapterJS.renderNotificationBar = function (text, buttonText, buttonLink, openNewTab, displayRefreshBar) {
// only inject once the page is ready
if (document.readyState !== 'complete') {
return;
}
var w = window;
var i = document.createElement('iframe');
i.name = 'adapterjs-alert';
i.style.position = 'fixed';
i.style.top = '-41px';
i.style.left = 0;
i.style.right = 0;
i.style.width = '100%';
i.style.height = '40px';
i.style.backgroundColor = '#ffffe1';
i.style.border = 'none';
i.style.borderBottom = '1px solid #888888';
i.style.zIndex = '9999999';
if(typeof i.style.webkitTransition === 'string') {
i.style.webkitTransition = 'all .5s ease-out';
} else if(typeof i.style.transition === 'string') {
i.style.transition = 'all .5s ease-out';
}
document.body.appendChild(i);
var c = (i.contentWindow) ? i.contentWindow :
(i.contentDocument.document) ? i.contentDocument.document : i.contentDocument;
c.document.open();
c.document.write('<span style="font-family: Helvetica, Arial, sans-serif; ' +
'font-size: .9rem; padding: 4px; vertical-align: middle;">' + text + '</span>');
if(buttonText && buttonLink) {
// Write the "okay" and "cancel" buttons that the click handlers below look up by id.
c.document.write('<button id="okay">' + buttonText + '</button>' +
'<button id="cancel">Cancel</button>');
c.document.close();
// On click on okay
AdapterJS.addEvent(c.document.getElementById('okay'), 'click', function(e) {
if (!!displayRefreshBar) {
AdapterJS.renderNotificationBar(AdapterJS.TEXT.EXTENSION ?
AdapterJS.TEXT.EXTENSION.REQUIRE_REFRESH : AdapterJS.TEXT.REFRESH.REQUIRE_REFRESH,
AdapterJS.TEXT.REFRESH.BUTTON, 'javascript:location.reload()'); // jshint ignore:line
}
window.open(buttonLink, !!openNewTab ? '_blank' : '_top');
e.preventDefault();
try {
e.cancelBubble = true;
} catch(error) { }
var pluginInstallInterval = setInterval(function(){
if(! isIE) {
navigator.plugins.refresh(false);
}
AdapterJS.WebRTCPlugin.isPluginInstalled(
AdapterJS.WebRTCPlugin.pluginInfo.prefix,
AdapterJS.WebRTCPlugin.pluginInfo.plugName,
AdapterJS.WebRTCPlugin.pluginInfo.type,
function() { // plugin now installed
clearInterval(pluginInstallInterval);
AdapterJS.WebRTCPlugin.defineWebRTCInterface();
},
function() {
// still no plugin detected, nothing to do
});
} , 500);
});
// On click on Cancel
AdapterJS.addEvent(c.document.getElementById('cancel'), 'click', function(e) {
w.document.body.removeChild(i);
});
} else {
c.document.close();
}
setTimeout(function() {
if(typeof i.style.webkitTransform === 'string') {
i.style.webkitTransform = 'translateY(40px)';
} else if(typeof i.style.transform === 'string') {
i.style.transform = 'translateY(40px)';
} else {
i.style.top = '0px';
}
}, 300);
};
// -----------------------------------------------------------
// Detected webrtc implementation. Types are:
// - 'moz': Mozilla implementation of webRTC.
// - 'webkit': WebKit implementation of webRTC.
// - 'plugin': Using the plugin implementation.
webrtcDetectedType = null;
// Sets the settings for creating DataChannels and MediaStream for
// cross-browser compatibility.
// - This is only for browsers with SCTP-based support.
checkMediaDataChannelSettings =
function (peerBrowserAgent, peerBrowserVersion, callback, constraints) {
if (typeof callback !== 'function') {
return;
}
var beOfferer = true;
var isLocalFirefox = webrtcDetectedBrowser === 'firefox';
// Nightly version does not require MozDontOfferDataChannel for interop
var isLocalFirefoxInterop = webrtcDetectedType === 'moz' && webrtcDetectedVersion > 30;
var isPeerFirefox = peerBrowserAgent === 'firefox';
var isPeerFirefoxInterop = peerBrowserAgent === 'firefox' &&
((peerBrowserVersion) ? (peerBrowserVersion > 30) : false);
// Resends an updated version of constraints for MozDataChannel to work
// If other userAgent is firefox and user is firefox, remove MozDataChannel
if ((isLocalFirefox && isPeerFirefox) || (isLocalFirefoxInterop)) {
try {
delete constraints.mandatory.MozDontOfferDataChannel;
} catch (error) {
console.error('Failed deleting MozDontOfferDataChannel');
console.error(error);
}
} else if ((isLocalFirefox && !isPeerFirefox)) {
constraints.mandatory.MozDontOfferDataChannel = true;
}
if (!isLocalFirefox) {
// temporary measure to remove Moz* constraints in non Firefox browsers
for (var prop in constraints.mandatory) {
if (constraints.mandatory.hasOwnProperty(prop)) {
if (prop.indexOf('Moz') !== -1) {
delete constraints.mandatory[prop];
}
}
}
}
// Firefox (when not interoperable) cannot offer a DataChannel as it will cause problems
// for the interoperability of the media stream
if (isLocalFirefox && !isPeerFirefox && !isLocalFirefoxInterop) {
beOfferer = false;
}
callback(beOfferer, constraints);
};
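// Usage sketch; peerAgent, peerVersion and constraints are assumed to come
// from the application's signalling layer:
//
//   checkMediaDataChannelSettings(peerAgent, peerVersion,
//     function (beOfferer, updatedConstraints) {
//       // createOffer with updatedConstraints when beOfferer is true,
//       // otherwise wait for the remote side to offer.
//     }, constraints);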
// Handles the differences in ICE connection state output across browsers.
// - Tested outcomes are:
// - Chrome (offerer) : 'checking' > 'completed' > 'completed'
// - Chrome (answerer) : 'checking' > 'connected'
// - Firefox (offerer) : 'checking' > 'connected'
// - Firefox (answerer): 'checking' > 'connected'
checkIceConnectionState = function (peerId, iceConnectionState, callback) {
if (typeof callback !== 'function') {
console.warn('No callback specified in checkIceConnectionState. Aborted.');
return;
}
peerId = (peerId) ? peerId : 'peer';
if (!AdapterJS._iceConnectionFiredStates[peerId] ||
iceConnectionState === AdapterJS._iceConnectionStates.disconnected ||
iceConnectionState === AdapterJS._iceConnectionStates.failed ||
iceConnectionState === AdapterJS._iceConnectionStates.closed) {
AdapterJS._iceConnectionFiredStates[peerId] = [];
}
iceConnectionState = AdapterJS._iceConnectionStates[iceConnectionState];
if (AdapterJS._iceConnectionFiredStates[peerId].indexOf(iceConnectionState) < 0) {
AdapterJS._iceConnectionFiredStates[peerId].push(iceConnectionState);
if (iceConnectionState === AdapterJS._iceConnectionStates.connected) {
setTimeout(function () {
AdapterJS._iceConnectionFiredStates[peerId]
.push(AdapterJS._iceConnectionStates.done);
callback(AdapterJS._iceConnectionStates.done);
}, 1000);
}
callback(iceConnectionState);
}
return;
};
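// Usage sketch inside an oniceconnectionstatechange handler; pc and peerId
// are assumed to exist in the surrounding application code:
//
//   pc.oniceconnectionstatechange = function () {
//     checkIceConnectionState(peerId, pc.iceConnectionState, function (state) {
//       console.log('Normalised ICE state for ' + peerId + ': ' + state);
//     });
//   };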
// Firefox:
// - Creates iceServer from the url for Firefox.
// - Create iceServer with stun url.
// - Create iceServer with turn url.
// - Ignore the transport parameter from TURN url for FF version <=27.
// - Return null for createIceServer if transport=tcp.
// - FF 27 and above supports transport parameters in TURN url,
// - So passing in the full url to create iceServer.
// Chrome:
// - Creates iceServer from the url for Chrome M33 and earlier.
// - Create iceServer with stun url.
// - Chrome M28 & above uses below TURN format.
// Plugin:
// - Creates Ice Server for Plugin Browsers
// - If Stun - Create iceServer with stun url.
// - Else - Create iceServer with turn url
// - This is a WebRTC Function
createIceServer = null;
// Firefox:
// - Creates IceServers for Firefox
// - Use .url for Firefox.
// - Multiple Urls support
// Chrome:
// - Creates iceServers from the urls for Chrome M34 and above.
// - .urls is supported since Chrome M34.
// - Multiple Urls support
// Plugin:
// - Creates Ice Servers for Plugin Browsers
// - Multiple Urls support
// - This is a WebRTC Function
createIceServers = null;
//------------------------------------------------------------
//The RTCPeerConnection object.
RTCPeerConnection = null;
// Creates RTCSessionDescription object for Plugin Browsers
RTCSessionDescription = (typeof RTCSessionDescription === 'function') ?
RTCSessionDescription : null;
// Creates RTCIceCandidate object for Plugin Browsers
RTCIceCandidate = (typeof RTCIceCandidate === 'function') ?
RTCIceCandidate : null;
// Get UserMedia (only difference is the prefix).
// Code from Adam Barth.
getUserMedia = null;
// Attach a media stream to an element.
attachMediaStream = null;
// Re-attach a media stream to an element.
reattachMediaStream = null;
// Detected browser agent name. Types are:
// - 'firefox': Firefox browser.
// - 'chrome': Chrome browser.
// - 'opera': Opera browser.
// - 'safari': Safari browser.
// - 'IE': Internet Explorer browser.
webrtcDetectedBrowser = null;
// Detected browser version.
webrtcDetectedVersion = null;
// The minimum browser version still supported by AJS.
webrtcMinimumVersion = null;
// Check for browser types and react accordingly
if ( (navigator.mozGetUserMedia ||
navigator.webkitGetUserMedia ||
(navigator.mediaDevices &&
navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)))
&& !((navigator.userAgent.match(/android/ig) || []).length === 0 &&
(navigator.userAgent.match(/chrome/ig) || []).length === 0 && navigator.userAgent.indexOf('Safari/') > 0)) {
///////////////////////////////////////////////////////////////////
// INJECTION OF GOOGLE'S ADAPTER.JS CONTENT
/* jshint ignore:start */
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.adapter = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
/* eslint-env node */
'use strict';
// SDP helpers.
var SDPUtils = {};
// Generate an alphanumeric identifier for cname or mids.
// TODO: use UUIDs instead? https://gist.github.com/jed/982883
SDPUtils.generateIdentifier = function() {
return Math.random().toString(36).substr(2, 10);
};
// The RTCP CNAME used by all peerconnections from the same JS.
SDPUtils.localCName = SDPUtils.generateIdentifier();
// Splits SDP into lines, dealing with both CRLF and LF.
SDPUtils.splitLines = function(blob) {
return blob.trim().split('\n').map(function(line) {
return line.trim();
});
};
// Splits SDP into sessionpart and mediasections. Ensures CRLF.
SDPUtils.splitSections = function(blob) {
var parts = blob.split('\nm=');
return parts.map(function(part, index) {
return (index > 0 ? 'm=' + part : part).trim() + '\r\n';
});
};
// Returns lines that start with a certain prefix.
SDPUtils.matchPrefix = function(blob, prefix) {
return SDPUtils.splitLines(blob).filter(function(line) {
return line.indexOf(prefix) === 0;
});
};
// Parses an ICE candidate line. Sample input:
// candidate:702786350 2 udp 41819902 8.8.8.8 60769 typ relay raddr 8.8.8.8
// rport 55996
SDPUtils.parseCandidate = function(line) {
var parts;
// Parse both variants.
if (line.indexOf('a=candidate:') === 0) {
parts = line.substring(12).split(' ');
} else {
parts = line.substring(10).split(' ');
}
var candidate = {
foundation: parts[0],
component: parts[1],
protocol: parts[2].toLowerCase(),
priority: parseInt(parts[3], 10),
ip: parts[4],
port: parseInt(parts[5], 10),
// skip parts[6] == 'typ'
type: parts[7]
};
for (var i = 8; i < parts.length; i += 2) {
switch (parts[i]) {
case 'raddr':
candidate.relatedAddress = parts[i + 1];
break;
case 'rport':
candidate.relatedPort = parseInt(parts[i + 1], 10);
break;
case 'tcptype':
candidate.tcpType = parts[i + 1];
break;
default: // Unknown extensions are silently ignored.
break;
}
}
return candidate;
};
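// Illustrative parse result for the sample input above:
//
//   SDPUtils.parseCandidate(
//     'candidate:702786350 2 udp 41819902 8.8.8.8 60769 typ relay');
//   // => { foundation: '702786350', component: '2', protocol: 'udp',
//   //      priority: 41819902, ip: '8.8.8.8', port: 60769, type: 'relay' }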
// Translates a candidate object into SDP candidate attribute.
SDPUtils.writeCandidate = function(candidate) {
var sdp = [];
sdp.push(candidate.foundation);
sdp.push(candidate.component);
sdp.push(candidate.protocol.toUpperCase());
sdp.push(candidate.priority);
sdp.push(candidate.ip);
sdp.push(candidate.port);
var type = candidate.type;
sdp.push('typ');
sdp.push(type);
if (type !== 'host' && candidate.relatedAddress &&
candidate.relatedPort) {
sdp.push('raddr');
sdp.push(candidate.relatedAddress); // was: relAddr
sdp.push('rport');
sdp.push(candidate.relatedPort); // was: relPort
}
if (candidate.tcpType && candidate.protocol.toLowerCase() === 'tcp') {
sdp.push('tcptype');
sdp.push(candidate.tcpType);
}
return 'candidate:' + sdp.join(' ');
};
// Parses an rtpmap line, returns RTCRtpCodecParameters. Sample input:
// a=rtpmap:111 opus/48000/2
SDPUtils.parseRtpMap = function(line) {
var parts = line.substr(9).split(' ');
var parsed = {
payloadType: parseInt(parts.shift(), 10) // was: id
};
parts = parts[0].split('/');
parsed.name = parts[0];
parsed.clockRate = parseInt(parts[1], 10); // was: clockrate
// was: channels
parsed.numChannels = parts.length === 3 ? parseInt(parts[2], 10) : 1;
return parsed;
};
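// Illustrative example for the sample input above:
//
//   SDPUtils.parseRtpMap('a=rtpmap:111 opus/48000/2');
//   // => { payloadType: 111, name: 'opus', clockRate: 48000, numChannels: 2 }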
// Generate an a=rtpmap line from RTCRtpCodecCapability or
// RTCRtpCodecParameters.
SDPUtils.writeRtpMap = function(codec) {
var pt = codec.payloadType;
if (codec.preferredPayloadType !== undefined) {
pt = codec.preferredPayloadType;
}
return 'a=rtpmap:' + pt + ' ' + codec.name + '/' + codec.clockRate +
(codec.numChannels !== 1 ? '/' + codec.numChannels : '') + '\r\n';
};
// Parses an a=extmap line (header extension from RFC 5285). Sample input:
// a=extmap:2 urn:ietf:params:rtp-hdrext:toffset
SDPUtils.parseExtmap = function(line) {
var parts = line.substr(9).split(' ');
return {
id: parseInt(parts[0], 10),
uri: parts[1]
};
};
// Generates a=extmap line from RTCRtpHeaderExtensionParameters or
// RTCRtpHeaderExtension.
SDPUtils.writeExtmap = function(headerExtension) {
return 'a=extmap:' + (headerExtension.id || headerExtension.preferredId) +
' ' + headerExtension.uri + '\r\n';
};
// Parses an fmtp line, returns dictionary. Sample input:
// a=fmtp:96 vbr=on;cng=on
// Also deals with vbr=on; cng=on
SDPUtils.parseFmtp = function(line) {
var parsed = {};
var kv;
var parts = line.substr(line.indexOf(' ') + 1).split(';');
for (var j = 0; j < parts.length; j++) {
kv = parts[j].trim().split('=');
parsed[kv[0].trim()] = kv[1];
}
return parsed;
};
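// Illustrative example for the sample input above:
//
//   SDPUtils.parseFmtp('a=fmtp:96 vbr=on;cng=on');
//   // => { vbr: 'on', cng: 'on' }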
// Generates an a=fmtp line from RTCRtpCodecCapability or RTCRtpCodecParameters.
SDPUtils.writeFmtp = function(codec) {
var line = '';
var pt = codec.payloadType;
if (codec.preferredPayloadType !== undefined) {
pt = codec.preferredPayloadType;
}
if (codec.parameters && Object.keys(codec.parameters).length) {
var params = [];
Object.keys(codec.parameters).forEach(function(param) {
params.push(param + '=' + codec.parameters[param]);
});
line += 'a=fmtp:' + pt + ' ' + params.join(';') + '\r\n';
}
return line;
};
// Parses an rtcp-fb line, returns RTCRtcpFeedback object. Sample input:
// a=rtcp-fb:98 nack rpsi
SDPUtils.parseRtcpFb = function(line) {
var parts = line.substr(line.indexOf(' ') + 1).split(' ');
return {
type: parts.shift(),
parameter: parts.join(' ')
};
};
// Generate a=rtcp-fb lines from RTCRtpCodecCapability or RTCRtpCodecParameters.
SDPUtils.writeRtcpFb = function(codec) {
var lines = '';
var pt = codec.payloadType;
if (codec.preferredPayloadType !== undefined) {
pt = codec.preferredPayloadType;
}
if (codec.rtcpFeedback && codec.rtcpFeedback.length) {
// FIXME: special handling for trr-int?
codec.rtcpFeedback.forEach(function(fb) {
lines += 'a=rtcp-fb:' + pt + ' ' + fb.type +
(fb.parameter && fb.parameter.length ? ' ' + fb.parameter : '') +
'\r\n';
});
}
return lines;
};
// Parses an RFC 5576 ssrc media attribute. Sample input:
// a=ssrc:3735928559 cname:something
SDPUtils.parseSsrcMedia = function(line) {
var sp = line.indexOf(' ');
var parts = {
ssrc: parseInt(line.substr(7, sp - 7), 10)
};
var colon = line.indexOf(':', sp);
if (colon > -1) {
parts.attribute = line.substr(sp + 1, colon - sp - 1);
parts.value = line.substr(colon + 1);
} else {
parts.attribute = line.substr(sp + 1);
}
return parts;
};
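// Illustrative example for the sample input above:
//
//   SDPUtils.parseSsrcMedia('a=ssrc:3735928559 cname:something');
//   // => { ssrc: 3735928559, attribute: 'cname', value: 'something' }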
// Extracts DTLS parameters from SDP media section or sessionpart.
// FIXME: for consistency with other functions this should only
// get the fingerprint line as input. See also getIceParameters.
SDPUtils.getDtlsParameters = function(mediaSection, sessionpart) {
var lines = SDPUtils.splitLines(mediaSection);
// Search in session part, too.
lines = lines.concat(SDPUtils.splitLines(sessionpart));
var fpLine = lines.filter(function(line) {
return line.indexOf('a=fingerprint:') === 0;
})[0].substr(14);
// Note: a=setup line is ignored since we use the 'auto' role.
var dtlsParameters = {
role: 'auto',
fingerprints: [{
algorithm: fpLine.split(' ')[0],
value: fpLine.split(' ')[1]
}]
};
return dtlsParameters;
};
// Serializes DTLS parameters to SDP.
SDPUtils.writeDtlsParameters = function(params, setupType) {
var sdp = 'a=setup:' + setupType + '\r\n';
params.fingerprints.forEach(function(fp) {
sdp += 'a=fingerprint:' + fp.algorithm + ' ' + fp.value + '\r\n';
});
return sdp;
};
// Parses ICE information from SDP media section or sessionpart.
// FIXME: for consistency with other functions this should only
// get the ice-ufrag and ice-pwd lines as input.
SDPUtils.getIceParameters = function(mediaSection, sessionpart) {
var lines = SDPUtils.splitLines(mediaSection);
// Search in session part, too.
lines = lines.concat(SDPUtils.splitLines(sessionpart));
var iceParameters = {
usernameFragment: lines.filter(function(line) {
return line.indexOf('a=ice-ufrag:') === 0;
})[0].substr(12),
password: lines.filter(function(line) {
return line.indexOf('a=ice-pwd:') === 0;
})[0].substr(10)
};
return iceParameters;
};
// Serializes ICE parameters to SDP.
SDPUtils.writeIceParameters = function(params) {
return 'a=ice-ufrag:' + params.usernameFragment + '\r\n' +
'a=ice-pwd:' + params.password + '\r\n';
};
// Parses the SDP media section and returns RTCRtpParameters.
SDPUtils.parseRtpParameters = function(mediaSection) {
var description = {
codecs: [],
headerExtensions: [],
fecMechanisms: [],
rtcp: []
};
var lines = SDPUtils.splitLines(mediaSection);
var mline = lines[0].split(' ');
for (var i = 3; i < mline.length; i++) { // find all codecs from mline[3..]
var pt = mline[i];
var rtpmapline = SDPUtils.matchPrefix(
mediaSection, 'a=rtpmap:' + pt + ' ')[0];
if (rtpmapline) {
var codec = SDPUtils.parseRtpMap(rtpmapline);
var fmtps = SDPUtils.matchPrefix(
mediaSection, 'a=fmtp:' + pt + ' ');
// Only the first a=fmtp: is considered.
codec.parameters = fmtps.length ? SDPUtils.parseFmtp(fmtps[0]) : {};
codec.rtcpFeedback = SDPUtils.matchPrefix(
mediaSection, 'a=rtcp-fb:' + pt + ' ')
.map(SDPUtils.parseRtcpFb);
description.codecs.push(codec);
// parse FEC mechanisms from rtpmap lines.
switch (codec.name.toUpperCase()) {
case 'RED':
case 'ULPFEC':
description.fecMechanisms.push(codec.name.toUpperCase());
break;
default: // only RED and ULPFEC are recognized as FEC mechanisms.
break;
}
}
}
SDPUtils.matchPrefix(mediaSection, 'a=extmap:').forEach(function(line) {
description.headerExtensions.push(SDPUtils.parseExtmap(line));
});
// FIXME: parse rtcp.
return description;
};
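// Usage sketch; mediaSection is assumed to be a single SDP m-section string:
//
//   var params = SDPUtils.parseRtpParameters(mediaSection);
//   // params.codecs, params.headerExtensions and params.fecMechanisms are
//   // populated; params.rtcp stays empty (see the FIXME above).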
// Generates parts of the SDP media section describing the capabilities /
// parameters.
SDPUtils.writeRtpDescription = function(kind, caps) {
var sdp = '';
// Build the mline.
sdp += 'm=' + kind + ' ';
sdp += caps.codecs.length > 0 ? '9' : '0'; // reject if no codecs.
sdp += ' UDP/TLS/RTP/SAVPF ';
sdp += caps.codecs.map(function(codec) {
if (codec.preferredPayloadType !== undefined) {
return codec.preferredPayloadType;
}
return codec.payloadType;
}).join(' ') + '\r\n';
sdp += 'c=IN IP4 0.0.0.0\r\n';
sdp += 'a=rtcp:9 IN IP4 0.0.0.0\r\n';
// Add a=rtpmap lines for each codec. Also fmtp and rtcp-fb.
caps.codecs.forEach(function(codec) {
sdp += SDPUtils.writeRtpMap(codec);
sdp += SDPUtils.writeFmtp(codec);
sdp += SDPUtils.writeRtcpFb(codec);
});
// FIXME: add headerExtensions, fecMechanisms and rtcp.
sdp += 'a=rtcp-mux\r\n';
return sdp;
};
// Parses the SDP media section and returns an array of
// RTCRtpEncodingParameters.
SDPUtils.parseRtpEncodingParameters = function(mediaSection) {
var encodingParameters = [];
var description = SDPUtils.parseRtpParameters(mediaSection);
var hasRed = description.fecMechanisms.indexOf('RED') !== -1;
var hasUlpfec = description.fecMechanisms.indexOf('ULPFEC') !== -1;
// filter a=ssrc:... cname:, ignore PlanB-msid
var ssrcs = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
.map(function(line) {
return SDPUtils.parseSsrcMedia(line);
})
.filter(function(parts) {
return parts.attribute === 'cname';
});
var primarySsrc = ssrcs.length > 0 && ssrcs[0].ssrc;
var secondarySsrc;
var flows = SDPUtils.matchPrefix(mediaSection, 'a=ssrc-group:FID')
.map(function(line) {
var parts = line.split(' ');
parts.shift();
return parts.map(function(part) {
return parseInt(part, 10);
});
});
if (flows.length > 0 && flows[0].length > 1 && flows[0][0] === primarySsrc) {
secondarySsrc = flows[0][1];
}
description.codecs.forEach(function(codec) {
if (codec.name.toUpperCase() === 'RTX' && codec.parameters.apt) {
var encParam = {
ssrc: primarySsrc,
codecPayloadType: parseInt(codec.parameters.apt, 10),
rtx: {
payloadType: codec.payloadType,
ssrc: secondarySsrc
}
};
encodingParameters.push(encParam);
if (hasRed) {
encParam = JSON.parse(JSON.stringify(encParam));
encParam.fec = {
ssrc: secondarySsrc,
mechanism: hasUlpfec ? 'red+ulpfec' : 'red'
};
encodingParameters.push(encParam);
}
}
});
if (encodingParameters.length === 0 && primarySsrc) {
encodingParameters.push({
ssrc: primarySsrc
});
}
// we support both b=AS and b=TIAS but interpret AS as TIAS.
var bandwidth = SDPUtils.matchPrefix(mediaSection, 'b=');
if (bandwidth.length) {
if (bandwidth[0].indexOf('b=TIAS:') === 0) {
bandwidth = parseInt(bandwidth[0].substr(7), 10);
} else if (bandwidth[0].indexOf('b=AS:') === 0) {
bandwidth = parseInt(bandwidth[0].substr(5), 10);
}
encodingParameters.forEach(function(params) {
params.maxBitrate = bandwidth;
});
}
return encodingParameters;
};
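// Usage sketch; with an a=ssrc-group:FID pair and an RTX codec the result
// looks roughly like the following (illustrative values):
//
//   SDPUtils.parseRtpEncodingParameters(mediaSection);
//   // => [{ ssrc: 1001, codecPayloadType: 96,
//   //       rtx: { payloadType: 97, ssrc: 1002 } }, ...]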
SDPUtils.writeSessionBoilerplate = function() {
// FIXME: sess-id should be an NTP timestamp.
return 'v=0\r\n' +
'o=thisisadapterortc 8169639915646943137 2 IN IP4 127.0.0.1\r\n' +
's=-\r\n' +
't=0 0\r\n';
};
SDPUtils.writeMediaSection = function(transceiver, caps, type, stream) {
var sdp = SDPUtils.writeRtpDescription(transceiver.kind, caps);
// Map ICE parameters (ufrag, pwd) to SDP.
sdp += SDPUtils.writeIceParameters(
transceiver.iceGatherer.getLocalParameters());
// Map DTLS parameters to SDP.
sdp += SDPUtils.writeDtlsParameters(
transceiver.dtlsTransport.getLocalParameters(),
type === 'offer' ? 'actpass' : 'active');
sdp += 'a=mid:' + transceiver.mid + '\r\n';
if (transceiver.rtpSender && transceiver.rtpReceiver) {
sdp += 'a=sendrecv\r\n';
} else if (transceiver.rtpSender) {
sdp += 'a=sendonly\r\n';
} else if (transceiver.rtpReceiver) {
sdp += 'a=recvonly\r\n';
} else {
sdp += 'a=inactive\r\n';
}
// FIXME: for RTX there might be multiple SSRCs. Not implemented in Edge yet.
if (transceiver.rtpSender) {
var msid = 'msid:' + stream.id + ' ' +
transceiver.rtpSender.track.id + '\r\n';
sdp += 'a=' + msid;
sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].ssrc +
' ' + msid;
}
// FIXME: this should be written by writeRtpDescription.
sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].ssrc +
' cname:' + SDPUtils.localCName + '\r\n';
return sdp;
};
// Gets the direction from the mediaSection or the sessionpart.
SDPUtils.getDirection = function(mediaSection, sessionpart) {
// Look for sendrecv, sendonly, recvonly, inactive, default to sendrecv.
var lines = SDPUtils.splitLines(mediaSection);
for (var i = 0; i < lines.length; i++) {
switch (lines[i]) {
case 'a=sendrecv':
case 'a=sendonly':
case 'a=recvonly':
case 'a=inactive':
return lines[i].substr(2);
default:
// FIXME: What should happen here?
}
}
if (sessionpart) {
return SDPUtils.getDirection(sessionpart);
}
return 'sendrecv';
};
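// Illustrative example:
//
//   SDPUtils.getDirection('m=audio 9 UDP/TLS/RTP/SAVPF 111\r\na=sendonly\r\n');
//   // => 'sendonly' (falls back to the session part, then to 'sendrecv')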
// Expose public methods.
module.exports = SDPUtils;
},{}],2:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
// Shimming starts here.
(function() {
// Utils.
var logging = require('./utils').log;
var browserDetails = require('./utils').browserDetails;
// Export to the adapter global object visible in the browser.
module.exports.browserDetails = browserDetails;
module.exports.extractVersion = require('./utils').extractVersion;
module.exports.disableLog = require('./utils').disableLog;
// Uncomment the line below if you want logging to occur, including logging
// for the switch statement below. Can also be turned on in the browser via
// adapter.disableLog(false), but then logging from the switch statement below
// will not appear.
// require('./utils').disableLog(false);
// Browser shims.
var chromeShim = require('./chrome/chrome_shim') || null;
var edgeShim = require('./edge/edge_shim') || null;
var firefoxShim = require('./firefox/firefox_shim') || null;
var safariShim = require('./safari/safari_shim') || null;
// Shim browser if found.
switch (browserDetails.browser) {
case 'opera': // fallthrough as it uses chrome shims
case 'chrome':
if (!chromeShim || !chromeShim.shimPeerConnection) {
logging('Chrome shim is not included in this adapter release.');
return;
}
logging('adapter.js shimming chrome.');
// Export to the adapter global object visible in the browser.
module.exports.browserShim = chromeShim;
chromeShim.shimGetUserMedia();
chromeShim.shimMediaStream();
chromeShim.shimSourceObject();
chromeShim.shimPeerConnection();
chromeShim.shimOnTrack();
break;
case 'firefox':
if (!firefoxShim || !firefoxShim.shimPeerConnection) {
logging('Firefox shim is not included in this adapter release.');
return;
}
logging('adapter.js shimming firefox.');
// Export to the adapter global object visible in the browser.
module.exports.browserShim = firefoxShim;
firefoxShim.shimGetUserMedia();
firefoxShim.shimSourceObject();
firefoxShim.shimPeerConnection();
firefoxShim.shimOnTrack();
break;
case 'edge':
if (!edgeShim || !edgeShim.shimPeerConnection) {
logging('MS edge shim is not included in this adapter release.');
return;
}
logging('adapter.js shimming edge.');
// Export to the adapter global object visible in the browser.
module.exports.browserShim = edgeShim;
edgeShim.shimGetUserMedia();
edgeShim.shimPeerConnection();
break;
case 'safari':
if (!safariShim) {
logging('Safari shim is not included in this adapter release.');
return;
}
logging('adapter.js shimming safari.');
// Export to the adapter global object visible in the browser.
module.exports.browserShim = safariShim;
safariShim.shimGetUserMedia();
break;
default:
logging('Unsupported browser!');
}
})();
},{"./chrome/chrome_shim":3,"./edge/edge_shim":5,"./firefox/firefox_shim":7,"./safari/safari_shim":9,"./utils":10}],3:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var logging = require('../utils.js').log;
var browserDetails = require('../utils.js').browserDetails;
var chromeShim = {
shimMediaStream: function() {
window.MediaStream = window.MediaStream || window.webkitMediaStream;
},
shimOnTrack: function() {
if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
window.RTCPeerConnection.prototype)) {
Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
get: function() {
return this._ontrack;
},
set: function(f) {
var self = this;
if (this._ontrack) {
this.removeEventListener('track', this._ontrack);
this.removeEventListener('addstream', this._ontrackpoly);
}
this.addEventListener('track', this._ontrack = f);
this.addEventListener('addstream', this._ontrackpoly = function(e) {
// onaddstream does not fire when a track is added to an existing
// stream. But stream.onaddtrack is implemented so we use that.
e.stream.addEventListener('addtrack', function(te) {
var event = new Event('track');
event.track = te.track;
event.receiver = {track: te.track};
event.streams = [e.stream];
self.dispatchEvent(event);
});
e.stream.getTracks().forEach(function(track) {
var event = new Event('track');
event.track = track;
event.receiver = {track: track};
event.streams = [e.stream];
this.dispatchEvent(event);
}.bind(this));
}.bind(this));
}
});
}
},
shimSourceObject: function() {
if (typeof window === 'object') {
if (window.HTMLMediaElement &&
!('srcObject' in window.HTMLMediaElement.prototype)) {
// Shim the srcObject property, once, when HTMLMediaElement is found.
Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
get: function() {
return this._srcObject;
},
set: function(stream) {
var self = this;
// Use _srcObject as a private property for this shim
this._srcObject = stream;
if (this.src) {
URL.revokeObjectURL(this.src);
}
if (!stream) {
this.src = '';
return;
}
this.src = URL.createObjectURL(stream);
// We need to recreate the blob url when a track is added or
// removed. Doing it manually since we want to avoid a recursion.
stream.addEventListener('addtrack', function() {
if (self.src) {
URL.revokeObjectURL(self.src);
}
self.src = URL.createObjectURL(stream);
});
stream.addEventListener('removetrack', function() {
if (self.src) {
URL.revokeObjectURL(self.src);
}
self.src = URL.createObjectURL(stream);
});
}
});
}
}
},
shimPeerConnection: function() {
// The RTCPeerConnection object.
window.RTCPeerConnection = function(pcConfig, pcConstraints) {
// Translate iceTransportPolicy to iceTransports,
// see https://code.google.com/p/webrtc/issues/detail?id=4869
logging('PeerConnection');
if (pcConfig && pcConfig.iceTransportPolicy) {
pcConfig.iceTransports = pcConfig.iceTransportPolicy;
}
var pc = new webkitRTCPeerConnection(pcConfig, pcConstraints);
var origGetStats = pc.getStats.bind(pc);
pc.getStats = function(selector, successCallback, errorCallback) {
var self = this;
var args = arguments;
// If selector is a function then we are in the old style stats so just
// pass back the original getStats format to avoid breaking old users.
if (arguments.length > 0 && typeof selector === 'function') {
return origGetStats(selector, successCallback);
}
var fixChromeStats_ = function(response) {
var standardReport = {};
var reports = response.result();
reports.forEach(function(report) {
var standardStats = {
id: report.id,
timestamp: report.timestamp,
type: report.type
};
report.names().forEach(function(name) {
standardStats[name] = report.stat(name);
});
standardReport[standardStats.id] = standardStats;
});
return standardReport;
};
// shim getStats with maplike support
var makeMapStats = function(stats, legacyStats) {
var map = new Map(Object.keys(stats).map(function(key) {
return[key, stats[key]];
}));
legacyStats = legacyStats || stats;
Object.keys(legacyStats).forEach(function(key) {
map[key] = legacyStats[key];
});
return map;
};
if (arguments.length >= 2) {
var successCallbackWrapper_ = function(response) {
args[1](makeMapStats(fixChromeStats_(response)));
};
return origGetStats.apply(this, [successCallbackWrapper_,
arguments[0]]);
}
// promise-support
return new Promise(function(resolve, reject) {
if (args.length === 1 && typeof selector === 'object') {
origGetStats.apply(self, [
function(response) {
resolve(makeMapStats(fixChromeStats_(response)));
}, reject]);
} else {
// Preserve legacy chrome stats only on legacy access of stats obj
origGetStats.apply(self, [
function(response) {
resolve(makeMapStats(fixChromeStats_(response),
response.result()));
}, reject]);
}
}).then(successCallback, errorCallback);
};
return pc;
};
window.RTCPeerConnection.prototype = webkitRTCPeerConnection.prototype;
// wrap static methods. Currently just generateCertificate.
if (webkitRTCPeerConnection.generateCertificate) {
Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
get: function() {
return webkitRTCPeerConnection.generateCertificate;
}
});
}
['createOffer', 'createAnswer'].forEach(function(method) {
var nativeMethod = webkitRTCPeerConnection.prototype[method];
webkitRTCPeerConnection.prototype[method] = function() {
var self = this;
if (arguments.length < 1 || (arguments.length === 1 &&
typeof arguments[0] === 'object')) {
var opts = arguments.length === 1 ? arguments[0] : undefined;
return new Promise(function(resolve, reject) {
nativeMethod.apply(self, [resolve, reject, opts]);
});
}
return nativeMethod.apply(this, arguments);
};
});
// add promise support -- natively available in Chrome 51
if (browserDetails.version < 51) {
['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
.forEach(function(method) {
var nativeMethod = webkitRTCPeerConnection.prototype[method];
webkitRTCPeerConnection.prototype[method] = function() {
var args = arguments;
var self = this;
var promise = new Promise(function(resolve, reject) {
nativeMethod.apply(self, [args[0], resolve, reject]);
});
if (args.length < 2) {
return promise;
}
return promise.then(function() {
args[1].apply(null, []);
},
function(err) {
if (args.length >= 3) {
args[2].apply(null, [err]);
}
});
};
});
}
// shim implicit creation of RTCSessionDescription/RTCIceCandidate
['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
.forEach(function(method) {
var nativeMethod = webkitRTCPeerConnection.prototype[method];
webkitRTCPeerConnection.prototype[method] = function() {
arguments[0] = new ((method === 'addIceCandidate') ?
RTCIceCandidate : RTCSessionDescription)(arguments[0]);
return nativeMethod.apply(this, arguments);
};
});
// support for addIceCandidate(null)
var nativeAddIceCandidate =
RTCPeerConnection.prototype.addIceCandidate;
RTCPeerConnection.prototype.addIceCandidate = function() {
return arguments[0] === null ? Promise.resolve()
: nativeAddIceCandidate.apply(this, arguments);
};
}
};
// Expose public methods.
module.exports = {
shimMediaStream: chromeShim.shimMediaStream,
shimOnTrack: chromeShim.shimOnTrack,
shimSourceObject: chromeShim.shimSourceObject,
shimPeerConnection: chromeShim.shimPeerConnection,
shimGetUserMedia: require('./getusermedia')
};
},{"../utils.js":10,"./getusermedia":4}],4:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var logging = require('../utils.js').log;
// Expose public methods.
module.exports = function() {
var constraintsToChrome_ = function(c) {
if (typeof c !== 'object' || c.mandatory || c.optional) {
return c;
}
var cc = {};
Object.keys(c).forEach(function(key) {
if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
return;
}
var r = (typeof c[key] === 'object') ? c[key] : {ideal: c[key]};
if (r.exact !== undefined && typeof r.exact === 'number') {
r.min = r.max = r.exact;
}
var oldname_ = function(prefix, name) {
if (prefix) {
return prefix + name.charAt(0).toUpperCase() + name.slice(1);
}
return (name === 'deviceId') ? 'sourceId' : name;
};
if (r.ideal !== undefined) {
cc.optional = cc.optional || [];
var oc = {};
if (typeof r.ideal === 'number') {
oc[oldname_('min', key)] = r.ideal;
cc.optional.push(oc);
oc = {};
oc[oldname_('max', key)] = r.ideal;
cc.optional.push(oc);
} else {
oc[oldname_('', key)] = r.ideal;
cc.optional.push(oc);
}
}
if (r.exact !== undefined && typeof r.exact !== 'number') {
cc.mandatory = cc.mandatory || {};
cc.mandatory[oldname_('', key)] = r.exact;
} else {
['min', 'max'].forEach(function(mix) {
if (r[mix] !== undefined) {
cc.mandatory = cc.mandatory || {};
cc.mandatory[oldname_(mix, key)] = r[mix];
}
});
}
});
if (c.advanced) {
cc.optional = (cc.optional || []).concat(c.advanced);
}
return cc;
};
var shimConstraints_ = function(constraints, func) {
constraints = JSON.parse(JSON.stringify(constraints));
if (constraints && constraints.audio) {
constraints.audio = constraintsToChrome_(constraints.audio);
}
if (constraints && typeof constraints.video === 'object') {
// Shim facingMode for mobile, where it defaults to "user".
var face = constraints.video.facingMode;
face = face && ((typeof face === 'object') ? face : {ideal: face});
if ((face && (face.exact === 'user' || face.exact === 'environment' ||
face.ideal === 'user' || face.ideal === 'environment')) &&
!(navigator.mediaDevices.getSupportedConstraints &&
navigator.mediaDevices.getSupportedConstraints().facingMode)) {
delete constraints.video.facingMode;
if (face.exact === 'environment' || face.ideal === 'environment') {
// Look for "back" in label, or use last cam (typically back cam).
return navigator.mediaDevices.enumerateDevices()
.then(function(devices) {
devices = devices.filter(function(d) {
return d.kind === 'videoinput';
});
var back = devices.find(function(d) {
return d.label.toLowerCase().indexOf('back') !== -1;
}) || (devices.length && devices[devices.length - 1]);
if (back) {
constraints.video.deviceId = face.exact ? {exact: back.deviceId} :
{ideal: back.deviceId};
}
constraints.video = constraintsToChrome_(constraints.video);
logging('chrome: ' + JSON.stringify(constraints));
return func(constraints);
});
}
}
constraints.video = constraintsToChrome_(constraints.video);
}
logging('chrome: ' + JSON.stringify(constraints));
return func(constraints);
};
var shimError_ = function(e) {
return {
name: {
PermissionDeniedError: 'NotAllowedError',
ConstraintNotSatisfiedError: 'OverconstrainedError'
}[e.name] || e.name,
message: e.message,
constraint: e.constraintName,
toString: function() {
return this.name + (this.message && ': ') + this.message;
}
};
};
var getUserMedia_ = function(constraints, onSuccess, onError) {
shimConstraints_(constraints, function(c) {
navigator.webkitGetUserMedia(c, onSuccess, function(e) {
onError(shimError_(e));
});
});
};
navigator.getUserMedia = getUserMedia_;
// Returns the result of getUserMedia as a Promise.
var getUserMediaPromise_ = function(constraints) {
return new Promise(function(resolve, reject) {
navigator.getUserMedia(constraints, resolve, reject);
});
};
if (!navigator.mediaDevices) {
navigator.mediaDevices = {
getUserMedia: getUserMediaPromise_,
enumerateDevices: function() {
return new Promise(function(resolve) {
var kinds = {audio: 'audioinput', video: 'videoinput'};
return MediaStreamTrack.getSources(function(devices) {
resolve(devices.map(function(device) {
return {label: device.label,
kind: kinds[device.kind],
deviceId: device.id,
groupId: ''};
}));
});
});
}
};
}
// A shim for getUserMedia method on the mediaDevices object.
// TODO(KaptenJansson) remove once implemented in Chrome stable.
if (!navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices.getUserMedia = function(constraints) {
return getUserMediaPromise_(constraints);
};
} else {
// Even though Chrome 45 has navigator.mediaDevices and a getUserMedia
// function which returns a Promise, it does not accept spec-style
// constraints.
var origGetUserMedia = navigator.mediaDevices.getUserMedia.
bind(navigator.mediaDevices);
navigator.mediaDevices.getUserMedia = function(cs) {
return shimConstraints_(cs, function(c) {
return origGetUserMedia(c).catch(function(e) {
return Promise.reject(shimError_(e));
});
});
};
}
// Dummy devicechange event methods.
// TODO(KaptenJansson) remove once implemented in Chrome stable.
if (typeof navigator.mediaDevices.addEventListener === 'undefined') {
navigator.mediaDevices.addEventListener = function() {
logging('Dummy mediaDevices.addEventListener called.');
};
}
if (typeof navigator.mediaDevices.removeEventListener === 'undefined') {
navigator.mediaDevices.removeEventListener = function() {
logging('Dummy mediaDevices.removeEventListener called.');
};
}
};
},{"../utils.js":10}],5:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var SDPUtils = require('sdp');
var browserDetails = require('../utils').browserDetails;
var edgeShim = {
shimPeerConnection: function() {
if (window.RTCIceGatherer) {
// ORTC defines an RTCIceCandidate object but no constructor.
// Not implemented in Edge.
if (!window.RTCIceCandidate) {
window.RTCIceCandidate = function(args) {
return args;
};
}
// ORTC does not have a session description object but
// other browsers (i.e. Chrome) that will support both PC and ORTC
// in the future might have this defined already.
if (!window.RTCSessionDescription) {
window.RTCSessionDescription = function(args) {
return args;
};
}
}
window.RTCPeerConnection = function(config) {
var self = this;
var _eventTarget = document.createDocumentFragment();
['addEventListener', 'removeEventListener', 'dispatchEvent']
.forEach(function(method) {
self[method] = _eventTarget[method].bind(_eventTarget);
});
this.onicecandidate = null;
this.onaddstream = null;
this.ontrack = null;
this.onremovestream = null;
this.onsignalingstatechange = null;
this.oniceconnectionstatechange = null;
this.onnegotiationneeded = null;
this.ondatachannel = null;
this.localStreams = [];
this.remoteStreams = [];
this.getLocalStreams = function() {
return self.localStreams;
};
this.getRemoteStreams = function() {
return self.remoteStreams;
};
this.localDescription = new RTCSessionDescription({
type: '',
sdp: ''
});
this.remoteDescription = new RTCSessionDescription({
type: '',
sdp: ''
});
this.signalingState = 'stable';
this.iceConnectionState = 'new';
this.iceGatheringState = 'new';
this.iceOptions = {
gatherPolicy: 'all',
iceServers: []
};
if (config && config.iceTransportPolicy) {
switch (config.iceTransportPolicy) {
case 'all':
case 'relay':
this.iceOptions.gatherPolicy = config.iceTransportPolicy;
break;
case 'none':
// FIXME: remove once implementation and spec have added this.
throw new TypeError('iceTransportPolicy "none" not supported');
default:
// don't set iceTransportPolicy.
break;
}
}
this.usingBundle = config && config.bundlePolicy === 'max-bundle';
if (config && config.iceServers) {
// Edge does not like
// 1) stun:
// 2) turn: that does not have all of turn:host:port?transport=udp
// 3) turn: with ipv6 addresses
var iceServers = JSON.parse(JSON.stringify(config.iceServers));
this.iceOptions.iceServers = iceServers.filter(function(server) {
if (server && server.urls) {
var urls = server.urls;
if (typeof urls === 'string') {
urls = [urls];
}
urls = urls.filter(function(url) {
return (url.indexOf('turn:') === 0 &&
url.indexOf('transport=udp') !== -1 &&
url.indexOf('turn:[') === -1) ||
(url.indexOf('stun:') === 0 &&
browserDetails.version >= 14393);
})[0];
return !!urls;
}
return false;
});
}
// per-track iceGathers, iceTransports, dtlsTransports, rtpSenders, ...
// everything that is needed to describe a SDP m-line.
this.transceivers = [];
// since the iceGatherer is currently created in createOffer but we
// must not emit candidates until after setLocalDescription we buffer
// them in this array.
this._localIceCandidatesBuffer = [];
};
window.RTCPeerConnection.prototype._emitBufferedCandidates = function() {
var self = this;
var sections = SDPUtils.splitSections(self.localDescription.sdp);
// FIXME: need to apply ice candidates in a way which is async but
// in-order
this._localIceCandidatesBuffer.forEach(function(event) {
var end = !event.candidate || Object.keys(event.candidate).length === 0;
if (end) {
for (var j = 1; j < sections.length; j++) {
if (sections[j].indexOf('\r\na=end-of-candidates\r\n') === -1) {
sections[j] += 'a=end-of-candidates\r\n';
}
}
} else if (event.candidate.candidate.indexOf('typ endOfCandidates')
=== -1) {
sections[event.candidate.sdpMLineIndex + 1] +=
'a=' + event.candidate.candidate + '\r\n';
}
self.localDescription.sdp = sections.join('');
self.dispatchEvent(event);
if (self.onicecandidate !== null) {
self.onicecandidate(event);
}
if (!event.candidate && self.iceGatheringState !== 'complete') {
var complete = self.transceivers.every(function(transceiver) {
return transceiver.iceGatherer &&
transceiver.iceGatherer.state === 'completed';
});
if (complete) {
self.iceGatheringState = 'complete';
}
}
});
this._localIceCandidatesBuffer = [];
};
window.RTCPeerConnection.prototype.addStream = function(stream) {
// Clone is necessary for local demos mostly, attaching directly
// to two different senders does not work (build 10547).
this.localStreams.push(stream.clone());
this._maybeFireNegotiationNeeded();
};
window.RTCPeerConnection.prototype.removeStream = function(stream) {
var idx = this.localStreams.indexOf(stream);
if (idx > -1) {
this.localStreams.splice(idx, 1);
this._maybeFireNegotiationNeeded();
}
};
window.RTCPeerConnection.prototype.getSenders = function() {
return this.transceivers.filter(function(transceiver) {
return !!transceiver.rtpSender;
})
.map(function(transceiver) {
return transceiver.rtpSender;
});
};
window.RTCPeerConnection.prototype.getReceivers = function() {
return this.transceivers.filter(function(transceiver) {
return !!transceiver.rtpReceiver;
})
.map(function(transceiver) {
return transceiver.rtpReceiver;
});
};
// Determines the intersection of local and remote capabilities.
window.RTCPeerConnection.prototype._getCommonCapabilities =
function(localCapabilities, remoteCapabilities) {
var commonCapabilities = {
codecs: [],
headerExtensions: [],
fecMechanisms: []
};
localCapabilities.codecs.forEach(function(lCodec) {
for (var i = 0; i < remoteCapabilities.codecs.length; i++) {
var rCodec = remoteCapabilities.codecs[i];
if (lCodec.name.toLowerCase() === rCodec.name.toLowerCase() &&
lCodec.clockRate === rCodec.clockRate &&
lCodec.numChannels === rCodec.numChannels) {
// push rCodec so we reply with offerer payload type
commonCapabilities.codecs.push(rCodec);
// determine common feedback mechanisms
rCodec.rtcpFeedback = rCodec.rtcpFeedback.filter(function(fb) {
for (var j = 0; j < lCodec.rtcpFeedback.length; j++) {
if (lCodec.rtcpFeedback[j].type === fb.type &&
lCodec.rtcpFeedback[j].parameter === fb.parameter) {
return true;
}
}
return false;
});
// FIXME: also need to determine .parameters
// see https://github.com/openpeer/ortc/issues/569
break;
}
}
});
localCapabilities.headerExtensions
.forEach(function(lHeaderExtension) {
for (var i = 0; i < remoteCapabilities.headerExtensions.length;
i++) {
var rHeaderExtension = remoteCapabilities.headerExtensions[i];
if (lHeaderExtension.uri === rHeaderExtension.uri) {
commonCapabilities.headerExtensions.push(rHeaderExtension);
break;
}
}
});
// FIXME: fecMechanisms
return commonCapabilities;
};
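// Illustrative sketch (the capability objects are simplified assumptions):
// codecs match on name (case-insensitive), clockRate and numChannels, and
// the remote entry is kept so the answer reuses the offerer's payload type.
//
//   var local = {codecs: [{name: 'opus', clockRate: 48000, numChannels: 2,
//     payloadType: 111, rtcpFeedback: []}],
//     headerExtensions: [], fecMechanisms: []};
//   var remote = {codecs: [{name: 'OPUS', clockRate: 48000, numChannels: 2,
//     payloadType: 100, rtcpFeedback: []}],
//     headerExtensions: [], fecMechanisms: []};
//   // pc._getCommonCapabilities(local, remote).codecs[0].payloadType === 100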
// Create ICE gatherer, ICE transport and DTLS transport.
window.RTCPeerConnection.prototype._createIceAndDtlsTransports =
function(mid, sdpMLineIndex) {
var self = this;
var iceGatherer = new RTCIceGatherer(self.iceOptions);
var iceTransport = new RTCIceTransport(iceGatherer);
iceGatherer.onlocalcandidate = function(evt) {
var event = new Event('icecandidate');
event.candidate = {sdpMid: mid, sdpMLineIndex: sdpMLineIndex};
var cand = evt.candidate;
var end = !cand || Object.keys(cand).length === 0;
// Edge emits an empty object for RTCIceCandidateComplete.
if (end) {
// polyfill since RTCIceGatherer.state is not implemented in
// Edge 10547 yet.
if (iceGatherer.state === undefined) {
iceGatherer.state = 'completed';
}
// Emit a candidate with type endOfCandidates to make the samples
// work. Edge requires addIceCandidate with this empty candidate
// to start checking. The real solution is to signal
// end-of-candidates to the other side when getting the null
// candidate but some apps (like the samples) don't do that.
event.candidate.candidate =
'candidate:1 1 udp 1 0.0.0.0 9 typ endOfCandidates';
} else {
// RTCIceCandidate doesn't have a component, needs to be added
cand.component = iceTransport.component === 'RTCP' ? 2 : 1;
event.candidate.candidate = SDPUtils.writeCandidate(cand);
}
// update local description.
var sections = SDPUtils.splitSections(self.localDescription.sdp);
if (event.candidate.candidate.indexOf('typ endOfCandidates')
=== -1) {
sections[event.candidate.sdpMLineIndex + 1] +=
'a=' + event.candidate.candidate + '\r\n';
} else {
sections[event.candidate.sdpMLineIndex + 1] +=
'a=end-of-candidates\r\n';
}
self.localDescription.sdp = sections.join('');
var complete = self.transceivers.every(function(transceiver) {
return transceiver.iceGatherer &&
transceiver.iceGatherer.state === 'completed';
});
// Emit candidate if localDescription is set.
// Also emits null candidate when all gatherers are complete.
switch (self.iceGatheringState) {
case 'new':
self._localIceCandidatesBuffer.push(event);
if (end && complete) {
self._localIceCandidatesBuffer.push(
new Event('icecandidate'));
}
break;
case 'gathering':
self._emitBufferedCandidates();
self.dispatchEvent(event);
if (self.onicecandidate !== null) {
self.onicecandidate(event);
}
if (complete) {
self.dispatchEvent(new Event('icecandidate'));
if (self.onicecandidate !== null) {
self.onicecandidate(new Event('icecandidate'));
}
self.iceGatheringState = 'complete';
}
break;
case 'complete':
// should not happen... currently!
break;
default: // no-op.
break;
}
};
iceTransport.onicestatechange = function() {
self._updateConnectionState();
};
var dtlsTransport = new RTCDtlsTransport(iceTransport);
dtlsTransport.ondtlsstatechange = function() {
self._updateConnectionState();
};
dtlsTransport.onerror = function() {
// onerror does not set state to failed by itself.
dtlsTransport.state = 'failed';
self._updateConnectionState();
};
return {
iceGatherer: iceGatherer,
iceTransport: iceTransport,
dtlsTransport: dtlsTransport
};
};
// Start the RTP Sender and Receiver for a transceiver.
window.RTCPeerConnection.prototype._transceive = function(transceiver,
send, recv) {
var params = this._getCommonCapabilities(transceiver.localCapabilities,
transceiver.remoteCapabilities);
if (send && transceiver.rtpSender) {
params.encodings = transceiver.sendEncodingParameters;
params.rtcp = {
cname: SDPUtils.localCName
};
if (transceiver.recvEncodingParameters.length) {
params.rtcp.ssrc = transceiver.recvEncodingParameters[0].ssrc;
}
transceiver.rtpSender.send(params);
}
if (recv && transceiver.rtpReceiver) {
params.encodings = transceiver.recvEncodingParameters;
params.rtcp = {
cname: transceiver.cname
};
if (transceiver.sendEncodingParameters.length) {
params.rtcp.ssrc = transceiver.sendEncodingParameters[0].ssrc;
}
transceiver.rtpReceiver.receive(params);
}
};
window.RTCPeerConnection.prototype.setLocalDescription =
function(description) {
var self = this;
var sections;
var sessionpart;
if (description.type === 'offer') {
// Note: upstream adapter.js wrapped the block below in an empty
// "if (!this._pendingOffer) {} else { ... }"; only the else branch did
// anything, so it has been simplified to a plain if.
if (this._pendingOffer) {
// VERY limited support for SDP munging. Limited to:
// * changing the order of codecs
sections = SDPUtils.splitSections(description.sdp);
sessionpart = sections.shift();
sections.forEach(function(mediaSection, sdpMLineIndex) {
var caps = SDPUtils.parseRtpParameters(mediaSection);
self._pendingOffer[sdpMLineIndex].localCapabilities = caps;
});
this.transceivers = this._pendingOffer;
delete this._pendingOffer;
}
} else if (description.type === 'answer') {
sections = SDPUtils.splitSections(self.remoteDescription.sdp);
sessionpart = sections.shift();
var isIceLite = SDPUtils.matchPrefix(sessionpart,
'a=ice-lite').length > 0;
sections.forEach(function(mediaSection, sdpMLineIndex) {
var transceiver = self.transceivers[sdpMLineIndex];
var iceGatherer = transceiver.iceGatherer;
var iceTransport = transceiver.iceTransport;
var dtlsTransport = transceiver.dtlsTransport;
var localCapabilities = transceiver.localCapabilities;
var remoteCapabilities = transceiver.remoteCapabilities;
var rejected = mediaSection.split('\n', 1)[0]
.split(' ', 2)[1] === '0';
if (!rejected && !transceiver.isDatachannel) {
var remoteIceParameters = SDPUtils.getIceParameters(
mediaSection, sessionpart);
if (isIceLite) {
var cands = SDPUtils.matchPrefix(mediaSection, 'a=candidate:')
.map(function(cand) {
return SDPUtils.parseCandidate(cand);
})
.filter(function(cand) {
return cand.component === '1';
});
// ice-lite only includes host candidates in the SDP so we can
// use setRemoteCandidates (which implies an
// RTCIceCandidateComplete)
if (cands.length) {
iceTransport.setRemoteCandidates(cands);
}
}
var remoteDtlsParameters = SDPUtils.getDtlsParameters(
mediaSection, sessionpart);
if (isIceLite) {
remoteDtlsParameters.role = 'server';
}
if (!self.usingBundle || sdpMLineIndex === 0) {
iceTransport.start(iceGatherer, remoteIceParameters,
isIceLite ? 'controlling' : 'controlled');
dtlsTransport.start(remoteDtlsParameters);
}
// Calculate intersection of capabilities.
var params = self._getCommonCapabilities(localCapabilities,
remoteCapabilities);
// Start the RTCRtpSender. The RTCRtpReceiver for this
// transceiver has already been started in setRemoteDescription.
self._transceive(transceiver,
params.codecs.length > 0,
false);
}
});
}
this.localDescription = {
type: description.type,
sdp: description.sdp
};
switch (description.type) {
case 'offer':
this._updateSignalingState('have-local-offer');
break;
case 'answer':
this._updateSignalingState('stable');
break;
default:
throw new TypeError('unsupported type "' + description.type +
'"');
}
// If a success callback was provided, emit ICE candidates after it
// has been executed. Otherwise, emit them after the returned Promise
// has resolved.
var hasCallback = arguments.length > 1 &&
typeof arguments[1] === 'function';
if (hasCallback) {
var cb = arguments[1];
window.setTimeout(function() {
cb();
if (self.iceGatheringState === 'new') {
self.iceGatheringState = 'gathering';
}
self._emitBufferedCandidates();
}, 0);
}
var p = Promise.resolve();
p.then(function() {
if (!hasCallback) {
if (self.iceGatheringState === 'new') {
self.iceGatheringState = 'gathering';
}
// Usually candidates will be emitted earlier.
window.setTimeout(self._emitBufferedCandidates.bind(self), 500);
}
});
return p;
};
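// Usage sketch (pc and offer are placeholders): both calling conventions
// handled above work; candidate emission starts after the success callback
// has run, or after the returned Promise resolves.
//
//   pc.setLocalDescription(offer).then(function() { /* gathering starts */ });
//   // or, legacy style:
//   pc.setLocalDescription(offer, function() { /* gathering starts */ });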
window.RTCPeerConnection.prototype.setRemoteDescription =
function(description) {
var self = this;
var stream = new MediaStream();
var receiverList = [];
var sections = SDPUtils.splitSections(description.sdp);
var sessionpart = sections.shift();
var isIceLite = SDPUtils.matchPrefix(sessionpart,
'a=ice-lite').length > 0;
this.usingBundle = SDPUtils.matchPrefix(sessionpart,
'a=group:BUNDLE ').length > 0;
sections.forEach(function(mediaSection, sdpMLineIndex) {
var lines = SDPUtils.splitLines(mediaSection);
var mline = lines[0].substr(2).split(' ');
var kind = mline[0];
var rejected = mline[1] === '0';
var direction = SDPUtils.getDirection(mediaSection, sessionpart);
var mid = SDPUtils.matchPrefix(mediaSection, 'a=mid:');
if (mid.length) {
mid = mid[0].substr(6);
} else {
mid = SDPUtils.generateIdentifier();
}
// Reject datachannels which are not implemented yet.
if (kind === 'application' && mline[2] === 'DTLS/SCTP') {
self.transceivers[sdpMLineIndex] = {
mid: mid,
isDatachannel: true
};
return;
}
var transceiver;
var iceGatherer;
var iceTransport;
var dtlsTransport;
var rtpSender;
var rtpReceiver;
var sendEncodingParameters;
var recvEncodingParameters;
var localCapabilities;
var track;
// FIXME: ensure the mediaSection has rtcp-mux set.
var remoteCapabilities = SDPUtils.parseRtpParameters(mediaSection);
var remoteIceParameters;
var remoteDtlsParameters;
if (!rejected) {
remoteIceParameters = SDPUtils.getIceParameters(mediaSection,
sessionpart);
remoteDtlsParameters = SDPUtils.getDtlsParameters(mediaSection,
sessionpart);
remoteDtlsParameters.role = 'client';
}
recvEncodingParameters =
SDPUtils.parseRtpEncodingParameters(mediaSection);
var cname;
// Gets the first SSRC. Note that with RTX there might be multiple
// SSRCs.
var remoteSsrc = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
.map(function(line) {
return SDPUtils.parseSsrcMedia(line);
})
.filter(function(obj) {
return obj.attribute === 'cname';
})[0];
if (remoteSsrc) {
cname = remoteSsrc.value;
}
var isComplete = SDPUtils.matchPrefix(mediaSection,
'a=end-of-candidates', sessionpart).length > 0;
var cands = SDPUtils.matchPrefix(mediaSection, 'a=candidate:')
.map(function(cand) {
return SDPUtils.parseCandidate(cand);
})
.filter(function(cand) {
return cand.component === '1';
});
if (description.type === 'offer' && !rejected) {
var transports = self.usingBundle && sdpMLineIndex > 0 ? {
iceGatherer: self.transceivers[0].iceGatherer,
iceTransport: self.transceivers[0].iceTransport,
dtlsTransport: self.transceivers[0].dtlsTransport
} : self._createIceAndDtlsTransports(mid, sdpMLineIndex);
if (isComplete) {
transports.iceTransport.setRemoteCandidates(cands);
}
localCapabilities = RTCRtpReceiver.getCapabilities(kind);
sendEncodingParameters = [{
ssrc: (2 * sdpMLineIndex + 2) * 1001
}];
rtpReceiver = new RTCRtpReceiver(transports.dtlsTransport, kind);
track = rtpReceiver.track;
receiverList.push([track, rtpReceiver]);
// FIXME: not correct when there are multiple streams but that is
// not currently supported in this shim.
stream.addTrack(track);
// FIXME: look at direction.
if (self.localStreams.length > 0 &&
self.localStreams[0].getTracks().length >= sdpMLineIndex) {
var localTrack;
if (kind === 'audio') {
localTrack = self.localStreams[0].getAudioTracks()[0];
} else if (kind === 'video') {
localTrack = self.localStreams[0].getVideoTracks()[0];
}
if (localTrack) {
rtpSender = new RTCRtpSender(localTrack,
transports.dtlsTransport);
}
}
self.transceivers[sdpMLineIndex] = {
iceGatherer: transports.iceGatherer,
iceTransport: transports.iceTransport,
dtlsTransport: transports.dtlsTransport,
localCapabilities: localCapabilities,
remoteCapabilities: remoteCapabilities,
rtpSender: rtpSender,
rtpReceiver: rtpReceiver,
kind: kind,
mid: mid,
cname: cname,
sendEncodingParameters: sendEncodingParameters,
recvEncodingParameters: recvEncodingParameters
};
// Start the RTCRtpReceiver now. The RTPSender is started in
// setLocalDescription.
self._transceive(self.transceivers[sdpMLineIndex],
false,
direction === 'sendrecv' || direction === 'sendonly');
} else if (description.type === 'answer' && !rejected) {
transceiver = self.transceivers[sdpMLineIndex];
iceGatherer = transceiver.iceGatherer;
iceTransport = transceiver.iceTransport;
dtlsTransport = transceiver.dtlsTransport;
rtpSender = transceiver.rtpSender;
rtpReceiver = transceiver.rtpReceiver;
sendEncodingParameters = transceiver.sendEncodingParameters;
localCapabilities = transceiver.localCapabilities;
self.transceivers[sdpMLineIndex].recvEncodingParameters =
recvEncodingParameters;
self.transceivers[sdpMLineIndex].remoteCapabilities =
remoteCapabilities;
self.transceivers[sdpMLineIndex].cname = cname;
if ((isIceLite || isComplete) && cands.length) {
iceTransport.setRemoteCandidates(cands);
}
if (!self.usingBundle || sdpMLineIndex === 0) {
iceTransport.start(iceGatherer, remoteIceParameters,
'controlling');
dtlsTransport.start(remoteDtlsParameters);
}
self._transceive(transceiver,
direction === 'sendrecv' || direction === 'recvonly',
direction === 'sendrecv' || direction === 'sendonly');
if (rtpReceiver &&
(direction === 'sendrecv' || direction === 'sendonly')) {
track = rtpReceiver.track;
receiverList.push([track, rtpReceiver]);
stream.addTrack(track);
} else {
// FIXME: actually the receiver should be created later.
delete transceiver.rtpReceiver;
}
}
});
this.remoteDescription = {
type: description.type,
sdp: description.sdp
};
switch (description.type) {
case 'offer':
this._updateSignalingState('have-remote-offer');
break;
case 'answer':
this._updateSignalingState('stable');
break;
default:
throw new TypeError('unsupported type "' + description.type +
'"');
}
if (stream.getTracks().length) {
self.remoteStreams.push(stream);
window.setTimeout(function() {
var event = new Event('addstream');
event.stream = stream;
self.dispatchEvent(event);
if (self.onaddstream !== null) {
window.setTimeout(function() {
self.onaddstream(event);
}, 0);
}
receiverList.forEach(function(item) {
var track = item[0];
var receiver = item[1];
var trackEvent = new Event('track');
trackEvent.track = track;
trackEvent.receiver = receiver;
trackEvent.streams = [stream];
self.dispatchEvent(trackEvent);
if (self.ontrack !== null) {
window.setTimeout(function() {
self.ontrack(trackEvent);
}, 0);
}
});
}, 0);
}
if (arguments.length > 1 && typeof arguments[1] === 'function') {
window.setTimeout(arguments[1], 0);
}
return Promise.resolve();
};
window.RTCPeerConnection.prototype.close = function() {
this.transceivers.forEach(function(transceiver) {
/* not yet
if (transceiver.iceGatherer) {
transceiver.iceGatherer.close();
}
*/
if (transceiver.iceTransport) {
transceiver.iceTransport.stop();
}
if (transceiver.dtlsTransport) {
transceiver.dtlsTransport.stop();
}
if (transceiver.rtpSender) {
transceiver.rtpSender.stop();
}
if (transceiver.rtpReceiver) {
transceiver.rtpReceiver.stop();
}
});
// FIXME: clean up tracks, local streams, remote streams, etc
this._updateSignalingState('closed');
};
// Update the signaling state.
window.RTCPeerConnection.prototype._updateSignalingState =
function(newState) {
this.signalingState = newState;
var event = new Event('signalingstatechange');
this.dispatchEvent(event);
if (this.onsignalingstatechange !== null) {
this.onsignalingstatechange(event);
}
};
// Determine whether to fire the negotiationneeded event.
window.RTCPeerConnection.prototype._maybeFireNegotiationNeeded =
function() {
// Fire away (for now).
var event = new Event('negotiationneeded');
this.dispatchEvent(event);
if (this.onnegotiationneeded !== null) {
this.onnegotiationneeded(event);
}
};
// Update the connection state.
window.RTCPeerConnection.prototype._updateConnectionState = function() {
var self = this;
var newState;
var states = {
'new': 0,
closed: 0,
connecting: 0,
checking: 0,
connected: 0,
completed: 0,
disconnected: 0,
failed: 0
};
this.transceivers.forEach(function(transceiver) {
states[transceiver.iceTransport.state]++;
states[transceiver.dtlsTransport.state]++;
});
// ICETransport.completed and connected are the same for this purpose.
states.connected += states.completed;
newState = 'new';
if (states.failed > 0) {
newState = 'failed';
} else if (states.connecting > 0 || states.checking > 0) {
newState = 'connecting';
} else if (states.disconnected > 0) {
newState = 'disconnected';
} else if (states.new > 0) {
newState = 'new';
} else if (states.connected > 0 || states.completed > 0) {
newState = 'connected';
}
if (newState !== self.iceConnectionState) {
self.iceConnectionState = newState;
var event = new Event('iceconnectionstatechange');
this.dispatchEvent(event);
if (this.oniceconnectionstatechange !== null) {
this.oniceconnectionstatechange(event);
}
}
};
window.RTCPeerConnection.prototype.createOffer = function() {
var self = this;
if (this._pendingOffer) {
throw new Error('createOffer called while there is a pending offer.');
}
var offerOptions;
if (arguments.length === 1 && typeof arguments[0] !== 'function') {
offerOptions = arguments[0];
} else if (arguments.length === 3) {
offerOptions = arguments[2];
}
var tracks = [];
var numAudioTracks = 0;
var numVideoTracks = 0;
// Default to sendrecv.
if (this.localStreams.length) {
numAudioTracks = this.localStreams[0].getAudioTracks().length;
numVideoTracks = this.localStreams[0].getVideoTracks().length;
}
// Determine number of audio and video tracks we need to send/recv.
if (offerOptions) {
// Reject Chrome legacy constraints.
if (offerOptions.mandatory || offerOptions.optional) {
throw new TypeError(
'Legacy mandatory/optional constraints not supported.');
}
if (offerOptions.offerToReceiveAudio !== undefined) {
numAudioTracks = offerOptions.offerToReceiveAudio;
}
if (offerOptions.offerToReceiveVideo !== undefined) {
numVideoTracks = offerOptions.offerToReceiveVideo;
}
}
if (this.localStreams.length) {
// Push local streams.
this.localStreams[0].getTracks().forEach(function(track) {
tracks.push({
kind: track.kind,
track: track,
wantReceive: track.kind === 'audio' ?
numAudioTracks > 0 : numVideoTracks > 0
});
if (track.kind === 'audio') {
numAudioTracks--;
} else if (track.kind === 'video') {
numVideoTracks--;
}
});
}
// Create M-lines for recvonly streams.
while (numAudioTracks > 0 || numVideoTracks > 0) {
if (numAudioTracks > 0) {
tracks.push({
kind: 'audio',
wantReceive: true
});
numAudioTracks--;
}
if (numVideoTracks > 0) {
tracks.push({
kind: 'video',
wantReceive: true
});
numVideoTracks--;
}
}
var sdp = SDPUtils.writeSessionBoilerplate();
var transceivers = [];
tracks.forEach(function(mline, sdpMLineIndex) {
// For each track, create an ice gatherer, ice transport,
// dtls transport, potentially rtpsender and rtpreceiver.
var track = mline.track;
var kind = mline.kind;
var mid = SDPUtils.generateIdentifier();
var transports = self.usingBundle && sdpMLineIndex > 0 ? {
iceGatherer: transceivers[0].iceGatherer,
iceTransport: transceivers[0].iceTransport,
dtlsTransport: transceivers[0].dtlsTransport
} : self._createIceAndDtlsTransports(mid, sdpMLineIndex);
var localCapabilities = RTCRtpSender.getCapabilities(kind);
var rtpSender;
var rtpReceiver;
// generate an ssrc now, to be used later in rtpSender.send
var sendEncodingParameters = [{
ssrc: (2 * sdpMLineIndex + 1) * 1001
}];
if (track) {
rtpSender = new RTCRtpSender(track, transports.dtlsTransport);
}
if (mline.wantReceive) {
rtpReceiver = new RTCRtpReceiver(transports.dtlsTransport, kind);
}
transceivers[sdpMLineIndex] = {
iceGatherer: transports.iceGatherer,
iceTransport: transports.iceTransport,
dtlsTransport: transports.dtlsTransport,
localCapabilities: localCapabilities,
remoteCapabilities: null,
rtpSender: rtpSender,
rtpReceiver: rtpReceiver,
kind: kind,
mid: mid,
sendEncodingParameters: sendEncodingParameters,
recvEncodingParameters: null
};
});
if (this.usingBundle) {
sdp += 'a=group:BUNDLE ' + transceivers.map(function(t) {
return t.mid;
}).join(' ') + '\r\n';
}
tracks.forEach(function(mline, sdpMLineIndex) {
var transceiver = transceivers[sdpMLineIndex];
sdp += SDPUtils.writeMediaSection(transceiver,
transceiver.localCapabilities, 'offer', self.localStreams[0]);
});
this._pendingOffer = transceivers;
var desc = new RTCSessionDescription({
type: 'offer',
sdp: sdp
});
if (arguments.length && typeof arguments[0] === 'function') {
window.setTimeout(arguments[0], 0, desc);
}
return Promise.resolve(desc);
};
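// Usage sketch (pc is a placeholder): offer options may be the only argument
// or the third (legacy callback) argument; Chrome-style mandatory/optional
// constraints are rejected above.
//
//   pc.createOffer({offerToReceiveAudio: 1, offerToReceiveVideo: 1})
//     .then(function(offer) { return pc.setLocalDescription(offer); });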
window.RTCPeerConnection.prototype.createAnswer = function() {
var self = this;
var sdp = SDPUtils.writeSessionBoilerplate();
if (this.usingBundle) {
sdp += 'a=group:BUNDLE ' + this.transceivers.map(function(t) {
return t.mid;
}).join(' ') + '\r\n';
}
this.transceivers.forEach(function(transceiver) {
if (transceiver.isDatachannel) {
sdp += 'm=application 0 DTLS/SCTP 5000\r\n' +
'c=IN IP4 0.0.0.0\r\n' +
'a=mid:' + transceiver.mid + '\r\n';
return;
}
// Calculate intersection of capabilities.
var commonCapabilities = self._getCommonCapabilities(
transceiver.localCapabilities,
transceiver.remoteCapabilities);
sdp += SDPUtils.writeMediaSection(transceiver, commonCapabilities,
'answer', self.localStreams[0]);
});
var desc = new RTCSessionDescription({
type: 'answer',
sdp: sdp
});
if (arguments.length && typeof arguments[0] === 'function') {
window.setTimeout(arguments[0], 0, desc);
}
return Promise.resolve(desc);
};
window.RTCPeerConnection.prototype.addIceCandidate = function(candidate) {
if (candidate === null) {
this.transceivers.forEach(function(transceiver) {
transceiver.iceTransport.addRemoteCandidate({});
});
} else {
var mLineIndex = candidate.sdpMLineIndex;
if (candidate.sdpMid) {
for (var i = 0; i < this.transceivers.length; i++) {
if (this.transceivers[i].mid === candidate.sdpMid) {
mLineIndex = i;
break;
}
}
}
var transceiver = this.transceivers[mLineIndex];
if (transceiver) {
var cand = Object.keys(candidate.candidate).length > 0 ?
SDPUtils.parseCandidate(candidate.candidate) : {};
// Ignore Chrome's invalid candidates since Edge does not like them.
if (cand.protocol === 'tcp' && (cand.port === 0 || cand.port === 9)) {
return;
}
// Ignore RTCP candidates, we assume RTCP-MUX.
if (cand.component !== '1') {
return;
}
// A dirty hack to make samples work.
if (cand.type === 'endOfCandidates') {
cand = {};
}
transceiver.iceTransport.addRemoteCandidate(cand);
// update the remoteDescription.
var sections = SDPUtils.splitSections(this.remoteDescription.sdp);
sections[mLineIndex + 1] += (cand.type ? candidate.candidate.trim()
: 'a=end-of-candidates') + '\r\n';
this.remoteDescription.sdp = sections.join('');
}
}
if (arguments.length > 1 && typeof arguments[1] === 'function') {
window.setTimeout(arguments[1], 0);
}
return Promise.resolve();
};
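// Usage sketch (the candidate string is made up): a candidate is routed to
// the matching transceiver, and null signals end-of-candidates on all of
// them, per the branches above.
//
//   pc.addIceCandidate({sdpMLineIndex: 0,
//     candidate: 'candidate:1 1 udp 2122260223 192.0.2.1 54321 typ host'});
//   pc.addIceCandidate(null); // end of remote candidates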
window.RTCPeerConnection.prototype.getStats = function() {
var promises = [];
this.transceivers.forEach(function(transceiver) {
['rtpSender', 'rtpReceiver', 'iceGatherer', 'iceTransport',
'dtlsTransport'].forEach(function(method) {
if (transceiver[method]) {
promises.push(transceiver[method].getStats());
}
});
});
var cb = arguments.length > 1 && typeof arguments[1] === 'function' &&
arguments[1];
return new Promise(function(resolve) {
// shim getStats with maplike support
var results = new Map();
Promise.all(promises).then(function(res) {
res.forEach(function(result) {
Object.keys(result).forEach(function(id) {
results.set(id, result[id]);
results[id] = result[id];
});
});
if (cb) {
window.setTimeout(cb, 0, results);
}
resolve(results);
});
});
};
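// Usage sketch (pc is a placeholder): the shim resolves with a Map that also
// mirrors each entry as a plain property, so both access styles work.
//
//   pc.getStats().then(function(stats) {
//     stats.forEach(function(report, id) { console.log(id, report); });
//   });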
}
};
// Expose public methods.
module.exports = {
shimPeerConnection: edgeShim.shimPeerConnection,
shimGetUserMedia: require('./getusermedia')
};
},{"../utils":10,"./getusermedia":6,"sdp":1}],6:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
// Expose public methods.
module.exports = function() {
var shimError_ = function(e) {
return {
name: {PermissionDeniedError: 'NotAllowedError'}[e.name] || e.name,
message: e.message,
constraint: e.constraint,
toString: function() {
return this.name;
}
};
};
// getUserMedia error shim.
var origGetUserMedia = navigator.mediaDevices.getUserMedia.
bind(navigator.mediaDevices);
navigator.mediaDevices.getUserMedia = function(c) {
return origGetUserMedia(c).catch(function(e) {
return Promise.reject(shimError_(e));
});
};
};
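// Usage sketch: after the wrapping above, Edge's PermissionDeniedError is
// surfaced under its spec name.
//
//   navigator.mediaDevices.getUserMedia({audio: true})
//     .catch(function(err) { console.log(err.name); }); // 'NotAllowedError'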
},{}],7:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var browserDetails = require('../utils').browserDetails;
var firefoxShim = {
shimOnTrack: function() {
if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
window.RTCPeerConnection.prototype)) {
Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
get: function() {
return this._ontrack;
},
set: function(f) {
if (this._ontrack) {
this.removeEventListener('track', this._ontrack);
this.removeEventListener('addstream', this._ontrackpoly);
}
this.addEventListener('track', this._ontrack = f);
this.addEventListener('addstream', this._ontrackpoly = function(e) {
e.stream.getTracks().forEach(function(track) {
var event = new Event('track');
event.track = track;
event.receiver = {track: track};
event.streams = [e.stream];
this.dispatchEvent(event);
}.bind(this));
}.bind(this));
}
});
}
},
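// Usage sketch (videoElement is a placeholder for an existing <video>
// element): with the polyfill above, Firefox versions that only fire
// 'addstream' still deliver per-track events through pc.ontrack.
//
//   pc.ontrack = function(e) { videoElement.srcObject = e.streams[0]; };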
shimSourceObject: function() {
// Firefox has supported mozSrcObject since FF22, unprefixed in 42.
if (typeof window === 'object') {
if (window.HTMLMediaElement &&
!('srcObject' in window.HTMLMediaElement.prototype)) {
// Shim the srcObject property, once, when HTMLMediaElement is found.
Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
get: function() {
return this.mozSrcObject;
},
set: function(stream) {
this.mozSrcObject = stream;
}
});
}
}
},
shimPeerConnection: function() {
if (typeof window !== 'object' || !(window.RTCPeerConnection ||
window.mozRTCPeerConnection)) {
return; // probably media.peerconnection.enabled=false in about:config
}
// The RTCPeerConnection object.
if (!window.RTCPeerConnection) {
window.RTCPeerConnection = function(pcConfig, pcConstraints) {
if (browserDetails.version < 38) {
// .urls is not supported in FF < 38.
// create RTCIceServers with a single url.
if (pcConfig && pcConfig.iceServers) {
var newIceServers = [];
for (var i = 0; i < pcConfig.iceServers.length; i++) {
var server = pcConfig.iceServers[i];
if (server.hasOwnProperty('urls')) {
for (var j = 0; j < server.urls.length; j++) {
var newServer = {
url: server.urls[j]
};
if (server.urls[j].indexOf('turn') === 0) {
newServer.username = server.username;
newServer.credential = server.credential;
}
newIceServers.push(newServer);
}
} else {
newIceServers.push(pcConfig.iceServers[i]);
}
}
pcConfig.iceServers = newIceServers;
}
}
return new mozRTCPeerConnection(pcConfig, pcConstraints);
};
window.RTCPeerConnection.prototype = mozRTCPeerConnection.prototype;
// wrap static methods. Currently just generateCertificate.
if (mozRTCPeerConnection.generateCertificate) {
Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
get: function() {
return mozRTCPeerConnection.generateCertificate;
}
});
}
window.RTCSessionDescription = mozRTCSessionDescription;
window.RTCIceCandidate = mozRTCIceCandidate;
}
// shim away need for obsolete RTCIceCandidate/RTCSessionDescription.
['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
.forEach(function(method) {
var nativeMethod = RTCPeerConnection.prototype[method];
RTCPeerConnection.prototype[method] = function() {
arguments[0] = new ((method === 'addIceCandidate') ?
RTCIceCandidate : RTCSessionDescription)(arguments[0]);
return nativeMethod.apply(this, arguments);
};
});
// support for addIceCandidate(null)
var nativeAddIceCandidate =
RTCPeerConnection.prototype.addIceCandidate;
RTCPeerConnection.prototype.addIceCandidate = function() {
return arguments[0] === null ? Promise.resolve()
: nativeAddIceCandidate.apply(this, arguments);
};
// shim getStats with maplike support
var makeMapStats = function(stats) {
var map = new Map();
Object.keys(stats).forEach(function(key) {
map.set(key, stats[key]);
map[key] = stats[key];
});
return map;
};
var nativeGetStats = RTCPeerConnection.prototype.getStats;
RTCPeerConnection.prototype.getStats = function(selector, onSucc, onErr) {
return nativeGetStats.apply(this, [selector || null])
.then(function(stats) {
return makeMapStats(stats);
})
.then(onSucc, onErr);
};
}
};
// Expose public methods.
module.exports = {
shimOnTrack: firefoxShim.shimOnTrack,
shimSourceObject: firefoxShim.shimSourceObject,
shimPeerConnection: firefoxShim.shimPeerConnection,
shimGetUserMedia: require('./getusermedia')
};
},{"../utils":10,"./getusermedia":8}],8:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var logging = require('../utils').log;
var browserDetails = require('../utils').browserDetails;
// Expose public methods.
module.exports = function() {
var shimError_ = function(e) {
return {
name: {
SecurityError: 'NotAllowedError',
PermissionDeniedError: 'NotAllowedError'
}[e.name] || e.name,
message: {
'The operation is insecure.': 'The request is not allowed by the ' +
'user agent or the platform in the current context.'
}[e.message] || e.message,
constraint: e.constraint,
toString: function() {
return this.name + (this.message && ': ') + this.message;
}
};
};
// getUserMedia constraints shim.
var getUserMedia_ = function(constraints, onSuccess, onError) {
var constraintsToFF37_ = function(c) {
if (typeof c !== 'object' || c.require) {
return c;
}
var require = [];
Object.keys(c).forEach(function(key) {
if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
return;
}
var r = c[key] = (typeof c[key] === 'object') ?
c[key] : {ideal: c[key]};
if (r.min !== undefined ||
r.max !== undefined || r.exact !== undefined) {
require.push(key);
}
if (r.exact !== undefined) {
if (typeof r.exact === 'number') {
r.min = r.max = r.exact;
} else {
c[key] = r.exact;
}
delete r.exact;
}
if (r.ideal !== undefined) {
c.advanced = c.advanced || [];
var oc = {};
if (typeof r.ideal === 'number') {
oc[key] = {min: r.ideal, max: r.ideal};
} else {
oc[key] = r.ideal;
}
c.advanced.push(oc);
delete r.ideal;
if (!Object.keys(r).length) {
delete c[key];
}
}
});
if (require.length) {
c.require = require;
}
return c;
};
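// Illustrative conversion traced from constraintsToFF37_ above (values are
// made up):
//   constraintsToFF37_({width: {min: 640, ideal: 1280}})
//   // -> {width: {min: 640},
//   //     advanced: [{width: {min: 1280, max: 1280}}],
//   //     require: ['width']}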
constraints = JSON.parse(JSON.stringify(constraints));
if (browserDetails.version < 38) {
logging('spec: ' + JSON.stringify(constraints));
if (constraints.audio) {
constraints.audio = constraintsToFF37_(constraints.audio);
}
if (constraints.video) {
constraints.video = constraintsToFF37_(constraints.video);
}
logging('ff37: ' + JSON.stringify(constraints));
}
return navigator.mozGetUserMedia(constraints, onSuccess, function(e) {
onError(shimError_(e));
});
};
// Returns the result of getUserMedia as a Promise.
var getUserMediaPromise_ = function(constraints) {
return new Promise(function(resolve, reject) {
getUserMedia_(constraints, resolve, reject);
});
};
// Shim for mediaDevices on older versions.
if (!navigator.mediaDevices) {
navigator.mediaDevices = {getUserMedia: getUserMediaPromise_,
addEventListener: function() { },
removeEventListener: function() { }
};
}
navigator.mediaDevices.enumerateDevices =
navigator.mediaDevices.enumerateDevices || function() {
return new Promise(function(resolve) {
var infos = [
{kind: 'audioinput', deviceId: 'default', label: '', groupId: ''},
{kind: 'videoinput', deviceId: 'default', label: '', groupId: ''}
];
resolve(infos);
});
};
if (browserDetails.version < 41) {
// Work around http://bugzil.la/1169665
var orgEnumerateDevices =
navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices);
navigator.mediaDevices.enumerateDevices = function() {
return orgEnumerateDevices().then(undefined, function(e) {
if (e.name === 'NotFoundError') {
return [];
}
throw e;
});
};
}
if (browserDetails.version < 49) {
var origGetUserMedia = navigator.mediaDevices.getUserMedia.
bind(navigator.mediaDevices);
navigator.mediaDevices.getUserMedia = function(c) {
return origGetUserMedia(c).catch(function(e) {
return Promise.reject(shimError_(e));
});
};
}
navigator.getUserMedia = function(constraints, onSuccess, onError) {
if (browserDetails.version < 44) {
return getUserMedia_(constraints, onSuccess, onError);
}
// Replace Firefox 44+'s deprecation warning with unprefixed version.
console.warn('navigator.getUserMedia has been replaced by ' +
'navigator.mediaDevices.getUserMedia');
navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
};
};
},{"../utils":10}],9:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
'use strict';
var safariShim = {
// TODO: DrAlex, should be here, double check against LayoutTests
// shimOnTrack: function() { },
// TODO: once the back-end for the mac port is done, add.
// TODO: check for webkitGTK+
// shimPeerConnection: function() { },
shimGetUserMedia: function() {
navigator.getUserMedia = navigator.webkitGetUserMedia;
}
};
// Expose public methods.
module.exports = {
shimGetUserMedia: safariShim.shimGetUserMedia
// TODO
// shimOnTrack: safariShim.shimOnTrack,
// shimPeerConnection: safariShim.shimPeerConnection
};
},{}],10:[function(require,module,exports){
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
/* eslint-env node */
'use strict';
var logDisabled_ = true;
// Utility methods.
var utils = {
disableLog: function(bool) {
if (typeof bool !== 'boolean') {
return new Error('Argument type: ' + typeof bool +
'. Please use a boolean.');
}
logDisabled_ = bool;
return (bool) ? 'adapter.js logging disabled' :
'adapter.js logging enabled';
},
log: function() {
if (typeof window === 'object') {
if (logDisabled_) {
return;
}
if (typeof console !== 'undefined' && typeof console.log === 'function') {
console.log.apply(console, arguments);
}
}
},
/**
* Extract browser version out of the provided user agent string.
*
* @param {!string} uastring userAgent string.
* @param {!string} expr Regular expression used as match criteria.
* @param {!number} pos position in the version string to be returned.
* @return {!number} browser version.
*/
extractVersion: function(uastring, expr, pos) {
var match = uastring.match(expr);
return match && match.length >= pos && parseInt(match[pos], 10);
},
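// Illustrative: extractVersion('Firefox/49.0', /Firefox\/([0-9]+)\./, 1)
// returns 49; a userAgent that does not match the expression yields a
// falsy result instead of a number.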
/**
* Browser detector.
*
* @return {object} result containing browser and version
* properties.
*/
detectBrowser: function() {
// Returned result object.
var result = {};
result.browser = null;
result.version = null;
// Fail early if it's not a browser
if (typeof window === 'undefined' || !window.navigator) {
result.browser = 'Not a browser.';
return result;
}
// Firefox.
if (navigator.mozGetUserMedia) {
result.browser = 'firefox';
result.version = this.extractVersion(navigator.userAgent,
/Firefox\/([0-9]+)\./, 1);
// all webkit-based browsers
} else if (navigator.webkitGetUserMedia) {
// Chrome, Chromium, Webview, Opera, all use the chrome shim for now
if (window.webkitRTCPeerConnection) {
result.browser = 'chrome';
result.version = this.extractVersion(navigator.userAgent,
/Chrom(e|ium)\/([0-9]+)\./, 2);
// Safari or unknown webkit-based
// for the time being Safari has support for MediaStreams but not webRTC
} else {
// Safari UA substrings of interest for reference:
// - webkit version: AppleWebKit/602.1.25 (also used in Op,Cr)
// - safari UI version: Version/9.0.3 (unique to Safari)
// - safari UI webkit version: Safari/601.4.4 (also used in Op,Cr)
//
// if the webkit version and the safari UI webkit version are equal,
// this is a stable version.
//
// only the internal webkit version is important today to know if
// media streams are supported
//
if (navigator.userAgent.match(/Version\/(\d+).(\d+)/)) {
result.browser = 'safari';
result.version = this.extractVersion(navigator.userAgent,
/AppleWebKit\/([0-9]+)\./, 1);
// unknown webkit-based browser
} else {
result.browser = 'Unsupported webkit-based browser ' +
'with GUM support but no WebRTC support.';
return result;
}
}
// Edge.
} else if (navigator.mediaDevices &&
navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)) {
result.browser = 'edge';
result.version = this.extractVersion(navigator.userAgent,
/Edge\/(\d+).(\d+)$/, 2);
// Default fallthrough: not supported.
} else {
result.browser = 'Not a supported browser.';
return result;
}
return result;
}
};
// Export.
module.exports = {
log: utils.log,
disableLog: utils.disableLog,
browserDetails: utils.detectBrowser(),
extractVersion: utils.extractVersion
};
},{}]},{},[2])(2)
});
/* jshint ignore:end */
// END OF INJECTION OF GOOGLE'S ADAPTER.JS CONTENT
///////////////////////////////////////////////////////////////////
AdapterJS.parseWebrtcDetectedBrowser();
///////////////////////////////////////////////////////////////////
// EXTENSION FOR CHROME, FIREFOX AND EDGE
// Includes legacy functions
// -- createIceServer
// -- createIceServers
// -- MediaStreamTrack.getSources
//
// and additional shims
// -- attachMediaStream
// -- reattachMediaStream
// -- requestUserMedia
// -- a call to AdapterJS.maybeThroughWebRTCReady (notifies WebRTC is ready)
// Add support for legacy functions createIceServer and createIceServers
if ( navigator.mozGetUserMedia ) {
// Shim for MediaStreamTrack.getSources.
MediaStreamTrack.getSources = function(successCb) {
setTimeout(function() {
var infos = [
{ kind: 'audio', id: 'default', label:'', facing:'' },
{ kind: 'video', id: 'default', label:'', facing:'' }
];
successCb(infos);
}, 0);
};
// Attach a media stream to an element.
attachMediaStream = function(element, stream) {
element.srcObject = stream;
return element;
};
reattachMediaStream = function(to, from) {
to.srcObject = from.srcObject;
return to;
};
createIceServer = function (url, username, password) {
console.warn('createIceServer is deprecated. It should be replaced with an application level implementation.');
// Note: the adapter.js code injected above auto-converts 'urls' to the legacy 'url' form for FF < 38
var iceServer = null;
var urlParts = url.split(':');
if (urlParts[0].indexOf('stun') === 0) {
iceServer = { urls : [url] };
} else if (urlParts[0].indexOf('turn') === 0) {
if (webrtcDetectedVersion < 27) {
var turnUrlParts = url.split('?');
if (turnUrlParts.length === 1 ||
turnUrlParts[1].indexOf('transport=udp') === 0) {
iceServer = {
urls : [turnUrlParts[0]],
credential : password,
username : username
};
}
} else {
iceServer = {
urls : [url],
credential : password,
username : username
};
}
}
return iceServer;
};
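// Illustrative (URL and credentials are made up): on FF >= 27 a TURN URL
// maps straight to the 'urls' form; older versions only accept TURN over
// UDP, as handled above.
//
//   createIceServer('turn:turn.example.org:3478?transport=udp', 'user', 'pw')
//   // -> {urls: ['turn:turn.example.org:3478?transport=udp'],
//   //     credential: 'pw', username: 'user'}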
createIceServers = function (urls, username, password) {
console.warn('createIceServers is deprecated. It should be replaced with an application level implementation.');
var iceServers = [];
for (var i = 0; i < urls.length; i++) {
var iceServer = createIceServer(urls[i], username, password);
if (iceServer !== null) {
iceServers.push(iceServer);
}
}
return iceServers;
};
} else if ( navigator.webkitGetUserMedia ) {
// Attach a media stream to an element.
attachMediaStream = function(element, stream) {
if (webrtcDetectedVersion >= 43) {
element.srcObject = stream;
} else if (typeof element.src !== 'undefined') {
element.src = URL.createObjectURL(stream);
} else {
console.error('Error attaching stream to element.');
// logging('Error attaching stream to element.');
}
return element;
};
reattachMediaStream = function(to, from) {
if (webrtcDetectedVersion >= 43) {
to.srcObject = from.srcObject;
} else {
to.src = from.src;
}
return to;
};
createIceServer = function (url, username, password) {
console.warn('createIceServer is deprecated. It should be replaced with an application level implementation.');
var iceServer = null;
var urlParts = url.split(':');
if (urlParts[0].indexOf('stun') === 0) {
iceServer = { 'url' : url };
} else if (urlParts[0].indexOf('turn') === 0) {
iceServer = {
'url' : url,
'credential' : password,
'username' : username
};
}
return iceServer;
};
createIceServers = function (urls, username, password) {
console.warn('createIceServers is deprecated. It should be replaced with an application level implementation.');
var iceServers = [];
if (webrtcDetectedVersion >= 34) {
iceServers = {
'urls' : urls,
'credential' : password,
'username' : username
};
} else {
for (var i = 0; i < urls.length; i++) {
var iceServer = createIceServer(urls[i], username, password);
if (iceServer !== null) {
iceServers.push(iceServer);
}
}
}
return iceServers;
};
} else if (navigator.mediaDevices && navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)) {
// Attach a media stream to an element.
attachMediaStream = function(element, stream) {
element.srcObject = stream;
return element;
};
reattachMediaStream = function(to, from) {
to.srcObject = from.srcObject;
return to;
};
}
// Need to override attachMediaStream and reattachMediaStream
// to support the plugin's logic
attachMediaStream_base = attachMediaStream;
if (webrtcDetectedBrowser === 'opera') {
attachMediaStream_base = function (element, stream) {
if (webrtcDetectedVersion > 38) {
element.srcObject = stream;
} else if (typeof element.src !== 'undefined') {
element.src = URL.createObjectURL(stream);
}
// Else it doesn't work
};
}
attachMediaStream = function (element, stream) {
if ((webrtcDetectedBrowser === 'chrome' ||
webrtcDetectedBrowser === 'opera') &&
!stream) {
// Chrome does not support "src = null"
element.src = '';
} else {
attachMediaStream_base(element, stream);
}
return element;
};
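// Usage sketch (videoElement and the callbacks are placeholders): attach a
// getUserMedia stream through the wrapper above, which clears element.src
// instead of assigning a null stream on Chrome/Opera.
//
//   navigator.getUserMedia({video: true}, function (stream) {
//     attachMediaStream(videoElement, stream);
//   }, function (error) { console.error(error); });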
reattachMediaStream_base = reattachMediaStream;
reattachMediaStream = function (to, from) {
reattachMediaStream_base(to, from);
return to;
};
// Propagate attachMediaStream and gUM in window and AdapterJS
window.attachMediaStream = attachMediaStream;
window.reattachMediaStream = reattachMediaStream;
window.getUserMedia = function(constraints, onSuccess, onFailure) {
navigator.getUserMedia(constraints, onSuccess, onFailure);
};
AdapterJS.attachMediaStream = attachMediaStream;
AdapterJS.reattachMediaStream = reattachMediaStream;
AdapterJS.getUserMedia = getUserMedia;
// Remove the Google-defined requestUserMedia shim when Promise is not available
if (typeof Promise === 'undefined') {
requestUserMedia = null;
}
AdapterJS.maybeThroughWebRTCReady();
// END OF EXTENSION OF CHROME, FIREFOX AND EDGE
///////////////////////////////////////////////////////////////////
} else { // TRY TO USE PLUGIN
///////////////////////////////////////////////////////////////////
// WEBRTC PLUGIN SHIM
// Will automatically check if the plugin is available and inject it
// into the DOM if it is.
// When the plugin is not available, a banner will be shown suggesting installation
// Use AdapterJS.options.hidePluginInstallPrompt to prevent this banner from popping up
//
// Shims the following:
// -- getUserMedia
// -- MediaStreamTrack
// -- MediaStreamTrack.getSources
// -- RTCPeerConnection
// -- RTCSessionDescription
// -- RTCIceCandidate
// -- createIceServer
// -- createIceServers
// -- attachMediaStream
// -- reattachMediaStream
// -- webrtcDetectedBrowser
// -- webrtcDetectedVersion
// IE 9 does not provide an implementation of console.log until the developer console is opened
if (typeof console !== 'object' || typeof console.log !== 'function') {
/* jshint -W020 */
console = {};
// Implemented based on console specs from MDN
// You may override these functions
console.log = function (arg) {};
console.info = function (arg) {};
console.error = function (arg) {};
console.dir = function (arg) {};
console.exception = function (arg) {};
console.trace = function (arg) {};
console.warn = function (arg) {};
console.count = function (arg) {};
console.debug = function (arg) {};
console.time = function (arg) {};
console.timeEnd = function (arg) {};
console.group = function (arg) {};
console.groupCollapsed = function (arg) {};
console.groupEnd = function (arg) {};
/* jshint +W020 */
}
AdapterJS.parseWebrtcDetectedBrowser();
isIE = webrtcDetectedBrowser === 'IE';
/* jshint -W035 */
AdapterJS.WebRTCPlugin.WaitForPluginReady = function() {
while (AdapterJS.WebRTCPlugin.pluginState !== AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY) {
/* Busy-wait: intentionally blocks here until the plugin reports READY. */
}
};
/* jshint +W035 */
AdapterJS.WebRTCPlugin.callWhenPluginReady = function (callback) {
if (AdapterJS.WebRTCPlugin.pluginState === AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY) {
// Call immediately if possible
// Once the plugin is set, the code will always take this path
callback();
} else {
// otherwise start a 100ms interval
var checkPluginReadyState = setInterval(function () {
if (AdapterJS.WebRTCPlugin.pluginState === AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY) {
clearInterval(checkPluginReadyState);
callback();
}
}, 100);
}
};
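// Usage sketch: defer plugin calls until the plugin has loaded; the callback
// runs immediately once pluginState is READY, otherwise after the 100ms poll.
//
//   AdapterJS.WebRTCPlugin.callWhenPluginReady(function () {
//     console.log('WebRTC plugin is ready');
//   });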
AdapterJS.WebRTCPlugin.setLogLevel = function(logLevel) {
AdapterJS.WebRTCPlugin.callWhenPluginReady(function() {
AdapterJS.WebRTCPlugin.plugin.setLogLevel(logLevel);
});
};
AdapterJS.WebRTCPlugin.injectPlugin = function () {
// only inject once the page is ready
if (document.readyState !== 'complete') {
return;
}
// Prevent multiple injections
if (AdapterJS.WebRTCPlugin.pluginState !== AdapterJS.WebRTCPlugin.PLUGIN_STATES.INITIALIZING) {
return;
}
AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.INJECTING;
if (webrtcDetectedBrowser === 'IE' && webrtcDetectedVersion <= 10) {
var frag = document.createDocumentFragment();
AdapterJS.WebRTCPlugin.plugin = document.createElement('div');
AdapterJS.WebRTCPlugin.plugin.innerHTML = '';
while (AdapterJS.WebRTCPlugin.plugin.firstChild) {
frag.appendChild(AdapterJS.WebRTCPlugin.plugin.firstChild);
}
document.body.appendChild(frag);
// Need to re-fetch the plugin
AdapterJS.WebRTCPlugin.plugin =
document.getElementById(AdapterJS.WebRTCPlugin.pluginInfo.pluginId);
} else {
// Load Plugin
AdapterJS.WebRTCPlugin.plugin = document.createElement('object');
AdapterJS.WebRTCPlugin.plugin.id =
AdapterJS.WebRTCPlugin.pluginInfo.pluginId;
// IE will only start the plugin if it's ACTUALLY visible
if (isIE) {
AdapterJS.WebRTCPlugin.plugin.width = '1px';
AdapterJS.WebRTCPlugin.plugin.height = '1px';
} else { // The size of the plugin on Safari should be 0x0px
// so that the authorisation prompt is at the top
AdapterJS.WebRTCPlugin.plugin.width = '0px';
AdapterJS.WebRTCPlugin.plugin.height = '0px';
}
AdapterJS.WebRTCPlugin.plugin.type = AdapterJS.WebRTCPlugin.pluginInfo.type;
AdapterJS.WebRTCPlugin.plugin.innerHTML = '' +
'' +
' ' +
(AdapterJS.options.getAllCams ? '':'') +
'' +
'';
document.body.appendChild(AdapterJS.WebRTCPlugin.plugin);
}
AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.INJECTED;
};
AdapterJS.WebRTCPlugin.isPluginInstalled =
function (comName, plugName, plugType, installedCb, notInstalledCb) {
if (!isIE) {
var pluginArray = navigator.mimeTypes;
for (var i = 0; i < pluginArray.length; i++) {
if (pluginArray[i].type.indexOf(plugType) >= 0) {
installedCb();
return;
}
}
notInstalledCb();
} else {
try {
var axo = new ActiveXObject(comName + '.' + plugName);
} catch (e) {
notInstalledCb();
return;
}
installedCb();
}
};
AdapterJS.WebRTCPlugin.defineWebRTCInterface = function () {
if (AdapterJS.WebRTCPlugin.pluginState ===
AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY) {
console.error('AdapterJS - WebRTC interface has already been defined');
return;
}
AdapterJS.WebRTCPlugin.pluginState = AdapterJS.WebRTCPlugin.PLUGIN_STATES.INITIALIZING;
AdapterJS.isDefined = function (variable) {
return variable !== null && variable !== undefined;
};
createIceServer = function (url, username, password) {
var iceServer = null;
var urlParts = url.split(':');
if (urlParts[0].indexOf('stun') === 0) {
iceServer = {
'url' : url,
'hasCredentials' : false
};
} else if (urlParts[0].indexOf('turn') === 0) {
iceServer = {
'url' : url,
'hasCredentials' : true,
'credential' : password,
'username' : username
};
}
return iceServer;
};
createIceServers = function (urls, username, password) {
var iceServers = [];
for (var i = 0; i < urls.length; ++i) {
iceServers.push(createIceServer(urls[i], username, password));
}
return iceServers;
};
RTCSessionDescription = function (info) {
AdapterJS.WebRTCPlugin.WaitForPluginReady();
return AdapterJS.WebRTCPlugin.plugin.
ConstructSessionDescription(info.type, info.sdp);
};
RTCPeerConnection = function (servers, constraints) {
// Validate servers argument
if (!(servers === undefined ||
servers === null ||
Array.isArray(servers.iceServers))) {
throw new Error('Failed to construct \'RTCPeerConnection\': Malformed RTCConfiguration');
}
// Validate constraints argument
if (typeof constraints !== 'undefined' && constraints !== null) {
var invalidConstraints = false;
invalidConstraints |= typeof constraints !== 'object';
invalidConstraints |= constraints.hasOwnProperty('mandatory') &&
constraints.mandatory !== undefined &&
constraints.mandatory !== null &&
constraints.mandatory.constructor !== Object;
invalidConstraints |= constraints.hasOwnProperty('optional') &&
constraints.optional !== undefined &&
constraints.optional !== null &&
!Array.isArray(constraints.optional);
if (invalidConstraints) {
throw new Error('Failed to construct \'RTCPeerConnection\': Malformed constraints object');
}
}
// Call relevant PeerConnection constructor according to plugin version
AdapterJS.WebRTCPlugin.WaitForPluginReady();
// RTCPeerConnection prototype from the old spec
var iceServers = null;
if (servers && Array.isArray(servers.iceServers)) {
iceServers = servers.iceServers;
for (var i = 0; i < iceServers.length; i++) {
// Legacy plugin versions compatibility
if (iceServers[i].urls && !iceServers[i].url) {
iceServers[i].url = iceServers[i].urls;
}
iceServers[i].hasCredentials = AdapterJS.
isDefined(iceServers[i].username) &&
AdapterJS.isDefined(iceServers[i].credential);
}
}
if (AdapterJS.WebRTCPlugin.plugin.PEER_CONNECTION_VERSION &&
AdapterJS.WebRTCPlugin.plugin.PEER_CONNECTION_VERSION > 1) {
// RTCPeerConnection prototype from the new spec
if (iceServers) {
servers.iceServers = iceServers;
}
return AdapterJS.WebRTCPlugin.plugin.PeerConnection(servers);
} else {
var mandatory = (constraints && constraints.mandatory) ?
constraints.mandatory : null;
var optional = (constraints && constraints.optional) ?
constraints.optional : null;
return AdapterJS.WebRTCPlugin.plugin.
PeerConnection(AdapterJS.WebRTCPlugin.pageId,
iceServers, mandatory, optional);
}
};
MediaStreamTrack = function(){};
MediaStreamTrack.getSources = function (callback) {
AdapterJS.WebRTCPlugin.callWhenPluginReady(function() {
AdapterJS.WebRTCPlugin.plugin.GetSources(callback);
});
};
// getUserMedia constraints shim.
// Copied from Chrome
var constraintsToPlugin = function(c) {
if (typeof c !== 'object' || c.mandatory || c.optional) {
return c;
}
var cc = {};
Object.keys(c).forEach(function(key) {
if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
return;
}
var r = (typeof c[key] === 'object') ? c[key] : {ideal: c[key]};
if (r.exact !== undefined && typeof r.exact === 'number') {
r.min = r.max = r.exact;
}
var oldname = function(prefix, name) {
if (prefix) {
return prefix + name.charAt(0).toUpperCase() + name.slice(1);
}
return (name === 'deviceId') ? 'sourceId' : name;
};
if (r.ideal !== undefined) {
cc.optional = cc.optional || [];
var oc = {};
if (typeof r.ideal === 'number') {
oc[oldname('min', key)] = r.ideal;
cc.optional.push(oc);
oc = {};
oc[oldname('max', key)] = r.ideal;
cc.optional.push(oc);
} else {
oc[oldname('', key)] = r.ideal;
cc.optional.push(oc);
}
}
if (r.exact !== undefined && typeof r.exact !== 'number') {
cc.mandatory = cc.mandatory || {};
cc.mandatory[oldname('', key)] = r.exact;
} else {
['min', 'max'].forEach(function(mix) {
if (r[mix] !== undefined) {
cc.mandatory = cc.mandatory || {};
cc.mandatory[oldname(mix, key)] = r[mix];
}
});
}
});
if (c.advanced) {
cc.optional = (cc.optional || []).concat(c.advanced);
}
return cc;
};
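// Illustrative conversion traced from constraintsToPlugin above (values are
// made up):
//   constraintsToPlugin({width: {exact: 640}, deviceId: {ideal: 'abc'}})
//   // -> {mandatory: {minWidth: 640, maxWidth: 640},
//   //     optional: [{sourceId: 'abc'}]}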
getUserMedia = function (constraints, successCallback, failureCallback) {
var cc = {};
cc.audio = constraints.audio ?
constraintsToPlugin(constraints.audio) : false;
cc.video = constraints.video ?
constraintsToPlugin(constraints.video) : false;
AdapterJS.WebRTCPlugin.callWhenPluginReady(function() {
AdapterJS.WebRTCPlugin.plugin.
getUserMedia(cc, successCallback, failureCallback);
});
};
window.navigator.getUserMedia = getUserMedia;
// Define navigator.mediaDevices when Promise is available
if ( !navigator.mediaDevices &&
typeof Promise !== 'undefined') {
requestUserMedia = function(constraints) {
return new Promise(function(resolve, reject) {
getUserMedia(constraints, resolve, reject);
});
};
navigator.mediaDevices = {getUserMedia: requestUserMedia,
enumerateDevices: function() {
return new Promise(function(resolve) {
var kinds = {audio: 'audioinput', video: 'videoinput'};
return MediaStreamTrack.getSources(function(devices) {
resolve(devices.map(function(device) {
return {label: device.label,
kind: kinds[device.kind],
id: device.id,
deviceId: device.id,
groupId: ''};
}));
});
});
}};
}
attachMediaStream = function (element, stream) {
if (!element || !element.parentNode) {
return;
}
var streamId;
if (stream === null) {
streamId = '';
} else {
if (typeof stream.enableSoundTracks !== 'undefined') {
stream.enableSoundTracks(true);
}
streamId = stream.id;
}
var elementId = element.id.length === 0 ? Math.random().toString(36).slice(2) : element.id;
var nodeName = element.nodeName.toLowerCase();
if (nodeName !== 'object') { // not a plugin