
RTCUtils.js 41KB

  1. /* global config, require, attachMediaStream, getUserMedia,
  2. RTCPeerConnection, RTCSessionDescription, RTCIceCandidate, MediaStreamTrack,
  3. mozRTCPeerConnection, mozRTCSessionDescription, mozRTCIceCandidate,
  4. webkitRTCPeerConnection, webkitMediaStream, webkitURL
  5. */
  6. /* jshint -W101 */
  7. var logger = require("jitsi-meet-logger").getLogger(__filename);
  8. var RTCBrowserType = require("./RTCBrowserType");
  9. var Resolutions = require("../../service/RTC/Resolutions");
  10. var RTCEvents = require("../../service/RTC/RTCEvents");
  11. var AdapterJS = require("./adapter.screenshare");
  12. var SDPUtil = require("../xmpp/SDPUtil");
  13. var EventEmitter = require("events");
  14. var screenObtainer = require("./ScreenObtainer");
  15. var JitsiTrackErrors = require("../../JitsiTrackErrors");
  16. var JitsiTrackError = require("../../JitsiTrackError");
  17. var MediaType = require("../../service/RTC/MediaType");
  18. var VideoType = require("../../service/RTC/VideoType");
  19. var GlobalOnErrorHandler = require("../util/GlobalOnErrorHandler");
  20. var eventEmitter = new EventEmitter();
  21. var AVAILABLE_DEVICES_POLL_INTERVAL_TIME = 3000; // ms
  22. var devices = {
  23. audio: false,
  24. video: false
  25. };
  26. // Currently audio output device change is supported only in Chrome and
  27. // default output always has 'default' device ID
  28. var audioOutputDeviceId = 'default'; // default device
  29. var featureDetectionAudioEl = document.createElement('audio');
  30. var isAudioOutputDeviceChangeAvailable =
  31. typeof featureDetectionAudioEl.setSinkId !== 'undefined';
  32. var currentlyAvailableMediaDevices = [];
  33. var rawEnumerateDevicesWithCallback = navigator.mediaDevices
  34. && navigator.mediaDevices.enumerateDevices
  35. ? function(callback) {
  36. navigator.mediaDevices.enumerateDevices().then(callback, function () {
  37. callback([]);
  38. });
  39. }
  40. : (MediaStreamTrack && MediaStreamTrack.getSources)
  41. ? function (callback) {
  42. MediaStreamTrack.getSources(function (sources) {
  43. callback(sources.map(convertMediaStreamTrackSource));
  44. });
  45. }
  46. : undefined;
  47. // TODO: currently no browser supports 'devicechange' event even in nightly
  48. // builds, so no feature/browser detection is used at all. However, in the future this
  49. // should be changed to some expression. Progress on 'devicechange' event
  50. // implementation for Chrome/Opera/NWJS can be tracked at
  51. // https://bugs.chromium.org/p/chromium/issues/detail?id=388648, for Firefox -
  52. // at https://bugzilla.mozilla.org/show_bug.cgi?id=1152383. More information on
  53. // 'devicechange' event can be found in spec -
  54. // http://w3c.github.io/mediacapture-main/#event-mediadevices-devicechange
  55. // TODO: check MS Edge
  56. var isDeviceChangeEventSupported = false;
  57. var rtcReady = false;
  58. function setResolutionConstraints(constraints, resolution) {
  59. var isAndroid = RTCBrowserType.isAndroid();
  60. if (Resolutions[resolution]) {
  61. constraints.video.mandatory.minWidth = Resolutions[resolution].width;
  62. constraints.video.mandatory.minHeight = Resolutions[resolution].height;
  63. }
  64. else if (isAndroid) {
  65. // FIXME can't remember if the purpose of this was to always request
  66. // low resolution on Android? If yes, it should be moved up front
  67. constraints.video.mandatory.minWidth = 320;
  68. constraints.video.mandatory.minHeight = 180;
  69. constraints.video.mandatory.maxFrameRate = 15;
  70. }
  71. if (constraints.video.mandatory.minWidth)
  72. constraints.video.mandatory.maxWidth =
  73. constraints.video.mandatory.minWidth;
  74. if (constraints.video.mandatory.minHeight)
  75. constraints.video.mandatory.maxHeight =
  76. constraints.video.mandatory.minHeight;
  77. }
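/*
 * Illustrative sketch (not part of the original file): assuming
 * Resolutions['720'] maps to { width: 1280, height: 720 }, a call like
 *
 *   var c = { video: { mandatory: {}, optional: [] } };
 *   setResolutionConstraints(c, '720');
 *
 * would leave c.video.mandatory as
 *
 *   { minWidth: 1280, minHeight: 720, maxWidth: 1280, maxHeight: 720 }
 *
 * i.e. the min values are copied to the max values, pinning the requested size.
 */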
  78. /**
  79. * @param {string[]} um required user media types
  80. *
  81. * @param {Object} [options={}] optional parameters
  82. * @param {string} options.resolution
  83. * @param {number} options.bandwidth
  84. * @param {number} options.fps
  85. * @param {string} options.desktopStream
  86. * @param {string} options.cameraDeviceId
  87. * @param {string} options.micDeviceId
  88. * @param {bool} options.firefox_fake_device
  89. */
  90. function getConstraints(um, options) {
  91. var constraints = {audio: false, video: false};
  92. if (um.indexOf('video') >= 0) {
  93. // same behaviour as true
  94. constraints.video = { mandatory: {}, optional: [] };
  95. if (options.cameraDeviceId) {
  96. // new style of setting the device id (FF only)
  97. constraints.video.deviceId = options.cameraDeviceId;
  98. // old style
  99. constraints.video.optional.push({
  100. sourceId: options.cameraDeviceId
  101. });
  102. }
  103. constraints.video.optional.push({ googLeakyBucket: true });
  104. setResolutionConstraints(constraints, options.resolution);
  105. }
  106. if (um.indexOf('audio') >= 0) {
  107. if (RTCBrowserType.isReactNative()) {
  108. // The react-native-webrtc project that we're currently using
  109. // expects the audio constraint to be a boolean.
  110. constraints.audio = true;
  111. } else if (!RTCBrowserType.isFirefox()) {
  112. // same behaviour as true
  113. constraints.audio = { mandatory: {}, optional: []};
  114. if (options.micDeviceId) {
  115. // new style of setting the device id (FF only)
  116. constraints.audio.deviceId = options.micDeviceId;
  117. // old style
  118. constraints.audio.optional.push({
  119. sourceId: options.micDeviceId
  120. });
  121. }
  122. // if it is good enough for hangouts...
  123. constraints.audio.optional.push(
  124. {googEchoCancellation: true},
  125. {googAutoGainControl: true},
  126. {googNoiseSuppression: true},
  127. {googHighpassFilter: true},
  128. {googNoiseSuppression2: true},
  129. {googEchoCancellation2: true},
  130. {googAutoGainControl2: true}
  131. );
  132. } else {
  133. if (options.micDeviceId) {
  134. constraints.audio = {
  135. mandatory: {},
  136. deviceId: options.micDeviceId, // new style
  137. optional: [{
  138. sourceId: options.micDeviceId // old style
  139. }]};
  140. } else {
  141. constraints.audio = true;
  142. }
  143. }
  144. }
  145. if (um.indexOf('screen') >= 0) {
  146. if (RTCBrowserType.isChrome()) {
  147. constraints.video = {
  148. mandatory: {
  149. chromeMediaSource: "screen",
  150. googLeakyBucket: true,
  151. maxWidth: window.screen.width,
  152. maxHeight: window.screen.height,
  153. maxFrameRate: 3
  154. },
  155. optional: []
  156. };
  157. } else if (RTCBrowserType.isTemasysPluginUsed()) {
  158. constraints.video = {
  159. optional: [
  160. {
  161. sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
  162. }
  163. ]
  164. };
  165. } else if (RTCBrowserType.isFirefox()) {
  166. constraints.video = {
  167. mozMediaSource: "window",
  168. mediaSource: "window"
  169. };
  170. } else {
  171. var errmsg
  172. = "'screen' WebRTC media source is supported only in Chrome"
  173. + " and with Temasys plugin";
  174. GlobalOnErrorHandler.callErrorHandler(new Error(errmsg));
  175. logger.error(errmsg);
  176. }
  177. }
  178. if (um.indexOf('desktop') >= 0) {
  179. constraints.video = {
  180. mandatory: {
  181. chromeMediaSource: "desktop",
  182. chromeMediaSourceId: options.desktopStream,
  183. googLeakyBucket: true,
  184. maxWidth: window.screen.width,
  185. maxHeight: window.screen.height,
  186. maxFrameRate: 3
  187. },
  188. optional: []
  189. };
  190. }
  191. if (options.bandwidth) {
  192. if (!constraints.video) {
  193. //same behaviour as true
  194. constraints.video = {mandatory: {}, optional: []};
  195. }
  196. constraints.video.optional.push({bandwidth: options.bandwidth});
  197. }
  198. if(options.minFps || options.maxFps || options.fps) {
  199. // for some cameras it might be necessary to request 30fps
  200. // so they choose 30fps mjpg over 10fps yuy2
  201. if (!constraints.video) {
  202. // same behaviour as true;
  203. constraints.video = {mandatory: {}, optional: []};
  204. }
  205. if(options.minFps || options.fps) {
  206. options.minFps = options.minFps || options.fps; //Fall back to options.fps for backwards compatibility
  207. constraints.video.mandatory.minFrameRate = options.minFps;
  208. }
  209. if(options.maxFps) {
  210. constraints.video.mandatory.maxFrameRate = options.maxFps;
  211. }
  212. }
  213. // We enable fake audio for both audio and video tracks: the fake audio & video seem
  214. // to work only when enabled in a single getUserMedia call, and we cannot get fake audio separately from fake video.
  215. // This can later be a problem with some of the tests.
  216. if(RTCBrowserType.isFirefox() && options.firefox_fake_device)
  217. {
  218. // seems to be fixed now; removing this experimental fix, as having
  219. // multiple audio tracks breaks the tests
  220. //constraints.audio = true;
  221. constraints.fake = true;
  222. }
  223. return constraints;
  224. }
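/*
 * Illustrative sketch (not part of the original file): on Chrome,
 * getConstraints(['audio'], { micDeviceId: 'abc' }) would produce roughly
 *
 *   {
 *     audio: {
 *       mandatory: {},
 *       deviceId: 'abc',
 *       optional: [
 *         { sourceId: 'abc' },
 *         { googEchoCancellation: true },
 *         { googAutoGainControl: true },
 *         // ... remaining goog* entries
 *       ]
 *     },
 *     video: false
 *   }
 *
 * while on Firefox the same call would yield just
 * { audio: { mandatory: {}, deviceId: 'abc', optional: [{ sourceId: 'abc' }] },
 *   video: false }.
 */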
  225. function setAvailableDevices(um, available) {
  226. if (um.indexOf("video") != -1) {
  227. devices.video = available;
  228. }
  229. if (um.indexOf("audio") != -1) {
  230. devices.audio = available;
  231. }
  232. eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, devices);
  233. }
  234. /**
  235. * Checks if new list of available media devices differs from previous one.
  236. * @param {MediaDeviceInfo[]} newDevices - list of new devices.
  237. * @returns {boolean} - true if list is different, false otherwise.
  238. */
  239. function compareAvailableMediaDevices(newDevices) {
  240. if (newDevices.length !== currentlyAvailableMediaDevices.length) {
  241. return true;
  242. }
  243. return newDevices.map(mediaDeviceInfoToJSON).sort().join('') !==
  244. currentlyAvailableMediaDevices.map(mediaDeviceInfoToJSON).sort().join('');
  245. function mediaDeviceInfoToJSON(info) {
  246. return JSON.stringify({
  247. kind: info.kind,
  248. deviceId: info.deviceId,
  249. groupId: info.groupId,
  250. label: info.label,
  251. facing: info.facing
  252. });
  253. }
  254. }
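/*
 * Illustrative sketch (not part of the original file): the comparison is
 * order-insensitive because both lists are serialized and sorted before being
 * joined, e.g. with camA, micB, camC standing for MediaDeviceInfo-like objects:
 *
 *   currentlyAvailableMediaDevices = [camA, micB];
 *   compareAvailableMediaDevices([micB, camA]); // false - same devices
 *   compareAvailableMediaDevices([micB]);       // true  - length differs
 *   compareAvailableMediaDevices([micB, camC]); // true  - different camera
 */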
  255. /**
  256. * Periodically polls enumerateDevices() method to check if list of media
  257. * devices has changed. This is a temporary workaround until the 'devicechange'
  258. * event is supported by browsers.
  259. */
  260. function pollForAvailableMediaDevices() {
  261. // Here we use the plain navigator.mediaDevices.enumerateDevices instead of the
  262. // wrapped one because we only need to know that the devices changed; labels
  263. // do not matter. This fixes the situation when we have no devices initially
  264. // and then plug in a new one.
  265. if (rawEnumerateDevicesWithCallback) {
  266. rawEnumerateDevicesWithCallback(function (devices) {
  267. if (compareAvailableMediaDevices(devices)) {
  268. onMediaDevicesListChanged(devices);
  269. }
  270. window.setTimeout(pollForAvailableMediaDevices,
  271. AVAILABLE_DEVICES_POLL_INTERVAL_TIME);
  272. });
  273. }
  274. }
  275. /**
  276. * Event handler for the 'devicechange' event.
  277. * @param {MediaDeviceInfo[]} devices - list of media devices.
  278. * @emits RTCEvents.DEVICE_LIST_CHANGED
  279. */
  280. function onMediaDevicesListChanged(devices) {
  281. currentlyAvailableMediaDevices = devices.slice(0);
  282. logger.info('list of media devices has changed:', currentlyAvailableMediaDevices);
  283. var videoInputDevices = currentlyAvailableMediaDevices.filter(function (d) {
  284. return d.kind === 'videoinput';
  285. }),
  286. audioInputDevices = currentlyAvailableMediaDevices.filter(function (d) {
  287. return d.kind === 'audioinput';
  288. }),
  289. videoInputDevicesWithEmptyLabels = videoInputDevices.filter(
  290. function (d) {
  291. return d.label === '';
  292. }),
  293. audioInputDevicesWithEmptyLabels = audioInputDevices.filter(
  294. function (d) {
  295. return d.label === '';
  296. });
  297. if (videoInputDevices.length &&
  298. videoInputDevices.length === videoInputDevicesWithEmptyLabels.length) {
  299. setAvailableDevices(['video'], false);
  300. }
  301. if (audioInputDevices.length &&
  302. audioInputDevices.length === audioInputDevicesWithEmptyLabels.length) {
  303. setAvailableDevices(['audio'], false);
  304. }
  305. eventEmitter.emit(RTCEvents.DEVICE_LIST_CHANGED, devices);
  306. }
  307. // In the case of IE we continue from the 'onReady' callback
  308. // passed to the RTCUtils constructor. It will be invoked by the Temasys plugin
  309. // once it is initialized.
  310. function onReady (options, GUM) {
  311. rtcReady = true;
  312. eventEmitter.emit(RTCEvents.RTC_READY, true);
  313. screenObtainer.init(options, GUM);
  314. if (isDeviceChangeEventSupported && RTCUtils.isDeviceListAvailable()) {
  315. navigator.mediaDevices.addEventListener('devicechange', function () {
  316. RTCUtils.enumerateDevices(onMediaDevicesListChanged);
  317. });
  318. } else if (RTCUtils.isDeviceListAvailable()) {
  319. pollForAvailableMediaDevices();
  320. }
  321. }
  322. /**
  323. * Apply function with arguments if function exists.
  324. * Do nothing if function not provided.
  325. * @param {function} [fn] function to apply
  326. * @param {Array} [args=[]] arguments for function
  327. */
  328. function maybeApply(fn, args) {
  329. if (fn) {
  330. fn.apply(null, args || []);
  331. }
  332. }
  333. var getUserMediaStatus = {
  334. initialized: false,
  335. callbacks: []
  336. };
  337. /**
  338. * Wrap `getUserMedia` to allow others to know if it was executed at least
  339. * once or not. Wrapper function uses `getUserMediaStatus` object.
  340. * @param {Function} getUserMedia native function
  341. * @returns {Function} wrapped function
  342. */
  343. function wrapGetUserMedia(getUserMedia) {
  344. return function (constraints, successCallback, errorCallback) {
  345. getUserMedia(constraints, function (stream) {
  346. maybeApply(successCallback, [stream]);
  347. if (!getUserMediaStatus.initialized) {
  348. getUserMediaStatus.initialized = true;
  349. getUserMediaStatus.callbacks.forEach(function (callback) {
  350. callback();
  351. });
  352. getUserMediaStatus.callbacks.length = 0;
  353. }
  354. }, function (error) {
  355. maybeApply(errorCallback, [error]);
  356. });
  357. };
  358. }
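/*
 * Illustrative sketch (not part of the original file): the wrapper is what lets
 * afterUserMediaInitialized() below defer work until the first successful
 * getUserMedia call, e.g.
 *
 *   var gum = wrapGetUserMedia(navigator.webkitGetUserMedia.bind(navigator));
 *   afterUserMediaInitialized(function () {
 *       // runs only once gum() has delivered its first stream
 *   });
 *   gum({ audio: true },
 *       function (stream) { logger.log('got stream', stream.id); },
 *       function (error) { logger.error(error); });
 */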
  359. /**
  360. * Execute function after getUserMedia was executed at least once.
  361. * @param {Function} callback function to execute after getUserMedia
  362. */
  363. function afterUserMediaInitialized(callback) {
  364. if (getUserMediaStatus.initialized) {
  365. callback();
  366. } else {
  367. getUserMediaStatus.callbacks.push(callback);
  368. }
  369. }
  370. /**
  371. * Wrapper function which makes enumerateDevices wait
  372. * until someone executes getUserMedia for the first time.
  373. * @param {Function} enumerateDevices native function
  374. * @returns {Function} wrapped function
  375. */
  376. function wrapEnumerateDevices(enumerateDevices) {
  377. return function (callback) {
  378. // enumerate devices only after initial getUserMedia
  379. afterUserMediaInitialized(function () {
  380. enumerateDevices().then(callback, function (err) {
  381. logger.error('cannot enumerate devices: ', err);
  382. callback([]);
  383. });
  384. });
  385. };
  386. }
  387. /**
  388. * Use the old MediaStreamTrack API to get the devices list and
  389. * convert it to the enumerateDevices format.
  390. * @param {Function} callback function to call when the devices list is received.
  391. */
  392. function enumerateDevicesThroughMediaStreamTrack (callback) {
  393. MediaStreamTrack.getSources(function (sources) {
  394. callback(sources.map(convertMediaStreamTrackSource));
  395. });
  396. }
  397. /**
  398. * Converts MediaStreamTrack Source to enumerateDevices format.
  399. * @param {Object} source
  400. */
  401. function convertMediaStreamTrackSource(source) {
  402. var kind = (source.kind || '').toLowerCase();
  403. return {
  404. facing: source.facing || null,
  405. label: source.label,
  406. // theoretically deprecated MediaStreamTrack.getSources should
  407. // not return 'audiooutput' devices but let's handle it in any
  408. // case
  409. kind: kind
  410. ? (kind === 'audiooutput' ? kind : kind + 'input')
  411. : null,
  412. deviceId: source.id,
  413. groupId: source.groupId || null
  414. };
  415. }
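/*
 * Illustrative sketch (not part of the original file): a legacy
 * MediaStreamTrack.getSources() entry such as
 *
 *   { id: 'abc123', kind: 'audio', label: 'Built-in Microphone', facing: '' }
 *
 * would be converted to the enumerateDevices()-style object
 *
 *   { deviceId: 'abc123', kind: 'audioinput', label: 'Built-in Microphone',
 *     facing: null, groupId: null }
 */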
  416. function obtainDevices(options) {
  417. if(!options.devices || options.devices.length === 0) {
  418. return options.successCallback(options.streams || {});
  419. }
  420. var device = options.devices.splice(0, 1);
  421. var devices = [];
  422. devices.push(device);
  423. options.deviceGUM[device](function (stream) {
  424. options.streams = options.streams || {};
  425. options.streams[device] = stream;
  426. obtainDevices(options);
  427. },
  428. function (error) {
  429. Object.keys(options.streams).forEach(function(device) {
  430. RTCUtils.stopMediaStream(options.streams[device]);
  431. });
  432. logger.error(
  433. "failed to obtain " + device + " stream - stop", error);
  434. options.errorCallback(error);
  435. });
  436. }
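/*
 * Illustrative sketch (not part of the original file): obtainDevices() consumes
 * options.devices one entry at a time, so an invocation along the lines of
 *
 *   obtainDevices({
 *       devices: ['audio', 'video'],
 *       streams: {},
 *       deviceGUM: { audio: audioGUM, video: videoGUM }, // per-device wrappers
 *       successCallback: function (streams) {
 *           // streams ends up as { audio: <MediaStream>, video: <MediaStream> }
 *       },
 *       errorCallback: function (error) { logger.error(error); }
 *   });
 *
 * recurses until every requested device has been obtained (or one fails, in
 * which case all already obtained streams are stopped).
 */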
  437. /**
  438. * Handles the newly created Media Streams.
  439. * @param streams the new Media Streams
  440. * @param resolution the resolution of the video streams
  441. * @returns {*[]} object that describes the new streams
  442. */
  443. function handleLocalStream(streams, resolution) {
  444. var audioStream, videoStream, desktopStream, res = [];
  445. // XXX The function obtainAudioAndVideoPermissions has examined the type of
  446. // the browser, its capabilities, etc. and has taken the decision whether to
  447. // invoke getUserMedia per device (e.g. Firefox) or once for both audio and
  448. // video (e.g. Chrome). In order to not duplicate the logic here, examine
  449. // the specified streams and figure out what we've received based on
  450. // obtainAudioAndVideoPermissions' decision.
  451. if (streams) {
  452. // As mentioned above, certain types of browser (e.g. Chrome) support
  453. // (with a result which meets our requirements expressed below) calling
  454. // getUserMedia once for both audio and video.
  455. var audioVideo = streams.audioVideo;
  456. if (audioVideo) {
  457. var audioTracks = audioVideo.getAudioTracks();
  458. if (audioTracks.length) {
  459. audioStream = new webkitMediaStream();
  460. for (var i = 0; i < audioTracks.length; i++) {
  461. audioStream.addTrack(audioTracks[i]);
  462. }
  463. }
  464. var videoTracks = audioVideo.getVideoTracks();
  465. if (videoTracks.length) {
  466. videoStream = new webkitMediaStream();
  467. for (var j = 0; j < videoTracks.length; j++) {
  468. videoStream.addTrack(videoTracks[j]);
  469. }
  470. }
  471. } else {
  472. // On other types of browser (e.g. Firefox) we choose (namely,
  473. // obtainAudioAndVideoPermissions) to call getUserMedia per device
  474. // (type).
  475. audioStream = streams.audio;
  476. videoStream = streams.video;
  477. }
  478. // Again, different choices on different types of browser.
  479. desktopStream = streams.desktopStream || streams.desktop;
  480. }
  481. if (desktopStream) {
  482. res.push({
  483. stream: desktopStream,
  484. track: desktopStream.getVideoTracks()[0],
  485. mediaType: MediaType.VIDEO,
  486. videoType: VideoType.DESKTOP
  487. });
  488. }
  489. if (audioStream) {
  490. res.push({
  491. stream: audioStream,
  492. track: audioStream.getAudioTracks()[0],
  493. mediaType: MediaType.AUDIO,
  494. videoType: null
  495. });
  496. }
  497. if (videoStream) {
  498. res.push({
  499. stream: videoStream,
  500. track: videoStream.getVideoTracks()[0],
  501. mediaType: MediaType.VIDEO,
  502. videoType: VideoType.CAMERA,
  503. resolution: resolution
  504. });
  505. }
  506. return res;
  507. }
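/*
 * Illustrative sketch (not part of the original file): on Chrome, where a single
 * getUserMedia call returns one combined stream, a call such as
 *
 *   handleLocalStream({ audioVideo: stream }, '720')
 *
 * would return roughly
 *
 *   [
 *     { stream: <audio-only stream>, track: <audio track>,
 *       mediaType: MediaType.AUDIO, videoType: null },
 *     { stream: <video-only stream>, track: <video track>,
 *       mediaType: MediaType.VIDEO, videoType: VideoType.CAMERA,
 *       resolution: '720' }
 *   ]
 */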
  508. /**
  509. * Wraps original attachMediaStream function to set current audio output device
  510. * if this is supported.
  511. * @param {Function} origAttachMediaStream
  512. * @returns {Function}
  513. */
  514. function wrapAttachMediaStream(origAttachMediaStream) {
  515. return function(element, stream) {
  516. var res = origAttachMediaStream.apply(RTCUtils, arguments);
  517. if (RTCUtils.isDeviceChangeAvailable('output') &&
  518. stream.getAudioTracks && stream.getAudioTracks().length) {
  519. element.setSinkId(RTCUtils.getAudioOutputDevice())
  520. .catch(function (ex) {
  521. GlobalOnErrorHandler.callUnhandledRejectionHandler(
  522. {promise: this, reason: ex});
  523. logger.warn('Failed to set audio output device for the ' +
  524. 'element. Default audio output device will be used ' +
  525. 'instead',
  526. element, ex);
  527. });
  528. }
  529. return res;
  530. }
  531. }
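/*
 * Illustrative sketch (not part of the original file): once init() has installed
 * the wrapper, a call such as
 *
 *   RTCUtils.attachMediaStream(audioElement, audioStream);
 *
 * first runs the browser-specific attach logic and then, when setSinkId() is
 * available and the stream has audio tracks, routes the element to the device
 * returned by RTCUtils.getAudioOutputDevice().
 */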
  532. // The options parameter is used to pass config options. Currently only "useIPv6" is used.
  533. var RTCUtils = {
  534. init: function (options) {
  535. return new Promise(function(resolve, reject) {
  536. if (RTCBrowserType.isFirefox()) {
  537. var FFversion = RTCBrowserType.getFirefoxVersion();
  538. if (FFversion < 40) {
  539. logger.error(
  540. "Firefox version too old: " + FFversion +
  541. ". Required >= 40.");
  542. reject(new Error("Firefox version too old: " + FFversion +
  543. ". Required >= 40."));
  544. return;
  545. }
  546. this.peerconnection = mozRTCPeerConnection;
  547. this.getUserMedia = wrapGetUserMedia(navigator.mozGetUserMedia.bind(navigator));
  548. this.enumerateDevices = wrapEnumerateDevices(
  549. navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices)
  550. );
  551. this.pc_constraints = {};
  552. this.attachMediaStream = wrapAttachMediaStream(function (element, stream) {
  553. // srcObject is being standardized and FF will eventually
  554. // support that unprefixed. FF also supports the
  555. // "element.src = URL.createObjectURL(...)" combo, but that
  556. // will be deprecated in favour of srcObject.
  557. //
  558. // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
  559. // https://github.com/webrtc/samples/issues/302
  560. if (!element)
  561. return;
  562. element.mozSrcObject = stream;
  563. element.play();
  564. return element;
  565. });
  566. this.getStreamID = function (stream) {
  567. var id = stream.id;
  568. if (!id) {
  569. var tracks = stream.getVideoTracks();
  570. if (!tracks || tracks.length === 0) {
  571. tracks = stream.getAudioTracks();
  572. }
  573. id = tracks[0].id;
  574. }
  575. return SDPUtil.filter_special_chars(id);
  576. };
  577. this.getVideoSrc = function (element) {
  578. if (!element)
  579. return null;
  580. return element.mozSrcObject;
  581. };
  582. this.setVideoSrc = function (element, src) {
  583. if (element)
  584. element.mozSrcObject = src;
  585. };
  586. RTCSessionDescription = mozRTCSessionDescription;
  587. RTCIceCandidate = mozRTCIceCandidate;
  588. } else if (RTCBrowserType.isChrome() ||
  589. RTCBrowserType.isOpera() ||
  590. RTCBrowserType.isNWJS() ||
  591. RTCBrowserType.isReactNative()) {
  592. this.peerconnection = webkitRTCPeerConnection;
  593. var getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
  594. if (navigator.mediaDevices) {
  595. this.getUserMedia = wrapGetUserMedia(getUserMedia);
  596. this.enumerateDevices = wrapEnumerateDevices(
  597. navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices)
  598. );
  599. } else {
  600. this.getUserMedia = getUserMedia;
  601. this.enumerateDevices = enumerateDevicesThroughMediaStreamTrack;
  602. }
  603. this.attachMediaStream = wrapAttachMediaStream(function (element, stream) {
  604. // saves the created url for the stream, so we can reuse it
  605. // and not keep creating urls
  606. if (!stream.jitsiObjectURL) {
  607. stream.jitsiObjectURL
  608. = webkitURL.createObjectURL(stream);
  609. }
  610. element.src = stream.jitsiObjectURL;
  611. return element;
  612. });
  613. this.getStreamID = function (stream) {
  614. // Streams from FF endpoints have the characters '{' and '}'
  615. // that make jQuery choke.
  616. return SDPUtil.filter_special_chars(stream.id);
  617. };
  618. this.getVideoSrc = function (element) {
  619. return element ? element.getAttribute("src") : null;
  620. };
  621. this.setVideoSrc = function (element, src) {
  622. if (element)
  623. element.setAttribute("src", src || '');
  624. };
  625. // DTLS should now be enabled by default but..
  626. this.pc_constraints = {'optional': [
  627. {'DtlsSrtpKeyAgreement': 'true'}
  628. ]};
  629. if (options.useIPv6) {
  630. // https://code.google.com/p/webrtc/issues/detail?id=2828
  631. this.pc_constraints.optional.push({googIPv6: true});
  632. }
  633. if (RTCBrowserType.isAndroid()) {
  634. this.pc_constraints = {}; // disable DTLS on Android
  635. }
  636. if (!webkitMediaStream.prototype.getVideoTracks) {
  637. webkitMediaStream.prototype.getVideoTracks = function () {
  638. return this.videoTracks;
  639. };
  640. }
  641. if (!webkitMediaStream.prototype.getAudioTracks) {
  642. webkitMediaStream.prototype.getAudioTracks = function () {
  643. return this.audioTracks;
  644. };
  645. }
  646. }
  647. // Detect IE/Safari
  648. else if (RTCBrowserType.isTemasysPluginUsed()) {
  649. //AdapterJS.WebRTCPlugin.setLogLevel(
  650. // AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
  651. var self = this;
  652. AdapterJS.webRTCReady(function (isPlugin) {
  653. self.peerconnection = RTCPeerConnection;
  654. self.getUserMedia = window.getUserMedia;
  655. self.enumerateDevices = enumerateDevicesThroughMediaStreamTrack;
  656. self.attachMediaStream = wrapAttachMediaStream(function (element, stream) {
  657. if (stream.id === "dummyAudio" || stream.id === "dummyVideo") {
  658. return;
  659. }
  660. var isVideoStream = !!stream.getVideoTracks().length;
  661. if (isVideoStream && !$(element).is(':visible')) {
  662. throw new Error('video element must be visible to attach video stream');
  663. }
  664. return attachMediaStream(element, stream);
  665. });
  666. self.getStreamID = function (stream) {
  667. return SDPUtil.filter_special_chars(stream.label);
  668. };
  669. self.getVideoSrc = function (element) {
  670. if (!element) {
  671. logger.warn("Attempt to get video SRC of null element");
  672. return null;
  673. }
  674. var children = element.children;
  675. for (var i = 0; i !== children.length; ++i) {
  676. if (children[i].name === 'streamId') {
  677. return children[i].value;
  678. }
  679. }
  680. //logger.info(element.id + " SRC: " + src);
  681. return null;
  682. };
  683. self.setVideoSrc = function (element, src) {
  684. //logger.info("Set video src: ", element, src);
  685. if (!src) {
  686. attachMediaStream(element, null);
  687. } else {
  688. AdapterJS.WebRTCPlugin.WaitForPluginReady();
  689. var stream
  690. = AdapterJS.WebRTCPlugin.plugin
  691. .getStreamWithId(
  692. AdapterJS.WebRTCPlugin.pageId, src);
  693. attachMediaStream(element, stream);
  694. }
  695. };
  696. onReady(options, self.getUserMediaWithConstraints);
  697. resolve();
  698. });
  699. } else {
  700. var errmsg = 'Browser does not appear to be WebRTC-capable';
  701. try {
  702. logger.error(errmsg);
  703. } catch (e) {
  704. }
  705. reject(new Error(errmsg));
  706. return;
  707. }
  708. // Call onReady() if Temasys plugin is not used
  709. if (!RTCBrowserType.isTemasysPluginUsed()) {
  710. onReady(options, this.getUserMediaWithConstraints);
  711. resolve();
  712. }
  713. }.bind(this));
  714. },
  715. /**
  716. * @param {string[]} um required user media types
  717. * @param {function} success_callback
  718. * @param {Function} failure_callback
  719. * @param {Object} [options] optional parameters
  720. * @param {string} options.resolution
  721. * @param {number} options.bandwidth
  722. * @param {number} options.fps
  723. * @param {string} options.desktopStream
  724. * @param {string} options.cameraDeviceId
  725. * @param {string} options.micDeviceId
  726. **/
  727. getUserMediaWithConstraints: function ( um, success_callback, failure_callback, options) {
  728. options = options || {};
  729. var resolution = options.resolution;
  730. var constraints = getConstraints(um, options);
  731. logger.info("Get media constraints", constraints);
  732. try {
  733. this.getUserMedia(constraints,
  734. function (stream) {
  735. logger.log('onUserMediaSuccess');
  736. setAvailableDevices(um, true);
  737. success_callback(stream);
  738. },
  739. function (error) {
  740. setAvailableDevices(um, false);
  741. logger.warn('Failed to get access to local media. Error ',
  742. error, constraints);
  743. if (failure_callback) {
  744. failure_callback(
  745. new JitsiTrackError(error, constraints, um));
  746. }
  747. });
  748. } catch (e) {
  749. logger.error('GUM failed: ', e);
  750. if (failure_callback) {
  751. failure_callback(new JitsiTrackError(e, constraints, um));
  752. }
  753. }
  754. },
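/*
 * Illustrative sketch (not part of the original file): requesting a camera and
 * microphone at 720p might look like
 *
 *   RTCUtils.getUserMediaWithConstraints(
 *       ['audio', 'video'],
 *       function (stream) { logger.log('got stream', stream.id); },
 *       function (error) { logger.error(error); },
 *       { resolution: '720', micDeviceId: 'abc', cameraDeviceId: 'def' });
 *
 * On failure the callback receives a JitsiTrackError wrapping the original
 * error, the constraints that were used and the requested media types.
 */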
  755. /**
  756. * Creates the local MediaStreams.
  757. * @param {Object} [options] optional parameters
  758. * @param {Array} options.devices the devices that will be requested
  759. * @param {string} options.resolution resolution constraints
  760. * @param {bool} options.dontCreateJitsiTrack if <tt>true</tt>, objects with the following structure {stream: the Media Stream,
  761. * type: "audio" or "video", videoType: "camera" or "desktop"}
  762. * will be returned through the Promise, otherwise JitsiTrack objects will be returned.
  763. * @param {string} options.cameraDeviceId
  764. * @param {string} options.micDeviceId
  765. * @returns {*} Promise object that will receive the new JitsiTracks
  766. */
  767. obtainAudioAndVideoPermissions: function (options) {
  768. var self = this;
  769. options = options || {};
  770. return new Promise(function (resolve, reject) {
  771. var successCallback = function (stream) {
  772. resolve(handleLocalStream(stream, options.resolution));
  773. };
  774. options.devices = options.devices || ['audio', 'video'];
  775. if(!screenObtainer.isSupported()
  776. && options.devices.indexOf("desktop") !== -1){
  777. return reject(new Error("Desktop sharing is not supported!"));
  778. }
  779. if (RTCBrowserType.isFirefox() ||
  780. RTCBrowserType.isTemasysPluginUsed()) {
  781. var GUM = function (device, s, e) {
  782. this.getUserMediaWithConstraints(device, s, e, options);
  783. };
  784. var deviceGUM = {
  785. "audio": GUM.bind(self, ["audio"]),
  786. "video": GUM.bind(self, ["video"])
  787. };
  788. if(screenObtainer.isSupported()){
  789. deviceGUM["desktop"] = screenObtainer.obtainStream.bind(
  790. screenObtainer);
  791. }
  792. // With FF/IE we can't split the stream into audio and video because FF
  793. // doesn't support media stream constructors. So, we need to get the
  794. // audio stream separately from the video stream using two distinct GUM
  795. // calls. Not very user friendly :-( but we don't have many other
  796. // options either.
  797. //
  798. // Note that we pack those 2 streams in a single object and pass it to
  799. // the successCallback method.
  800. obtainDevices({
  801. devices: options.devices,
  802. streams: [],
  803. successCallback: successCallback,
  804. errorCallback: reject,
  805. deviceGUM: deviceGUM
  806. });
  807. } else {
  808. var hasDesktop = options.devices.indexOf('desktop') > -1;
  809. if (hasDesktop) {
  810. options.devices.splice(options.devices.indexOf("desktop"), 1);
  811. }
  812. options.resolution = options.resolution || '360';
  813. if(options.devices.length) {
  814. this.getUserMediaWithConstraints(
  815. options.devices,
  816. function (stream) {
  817. var audioDeviceRequested = options.devices.indexOf("audio") !== -1;
  818. var videoDeviceRequested = options.devices.indexOf("video") !== -1;
  819. var audioTracksReceived = !!stream.getAudioTracks().length;
  820. var videoTracksReceived = !!stream.getVideoTracks().length;
  821. if((audioDeviceRequested && !audioTracksReceived) ||
  822. (videoDeviceRequested && !videoTracksReceived))
  823. {
  824. self.stopMediaStream(stream);
  825. // We get here if we requested the
  826. // 'audio' or 'video' devices or both, but
  827. // didn't get the corresponding MediaStreamTrack in the
  828. // response stream. We don't know why
  829. // this happened, so reject with a general error.
  830. var devices = [];
  831. if (audioDeviceRequested && !audioTracksReceived) {
  832. devices.push("audio");
  833. }
  834. if (videoDeviceRequested && !videoTracksReceived) {
  835. devices.push("video");
  836. }
  837. reject(new JitsiTrackError(
  838. { name: "UnknownError" },
  839. getConstraints(options.devices, options),
  840. devices)
  841. );
  842. return;
  843. }
  844. if(hasDesktop) {
  845. screenObtainer.obtainStream(
  846. function (desktopStream) {
  847. successCallback({audioVideo: stream,
  848. desktopStream: desktopStream});
  849. }, function (error) {
  850. self.stopMediaStream(stream);
  851. reject(error);
  852. });
  853. } else {
  854. successCallback({audioVideo: stream});
  855. }
  856. },
  857. function (error) {
  858. reject(error);
  859. },
  860. options);
  861. } else if (hasDesktop) {
  862. screenObtainer.obtainStream(
  863. function (stream) {
  864. successCallback({desktopStream: stream});
  865. }, function (error) {
  866. reject(error);
  867. });
  868. }
  869. }
  870. }.bind(this));
  871. },
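/*
 * Illustrative sketch (not part of the original file): a typical caller would do
 * something like
 *
 *   RTCUtils.obtainAudioAndVideoPermissions(
 *           { devices: ['audio', 'video'], resolution: '360' })
 *       .then(function (descriptors) {
 *           // array of { stream, track, mediaType, videoType, ... } objects
 *           // as produced by handleLocalStream()
 *       })
 *       .catch(function (error) {
 *           // JitsiTrackError, or Error when e.g. desktop sharing is unsupported
 *       });
 */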
  872. addListener: function (eventType, listener) {
  873. eventEmitter.on(eventType, listener);
  874. },
  875. removeListener: function (eventType, listener) {
  876. eventEmitter.removeListener(eventType, listener);
  877. },
  878. getDeviceAvailability: function () {
  879. return devices;
  880. },
  881. isRTCReady: function () {
  882. return rtcReady;
  883. },
  884. /**
  885. * Checks if it's possible to enumerate available cameras/microphones.
  886. * @returns {boolean} true if available, false otherwise.
  887. */
  888. isDeviceListAvailable: function () {
  889. var isEnumerateDevicesAvailable
  890. = navigator.mediaDevices && navigator.mediaDevices.enumerateDevices;
  891. if (isEnumerateDevicesAvailable) {
  892. return true;
  893. }
  894. return (MediaStreamTrack && MediaStreamTrack.getSources)? true : false;
  895. },
  896. /**
  897. * Returns true if changing the input (camera / microphone) or output
  898. * (audio) device is supported and false if not.
  899. * @param {string} [deviceType] - type of device to change. Default is
  900. * undefined or 'input'; use 'output' for audio output device change.
  901. * @returns {boolean} true if available, false otherwise.
  902. */
  903. isDeviceChangeAvailable: function (deviceType) {
  904. return deviceType === 'output' || deviceType === 'audiooutput'
  905. ? isAudioOutputDeviceChangeAvailable
  906. : RTCBrowserType.isChrome() ||
  907. RTCBrowserType.isFirefox() ||
  908. RTCBrowserType.isOpera() ||
  909. RTCBrowserType.isTemasysPluginUsed()||
  910. RTCBrowserType.isNWJS();
  911. },
  912. /**
  913. * A method to handle stopping of the stream.
  914. * One point to handle the differences in various implementations.
  915. * @param mediaStream MediaStream object to stop.
  916. */
  917. stopMediaStream: function (mediaStream) {
  918. mediaStream.getTracks().forEach(function (track) {
  919. // stop() not supported with IE
  920. if (!RTCBrowserType.isTemasysPluginUsed() && track.stop) {
  921. track.stop();
  922. }
  923. });
  924. // leave stop for implementation still using it
  925. if (mediaStream.stop) {
  926. mediaStream.stop();
  927. }
  928. // if we have done createObjectURL, lets clean it
  929. if (mediaStream.jitsiObjectURL) {
  930. webkitURL.revokeObjectURL(mediaStream.jitsiObjectURL);
  931. }
  932. },
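/*
 * Illustrative sketch (not part of the original file): a caller that is done
 * with a previously obtained stream would simply do
 *
 *   RTCUtils.stopMediaStream(localStream);
 *
 * which stops each track (where supported), calls stop() on the stream itself
 * for older implementations and revokes any object URL created when the stream
 * was attached to an element.
 */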
  933. /**
  934. * Returns whether the desktop sharing is enabled or not.
  935. * @returns {boolean}
  936. */
  937. isDesktopSharingEnabled: function () {
  938. return screenObtainer.isSupported();
  939. },
  940. /**
  941. * Sets current audio output device.
  942. * @param {string} deviceId - id of 'audiooutput' device from
  943. * navigator.mediaDevices.enumerateDevices(), 'default' for default
  944. * device
  945. * @returns {Promise} - resolves when audio output is changed, is rejected
  946. * otherwise
  947. */
  948. setAudioOutputDevice: function (deviceId) {
  949. if (!this.isDeviceChangeAvailable('output')) {
  950. return Promise.reject(
  951. new Error('Audio output device change is not supported'));
  952. }
  953. return featureDetectionAudioEl.setSinkId(deviceId)
  954. .then(function() {
  955. audioOutputDeviceId = deviceId;
  956. logger.log('Audio output device set to ' + deviceId);
  957. eventEmitter.emit(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
  958. deviceId);
  959. });
  960. },
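/*
 * Illustrative sketch (not part of the original file): switching playback to a
 * device picked from enumerateDevices() might look like
 *
 *   RTCUtils.setAudioOutputDevice(selectedDeviceId)
 *       .then(function () {
 *           logger.log('audio output is now ' + RTCUtils.getAudioOutputDevice());
 *       })
 *       .catch(function (error) {
 *           logger.error('failed to change audio output device', error);
 *       });
 */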
  961. /**
  962. * Returns the currently used audio output device id; 'default' stands for
  963. * the default device.
  964. * @returns {string}
  965. */
  966. getAudioOutputDevice: function () {
  967. return audioOutputDeviceId;
  968. }
  969. };
  970. module.exports = RTCUtils;