modified lib-jitsi-meet dev repo

RTCUtils.js (31 KB)

/* global config, require, attachMediaStream, getUserMedia,
   RTCPeerConnection, RTCSessionDescription, RTCIceCandidate, MediaStreamTrack,
   mozRTCPeerConnection, mozRTCSessionDescription, mozRTCIceCandidate,
   webkitRTCPeerConnection, webkitMediaStream, webkitURL
*/
/* jshint -W101 */
var logger = require("jitsi-meet-logger").getLogger(__filename);
var RTCBrowserType = require("./RTCBrowserType");
var Resolutions = require("../../service/RTC/Resolutions");
var RTCEvents = require("../../service/RTC/RTCEvents");
var AdapterJS = require("./adapter.screenshare");
var SDPUtil = require("../xmpp/SDPUtil");
var EventEmitter = require("events");
var screenObtainer = require("./ScreenObtainer");
var JitsiTrackErrors = require("../../JitsiTrackErrors");

var eventEmitter = new EventEmitter();

var devices = {
    audio: true,
    video: true
};

var rtcReady = false;
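
/**
 * Adds width/height (and, on Android, frame rate) constraints for the
 * requested resolution to the given getUserMedia constraints object.
 * @param {Object} constraints constraints object modified in place
 * @param {string} resolution key into the Resolutions map (e.g. "720")
 */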
function setResolutionConstraints(constraints, resolution) {
    var isAndroid = RTCBrowserType.isAndroid();

    if (Resolutions[resolution]) {
        constraints.video.mandatory.minWidth = Resolutions[resolution].width;
        constraints.video.mandatory.minHeight = Resolutions[resolution].height;
    } else if (isAndroid) {
        // FIXME can't remember if the purpose of this was to always request
        // low resolution on Android? if yes it should be moved up front
        constraints.video.mandatory.minWidth = 320;
        constraints.video.mandatory.minHeight = 240;
        constraints.video.mandatory.maxFrameRate = 15;
    }

    if (constraints.video.mandatory.minWidth)
        constraints.video.mandatory.maxWidth =
            constraints.video.mandatory.minWidth;
    if (constraints.video.mandatory.minHeight)
        constraints.video.mandatory.maxHeight =
            constraints.video.mandatory.minHeight;
}
/**
 * Builds a getUserMedia constraints object for the requested media types.
 * @param {string[]} um required user media types
 *
 * @param {Object} [options={}] optional parameters
 * @param {string} options.resolution
 * @param {number} options.bandwidth
 * @param {number} options.fps
 * @param {string} options.desktopStream
 * @param {string} options.cameraDeviceId
 * @param {string} options.micDeviceId
 * @param {bool} options.firefox_fake_device
 */
function getConstraints(um, options) {
    var constraints = {audio: false, video: false};

    if (um.indexOf('video') >= 0) {
        // same behaviour as true
        constraints.video = { mandatory: {}, optional: [] };

        if (options.cameraDeviceId) {
            constraints.video.optional.push({
                sourceId: options.cameraDeviceId
            });
        }

        constraints.video.optional.push({ googLeakyBucket: true });

        setResolutionConstraints(constraints, options.resolution);
    }
    if (um.indexOf('audio') >= 0) {
        if (!RTCBrowserType.isFirefox()) {
            // same behaviour as true
            constraints.audio = { mandatory: {}, optional: [] };
            if (options.micDeviceId) {
                constraints.audio.optional.push({
                    sourceId: options.micDeviceId
                });
            }
            // if it is good enough for hangouts...
            constraints.audio.optional.push(
                {googEchoCancellation: true},
                {googAutoGainControl: true},
                {googNoiseSupression: true},
                {googHighpassFilter: true},
                {googNoisesuppression2: true},
                {googEchoCancellation2: true},
                {googAutoGainControl2: true}
            );
        } else {
            if (options.micDeviceId) {
                constraints.audio = {
                    mandatory: {},
                    optional: [{
                        sourceId: options.micDeviceId
                    }]};
            } else {
                constraints.audio = true;
            }
        }
    }
    if (um.indexOf('screen') >= 0) {
        if (RTCBrowserType.isChrome()) {
            constraints.video = {
                mandatory: {
                    chromeMediaSource: "screen",
                    googLeakyBucket: true,
                    maxWidth: window.screen.width,
                    maxHeight: window.screen.height,
                    maxFrameRate: 3
                },
                optional: []
            };
        } else if (RTCBrowserType.isTemasysPluginUsed()) {
            constraints.video = {
                optional: [
                    {
                        sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
                    }
                ]
            };
        } else if (RTCBrowserType.isFirefox()) {
            constraints.video = {
                mozMediaSource: "window",
                mediaSource: "window"
            };
        } else {
            logger.error(
                "'screen' WebRTC media source is supported only in Chrome" +
                " and with Temasys plugin");
        }
    }
    if (um.indexOf('desktop') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: options.desktopStream,
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }

    if (options.bandwidth) {
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.optional.push({bandwidth: options.bandwidth});
    }
    if (options.fps) {
        // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.mandatory.minFrameRate = options.fps;
    }

    // We enable fake devices for both audio and video: the fake audio and
    // video devices seem to work only when enabled in a single getUserMedia
    // call, so we cannot request fake audio separately from fake video.
    // This may later become a problem for some of the tests.
    if (RTCBrowserType.isFirefox() && options.firefox_fake_device) {
        // seems to be fixed now, removing this experimental fix, as having
        // multiple audio tracks break the tests
        //constraints.audio = true;
        constraints.fake = true;
    }

    return constraints;
}
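
/**
 * Updates the local device availability map for the given user media types
 * and notifies listeners via RTCEvents.AVAILABLE_DEVICES_CHANGED.
 * @param {string[]} um user media types ("audio", "video") that were requested
 * @param {boolean} available whether access to those devices was granted
 */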
function setAvailableDevices(um, available) {
    if (um.indexOf("video") != -1) {
        devices.video = available;
    }
    if (um.indexOf("audio") != -1) {
        devices.audio = available;
    }
    eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, devices);
}
// In case of IE we continue from the 'onReady' callback passed to the
// RTCUtils constructor. It will be invoked by the Temasys plugin once it
// is initialized.
function onReady (options, GUM) {
    rtcReady = true;
    eventEmitter.emit(RTCEvents.RTC_READY, true);
    screenObtainer.init(options, GUM);
}
/**
 * Apply function with arguments if function exists.
 * Do nothing if function not provided.
 * @param {function} [fn] function to apply
 * @param {Array} [args=[]] arguments for function
 */
function maybeApply(fn, args) {
    if (fn) {
        fn.apply(null, args || []);
    }
}
var getUserMediaStatus = {
    initialized: false,
    callbacks: []
};

/**
 * Wrap `getUserMedia` to allow others to know if it was executed at least
 * once or not. Wrapper function uses `getUserMediaStatus` object.
 * @param {Function} getUserMedia native function
 * @returns {Function} wrapped function
 */
function wrapGetUserMedia(getUserMedia) {
    return function (constraints, successCallback, errorCallback) {
        getUserMedia(constraints, function (stream) {
            maybeApply(successCallback, [stream]);
            if (!getUserMediaStatus.initialized) {
                getUserMediaStatus.initialized = true;
                getUserMediaStatus.callbacks.forEach(function (callback) {
                    callback();
                });
                getUserMediaStatus.callbacks.length = 0;
            }
        }, function (error) {
            maybeApply(errorCallback, [error]);
        });
    };
}
/**
 * Creates a stub device which represents the auto-selected device.
 * @param {string} kind the device kind, e.g. `audioinput` or `videoinput`
 * @returns {Object} stub device description in `enumerateDevices` format
 */
function createAutoDeviceInfo(kind) {
    return {
        facing: null,
        label: 'Auto',
        kind: kind,
        deviceId: '',
        groupId: null
    };
}
/**
 * Execute function after getUserMedia was executed at least once.
 * @param {Function} callback function to execute after getUserMedia
 */
function afterUserMediaInitialized(callback) {
    if (getUserMediaStatus.initialized) {
        callback();
    } else {
        getUserMediaStatus.callbacks.push(callback);
    }
}
/**
 * Wrapper function which makes enumerateDevices wait until someone executes
 * getUserMedia for the first time.
 * @param {Function} enumerateDevices native function
 * @returns {Function} wrapped function
 */
function wrapEnumerateDevices(enumerateDevices) {
    return function (callback) {
        // enumerate devices only after initial getUserMedia
        afterUserMediaInitialized(function () {
            enumerateDevices().then(function (devices) {
                // add auto devices
                devices.unshift(
                    createAutoDeviceInfo('audioinput'),
                    createAutoDeviceInfo('videoinput')
                );
                callback(devices);
            }, function (err) {
                console.error('cannot enumerate devices: ', err);
                // return only auto devices
                callback([createAutoDeviceInfo('audioinput'),
                    createAutoDeviceInfo('videoinput')]);
            });
        });
    };
}
/**
 * Uses the legacy MediaStreamTrack.getSources API to get the devices list
 * and converts it to the enumerateDevices format.
 * @param {Function} callback function called with the resulting devices list.
 */
function enumerateDevicesThroughMediaStreamTrack(callback) {
    MediaStreamTrack.getSources(function (sources) {
        var devices = sources.map(function (source) {
            var kind = (source.kind || '').toLowerCase();
            return {
                facing: source.facing || null,
                label: source.label,
                kind: kind ? kind + 'input' : null,
                deviceId: source.id,
                groupId: source.groupId || null
            };
        });
        // add auto devices
        devices.unshift(
            createAutoDeviceInfo('audioinput'),
            createAutoDeviceInfo('videoinput')
        );
        callback(devices);
    });
}
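
/**
 * Requests the given device types one by one with separate getUserMedia
 * calls (used for Firefox and the Temasys plugin), collecting the resulting
 * streams into options.streams.
 * @param {Object} options
 * @param {string[]} options.devices remaining device types to obtain
 * @param {Object} options.streams streams obtained so far, keyed by device type
 * @param {Object} options.deviceGUM map of device type to getUserMedia wrapper
 * @param {Function} options.successCallback called with all streams on success
 * @param {Function} options.errorCallback called with the parsed error on failure
 */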
function obtainDevices(options) {
    if (!options.devices || options.devices.length === 0) {
        return options.successCallback(options.streams || {});
    }

    var device = options.devices.splice(0, 1)[0];
    options.deviceGUM[device](
        function (stream) {
            options.streams = options.streams || {};
            options.streams[device] = stream;
            obtainDevices(options);
        },
        function (error) {
            Object.keys(options.streams).forEach(function (device) {
                RTCUtils.stopMediaStream(options.streams[device]);
            });
            logger.error(
                "failed to obtain " + device + " stream - stop", error);
            options.errorCallback(
                JitsiTrackErrors.parseError(error, [device]));
        });
}
/**
 * Handles the newly created Media Streams.
 * @param streams the new Media Streams
 * @param resolution the resolution of the video streams
 * @returns {*[]} object that describes the new streams
 */
function handleLocalStream(streams, resolution) {
    var audioStream, videoStream, desktopStream, res = [];

    // With Firefox and the Temasys plugin the streams parameter is *not* a
    // single MediaStream object; it is an object with separate audio, video
    // and desktop stream properties.
    if (window.webkitMediaStream) {
        var audioVideo = streams.audioVideo;
        if (audioVideo) {
            var audioTracks = audioVideo.getAudioTracks();
            if (audioTracks.length) {
                audioStream = new webkitMediaStream();
                for (var i = 0; i < audioTracks.length; i++) {
                    audioStream.addTrack(audioTracks[i]);
                }
            }

            var videoTracks = audioVideo.getVideoTracks();
            if (videoTracks.length) {
                videoStream = new webkitMediaStream();
                for (var j = 0; j < videoTracks.length; j++) {
                    videoStream.addTrack(videoTracks[j]);
                }
            }
        }

        if (streams && streams.desktopStream)
            desktopStream = streams.desktopStream;
    } else if (RTCBrowserType.isFirefox() ||
               RTCBrowserType.isTemasysPluginUsed()) { // Firefox and Temasys plugin
        if (streams && streams.audio)
            audioStream = streams.audio;
        if (streams && streams.video)
            videoStream = streams.video;
        if (streams && streams.desktop)
            desktopStream = streams.desktop;
    }

    if (desktopStream)
        res.push({stream: desktopStream,
            type: "video", videoType: "desktop"});
    if (audioStream)
        res.push({stream: audioStream, type: "audio", videoType: null});
    if (videoStream)
        res.push({stream: videoStream, type: "video", videoType: "camera",
            resolution: resolution});

    return res;
}
// The options parameter is used to pass config options; currently only
// "useIPv6" is used.
var RTCUtils = {
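    /**
     * Detects the current browser and installs the matching WebRTC wrappers
     * (getUserMedia, enumerateDevices, attachMediaStream, peerconnection,
     * etc.) on this object, then signals RTC readiness via onReady().
     * @param {Object} options config options (see note above)
     * @returns {Promise} resolved once the wrappers are installed, rejected
     * if the browser is too old or not WebRTC-capable
     */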
    init: function (options) {
        return new Promise(function (resolve, reject) {
            if (RTCBrowserType.isFirefox()) {
                var FFversion = RTCBrowserType.getFirefoxVersion();
                if (FFversion < 40) {
                    logger.error(
                        "Firefox version too old: " + FFversion +
                        ". Required >= 40.");
                    reject(new Error("Firefox version too old: " + FFversion +
                        ". Required >= 40."));
                    return;
                }
                this.peerconnection = mozRTCPeerConnection;
                this.getUserMedia =
                    wrapGetUserMedia(navigator.mozGetUserMedia.bind(navigator));
                this.enumerateDevices = wrapEnumerateDevices(
                    navigator.mediaDevices.enumerateDevices.bind(
                        navigator.mediaDevices)
                );
                this.pc_constraints = {};
                this.attachMediaStream = function (element, stream) {
                    // srcObject is being standardized and FF will eventually
                    // support that unprefixed. FF also supports the
                    // "element.src = URL.createObjectURL(...)" combo, but that
                    // will be deprecated in favour of srcObject.
                    //
                    // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
                    // https://github.com/webrtc/samples/issues/302
                    if (!element[0])
                        return;
                    element[0].mozSrcObject = stream;
                    element[0].play();
                };
                this.getStreamID = function (stream) {
                    var id = stream.id;
                    if (!id) {
                        var tracks = stream.getVideoTracks();
                        if (!tracks || tracks.length === 0) {
                            tracks = stream.getAudioTracks();
                        }
                        id = tracks[0].id;
                    }
                    return SDPUtil.filter_special_chars(id);
                };
                this.getVideoSrc = function (element) {
                    if (!element)
                        return null;
                    return element.mozSrcObject;
                };
                this.setVideoSrc = function (element, src) {
                    if (element)
                        element.mozSrcObject = src;
                };
                RTCSessionDescription = mozRTCSessionDescription;
                RTCIceCandidate = mozRTCIceCandidate;
            } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) {
                this.peerconnection = webkitRTCPeerConnection;
                var getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
                if (navigator.mediaDevices) {
                    this.getUserMedia = wrapGetUserMedia(getUserMedia);
                    this.enumerateDevices = wrapEnumerateDevices(
                        navigator.mediaDevices.enumerateDevices.bind(
                            navigator.mediaDevices)
                    );
                } else {
                    this.getUserMedia = getUserMedia;
                    this.enumerateDevices =
                        enumerateDevicesThroughMediaStreamTrack;
                }
                this.attachMediaStream = function (element, stream) {
                    // saves the created url for the stream, so we can reuse it
                    // and not keep creating urls
                    if (!stream.jitsiObjectURL) {
                        stream.jitsiObjectURL
                            = webkitURL.createObjectURL(stream);
                    }
                    element.attr('src', stream.jitsiObjectURL);
                };
                this.getStreamID = function (stream) {
                    // streams from FF endpoints have the characters '{' and '}'
                    // that make jQuery choke.
                    return SDPUtil.filter_special_chars(stream.id);
                };
                this.getVideoSrc = function (element) {
                    if (!element)
                        return null;
                    return element.getAttribute("src");
                };
                this.setVideoSrc = function (element, src) {
                    if (element)
                        element.setAttribute("src", src);
                };
                // DTLS should now be enabled by default but..
                this.pc_constraints = {'optional': [
                    {'DtlsSrtpKeyAgreement': 'true'}
                ]};
                if (options.useIPv6) {
                    // https://code.google.com/p/webrtc/issues/detail?id=2828
                    this.pc_constraints.optional.push({googIPv6: true});
                }
                if (RTCBrowserType.isAndroid()) {
                    this.pc_constraints = {}; // disable DTLS on Android
                }
                if (!webkitMediaStream.prototype.getVideoTracks) {
                    webkitMediaStream.prototype.getVideoTracks = function () {
                        return this.videoTracks;
                    };
                }
                if (!webkitMediaStream.prototype.getAudioTracks) {
                    webkitMediaStream.prototype.getAudioTracks = function () {
                        return this.audioTracks;
                    };
                }
            }
            // Detect IE/Safari
            else if (RTCBrowserType.isTemasysPluginUsed()) {
                //AdapterJS.WebRTCPlugin.setLogLevel(
                //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
                var self = this;
                AdapterJS.webRTCReady(function (isPlugin) {
                    self.peerconnection = RTCPeerConnection;
                    self.getUserMedia = window.getUserMedia;
                    self.enumerateDevices =
                        enumerateDevicesThroughMediaStreamTrack;
                    self.attachMediaStream = function (elSel, stream) {
                        if (stream.id === "dummyAudio"
                            || stream.id === "dummyVideo") {
                            return;
                        }
                        attachMediaStream(elSel[0], stream);
                    };
                    self.getStreamID = function (stream) {
                        var id = SDPUtil.filter_special_chars(stream.label);
                        return id;
                    };
                    self.getVideoSrc = function (element) {
                        if (!element) {
                            logger.warn(
                                "Attempt to get video SRC of null element");
                            return null;
                        }
                        var children = element.children;
                        for (var i = 0; i !== children.length; ++i) {
                            if (children[i].name === 'streamId') {
                                return children[i].value;
                            }
                        }
                        //logger.info(element.id + " SRC: " + src);
                        return null;
                    };
                    self.setVideoSrc = function (element, src) {
                        //logger.info("Set video src: ", element, src);
                        if (!src) {
                            logger.warn(
                                "Not attaching video stream, 'src' is null");
                            return;
                        }
                        AdapterJS.WebRTCPlugin.WaitForPluginReady();
                        var stream = AdapterJS.WebRTCPlugin.plugin
                            .getStreamWithId(
                                AdapterJS.WebRTCPlugin.pageId, src);
                        attachMediaStream(element, stream);
                    };
                    onReady(options, self.getUserMediaWithConstraints);
                    resolve();
                });
            } else {
                try {
                    logger.error(
                        'Browser does not appear to be WebRTC-capable');
                } catch (e) {
                }
                reject('Browser does not appear to be WebRTC-capable');
                return;
            }
            // Call onReady() if Temasys plugin is not used
            if (!RTCBrowserType.isTemasysPluginUsed()) {
                onReady(options, this.getUserMediaWithConstraints);
                resolve();
            }
        }.bind(this));
    },
    /**
     * @param {string[]} um required user media types
     * @param {Function} success_callback
     * @param {Function} failure_callback
     * @param {Object} [options] optional parameters
     * @param {string} options.resolution
     * @param {number} options.bandwidth
     * @param {number} options.fps
     * @param {string} options.desktopStream
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     **/
    getUserMediaWithConstraints:
        function (um, success_callback, failure_callback, options) {
        options = options || {};
        var resolution = options.resolution;
        var constraints = getConstraints(um, options);
        logger.info("Get media constraints", constraints);

        try {
            this.getUserMedia(constraints,
                function (stream) {
                    logger.log('onUserMediaSuccess');
                    setAvailableDevices(um, true);
                    success_callback(stream);
                },
                function (error) {
                    setAvailableDevices(um, false);
                    logger.warn('Failed to get access to local media. Error ',
                        error, constraints);
                    if (failure_callback) {
                        failure_callback(error, resolution);
                    }
                });
        } catch (e) {
            logger.error('GUM failed: ', e);
            if (failure_callback) {
                failure_callback(e);
            }
        }
    },
    /**
     * Creates the local MediaStreams.
     * @param {Object} [options] optional parameters
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {bool} options.dontCreateJitsiTrack if <tt>true</tt>, objects with
     * the following structure {stream: the Media Stream, type: "audio" or
     * "video", videoType: "camera" or "desktop"} will be returned through the
     * Promise, otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    obtainAudioAndVideoPermissions: function (options) {
        var self = this;

        options = options || {};
        return new Promise(function (resolve, reject) {
            var successCallback = function (stream) {
                resolve(handleLocalStream(stream, options.resolution));
            };

            options.devices = options.devices || ['audio', 'video'];

            if (!screenObtainer.isSupported()
                && options.devices.indexOf("desktop") !== -1) {
                reject(new Error("Desktop sharing is not supported!"));
            }
            if (RTCBrowserType.isFirefox() ||
                RTCBrowserType.isTemasysPluginUsed()) {
                var GUM = function (device, s, e) {
                    this.getUserMediaWithConstraints(device, s, e, options);
                };
                var deviceGUM = {
                    "audio": GUM.bind(self, ["audio"]),
                    "video": GUM.bind(self, ["video"])
                };

                if (screenObtainer.isSupported()) {
                    deviceGUM["desktop"] = screenObtainer.obtainStream.bind(
                        screenObtainer);
                }
                // With FF/IE we can't split the stream into audio and video
                // because FF doesn't support media stream constructors. So,
                // we need to get the audio stream separately from the video
                // stream using two distinct GUM calls. Not very user
                // friendly :-( but we don't have many other options either.
                //
                // Note that we pack those 2 streams in a single object and
                // pass it to the successCallback method.
                obtainDevices({
                    devices: options.devices,
                    streams: {},
                    successCallback: successCallback,
                    errorCallback: reject,
                    deviceGUM: deviceGUM
                });
            } else {
                var hasDesktop = options.devices.indexOf('desktop') > -1;
                if (hasDesktop) {
                    options.devices.splice(
                        options.devices.indexOf("desktop"), 1);
                }
                options.resolution = options.resolution || '360';
                if (options.devices.length) {
                    this.getUserMediaWithConstraints(
                        options.devices,
                        function (stream) {
                            if ((options.devices.indexOf("audio") !== -1 &&
                                !stream.getAudioTracks().length) ||
                                (options.devices.indexOf("video") !== -1 &&
                                !stream.getVideoTracks().length)) {
                                self.stopMediaStream(stream);
                                reject(JitsiTrackErrors.parseError(
                                    new Error("Unable to get the audio and " +
                                        "video tracks."),
                                    options.devices));
                                return;
                            }
                            if (hasDesktop) {
                                screenObtainer.obtainStream(
                                    function (desktopStream) {
                                        successCallback({audioVideo: stream,
                                            desktopStream: desktopStream});
                                    }, function (error) {
                                        self.stopMediaStream(stream);
                                        reject(
                                            JitsiTrackErrors.parseError(error,
                                                options.devices));
                                    });
                            } else {
                                successCallback({audioVideo: stream});
                            }
                        },
                        function (error) {
                            reject(JitsiTrackErrors.parseError(error,
                                options.devices));
                        },
                        options);
                } else if (hasDesktop) {
                    screenObtainer.obtainStream(
                        function (stream) {
                            successCallback({desktopStream: stream});
                        }, function (error) {
                            reject(
                                JitsiTrackErrors.parseError(error,
                                    ["desktop"]));
                        });
                }
            }
        }.bind(this));
    },
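    /**
     * Subscribes a listener to the given RTCEvents event.
     * @param {string} eventType the RTCEvents event to listen for
     * @param {Function} listener handler invoked when the event is emitted
     */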
    addListener: function (eventType, listener) {
        eventEmitter.on(eventType, listener);
    },
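    /**
     * Removes a previously added listener for the given RTCEvents event.
     * @param {string} eventType the RTCEvents event
     * @param {Function} listener the handler to remove
     */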
    removeListener: function (eventType, listener) {
        eventEmitter.removeListener(eventType, listener);
    },
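    /**
     * Returns the current audio/video device availability map as maintained
     * by setAvailableDevices().
     * @returns {{audio: boolean, video: boolean}}
     */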
    getDeviceAvailability: function () {
        return devices;
    },
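    /**
     * Indicates whether RTC initialization has completed (RTC_READY was
     * emitted).
     * @returns {boolean}
     */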
    isRTCReady: function () {
        return rtcReady;
    },
    /**
     * Checks if it is possible to enumerate the available cameras/microphones.
     * @returns {boolean} true if available, false otherwise.
     */
    isDeviceListAvailable: function () {
        var isEnumerateDevicesAvailable
            = navigator.mediaDevices && navigator.mediaDevices.enumerateDevices;
        if (isEnumerateDevicesAvailable) {
            return true;
        }
        return (MediaStreamTrack && MediaStreamTrack.getSources) ? true : false;
    },
    /**
     * Returns true if changing the camera / microphone device is supported
     * and false if not.
     */
    isDeviceChangeAvailable: function () {
        if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera() ||
            RTCBrowserType.isTemasysPluginUsed())
            return true;
        return false;
    },
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    stopMediaStream: function (mediaStream) {
        mediaStream.getTracks().forEach(function (track) {
            // stop() not supported with IE
            if (track.stop) {
                track.stop();
            }
        });
        // leave stop for implementations still using it
        if (mediaStream.stop) {
            mediaStream.stop();
        }
        // if we have done createObjectURL, let's clean it up
        if (mediaStream.jitsiObjectURL) {
            webkitURL.revokeObjectURL(mediaStream.jitsiObjectURL);
        }
    },
    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    isDesktopSharingEnabled: function () {
        return screenObtainer.isSupported();
    }
};

module.exports = RTCUtils;
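
// A minimal usage sketch (illustration only, not part of the module). It
// assumes a CommonJS consumer in the same source tree; the option values
// shown are examples. init() installs the browser-specific wrappers, and
// obtainAudioAndVideoPermissions() resolves with the stream descriptors
// produced by handleLocalStream().
//
//     var RTCUtils = require("./RTCUtils");
//
//     RTCUtils.init({ useIPv6: false }).then(function () {
//         return RTCUtils.obtainAudioAndVideoPermissions({
//             devices: ["audio", "video"],
//             resolution: "720"
//         });
//     }).then(function (streamDescriptions) {
//         // each entry: {stream, type, videoType, resolution}
//     }).catch(function (error) {
//         console.error("Failed to create local streams", error);
//     });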