
RTCUtils.js 31KB

/* global config, require, attachMediaStream, getUserMedia,
    RTCPeerConnection, RTCSessionDescription, RTCIceCandidate, MediaStreamTrack,
    mozRTCPeerConnection, mozRTCSessionDescription, mozRTCIceCandidate,
    webkitRTCPeerConnection, webkitMediaStream, webkitURL
*/
/* jshint -W101 */
var logger = require("jitsi-meet-logger").getLogger(__filename);
var RTCBrowserType = require("./RTCBrowserType");
var Resolutions = require("../../service/RTC/Resolutions");
var RTCEvents = require("../../service/RTC/RTCEvents");
var AdapterJS = require("./adapter.screenshare");
var SDPUtil = require("../xmpp/SDPUtil");
var EventEmitter = require("events");
var screenObtainer = require("./ScreenObtainer");
var JitsiTrackErrors = require("../../JitsiTrackErrors");

var eventEmitter = new EventEmitter();

var devices = {
    audio: true,
    video: true
};

var rtcReady = false;

function setResolutionConstraints(constraints, resolution) {
    var isAndroid = RTCBrowserType.isAndroid();

    if (Resolutions[resolution]) {
        constraints.video.mandatory.minWidth = Resolutions[resolution].width;
        constraints.video.mandatory.minHeight = Resolutions[resolution].height;
    }
    else if (isAndroid) {
        // FIXME can't remember if the purpose of this was to always request
        // low resolution on Android ? if yes it should be moved up front
        constraints.video.mandatory.minWidth = 320;
        constraints.video.mandatory.minHeight = 240;
        constraints.video.mandatory.maxFrameRate = 15;
    }

    if (constraints.video.mandatory.minWidth)
        constraints.video.mandatory.maxWidth =
            constraints.video.mandatory.minWidth;
    if (constraints.video.mandatory.minHeight)
        constraints.video.mandatory.maxHeight =
            constraints.video.mandatory.minHeight;
}
/**
 * @param {string[]} um required user media types
 *
 * @param {Object} [options={}] optional parameters
 * @param {string} options.resolution
 * @param {number} options.bandwidth
 * @param {number} options.fps
 * @param {string} options.desktopStream
 * @param {string} options.cameraDeviceId
 * @param {string} options.micDeviceId
 * @param {bool} options.firefox_fake_device
 */
function getConstraints(um, options) {
    var constraints = {audio: false, video: false};

    if (um.indexOf('video') >= 0) {
        // same behaviour as true
        constraints.video = { mandatory: {}, optional: [] };

        if (options.cameraDeviceId) {
            constraints.video.optional.push({
                sourceId: options.cameraDeviceId
            });
        }

        constraints.video.optional.push({ googLeakyBucket: true });

        setResolutionConstraints(constraints, options.resolution);
    }
    if (um.indexOf('audio') >= 0) {
        if (!RTCBrowserType.isFirefox()) {
            // same behaviour as true
            constraints.audio = { mandatory: {}, optional: []};
            if (options.micDeviceId) {
                constraints.audio.optional.push({
                    sourceId: options.micDeviceId
                });
            }
            // if it is good enough for hangouts...
            constraints.audio.optional.push(
                {googEchoCancellation: true},
                {googAutoGainControl: true},
                {googNoiseSuppression: true},
                {googHighpassFilter: true},
                {googNoiseSuppression2: true},
                {googEchoCancellation2: true},
                {googAutoGainControl2: true}
            );
        } else {
            if (options.micDeviceId) {
                constraints.audio = {
                    mandatory: {},
                    optional: [{
                        sourceId: options.micDeviceId
                    }]
                };
            } else {
                constraints.audio = true;
            }
        }
    }
    if (um.indexOf('screen') >= 0) {
        if (RTCBrowserType.isChrome()) {
            constraints.video = {
                mandatory: {
                    chromeMediaSource: "screen",
                    googLeakyBucket: true,
                    maxWidth: window.screen.width,
                    maxHeight: window.screen.height,
                    maxFrameRate: 3
                },
                optional: []
            };
        } else if (RTCBrowserType.isTemasysPluginUsed()) {
            constraints.video = {
                optional: [
                    {
                        sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
                    }
                ]
            };
        } else if (RTCBrowserType.isFirefox()) {
            constraints.video = {
                mozMediaSource: "window",
                mediaSource: "window"
            };
        } else {
            logger.error(
                "'screen' WebRTC media source is supported only in Chrome" +
                " and with the Temasys plugin");
        }
    }
    if (um.indexOf('desktop') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: options.desktopStream,
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }

    if (options.bandwidth) {
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.optional.push({bandwidth: options.bandwidth});
    }
    if (options.fps) {
        // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.mandatory.minFrameRate = options.fps;
    }

    // We request fake audio and video together: the fake devices seem to work
    // only when both are enabled in a single getUserMedia call, so we cannot
    // get fake audio separately from fake video. This may later become a
    // problem for some of the tests.
    if (RTCBrowserType.isFirefox() && options.firefox_fake_device) {
        // This seems to be fixed now; the experimental fix was removed because
        // having multiple audio tracks breaks the tests.
        //constraints.audio = true;
        constraints.fake = true;
    }

    return constraints;
}
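
/*
 * Illustrative usage sketch (not part of the original file). Assuming the
 * Resolutions table maps '360' to 640x360, a call such as
 *
 *     var constraints = getConstraints(
 *         ['audio', 'video'],
 *         { resolution: '360', micDeviceId: 'mic1', cameraDeviceId: 'cam1' });
 *
 * is expected to produce, on Chrome, an object roughly of this shape:
 *
 *     {
 *         audio: {
 *             mandatory: {},
 *             optional: [
 *                 { sourceId: 'mic1' },
 *                 { googEchoCancellation: true }
 *                 // ...and the remaining goog* audio processing flags
 *             ]
 *         },
 *         video: {
 *             mandatory: { minWidth: 640, minHeight: 360,
 *                          maxWidth: 640, maxHeight: 360 },
 *             optional: [ { sourceId: 'cam1' }, { googLeakyBucket: true } ]
 *         }
 *     }
 */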
function setAvailableDevices(um, available) {
    if (um.indexOf("video") != -1) {
        devices.video = available;
    }
    if (um.indexOf("audio") != -1) {
        devices.audio = available;
    }
    eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, devices);
}

// In case of IE we continue from 'onReady' callback
// passed to RTCUtils constructor. It will be invoked by Temasys plugin
// once it is initialized.
function onReady (options, GUM) {
    rtcReady = true;
    eventEmitter.emit(RTCEvents.RTC_READY, true);
    screenObtainer.init(options, GUM);
}

/**
 * Apply function with arguments if function exists.
 * Do nothing if function not provided.
 * @param {function} [fn] function to apply
 * @param {Array} [args=[]] arguments for function
 */
function maybeApply(fn, args) {
    if (fn) {
        fn.apply(null, args || []);
    }
}
var getUserMediaStatus = {
    initialized: false,
    callbacks: []
};

/**
 * Wrap `getUserMedia` to allow others to know if it was executed at least
 * once or not. Wrapper function uses `getUserMediaStatus` object.
 * @param {Function} getUserMedia native function
 * @returns {Function} wrapped function
 */
function wrapGetUserMedia(getUserMedia) {
    return function (constraints, successCallback, errorCallback) {
        getUserMedia(constraints, function (stream) {
            maybeApply(successCallback, [stream]);
            if (!getUserMediaStatus.initialized) {
                getUserMediaStatus.initialized = true;
                getUserMediaStatus.callbacks.forEach(function (callback) {
                    callback();
                });
                getUserMediaStatus.callbacks.length = 0;
            }
        }, function (error) {
            maybeApply(errorCallback, [error]);
        });
    };
}
/**
 * Creates a stub device description representing the auto-selected device.
 * @param {string} kind device kind, e.g. 'audioinput' or 'videoinput'
 * @returns {Object} stub device description in `enumerateDevices` format
 */
function createAutoDeviceInfo(kind) {
    return {
        facing: null,
        label: 'Auto',
        kind: kind,
        deviceId: '',
        groupId: null
    };
}

/**
 * Execute function after getUserMedia was executed at least once.
 * @param {Function} callback function to execute after getUserMedia
 */
function afterUserMediaInitialized(callback) {
    if (getUserMediaStatus.initialized) {
        callback();
    } else {
        getUserMediaStatus.callbacks.push(callback);
    }
}
/**
 * Wrapper which makes enumerateDevices wait until getUserMedia has been
 * executed for the first time.
 * @param {Function} enumerateDevices native function
 * @returns {Function} wrapped function
 */
function wrapEnumerateDevices(enumerateDevices) {
    return function (callback) {
        // enumerate devices only after initial getUserMedia
        afterUserMediaInitialized(function () {
            enumerateDevices().then(function (devices) {
                // add auto devices
                devices.unshift(
                    createAutoDeviceInfo('audioinput'),
                    createAutoDeviceInfo('videoinput')
                );
                callback(devices);
            }, function (err) {
                console.error('cannot enumerate devices: ', err);
                // return only auto devices
                callback([createAutoDeviceInfo('audioinput'),
                          createAutoDeviceInfo('videoinput')]);
            });
        });
    };
}
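
/*
 * Illustrative sketch (not part of the original file): the wrapped function is
 * callback-based rather than Promise-based, and it defers enumeration until
 * getUserMedia has been executed at least once. This mirrors the wiring that
 * init() performs for Chrome/Firefox:
 *
 *     var listDevices = wrapEnumerateDevices(
 *         navigator.mediaDevices.enumerateDevices.bind(
 *             navigator.mediaDevices));
 *
 *     listDevices(function (devices) {
 *         devices.forEach(function (d) {
 *             console.log(d.kind, d.label || d.deviceId);
 *         });
 *     });
 */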
/**
 * Uses the legacy MediaStreamTrack.getSources API to get the device list and
 * converts it to the enumerateDevices format.
 * @param {Function} callback function to call with the received device list.
 */
function enumerateDevicesThroughMediaStreamTrack (callback) {
    MediaStreamTrack.getSources(function (sources) {
        var devices = sources.map(function (source) {
            var kind = (source.kind || '').toLowerCase();
            return {
                facing: source.facing || null,
                label: source.label,
                kind: kind ? kind + 'input' : null,
                deviceId: source.id,
                groupId: source.groupId || null
            };
        });

        // add auto devices
        devices.unshift(
            createAutoDeviceInfo('audioinput'),
            createAutoDeviceInfo('videoinput')
        );
        callback(devices);
    });
}
/**
 * Sequentially obtains a stream for each entry in options.devices (one
 * getUserMedia call per device, via options.deviceGUM) and collects the
 * results in options.streams, keyed by device name. Invokes
 * options.successCallback with the collected streams, or options.errorCallback
 * if any request fails (stopping the streams obtained so far).
 */
function obtainDevices(options) {
    if (!options.devices || options.devices.length === 0) {
        return options.successCallback(options.streams || {});
    }

    var device = options.devices.splice(0, 1);
    var devices = [];
    devices.push(device);
    options.deviceGUM[device](function (stream) {
            options.streams = options.streams || {};
            options.streams[device] = stream;
            obtainDevices(options);
        },
        function (error) {
            Object.keys(options.streams).forEach(function(device) {
                RTCUtils.stopMediaStream(options.streams[device]);
            });
            logger.error(
                "failed to obtain " + device + " stream - stop", error);
            options.errorCallback(JitsiTrackErrors.parseError(error, devices));
        });
}
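
/*
 * Illustrative sketch (not part of the original file) of the options object
 * that drives obtainDevices(); obtainAudioAndVideoPermissions() below builds
 * an equivalent one for the Firefox/Temasys code path:
 *
 *     obtainDevices({
 *         devices: ['audio', 'video'],
 *         streams: {},
 *         successCallback: function (streams) {
 *             console.log('obtained streams for', Object.keys(streams));
 *         },
 *         errorCallback: function (error) {
 *             console.error('failed to obtain streams', error);
 *         },
 *         deviceGUM: {
 *             "audio": function (success, failure) {
 *                 RTCUtils.getUserMediaWithConstraints(
 *                     ["audio"], success, failure, {});
 *             },
 *             "video": function (success, failure) {
 *                 RTCUtils.getUserMediaWithConstraints(
 *                     ["video"], success, failure, {});
 *             }
 *         }
 *     });
 */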
/**
 * Handles the newly created Media Streams.
 * @param streams the new Media Streams
 * @param resolution the resolution of the video streams
 * @returns {*[]} object that describes the new streams
 */
function handleLocalStream(streams, resolution) {
    var audioStream, videoStream, desktopStream, res = [];
    // If this is FF, the stream parameter is *not* a MediaStream object, it's
    // an object with two properties: audioStream, videoStream.
    if (window.webkitMediaStream) {
        var audioVideo = streams.audioVideo;
        if (audioVideo) {
            var audioTracks = audioVideo.getAudioTracks();
            if (audioTracks.length) {
                audioStream = new webkitMediaStream();
                for (var i = 0; i < audioTracks.length; i++) {
                    audioStream.addTrack(audioTracks[i]);
                }
            }

            var videoTracks = audioVideo.getVideoTracks();
            if (videoTracks.length) {
                videoStream = new webkitMediaStream();
                for (var j = 0; j < videoTracks.length; j++) {
                    videoStream.addTrack(videoTracks[j]);
                }
            }
        }

        if (streams && streams.desktopStream)
            desktopStream = streams.desktopStream;
    }
    else if (RTCBrowserType.isFirefox() ||
             RTCBrowserType.isTemasysPluginUsed()) { // Firefox and Temasys plugin
        if (streams && streams.audio)
            audioStream = streams.audio;
        if (streams && streams.video)
            videoStream = streams.video;
        if (streams && streams.desktop)
            desktopStream = streams.desktop;
    }

    if (desktopStream)
        res.push({stream: desktopStream,
                  type: "video", videoType: "desktop"});
    if (audioStream)
        res.push({stream: audioStream, type: "audio", videoType: null});
    if (videoStream)
        res.push({stream: videoStream, type: "video", videoType: "camera",
                  resolution: resolution});

    return res;
}
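
/*
 * Illustrative sketch (not part of the original file): when audio, video and
 * desktop streams are all present, handleLocalStream() returns an array of
 * descriptions roughly of this shape:
 *
 *     [
 *         { stream: desktopStream, type: "video", videoType: "desktop" },
 *         { stream: audioStream,   type: "audio", videoType: null },
 *         { stream: videoStream,   type: "video", videoType: "camera",
 *           resolution: resolution }
 *     ]
 */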
// Options parameter is to pass config options. Currently uses only "useIPv6".
var RTCUtils = {
    init: function (options) {
        return new Promise(function(resolve, reject) {
            if (RTCBrowserType.isFirefox()) {
                var FFversion = RTCBrowserType.getFirefoxVersion();
                if (FFversion < 40) {
                    logger.error(
                        "Firefox version too old: " + FFversion +
                        ". Required >= 40.");
                    reject(new Error("Firefox version too old: " + FFversion +
                        ". Required >= 40."));
                    return;
                }
                this.peerconnection = mozRTCPeerConnection;
                this.getUserMedia =
                    wrapGetUserMedia(navigator.mozGetUserMedia.bind(navigator));
                this.enumerateDevices = wrapEnumerateDevices(
                    navigator.mediaDevices.enumerateDevices.bind(
                        navigator.mediaDevices)
                );
                this.pc_constraints = {};
                this.attachMediaStream = function (element, stream) {
                    // srcObject is being standardized and FF will eventually
                    // support that unprefixed. FF also supports the
                    // "element.src = URL.createObjectURL(...)" combo, but that
                    // will be deprecated in favour of srcObject.
                    //
                    // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
                    // https://github.com/webrtc/samples/issues/302
                    if (!element)
                        return;
                    element.mozSrcObject = stream;
                    element.play();
                    return element;
                };
                this.getStreamID = function (stream) {
                    var id = stream.id;
                    if (!id) {
                        var tracks = stream.getVideoTracks();
                        if (!tracks || tracks.length === 0) {
                            tracks = stream.getAudioTracks();
                        }
                        id = tracks[0].id;
                    }
                    return SDPUtil.filter_special_chars(id);
                };
                this.getVideoSrc = function (element) {
                    if (!element)
                        return null;
                    return element.mozSrcObject;
                };
                this.setVideoSrc = function (element, src) {
                    if (element)
                        element.mozSrcObject = src;
                };
                RTCSessionDescription = mozRTCSessionDescription;
                RTCIceCandidate = mozRTCIceCandidate;
            } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) {
                this.peerconnection = webkitRTCPeerConnection;
                var getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
                if (navigator.mediaDevices) {
                    this.getUserMedia = wrapGetUserMedia(getUserMedia);
                    this.enumerateDevices = wrapEnumerateDevices(
                        navigator.mediaDevices.enumerateDevices.bind(
                            navigator.mediaDevices)
                    );
                } else {
                    this.getUserMedia = getUserMedia;
                    this.enumerateDevices =
                        enumerateDevicesThroughMediaStreamTrack;
                }
                this.attachMediaStream = function (element, stream) {
                    // saves the created url for the stream, so we can reuse it
                    // and not keep creating urls
                    if (!stream.jitsiObjectURL) {
                        stream.jitsiObjectURL
                            = webkitURL.createObjectURL(stream);
                    }
                    element.src = stream.jitsiObjectURL;
                    return element;
                };
                this.getStreamID = function (stream) {
                    // streams from FF endpoints have the characters '{' and '}'
                    // that make jQuery choke.
                    return SDPUtil.filter_special_chars(stream.id);
                };
                this.getVideoSrc = function (element) {
                    if (!element)
                        return null;
                    return element.getAttribute("src");
                };
                this.setVideoSrc = function (element, src) {
                    if (element)
                        element.setAttribute("src", src);
                };
                // DTLS should now be enabled by default but..
                this.pc_constraints = {'optional': [
                    {'DtlsSrtpKeyAgreement': 'true'}
                ]};
                if (options.useIPv6) {
                    // https://code.google.com/p/webrtc/issues/detail?id=2828
                    this.pc_constraints.optional.push({googIPv6: true});
                }
                if (RTCBrowserType.isAndroid()) {
                    this.pc_constraints = {}; // disable DTLS on Android
                }
                if (!webkitMediaStream.prototype.getVideoTracks) {
                    webkitMediaStream.prototype.getVideoTracks = function () {
                        return this.videoTracks;
                    };
                }
                if (!webkitMediaStream.prototype.getAudioTracks) {
                    webkitMediaStream.prototype.getAudioTracks = function () {
                        return this.audioTracks;
                    };
                }
            }
            // Detect IE/Safari
            else if (RTCBrowserType.isTemasysPluginUsed()) {
                //AdapterJS.WebRTCPlugin.setLogLevel(
                //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
                var self = this;
                AdapterJS.webRTCReady(function (isPlugin) {
                    self.peerconnection = RTCPeerConnection;
                    self.getUserMedia = window.getUserMedia;
                    self.enumerateDevices =
                        enumerateDevicesThroughMediaStreamTrack;
                    self.attachMediaStream = function (element, stream) {
                        if (stream.id === "dummyAudio" ||
                            stream.id === "dummyVideo") {
                            return;
                        }

                        var isVideoStream = !!stream.getVideoTracks().length;
                        if (isVideoStream && !$(element).is(':visible')) {
                            throw new Error('video element must be visible to attach video stream');
                        }

                        return attachMediaStream(element, stream);
                    };
                    self.getStreamID = function (stream) {
                        var id = SDPUtil.filter_special_chars(stream.label);
                        return id;
                    };
                    self.getVideoSrc = function (element) {
                        if (!element) {
                            logger.warn(
                                "Attempt to get video SRC of null element");
                            return null;
                        }
                        var children = element.children;
                        for (var i = 0; i !== children.length; ++i) {
                            if (children[i].name === 'streamId') {
                                return children[i].value;
                            }
                        }
                        //logger.info(element.id + " SRC: " + src);
                        return null;
                    };
                    self.setVideoSrc = function (element, src) {
                        //logger.info("Set video src: ", element, src);
                        if (!src) {
                            attachMediaStream(element, null);
                        } else {
                            AdapterJS.WebRTCPlugin.WaitForPluginReady();
                            var stream
                                = AdapterJS.WebRTCPlugin.plugin
                                    .getStreamWithId(
                                        AdapterJS.WebRTCPlugin.pageId, src);
                            attachMediaStream(element, stream);
                        }
                    };

                    onReady(options, self.getUserMediaWithConstraints);
                    resolve();
                });
            } else {
                try {
                    logger.error('Browser does not appear to be WebRTC-capable');
                } catch (e) {
                }
                reject('Browser does not appear to be WebRTC-capable');
                return;
            }

            // Call onReady() if Temasys plugin is not used
            if (!RTCBrowserType.isTemasysPluginUsed()) {
                onReady(options, this.getUserMediaWithConstraints);
                resolve();
            }
        }.bind(this));
    },
    /**
     * @param {string[]} um required user media types
     * @param {function} success_callback
     * @param {Function} failure_callback
     * @param {Object} [options] optional parameters
     * @param {string} options.resolution
     * @param {number} options.bandwidth
     * @param {number} options.fps
     * @param {string} options.desktopStream
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     */
    getUserMediaWithConstraints: function (
            um, success_callback, failure_callback, options) {
        options = options || {};
        var resolution = options.resolution;
        var constraints = getConstraints(um, options);
        logger.info("Get media constraints", constraints);

        try {
            this.getUserMedia(constraints,
                function (stream) {
                    logger.log('onUserMediaSuccess');
                    setAvailableDevices(um, true);
                    success_callback(stream);
                },
                function (error) {
                    setAvailableDevices(um, false);
                    logger.warn('Failed to get access to local media. Error ',
                        error, constraints);
                    if (failure_callback) {
                        failure_callback(error, resolution);
                    }
                });
        } catch (e) {
            logger.error('GUM failed: ', e);
            if (failure_callback) {
                failure_callback(e);
            }
        }
    },
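
    /*
     * Illustrative usage sketch (not part of the original file), assuming
     * RTCUtils.init() has already resolved and the Resolutions table contains
     * a '720' entry:
     *
     *     RTCUtils.getUserMediaWithConstraints(
     *         ['audio', 'video'],
     *         function (stream) {
     *             console.log('got local stream',
     *                 RTCUtils.getStreamID(stream));
     *         },
     *         function (error, resolution) {
     *             console.warn('getUserMedia failed', error, resolution);
     *         },
     *         { resolution: '720' });
     */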
    /**
     * Creates the local MediaStreams.
     * @param {Object} [options] optional parameters
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {bool} options.dontCreateJitsiTrack if <tt>true</tt>, objects with
     * the following structure {stream: the Media Stream, type: "audio" or
     * "video", videoType: "camera" or "desktop"} will be returned through the
     * Promise, otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    obtainAudioAndVideoPermissions: function (options) {
        var self = this;

        options = options || {};
        return new Promise(function (resolve, reject) {
            var successCallback = function (stream) {
                resolve(handleLocalStream(stream, options.resolution));
            };

            options.devices = options.devices || ['audio', 'video'];
            if (!screenObtainer.isSupported()
                && options.devices.indexOf("desktop") !== -1) {
                reject(new Error("Desktop sharing is not supported!"));
            }
            if (RTCBrowserType.isFirefox() ||
                RTCBrowserType.isTemasysPluginUsed()) {
                var GUM = function (device, s, e) {
                    this.getUserMediaWithConstraints(device, s, e, options);
                };
                var deviceGUM = {
                    "audio": GUM.bind(self, ["audio"]),
                    "video": GUM.bind(self, ["video"])
                };
                if (screenObtainer.isSupported()) {
                    deviceGUM["desktop"] = screenObtainer.obtainStream.bind(
                        screenObtainer);
                }
                // With FF/IE we can't split the stream into audio and video
                // because FF doesn't support media stream constructors. So we
                // need to get the audio stream separately from the video stream
                // using two distinct GUM calls. Not very user friendly :-( but
                // we don't have many other options either.
                //
                // Note that we pack those 2 streams in a single object and pass
                // it to the successCallback method.
                obtainDevices({
                    devices: options.devices,
                    streams: [],
                    successCallback: successCallback,
                    errorCallback: reject,
                    deviceGUM: deviceGUM
                });
            } else {
                var hasDesktop = options.devices.indexOf('desktop') > -1;
                if (hasDesktop) {
                    options.devices.splice(
                        options.devices.indexOf("desktop"), 1);
                }
                options.resolution = options.resolution || '360';
                if (options.devices.length) {
                    this.getUserMediaWithConstraints(
                        options.devices,
                        function (stream) {
                            if ((options.devices.indexOf("audio") !== -1 &&
                                    !stream.getAudioTracks().length) ||
                                (options.devices.indexOf("video") !== -1 &&
                                    !stream.getVideoTracks().length)) {
                                self.stopMediaStream(stream);
                                reject(JitsiTrackErrors.parseError(
                                    new Error("Unable to get the audio and " +
                                        "video tracks."),
                                    options.devices));
                                return;
                            }
                            if (hasDesktop) {
                                screenObtainer.obtainStream(
                                    function (desktopStream) {
                                        successCallback({audioVideo: stream,
                                            desktopStream: desktopStream});
                                    }, function (error) {
                                        self.stopMediaStream(stream);
                                        reject(
                                            JitsiTrackErrors.parseError(error,
                                                options.devices));
                                    });
                            } else {
                                successCallback({audioVideo: stream});
                            }
                        },
                        function (error) {
                            reject(JitsiTrackErrors.parseError(error,
                                options.devices));
                        },
                        options);
                } else if (hasDesktop) {
                    screenObtainer.obtainStream(
                        function (stream) {
                            successCallback({desktopStream: stream});
                        }, function (error) {
                            reject(
                                JitsiTrackErrors.parseError(error,
                                    ["desktop"]));
                        });
                }
            }
        }.bind(this));
    },
    addListener: function (eventType, listener) {
        eventEmitter.on(eventType, listener);
    },
    removeListener: function (eventType, listener) {
        eventEmitter.removeListener(eventType, listener);
    },
    getDeviceAvailability: function () {
        return devices;
    },
    isRTCReady: function () {
        return rtcReady;
    },
    /**
     * Checks if it is possible to enumerate the available cameras/microphones.
     * @returns {boolean} true if available, false otherwise.
     */
    isDeviceListAvailable: function () {
        var isEnumerateDevicesAvailable = navigator.mediaDevices &&
            navigator.mediaDevices.enumerateDevices;
        if (isEnumerateDevicesAvailable) {
            return true;
        }
        return (MediaStreamTrack && MediaStreamTrack.getSources) ? true : false;
    },
    /**
     * Returns true if changing the camera / microphone device is supported and
     * false if not.
     */
    isDeviceChangeAvailable: function () {
        if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera() ||
            RTCBrowserType.isTemasysPluginUsed())
            return true;
        return false;
    },
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    stopMediaStream: function (mediaStream) {
        mediaStream.getTracks().forEach(function (track) {
            // stop() not supported with IE
            if (!RTCBrowserType.isTemasysPluginUsed() && track.stop) {
                track.stop();
            }
        });

        // leave stop for implementations still using it
        if (mediaStream.stop) {
            mediaStream.stop();
        }

        // if we have done createObjectURL, let's clean it up
        if (mediaStream.jitsiObjectURL) {
            webkitURL.revokeObjectURL(mediaStream.jitsiObjectURL);
        }
    },
    /**
     * Returns whether or not desktop sharing is enabled.
     * @returns {boolean}
     */
    isDesktopSharingEnabled: function () {
        return screenObtainer.isSupported();
    }
};

module.exports = RTCUtils;
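
/*
 * Illustrative usage sketch (not part of the original file). A consumer of
 * this module would typically initialize it once and then request local
 * streams; the relative require path below is an assumption:
 *
 *     var RTCUtils = require('./modules/RTC/RTCUtils');
 *
 *     RTCUtils.init({ useIPv6: false }).then(function () {
 *         return RTCUtils.obtainAudioAndVideoPermissions(
 *             { devices: ['audio', 'video'], resolution: '720' });
 *     }).then(function (streamDescriptions) {
 *         // array of { stream, type, videoType, ... } as built by
 *         // handleLocalStream()
 *         streamDescriptions.forEach(function (desc) {
 *             console.log(desc.type, desc.videoType);
 *         });
 *     }).catch(function (error) {
 *         console.error('Failed to create local streams', error);
 *     });
 */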