/* global config, require, attachMediaStream, getUserMedia */
var logger = require("jitsi-meet-logger").getLogger(__filename);
var RTCBrowserType = require("./RTCBrowserType");
var Resolutions = require("../../service/RTC/Resolutions");
var RTCEvents = require("../../service/RTC/RTCEvents");
var AdapterJS = require("./adapter.screenshare");
var SDPUtil = require("../xmpp/SDPUtil");
var EventEmitter = require("events");
var JitsiLocalTrack = require("./JitsiLocalTrack");
var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");

var eventEmitter = new EventEmitter();

var devices = {
    audio: true,
    video: true
};

var rtcReady = false;

function DummyMediaStream(id) {
    this.id = id;
    this.label = id;
    this.stop = function () { };
    this.getAudioTracks = function () { return []; };
    this.getVideoTracks = function () { return []; };
}
function getPreviousResolution(resolution) {
    if (!Resolutions[resolution])
        return null;
    var order = Resolutions[resolution].order;
    var res = null;
    var resName = null;
    for (var i in Resolutions) {
        var tmp = Resolutions[i];
        if (res == null || (res.order < tmp.order && tmp.order < order)) {
            resName = i;
            res = tmp;
        }
    }
    return resName;
}
function setResolutionConstraints(constraints, resolution) {
    var isAndroid = RTCBrowserType.isAndroid();

    if (Resolutions[resolution]) {
        constraints.video.mandatory.minWidth = Resolutions[resolution].width;
        constraints.video.mandatory.minHeight = Resolutions[resolution].height;
    }
    else if (isAndroid) {
        // FIXME can't remember if the purpose of this was to always request
        // low resolution on Android ? if yes it should be moved up front
        constraints.video.mandatory.minWidth = 320;
        constraints.video.mandatory.minHeight = 240;
        constraints.video.mandatory.maxFrameRate = 15;
    }

    if (constraints.video.mandatory.minWidth)
        constraints.video.mandatory.maxWidth =
            constraints.video.mandatory.minWidth;
    if (constraints.video.mandatory.minHeight)
        constraints.video.mandatory.maxHeight =
            constraints.video.mandatory.minHeight;
}
/**
 * @param {string[]} um required user media types
 *
 * @param {Object} [options={}] optional parameters
 * @param {string} options.resolution
 * @param {number} options.bandwidth
 * @param {number} options.fps
 * @param {string} options.desktopStream
 * @param {string} options.cameraDeviceId
 * @param {string} options.micDeviceId
 * @param {boolean} options.firefox_fake_device
 */
function getConstraints(um, options) {
    var constraints = {audio: false, video: false};

    if (um.indexOf('video') >= 0) {
        // same behaviour as true
        constraints.video = { mandatory: {}, optional: [] };

        if (options.cameraDeviceId) {
            constraints.video.optional.push({
                sourceId: options.cameraDeviceId
            });
        }

        constraints.video.optional.push({ googLeakyBucket: true });

        setResolutionConstraints(constraints, options.resolution);
    }
    if (um.indexOf('audio') >= 0) {
        if (!RTCBrowserType.isFirefox()) {
            // same behaviour as true
            constraints.audio = { mandatory: {}, optional: [] };
            if (options.micDeviceId) {
                constraints.audio.optional.push({
                    sourceId: options.micDeviceId
                });
            }
            // if it is good enough for hangouts...
            constraints.audio.optional.push(
                {googEchoCancellation: true},
                {googAutoGainControl: true},
                {googNoiseSuppression: true},
                {googHighpassFilter: true},
                {googNoiseSuppression2: true},
                {googEchoCancellation2: true},
                {googAutoGainControl2: true}
            );
        } else {
            if (options.micDeviceId) {
                constraints.audio = {
                    mandatory: {},
                    optional: [{
                        sourceId: options.micDeviceId
                    }]
                };
            } else {
                constraints.audio = true;
            }
        }
    }
    if (um.indexOf('screen') >= 0) {
        if (RTCBrowserType.isChrome()) {
            constraints.video = {
                mandatory: {
                    chromeMediaSource: "screen",
                    googLeakyBucket: true,
                    maxWidth: window.screen.width,
                    maxHeight: window.screen.height,
                    maxFrameRate: 3
                },
                optional: []
            };
        } else if (RTCBrowserType.isTemasysPluginUsed()) {
            constraints.video = {
                optional: [
                    {
                        sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
                    }
                ]
            };
        } else {
            logger.error(
                "'screen' WebRTC media source is supported only in Chrome" +
                " and with Temasys plugin");
        }
    }
    if (um.indexOf('desktop') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: options.desktopStream,
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }

    if (options.bandwidth) {
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.optional.push({bandwidth: options.bandwidth});
    }
    if (options.fps) {
        // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.mandatory.minFrameRate = options.fps;
    }

    // We enable fake audio together with fake video: the fake devices only
    // seem to work when both are requested in a single getUserMedia call, so
    // we cannot request fake audio separately from fake video. This may later
    // be a problem with some of the tests.
    if (RTCBrowserType.isFirefox() && options.firefox_fake_device) {
        constraints.audio = true;
        constraints.fake = true;
    }

    return constraints;
}
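// Illustrative sketch, not part of the original module: for a call such as
// getConstraints(['audio', 'video'], {resolution: '720', micDeviceId: 'abc'})
// on Chrome, the returned object would look roughly like the structure below
// (the device id 'abc' is a made-up placeholder; the exact optional entries
// depend on the branches above):
//
//   {
//       audio: {
//           mandatory: {},
//           optional: [{sourceId: 'abc'},
//                      {googEchoCancellation: true} /* , ... more goog* */]
//       },
//       video: {
//           mandatory: {minWidth: 1280, minHeight: 720,
//                       maxWidth: 1280, maxHeight: 720},
//           optional: [{googLeakyBucket: true}]
//       }
//   }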
function setAvailableDevices(um, available) {
    if (um.indexOf("video") != -1) {
        devices.video = available;
    }
    if (um.indexOf("audio") != -1) {
        devices.audio = available;
    }
    eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, devices);
}

// In case of IE we continue from 'onReady' callback
// passed to RTCUtils constructor. It will be invoked by Temasys plugin
// once it is initialized.
function onReady () {
    rtcReady = true;
    eventEmitter.emit(RTCEvents.RTC_READY, true);
}
/**
 * Apply function with arguments if function exists.
 * Do nothing if function not provided.
 * @param {function} [fn] function to apply
 * @param {Array} [args=[]] arguments for function
 */
function maybeApply(fn, args) {
    if (fn) {
        fn.apply(null, args || []);
    }
}

var getUserMediaStatus = {
    initialized: false,
    callbacks: []
};
/**
 * Wrap `getUserMedia` to allow others to know if it was executed at least
 * once or not. Wrapper function uses `getUserMediaStatus` object.
 * @param {Function} getUserMedia native function
 * @returns {Function} wrapped function
 */
function wrapGetUserMedia(getUserMedia) {
    return function (constraints, successCallback, errorCallback) {
        getUserMedia(constraints, function (stream) {
            maybeApply(successCallback, [stream]);
            if (!getUserMediaStatus.initialized) {
                getUserMediaStatus.initialized = true;
                getUserMediaStatus.callbacks.forEach(function (callback) {
                    callback();
                });
                getUserMediaStatus.callbacks.length = 0;
            }
        }, function (error) {
            maybeApply(errorCallback, [error]);
        });
    };
}
/**
 * Create a stub device which represents the automatically selected device.
 * @param {string} kind device kind, e.g. `audioinput` or `videoinput`
 * @returns {Object} stub device description in `enumerateDevices` format
 */
function createAutoDeviceInfo(kind) {
    return {
        facing: null,
        label: 'Auto',
        kind: kind,
        deviceId: '',
        groupId: null
    };
}

/**
 * Execute function after getUserMedia was executed at least once.
 * @param {Function} callback function to execute after getUserMedia
 */
function afterUserMediaInitialized(callback) {
    if (getUserMediaStatus.initialized) {
        callback();
    } else {
        getUserMediaStatus.callbacks.push(callback);
    }
}
/**
 * Wrapper function which makes enumerateDevices wait
 * until someone executes getUserMedia for the first time.
 * @param {Function} enumerateDevices native function
 * @returns {Function} wrapped function
 */
function wrapEnumerateDevices(enumerateDevices) {
    return function (callback) {
        // enumerate devices only after initial getUserMedia
        afterUserMediaInitialized(function () {
            enumerateDevices().then(function (devices) {
                // add auto devices
                devices.unshift(
                    createAutoDeviceInfo('audioinput'),
                    createAutoDeviceInfo('videoinput')
                );
                callback(devices);
            }, function (err) {
                console.error('cannot enumerate devices: ', err);
                // return only auto devices
                callback([
                    createAutoDeviceInfo('audioinput'),
                    createAutoDeviceInfo('videoinput')
                ]);
            });
        });
    };
}
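// Usage sketch, illustrative only: the wrapped function is callback based
// (not promise based) and its callback fires only after the first
// getUserMedia call has completed, e.g.
//
//   RTCUtils.enumerateDevices(function (devices) {
//       devices.forEach(function (d) { logger.info(d.kind, d.label); });
//   });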
/**
 * Use old MediaStreamTrack to get devices list and
 * convert it to enumerateDevices format.
 * @param {Function} callback function to call when received devices list.
 */
function enumerateDevicesThroughMediaStreamTrack (callback) {
    MediaStreamTrack.getSources(function (sources) {
        var devices = sources.map(function (source) {
            var kind = (source.kind || '').toLowerCase();
            return {
                facing: source.facing || null,
                label: source.label,
                kind: kind ? kind + 'input' : null,
                deviceId: source.id,
                groupId: source.groupId || null
            };
        });

        // add auto devices
        devices.unshift(
            createAutoDeviceInfo('audioinput'),
            createAutoDeviceInfo('videoinput')
        );
        callback(devices);
    });
}
// Options parameter is to pass config options. Currently uses only "useIPv6".
var RTCUtils = {
    init: function (options) {
        var self = this;
        if (RTCBrowserType.isFirefox()) {
            var FFversion = RTCBrowserType.getFirefoxVersion();
            if (FFversion >= 40) {
                this.peerconnection = mozRTCPeerConnection;
                this.getUserMedia =
                    wrapGetUserMedia(navigator.mozGetUserMedia.bind(navigator));
                this.enumerateDevices = wrapEnumerateDevices(
                    navigator.mediaDevices.enumerateDevices.bind(
                        navigator.mediaDevices)
                );
                this.pc_constraints = {};
                this.attachMediaStream = function (element, stream) {
                    // srcObject is being standardized and FF will eventually
                    // support that unprefixed. FF also supports the
                    // "element.src = URL.createObjectURL(...)" combo, but that
                    // will be deprecated in favour of srcObject.
                    //
                    // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
                    // https://github.com/webrtc/samples/issues/302
                    if (!element[0])
                        return;
                    element[0].mozSrcObject = stream;
                    element[0].play();
                };
                this.getStreamID = function (stream) {
                    var id = stream.id;
                    if (!id) {
                        var tracks = stream.getVideoTracks();
                        if (!tracks || tracks.length === 0) {
                            tracks = stream.getAudioTracks();
                        }
                        id = tracks[0].id;
                    }
                    return SDPUtil.filter_special_chars(id);
                };
                this.getVideoSrc = function (element) {
                    if (!element)
                        return null;
                    return element.mozSrcObject;
                };
                this.setVideoSrc = function (element, src) {
                    if (element)
                        element.mozSrcObject = src;
                };
                RTCSessionDescription = mozRTCSessionDescription;
                RTCIceCandidate = mozRTCIceCandidate;
            } else {
                logger.error(
                    "Firefox version too old: " + FFversion +
                    ". Required >= 40.");
                window.location.href = 'unsupported_browser.html';
                return;
            }
        } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) {
            this.peerconnection = webkitRTCPeerConnection;
            var getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
            if (navigator.mediaDevices) {
                this.getUserMedia = wrapGetUserMedia(getUserMedia);
                this.enumerateDevices = wrapEnumerateDevices(
                    navigator.mediaDevices.enumerateDevices.bind(
                        navigator.mediaDevices)
                );
            } else {
                this.getUserMedia = getUserMedia;
                this.enumerateDevices = enumerateDevicesThroughMediaStreamTrack;
            }
            this.attachMediaStream = function (element, stream) {
                element.attr('src', webkitURL.createObjectURL(stream));
            };
            this.getStreamID = function (stream) {
                // streams from FF endpoints have the characters '{' and '}'
                // that make jQuery choke.
                return SDPUtil.filter_special_chars(stream.id);
            };
            this.getVideoSrc = function (element) {
                if (!element)
                    return null;
                return element.getAttribute("src");
            };
            this.setVideoSrc = function (element, src) {
                if (element)
                    element.setAttribute("src", src);
            };
            // DTLS should now be enabled by default but..
            this.pc_constraints = {'optional': [
                {'DtlsSrtpKeyAgreement': 'true'}
            ]};
            if (options.useIPv6) {
                // https://code.google.com/p/webrtc/issues/detail?id=2828
                this.pc_constraints.optional.push({googIPv6: true});
            }
            if (RTCBrowserType.isAndroid()) {
                this.pc_constraints = {}; // disable DTLS on Android
            }
            if (!webkitMediaStream.prototype.getVideoTracks) {
                webkitMediaStream.prototype.getVideoTracks = function () {
                    return this.videoTracks;
                };
            }
            if (!webkitMediaStream.prototype.getAudioTracks) {
                webkitMediaStream.prototype.getAudioTracks = function () {
                    return this.audioTracks;
                };
            }
        }
        // Detect IE/Safari
        else if (RTCBrowserType.isTemasysPluginUsed()) {
            //AdapterJS.WebRTCPlugin.setLogLevel(
            //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
            AdapterJS.webRTCReady(function (isPlugin) {
                self.peerconnection = RTCPeerConnection;
                self.getUserMedia = getUserMedia;
                self.enumerateDevices = enumerateDevicesThroughMediaStreamTrack;
                self.attachMediaStream = function (elSel, stream) {
                    if (stream.id === "dummyAudio"
                        || stream.id === "dummyVideo") {
                        return;
                    }
                    attachMediaStream(elSel[0], stream);
                };
                self.getStreamID = function (stream) {
                    var id = SDPUtil.filter_special_chars(stream.label);
                    return id;
                };
                self.getVideoSrc = function (element) {
                    if (!element) {
                        logger.warn("Attempt to get video SRC of null element");
                        return null;
                    }
                    var children = element.children;
                    for (var i = 0; i !== children.length; ++i) {
                        if (children[i].name === 'streamId') {
                            return children[i].value;
                        }
                    }
                    //logger.info(element.id + " SRC: " + src);
                    return null;
                };
                self.setVideoSrc = function (element, src) {
                    //logger.info("Set video src: ", element, src);
                    if (!src) {
                        logger.warn("Not attaching video stream, 'src' is null");
                        return;
                    }
                    AdapterJS.WebRTCPlugin.WaitForPluginReady();
                    var stream = AdapterJS.WebRTCPlugin.plugin
                        .getStreamWithId(AdapterJS.WebRTCPlugin.pageId, src);
                    attachMediaStream(element, stream);
                };
                onReady(isPlugin);
            });
        } else {
            try {
                logger.error('Browser does not appear to be WebRTC-capable');
            } catch (e) { }
            return;
        }

        // Call onReady() if Temasys plugin is not used
        if (!RTCBrowserType.isTemasysPluginUsed()) {
            onReady();
        }
    },
    /**
     * @param {string[]} um required user media types
     * @param {function} success_callback
     * @param {Function} failure_callback
     * @param {Object} [options] optional parameters
     * @param {string} options.resolution
     * @param {number} options.bandwidth
     * @param {number} options.fps
     * @param {string} options.desktopStream
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     */
    getUserMediaWithConstraints: function (um, success_callback,
                                           failure_callback, options) {
        options = options || {};
        var resolution = options.resolution;
        var constraints = getConstraints(um, options);
        logger.info("Get media constraints", constraints);

        try {
            this.getUserMedia(constraints,
                function (stream) {
                    logger.log('onUserMediaSuccess');
                    setAvailableDevices(um, true);
                    success_callback(stream);
                },
                function (error) {
                    setAvailableDevices(um, false);
                    logger.warn('Failed to get access to local media. Error ',
                        error, constraints);
                    if (failure_callback) {
                        failure_callback(error, resolution);
                    }
                });
        } catch (e) {
            logger.error('GUM failed: ', e);
            if (failure_callback) {
                failure_callback(e);
            }
        }
    },
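    // Usage sketch, illustrative only; must run after RTCUtils.init() has
    // selected a getUserMedia implementation, and the device id below is a
    // made-up placeholder:
    //
    //   RTCUtils.getUserMediaWithConstraints(
    //       ['audio', 'video'],
    //       function (stream) { /* wrap or attach the stream */ },
    //       function (error, resolution) { logger.error(error); },
    //       {resolution: '360', micDeviceId: 'abc'});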
    /**
     * Creates the local MediaStreams.
     * @param {Object} [options] optional parameters
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {bool} options.dontCreateJitsiTracks if <tt>true</tt>, objects with
     * the structure {stream: the MediaStream, type: "audio" or "video",
     * videoType: "camera" or "desktop"} will be returned through the Promise,
     * otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    obtainAudioAndVideoPermissions: function (options) {
        var self = this;
        options = options || {};
        return new Promise(function (resolve, reject) {
            var successCallback = function (stream) {
                var streams = self.successCallback(stream, options.resolution);
                resolve(options.dontCreateJitsiTracks ?
                    streams : self.createLocalTracks(streams));
            };

            options.devices = options.devices || ['audio', 'video'];

            if (RTCBrowserType.isFirefox()
                || RTCBrowserType.isTemasysPluginUsed()) {
                // With FF/IE we can't split the stream into audio and video
                // because FF doesn't support media stream constructors. So, we
                // need to get the audio stream separately from the video stream
                // using two distinct GUM calls. Not very user friendly :-( but
                // we don't have many other options either.
                //
                // Note that we pack those 2 streams in a single object and pass
                // it to the successCallback method.
                var obtainVideo = function (audioStream) {
                    self.getUserMediaWithConstraints(
                        ['video'],
                        function (videoStream) {
                            return successCallback({
                                audioStream: audioStream,
                                videoStream: videoStream
                            });
                        },
                        function (error, resolution) {
                            logger.error(
                                'failed to obtain video stream - stop', error);
                            self.errorCallback(error, resolve, options);
                        },
                        {resolution: options.resolution || '360',
                         cameraDeviceId: options.cameraDeviceId});
                };
                var obtainAudio = function () {
                    self.getUserMediaWithConstraints(
                        ['audio'],
                        function (audioStream) {
                            (options.devices.indexOf('video') === -1) ||
                                obtainVideo(audioStream);
                        },
                        function (error) {
                            logger.error(
                                'failed to obtain audio stream - stop', error);
                            self.errorCallback(error, resolve, options);
                        },
                        {micDeviceId: options.micDeviceId});
                };
                if (options.devices.indexOf('audio') === -1)
                    obtainVideo(null);
                else
                    obtainAudio();
            } else {
                this.getUserMediaWithConstraints(
                    options.devices,
                    function (stream) {
                        successCallback(stream);
                    },
                    function (error, resolution) {
                        self.errorCallback(error, resolve, options);
                    },
                    {resolution: options.resolution || '360',
                     cameraDeviceId: options.cameraDeviceId,
                     micDeviceId: options.micDeviceId});
            }
        }.bind(this));
    },
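    // Usage sketch, illustrative only (assumes RTCUtils.init() has already
    // run and JitsiLocalTrack objects are wanted, i.e. dontCreateJitsiTracks
    // is not set):
    //
    //   RTCUtils.obtainAudioAndVideoPermissions({
    //       devices: ['audio', 'video'],
    //       resolution: '720'
    //   }).then(function (tracks) {
    //       // tracks is an array of JitsiLocalTrack objects
    //   });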
    /**
     * Success callback called from GUM.
     * @param stream the new MediaStream
     * @param resolution the resolution of the video stream.
     * @returns {*}
     */
    successCallback: function (stream, resolution) {
        // If this is FF or IE, the stream parameter is *not* a MediaStream
        // object, it's an object with two properties: audioStream, videoStream.
        if (stream && stream.getAudioTracks && stream.getVideoTracks)
            logger.log('got', stream, stream.getAudioTracks().length,
                stream.getVideoTracks().length);
        return this.handleLocalStream(stream, resolution);
    },
    /**
     * Error callback called from GUM. Retries the GUM call with different
     * resolutions.
     * @param error the error
     * @param resolve the resolve function that will be called on success.
     * @param {Object} options with the following properties:
     * @param resolution the last resolution used for GUM.
     * @param dontCreateJitsiTracks if <tt>true</tt>, objects with the structure
     * {stream: the MediaStream, type: "audio" or "video", videoType: "camera"
     * or "desktop"} will be returned through the Promise, otherwise JitsiTrack
     * objects will be returned.
     */
    errorCallback: function (error, resolve, options) {
        var self = this;
        options = options || {};
        logger.error(
            'failed to obtain audio/video stream - trying audio only', error);
        var resolution = getPreviousResolution(options.resolution);
        if (typeof error == "object" && error.constraintName && error.name
            && (error.name == "ConstraintNotSatisfiedError" ||
                error.name == "OverconstrainedError") &&
            (error.constraintName == "minWidth" ||
             error.constraintName == "maxWidth" ||
             error.constraintName == "minHeight" ||
             error.constraintName == "maxHeight")
            && resolution != null) {
            // Retry with the next lower resolution.
            self.getUserMediaWithConstraints(['audio', 'video'],
                function (stream) {
                    var streams = self.successCallback(stream, resolution);
                    resolve(options.dontCreateJitsiTracks ?
                        streams : self.createLocalTracks(streams));
                }, function (error, resolution) {
                    return self.errorCallback(error, resolve,
                        {resolution: resolution,
                         dontCreateJitsiTracks: options.dontCreateJitsiTracks});
                },
                {resolution: resolution});
        }
        else {
            self.getUserMediaWithConstraints(
                ['audio'],
                function (stream) {
                    var streams = self.successCallback(stream, resolution);
                    resolve(options.dontCreateJitsiTracks ?
                        streams : self.createLocalTracks(streams));
                },
                function (error) {
                    logger.error('failed to obtain audio/video stream - stop',
                        error);
                    var streams = self.successCallback(null);
                    resolve(options.dontCreateJitsiTracks ?
                        streams : self.createLocalTracks(streams));
                }
            );
        }
    },
    /**
     * Handles the newly created Media Streams.
     * @param stream the new Media Streams
     * @param resolution the resolution of the video stream.
     * @returns {*[]} Promise object with the new Media Streams.
     */
    handleLocalStream: function (stream, resolution) {
        var audioStream, videoStream;
        // If this is FF, the stream parameter is *not* a MediaStream object,
        // it's an object with two properties: audioStream, videoStream.
        if (window.webkitMediaStream) {
            audioStream = new webkitMediaStream();
            videoStream = new webkitMediaStream();
            if (stream) {
                var audioTracks = stream.getAudioTracks();
                for (var i = 0; i < audioTracks.length; i++) {
                    audioStream.addTrack(audioTracks[i]);
                }
                var videoTracks = stream.getVideoTracks();
                for (i = 0; i < videoTracks.length; i++) {
                    videoStream.addTrack(videoTracks[i]);
                }
            }
        }
        else if (RTCBrowserType.isFirefox()
                 || RTCBrowserType.isTemasysPluginUsed()) {
            // Firefox and Temasys plugin
            if (stream && stream.audioStream)
                audioStream = stream.audioStream;
            else
                audioStream = new DummyMediaStream("dummyAudio");
            if (stream && stream.videoStream)
                videoStream = stream.videoStream;
            else
                videoStream = new DummyMediaStream("dummyVideo");
        }

        return [
            {stream: audioStream, type: "audio", videoType: null},
            {stream: videoStream, type: "video", videoType: "camera",
                resolution: resolution}
        ];
    },
    createStream: function (stream, isVideo) {
        var newStream = null;
        if (window.webkitMediaStream) {
            newStream = new webkitMediaStream();
            if (newStream) {
                var tracks = (isVideo ?
                    stream.getVideoTracks() : stream.getAudioTracks());
                for (var i = 0; i < tracks.length; i++) {
                    newStream.addTrack(tracks[i]);
                }
            }
        } else {
            // FIXME: this is duplicated with 'handleLocalStream' !!!
            if (stream) {
                newStream = stream;
            } else {
                newStream =
                    new DummyMediaStream(isVideo ? "dummyVideo" : "dummyAudio");
            }
        }
        return newStream;
    },
    addListener: function (eventType, listener) {
        eventEmitter.on(eventType, listener);
    },
    removeListener: function (eventType, listener) {
        eventEmitter.removeListener(eventType, listener);
    },
    getDeviceAvailability: function () {
        return devices;
    },
    isRTCReady: function () {
        return rtcReady;
    },
    createLocalTracks: function (streams) {
        var newStreams = [];
        for (var i = 0; i < streams.length; i++) {
            var localStream = new JitsiLocalTrack(null, streams[i].stream,
                eventEmitter, streams[i].videoType, streams[i].resolution);
            newStreams.push(localStream);
            if (streams[i].isMuted === true)
                localStream.setMute(true);

            var eventType = StreamEventTypes.EVENT_TYPE_LOCAL_CREATED;
            eventEmitter.emit(eventType, localStream);
        }
        return newStreams;
    },
    /**
     * Checks if it is possible to enumerate available cameras/microphones.
     * @returns {boolean} true if available, false otherwise.
     */
    isDeviceListAvailable: function () {
        var isEnumerateDevicesAvailable =
            navigator.mediaDevices && navigator.mediaDevices.enumerateDevices;
        if (isEnumerateDevicesAvailable) {
            return true;
        }
        return (MediaStreamTrack && MediaStreamTrack.getSources) ? true : false;
    }
};

module.exports = RTCUtils;