
RTCUtils.js

/* global config, require, attachMediaStream, getUserMedia,
   RTCPeerConnection, RTCSessionDescription, RTCIceCandidate, MediaStreamTrack,
   mozRTCPeerConnection, mozRTCSessionDescription, mozRTCIceCandidate,
   webkitRTCPeerConnection, webkitMediaStream, webkitURL
*/
/* jshint -W101 */
var logger = require("jitsi-meet-logger").getLogger(__filename);
var RTCBrowserType = require("./RTCBrowserType");
var Resolutions = require("../../service/RTC/Resolutions");
var RTCEvents = require("../../service/RTC/RTCEvents");
var AdapterJS = require("./adapter.screenshare");
var SDPUtil = require("../xmpp/SDPUtil");
var EventEmitter = require("events");
var screenObtainer = require("./ScreenObtainer");
var JitsiTrackErrors = require("../../JitsiTrackErrors");
var MediaType = require("../../service/RTC/MediaType");
var VideoType = require("../../service/RTC/VideoType");

var eventEmitter = new EventEmitter();

var devices = {
    audio: true,
    video: true
};

var audioOuputDeviceId = ''; // default device

var featureDetectionAudioEl = document.createElement('audio');
var isAudioOutputDeviceChangeAvailable =
    typeof featureDetectionAudioEl.setSinkId !== 'undefined';

var rtcReady = false;
function setResolutionConstraints(constraints, resolution) {
    var isAndroid = RTCBrowserType.isAndroid();

    if (Resolutions[resolution]) {
        constraints.video.mandatory.minWidth = Resolutions[resolution].width;
        constraints.video.mandatory.minHeight = Resolutions[resolution].height;
    }
    else if (isAndroid) {
        // FIXME can't remember if the purpose of this was to always request
        // low resolution on Android? if yes it should be moved up front
        constraints.video.mandatory.minWidth = 320;
        constraints.video.mandatory.minHeight = 180;
        constraints.video.mandatory.maxFrameRate = 15;
    }

    if (constraints.video.mandatory.minWidth)
        constraints.video.mandatory.maxWidth =
            constraints.video.mandatory.minWidth;
    if (constraints.video.mandatory.minHeight)
        constraints.video.mandatory.maxHeight =
            constraints.video.mandatory.minHeight;
}
/**
 * @param {string[]} um required user media types
 *
 * @param {Object} [options={}] optional parameters
 * @param {string} options.resolution
 * @param {number} options.bandwidth
 * @param {number} options.fps
 * @param {string} options.desktopStream
 * @param {string} options.cameraDeviceId
 * @param {string} options.micDeviceId
 * @param {bool} firefox_fake_device
 */
function getConstraints(um, options) {
    var constraints = {audio: false, video: false};

    if (um.indexOf('video') >= 0) {
        // same behaviour as true
        constraints.video = { mandatory: {}, optional: [] };

        if (options.cameraDeviceId) {
            // new style of setting device id (FF only)
            constraints.video.deviceId = options.cameraDeviceId;
            // old style
            constraints.video.optional.push({
                sourceId: options.cameraDeviceId
            });
        }

        constraints.video.optional.push({ googLeakyBucket: true });

        setResolutionConstraints(constraints, options.resolution);
    }
    if (um.indexOf('audio') >= 0) {
        if (!RTCBrowserType.isFirefox()) {
            // same behaviour as true
            constraints.audio = { mandatory: {}, optional: []};
            if (options.micDeviceId) {
                // new style of setting device id (FF only)
                constraints.audio.deviceId = options.micDeviceId;
                // old style
                constraints.audio.optional.push({
                    sourceId: options.micDeviceId
                });
            }
            // if it is good enough for hangouts...
            constraints.audio.optional.push(
                {googEchoCancellation: true},
                {googAutoGainControl: true},
                {googNoiseSuppression: true},
                {googHighpassFilter: true},
                {googNoiseSuppression2: true},
                {googEchoCancellation2: true},
                {googAutoGainControl2: true}
            );
        } else {
            if (options.micDeviceId) {
                constraints.audio = {
                    mandatory: {},
                    deviceId: options.micDeviceId, // new style
                    optional: [{
                        sourceId: options.micDeviceId // old style
                    }]};
            } else {
                constraints.audio = true;
            }
        }
    }
    if (um.indexOf('screen') >= 0) {
        if (RTCBrowserType.isChrome()) {
            constraints.video = {
                mandatory: {
                    chromeMediaSource: "screen",
                    googLeakyBucket: true,
                    maxWidth: window.screen.width,
                    maxHeight: window.screen.height,
                    maxFrameRate: 3
                },
                optional: []
            };
        } else if (RTCBrowserType.isTemasysPluginUsed()) {
            constraints.video = {
                optional: [
                    {
                        sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
                    }
                ]
            };
        } else if (RTCBrowserType.isFirefox()) {
            constraints.video = {
                mozMediaSource: "window",
                mediaSource: "window"
            };
        } else {
            logger.error(
                "'screen' WebRTC media source is supported only in Chrome" +
                " and with Temasys plugin");
        }
    }
    if (um.indexOf('desktop') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: options.desktopStream,
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }

    if (options.bandwidth) {
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.optional.push({bandwidth: options.bandwidth});
    }
    if (options.minFps || options.maxFps || options.fps) {
        // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        if (options.minFps || options.fps) {
            // Fall back to options.fps for backwards compatibility.
            options.minFps = options.minFps || options.fps;
            constraints.video.mandatory.minFrameRate = options.minFps;
        }
        if (options.maxFps) {
            constraints.video.mandatory.maxFrameRate = options.maxFps;
        }
    }

    // We turn on audio for both audio and video tracks: the fake audio and
    // video seem to work only when enabled in a single getUserMedia call, so
    // we cannot get fake audio separately from fake video. This may later be
    // a problem with some of the tests.
    if (RTCBrowserType.isFirefox() && options.firefox_fake_device) {
        // Seems to be fixed now; the experimental fix was removed, as having
        // multiple audio tracks breaks the tests.
        //constraints.audio = true;
        constraints.fake = true;
    }

    return constraints;
}
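
/*
 * For illustration only: a rough sketch of what the function above builds for
 * um = ['audio', 'video'] on a Chrome-like browser, assuming the Resolutions
 * service maps '720' to 1280x720 (the exact mapping and any device ids depend
 * on the caller's config):
 *
 *   getConstraints(['audio', 'video'], { resolution: '720' });
 *   // => {
 *   //      audio: { mandatory: {},
 *   //               optional: [ {googEchoCancellation: true},
 *   //                           {googAutoGainControl: true}, ... ] },
 *   //      video: { mandatory: { minWidth: 1280, minHeight: 720,
 *   //                            maxWidth: 1280, maxHeight: 720 },
 *   //               optional: [ {googLeakyBucket: true} ] }
 *   //    }
 */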
function setAvailableDevices(um, available) {
    if (um.indexOf("video") != -1) {
        devices.video = available;
    }
    if (um.indexOf("audio") != -1) {
        devices.audio = available;
    }

    eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, devices);
}
// In the case of IE we continue from the 'onReady' callback passed to the
// RTCUtils constructor. It will be invoked by the Temasys plugin once it is
// initialized.
function onReady (options, GUM) {
    rtcReady = true;
    eventEmitter.emit(RTCEvents.RTC_READY, true);
    screenObtainer.init(options, GUM);
}
/**
 * Apply function with arguments if function exists.
 * Do nothing if function not provided.
 * @param {function} [fn] function to apply
 * @param {Array} [args=[]] arguments for function
 */
function maybeApply(fn, args) {
    if (fn) {
        fn.apply(null, args || []);
    }
}

var getUserMediaStatus = {
    initialized: false,
    callbacks: []
};
/**
 * Wrap `getUserMedia` to allow others to know if it was executed at least
 * once or not. Wrapper function uses `getUserMediaStatus` object.
 * @param {Function} getUserMedia native function
 * @returns {Function} wrapped function
 */
function wrapGetUserMedia(getUserMedia) {
    return function (constraints, successCallback, errorCallback) {
        getUserMedia(constraints, function (stream) {
            maybeApply(successCallback, [stream]);
            if (!getUserMediaStatus.initialized) {
                getUserMediaStatus.initialized = true;
                getUserMediaStatus.callbacks.forEach(function (callback) {
                    callback();
                });
                getUserMediaStatus.callbacks.length = 0;
            }
        }, function (error) {
            maybeApply(errorCallback, [error]);
        });
    };
}
/**
 * Creates a stub device which represents the automatically selected device.
 * @param {string} kind whether this should be an `audio` or `video` device
 * @returns {Object} stub device description in `enumerateDevices` format
 */
function createAutoDeviceInfo(kind) {
    return {
        facing: null,
        label: 'Auto',
        kind: kind,
        deviceId: '',
        groupId: ''
    };
}
/**
 * Execute function after getUserMedia was executed at least once.
 * @param {Function} callback function to execute after getUserMedia
 */
function afterUserMediaInitialized(callback) {
    if (getUserMediaStatus.initialized) {
        callback();
    } else {
        getUserMediaStatus.callbacks.push(callback);
    }
}
/**
 * Wrapper function which makes enumerateDevices wait
 * until someone executes getUserMedia for the first time.
 * @param {Function} enumerateDevices native function
 * @returns {Function} wrapped function
 */
function wrapEnumerateDevices(enumerateDevices) {
    return function (callback) {
        // enumerate devices only after the initial getUserMedia
        afterUserMediaInitialized(function () {
            enumerateDevices().then(function (devices) {
                // add auto devices
                devices.unshift(
                    createAutoDeviceInfo('audioinput'),
                    createAutoDeviceInfo('videoinput'),
                    createAutoDeviceInfo('audiooutput')
                );
                callback(devices);
            }, function (err) {
                console.error('cannot enumerate devices: ', err);
                // return only auto devices
                callback([
                    createAutoDeviceInfo('audioinput'),
                    createAutoDeviceInfo('videoinput'),
                    createAutoDeviceInfo('audiooutput')
                ]);
            });
        });
    };
}
/**
 * Use the old MediaStreamTrack API to get the devices list and
 * convert it to the enumerateDevices format.
 * @param {Function} callback function to call with the received devices list.
 */
function enumerateDevicesThroughMediaStreamTrack (callback) {
    MediaStreamTrack.getSources(function (sources) {
        var devices = sources.map(function (source) {
            var kind = (source.kind || '').toLowerCase();
            return {
                facing: source.facing || null,
                label: source.label,
                // theoretically the deprecated MediaStreamTrack.getSources
                // should not return 'audiooutput' devices but let's handle it
                // in any case
                kind: kind
                    ? (kind === 'audiooutput' ? kind : kind + 'input')
                    : null,
                deviceId: source.id,
                groupId: source.groupId || null
            };
        });

        // add auto devices
        devices.unshift(
            createAutoDeviceInfo('audioinput'),
            createAutoDeviceInfo('videoinput'),
            createAutoDeviceInfo('audiooutput')
        );
        callback(devices);
    });
}
function obtainDevices(options) {
    if (!options.devices || options.devices.length === 0) {
        return options.successCallback(options.streams || {});
    }

    var device = options.devices.splice(0, 1);
    var devices = [];
    devices.push(device);
    options.deviceGUM[device](function (stream) {
            options.streams = options.streams || {};
            options.streams[device] = stream;
            obtainDevices(options);
        },
        function (error) {
            Object.keys(options.streams).forEach(function (device) {
                RTCUtils.stopMediaStream(options.streams[device]);
            });
            logger.error(
                "failed to obtain " + device + " stream - stop", error);
            options.errorCallback(JitsiTrackErrors.parseError(error, devices));
        });
}
/**
 * Handles the newly created Media Streams.
 * @param streams the new Media Streams
 * @param resolution the resolution of the video streams
 * @returns {*[]} array of objects that describe the new streams
 */
function handleLocalStream(streams, resolution) {
    var audioStream, videoStream, desktopStream, res = [];
    // If this is FF, the stream parameter is *not* a MediaStream object, it's
    // an object with two properties: audioStream, videoStream.
    if (window.webkitMediaStream) {
        var audioVideo = streams.audioVideo;
        if (audioVideo) {
            var audioTracks = audioVideo.getAudioTracks();
            if (audioTracks.length) {
                audioStream = new webkitMediaStream();
                for (var i = 0; i < audioTracks.length; i++) {
                    audioStream.addTrack(audioTracks[i]);
                }
            }

            var videoTracks = audioVideo.getVideoTracks();
            if (videoTracks.length) {
                videoStream = new webkitMediaStream();
                for (var j = 0; j < videoTracks.length; j++) {
                    videoStream.addTrack(videoTracks[j]);
                }
            }
        }

        // FIXME Checking streams here is unnecessary because there's
        // streams.audioVideo above.
        if (streams)
            desktopStream = streams.desktopStream;
    }
    else if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) { // Firefox and Temasys plugin
        if (streams) {
            audioStream = streams.audio;
            videoStream = streams.video;
            desktopStream = streams.desktop;
        }
    }

    if (desktopStream)
        res.push({
            stream: desktopStream,
            track: desktopStream.getVideoTracks()[0],
            mediaType: MediaType.VIDEO,
            videoType: VideoType.DESKTOP
        });

    if (audioStream)
        res.push({
            stream: audioStream,
            track: audioStream.getAudioTracks()[0],
            mediaType: MediaType.AUDIO,
            videoType: null
        });

    if (videoStream)
        res.push({
            stream: videoStream,
            track: videoStream.getVideoTracks()[0],
            mediaType: MediaType.VIDEO,
            videoType: VideoType.CAMERA,
            resolution: resolution
        });

    return res;
}
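
/*
 * For reference, the descriptors pushed above give the resolved value roughly
 * this shape (one entry per obtained stream; a desktop stream, when present,
 * comes first with videoType: VideoType.DESKTOP):
 *
 *   [
 *     { stream: <MediaStream>, track: <MediaStreamTrack>,
 *       mediaType: MediaType.AUDIO, videoType: null },
 *     { stream: <MediaStream>, track: <MediaStreamTrack>,
 *       mediaType: MediaType.VIDEO, videoType: VideoType.CAMERA,
 *       resolution: resolution }
 *   ]
 */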
/**
 * Wraps original attachMediaStream function to set current audio output device
 * if this is supported.
 * @param {Function} origAttachMediaStream
 * @returns {Function}
 */
function wrapAttachMediaStream(origAttachMediaStream) {
    return function (element, stream) {
        var res = origAttachMediaStream.apply(RTCUtils, arguments);

        if (RTCUtils.isDeviceChangeAvailable('output') &&
            stream.getAudioTracks && stream.getAudioTracks().length) {
            element.setSinkId(RTCUtils.getAudioOutputDevice())
                .catch(function (ex) {
                    logger.error('Failed to set audio output on element',
                        element, ex);
                });
        }

        return res;
    };
}
//Options parameter is to pass config options. Currently uses only "useIPv6".
var RTCUtils = {
    init: function (options) {
        return new Promise(function(resolve, reject) {
            if (RTCBrowserType.isFirefox()) {
                var FFversion = RTCBrowserType.getFirefoxVersion();
                if (FFversion < 40) {
                    logger.error(
                        "Firefox version too old: " + FFversion +
                        ". Required >= 40.");
                    reject(new Error("Firefox version too old: " + FFversion +
                        ". Required >= 40."));
                    return;
                }
                this.peerconnection = mozRTCPeerConnection;
                this.getUserMedia = wrapGetUserMedia(navigator.mozGetUserMedia.bind(navigator));
                this.enumerateDevices = wrapEnumerateDevices(
                    navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices)
                );
                this.pc_constraints = {};
                this.attachMediaStream = wrapAttachMediaStream(function (element, stream) {
                    // srcObject is being standardized and FF will eventually
                    // support that unprefixed. FF also supports the
                    // "element.src = URL.createObjectURL(...)" combo, but that
                    // will be deprecated in favour of srcObject.
                    //
                    // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
                    // https://github.com/webrtc/samples/issues/302
                    if (!element)
                        return;
                    element.mozSrcObject = stream;
                    element.play();

                    return element;
                });
                this.getStreamID = function (stream) {
                    var id = stream.id;
                    if (!id) {
                        var tracks = stream.getVideoTracks();
                        if (!tracks || tracks.length === 0) {
                            tracks = stream.getAudioTracks();
                        }
                        id = tracks[0].id;
                    }
                    return SDPUtil.filter_special_chars(id);
                };
                this.getVideoSrc = function (element) {
                    if (!element)
                        return null;
                    return element.mozSrcObject;
                };
                this.setVideoSrc = function (element, src) {
                    if (element)
                        element.mozSrcObject = src;
                };
                RTCSessionDescription = mozRTCSessionDescription;
                RTCIceCandidate = mozRTCIceCandidate;
            } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera() || RTCBrowserType.isNWJS()) {
                this.peerconnection = webkitRTCPeerConnection;
                var getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
                if (navigator.mediaDevices) {
                    this.getUserMedia = wrapGetUserMedia(getUserMedia);
                    this.enumerateDevices = wrapEnumerateDevices(
                        navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices)
                    );
                } else {
                    this.getUserMedia = getUserMedia;
                    this.enumerateDevices = enumerateDevicesThroughMediaStreamTrack;
                }
                this.attachMediaStream = wrapAttachMediaStream(function (element, stream) {
                    // saves the created url for the stream, so we can reuse it
                    // and not keep creating urls
                    if (!stream.jitsiObjectURL) {
                        stream.jitsiObjectURL
                            = webkitURL.createObjectURL(stream);
                    }
                    element.src = stream.jitsiObjectURL;

                    return element;
                });
                this.getStreamID = function (stream) {
                    // streams from FF endpoints have the characters '{' and '}'
                    // that make jQuery choke.
                    return SDPUtil.filter_special_chars(stream.id);
                };
                this.getVideoSrc = function (element) {
                    if (!element)
                        return null;
                    return element.getAttribute("src");
                };
                this.setVideoSrc = function (element, src) {
                    if (!src) {
                        src = '';
                    }
                    if (element)
                        element.setAttribute("src", src);
                };
                // DTLS should now be enabled by default but..
                this.pc_constraints = {'optional': [
                    {'DtlsSrtpKeyAgreement': 'true'}
                ]};
                if (options.useIPv6) {
                    // https://code.google.com/p/webrtc/issues/detail?id=2828
                    this.pc_constraints.optional.push({googIPv6: true});
                }
                if (RTCBrowserType.isAndroid()) {
                    this.pc_constraints = {}; // disable DTLS on Android
                }
                if (!webkitMediaStream.prototype.getVideoTracks) {
                    webkitMediaStream.prototype.getVideoTracks = function () {
                        return this.videoTracks;
                    };
                }
                if (!webkitMediaStream.prototype.getAudioTracks) {
                    webkitMediaStream.prototype.getAudioTracks = function () {
                        return this.audioTracks;
                    };
                }
            }
            // Detect IE/Safari
            else if (RTCBrowserType.isTemasysPluginUsed()) {
                //AdapterJS.WebRTCPlugin.setLogLevel(
                //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
                var self = this;
                AdapterJS.webRTCReady(function (isPlugin) {
                    self.peerconnection = RTCPeerConnection;
                    self.getUserMedia = window.getUserMedia;
                    self.enumerateDevices = enumerateDevicesThroughMediaStreamTrack;
                    self.attachMediaStream = wrapAttachMediaStream(function (element, stream) {
                        if (stream.id === "dummyAudio" || stream.id === "dummyVideo") {
                            return;
                        }

                        var isVideoStream = !!stream.getVideoTracks().length;
                        if (isVideoStream && !$(element).is(':visible')) {
                            throw new Error('video element must be visible to attach video stream');
                        }

                        return attachMediaStream(element, stream);
                    });
                    self.getStreamID = function (stream) {
                        return SDPUtil.filter_special_chars(stream.label);
                    };
                    self.getVideoSrc = function (element) {
                        if (!element) {
                            logger.warn("Attempt to get video SRC of null element");
                            return null;
                        }
                        var children = element.children;
                        for (var i = 0; i !== children.length; ++i) {
                            if (children[i].name === 'streamId') {
                                return children[i].value;
                            }
                        }
                        //logger.info(element.id + " SRC: " + src);
                        return null;
                    };
                    self.setVideoSrc = function (element, src) {
                        //logger.info("Set video src: ", element, src);
                        if (!src) {
                            attachMediaStream(element, null);
                        } else {
                            AdapterJS.WebRTCPlugin.WaitForPluginReady();
                            var stream
                                = AdapterJS.WebRTCPlugin.plugin
                                    .getStreamWithId(
                                        AdapterJS.WebRTCPlugin.pageId, src);
                            attachMediaStream(element, stream);
                        }
                    };

                    onReady(options, self.getUserMediaWithConstraints);
                    resolve();
                });
            } else {
                try {
                    logger.error('Browser does not appear to be WebRTC-capable');
                } catch (e) {
                }
                reject('Browser does not appear to be WebRTC-capable');
                return;
            }

            // Call onReady() if Temasys plugin is not used
            if (!RTCBrowserType.isTemasysPluginUsed()) {
                onReady(options, this.getUserMediaWithConstraints);
                resolve();
            }
        }.bind(this));
    },
    /**
     * @param {string[]} um required user media types
     * @param {function} success_callback
     * @param {Function} failure_callback
     * @param {Object} [options] optional parameters
     * @param {string} options.resolution
     * @param {number} options.bandwidth
     * @param {number} options.fps
     * @param {string} options.desktopStream
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     **/
    getUserMediaWithConstraints: function (um, success_callback, failure_callback, options) {
        options = options || {};
        var resolution = options.resolution;
        var constraints = getConstraints(um, options);
        logger.info("Get media constraints", constraints);

        try {
            this.getUserMedia(constraints,
                function (stream) {
                    logger.log('onUserMediaSuccess');
                    setAvailableDevices(um, true);
                    success_callback(stream);
                },
                function (error) {
                    setAvailableDevices(um, false);
                    logger.warn('Failed to get access to local media. Error ',
                        error, constraints);
                    if (failure_callback) {
                        failure_callback(error, resolution);
                    }
                });
        } catch (e) {
            logger.error('GUM failed: ', e);
            if (failure_callback) {
                failure_callback(e);
            }
        }
    },
    /**
     * Creates the local MediaStreams.
     * @param {Object} [options] optional parameters
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {bool} options.dontCreateJitsiTrack if <tt>true</tt> objects with
     * the following structure {stream: the Media Stream, type: "audio" or
     * "video", videoType: "camera" or "desktop"} will be returned through the
     * Promise, otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    obtainAudioAndVideoPermissions: function (options) {
        var self = this;

        options = options || {};
        return new Promise(function (resolve, reject) {
            var successCallback = function (stream) {
                resolve(handleLocalStream(stream, options.resolution));
            };

            options.devices = options.devices || ['audio', 'video'];
            if (!screenObtainer.isSupported()
                && options.devices.indexOf("desktop") !== -1) {
                reject(new Error("Desktop sharing is not supported!"));
            }
            if (RTCBrowserType.isFirefox() ||
                    RTCBrowserType.isTemasysPluginUsed()) {
                var GUM = function (device, s, e) {
                    this.getUserMediaWithConstraints(device, s, e, options);
                };
                var deviceGUM = {
                    "audio": GUM.bind(self, ["audio"]),
                    "video": GUM.bind(self, ["video"])
                };
                if (screenObtainer.isSupported()) {
                    deviceGUM["desktop"] = screenObtainer.obtainStream.bind(
                        screenObtainer);
                }
                // With FF/IE we can't split the stream into audio and video
                // because FF doesn't support media stream constructors. So, we
                // need to get the audio stream separately from the video stream
                // using two distinct GUM calls. Not very user friendly :-( but
                // we don't have many other options either.
                //
                // Note that we pack those 2 streams in a single object and pass
                // it to the successCallback method.
                obtainDevices({
                    devices: options.devices,
                    streams: [],
                    successCallback: successCallback,
                    errorCallback: reject,
                    deviceGUM: deviceGUM
                });
            } else {
                var hasDesktop = options.devices.indexOf('desktop') > -1;
                if (hasDesktop) {
                    options.devices.splice(options.devices.indexOf("desktop"), 1);
                }
                options.resolution = options.resolution || '360';
                if (options.devices.length) {
                    this.getUserMediaWithConstraints(
                        options.devices,
                        function (stream) {
                            if ((options.devices.indexOf("audio") !== -1 &&
                                !stream.getAudioTracks().length) ||
                                (options.devices.indexOf("video") !== -1 &&
                                !stream.getVideoTracks().length)) {
                                self.stopMediaStream(stream);
                                reject(JitsiTrackErrors.parseError(
                                    new Error("Unable to get the audio and " +
                                        "video tracks."),
                                    options.devices));
                                return;
                            }
                            if (hasDesktop) {
                                screenObtainer.obtainStream(
                                    function (desktopStream) {
                                        successCallback({audioVideo: stream,
                                            desktopStream: desktopStream});
                                    }, function (error) {
                                        self.stopMediaStream(stream);
                                        reject(
                                            JitsiTrackErrors.parseError(error,
                                                options.devices));
                                    });
                            } else {
                                successCallback({audioVideo: stream});
                            }
                        },
                        function (error) {
                            reject(JitsiTrackErrors.parseError(error,
                                options.devices));
                        },
                        options);
                } else if (hasDesktop) {
                    screenObtainer.obtainStream(
                        function (stream) {
                            successCallback({desktopStream: stream});
                        }, function (error) {
                            reject(
                                JitsiTrackErrors.parseError(error,
                                    ["desktop"]));
                        });
                }
            }
        }.bind(this));
    },
    addListener: function (eventType, listener) {
        eventEmitter.on(eventType, listener);
    },
    removeListener: function (eventType, listener) {
        eventEmitter.removeListener(eventType, listener);
    },
    getDeviceAvailability: function () {
        return devices;
    },
    isRTCReady: function () {
        return rtcReady;
    },
    /**
     * Checks if it is possible to enumerate available cameras/microphones.
     * @returns {boolean} true if available, false otherwise.
     */
    isDeviceListAvailable: function () {
        var isEnumerateDevicesAvailable
            = navigator.mediaDevices && navigator.mediaDevices.enumerateDevices;
        if (isEnumerateDevicesAvailable) {
            return true;
        }
        return (MediaStreamTrack && MediaStreamTrack.getSources) ? true : false;
    },
    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] - type of device to change. Default is
     * undefined or 'input', 'output' - for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    isDeviceChangeAvailable: function (deviceType) {
        return deviceType === 'output' || deviceType === 'audiooutput'
            ? isAudioOutputDeviceChangeAvailable
            : RTCBrowserType.isChrome() ||
                RTCBrowserType.isFirefox() ||
                RTCBrowserType.isOpera() ||
                RTCBrowserType.isTemasysPluginUsed();
    },
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    stopMediaStream: function (mediaStream) {
        mediaStream.getTracks().forEach(function (track) {
            // stop() not supported with IE
            if (!RTCBrowserType.isTemasysPluginUsed() && track.stop) {
                track.stop();
            }
        });

        // leave stop for implementation still using it
        if (mediaStream.stop) {
            mediaStream.stop();
        }

        // if we have done createObjectURL, lets clean it
        if (mediaStream.jitsiObjectURL) {
            webkitURL.revokeObjectURL(mediaStream.jitsiObjectURL);
        }
    },
    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    isDesktopSharingEnabled: function () {
        return screenObtainer.isSupported();
    },
    /**
     * Sets current audio output device.
     * @param {string} deviceId - id of 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices(), '' for default device
     * @returns {Promise} - resolves when audio output is changed, is rejected
     * otherwise
     */
    setAudioOutputDevice: function (deviceId) {
        if (!this.isDeviceChangeAvailable('output')) {
            return Promise.reject(
                new Error('Audio output device change is not supported'));
        }

        return featureDetectionAudioEl.setSinkId(deviceId)
            .then(function () {
                audioOuputDeviceId = deviceId;

                logger.log('Audio output device set to ' + deviceId);

                eventEmitter.emit(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                    deviceId);
            });
    },
    /**
     * Returns currently used audio output device id, '' stands for default
     * device.
     * @returns {string}
     */
    getAudioOutputDevice: function () {
        return audioOuputDeviceId;
    }
};
module.exports = RTCUtils;
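
/*
 * Minimal consumption sketch (assumed caller-side code; the option values are
 * illustrative, not prescribed by this module):
 *
 *   var RTCUtils = require('./RTCUtils');
 *
 *   RTCUtils.init({ useIPv6: false })
 *       .then(function () {
 *           return RTCUtils.obtainAudioAndVideoPermissions(
 *               { devices: ['audio', 'video'], resolution: '360' });
 *       })
 *       .then(function (streamDescriptions) {
 *           // Array of {stream, track, mediaType, videoType, ...} objects
 *           // as produced by handleLocalStream() above.
 *       })
 *       .catch(function (error) {
 *           // e.g. "Desktop sharing is not supported!" or a parsed GUM error.
 *       });
 */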