/* global config, require, attachMediaStream, getUserMedia,
   RTCPeerConnection, RTCSessionDescription, RTCIceCandidate, MediaStreamTrack,
   mozRTCPeerConnection, mozRTCSessionDescription, mozRTCIceCandidate,
   webkitRTCPeerConnection, webkitMediaStream, webkitURL
*/
/* jshint -W101 */
var logger = require("jitsi-meet-logger").getLogger(__filename);
var RTCBrowserType = require("./RTCBrowserType");
var Resolutions = require("../../service/RTC/Resolutions");
var RTCEvents = require("../../service/RTC/RTCEvents");
var AdapterJS = require("./adapter.screenshare");
var SDPUtil = require("../xmpp/SDPUtil");
var EventEmitter = require("events");
var screenObtainer = require("./ScreenObtainer");
var JitsiTrackErrors = require("../../JitsiTrackErrors");
var MediaType = require("../../service/RTC/MediaType");
var VideoType = require("../../service/RTC/VideoType");
var eventEmitter = new EventEmitter();
var devices = {
    audio: true,
    video: true
};
var audioOutputDeviceId = ''; // default device
var featureDetectionVideoEl = document.createElement('video');
var rtcReady = false;
function setResolutionConstraints(constraints, resolution) {
    var isAndroid = RTCBrowserType.isAndroid();
    if (Resolutions[resolution]) {
        constraints.video.mandatory.minWidth = Resolutions[resolution].width;
        constraints.video.mandatory.minHeight = Resolutions[resolution].height;
    }
    else if (isAndroid) {
        // FIXME can't remember if the purpose of this was to always request
        // low resolution on Android ? if yes it should be moved up front
        constraints.video.mandatory.minWidth = 320;
        constraints.video.mandatory.minHeight = 180;
        constraints.video.mandatory.maxFrameRate = 15;
    }
    if (constraints.video.mandatory.minWidth)
        constraints.video.mandatory.maxWidth =
            constraints.video.mandatory.minWidth;
    if (constraints.video.mandatory.minHeight)
        constraints.video.mandatory.maxHeight =
            constraints.video.mandatory.minHeight;
}
/**
 * @param {string[]} um required user media types
 *
 * @param {Object} [options={}] optional parameters
 * @param {string} options.resolution
 * @param {number} options.bandwidth
 * @param {number} options.fps
 * @param {string} options.desktopStream
 * @param {string} options.cameraDeviceId
 * @param {string} options.micDeviceId
 * @param {boolean} options.firefox_fake_device
 */
function getConstraints(um, options) {
    var constraints = {audio: false, video: false};
    if (um.indexOf('video') >= 0) {
        // same behaviour as true
        constraints.video = { mandatory: {}, optional: [] };
        if (options.cameraDeviceId) {
            // new style of setting device id (FF only)
            constraints.video.deviceId = options.cameraDeviceId;
            // old style
            constraints.video.optional.push({
                sourceId: options.cameraDeviceId
            });
        }
        constraints.video.optional.push({ googLeakyBucket: true });
        setResolutionConstraints(constraints, options.resolution);
    }
    if (um.indexOf('audio') >= 0) {
        if (!RTCBrowserType.isFirefox()) {
            // same behaviour as true
            constraints.audio = { mandatory: {}, optional: []};
            if (options.micDeviceId) {
                // new style of setting device id (FF only)
                constraints.audio.deviceId = options.micDeviceId;
                // old style
                constraints.audio.optional.push({
                    sourceId: options.micDeviceId
                });
            }
            // if it is good enough for hangouts...
            constraints.audio.optional.push(
                {googEchoCancellation: true},
                {googAutoGainControl: true},
                {googNoiseSuppression: true},
                {googHighpassFilter: true},
                {googNoiseSuppression2: true},
                {googEchoCancellation2: true},
                {googAutoGainControl2: true}
            );
        } else {
            if (options.micDeviceId) {
                constraints.audio = {
                    mandatory: {},
                    deviceId: options.micDeviceId, // new style
                    optional: [{
                        sourceId: options.micDeviceId // old style
                    }]};
            } else {
                constraints.audio = true;
            }
        }
    }
    if (um.indexOf('screen') >= 0) {
        if (RTCBrowserType.isChrome()) {
            constraints.video = {
                mandatory: {
                    chromeMediaSource: "screen",
                    googLeakyBucket: true,
                    maxWidth: window.screen.width,
                    maxHeight: window.screen.height,
                    maxFrameRate: 3
                },
                optional: []
            };
        } else if (RTCBrowserType.isTemasysPluginUsed()) {
            constraints.video = {
                optional: [
                    {
                        sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
                    }
                ]
            };
        } else if (RTCBrowserType.isFirefox()) {
            constraints.video = {
                mozMediaSource: "window",
                mediaSource: "window"
            };
        } else {
            logger.error(
                "'screen' WebRTC media source is supported only in Chrome" +
                " and with Temasys plugin");
        }
    }
    if (um.indexOf('desktop') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: options.desktopStream,
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }
    if (options.bandwidth) {
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.optional.push({bandwidth: options.bandwidth});
    }
    if (options.minFps || options.maxFps || options.fps) {
        // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        if (options.minFps || options.fps) {
            // fall back to options.fps for backwards compatibility
            options.minFps = options.minFps || options.fps;
            constraints.video.mandatory.minFrameRate = options.minFps;
        }
        if (options.maxFps) {
            constraints.video.mandatory.maxFrameRate = options.maxFps;
        }
    }
    // We turn audio on for both audio and video tracks: the fake audio & video
    // seem to work only when enabled in a single getUserMedia call, so we
    // cannot get fake audio separately from fake video. This may later be a
    // problem with some of the tests.
    if (RTCBrowserType.isFirefox() && options.firefox_fake_device) {
        // seems to be fixed now, removing this experimental fix, as having
        // multiple audio tracks breaks the tests
        //constraints.audio = true;
        constraints.fake = true;
    }
    return constraints;
}
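/*
 * For reference, a rough sketch of what getConstraints(['audio', 'video'],
 * { resolution: '360' }) produces on a Chrome-like browser. The exact width
 * and height come from the Resolutions map; 640x360 is an assumption of this
 * illustration, not something this file guarantees.
 *
 *   {
 *       audio: {
 *           mandatory: {},
 *           optional: [
 *               {googEchoCancellation: true},
 *               {googAutoGainControl: true}
 *               // ... remaining goog* flags from above
 *           ]
 *       },
 *       video: {
 *           mandatory: {
 *               minWidth: 640, minHeight: 360,
 *               maxWidth: 640, maxHeight: 360
 *           },
 *           optional: [{ googLeakyBucket: true }]
 *       }
 *   }
 */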
function setAvailableDevices(um, available) {
    if (um.indexOf("video") != -1) {
        devices.video = available;
    }
    if (um.indexOf("audio") != -1) {
        devices.audio = available;
    }
    eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, devices);
}
// In case of IE we continue from 'onReady' callback
// passed to RTCUtils constructor. It will be invoked by Temasys plugin
// once it is initialized.
function onReady (options, GUM) {
    rtcReady = true;
    eventEmitter.emit(RTCEvents.RTC_READY, true);
    screenObtainer.init(options, GUM);
}
/**
 * Apply function with arguments if function exists.
 * Do nothing if function not provided.
 * @param {function} [fn] function to apply
 * @param {Array} [args=[]] arguments for function
 */
function maybeApply(fn, args) {
    if (fn) {
        fn.apply(null, args || []);
    }
}
var getUserMediaStatus = {
    initialized: false,
    callbacks: []
};
/**
 * Wrap `getUserMedia` to allow others to know if it was executed at least
 * once or not. Wrapper function uses `getUserMediaStatus` object.
 * @param {Function} getUserMedia native function
 * @returns {Function} wrapped function
 */
function wrapGetUserMedia(getUserMedia) {
    return function (constraints, successCallback, errorCallback) {
        getUserMedia(constraints, function (stream) {
            maybeApply(successCallback, [stream]);
            if (!getUserMediaStatus.initialized) {
                getUserMediaStatus.initialized = true;
                getUserMediaStatus.callbacks.forEach(function (callback) {
                    callback();
                });
                getUserMediaStatus.callbacks.length = 0;
            }
        }, function (error) {
            maybeApply(errorCallback, [error]);
        });
    };
}
/**
 * Creates a stub device description which represents the automatically
 * selected device.
 * @param {string} kind device kind, e.g. 'audioinput' or 'videoinput'
 * @returns {Object} stub device description in `enumerateDevices` format
 */
function createAutoDeviceInfo(kind) {
    return {
        facing: null,
        label: 'Auto',
        kind: kind,
        deviceId: '',
        groupId: ''
    };
}
/**
 * Execute function after getUserMedia was executed at least once.
 * @param {Function} callback function to execute after getUserMedia
 */
function afterUserMediaInitialized(callback) {
    if (getUserMediaStatus.initialized) {
        callback();
    } else {
        getUserMediaStatus.callbacks.push(callback);
    }
}
/**
 * Wrapper function which makes enumerateDevices wait
 * until someone executes getUserMedia for the first time.
 * @param {Function} enumerateDevices native function
 * @returns {Function} wrapped function
 */
function wrapEnumerateDevices(enumerateDevices) {
    return function (callback) {
        // enumerate devices only after initial getUserMedia
        afterUserMediaInitialized(function () {
            enumerateDevices().then(function (devices) {
                // add auto devices
                devices.unshift(
                    createAutoDeviceInfo('audioinput'),
                    createAutoDeviceInfo('videoinput'),
                    createAutoDeviceInfo('audiooutput')
                );
                callback(devices);
            }, function (err) {
                console.error('cannot enumerate devices: ', err);
                // return only auto devices
                callback([
                    createAutoDeviceInfo('audioinput'),
                    createAutoDeviceInfo('videoinput'),
                    createAutoDeviceInfo('audiooutput')
                ]);
            });
        });
    };
}
/**
 * Use old MediaStreamTrack to get devices list and
 * convert it to enumerateDevices format.
 * @param {Function} callback function to call when received devices list.
 */
function enumerateDevicesThroughMediaStreamTrack (callback) {
    MediaStreamTrack.getSources(function (sources) {
        var devices = sources.map(function (source) {
            var kind = (source.kind || '').toLowerCase();
            return {
                facing: source.facing || null,
                label: source.label,
                // theoretically deprecated MediaStreamTrack.getSources should
                // not return 'audiooutput' devices but let's handle it in any
                // case
                kind: kind
                    ? (kind === 'audiooutput' ? kind : kind + 'input')
                    : null,
                deviceId: source.id,
                groupId: source.groupId || null
            };
        });
        // add auto devices
        devices.unshift(
            createAutoDeviceInfo('audioinput'),
            createAutoDeviceInfo('videoinput'),
            createAutoDeviceInfo('audiooutput')
        );
        callback(devices);
    });
}
function obtainDevices(options) {
    if (!options.devices || options.devices.length === 0) {
        return options.successCallback(options.streams || {});
    }
    var device = options.devices.splice(0, 1)[0];
    var devices = [];
    devices.push(device);
    options.deviceGUM[device](function (stream) {
            options.streams = options.streams || {};
            options.streams[device] = stream;
            obtainDevices(options);
        },
        function (error) {
            Object.keys(options.streams).forEach(function(device) {
                RTCUtils.stopMediaStream(options.streams[device]);
            });
            logger.error(
                "failed to obtain " + device + " stream - stop", error);
            options.errorCallback(JitsiTrackErrors.parseError(error, devices));
        });
}
/**
 * Handles the newly created Media Streams.
 * @param streams the new Media Streams
 * @param resolution the resolution of the video streams
 * @returns {*[]} array of objects that describe the new streams
 */
function handleLocalStream(streams, resolution) {
    var audioStream, videoStream, desktopStream, res = [];
    // In the Chrome-like case the 'streams' parameter contains a combined
    // 'audioVideo' MediaStream (and optionally 'desktopStream'), while for
    // Firefox and the Temasys plugin it is an object with separate 'audio',
    // 'video' and 'desktop' streams.
    if (window.webkitMediaStream) {
        var audioVideo = streams.audioVideo;
        if (audioVideo) {
            var audioTracks = audioVideo.getAudioTracks();
            if (audioTracks.length) {
                audioStream = new webkitMediaStream();
                for (var i = 0; i < audioTracks.length; i++) {
                    audioStream.addTrack(audioTracks[i]);
                }
            }
            var videoTracks = audioVideo.getVideoTracks();
            if (videoTracks.length) {
                videoStream = new webkitMediaStream();
                for (var j = 0; j < videoTracks.length; j++) {
                    videoStream.addTrack(videoTracks[j]);
                }
            }
        }
        // FIXME Checking streams here is unnecessary because there's
        // streams.audioVideo above.
        if (streams)
            desktopStream = streams.desktopStream;
    }
    else if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) {
        if (streams) {
            audioStream = streams.audio;
            videoStream = streams.video;
            desktopStream = streams.desktop;
        }
    }
    if (desktopStream)
        res.push({
            stream: desktopStream,
            track: desktopStream.getVideoTracks()[0],
            mediaType: MediaType.VIDEO,
            videoType: VideoType.DESKTOP
        });
    if (audioStream)
        res.push({
            stream: audioStream,
            track: audioStream.getAudioTracks()[0],
            mediaType: MediaType.AUDIO,
            videoType: null
        });
    if (videoStream)
        res.push({
            stream: videoStream,
            track: videoStream.getVideoTracks()[0],
            mediaType: MediaType.VIDEO,
            videoType: VideoType.CAMERA,
            resolution: resolution
        });
    return res;
}
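/*
 * For illustration, with both microphone and camera granted the array built
 * above has roughly this shape (purely descriptive, nothing here is executed):
 *
 *   [
 *       { stream: <MediaStream>, track: <audio MediaStreamTrack>,
 *         mediaType: MediaType.AUDIO, videoType: null },
 *       { stream: <MediaStream>, track: <video MediaStreamTrack>,
 *         mediaType: MediaType.VIDEO, videoType: VideoType.CAMERA,
 *         resolution: '360' }
 *   ]
 */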
/**
 * Wraps original attachMediaStream function to set current audio output device
 * if this is supported.
 * @param {Function} origAttachMediaStream
 * @returns {Function}
 */
function wrapAttachMediaStream(origAttachMediaStream) {
    return function(element, stream) {
        var res = origAttachMediaStream.apply(RTCUtils, arguments);
        if (RTCUtils.isAudioOutputDeviceChangeAvailable()) {
            element.setSinkId(RTCUtils.getAudioOutputDevice())
                .catch(function (ex) {
                    console.error('Failed to set audio output on element',
                        element, ex);
                });
        }
        return res;
    };
}
// Options parameter is to pass config options. Currently uses only "useIPv6".
var RTCUtils = {
    init: function (options) {
        return new Promise(function(resolve, reject) {
            if (RTCBrowserType.isFirefox()) {
                var FFversion = RTCBrowserType.getFirefoxVersion();
                if (FFversion < 40) {
                    logger.error(
                        "Firefox version too old: " + FFversion +
                        ". Required >= 40.");
                    reject(new Error("Firefox version too old: " + FFversion +
                        ". Required >= 40."));
                    return;
                }
                this.peerconnection = mozRTCPeerConnection;
                this.getUserMedia = wrapGetUserMedia(navigator.mozGetUserMedia.bind(navigator));
                this.enumerateDevices = wrapEnumerateDevices(
                    navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices)
                );
                this.pc_constraints = {};
                this.attachMediaStream = wrapAttachMediaStream(function (element, stream) {
                    // srcObject is being standardized and FF will eventually
                    // support that unprefixed. FF also supports the
                    // "element.src = URL.createObjectURL(...)" combo, but that
                    // will be deprecated in favour of srcObject.
                    //
                    // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
                    // https://github.com/webrtc/samples/issues/302
                    if (!element)
                        return;
                    element.mozSrcObject = stream;
                    element.play();
                    return element;
                });
                this.getStreamID = function (stream) {
                    var id = stream.id;
                    if (!id) {
                        var tracks = stream.getVideoTracks();
                        if (!tracks || tracks.length === 0) {
                            tracks = stream.getAudioTracks();
                        }
                        id = tracks[0].id;
                    }
                    return SDPUtil.filter_special_chars(id);
                };
                this.getVideoSrc = function (element) {
                    if (!element)
                        return null;
                    return element.mozSrcObject;
                };
                this.setVideoSrc = function (element, src) {
                    if (element)
                        element.mozSrcObject = src;
                };
                RTCSessionDescription = mozRTCSessionDescription;
                RTCIceCandidate = mozRTCIceCandidate;
            } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera() || RTCBrowserType.isNWJS()) {
                this.peerconnection = webkitRTCPeerConnection;
                var getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
                if (navigator.mediaDevices) {
                    this.getUserMedia = wrapGetUserMedia(getUserMedia);
                    this.enumerateDevices = wrapEnumerateDevices(
                        navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices)
                    );
                } else {
                    this.getUserMedia = getUserMedia;
                    this.enumerateDevices = enumerateDevicesThroughMediaStreamTrack;
                }
                this.attachMediaStream = wrapAttachMediaStream(function (element, stream) {
                    // saves the created url for the stream, so we can reuse it
                    // and not keep creating urls
                    if (!stream.jitsiObjectURL) {
                        stream.jitsiObjectURL
                            = webkitURL.createObjectURL(stream);
                    }
                    element.src = stream.jitsiObjectURL;
                    return element;
                });
                this.getStreamID = function (stream) {
                    // streams from FF endpoints have the characters '{' and '}'
                    // that make jQuery choke.
                    return SDPUtil.filter_special_chars(stream.id);
                };
                this.getVideoSrc = function (element) {
                    if (!element)
                        return null;
                    return element.getAttribute("src");
                };
                this.setVideoSrc = function (element, src) {
                    if (!src) {
                        src = '';
                    }
                    if (element)
                        element.setAttribute("src", src);
                };
                // DTLS should now be enabled by default but..
                this.pc_constraints = {'optional': [
                    {'DtlsSrtpKeyAgreement': 'true'}
                ]};
                if (options.useIPv6) {
                    // https://code.google.com/p/webrtc/issues/detail?id=2828
                    this.pc_constraints.optional.push({googIPv6: true});
                }
                if (RTCBrowserType.isAndroid()) {
                    this.pc_constraints = {}; // disable DTLS on Android
                }
                if (!webkitMediaStream.prototype.getVideoTracks) {
                    webkitMediaStream.prototype.getVideoTracks = function () {
                        return this.videoTracks;
                    };
                }
                if (!webkitMediaStream.prototype.getAudioTracks) {
                    webkitMediaStream.prototype.getAudioTracks = function () {
                        return this.audioTracks;
                    };
                }
            }
            // Detect IE/Safari
            else if (RTCBrowserType.isTemasysPluginUsed()) {
                //AdapterJS.WebRTCPlugin.setLogLevel(
                //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
                var self = this;
                AdapterJS.webRTCReady(function (isPlugin) {
                    self.peerconnection = RTCPeerConnection;
                    self.getUserMedia = window.getUserMedia;
                    self.enumerateDevices = enumerateDevicesThroughMediaStreamTrack;
                    self.attachMediaStream = wrapAttachMediaStream(function (element, stream) {
                        if (stream.id === "dummyAudio" || stream.id === "dummyVideo") {
                            return;
                        }
                        var isVideoStream = !!stream.getVideoTracks().length;
                        if (isVideoStream && !$(element).is(':visible')) {
                            throw new Error('video element must be visible to attach video stream');
                        }
                        return attachMediaStream(element, stream);
                    });
                    self.getStreamID = function (stream) {
                        return SDPUtil.filter_special_chars(stream.label);
                    };
                    self.getVideoSrc = function (element) {
                        if (!element) {
                            logger.warn("Attempt to get video SRC of null element");
                            return null;
                        }
                        var children = element.children;
                        for (var i = 0; i !== children.length; ++i) {
                            if (children[i].name === 'streamId') {
                                return children[i].value;
                            }
                        }
                        //logger.info(element.id + " SRC: " + src);
                        return null;
                    };
                    self.setVideoSrc = function (element, src) {
                        //logger.info("Set video src: ", element, src);
                        if (!src) {
                            attachMediaStream(element, null);
                        } else {
                            AdapterJS.WebRTCPlugin.WaitForPluginReady();
                            var stream
                                = AdapterJS.WebRTCPlugin.plugin
                                    .getStreamWithId(
                                        AdapterJS.WebRTCPlugin.pageId, src);
                            attachMediaStream(element, stream);
                        }
                    };
                    onReady(options, self.getUserMediaWithConstraints);
                    resolve();
                });
            } else {
                try {
                    logger.error('Browser does not appear to be WebRTC-capable');
                } catch (e) {
                }
                reject('Browser does not appear to be WebRTC-capable');
                return;
            }
            // Call onReady() if Temasys plugin is not used
            if (!RTCBrowserType.isTemasysPluginUsed()) {
                onReady(options, this.getUserMediaWithConstraints);
                resolve();
            }
        }.bind(this));
    },
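    /*
     * Usage sketch (illustrative only, not executed here): init() returns a
     * Promise, and RTC_READY is emitted once the selected backend is ready.
     * The config object shown is an assumption of this example; only
     * 'useIPv6' is read by init().
     *
     *   RTCUtils.addListener(RTCEvents.RTC_READY, function () {
     *       console.log('WebRTC layer is ready');
     *   });
     *   RTCUtils.init({ useIPv6: false }).then(function () {
     *       // safe to call getUserMediaWithConstraints / enumerateDevices now
     *   }, function (error) {
     *       console.error('RTC init failed', error);
     *   });
     */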
    /**
     * @param {string[]} um required user media types
     * @param {function} success_callback
     * @param {Function} failure_callback
     * @param {Object} [options] optional parameters
     * @param {string} options.resolution
     * @param {number} options.bandwidth
     * @param {number} options.fps
     * @param {string} options.desktopStream
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     **/
    getUserMediaWithConstraints: function (um, success_callback, failure_callback, options) {
        options = options || {};
        var resolution = options.resolution;
        var constraints = getConstraints(um, options);
        logger.info("Get media constraints", constraints);
        try {
            this.getUserMedia(constraints,
                function (stream) {
                    logger.log('onUserMediaSuccess');
                    setAvailableDevices(um, true);
                    success_callback(stream);
                },
                function (error) {
                    setAvailableDevices(um, false);
                    logger.warn('Failed to get access to local media. Error ',
                        error, constraints);
                    if (failure_callback) {
                        failure_callback(error, resolution);
                    }
                });
        } catch (e) {
            logger.error('GUM failed: ', e);
            if (failure_callback) {
                failure_callback(e);
            }
        }
    },
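    /*
     * Usage sketch (illustrative only): request camera and microphone with a
     * target resolution. The device ids below are assumptions and would
     * normally come from enumerateDevices().
     *
     *   RTCUtils.getUserMediaWithConstraints(
     *       ['audio', 'video'],
     *       function (stream) {
     *           // raw MediaStream (or browser-specific equivalent)
     *       },
     *       function (error, resolution) {
     *           console.error('getUserMedia failed', error, resolution);
     *       },
     *       { resolution: '360', cameraDeviceId: 'someCameraId',
     *         micDeviceId: 'someMicId' });
     */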
    /**
     * Creates the local MediaStreams.
     * @param {Object} [options] optional parameters
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {bool} options.dontCreateJitsiTrack if <tt>true</tt>, objects with
     * the following structure {stream: the Media Stream, type: "audio" or
     * "video", videoType: "camera" or "desktop"} will be returned through the
     * Promise, otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    obtainAudioAndVideoPermissions: function (options) {
        var self = this;
        options = options || {};
        return new Promise(function (resolve, reject) {
            var successCallback = function (stream) {
                resolve(handleLocalStream(stream, options.resolution));
            };
            options.devices = options.devices || ['audio', 'video'];
            if (!screenObtainer.isSupported()
                && options.devices.indexOf("desktop") !== -1) {
                reject(new Error("Desktop sharing is not supported!"));
            }
            if (RTCBrowserType.isFirefox() ||
                RTCBrowserType.isTemasysPluginUsed()) {
                var GUM = function (device, s, e) {
                    this.getUserMediaWithConstraints(device, s, e, options);
                };
                var deviceGUM = {
                    "audio": GUM.bind(self, ["audio"]),
                    "video": GUM.bind(self, ["video"])
                };
                if (screenObtainer.isSupported()) {
                    deviceGUM["desktop"] = screenObtainer.obtainStream.bind(
                        screenObtainer);
                }
                // With FF/IE we can't split the stream into audio and video
                // because FF doesn't support media stream constructors. So, we
                // need to get the audio stream separately from the video
                // stream using two distinct GUM calls. Not very user friendly
                // :-( but we don't have many other options either.
                //
                // Note that we pack those 2 streams in a single object and
                // pass it to the successCallback method.
                obtainDevices({
                    devices: options.devices,
                    streams: [],
                    successCallback: successCallback,
                    errorCallback: reject,
                    deviceGUM: deviceGUM
                });
            } else {
                var hasDesktop = options.devices.indexOf('desktop') > -1;
                if (hasDesktop) {
                    options.devices.splice(options.devices.indexOf("desktop"), 1);
                }
                options.resolution = options.resolution || '360';
                if (options.devices.length) {
                    this.getUserMediaWithConstraints(
                        options.devices,
                        function (stream) {
                            if ((options.devices.indexOf("audio") !== -1 &&
                                    !stream.getAudioTracks().length) ||
                                (options.devices.indexOf("video") !== -1 &&
                                    !stream.getVideoTracks().length)) {
                                self.stopMediaStream(stream);
                                reject(JitsiTrackErrors.parseError(
                                    new Error("Unable to get the audio and " +
                                        "video tracks."),
                                    options.devices));
                                return;
                            }
                            if (hasDesktop) {
                                screenObtainer.obtainStream(
                                    function (desktopStream) {
                                        successCallback({audioVideo: stream,
                                            desktopStream: desktopStream});
                                    }, function (error) {
                                        self.stopMediaStream(stream);
                                        reject(
                                            JitsiTrackErrors.parseError(error,
                                                options.devices));
                                    });
                            } else {
                                successCallback({audioVideo: stream});
                            }
                        },
                        function (error) {
                            reject(JitsiTrackErrors.parseError(error,
                                options.devices));
                        },
                        options);
                } else if (hasDesktop) {
                    screenObtainer.obtainStream(
                        function (stream) {
                            successCallback({desktopStream: stream});
                        }, function (error) {
                            reject(
                                JitsiTrackErrors.parseError(error,
                                    ["desktop"]));
                        });
                }
            }
        }.bind(this));
    },
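    /*
     * Usage sketch (illustrative only): the resolved value is the array built
     * by handleLocalStream(), i.e. one descriptor per obtained track.
     *
     *   RTCUtils.obtainAudioAndVideoPermissions(
     *       { devices: ['audio', 'video'], resolution: '360' })
     *       .then(function (tracks) {
     *           tracks.forEach(function (t) {
     *               console.log(t.mediaType, t.videoType, t.stream, t.track);
     *           });
     *       })
     *       .catch(function (error) {
     *           console.error('Failed to obtain local media', error);
     *       });
     */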
    addListener: function (eventType, listener) {
        eventEmitter.on(eventType, listener);
    },
    removeListener: function (eventType, listener) {
        eventEmitter.removeListener(eventType, listener);
    },
    getDeviceAvailability: function () {
        return devices;
    },
    isRTCReady: function () {
        return rtcReady;
    },
    /**
     * Checks if it is possible to enumerate the available cameras/microphones.
     * @returns {boolean} true if available, false otherwise.
     */
    isDeviceListAvailable: function () {
        var isEnumerateDevicesAvailable
            = navigator.mediaDevices && navigator.mediaDevices.enumerateDevices;
        if (isEnumerateDevicesAvailable) {
            return true;
        }
        return (MediaStreamTrack && MediaStreamTrack.getSources) ? true : false;
    },
    /**
     * Returns true if changing the camera / microphone device is supported and
     * false if not.
     */
    isDeviceChangeAvailable: function () {
        return RTCBrowserType.isChrome() ||
            RTCBrowserType.isFirefox() ||
            RTCBrowserType.isOpera() ||
            RTCBrowserType.isTemasysPluginUsed();
    },
    /**
     * Returns true if changing the audio output of media elements is supported
     * and false if not.
     */
    isAudioOutputDeviceChangeAvailable: function () {
        return typeof featureDetectionVideoEl.setSinkId !== 'undefined';
    },
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    stopMediaStream: function (mediaStream) {
        mediaStream.getTracks().forEach(function (track) {
            // stop() not supported with IE
            if (!RTCBrowserType.isTemasysPluginUsed() && track.stop) {
                track.stop();
            }
        });
        // leave stop for implementations still using it
        if (mediaStream.stop) {
            mediaStream.stop();
        }
        // if we have done createObjectURL, let's clean it up
        if (mediaStream.jitsiObjectURL) {
            webkitURL.revokeObjectURL(mediaStream.jitsiObjectURL);
        }
    },
    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    isDesktopSharingEnabled: function () {
        return screenObtainer.isSupported();
    },
    /**
     * Sets current audio output device.
     * @param {string} deviceId - id of an 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices()
     * @returns {Promise} - resolves when audio output is changed, is rejected
     * otherwise
     */
    setAudioOutputDevice: function (deviceId) {
        if (!this.isAudioOutputDeviceChangeAvailable()) {
            return Promise.reject(
                new Error('Audio output device change is not supported'));
        }
        return featureDetectionVideoEl.setSinkId(deviceId)
            .then(function() {
                audioOutputDeviceId = deviceId;
                eventEmitter.emit(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                    deviceId);
            });
    },
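    /*
     * Usage sketch (illustrative only): pick an 'audiooutput' device and make
     * it the sink for attached media elements. Note that the enumerateDevices
     * installed by init() may be the wrapped, callback-style variant that
     * fires only after the first getUserMedia call (see wrapEnumerateDevices).
     *
     *   RTCUtils.enumerateDevices(function (deviceInfos) {
     *       var output = deviceInfos.filter(function (d) {
     *           return d.kind === 'audiooutput';
     *       })[0];
     *       if (output && RTCUtils.isAudioOutputDeviceChangeAvailable()) {
     *           RTCUtils.setAudioOutputDevice(output.deviceId)
     *               .then(function () {
     *                   console.log('Audio output is now', output.label);
     *               });
     *       }
     *   });
     */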
    /**
     * Returns the currently used audio output device id; '' stands for the
     * default device.
     * @returns {string}
     */
    getAudioOutputDevice: function () {
        return audioOutputDeviceId;
    }
};
module.exports = RTCUtils;
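/*
 * End-to-end sketch of the intended flow (illustrative only; the require path
 * and the 'videoElement' lookup are assumptions of this example):
 *
 *   var RTCUtils = require('./RTCUtils');
 *
 *   RTCUtils.init({ useIPv6: false }).then(function () {
 *       return RTCUtils.obtainAudioAndVideoPermissions({
 *           devices: ['audio', 'video'],
 *           resolution: '360'
 *       });
 *   }).then(function (tracks) {
 *       var videoTrack = tracks.filter(function (t) {
 *           return t.mediaType === MediaType.VIDEO;
 *       })[0];
 *       if (videoTrack) {
 *           // attachMediaStream is installed on RTCUtils during init()
 *           RTCUtils.attachMediaStream(
 *               document.getElementById('videoElement'), videoTrack.stream);
 *       }
 *       // later, when done with the streams:
 *       // tracks.forEach(function (t) {
 *       //     RTCUtils.stopMediaStream(t.stream);
 *       // });
 *   });
 */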