modified lib-jitsi-meet dev repo

RTCUtils.js 23KB

/* global config, require, attachMediaStream, getUserMedia */
var RTCBrowserType = require("./RTCBrowserType");
var Resolutions = require("../../service/RTC/Resolutions");
var AdapterJS = require("./adapter.screenshare");
var SDPUtil = require("../xmpp/SDPUtil");
var EventEmitter = require("events");

function DummyMediaStream(id) {
    this.id = id;
    this.label = id;
    this.stop = function() { };
    this.getAudioTracks = function() { return []; };
    this.getVideoTracks = function() { return []; };
}

function getPreviousResolution(resolution) {
    if(!Resolutions[resolution])
        return null;
    var order = Resolutions[resolution].order;
    var res = null;
    var resName = null;
    for(var i in Resolutions) {
        var tmp = Resolutions[i];
        if(res == null || (res.order < tmp.order && tmp.order < order)) {
            resName = i;
            res = tmp;
        }
    }
    return resName;
}
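
// A minimal usage sketch for getPreviousResolution: given a key from the
// Resolutions map it returns the name of the next lower resolution, which
// errorCallback below uses to retry getUserMedia with relaxed constraints.
// The concrete keys ('720', '360') are assumptions for illustration; the
// actual set of keys comes from ../../service/RTC/Resolutions.
//
//     getPreviousResolution('720');    // e.g. returns '360'
//     getPreviousResolution('bogus');  // returns null (unknown resolution)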
function setResolutionConstraints(constraints, resolution, isAndroid) {
    if (Resolutions[resolution]) {
        constraints.video.mandatory.minWidth = Resolutions[resolution].width;
        constraints.video.mandatory.minHeight = Resolutions[resolution].height;
    }
    else if (isAndroid) {
        // FIXME can't remember if the purpose of this was to always request
        // low resolution on Android ? if yes it should be moved up front
        constraints.video.mandatory.minWidth = 320;
        constraints.video.mandatory.minHeight = 240;
        constraints.video.mandatory.maxFrameRate = 15;
    }

    if (constraints.video.mandatory.minWidth)
        constraints.video.mandatory.maxWidth =
            constraints.video.mandatory.minWidth;
    if (constraints.video.mandatory.minHeight)
        constraints.video.mandatory.maxHeight =
            constraints.video.mandatory.minHeight;
}

function getConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid)
{
    var constraints = {audio: false, video: false};

    if (um.indexOf('video') >= 0) {
        // same behaviour as true
        constraints.video = { mandatory: {}, optional: [] };
        constraints.video.optional.push({ googLeakyBucket: true });
        setResolutionConstraints(constraints, resolution, isAndroid);
    }
    if (um.indexOf('audio') >= 0) {
        if (!RTCBrowserType.isFirefox()) {
            // same behaviour as true
            constraints.audio = { mandatory: {}, optional: []};
            // if it is good enough for hangouts...
            constraints.audio.optional.push(
                {googEchoCancellation: true},
                {googAutoGainControl: true},
                {googNoiseSuppression: true},
                {googHighpassFilter: true},
                {googNoiseSuppression2: true},
                {googEchoCancellation2: true},
                {googAutoGainControl2: true}
            );
        } else {
            constraints.audio = true;
        }
    }
    if (um.indexOf('screen') >= 0) {
        if (RTCBrowserType.isChrome()) {
            constraints.video = {
                mandatory: {
                    chromeMediaSource: "screen",
                    googLeakyBucket: true,
                    maxWidth: window.screen.width,
                    maxHeight: window.screen.height,
                    maxFrameRate: 3
                },
                optional: []
            };
        } else if (RTCBrowserType.isTemasysPluginUsed()) {
            constraints.video = {
                optional: [
                    {
                        sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
                    }
                ]
            };
        } else {
            console.error(
                "'screen' WebRTC media source is supported only in Chrome" +
                " and with Temasys plugin");
        }
    }
    if (um.indexOf('desktop') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: desktopStream,
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }

    if (bandwidth) {
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.optional.push({bandwidth: bandwidth});
    }
    if (fps) {
        // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
        if (!constraints.video) {
            // same behaviour as true
            constraints.video = {mandatory: {}, optional: []};
        }
        constraints.video.mandatory.minFrameRate = fps;
    }

    return constraints;
}
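
// A minimal sketch of what getConstraints produces for a plain audio+video
// request on desktop Chrome, using the legacy mandatory/optional constraint
// syntax. The '360' entry and the 640x360 numbers are assumptions for
// illustration; the actual width/height come from the Resolutions map.
//
//     getConstraints(['audio', 'video'], '360', null, null, null, false);
//     // =>
//     // {
//     //     audio: { mandatory: {}, optional: [{googEchoCancellation: true}, ...] },
//     //     video: {
//     //         mandatory: { minWidth: 640, minHeight: 360,
//     //                      maxWidth: 640, maxHeight: 360 },
//     //         optional: [{googLeakyBucket: true}]
//     //     }
//     // }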
//Options parameter is to pass config options. Currently uses only "useIPv6".
var RTCUtils = {
    eventEmitter: new EventEmitter(),
    init: function (onTemasysPluginReady, options) {
        var self = this;
        if (RTCBrowserType.isFirefox()) {
            var FFversion = RTCBrowserType.getFirefoxVersion();
            if (FFversion >= 40) {
                this.peerconnection = mozRTCPeerConnection;
                this.getUserMedia = navigator.mozGetUserMedia.bind(navigator);
                this.pc_constraints = {};
                this.attachMediaStream = function (element, stream) {
                    // srcObject is being standardized and FF will eventually
                    // support that unprefixed. FF also supports the
                    // "element.src = URL.createObjectURL(...)" combo, but that
                    // will be deprecated in favour of srcObject.
                    //
                    // https://groups.google.com/forum/#!topic/mozilla.dev.media/pKOiioXonJg
                    // https://github.com/webrtc/samples/issues/302
                    if (!element[0])
                        return;
                    element[0].mozSrcObject = stream;
                    element[0].play();
                };
                this.getStreamID = function (stream) {
                    var id = stream.id;
                    if (!id) {
                        var tracks = stream.getVideoTracks();
                        if (!tracks || tracks.length === 0) {
                            tracks = stream.getAudioTracks();
                        }
                        id = tracks[0].id;
                    }
                    return SDPUtil.filter_special_chars(id);
                };
                this.getVideoSrc = function (element) {
                    if (!element)
                        return null;
                    return element.mozSrcObject;
                };
                this.setVideoSrc = function (element, src) {
                    if (element)
                        element.mozSrcObject = src;
                };
                RTCSessionDescription = mozRTCSessionDescription;
                RTCIceCandidate = mozRTCIceCandidate;
            } else {
                console.error(
                    "Firefox version too old: " + FFversion + ". Required >= 40.");
                window.location.href = 'unsupported_browser.html';
                return;
            }
        } else if (RTCBrowserType.isChrome() || RTCBrowserType.isOpera()) {
            this.peerconnection = webkitRTCPeerConnection;
            this.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
            this.attachMediaStream = function (element, stream) {
                element.attr('src', webkitURL.createObjectURL(stream));
            };
            this.getStreamID = function (stream) {
                // streams from FF endpoints have the characters '{' and '}'
                // that make jQuery choke.
                return SDPUtil.filter_special_chars(stream.id);
            };
            this.getVideoSrc = function (element) {
                if (!element)
                    return null;
                return element.getAttribute("src");
            };
            this.setVideoSrc = function (element, src) {
                if (element)
                    element.setAttribute("src", src);
            };
            // DTLS should now be enabled by default but..
            this.pc_constraints = {'optional': [
                {'DtlsSrtpKeyAgreement': 'true'}
            ]};
            if (options.useIPv6) {
                // https://code.google.com/p/webrtc/issues/detail?id=2828
                this.pc_constraints.optional.push({googIPv6: true});
            }
            if (navigator.userAgent.indexOf('Android') != -1) {
                this.pc_constraints = {}; // disable DTLS on Android
            }
            if (!webkitMediaStream.prototype.getVideoTracks) {
                webkitMediaStream.prototype.getVideoTracks = function () {
                    return this.videoTracks;
                };
            }
            if (!webkitMediaStream.prototype.getAudioTracks) {
                webkitMediaStream.prototype.getAudioTracks = function () {
                    return this.audioTracks;
                };
            }
        }
        // Detect IE/Safari
        else if (RTCBrowserType.isTemasysPluginUsed()) {
            //AdapterJS.WebRTCPlugin.setLogLevel(
            //    AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS.VERBOSE);
            AdapterJS.webRTCReady(function (isPlugin) {
                self.peerconnection = RTCPeerConnection;
                self.getUserMedia = getUserMedia;
                self.attachMediaStream = function (elSel, stream) {
                    if (stream.id === "dummyAudio" || stream.id === "dummyVideo") {
                        return;
                    }
                    attachMediaStream(elSel[0], stream);
                };
                self.getStreamID = function (stream) {
                    var id = SDPUtil.filter_special_chars(stream.label);
                    return id;
                };
                self.getVideoSrc = function (element) {
                    if (!element) {
                        console.warn("Attempt to get video SRC of null element");
                        return null;
                    }
                    var children = element.children;
                    for (var i = 0; i !== children.length; ++i) {
                        if (children[i].name === 'streamId') {
                            return children[i].value;
                        }
                    }
                    //console.info(element.id + " SRC: " + src);
                    return null;
                };
                self.setVideoSrc = function (element, src) {
                    //console.info("Set video src: ", element, src);
                    if (!src) {
                        console.warn("Not attaching video stream, 'src' is null");
                        return;
                    }
                    AdapterJS.WebRTCPlugin.WaitForPluginReady();
                    var stream = AdapterJS.WebRTCPlugin.plugin
                        .getStreamWithId(AdapterJS.WebRTCPlugin.pageId, src);
                    attachMediaStream(element, stream);
                };
                onTemasysPluginReady(isPlugin);
            });
        } else {
            try {
                console.log('Browser does not appear to be WebRTC-capable');
            } catch (e) {
            }
            window.location.href = 'unsupported_browser.html';
        }
    },
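    // A minimal sketch of how init is expected to be called by the consumer
    // (for example the RTC service). The callback body and the options object
    // shown here are assumptions for illustration; only "useIPv6" is actually
    // read from options above.
    //
    //     RTCUtils.init(function (isTemasysPlugin) {
    //         console.log('WebRTC ready, Temasys plugin in use:', isTemasysPlugin);
    //     }, { useIPv6: true });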
    getUserMediaWithConstraints: function (RTC, um, success_callback, failure_callback, resolution, bandwidth, fps, desktopStream) {
        // Check if we are running on Android device
        var isAndroid = navigator.userAgent.indexOf('Android') != -1;

        var constraints = getConstraints(
            um, resolution, bandwidth, fps, desktopStream, isAndroid);

        console.info("Get media constraints", constraints);

        var self = this;
        try {
            this.getUserMedia(constraints,
                function (stream) {
                    console.log('onUserMediaSuccess');
                    self.setAvailableDevices(RTC, um, true);
                    success_callback(stream);
                },
                function (error) {
                    self.setAvailableDevices(RTC, um, false);
                    console.warn('Failed to get access to local media. Error ',
                        error, constraints);
                    if (failure_callback) {
                        failure_callback(error, resolution);
                    }
                });
        } catch (e) {
            console.error('GUM failed: ', e);
            if (failure_callback) {
                failure_callback(e);
            }
        }
    },
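    // A minimal sketch of calling getUserMediaWithConstraints directly. The
    // 'rtc' object stands in for the RTC service instance that exposes
    // setDeviceAvailability/createLocalStreams; it is an assumption here.
    //
    //     RTCUtils.getUserMediaWithConstraints(
    //         rtc,
    //         ['audio', 'video'],
    //         function (stream) { /* attach or signal the new stream */ },
    //         function (error, resolution) { /* retry at a lower resolution */ },
    //         '360');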
    setAvailableDevices: function (RTC, um, available) {
        var devices = {};
        if (um.indexOf("video") != -1) {
            devices.video = available;
        }
        if (um.indexOf("audio") != -1) {
            devices.audio = available;
        }
        RTC.setDeviceAvailability(devices);
    },
    /**
     * Creates the local MediaStreams.
     * @param RTC the rtc service.
     * @param devices the devices that will be requested
     * @param usageOptions object with devices that should be requested.
     * @param resolution resolution constraints
     * @param dontCreateJitsiTracks if <tt>true</tt> objects with the following structure {stream: the Media Stream,
     * type: "audio" or "video", isMuted: true/false, videoType: "camera" or "desktop"}
     * will be returned through the Promise, otherwise JitsiTrack objects will be returned.
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    obtainAudioAndVideoPermissions: function (RTC, devices, usageOptions, resolution, dontCreateJitsiTracks) {
        var self = this;
        // Get AV
        return new Promise(function (resolve, reject) {
            var successCallback = function (stream) {
                var streams = self.successCallback(RTC, stream, usageOptions);
                resolve(dontCreateJitsiTracks ? streams : RTC.createLocalStreams(streams));
            };

            if (!devices)
                devices = ['audio', 'video'];

            var newDevices = [];
            if (usageOptions)
                for (var i = 0; i < devices.length; i++) {
                    var device = devices[i];
                    if (usageOptions[device] === true)
                        newDevices.push(device);
                }
            else
                newDevices = devices;

            if (newDevices.length === 0) {
                successCallback();
                return;
            }

            if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) {
                // With FF/IE we can't split the stream into audio and video because FF
                // doesn't support media stream constructors. So, we need to get the
                // audio stream separately from the video stream using two distinct GUM
                // calls. Not very user friendly :-( but we don't have many other
                // options either.
                //
                // Note that we pack those 2 streams in a single object and pass it to
                // the successCallback method.
                var obtainVideo = function (audioStream) {
                    self.getUserMediaWithConstraints(
                        RTC,
                        ['video'],
                        function (videoStream) {
                            return successCallback({
                                audioStream: audioStream,
                                videoStream: videoStream
                            });
                        },
                        function (error, resolution) {
                            console.error(
                                'failed to obtain video stream - stop', error);
                            self.errorCallback(error, resolve, RTC, resolution, dontCreateJitsiTracks);
                        },
                        config.resolution || '360');
                };
                var obtainAudio = function () {
                    self.getUserMediaWithConstraints(
                        RTC,
                        ['audio'],
                        function (audioStream) {
                            if (newDevices.indexOf('video') !== -1)
                                obtainVideo(audioStream);
                        },
                        function (error) {
                            console.error(
                                'failed to obtain audio stream - stop', error);
                            self.errorCallback(error, resolve, RTC, null, dontCreateJitsiTracks);
                        }
                    );
                };
                if (newDevices.indexOf('audio') !== -1) {
                    obtainAudio();
                } else {
                    obtainVideo(null);
                }
            } else {
                this.getUserMediaWithConstraints(
                    RTC,
                    newDevices,
                    function (stream) {
                        successCallback(stream);
                    },
                    function (error, resolution) {
                        self.errorCallback(error, resolve, RTC, resolution, dontCreateJitsiTracks);
                    },
                    resolution || '360');
            }
        }.bind(this));
    },
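    // A minimal sketch of the typical call into obtainAudioAndVideoPermissions.
    // The 'rtc' instance and the way the resulting tracks are consumed are
    // assumptions for illustration only.
    //
    //     RTCUtils.obtainAudioAndVideoPermissions(rtc, ['audio', 'video'], null, '360')
    //         .then(function (tracks) {
    //             // JitsiTrack objects (or raw stream descriptors when the
    //             // dontCreateJitsiTracks flag is passed as true)
    //             tracks.forEach(function (track) { console.log(track); });
    //         });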
    /**
     * Successful callback called from GUM.
     * @param RTC the rtc service
     * @param stream the new MediaStream
     * @param usageOptions the list of the devices that should be queried.
     * @returns {*}
     */
    successCallback: function (RTC, stream, usageOptions) {
        // If this is FF or IE, the stream parameter is *not* a MediaStream object,
        // it's an object with two properties: audioStream, videoStream.
        if (stream && stream.getAudioTracks && stream.getVideoTracks)
            console.log('got', stream, stream.getAudioTracks().length,
                stream.getVideoTracks().length);
        return this.handleLocalStream(RTC, stream, usageOptions);
    },
    /**
     * Error callback called from GUM. Retries the GUM call with different resolutions.
     * @param error the error
     * @param resolve the resolve function that will be called on success.
     * @param RTC the rtc service
     * @param currentResolution the last resolution used for GUM.
     * @param dontCreateJitsiTracks if <tt>true</tt> objects with the following structure {stream: the Media Stream,
     * type: "audio" or "video", isMuted: true/false, videoType: "camera" or "desktop"}
     * will be returned through the Promise, otherwise JitsiTrack objects will be returned.
     */
    errorCallback: function (error, resolve, RTC, currentResolution, dontCreateJitsiTracks) {
        var self = this;
        console.error('failed to obtain audio/video stream - trying audio only', error);
        var resolution = getPreviousResolution(currentResolution);
        if (typeof error == "object" && error.constraintName && error.name
            && (error.name == "ConstraintNotSatisfiedError" ||
                error.name == "OverconstrainedError") &&
            (error.constraintName == "minWidth" || error.constraintName == "maxWidth" ||
                error.constraintName == "minHeight" || error.constraintName == "maxHeight")
            && resolution != null) {
            self.getUserMediaWithConstraints(RTC, ['audio', 'video'],
                function (stream) {
                    var streams = self.successCallback(RTC, stream);
                    resolve(dontCreateJitsiTracks ? streams : RTC.createLocalStreams(streams));
                }, function (error, resolution) {
                    return self.errorCallback(error, resolve, RTC, resolution, dontCreateJitsiTracks);
                }, resolution);
        }
        else {
            self.getUserMediaWithConstraints(
                RTC,
                ['audio'],
                function (stream) {
                    var streams = self.successCallback(RTC, stream);
                    resolve(dontCreateJitsiTracks ? streams : RTC.createLocalStreams(streams));
                },
                function (error) {
                    console.error('failed to obtain audio/video stream - stop',
                        error);
                    var streams = self.successCallback(RTC, null);
                    resolve(dontCreateJitsiTracks ? streams : RTC.createLocalStreams(streams));
                }
            );
        }
    },
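    // Illustrative trace of the retry chain above (not output produced by this
    // file): a request at '720' that fails with OverconstrainedError on
    // minHeight is retried via getPreviousResolution at the next lower
    // resolution, and so on, until a request succeeds or no lower resolution
    // remains, at which point the code falls back to an audio-only request.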
    /**
     * Handles the newly created Media Streams.
     * @param service the rtc service
     * @param stream the new Media Streams
     * @param usageOptions the list of the devices that should be queried.
     * @returns {*[]} array of objects describing the new Media Streams.
     */
    handleLocalStream: function (service, stream, usageOptions) {
        var audioStream, videoStream;
        // If this is FF, the stream parameter is *not* a MediaStream object, it's
        // an object with two properties: audioStream, videoStream.
        if (window.webkitMediaStream) {
            audioStream = new webkitMediaStream();
            videoStream = new webkitMediaStream();
            if (stream) {
                var audioTracks = stream.getAudioTracks();
                for (var i = 0; i < audioTracks.length; i++) {
                    audioStream.addTrack(audioTracks[i]);
                }
                var videoTracks = stream.getVideoTracks();
                for (i = 0; i < videoTracks.length; i++) {
                    videoStream.addTrack(videoTracks[i]);
                }
            }
        }
        else if (RTCBrowserType.isFirefox() || RTCBrowserType.isTemasysPluginUsed()) { // Firefox and Temasys plugin
            if (stream && stream.audioStream)
                audioStream = stream.audioStream;
            else
                audioStream = new DummyMediaStream("dummyAudio");
            if (stream && stream.videoStream)
                videoStream = stream.videoStream;
            else
                videoStream = new DummyMediaStream("dummyVideo");
        }

        var audioMuted = (usageOptions && usageOptions.audio === false),
            videoMuted = (usageOptions && usageOptions.video === false);

        var audioGUM = (!usageOptions || usageOptions.audio !== false),
            videoGUM = (!usageOptions || usageOptions.video !== false);

        return [
            {stream: audioStream, type: "audio", isMuted: audioMuted, isGUMStream: audioGUM, videoType: null},
            {stream: videoStream, type: "video", isMuted: videoMuted, isGUMStream: videoGUM, videoType: "camera"}
        ];
    },
    createStream: function (stream, isVideo) {
        var newStream = null;
        if (window.webkitMediaStream) {
            newStream = new webkitMediaStream();
            if (newStream) {
                var tracks = (isVideo ? stream.getVideoTracks() : stream.getAudioTracks());
                for (var i = 0; i < tracks.length; i++) {
                    newStream.addTrack(tracks[i]);
                }
            }
        } else {
            // FIXME: this is duplicated with 'handleLocalStream' !!!
            if (stream) {
                newStream = stream;
            } else {
                newStream =
                    new DummyMediaStream(isVideo ? "dummyVideo" : "dummyAudio");
            }
        }
        return newStream;
    }
};

module.exports = RTCUtils;