
RTCUtils.js 57KB

  1. /* global $,
  2. __filename,
  3. attachMediaStream,
  4. MediaStreamTrack,
  5. RTCIceCandidate: true,
  6. RTCPeerConnection,
  7. RTCSessionDescription: true,
  8. mozRTCIceCandidate,
  9. mozRTCPeerConnection,
  10. mozRTCSessionDescription,
  11. webkitMediaStream,
  12. webkitRTCPeerConnection,
  13. webkitURL
  14. */
  15. import CameraFacingMode from '../../service/RTC/CameraFacingMode';
  16. import EventEmitter from 'events';
  17. import { getLogger } from 'jitsi-meet-logger';
  18. import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
  19. import JitsiTrackError from '../../JitsiTrackError';
  20. import Listenable from '../util/Listenable';
  21. import * as MediaType from '../../service/RTC/MediaType';
  22. import Resolutions from '../../service/RTC/Resolutions';
  23. import RTCBrowserType from './RTCBrowserType';
  24. import RTCEvents from '../../service/RTC/RTCEvents';
  25. import ortcRTCPeerConnection from './ortc/RTCPeerConnection';
  26. import screenObtainer from './ScreenObtainer';
  27. import SDPUtil from '../xmpp/SDPUtil';
  28. import Statistics from '../statistics/statistics';
  29. import VideoType from '../../service/RTC/VideoType';
  30. const logger = getLogger(__filename);
  31. // XXX Don't require Temasys unless it's to be used because it doesn't run on
  32. // React Native, for example.
  33. const AdapterJS
  34. = RTCBrowserType.isTemasysPluginUsed()
  35. ? require('./adapter.screenshare')
  36. : undefined;
  37. const eventEmitter = new EventEmitter();
  38. const AVAILABLE_DEVICES_POLL_INTERVAL_TIME = 3000; // ms
  39. const devices = {
  40. audio: false,
  41. video: false
  42. };
  43. // Currently audio output device change is supported only in Chrome and
  44. // default output always has 'default' device ID
  45. let audioOutputDeviceId = 'default'; // default device
  46. // whether user has explicitly set a device to use
  47. let audioOutputChanged = false;
  48. // Disables all audio processing
  49. let disableAP = false;
  50. // Disables Acoustic Echo Cancellation
  51. let disableAEC = false;
  52. // Disables Noise Suppression
  53. let disableNS = false;
  54. // Disables Automatic Gain Control
  55. let disableAGC = false;
  56. // Disables Highpass Filter
  57. let disableHPF = false;
  58. const featureDetectionAudioEl = document.createElement('audio');
  59. const isAudioOutputDeviceChangeAvailable
  60. = typeof featureDetectionAudioEl.setSinkId !== 'undefined';
  61. let currentlyAvailableMediaDevices;
  62. /**
  63. * "rawEnumerateDevicesWithCallback" will be initialized only after WebRTC is
  64. * ready. Otherwise it is too early to assume that the devices listing is not
  65. * supported.
  66. */
  67. let rawEnumerateDevicesWithCallback;
  68. /**
  69. *
  70. */
  71. function initRawEnumerateDevicesWithCallback() {
  72. rawEnumerateDevicesWithCallback
  73. = navigator.mediaDevices && navigator.mediaDevices.enumerateDevices
  74. ? function(callback) {
  75. navigator.mediaDevices.enumerateDevices().then(
  76. callback,
  77. () => callback([]));
  78. }
  79. // Safari:
  80. // "ReferenceError: Can't find variable: MediaStreamTrack" when
  81. // Temasys plugin is not installed yet, have to delay this call
  82. // until WebRTC is ready.
  83. : MediaStreamTrack && MediaStreamTrack.getSources
  84. ? function(callback) {
  85. MediaStreamTrack.getSources(
  86. sources =>
  87. callback(
  88. sources.map(convertMediaStreamTrackSource)));
  89. }
  90. : undefined;
  91. }
  92. // TODO: currently no browser supports 'devicechange' event even in nightly
  93. // builds, so no feature/browser detection is used at all. However, in the future
  94. // this should be changed to some expression. Progress on 'devicechange' event
  95. // implementation for Chrome/Opera/NWJS can be tracked at
  96. // https://bugs.chromium.org/p/chromium/issues/detail?id=388648, for Firefox -
  97. // at https://bugzilla.mozilla.org/show_bug.cgi?id=1152383. More information on
  98. // 'devicechange' event can be found in spec -
  99. // http://w3c.github.io/mediacapture-main/#event-mediadevices-devicechange
  100. // TODO: check MS Edge
  101. const isDeviceChangeEventSupported = false;
  102. let rtcReady = false;
  103. /**
  104. *
  105. * @param constraints
  106. * @param isNewStyleConstraintsSupported
  107. * @param resolution
  108. */
  109. function setResolutionConstraints(
  110. constraints,
  111. isNewStyleConstraintsSupported,
  112. resolution) {
  113. if (Resolutions[resolution]) {
  114. if (isNewStyleConstraintsSupported) {
  115. constraints.video.width = {
  116. ideal: Resolutions[resolution].width
  117. };
  118. constraints.video.height = {
  119. ideal: Resolutions[resolution].height
  120. };
  121. }
  122. constraints.video.mandatory.minWidth = Resolutions[resolution].width;
  123. constraints.video.mandatory.minHeight = Resolutions[resolution].height;
  124. }
  125. if (constraints.video.mandatory.minWidth) {
  126. constraints.video.mandatory.maxWidth
  127. = constraints.video.mandatory.minWidth;
  128. }
  129. if (constraints.video.mandatory.minHeight) {
  130. constraints.video.mandatory.maxHeight
  131. = constraints.video.mandatory.minHeight;
  132. }
  133. }
  134. /**
  135. * @param {string[]} um required user media types
  136. *
  137. * @param {Object} [options={}] optional parameters
  138. * @param {string} options.resolution
  139. * @param {number} options.bandwidth
  140. * @param {number} options.fps
  141. * @param {string} options.desktopStream
  142. * @param {string} options.cameraDeviceId
  143. * @param {string} options.micDeviceId
  144. * @param {CameraFacingMode} options.facingMode
  145. * @param {bool} firefox_fake_device
  146. */
  147. function getConstraints(um, options) {
  148. const constraints = { audio: false,
  149. video: false };
  150. // Don't mix new and old style settings for Chromium as this leads
  151. // to TypeError in new Chromium versions. @see
  152. // https://bugs.chromium.org/p/chromium/issues/detail?id=614716
  153. // This is a temporary solution, in future we will fully split old and
  154. // new style constraints when new versions of Chromium and Firefox will
  155. // have stable support of new constraints format. For more information
  156. // @see https://github.com/jitsi/lib-jitsi-meet/pull/136
  157. const isNewStyleConstraintsSupported
  158. = RTCBrowserType.isFirefox()
  159. || RTCBrowserType.isEdge()
  160. || RTCBrowserType.isReactNative()
  161. || RTCBrowserType.isTemasysPluginUsed();
  162. if (um.indexOf('video') >= 0) {
  163. // same behaviour as true
  164. constraints.video = { mandatory: {},
  165. optional: [] };
  166. if (options.cameraDeviceId) {
  167. if (isNewStyleConstraintsSupported) {
  168. // New style of setting device id.
  169. constraints.video.deviceId = options.cameraDeviceId;
  170. }
  171. // Old style.
  172. constraints.video.optional.push({
  173. sourceId: options.cameraDeviceId
  174. });
  175. } else {
  176. // Prefer the front i.e. user-facing camera (to the back i.e.
  177. // environment-facing camera, for example).
  178. // TODO: Maybe use "exact" syntax if options.facingMode is defined,
  179. // but this probably needs to be decided when updating other
  180. // constraints, as we currently don't use "exact" syntax anywhere.
  181. const facingMode = options.facingMode || CameraFacingMode.USER;
  182. if (isNewStyleConstraintsSupported) {
  183. constraints.video.facingMode = facingMode;
  184. }
  185. constraints.video.optional.push({
  186. facingMode
  187. });
  188. }
  189. if (options.minFps || options.maxFps || options.fps) {
  190. // for some cameras it might be necessary to request 30fps
  191. // so they choose 30fps mjpg over 10fps yuy2
  192. if (options.minFps || options.fps) {
  193. // Fall back to options.fps for backwards compatibility
  194. options.minFps = options.minFps || options.fps;
  195. constraints.video.mandatory.minFrameRate = options.minFps;
  196. }
  197. if (options.maxFps) {
  198. constraints.video.mandatory.maxFrameRate = options.maxFps;
  199. }
  200. }
  201. setResolutionConstraints(
  202. constraints, isNewStyleConstraintsSupported, options.resolution);
  203. }
  204. if (um.indexOf('audio') >= 0) {
  205. if (RTCBrowserType.isReactNative()) {
  206. // The react-native-webrtc project that we're currently using
  207. // expects the audio constraint to be a boolean.
  208. constraints.audio = true;
  209. } else if (RTCBrowserType.isFirefox()) {
  210. if (options.micDeviceId) {
  211. constraints.audio = {
  212. mandatory: {},
  213. deviceId: options.micDeviceId, // new style
  214. optional: [ {
  215. sourceId: options.micDeviceId // old style
  216. } ] };
  217. } else {
  218. constraints.audio = true;
  219. }
  220. } else {
  221. // same behaviour as true
  222. constraints.audio = { mandatory: {},
  223. optional: [] };
  224. if (options.micDeviceId) {
  225. if (isNewStyleConstraintsSupported) {
  226. // New style of setting device id.
  227. constraints.audio.deviceId = options.micDeviceId;
  228. }
  229. // Old style.
  230. constraints.audio.optional.push({
  231. sourceId: options.micDeviceId
  232. });
  233. }
  234. // if it is good enough for hangouts...
  235. constraints.audio.optional.push(
  236. { echoCancellation: !disableAEC && !disableAP },
  237. { googEchoCancellation: !disableAEC && !disableAP },
  238. { googAutoGainControl: !disableAGC && !disableAP },
  239. { googNoiseSupression: !disableNS && !disableAP },
  240. { googHighpassFilter: !disableHPF && !disableAP },
  241. { googNoiseSuppression2: !disableNS && !disableAP },
  242. { googEchoCancellation2: !disableAEC && !disableAP },
  243. { googAutoGainControl2: !disableAGC && !disableAP }
  244. );
  245. }
  246. }
  247. if (um.indexOf('screen') >= 0) {
  248. if (RTCBrowserType.isChrome()) {
  249. constraints.video = {
  250. mandatory: {
  251. chromeMediaSource: 'screen',
  252. maxWidth: window.screen.width,
  253. maxHeight: window.screen.height,
  254. maxFrameRate: 3
  255. },
  256. optional: []
  257. };
  258. } else if (RTCBrowserType.isTemasysPluginUsed()) {
  259. constraints.video = {
  260. optional: [
  261. {
  262. sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
  263. }
  264. ]
  265. };
  266. } else if (RTCBrowserType.isFirefox()) {
  267. constraints.video = {
  268. mozMediaSource: 'window',
  269. mediaSource: 'window'
  270. };
  271. } else {
  272. const errmsg
  273. = '\'screen\' WebRTC media source is supported only in Chrome'
  274. + ' and with Temasys plugin';
  275. GlobalOnErrorHandler.callErrorHandler(new Error(errmsg));
  276. logger.error(errmsg);
  277. }
  278. }
  279. if (um.indexOf('desktop') >= 0) {
  280. constraints.video = {
  281. mandatory: {
  282. chromeMediaSource: 'desktop',
  283. chromeMediaSourceId: options.desktopStream,
  284. maxWidth: window.screen.width,
  285. maxHeight: window.screen.height,
  286. maxFrameRate: 3
  287. },
  288. optional: []
  289. };
  290. }
  291. if (options.bandwidth) {
  292. if (!constraints.video) {
  293. // same behaviour as true
  294. constraints.video = { mandatory: {},
  295. optional: [] };
  296. }
  297. constraints.video.optional.push({ bandwidth: options.bandwidth });
  298. }
  299. // We turn on fake audio for both audio and video tracks; the fake audio and
  300. // video seem to work only when enabled in a single getUserMedia call. We
  301. // cannot get fake audio separately from fake video, which may later be a
  302. // problem with some of the tests.
  303. if (RTCBrowserType.isFirefox() && options.firefox_fake_device) {
  304. // Seems to be fixed now; removing this experimental fix, as having
  305. // multiple audio tracks breaks the tests.
  306. // constraints.audio = true;
  307. constraints.fake = true;
  308. }
  309. return constraints;
  310. }
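// Illustrative note (not part of the original file): a rough sketch of what
// getConstraints([ 'audio', 'video' ], { resolution: '720' }) would produce on
// Chrome (old-style constraints), assuming Resolutions['720'] is 1280x720,
// CameraFacingMode.USER is 'user' and no audio processing flags are disabled:
//
//   {
//       audio: {
//           mandatory: {},
//           optional: [
//               { echoCancellation: true },
//               { googEchoCancellation: true },
//               ... // remaining goog* audio processing flags
//           ]
//       },
//       video: {
//           mandatory: {
//               minWidth: 1280, minHeight: 720,
//               maxWidth: 1280, maxHeight: 720
//           },
//           optional: [ { facingMode: 'user' } ]
//       }
//   }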
  311. /**
  312. * Sets the available devices based on the options we requested and the
  313. * streams we received.
  314. * @param um the options we requested to getUserMedia.
  315. * @param stream the stream we received from calling getUserMedia.
  316. */
  317. function setAvailableDevices(um, stream) {
  318. const audioTracksReceived = stream && stream.getAudioTracks().length > 0;
  319. const videoTracksReceived = stream && stream.getVideoTracks().length > 0;
  320. if (um.indexOf('video') !== -1) {
  321. devices.video = videoTracksReceived;
  322. }
  323. if (um.indexOf('audio') !== -1) {
  324. devices.audio = audioTracksReceived;
  325. }
  326. eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, devices);
  327. }
  328. /**
  329. * Checks if new list of available media devices differs from previous one.
  330. * @param {MediaDeviceInfo[]} newDevices - list of new devices.
  331. * @returns {boolean} - true if list is different, false otherwise.
  332. */
  333. function compareAvailableMediaDevices(newDevices) {
  334. if (newDevices.length !== currentlyAvailableMediaDevices.length) {
  335. return true;
  336. }
  337. /* eslint-disable newline-per-chained-call */
  338. return (
  339. newDevices.map(mediaDeviceInfoToJSON).sort().join('')
  340. !== currentlyAvailableMediaDevices
  341. .map(mediaDeviceInfoToJSON).sort().join(''));
  342. /* eslint-enable newline-per-chained-call */
  343. /**
  344. *
  345. * @param info
  346. */
  347. function mediaDeviceInfoToJSON(info) {
  348. return JSON.stringify({
  349. kind: info.kind,
  350. deviceId: info.deviceId,
  351. groupId: info.groupId,
  352. label: info.label,
  353. facing: info.facing
  354. });
  355. }
  356. }
  357. /**
  358. * Periodically polls the enumerateDevices() method to check if the list of media
  359. * devices has changed. This is a temporary workaround until the 'devicechange'
  360. * event is supported by browsers.
  361. */
  362. function pollForAvailableMediaDevices() {
  363. // Here we use the plain navigator.mediaDevices.enumerateDevices instead of the
  364. // wrapped one because we just need to know that the devices changed; labels
  365. // do not matter. This fixes the situation when we have no devices initially
  366. // and then plug in a new one.
  367. if (rawEnumerateDevicesWithCallback) {
  368. rawEnumerateDevicesWithCallback(ds => {
  369. // We don't fire RTCEvents.DEVICE_LIST_CHANGED for the first time
  370. // we call enumerateDevices(). This is the initial step.
  371. if (typeof currentlyAvailableMediaDevices === 'undefined') {
  372. currentlyAvailableMediaDevices = ds.slice(0);
  373. } else if (compareAvailableMediaDevices(ds)) {
  374. onMediaDevicesListChanged(ds);
  375. }
  376. window.setTimeout(pollForAvailableMediaDevices,
  377. AVAILABLE_DEVICES_POLL_INTERVAL_TIME);
  378. });
  379. }
  380. }
  381. /**
  382. * Sends analytics event with the passed device list.
  383. *
  384. * @param {Array<MediaDeviceInfo>} deviceList - List with info about the
  385. * available devices.
  386. * @returns {void}
  387. */
  388. function sendDeviceListToAnalytics(deviceList) {
  389. const devicesPropsArray
  390. = deviceList.map(
  391. ({ deviceId, groupId, kind, label }) => {
  392. // Filter the props of the device object.
  393. return {
  394. deviceId,
  395. groupId,
  396. kind,
  397. label
  398. };
  399. });
  400. Statistics.analytics.sendEvent(
  401. 'devices.deviceList', {
  402. devices: devicesPropsArray
  403. });
  404. }
  405. /**
  406. * Event handler for the 'devicechange' event.
  407. *
  408. * @param {MediaDeviceInfo[]} devices - list of media devices.
  409. * @emits RTCEvents.DEVICE_LIST_CHANGED
  410. */
  411. function onMediaDevicesListChanged(devicesReceived) {
  412. currentlyAvailableMediaDevices = devicesReceived.slice(0);
  413. logger.info(
  414. 'list of media devices has changed:',
  415. currentlyAvailableMediaDevices);
  416. sendDeviceListToAnalytics(currentlyAvailableMediaDevices);
  417. const videoInputDevices
  418. = currentlyAvailableMediaDevices.filter(d => d.kind === 'videoinput');
  419. const audioInputDevices
  420. = currentlyAvailableMediaDevices.filter(d => d.kind === 'audioinput');
  421. const videoInputDevicesWithEmptyLabels
  422. = videoInputDevices.filter(d => d.label === '');
  423. const audioInputDevicesWithEmptyLabels
  424. = audioInputDevices.filter(d => d.label === '');
  425. if (videoInputDevices.length
  426. && videoInputDevices.length
  427. === videoInputDevicesWithEmptyLabels.length) {
  428. devices.video = false;
  429. }
  430. if (audioInputDevices.length
  431. && audioInputDevices.length
  432. === audioInputDevicesWithEmptyLabels.length) {
  433. devices.audio = false;
  434. }
  435. eventEmitter.emit(RTCEvents.DEVICE_LIST_CHANGED, devicesReceived);
  436. }
  437. /**
  438. * Apply function with arguments if function exists.
  439. * Do nothing if function not provided.
  440. * @param {function} [fn] function to apply
  441. * @param {Array} [args=[]] arguments for function
  442. */
  443. function maybeApply(fn, args) {
  444. fn && fn(...args);
  445. }
  446. /**
  447. * Wrap `getUserMedia` in order to convert between callback and Promise based
  448. * APIs.
  449. * @param {Function} getUserMedia native function
  450. * @returns {Function} wrapped function
  451. */
  452. function wrapGetUserMedia(getUserMedia, usePromises = false) {
  453. let gUM;
  454. if (usePromises) {
  455. gUM = function(constraints, successCallback, errorCallback) {
  456. return getUserMedia(constraints)
  457. .then(stream => {
  458. maybeApply(successCallback, [ stream ]);
  459. return stream;
  460. })
  461. .catch(error => {
  462. maybeApply(errorCallback, [ error ]);
  463. throw error;
  464. });
  465. };
  466. } else {
  467. gUM = function(constraints, successCallback, errorCallback) {
  468. getUserMedia(constraints, stream => {
  469. maybeApply(successCallback, [ stream ]);
  470. }, error => {
  471. maybeApply(errorCallback, [ error ]);
  472. });
  473. };
  474. }
  475. return gUM;
  476. }
  477. /**
  478. * Uses the old MediaStreamTrack API to get the devices list and
  479. * converts it to the enumerateDevices format.
  480. * @param {Function} callback function to call once the devices list is received.
  481. */
  482. function enumerateDevicesThroughMediaStreamTrack(callback) {
  483. MediaStreamTrack.getSources(
  484. sources => callback(sources.map(convertMediaStreamTrackSource)));
  485. }
  486. /**
  487. * Converts MediaStreamTrack Source to enumerateDevices format.
  488. * @param {Object} source
  489. */
  490. function convertMediaStreamTrackSource(source) {
  491. const kind = (source.kind || '').toLowerCase();
  492. return {
  493. facing: source.facing || null,
  494. label: source.label,
  495. // theoretically deprecated MediaStreamTrack.getSources should
  496. // not return 'audiooutput' devices but let's handle it in any
  497. // case
  498. kind: kind
  499. ? kind === 'audiooutput' ? kind : `${kind}input`
  500. : null,
  501. deviceId: source.id,
  502. groupId: source.groupId || null
  503. };
  504. }
  505. /**
  506. * Handles the newly created Media Streams.
  507. * @param streams the new Media Streams
  508. * @param resolution the resolution of the video streams
  509. * @returns {*[]} object that describes the new streams
  510. */
  511. function handleLocalStream(streams, resolution) {
  512. let audioStream, desktopStream, videoStream;
  513. const res = [];
  514. // XXX The function obtainAudioAndVideoPermissions has examined the type of
  515. // the browser, its capabilities, etc. and has taken the decision whether to
  516. // invoke getUserMedia per device (e.g. Firefox) or once for both audio and
  517. // video (e.g. Chrome). In order to not duplicate the logic here, examine
  518. // the specified streams and figure out what we've received based on
  519. // obtainAudioAndVideoPermissions' decision.
  520. if (streams) {
  521. // As mentioned above, certain types of browser (e.g. Chrome) support
  522. // (with a result which meets our requirements expressed below) calling
  523. // getUserMedia once for both audio and video.
  524. const audioVideo = streams.audioVideo;
  525. if (audioVideo) {
  526. const NativeMediaStream
  527. = window.webkitMediaStream || window.MediaStream;
  528. const audioTracks = audioVideo.getAudioTracks();
  529. if (audioTracks.length) {
  530. // eslint-disable-next-line new-cap
  531. audioStream = new NativeMediaStream();
  532. for (let i = 0; i < audioTracks.length; i++) {
  533. audioStream.addTrack(audioTracks[i]);
  534. }
  535. }
  536. const videoTracks = audioVideo.getVideoTracks();
  537. if (videoTracks.length) {
  538. // eslint-disable-next-line new-cap
  539. videoStream = new NativeMediaStream();
  540. for (let j = 0; j < videoTracks.length; j++) {
  541. videoStream.addTrack(videoTracks[j]);
  542. }
  543. }
  544. } else {
  545. // On other types of browser (e.g. Firefox) we choose (namely,
  546. // obtainAudioAndVideoPermissions) to call getUserMedia per device
  547. // (type).
  548. audioStream = streams.audio;
  549. videoStream = streams.video;
  550. }
  551. desktopStream = streams.desktop;
  552. }
  553. if (desktopStream) {
  554. const { stream, sourceId, sourceType } = desktopStream;
  555. res.push({
  556. stream,
  557. sourceId,
  558. sourceType,
  559. track: stream.getVideoTracks()[0],
  560. mediaType: MediaType.VIDEO,
  561. videoType: VideoType.DESKTOP
  562. });
  563. }
  564. if (audioStream) {
  565. res.push({
  566. stream: audioStream,
  567. track: audioStream.getAudioTracks()[0],
  568. mediaType: MediaType.AUDIO,
  569. videoType: null
  570. });
  571. }
  572. if (videoStream) {
  573. res.push({
  574. stream: videoStream,
  575. track: videoStream.getVideoTracks()[0],
  576. mediaType: MediaType.VIDEO,
  577. videoType: VideoType.CAMERA,
  578. resolution
  579. });
  580. }
  581. return res;
  582. }
  583. /**
  584. * Represents a default implementation of setting a <tt>MediaStream</tt> as the
  585. * source of a video element that tries to be browser-agnostic through feature
  586. * checking. Note though that it was not completely clear from the predating
  587. * browser-specific implementations what &quot;videoSrc&quot; was because one
  588. * implementation of {@link RTCUtils#getVideoSrc} would return
  589. * <tt>MediaStream</tt> (e.g. Firefox), another a <tt>string</tt> representation
  590. * of the <tt>URL</tt> of the <tt>MediaStream</tt> (e.g. Chrome) and the return
  591. * value was only used by {@link RTCUIHelper#getVideoId} which itself did not
  592. * appear to be used anywhere. Generally, the implementation will try to follow
  593. * the related standards i.e. work with the <tt>srcObject</tt> and <tt>src</tt>
  594. * properties of the specified <tt>element</tt> taking into account vendor
  595. * prefixes.
  596. *
  597. * @param element the element whose video source/src is to be set to the
  598. * specified <tt>stream</tt>
  599. * @param {MediaStream} stream the <tt>MediaStream</tt> to set as the video
  600. * source/src of <tt>element</tt>
  601. */
  602. function defaultSetVideoSrc(element, stream) {
  603. // srcObject
  604. let srcObjectPropertyName = 'srcObject';
  605. if (!(srcObjectPropertyName in element)) {
  606. srcObjectPropertyName = 'mozSrcObject';
  607. if (!(srcObjectPropertyName in element)) {
  608. srcObjectPropertyName = null;
  609. }
  610. }
  611. if (srcObjectPropertyName) {
  612. element[srcObjectPropertyName] = stream;
  613. return;
  614. }
  615. // src
  616. let src;
  617. if (stream) {
  618. src = stream.jitsiObjectURL;
  619. // Save the created URL for stream so we can reuse it and not keep
  620. // creating URLs.
  621. if (!src) {
  622. stream.jitsiObjectURL
  623. = src
  624. = (URL || webkitURL).createObjectURL(stream);
  625. }
  626. }
  627. element.src = src || '';
  628. }
  629. /**
  630. *
  631. */
  632. class RTCUtils extends Listenable {
  633. /**
  634. *
  635. */
  636. constructor() {
  637. super(eventEmitter);
  638. }
  639. /**
  640. *
  641. * @param options
  642. */
  643. init(options) {
  644. if (typeof options.disableAEC === 'boolean') {
  645. disableAEC = options.disableAEC;
  646. logger.info(`Disable AEC: ${disableAEC}`);
  647. }
  648. if (typeof options.disableNS === 'boolean') {
  649. disableNS = options.disableNS;
  650. logger.info(`Disable NS: ${disableNS}`);
  651. }
  652. if (typeof options.disableAP === 'boolean') {
  653. disableAP = options.disableAP;
  654. logger.info(`Disable AP: ${disableAP}`);
  655. }
  656. if (typeof options.disableAGC === 'boolean') {
  657. disableAGC = options.disableAGC;
  658. logger.info(`Disable AGC: ${disableAGC}`);
  659. }
  660. if (typeof options.disableHPF === 'boolean') {
  661. disableHPF = options.disableHPF;
  662. logger.info(`Disable HPF: ${disableHPF}`);
  663. }
  664. // Initialize rawEnumerateDevicesWithCallback
  665. initRawEnumerateDevicesWithCallback();
  666. return new Promise((resolve, reject) => {
  667. if (RTCBrowserType.isFirefox()) {
  668. const FFversion = RTCBrowserType.getFirefoxVersion();
  669. if (FFversion < 40) {
  670. rejectWithWebRTCNotSupported(
  671. `Firefox version too old: ${FFversion}.`
  672. + ' Required >= 40.',
  673. reject);
  674. return;
  675. }
  676. this.RTCPeerConnectionType = mozRTCPeerConnection;
  677. this.getUserMedia
  678. = wrapGetUserMedia(
  679. navigator.mozGetUserMedia.bind(navigator));
  680. this.enumerateDevices = rawEnumerateDevicesWithCallback;
  681. this.pcConstraints = {};
  682. this.attachMediaStream
  683. = wrapAttachMediaStream((element, stream) => {
  684. // srcObject is being standardized and FF will
  685. // eventually support that unprefixed. FF also supports
  686. // the "element.src = URL.createObjectURL(...)" combo,
  687. // but that will be deprecated in favour of srcObject.
  688. //
  689. // https://groups.google.com/forum/#!topic/
  690. // mozilla.dev.media/pKOiioXonJg
  691. // https://github.com/webrtc/samples/issues/302
  692. if (element) {
  693. defaultSetVideoSrc(element, stream);
  694. if (stream) {
  695. element.play();
  696. }
  697. }
  698. return element;
  699. });
  700. this.getStreamID = function(stream) {
  701. let id = stream.id;
  702. if (!id) {
  703. let tracks = stream.getVideoTracks();
  704. if (!tracks || tracks.length === 0) {
  705. tracks = stream.getAudioTracks();
  706. }
  707. id = tracks[0].id;
  708. }
  709. return SDPUtil.filterSpecialChars(id);
  710. };
  711. this.getTrackID = function(track) {
  712. return track.id;
  713. };
  714. /* eslint-disable no-global-assign, no-native-reassign */
  715. RTCSessionDescription = mozRTCSessionDescription;
  716. RTCIceCandidate = mozRTCIceCandidate;
  717. /* eslint-enable no-global-assign, no-native-reassign */
  718. } else if (RTCBrowserType.isChrome()
  719. || RTCBrowserType.isOpera()
  720. || RTCBrowserType.isNWJS()
  721. || RTCBrowserType.isElectron()
  722. || RTCBrowserType.isReactNative()) {
  723. this.RTCPeerConnectionType = webkitRTCPeerConnection;
  724. const getUserMedia
  725. = navigator.webkitGetUserMedia.bind(navigator);
  726. this.getUserMedia = wrapGetUserMedia(getUserMedia);
  727. this.enumerateDevices = rawEnumerateDevicesWithCallback;
  728. this.attachMediaStream
  729. = wrapAttachMediaStream((element, stream) => {
  730. defaultSetVideoSrc(element, stream);
  731. return element;
  732. });
  733. this.getStreamID = function(stream) {
  734. // A. MediaStreams from FF endpoints have the characters '{'
  735. // and '}' that make jQuery choke.
  736. // B. The react-native-webrtc implementation that we use on
  737. // React Native at the time of this writing returns a number
  738. // for the id of MediaStream. Let's just say that a number
  739. // contains no special characters.
  740. const id = stream.id;
  741. // XXX The return statement is affected by automatic
  742. // semicolon insertion (ASI). No line terminator is allowed
  743. // between the return keyword and the expression.
  744. return (
  745. typeof id === 'number'
  746. ? id
  747. : SDPUtil.filterSpecialChars(id));
  748. };
  749. this.getTrackID = function(track) {
  750. return track.id;
  751. };
  752. this.pcConstraints = { optional: [] };
  753. if (options.useIPv6) {
  754. // https://code.google.com/p/webrtc/issues/detail?id=2828
  755. this.pcConstraints.optional.push({ googIPv6: true });
  756. }
  757. if (!webkitMediaStream.prototype.getVideoTracks) {
  758. webkitMediaStream.prototype.getVideoTracks = function() {
  759. return this.videoTracks;
  760. };
  761. }
  762. if (!webkitMediaStream.prototype.getAudioTracks) {
  763. webkitMediaStream.prototype.getAudioTracks = function() {
  764. return this.audioTracks;
  765. };
  766. }
  767. this.p2pPcConstraints
  768. = JSON.parse(JSON.stringify(this.pcConstraints));
  769. // Allows sending of video to be suspended if the bandwidth
  770. // estimation is too low.
  771. if (!options.disableSuspendVideo) {
  772. this.pcConstraints.optional.push(
  773. { googSuspendBelowMinBitrate: true });
  774. }
  775. /**
  776. * This option is used to enable the suspend video only for
  777. * part of the users on the P2P peer connection. The value of
  778. * the option is the ratio:
  779. * (users with suspended video enabled)/(all users).
  780. *
  781. * Note: The option is not documented because it is temporary
  782. * and only for internal testing purposes.
  783. *
  784. * @type {number}
  785. */
  786. const forceP2PSuspendVideoRatio
  787. = options.testing.forceP2PSuspendVideoRatio;
  788. // If <tt>forceP2PSuspendVideoRatio</tt> is invalid (not a
  789. * number) fall back to the default behavior (enabled for every
  790. // user).
  791. if (typeof forceP2PSuspendVideoRatio !== 'number'
  792. || Math.random() < forceP2PSuspendVideoRatio) {
  793. logger.info(`Enable suspend video mode for p2p (ratio=${
  794. forceP2PSuspendVideoRatio})`);
  795. Statistics.analytics.addPermanentProperties({
  796. forceP2PSuspendVideo: true
  797. });
  798. this.p2pPcConstraints.optional.push({
  799. googSuspendBelowMinBitrate: true
  800. });
  801. }
  802. } else if (RTCBrowserType.isEdge()) {
  803. this.RTCPeerConnectionType = ortcRTCPeerConnection;
  804. this.getUserMedia
  805. = wrapGetUserMedia(
  806. navigator.mediaDevices.getUserMedia.bind(
  807. navigator.mediaDevices),
  808. true);
  809. this.enumerateDevices = rawEnumerateDevicesWithCallback;
  810. this.attachMediaStream
  811. = wrapAttachMediaStream((element, stream) => {
  812. defaultSetVideoSrc(element, stream);
  813. return element;
  814. });
  815. // ORTC does not generate remote MediaStreams so those are
  816. // manually created by the ORTC shim. This means that their
  817. // id (internally generated) does not match the stream id
  818. // signaled into the remote SDP. Therefore, the shim adds a
  819. // custom jitsiRemoteId property with the original stream id.
  820. this.getStreamID = function(stream) {
  821. const id = stream.jitsiRemoteId || stream.id;
  822. return SDPUtil.filterSpecialChars(id);
  823. };
  824. // Remote MediaStreamTracks generated by ORTC (within a
  825. // RTCRtpReceiver) have an internally/random id which does not
  826. // match the track id signaled in the remote SDP. The shim adds
  827. // a custom jitsi-id property with the original track id.
  828. this.getTrackID = function(track) {
  829. return track.jitsiRemoteId || track.id;
  830. };
  831. } else if (RTCBrowserType.isTemasysPluginUsed()) {
  832. // Detect IE/Safari
  833. const webRTCReadyCb = () => {
  834. this.RTCPeerConnectionType = RTCPeerConnection;
  835. this.getUserMedia = window.getUserMedia;
  836. this.enumerateDevices
  837. = enumerateDevicesThroughMediaStreamTrack;
  838. this.attachMediaStream
  839. = wrapAttachMediaStream((element, stream) => {
  840. if (stream) {
  841. if (stream.id === 'dummyAudio'
  842. || stream.id === 'dummyVideo') {
  843. return;
  844. }
  845. // The container must be visible in order to
  846. // play or attach the stream when Temasys plugin
  847. // is in use
  848. const containerSel = $(element);
  849. if (RTCBrowserType.isTemasysPluginUsed()
  850. && !containerSel.is(':visible')) {
  851. containerSel.show();
  852. }
  853. const video
  854. = stream.getVideoTracks().length > 0;
  855. if (video && !$(element).is(':visible')) {
  856. throw new Error(
  857. 'video element must be visible to'
  858. + ' attach video stream');
  859. }
  860. }
  861. return attachMediaStream(element, stream);
  862. });
  863. this.getStreamID
  864. = stream => SDPUtil.filterSpecialChars(stream.label);
  865. this.getTrackID
  866. = track => track.id;
  867. onReady(
  868. options,
  869. this.getUserMediaWithConstraints.bind(this));
  870. };
  871. const webRTCReadyPromise
  872. = new Promise(r => AdapterJS.webRTCReady(r));
  873. // Resolve or reject depending on whether the Temasys plugin is
  874. // installed.
  875. AdapterJS.WebRTCPlugin.isPluginInstalled(
  876. AdapterJS.WebRTCPlugin.pluginInfo.prefix,
  877. AdapterJS.WebRTCPlugin.pluginInfo.plugName,
  878. AdapterJS.WebRTCPlugin.pluginInfo.type,
  879. /* installed */ () => {
  880. webRTCReadyPromise.then(() => {
  881. webRTCReadyCb();
  882. resolve();
  883. });
  884. },
  885. /* not installed */ () => {
  886. const error
  887. = new Error('Temasys plugin is not installed');
  888. error.name = 'WEBRTC_NOT_READY';
  889. error.webRTCReadyPromise = webRTCReadyPromise;
  890. reject(error);
  891. });
  892. } else {
  893. rejectWithWebRTCNotSupported(
  894. 'Browser does not appear to be WebRTC-capable',
  895. reject);
  896. return;
  897. }
  898. this.p2pPcConstraints = this.p2pPcConstraints || this.pcConstraints;
  899. // Call onReady() if Temasys plugin is not used
  900. if (!RTCBrowserType.isTemasysPluginUsed()) {
  901. onReady(options, this.getUserMediaWithConstraints.bind(this));
  902. resolve();
  903. }
  904. });
  905. }
  906. /* eslint-disable max-params */
  907. /**
  908. * @param {string[]} um required user media types
  909. * @param {function} successCallback
  910. * @param {Function} failureCallback
  911. * @param {Object} [options] optional parameters
  912. * @param {string} options.resolution
  913. * @param {number} options.bandwidth
  914. * @param {number} options.fps
  915. * @param {string} options.desktopStream
  916. * @param {string} options.cameraDeviceId
  917. * @param {string} options.micDeviceId
  918. **/
  919. getUserMediaWithConstraints(
  920. um,
  921. successCallback,
  922. failureCallback,
  923. options = {}) {
  924. const constraints = getConstraints(um, options);
  925. logger.info('Get media constraints', constraints);
  926. try {
  927. this.getUserMedia(
  928. constraints,
  929. stream => {
  930. logger.log('onUserMediaSuccess');
  931. setAvailableDevices(um, stream);
  932. successCallback(stream);
  933. },
  934. error => {
  935. setAvailableDevices(um, undefined);
  936. logger.warn('Failed to get access to local media. Error ',
  937. error, constraints);
  938. if (failureCallback) {
  939. failureCallback(
  940. new JitsiTrackError(error, constraints, um));
  941. }
  942. });
  943. } catch (e) {
  944. logger.error('GUM failed: ', e);
  945. if (failureCallback) {
  946. failureCallback(new JitsiTrackError(e, constraints, um));
  947. }
  948. }
  949. }
  950. /* eslint-enable max-params */
  951. /**
  952. * Creates the local MediaStreams.
  953. * @param {Object} [options] optional parameters
  954. * @param {Array} options.devices the devices that will be requested
  955. * @param {string} options.resolution resolution constraints
  956. * @param {bool} options.dontCreateJitsiTrack if <tt>true</tt> objects with
  957. * the following structure {stream: the Media Stream, type: "audio" or
  958. * "video", videoType: "camera" or "desktop"} will be returned trough the
  959. * Promise, otherwise JitsiTrack objects will be returned.
  960. * @param {string} options.cameraDeviceId
  961. * @param {string} options.micDeviceId
  962. * @returns {*} Promise object that will receive the new JitsiTracks
  963. */
  964. obtainAudioAndVideoPermissions(options = {}) {
  965. const self = this;
  966. const dsOptions = {
  967. ...options.desktopSharingExtensionExternalInstallation,
  968. desktopSharingSources: options.desktopSharingSources
  969. };
  970. return new Promise((resolve, reject) => {
  971. const successCallback = function(stream) {
  972. resolve(handleLocalStream(stream, options.resolution));
  973. };
  974. options.devices = options.devices || [ 'audio', 'video' ];
  975. options.resolution = options.resolution || '720';
  976. if (!screenObtainer.isSupported()
  977. && options.devices.indexOf('desktop') !== -1) {
  978. reject(new Error('Desktop sharing is not supported!'));
  979. }
  980. if (RTCBrowserType.isFirefox()
  981. // XXX The react-native-webrtc implementation that we
  982. // utilize on React Native at the time of this writing does
  983. // not support the MediaStream constructors defined by
  984. // https://www.w3.org/TR/mediacapture-streams/#constructors
  985. // and instead has a single constructor which expects (an
  986. // NSNumber as) a MediaStream ID.
  987. || RTCBrowserType.isReactNative()
  988. || RTCBrowserType.isTemasysPluginUsed()) {
  989. const GUM = function(device, s, e) {
  990. this.getUserMediaWithConstraints(device, s, e, options);
  991. };
  992. const deviceGUM = {
  993. 'audio': GUM.bind(self, [ 'audio' ]),
  994. 'video': GUM.bind(self, [ 'video' ])
  995. };
  996. if (screenObtainer.isSupported()) {
  997. deviceGUM.desktop = screenObtainer.obtainStream.bind(
  998. screenObtainer,
  999. dsOptions);
  1000. }
  1001. // With FF/IE we can't split the stream into audio and video
  1002. // because FF doesn't support media stream constructors. So, we
  1003. // need to get the audio stream separately from the video stream
  1004. // using two distinct GUM calls. Not very user friendly :-( but
  1005. // we don't have many other options either.
  1006. //
  1007. // Note that we pack those 2 streams in a single object and pass
  1008. // it to the successCallback method.
  1009. obtainDevices({
  1010. devices: options.devices,
  1011. streams: [],
  1012. successCallback,
  1013. errorCallback: reject,
  1014. deviceGUM
  1015. });
  1016. } else {
  1017. const hasDesktop = options.devices.indexOf('desktop') > -1;
  1018. if (hasDesktop) {
  1019. options.devices.splice(
  1020. options.devices.indexOf('desktop'),
  1021. 1);
  1022. }
  1023. if (options.devices.length) {
  1024. this.getUserMediaWithConstraints(
  1025. options.devices,
  1026. stream => {
  1027. const audioDeviceRequested
  1028. = options.devices.indexOf('audio') !== -1;
  1029. const videoDeviceRequested
  1030. = options.devices.indexOf('video') !== -1;
  1031. const audioTracksReceived
  1032. = stream.getAudioTracks().length > 0;
  1033. const videoTracksReceived
  1034. = stream.getVideoTracks().length > 0;
  1035. if ((audioDeviceRequested && !audioTracksReceived)
  1036. || (videoDeviceRequested
  1037. && !videoTracksReceived)) {
  1038. self.stopMediaStream(stream);
  1039. // We get here if we requested 'audio' or
  1040. // 'video' devices or both, but didn't get the
  1041. // corresponding MediaStreamTrack in the
  1042. // response stream. We don't know why this
  1043. // happened, so reject with a general error.
  1044. // eslint-disable-next-line no-shadow
  1045. const devices = [];
  1046. if (audioDeviceRequested
  1047. && !audioTracksReceived) {
  1048. devices.push('audio');
  1049. }
  1050. if (videoDeviceRequested
  1051. && !videoTracksReceived) {
  1052. devices.push('video');
  1053. }
  1054. // We are missing one of the media types we
  1055. // requested. In order to get the actual error
  1056. // that caused the missing media, we call
  1057. // getUserMedia one more time so we can obtain
  1058. // the actual error (example use cases:
  1059. // requesting audio and video while the video
  1060. // device is missing, or a device being denied
  1061. // while Chrome is set to not ask for permissions).
  1062. self.getUserMediaWithConstraints(
  1063. devices,
  1064. () => {
  1065. // We already failed to obtain this
  1066. // media, so we should not receive
  1067. // success for this call in any way.
  1068. // We reject with an error anyway to be
  1069. // sure the promise will finish.
  1070. reject(new JitsiTrackError(
  1071. { name: 'UnknownError' },
  1072. getConstraints(
  1073. options.devices,
  1074. options),
  1075. devices)
  1076. );
  1077. },
  1078. error => {
  1079. // Reject with the real error for not
  1080. // obtaining the media.
  1081. reject(error);
  1082. }, options);
  1083. return;
  1084. }
  1085. if (hasDesktop) {
  1086. screenObtainer.obtainStream(
  1087. dsOptions,
  1088. desktop => {
  1089. successCallback({ audioVideo: stream,
  1090. desktop });
  1091. }, error => {
  1092. self.stopMediaStream(stream);
  1093. reject(error);
  1094. });
  1095. } else {
  1096. successCallback({ audioVideo: stream });
  1097. }
  1098. },
  1099. error => reject(error),
  1100. options);
  1101. } else if (hasDesktop) {
  1102. screenObtainer.obtainStream(
  1103. dsOptions,
  1104. desktop => successCallback({ desktop }),
  1105. error => reject(error));
  1106. }
  1107. }
  1108. });
  1109. }
  1110. /**
  1111. *
  1112. */
  1113. getDeviceAvailability() {
  1114. return devices;
  1115. }
  1116. /**
  1117. *
  1118. */
  1119. isRTCReady() {
  1120. return rtcReady;
  1121. }
  1122. /**
  1123. *
  1124. */
  1125. _isDeviceListAvailable() {
  1126. if (!rtcReady) {
  1127. throw new Error('WebRTC not ready yet');
  1128. }
  1129. return Boolean(
  1130. (navigator.mediaDevices
  1131. && navigator.mediaDevices.enumerateDevices)
  1132. || (typeof MediaStreamTrack !== 'undefined'
  1133. && MediaStreamTrack.getSources));
  1134. }
  1135. /**
  1136. * Returns a promise which can be used to make sure that the WebRTC stack
  1137. * has been initialized.
  1138. *
  1139. * @returns {Promise} which is resolved only if the WebRTC stack is ready.
  1140. * Note that currently we do not detect stack initialization failure and
  1141. * the promise is never rejected (unless an unexpected error occurs).
  1142. */
  1143. onRTCReady() {
  1144. if (rtcReady) {
  1145. return Promise.resolve();
  1146. }
  1147. return new Promise(resolve => {
  1148. const listener = () => {
  1149. eventEmitter.removeListener(RTCEvents.RTC_READY, listener);
  1150. resolve();
  1151. };
  1152. eventEmitter.addListener(RTCEvents.RTC_READY, listener);
  1153. // We have no failed event, so... it either resolves or nothing
  1154. // happens.
  1155. });
  1156. }
  1157. /**
  1158. * Checks if it is possible to enumerate available cameras/microphones.
  1159. *
  1160. * @returns {Promise<boolean>} a Promise which will be resolved only once
  1161. * the WebRTC stack is ready, either with true if the device listing is
  1162. * available or with false otherwise.
  1163. */
  1164. isDeviceListAvailable() {
  1165. return this.onRTCReady().then(this._isDeviceListAvailable.bind(this));
  1166. }
  1167. /**
  1168. * Returns true if changing the input (camera / microphone) or output
  1169. * (audio) device is supported and false if not.
  1170. * @param {string} [deviceType] - type of device to change. Defaults to
  1171. * undefined or 'input'; use 'output' for audio output device change.
  1172. * @returns {boolean} true if available, false otherwise.
  1173. */
  1174. isDeviceChangeAvailable(deviceType) {
  1175. return deviceType === 'output' || deviceType === 'audiooutput'
  1176. ? isAudioOutputDeviceChangeAvailable
  1177. : RTCBrowserType.isChrome()
  1178. || RTCBrowserType.isFirefox()
  1179. || RTCBrowserType.isOpera()
  1180. || RTCBrowserType.isTemasysPluginUsed()
  1181. || RTCBrowserType.isNWJS()
  1182. || RTCBrowserType.isElectron()
  1183. || RTCBrowserType.isEdge();
  1184. }
  1185. /**
  1186. * A method to handle stopping of the stream.
  1187. * One point to handle the differences in various implementations.
  1188. * @param mediaStream MediaStream object to stop.
  1189. */
  1190. stopMediaStream(mediaStream) {
  1191. mediaStream.getTracks().forEach(track => {
  1192. // stop() not supported with IE
  1193. if (!RTCBrowserType.isTemasysPluginUsed() && track.stop) {
  1194. track.stop();
  1195. }
  1196. });
  1197. // leave stop for implementations still using it
  1198. if (mediaStream.stop) {
  1199. mediaStream.stop();
  1200. }
  1201. // The MediaStream implementation of the react-native-webrtc project has
  1202. // an explicit release method that is to be invoked in order to release
  1203. // used resources such as memory.
  1204. if (mediaStream.release) {
  1205. mediaStream.release();
  1206. }
  1207. // if we have done createObjectURL, let's clean it up
  1208. const url = mediaStream.jitsiObjectURL;
  1209. if (url) {
  1210. delete mediaStream.jitsiObjectURL;
  1211. (URL || webkitURL).revokeObjectURL(url);
  1212. }
  1213. }
  1214. /**
  1215. * Returns whether the desktop sharing is enabled or not.
  1216. * @returns {boolean}
  1217. */
  1218. isDesktopSharingEnabled() {
  1219. return screenObtainer.isSupported();
  1220. }
  1221. /**
  1222. * Sets current audio output device.
  1223. * @param {string} deviceId - id of 'audiooutput' device from
  1224. * navigator.mediaDevices.enumerateDevices(), 'default' for default
  1225. * device
  1226. * @returns {Promise} - resolves when audio output is changed, is rejected
  1227. * otherwise
  1228. */
  1229. setAudioOutputDevice(deviceId) {
  1230. if (!this.isDeviceChangeAvailable('output')) {
  1231. return Promise.reject(
  1232. new Error('Audio output device change is not supported'));
  1233. }
  1234. return featureDetectionAudioEl.setSinkId(deviceId)
  1235. .then(() => {
  1236. audioOutputDeviceId = deviceId;
  1237. audioOutputChanged = true;
  1238. logger.log(`Audio output device set to ${deviceId}`);
  1239. eventEmitter.emit(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
  1240. deviceId);
  1241. });
  1242. }
  1243. /**
  1244. * Returns the currently used audio output device id; 'default' stands for the
  1245. * default device
  1246. * @returns {string}
  1247. */
  1248. getAudioOutputDevice() {
  1249. return audioOutputDeviceId;
  1250. }
  1251. /**
  1252. * Returns the list of available media devices if it has been obtained;
  1253. * otherwise an empty array is returned.
  1254. * @returns {Array} list of available media devices.
  1255. */
  1256. getCurrentlyAvailableMediaDevices() {
  1257. return currentlyAvailableMediaDevices;
  1258. }
  1259. /**
  1260. * Returns event data for device to be reported to stats.
  1261. * @returns {MediaDeviceInfo} device.
  1262. */
  1263. getEventDataForActiveDevice(device) {
  1264. const deviceList = [];
  1265. const deviceData = {
  1266. 'deviceId': device.deviceId,
  1267. 'kind': device.kind,
  1268. 'label': device.label,
  1269. 'groupId': device.groupId
  1270. };
  1271. deviceList.push(deviceData);
  1272. return { deviceList };
  1273. }
  1274. }
  1275. /**
  1276. * Rejects a Promise because WebRTC is not supported.
  1277. *
  1278. * @param {string} errorMessage - The human-readable message of the Error which
  1279. * is the reason for the rejection.
  1280. * @param {Function} reject - The reject function of the Promise.
  1281. * @returns {void}
  1282. */
  1283. function rejectWithWebRTCNotSupported(errorMessage, reject) {
  1284. const error = new Error(errorMessage);
  1285. // WebRTC is not supported either natively or via a known plugin such as
  1286. // Temasys.
  1287. // XXX The Error class already has a property name which is commonly used to
  1288. // detail the represented error in a non-human-readable way (in contrast to
  1289. // the human-readable property message). I explicitly did not want to
  1290. // introduce a new specific property.
  1291. // FIXME None of the existing JitsiXXXErrors seemed to be appropriate
  1292. // recipients of the constant WEBRTC_NOT_SUPPORTED so I explicitly chose to
  1293. // leave it as a magic string at the time of this writing.
  1294. error.name = 'WEBRTC_NOT_SUPPORTED';
  1295. logger.error(errorMessage);
  1296. reject(error);
  1297. }
  1298. const rtcUtils = new RTCUtils();
  1299. /**
  1300. *
  1301. * @param options
  1302. */
  1303. function obtainDevices(options) {
  1304. if (!options.devices || options.devices.length === 0) {
  1305. return options.successCallback(options.streams || {});
  1306. }
  1307. const device = options.devices.splice(0, 1);
  1308. options.deviceGUM[device](
  1309. stream => {
  1310. options.streams = options.streams || {};
  1311. options.streams[device] = stream;
  1312. obtainDevices(options);
  1313. },
  1314. error => {
  1315. Object.keys(options.streams).forEach(
  1316. d => rtcUtils.stopMediaStream(options.streams[d]));
  1317. logger.error(
  1318. `failed to obtain ${device} stream - stop`, error);
  1319. options.errorCallback(error);
  1320. });
  1321. }
  1322. /**
  1323. * In the case of IE we continue from the 'onReady' callback passed to the RTCUtils
  1324. * constructor. It will be invoked by the Temasys plugin once it is initialized.
  1325. *
  1326. * @param options
  1327. * @param GUM
  1328. */
  1329. function onReady(options, GUM) {
  1330. rtcReady = true;
  1331. eventEmitter.emit(RTCEvents.RTC_READY, true);
  1332. screenObtainer.init(options, GUM);
  1333. if (rtcUtils.isDeviceListAvailable() && rawEnumerateDevicesWithCallback) {
  1334. rawEnumerateDevicesWithCallback(ds => {
  1335. currentlyAvailableMediaDevices = ds.splice(0);
  1336. logger.info('Available devices: ', currentlyAvailableMediaDevices);
  1337. sendDeviceListToAnalytics(currentlyAvailableMediaDevices);
  1338. eventEmitter.emit(RTCEvents.DEVICE_LIST_AVAILABLE,
  1339. currentlyAvailableMediaDevices);
  1340. if (isDeviceChangeEventSupported) {
  1341. navigator.mediaDevices.addEventListener(
  1342. 'devicechange',
  1343. () => rtcUtils.enumerateDevices(onMediaDevicesListChanged));
  1344. } else {
  1345. pollForAvailableMediaDevices();
  1346. }
  1347. });
  1348. }
  1349. }
  1350. /**
  1351. * Wraps original attachMediaStream function to set current audio output device
  1352. * if this is supported.
  1353. * @param {Function} origAttachMediaStream
  1354. * @returns {Function}
  1355. */
  1356. function wrapAttachMediaStream(origAttachMediaStream) {
  1357. return function(element, stream) {
  1358. // eslint-disable-next-line prefer-rest-params
  1359. const res = origAttachMediaStream.apply(rtcUtils, arguments);
  1360. if (stream
  1361. && rtcUtils.isDeviceChangeAvailable('output')
  1362. && stream.getAudioTracks
  1363. && stream.getAudioTracks().length
  1364. // we skip setting audio output if there was no explicit change
  1365. && audioOutputChanged) {
  1366. element.setSinkId(rtcUtils.getAudioOutputDevice())
  1367. .catch(function(ex) {
  1368. const err
  1369. = new JitsiTrackError(ex, null, [ 'audiooutput' ]);
  1370. GlobalOnErrorHandler.callUnhandledRejectionHandler({
  1371. promise: this, // eslint-disable-line no-invalid-this
  1372. reason: err
  1373. });
  1374. logger.warn(
  1375. 'Failed to set audio output device for the element.'
  1376. + ' Default audio output device will be used'
  1377. + ' instead',
  1378. element,
  1379. err);
  1380. });
  1381. }
  1382. return res;
  1383. };
  1384. }
  1385. export default rtcUtils;
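
A minimal usage sketch (not part of RTCUtils.js itself): it assumes the module is imported from its lib-jitsi-meet repository path, that the option values shown here are acceptable to init() and obtainAudioAndVideoPermissions() as documented in the JSDoc above, and it passes an empty testing object because init() reads options.testing on Chrome-like browsers.

import rtcUtils from './modules/RTC/RTCUtils';

rtcUtils.init({
    disableAEC: false,
    disableNS: false,
    testing: {} // init() dereferences options.testing on Chrome-like browsers
})
    .then(() => rtcUtils.obtainAudioAndVideoPermissions({
        devices: [ 'audio', 'video' ],
        resolution: '720'
    }))
    .then(streamInfos => {
        // Each entry describes one local stream produced by handleLocalStream:
        // { stream, track, mediaType, videoType, ... }
        streamInfos.forEach(({ mediaType, stream }) =>
            console.log(`obtained local ${mediaType} stream`, stream));
    })
    .catch(error => console.error('Failed to create local streams', error));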