
RTCUtils.js

  1. /* global $,
  2. __filename,
  3. attachMediaStream,
  4. MediaStreamTrack,
  5. RTCIceCandidate: true,
  6. RTCPeerConnection,
  7. RTCSessionDescription: true,
  8. mozRTCIceCandidate,
  9. mozRTCPeerConnection,
  10. mozRTCSessionDescription,
  11. webkitMediaStream,
  12. webkitRTCPeerConnection,
  13. webkitURL
  14. */
  15. import CameraFacingMode from '../../service/RTC/CameraFacingMode';
  16. import EventEmitter from 'events';
  17. import { getLogger } from 'jitsi-meet-logger';
  18. import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
  19. import JitsiTrackError from '../../JitsiTrackError';
  20. import Listenable from '../util/Listenable';
  21. import * as MediaType from '../../service/RTC/MediaType';
  22. import Resolutions from '../../service/RTC/Resolutions';
  23. import RTCBrowserType from './RTCBrowserType';
  24. import RTCEvents from '../../service/RTC/RTCEvents';
  25. import ortcRTCPeerConnection from './ortc/RTCPeerConnection';
  26. import screenObtainer from './ScreenObtainer';
  27. import SDPUtil from '../xmpp/SDPUtil';
  28. import VideoType from '../../service/RTC/VideoType';
  29. const logger = getLogger(__filename);
  30. // XXX Don't require Temasys unless it's to be used because it doesn't run on
  31. // React Native, for example.
  32. const AdapterJS
  33. = RTCBrowserType.isTemasysPluginUsed()
  34. ? require('./adapter.screenshare')
  35. : undefined;
  36. const eventEmitter = new EventEmitter();
  37. const AVAILABLE_DEVICES_POLL_INTERVAL_TIME = 3000; // ms
  38. const devices = {
  39. audio: false,
  40. video: false
  41. };
  42. // Currently audio output device change is supported only in Chrome and
  43. // default output always has 'default' device ID
  44. let audioOutputDeviceId = 'default'; // default device
  45. // whether user has explicitly set a device to use
  46. let audioOutputChanged = false;
  47. // Disables all audio processing
  48. let disableAP = false;
  49. // Disables Acoustic Echo Cancellation
  50. let disableAEC = false;
  51. // Disables Noise Suppression
  52. let disableNS = false;
  53. // Disables Automatic Gain Control
  54. let disableAGC = false;
  55. // Disables Highpass Filter
  56. let disableHPF = false;
  57. const featureDetectionAudioEl = document.createElement('audio');
  58. const isAudioOutputDeviceChangeAvailable
  59. = typeof featureDetectionAudioEl.setSinkId !== 'undefined';
  60. let currentlyAvailableMediaDevices;
  61. /**
  62. * "rawEnumerateDevicesWithCallback" will be initialized only after WebRTC is
  63. * ready. Otherwise it is too early to assume that the devices listing is not
  64. * supported.
  65. */
  66. let rawEnumerateDevicesWithCallback;
  67. /**
  68. *
  69. */
  70. function initRawEnumerateDevicesWithCallback() {
  71. rawEnumerateDevicesWithCallback
  72. = navigator.mediaDevices && navigator.mediaDevices.enumerateDevices
  73. ? function(callback) {
  74. navigator.mediaDevices.enumerateDevices().then(
  75. callback,
  76. () => callback([]));
  77. }
  78. // Safari:
  79. // "ReferenceError: Can't find variable: MediaStreamTrack" when
  80. // Temasys plugin is not installed yet, have to delay this call
  81. // until WebRTC is ready.
  82. : MediaStreamTrack && MediaStreamTrack.getSources
  83. ? function(callback) {
  84. MediaStreamTrack.getSources(
  85. sources =>
  86. callback(
  87. sources.map(convertMediaStreamTrackSource)));
  88. }
  89. : undefined;
  90. }
  91. // TODO: currently no browser supports the 'devicechange' event even in nightly
  92. // builds, so no feature/browser detection is used at all. In the future this
  93. // should be replaced with proper feature detection. Progress on 'devicechange' event
  94. // implementation for Chrome/Opera/NWJS can be tracked at
  95. // https://bugs.chromium.org/p/chromium/issues/detail?id=388648, for Firefox -
  96. // at https://bugzilla.mozilla.org/show_bug.cgi?id=1152383. More information on
  97. // 'devicechange' event can be found in spec -
  98. // http://w3c.github.io/mediacapture-main/#event-mediadevices-devicechange
  99. // TODO: check MS Edge
  100. const isDeviceChangeEventSupported = false;
  101. let rtcReady = false;
  102. /**
  103. *
  104. * @param constraints
  105. * @param isNewStyleConstraintsSupported
  106. * @param resolution
  107. */
  108. function setResolutionConstraints(
  109. constraints,
  110. isNewStyleConstraintsSupported,
  111. resolution) {
  112. if (Resolutions[resolution]) {
  113. if (isNewStyleConstraintsSupported) {
  114. constraints.video.width = {
  115. ideal: Resolutions[resolution].width
  116. };
  117. constraints.video.height = {
  118. ideal: Resolutions[resolution].height
  119. };
  120. }
  121. constraints.video.mandatory.minWidth = Resolutions[resolution].width;
  122. constraints.video.mandatory.minHeight = Resolutions[resolution].height;
  123. }
  124. if (constraints.video.mandatory.minWidth) {
  125. constraints.video.mandatory.maxWidth
  126. = constraints.video.mandatory.minWidth;
  127. }
  128. if (constraints.video.mandatory.minHeight) {
  129. constraints.video.mandatory.maxHeight
  130. = constraints.video.mandatory.minHeight;
  131. }
  132. }
  133. /**
  134. * @param {string[]} um required user media types
  135. *
  136. * @param {Object} [options={}] optional parameters
  137. * @param {string} options.resolution
  138. * @param {number} options.bandwidth
  139. * @param {number} options.fps
  140. * @param {string} options.desktopStream
  141. * @param {string} options.cameraDeviceId
  142. * @param {string} options.micDeviceId
  143. * @param {CameraFacingMode} options.facingMode
  144. * @param {bool} firefox_fake_device
  145. */
  146. function getConstraints(um, options) {
  147. const constraints = { audio: false,
  148. video: false };
  149. // Don't mix new and old style settings for Chromium as this leads
  150. // to TypeError in new Chromium versions. @see
  151. // https://bugs.chromium.org/p/chromium/issues/detail?id=614716
  152. // This is a temporary solution; in the future we will fully split old- and
  153. // new-style constraints once new versions of Chromium and Firefox have
  154. // stable support for the new constraints format. For more information
  155. // @see https://github.com/jitsi/lib-jitsi-meet/pull/136
  156. const isNewStyleConstraintsSupported
  157. = RTCBrowserType.isFirefox()
  158. || RTCBrowserType.isEdge()
  159. || RTCBrowserType.isReactNative()
  160. || RTCBrowserType.isTemasysPluginUsed();
  161. if (um.indexOf('video') >= 0) {
  162. // same behaviour as true
  163. constraints.video = { mandatory: {},
  164. optional: [] };
  165. if (options.cameraDeviceId) {
  166. if (isNewStyleConstraintsSupported) {
  167. // New style of setting device id.
  168. constraints.video.deviceId = options.cameraDeviceId;
  169. }
  170. // Old style.
  171. constraints.video.optional.push({
  172. sourceId: options.cameraDeviceId
  173. });
  174. } else {
  175. // Prefer the front i.e. user-facing camera (to the back i.e.
  176. // environment-facing camera, for example).
  177. // TODO: Maybe use "exact" syntax if options.facingMode is defined,
  178. // but this probably needs to be decided when updating other
  179. // constraints, as we currently don't use "exact" syntax anywhere.
  180. const facingMode = options.facingMode || CameraFacingMode.USER;
  181. if (isNewStyleConstraintsSupported) {
  182. constraints.video.facingMode = facingMode;
  183. }
  184. constraints.video.optional.push({
  185. facingMode
  186. });
  187. }
  188. if (options.minFps || options.maxFps || options.fps) {
  189. // for some cameras it might be necessary to request 30fps
  190. // so they choose 30fps mjpg over 10fps yuy2
  191. if (options.minFps || options.fps) {
  192. // Fall back to options.fps for backwards compatibility
  193. options.minFps = options.minFps || options.fps;
  194. constraints.video.mandatory.minFrameRate = options.minFps;
  195. }
  196. if (options.maxFps) {
  197. constraints.video.mandatory.maxFrameRate = options.maxFps;
  198. }
  199. }
  200. setResolutionConstraints(
  201. constraints, isNewStyleConstraintsSupported, options.resolution);
  202. }
  203. if (um.indexOf('audio') >= 0) {
  204. if (RTCBrowserType.isReactNative()) {
  205. // The react-native-webrtc project that we're currently using
  206. // expects the audio constraint to be a boolean.
  207. constraints.audio = true;
  208. } else if (RTCBrowserType.isFirefox()) {
  209. if (options.micDeviceId) {
  210. constraints.audio = {
  211. mandatory: {},
  212. deviceId: options.micDeviceId, // new style
  213. optional: [ {
  214. sourceId: options.micDeviceId // old style
  215. } ] };
  216. } else {
  217. constraints.audio = true;
  218. }
  219. } else {
  220. // same behaviour as true
  221. constraints.audio = { mandatory: {},
  222. optional: [] };
  223. if (options.micDeviceId) {
  224. if (isNewStyleConstraintsSupported) {
  225. // New style of setting device id.
  226. constraints.audio.deviceId = options.micDeviceId;
  227. }
  228. // Old style.
  229. constraints.audio.optional.push({
  230. sourceId: options.micDeviceId
  231. });
  232. }
  233. // if it is good enough for hangouts...
  234. constraints.audio.optional.push(
  235. { echoCancellation: !disableAEC && !disableAP },
  236. { googEchoCancellation: !disableAEC && !disableAP },
  237. { googAutoGainControl: !disableAGC && !disableAP },
  238. { googNoiseSuppression: !disableNS && !disableAP },
  239. { googHighpassFilter: !disableHPF && !disableAP },
  240. { googNoiseSuppression2: !disableNS && !disableAP },
  241. { googEchoCancellation2: !disableAEC && !disableAP },
  242. { googAutoGainControl2: !disableAGC && !disableAP }
  243. );
  244. }
  245. }
  246. if (um.indexOf('screen') >= 0) {
  247. if (RTCBrowserType.isChrome()) {
  248. constraints.video = {
  249. mandatory: {
  250. chromeMediaSource: 'screen',
  251. maxWidth: window.screen.width,
  252. maxHeight: window.screen.height,
  253. maxFrameRate: 3
  254. },
  255. optional: []
  256. };
  257. } else if (RTCBrowserType.isTemasysPluginUsed()) {
  258. constraints.video = {
  259. optional: [
  260. {
  261. sourceId: AdapterJS.WebRTCPlugin.plugin.screensharingKey
  262. }
  263. ]
  264. };
  265. } else if (RTCBrowserType.isFirefox()) {
  266. constraints.video = {
  267. mozMediaSource: 'window',
  268. mediaSource: 'window'
  269. };
  270. } else {
  271. const errmsg
  272. = '\'screen\' WebRTC media source is supported only in Chrome'
  273. + ' and with Temasys plugin';
  274. GlobalOnErrorHandler.callErrorHandler(new Error(errmsg));
  275. logger.error(errmsg);
  276. }
  277. }
  278. if (um.indexOf('desktop') >= 0) {
  279. constraints.video = {
  280. mandatory: {
  281. chromeMediaSource: 'desktop',
  282. chromeMediaSourceId: options.desktopStream,
  283. maxWidth: window.screen.width,
  284. maxHeight: window.screen.height,
  285. maxFrameRate: 3
  286. },
  287. optional: []
  288. };
  289. }
  290. if (options.bandwidth) {
  291. if (!constraints.video) {
  292. // same behaviour as true
  293. constraints.video = { mandatory: {},
  294. optional: [] };
  295. }
  296. constraints.video.optional.push({ bandwidth: options.bandwidth });
  297. }
  298. // We enable fake audio for both audio and video tracks: the fake audio and
  299. // video seem to work only when enabled in a single getUserMedia call, so we
  300. // cannot get fake audio separately from fake video. This may later be a
  301. // problem with some of the tests.
  302. if (RTCBrowserType.isFirefox() && options.firefox_fake_device) {
  303. // This seems to be fixed now, so the experimental fix was removed, as having
  304. // multiple audio tracks breaks the tests:
  305. // constraints.audio = true;
  306. constraints.fake = true;
  307. }
  308. return constraints;
  309. }
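/*
 * Illustrative sketch (not part of the original source): assuming
 * Resolutions['720'] maps to 1280x720 and the browser is Chrome (so the
 * old-style constraints branch is taken, with all audio-processing flags at
 * their defaults), a call such as
 *
 *     getConstraints([ 'audio', 'video' ], { resolution: '720' });
 *
 * would produce roughly:
 *
 *     {
 *         audio: {
 *             mandatory: {},
 *             optional: [ { echoCancellation: true },
 *                         { googEchoCancellation: true }, ... ]
 *         },
 *         video: {
 *             mandatory: { minWidth: 1280, minHeight: 720,
 *                          maxWidth: 1280, maxHeight: 720 },
 *             optional: [ { facingMode: 'user' } ]
 *         }
 *     }
 */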
  310. /**
  311. * Sets the available devices based on the options we requested and the
  312. * streams we received.
  313. * @param um the options we requested to getUserMedia.
  314. * @param stream the stream we received from calling getUserMedia.
  315. */
  316. function setAvailableDevices(um, stream) {
  317. const audioTracksReceived = stream && stream.getAudioTracks().length > 0;
  318. const videoTracksReceived = stream && stream.getVideoTracks().length > 0;
  319. if (um.indexOf('video') !== -1) {
  320. devices.video = videoTracksReceived;
  321. }
  322. if (um.indexOf('audio') !== -1) {
  323. devices.audio = audioTracksReceived;
  324. }
  325. eventEmitter.emit(RTCEvents.AVAILABLE_DEVICES_CHANGED, devices);
  326. }
  327. /**
  328. * Checks if new list of available media devices differs from previous one.
  329. * @param {MediaDeviceInfo[]} newDevices - list of new devices.
  330. * @returns {boolean} - true if list is different, false otherwise.
  331. */
  332. function compareAvailableMediaDevices(newDevices) {
  333. if (newDevices.length !== currentlyAvailableMediaDevices.length) {
  334. return true;
  335. }
  336. /* eslint-disable newline-per-chained-call */
  337. return (
  338. newDevices.map(mediaDeviceInfoToJSON).sort().join('')
  339. !== currentlyAvailableMediaDevices
  340. .map(mediaDeviceInfoToJSON).sort().join(''));
  341. /* eslint-enable newline-per-chained-call */
  342. /**
  343. *
  344. * @param info
  345. */
  346. function mediaDeviceInfoToJSON(info) {
  347. return JSON.stringify({
  348. kind: info.kind,
  349. deviceId: info.deviceId,
  350. groupId: info.groupId,
  351. label: info.label,
  352. facing: info.facing
  353. });
  354. }
  355. }
  356. /**
  357. * Periodically polls the enumerateDevices() method to check whether the list
  358. * of media devices has changed. This is a temporary workaround until the
  359. * 'devicechange' event is supported by browsers.
  360. */
  361. function pollForAvailableMediaDevices() {
  362. // Here we use the plain navigator.mediaDevices.enumerateDevices instead of
  363. // the wrapped one because we just need to know that the devices changed;
  364. // labels do not matter. This fixes the situation where we initially have no
  365. // devices and then plug in a new one.
  366. if (rawEnumerateDevicesWithCallback) {
  367. rawEnumerateDevicesWithCallback(ds => {
  368. // We don't fire RTCEvents.DEVICE_LIST_CHANGED for the first time
  369. // we call enumerateDevices(). This is the initial step.
  370. if (typeof currentlyAvailableMediaDevices === 'undefined') {
  371. currentlyAvailableMediaDevices = ds.slice(0);
  372. } else if (compareAvailableMediaDevices(ds)) {
  373. onMediaDevicesListChanged(ds);
  374. }
  375. window.setTimeout(pollForAvailableMediaDevices,
  376. AVAILABLE_DEVICES_POLL_INTERVAL_TIME);
  377. });
  378. }
  379. }
  380. /**
  381. * Event handler for the 'devicechange' event.
  382. *
  383. * @param {MediaDeviceInfo[]} devicesReceived - list of media devices.
  384. * @emits RTCEvents.DEVICE_LIST_CHANGED
  385. */
  386. function onMediaDevicesListChanged(devicesReceived) {
  387. currentlyAvailableMediaDevices = devicesReceived.slice(0);
  388. logger.info(
  389. 'list of media devices has changed:',
  390. currentlyAvailableMediaDevices);
  391. const videoInputDevices
  392. = currentlyAvailableMediaDevices.filter(d => d.kind === 'videoinput');
  393. const audioInputDevices
  394. = currentlyAvailableMediaDevices.filter(d => d.kind === 'audioinput');
  395. const videoInputDevicesWithEmptyLabels
  396. = videoInputDevices.filter(d => d.label === '');
  397. const audioInputDevicesWithEmptyLabels
  398. = audioInputDevices.filter(d => d.label === '');
  399. if (videoInputDevices.length
  400. && videoInputDevices.length
  401. === videoInputDevicesWithEmptyLabels.length) {
  402. devices.video = false;
  403. }
  404. if (audioInputDevices.length
  405. && audioInputDevices.length
  406. === audioInputDevicesWithEmptyLabels.length) {
  407. devices.audio = false;
  408. }
  409. eventEmitter.emit(RTCEvents.DEVICE_LIST_CHANGED, devicesReceived);
  410. }
  411. /**
  412. * Applies a function with arguments if the function exists.
  413. * Does nothing if the function is not provided.
  414. * @param {function} [fn] function to apply
  415. * @param {Array} [args=[]] arguments for function
  416. */
  417. function maybeApply(fn, args = []) {
  418. fn && fn(...args);
  419. }
  420. /**
  421. * Wrap `getUserMedia` in order to convert between callback and Promise based
  422. * APIs.
  423. * @param {Function} getUserMedia native function
  424. * @returns {Function} wrapped function
  425. */
  426. function wrapGetUserMedia(getUserMedia, usePromises = false) {
  427. let gUM;
  428. if (usePromises) {
  429. gUM = function(constraints, successCallback, errorCallback) {
  430. return getUserMedia(constraints)
  431. .then(stream => {
  432. maybeApply(successCallback, [ stream ]);
  433. return stream;
  434. })
  435. .catch(error => {
  436. maybeApply(errorCallback, [ error ]);
  437. throw error;
  438. });
  439. };
  440. } else {
  441. gUM = function(constraints, successCallback, errorCallback) {
  442. getUserMedia(constraints, stream => {
  443. maybeApply(successCallback, [ stream ]);
  444. }, error => {
  445. maybeApply(errorCallback, [ error ]);
  446. });
  447. };
  448. }
  449. return gUM;
  450. }
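/*
 * Illustrative sketch (not part of the original source): regardless of whether
 * the native getUserMedia is callback based or Promise based, the wrapper
 * returned by wrapGetUserMedia() is always invoked callback style, e.g.:
 *
 *     const gUM = wrapGetUserMedia(
 *         navigator.mediaDevices.getUserMedia.bind(navigator.mediaDevices),
 *         true); // the native function returns a Promise
 *
 *     gUM(
 *         { audio: true },
 *         stream => console.log('got stream', stream),
 *         error => console.error('gUM failed', error));
 */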
  451. /**
  452. * Use old MediaStreamTrack to get devices list and
  453. * convert it to enumerateDevices format.
  454. * @param {Function} callback function to call when received devices list.
  455. */
  456. function enumerateDevicesThroughMediaStreamTrack(callback) {
  457. MediaStreamTrack.getSources(
  458. sources => callback(sources.map(convertMediaStreamTrackSource)));
  459. }
  460. /**
  461. * Converts MediaStreamTrack Source to enumerateDevices format.
  462. * @param {Object} source
  463. */
  464. function convertMediaStreamTrackSource(source) {
  465. const kind = (source.kind || '').toLowerCase();
  466. return {
  467. facing: source.facing || null,
  468. label: source.label,
  469. // theoretically deprecated MediaStreamTrack.getSources should
  470. // not return 'audiooutput' devices but let's handle it in any
  471. // case
  472. kind: kind
  473. ? kind === 'audiooutput' ? kind : `${kind}input`
  474. : null,
  475. deviceId: source.id,
  476. groupId: source.groupId || null
  477. };
  478. }
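/*
 * Illustrative sketch (not part of the original source, values hypothetical):
 * a legacy MediaStreamTrack.getSources() entry such as
 *
 *     { id: 'abc123', kind: 'video', label: 'FaceTime HD Camera', facing: '' }
 *
 * is converted by convertMediaStreamTrackSource() into the enumerateDevices
 * shape:
 *
 *     { deviceId: 'abc123', kind: 'videoinput', label: 'FaceTime HD Camera',
 *       facing: null, groupId: null }
 */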
  479. /**
  480. * Handles the newly created Media Streams.
  481. * @param streams the new Media Streams
  482. * @param resolution the resolution of the video streams
  483. * @returns {*[]} object that describes the new streams
  484. */
  485. function handleLocalStream(streams, resolution) {
  486. let audioStream, desktopStream, videoStream;
  487. const res = [];
  488. // XXX The function obtainAudioAndVideoPermissions has examined the type of
  489. // the browser, its capabilities, etc. and has taken the decision whether to
  490. // invoke getUserMedia per device (e.g. Firefox) or once for both audio and
  491. // video (e.g. Chrome). In order to not duplicate the logic here, examine
  492. // the specified streams and figure out what we've received based on
  493. // obtainAudioAndVideoPermissions' decision.
  494. if (streams) {
  495. // As mentioned above, certain types of browser (e.g. Chrome) support
  496. // (with a result which meets our requirements expressed below) calling
  497. // getUserMedia once for both audio and video.
  498. const audioVideo = streams.audioVideo;
  499. if (audioVideo) {
  500. const NativeMediaStream
  501. = window.webkitMediaStream || window.MediaStream;
  502. const audioTracks = audioVideo.getAudioTracks();
  503. if (audioTracks.length) {
  504. // eslint-disable-next-line new-cap
  505. audioStream = new NativeMediaStream();
  506. for (let i = 0; i < audioTracks.length; i++) {
  507. audioStream.addTrack(audioTracks[i]);
  508. }
  509. }
  510. const videoTracks = audioVideo.getVideoTracks();
  511. if (videoTracks.length) {
  512. // eslint-disable-next-line new-cap
  513. videoStream = new NativeMediaStream();
  514. for (let j = 0; j < videoTracks.length; j++) {
  515. videoStream.addTrack(videoTracks[j]);
  516. }
  517. }
  518. } else {
  519. // On other types of browser (e.g. Firefox) we choose (namely,
  520. // obtainAudioAndVideoPermissions) to call getUserMedia per device
  521. // (type).
  522. audioStream = streams.audio;
  523. videoStream = streams.video;
  524. }
  525. desktopStream = streams.desktop;
  526. }
  527. if (desktopStream) {
  528. const { stream, sourceId, sourceType } = desktopStream;
  529. res.push({
  530. stream,
  531. sourceId,
  532. sourceType,
  533. track: stream.getVideoTracks()[0],
  534. mediaType: MediaType.VIDEO,
  535. videoType: VideoType.DESKTOP
  536. });
  537. }
  538. if (audioStream) {
  539. res.push({
  540. stream: audioStream,
  541. track: audioStream.getAudioTracks()[0],
  542. mediaType: MediaType.AUDIO,
  543. videoType: null
  544. });
  545. }
  546. if (videoStream) {
  547. res.push({
  548. stream: videoStream,
  549. track: videoStream.getVideoTracks()[0],
  550. mediaType: MediaType.VIDEO,
  551. videoType: VideoType.CAMERA,
  552. resolution
  553. });
  554. }
  555. return res;
  556. }
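/*
 * Illustrative sketch (not part of the original source): handleLocalStream()
 * accepts either the combined shape produced on Chrome-like browsers,
 *
 *     { audioVideo: <MediaStream with audio and video tracks>, desktop: ... }
 *
 * or the per-device shape produced on e.g. Firefox,
 *
 *     { audio: <MediaStream>, video: <MediaStream>, desktop: ... }
 *
 * and in both cases returns an array of descriptors such as
 *
 *     [ { stream, track, mediaType: MediaType.AUDIO, videoType: null },
 *       { stream, track, mediaType: MediaType.VIDEO,
 *         videoType: VideoType.CAMERA, resolution } ]
 */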
  557. /**
  558. * Represents a default implementation of setting a <tt>MediaStream</tt> as the
  559. * source of a video element that tries to be browser-agnostic through feature
  560. * checking. Note though that it was not completely clear from the predating
  561. * browser-specific implementations what &quot;videoSrc&quot; was because one
  562. * implementation of {@link RTCUtils#getVideoSrc} would return
  563. * <tt>MediaStream</tt> (e.g. Firefox), another a <tt>string</tt> representation
  564. * of the <tt>URL</tt> of the <tt>MediaStream</tt> (e.g. Chrome) and the return
  565. * value was only used by {@link RTCUIHelper#getVideoId} which itself did not
  566. * appear to be used anywhere. Generally, the implementation will try to follow
  567. * the related standards i.e. work with the <tt>srcObject</tt> and <tt>src</tt>
  568. * properties of the specified <tt>element</tt> taking into account vendor
  569. * prefixes.
  570. *
  571. * @param element the element whose video source/src is to be set to the
  572. * specified <tt>stream</tt>
  573. * @param {MediaStream} stream the <tt>MediaStream</tt> to set as the video
  574. * source/src of <tt>element</tt>
  575. */
  576. function defaultSetVideoSrc(element, stream) {
  577. // srcObject
  578. let srcObjectPropertyName = 'srcObject';
  579. if (!(srcObjectPropertyName in element)) {
  580. srcObjectPropertyName = 'mozSrcObject';
  581. if (!(srcObjectPropertyName in element)) {
  582. srcObjectPropertyName = null;
  583. }
  584. }
  585. if (srcObjectPropertyName) {
  586. element[srcObjectPropertyName] = stream;
  587. return;
  588. }
  589. // src
  590. let src;
  591. if (stream) {
  592. src = stream.jitsiObjectURL;
  593. // Save the created URL for stream so we can reuse it and not keep
  594. // creating URLs.
  595. if (!src) {
  596. stream.jitsiObjectURL
  597. = src
  598. = (URL || webkitURL).createObjectURL(stream);
  599. }
  600. }
  601. element.src = src || '';
  602. }
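/*
 * Illustrative sketch (not part of the original source; the element id is
 * hypothetical): a typical caller would do
 *
 *     const videoElement = document.getElementById('largeVideo');
 *
 *     defaultSetVideoSrc(videoElement, stream); // attach
 *     defaultSetVideoSrc(videoElement, null);   // detach
 *
 * and defaultSetVideoSrc() picks srcObject, mozSrcObject or a created object
 * URL depending on what the browser supports.
 */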
  603. /**
  604. *
  605. */
  606. class RTCUtils extends Listenable {
  607. /**
  608. *
  609. */
  610. constructor() {
  611. super(eventEmitter);
  612. }
  613. /**
  614. *
  615. * @param options
  616. */
  617. init(options) {
  618. if (typeof options.disableAEC === 'boolean') {
  619. disableAEC = options.disableAEC;
  620. logger.info(`Disable AEC: ${disableAEC}`);
  621. }
  622. if (typeof options.disableNS === 'boolean') {
  623. disableNS = options.disableNS;
  624. logger.info(`Disable NS: ${disableNS}`);
  625. }
  626. if (typeof options.disableAP === 'boolean') {
  627. disableAP = options.disableAP;
  628. logger.info(`Disable AP: ${disableAP}`);
  629. }
  630. if (typeof options.disableAGC === 'boolean') {
  631. disableAGC = options.disableAGC;
  632. logger.info(`Disable AGC: ${disableAGC}`);
  633. }
  634. if (typeof options.disableHPF === 'boolean') {
  635. disableHPF = options.disableHPF;
  636. logger.info(`Disable HPF: ${disableHPF}`);
  637. }
  638. // Initialize rawEnumerateDevicesWithCallback
  639. initRawEnumerateDevicesWithCallback();
  640. return new Promise((resolve, reject) => {
  641. if (RTCBrowserType.isFirefox()) {
  642. const FFversion = RTCBrowserType.getFirefoxVersion();
  643. if (FFversion < 40) {
  644. rejectWithWebRTCNotSupported(
  645. `Firefox version too old: ${FFversion}.`
  646. + ' Required >= 40.',
  647. reject);
  648. return;
  649. }
  650. this.RTCPeerConnectionType = mozRTCPeerConnection;
  651. this.getUserMedia
  652. = wrapGetUserMedia(
  653. navigator.mozGetUserMedia.bind(navigator));
  654. this.enumerateDevices = rawEnumerateDevicesWithCallback;
  655. this.pcConstraints = {};
  656. this.attachMediaStream
  657. = wrapAttachMediaStream((element, stream) => {
  658. // srcObject is being standardized and FF will
  659. // eventually support that unprefixed. FF also supports
  660. // the "element.src = URL.createObjectURL(...)" combo,
  661. // but that will be deprecated in favour of srcObject.
  662. //
  663. // https://groups.google.com/forum/#!topic/
  664. // mozilla.dev.media/pKOiioXonJg
  665. // https://github.com/webrtc/samples/issues/302
  666. if (element) {
  667. defaultSetVideoSrc(element, stream);
  668. if (stream) {
  669. element.play();
  670. }
  671. }
  672. return element;
  673. });
  674. this.getStreamID = function(stream) {
  675. let id = stream.id;
  676. if (!id) {
  677. let tracks = stream.getVideoTracks();
  678. if (!tracks || tracks.length === 0) {
  679. tracks = stream.getAudioTracks();
  680. }
  681. id = tracks[0].id;
  682. }
  683. return SDPUtil.filterSpecialChars(id);
  684. };
  685. this.getTrackID = function(track) {
  686. return track.id;
  687. };
  688. /* eslint-disable no-global-assign, no-native-reassign */
  689. RTCSessionDescription = mozRTCSessionDescription;
  690. RTCIceCandidate = mozRTCIceCandidate;
  691. /* eslint-enable no-global-assign, no-native-reassign */
  692. } else if (RTCBrowserType.isChrome()
  693. || RTCBrowserType.isOpera()
  694. || RTCBrowserType.isNWJS()
  695. || RTCBrowserType.isElectron()
  696. || RTCBrowserType.isReactNative()) {
  697. this.RTCPeerConnectionType = webkitRTCPeerConnection;
  698. const getUserMedia
  699. = navigator.webkitGetUserMedia.bind(navigator);
  700. this.getUserMedia = wrapGetUserMedia(getUserMedia);
  701. this.enumerateDevices = rawEnumerateDevicesWithCallback;
  702. this.attachMediaStream
  703. = wrapAttachMediaStream((element, stream) => {
  704. defaultSetVideoSrc(element, stream);
  705. return element;
  706. });
  707. this.getStreamID = function(stream) {
  708. // A. MediaStreams from FF endpoints have the characters '{'
  709. // and '}' that make jQuery choke.
  710. // B. The react-native-webrtc implementation that we use on
  711. // React Native at the time of this writing returns a number
  712. // for the id of MediaStream. Let's just say that a number
  713. // contains no special characters.
  714. const id = stream.id;
  715. // XXX The return statement is affected by automatic
  716. // semicolon insertion (ASI). No line terminator is allowed
  717. // between the return keyword and the expression.
  718. return (
  719. typeof id === 'number'
  720. ? id
  721. : SDPUtil.filterSpecialChars(id));
  722. };
  723. this.getTrackID = function(track) {
  724. return track.id;
  725. };
  726. this.pcConstraints = { optional: [] };
  727. if (options.useIPv6) {
  728. // https://code.google.com/p/webrtc/issues/detail?id=2828
  729. this.pcConstraints.optional.push({ googIPv6: true });
  730. }
  731. if (!webkitMediaStream.prototype.getVideoTracks) {
  732. webkitMediaStream.prototype.getVideoTracks = function() {
  733. return this.videoTracks;
  734. };
  735. }
  736. if (!webkitMediaStream.prototype.getAudioTracks) {
  737. webkitMediaStream.prototype.getAudioTracks = function() {
  738. return this.audioTracks;
  739. };
  740. }
  741. this.p2pPcConstraints
  742. = JSON.parse(JSON.stringify(this.pcConstraints));
  743. // Allows sending of video to be suspended if the bandwidth
  744. // estimation is too low.
  745. if (!options.disableSuspendVideo) {
  746. this.pcConstraints.optional.push(
  747. { googSuspendBelowMinBitrate: true });
  748. }
  749. // There's no reason not to use this for p2p
  750. this.p2pPcConstraints.optional.push({
  751. googSuspendBelowMinBitrate: true
  752. });
  753. } else if (RTCBrowserType.isEdge()) {
  754. this.RTCPeerConnectionType = ortcRTCPeerConnection;
  755. this.getUserMedia
  756. = wrapGetUserMedia(
  757. navigator.mediaDevices.getUserMedia.bind(
  758. navigator.mediaDevices),
  759. true);
  760. this.enumerateDevices = rawEnumerateDevicesWithCallback;
  761. this.attachMediaStream
  762. = wrapAttachMediaStream((element, stream) => {
  763. defaultSetVideoSrc(element, stream);
  764. return element;
  765. });
  766. // ORTC does not generate remote MediaStreams so those are
  767. // manually created by the ORTC shim. This means that their
  768. // id (internally generated) does not match the stream id
  769. // signaled into the remote SDP. Therefore, the shim adds a
  770. // custom jitsiRemoteId property with the original stream id.
  771. this.getStreamID = function(stream) {
  772. const id = stream.jitsiRemoteId || stream.id;
  773. return SDPUtil.filterSpecialChars(id);
  774. };
  775. // Remote MediaStreamTracks generated by ORTC (within a
  776. // RTCRtpReceiver) have an internally/random id which does not
  777. // match the track id signaled in the remote SDP. The shim adds
  778. // a custom jitsi-id property with the original track id.
  779. this.getTrackID = function(track) {
  780. return track.jitsiRemoteId || track.id;
  781. };
  782. } else if (RTCBrowserType.isTemasysPluginUsed()) {
  783. // Detect IE/Safari
  784. const webRTCReadyCb = () => {
  785. this.RTCPeerConnectionType = RTCPeerConnection;
  786. this.getUserMedia = window.getUserMedia;
  787. this.enumerateDevices
  788. = enumerateDevicesThroughMediaStreamTrack;
  789. this.attachMediaStream
  790. = wrapAttachMediaStream((element, stream) => {
  791. if (stream) {
  792. if (stream.id === 'dummyAudio'
  793. || stream.id === 'dummyVideo') {
  794. return;
  795. }
  796. // The container must be visible in order to
  797. // play or attach the stream when Temasys plugin
  798. // is in use
  799. const containerSel = $(element);
  800. if (RTCBrowserType.isTemasysPluginUsed()
  801. && !containerSel.is(':visible')) {
  802. containerSel.show();
  803. }
  804. const video
  805. = stream.getVideoTracks().length > 0;
  806. if (video && !$(element).is(':visible')) {
  807. throw new Error(
  808. 'video element must be visible to'
  809. + ' attach video stream');
  810. }
  811. }
  812. return attachMediaStream(element, stream);
  813. });
  814. this.getStreamID
  815. = stream => SDPUtil.filterSpecialChars(stream.label);
  816. this.getTrackID
  817. = track => track.id;
  818. onReady(
  819. options,
  820. this.getUserMediaWithConstraints.bind(this));
  821. };
  822. const webRTCReadyPromise
  823. = new Promise(r => AdapterJS.webRTCReady(r));
  824. // Resolve or reject depending on whether the Temasys plugin is
  825. // installed.
  826. AdapterJS.WebRTCPlugin.isPluginInstalled(
  827. AdapterJS.WebRTCPlugin.pluginInfo.prefix,
  828. AdapterJS.WebRTCPlugin.pluginInfo.plugName,
  829. AdapterJS.WebRTCPlugin.pluginInfo.type,
  830. /* installed */ () => {
  831. webRTCReadyPromise.then(() => {
  832. webRTCReadyCb();
  833. resolve();
  834. });
  835. },
  836. /* not installed */ () => {
  837. const error
  838. = new Error('Temasys plugin is not installed');
  839. error.name = 'WEBRTC_NOT_READY';
  840. error.webRTCReadyPromise = webRTCReadyPromise;
  841. reject(error);
  842. });
  843. } else {
  844. rejectWithWebRTCNotSupported(
  845. 'Browser does not appear to be WebRTC-capable',
  846. reject);
  847. return;
  848. }
  849. this.p2pPcConstraints = this.p2pPcConstraints || this.pcConstraints;
  850. // Call onReady() if Temasys plugin is not used
  851. if (!RTCBrowserType.isTemasysPluginUsed()) {
  852. onReady(options, this.getUserMediaWithConstraints.bind(this));
  853. resolve();
  854. }
  855. });
  856. }
  857. /* eslint-disable max-params */
  858. /**
  859. * @param {string[]} um required user media types
  860. * @param {function} successCallback
  861. * @param {Function} failureCallback
  862. * @param {Object} [options] optional parameters
  863. * @param {string} options.resolution
  864. * @param {number} options.bandwidth
  865. * @param {number} options.fps
  866. * @param {string} options.desktopStream
  867. * @param {string} options.cameraDeviceId
  868. * @param {string} options.micDeviceId
  869. **/
  870. getUserMediaWithConstraints(
  871. um,
  872. successCallback,
  873. failureCallback,
  874. options = {}) {
  875. const constraints = getConstraints(um, options);
  876. logger.info('Get media constraints', constraints);
  877. try {
  878. this.getUserMedia(
  879. constraints,
  880. stream => {
  881. logger.log('onUserMediaSuccess');
  882. setAvailableDevices(um, stream);
  883. successCallback(stream);
  884. },
  885. error => {
  886. setAvailableDevices(um, undefined);
  887. logger.warn('Failed to get access to local media. Error ',
  888. error, constraints);
  889. if (failureCallback) {
  890. failureCallback(
  891. new JitsiTrackError(error, constraints, um));
  892. }
  893. });
  894. } catch (e) {
  895. logger.error('GUM failed: ', e);
  896. if (failureCallback) {
  897. failureCallback(new JitsiTrackError(e, constraints, um));
  898. }
  899. }
  900. }
  901. /* eslint-enable max-params */
  902. /**
  903. * Creates the local MediaStreams.
  904. * @param {Object} [options] optional parameters
  905. * @param {Array} options.devices the devices that will be requested
  906. * @param {string} options.resolution resolution constraints
  907. * @param {bool} options.dontCreateJitsiTrack if <tt>true</tt> objects with
  908. * the following structure {stream: the Media Stream, type: "audio" or
  909. * "video", videoType: "camera" or "desktop"} will be returned trough the
  910. * Promise, otherwise JitsiTrack objects will be returned.
  911. * @param {string} options.cameraDeviceId
  912. * @param {string} options.micDeviceId
  913. * @returns {*} Promise object that will receive the new JitsiTracks
  914. */
  915. obtainAudioAndVideoPermissions(options = {}) {
  916. const self = this;
  917. const dsOptions = {
  918. ...options.desktopSharingExtensionExternalInstallation,
  919. desktopSharingSources: options.desktopSharingSources
  920. };
  921. return new Promise((resolve, reject) => {
  922. const successCallback = function(stream) {
  923. resolve(handleLocalStream(stream, options.resolution));
  924. };
  925. options.devices = options.devices || [ 'audio', 'video' ];
  926. if (!screenObtainer.isSupported()
  927. && options.devices.indexOf('desktop') !== -1) {
  928. reject(new Error('Desktop sharing is not supported!'));
  929. }
  930. if (RTCBrowserType.isFirefox()
  931. // XXX The react-native-webrtc implementation that we
  932. // utilize on React Native at the time of this writing does
  933. // not support the MediaStream constructors defined by
  934. // https://www.w3.org/TR/mediacapture-streams/#constructors
  935. // and instead has a single constructor which expects (an
  936. // NSNumber as) a MediaStream ID.
  937. || RTCBrowserType.isReactNative()
  938. || RTCBrowserType.isTemasysPluginUsed()) {
  939. const GUM = function(device, s, e) {
  940. this.getUserMediaWithConstraints(device, s, e, options);
  941. };
  942. const deviceGUM = {
  943. 'audio': GUM.bind(self, [ 'audio' ]),
  944. 'video': GUM.bind(self, [ 'video' ])
  945. };
  946. if (screenObtainer.isSupported()) {
  947. deviceGUM.desktop = screenObtainer.obtainStream.bind(
  948. screenObtainer,
  949. dsOptions);
  950. }
  951. // With FF/IE we can't split the stream into audio and video
  952. // because FF doesn't support media stream constructors. So, we
  953. // need to get the audio stream separately from the video stream
  954. // using two distinct GUM calls. Not very user friendly :-( but
  955. // we don't have many other options neither.
  956. //
  957. // Note that we pack those 2 streams in a single object and pass
  958. // it to the successCallback method.
  959. obtainDevices({
  960. devices: options.devices,
  961. streams: [],
  962. successCallback,
  963. errorCallback: reject,
  964. deviceGUM
  965. });
  966. } else {
  967. const hasDesktop = options.devices.indexOf('desktop') > -1;
  968. if (hasDesktop) {
  969. options.devices.splice(
  970. options.devices.indexOf('desktop'),
  971. 1);
  972. }
  973. options.resolution = options.resolution || '360';
  974. if (options.devices.length) {
  975. this.getUserMediaWithConstraints(
  976. options.devices,
  977. stream => {
  978. const audioDeviceRequested
  979. = options.devices.indexOf('audio') !== -1;
  980. const videoDeviceRequested
  981. = options.devices.indexOf('video') !== -1;
  982. const audioTracksReceived
  983. = stream.getAudioTracks().length > 0;
  984. const videoTracksReceived
  985. = stream.getVideoTracks().length > 0;
  986. if ((audioDeviceRequested && !audioTracksReceived)
  987. || (videoDeviceRequested
  988. && !videoTracksReceived)) {
  989. self.stopMediaStream(stream);
  990. // We get here if we requested 'audio' or
  991. // 'video' devices, or both, but did not get a
  992. // corresponding MediaStreamTrack in the
  993. // response stream. We don't know why this
  994. // happened, so reject with a general error.
  995. // eslint-disable-next-line no-shadow
  996. const devices = [];
  997. if (audioDeviceRequested
  998. && !audioTracksReceived) {
  999. devices.push('audio');
  1000. }
  1001. if (videoDeviceRequested
  1002. && !videoTracksReceived) {
  1003. devices.push('video');
  1004. }
  1005. // We are missing one of the media types we
  1006. // requested. In order to get the actual error
  1007. // that caused the missing media, we call
  1008. // getUserMedia one more time so we can obtain
  1009. // the real error. (Example use cases: audio and
  1010. // video are requested and the video device is
  1011. // missing, or the device is denied and Chrome
  1012. // is set to not ask for permissions.)
  1013. self.getUserMediaWithConstraints(
  1014. devices,
  1015. () => {
  1016. // We already failed to obtain this
  1017. // media, so we are not supposed to
  1018. // receive success for this call in any
  1019. // way. Still, we reject with an error to
  1020. // make sure the promise finishes.
  1021. reject(new JitsiTrackError(
  1022. { name: 'UnknownError' },
  1023. getConstraints(
  1024. options.devices,
  1025. options),
  1026. devices)
  1027. );
  1028. },
  1029. error => {
  1030. // rejects with real error for not
  1031. // obtaining the media
  1032. reject(error);
  1033. }, options);
  1034. return;
  1035. }
  1036. if (hasDesktop) {
  1037. screenObtainer.obtainStream(
  1038. dsOptions,
  1039. desktop => {
  1040. successCallback({ audioVideo: stream,
  1041. desktop });
  1042. }, error => {
  1043. self.stopMediaStream(stream);
  1044. reject(error);
  1045. });
  1046. } else {
  1047. successCallback({ audioVideo: stream });
  1048. }
  1049. },
  1050. error => reject(error),
  1051. options);
  1052. } else if (hasDesktop) {
  1053. screenObtainer.obtainStream(
  1054. dsOptions,
  1055. desktop => successCallback({ desktop }),
  1056. error => reject(error));
  1057. }
  1058. }
  1059. });
  1060. }
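/*
 * Illustrative sketch (not part of the original source; the import path is
 * assumed for a sibling module): a consumer would typically do something
 * along the lines of
 *
 *     import rtcUtils from './RTCUtils';
 *
 *     rtcUtils.obtainAudioAndVideoPermissions({
 *         devices: [ 'audio', 'video' ],
 *         resolution: '720'
 *     }).then(tracks => {
 *         // tracks is an array of { stream, track, mediaType, ... }
 *         // descriptors as produced by handleLocalStream().
 *     });
 */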
  1061. /**
  1062. *
  1063. */
  1064. getDeviceAvailability() {
  1065. return devices;
  1066. }
  1067. /**
  1068. *
  1069. */
  1070. isRTCReady() {
  1071. return rtcReady;
  1072. }
  1073. /**
  1074. *
  1075. */
  1076. _isDeviceListAvailable() {
  1077. if (!rtcReady) {
  1078. throw new Error('WebRTC not ready yet');
  1079. }
  1080. return Boolean(
  1081. (navigator.mediaDevices
  1082. && navigator.mediaDevices.enumerateDevices)
  1083. || (typeof MediaStreamTrack !== 'undefined'
  1084. && MediaStreamTrack.getSources));
  1085. }
  1086. /**
  1087. * Returns a promise which can be used to make sure that the WebRTC stack
  1088. * has been initialized.
  1089. *
  1090. * @returns {Promise} which is resolved only if the WebRTC stack is ready.
  1091. * Note that currently we do not detect stack initialization failure and
  1092. * the promise is never rejected (unless an unexpected error occurs).
  1093. */
  1094. onRTCReady() {
  1095. if (rtcReady) {
  1096. return Promise.resolve();
  1097. }
  1098. return new Promise(resolve => {
  1099. const listener = () => {
  1100. eventEmitter.removeListener(RTCEvents.RTC_READY, listener);
  1101. resolve();
  1102. };
  1103. eventEmitter.addListener(RTCEvents.RTC_READY, listener);
  1104. // We have no failed event, so... it either resolves or nothing
  1105. // happens.
  1106. });
  1107. }
  1108. /**
  1109. * Checks if it's possible to enumerate available cameras/microphones.
  1110. *
  1111. * @returns {Promise<boolean>} a Promise which will be resolved only once
  1112. * the WebRTC stack is ready, either with true if the device listing is
  1113. * available or with false otherwise.
  1114. */
  1115. isDeviceListAvailable() {
  1116. return this.onRTCReady().then(this._isDeviceListAvailable.bind(this));
  1117. }
  1118. /**
  1119. * Returns true if changing the input (camera / microphone) or output
  1120. * (audio) device is supported and false if not.
  1121. * @param {string} [deviceType] - type of device to change. 'output' is for
  1122. * audio output device change; undefined or 'input' is for input devices.
  1123. * @returns {boolean} true if available, false otherwise.
  1124. */
  1125. isDeviceChangeAvailable(deviceType) {
  1126. return deviceType === 'output' || deviceType === 'audiooutput'
  1127. ? isAudioOutputDeviceChangeAvailable
  1128. : RTCBrowserType.isChrome()
  1129. || RTCBrowserType.isFirefox()
  1130. || RTCBrowserType.isOpera()
  1131. || RTCBrowserType.isTemasysPluginUsed()
  1132. || RTCBrowserType.isNWJS()
  1133. || RTCBrowserType.isElectron()
  1134. || RTCBrowserType.isEdge();
  1135. }
  1136. /**
  1137. * A method to handle stopping of the stream.
  1138. * One point to handle the differences in various implementations.
  1139. * @param mediaStream MediaStream object to stop.
  1140. */
  1141. stopMediaStream(mediaStream) {
  1142. mediaStream.getTracks().forEach(track => {
  1143. // stop() not supported with IE
  1144. if (!RTCBrowserType.isTemasysPluginUsed() && track.stop) {
  1145. track.stop();
  1146. }
  1147. });
  1148. // leave stop() for implementations still using it
  1149. if (mediaStream.stop) {
  1150. mediaStream.stop();
  1151. }
  1152. // The MediaStream implementation of the react-native-webrtc project has
  1153. // an explicit release method that is to be invoked in order to release
  1154. // used resources such as memory.
  1155. if (mediaStream.release) {
  1156. mediaStream.release();
  1157. }
  1158. // if we have called createObjectURL, let's clean it up
  1159. const url = mediaStream.jitsiObjectURL;
  1160. if (url) {
  1161. delete mediaStream.jitsiObjectURL;
  1162. (URL || webkitURL).revokeObjectURL(url);
  1163. }
  1164. }
  1165. /**
  1166. * Returns whether the desktop sharing is enabled or not.
  1167. * @returns {boolean}
  1168. */
  1169. isDesktopSharingEnabled() {
  1170. return screenObtainer.isSupported();
  1171. }
  1172. /**
  1173. * Sets current audio output device.
  1174. * @param {string} deviceId - id of 'audiooutput' device from
  1175. * navigator.mediaDevices.enumerateDevices(), 'default' for default
  1176. * device
  1177. * @returns {Promise} - resolves when audio output is changed, is rejected
  1178. * otherwise
  1179. */
  1180. setAudioOutputDevice(deviceId) {
  1181. if (!this.isDeviceChangeAvailable('output')) {
  1182. return Promise.reject(
  1183. new Error('Audio output device change is not supported'));
  1184. }
  1185. return featureDetectionAudioEl.setSinkId(deviceId)
  1186. .then(() => {
  1187. audioOutputDeviceId = deviceId;
  1188. audioOutputChanged = true;
  1189. logger.log(`Audio output device set to ${deviceId}`);
  1190. eventEmitter.emit(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
  1191. deviceId);
  1192. });
  1193. }
  1194. /**
  1195. * Returns the currently used audio output device id; 'default' stands for
  1196. * the default device.
  1197. * @returns {string}
  1198. */
  1199. getAudioOutputDevice() {
  1200. return audioOutputDeviceId;
  1201. }
  1202. /**
  1203. * Returns the list of available media devices if it has been obtained;
  1204. * otherwise an empty array is returned.
  1205. * @returns {Array} list of available media devices.
  1206. */
  1207. getCurrentlyAvailableMediaDevices() {
  1208. return currentlyAvailableMediaDevices;
  1209. }
  1210. /**
  1211. * Returns event data for device to be reported to stats.
  1212. * @returns {MediaDeviceInfo} device.
  1213. */
  1214. getEventDataForActiveDevice(device) {
  1215. const deviceList = [];
  1216. const deviceData = {
  1217. 'deviceId': device.deviceId,
  1218. 'kind': device.kind,
  1219. 'label': device.label,
  1220. 'groupId': device.groupId
  1221. };
  1222. deviceList.push(deviceData);
  1223. return { deviceList };
  1224. }
  1225. }
  1226. /**
  1227. * Rejects a Promise because WebRTC is not supported.
  1228. *
  1229. * @param {string} errorMessage - The human-readable message of the Error which
  1230. * is the reason for the rejection.
  1231. * @param {Function} reject - The reject function of the Promise.
  1232. * @returns {void}
  1233. */
  1234. function rejectWithWebRTCNotSupported(errorMessage, reject) {
  1235. const error = new Error(errorMessage);
  1236. // WebRTC is not supported either natively or via a known plugin such as
  1237. // Temasys.
  1238. // XXX The Error class already has a property name which is commonly used to
  1239. // detail the represented error in a non-human-readable way (in contrast to
  1240. // the human-readable property message). I explicitly did not want to
  1241. // introduce a new specific property.
  1242. // FIXME None of the existing JitsiXXXErrors seemed to be appropriate
  1243. // recipients of the constant WEBRTC_NOT_SUPPORTED so I explicitly chose to
  1244. // leave it as a magic string at the time of this writing.
  1245. error.name = 'WEBRTC_NOT_SUPPORTED';
  1246. logger.error(errorMessage);
  1247. reject(error);
  1248. }
  1249. const rtcUtils = new RTCUtils();
  1250. /**
  1251. *
  1252. * @param options
  1253. */
  1254. function obtainDevices(options) {
  1255. if (!options.devices || options.devices.length === 0) {
  1256. return options.successCallback(options.streams || {});
  1257. }
  1258. const device = options.devices.splice(0, 1);
  1259. options.deviceGUM[device](
  1260. stream => {
  1261. options.streams = options.streams || {};
  1262. options.streams[device] = stream;
  1263. obtainDevices(options);
  1264. },
  1265. error => {
  1266. Object.keys(options.streams).forEach(
  1267. d => rtcUtils.stopMediaStream(options.streams[d]));
  1268. logger.error(
  1269. `failed to obtain ${device} stream - stop`, error);
  1270. options.errorCallback(error);
  1271. });
  1272. }
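/*
 * Illustrative sketch (not part of the original source): obtainDevices() is
 * driven with an options object of roughly this shape (see
 * obtainAudioAndVideoPermissions above), consuming options.devices one entry
 * at a time and accumulating the resulting streams:
 *
 *     obtainDevices({
 *         devices: [ 'audio', 'video' ],
 *         streams: [],
 *         successCallback: streams => console.log(streams),
 *         errorCallback: error => console.error(error),
 *         deviceGUM: {
 *             audio: (success, failure) => ...,
 *             video: (success, failure) => ...
 *         }
 *     });
 */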
  1273. /**
  1274. * In case of IE we continue from 'onReady' callback passed to RTCUtils
  1275. * constructor. It will be invoked by Temasys plugin once it is initialized.
  1276. *
  1277. * @param options
  1278. * @param GUM
  1279. */
  1280. function onReady(options, GUM) {
  1281. rtcReady = true;
  1282. eventEmitter.emit(RTCEvents.RTC_READY, true);
  1283. screenObtainer.init(options, GUM);
  1284. if (rtcUtils.isDeviceListAvailable() && rawEnumerateDevicesWithCallback) {
  1285. rawEnumerateDevicesWithCallback(ds => {
  1286. currentlyAvailableMediaDevices = ds.splice(0);
  1287. eventEmitter.emit(RTCEvents.DEVICE_LIST_AVAILABLE,
  1288. currentlyAvailableMediaDevices);
  1289. if (isDeviceChangeEventSupported) {
  1290. navigator.mediaDevices.addEventListener(
  1291. 'devicechange',
  1292. () => rtcUtils.enumerateDevices(onMediaDevicesListChanged));
  1293. } else {
  1294. pollForAvailableMediaDevices();
  1295. }
  1296. });
  1297. }
  1298. }
  1299. /**
  1300. * Wraps original attachMediaStream function to set current audio output device
  1301. * if this is supported.
  1302. * @param {Function} origAttachMediaStream
  1303. * @returns {Function}
  1304. */
  1305. function wrapAttachMediaStream(origAttachMediaStream) {
  1306. return function(element, stream) {
  1307. // eslint-disable-next-line prefer-rest-params
  1308. const res = origAttachMediaStream.apply(rtcUtils, arguments);
  1309. if (stream
  1310. && rtcUtils.isDeviceChangeAvailable('output')
  1311. && stream.getAudioTracks
  1312. && stream.getAudioTracks().length
  1313. // we skip setting audio output if there was no explicit change
  1314. && audioOutputChanged) {
  1315. element.setSinkId(rtcUtils.getAudioOutputDevice())
  1316. .catch(function(ex) {
  1317. const err
  1318. = new JitsiTrackError(ex, null, [ 'audiooutput' ]);
  1319. GlobalOnErrorHandler.callUnhandledRejectionHandler({
  1320. promise: this, // eslint-disable-line no-invalid-this
  1321. reason: err
  1322. });
  1323. logger.warn(
  1324. 'Failed to set audio output device for the element.'
  1325. + ' Default audio output device will be used'
  1326. + ' instead',
  1327. element,
  1328. err);
  1329. });
  1330. }
  1331. return res;
  1332. };
  1333. }
  1334. export default rtcUtils;