
JitsiMeetJS.ts

import Logger from '@jitsi/logger';

import * as JitsiConferenceErrors from './JitsiConferenceErrors';
import * as JitsiConferenceEvents from './JitsiConferenceEvents';
import JitsiConnection from './JitsiConnection';
import * as JitsiConnectionErrors from './JitsiConnectionErrors';
import * as JitsiConnectionEvents from './JitsiConnectionEvents';
import JitsiMediaDevices from './JitsiMediaDevices';
import * as JitsiMediaDevicesEvents from './JitsiMediaDevicesEvents';
import JitsiTrackError from './JitsiTrackError';
import * as JitsiTrackErrors from './JitsiTrackErrors';
import * as JitsiTrackEvents from './JitsiTrackEvents';
import * as JitsiTranscriptionStatus from './JitsiTranscriptionStatus';
import RTC from './modules/RTC/RTC';
import RTCStats from './modules/RTCStats/RTCStats';
import browser from './modules/browser';
import NetworkInfo from './modules/connectivity/NetworkInfo';
import { TrackStreamingStatus } from './modules/connectivity/TrackStreamingStatus';
import getActiveAudioDevice from './modules/detection/ActiveDeviceDetector';
import * as DetectionEvents from './modules/detection/DetectionEvents';
import TrackVADEmitter from './modules/detection/TrackVADEmitter';
import FeatureFlags from './modules/flags/FeatureFlags';
import ProxyConnectionService
    from './modules/proxyconnection/ProxyConnectionService';
import recordingConstants from './modules/recording/recordingConstants';
import Settings from './modules/settings/Settings';
import LocalStatsCollector from './modules/statistics/LocalStatsCollector';
import Statistics from './modules/statistics/statistics';
import GlobalOnErrorHandler from './modules/util/GlobalOnErrorHandler';
import ScriptUtil from './modules/util/ScriptUtil';
import * as VideoSIPGWConstants from './modules/videosipgw/VideoSIPGWConstants';
import AudioMixer from './modules/webaudio/AudioMixer';
import { MediaType } from './service/RTC/MediaType';
import * as ConnectionQualityEvents
    from './service/connectivity/ConnectionQualityEvents';
import * as E2ePingEvents from './service/e2eping/E2ePingEvents';
import { createGetUserMediaEvent } from './service/statistics/AnalyticsEvents';
import * as RTCStatsEvents from './modules/RTCStats/RTCStatsEvents';
import { VideoType } from './service/RTC/VideoType';

const logger = Logger.getLogger(__filename);

/**
 * The amount of time to wait until firing
 * {@link JitsiMediaDevicesEvents.PERMISSION_PROMPT_IS_SHOWN} event.
 */
const USER_MEDIA_SLOW_PROMISE_TIMEOUT = 1000;

/**
 * Indicates whether GUM has been executed or not.
 */
let hasGUMExecuted = false;
/**
 * Extracts from an 'options' object with a specific format (TODO what IS the
 * format?) the attributes which are to be logged in analytics events.
 *
 * @param options gum options (???)
 * @returns {*} the attributes to attach to analytics events.
 */
function getAnalyticsAttributesFromOptions(options) {
    const attributes: any = {};

    attributes['audio_requested'] = options.devices.includes('audio');
    attributes['video_requested'] = options.devices.includes('video');
    attributes['screen_sharing_requested'] = options.devices.includes('desktop');

    if (attributes.video_requested) {
        attributes.resolution = options.resolution;
    }

    return attributes;
}
interface ICreateLocalTrackOptions {
    cameraDeviceId?: string;
    devices?: any[];
    firePermissionPromptIsShownEvent?: boolean;
    fireSlowPromiseEvent?: boolean;
    micDeviceId?: string;
    resolution?: string;
}

interface IJitsiMeetJSOptions {
    enableAnalyticsLogging?: boolean;
    enableWindowOnErrorHandler?: boolean;
    externalStorage?: Storage;
    flags?: {
        runInLiteMode?: boolean;
        ssrcRewritingEnabled?: boolean;
    }
}

interface ICreateLocalTrackFromMediaStreamOptions {
    stream: MediaStream,
    sourceType: string,
    mediaType: MediaType,
    videoType?: VideoType
}

/**
 * The public API of the Jitsi Meet library (a.k.a. {@code JitsiMeetJS}).
 */
export default {
    version: '{#COMMIT_HASH#}',
    JitsiConnection,
    /**
     * {@code ProxyConnectionService} is used to connect a remote peer to a
     * local Jitsi participant without going through a Jitsi conference. It is
     * currently used for room integration development, specifically wireless
     * screensharing. Its API is experimental and will likely change; usage of
     * it is advised against.
     */
    ProxyConnectionService,
    constants: {
        recording: recordingConstants,
        sipVideoGW: VideoSIPGWConstants,
        transcriptionStatus: JitsiTranscriptionStatus,
        trackStreamingStatus: TrackStreamingStatus
    },
    events: {
        conference: JitsiConferenceEvents,
        connection: JitsiConnectionEvents,
        detection: DetectionEvents,
        track: JitsiTrackEvents,
        mediaDevices: JitsiMediaDevicesEvents,
        connectionQuality: ConnectionQualityEvents,
        e2eping: E2ePingEvents,
        rtcstats: RTCStatsEvents
    },
    errors: {
        conference: JitsiConferenceErrors,
        connection: JitsiConnectionErrors,
        track: JitsiTrackErrors
    },
    errorTypes: {
        JitsiTrackError
    },
    logLevels: Logger.levels,
    mediaDevices: JitsiMediaDevices as unknown,
    analytics: Statistics.analytics as unknown,
    init(options: IJitsiMeetJSOptions = {}) {
        // @ts-ignore
        logger.info(`This appears to be ${browser.getName()}, ver: ${browser.getVersion()}`);

        Settings.init(options.externalStorage);
        Statistics.init(options);

        const flags = options.flags || {};

        // Configure the feature flags.
        FeatureFlags.init(flags);

        // Initialize global window.connectionTimes
        // FIXME do not use 'window'
        if (!window.connectionTimes) {
            window.connectionTimes = {};
        }

        if (options.enableAnalyticsLogging !== true) {
            logger.warn('Analytics disabled, disposing.');
            this.analytics.dispose();
        }

        if (options.enableWindowOnErrorHandler) {
            GlobalOnErrorHandler.addHandler(
                this.getGlobalOnErrorHandler.bind(this));
        }

        return RTC.init(options);
    },
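    // Usage sketch (illustrative only, not part of the exported API): a minimal
    // consumer calls init() once before creating connections or tracks. The
    // option names mirror IJitsiMeetJSOptions above; the import path and the
    // chosen log level are assumptions of the example.
    //
    //     import JitsiMeetJS from 'lib-jitsi-meet';
    //
    //     JitsiMeetJS.init({ enableAnalyticsLogging: false });
    //     JitsiMeetJS.setLogLevel(JitsiMeetJS.logLevels.ERROR);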
    /**
     * Returns whether the desktop sharing is enabled or not.
     *
     * @returns {boolean}
     */
    isDesktopSharingEnabled() {
        return RTC.isDesktopSharingEnabled();
    },
    /**
     * Returns whether the current execution environment supports WebRTC (for
     * use within this library).
     *
     * @returns {boolean} {@code true} if WebRTC is supported in the current
     * execution environment (for use within this library); {@code false},
     * otherwise.
     */
    isWebRtcSupported() {
        return RTC.isWebRtcSupported();
    },
    setLogLevel(level) {
        Logger.setLogLevel(level);
    },
    /**
     * Expose rtcstats to the public API.
     */
    rtcstats: {
        /**
         * Sends identity data to the rtcstats server. This data is used to
         * identify the specifics of a particular client; it can be any object
         * and will show up in the generated rtcstats dump under "identity"
         * entries.
         *
         * @param {Object} identityData - Identity data to send.
         * @returns {void}
         */
        sendIdentityEntry(identityData) {
            RTCStats.sendIdentity(identityData);
        },
        /**
         * Sends a stats entry to the rtcstats server.
         *
         * @param {string} statsType - The type of stats to send.
         * @param {Object} data - The stats data to send.
         */
        sendStatsEntry(statsType, data) {
            RTCStats.sendStatsEntry(statsType, null, data);
        },
        /**
         * Events generated by rtcstats, such as PeerConnection state and
         * websocket connection state.
         *
         * @param {RTCStatsEvents} event - The event name.
         * @param {function} handler - The event handler.
         */
        on(event, handler) {
            RTCStats.events.on(event, handler);
        }
    },
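    // Usage sketch (illustrative only): forwarding an identity blob and
    // subscribing to rtcstats events. The identity payload is arbitrary by
    // design; the specific event constant used below is an assumption, so
    // consult JitsiMeetJS.events.rtcstats for the actual names.
    //
    //     JitsiMeetJS.rtcstats.sendIdentityEntry({ displayName: 'Test client' });
    //     JitsiMeetJS.rtcstats.on(
    //         JitsiMeetJS.events.rtcstats.RTC_STATS_WS_DISCONNECTED, // assumed name
    //         () => console.log('rtcstats websocket disconnected'));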
    /**
     * Sets the log level to the <tt>Logger</tt> instance with given id.
     *
     * @param {Logger.levels} level the logging level to be set
     * @param {string} id the logger id to which new logging level will be set.
     * Usually it's the name of the JavaScript source file including the path
     * ex. "modules/xmpp/ChatRoom.js"
     */
    setLogLevelById(level, id) {
        Logger.setLogLevelById(level, id);
    },
    /**
     * Registers new global logger transport to the library logging framework.
     *
     * @param globalTransport
     * @see Logger.addGlobalTransport
     */
    addGlobalLogTransport(globalTransport) {
        Logger.addGlobalTransport(globalTransport);
    },
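    // Usage sketch (illustrative only): a transport is assumed here to be an
    // object exposing console-like methods (debug/info/log/warn/error) that
    // @jitsi/logger invokes for each log statement; verify the exact contract
    // against the @jitsi/logger documentation for your version. sendToBackend
    // is a hypothetical helper.
    //
    //     const myTransport = {
    //         debug: () => {}, info: () => {}, log: () => {},
    //         warn: (...args) => sendToBackend('warn', args),
    //         error: (...args) => sendToBackend('error', args)
    //     };
    //     JitsiMeetJS.addGlobalLogTransport(myTransport);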
    /**
     * Removes global logging transport from the library logging framework.
     *
     * @param globalTransport
     * @see Logger.removeGlobalTransport
     */
    removeGlobalLogTransport(globalTransport) {
        Logger.removeGlobalTransport(globalTransport);
    },
    /**
     * Sets global options which will be used by all loggers. Changing these
     * works even after other loggers are created.
     *
     * @param options
     * @see Logger.setGlobalOptions
     */
    setGlobalLogOptions(options) {
        Logger.setGlobalOptions(options);
    },
    /**
     * Creates the media tracks and returns them through a Promise.
     *
     * @param options Object with properties / settings specifying the tracks
     * which should be created, as well as additional configuration such as
     * the resolution, for example.
     * @param {Array} options.effects optional effects array for the track
     * @param {boolean} options.firePermissionPromptIsShownEvent - if event
     * JitsiMediaDevicesEvents.PERMISSION_PROMPT_IS_SHOWN should be fired
     * @param {boolean} options.fireSlowPromiseEvent - if event
     * JitsiMediaDevicesEvents.USER_MEDIA_SLOW_PROMISE_TIMEOUT should be fired
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @param {number} interval - the interval (in ms) for checking whether the
     * desktop sharing extension is installed or not
     * @param {Function} checkAgain - returns boolean. While checkAgain()==true
     * createLocalTracks will wait and check on every "interval" ms for the
     * extension. If the desktop extension is not installed and
     * checkAgain()==false, createLocalTracks will finish with a rejected
     * Promise.
     * @param {Function} listener - The listener will be called to notify the
     * user of lib-jitsi-meet that createLocalTracks is starting the external
     * extension installation process.
     * NOTE: If the inline installation process is not possible and external
     * installation is enabled, the listener property will be called to notify
     * the start of the external installation process. After that,
     * createLocalTracks will check for the extension every "interval" ms until
     * the plugin is installed or until checkAgain returns false. If the
     * extension is found, createLocalTracks will try to get the desktop
     * sharing track and will finish the execution. If checkAgain returns
     * false, createLocalTracks will finish the execution with a rejected
     * Promise.
     *
     * @deprecated old firePermissionPromptIsShownEvent
     * @returns {Promise.<{Array.<JitsiTrack>}, JitsiConferenceError>} A
     * promise that resolves with an array of created JitsiTracks, or rejects
     * with a JitsiConferenceError.
     */
    createLocalTracks(options: ICreateLocalTrackOptions = {}, oldfirePermissionPromptIsShownEvent) {
        let promiseFulfilled = false;

        const { firePermissionPromptIsShownEvent, fireSlowPromiseEvent, ...restOptions } = options;
        const firePermissionPrompt = firePermissionPromptIsShownEvent || oldfirePermissionPromptIsShownEvent;

        if (firePermissionPrompt && !RTC.arePermissionsGrantedForAvailableDevices()) {
            // @ts-ignore
            JitsiMediaDevices.emitEvent(JitsiMediaDevicesEvents.PERMISSION_PROMPT_IS_SHOWN, browser.getName());
        } else if (fireSlowPromiseEvent) {
            window.setTimeout(() => {
                if (!promiseFulfilled) {
                    JitsiMediaDevices.emitEvent(JitsiMediaDevicesEvents.SLOW_GET_USER_MEDIA);
                }
            }, USER_MEDIA_SLOW_PROMISE_TIMEOUT);
        }

        let isFirstGUM = false;
        let startTS = window.performance.now();

        if (!window.connectionTimes) {
            window.connectionTimes = {};
        }
        if (!hasGUMExecuted) {
            hasGUMExecuted = true;
            isFirstGUM = true;
            window.connectionTimes['firstObtainPermissions.start'] = startTS;
        }
        window.connectionTimes['obtainPermissions.start'] = startTS;

        return RTC.obtainAudioAndVideoPermissions(restOptions)
            .then(tracks => {
                promiseFulfilled = true;

                let endTS = window.performance.now();

                window.connectionTimes['obtainPermissions.end'] = endTS;
                if (isFirstGUM) {
                    window.connectionTimes['firstObtainPermissions.end'] = endTS;
                }

                Statistics.sendAnalytics(
                    createGetUserMediaEvent(
                        'success',
                        getAnalyticsAttributesFromOptions(restOptions)));

                if (this.isCollectingLocalStats()) {
                    for (let i = 0; i < tracks.length; i++) {
                        const track = tracks[i];

                        if (track.getType() === MediaType.AUDIO) {
                            Statistics.startLocalStats(track,
                                track.setAudioLevel.bind(track));
                        }
                    }
                }

                // set real device ids
                const currentlyAvailableMediaDevices
                    = RTC.getCurrentlyAvailableMediaDevices();

                if (currentlyAvailableMediaDevices) {
                    for (let i = 0; i < tracks.length; i++) {
                        const track = tracks[i];

                        track._setRealDeviceIdFromDeviceList(
                            currentlyAvailableMediaDevices);
                    }
                }

                return tracks;
            })
            .catch(error => {
                promiseFulfilled = true;

                if (error.name === JitsiTrackErrors.SCREENSHARING_USER_CANCELED) {
                    Statistics.sendAnalytics(
                        createGetUserMediaEvent(
                            'warning',
                            {
                                reason: 'extension install user canceled'
                            }));
                } else if (error.name === JitsiTrackErrors.NOT_FOUND) {
                    const attributes
                        = getAnalyticsAttributesFromOptions(options);

                    attributes.reason = 'device not found';
                    attributes.devices = error.gum.devices.join('.');
                    Statistics.sendAnalytics(
                        createGetUserMediaEvent('error', attributes));
                } else {
                    const attributes
                        = getAnalyticsAttributesFromOptions(options);

                    attributes.reason = error.name;
                    Statistics.sendAnalytics(
                        createGetUserMediaEvent('error', attributes));
                }

                let endTS = window.performance.now();

                window.connectionTimes['obtainPermissions.end'] = endTS;
                if (isFirstGUM) {
                    window.connectionTimes['firstObtainPermissions.end'] = endTS;
                }

                return Promise.reject(error);
            });
    },
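    // Usage sketch (illustrative only): requesting camera and microphone
    // tracks after init(). The option values follow the @param docs above; the
    // error handling shown is an assumption of the example.
    //
    //     JitsiMeetJS.createLocalTracks({ devices: [ 'audio', 'video' ] })
    //         .then(tracks => tracks.forEach(track => console.log(track.getType())))
    //         .catch(error => console.error('getUserMedia failed', error));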
    /**
     * Manually creates JitsiLocalTracks from the provided track info, by
     * exposing the corresponding RTC method.
     *
     * @param {Array<ICreateLocalTrackFromMediaStreamOptions>} tracksInfo - array of track information
     * @returns {Array<JitsiLocalTrack>} - created local tracks
     */
    createLocalTracksFromMediaStreams(tracksInfo) {
        return RTC.createLocalTracks(tracksInfo.map(trackInfo => {
            const tracks = trackInfo.stream.getTracks()
                .filter(track => track.kind === trackInfo.mediaType);

            if (!tracks || tracks.length === 0) {
                throw new JitsiTrackError(JitsiTrackErrors.TRACK_NO_STREAM_TRACKS_FOUND, null, null);
            }

            if (tracks.length > 1) {
                throw new JitsiTrackError(JitsiTrackErrors.TRACK_TOO_MANY_TRACKS_IN_STREAM, null, null);
            }

            trackInfo.track = tracks[0];

            return trackInfo;
        }));
    },
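    // Usage sketch (illustrative only): wrapping an already-obtained
    // MediaStream (here, a hypothetical canvas capture) into a JitsiLocalTrack.
    // The object shape follows ICreateLocalTrackFromMediaStreamOptions above;
    // the sourceType value and the MediaType/VideoType enum references (the
    // enums imported at the top of this file) are assumptions of the example.
    //
    //     const stream = canvas.captureStream(30);
    //     const [ videoTrack ] = JitsiMeetJS.createLocalTracksFromMediaStreams([ {
    //         stream,
    //         sourceType: 'canvas',
    //         mediaType: MediaType.VIDEO,
    //         videoType: VideoType.DESKTOP
    //     } ]);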
    /**
     * Creates a TrackVADEmitter service that connects an audio track to a VAD
     * (voice activity detection) processor in order to obtain VAD scores for
     * individual PCM audio samples.
     *
     * @param {string} localAudioDeviceId - The target local audio device.
     * @param {number} sampleRate - Sample rate at which the emitter will operate. Possible values 256, 512, 1024,
     * 4096, 8192, 16384. Passing other values will default to the closest neighbor.
     * I.e. providing a value of 4096 means that the emitter will process 4096 PCM samples at a time; higher values
     * mean longer calls, lower values mean more calls but shorter.
     * @param {Object} vadProcessor - VAD processor that does the actual computation on a PCM sample. The processor
     * needs to implement the following functions:
     * - <tt>getSampleLength()</tt> - Returns the sample size accepted by calculateAudioFrameVAD.
     * - <tt>getRequiredPCMFrequency()</tt> - Returns the PCM frequency at which the processor operates,
     * i.e. 16KHz, 44.1 KHz etc.
     * - <tt>calculateAudioFrameVAD(pcmSample)</tt> - Processes a 32-bit float PCM sample of getSampleLength size.
     * @returns {Promise<TrackVADEmitter>}
     */
    createTrackVADEmitter(localAudioDeviceId, sampleRate, vadProcessor) {
        return TrackVADEmitter.create(localAudioDeviceId, sampleRate, vadProcessor);
    },
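    // Usage sketch (illustrative only): a minimal processor stub that
    // satisfies the interface documented above. The constant score is a
    // placeholder (a real processor would run an actual VAD model over the
    // PCM frame), and the 'default' device id is an assumption of the example.
    //
    //     const dummyVadProcessor = {
    //         getSampleLength: () => 4096,
    //         getRequiredPCMFrequency: () => 16000,
    //         calculateAudioFrameVAD: pcmSample => 0.5
    //     };
    //     JitsiMeetJS.createTrackVADEmitter('default', 4096, dummyVadProcessor)
    //         .then(emitter => console.log('VAD emitter ready', emitter));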
    /**
     * Creates an AudioMixer, which is essentially a wrapper over the Web Audio
     * ChannelMergerNode. It allows the user to mix multiple MediaStreams into
     * a single one.
     *
     * @returns {AudioMixer}
     */
    createAudioMixer() {
        return new AudioMixer();
    },
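    // Usage sketch (illustrative only): the addMediaStream() and start()
    // method names are assumptions based on the AudioMixer module in
    // modules/webaudio; verify them against that module before relying on
    // this. micStream and screenAudioStream are hypothetical MediaStreams.
    //
    //     const mixer = JitsiMeetJS.createAudioMixer();
    //     mixer.addMediaStream(micStream);
    //     mixer.addMediaStream(screenAudioStream);
    //     const mixedStream = mixer.start();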
    /**
     * Go through all audio devices on the system and return one that is
     * active, i.e. has audio signal.
     *
     * @returns Promise<Object> - Object containing information about the found device.
     */
    getActiveAudioDevice() {
        return getActiveAudioDevice();
    },
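    // Usage sketch (illustrative only): logging whichever microphone currently
    // has signal. The exact shape of the resolved object is defined by the
    // ActiveDeviceDetector module and is not assumed here.
    //
    //     JitsiMeetJS.getActiveAudioDevice()
    //         .then(deviceInfo => console.log('active audio device', deviceInfo));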
    /**
     * Checks if it's possible to enumerate available cameras/microphones.
     *
     * @returns {Promise<boolean>} a Promise which will be resolved only once
     * the WebRTC stack is ready, either with true if the device listing is
     * available or with false otherwise.
     * @deprecated use JitsiMeetJS.mediaDevices.isDeviceListAvailable instead
     */
    isDeviceListAvailable() {
        logger.warn('This method is deprecated, use '
            + 'JitsiMeetJS.mediaDevices.isDeviceListAvailable instead');

        return this.mediaDevices.isDeviceListAvailable();
    },
    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     *
     * @param {string} [deviceType] - type of device to change. Default is
     * {@code undefined} or 'input', 'output' - for audio output device change.
     * @returns {boolean} {@code true} if available; {@code false}, otherwise.
     * @deprecated use JitsiMeetJS.mediaDevices.isDeviceChangeAvailable instead
     */
    isDeviceChangeAvailable(deviceType) {
        logger.warn('This method is deprecated, use '
            + 'JitsiMeetJS.mediaDevices.isDeviceChangeAvailable instead');

        return this.mediaDevices.isDeviceChangeAvailable(deviceType);
    },
    /**
     * Checks if the current environment supports having multiple audio
     * input devices in use simultaneously.
     *
     * @returns {boolean} True if multiple audio input devices can be used.
     */
    isMultipleAudioInputSupported() {
        return this.mediaDevices.isMultipleAudioInputSupported();
    },
    /**
     * Checks if local tracks can collect stats and collection is enabled.
     *
     * @returns {boolean} True if stats are being collected for local tracks.
     */
    isCollectingLocalStats() {
        return Statistics.audioLevelsEnabled && LocalStatsCollector.isLocalStatsSupported();
    },
    /**
     * Executes callback with the list of connected media devices.
     *
     * @param {function} callback
     * @deprecated use JitsiMeetJS.mediaDevices.enumerateDevices instead
     */
    enumerateDevices(callback) {
        logger.warn('This method is deprecated, use '
            + 'JitsiMeetJS.mediaDevices.enumerateDevices instead');
        this.mediaDevices.enumerateDevices(callback);
    },
    /* eslint-disable max-params */
    /**
     * The error handler that can be attached to window.onerror; if
     * options.enableWindowOnErrorHandler is enabled, this is the function used
     * by the lib (function(message, source, lineno, colno, error)).
     */
    getGlobalOnErrorHandler(message, source, lineno, colno, error) {
        logger.error(
            `UnhandledError: ${message}`,
            `Script: ${source}`,
            `Line: ${lineno}`,
            `Column: ${colno}`,
            'StackTrace: ', error);
    },
    /* eslint-enable max-params */
    /**
     * Informs lib-jitsi-meet about the current network status.
     *
     * @param {object} state - The network info state.
     * @param {boolean} state.isOnline - {@code true} if the internet connectivity is online or {@code false}
     * otherwise.
     */
    setNetworkInfo({ isOnline }) {
        NetworkInfo.updateNetworkInfo({ isOnline });
    },
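    // Usage sketch (illustrative only): keeping the library informed about
    // connectivity. Wiring the browser's online/offline events to this API is
    // an assumption of the example, not a requirement.
    //
    //     window.addEventListener('online', () => JitsiMeetJS.setNetworkInfo({ isOnline: true }));
    //     window.addEventListener('offline', () => JitsiMeetJS.setNetworkInfo({ isOnline: false }));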
    /**
     * Represents a hub/namespace for utility functionality which may be of
     * interest to lib-jitsi-meet clients.
     */
    util: {
        ScriptUtil,
        browser
    }
};