conference.js

/* global APP, JitsiMeetJS, config, interfaceConfig */

import { jitsiLocalStorage } from '@jitsi/js-utils';
import Logger from '@jitsi/logger';
import { ENDPOINT_TEXT_MESSAGE_NAME } from './modules/API/constants';
import mediaDeviceHelper from './modules/devices/mediaDeviceHelper';
import Recorder from './modules/recorder/Recorder';
import { createTaskQueue } from './modules/util/helpers';
import {
    createDeviceChangedEvent,
    createScreenSharingEvent,
    createStartSilentEvent,
    createTrackMutedEvent
} from './react/features/analytics/AnalyticsEvents';
import { sendAnalytics } from './react/features/analytics/functions';
import {
    maybeRedirectToWelcomePage,
    reloadWithStoredParams
} from './react/features/app/actions';
import { showModeratedNotification } from './react/features/av-moderation/actions';
import { shouldShowModeratedNotification } from './react/features/av-moderation/functions';
import {
    _conferenceWillJoin,
    authStatusChanged,
    conferenceFailed,
    conferenceJoinInProgress,
    conferenceJoined,
    conferenceLeft,
    conferencePropertiesChanged,
    conferenceSubjectChanged,
    conferenceTimestampChanged,
    conferenceUniqueIdSet,
    conferenceWillInit,
    conferenceWillLeave,
    dataChannelClosed,
    dataChannelOpened,
    e2eRttChanged,
    endpointMessageReceived,
    kickedOut,
    lockStateChanged,
    nonParticipantMessageReceived,
    onStartMutedPolicyChanged,
    p2pStatusChanged
} from './react/features/base/conference/actions';
import {
    AVATAR_URL_COMMAND,
    CONFERENCE_LEAVE_REASONS,
    EMAIL_COMMAND
} from './react/features/base/conference/constants';
import {
    commonUserJoinedHandling,
    commonUserLeftHandling,
    getConferenceOptions,
    sendLocalParticipant
} from './react/features/base/conference/functions';
import { getReplaceParticipant, getSsrcRewritingFeatureFlag } from './react/features/base/config/functions';
import { connect } from './react/features/base/connection/actions.web';
import {
    checkAndNotifyForNewDevice,
    getAvailableDevices,
    notifyCameraError,
    notifyMicError,
    updateDeviceList
} from './react/features/base/devices/actions.web';
import {
    areDevicesDifferent,
    filterIgnoredDevices,
    flattenAvailableDevices,
    getDefaultDeviceId,
    logDevices,
    setAudioOutputDeviceId
} from './react/features/base/devices/functions.web';
import {
    JitsiConferenceErrors,
    JitsiConferenceEvents,
    JitsiE2ePingEvents,
    JitsiMediaDevicesEvents,
    JitsiTrackEvents,
    browser
} from './react/features/base/lib-jitsi-meet';
import {
    gumPending,
    setAudioAvailable,
    setAudioMuted,
    setAudioUnmutePermissions,
    setInitialGUMPromise,
    setVideoAvailable,
    setVideoMuted,
    setVideoUnmutePermissions
} from './react/features/base/media/actions';
import { MEDIA_TYPE, VIDEO_TYPE } from './react/features/base/media/constants';
import {
    getStartWithAudioMuted,
    getStartWithVideoMuted,
    isVideoMutedByUser
} from './react/features/base/media/functions';
import { IGUMPendingState } from './react/features/base/media/types';
import {
    dominantSpeakerChanged,
    localParticipantAudioLevelChanged,
    localParticipantRoleChanged,
    participantKicked,
    participantMutedUs,
    participantPresenceChanged,
    participantRoleChanged,
    participantSourcesUpdated,
    participantUpdated,
    screenshareParticipantDisplayNameChanged,
    updateRemoteParticipantFeatures
} from './react/features/base/participants/actions';
import {
    getLocalParticipant,
    getNormalizedDisplayName,
    getParticipantByIdOrUndefined,
    getVirtualScreenshareParticipantByOwnerId
} from './react/features/base/participants/functions';
import { updateSettings } from './react/features/base/settings/actions';
import {
    addLocalTrack,
    createInitialAVTracks,
    destroyLocalTracks,
    displayErrorsForCreateInitialLocalTracks,
    replaceLocalTrack,
    setGUMPendingStateOnFailedTracks,
    toggleScreensharing as toggleScreensharingA,
    trackAdded,
    trackRemoved
} from './react/features/base/tracks/actions';
import {
    createLocalTracksF,
    getLocalJitsiAudioTrack,
    getLocalJitsiVideoTrack,
    getLocalTracks,
    getLocalVideoTrack,
    isLocalTrackMuted,
    isUserInteractionRequiredForUnmute
} from './react/features/base/tracks/functions';
import { downloadJSON } from './react/features/base/util/downloadJSON';
import { getJitsiMeetGlobalNSConnectionTimes } from './react/features/base/util/helpers';
import { openLeaveReasonDialog } from './react/features/conference/actions.web';
import { showDesktopPicker } from './react/features/desktop-picker/actions';
import { appendSuffix } from './react/features/display-name/functions';
import { maybeOpenFeedbackDialog, submitFeedback } from './react/features/feedback/actions';
import { maybeSetLobbyChatMessageListener } from './react/features/lobby/actions.any';
import { setNoiseSuppressionEnabled } from './react/features/noise-suppression/actions';
import {
    hideNotification,
    showErrorNotification,
    showNotification,
    showWarningNotification
} from './react/features/notifications/actions';
import {
    DATA_CHANNEL_CLOSED_NOTIFICATION_ID,
    NOTIFICATION_TIMEOUT_TYPE
} from './react/features/notifications/constants';
import { isModerationNotificationDisplayed } from './react/features/notifications/functions';
import { suspendDetected } from './react/features/power-monitor/actions';
import { initPrejoin, isPrejoinPageVisible } from './react/features/prejoin/functions';
import { disableReceiver, stopReceiver } from './react/features/remote-control/actions';
import { setScreenAudioShareState } from './react/features/screen-share/actions.web';
import { isScreenAudioShared } from './react/features/screen-share/functions';
import { toggleScreenshotCaptureSummary } from './react/features/screenshot-capture/actions';
import { AudioMixerEffect } from './react/features/stream-effects/audio-mixer/AudioMixerEffect';
import { createRnnoiseProcessor } from './react/features/stream-effects/rnnoise';
import { handleToggleVideoMuted } from './react/features/toolbox/actions.any';
import { transcriberJoined, transcriberLeft } from './react/features/transcribing/actions';
import { muteLocal } from './react/features/video-menu/actions.any';

const logger = Logger.getLogger(__filename);

let room;

/*
 * Logic to open a desktop picker put on the window global for
 * lib-jitsi-meet to detect and invoke
 */
window.JitsiMeetScreenObtainer = {
    openDesktopPicker(options, onSourceChoose) {
        APP.store.dispatch(showDesktopPicker(options, onSourceChoose));
    }
};

/**
 * Known custom conference commands.
 */
const commands = {
    AVATAR_URL: AVATAR_URL_COMMAND,
    CUSTOM_ROLE: 'custom-role',
    EMAIL: EMAIL_COMMAND,
    ETHERPAD: 'etherpad'
};

/**
 * Shares data with the other participants.
 * @param command the command
 * @param {string} value new value
 */
function sendData(command, value) {
    if (!room) {
        return;
    }

    room.removeCommand(command);
    room.sendCommand(command, { value });
}
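
// Illustrative usage (not part of the original file): callers pass one of the
// known command names from `commands` above together with a value, e.g.
// sendData(commands.EMAIL, 'user@example.com'); the helper first removes any
// previously published command of that name and then re-publishes it with the
// new value so the other participants receive the update.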

/**
 * Mutes or unmutes the local audio stream, if it exists.
 * @param {boolean} muted - whether the audio stream should be muted or unmuted.
 */
function muteLocalAudio(muted) {
    APP.store.dispatch(setAudioMuted(muted));
}

/**
 * Mutes or unmutes the local video stream, if it exists.
 * @param {boolean} muted whether the video stream should be muted or unmuted.
 */
function muteLocalVideo(muted) {
    APP.store.dispatch(setVideoMuted(muted));
}

/**
 * A queue for the async replaceLocalTrack action so that multiple audio
 * replacements cannot happen simultaneously. This solves the issue where
 * replaceLocalTrack is called multiple times with an oldTrack of null, causing
 * multiple local tracks of the same type to be used.
 *
 * @private
 * @type {Object}
 */
const _replaceLocalAudioTrackQueue = createTaskQueue();

/**
 * A task queue for replacing local video tracks. This separate queue exists
 * so that video replacement is not blocked by audio replacement tasks in the
 * queue {@link _replaceLocalAudioTrackQueue}.
 *
 * @private
 * @type {Object}
 */
const _replaceLocalVideoTrackQueue = createTaskQueue();
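
// Based on how these queues are used in useVideoStream/useAudioStream below:
// each queue runs its enqueued tasks strictly one at a time. A task receives
// an `onFinish` callback and the next task is not started until the previous
// one has invoked it, which is what prevents concurrent replaceLocalTrack
// calls for the same media type.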

/**
 * Helper which resolves or rejects a conference join attempt based on the
 * CONFERENCE_JOINED and CONFERENCE_FAILED events.
 */
class ConferenceConnector {
    /**
     * Creates the connector and subscribes to the conference joined/failed
     * events.
     */
    constructor(resolve, reject, conference) {
        this._conference = conference;
        this._resolve = resolve;
        this._reject = reject;
        this.reconnectTimeout = null;
        room.on(JitsiConferenceEvents.CONFERENCE_JOINED,
            this._handleConferenceJoined.bind(this));
        room.on(JitsiConferenceEvents.CONFERENCE_FAILED,
            this._onConferenceFailed.bind(this));
    }

    /**
     * Unsubscribes from the conference events and rejects the join promise
     * with the given error.
     */
    _handleConferenceFailed(err) {
        this._unsubscribe();
        this._reject(err);
    }

    /**
     * Handles CONFERENCE_FAILED: dispatches the failure to the store and
     * reacts to the specific error.
     */
    _onConferenceFailed(err, ...params) {
        APP.store.dispatch(conferenceFailed(room, err, ...params));
        logger.error('CONFERENCE FAILED:', err, ...params);

        switch (err) {
        case JitsiConferenceErrors.RESERVATION_ERROR: {
            const [ code, msg ] = params;

            APP.store.dispatch(showErrorNotification({
                descriptionArguments: {
                    code,
                    msg
                },
                descriptionKey: 'dialog.reservationErrorMsg',
                titleKey: 'dialog.reservationError'
            }, NOTIFICATION_TIMEOUT_TYPE.LONG));
            break;
        }
        case JitsiConferenceErrors.GRACEFUL_SHUTDOWN:
            APP.store.dispatch(showErrorNotification({
                descriptionKey: 'dialog.gracefulShutdown',
                titleKey: 'dialog.serviceUnavailable'
            }, NOTIFICATION_TIMEOUT_TYPE.LONG));
            break;

        // FIXME FOCUS_DISCONNECTED is a confusing event name.
        // What really happens there is that the library is not ready yet,
        // because Jicofo is not available, but it is going to give it another
        // try.
        case JitsiConferenceErrors.FOCUS_DISCONNECTED: {
            const [ focus, retrySec ] = params;

            APP.store.dispatch(showNotification({
                descriptionKey: focus,
                titleKey: retrySec
            }, NOTIFICATION_TIMEOUT_TYPE.SHORT));
            break;
        }
        case JitsiConferenceErrors.FOCUS_LEFT:
        case JitsiConferenceErrors.ICE_FAILED:
        case JitsiConferenceErrors.VIDEOBRIDGE_NOT_AVAILABLE:
        case JitsiConferenceErrors.OFFER_ANSWER_FAILED:
            APP.store.dispatch(conferenceWillLeave(room));

            // FIXME the conference should be stopped by the library and not by
            // the app. Both the errors above are unrecoverable from the library
            // perspective.
            room.leave(CONFERENCE_LEAVE_REASONS.UNRECOVERABLE_ERROR).then(() => APP.connection.disconnect());
            break;
        case JitsiConferenceErrors.INCOMPATIBLE_SERVER_VERSIONS:
            APP.store.dispatch(reloadWithStoredParams());
            break;
        default:
            this._handleConferenceFailed(err, ...params);
        }
    }

    /**
     * Removes the conference event listeners and clears any pending reconnect
     * timeout.
     */
    _unsubscribe() {
        room.off(
            JitsiConferenceEvents.CONFERENCE_JOINED,
            this._handleConferenceJoined);
        room.off(
            JitsiConferenceEvents.CONFERENCE_FAILED,
            this._onConferenceFailed);
        if (this.reconnectTimeout !== null) {
            clearTimeout(this.reconnectTimeout);
        }
    }

    /**
     * Unsubscribes from the conference events and resolves the join promise.
     */
    _handleConferenceJoined() {
        this._unsubscribe();
        this._resolve();
    }

    /**
     * Joins the conference room.
     */
    connect() {
        const replaceParticipant = getReplaceParticipant(APP.store.getState());

        // the local storage overrides here and in connection.js can be used by jibri
        room.join(jitsiLocalStorage.getItem('xmpp_conference_password_override'), replaceParticipant);
    }
}

/**
 * Disconnects the connection.
 * @returns resolved Promise. We need this in order to make the Promise.all
 * call in hangup() resolve when all operations are finished.
 */
function disconnect() {
    const onDisconnected = () => {
        APP.API.notifyConferenceLeft(APP.conference.roomName);

        return Promise.resolve();
    };

    if (!APP.connection) {
        return onDisconnected();
    }

    return APP.connection.disconnect().then(onDisconnected, onDisconnected);
}

export default {
    /**
     * Flag used to delay modification of the muted status of local media tracks
     * until those are created (or not, but at that point it's certain that
     * the tracks won't exist).
     */
    _localTracksInitialized: false,

    /**
     * Flag used to prevent the creation of another local video track in this.muteVideo if one is already in progress.
     */
    isCreatingLocalTrack: false,

    isSharingScreen: false,

    /**
     * Returns an object containing a promise which resolves with the created tracks &
     * the errors resulting from that process.
     * @param {object} options
     * @param {boolean} options.startAudioOnly=false - if <tt>true</tt> then
     * only audio track will be created and the audio only mode will be turned
     * on.
     * @param {boolean} options.startScreenSharing=false - if <tt>true</tt>
     * should start with screensharing instead of camera video.
     * @param {boolean} options.startWithAudioMuted - will start the conference
     * without any audio tracks.
     * @param {boolean} options.startWithVideoMuted - will start the conference
     * without any video tracks.
     * @param {boolean} recordTimeMetrics - If true time metrics will be recorded.
     * @returns {Promise<JitsiLocalTrack[]>, Object}
     */
    createInitialLocalTracks(options = {}, recordTimeMetrics = false) {
        const errors = {};

        // Always get a handle on the audio input device so that we have statistics (such as "No audio input" or
        // "Are you trying to speak?" ) even if the user joins the conference muted.
        const initialDevices = config.startSilent || config.disableInitialGUM ? [] : [ MEDIA_TYPE.AUDIO ];
        const requestedAudio = !config.disableInitialGUM;
        let requestedVideo = false;

        if (!config.disableInitialGUM
                && !options.startWithVideoMuted
                && !options.startAudioOnly
                && !options.startScreenSharing) {
            initialDevices.push(MEDIA_TYPE.VIDEO);
            requestedVideo = true;
        }

        let tryCreateLocalTracks = Promise.resolve([]);

        // On Electron there is no permission prompt for granting permissions. That's why we don't need to
        // spend much time displaying the overlay screen. If GUM is not resolved within 15 seconds it will
        // probably never resolve.
        const timeout = browser.isElectron() ? 15000 : 60000;
        const audioOptions = {
            devices: [ MEDIA_TYPE.AUDIO ],
            timeout
        };

        // Spot uses the _desktopSharingSourceDevice config option to use an external video input device label as
        // screenshare and calls getUserMedia instead of getDisplayMedia for capturing the media.
        if (options.startScreenSharing && config._desktopSharingSourceDevice) {
            tryCreateLocalTracks = this._createDesktopTrack()
                .then(([ desktopStream ]) => {
                    if (!requestedAudio) {
                        return [ desktopStream ];
                    }

                    return createLocalTracksF(audioOptions)
                        .then(([ audioStream ]) =>
                            [ desktopStream, audioStream ])
                        .catch(error => {
                            errors.audioOnlyError = error;

                            return [ desktopStream ];
                        });
                })
                .catch(error => {
                    logger.error('Failed to obtain desktop stream', error);
                    errors.screenSharingError = error;

                    return requestedAudio ? createLocalTracksF(audioOptions) : [];
                })
                .catch(error => {
                    errors.audioOnlyError = error;

                    return [];
                });
        } else if (requestedAudio || requestedVideo) {
            tryCreateLocalTracks = APP.store.dispatch(createInitialAVTracks({
                devices: initialDevices,
                timeout
            }, recordTimeMetrics)).then(({ tracks, errors: pErrors }) => {
                Object.assign(errors, pErrors);

                return tracks;
            });
        }

        return {
            tryCreateLocalTracks,
            errors
        };
    },

    /**
     * Creates the room, applies the initial mute state to the given tracks and
     * starts the conference join flow.
     */
    startConference(tracks) {
        tracks.forEach(track => {
            if ((track.isAudioTrack() && this.isLocalAudioMuted())
                || (track.isVideoTrack() && this.isLocalVideoMuted())) {
                const mediaType = track.getType();

                sendAnalytics(
                    createTrackMutedEvent(mediaType, 'initial mute'));
                logger.log(`${mediaType} mute: initially muted.`);
                track.mute();
            }
        });

        this._createRoom(tracks);

        // if user didn't give access to mic or camera or doesn't have
        // them at all, we mark corresponding toolbar buttons as muted,
        // so that the user can try unmute later on and add audio/video
        // to the conference
        if (!tracks.find(t => t.isAudioTrack())) {
            this.updateAudioIconEnabled();
        }

        if (!tracks.find(t => t.isVideoTrack())) {
            this.setVideoMuteStatus();
        }

        if (config.iAmRecorder) {
            this.recorder = new Recorder();
        }

        if (config.startSilent) {
            sendAnalytics(createStartSilentEvent());
            APP.store.dispatch(showNotification({
                descriptionKey: 'notify.startSilentDescription',
                titleKey: 'notify.startSilentTitle'
            }, NOTIFICATION_TIMEOUT_TYPE.LONG));
        }

        // XXX The API will take care of disconnecting from the XMPP
        // server (and, thus, leaving the room) on unload.
        return new Promise((resolve, reject) => {
            new ConferenceConnector(resolve, reject, this).connect();
        });
    },

    /**
     * Open new connection and join the conference when prejoin page is not enabled.
     * If prejoin page is enabled, open a new connection in the background
     * and create local tracks.
     *
     * @param {{ roomName: string, shouldDispatchConnect }} options
     * @returns {Promise}
     */
    async init({ roomName, shouldDispatchConnect }) {
        const state = APP.store.getState();
        const initialOptions = {
            startAudioOnly: config.startAudioOnly,
            startScreenSharing: config.startScreenSharing,
            startWithAudioMuted: getStartWithAudioMuted(state) || isUserInteractionRequiredForUnmute(state),
            startWithVideoMuted: getStartWithVideoMuted(state) || isUserInteractionRequiredForUnmute(state)
        };
        const connectionTimes = getJitsiMeetGlobalNSConnectionTimes();
        const startTime = window.performance.now();

        connectionTimes['conference.init.start'] = startTime;
        logger.debug(`Executed conference.init with roomName: ${roomName} (performance.now=${startTime})`);

        this.roomName = roomName;

        try {
            // Initialize the device list first. This way, when creating tracks based on preferred devices, loose label
            // matching can be done in cases where the exact ID match is no longer available, such as -
            // 1. When the camera device has switched USB ports.
            // 2. When in startSilent mode we want to start with audio muted
            await this._initDeviceList();
        } catch (error) {
            logger.warn('initial device list initialization failed', error);
        }

        // Filter out the local tracks based on various config options, i.e., when user joins muted or is muted by
        // focus. However, audio track will always be created even though it is not added to the conference since we
        // want audio related features (noisy mic, talk while muted, etc.) to work even if the mic is muted.
        const handleInitialTracks = (options, tracks) => {
            let localTracks = tracks;

            if (options.startWithAudioMuted) {
                // Always add the track on Safari because of a known issue where audio playout doesn't happen
                // if the user joins audio and video muted, i.e., if there is no local media capture.
                if (browser.isWebKitBased()) {
                    this.muteAudio(true, true);
                } else {
                    localTracks = localTracks.filter(track => track.getType() !== MEDIA_TYPE.AUDIO);
                }
            }

            return localTracks;
        };
        const { dispatch, getState } = APP.store;
        const createLocalTracksStart = window.performance.now();

        connectionTimes['conference.init.createLocalTracks.start'] = createLocalTracksStart;
        logger.debug(`(TIME) createInitialLocalTracks: ${createLocalTracksStart} `);

        const { tryCreateLocalTracks, errors } = this.createInitialLocalTracks(initialOptions, true);

        tryCreateLocalTracks.then(async tr => {
            const createLocalTracksEnd = window.performance.now();

            connectionTimes['conference.init.createLocalTracks.end'] = createLocalTracksEnd;
            logger.debug(`(TIME) createInitialLocalTracks finished: ${createLocalTracksEnd} `);
            const tracks = handleInitialTracks(initialOptions, tr);

            this._initDeviceList(true);

            const { initialGUMPromise } = getState()['features/base/media'];

            if (isPrejoinPageVisible(getState())) {
                dispatch(gumPending([ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ], IGUMPendingState.NONE));

                // Since the conference is not yet created in redux this function will execute synchronously,
                // which guarantees that the local tracks are added to redux before we proceed.
                initPrejoin(tracks, errors, dispatch);

                connectionTimes['conference.init.end'] = window.performance.now();

                // resolve the initialGUMPromise in case connect has finished so that we can proceed to join.
                if (initialGUMPromise) {
                    logger.debug('Resolving the initialGUM promise! (prejoinVisible=true)');
                    initialGUMPromise.resolve({
                        tracks,
                        errors
                    });
                }

                logger.debug('Clear the initialGUM promise! (prejoinVisible=true)');

                // For prejoin we don't need the initial GUM promise since the tracks are already added to the store
                // via initPrejoin
                dispatch(setInitialGUMPromise());
            } else {
                APP.store.dispatch(displayErrorsForCreateInitialLocalTracks(errors));
                setGUMPendingStateOnFailedTracks(tracks, APP.store.dispatch);

                connectionTimes['conference.init.end'] = window.performance.now();

                if (initialGUMPromise) {
                    logger.debug('Resolving the initialGUM promise!');
                    initialGUMPromise.resolve({
                        tracks,
                        errors
                    });
                }
            }
        });

        if (shouldDispatchConnect) {
            logger.info('Dispatching connect from init since prejoin is not visible.');
            dispatch(connect());
        }
    },

    /**
     * Checks whether the given id belongs to the local user.
     * @param {string} id id to check
     * @returns {boolean}
     */
    isLocalId(id) {
        return this.getMyUserId() === id;
    },

    /**
     * Tells whether the local video is muted or not.
     * @return {boolean}
     */
    isLocalVideoMuted() {
        // If the tracks are not ready, read from base/media state
        return this._localTracksInitialized
            ? isLocalTrackMuted(APP.store.getState()['features/base/tracks'], MEDIA_TYPE.VIDEO)
            : isVideoMutedByUser(APP.store);
    },

    /**
     * Verifies whether there is an ongoing system audio sharing session and, if so, applies it to the provided track
     * as an AudioMixer effect.
     *
     * @param {*} localAudioTrack - track to which the system audio track will be applied as an effect, most likely
     * the local microphone audio track.
     */
    async _maybeApplyAudioMixerEffect(localAudioTrack) {
        // At the time of writing this comment there were two separate flows for toggling screen-sharing
        // and system audio sharing: the first is the legacy method using the functionality from conference.js,
        // the second is used when both sendMultipleVideoStreams and sourceNameSignaling flags are set to true.
        // The second flow uses functionality from base/conference/middleware.web.js.
        // We check if system audio sharing was done using the first flow by verifying this._desktopAudioStream and
        // for the second by checking 'features/screen-share' state.
        const { desktopAudioTrack } = APP.store.getState()['features/screen-share'];
        const currentDesktopAudioTrack = this._desktopAudioStream || desktopAudioTrack;

        // If system audio is already being sent, mix it with the provided audio track.
        if (currentDesktopAudioTrack) {
            // In case system audio sharing was done in the absence of an initial mic audio track, there is no
            // AudioMixerEffect so we have to remove the system audio track from the room before setting it as an effect.
            await room.replaceTrack(currentDesktopAudioTrack, null);
            this._mixerEffect = new AudioMixerEffect(currentDesktopAudioTrack);
            logger.debug('Mixing new audio track with existing screen audio track.');
            await localAudioTrack.setEffect(this._mixerEffect);
        }
    },

    /**
     * Simulates toolbar button click for audio mute. Used by shortcuts and API.
     *
     * @param {boolean} mute true for mute and false for unmute.
     * @param {boolean} [showUI] when set to false will not display any error
     * dialogs in case of media permissions error.
     * @returns {Promise}
     */
    async muteAudio(mute, showUI = true) {
        const state = APP.store.getState();

        if (!mute
                && isUserInteractionRequiredForUnmute(state)) {
            logger.error('Unmuting audio requires user interaction');

            return;
        }

        // check for A/V Moderation when trying to unmute
        if (!mute && shouldShowModeratedNotification(MEDIA_TYPE.AUDIO, state)) {
            if (!isModerationNotificationDisplayed(MEDIA_TYPE.AUDIO, state)) {
                APP.store.dispatch(showModeratedNotification(MEDIA_TYPE.AUDIO));
            }

            return;
        }

        // Not ready to modify track's state yet
        if (!this._localTracksInitialized) {
            // This will only modify base/media.audio.muted which is then synced
            // up with the track at the end of local tracks initialization.
            muteLocalAudio(mute);
            this.updateAudioIconEnabled();

            return;
        } else if (this.isLocalAudioMuted() === mute) {
            // NO-OP
            return;
        }

        const localAudio = getLocalJitsiAudioTrack(APP.store.getState());

        if (!localAudio && !mute) {
            const maybeShowErrorDialog = error => {
                showUI && APP.store.dispatch(notifyMicError(error));
            };

            APP.store.dispatch(gumPending([ MEDIA_TYPE.AUDIO ], IGUMPendingState.PENDING_UNMUTE));
            await createLocalTracksF({ devices: [ 'audio' ] })
                .then(([ audioTrack ]) => audioTrack)
                .catch(error => {
                    maybeShowErrorDialog(error);

                    // Rollback the audio muted status by using null track
                    return null;
                })
                .then(async audioTrack => {
                    await this._maybeApplyAudioMixerEffect(audioTrack);

                    return this.useAudioStream(audioTrack);
                })
                .finally(() => {
                    APP.store.dispatch(gumPending([ MEDIA_TYPE.AUDIO ], IGUMPendingState.NONE));
                });
        } else {
            muteLocalAudio(mute);
        }
    },

    /**
     * Returns whether local audio is muted or not.
     * @returns {boolean}
     */
    isLocalAudioMuted() {
        // If the tracks are not ready, read from base/media state
        return this._localTracksInitialized
            ? isLocalTrackMuted(
                APP.store.getState()['features/base/tracks'],
                MEDIA_TYPE.AUDIO)
            : Boolean(
                APP.store.getState()['features/base/media'].audio.muted);
    },

    /**
     * Simulates toolbar button click for audio mute. Used by shortcuts
     * and API.
     * @param {boolean} [showUI] when set to false will not display any error
     * dialogs in case of media permissions error.
     */
    toggleAudioMuted(showUI = true) {
        this.muteAudio(!this.isLocalAudioMuted(), showUI);
    },

    /**
     * Simulates toolbar button click for video mute. Used by shortcuts and API.
     * @param mute true for mute and false for unmute.
     * @param {boolean} [showUI] when set to false will not display any error
     * dialogs in case of media permissions error.
     */
    muteVideo(mute, showUI = true) {
        if (this.videoSwitchInProgress) {
            logger.warn('muteVideo - unable to perform operations while video switch is in progress');

            return;
        }

        const state = APP.store.getState();

        if (!mute
                && isUserInteractionRequiredForUnmute(state)) {
            logger.error('Unmuting video requires user interaction');

            return;
        }

        // check for A/V Moderation when trying to unmute and return early
        if (!mute && shouldShowModeratedNotification(MEDIA_TYPE.VIDEO, state)) {
            return;
        }

        // If not ready to modify track's state yet adjust the base/media
        if (!this._localTracksInitialized) {
            // This will only modify base/media.video.muted which is then synced
            // up with the track at the end of local tracks initialization.
            muteLocalVideo(mute);
            this.setVideoMuteStatus();

            return;
        } else if (this.isLocalVideoMuted() === mute) {
            // NO-OP
            return;
        }

        const localVideo = getLocalJitsiVideoTrack(state);

        if (!localVideo && !mute && !this.isCreatingLocalTrack) {
            const maybeShowErrorDialog = error => {
                showUI && APP.store.dispatch(notifyCameraError(error));
            };

            this.isCreatingLocalTrack = true;

            APP.store.dispatch(gumPending([ MEDIA_TYPE.VIDEO ], IGUMPendingState.PENDING_UNMUTE));

            // Try to create local video if there wasn't any.
            // This handles the case when user joined with no video
            // (dismissed screen sharing screen or in audio only mode), but
            // decided to add it later on by clicking on muted video icon or
            // turning off the audio only mode.
            //
            // FIXME when local track creation is moved to react/redux
            // it should take care of the use case described above
            createLocalTracksF({ devices: [ 'video' ] })
                .then(([ videoTrack ]) => videoTrack)
                .catch(error => {
                    // FIXME should send some feedback to the API on error ?
                    maybeShowErrorDialog(error);

                    // Rollback the video muted status by using null track
                    return null;
                })
                .then(videoTrack => {
                    logger.debug(`muteVideo: calling useVideoStream for track: ${videoTrack}`);

                    return this.useVideoStream(videoTrack);
                })
                .finally(() => {
                    this.isCreatingLocalTrack = false;
                    APP.store.dispatch(gumPending([ MEDIA_TYPE.VIDEO ], IGUMPendingState.NONE));
                });
        } else {
            // FIXME show error dialog if it fails (should be handled by react)
            muteLocalVideo(mute);
        }
    },

    /**
     * Simulates toolbar button click for video mute. Used by shortcuts and API.
     * @param {boolean} [showUI] when set to false will not display any error
     * dialogs in case of media permissions error.
     * @param {boolean} ensureTrack - True if we want to ensure that a new track is
     * created if missing.
     */
    toggleVideoMuted(showUI = true, ensureTrack = false) {
        const mute = !this.isLocalVideoMuted();

        APP.store.dispatch(handleToggleVideoMuted(mute, showUI, ensureTrack));
    },

    /**
     * Retrieve list of ids of conference participants (without local user).
     * @returns {string[]}
     */
    listMembersIds() {
        return room.getParticipants().map(p => p.getId());
    },

    /**
     * Checks whether the participant identified by id is a moderator.
     * @param id id of the participant to search for
     * @return {boolean} whether the participant is moderator
     */
    isParticipantModerator(id) {
        const user = room.getParticipantById(id);

        return user && user.isModerator();
    },

    /**
     * Retrieve list of conference participants (without local user).
     * @returns {JitsiParticipant[]}
     *
     * NOTE: Used by jitsi-meet-torture!
     */
    listMembers() {
        return room.getParticipants();
    },

    /**
     * Used by Jibri to detect when it's alone and the meeting should be terminated.
     */
    get membersCount() {
        return room.getParticipants()
            .filter(p => !p.isHidden() || !(config.iAmRecorder && p.isHiddenFromRecorder())).length + 1;
    },

    /**
     * Get speaker stats that track total dominant speaker time.
     *
     * @returns {object} A hash with keys being user ids and values being the
     * library's SpeakerStats model used for calculating time as dominant
     * speaker.
     */
    getSpeakerStats() {
        return room.getSpeakerStats();
    },

    // used by torture currently
    isJoined() {
        return room && room.isJoined();
    },

    getConnectionState() {
        return room && room.getConnectionState();
    },

    /**
     * Obtains current P2P ICE connection state.
     * @return {string|null} ICE connection state or <tt>null</tt> if there's no
     * P2P connection
     */
    getP2PConnectionState() {
        return room && room.getP2PConnectionState();
    },

    /**
     * Starts P2P (for tests only)
     * @private
     */
    _startP2P() {
        try {
            room && room.startP2PSession();
        } catch (error) {
            logger.error('Start P2P failed', error);

            throw error;
        }
    },

    /**
     * Stops P2P (for tests only)
     * @private
     */
    _stopP2P() {
        try {
            room && room.stopP2PSession();
        } catch (error) {
            logger.error('Stop P2P failed', error);

            throw error;
        }
    },

    /**
     * Checks whether or not our connection is currently interrupted and
     * reconnect attempts are in progress.
     *
     * @returns {boolean} true if the connection is in interrupted state or
     * false otherwise.
     */
    isConnectionInterrupted() {
        return room.isConnectionInterrupted();
    },

    /**
     * Finds JitsiParticipant for given id.
     *
     * @param {string} id participant's identifier (MUC nickname).
     *
     * @returns {JitsiParticipant|null} participant instance for given id or
     * null if not found.
     */
    getParticipantById(id) {
        return room ? room.getParticipantById(id) : null;
    },

    getMyUserId() {
        return room && room.myUserId();
    },

    /**
     * Will be filled with values only when config.debug is enabled.
     * It's used by torture to check audio levels.
     */
    audioLevelsMap: {},

    /**
     * Returns the stored audio level (stored only if config.debug is enabled)
     * @param id the id for the user audio level to return (the id value is
     * returned for the participant using getMyUserId() method)
     */
    getPeerSSRCAudioLevel(id) {
        return this.audioLevelsMap[id];
    },

    /**
     * @return {number} the number of participants in the conference with at
     * least one track.
     */
    getNumberOfParticipantsWithTracks() {
        return room.getParticipants()
            .filter(p => p.getTracks().length > 0)
            .length;
    },

    /**
     * Returns the stats.
     */
    getStats() {
        return room.connectionQuality.getStats();
    },

    // end used by torture

    /**
     * Download logs, a function that can be called from console while
     * debugging.
     * @param filename (optional) specify target filename
     */
    saveLogs(filename = 'meetlog.json') {
        // this can be called from console and will not have reference to this
        // that's why we reference the global var
        const logs = APP.connection.getLogs();

        downloadJSON(logs, filename);
    },

    /**
     * Exposes a Command(s) API on this instance. It is necessitated by (1) the
     * desire to keep room private to this instance and (2) the need of other
     * modules to send and receive commands to and from participants.
     * Eventually, this instance remains in control with respect to the
     * decision whether the Command(s) API of room (i.e. lib-jitsi-meet's
     * JitsiConference) is to be used in the implementation of the Command(s)
     * API of this instance.
     */
    commands: {
        /**
         * Known custom conference commands.
         */
        defaults: commands,

        /**
         * Receives notifications from other participants about commands aka
         * custom events (sent by sendCommand or sendCommandOnce methods).
         * @param command {String} the name of the command
         * @param handler {Function} handler for the command
         */
        addCommandListener() {
            // eslint-disable-next-line prefer-rest-params
            room.addCommandListener(...arguments);
        },

        /**
         * Removes command.
         * @param name {String} the name of the command.
         */
        removeCommand() {
            // eslint-disable-next-line prefer-rest-params
            room.removeCommand(...arguments);
        },

        /**
         * Sends command.
         * @param name {String} the name of the command.
         * @param values {Object} with keys and values that will be sent.
         */
        sendCommand() {
            // eslint-disable-next-line prefer-rest-params
            room.sendCommand(...arguments);
        },

        /**
         * Sends command one time.
         * @param name {String} the name of the command.
         * @param values {Object} with keys and values that will be sent.
         */
        sendCommandOnce() {
            // eslint-disable-next-line prefer-rest-params
            room.sendCommandOnce(...arguments);
        }
    },
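
    // Illustrative usage (not from the original file): a module holding a
    // reference to this object could register for and send a custom command,
    // e.g.
    //     conference.commands.addCommandListener(
    //         conference.commands.defaults.ETHERPAD, handler);
    //     conference.commands.sendCommand(
    //         conference.commands.defaults.ETHERPAD, { value: someValue });
    // where `conference`, `handler` and `someValue` are placeholders for the
    // caller's own references.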

    /**
     * Used by the Breakout Rooms feature to join a breakout room or go back to the main room.
     */
    async joinRoom(roomName, options) {
        APP.store.dispatch(conferenceWillInit());

        // Restore initial state.
        this._localTracksInitialized = false;
        this.isSharingScreen = false;
        this.roomName = roomName;

        const { tryCreateLocalTracks, errors } = this.createInitialLocalTracks(options);
        const localTracks = await tryCreateLocalTracks;

        APP.store.dispatch(displayErrorsForCreateInitialLocalTracks(errors));
        localTracks.forEach(track => {
            if ((track.isAudioTrack() && this.isLocalAudioMuted())
                || (track.isVideoTrack() && this.isLocalVideoMuted())) {
                track.mute();
            }
        });
        this._createRoom(localTracks);

        return new Promise((resolve, reject) => {
            new ConferenceConnector(resolve, reject, this).connect();
        });
    },

    /**
     * Creates the JitsiConference instance, adds the given local tracks and
     * wires up the conference listeners.
     * @private
     */
    _createRoom(localTracks) {
        room = APP.connection.initJitsiConference(APP.conference.roomName, this._getConferenceOptions());

        // Filter out the tracks that are muted (except on Safari).
        let tracks = localTracks;

        if (!browser.isWebKitBased()) {
            const mutedTrackTypes = [];

            tracks = localTracks.filter(track => {
                if (!track.isMuted()) {
                    return true;
                }

                if (track.getVideoType() !== VIDEO_TYPE.DESKTOP) {
                    mutedTrackTypes.push(track.getType());
                }

                return false;
            });
            APP.store.dispatch(gumPending(mutedTrackTypes, IGUMPendingState.NONE));
        }

        this._room = room; // FIXME do not use this

        APP.store.dispatch(_conferenceWillJoin(room));

        this._setLocalAudioVideoStreams(tracks);
        sendLocalParticipant(APP.store, room);

        this._setupListeners();
    },

    /**
     * Sets local video and audio streams.
     * @param {JitsiLocalTrack[]} tracks=[]
     * @returns {Promise[]}
     * @private
     */
    _setLocalAudioVideoStreams(tracks = []) {
        const { dispatch } = APP.store;
        const pendingGUMDevicesToRemove = [];
        const promises = tracks.map(track => {
            if (track.isAudioTrack()) {
                pendingGUMDevicesToRemove.push(MEDIA_TYPE.AUDIO);

                return this.useAudioStream(track);
            } else if (track.isVideoTrack()) {
                logger.debug(`_setLocalAudioVideoStreams is calling useVideoStream with track: ${track}`);
                pendingGUMDevicesToRemove.push(MEDIA_TYPE.VIDEO);

                return this.useVideoStream(track);
            }

            logger.error('Ignored a track that is neither audio nor video: ', track);

            return Promise.resolve();
        });

        return Promise.allSettled(promises).then(() => {
            if (pendingGUMDevicesToRemove.length > 0) {
                dispatch(gumPending(pendingGUMDevicesToRemove, IGUMPendingState.NONE));
            }

            this._localTracksInitialized = true;
            logger.log(`Initialized with ${tracks.length} local tracks`);
        });
    },

    _getConferenceOptions() {
        const options = getConferenceOptions(APP.store.getState());

        options.createVADProcessor = createRnnoiseProcessor;

        return options;
    },
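
    // Note, based on the imports above: createRnnoiseProcessor comes from
    // react/features/stream-effects/rnnoise, so the conference options are
    // extended here with a voice-activity-detection processor factory that
    // lib-jitsi-meet can use for the audio-related features mentioned earlier
    // in this file (e.g. "noisy mic" / "talk while muted").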

    /**
     * Start using provided video stream.
     * Stops previous video stream.
     * @param {JitsiLocalTrack} newTrack - new track to use or null
     * @returns {Promise}
     */
    useVideoStream(newTrack) {
        logger.debug(`useVideoStream: ${newTrack}`);

        return new Promise((resolve, reject) => {
            _replaceLocalVideoTrackQueue.enqueue(onFinish => {
                const state = APP.store.getState();
                const oldTrack = getLocalJitsiVideoTrack(state);

                logger.debug(`useVideoStream: Replacing ${oldTrack} with ${newTrack}`);

                if (oldTrack === newTrack || (!oldTrack && !newTrack)) {
                    resolve();
                    onFinish();

                    return;
                }

                // Add the track to the conference if there is no existing track, replace it otherwise.
                const trackAction = oldTrack
                    ? replaceLocalTrack(oldTrack, newTrack, room)
                    : addLocalTrack(newTrack);

                APP.store.dispatch(trackAction)
                    .then(() => {
                        this.setVideoMuteStatus();
                    })
                    .then(resolve)
                    .catch(error => {
                        logger.error(`useVideoStream failed: ${error}`);
                        reject(error);
                    })
                    .then(onFinish);
            });
        });
    },

    /**
     * Start using provided audio stream.
     * Stops previous audio stream.
     * @param {JitsiLocalTrack} newTrack - new track to use or null
     * @returns {Promise}
     */
    useAudioStream(newTrack) {
        return new Promise((resolve, reject) => {
            _replaceLocalAudioTrackQueue.enqueue(onFinish => {
                const oldTrack = getLocalJitsiAudioTrack(APP.store.getState());

                if (oldTrack === newTrack) {
                    resolve();
                    onFinish();

                    return;
                }

                APP.store.dispatch(replaceLocalTrack(oldTrack, newTrack, room))
                    .then(() => {
                        this.updateAudioIconEnabled();
                    })
                    .then(resolve)
                    .catch(reject)
                    .then(onFinish);
            });
        });
    },

    /**
     * Returns whether or not the conference is currently in audio only mode.
     *
     * @returns {boolean}
     */
    isAudioOnly() {
        return Boolean(APP.store.getState()['features/base/audio-only'].enabled);
    },

    videoSwitchInProgress: false,

    /**
     * This field stores a handler which will create a Promise which turns off
     * the screen sharing and restores the previous video state (was there
     * any video before switching to screen sharing? was it muted?).
     *
     * Once called, this field is cleared to <tt>null</tt>.
     * @type {Function|null}
     */
    _untoggleScreenSharing: null,

    /**
     * Creates a Promise which turns off the screen sharing and restores
     * the previous state described by the arguments.
     *
     * This method is bound to the appropriate values, after switching to screen
     * sharing and stored in {@link _untoggleScreenSharing}.
     *
     * @param {boolean} didHaveVideo indicates if there was a camera video being
     * used, before switching to screen sharing.
     * @param {boolean} ignoreDidHaveVideo indicates if the camera video should be
     * ignored when switching screen sharing off.
     * @return {Promise} resolved after the screen sharing is turned off, or
     * rejected with some error (no idea what kind of error, possible GUM error)
     * in case it fails.
     * @private
     */
  1150. async _turnScreenSharingOff(didHaveVideo, ignoreDidHaveVideo) {
  1151. this._untoggleScreenSharing = null;
  1152. this.videoSwitchInProgress = true;
  1153. APP.store.dispatch(stopReceiver());
  1154. this._stopProxyConnection();
  1155. APP.store.dispatch(toggleScreenshotCaptureSummary(false));
  1156. const tracks = APP.store.getState()['features/base/tracks'];
  1157. const duration = getLocalVideoTrack(tracks)?.jitsiTrack.getDuration() ?? 0;
  1158. // If system audio was also shared stop the AudioMixerEffect and dispose of the desktop audio track.
  1159. if (this._mixerEffect) {
  1160. const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
  1161. await localAudio.setEffect(undefined);
  1162. await this._desktopAudioStream.dispose();
  1163. this._mixerEffect = undefined;
  1164. this._desktopAudioStream = undefined;
  1165. // In case there was no local audio when screen sharing was started the fact that we set the audio stream to
  1166. // null will take care of the desktop audio stream cleanup.
  1167. } else if (this._desktopAudioStream) {
  1168. await room.replaceTrack(this._desktopAudioStream, null);
  1169. this._desktopAudioStream.dispose();
  1170. this._desktopAudioStream = undefined;
  1171. }
  1172. APP.store.dispatch(setScreenAudioShareState(false));
  1173. let promise;
  1174. if (didHaveVideo && !ignoreDidHaveVideo) {
  1175. promise = createLocalTracksF({ devices: [ 'video' ] })
  1176. .then(([ stream ]) => {
  1177. logger.debug(`_turnScreenSharingOff using ${stream} for useVideoStream`);
  1178. return this.useVideoStream(stream);
  1179. })
  1180. .catch(error => {
  1181. logger.error('failed to switch back to local video', error);
  1182. return this.useVideoStream(null).then(() =>
  1183. // Still fail with the original err
  1184. Promise.reject(error)
  1185. );
  1186. });
  1187. } else {
  1188. promise = this.useVideoStream(null);
  1189. }
  1190. return promise.then(
  1191. () => {
  1192. this.videoSwitchInProgress = false;
  1193. sendAnalytics(createScreenSharingEvent('stopped',
  1194. duration === 0 ? null : duration));
  1195. logger.info('Screen sharing stopped.');
  1196. },
  1197. error => {
  1198. this.videoSwitchInProgress = false;
  1199. logger.error(`_turnScreenSharingOff failed: ${error}`);
  1200. throw error;
  1201. });
  1202. },
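// Editor's note: an illustrative sketch (not part of conference.js) of how the
// handler above is meant to be used. It assumes `conference` is the object
// defined in this file; the helper names are hypothetical.
function rememberUntoggleHandler(conference, didHaveVideo) {
    // Bind the "did we have camera video" flag now, so that later callers can
    // undo screen sharing without having to track that state themselves.
    conference._untoggleScreenSharing
        = conference._turnScreenSharingOff.bind(conference, didHaveVideo);
}

function undoScreenSharingIfNeeded(conference) {
    // Invoking the stored handler clears it and returns a Promise that resolves
    // once the camera video (if any) has been restored.
    return conference._untoggleScreenSharing
        ? conference._untoggleScreenSharing()
        : Promise.resolve();
}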
  1203. /**
  1204. * Creates desktop (screensharing) {@link JitsiLocalTrack}
  1205. *
  1206. * @param {Object} [options] - Screen sharing options that will be passed to
  1207. * createLocalTracks.
1208. * @param {Array} [options.desktopSharingSources]
  1209. * @param {Object} [options.desktopStream] - An existing desktop stream to
  1210. * use instead of creating a new desktop stream.
  1211. * @return {Promise.<JitsiLocalTrack>} - A Promise resolved with
  1212. * {@link JitsiLocalTrack} for the screensharing or rejected with
  1213. * {@link JitsiTrackError}.
  1214. *
  1215. * @private
  1216. */
  1217. _createDesktopTrack(options = {}) {
  1218. const didHaveVideo = !this.isLocalVideoMuted();
  1219. const getDesktopStreamPromise = options.desktopStream
  1220. ? Promise.resolve([ options.desktopStream ])
  1221. : createLocalTracksF({
  1222. desktopSharingSourceDevice: options.desktopSharingSources
  1223. ? null : config._desktopSharingSourceDevice,
  1224. desktopSharingSources: options.desktopSharingSources,
  1225. devices: [ 'desktop' ]
  1226. });
  1227. return getDesktopStreamPromise.then(desktopStreams => {
1228. // Stores the "untoggle" handler which remembers whether there
1229. // was any video before and whether it was muted.
  1230. this._untoggleScreenSharing
  1231. = this._turnScreenSharingOff.bind(this, didHaveVideo);
  1232. const desktopVideoStream = desktopStreams.find(stream => stream.getType() === MEDIA_TYPE.VIDEO);
  1233. const desktopAudioStream = desktopStreams.find(stream => stream.getType() === MEDIA_TYPE.AUDIO);
  1234. if (desktopAudioStream) {
  1235. desktopAudioStream.on(
  1236. JitsiTrackEvents.LOCAL_TRACK_STOPPED,
  1237. () => {
  1238. logger.debug(`Local screensharing audio track stopped. ${this.isSharingScreen}`);
1239. // Handle the case where screen share was stopped from the browser's 'screen share in progress'
1240. // window. If audio screen sharing is stopped via the normal UX flow, this point shouldn't
  1241. // be reached.
  1242. isScreenAudioShared(APP.store.getState())
  1243. && this._untoggleScreenSharing
  1244. && this._untoggleScreenSharing();
  1245. }
  1246. );
  1247. }
  1248. if (desktopVideoStream) {
  1249. desktopVideoStream.on(
  1250. JitsiTrackEvents.LOCAL_TRACK_STOPPED,
  1251. () => {
  1252. logger.debug(`Local screensharing track stopped. ${this.isSharingScreen}`);
1253. // If the stream was stopped during a screen sharing
1254. // session then we should switch back to video.
  1255. this.isSharingScreen
  1256. && this._untoggleScreenSharing
  1257. && this._untoggleScreenSharing();
  1258. }
  1259. );
  1260. }
  1261. return desktopStreams;
  1262. }, error => {
  1263. throw error;
  1264. });
  1265. },
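// Editor's note: a hypothetical sketch (not part of conference.js) of the two ways
// _createDesktopTrack can be driven: letting it request a fresh desktop capture
// internally, or handing it an already obtained stream via options.desktopStream.
// `conference` refers to the object defined in this file.
function createDesktopTrackSketch(conference, existingDesktopStream) {
    const options = existingDesktopStream
        ? { desktopStream: existingDesktopStream } // reuse, e.g. a proxied stream
        : {};

    // Resolves with an array holding the desktop video track and, when system
    // audio was shared, a desktop audio track as well.
    return conference._createDesktopTrack(options);
}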
  1266. /**
  1267. * Setup interaction between conference and UI.
  1268. */
  1269. _setupListeners() {
  1270. // add local streams when joined to the conference
  1271. room.on(JitsiConferenceEvents.CONFERENCE_JOINED, () => {
  1272. this._onConferenceJoined();
  1273. });
  1274. room.on(
  1275. JitsiConferenceEvents.CONFERENCE_JOIN_IN_PROGRESS,
  1276. () => APP.store.dispatch(conferenceJoinInProgress(room)));
  1277. room.on(
  1278. JitsiConferenceEvents.CONFERENCE_LEFT,
  1279. (...args) => {
  1280. APP.store.dispatch(conferenceTimestampChanged(0));
  1281. APP.store.dispatch(conferenceLeft(room, ...args));
  1282. });
  1283. room.on(
  1284. JitsiConferenceEvents.CONFERENCE_UNIQUE_ID_SET,
  1285. (...args) => {
  1286. // Preserve the sessionId so that the value is accessible even after room
  1287. // is disconnected.
  1288. room.sessionId = room.getMeetingUniqueId();
  1289. APP.store.dispatch(conferenceUniqueIdSet(room, ...args));
  1290. });
1291. // We want to ignore this event when the tokenAuthUrl config is set.
1292. // We are deprecating this and at some point will get rid of it.
  1293. if (!config.tokenAuthUrl) {
  1294. room.on(
  1295. JitsiConferenceEvents.AUTH_STATUS_CHANGED,
  1296. (authEnabled, authLogin) =>
  1297. APP.store.dispatch(authStatusChanged(authEnabled, authLogin)));
  1298. }
  1299. room.on(JitsiConferenceEvents.PARTCIPANT_FEATURES_CHANGED, user => {
  1300. APP.store.dispatch(updateRemoteParticipantFeatures(user));
  1301. });
  1302. room.on(JitsiConferenceEvents.USER_JOINED, (id, user) => {
  1303. if (config.iAmRecorder && user.isHiddenFromRecorder()) {
  1304. return;
  1305. }
  1306. // The logic shared between RN and web.
  1307. commonUserJoinedHandling(APP.store, room, user);
  1308. if (user.isHidden()) {
  1309. return;
  1310. }
  1311. APP.store.dispatch(updateRemoteParticipantFeatures(user));
  1312. logger.log(`USER ${id} connected:`, user);
  1313. APP.UI.addUser(user);
  1314. });
  1315. room.on(JitsiConferenceEvents.USER_LEFT, (id, user) => {
  1316. // The logic shared between RN and web.
  1317. commonUserLeftHandling(APP.store, room, user);
  1318. if (user.isHidden()) {
  1319. return;
  1320. }
  1321. logger.log(`USER ${id} LEFT:`, user);
  1322. });
  1323. room.on(JitsiConferenceEvents.USER_STATUS_CHANGED, (id, status) => {
  1324. APP.store.dispatch(participantPresenceChanged(id, status));
  1325. const user = room.getParticipantById(id);
  1326. if (user) {
  1327. APP.UI.updateUserStatus(user, status);
  1328. }
  1329. });
  1330. room.on(JitsiConferenceEvents.USER_ROLE_CHANGED, (id, role) => {
  1331. if (this.isLocalId(id)) {
  1332. logger.info(`My role changed, new role: ${role}`);
  1333. if (role === 'moderator') {
  1334. APP.store.dispatch(maybeSetLobbyChatMessageListener());
  1335. }
  1336. APP.store.dispatch(localParticipantRoleChanged(role));
  1337. APP.API.notifyUserRoleChanged(id, role);
  1338. } else {
  1339. APP.store.dispatch(participantRoleChanged(id, role));
  1340. }
  1341. });
  1342. room.on(JitsiConferenceEvents.TRACK_ADDED, track => {
  1343. if (!track || track.isLocal()) {
  1344. return;
  1345. }
  1346. if (config.iAmRecorder) {
  1347. const participant = room.getParticipantById(track.getParticipantId());
  1348. if (participant.isHiddenFromRecorder()) {
  1349. return;
  1350. }
  1351. }
  1352. APP.store.dispatch(trackAdded(track));
  1353. });
  1354. room.on(JitsiConferenceEvents.TRACK_REMOVED, track => {
  1355. if (!track || track.isLocal()) {
  1356. return;
  1357. }
  1358. APP.store.dispatch(trackRemoved(track));
  1359. });
  1360. room.on(JitsiConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED, (id, lvl) => {
  1361. const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
  1362. let newLvl = lvl;
  1363. if (this.isLocalId(id)) {
  1364. APP.store.dispatch(localParticipantAudioLevelChanged(lvl));
  1365. }
  1366. if (this.isLocalId(id) && localAudio?.isMuted()) {
  1367. newLvl = 0;
  1368. }
  1369. if (config.debug) {
  1370. this.audioLevelsMap[id] = newLvl;
  1371. if (config.debugAudioLevels) {
  1372. logger.log(`AudioLevel:${id}/${newLvl}`);
  1373. }
  1374. }
  1375. APP.UI.setAudioLevel(id, newLvl);
  1376. });
  1377. room.on(JitsiConferenceEvents.TRACK_MUTE_CHANGED, (track, participantThatMutedUs) => {
  1378. if (participantThatMutedUs) {
  1379. APP.store.dispatch(participantMutedUs(participantThatMutedUs, track));
  1380. if (this.isSharingScreen && track.isVideoTrack()) {
  1381. logger.debug('TRACK_MUTE_CHANGED while screen sharing');
  1382. this._turnScreenSharingOff(false);
  1383. }
  1384. }
  1385. });
  1386. room.on(JitsiConferenceEvents.TRACK_UNMUTE_REJECTED, track => APP.store.dispatch(destroyLocalTracks(track)));
  1387. room.on(JitsiConferenceEvents.SUBJECT_CHANGED,
  1388. subject => APP.store.dispatch(conferenceSubjectChanged(subject)));
  1389. room.on(
  1390. JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
  1391. (leavingIds, enteringIds) =>
  1392. APP.UI.handleLastNEndpoints(leavingIds, enteringIds));
  1393. room.on(
  1394. JitsiConferenceEvents.P2P_STATUS,
  1395. (jitsiConference, p2p) =>
  1396. APP.store.dispatch(p2pStatusChanged(p2p)));
  1397. room.on(
  1398. JitsiConferenceEvents.DOMINANT_SPEAKER_CHANGED,
  1399. (dominant, previous, silence) => {
  1400. APP.store.dispatch(dominantSpeakerChanged(dominant, previous, Boolean(silence), room));
  1401. });
  1402. room.on(
  1403. JitsiConferenceEvents.CONFERENCE_CREATED_TIMESTAMP,
  1404. conferenceTimestamp => {
  1405. APP.store.dispatch(conferenceTimestampChanged(conferenceTimestamp));
  1406. APP.API.notifyConferenceCreatedTimestamp(conferenceTimestamp);
  1407. }
  1408. );
  1409. room.on(
  1410. JitsiConferenceEvents.DISPLAY_NAME_CHANGED,
  1411. (id, displayName) => {
  1412. const formattedDisplayName
  1413. = getNormalizedDisplayName(displayName);
  1414. const state = APP.store.getState();
  1415. const {
  1416. defaultRemoteDisplayName
  1417. } = state['features/base/config'];
  1418. APP.store.dispatch(participantUpdated({
  1419. conference: room,
  1420. id,
  1421. name: formattedDisplayName
  1422. }));
  1423. const virtualScreenshareParticipantId = getVirtualScreenshareParticipantByOwnerId(state, id)?.id;
  1424. if (virtualScreenshareParticipantId) {
  1425. APP.store.dispatch(
  1426. screenshareParticipantDisplayNameChanged(virtualScreenshareParticipantId, formattedDisplayName)
  1427. );
  1428. }
  1429. APP.API.notifyDisplayNameChanged(id, {
  1430. displayName: formattedDisplayName,
  1431. formattedDisplayName:
  1432. appendSuffix(
  1433. formattedDisplayName
  1434. || defaultRemoteDisplayName)
  1435. });
  1436. }
  1437. );
  1438. room.on(
  1439. JitsiConferenceEvents.SILENT_STATUS_CHANGED,
  1440. (id, isSilent) => {
  1441. APP.store.dispatch(participantUpdated({
  1442. conference: room,
  1443. id,
  1444. isSilent
  1445. }));
  1446. }
  1447. );
  1448. room.on(
  1449. JitsiConferenceEvents.BOT_TYPE_CHANGED,
  1450. (id, botType) => {
  1451. APP.store.dispatch(participantUpdated({
  1452. conference: room,
  1453. id,
  1454. botType
  1455. }));
  1456. }
  1457. );
  1458. room.on(
  1459. JitsiConferenceEvents.TRANSCRIPTION_STATUS_CHANGED,
  1460. (status, id, abruptly) => {
  1461. if (status === JitsiMeetJS.constants.transcriptionStatus.ON) {
  1462. APP.store.dispatch(transcriberJoined(id));
  1463. } else if (status === JitsiMeetJS.constants.transcriptionStatus.OFF) {
  1464. APP.store.dispatch(transcriberLeft(id, abruptly));
  1465. }
  1466. });
  1467. room.on(
  1468. JitsiConferenceEvents.ENDPOINT_MESSAGE_RECEIVED,
  1469. (participant, data) => {
  1470. APP.store.dispatch(endpointMessageReceived(participant, data));
  1471. if (data?.name === ENDPOINT_TEXT_MESSAGE_NAME) {
  1472. APP.API.notifyEndpointTextMessageReceived({
  1473. senderInfo: {
  1474. jid: participant.getJid(),
  1475. id: participant.getId()
  1476. },
  1477. eventData: data
  1478. });
  1479. }
  1480. });
  1481. room.on(
  1482. JitsiConferenceEvents.NON_PARTICIPANT_MESSAGE_RECEIVED,
  1483. (id, data) => {
  1484. APP.store.dispatch(nonParticipantMessageReceived(id, data));
  1485. APP.API.notifyNonParticipantMessageReceived(id, data);
  1486. });
  1487. room.on(
  1488. JitsiConferenceEvents.LOCK_STATE_CHANGED,
  1489. (...args) => APP.store.dispatch(lockStateChanged(room, ...args)));
  1490. room.on(
  1491. JitsiConferenceEvents.PROPERTIES_CHANGED,
  1492. properties => APP.store.dispatch(conferencePropertiesChanged(properties)));
  1493. room.on(JitsiConferenceEvents.KICKED, (participant, reason, isReplaced) => {
  1494. if (isReplaced) {
1495. // This event triggers when the local participant is kicked; `participant`
1496. // is the kicker. In the replace-participant case the kicker is undefined,
1497. // as the server initiated it. We mark the local participant in the store
1498. // as being replaced based on the jwt.
  1499. const localParticipant = getLocalParticipant(APP.store.getState());
  1500. APP.store.dispatch(participantUpdated({
  1501. conference: room,
  1502. id: localParticipant.id,
  1503. isReplaced
  1504. }));
1505. // We send readyToClose when the kicked participant is replaced so that
1506. // the embedding app can choose to dispose the iframe API in the handler.
  1507. APP.API.notifyReadyToClose();
  1508. }
  1509. APP.store.dispatch(kickedOut(room, participant));
  1510. });
  1511. room.on(JitsiConferenceEvents.PARTICIPANT_KICKED, (kicker, kicked) => {
  1512. APP.store.dispatch(participantKicked(kicker, kicked));
  1513. });
  1514. room.on(JitsiConferenceEvents.PARTICIPANT_SOURCE_UPDATED,
  1515. jitsiParticipant => {
  1516. APP.store.dispatch(participantSourcesUpdated(jitsiParticipant));
  1517. });
  1518. room.on(JitsiConferenceEvents.SUSPEND_DETECTED, () => {
  1519. APP.store.dispatch(suspendDetected());
  1520. });
  1521. room.on(
  1522. JitsiConferenceEvents.AUDIO_UNMUTE_PERMISSIONS_CHANGED,
  1523. disableAudioMuteChange => {
  1524. APP.store.dispatch(setAudioUnmutePermissions(disableAudioMuteChange));
  1525. });
  1526. room.on(
  1527. JitsiConferenceEvents.VIDEO_UNMUTE_PERMISSIONS_CHANGED,
  1528. disableVideoMuteChange => {
  1529. APP.store.dispatch(setVideoUnmutePermissions(disableVideoMuteChange));
  1530. });
  1531. room.on(
  1532. JitsiE2ePingEvents.E2E_RTT_CHANGED,
  1533. (...args) => APP.store.dispatch(e2eRttChanged(...args)));
  1534. room.addCommandListener(this.commands.defaults.ETHERPAD,
  1535. ({ value }) => {
  1536. APP.UI.initEtherpad(value);
  1537. }
  1538. );
  1539. room.addCommandListener(this.commands.defaults.EMAIL, (data, from) => {
  1540. APP.store.dispatch(participantUpdated({
  1541. conference: room,
  1542. id: from,
  1543. email: data.value
  1544. }));
  1545. });
  1546. room.addCommandListener(
  1547. this.commands.defaults.AVATAR_URL,
  1548. (data, from) => {
  1549. const participant = getParticipantByIdOrUndefined(APP.store, from);
1550. // If already set from presence (jwt), skip the command processing.
  1551. if (!participant?.avatarURL) {
  1552. APP.store.dispatch(
  1553. participantUpdated({
  1554. conference: room,
  1555. id: from,
  1556. avatarURL: data.value
  1557. }));
  1558. }
  1559. });
  1560. room.on(
  1561. JitsiConferenceEvents.START_MUTED_POLICY_CHANGED,
  1562. ({ audio, video }) => {
  1563. APP.store.dispatch(
  1564. onStartMutedPolicyChanged(audio, video));
  1565. }
  1566. );
  1567. room.on(JitsiConferenceEvents.STARTED_MUTED, () => {
  1568. const audioMuted = room.isStartAudioMuted();
  1569. const videoMuted = room.isStartVideoMuted();
  1570. const localTracks = getLocalTracks(APP.store.getState()['features/base/tracks']);
  1571. const promises = [];
  1572. APP.store.dispatch(setAudioMuted(audioMuted));
  1573. APP.store.dispatch(setVideoMuted(videoMuted));
  1574. // Remove the tracks from the peerconnection.
  1575. for (const track of localTracks) {
  1576. // Always add the track on Safari because of a known issue where audio playout doesn't happen
  1577. // if the user joins audio and video muted, i.e., if there is no local media capture.
  1578. if (audioMuted && track.jitsiTrack?.getType() === MEDIA_TYPE.AUDIO && !browser.isWebKitBased()) {
  1579. promises.push(this.useAudioStream(null));
  1580. }
  1581. if (videoMuted && track.jitsiTrack?.getType() === MEDIA_TYPE.VIDEO) {
  1582. promises.push(this.useVideoStream(null));
  1583. }
  1584. }
  1585. Promise.allSettled(promises)
  1586. .then(() => {
  1587. APP.store.dispatch(showNotification({
  1588. titleKey: 'notify.mutedTitle',
  1589. descriptionKey: 'notify.muted'
  1590. }, NOTIFICATION_TIMEOUT_TYPE.SHORT));
  1591. });
  1592. });
  1593. room.on(
  1594. JitsiConferenceEvents.DATA_CHANNEL_OPENED, () => {
  1595. APP.store.dispatch(dataChannelOpened());
  1596. APP.store.dispatch(hideNotification(DATA_CHANNEL_CLOSED_NOTIFICATION_ID));
  1597. }
  1598. );
  1599. room.on(
  1600. JitsiConferenceEvents.DATA_CHANNEL_CLOSED, ev => {
  1601. const state = APP.store.getState();
  1602. const { dataChannelOpen } = state['features/base/conference'];
  1603. const timeout = typeof dataChannelOpen === 'undefined' ? 15000 : 60000;
1604. // Show the notification only when the data channel connection doesn't get re-established within 60 secs
1605. // if it was already established at the beginning of the call; show it sooner otherwise. This notification
1606. // can be confusing and alarming to users even when there is no significant impact on user experience
1607. // if the reconnect happens immediately.
  1608. setTimeout(() => {
  1609. const { dataChannelOpen: open } = APP.store.getState()['features/base/conference'];
  1610. if (!open) {
  1611. const descriptionKey = getSsrcRewritingFeatureFlag(state)
  1612. ? 'notify.dataChannelClosedDescriptionWithAudio' : 'notify.dataChannelClosedDescription';
  1613. const titleKey = getSsrcRewritingFeatureFlag(state)
  1614. ? 'notify.dataChannelClosedWithAudio' : 'notify.dataChannelClosed';
  1615. APP.store.dispatch(dataChannelClosed(ev.code, ev.reason));
  1616. APP.store.dispatch(showWarningNotification({
  1617. descriptionKey,
  1618. titleKey,
  1619. uid: DATA_CHANNEL_CLOSED_NOTIFICATION_ID
  1620. }, NOTIFICATION_TIMEOUT_TYPE.STICKY));
  1621. }
  1622. }, timeout);
  1623. }
  1624. );
  1625. },
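// Editor's note: an illustrative sketch (not part of conference.js) of the listener
// pattern used throughout _setupListeners: each lib-jitsi-meet event is forwarded
// into the redux store and/or the external API. The handler body below is
// hypothetical; SUBJECT_CHANGED is one of the events wired above and
// JitsiConferenceEvents is the same import this file already uses.
function listenForSubjectChanges(room, onSubject) {
    room.on(
        JitsiConferenceEvents.SUBJECT_CHANGED,
        subject => {
            // Forward the new subject to whatever consumer was supplied.
            onSubject(subject);
        });
}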
  1626. /**
  1627. * Handles audio device changes.
  1628. *
1629. * @param {string} micDeviceId - The new device id.
  1630. * @returns {Promise}
  1631. */
  1632. async onAudioDeviceChanged(micDeviceId) {
  1633. const audioWasMuted = this.isLocalAudioMuted();
  1634. // Disable noise suppression if it was enabled on the previous track.
  1635. await APP.store.dispatch(setNoiseSuppressionEnabled(false));
  1636. // When the 'default' mic needs to be selected, we need to pass the real device id to gUM instead of
  1637. // 'default' in order to get the correct MediaStreamTrack from chrome because of the following bug.
  1638. // https://bugs.chromium.org/p/chromium/issues/detail?id=997689.
  1639. const isDefaultMicSelected = micDeviceId === 'default';
  1640. const selectedDeviceId = isDefaultMicSelected
  1641. ? getDefaultDeviceId(APP.store.getState(), 'audioInput')
  1642. : micDeviceId;
  1643. logger.info(`Switching audio input device to ${selectedDeviceId}`);
  1644. sendAnalytics(createDeviceChangedEvent('audio', 'input'));
  1645. createLocalTracksF({
  1646. devices: [ 'audio' ],
  1647. micDeviceId: selectedDeviceId
  1648. })
  1649. .then(([ stream ]) => {
  1650. // if audio was muted before changing the device, mute
  1651. // with the new device
  1652. if (audioWasMuted) {
  1653. return stream.mute()
  1654. .then(() => stream);
  1655. }
  1656. return stream;
  1657. })
  1658. .then(async stream => {
  1659. await this._maybeApplyAudioMixerEffect(stream);
  1660. return this.useAudioStream(stream);
  1661. })
  1662. .then(() => {
  1663. const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
  1664. if (localAudio && isDefaultMicSelected) {
  1665. // workaround for the default device to be shown as selected in the
  1666. // settings even when the real device id was passed to gUM because of the
  1667. // above mentioned chrome bug.
  1668. localAudio._realDeviceId = localAudio.deviceId = 'default';
  1669. }
  1670. })
  1671. .catch(err => {
  1672. logger.error(`Failed to switch to selected audio input device ${selectedDeviceId}, error=${err}`);
  1673. APP.store.dispatch(notifyMicError(err));
  1674. });
  1675. },
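// Editor's note: a minimal sketch (not part of conference.js) of the 'default'
// microphone workaround used above: when the user picks the 'default' entry we
// resolve it to the real device id before calling gUM, because of the Chromium
// bug referenced in the comments. `state` is the redux state and
// getDefaultDeviceId is the same helper this file imports.
function resolveMicDeviceId(state, micDeviceId) {
    return micDeviceId === 'default'
        ? getDefaultDeviceId(state, 'audioInput')
        : micDeviceId;
}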
  1676. /**
  1677. * Handles video device changes.
  1678. *
  1679. * @param {string} cameraDeviceId - The new device id.
  1680. * @returns {void}
  1681. */
  1682. onVideoDeviceChanged(cameraDeviceId) {
  1683. const videoWasMuted = this.isLocalVideoMuted();
  1684. const localVideoTrack = getLocalJitsiVideoTrack(APP.store.getState());
  1685. if (localVideoTrack?.getDeviceId() === cameraDeviceId) {
  1686. return;
  1687. }
  1688. sendAnalytics(createDeviceChangedEvent('video', 'input'));
  1689. createLocalTracksF({
  1690. devices: [ 'video' ],
  1691. cameraDeviceId
  1692. })
  1693. .then(([ stream ]) => {
  1694. // if we are in audio only mode or video was muted before
  1695. // changing device, then mute
  1696. if (this.isAudioOnly() || videoWasMuted) {
  1697. return stream.mute()
  1698. .then(() => stream);
  1699. }
  1700. return stream;
  1701. })
  1702. .then(stream => {
  1703. logger.info(`Switching the local video device to ${cameraDeviceId}.`);
  1704. return this.useVideoStream(stream);
  1705. })
  1706. .catch(error => {
  1707. logger.error(`Failed to switch to selected camera:${cameraDeviceId}, error:${error}`);
  1708. return APP.store.dispatch(notifyCameraError(error));
  1709. });
  1710. },
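// Editor's note: a hypothetical wiring sketch (not part of conference.js). A
// settings panel might route device selections to the two handlers above like
// this; `conference` is the object defined in this file and the function name
// is illustrative.
function applyDeviceSelection(conference, { micDeviceId, cameraDeviceId }) {
    if (cameraDeviceId) {
        // onVideoDeviceChanged is fire-and-forget (returns void) and is a no-op
        // when the selected camera is unchanged.
        conference.onVideoDeviceChanged(cameraDeviceId);
    }

    // onAudioDeviceChanged is async, so a caller can await it if needed.
    return micDeviceId
        ? conference.onAudioDeviceChanged(micDeviceId)
        : Promise.resolve();
}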
  1711. /**
  1712. * Handles audio only changes.
  1713. */
  1714. onToggleAudioOnly() {
  1715. // Immediately update the UI by having remote videos and the large video update themselves.
  1716. const displayedUserId = APP.UI.getLargeVideoID();
  1717. if (displayedUserId) {
  1718. APP.UI.updateLargeVideo(displayedUserId, true);
  1719. }
  1720. },
  1721. /**
  1722. * Cleanups local conference on suspend.
  1723. */
  1724. onSuspendDetected() {
1725. // After wake-up we will be in a state where the conference is left and
1726. // a dialog will be shown to the user.
1727. // We do not want video/audio because we show an overlay and, after it,
1728. // the user needs to rejoin or close; while waking up we could detect
1729. // the camera wake-up as a problem with the device.
1730. // We also do not care about device changes, which happen
1731. // on resume after suspending the PC.
  1732. if (this.deviceChangeListener) {
  1733. JitsiMeetJS.mediaDevices.removeEventListener(
  1734. JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED,
  1735. this.deviceChangeListener);
  1736. }
  1737. },
  1738. /**
  1739. * Callback invoked when the conference has been successfully joined.
  1740. * Initializes the UI and various other features.
  1741. *
  1742. * @private
  1743. * @returns {void}
  1744. */
  1745. _onConferenceJoined() {
  1746. const { dispatch } = APP.store;
  1747. APP.UI.initConference();
  1748. dispatch(conferenceJoined(room));
  1749. const jwt = APP.store.getState()['features/base/jwt'];
  1750. if (jwt?.user?.hiddenFromRecorder) {
  1751. dispatch(muteLocal(true, MEDIA_TYPE.AUDIO));
  1752. dispatch(muteLocal(true, MEDIA_TYPE.VIDEO));
  1753. dispatch(setAudioUnmutePermissions(true, true));
  1754. dispatch(setVideoUnmutePermissions(true, true));
  1755. }
  1756. },
  1757. /**
  1758. * Updates the list of current devices.
  1759. * @param {boolean} setDeviceListChangeHandler - Whether to add the deviceList change handlers.
  1760. * @private
  1761. * @returns {Promise}
  1762. */
  1763. _initDeviceList(setDeviceListChangeHandler = false) {
  1764. const { mediaDevices } = JitsiMeetJS;
  1765. if (mediaDevices.isDeviceListAvailable()
  1766. && mediaDevices.isDeviceChangeAvailable()) {
  1767. if (setDeviceListChangeHandler) {
  1768. this.deviceChangeListener = devices =>
  1769. window.setTimeout(() => this._onDeviceListChanged(devices), 0);
  1770. mediaDevices.addEventListener(
  1771. JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED,
  1772. this.deviceChangeListener);
  1773. }
  1774. const { dispatch } = APP.store;
  1775. return dispatch(getAvailableDevices())
  1776. .then(() => {
  1777. this.updateAudioIconEnabled();
  1778. this.updateVideoIconEnabled();
  1779. });
  1780. }
  1781. return Promise.resolve();
  1782. },
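// Editor's note: an illustrative sketch (not part of conference.js) of the listener
// lifecycle used here: _initDeviceList(true) registers this.deviceChangeListener,
// and onSuspendDetected()/hangup() later remove the very same reference, which is
// why the handler is stored on the object. The helper below is hypothetical;
// JitsiMeetJS and JitsiMediaDevicesEvents are the imports this file already uses.
function releaseDeviceChangeListener(conference) {
    if (conference.deviceChangeListener) {
        JitsiMeetJS.mediaDevices.removeEventListener(
            JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED,
            conference.deviceChangeListener);
        conference.deviceChangeListener = undefined;
    }
}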
  1783. /**
  1784. * Event listener for JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED to
  1785. * handle change of available media devices.
  1786. * @private
  1787. * @param {MediaDeviceInfo[]} devices
  1788. * @returns {Promise}
  1789. */
  1790. async _onDeviceListChanged(devices) {
  1791. const state = APP.store.getState();
  1792. const { filteredDevices, ignoredDevices } = filterIgnoredDevices(devices);
  1793. const oldDevices = state['features/base/devices'].availableDevices;
  1794. if (!areDevicesDifferent(flattenAvailableDevices(oldDevices), filteredDevices)) {
  1795. return Promise.resolve();
  1796. }
  1797. logDevices(ignoredDevices, 'Ignored devices on device list changed:');
  1798. const localAudio = getLocalJitsiAudioTrack(state);
  1799. const localVideo = getLocalJitsiVideoTrack(state);
  1800. APP.store.dispatch(updateDeviceList(filteredDevices));
  1801. // Firefox users can choose their preferred device in the gUM prompt. In that case
  1802. // we should respect that and not attempt to switch to the preferred device from
  1803. // our settings.
  1804. const newLabelsOnly = mediaDeviceHelper.newDeviceListAddedLabelsOnly(oldDevices, filteredDevices);
  1805. const newDevices
  1806. = mediaDeviceHelper.getNewMediaDevicesAfterDeviceListChanged(
  1807. filteredDevices,
  1808. localVideo,
  1809. localAudio,
  1810. newLabelsOnly);
  1811. const promises = [];
  1812. const requestedInput = {
  1813. audio: Boolean(newDevices.audioinput),
  1814. video: Boolean(newDevices.videoinput)
  1815. };
  1816. if (typeof newDevices.audiooutput !== 'undefined') {
  1817. const { dispatch } = APP.store;
  1818. const setAudioOutputPromise
  1819. = setAudioOutputDeviceId(newDevices.audiooutput, dispatch)
  1820. .catch(err => {
  1821. logger.error(`Failed to set the audio output device to ${newDevices.audiooutput} - ${err}`);
  1822. });
  1823. promises.push(setAudioOutputPromise);
  1824. }
  1825. // Handles the use case when the default device is changed (we are always stopping the streams because it's
  1826. // simpler):
  1827. // If the default device is changed we need to first stop the local streams and then call GUM. Otherwise GUM
  1828. // will return a stream using the old default device.
  1829. if (requestedInput.audio && localAudio) {
  1830. localAudio.stopStream();
  1831. }
  1832. if (requestedInput.video && localVideo) {
  1833. localVideo.stopStream();
  1834. }
  1835. // Let's handle unknown/non-preferred devices
  1836. const newAvailDevices = APP.store.getState()['features/base/devices'].availableDevices;
  1837. let newAudioDevices = [];
  1838. let oldAudioDevices = [];
  1839. if (typeof newDevices.audiooutput === 'undefined') {
  1840. newAudioDevices = newAvailDevices.audioOutput;
  1841. oldAudioDevices = oldDevices.audioOutput;
  1842. }
  1843. if (!requestedInput.audio) {
  1844. newAudioDevices = newAudioDevices.concat(newAvailDevices.audioInput);
  1845. oldAudioDevices = oldAudioDevices.concat(oldDevices.audioInput);
  1846. }
  1847. // check for audio
  1848. if (newAudioDevices.length > 0) {
  1849. APP.store.dispatch(checkAndNotifyForNewDevice(newAudioDevices, oldAudioDevices));
  1850. }
  1851. // check for video
  1852. if (requestedInput.video) {
  1853. APP.store.dispatch(checkAndNotifyForNewDevice(newAvailDevices.videoInput, oldDevices.videoInput));
  1854. }
  1855. // When the 'default' mic needs to be selected, we need to pass the real device id to gUM instead of 'default'
  1856. // in order to get the correct MediaStreamTrack from chrome because of the following bug.
  1857. // https://bugs.chromium.org/p/chromium/issues/detail?id=997689
  1858. const hasDefaultMicChanged = newDevices.audioinput === 'default';
  1859. // When the local video is muted and a preferred device is connected, update the settings and remove the track
  1860. // from the conference. A new track will be created and replaced when the user unmutes their camera.
  1861. if (requestedInput.video && this.isLocalVideoMuted()) {
  1862. APP.store.dispatch(updateSettings({
  1863. cameraDeviceId: newDevices.videoinput
  1864. }));
  1865. requestedInput.video = false;
  1866. delete newDevices.videoinput;
  1867. // Remove the track from the conference.
  1868. if (localVideo) {
  1869. await this.useVideoStream(null);
  1870. logger.debug('_onDeviceListChanged: Removed the current video track.');
  1871. }
  1872. }
  1873. // When the local audio is muted and a preferred device is connected, update the settings and remove the track
  1874. // from the conference. A new track will be created and replaced when the user unmutes their mic.
  1875. if (requestedInput.audio && this.isLocalAudioMuted()) {
  1876. APP.store.dispatch(updateSettings({
  1877. micDeviceId: newDevices.audioinput
  1878. }));
  1879. requestedInput.audio = false;
  1880. delete newDevices.audioinput;
  1881. // Remove the track from the conference.
  1882. if (localAudio) {
  1883. await this.useAudioStream(null);
  1884. logger.debug('_onDeviceListChanged: Removed the current audio track.');
  1885. }
  1886. }
  1887. // Create the tracks and replace them only if the user is unmuted.
  1888. if (requestedInput.audio || requestedInput.video) {
  1889. let tracks = [];
  1890. const realAudioDeviceId = hasDefaultMicChanged
  1891. ? getDefaultDeviceId(APP.store.getState(), 'audioInput') : newDevices.audioinput;
  1892. try {
  1893. tracks = await mediaDeviceHelper.createLocalTracksAfterDeviceListChanged(
  1894. createLocalTracksF,
  1895. requestedInput.video ? newDevices.videoinput : null,
  1896. requestedInput.audio ? realAudioDeviceId : null
  1897. );
  1898. } catch (error) {
  1899. logger.error(`Track creation failed on device change, ${error}`);
  1900. return Promise.reject(error);
  1901. }
  1902. for (const track of tracks) {
  1903. if (track.isAudioTrack()) {
  1904. promises.push(
  1905. this.useAudioStream(track)
  1906. .then(() => {
  1907. hasDefaultMicChanged && (track._realDeviceId = track.deviceId = 'default');
  1908. }));
  1909. } else {
  1910. promises.push(
  1911. this.useVideoStream(track));
  1912. }
  1913. }
  1914. }
  1915. return Promise.all(promises)
  1916. .then(() => {
  1917. this.updateAudioIconEnabled();
  1918. this.updateVideoIconEnabled();
  1919. });
  1920. },
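// Editor's note: a simplified sketch (not part of conference.js) of the order of
// operations _onDeviceListChanged relies on when the default device changes: stop
// the currently captured stream first, then call gUM, otherwise gUM may hand back
// a stream still bound to the old default device. Names here are illustrative.
async function switchToNewDefaultDevice(localTrack, createTrackForNewDevice) {
    if (localTrack) {
        // Release the old capture before asking for the new one.
        localTrack.stopStream();
    }

    // Only now request a fresh track; it will use the new default device.
    return createTrackForNewDevice();
}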
  1921. /**
  1922. * Determines whether or not the audio button should be enabled.
  1923. */
  1924. updateAudioIconEnabled() {
  1925. const localAudio = getLocalJitsiAudioTrack(APP.store.getState());
  1926. const audioMediaDevices = APP.store.getState()['features/base/devices'].availableDevices.audioInput;
  1927. const audioDeviceCount = audioMediaDevices ? audioMediaDevices.length : 0;
  1928. // The audio functionality is considered available if there are any
  1929. // audio devices detected or if the local audio stream already exists.
  1930. const available = audioDeviceCount > 0 || Boolean(localAudio);
  1931. APP.store.dispatch(setAudioAvailable(available));
  1932. },
  1933. /**
  1934. * Determines whether or not the video button should be enabled.
  1935. */
  1936. updateVideoIconEnabled() {
  1937. const videoMediaDevices
  1938. = APP.store.getState()['features/base/devices'].availableDevices.videoInput;
  1939. const videoDeviceCount
  1940. = videoMediaDevices ? videoMediaDevices.length : 0;
  1941. const localVideo = getLocalJitsiVideoTrack(APP.store.getState());
  1942. // The video functionality is considered available if there are any
  1943. // video devices detected or if there is local video stream already
  1944. // active which could be either screensharing stream or a video track
  1945. // created before the permissions were rejected (through browser
  1946. // config).
  1947. const available = videoDeviceCount > 0 || Boolean(localVideo);
  1948. APP.store.dispatch(setVideoAvailable(available));
  1949. APP.API.notifyVideoAvailabilityChanged(available);
  1950. },
  1951. /**
  1952. * Disconnect from the conference and optionally request user feedback.
1953. * @param {boolean} [requestFeedback=false] whether user feedback should be
1954. * requested
1955. * @param {string} [hangupReason] the reason for leaving the meeting
  1956. */
  1957. hangup(requestFeedback = false, hangupReason) {
  1958. APP.store.dispatch(disableReceiver());
  1959. this._stopProxyConnection();
  1960. APP.store.dispatch(destroyLocalTracks());
  1961. this._localTracksInitialized = false;
1962. // Remove event listeners so they no longer fire callbacks.
  1963. if (this.deviceChangeListener) {
  1964. JitsiMeetJS.mediaDevices.removeEventListener(
  1965. JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED,
  1966. this.deviceChangeListener);
  1967. }
  1968. let feedbackResultPromise = Promise.resolve({});
  1969. if (requestFeedback) {
  1970. const feedbackDialogClosed = (feedbackResult = {}) => {
  1971. if (!feedbackResult.wasDialogShown && hangupReason) {
  1972. return APP.store.dispatch(
  1973. openLeaveReasonDialog(hangupReason)).then(() => feedbackResult);
  1974. }
  1975. return Promise.resolve(feedbackResult);
  1976. };
  1977. feedbackResultPromise
  1978. = APP.store.dispatch(maybeOpenFeedbackDialog(room, hangupReason))
  1979. .then(feedbackDialogClosed, feedbackDialogClosed);
  1980. }
  1981. const leavePromise = this.leaveRoom().catch(() => Promise.resolve());
  1982. Promise.allSettled([ feedbackResultPromise, leavePromise ]).then(([ feedback, _ ]) => {
  1983. this._room = undefined;
  1984. room = undefined;
  1985. /**
  1986. * Don't call {@code notifyReadyToClose} if the promotional page flag is set
  1987. * and let the page take care of sending the message, since there will be
  1988. * a redirect to the page anyway.
  1989. */
  1990. if (!interfaceConfig.SHOW_PROMOTIONAL_CLOSE_PAGE) {
  1991. APP.API.notifyReadyToClose();
  1992. }
  1993. APP.store.dispatch(maybeRedirectToWelcomePage(feedback.value ?? {}));
  1994. });
  1995. },
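// Editor's note: a hypothetical call-site sketch (not part of conference.js).
// `conference` is the object defined in this file; the reason string is
// illustrative.
function hangUpWithFeedback(conference) {
    // Request feedback; if the feedback dialog is not shown, the reason is
    // offered to the leave-reason dialog instead (see hangup above).
    conference.hangup(true /* requestFeedback */, 'hangup-button');
}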
  1996. /**
  1997. * Leaves the room.
  1998. *
  1999. * @param {boolean} doDisconnect - Whether leaving the room should also terminate the connection.
  2000. * @param {string} reason - reason for leaving the room.
  2001. * @returns {Promise}
  2002. */
  2003. leaveRoom(doDisconnect = true, reason = '') {
  2004. APP.store.dispatch(conferenceWillLeave(room));
  2005. const maybeDisconnect = () => {
  2006. if (doDisconnect) {
  2007. return disconnect();
  2008. }
  2009. };
  2010. if (room && room.isJoined()) {
  2011. return room.leave(reason).then(() => maybeDisconnect())
  2012. .catch(e => {
  2013. logger.error(e);
  2014. return maybeDisconnect();
  2015. });
  2016. }
  2017. return maybeDisconnect();
  2018. },
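// Editor's note: a hypothetical sketch (not part of conference.js) of leaving the
// room while keeping the connection alive, e.g. to join another room later.
// `conference` is the object defined in this file.
function leaveButStayConnected(conference, reason = '') {
    // Passing doDisconnect = false skips the disconnect() call in leaveRoom.
    return conference.leaveRoom(false /* doDisconnect */, reason);
}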
  2019. /**
  2020. * Changes the email for the local user
2021. * @param {string} email - the new email
  2022. */
  2023. changeLocalEmail(email = '') {
  2024. const formattedEmail = String(email).trim();
  2025. APP.store.dispatch(updateSettings({
  2026. email: formattedEmail
  2027. }));
  2028. sendData(commands.EMAIL, formattedEmail);
  2029. },
  2030. /**
  2031. * Changes the avatar url for the local user
2032. * @param {string} url - the new url
  2033. */
  2034. changeLocalAvatarUrl(url = '') {
  2035. const formattedUrl = String(url).trim();
  2036. APP.store.dispatch(updateSettings({
  2037. avatarURL: formattedUrl
  2038. }));
2039. sendData(commands.AVATAR_URL, formattedUrl);
  2040. },
  2041. /**
  2042. * Sends a message via the data channel.
  2043. * @param {string} to the id of the endpoint that should receive the
  2044. * message. If "" - the message will be sent to all participants.
  2045. * @param {object} payload the payload of the message.
  2046. * @throws NetworkError or InvalidStateError or Error if the operation
  2047. * fails.
  2048. */
  2049. sendEndpointMessage(to, payload) {
  2050. room.sendEndpointMessage(to, payload);
  2051. },
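// Editor's note: an illustrative sketch (not part of conference.js). The payload
// shape mirrors what the ENDPOINT_MESSAGE_RECEIVED handler above checks for (a
// `name` equal to ENDPOINT_TEXT_MESSAGE_NAME); treat the exact shape as an
// assumption rather than a documented contract. `conference` is the object
// defined in this file.
function sendEndpointTextMessageSketch(conference, to, text) {
    conference.sendEndpointMessage(to, {
        name: ENDPOINT_TEXT_MESSAGE_NAME,
        text
    });
}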
  2052. /**
2053. * Callback invoked by the external api to create or update a direct connection
  2054. * from the local client to an external client.
  2055. *
  2056. * @param {Object} event - The object containing information that should be
  2057. * passed to the {@code ProxyConnectionService}.
  2058. * @returns {void}
  2059. */
  2060. onProxyConnectionEvent(event) {
  2061. if (!this._proxyConnection) {
  2062. this._proxyConnection = new JitsiMeetJS.ProxyConnectionService({
  2063. /**
  2064. * Pass the {@code JitsiConnection} instance which will be used
  2065. * to fetch TURN credentials.
  2066. */
  2067. jitsiConnection: APP.connection,
  2068. /**
  2069. * The proxy connection feature is currently tailored towards
  2070. * taking a proxied video stream and showing it as a local
  2071. * desktop screen.
  2072. */
  2073. convertVideoToDesktop: true,
  2074. /**
  2075. * Callback invoked when the connection has been closed
  2076. * automatically. Triggers cleanup of screensharing if active.
  2077. *
  2078. * @returns {void}
  2079. */
  2080. onConnectionClosed: () => {
  2081. if (this._untoggleScreenSharing) {
  2082. this._untoggleScreenSharing();
  2083. }
  2084. },
  2085. /**
  2086. * Callback invoked to pass messages from the local client back
  2087. * out to the external client.
  2088. *
  2089. * @param {string} peerJid - The jid of the intended recipient
  2090. * of the message.
  2091. * @param {Object} data - The message that should be sent. For
  2092. * screensharing this is an iq.
  2093. * @returns {void}
  2094. */
  2095. onSendMessage: (peerJid, data) =>
  2096. APP.API.sendProxyConnectionEvent({
  2097. data,
  2098. to: peerJid
  2099. }),
  2100. /**
  2101. * Callback invoked when the remote peer of the proxy connection
  2102. * has provided a video stream, intended to be used as a local
  2103. * desktop stream.
  2104. *
  2105. * @param {JitsiLocalTrack} remoteProxyStream - The media
  2106. * stream to use as a local desktop stream.
  2107. * @returns {void}
  2108. */
  2109. onRemoteStream: desktopStream => {
  2110. if (desktopStream.videoType !== 'desktop') {
  2111. logger.warn('Received a non-desktop stream to proxy.');
  2112. desktopStream.dispose();
  2113. return;
  2114. }
  2115. APP.store.dispatch(toggleScreensharingA(undefined, false, { desktopStream }));
  2116. }
  2117. });
  2118. }
  2119. this._proxyConnection.processMessage(event);
  2120. },
  2121. /**
  2122. * Sets the video muted status.
  2123. */
  2124. setVideoMuteStatus() {
  2125. APP.UI.setVideoMuted(this.getMyUserId());
  2126. },
  2127. /**
  2128. * Dispatches the passed in feedback for submission. The submitted score
2129. * should be a number between 1 and 5 inclusive, or -1 for no score.
  2130. *
  2131. * @param {number} score - a number between 1 and 5 (inclusive) or -1 for no
  2132. * score.
  2133. * @param {string} message - An optional message to attach to the feedback
  2134. * in addition to the score.
  2135. * @returns {void}
  2136. */
  2137. submitFeedback(score = -1, message = '') {
  2138. if (score === -1 || (score >= 1 && score <= 5)) {
  2139. APP.store.dispatch(submitFeedback(score, message, room));
  2140. }
  2141. },
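// Editor's note: a tiny usage sketch (not part of conference.js). Scores outside
// the 1..5 range (other than -1) are silently ignored by submitFeedback above.
// `conference` is the object defined in this file.
function submitFeedbackSketch(conference) {
    conference.submitFeedback(5, 'Great call quality');      // accepted
    conference.submitFeedback(-1, 'No score, just a note');  // accepted, no score
    conference.submitFeedback(0, 'Out of range');            // ignored
}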
  2142. /**
  2143. * Terminates any proxy screensharing connection that is active.
  2144. *
  2145. * @private
  2146. * @returns {void}
  2147. */
  2148. _stopProxyConnection() {
  2149. if (this._proxyConnection) {
  2150. this._proxyConnection.stop();
  2151. }
  2152. this._proxyConnection = null;
  2153. }
  2154. };