
RTC.js

import { getLogger } from '@jitsi/logger';

import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
import BridgeVideoType from '../../service/RTC/BridgeVideoType';
import { MediaType } from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import browser from '../browser';
import FeatureFlags from '../flags/FeatureFlags';
import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
import Listenable from '../util/Listenable';
import { safeCounterIncrement } from '../util/MathUtil';

import BridgeChannel from './BridgeChannel';
import JitsiLocalTrack from './JitsiLocalTrack';
import RTCUtils from './RTCUtils';
import TraceablePeerConnection from './TraceablePeerConnection';

const logger = getLogger(__filename);
/**
 * The counter used to generate id numbers assigned to peer connections.
 * @type {number}
 */
let peerConnectionIdCounter = 0;

/**
 * The counter used to generate id numbers for the local
 * <code>MediaStreamTrack</code>s.
 * @type {number}
 */
let rtcTrackIdCounter = 0;

/**
 * Creates {@code JitsiLocalTrack} instances from the passed in meta information
 * about MediaTracks.
 *
 * @param {Object[]} mediaStreamMetaData - An array of meta information with
 * MediaTrack instances. Each can look like:
 * {{
 *     stream: MediaStream instance that holds a track with audio or video,
 *     track: MediaTrack within the MediaStream,
 *     videoType: "camera" or "desktop" or falsy,
 *     sourceId: ID of the desktopsharing source,
 *     sourceType: The desktopsharing source type,
 *     effects: Array of effect types
 * }}
 */
function _createLocalTracks(mediaStreamMetaData = []) {
    return mediaStreamMetaData.map(metaData => {
        const {
            sourceId,
            sourceType,
            stream,
            track,
            videoType,
            effects
        } = metaData;

        const { deviceId, facingMode } = track.getSettings();

        // FIXME Move rtcTrackIdCounter to a static method in JitsiLocalTrack
        // so RTC does not need to handle ID management. This move would be
        // safer to do once the old createLocalTracks is removed.
        rtcTrackIdCounter = safeCounterIncrement(rtcTrackIdCounter);

        return new JitsiLocalTrack({
            deviceId,
            facingMode,
            mediaType: track.kind,
            rtcId: rtcTrackIdCounter,
            sourceId,
            sourceType,
            stream,
            track,
            videoType: videoType || null,
            effects
        });
    });
}
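
// A minimal sketch (illustrative, not part of the original file) of the
// metadata shape that _createLocalTracks expects; the stream/track values
// are assumed to come from a successful getUserMedia call:
//
//     const [ localTrack ] = _createLocalTracks([ {
//         stream,                          // MediaStream holding the track
//         track: stream.getAudioTracks()[0],
//         videoType: null,                 // falsy for audio tracks
//         effects: []
//     } ]);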
/**
 * The RTC module of the library. Manages the local tracks, the
 * {@code TraceablePeerConnection}s and the bridge channel for a conference.
 */
export default class RTC extends Listenable {
    /**
     * Creates a new {@code RTC} instance.
     * @param conference The conference this RTC module belongs to.
     * @param options Optional configuration.
     */
    constructor(conference, options = {}) {
        super();
        this.conference = conference;

        /**
         * A map of active <tt>TraceablePeerConnection</tt>.
         * @type {Map.<number, TraceablePeerConnection>}
         */
        this.peerConnections = new Map();

        this.localTracks = [];
        this.options = options;

        // BridgeChannel instance.
        // @private
        // @type {BridgeChannel}
        this._channel = null;

        /**
         * The value specified to the last invocation of setLastN before the
         * channel completed opening. If defined, the value will be sent
         * through the channel (once) as soon as it opens and will then be
         * discarded.
         * @private
         * @type {number}
         */
        this._lastN = undefined;

        /**
         * Defines the last N endpoints list. It can be null or an array once
         * initialised with a channel last N event.
         * @type {Array<string>|null}
         * @private
         */
        this._lastNEndpoints = null;

        /**
         * Defines the forwarded sources list. It can be null or an array once
         * initialised with a channel forwarded sources event.
         *
         * @type {Array<string>|null}
         * @private
         */
        this._forwardedSources = null;

        /**
         * The number representing the maximum video height the local client
         * should receive from the bridge.
         *
         * @type {number|undefined}
         * @private
         */
        this._maxFrameHeight = undefined;

        /**
         * The endpoint IDs of currently selected participants.
         *
         * @type {Array}
         * @private
         */
        this._selectedEndpoints = null;

        // The last N change listener.
        this._lastNChangeListener = this._onLastNChanged.bind(this);

        // The forwarded sources change listener.
        this._forwardedSourcesChangeListener = this._onForwardedSourcesChanged.bind(this);

        this._onDeviceListChanged = this._onDeviceListChanged.bind(this);
        this._updateAudioOutputForAudioTracks
            = this._updateAudioOutputForAudioTracks.bind(this);

        /**
         * The default video type assumed by the bridge.
         * @deprecated this will go away with multiple streams support
         * @type {BridgeVideoType}
         * @private
         */
        this._videoType = BridgeVideoType.NONE;

        // Switch the audio output device on all remote audio tracks. Local
        // audio tracks handle this event by themselves.
        if (RTCUtils.isDeviceChangeAvailable('output')) {
            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._updateAudioOutputForAudioTracks
            );
            RTCUtils.addListener(
                RTCEvents.DEVICE_LIST_CHANGED,
                this._onDeviceListChanged
            );
        }
    }
    /**
     * Removes any listeners and stored state from this {@code RTC} instance.
     *
     * @returns {void}
     */
    destroy() {
        RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED, this._updateAudioOutputForAudioTracks);
        RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED, this._onDeviceListChanged);

        if (this._channelOpenListener) {
            this.removeListener(RTCEvents.DATA_CHANNEL_OPEN, this._channelOpenListener);
        }
    }
    /**
     * Exposes the private helper for converting a WebRTC MediaStream to a
     * JitsiLocalTrack.
     *
     * @param {Array<Object>} tracksInfo
     * @returns {Array<JitsiLocalTrack>}
     */
    static createLocalTracks(tracksInfo) {
        return _createLocalTracks(tracksInfo);
    }

    /**
     * Creates the local MediaStreams.
     * @param {object} [options] Optional parameters.
     * @param {array} options.devices The devices that will be requested.
     * @param {string} options.resolution Resolution constraints.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    static obtainAudioAndVideoPermissions(options) {
        return RTCUtils.obtainAudioAndVideoPermissions(options)
            .then(tracksInfo => _createLocalTracks(tracksInfo));
    }
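
    // A minimal usage sketch (illustrative only; the option values are
    // assumptions, not defaults defined in this file):
    //
    //     RTC.obtainAudioAndVideoPermissions({ devices: [ 'audio', 'video' ] })
    //         .then(tracks => {
    //             // Each entry is a JitsiLocalTrack ready to be added to a
    //             // conference via addLocalTrack().
    //             tracks.forEach(track => console.log(track.getType()));
    //         });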
    /**
     * Initializes the bridge channel of this instance.
     * At least one of the peerconnection or wsUrl parameters must be given.
     * @param {RTCPeerConnection} [peerconnection] WebRTC peer connection
     * instance.
     * @param {string} [wsUrl] WebSocket URL.
     */
    initializeBridgeChannel(peerconnection, wsUrl) {
        this._channel = new BridgeChannel(peerconnection, wsUrl, this.eventEmitter);

        this._channelOpenListener = () => {
            const logError = (error, msgType, value) => {
                GlobalOnErrorHandler.callErrorHandler(error);
                logger.error(`Cannot send ${msgType}(${JSON.stringify(value)}) endpoint message`, error);
            };

            // When the channel becomes available, tell the bridge about video
            // selections so that it can do adaptive simulcast. We want the
            // notification to trigger even if userJid is undefined or null.
            if (this._receiverVideoConstraints) {
                try {
                    this._channel.sendNewReceiverVideoConstraintsMessage(this._receiverVideoConstraints);
                } catch (error) {
                    logError(error, 'ReceiverVideoConstraints', this._receiverVideoConstraints);
                }
            }
            if (this._selectedEndpoints) {
                try {
                    this._channel.sendSelectedEndpointsMessage(this._selectedEndpoints);
                } catch (error) {
                    logError(error, 'SelectedEndpointsChangedEvent', this._selectedEndpoints);
                }
            }
            if (typeof this._maxFrameHeight !== 'undefined') {
                try {
                    this._channel.sendReceiverVideoConstraintMessage(this._maxFrameHeight);
                } catch (error) {
                    logError(error, 'ReceiverVideoConstraint', this._maxFrameHeight);
                }
            }
            if (typeof this._lastN !== 'undefined' && this._lastN !== -1) {
                try {
                    this._channel.sendSetLastNMessage(this._lastN);
                } catch (error) {
                    logError(error, 'LastNChangedEvent', this._lastN);
                }
            }
            if (!FeatureFlags.isSourceNameSignalingEnabled()) {
                try {
                    this._channel.sendVideoTypeMessage(this._videoType);
                } catch (error) {
                    logError(error, 'VideoTypeMessage', this._videoType);
                }
            }
        };
        this.addListener(RTCEvents.DATA_CHANNEL_OPEN, this._channelOpenListener);

        // Add Last N change listener.
        this.addListener(RTCEvents.LASTN_ENDPOINT_CHANGED, this._lastNChangeListener);

        if (FeatureFlags.isSourceNameSignalingEnabled()) {
            // Add forwarded sources change listener.
            this.addListener(RTCEvents.FORWARDED_SOURCES_CHANGED, this._forwardedSourcesChangeListener);
        }
    }
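
    // Usage sketch: the channel can run either over a datachannel on an
    // existing RTCPeerConnection or over a WebSocket (the URL below is a
    // placeholder, not a real endpoint):
    //
    //     rtc.initializeBridgeChannel(null, 'wss://example.org/colibri-ws/...');
    //
    // Any constraints, selections or lastN values set before the channel
    // opened are flushed to the bridge by the DATA_CHANNEL_OPEN listener
    // registered above.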
    /**
     * Callback invoked when the list of known audio and video devices has
     * been updated. Attempts to update the known available audio output
     * devices.
     *
     * @private
     * @returns {void}
     */
    _onDeviceListChanged() {
        this._updateAudioOutputForAudioTracks(RTCUtils.getAudioOutputDevice());
    }

    /**
     * Receives events when Last N has changed.
     * @param {array} lastNEndpoints The new Last N endpoints.
     * @private
     */
    _onLastNChanged(lastNEndpoints = []) {
        const oldLastNEndpoints = this._lastNEndpoints || [];
        let leavingLastNEndpoints = [];
        let enteringLastNEndpoints = [];

        this._lastNEndpoints = lastNEndpoints;

        leavingLastNEndpoints = oldLastNEndpoints.filter(
            id => !this.isInLastN(id));

        enteringLastNEndpoints = lastNEndpoints.filter(
            id => oldLastNEndpoints.indexOf(id) === -1);

        this.conference.eventEmitter.emit(
            JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
            leavingLastNEndpoints,
            enteringLastNEndpoints);
    }
    /**
     * Receives events when the forwarded sources have changed.
     *
     * @param {array} forwardedSources The new forwarded sources.
     * @private
     */
    _onForwardedSourcesChanged(forwardedSources = []) {
        const oldForwardedSources = this._forwardedSources || [];
        let leavingForwardedSources = [];
        let enteringForwardedSources = [];

        this._forwardedSources = forwardedSources;

        leavingForwardedSources = oldForwardedSources.filter(sourceName => !this.isInForwardedSources(sourceName));

        enteringForwardedSources = forwardedSources.filter(
            sourceName => oldForwardedSources.indexOf(sourceName) === -1);

        this.conference.eventEmitter.emit(
            JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED,
            leavingForwardedSources,
            enteringForwardedSources,
            Date.now());
    }
    /**
     * Should be called when the current media session ends and after the
     * PeerConnection has been closed using PeerConnection.close().
     */
    onCallEnded() {
        if (this._channel) {
            // The BridgeChannel is not explicitly closed as the PeerConnection
            // is closed on call ended which triggers datachannel onclose
            // events. If using a WebSocket, the channel must be closed since
            // it is not managed by the PeerConnection.
            // The reference is cleared to disable any logic related to the
            // channel.
            if (this._channel && this._channel.mode === 'websocket') {
                this._channel.close();
            }

            this._channel = null;
        }
    }
    /**
     * Sets the capture frame rate to be used for desktop tracks.
     *
     * @param {number} maxFps framerate to be used for desktop track capture.
     */
    setDesktopSharingFrameRate(maxFps) {
        RTCUtils.setDesktopSharingFrameRate(maxFps);
    }

    /**
     * Sets the receiver video constraints that determine how bitrate is
     * allocated to each of the video streams requested from the bridge. The
     * constraints are cached and sent through the bridge channel once the
     * channel is established.
     * @param {*} constraints
     */
    setNewReceiverVideoConstraints(constraints) {
        this._receiverVideoConstraints = constraints;

        if (this._channel && this._channel.isOpen()) {
            this._channel.sendNewReceiverVideoConstraintsMessage(constraints);
        }
    }
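
    // A sketch of a possible constraints object. The field names follow the
    // bridge's ReceiverVideoConstraints message as used elsewhere in
    // lib-jitsi-meet; treat the exact shape here as an assumption:
    //
    //     rtc.setNewReceiverVideoConstraints({
    //         lastN: 20,
    //         defaultConstraints: { maxHeight: 180 },
    //         constraints: {
    //             'participant-id-or-source': { maxHeight: 720 }
    //         }
    //     });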
    /**
     * Sets the maximum video size the local participant should receive from
     * remote participants. Will cache the value and send it through the
     * channel once it is created.
     *
     * @param {number} maxFrameHeight the maximum frame height, in pixels,
     * this receiver is willing to receive.
     * @returns {void}
     */
    setReceiverVideoConstraint(maxFrameHeight) {
        this._maxFrameHeight = maxFrameHeight;

        if (this._channel && this._channel.isOpen()) {
            this._channel.sendReceiverVideoConstraintMessage(maxFrameHeight);
        }
    }
    /**
     * Sets the video type and availability for the local video source.
     *
     * @param {string} videoType 'camera' for camera, 'desktop' for screenshare
     * and 'none' for when the local video source is muted or removed from the
     * peerconnection.
     * @returns {void}
     */
    setVideoType(videoType) {
        if (this._videoType !== videoType) {
            this._videoType = videoType;

            if (this._channel && this._channel.isOpen()) {
                this._channel.sendVideoTypeMessage(videoType);
            }
        }
    }

    /**
     * Sends the track's video type to the JVB.
     * @param {SourceName} sourceName - the track's source name.
     * @param {BridgeVideoType} videoType - the track's video type.
     */
    sendSourceVideoType(sourceName, videoType) {
        if (this._channel && this._channel.isOpen()) {
            this._channel.sendSourceVideoTypeMessage(sourceName, videoType);
        }
    }

    /**
     * Elects the participants with the given ids to be the selected
     * participants in order to always receive video for these participants
     * (even when last n is enabled). If there is no channel, the value is
     * stored and sent through the channel once it is created.
     *
     * @param {Array<string>} ids - The user ids.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     * @returns {void}
     */
    selectEndpoints(ids) {
        this._selectedEndpoints = ids;

        if (this._channel && this._channel.isOpen()) {
            this._channel.sendSelectedEndpointsMessage(ids);
        }
    }
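
    // Usage sketch (the endpoint ids are illustrative): keep receiving video
    // for two pinned participants regardless of the lastN ordering.
    //
    //     rtc.selectEndpoints([ 'abcd1234', 'efgh5678' ]);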
    /**
     * Adds a listener for the given event type to the underlying RTCUtils.
     * @param eventType
     * @param listener
     */
    static addListener(eventType, listener) {
        RTCUtils.addListener(eventType, listener);
    }

    /**
     * Removes a listener for the given event type from the underlying
     * RTCUtils.
     * @param eventType
     * @param listener
     */
    static removeListener(eventType, listener) {
        RTCUtils.removeListener(eventType, listener);
    }

    /**
     * Initializes the underlying RTCUtils with the given options.
     * @param options
     */
    static init(options = {}) {
        this.options = options;

        return RTCUtils.init(this.options);
    }
    /* eslint-disable max-params */

    /**
     * Creates new <tt>TraceablePeerConnection</tt>
     * @param {SignalingLayer} signaling The signaling layer that will provide
     * information about the media or participants which is not carried over
     * SDP.
     * @param {object} pcConfig The {@code RTCConfiguration} to use for the
     * WebRTC peer connection.
     * @param {boolean} isP2P Indicates whether or not the new TPC will be used
     * in a peer to peer type of session.
     * @param {object} options The config options.
     * @param {boolean} options.enableInsertableStreams - Set to true when the
     * insertable streams constraints is to be enabled on the PeerConnection.
     * @param {boolean} options.disableSimulcast If set to 'true' will disable
     * the simulcast.
     * @param {boolean} options.disableRtx If set to 'true' will disable the
     * RTX.
     * @param {boolean} options.startSilent If set to 'true' no audio will be
     * sent or received.
     * @return {TraceablePeerConnection}
     */
    createPeerConnection(signaling, pcConfig, isP2P, options) {
        const pcConstraints = JSON.parse(JSON.stringify(RTCUtils.pcConstraints));

        if (options.enableInsertableStreams) {
            logger.debug('E2EE - setting insertable streams constraints');
            pcConfig.encodedInsertableStreams = true;
        }

        const supportsSdpSemantics = browser.isReactNative()
            || (browser.isChromiumBased() && !options.usesUnifiedPlan);

        if (supportsSdpSemantics) {
            logger.debug('WebRTC application is running in plan-b mode');
            pcConfig.sdpSemantics = 'plan-b';
        }

        if (options.forceTurnRelay) {
            pcConfig.iceTransportPolicy = 'relay';
        }

        // Set the RTCBundlePolicy to max-bundle so that only one set of ice
        // candidates is generated. The default policy generates separate ice
        // candidates for audio and video connections.
        // This change is necessary for Unified plan to work properly on
        // Chrome and Safari.
        pcConfig.bundlePolicy = 'max-bundle';

        peerConnectionIdCounter = safeCounterIncrement(peerConnectionIdCounter);

        const newConnection
            = new TraceablePeerConnection(
                this,
                peerConnectionIdCounter,
                signaling,
                pcConfig, pcConstraints,
                isP2P, options);

        this.peerConnections.set(newConnection.id, newConnection);

        return newConnection;
    }

    /* eslint-enable max-params */
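
    // Usage sketch (the signaling layer and ICE servers are placeholders
    // supplied by the surrounding conference code, not defined here):
    //
    //     const tpc = rtc.createPeerConnection(
    //         signalingLayer,
    //         { iceServers: [] },
    //         false /* isP2P */,
    //         { usesUnifiedPlan: true });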
    /**
     * Removes the given peer connection from this RTC module instance.
     * @param {TraceablePeerConnection} traceablePeerConnection
     * @return {boolean} <tt>true</tt> if the given peer connection was removed
     * successfully or <tt>false</tt> if there was no peer connection mapped in
     * this RTC instance.
     */
    _removePeerConnection(traceablePeerConnection) {
        const id = traceablePeerConnection.id;

        if (this.peerConnections.has(id)) {
            // NOTE Remote tracks are not removed here.
            this.peerConnections.delete(id);

            return true;
        }

        return false;
    }
    /**
     * Adds the given local track to this RTC module and associates it with
     * the conference.
     * @param track the JitsiLocalTrack to add.
     */
    addLocalTrack(track) {
        if (!track) {
            throw new Error('track must not be null nor undefined');
        }

        this.localTracks.push(track);

        track.conference = this.conference;
    }
    /**
     * Get forwarded sources list.
     * @returns {Array<string>|null}
     */
    getForwardedSources() {
        return this._forwardedSources;
    }

    /**
     * Get local video track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalVideoTrack() {
        const localVideo = this.getLocalTracks(MediaType.VIDEO);

        return localVideo.length ? localVideo[0] : undefined;
    }

    /**
     * Returns all the local video tracks.
     * @returns {Array<JitsiLocalTrack>}
     */
    getLocalVideoTracks() {
        return this.getLocalTracks(MediaType.VIDEO);
    }

    /**
     * Get local audio track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalAudioTrack() {
        const localAudio = this.getLocalTracks(MediaType.AUDIO);

        return localAudio.length ? localAudio[0] : undefined;
    }

    /**
     * Returns the endpoint id for the local user.
     * @returns {string}
     */
    getLocalEndpointId() {
        return this.conference.myUserId();
    }
    /**
     * Returns the local tracks of the given media type, or all local tracks
     * if no specific type is given.
     * @param {MediaType} [mediaType] Optional media type filter (audio or
     * video).
     */
    getLocalTracks(mediaType) {
        let tracks = this.localTracks.slice();

        if (mediaType !== undefined) {
            tracks = tracks.filter(
                track => track.getType() === mediaType);
        }

        return tracks;
    }
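
    // Usage sketch: fetch only the local audio tracks, or all tracks when no
    // filter is passed.
    //
    //     const audioTracks = rtc.getLocalTracks(MediaType.AUDIO);
    //     const allTracks = rtc.getLocalTracks();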
    /**
     * Obtains all remote tracks currently known to this RTC module instance.
     * @param {MediaType} [mediaType] The remote tracks will be filtered
     * by their media type if this argument is specified.
     * @return {Array<JitsiRemoteTrack>}
     */
    getRemoteTracks(mediaType) {
        let remoteTracks = [];

        for (const tpc of this.peerConnections.values()) {
            const pcRemoteTracks = tpc.getRemoteTracks(undefined, mediaType);

            if (pcRemoteTracks) {
                remoteTracks = remoteTracks.concat(pcRemoteTracks);
            }
        }

        return remoteTracks;
    }
    /**
     * Sets mute for all local audio streams attached to the conference.
     * @param value The mute value.
     * @returns {Promise}
     */
    setAudioMute(value) {
        const mutePromises = [];

        this.getLocalTracks(MediaType.AUDIO).forEach(audioTrack => {
            // this is a Promise
            mutePromises.push(value ? audioTrack.mute() : audioTrack.unmute());
        });

        // Return a Promise composed of all the mute Promises so the caller
        // can wait for them to complete.
        return Promise.all(mutePromises);
    }

    /**
     * Sets mute for all local video streams attached to the conference.
     * @param value The mute value.
     * @returns {Promise}
     */
    setVideoMute(value) {
        const mutePromises = [];

        this.getLocalTracks(MediaType.VIDEO).concat(this.getLocalTracks(MediaType.PRESENTER))
            .forEach(videoTrack => {
                // this is a Promise
                mutePromises.push(value ? videoTrack.mute() : videoTrack.unmute());
            });

        // Return a Promise composed of all the mute Promises so the caller
        // can wait for them to complete.
        return Promise.all(mutePromises);
    }
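
    // Usage sketch: mute all local audio and wait for the operation to
    // complete before updating any UI.
    //
    //     rtc.setAudioMute(true)
    //         .then(() => console.log('all local audio tracks muted'));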
    /**
     * Removes the given local track from this RTC module instance, if it was
     * previously added.
     * @param track the JitsiLocalTrack to remove.
     */
    removeLocalTrack(track) {
        const pos = this.localTracks.indexOf(track);

        if (pos === -1) {
            return;
        }

        this.localTracks.splice(pos, 1);
    }

    /**
     * Attaches the given media stream to the element matched by the given
     * selector.
     * @param elSelector
     * @param stream
     */
    static attachMediaStream(elSelector, stream) {
        return RTCUtils.attachMediaStream(elSelector, stream);
    }

    /**
     * Returns the id of the given stream.
     * @param {MediaStream} stream
     */
    static getStreamID(stream) {
        return RTCUtils.getStreamID(stream);
    }

    /**
     * Returns the id of the given track.
     * @param {MediaStreamTrack} track
     */
    static getTrackID(track) {
        return RTCUtils.getTrackID(track);
    }
    /**
     * Returns true if retrieving the list of input devices is supported
     * and false if not.
     */
    static isDeviceListAvailable() {
        return RTCUtils.isDeviceListAvailable();
    }

    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] Type of the device to change: undefined or
     * 'input' for input devices, 'output' for the audio output device.
     * @returns {boolean} true if available, false otherwise.
     */
    static isDeviceChangeAvailable(deviceType) {
        return RTCUtils.isDeviceChangeAvailable(deviceType);
    }

    /**
     * Returns whether the current execution environment supports WebRTC (for
     * use within this library).
     *
     * @returns {boolean} {@code true} if WebRTC is supported in the current
     * execution environment (for use within this library); {@code false},
     * otherwise.
     */
    static isWebRtcSupported() {
        return browser.isSupported();
    }
    /**
     * Returns the currently used audio output device id, where '' stands for
     * the default device.
     * @returns {string}
     */
    static getAudioOutputDevice() {
        return RTCUtils.getAudioOutputDevice();
    }

    /**
     * Returns the list of available media devices if it has been obtained;
     * otherwise an empty array is returned.
     * @returns {array} list of available media devices.
     */
    static getCurrentlyAvailableMediaDevices() {
        return RTCUtils.getCurrentlyAvailableMediaDevices();
    }

    /**
     * Returns whether available devices have permissions granted.
     * @returns {Boolean}
     */
    static arePermissionsGrantedForAvailableDevices() {
        return RTCUtils.arePermissionsGrantedForAvailableDevices();
    }

    /**
     * Returns event data for the device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    static getEventDataForActiveDevice(device) {
        return RTCUtils.getEventDataForActiveDevice(device);
    }

    /**
     * Sets the current audio output device.
     * @param {string} deviceId Id of an 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices().
     * @returns {Promise} resolves when the audio output is changed, is
     * rejected otherwise.
     */
    static setAudioOutputDevice(deviceId) {
        return RTCUtils.setAudioOutputDevice(deviceId);
    }
    /**
     * Returns <tt>true</tt> if the given WebRTC MediaStream is considered a
     * valid "user" stream, which means that it's not a "receive only" stream
     * nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox, use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * @param {MediaStream} stream The WebRTC MediaStream instance.
     * @returns {boolean}
     */
    static isUserStream(stream) {
        return RTC.isUserStreamById(RTCUtils.getStreamID(stream));
    }

    /**
     * Returns <tt>true</tt> if a WebRTC MediaStream identified by the given
     * stream ID is considered a valid "user" stream, which means that it's not
     * a "receive only" stream nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox, use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * @param {string} streamId The id of the WebRTC MediaStream.
     * @returns {boolean}
     */
    static isUserStreamById(streamId) {
        return streamId && streamId !== 'mixedmslabel'
            && streamId !== 'default';
    }

    /**
     * Allows retrieving the list of available cameras/microphones.
     * @param {function} callback Will receive an array of devices as an
     * argument.
     */
    static enumerateDevices(callback) {
        RTCUtils.enumerateDevices(callback);
    }
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param {MediaStream} mediaStream MediaStream object to stop.
     */
    static stopMediaStream(mediaStream) {
        RTCUtils.stopMediaStream(mediaStream);
    }

    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    static isDesktopSharingEnabled() {
        return RTCUtils.isDesktopSharingEnabled();
    }

    /**
     * Closes the currently opened bridge channel.
     */
    closeBridgeChannel() {
        if (this._channel) {
            this._channel.close();
            this._channel = null;

            this.removeListener(RTCEvents.LASTN_ENDPOINT_CHANGED, this._lastNChangeListener);
        }
    }
    /* eslint-disable max-params */
    /**
     * Receives an audio level report for the given SSRC and forwards it to
     * the corresponding track.
     * @param {TraceablePeerConnection} tpc
     * @param {number} ssrc
     * @param {number} audioLevel
     * @param {boolean} isLocal
     */
    setAudioLevel(tpc, ssrc, audioLevel, isLocal) {
        const track = tpc.getTrackBySSRC(ssrc);

        if (!track) {
            return;
        } else if (!track.isAudioTrack()) {
            logger.warn(`Received audio level for non-audio track: ${ssrc}`);

            return;
        } else if (track.isLocal() !== isLocal) {
            logger.error(
                `${track} was expected to ${isLocal ? 'be' : 'not be'} local`);
        }

        track.setAudioLevel(audioLevel, tpc);
    }
    /**
     * Sends a message via the bridge channel.
     * @param {string} to The id of the endpoint that should receive the
     * message. If "" the message will be sent to all participants.
     * @param {object} payload The payload of the message.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails or there is no data channel created.
     */
    sendChannelMessage(to, payload) {
        if (this._channel) {
            this._channel.sendMessage(to, payload);
        } else {
            throw new Error('Channel support is disabled!');
        }
    }
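
    // Usage sketch: sendChannelMessage throws when no channel exists, so wrap
    // it accordingly (the payload shape is illustrative, not a defined type):
    //
    //     try {
    //         rtc.sendChannelMessage('', { type: 'ping', ts: Date.now() });
    //     } catch (e) {
    //         logger.warn('Bridge channel unavailable', e);
    //     }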
    /**
     * Sends the local stats via the bridge channel.
     * @param {Object} payload The payload of the message.
     * @throws NetworkError/InvalidStateError/Error if the operation fails or
     * if there is no data channel created.
     */
    sendEndpointStatsMessage(payload) {
        if (this._channel && this._channel.isOpen()) {
            this._channel.sendEndpointStatsMessage(payload);
        }
    }
    /**
     * Selects a new value for "lastN". The requested number of videos will be
     * delivered after the value is in effect. Set to -1 for unlimited or
     * all available videos.
     * @param {number} value the new value for lastN.
     */
    setLastN(value) {
        if (this._lastN !== value) {
            this._lastN = value;

            if (this._channel && this._channel.isOpen()) {
                this._channel.sendSetLastNMessage(value);
            }

            this.eventEmitter.emit(RTCEvents.LASTN_VALUE_CHANGED, value);
        }
    }
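
    // Usage sketch: only receive video for the 5 most relevant endpoints;
    // -1 removes the limit again.
    //
    //     rtc.setLastN(5);
    //     rtc.setLastN(-1);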
    /**
     * Indicates if the endpoint id is currently included in the last N.
     * @param {string} id The endpoint id that we check for last N.
     * @returns {boolean} true if the endpoint id is in the last N or if we
     * don't have bridge channel support, otherwise we return false.
     */
    isInLastN(id) {
        return !this._lastNEndpoints // lastNEndpoints not initialised yet.
            || this._lastNEndpoints.indexOf(id) > -1;
    }

    /**
     * Indicates if the source name is currently included in the forwarded
     * sources.
     *
     * @param {string} sourceName The source name that we check for forwarded
     * sources.
     * @returns {boolean} true if the source name is in the forwarded sources
     * or if we don't have bridge channel support, otherwise we return false.
     */
    isInForwardedSources(sourceName) {
        return !this._forwardedSources // forwardedSources not initialised yet.
            || this._forwardedSources.indexOf(sourceName) > -1;
    }
    /**
     * Updates the target audio output device for all remote audio tracks.
     *
     * @param {string} deviceId - The device id of the audio output device to
     * use for all remote tracks.
     * @private
     * @returns {void}
     */
    _updateAudioOutputForAudioTracks(deviceId) {
        const remoteAudioTracks = this.getRemoteTracks(MediaType.AUDIO);

        for (const track of remoteAudioTracks) {
            track.setAudioOutput(deviceId);
        }
    }
}
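
// A compact end-to-end sketch (assumes a `conference` object from the
// surrounding library; the names and option values are illustrative):
//
//     RTC.init({});
//     RTC.obtainAudioAndVideoPermissions({ devices: [ 'audio', 'video' ] })
//         .then(tracks => {
//             const rtc = new RTC(conference);
//
//             tracks.forEach(track => rtc.addLocalTrack(track));
//             rtc.setLastN(10);
//         });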