
/* global __filename */

import { getLogger } from 'jitsi-meet-logger';

import BridgeChannel from './BridgeChannel';
import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
import JitsiLocalTrack from './JitsiLocalTrack';
import JitsiTrackError from '../../JitsiTrackError';
import * as JitsiTrackErrors from '../../JitsiTrackErrors';
import Listenable from '../util/Listenable';
import * as MediaType from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import RTCUtils from './RTCUtils';
import TraceablePeerConnection from './TraceablePeerConnection';
import VideoType from '../../service/RTC/VideoType';

const logger = getLogger(__filename);

// The counter used to generate the rtcId values assigned to local tracks.
let rtcTrackIdCounter = 0;
/**
 * Creates <tt>JitsiLocalTrack</tt> instances from the passed in meta
 * information about the obtained media streams.
 * @param {Array<object>} tracksInfo An array of objects describing the
 * created streams (media type, video type, the stream itself, etc.).
 * @param {object} options The options with which the tracks were requested
 * (micDeviceId, cameraDeviceId, facingMode).
 * @returns {Array<JitsiLocalTrack>} The created local tracks.
 */
function createLocalTracks(tracksInfo, options) {
    const newTracks = [];
    let deviceId = null;

    tracksInfo.forEach(trackInfo => {
        if (trackInfo.mediaType === MediaType.AUDIO) {
            deviceId = options.micDeviceId;
        } else if (trackInfo.videoType === VideoType.CAMERA) {
            deviceId = options.cameraDeviceId;
        }
        rtcTrackIdCounter += 1;
        const localTrack = new JitsiLocalTrack({
            ...trackInfo,
            deviceId,
            facingMode: options.facingMode,
            rtcId: rtcTrackIdCounter
        });

        newTracks.push(localTrack);
    });

    return newTracks;
}
/**
 * The RTC module of the conference: manages the local and remote tracks, the
 * peer connections and the bridge channel.
 */
export default class RTC extends Listenable {
    /**
     * Creates a new <tt>RTC</tt> module instance.
     * @param conference The conference to which this module belongs.
     * @param options The optional configuration.
     */
    constructor(conference, options = {}) {
        super();
        this.conference = conference;

        /**
         * A map of active <tt>TraceablePeerConnection</tt>.
         * @type {Map.<number, TraceablePeerConnection>}
         */
        this.peerConnections = new Map();

        /**
         * The counter used to generate the id numbers assigned to peer
         * connections.
         * @type {number}
         */
        this.peerConnectionIdCounter = 1;

        this.localTracks = [];
        this.options = options;

        // BridgeChannel instance.
        // @private
        // @type {BridgeChannel}
        this._channel = null;

        // A flag which indicates whether we have received notification that
        // the channel has opened. It can get out of sync with the actual
        // channel state if the server closes the channel, which is desired
        // behaviour so that such errors become visible.
        // @private
        // @type {boolean}
        this._channelOpen = false;

        /**
         * The value specified to the last invocation of setLastN before the
         * channel completed opening. If different from -1, the value will be
         * sent through the channel (once) as soon as it opens and will then
         * be discarded.
         * @private
         * @type {number}
         */
        this._lastN = -1;

        /**
         * Defines the last N endpoints list. It can be null or an array once
         * initialised with a channel last N event.
         * @type {Array<string>|null}
         * @private
         */
        this._lastNEndpoints = null;

        /**
         * The endpoint ID of the currently pinned participant or <tt>null</tt>
         * if no user is pinned.
         * @type {string|null}
         * @private
         */
        this._pinnedEndpoint = null;

        /**
         * The endpoint ID of the currently selected participant or
         * <tt>null</tt> if no user is selected.
         * @type {string|null}
         * @private
         */
        this._selectedEndpoint = null;

        // The last N change listener.
        this._lastNChangeListener = this._onLastNChanged.bind(this);

        // Switch audio output device on all remote audio tracks. Local audio
        // tracks handle this event by themselves.
        if (RTCUtils.isDeviceChangeAvailable('output')) {
            RTCUtils.addListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                deviceId => {
                    const remoteAudioTracks
                        = this.getRemoteTracks(MediaType.AUDIO);

                    for (const track of remoteAudioTracks) {
                        track.setAudioOutput(deviceId);
                    }
                });
        }
    }
    /**
     * Creates the local MediaStreams.
     * @param {object} [options] Optional parameters.
     * @param {array} options.devices The devices that will be requested.
     * @param {string} options.resolution Resolution constraints.
     * @param {bool} options.dontCreateJitsiTrack If <tt>true</tt>, objects
     * with the following structure {stream: the Media Stream, type: "audio"
     * or "video", videoType: "camera" or "desktop"} will be returned through
     * the Promise, otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    static obtainAudioAndVideoPermissions(options) {
        return RTCUtils.obtainAudioAndVideoPermissions(options).then(
            tracksInfo => {
                const tracks = createLocalTracks(tracksInfo, options);

                return tracks.some(track => !track._isReceivingData())
                    ? Promise.reject(
                        new JitsiTrackError(
                            JitsiTrackErrors.NO_DATA_FROM_SOURCE))
                    : tracks;
            });
    }
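    // Illustrative sketch (not part of the module): how a caller might obtain
    // local tracks through the static helper above. The option values and the
    // handling code are hypothetical; only the method and option names come
    // from the JSDoc above.
    //
    //     RTC.obtainAudioAndVideoPermissions({
    //         devices: [ 'audio', 'video' ],
    //         cameraDeviceId: 'default',
    //         micDeviceId: 'default'
    //     }).then(tracks => {
    //         // Each entry is a JitsiLocalTrack ready to be added to the
    //         // conference via addLocalTrack().
    //         tracks.forEach(track => console.log(track.getType()));
    //     }).catch(error => {
    //         // NO_DATA_FROM_SOURCE is rejected when a created track is not
    //         // receiving any media.
    //         console.error('Failed to create local tracks', error);
    //     });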
    /**
     * Initializes the bridge channel of this instance.
     * At least one of the two parameters, peerconnection or wsUrl, must be
     * given.
     * @param {RTCPeerConnection} [peerconnection] WebRTC peer connection
     * instance.
     * @param {string} [wsUrl] WebSocket URL.
     */
    initializeBridgeChannel(peerconnection, wsUrl) {
        this._channel = new BridgeChannel(
            peerconnection, wsUrl, this.eventEmitter);

        this._channelOpenListener = () => {
            // Mark the channel as opened.
            this._channelOpen = true;

            // When the channel becomes available, tell the bridge about the
            // video selections so that it can do adaptive simulcast. The
            // notification is sent even if the endpoint id is undefined or
            // null.
            try {
                this._channel.sendPinnedEndpointMessage(
                    this._pinnedEndpoint);
                this._channel.sendSelectedEndpointMessage(
                    this._selectedEndpoint);
            } catch (error) {
                GlobalOnErrorHandler.callErrorHandler(error);
                logger.error(
                    `Cannot send selected(${this._selectedEndpoint})`
                        + ` pinned(${this._pinnedEndpoint}) endpoint message.`,
                    error);
            }

            this.removeListener(RTCEvents.DATA_CHANNEL_OPEN,
                this._channelOpenListener);
            this._channelOpenListener = null;

            // If setLastN was invoked before the bridge channel completed
            // opening, apply the specified value now that the channel
            // is open. NOTE that -1 is the default value assumed by both
            // the RTC module and the JVB.
            if (this._lastN !== -1) {
                this._channel.sendSetLastNMessage(this._lastN);
            }
        };
        this.addListener(RTCEvents.DATA_CHANNEL_OPEN,
            this._channelOpenListener);

        // Add Last N change listener.
        this.addListener(RTCEvents.LASTN_ENDPOINT_CHANGED,
            this._lastNChangeListener);
    }
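    // Illustrative sketch (not part of the module): the channel is opened
    // either over the peer connection's data channel or over a WebSocket,
    // depending on which argument is supplied. The variable names and URL
    // below are hypothetical.
    //
    //     // Data-channel variant: pass the RTCPeerConnection, no URL.
    //     rtc.initializeBridgeChannel(peerconnection, null);
    //
    //     // WebSocket variant: pass no peer connection, only the URL.
    //     rtc.initializeBridgeChannel(null, 'wss://jvb.example.com/colibri-ws');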
    /**
     * Receives events when Last N has changed.
     * @param {array} lastNEndpoints The new Last N endpoints.
     * @private
     */
    _onLastNChanged(lastNEndpoints = []) {
        const oldLastNEndpoints = this._lastNEndpoints || [];
        let leavingLastNEndpoints = [];
        let enteringLastNEndpoints = [];

        this._lastNEndpoints = lastNEndpoints;

        leavingLastNEndpoints = oldLastNEndpoints.filter(
            id => !this.isInLastN(id));

        enteringLastNEndpoints = lastNEndpoints.filter(
            id => oldLastNEndpoints.indexOf(id) === -1);

        this.conference.eventEmitter.emit(
            JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
            leavingLastNEndpoints,
            enteringLastNEndpoints);
    }
    /**
     * Should be called when the current media session ends and after the
     * PeerConnection has been closed using PeerConnection.close() method.
     */
    onCallEnded() {
        if (this._channel) {
            // The BridgeChannel is not explicitly closed as the PeerConnection
            // is closed on call ended which triggers datachannel onclose
            // events. If using a WebSocket, the channel must be closed since
            // it is not managed by the PeerConnection.
            // The reference is cleared to disable any logic related to the
            // channel.
            if (this._channel.mode === 'websocket') {
                this._channel.close();
            }

            this._channel = null;
            this._channelOpen = false;
        }
    }
    /**
     * Elects the participant with the given id to be the selected participant
     * in order to always receive video for this participant (even when last n
     * is enabled).
     * If there is no channel we store it and send it through the channel once
     * it is created.
     * @param {string} id The user id.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    selectEndpoint(id) {
        // Cache the value if the channel is missing, until it opens.
        this._selectedEndpoint = id;
        if (this._channel && this._channelOpen) {
            this._channel.sendSelectedEndpointMessage(id);
        }
    }

    /**
     * Elects the participant with the given id to be the pinned participant in
     * order to always receive video for this participant (even when last n is
     * enabled).
     * @param {string} id The user id.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    pinEndpoint(id) {
        // Cache the value if the channel is missing, until it opens.
        this._pinnedEndpoint = id;
        if (this._channel && this._channelOpen) {
            this._channel.sendPinnedEndpointMessage(id);
        }
    }
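    // Illustrative sketch (not part of the module): selecting and pinning are
    // independent hints sent to the bridge. Calling them before the channel is
    // open is safe because the values are cached and flushed by the
    // DATA_CHANNEL_OPEN listener set up in initializeBridgeChannel(). The
    // endpoint id below is hypothetical.
    //
    //     rtc.selectEndpoint('abcd1234'); // prefer high quality for this user
    //     rtc.pinEndpoint('abcd1234');    // always receive video for this user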
    /**
     * Adds a listener for the given event type to the shared RTCUtils event
     * emitter.
     * @param eventType The event type.
     * @param listener The listener function.
     */
    static addListener(eventType, listener) {
        RTCUtils.addListener(eventType, listener);
    }

    /**
     * Removes a listener previously added with {@link RTC.addListener}.
     * @param eventType The event type.
     * @param listener The listener function to remove.
     */
    static removeListener(eventType, listener) {
        RTCUtils.removeListener(eventType, listener);
    }

    /**
     * Indicates whether the WebRTC stack has been initialized and is ready to
     * be used.
     */
    static isRTCReady() {
        return RTCUtils.isRTCReady();
    }

    /**
     * Initializes the underlying RTCUtils module with the given options.
     * @param options The initialization options.
     */
    static init(options = {}) {
        this.options = options;

        return RTCUtils.init(this.options);
    }

    /**
     * Returns the availability of the media devices.
     */
    static getDeviceAvailability() {
        return RTCUtils.getDeviceAvailability();
    }
    /* eslint-disable max-params */

    /**
     * Creates new <tt>TraceablePeerConnection</tt>.
     * @param {SignalingLayer} signaling The signaling layer that will
     * provide information about the media or participants which is not
     * carried over SDP.
     * @param {object} iceConfig An object describing the ICE config like
     * defined in the WebRTC specification.
     * @param {boolean} isP2P Indicates whether or not the new TPC will be used
     * in a peer to peer type of session.
     * @param {object} options The config options.
     * @param {boolean} options.disableSimulcast If set to 'true' will disable
     * the simulcast.
     * @param {boolean} options.disableRtx If set to 'true' will disable the
     * RTX.
     * @param {boolean} options.preferH264 If set to 'true' H264 will be
     * preferred over other video codecs.
     * @return {TraceablePeerConnection}
     */
    createPeerConnection(signaling, iceConfig, isP2P, options) {
        const newConnection
            = new TraceablePeerConnection(
                this,
                this.peerConnectionIdCounter,
                signaling, iceConfig, RTC.getPCConstraints(), isP2P, options);

        this.peerConnections.set(newConnection.id, newConnection);
        this.peerConnectionIdCounter += 1;

        return newConnection;
    }

    /* eslint-enable max-params */
    /**
     * Removes the given peer connection from this RTC module instance.
     * @param {TraceablePeerConnection} traceablePeerConnection
     * @return {boolean} <tt>true</tt> if the given peer connection was removed
     * successfully or <tt>false</tt> if there was no peer connection mapped in
     * this RTC instance.
     */
    _removePeerConnection(traceablePeerConnection) {
        const id = traceablePeerConnection.id;

        if (this.peerConnections.has(id)) {
            // NOTE Remote tracks are not removed here.
            this.peerConnections.delete(id);

            return true;
        }

        return false;
    }

    /**
     * Adds the given local track to this RTC module instance and binds it to
     * the conference.
     * @param track The local track to add.
     * @throws {Error} If the track is null or undefined.
     */
    addLocalTrack(track) {
        if (!track) {
            throw new Error('track must not be null nor undefined');
        }

        this.localTracks.push(track);

        track.conference = this.conference;
    }
    /**
     * Returns the current value of "lastN" - the number of videos that are
     * going to be delivered. A value of -1 means unlimited, i.e. all available
     * videos.
     * @return {number}
     */
    getLastN() {
        return this._lastN;
    }

    /**
     * Get local video track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalVideoTrack() {
        const localVideo = this.getLocalTracks(MediaType.VIDEO);

        return localVideo.length ? localVideo[0] : undefined;
    }

    /**
     * Get local audio track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalAudioTrack() {
        const localAudio = this.getLocalTracks(MediaType.AUDIO);

        return localAudio.length ? localAudio[0] : undefined;
    }

    /**
     * Returns the local tracks of the given media type, or all local tracks if
     * no specific type is given.
     * @param {MediaType} [mediaType] Optional media type filter
     * (audio or video).
     */
    getLocalTracks(mediaType) {
        let tracks = this.localTracks.slice();

        if (mediaType !== undefined) {
            tracks = tracks.filter(
                track => track.getType() === mediaType);
        }

        return tracks;
    }
    /**
     * Obtains all remote tracks currently known to this RTC module instance.
     * @param {MediaType} [mediaType] The remote tracks will be filtered
     * by their media type if this argument is specified.
     * @return {Array<JitsiRemoteTrack>}
     */
    getRemoteTracks(mediaType) {
        let remoteTracks = [];

        for (const tpc of this.peerConnections.values()) {
            const pcRemoteTracks = tpc.getRemoteTracks(undefined, mediaType);

            if (pcRemoteTracks) {
                remoteTracks = remoteTracks.concat(pcRemoteTracks);
            }
        }

        return remoteTracks;
    }

    /**
     * Sets mute for all local audio streams attached to the conference.
     * @param value The mute value.
     * @returns {Promise}
     */
    setAudioMute(value) {
        const mutePromises = [];

        this.getLocalTracks(MediaType.AUDIO).forEach(audioTrack => {
            // mute() and unmute() return Promises.
            mutePromises.push(value ? audioTrack.mute() : audioTrack.unmute());
        });

        // Return a Promise over all of the mute Promises so the caller can
        // wait for all of them to complete.
        return Promise.all(mutePromises);
    }
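    // Illustrative sketch (not part of the module): muting every local audio
    // track at once and waiting for the operation to finish. The variable name
    // is hypothetical.
    //
    //     rtc.setAudioMute(true)
    //         .then(() => console.log('all local audio tracks muted'))
    //         .catch(error => console.error('mute failed', error));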
    /**
     * Removes the given local track from this RTC module instance. Does
     * nothing if the track is not known to this instance.
     * @param track The local track to remove.
     */
    removeLocalTrack(track) {
        const pos = this.localTracks.indexOf(track);

        if (pos === -1) {
            return;
        }

        this.localTracks.splice(pos, 1);
    }

    /**
     * Removes all JitsiRemoteTracks associated with given MUC nickname
     * (resource part of the JID). Returns array of removed tracks.
     *
     * @param {string} owner The resource part of the MUC JID.
     * @returns {JitsiRemoteTrack[]}
     */
    removeRemoteTracks(owner) {
        let removedTracks = [];

        for (const tpc of this.peerConnections.values()) {
            const pcRemovedTracks = tpc.removeRemoteTracks(owner);

            removedTracks = removedTracks.concat(pcRemovedTracks);
        }

        logger.debug(
            `Removed remote tracks for ${owner}`
                + ` count: ${removedTracks.length}`);

        return removedTracks;
    }
    /**
     * Returns the peer connection constraints shared by all peer connections.
     */
    static getPCConstraints() {
        return RTCUtils.pcConstraints;
    }

    /**
     * Attaches the given media stream to the given HTML element.
     * @param elSelector The element (or selector) to attach the stream to.
     * @param stream The MediaStream to attach.
     */
    static attachMediaStream(elSelector, stream) {
        return RTCUtils.attachMediaStream(elSelector, stream);
    }

    /**
     * Returns the id of the given stream.
     * @param {MediaStream} stream
     */
    static getStreamID(stream) {
        return RTCUtils.getStreamID(stream);
    }

    /**
     * Returns the id of the given track.
     * @param {MediaStreamTrack} track
     */
    static getTrackID(track) {
        return RTCUtils.getTrackID(track);
    }
    /**
     * Returns true if retrieving the list of input devices is supported
     * and false if not.
     */
    static isDeviceListAvailable() {
        return RTCUtils.isDeviceListAvailable();
    }

    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] Type of device to change. Default is
     * undefined or 'input', 'output' - for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    static isDeviceChangeAvailable(deviceType) {
        return RTCUtils.isDeviceChangeAvailable(deviceType);
    }

    /**
     * Returns the currently used audio output device id, where '' stands for
     * the default device.
     * @returns {string}
     */
    static getAudioOutputDevice() {
        return RTCUtils.getAudioOutputDevice();
    }

    /**
     * Returns the list of available media devices if it has been obtained,
     * otherwise an empty array is returned.
     * @returns {array} list of available media devices.
     */
    static getCurrentlyAvailableMediaDevices() {
        return RTCUtils.getCurrentlyAvailableMediaDevices();
    }

    /**
     * Returns event data for device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    static getEventDataForActiveDevice(device) {
        return RTCUtils.getEventDataForActiveDevice(device);
    }

    /**
     * Sets the current audio output device.
     * @param {string} deviceId Id of an 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices().
     * @returns {Promise} resolves when the audio output is changed, rejected
     * otherwise.
     */
    static setAudioOutputDevice(deviceId) {
        return RTCUtils.setAudioOutputDevice(deviceId);
    }
    /**
     * Returns <tt>true</tt> if the given WebRTC MediaStream is considered a
     * valid "user" stream which means that it's not a "receive only" stream
     * nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox, use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * @param {MediaStream} stream The WebRTC MediaStream instance.
     * @returns {boolean}
     */
    static isUserStream(stream) {
        return RTC.isUserStreamById(RTCUtils.getStreamID(stream));
    }

    /**
     * Returns <tt>true</tt> if a WebRTC MediaStream identified by given stream
     * ID is considered a valid "user" stream which means that it's not a
     * "receive only" stream nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox, use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * @param {string} streamId The id of WebRTC MediaStream.
     * @returns {boolean}
     */
    static isUserStreamById(streamId) {
        return streamId && streamId !== 'mixedmslabel'
            && streamId !== 'default';
    }

    /**
     * Allows to receive list of available cameras/microphones.
     * @param {function} callback Would receive array of devices as an
     * argument.
     */
    static enumerateDevices(callback) {
        RTCUtils.enumerateDevices(callback);
    }
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param {MediaStream} mediaStream MediaStream object to stop.
     */
    static stopMediaStream(mediaStream) {
        RTCUtils.stopMediaStream(mediaStream);
    }

    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    static isDesktopSharingEnabled() {
        return RTCUtils.isDesktopSharingEnabled();
    }

    /**
     * Closes the currently opened bridge channel.
     */
    closeBridgeChannel() {
        if (this._channel) {
            this._channel.close();
            this._channelOpen = false;

            this.removeListener(RTCEvents.LASTN_ENDPOINT_CHANGED,
                this._lastNChangeListener);
        }
    }
    /* eslint-disable max-params */

    /**
     * Forwards an audio level, reported for the given SSRC, to the
     * corresponding track.
     * @param {TraceablePeerConnection} tpc
     * @param {number} ssrc
     * @param {number} audioLevel
     * @param {boolean} isLocal
     */
    setAudioLevel(tpc, ssrc, audioLevel, isLocal) {
        const track = tpc.getTrackBySSRC(ssrc);

        if (!track) {
            return;
        } else if (!track.isAudioTrack()) {
            logger.warn(`Received audio level for non-audio track: ${ssrc}`);

            return;
        } else if (track.isLocal() !== isLocal) {
            logger.error(
                `${track} was expected to ${isLocal ? 'be' : 'not be'} local`);
        }

        track.setAudioLevel(audioLevel, tpc);
    }

    /* eslint-enable max-params */
    /**
     * Sends a message via the bridge channel.
     * @param {string} to The id of the endpoint that should receive the
     * message. If "" the message will be sent to all participants.
     * @param {object} payload The payload of the message.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails or there is no data channel created.
     */
    sendChannelMessage(to, payload) {
        if (this._channel) {
            this._channel.sendMessage(to, payload);
        } else {
            throw new Error('Channel support is disabled!');
        }
    }
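    // Illustrative sketch (not part of the module): broadcasting an
    // application-level message over the bridge channel. The payload shape is
    // hypothetical; callers should expect the throw when no channel exists.
    //
    //     try {
    //         rtc.sendChannelMessage('', { type: 'chat', text: 'hello' });
    //     } catch (error) {
    //         console.warn('bridge channel not available', error);
    //     }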
    /**
     * Selects a new value for "lastN". The requested number of videos is
     * going to be delivered after the value is in effect. Set to -1 for
     * unlimited or all available videos.
     * @param {number} value The new value for lastN.
     */
    setLastN(value) {
        if (this._lastN !== value) {
            this._lastN = value;
            if (this._channel && this._channelOpen) {
                this._channel.sendSetLastNMessage(value);
            }
            this.eventEmitter.emit(RTCEvents.LASTN_VALUE_CHANGED, value);
        }
    }
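    // Illustrative sketch (not part of the module): limiting the number of
    // received video streams. If the channel is not open yet, the value is
    // cached and flushed by the DATA_CHANNEL_OPEN listener set up in
    // initializeBridgeChannel().
    //
    //     rtc.setLastN(5);  // receive at most 5 remote videos
    //     rtc.setLastN(-1); // back to unlimited / all available videos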
    /**
     * Indicates if the endpoint id is currently included in the last N.
     * @param {string} id The endpoint id that we check for last N.
     * @returns {boolean} true if the endpoint id is in the last N or if we
     * don't have bridge channel support, otherwise we return false.
     */
    isInLastN(id) {
        return !this._lastNEndpoints // lastNEndpoints not initialised yet.
            || this._lastNEndpoints.indexOf(id) > -1;
    }
}
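
// Illustrative sketch (not part of the module): a rough outline of how this
// module is driven. The surrounding conference, signaling and iceConfig
// objects are hypothetical; within lib-jitsi-meet the RTC instance is created
// and wired up by the conference logic rather than by application code.
//
//     const rtc = new RTC(conference, {});
//
//     RTC.obtainAudioAndVideoPermissions({ devices: [ 'audio', 'video' ] })
//         .then(tracks => tracks.forEach(track => rtc.addLocalTrack(track)));
//
//     const tpc = rtc.createPeerConnection(signaling, iceConfig, false, {});
//
//     rtc.initializeBridgeChannel(tpc.peerconnection, null);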