
RTC.js

import { getLogger } from '@jitsi/logger';
import { cloneDeep, isEqual } from 'lodash-es';

import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
import { MediaType } from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import browser from '../browser';
import Listenable from '../util/Listenable';
import { safeCounterIncrement } from '../util/MathUtil';

import BridgeChannel from './BridgeChannel';
import JitsiLocalTrack from './JitsiLocalTrack';
import RTCUtils from './RTCUtils';
import TraceablePeerConnection from './TraceablePeerConnection';

const logger = getLogger('modules/RTC/RTC');

/**
 * The counter used to generate id numbers assigned to peer connections.
 * @type {number}
 */
let peerConnectionIdCounter = 0;

/**
 * The counter used to generate id numbers for the local
 * <code>MediaStreamTrack</code>s.
 * @type {number}
 */
let rtcTrackIdCounter = 0;

/**
 * Creates {@code JitsiLocalTrack} instances from the passed in meta information
 * about MediaTracks.
 *
 * @param {Object[]} mediaStreamMetaData - An array of meta information with
 * MediaTrack instances. Each can look like:
 * {{
 *     stream: MediaStream instance that holds a track with audio or video,
 *     track: MediaTrack within the MediaStream,
 *     videoType: "camera" or "desktop" or falsy,
 *     sourceId: ID of the desktopsharing source,
 *     sourceType: The desktopsharing source type,
 *     effects: Array of effect types
 * }}
 */
function _createLocalTracks(mediaStreamMetaData = []) {
    return mediaStreamMetaData.map(metaData => {
        const {
            constraints,
            sourceId,
            sourceType,
            stream,
            track,
            videoType,
            effects
        } = metaData;
        const { deviceId, facingMode } = track.getSettings();

        // FIXME Move rtcTrackIdCounter to a static method in JitsiLocalTrack
        // so RTC does not need to handle ID management. This move would be
        // safer to do once the old createLocalTracks is removed.
        rtcTrackIdCounter = safeCounterIncrement(rtcTrackIdCounter);

        return new JitsiLocalTrack({
            constraints,
            deviceId,
            facingMode,
            mediaType: track.kind,
            rtcId: rtcTrackIdCounter,
            sourceId,
            sourceType,
            stream,
            track,
            videoType: videoType || null,
            effects
        });
    });
}

/**
 * The RTC module coordinates the WebRTC-related state of a conference: the
 * local tracks, the active peer connections and the bridge channel.
 */
export default class RTC extends Listenable {
    /**
     * Creates a new RTC module instance for the given conference.
     *
     * @param {JitsiConference} conference - The conference this RTC module belongs to.
     * @param {object} [options] - The RTC options.
     */
    constructor(conference, options = {}) {
        super();
        this.conference = conference;

        /**
         * A map of active <tt>TraceablePeerConnection</tt>.
         * @type {Map.<number, TraceablePeerConnection>}
         */
        this.peerConnections = new Map();

        this.localTracks = [];
        this.options = options;

        /**
         * The BridgeChannel instance.
         * @private
         * @type {BridgeChannel}
         */
        this._channel = null;

        /**
         * The value specified to the last invocation of setLastN before the
         * channel completed opening. If non-null, the value will be sent
         * through a channel (once) as soon as it opens and will then be
         * discarded.
         * @private
         * @type {number}
         */
        this._lastN = undefined;

        /**
         * Defines the forwarded sources list. It can be null or an array once
         * initialised with a channel forwarded sources event.
         *
         * @type {Array<string>|null}
         * @private
         */
        this._forwardedSources = null;

        // The forwarded sources change listener.
        this._forwardedSourcesChangeListener = this._onForwardedSourcesChanged.bind(this);
        this._onDeviceListChanged = this._onDeviceListChanged.bind(this);
        this._updateAudioOutputForAudioTracks = this._updateAudioOutputForAudioTracks.bind(this);

        // Switch audio output device on all remote audio tracks. Local audio
        // tracks handle this event by themselves.
        if (RTCUtils.isDeviceChangeAvailable('output')) {
            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._updateAudioOutputForAudioTracks
            );
            RTCUtils.addListener(
                RTCEvents.DEVICE_LIST_CHANGED,
                this._onDeviceListChanged
            );
        }
    }

    /**
     * Removes any listeners and stored state from this {@code RTC} instance.
     *
     * @returns {void}
     */
    destroy() {
        RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED, this._updateAudioOutputForAudioTracks);
        RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED, this._onDeviceListChanged);

        if (this._channelOpenListener) {
            this.removeListener(RTCEvents.DATA_CHANNEL_OPEN, this._channelOpenListener);
        }
    }

    /**
     * Exposes the private helper for converting a WebRTC MediaStream to a
     * JitsiLocalTrack.
     *
     * @param {Array<Object>} tracksInfo
     * @returns {Array<JitsiLocalTrack>}
     */
    static createLocalTracks(tracksInfo) {
        return _createLocalTracks(tracksInfo);
    }

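    // Usage sketch (illustrative, not part of the original file): wrapping a
    // MediaStream obtained elsewhere into JitsiLocalTracks. `gumStream` is an
    // assumed, pre-acquired MediaStream.
    //
    //     const [ audioTrack ] = RTC.createLocalTracks([ {
    //         stream: gumStream,
    //         track: gumStream.getAudioTracks()[0]
    //     } ]);
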
    /**
     * Creates the local MediaStreams.
     * @param {object} [options] Optional parameters.
     * @param {Array=} options.devices The devices that will be requested.
     * @param {string=} options.resolution Resolution constraints.
     * @param {string=} options.cameraDeviceId
     * @param {string=} options.micDeviceId
     * @returns {Promise<Array<JitsiLocalTrack>>} A Promise that resolves with the new JitsiTracks.
     */
    static obtainAudioAndVideoPermissions(options) {
        return RTCUtils.obtainAudioAndVideoPermissions(options)
            .then(tracksInfo => _createLocalTracks(tracksInfo));
    }

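    // Usage sketch (illustrative): requesting camera and microphone tracks.
    // The exact option set accepted by RTCUtils may differ by version.
    //
    //     RTC.obtainAudioAndVideoPermissions({
    //         devices: [ 'audio', 'video' ],
    //         cameraDeviceId: 'default',
    //         micDeviceId: 'default'
    //     }).then(tracks => tracks.forEach(t => console.log(t.getType())));
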
    /**
     * Initializes the bridge channel of this instance.
     * At least one of the two parameters, peerconnection or wsUrl, must be
     * given.
     * @param {RTCPeerConnection} [peerconnection] WebRTC peer connection
     * instance.
     * @param {string} [wsUrl] WebSocket URL.
     */
    initializeBridgeChannel(peerconnection, wsUrl) {
        this._channel = new BridgeChannel(peerconnection, wsUrl, this.eventEmitter, this.conference);

        this._channelOpenListener = () => {
            const logError = (error, msgType, value) => {
                logger.error(`Cannot send ${msgType}(${JSON.stringify(value)}) endpoint message`, error);
            };

            // When the channel becomes available, tell the bridge about video
            // selections so that it can do adaptive simulcast. We want the
            // notification to trigger even if userJid is undefined or null.
            if (this._receiverVideoConstraints) {
                try {
                    this._channel.sendReceiverVideoConstraintsMessage(this._receiverVideoConstraints);
                } catch (error) {
                    logError(error, 'ReceiverVideoConstraints', this._receiverVideoConstraints);
                }
            }
            if (typeof this._lastN !== 'undefined' && this._lastN !== -1) {
                try {
                    this._channel.sendSetLastNMessage(this._lastN);
                } catch (error) {
                    logError(error, 'LastNChangedEvent', this._lastN);
                }
            }
        };
        this.addListener(RTCEvents.DATA_CHANNEL_OPEN, this._channelOpenListener);

        // Add forwarded sources change listener.
        this.addListener(RTCEvents.FORWARDED_SOURCES_CHANGED, this._forwardedSourcesChangeListener);
    }

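    // Usage sketch (illustrative): opening the bridge channel over a
    // WebSocket. The URL would normally come from session negotiation;
    // alternatively a peer connection can be passed so an SCTP data channel
    // is used instead. `rtc` is an assumed RTC instance.
    //
    //     rtc.initializeBridgeChannel(null, 'wss://bridge.example.com/colibri-ws/...');
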
    /**
     * Callback invoked when the list of known audio and video devices has
     * been updated. Attempts to update the known available audio output
     * devices.
     *
     * @private
     * @returns {void}
     */
    _onDeviceListChanged() {
        this._updateAudioOutputForAudioTracks(RTCUtils.getAudioOutputDevice());
    }

    /**
     * Receives events when the forwarded sources have changed.
     *
     * @param {array} forwardedSources The new forwarded sources.
     * @private
     */
    _onForwardedSourcesChanged(forwardedSources = []) {
        const oldForwardedSources = this._forwardedSources || [];
        let leavingForwardedSources = [];
        let enteringForwardedSources = [];
        const timestamp = Date.now();

        this._forwardedSources = forwardedSources;
        leavingForwardedSources = oldForwardedSources.filter(sourceName => !this.isInForwardedSources(sourceName));
        enteringForwardedSources = forwardedSources.filter(
            sourceName => oldForwardedSources.indexOf(sourceName) === -1);

        logger.debug(`Forwarded sources changed leaving=${leavingForwardedSources}, entering=`
            + `${enteringForwardedSources} at ${timestamp}`);
        this.conference.eventEmitter.emit(
            JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED,
            leavingForwardedSources,
            enteringForwardedSources,
            timestamp);
    }

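    // Consumer-side sketch (illustrative): reacting to the conference-level
    // event emitted above. `conference` is an assumed JitsiConference
    // instance.
    //
    //     conference.on(
    //         JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED,
    //         (leaving, entering, timestamp) =>
    //             console.log(`entering=${entering} leaving=${leaving} at ${timestamp}`));
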
    /**
     * Should be called when the current media session ends and after the
     * PeerConnection has been closed using the PeerConnection.close() method.
     */
    onCallEnded() {
        if (this._channel) {
            // The BridgeChannel is not explicitly closed as the PeerConnection
            // is closed on call ended which triggers datachannel onclose
            // events. If using a WebSocket, the channel must be closed since
            // it is not managed by the PeerConnection.
            // The reference is cleared to disable any logic related to the
            // channel.
            if (this._channel.mode === 'websocket') {
                this._channel.close();
            }

            this._channel = null;
        }
    }

    /**
     * Sets the capture frame rate to be used for desktop tracks.
     *
     * @param {number} maxFps framerate to be used for desktop track capture.
     */
    setDesktopSharingFrameRate(maxFps) {
        RTCUtils.setDesktopSharingFrameRate(maxFps);
    }

    /**
     * Sets the receiver video constraints that determine how bitrate is allocated to each of the video streams
     * requested from the bridge. The constraints are cached and sent through the bridge channel once the channel
     * is established.
     * @param {Object} constraints The video constraints.
     */
    setReceiverVideoConstraints(constraints) {
        if (isEqual(this._receiverVideoConstraints, constraints)) {
            return;
        }

        this._receiverVideoConstraints = cloneDeep(constraints);

        if (this._channel && this._channel.isOpen()) {
            this._channel.sendReceiverVideoConstraintsMessage(constraints);
        }
    }

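    // Usage sketch (illustrative): a constraints object in the general shape
    // the bridge understands. The field names follow the
    // ReceiverVideoConstraints format and may vary across bridge versions;
    // the source name key is hypothetical.
    //
    //     rtc.setReceiverVideoConstraints({
    //         lastN: 5,
    //         defaultConstraints: { maxHeight: 180 },
    //         constraints: { 'participantA-v0': { maxHeight: 720 } }
    //     });
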
    /**
     * Sends the track's video type to the JVB.
     * @param {SourceName} sourceName - the track's source name.
     * @param {BridgeVideoType} videoType - the track's video type.
     */
    sendSourceVideoType(sourceName, videoType) {
        if (this._channel && this._channel.isOpen()) {
            this._channel.sendSourceVideoTypeMessage(sourceName, videoType);
        }
    }

    /**
     * Registers the given listener for the given RTCUtils event.
     *
     * @param eventType The event type.
     * @param listener The listener to register.
     */
    static addListener(eventType, listener) {
        RTCUtils.addListener(eventType, listener);
    }

    /**
     * Removes the given listener for the given RTCUtils event.
     *
     * @param eventType The event type.
     * @param listener The listener to remove.
     */
    static removeListener(eventType, listener) {
        RTCUtils.removeListener(eventType, listener);
    }

    /**
     * Initializes RTCUtils with the given options.
     *
     * @param {object} [options] The initialization options.
     */
    static init(options = {}) {
        this.options = options;

        return RTCUtils.init(this.options);
    }

    /* eslint-disable max-params */
    /**
     * Creates a new <tt>TraceablePeerConnection</tt>.
     * @param {SignalingLayer} signaling The signaling layer that will provide information about the media or
     * participants which is not carried over SDP.
     * @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
     * @param {boolean} isP2P Indicates whether or not the new TPC will be used in a peer to peer type of session.
     * @param {object} options The config options.
     * @param {Object} options.audioQuality - Quality settings to be applied on the outbound audio stream.
     * @param {boolean} options.capScreenshareBitrate if set to true, lower layers will be disabled for screenshare.
     * @param {Array<CodecMimeType>} options.codecSettings - codec settings to be applied for video streams.
     * @param {boolean} options.disableSimulcast if set to 'true' will disable the simulcast.
     * @param {boolean} options.disableRtx if set to 'true' will disable the RTX.
     * @param {boolean} options.enableInsertableStreams set to true when the insertable streams constraints is to be
     * enabled on the PeerConnection.
     * @param {boolean} options.forceTurnRelay If set to true, the browser will generate only Relay ICE candidates.
     * @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
     * @param {Object} options.videoQuality - Quality settings to be applied on the outbound video streams.
     * @return {TraceablePeerConnection}
     */
    createPeerConnection(signaling, pcConfig, isP2P, options) {
        const pcConstraints = {};

        if (options.enableInsertableStreams) {
            logger.debug('E2EE - setting insertable streams constraints');
            pcConfig.encodedInsertableStreams = true;
        }

        if (options.forceTurnRelay) {
            pcConfig.iceTransportPolicy = 'relay';
        }

        // Set the RTCBundlePolicy to max-bundle so that only one set of ice candidates is generated.
        // The default policy generates separate ice candidates for audio and video connections.
        // This change is necessary for Unified plan to work properly on Chrome and Safari.
        pcConfig.bundlePolicy = 'max-bundle';

        peerConnectionIdCounter = safeCounterIncrement(peerConnectionIdCounter);

        const newConnection
            = new TraceablePeerConnection(
                this,
                peerConnectionIdCounter,
                signaling,
                pcConfig, pcConstraints,
                isP2P, options);

        this.peerConnections.set(newConnection.id, newConnection);

        return newConnection;
    }
    /* eslint-enable max-params */

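    // Usage sketch (illustrative): creating a P2P connection with simulcast
    // and RTX left enabled. `signalingLayer` is an assumed SignalingLayer
    // instance provided by the session.
    //
    //     const tpc = rtc.createPeerConnection(
    //         signalingLayer,
    //         { iceServers: [] },
    //         true /* isP2P */,
    //         { disableSimulcast: false, disableRtx: false });
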
    /**
     * Removes the given peer connection from this RTC module instance.
     * @param {TraceablePeerConnection} traceablePeerConnection
     * @return {boolean} <tt>true</tt> if the given peer connection was removed
     * successfully or <tt>false</tt> if there was no peer connection mapped in
     * this RTC instance.
     */
    _removePeerConnection(traceablePeerConnection) {
        const id = traceablePeerConnection.id;

        if (this.peerConnections.has(id)) {
            // NOTE Remote tracks are not removed here.
            this.peerConnections.delete(id);

            return true;
        }

        return false;
    }

    /**
     * Adds the given local track to this RTC instance and associates it with
     * the conference.
     *
     * @param track The local track to add.
     */
    addLocalTrack(track) {
        if (!track) {
            throw new Error('track must not be null or undefined');
        }

        this.localTracks.push(track);

        track.conference = this.conference;
    }

    /**
     * Get forwarded sources list.
     * @returns {Array<string>|null}
     */
    getForwardedSources() {
        return this._forwardedSources;
    }

    /**
     * Get local video track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalVideoTrack() {
        const localVideo = this.getLocalTracks(MediaType.VIDEO);

        return localVideo.length ? localVideo[0] : undefined;
    }

    /**
     * Returns all the local video tracks.
     * @returns {Array<JitsiLocalTrack>}
     */
    getLocalVideoTracks() {
        return this.getLocalTracks(MediaType.VIDEO);
    }

    /**
     * Get local audio track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalAudioTrack() {
        const localAudio = this.getLocalTracks(MediaType.AUDIO);

        return localAudio.length ? localAudio[0] : undefined;
    }

    /**
     * Returns the endpoint id for the local user.
     * @returns {string}
     */
    getLocalEndpointId() {
        return this.conference.myUserId();
    }

    /**
     * Returns the local tracks of the given media type, or all local tracks if
     * no specific type is given.
     * @param {MediaType} [mediaType] Optional media type filter (audio or video).
     * @returns {Array<JitsiLocalTrack>}
     */
    getLocalTracks(mediaType) {
        let tracks = this.localTracks.slice();

        if (mediaType !== undefined) {
            tracks = tracks.filter(track => track.getType() === mediaType);
        }

        return tracks;
    }

    /**
     * Obtains all remote tracks currently known to this RTC module instance.
     * @param {MediaType} [mediaType] The remote tracks will be filtered
     * by their media type if this argument is specified.
     * @return {Array<JitsiRemoteTrack>}
     */
    getRemoteTracks(mediaType) {
        let remoteTracks = [];

        for (const tpc of this.peerConnections.values()) {
            const pcRemoteTracks = tpc.getRemoteTracks(undefined, mediaType);

            if (pcRemoteTracks) {
                remoteTracks = remoteTracks.concat(pcRemoteTracks);
            }
        }

        return remoteTracks;
    }

    /**
     * Sets mute for all local audio streams attached to the conference.
     * @param value The mute value.
     * @returns {Promise} A Promise resolved once all mute/unmute operations complete.
     */
    setAudioMute(value) {
        const mutePromises = [];

        this.getLocalTracks(MediaType.AUDIO).forEach(audioTrack => {
            // this is a Promise
            mutePromises.push(value ? audioTrack.mute() : audioTrack.unmute());
        });

        // We return a Promise from all Promises so we can wait for their
        // execution.
        return Promise.all(mutePromises);
    }

    /**
     * Sets mute for all local video streams attached to the conference.
     * @param value The mute value.
     * @returns {Promise} A Promise resolved once all mute/unmute operations complete.
     */
    setVideoMute(value) {
        const mutePromises = [];

        this.getLocalTracks(MediaType.VIDEO).forEach(videoTrack => {
            // this is a Promise
            mutePromises.push(value ? videoTrack.mute() : videoTrack.unmute());
        });

        // We return a Promise from all Promises so we can wait for their
        // execution.
        return Promise.all(mutePromises);
    }

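    // Usage sketch (illustrative): muting all local media before entering a
    // "listen-only" mode.
    //
    //     Promise.all([ rtc.setAudioMute(true), rtc.setVideoMute(true) ])
    //         .then(() => console.log('all local tracks muted'));
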
    /**
     * Removes the given local track from this RTC instance, if present.
     *
     * @param track The local track to remove.
     */
    removeLocalTrack(track) {
        const pos = this.localTracks.indexOf(track);

        if (pos === -1) {
            return;
        }

        this.localTracks.splice(pos, 1);
    }

    /**
     * Attaches the given media stream to the element(s) matched by the given
     * selector, delegating to RTCUtils.
     *
     * @param elSelector The selector for the element(s) to attach the stream to.
     * @param stream The media stream to attach.
     */
    static attachMediaStream(elSelector, stream) {
        return RTCUtils.attachMediaStream(elSelector, stream);
    }

    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] Type of device to change: undefined or
     * 'input' for input devices, 'output' for audio output devices.
     * @returns {boolean} true if available, false otherwise.
     */
    static isDeviceChangeAvailable(deviceType) {
        return RTCUtils.isDeviceChangeAvailable(deviceType);
    }

    /**
     * Returns whether the current execution environment supports WebRTC (for
     * use within this library).
     *
     * @returns {boolean} {@code true} if WebRTC is supported in the current
     * execution environment (for use within this library); {@code false},
     * otherwise.
     */
    static isWebRtcSupported() {
        return browser.isSupported();
    }

    /**
     * Returns the currently used audio output device id, where '' stands for
     * the default device.
     * @returns {string}
     */
    static getAudioOutputDevice() {
        return RTCUtils.getAudioOutputDevice();
    }

    /**
     * Returns the list of available media devices if it has been obtained;
     * otherwise an empty array is returned.
     * @returns {array} The list of available media devices.
     */
    static getCurrentlyAvailableMediaDevices() {
        return RTCUtils.getCurrentlyAvailableMediaDevices();
    }

    /**
     * Returns event data for device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    static getEventDataForActiveDevice(device) {
        return RTCUtils.getEventDataForActiveDevice(device);
    }

    /**
     * Sets the current audio output device.
     * @param {string} deviceId Id of an 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices().
     * @returns {Promise} Resolves when the audio output is changed and is
     * rejected otherwise.
     */
    static setAudioOutputDevice(deviceId) {
        return RTCUtils.setAudioOutputDevice(deviceId);
    }

    /**
     * Allows receiving a list of available cameras/microphones.
     * @param {function} callback Receives an array of devices as an argument.
     */
    static enumerateDevices(callback) {
        RTCUtils.enumerateDevices(callback);
    }

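    // Usage sketch (illustrative): picking the first audio output device and
    // routing remote audio to it.
    //
    //     RTC.enumerateDevices(devices => {
    //         const speaker = devices.find(d => d.kind === 'audiooutput');
    //
    //         speaker && RTC.setAudioOutputDevice(speaker.deviceId);
    //     });
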
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param {MediaStream} mediaStream MediaStream object to stop.
     */
    static stopMediaStream(mediaStream) {
        RTCUtils.stopMediaStream(mediaStream);
    }

    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    static isDesktopSharingEnabled() {
        return RTCUtils.isDesktopSharingEnabled();
    }

    /**
     * Closes the currently opened bridge channel.
     */
    closeBridgeChannel() {
        if (this._channel) {
            this._channel.close();
            this._channel = null;
        }
    }

    /* eslint-disable max-params */
    /**
     * Sets the audio level of the track associated with the given SSRC on the
     * given peer connection.
     *
     * @param {TraceablePeerConnection} tpc The peer connection that reported the audio level.
     * @param {number} ssrc The SSRC identifying the track.
     * @param {number} audioLevel The new audio level.
     * @param {boolean} isLocal Whether the SSRC is expected to belong to a local track.
     */
    setAudioLevel(tpc, ssrc, audioLevel, isLocal) {
        const track = tpc.getTrackBySSRC(ssrc);

        if (!track) {
            return;
        } else if (!track.isAudioTrack()) {
            logger.warn(`Received audio level for non-audio track: ${ssrc}`);

            return;
        } else if (track.isLocal() !== isLocal) {
            logger.error(`${track} was expected to ${isLocal ? 'be' : 'not be'} local`);
        }

        track.setAudioLevel(audioLevel, tpc);
    }
    /* eslint-enable max-params */

    /**
     * Sends a message via the bridge channel.
     * @param {string} to The id of the endpoint that should receive the
     * message. If "" the message will be sent to all participants.
     * @param {object} payload The payload of the message.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails or there is no data channel created.
     */
    sendChannelMessage(to, payload) {
        if (this._channel) {
            this._channel.sendMessage(to, payload);
        } else {
            throw new Error('BridgeChannel has not been initialized yet');
        }
    }

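    // Usage sketch (illustrative): broadcasting a message to all endpoints
    // and handling the case where the channel is not ready yet.
    //
    //     try {
    //         rtc.sendChannelMessage('', { type: 'hello', ts: Date.now() });
    //     } catch (e) {
    //         console.warn('bridge channel not ready', e);
    //     }
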
    /**
     * Sends the local stats via the bridge channel.
     * @param {Object} payload The payload of the message.
     * @throws NetworkError/InvalidStateError/Error if the operation fails or if there is no data channel created.
     */
    sendEndpointStatsMessage(payload) {
        if (this._channel && this._channel.isOpen()) {
            this._channel.sendEndpointStatsMessage(payload);
        }
    }

    /**
     * Selects a new value for "lastN". The requested number of videos is going
     * to be delivered after the value is in effect. Set to -1 for unlimited or
     * all available videos.
     * @param {number} value the new value for lastN.
     */
    setLastN(value) {
        if (this._lastN !== value) {
            this._lastN = value;

            if (this._channel && this._channel.isOpen()) {
                this._channel.sendSetLastNMessage(value);
            }

            this.eventEmitter.emit(RTCEvents.LASTN_VALUE_CHANGED, value);
        }
    }

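    // Usage sketch (illustrative): request at most 5 remote video streams, or
    // lift the limit entirely.
    //
    //     rtc.setLastN(5);
    //     rtc.setLastN(-1); // unlimited
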
    /**
     * Indicates whether the given source name is currently included in the
     * forwarded sources.
     *
     * @param {string} sourceName The source name to check.
     * @returns {boolean} true if the source name is in the forwarded sources
     * or if forwarded sources are not supported (e.g. no bridge channel);
     * false otherwise.
     */
    isInForwardedSources(sourceName) {
        return !this._forwardedSources // forwardedSources not initialised yet
            || this._forwardedSources.indexOf(sourceName) > -1;
    }

    /**
     * Updates the target audio output device for all remote audio tracks.
     *
     * @param {string} deviceId - The device id of the audio output device to
     * use for all remote tracks.
     * @private
     * @returns {void}
     */
    _updateAudioOutputForAudioTracks(deviceId) {
        const remoteAudioTracks = this.getRemoteTracks(MediaType.AUDIO);

        for (const track of remoteAudioTracks) {
            track.setAudioOutput(deviceId);
        }
    }
}