You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

RTC.js 32KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001
  1. import { getLogger } from '@jitsi/logger';
  2. import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
  3. import BridgeVideoType from '../../service/RTC/BridgeVideoType';
  4. import { MediaType } from '../../service/RTC/MediaType';
  5. import RTCEvents from '../../service/RTC/RTCEvents';
  6. import browser from '../browser';
  7. import FeatureFlags from '../flags/FeatureFlags';
  8. import Statistics from '../statistics/statistics';
  9. import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
  10. import Listenable from '../util/Listenable';
  11. import { safeCounterIncrement } from '../util/MathUtil';
  12. import BridgeChannel from './BridgeChannel';
  13. import JitsiLocalTrack from './JitsiLocalTrack';
  14. import RTCUtils from './RTCUtils';
  15. import TraceablePeerConnection from './TraceablePeerConnection';
  16. const logger = getLogger(__filename);
  17. /**
  18. * The counter used to generated id numbers assigned to peer connections
  19. * @type {number}
  20. */
  21. let peerConnectionIdCounter = 0;
  22. /**
  23. * The counter used to generate id number for the local
  24. * <code>MediaStreamTrack</code>s.
  25. * @type {number}
  26. */
  27. let rtcTrackIdCounter = 0;
  28. /**
  29. * Creates {@code JitsiLocalTrack} instances from the passed in meta information
  30. * about MedieaTracks.
  31. *
  32. * @param {Object[]} mediaStreamMetaData - An array of meta information with
  33. * MediaTrack instances. Each can look like:
  34. * {{
  35. * stream: MediaStream instance that holds a track with audio or video,
  36. * track: MediaTrack within the MediaStream,
  37. * videoType: "camera" or "desktop" or falsy,
  38. * sourceId: ID of the desktopsharing source,
  39. * sourceType: The desktopsharing source type,
  40. * effects: Array of effect types
  41. * }}
  42. */
  43. function _createLocalTracks(mediaStreamMetaData = []) {
  44. return mediaStreamMetaData.map(metaData => {
  45. const {
  46. sourceId,
  47. sourceType,
  48. stream,
  49. track,
  50. videoType,
  51. effects
  52. } = metaData;
  53. const { deviceId, facingMode } = track.getSettings();
  54. // FIXME Move rtcTrackIdCounter to a static method in JitsiLocalTrack
  55. // so RTC does not need to handle ID management. This move would be
  56. // safer to do once the old createLocalTracks is removed.
  57. rtcTrackIdCounter = safeCounterIncrement(rtcTrackIdCounter);
  58. return new JitsiLocalTrack({
  59. deviceId,
  60. facingMode,
  61. mediaType: track.kind,
  62. rtcId: rtcTrackIdCounter,
  63. sourceId,
  64. sourceType,
  65. stream,
  66. track,
  67. videoType: videoType || null,
  68. effects
  69. });
  70. });
  71. }
  72. /**
  73. *
  74. */
  75. export default class RTC extends Listenable {
  76. /**
  77. *
  78. * @param conference
  79. * @param options
  80. */
  81. constructor(conference, options = {}) {
  82. super();
  83. this.conference = conference;
  84. /**
  85. * A map of active <tt>TraceablePeerConnection</tt>.
  86. * @type {Map.<number, TraceablePeerConnection>}
  87. */
  88. this.peerConnections = new Map();
  89. this.localTracks = [];
  90. this.options = options;
  91. // BridgeChannel instance.
  92. // @private
  93. // @type {BridgeChannel}
  94. this._channel = null;
  95. /**
  96. * The value specified to the last invocation of setLastN before the
  97. * channel completed opening. If non-null, the value will be sent
  98. * through a channel (once) as soon as it opens and will then be
  99. * discarded.
  100. * @private
  101. * @type {number}
  102. */
  103. this._lastN = undefined;
  104. /**
  105. * Defines the last N endpoints list. It can be null or an array once
  106. * initialised with a channel last N event.
  107. * @type {Array<string>|null}
  108. * @private
  109. */
  110. this._lastNEndpoints = null;
  111. /**
  112. * Defines the forwarded sources list. It can be null or an array once initialised with a channel forwarded
  113. * sources event.
  114. *
  115. * @type {Array<string>|null}
  116. * @private
  117. */
  118. this._forwardedSources = null;
  119. /**
  120. * The number representing the maximum video height the local client
  121. * should receive from the bridge.
  122. *
  123. * @type {number|undefined}
  124. * @private
  125. */
  126. this._maxFrameHeight = undefined;
  127. /**
  128. * The endpoint IDs of currently selected participants.
  129. *
  130. * @type {Array}
  131. * @private
  132. */
  133. this._selectedEndpoints = null;
  134. // The last N change listener.
  135. this._lastNChangeListener = this._onLastNChanged.bind(this);
  136. // The forwarded sources change listener.
  137. this._forwardedSourcesChangeListener = this._onForwardedSourcesChanged.bind(this);
  138. this._onDeviceListChanged = this._onDeviceListChanged.bind(this);
  139. this._updateAudioOutputForAudioTracks
  140. = this._updateAudioOutputForAudioTracks.bind(this);
  141. /**
  142. * The default video type assumed by the bridge.
  143. * @deprecated this will go away with multiple streams support
  144. * @type {BridgeVideoType}
  145. * @private
  146. */
  147. this._videoType = BridgeVideoType.NONE;
  148. // Switch audio output device on all remote audio tracks. Local audio
  149. // tracks handle this event by themselves.
  150. if (RTCUtils.isDeviceChangeAvailable('output')) {
  151. RTCUtils.addListener(
  152. RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
  153. this._updateAudioOutputForAudioTracks
  154. );
  155. RTCUtils.addListener(
  156. RTCEvents.DEVICE_LIST_CHANGED,
  157. this._onDeviceListChanged
  158. );
  159. }
  160. }
  161. /**
  162. * Removes any listeners and stored state from this {@code RTC} instance.
  163. *
  164. * @returns {void}
  165. */
  166. destroy() {
  167. RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED, this._updateAudioOutputForAudioTracks);
  168. RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED, this._onDeviceListChanged);
  169. if (this._channelOpenListener) {
  170. this.removeListener(RTCEvents.DATA_CHANNEL_OPEN, this._channelOpenListener);
  171. }
  172. }
  173. /**
  174. * Exposes the private helper for converting a WebRTC MediaStream to a
  175. * JitsiLocalTrack.
  176. *
  177. * @param {Array<Object>} tracksInfo
  178. * @returns {Array<JitsiLocalTrack>}
  179. */
  180. static createLocalTracks(tracksInfo) {
  181. return _createLocalTracks(tracksInfo);
  182. }
  183. /**
  184. * Creates the local MediaStreams.
  185. * @param {object} [options] Optional parameters.
  186. * @param {array} options.devices The devices that will be requested.
  187. * @param {string} options.resolution Resolution constraints.
  188. * @param {string} options.cameraDeviceId
  189. * @param {string} options.micDeviceId
  190. * @returns {*} Promise object that will receive the new JitsiTracks
  191. */
  192. static obtainAudioAndVideoPermissions(options) {
  193. return RTCUtils.obtainAudioAndVideoPermissions(options)
  194. .then(tracksInfo => _createLocalTracks(tracksInfo));
  195. }
  196. /**
  197. * Initializes the bridge channel of this instance.
  198. * At least one of both, peerconnection or wsUrl parameters, must be
  199. * given.
  200. * @param {RTCPeerConnection} [peerconnection] WebRTC peer connection
  201. * instance.
  202. * @param {string} [wsUrl] WebSocket URL.
  203. */
  204. initializeBridgeChannel(peerconnection, wsUrl) {
  205. this._channel = new BridgeChannel(peerconnection, wsUrl, this.eventEmitter);
  206. this._channelOpenListener = () => {
  207. const logError = (error, msgType, value) => {
  208. GlobalOnErrorHandler.callErrorHandler(error);
  209. logger.error(`Cannot send ${msgType}(${JSON.stringify(value)}) endpoint message`, error);
  210. };
  211. // When the channel becomes available, tell the bridge about video selections so that it can do adaptive
  212. // simulcast, we want the notification to trigger even if userJid is undefined, or null.
  213. if (this._receiverVideoConstraints) {
  214. try {
  215. this._channel.sendNewReceiverVideoConstraintsMessage(this._receiverVideoConstraints);
  216. } catch (error) {
  217. logError(error, 'ReceiverVideoConstraints', this._receiverVideoConstraints);
  218. }
  219. }
  220. if (this._selectedEndpoints) {
  221. try {
  222. this._channel.sendSelectedEndpointsMessage(this._selectedEndpoints);
  223. } catch (error) {
  224. logError(error, 'SelectedEndpointsChangedEvent', this._selectedEndpoints);
  225. }
  226. }
  227. if (typeof this._maxFrameHeight !== 'undefined') {
  228. try {
  229. this._channel.sendReceiverVideoConstraintMessage(this._maxFrameHeight);
  230. } catch (error) {
  231. logError(error, 'ReceiverVideoConstraint', this._maxFrameHeight);
  232. }
  233. }
  234. if (typeof this._lastN !== 'undefined' && this._lastN !== -1) {
  235. try {
  236. this._channel.sendSetLastNMessage(this._lastN);
  237. } catch (error) {
  238. logError(error, 'LastNChangedEvent', this._lastN);
  239. }
  240. }
  241. if (!FeatureFlags.isSourceNameSignalingEnabled()) {
  242. try {
  243. this._channel.sendVideoTypeMessage(this._videoType);
  244. } catch (error) {
  245. logError(error, 'VideoTypeMessage', this._videoType);
  246. }
  247. }
  248. };
  249. this.addListener(RTCEvents.DATA_CHANNEL_OPEN, this._channelOpenListener);
  250. // Add Last N change listener.
  251. this.addListener(RTCEvents.LASTN_ENDPOINT_CHANGED, this._lastNChangeListener);
  252. if (FeatureFlags.isSourceNameSignalingEnabled()) {
  253. // Add forwarded sources change listener.
  254. this.addListener(RTCEvents.FORWARDED_SOURCES_CHANGED, this._forwardedSourcesChangeListener);
  255. }
  256. }
  257. /**
  258. * Callback invoked when the list of known audio and video devices has
  259. * been updated. Attempts to update the known available audio output
  260. * devices.
  261. *
  262. * @private
  263. * @returns {void}
  264. */
  265. _onDeviceListChanged() {
  266. this._updateAudioOutputForAudioTracks(RTCUtils.getAudioOutputDevice());
  267. }
  268. /**
  269. * Receives events when Last N had changed.
  270. * @param {array} lastNEndpoints The new Last N endpoints.
  271. * @private
  272. */
  273. _onLastNChanged(lastNEndpoints = []) {
  274. const oldLastNEndpoints = this._lastNEndpoints || [];
  275. let leavingLastNEndpoints = [];
  276. let enteringLastNEndpoints = [];
  277. this._lastNEndpoints = lastNEndpoints;
  278. leavingLastNEndpoints = oldLastNEndpoints.filter(
  279. id => !this.isInLastN(id));
  280. enteringLastNEndpoints = lastNEndpoints.filter(
  281. id => oldLastNEndpoints.indexOf(id) === -1);
  282. this.conference.eventEmitter.emit(
  283. JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
  284. leavingLastNEndpoints,
  285. enteringLastNEndpoints);
  286. }
  287. /**
  288. * Receives events when forwarded sources had changed.
  289. *
  290. * @param {array} forwardedSources The new forwarded sources.
  291. * @private
  292. */
  293. _onForwardedSourcesChanged(forwardedSources = []) {
  294. const oldForwardedSources = this._forwardedSources || [];
  295. let leavingForwardedSources = [];
  296. let enteringForwardedSources = [];
  297. this._forwardedSources = forwardedSources;
  298. leavingForwardedSources = oldForwardedSources.filter(sourceName => !this.isInForwardedSources(sourceName));
  299. enteringForwardedSources = forwardedSources.filter(
  300. sourceName => oldForwardedSources.indexOf(sourceName) === -1);
  301. this.conference.eventEmitter.emit(
  302. JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED,
  303. leavingForwardedSources,
  304. enteringForwardedSources,
  305. Date.now());
  306. }
  307. /**
  308. * Should be called when current media session ends and after the
  309. * PeerConnection has been closed using PeerConnection.close() method.
  310. */
  311. onCallEnded() {
  312. if (this._channel) {
  313. // The BridgeChannel is not explicitly closed as the PeerConnection
  314. // is closed on call ended which triggers datachannel onclose
  315. // events. If using a WebSocket, the channel must be closed since
  316. // it is not managed by the PeerConnection.
  317. // The reference is cleared to disable any logic related to the
  318. // channel.
  319. if (this._channel && this._channel.mode === 'websocket') {
  320. this._channel.close();
  321. }
  322. this._channel = null;
  323. }
  324. }
  325. /**
  326. * Sets the capture frame rate to be used for desktop tracks.
  327. *
  328. * @param {number} maxFps framerate to be used for desktop track capture.
  329. */
  330. setDesktopSharingFrameRate(maxFps) {
  331. RTCUtils.setDesktopSharingFrameRate(maxFps);
  332. }
  333. /**
  334. * Sets the receiver video constraints that determine how bitrate is allocated to each of the video streams
  335. * requested from the bridge. The constraints are cached and sent through the bridge channel once the channel
  336. * is established.
  337. * @param {*} constraints
  338. */
  339. setNewReceiverVideoConstraints(constraints) {
  340. this._receiverVideoConstraints = constraints;
  341. if (this._channel && this._channel.isOpen()) {
  342. this._channel.sendNewReceiverVideoConstraintsMessage(constraints);
  343. }
  344. }
  345. /**
  346. * Sets the maximum video size the local participant should receive from
  347. * remote participants. Will cache the value and send it through the channel
  348. * once it is created.
  349. *
  350. * @param {number} maxFrameHeightPixels the maximum frame height, in pixels,
  351. * this receiver is willing to receive.
  352. * @returns {void}
  353. */
  354. setReceiverVideoConstraint(maxFrameHeight) {
  355. this._maxFrameHeight = maxFrameHeight;
  356. if (this._channel && this._channel.isOpen()) {
  357. this._channel.sendReceiverVideoConstraintMessage(maxFrameHeight);
  358. }
  359. }
  360. /**
  361. * Sets the video type and availability for the local video source.
  362. *
  363. * @param {string} videoType 'camera' for camera, 'desktop' for screenshare and
  364. * 'none' for when local video source is muted or removed from the peerconnection.
  365. * @returns {void}
  366. */
  367. setVideoType(videoType) {
  368. if (this._videoType !== videoType) {
  369. this._videoType = videoType;
  370. if (this._channel && this._channel.isOpen()) {
  371. this._channel.sendVideoTypeMessage(videoType);
  372. }
  373. }
  374. }
  375. /**
  376. * Sends the track's video type to the JVB.
  377. * @param {SourceName} sourceName - the track's source name.
  378. * @param {BridgeVideoType} videoType - the track's video type.
  379. */
  380. sendSourceVideoType(sourceName, videoType) {
  381. if (this._channel && this._channel.isOpen()) {
  382. this._channel.sendSourceVideoTypeMessage(sourceName, videoType);
  383. }
  384. }
  385. /**
  386. * Elects the participants with the given ids to be the selected
  387. * participants in order to always receive video for this participant (even
  388. * when last n is enabled). If there is no channel we store it and send it
  389. * through the channel once it is created.
  390. *
  391. * @param {Array<string>} ids - The user ids.
  392. * @throws NetworkError or InvalidStateError or Error if the operation
  393. * fails.
  394. * @returns {void}
  395. */
  396. selectEndpoints(ids) {
  397. this._selectedEndpoints = ids;
  398. if (this._channel && this._channel.isOpen()) {
  399. this._channel.sendSelectedEndpointsMessage(ids);
  400. }
  401. }
  402. /**
  403. *
  404. * @param eventType
  405. * @param listener
  406. */
  407. static addListener(eventType, listener) {
  408. RTCUtils.addListener(eventType, listener);
  409. }
  410. /**
  411. *
  412. * @param eventType
  413. * @param listener
  414. */
  415. static removeListener(eventType, listener) {
  416. RTCUtils.removeListener(eventType, listener);
  417. }
  418. /**
  419. *
  420. * @param options
  421. */
  422. static init(options = {}) {
  423. this.options = options;
  424. return RTCUtils.init(this.options);
  425. }
  426. /* eslint-disable max-params */
  427. /**
  428. * Creates new <tt>TraceablePeerConnection</tt>
  429. * @param {SignalingLayer} signaling The signaling layer that will provide information about the media or
  430. * participants which is not carried over SDP.
  431. * @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
  432. * @param {boolean} isP2P Indicates whether or not the new TPC will be used in a peer to peer type of session.
  433. * @param {object} options The config options.
  434. * @param {boolean} options.enableInsertableStreams - Set to true when the insertable streams constraints is to be
  435. * enabled on the PeerConnection.
  436. * @param {boolean} options.disableSimulcast If set to 'true' will disable the simulcast.
  437. * @param {boolean} options.disableRtx If set to 'true' will disable the RTX.
  438. * @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
  439. * @return {TraceablePeerConnection}
  440. */
  441. createPeerConnection(signaling, pcConfig, isP2P, options) {
  442. const pcConstraints = JSON.parse(JSON.stringify(RTCUtils.pcConstraints));
  443. if (typeof options.abtestSuspendVideo !== 'undefined') {
  444. RTCUtils.setSuspendVideo(pcConstraints, options.abtestSuspendVideo);
  445. Statistics.analytics.addPermanentProperties(
  446. { abtestSuspendVideo: options.abtestSuspendVideo });
  447. }
  448. if (options.enableInsertableStreams) {
  449. logger.debug('E2EE - setting insertable streams constraints');
  450. pcConfig.encodedInsertableStreams = true;
  451. }
  452. const supportsSdpSemantics = browser.isReactNative()
  453. || (browser.isChromiumBased() && !options.usesUnifiedPlan);
  454. if (supportsSdpSemantics) {
  455. logger.debug('WebRTC application is running in plan-b mode');
  456. pcConfig.sdpSemantics = 'plan-b';
  457. }
  458. if (options.forceTurnRelay) {
  459. pcConfig.iceTransportPolicy = 'relay';
  460. }
  461. // Set the RTCBundlePolicy to max-bundle so that only one set of ice candidates is generated.
  462. // The default policy generates separate ice candidates for audio and video connections.
  463. // This change is necessary for Unified plan to work properly on Chrome and Safari.
  464. pcConfig.bundlePolicy = 'max-bundle';
  465. peerConnectionIdCounter = safeCounterIncrement(peerConnectionIdCounter);
  466. const newConnection
  467. = new TraceablePeerConnection(
  468. this,
  469. peerConnectionIdCounter,
  470. signaling,
  471. pcConfig, pcConstraints,
  472. isP2P, options);
  473. this.peerConnections.set(newConnection.id, newConnection);
  474. return newConnection;
  475. }
  476. /* eslint-enable max-params */
  477. /**
  478. * Removed given peer connection from this RTC module instance.
  479. * @param {TraceablePeerConnection} traceablePeerConnection
  480. * @return {boolean} <tt>true</tt> if the given peer connection was removed
  481. * successfully or <tt>false</tt> if there was no peer connection mapped in
  482. * this RTC instance.
  483. */
  484. _removePeerConnection(traceablePeerConnection) {
  485. const id = traceablePeerConnection.id;
  486. if (this.peerConnections.has(id)) {
  487. // NOTE Remote tracks are not removed here.
  488. this.peerConnections.delete(id);
  489. return true;
  490. }
  491. return false;
  492. }
  493. /**
  494. *
  495. * @param track
  496. */
  497. addLocalTrack(track) {
  498. if (!track) {
  499. throw new Error('track must not be null nor undefined');
  500. }
  501. this.localTracks.push(track);
  502. track.conference = this.conference;
  503. }
  504. /**
  505. * Get forwarded sources list.
  506. * @returns {Array<string>|null}
  507. */
  508. getForwardedSources() {
  509. return this._forwardedSources;
  510. }
  511. /**
  512. * Get local video track.
  513. * @returns {JitsiLocalTrack|undefined}
  514. */
  515. getLocalVideoTrack() {
  516. const localVideo = this.getLocalTracks(MediaType.VIDEO);
  517. return localVideo.length ? localVideo[0] : undefined;
  518. }
  519. /**
  520. * Returns all the local video tracks.
  521. * @returns {Array<JitsiLocalTrack>}
  522. */
  523. getLocalVideoTracks() {
  524. return this.getLocalTracks(MediaType.VIDEO);
  525. }
  526. /**
  527. * Get local audio track.
  528. * @returns {JitsiLocalTrack|undefined}
  529. */
  530. getLocalAudioTrack() {
  531. const localAudio = this.getLocalTracks(MediaType.AUDIO);
  532. return localAudio.length ? localAudio[0] : undefined;
  533. }
  534. /**
  535. * Returns the endpoint id for the local user.
  536. * @returns {string}
  537. */
  538. getLocalEndpointId() {
  539. return this.conference.myUserId();
  540. }
  541. /**
  542. * Returns the local tracks of the given media type, or all local tracks if
  543. * no specific type is given.
  544. * @param {MediaType} [mediaType] Optional media type filter.
  545. * (audio or video).
  546. */
  547. getLocalTracks(mediaType) {
  548. let tracks = this.localTracks.slice();
  549. if (mediaType !== undefined) {
  550. tracks = tracks.filter(
  551. track => track.getType() === mediaType);
  552. }
  553. return tracks;
  554. }
  555. /**
  556. * Obtains all remote tracks currently known to this RTC module instance.
  557. * @param {MediaType} [mediaType] The remote tracks will be filtered
  558. * by their media type if this argument is specified.
  559. * @return {Array<JitsiRemoteTrack>}
  560. */
  561. getRemoteTracks(mediaType) {
  562. let remoteTracks = [];
  563. for (const tpc of this.peerConnections.values()) {
  564. const pcRemoteTracks = tpc.getRemoteTracks(undefined, mediaType);
  565. if (pcRemoteTracks) {
  566. remoteTracks = remoteTracks.concat(pcRemoteTracks);
  567. }
  568. }
  569. return remoteTracks;
  570. }
  571. /**
  572. * Set mute for all local audio streams attached to the conference.
  573. * @param value The mute value.
  574. * @returns {Promise}
  575. */
  576. setAudioMute(value) {
  577. const mutePromises = [];
  578. this.getLocalTracks(MediaType.AUDIO).forEach(audioTrack => {
  579. // this is a Promise
  580. mutePromises.push(value ? audioTrack.mute() : audioTrack.unmute());
  581. });
  582. // We return a Promise from all Promises so we can wait for their
  583. // execution.
  584. return Promise.all(mutePromises);
  585. }
  586. /**
  587. * Set mute for all local video streams attached to the conference.
  588. * @param value The mute value.
  589. * @returns {Promise}
  590. */
  591. setVideoMute(value) {
  592. const mutePromises = [];
  593. this.getLocalTracks(MediaType.VIDEO).concat(this.getLocalTracks(MediaType.PRESENTER))
  594. .forEach(videoTrack => {
  595. // this is a Promise
  596. mutePromises.push(value ? videoTrack.mute() : videoTrack.unmute());
  597. });
  598. // We return a Promise from all Promises so we can wait for their
  599. // execution.
  600. return Promise.all(mutePromises);
  601. }
  602. /**
  603. *
  604. * @param track
  605. */
  606. removeLocalTrack(track) {
  607. const pos = this.localTracks.indexOf(track);
  608. if (pos === -1) {
  609. return;
  610. }
  611. this.localTracks.splice(pos, 1);
  612. }
  613. /**
  614. *
  615. * @param elSelector
  616. * @param stream
  617. */
  618. static attachMediaStream(elSelector, stream) {
  619. return RTCUtils.attachMediaStream(elSelector, stream);
  620. }
  621. /**
  622. * Returns the id of the given stream.
  623. * @param {MediaStream} stream
  624. */
  625. static getStreamID(stream) {
  626. return RTCUtils.getStreamID(stream);
  627. }
  628. /**
  629. * Returns the id of the given track.
  630. * @param {MediaStreamTrack} track
  631. */
  632. static getTrackID(track) {
  633. return RTCUtils.getTrackID(track);
  634. }
  635. /**
  636. * Returns true if retrieving the list of input devices is supported
  637. * and false if not.
  638. */
  639. static isDeviceListAvailable() {
  640. return RTCUtils.isDeviceListAvailable();
  641. }
  642. /**
  643. * Returns true if changing the input (camera / microphone) or output
  644. * (audio) device is supported and false if not.
  645. * @param {string} [deviceType] Type of device to change. Default is
  646. * undefined or 'input', 'output' - for audio output device change.
  647. * @returns {boolean} true if available, false otherwise.
  648. */
  649. static isDeviceChangeAvailable(deviceType) {
  650. return RTCUtils.isDeviceChangeAvailable(deviceType);
  651. }
  652. /**
  653. * Returns whether the current execution environment supports WebRTC (for
  654. * use within this library).
  655. *
  656. * @returns {boolean} {@code true} if WebRTC is supported in the current
  657. * execution environment (for use within this library); {@code false},
  658. * otherwise.
  659. */
  660. static isWebRtcSupported() {
  661. return browser.isSupported();
  662. }
  663. /**
  664. * Returns currently used audio output device id, '' stands for default
  665. * device
  666. * @returns {string}
  667. */
  668. static getAudioOutputDevice() {
  669. return RTCUtils.getAudioOutputDevice();
  670. }
  671. /**
  672. * Returns list of available media devices if its obtained, otherwise an
  673. * empty array is returned/
  674. * @returns {array} list of available media devices.
  675. */
  676. static getCurrentlyAvailableMediaDevices() {
  677. return RTCUtils.getCurrentlyAvailableMediaDevices();
  678. }
  679. /**
  680. * Returns whether available devices have permissions granted
  681. * @returns {Boolean}
  682. */
  683. static arePermissionsGrantedForAvailableDevices() {
  684. return RTCUtils.arePermissionsGrantedForAvailableDevices();
  685. }
  686. /**
  687. * Returns event data for device to be reported to stats.
  688. * @returns {MediaDeviceInfo} device.
  689. */
  690. static getEventDataForActiveDevice(device) {
  691. return RTCUtils.getEventDataForActiveDevice(device);
  692. }
  693. /**
  694. * Sets current audio output device.
  695. * @param {string} deviceId Id of 'audiooutput' device from
  696. * navigator.mediaDevices.enumerateDevices().
  697. * @returns {Promise} resolves when audio output is changed, is rejected
  698. * otherwise
  699. */
  700. static setAudioOutputDevice(deviceId) {
  701. return RTCUtils.setAudioOutputDevice(deviceId);
  702. }
  703. /**
  704. * Returns <tt>true<tt/> if given WebRTC MediaStream is considered a valid
  705. * "user" stream which means that it's not a "receive only" stream nor a
  706. * "mixed" JVB stream.
  707. *
  708. * Clients that implement Unified Plan, such as Firefox use recvonly
  709. * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
  710. * to Plan B where there are only 3 channels: audio, video and data.
  711. *
  712. * @param {MediaStream} stream The WebRTC MediaStream instance.
  713. * @returns {boolean}
  714. */
  715. static isUserStream(stream) {
  716. return RTC.isUserStreamById(RTCUtils.getStreamID(stream));
  717. }
  718. /**
  719. * Returns <tt>true<tt/> if a WebRTC MediaStream identified by given stream
  720. * ID is considered a valid "user" stream which means that it's not a
  721. * "receive only" stream nor a "mixed" JVB stream.
  722. *
  723. * Clients that implement Unified Plan, such as Firefox use recvonly
  724. * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
  725. * to Plan B where there are only 3 channels: audio, video and data.
  726. *
  727. * @param {string} streamId The id of WebRTC MediaStream.
  728. * @returns {boolean}
  729. */
  730. static isUserStreamById(streamId) {
  731. return streamId && streamId !== 'mixedmslabel'
  732. && streamId !== 'default';
  733. }
  734. /**
  735. * Allows to receive list of available cameras/microphones.
  736. * @param {function} callback Would receive array of devices as an
  737. * argument.
  738. */
  739. static enumerateDevices(callback) {
  740. RTCUtils.enumerateDevices(callback);
  741. }
  742. /**
  743. * A method to handle stopping of the stream.
  744. * One point to handle the differences in various implementations.
  745. * @param {MediaStream} mediaStream MediaStream object to stop.
  746. */
  747. static stopMediaStream(mediaStream) {
  748. RTCUtils.stopMediaStream(mediaStream);
  749. }
  750. /**
  751. * Returns whether the desktop sharing is enabled or not.
  752. * @returns {boolean}
  753. */
  754. static isDesktopSharingEnabled() {
  755. return RTCUtils.isDesktopSharingEnabled();
  756. }
  757. /**
  758. * Closes the currently opened bridge channel.
  759. */
  760. closeBridgeChannel() {
  761. if (this._channel) {
  762. this._channel.close();
  763. this._channel = null;
  764. this.removeListener(RTCEvents.LASTN_ENDPOINT_CHANGED, this._lastNChangeListener);
  765. }
  766. }
  767. /* eslint-disable max-params */
  768. /**
  769. *
  770. * @param {TraceablePeerConnection} tpc
  771. * @param {number} ssrc
  772. * @param {number} audioLevel
  773. * @param {boolean} isLocal
  774. */
  775. setAudioLevel(tpc, ssrc, audioLevel, isLocal) {
  776. const track = tpc.getTrackBySSRC(ssrc);
  777. if (!track) {
  778. return;
  779. } else if (!track.isAudioTrack()) {
  780. logger.warn(`Received audio level for non-audio track: ${ssrc}`);
  781. return;
  782. } else if (track.isLocal() !== isLocal) {
  783. logger.error(
  784. `${track} was expected to ${isLocal ? 'be' : 'not be'} local`);
  785. }
  786. track.setAudioLevel(audioLevel, tpc);
  787. }
  788. /**
  789. * Sends message via the bridge channel.
  790. * @param {string} to The id of the endpoint that should receive the
  791. * message. If "" the message will be sent to all participants.
  792. * @param {object} payload The payload of the message.
  793. * @throws NetworkError or InvalidStateError or Error if the operation
  794. * fails or there is no data channel created.
  795. */
  796. sendChannelMessage(to, payload) {
  797. if (this._channel) {
  798. this._channel.sendMessage(to, payload);
  799. } else {
  800. throw new Error('Channel support is disabled!');
  801. }
  802. }
  803. /**
  804. * Sends the local stats via the bridge channel.
  805. * @param {Object} payload The payload of the message.
  806. * @throws NetworkError/InvalidStateError/Error if the operation fails or if there is no data channel created.
  807. */
  808. sendEndpointStatsMessage(payload) {
  809. if (this._channel && this._channel.isOpen()) {
  810. this._channel.sendEndpointStatsMessage(payload);
  811. }
  812. }
  813. /**
  814. * Selects a new value for "lastN". The requested amount of videos are going
  815. * to be delivered after the value is in effect. Set to -1 for unlimited or
  816. * all available videos.
  817. * @param {number} value the new value for lastN.
  818. */
  819. setLastN(value) {
  820. if (this._lastN !== value) {
  821. this._lastN = value;
  822. if (this._channel && this._channel.isOpen()) {
  823. this._channel.sendSetLastNMessage(value);
  824. }
  825. this.eventEmitter.emit(RTCEvents.LASTN_VALUE_CHANGED, value);
  826. }
  827. }
  828. /**
  829. * Indicates if the endpoint id is currently included in the last N.
  830. * @param {string} id The endpoint id that we check for last N.
  831. * @returns {boolean} true if the endpoint id is in the last N or if we
  832. * don't have bridge channel support, otherwise we return false.
  833. */
  834. isInLastN(id) {
  835. return !this._lastNEndpoints // lastNEndpoints not initialised yet.
  836. || this._lastNEndpoints.indexOf(id) > -1;
  837. }
  838. /**
  839. * Indicates if the source name is currently included in the forwarded sources.
  840. *
  841. * @param {string} sourceName The source name that we check for forwarded sources.
  842. * @returns {boolean} true if the source name is in the forwarded sources or if we don't have bridge channel
  843. * support, otherwise we return false.
  844. */
  845. isInForwardedSources(sourceName) {
  846. return !this._forwardedSources // forwardedSources not initialised yet.
  847. || this._forwardedSources.indexOf(sourceName) > -1;
  848. }
  849. /**
  850. * Updates the target audio output device for all remote audio tracks.
  851. *
  852. * @param {string} deviceId - The device id of the audio ouput device to
  853. * use for all remote tracks.
  854. * @private
  855. * @returns {void}
  856. */
  857. _updateAudioOutputForAudioTracks(deviceId) {
  858. const remoteAudioTracks = this.getRemoteTracks(MediaType.AUDIO);
  859. for (const track of remoteAudioTracks) {
  860. track.setAudioOutput(deviceId);
  861. }
  862. }
  863. }