modified lib-jitsi-meet dev repo
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

RTC.js 32KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007
  1. import { getLogger } from '@jitsi/logger';
  2. import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
  3. import BridgeVideoType from '../../service/RTC/BridgeVideoType';
  4. import * as MediaType from '../../service/RTC/MediaType';
  5. import RTCEvents from '../../service/RTC/RTCEvents';
  6. import browser from '../browser';
  7. import FeatureFlags from '../flags/FeatureFlags';
  8. import Statistics from '../statistics/statistics';
  9. import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
  10. import Listenable from '../util/Listenable';
  11. import { safeCounterIncrement } from '../util/MathUtil';
  12. import BridgeChannel from './BridgeChannel';
  13. import JitsiLocalTrack from './JitsiLocalTrack';
  14. import RTCUtils from './RTCUtils';
  15. import TraceablePeerConnection from './TraceablePeerConnection';
const logger = getLogger(__filename);

/**
 * The counter used to generate id numbers assigned to peer connections.
 * Incremented via safeCounterIncrement in createPeerConnection.
 * @type {number}
 */
let peerConnectionIdCounter = 0;

/**
 * The counter used to generate id numbers for the local
 * <code>MediaStreamTrack</code>s. Incremented via safeCounterIncrement in
 * _createLocalTracks.
 * @type {number}
 */
let rtcTrackIdCounter = 0;
  28. /**
  29. * Creates {@code JitsiLocalTrack} instances from the passed in meta information
  30. * about MedieaTracks.
  31. *
  32. * @param {Object[]} mediaStreamMetaData - An array of meta information with
  33. * MediaTrack instances. Each can look like:
  34. * {{
  35. * stream: MediaStream instance that holds a track with audio or video,
  36. * track: MediaTrack within the MediaStream,
  37. * videoType: "camera" or "desktop" or falsy,
  38. * sourceId: ID of the desktopsharing source,
  39. * sourceType: The desktopsharing source type,
  40. * effects: Array of effect types
  41. * }}
  42. */
  43. function _createLocalTracks(mediaStreamMetaData = []) {
  44. return mediaStreamMetaData.map(metaData => {
  45. const {
  46. sourceId,
  47. sourceType,
  48. stream,
  49. track,
  50. videoType,
  51. effects
  52. } = metaData;
  53. const { deviceId, facingMode } = track.getSettings();
  54. // FIXME Move rtcTrackIdCounter to a static method in JitsiLocalTrack
  55. // so RTC does not need to handle ID management. This move would be
  56. // safer to do once the old createLocalTracks is removed.
  57. rtcTrackIdCounter = safeCounterIncrement(rtcTrackIdCounter);
  58. return new JitsiLocalTrack({
  59. deviceId,
  60. facingMode,
  61. mediaType: track.kind,
  62. rtcId: rtcTrackIdCounter,
  63. sourceId,
  64. sourceType,
  65. stream,
  66. track,
  67. videoType: videoType || null,
  68. effects
  69. });
  70. });
  71. }
  72. /**
  73. *
  74. */
  75. export default class RTC extends Listenable {
  76. /**
  77. *
  78. * @param conference
  79. * @param options
  80. */
  81. constructor(conference, options = {}) {
  82. super();
  83. this.conference = conference;
  84. /**
  85. * A map of active <tt>TraceablePeerConnection</tt>.
  86. * @type {Map.<number, TraceablePeerConnection>}
  87. */
  88. this.peerConnections = new Map();
  89. this.localTracks = [];
  90. this.options = options;
  91. // BridgeChannel instance.
  92. // @private
  93. // @type {BridgeChannel}
  94. this._channel = null;
  95. /**
  96. * The value specified to the last invocation of setLastN before the
  97. * channel completed opening. If non-null, the value will be sent
  98. * through a channel (once) as soon as it opens and will then be
  99. * discarded.
  100. * @private
  101. * @type {number}
  102. */
  103. this._lastN = undefined;
  104. /**
  105. * Defines the last N endpoints list. It can be null or an array once
  106. * initialised with a channel last N event.
  107. * @type {Array<string>|null}
  108. * @private
  109. */
  110. this._lastNEndpoints = null;
  111. /**
  112. * Defines the forwarded sources list. It can be null or an array once initialised with a channel forwarded
  113. * sources event.
  114. *
  115. * @type {Array<string>|null}
  116. * @private
  117. */
  118. this._forwardedSources = null;
  119. /**
  120. * The number representing the maximum video height the local client
  121. * should receive from the bridge.
  122. *
  123. * @type {number|undefined}
  124. * @private
  125. */
  126. this._maxFrameHeight = undefined;
  127. /**
  128. * The endpoint IDs of currently selected participants.
  129. *
  130. * @type {Array}
  131. * @private
  132. */
  133. this._selectedEndpoints = null;
  134. // The last N change listener.
  135. this._lastNChangeListener = this._onLastNChanged.bind(this);
  136. // The forwarded sources change listener.
  137. this._forwardedSourcesChangeListener = this._onForwardedSourcesChanged.bind(this);
  138. this._onDeviceListChanged = this._onDeviceListChanged.bind(this);
  139. this._updateAudioOutputForAudioTracks
  140. = this._updateAudioOutputForAudioTracks.bind(this);
  141. /**
  142. * The default video type assumed by the bridge.
  143. * @deprecated this will go away with multiple streams support
  144. * @type {BridgeVideoType}
  145. * @private
  146. */
  147. this._videoType = BridgeVideoType.NONE;
  148. // Switch audio output device on all remote audio tracks. Local audio
  149. // tracks handle this event by themselves.
  150. if (RTCUtils.isDeviceChangeAvailable('output')) {
  151. RTCUtils.addListener(
  152. RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
  153. this._updateAudioOutputForAudioTracks
  154. );
  155. RTCUtils.addListener(
  156. RTCEvents.DEVICE_LIST_CHANGED,
  157. this._onDeviceListChanged
  158. );
  159. }
  160. }
  161. /**
  162. * Removes any listeners and stored state from this {@code RTC} instance.
  163. *
  164. * @returns {void}
  165. */
  166. destroy() {
  167. RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED, this._updateAudioOutputForAudioTracks);
  168. RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED, this._onDeviceListChanged);
  169. if (this._channelOpenListener) {
  170. this.removeListener(
  171. RTCEvents.DATA_CHANNEL_OPEN,
  172. this._channelOpenListener
  173. );
  174. }
  175. }
  176. /**
  177. * Exposes the private helper for converting a WebRTC MediaStream to a
  178. * JitsiLocalTrack.
  179. *
  180. * @param {Array<Object>} tracksInfo
  181. * @returns {Array<JitsiLocalTrack>}
  182. */
  183. static createLocalTracks(tracksInfo) {
  184. return _createLocalTracks(tracksInfo);
  185. }
  186. /**
  187. * Creates the local MediaStreams.
  188. * @param {object} [options] Optional parameters.
  189. * @param {array} options.devices The devices that will be requested.
  190. * @param {string} options.resolution Resolution constraints.
  191. * @param {string} options.cameraDeviceId
  192. * @param {string} options.micDeviceId
  193. * @returns {*} Promise object that will receive the new JitsiTracks
  194. */
  195. static obtainAudioAndVideoPermissions(options) {
  196. return RTCUtils.obtainAudioAndVideoPermissions(options)
  197. .then(tracksInfo => _createLocalTracks(tracksInfo));
  198. }
  199. /**
  200. * Initializes the bridge channel of this instance.
  201. * At least one of both, peerconnection or wsUrl parameters, must be
  202. * given.
  203. * @param {RTCPeerConnection} [peerconnection] WebRTC peer connection
  204. * instance.
  205. * @param {string} [wsUrl] WebSocket URL.
  206. */
  207. initializeBridgeChannel(peerconnection, wsUrl) {
  208. this._channel = new BridgeChannel(peerconnection, wsUrl, this.eventEmitter);
  209. this._channelOpenListener = () => {
  210. const logError = (error, msgType, value) => {
  211. GlobalOnErrorHandler.callErrorHandler(error);
  212. logger.error(`Cannot send ${msgType}(${JSON.stringify(value)}) endpoint message`, error);
  213. };
  214. // When the channel becomes available, tell the bridge about video selections so that it can do adaptive
  215. // simulcast, we want the notification to trigger even if userJid is undefined, or null.
  216. if (this._receiverVideoConstraints) {
  217. try {
  218. this._channel.sendNewReceiverVideoConstraintsMessage(this._receiverVideoConstraints);
  219. } catch (error) {
  220. logError(error, 'ReceiverVideoConstraints', this._receiverVideoConstraints);
  221. }
  222. }
  223. if (this._selectedEndpoints) {
  224. try {
  225. this._channel.sendSelectedEndpointsMessage(this._selectedEndpoints);
  226. } catch (error) {
  227. logError(error, 'SelectedEndpointsChangedEvent', this._selectedEndpoints);
  228. }
  229. }
  230. if (typeof this._maxFrameHeight !== 'undefined') {
  231. try {
  232. this._channel.sendReceiverVideoConstraintMessage(this._maxFrameHeight);
  233. } catch (error) {
  234. logError(error, 'ReceiverVideoConstraint', this._maxFrameHeight);
  235. }
  236. }
  237. if (typeof this._lastN !== 'undefined' && this._lastN !== -1) {
  238. try {
  239. this._channel.sendSetLastNMessage(this._lastN);
  240. } catch (error) {
  241. logError(error, 'LastNChangedEvent', this._lastN);
  242. }
  243. }
  244. if (!FeatureFlags.isSourceNameSignalingEnabled()) {
  245. try {
  246. this._channel.sendVideoTypeMessage(this._videoType);
  247. } catch (error) {
  248. logError(error, 'VideoTypeMessage', this._videoType);
  249. }
  250. }
  251. this.removeListener(RTCEvents.DATA_CHANNEL_OPEN, this._channelOpenListener);
  252. this._channelOpenListener = null;
  253. };
  254. this.addListener(RTCEvents.DATA_CHANNEL_OPEN, this._channelOpenListener);
  255. // Add Last N change listener.
  256. this.addListener(RTCEvents.LASTN_ENDPOINT_CHANGED, this._lastNChangeListener);
  257. if (FeatureFlags.isSourceNameSignalingEnabled()) {
  258. // Add forwarded sources change listener.
  259. this.addListener(RTCEvents.FORWARDED_SOURCES_CHANGED, this._forwardedSourcesChangeListener);
  260. }
  261. }
  262. /**
  263. * Callback invoked when the list of known audio and video devices has
  264. * been updated. Attempts to update the known available audio output
  265. * devices.
  266. *
  267. * @private
  268. * @returns {void}
  269. */
  270. _onDeviceListChanged() {
  271. this._updateAudioOutputForAudioTracks(RTCUtils.getAudioOutputDevice());
  272. }
  273. /**
  274. * Receives events when Last N had changed.
  275. * @param {array} lastNEndpoints The new Last N endpoints.
  276. * @private
  277. */
  278. _onLastNChanged(lastNEndpoints = []) {
  279. const oldLastNEndpoints = this._lastNEndpoints || [];
  280. let leavingLastNEndpoints = [];
  281. let enteringLastNEndpoints = [];
  282. this._lastNEndpoints = lastNEndpoints;
  283. leavingLastNEndpoints = oldLastNEndpoints.filter(
  284. id => !this.isInLastN(id));
  285. enteringLastNEndpoints = lastNEndpoints.filter(
  286. id => oldLastNEndpoints.indexOf(id) === -1);
  287. this.conference.eventEmitter.emit(
  288. JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
  289. leavingLastNEndpoints,
  290. enteringLastNEndpoints);
  291. }
  292. /**
  293. * Receives events when forwarded sources had changed.
  294. *
  295. * @param {array} forwardedSources The new forwarded sources.
  296. * @private
  297. */
  298. _onForwardedSourcesChanged(forwardedSources = []) {
  299. const oldForwardedSources = this._forwardedSources || [];
  300. let leavingForwardedSources = [];
  301. let enteringForwardedSources = [];
  302. this._forwardedSources = forwardedSources;
  303. leavingForwardedSources = oldForwardedSources.filter(sourceName => !this.isInForwardedSources(sourceName));
  304. enteringForwardedSources = forwardedSources.filter(
  305. sourceName => oldForwardedSources.indexOf(sourceName) === -1);
  306. this.conference.eventEmitter.emit(
  307. JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED,
  308. leavingForwardedSources,
  309. enteringForwardedSources,
  310. Date.now());
  311. }
  312. /**
  313. * Should be called when current media session ends and after the
  314. * PeerConnection has been closed using PeerConnection.close() method.
  315. */
  316. onCallEnded() {
  317. if (this._channel) {
  318. // The BridgeChannel is not explicitly closed as the PeerConnection
  319. // is closed on call ended which triggers datachannel onclose
  320. // events. If using a WebSocket, the channel must be closed since
  321. // it is not managed by the PeerConnection.
  322. // The reference is cleared to disable any logic related to the
  323. // channel.
  324. if (this._channel && this._channel.mode === 'websocket') {
  325. this._channel.close();
  326. }
  327. this._channel = null;
  328. }
  329. }
  330. /**
  331. * Sets the capture frame rate to be used for desktop tracks.
  332. *
  333. * @param {number} maxFps framerate to be used for desktop track capture.
  334. */
  335. setDesktopSharingFrameRate(maxFps) {
  336. RTCUtils.setDesktopSharingFrameRate(maxFps);
  337. }
  338. /**
  339. * Sets the receiver video constraints that determine how bitrate is allocated to each of the video streams
  340. * requested from the bridge. The constraints are cached and sent through the bridge channel once the channel
  341. * is established.
  342. * @param {*} constraints
  343. */
  344. setNewReceiverVideoConstraints(constraints) {
  345. this._receiverVideoConstraints = constraints;
  346. if (this._channel && this._channel.isOpen()) {
  347. this._channel.sendNewReceiverVideoConstraintsMessage(constraints);
  348. }
  349. }
  350. /**
  351. * Sets the maximum video size the local participant should receive from
  352. * remote participants. Will cache the value and send it through the channel
  353. * once it is created.
  354. *
  355. * @param {number} maxFrameHeightPixels the maximum frame height, in pixels,
  356. * this receiver is willing to receive.
  357. * @returns {void}
  358. */
  359. setReceiverVideoConstraint(maxFrameHeight) {
  360. this._maxFrameHeight = maxFrameHeight;
  361. if (this._channel && this._channel.isOpen()) {
  362. this._channel.sendReceiverVideoConstraintMessage(maxFrameHeight);
  363. }
  364. }
  365. /**
  366. * Sets the video type and availability for the local video source.
  367. *
  368. * @param {string} videoType 'camera' for camera, 'desktop' for screenshare and
  369. * 'none' for when local video source is muted or removed from the peerconnection.
  370. * @returns {void}
  371. */
  372. setVideoType(videoType) {
  373. if (this._videoType !== videoType) {
  374. this._videoType = videoType;
  375. if (this._channel && this._channel.isOpen()) {
  376. this._channel.sendVideoTypeMessage(videoType);
  377. }
  378. }
  379. }
  380. /**
  381. * Sends the track's video type to the JVB.
  382. * @param {SourceName} sourceName - the track's source name.
  383. * @param {BridgeVideoType} videoType - the track's video type.
  384. */
  385. sendSourceVideoType(sourceName, videoType) {
  386. if (this._channel && this._channel.isOpen()) {
  387. this._channel.sendSourceVideoTypeMessage(sourceName, videoType);
  388. }
  389. }
  390. /**
  391. * Elects the participants with the given ids to be the selected
  392. * participants in order to always receive video for this participant (even
  393. * when last n is enabled). If there is no channel we store it and send it
  394. * through the channel once it is created.
  395. *
  396. * @param {Array<string>} ids - The user ids.
  397. * @throws NetworkError or InvalidStateError or Error if the operation
  398. * fails.
  399. * @returns {void}
  400. */
  401. selectEndpoints(ids) {
  402. this._selectedEndpoints = ids;
  403. if (this._channel && this._channel.isOpen()) {
  404. this._channel.sendSelectedEndpointsMessage(ids);
  405. }
  406. }
  407. /**
  408. *
  409. * @param eventType
  410. * @param listener
  411. */
  412. static addListener(eventType, listener) {
  413. RTCUtils.addListener(eventType, listener);
  414. }
  415. /**
  416. *
  417. * @param eventType
  418. * @param listener
  419. */
  420. static removeListener(eventType, listener) {
  421. RTCUtils.removeListener(eventType, listener);
  422. }
  423. /**
  424. *
  425. * @param options
  426. */
  427. static init(options = {}) {
  428. this.options = options;
  429. return RTCUtils.init(this.options);
  430. }
  431. /* eslint-disable max-params */
  432. /**
  433. * Creates new <tt>TraceablePeerConnection</tt>
  434. * @param {SignalingLayer} signaling The signaling layer that will provide information about the media or
  435. * participants which is not carried over SDP.
  436. * @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
  437. * @param {boolean} isP2P Indicates whether or not the new TPC will be used in a peer to peer type of session.
  438. * @param {object} options The config options.
  439. * @param {boolean} options.enableInsertableStreams - Set to true when the insertable streams constraints is to be
  440. * enabled on the PeerConnection.
  441. * @param {boolean} options.disableSimulcast If set to 'true' will disable the simulcast.
  442. * @param {boolean} options.disableRtx If set to 'true' will disable the RTX.
  443. * @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
  444. * @return {TraceablePeerConnection}
  445. */
  446. createPeerConnection(signaling, pcConfig, isP2P, options) {
  447. const pcConstraints = JSON.parse(JSON.stringify(RTCUtils.pcConstraints));
  448. if (typeof options.abtestSuspendVideo !== 'undefined') {
  449. RTCUtils.setSuspendVideo(pcConstraints, options.abtestSuspendVideo);
  450. Statistics.analytics.addPermanentProperties(
  451. { abtestSuspendVideo: options.abtestSuspendVideo });
  452. }
  453. if (options.enableInsertableStreams) {
  454. logger.debug('E2EE - setting insertable streams constraints');
  455. pcConfig.encodedInsertableStreams = true;
  456. }
  457. const supportsSdpSemantics = browser.isReactNative()
  458. || (browser.isChromiumBased() && !options.usesUnifiedPlan);
  459. if (supportsSdpSemantics) {
  460. logger.debug('WebRTC application is running in plan-b mode');
  461. pcConfig.sdpSemantics = 'plan-b';
  462. }
  463. if (options.forceTurnRelay) {
  464. pcConfig.iceTransportPolicy = 'relay';
  465. }
  466. // Set the RTCBundlePolicy to max-bundle so that only one set of ice candidates is generated.
  467. // The default policy generates separate ice candidates for audio and video connections.
  468. // This change is necessary for Unified plan to work properly on Chrome and Safari.
  469. pcConfig.bundlePolicy = 'max-bundle';
  470. peerConnectionIdCounter = safeCounterIncrement(peerConnectionIdCounter);
  471. const newConnection
  472. = new TraceablePeerConnection(
  473. this,
  474. peerConnectionIdCounter,
  475. signaling,
  476. pcConfig, pcConstraints,
  477. isP2P, options);
  478. this.peerConnections.set(newConnection.id, newConnection);
  479. return newConnection;
  480. }
  481. /* eslint-enable max-params */
  482. /**
  483. * Removed given peer connection from this RTC module instance.
  484. * @param {TraceablePeerConnection} traceablePeerConnection
  485. * @return {boolean} <tt>true</tt> if the given peer connection was removed
  486. * successfully or <tt>false</tt> if there was no peer connection mapped in
  487. * this RTC instance.
  488. */
  489. _removePeerConnection(traceablePeerConnection) {
  490. const id = traceablePeerConnection.id;
  491. if (this.peerConnections.has(id)) {
  492. // NOTE Remote tracks are not removed here.
  493. this.peerConnections.delete(id);
  494. return true;
  495. }
  496. return false;
  497. }
  498. /**
  499. *
  500. * @param track
  501. */
  502. addLocalTrack(track) {
  503. if (!track) {
  504. throw new Error('track must not be null nor undefined');
  505. }
  506. this.localTracks.push(track);
  507. track.conference = this.conference;
  508. }
  509. /**
  510. * Get forwarded sources list.
  511. * @returns {Array<string>|null}
  512. */
  513. getForwardedSources() {
  514. return this._forwardedSources;
  515. }
  516. /**
  517. * Get local video track.
  518. * @returns {JitsiLocalTrack|undefined}
  519. */
  520. getLocalVideoTrack() {
  521. const localVideo = this.getLocalTracks(MediaType.VIDEO);
  522. return localVideo.length ? localVideo[0] : undefined;
  523. }
  524. /**
  525. * Returns all the local video tracks.
  526. * @returns {Array<JitsiLocalTrack>}
  527. */
  528. getLocalVideoTracks() {
  529. return this.getLocalTracks(MediaType.VIDEO);
  530. }
  531. /**
  532. * Get local audio track.
  533. * @returns {JitsiLocalTrack|undefined}
  534. */
  535. getLocalAudioTrack() {
  536. const localAudio = this.getLocalTracks(MediaType.AUDIO);
  537. return localAudio.length ? localAudio[0] : undefined;
  538. }
  539. /**
  540. * Returns the endpoint id for the local user.
  541. * @returns {string}
  542. */
  543. getLocalEndpointId() {
  544. return this.conference.myUserId();
  545. }
  546. /**
  547. * Returns the local tracks of the given media type, or all local tracks if
  548. * no specific type is given.
  549. * @param {MediaType} [mediaType] Optional media type filter.
  550. * (audio or video).
  551. */
  552. getLocalTracks(mediaType) {
  553. let tracks = this.localTracks.slice();
  554. if (mediaType !== undefined) {
  555. tracks = tracks.filter(
  556. track => track.getType() === mediaType);
  557. }
  558. return tracks;
  559. }
  560. /**
  561. * Obtains all remote tracks currently known to this RTC module instance.
  562. * @param {MediaType} [mediaType] The remote tracks will be filtered
  563. * by their media type if this argument is specified.
  564. * @return {Array<JitsiRemoteTrack>}
  565. */
  566. getRemoteTracks(mediaType) {
  567. let remoteTracks = [];
  568. for (const tpc of this.peerConnections.values()) {
  569. const pcRemoteTracks = tpc.getRemoteTracks(undefined, mediaType);
  570. if (pcRemoteTracks) {
  571. remoteTracks = remoteTracks.concat(pcRemoteTracks);
  572. }
  573. }
  574. return remoteTracks;
  575. }
  576. /**
  577. * Set mute for all local audio streams attached to the conference.
  578. * @param value The mute value.
  579. * @returns {Promise}
  580. */
  581. setAudioMute(value) {
  582. const mutePromises = [];
  583. this.getLocalTracks(MediaType.AUDIO).forEach(audioTrack => {
  584. // this is a Promise
  585. mutePromises.push(value ? audioTrack.mute() : audioTrack.unmute());
  586. });
  587. // We return a Promise from all Promises so we can wait for their
  588. // execution.
  589. return Promise.all(mutePromises);
  590. }
  591. /**
  592. * Set mute for all local video streams attached to the conference.
  593. * @param value The mute value.
  594. * @returns {Promise}
  595. */
  596. setVideoMute(value) {
  597. const mutePromises = [];
  598. this.getLocalTracks(MediaType.VIDEO).concat(this.getLocalTracks(MediaType.PRESENTER))
  599. .forEach(videoTrack => {
  600. // this is a Promise
  601. mutePromises.push(value ? videoTrack.mute() : videoTrack.unmute());
  602. });
  603. // We return a Promise from all Promises so we can wait for their
  604. // execution.
  605. return Promise.all(mutePromises);
  606. }
  607. /**
  608. *
  609. * @param track
  610. */
  611. removeLocalTrack(track) {
  612. const pos = this.localTracks.indexOf(track);
  613. if (pos === -1) {
  614. return;
  615. }
  616. this.localTracks.splice(pos, 1);
  617. }
  618. /**
  619. *
  620. * @param elSelector
  621. * @param stream
  622. */
  623. static attachMediaStream(elSelector, stream) {
  624. return RTCUtils.attachMediaStream(elSelector, stream);
  625. }
  626. /**
  627. * Returns the id of the given stream.
  628. * @param {MediaStream} stream
  629. */
  630. static getStreamID(stream) {
  631. return RTCUtils.getStreamID(stream);
  632. }
  633. /**
  634. * Returns the id of the given track.
  635. * @param {MediaStreamTrack} track
  636. */
  637. static getTrackID(track) {
  638. return RTCUtils.getTrackID(track);
  639. }
  640. /**
  641. * Returns true if retrieving the list of input devices is supported
  642. * and false if not.
  643. */
  644. static isDeviceListAvailable() {
  645. return RTCUtils.isDeviceListAvailable();
  646. }
  647. /**
  648. * Returns true if changing the input (camera / microphone) or output
  649. * (audio) device is supported and false if not.
  650. * @param {string} [deviceType] Type of device to change. Default is
  651. * undefined or 'input', 'output' - for audio output device change.
  652. * @returns {boolean} true if available, false otherwise.
  653. */
  654. static isDeviceChangeAvailable(deviceType) {
  655. return RTCUtils.isDeviceChangeAvailable(deviceType);
  656. }
  657. /**
  658. * Returns whether the current execution environment supports WebRTC (for
  659. * use within this library).
  660. *
  661. * @returns {boolean} {@code true} if WebRTC is supported in the current
  662. * execution environment (for use within this library); {@code false},
  663. * otherwise.
  664. */
  665. static isWebRtcSupported() {
  666. return browser.isSupported();
  667. }
  668. /**
  669. * Returns currently used audio output device id, '' stands for default
  670. * device
  671. * @returns {string}
  672. */
  673. static getAudioOutputDevice() {
  674. return RTCUtils.getAudioOutputDevice();
  675. }
  676. /**
  677. * Returns list of available media devices if its obtained, otherwise an
  678. * empty array is returned/
  679. * @returns {array} list of available media devices.
  680. */
  681. static getCurrentlyAvailableMediaDevices() {
  682. return RTCUtils.getCurrentlyAvailableMediaDevices();
  683. }
  684. /**
  685. * Returns whether available devices have permissions granted
  686. * @returns {Boolean}
  687. */
  688. static arePermissionsGrantedForAvailableDevices() {
  689. return RTCUtils.arePermissionsGrantedForAvailableDevices();
  690. }
  691. /**
  692. * Returns event data for device to be reported to stats.
  693. * @returns {MediaDeviceInfo} device.
  694. */
  695. static getEventDataForActiveDevice(device) {
  696. return RTCUtils.getEventDataForActiveDevice(device);
  697. }
  698. /**
  699. * Sets current audio output device.
  700. * @param {string} deviceId Id of 'audiooutput' device from
  701. * navigator.mediaDevices.enumerateDevices().
  702. * @returns {Promise} resolves when audio output is changed, is rejected
  703. * otherwise
  704. */
  705. static setAudioOutputDevice(deviceId) {
  706. return RTCUtils.setAudioOutputDevice(deviceId);
  707. }
  708. /**
  709. * Returns <tt>true<tt/> if given WebRTC MediaStream is considered a valid
  710. * "user" stream which means that it's not a "receive only" stream nor a
  711. * "mixed" JVB stream.
  712. *
  713. * Clients that implement Unified Plan, such as Firefox use recvonly
  714. * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
  715. * to Plan B where there are only 3 channels: audio, video and data.
  716. *
  717. * @param {MediaStream} stream The WebRTC MediaStream instance.
  718. * @returns {boolean}
  719. */
  720. static isUserStream(stream) {
  721. return RTC.isUserStreamById(RTCUtils.getStreamID(stream));
  722. }
  723. /**
  724. * Returns <tt>true<tt/> if a WebRTC MediaStream identified by given stream
  725. * ID is considered a valid "user" stream which means that it's not a
  726. * "receive only" stream nor a "mixed" JVB stream.
  727. *
  728. * Clients that implement Unified Plan, such as Firefox use recvonly
  729. * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
  730. * to Plan B where there are only 3 channels: audio, video and data.
  731. *
  732. * @param {string} streamId The id of WebRTC MediaStream.
  733. * @returns {boolean}
  734. */
  735. static isUserStreamById(streamId) {
  736. return streamId && streamId !== 'mixedmslabel'
  737. && streamId !== 'default';
  738. }
  739. /**
  740. * Allows to receive list of available cameras/microphones.
  741. * @param {function} callback Would receive array of devices as an
  742. * argument.
  743. */
  744. static enumerateDevices(callback) {
  745. RTCUtils.enumerateDevices(callback);
  746. }
  747. /**
  748. * A method to handle stopping of the stream.
  749. * One point to handle the differences in various implementations.
  750. * @param {MediaStream} mediaStream MediaStream object to stop.
  751. */
  752. static stopMediaStream(mediaStream) {
  753. RTCUtils.stopMediaStream(mediaStream);
  754. }
  755. /**
  756. * Returns whether the desktop sharing is enabled or not.
  757. * @returns {boolean}
  758. */
  759. static isDesktopSharingEnabled() {
  760. return RTCUtils.isDesktopSharingEnabled();
  761. }
  762. /**
  763. * Closes the currently opened bridge channel.
  764. */
  765. closeBridgeChannel() {
  766. if (this._channel) {
  767. this._channel.close();
  768. this._channel = null;
  769. this.removeListener(RTCEvents.LASTN_ENDPOINT_CHANGED, this._lastNChangeListener);
  770. }
  771. }
  772. /* eslint-disable max-params */
  773. /**
  774. *
  775. * @param {TraceablePeerConnection} tpc
  776. * @param {number} ssrc
  777. * @param {number} audioLevel
  778. * @param {boolean} isLocal
  779. */
  780. setAudioLevel(tpc, ssrc, audioLevel, isLocal) {
  781. const track = tpc.getTrackBySSRC(ssrc);
  782. if (!track) {
  783. return;
  784. } else if (!track.isAudioTrack()) {
  785. logger.warn(`Received audio level for non-audio track: ${ssrc}`);
  786. return;
  787. } else if (track.isLocal() !== isLocal) {
  788. logger.error(
  789. `${track} was expected to ${isLocal ? 'be' : 'not be'} local`);
  790. }
  791. track.setAudioLevel(audioLevel, tpc);
  792. }
  793. /**
  794. * Sends message via the bridge channel.
  795. * @param {string} to The id of the endpoint that should receive the
  796. * message. If "" the message will be sent to all participants.
  797. * @param {object} payload The payload of the message.
  798. * @throws NetworkError or InvalidStateError or Error if the operation
  799. * fails or there is no data channel created.
  800. */
  801. sendChannelMessage(to, payload) {
  802. if (this._channel) {
  803. this._channel.sendMessage(to, payload);
  804. } else {
  805. throw new Error('Channel support is disabled!');
  806. }
  807. }
  808. /**
  809. * Sends the local stats via the bridge channel.
  810. * @param {Object} payload The payload of the message.
  811. * @throws NetworkError/InvalidStateError/Error if the operation fails or if there is no data channel created.
  812. */
  813. sendEndpointStatsMessage(payload) {
  814. if (this._channel && this._channel.isOpen()) {
  815. this._channel.sendEndpointStatsMessage(payload);
  816. }
  817. }
  818. /**
  819. * Selects a new value for "lastN". The requested amount of videos are going
  820. * to be delivered after the value is in effect. Set to -1 for unlimited or
  821. * all available videos.
  822. * @param {number} value the new value for lastN.
  823. */
  824. setLastN(value) {
  825. if (this._lastN !== value) {
  826. this._lastN = value;
  827. if (this._channel && this._channel.isOpen()) {
  828. this._channel.sendSetLastNMessage(value);
  829. }
  830. this.eventEmitter.emit(RTCEvents.LASTN_VALUE_CHANGED, value);
  831. }
  832. }
  833. /**
  834. * Indicates if the endpoint id is currently included in the last N.
  835. * @param {string} id The endpoint id that we check for last N.
  836. * @returns {boolean} true if the endpoint id is in the last N or if we
  837. * don't have bridge channel support, otherwise we return false.
  838. */
  839. isInLastN(id) {
  840. return !this._lastNEndpoints // lastNEndpoints not initialised yet.
  841. || this._lastNEndpoints.indexOf(id) > -1;
  842. }
  843. /**
  844. * Indicates if the source name is currently included in the forwarded sources.
  845. *
  846. * @param {string} sourceName The source name that we check for forwarded sources.
  847. * @returns {boolean} true if the source name is in the forwarded sources or if we don't have bridge channel
  848. * support, otherwise we return false.
  849. */
  850. isInForwardedSources(sourceName) {
  851. return !this._forwardedSources // forwardedSources not initialised yet.
  852. || this._forwardedSources.indexOf(sourceName) > -1;
  853. }
  854. /**
  855. * Updates the target audio output device for all remote audio tracks.
  856. *
  857. * @param {string} deviceId - The device id of the audio ouput device to
  858. * use for all remote tracks.
  859. * @private
  860. * @returns {void}
  861. */
  862. _updateAudioOutputForAudioTracks(deviceId) {
  863. const remoteAudioTracks = this.getRemoteTracks(MediaType.AUDIO);
  864. for (const track of remoteAudioTracks) {
  865. track.setAudioOutput(deviceId);
  866. }
  867. }
  868. }