/* global __filename */

import { getLogger } from 'jitsi-meet-logger';

import BridgeChannel from './BridgeChannel';
import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
import JitsiLocalTrack from './JitsiLocalTrack';
import JitsiTrackError from '../../JitsiTrackError';
import * as JitsiTrackErrors from '../../JitsiTrackErrors';
import Listenable from '../util/Listenable';
import * as MediaType from '../../service/RTC/MediaType';
import RTCBrowserType from './RTCBrowserType';
import RTCEvents from '../../service/RTC/RTCEvents';
import RTCUtils from './RTCUtils';
import Statistics from '../statistics/statistics';
import TraceablePeerConnection from './TraceablePeerConnection';
import VideoType from '../../service/RTC/VideoType';

const logger = getLogger(__filename);

let rtcTrackIdCounter = 0;

/**
 * Creates {@code JitsiLocalTrack} instances from the passed in meta
 * information about obtained media.
 *
 * @param {Object[]} tracksInfo - An array of track meta information as
 * provided by {@code RTCUtils.obtainAudioAndVideoPermissions}.
 * @param {Object} options - The options passed to
 * {@code obtainAudioAndVideoPermissions} (micDeviceId, cameraDeviceId,
 * facingMode).
 * @returns {Array<JitsiLocalTrack>}
 */
function createLocalTracks(tracksInfo, options) {
    const newTracks = [];
    let deviceId = null;

    tracksInfo.forEach(trackInfo => {
        if (trackInfo.mediaType === MediaType.AUDIO) {
            deviceId = options.micDeviceId;
        } else if (trackInfo.videoType === VideoType.CAMERA) {
            deviceId = options.cameraDeviceId;
        }
        rtcTrackIdCounter += 1;
        const localTrack = new JitsiLocalTrack({
            ...trackInfo,
            deviceId,
            facingMode: options.facingMode,
            rtcId: rtcTrackIdCounter
        });

        newTracks.push(localTrack);
    });

    return newTracks;
}
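
// Illustrative sketch (not part of the module): the shape of the arguments
// this helper consumes, as implied by the code above. The field values below
// are hypothetical examples.
//
//     createLocalTracks(
//         [ { mediaType: 'audio', stream, track },
//           { mediaType: 'video', videoType: 'camera', stream, track } ],
//         { micDeviceId: 'mic-1', cameraDeviceId: 'cam-1', facingMode: 'user' });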

/**
 * Creates {@code JitsiLocalTrack} instances from the passed in meta information
 * about MediaTracks.
 *
 * @param {Object[]} mediaStreamMetaData - An array of meta information with
 * MediaTrack instances. Each can look like:
 * {{
 *     stream: MediaStream instance that holds a track with audio or video,
 *     track: MediaTrack within the MediaStream,
 *     videoType: "camera" or "desktop" or falsy,
 *     sourceId: ID of the desktopsharing source,
 *     sourceType: The desktopsharing source type
 * }}
 */
function _newCreateLocalTracks(mediaStreamMetaData = []) {
    return mediaStreamMetaData.map(metaData => {
        const {
            sourceId,
            sourceType,
            stream,
            track,
            videoType
        } = metaData;

        const { deviceId, facingMode } = track.getSettings();

        // FIXME Move rtcTrackIdCounter to a static method in JitsiLocalTrack
        // so RTC does not need to handle ID management. This move would be
        // safer to do once the old createLocalTracks is removed.
        rtcTrackIdCounter += 1;

        return new JitsiLocalTrack({
            deviceId,
            facingMode,
            mediaType: track.kind,
            rtcId: rtcTrackIdCounter,
            sourceId,
            sourceType,
            stream,
            track,
            videoType: videoType || null
        });
    });
}
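
// Illustrative sketch (assumption, not part of the module): how a
// getUserMedia result could be mapped into the meta data consumed above. In
// practice RTCUtils is what builds this structure.
//
//     navigator.mediaDevices.getUserMedia({ audio: true, video: true })
//         .then(stream => _newCreateLocalTracks(
//             stream.getTracks().map(track => ({ stream, track }))));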

/**
 * The RTC module manages the WebRTC-related state of a conference: local and
 * remote tracks, traceable peer connections and the bridge channel.
 */
export default class RTC extends Listenable {
    /**
     * Creates a new RTC module instance.
     *
     * @param conference The conference instance for which this RTC module is
     * created.
     * @param {object} [options] Optional configuration options.
     */
    constructor(conference, options = {}) {
        super();
        this.conference = conference;

        /**
         * A map of active <tt>TraceablePeerConnection</tt>.
         * @type {Map.<number, TraceablePeerConnection>}
         */
        this.peerConnections = new Map();

        /**
         * The counter used to generate id numbers assigned to peer
         * connections.
         * @type {number}
         */
        this.peerConnectionIdCounter = 1;

        this.localTracks = [];

        this.options = options;

        // BridgeChannel instance.
        // @private
        // @type {BridgeChannel}
        this._channel = null;

        // A flag indicating whether the channel has been reported as open.
        // The flag can get out of sync if, for some reason, the channel is
        // closed by the server; this is intentional so that such errors
        // become visible.
        // @private
        // @type {boolean}
        this._channelOpen = false;

        /**
         * The value specified to the last invocation of setLastN before the
         * channel completed opening. If non-null, the value will be sent
         * through a channel (once) as soon as it opens and will then be
         * discarded.
         * @private
         * @type {number}
         */
        this._lastN = -1;

        /**
         * Defines the last N endpoints list. It can be null or an array once
         * initialised with a channel last N event.
         * @type {Array<string>|null}
         * @private
         */
        this._lastNEndpoints = null;

        /**
         * The endpoint ID of currently pinned participant or <tt>null</tt> if
         * no user is pinned.
         * @type {string|null}
         * @private
         */
        this._pinnedEndpoint = null;

        /**
         * The endpoint ID of currently selected participant or <tt>null</tt>
         * if no user is selected.
         * @type {string|null}
         * @private
         */
        this._selectedEndpoint = null;

        // The last N change listener.
        this._lastNChangeListener = this._onLastNChanged.bind(this);

        // Switch audio output device on all remote audio tracks. Local audio
        // tracks handle this event by themselves.
        if (RTCUtils.isDeviceChangeAvailable('output')) {
            RTCUtils.addListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                deviceId => {
                    const remoteAudioTracks
                        = this.getRemoteTracks(MediaType.AUDIO);

                    for (const track of remoteAudioTracks) {
                        track.setAudioOutput(deviceId);
                    }
                });
        }
    }

    /**
     * Creates the local MediaStreams.
     * @param {object} [options] Optional parameters.
     * @param {array} options.devices The devices that will be requested.
     * @param {string} options.resolution Resolution constraints.
     * @param {bool} options.dontCreateJitsiTrack If <tt>true</tt>, objects
     * with the following structure {stream: the Media Stream, type: "audio"
     * or "video", videoType: "camera" or "desktop"} will be returned through
     * the Promise, otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    static obtainAudioAndVideoPermissions(options) {
        const usesNewGumFlow = RTCBrowserType.usesNewGumFlow();
        const obtainMediaPromise = usesNewGumFlow
            ? RTCUtils.newObtainAudioAndVideoPermissions(options)
            : RTCUtils.obtainAudioAndVideoPermissions(options);

        return obtainMediaPromise.then(tracksInfo => {
            const tracks = usesNewGumFlow
                ? _newCreateLocalTracks(tracksInfo)
                : createLocalTracks(tracksInfo, options);

            return tracks.some(track => !track._isReceivingData())
                ? Promise.reject(
                    new JitsiTrackError(JitsiTrackErrors.NO_DATA_FROM_SOURCE))
                : tracks;
        });
    }
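
    // Usage sketch (illustrative only, not part of this class): obtaining
    // camera and microphone tracks. The option names follow the JSDoc above;
    // the device ids are hypothetical and error handling is application
    // specific.
    //
    //     RTC.obtainAudioAndVideoPermissions({
    //         devices: [ 'audio', 'video' ],
    //         cameraDeviceId: 'cam-1',
    //         micDeviceId: 'mic-1'
    //     }).then(tracks => tracks.forEach(track => console.log(track.getType())));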

    /**
     * Initializes the bridge channel of this instance.
     * At least one of the two parameters, peerconnection or wsUrl, must be
     * given.
     * @param {RTCPeerConnection} [peerconnection] WebRTC peer connection
     * instance.
     * @param {string} [wsUrl] WebSocket URL.
     */
    initializeBridgeChannel(peerconnection, wsUrl) {
        this._channel = new BridgeChannel(
            peerconnection, wsUrl, this.eventEmitter);

        this._channelOpenListener = () => {
            // Mark the channel as opened.
            this._channelOpen = true;

            // When the channel becomes available, tell the bridge about the
            // video selections so that it can do adaptive simulcast. We want
            // the notification to trigger even if userJid is undefined or
            // null.
            try {
                this._channel.sendPinnedEndpointMessage(
                    this._pinnedEndpoint);
                this._channel.sendSelectedEndpointMessage(
                    this._selectedEndpoint);
            } catch (error) {
                GlobalOnErrorHandler.callErrorHandler(error);
                logger.error(
                    `Cannot send selected(${this._selectedEndpoint})`
                        + ` pinned(${this._pinnedEndpoint}) endpoint message.`,
                    error);
            }

            this.removeListener(RTCEvents.DATA_CHANNEL_OPEN,
                this._channelOpenListener);
            this._channelOpenListener = null;

            // If setLastN was invoked before the bridge channel completed
            // opening, apply the specified value now that the channel is
            // open. NOTE that -1 is the default value assumed by both the
            // RTC module and the JVB.
            if (this._lastN !== -1) {
                this._channel.sendSetLastNMessage(this._lastN);
            }
        };
        this.addListener(RTCEvents.DATA_CHANNEL_OPEN,
            this._channelOpenListener);

        // Add Last N change listener.
        this.addListener(RTCEvents.LASTN_ENDPOINT_CHANGED,
            this._lastNChangeListener);
    }
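
    // Usage sketch (illustrative only): the channel can be backed either by
    // the peer connection's SCTP data channel or by a WebSocket, depending on
    // which argument is provided. `peerconnection`, `rtc` and the URL below
    // are hypothetical.
    //
    //     rtc.initializeBridgeChannel(peerconnection, null);
    //     // or
    //     rtc.initializeBridgeChannel(null, 'wss://example.org/colibri-ws');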

    /**
     * Receives events when Last N has changed.
     * @param {array} lastNEndpoints The new Last N endpoints.
     * @private
     */
    _onLastNChanged(lastNEndpoints = []) {
        const oldLastNEndpoints = this._lastNEndpoints || [];
        let leavingLastNEndpoints = [];
        let enteringLastNEndpoints = [];

        this._lastNEndpoints = lastNEndpoints;

        leavingLastNEndpoints = oldLastNEndpoints.filter(
            id => !this.isInLastN(id));

        enteringLastNEndpoints = lastNEndpoints.filter(
            id => oldLastNEndpoints.indexOf(id) === -1);

        this.conference.eventEmitter.emit(
            JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
            leavingLastNEndpoints,
            enteringLastNEndpoints);
    }
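
    // Consumption sketch (illustrative only): applications can observe the
    // event emitted above on the conference event emitter. `conference` is a
    // hypothetical conference instance owned by the application.
    //
    //     conference.eventEmitter.on(
    //         JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
    //         (leaving, entering) =>
    //             console.log('lastN left:', leaving, 'entered:', entering));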

    /**
     * Should be called when the current media session ends and after the
     * PeerConnection has been closed using PeerConnection.close() method.
     */
    onCallEnded() {
        if (this._channel) {
            // The BridgeChannel is not explicitly closed, as the
            // PeerConnection is closed on call end, which triggers
            // datachannel onclose events. If using a WebSocket, the channel
            // must be closed since it is not managed by the PeerConnection.
            // The reference is cleared to disable any logic related to the
            // channel.
            if (this._channel && this._channel.mode === 'websocket') {
                this._channel.close();
            }

            this._channel = null;
            this._channelOpen = false;
        }
    }

    /**
     * Sets the maximum video size the local participant should receive from
     * remote participants. Will no-op if no data channel has been
     * established.
     *
     * @param {number} maxFrameHeight The maximum frame height, in pixels,
     * this receiver is willing to receive.
     * @returns {void}
     */
    setReceiverVideoConstraint(maxFrameHeight) {
        if (this._channel) {
            this._channel.sendReceiverVideoConstraintMessage(maxFrameHeight);
        }
    }

    /**
     * Elects the participant with the given id to be the selected participant
     * in order to always receive video for this participant (even when last n
     * is enabled).
     * If there is no channel, we store the value and send it through the
     * channel once it is created.
     * @param {string} id The user id.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    selectEndpoint(id) {
        // Cache the value if the channel is missing, until we open it.
        this._selectedEndpoint = id;
        if (this._channel && this._channelOpen) {
            this._channel.sendSelectedEndpointMessage(id);
        }
    }

    /**
     * Elects the participant with the given id to be the pinned participant
     * in order to always receive video for this participant (even when last n
     * is enabled).
     * @param {string} id The user id.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    pinEndpoint(id) {
        // Cache the value if the channel is missing, until we open it.
        this._pinnedEndpoint = id;
        if (this._channel && this._channelOpen) {
            this._channel.sendPinnedEndpointMessage(id);
        }
    }
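
    // Usage sketch (illustrative only): prioritizing a participant's video
    // and capping the received resolution. `rtc` and the endpoint id are
    // hypothetical.
    //
    //     rtc.selectEndpoint('abcd1234');
    //     rtc.pinEndpoint('abcd1234');
    //     rtc.setReceiverVideoConstraint(720);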

    /**
     * Adds a listener for the given event type on RTCUtils.
     * @param eventType The RTCEvents event type.
     * @param listener The listener function.
     */
    static addListener(eventType, listener) {
        RTCUtils.addListener(eventType, listener);
    }

    /**
     * Removes a previously added RTCUtils listener.
     * @param eventType The RTCEvents event type.
     * @param listener The listener function.
     */
    static removeListener(eventType, listener) {
        RTCUtils.removeListener(eventType, listener);
    }

    /**
     * Indicates whether the WebRTC layer is ready to be used.
     * @returns {boolean}
     */
    static isRTCReady() {
        return RTCUtils.isRTCReady();
    }

    /**
     * Initializes the underlying RTCUtils with the given options.
     * @param options The configuration options passed to RTCUtils.init.
     */
    static init(options = {}) {
        this.options = options;

        return RTCUtils.init(this.options);
    }

    /**
     * Returns the device availability information as reported by RTCUtils.
     */
    static getDeviceAvailability() {
        return RTCUtils.getDeviceAvailability();
    }

    /* eslint-disable max-params */

    /**
     * Creates a new <tt>TraceablePeerConnection</tt>.
     * @param {SignalingLayer} signaling The signaling layer that will
     * provide information about the media or participants which is not
     * carried over SDP.
     * @param {object} iceConfig An object describing the ICE config like
     * defined in the WebRTC specification.
     * @param {boolean} isP2P Indicates whether or not the new TPC will be
     * used in a peer to peer type of session.
     * @param {object} options The config options.
     * @param {boolean} options.disableSimulcast If set to 'true' will disable
     * the simulcast.
     * @param {boolean} options.disableRtx If set to 'true' will disable the
     * RTX.
     * @param {boolean} options.disableH264 If set to 'true' H264 will be
     * disabled by removing it from the SDP.
     * @param {boolean} options.preferH264 If set to 'true' H264 will be
     * preferred over other video codecs.
     * @return {TraceablePeerConnection}
     */
    createPeerConnection(signaling, iceConfig, isP2P, options) {
        const pcConstraints = RTC.getPCConstraints(isP2P);

        if (typeof options.abtestSuspendVideo !== 'undefined') {
            RTCUtils.setSuspendVideo(pcConstraints, options.abtestSuspendVideo);

            Statistics.analytics.addPermanentProperties(
                { abtestSuspendVideo: options.abtestSuspendVideo });
        }

        const newConnection
            = new TraceablePeerConnection(
                this,
                this.peerConnectionIdCounter,
                signaling,
                iceConfig, pcConstraints,
                isP2P, options);

        this.peerConnections.set(newConnection.id, newConnection);
        this.peerConnectionIdCounter += 1;

        return newConnection;
    }

    /* eslint-enable max-params */
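
    // Usage sketch (illustrative only, values are hypothetical): the
    // iceConfig follows the standard RTCConfiguration shape and the options
    // mirror the JSDoc above. `signalingLayer` stands in for the conference's
    // signaling layer instance.
    //
    //     const tpc = rtc.createPeerConnection(
    //         signalingLayer,
    //         { iceServers: [ { urls: 'stun:stun.example.org' } ] },
    //         /* isP2P */ false,
    //         { disableSimulcast: false, disableRtx: false });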

    /**
     * Removes the given peer connection from this RTC module instance.
     * @param {TraceablePeerConnection} traceablePeerConnection
     * @return {boolean} <tt>true</tt> if the given peer connection was
     * removed successfully or <tt>false</tt> if there was no peer connection
     * mapped in this RTC instance.
     */
    _removePeerConnection(traceablePeerConnection) {
        const id = traceablePeerConnection.id;

        if (this.peerConnections.has(id)) {
            // NOTE Remote tracks are not removed here.
            this.peerConnections.delete(id);

            return true;
        }

        return false;
    }

    /**
     * Adds a local track to the list of tracks managed by this RTC module.
     * @param {JitsiLocalTrack} track The track to add.
     * @throws {Error} If the track is null or undefined.
     */
    addLocalTrack(track) {
        if (!track) {
            throw new Error('track must not be null nor undefined');
        }

        this.localTracks.push(track);

        track.conference = this.conference;
    }

    /**
     * Returns the current value for "lastN" - the number of videos that will
     * be delivered. A value of -1 means unlimited (all available videos).
     * @return {number}
     */
    getLastN() {
        return this._lastN;
    }

    /**
     * Get local video track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalVideoTrack() {
        const localVideo = this.getLocalTracks(MediaType.VIDEO);

        return localVideo.length ? localVideo[0] : undefined;
    }

    /**
     * Get local audio track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalAudioTrack() {
        const localAudio = this.getLocalTracks(MediaType.AUDIO);

        return localAudio.length ? localAudio[0] : undefined;
    }

    /**
     * Returns the local tracks of the given media type, or all local tracks
     * if no specific type is given.
     * @param {MediaType} [mediaType] Optional media type filter
     * (audio or video).
     * @returns {Array<JitsiLocalTrack>}
     */
    getLocalTracks(mediaType) {
        let tracks = this.localTracks.slice();

        if (mediaType !== undefined) {
            tracks = tracks.filter(
                track => track.getType() === mediaType);
        }

        return tracks;
    }

    /**
     * Obtains all remote tracks currently known to this RTC module instance.
     * @param {MediaType} [mediaType] The remote tracks will be filtered
     * by their media type if this argument is specified.
     * @return {Array<JitsiRemoteTrack>}
     */
    getRemoteTracks(mediaType) {
        let remoteTracks = [];

        for (const tpc of this.peerConnections.values()) {
            const pcRemoteTracks = tpc.getRemoteTracks(undefined, mediaType);

            if (pcRemoteTracks) {
                remoteTracks = remoteTracks.concat(pcRemoteTracks);
            }
        }

        return remoteTracks;
    }

    /**
     * Sets the mute state for all local audio streams attached to the
     * conference.
     * @param {boolean} value The mute value.
     * @returns {Promise}
     */
    setAudioMute(value) {
        const mutePromises = [];

        this.getLocalTracks(MediaType.AUDIO).forEach(audioTrack => {
            // this is a Promise
            mutePromises.push(value ? audioTrack.mute() : audioTrack.unmute());
        });

        // Return a Promise combining all the mute/unmute Promises, so the
        // caller can wait for all of them to complete.
        return Promise.all(mutePromises);
    }
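
    // Usage sketch (illustrative only): muting all local audio and reading
    // back the local audio tracks. `rtc` is a hypothetical RTC instance.
    //
    //     rtc.setAudioMute(true)
    //         .then(() => console.log(
    //             'muted', rtc.getLocalTracks(MediaType.AUDIO).length,
    //             'audio track(s)'));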

    /**
     * Removes the given local track from the list of tracks managed by this
     * RTC module. Does nothing if the track is not found.
     * @param {JitsiLocalTrack} track The track to remove.
     */
    removeLocalTrack(track) {
        const pos = this.localTracks.indexOf(track);

        if (pos === -1) {
            return;
        }

        this.localTracks.splice(pos, 1);
    }

    /**
     * Removes all JitsiRemoteTracks associated with given MUC nickname
     * (resource part of the JID). Returns an array of removed tracks.
     *
     * @param {string} owner The resource part of the MUC JID.
     * @returns {JitsiRemoteTrack[]}
     */
    removeRemoteTracks(owner) {
        let removedTracks = [];

        for (const tpc of this.peerConnections.values()) {
            const pcRemovedTracks = tpc.removeRemoteTracks(owner);

            removedTracks = removedTracks.concat(pcRemovedTracks);
        }

        logger.debug(
            `Removed remote tracks for ${owner}`
                + ` count: ${removedTracks.length}`);

        return removedTracks;
    }

    /**
     * Returns a deep copy of the peer connection constraints for either a
     * P2P or a JVB connection.
     * @param {boolean} isP2P Whether the constraints are for a P2P
     * connection.
     */
    static getPCConstraints(isP2P) {
        const pcConstraints
            = isP2P ? RTCUtils.p2pPcConstraints : RTCUtils.pcConstraints;

        return JSON.parse(JSON.stringify(pcConstraints));
    }

    /**
     * Attaches the given media stream to the element identified by
     * elSelector (delegates to RTCUtils).
     * @param elSelector The element (or selector) to attach the stream to.
     * @param stream The MediaStream to attach.
     */
    static attachMediaStream(elSelector, stream) {
        return RTCUtils.attachMediaStream(elSelector, stream);
    }

    /**
     * Returns the id of the given stream.
     * @param {MediaStream} stream
     */
    static getStreamID(stream) {
        return RTCUtils.getStreamID(stream);
    }

    /**
     * Returns the id of the given track.
     * @param {MediaStreamTrack} track
     */
    static getTrackID(track) {
        return RTCUtils.getTrackID(track);
    }

    /**
     * Returns true if retrieving the list of input devices is supported
     * and false if not.
     */
    static isDeviceListAvailable() {
        return RTCUtils.isDeviceListAvailable();
    }

    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] Type of device to change. Default is
     * undefined or 'input', 'output' - for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    static isDeviceChangeAvailable(deviceType) {
        return RTCUtils.isDeviceChangeAvailable(deviceType);
    }

    /**
     * Returns the currently used audio output device id, where '' stands for
     * the default device.
     * @returns {string}
     */
    static getAudioOutputDevice() {
        return RTCUtils.getAudioOutputDevice();
    }

    /**
     * Returns the list of available media devices if it has been obtained,
     * otherwise an empty array is returned.
     * @returns {array} list of available media devices.
     */
    static getCurrentlyAvailableMediaDevices() {
        return RTCUtils.getCurrentlyAvailableMediaDevices();
    }

    /**
     * Returns event data for the device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    static getEventDataForActiveDevice(device) {
        return RTCUtils.getEventDataForActiveDevice(device);
    }

    /**
     * Sets the current audio output device.
     * @param {string} deviceId Id of an 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices().
     * @returns {Promise} resolves when the audio output is changed, is
     * rejected otherwise.
     */
    static setAudioOutputDevice(deviceId) {
        return RTCUtils.setAudioOutputDevice(deviceId);
    }
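
    // Usage sketch (illustrative only): routing audio to another output
    // device when supported. The device id is hypothetical.
    //
    //     if (RTC.isDeviceChangeAvailable('output')) {
    //         RTC.setAudioOutputDevice('speaker-2')
    //             .catch(error => console.warn('Failed to set output', error));
    //     }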

    /**
     * Returns <tt>true</tt> if the given WebRTC MediaStream is considered a
     * valid "user" stream, which means that it's not a "receive only" stream
     * nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox, use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as
     * opposed to Plan B where there are only 3 channels: audio, video and
     * data.
     *
     * @param {MediaStream} stream The WebRTC MediaStream instance.
     * @returns {boolean}
     */
    static isUserStream(stream) {
        return RTC.isUserStreamById(RTCUtils.getStreamID(stream));
    }

    /**
     * Returns <tt>true</tt> if a WebRTC MediaStream identified by the given
     * stream ID is considered a valid "user" stream, which means that it's
     * not a "receive only" stream nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox, use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as
     * opposed to Plan B where there are only 3 channels: audio, video and
     * data.
     *
     * @param {string} streamId The id of the WebRTC MediaStream.
     * @returns {boolean}
     */
    static isUserStreamById(streamId) {
        return streamId && streamId !== 'mixedmslabel'
            && streamId !== 'default';
    }
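
    // Usage sketch (illustrative only): filtering the JVB's "mixed" and
    // default streams out of a hypothetical list of MediaStreams.
    //
    //     const userStreams = allStreams.filter(s => RTC.isUserStream(s));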

    /**
     * Retrieves the list of available cameras / microphones.
     * @param {function} callback Will receive an array of devices as its
     * argument.
     */
    static enumerateDevices(callback) {
        RTCUtils.enumerateDevices(callback);
    }
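
    // Usage sketch (illustrative only): listing the available devices.
    //
    //     RTC.enumerateDevices(devices =>
    //         devices.forEach(device => console.log(device.kind, device.label)));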

    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param {MediaStream} mediaStream MediaStream object to stop.
     */
    static stopMediaStream(mediaStream) {
        RTCUtils.stopMediaStream(mediaStream);
    }

    /**
     * Returns whether desktop sharing is enabled or not.
     * @returns {boolean}
     */
    static isDesktopSharingEnabled() {
        return RTCUtils.isDesktopSharingEnabled();
    }

    /**
     * Closes the currently opened bridge channel.
     */
    closeBridgeChannel() {
        if (this._channel) {
            this._channel.close();
            this._channelOpen = false;

            this.removeListener(RTCEvents.LASTN_ENDPOINT_CHANGED,
                this._lastNChangeListener);
        }
    }

    /* eslint-disable max-params */

    /**
     * Sets the audio level of an audio track identified by its SSRC on the
     * given peer connection.
     * @param {TraceablePeerConnection} tpc The peer connection to which the
     * track belongs.
     * @param {number} ssrc The SSRC which identifies the track.
     * @param {number} audioLevel The new audio level value.
     * @param {boolean} isLocal Whether the report is about a local or a
     * remote track.
     */
    setAudioLevel(tpc, ssrc, audioLevel, isLocal) {
        const track = tpc.getTrackBySSRC(ssrc);

        if (!track) {
            return;
        } else if (!track.isAudioTrack()) {
            logger.warn(`Received audio level for non-audio track: ${ssrc}`);

            return;
        } else if (track.isLocal() !== isLocal) {
            logger.error(
                `${track} was expected to ${isLocal ? 'be' : 'not be'} local`);
        }

        track.setAudioLevel(audioLevel, tpc);
    }

    /* eslint-enable max-params */

    /**
     * Sends a message via the bridge channel.
     * @param {string} to The id of the endpoint that should receive the
     * message. If "" the message will be sent to all participants.
     * @param {object} payload The payload of the message.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails or there is no data channel created.
     */
    sendChannelMessage(to, payload) {
        if (this._channel) {
            this._channel.sendMessage(to, payload);
        } else {
            throw new Error('Channel support is disabled!');
        }
    }

    /**
     * Selects a new value for "lastN". The requested number of videos will be
     * delivered after the value takes effect. Set to -1 for unlimited (all
     * available videos).
     * @param {number} value The new value for lastN.
     */
    setLastN(value) {
        if (this._lastN !== value) {
            this._lastN = value;
            if (this._channel && this._channelOpen) {
                this._channel.sendSetLastNMessage(value);
            }
            this.eventEmitter.emit(RTCEvents.LASTN_VALUE_CHANGED, value);
        }
    }
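
    // Usage sketch (illustrative only): requesting video from at most 5
    // endpoints and checking membership afterwards. `rtc` and the endpoint
    // id are hypothetical.
    //
    //     rtc.setLastN(5);
    //     rtc.isInLastN('abcd1234');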

    /**
     * Indicates whether the endpoint id is currently included in the last N.
     * @param {string} id The endpoint id to check for last N membership.
     * @returns {boolean} true if the endpoint id is in the last N or if we
     * don't have bridge channel support, otherwise false.
     */
    isInLastN(id) {
        return !this._lastNEndpoints // lastNEndpoints not initialised yet.
            || this._lastNEndpoints.indexOf(id) > -1;
    }
}