
RTC.js

/* global __filename */
import DataChannels from './DataChannels';
import { getLogger } from 'jitsi-meet-logger';
import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
import JitsiLocalTrack from './JitsiLocalTrack.js';
import JitsiRemoteTrack from './JitsiRemoteTrack.js';
import JitsiTrackError from '../../JitsiTrackError';
import * as JitsiTrackErrors from '../../JitsiTrackErrors';
import Listenable from '../util/Listenable';
import * as MediaType from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents.js';
import RTCUtils from './RTCUtils.js';
import TraceablePeerConnection from './TraceablePeerConnection';
import VideoType from '../../service/RTC/VideoType';

const logger = getLogger(__filename);

/**
 * Creates <tt>JitsiLocalTrack</tt> instances from the track info objects
 * returned by RTCUtils.
 * @param {Array<Object>} tracksInfo the track descriptions.
 * @param {Object} options the options used to obtain the tracks.
 * @returns {Array<JitsiLocalTrack>}
 */
function createLocalTracks(tracksInfo, options) {
    const newTracks = [];
    let deviceId = null;

    tracksInfo.forEach(trackInfo => {
        if (trackInfo.mediaType === MediaType.AUDIO) {
            deviceId = options.micDeviceId;
        } else if (trackInfo.videoType === VideoType.CAMERA) {
            deviceId = options.cameraDeviceId;
        }
        const localTrack
            = new JitsiLocalTrack(
                trackInfo.stream,
                trackInfo.track,
                trackInfo.mediaType,
                trackInfo.videoType,
                trackInfo.resolution,
                deviceId,
                options.facingMode);

        newTracks.push(localTrack);
    });

    return newTracks;
}

export default class RTC extends Listenable {
    constructor(conference, options = {}) {
        super();
        this.conference = conference;

        /**
         * A map of active <tt>TraceablePeerConnection</tt>.
         * @type {Map.<number, TraceablePeerConnection>}
         */
        this.peerConnections = new Map();

        /**
         * The counter used to generate id numbers assigned to peer
         * connections.
         * @type {number}
         */
        this.peerConnectionIdCounter = 1;

        this.localTracks = [];

        // FIXME: We should support multiple streams per jid.
        this.remoteTracks = {};

        this.options = options;

        // A flag which indicates whether the data channel has been reported
        // as open. It can get out of sync if the data channel is closed from
        // the server side; this is intentional so that such errors become
        // visible.
        this.dataChannelsOpen = false;

        // Switch audio output device on all remote audio tracks. Local audio
        // tracks handle this event by themselves.
        if (RTCUtils.isDeviceChangeAvailable('output')) {
            RTCUtils.addListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                deviceId => {
                    const remoteAudioTracks
                        = this.getRemoteTracks(MediaType.AUDIO);

                    for (const track of remoteAudioTracks) {
                        track.setAudioOutput(deviceId);
                    }
                });
        }
    }

    /**
     * Creates the local MediaStreams.
     * @param {Object} [options] optional parameters
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {boolean} options.dontCreateJitsiTrack if <tt>true</tt> objects
     * with the following structure {stream: the Media Stream, type: "audio"
     * or "video", videoType: "camera" or "desktop"} will be returned through
     * the Promise, otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    static obtainAudioAndVideoPermissions(options) {
        return RTCUtils.obtainAudioAndVideoPermissions(options).then(
            tracksInfo => {
                const tracks = createLocalTracks(tracksInfo, options);

                return tracks.some(track => !track._isReceivingData())
                    ? Promise.reject(
                        new JitsiTrackError(
                            JitsiTrackErrors.NO_DATA_FROM_SOURCE))
                    : tracks;
            });
    }
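
    /*
     * Usage sketch (illustrative only, not part of this module): obtaining
     * local tracks and handling the NO_DATA_FROM_SOURCE rejection. The
     * option values and the calling code are assumptions.
     *
     *     RTC.obtainAudioAndVideoPermissions({
     *         devices: [ 'audio', 'video' ],
     *         micDeviceId: 'default',
     *         cameraDeviceId: 'default'
     *     })
     *     .then(tracks => tracks.forEach(t => console.log(t.getType())))
     *     .catch(error => console.error('Failed to create local tracks', error));
     */
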
    /**
     * Initializes the data channels of this instance.
     * @param peerconnection the associated PeerConnection.
     */
    initializeDataChannels(peerconnection) {
        if (this.options.config.openSctp) {
            this.dataChannels = new DataChannels(peerconnection,
                this.eventEmitter);
            this._dataChannelOpenListener = () => {
                // Mark that the data channel is open.
                this.dataChannelsOpen = true;

                // When the data channel becomes available, tell the bridge
                // about the video selections so that it can do adaptive
                // simulcast. We want the notification to trigger even if
                // userJid is undefined or null.
                // XXX why do we not do the same for pinned endpoints?
                try {
                    this.dataChannels.sendSelectedEndpointMessage(
                        this.selectedEndpoint);
                } catch (error) {
                    GlobalOnErrorHandler.callErrorHandler(error);
                    logger.error('Cannot sendSelectedEndpointMessage ',
                        this.selectedEndpoint, '. Error: ', error);
                }

                this.removeListener(RTCEvents.DATA_CHANNEL_OPEN,
                    this._dataChannelOpenListener);
                this._dataChannelOpenListener = null;
            };
            this.addListener(RTCEvents.DATA_CHANNEL_OPEN,
                this._dataChannelOpenListener);
        }
    }

    /**
     * Should be called when the current media session ends and after the
     * PeerConnection has been closed using the PeerConnection.close() method.
     */
    onCallEnded() {
        if (this.dataChannels) {
            // DataChannels are not explicitly closed, as closing the
            // PeerConnection at the end of the call triggers the data
            // channels' onclose events. The reference is cleared to disable
            // any logic related to the data channels.
            this.dataChannels = null;
            this.dataChannelsOpen = false;
        }
    }

    /**
     * Elects the participant with the given id to be the selected participant
     * in order to always receive video for this participant (even when last n
     * is enabled).
     * If there is no data channel yet, the value is stored and sent through
     * the channel once it is created.
     * @param id {string} the user id.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    selectEndpoint(id) {
        // Cache the value until the channel is open.
        this.selectedEndpoint = id;
        if (this.dataChannels && this.dataChannelsOpen) {
            this.dataChannels.sendSelectedEndpointMessage(id);
        }
    }
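
    /*
     * Illustrative sketch (hypothetical calling code; conference and
     * peerconnection are placeholders): the selected endpoint can be set
     * before the data channel opens, and the cached value is flushed by the
     * DATA_CHANNEL_OPEN listener registered in initializeDataChannels().
     *
     *     const rtc = new RTC(conference, { config: { openSctp: true } });
     *
     *     rtc.selectEndpoint('endpointA'); // cached, channel not open yet
     *     rtc.initializeDataChannels(peerconnection);
     *     // Once RTCEvents.DATA_CHANNEL_OPEN fires, 'endpointA' is sent to
     *     // the bridge automatically.
     */
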
    /**
     * Elects the participant with the given id to be the pinned participant
     * in order to always receive video for this participant (even when last n
     * is enabled).
     * @param id {string} the user id
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    pinEndpoint(id) {
        if (this.dataChannels) {
            this.dataChannels.sendPinnedEndpointMessage(id);
        } else {
            // FIXME: cache the value while there is no data channel created
            // and send the cached state once the channel is created.
            throw new Error('Data channels support is disabled!');
        }
    }

    static addListener(eventType, listener) {
        RTCUtils.addListener(eventType, listener);
    }

    static removeListener(eventType, listener) {
        RTCUtils.removeListener(eventType, listener);
    }

    static isRTCReady() {
        return RTCUtils.isRTCReady();
    }

    static init(options = {}) {
        this.options = options;

        return RTCUtils.init(this.options);
    }

    static getDeviceAvailability() {
        return RTCUtils.getDeviceAvailability();
    }

    /**
     * Creates a new <tt>TraceablePeerConnection</tt>.
     * @param {SignalingLayer} signaling the signaling layer that will
     * provide information about the media or participants which is not
     * carried over SDP.
     * @param {Object} iceConfig an object describing the ICE config as
     * defined in the WebRTC specification.
     * @param {Object} options the config options
     * @param {boolean} options.disableSimulcast if set to 'true' will disable
     * the simulcast
     * @param {boolean} options.disableRtx if set to 'true' will disable RTX
     * @param {boolean} options.preferH264 if set to 'true' H264 will be
     * preferred over other video codecs.
     * @return {TraceablePeerConnection}
     */
    createPeerConnection(signaling, iceConfig, options) {
        const newConnection
            = new TraceablePeerConnection(
                this,
                this.peerConnectionIdCounter,
                signaling, iceConfig, RTC.getPCConstraints(), options);

        this.peerConnections.set(newConnection.id, newConnection);
        this.peerConnectionIdCounter += 1;

        return newConnection;
    }
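
    /*
     * Illustrative sketch (assumed values, not part of this file): creating
     * and later removing a TraceablePeerConnection. The signaling layer
     * instance and the ICE servers below are placeholders.
     *
     *     const pc = rtc.createPeerConnection(
     *         signalingLayer,
     *         { iceServers: [ { urls: 'stun:stun.example.org' } ] },
     *         { disableSimulcast: false, disableRtx: false, preferH264: true });
     *
     *     // ...when the session using this peer connection ends:
     *     rtc._removePeerConnection(pc);
     */
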
    /**
     * Removes the given peer connection from this RTC module instance.
     * @param {TraceablePeerConnection} traceablePeerConnection
     * @return {boolean} <tt>true</tt> if the given peer connection was
     * removed successfully or <tt>false</tt> if there was no peer connection
     * mapped in this RTC instance.
     */
    _removePeerConnection(traceablePeerConnection) {
        const id = traceablePeerConnection.id;

        if (this.peerConnections.has(id)) {
            // NOTE Remote tracks are not removed here.
            this.peerConnections.delete(id);

            return true;
        }

        return false;
    }

    addLocalTrack(track) {
        if (!track) {
            throw new Error('track must not be null nor undefined');
        }

        this.localTracks.push(track);

        track.conference = this.conference;
    }

    /**
     * Gets the local video track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalVideoTrack() {
        const localVideo = this.getLocalTracks(MediaType.VIDEO);

        return localVideo.length ? localVideo[0] : undefined;
    }

    /**
     * Gets the local audio track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalAudioTrack() {
        const localAudio = this.getLocalTracks(MediaType.AUDIO);

        return localAudio.length ? localAudio[0] : undefined;
    }

    /**
     * Returns the local tracks of the given media type, or all local tracks
     * if no specific type is given.
     * @param {MediaType} [mediaType] optional media type filter
     * (audio or video).
     */
    getLocalTracks(mediaType) {
        let tracks = this.localTracks.slice();

        if (mediaType !== undefined) {
            tracks = tracks.filter(
                track => track.getType() === mediaType);
        }

        return tracks;
    }

    /**
     * Obtains all remote tracks currently known to this RTC module instance.
     * @param {MediaType} [mediaType] the remote tracks will be filtered
     * by their media type if this argument is specified.
     * @return {Array<JitsiRemoteTrack>}
     */
    getRemoteTracks(mediaType) {
        const remoteTracks = [];
        const remoteEndpoints = Object.keys(this.remoteTracks);

        for (const endpoint of remoteEndpoints) {
            const endpointMediaTypes
                = Object.keys(this.remoteTracks[endpoint]);

            for (const trackMediaType of endpointMediaTypes) {
                // per media type filtering
                if (!mediaType || mediaType === trackMediaType) {
                    const mediaTrack
                        = this.remoteTracks[endpoint][trackMediaType];

                    if (mediaTrack) {
                        remoteTracks.push(mediaTrack);
                    }
                }
            }
        }

        return remoteTracks;
    }
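
    /*
     * Shape of this.remoteTracks, for reference (a sketch based on how the
     * map is populated in _createRemoteTrack; the endpoint ids are made up):
     *
     *     {
     *         'endpointA': {
     *             audio: <JitsiRemoteTrack>,
     *             video: <JitsiRemoteTrack>
     *         },
     *         'endpointB': {
     *             audio: <JitsiRemoteTrack>
     *         }
     *     }
     *
     * There is one track per media type per endpoint, which is why
     * getRemoteTracks() walks endpoints first and media types second.
     */
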
    /**
     * Gets the JitsiRemoteTrack for the passed MediaType associated with the
     * given MUC nickname (resource part of the JID).
     * @param type audio or video.
     * @param resource the resource part of the MUC JID
     * @returns {JitsiRemoteTrack|null}
     */
    getRemoteTrackByType(type, resource) {
        if (this.remoteTracks[resource]) {
            return this.remoteTracks[resource][type];
        }

        return null;
    }

    /**
     * Gets the JitsiRemoteTrack for the AUDIO MediaType associated with the
     * given MUC nickname (resource part of the JID).
     * @param resource the resource part of the MUC JID
     * @returns {JitsiRemoteTrack|null}
     */
    getRemoteAudioTrack(resource) {
        return this.getRemoteTrackByType(MediaType.AUDIO, resource);
    }

    /**
     * Gets the JitsiRemoteTrack for the VIDEO MediaType associated with the
     * given MUC nickname (resource part of the JID).
     * @param resource the resource part of the MUC JID
     * @returns {JitsiRemoteTrack|null}
     */
    getRemoteVideoTrack(resource) {
        return this.getRemoteTrackByType(MediaType.VIDEO, resource);
    }

    /**
     * Sets mute for all local audio streams attached to the conference.
     * @param value the mute value
     * @returns {Promise}
     */
    setAudioMute(value) {
        const mutePromises = [];

        this.getLocalTracks(MediaType.AUDIO).forEach(audioTrack => {
            // this is a Promise
            mutePromises.push(value ? audioTrack.mute() : audioTrack.unmute());
        });

        // We return a Promise over all the mute/unmute Promises so callers
        // can wait for their completion.
        return Promise.all(mutePromises);
    }
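
    /*
     * Illustrative sketch (hypothetical calling code): because
     * setAudioMute() returns Promise.all() over the individual
     * mute()/unmute() promises, the caller can wait until every local audio
     * track has been updated.
     *
     *     rtc.setAudioMute(true)
     *         .then(() => console.log('all local audio tracks muted'))
     *         .catch(error => console.error('muting failed', error));
     */
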
    removeLocalTrack(track) {
        const pos = this.localTracks.indexOf(track);

        if (pos === -1) {
            return;
        }

        this.localTracks.splice(pos, 1);
    }

    /* eslint-disable max-params */
    /**
     * Initializes a new JitsiRemoteTrack instance with the data provided by
     * the signaling layer and SDP.
     *
     * @param {string} ownerEndpointId
     * @param {MediaStream} stream
     * @param {MediaStreamTrack} track
     * @param {MediaType} mediaType
     * @param {VideoType|undefined} videoType
     * @param {string} ssrc
     * @param {boolean} muted
     */
    _createRemoteTrack(
            ownerEndpointId,
            stream,
            track,
            mediaType,
            videoType,
            ssrc,
            muted) {
        const remoteTrack
            = new JitsiRemoteTrack(
                this,
                this.conference,
                ownerEndpointId,
                stream,
                track,
                mediaType,
                videoType,
                ssrc,
                muted);
        const remoteTracks
            = this.remoteTracks[ownerEndpointId]
                || (this.remoteTracks[ownerEndpointId] = {});

        if (remoteTracks[mediaType]) {
            logger.error(
                'Overwriting remote track!',
                ownerEndpointId,
                mediaType);
        }
        remoteTracks[mediaType] = remoteTrack;

        this.eventEmitter.emit(RTCEvents.REMOTE_TRACK_ADDED, remoteTrack);
    }

    /* eslint-enable max-params */

    /**
     * Removes all JitsiRemoteTracks associated with the given MUC nickname
     * (resource part of the JID). Returns an array of the removed tracks.
     *
     * @param {string} owner - The resource part of the MUC JID.
     * @returns {JitsiRemoteTrack[]}
     */
    removeRemoteTracks(owner) {
        const removedTracks = [];

        if (this.remoteTracks[owner]) {
            const removedAudioTrack
                = this.remoteTracks[owner][MediaType.AUDIO];
            const removedVideoTrack
                = this.remoteTracks[owner][MediaType.VIDEO];

            removedAudioTrack && removedTracks.push(removedAudioTrack);
            removedVideoTrack && removedTracks.push(removedVideoTrack);

            delete this.remoteTracks[owner];
        }

        return removedTracks;
    }

    /**
     * Finds a remote track by its stream and track ids.
     * @param {string} streamId the media stream id as defined by the WebRTC
     * @param {string} trackId the media track id as defined by the WebRTC
     * @return {JitsiRemoteTrack|undefined}
     * @private
     */
    _getRemoteTrackById(streamId, trackId) {
        let result;

        // .find will break the loop once the first match is found
        Object.keys(this.remoteTracks).find(endpoint => {
            const endpointTracks = this.remoteTracks[endpoint];

            return endpointTracks && Object.keys(endpointTracks).find(
                mediaType => {
                    const mediaTrack = endpointTracks[mediaType];

                    if (mediaTrack
                        && mediaTrack.getStreamId() === streamId
                        && mediaTrack.getTrackId() === trackId) {
                        result = mediaTrack;

                        return true;
                    }

                    return false;
                });
        });

        return result;
    }

    /**
     * Removes the <tt>JitsiRemoteTrack</tt> identified by the given stream
     * and track ids.
     *
     * @param {string} streamId media stream id as defined by the WebRTC
     * @param {string} trackId media track id as defined by the WebRTC
     * @returns {JitsiRemoteTrack|undefined} the track which has been removed
     * or <tt>undefined</tt> if no track matching the given stream and track
     * ids was found.
     */
    _removeRemoteTrack(streamId, trackId) {
        const toBeRemoved = this._getRemoteTrackById(streamId, trackId);

        if (toBeRemoved) {
            toBeRemoved.dispose();

            delete this.remoteTracks[
                toBeRemoved.getParticipantId()][toBeRemoved.getType()];

            this.eventEmitter.emit(
                RTCEvents.REMOTE_TRACK_REMOVED, toBeRemoved);
        }

        return toBeRemoved;
    }

    static getPCConstraints() {
        return RTCUtils.pcConstraints;
    }

    static attachMediaStream(elSelector, stream) {
        return RTCUtils.attachMediaStream(elSelector, stream);
    }

    static getStreamID(stream) {
        return RTCUtils.getStreamID(stream);
    }

    /**
     * Returns true if retrieving the list of input devices is supported and
     * false if not.
     */
    static isDeviceListAvailable() {
        return RTCUtils.isDeviceListAvailable();
    }

    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] - type of device to change. Default is
     * undefined or 'input', 'output' - for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    static isDeviceChangeAvailable(deviceType) {
        return RTCUtils.isDeviceChangeAvailable(deviceType);
    }

    /**
     * Returns the currently used audio output device id; '' stands for the
     * default device.
     * @returns {string}
     */
    static getAudioOutputDevice() {
        return RTCUtils.getAudioOutputDevice();
    }

    /**
     * Returns the list of available media devices if it has been obtained;
     * otherwise an empty array is returned.
     * @returns {Array} list of available media devices.
     */
    static getCurrentlyAvailableMediaDevices() {
        return RTCUtils.getCurrentlyAvailableMediaDevices();
    }

    /**
     * Returns event data for the device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    static getEventDataForActiveDevice(device) {
        return RTCUtils.getEventDataForActiveDevice(device);
    }

    /**
     * Sets the current audio output device.
     * @param {string} deviceId - id of an 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices()
     * @returns {Promise} - resolves when the audio output is changed, is
     * rejected otherwise
     */
    static setAudioOutputDevice(deviceId) {
        return RTCUtils.setAudioOutputDevice(deviceId);
    }
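
    /*
     * Illustrative sketch (hypothetical calling code): enumerating devices
     * and switching the audio output. Remote audio tracks follow
     * automatically via the AUDIO_OUTPUT_DEVICE_CHANGED listener installed
     * in the constructor.
     *
     *     RTC.enumerateDevices(devices => {
     *         const speaker = devices.find(d => d.kind === 'audiooutput');
     *
     *         if (speaker && RTC.isDeviceChangeAvailable('output')) {
     *             RTC.setAudioOutputDevice(speaker.deviceId)
     *                 .catch(error =>
     *                     console.error('output change failed', error));
     *         }
     *     });
     */
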
    /**
     * Returns <tt>true</tt> if the given WebRTC MediaStream is considered a
     * valid "user" stream, which means that it's not a "receive only" stream
     * nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox, use recvonly
     * "streams/channels/tracks" for receiving remote streams/tracks, as
     * opposed to Plan B where there are only 3 channels: audio, video and
     * data.
     *
     * @param {MediaStream} stream the WebRTC MediaStream instance
     * @returns {boolean}
     */
    static isUserStream(stream) {
        return RTC.isUserStreamById(RTCUtils.getStreamID(stream));
    }

    /**
     * Returns <tt>true</tt> if a WebRTC MediaStream identified by the given
     * stream ID is considered a valid "user" stream, which means that it's
     * not a "receive only" stream nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox, use recvonly
     * "streams/channels/tracks" for receiving remote streams/tracks, as
     * opposed to Plan B where there are only 3 channels: audio, video and
     * data.
     *
     * @param {string} streamId the id of WebRTC MediaStream
     * @returns {boolean}
     */
    static isUserStreamById(streamId) {
        return streamId && streamId !== 'mixedmslabel'
            && streamId !== 'default';
    }

    /**
     * Retrieves the list of available cameras/microphones.
     * @param {function} callback will receive an array of devices as an
     * argument
     */
    static enumerateDevices(callback) {
        RTCUtils.enumerateDevices(callback);
    }

    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    static stopMediaStream(mediaStream) {
        RTCUtils.stopMediaStream(mediaStream);
    }

    /**
     * Returns whether desktop sharing is enabled or not.
     * @returns {boolean}
     */
    static isDesktopSharingEnabled() {
        return RTCUtils.isDesktopSharingEnabled();
    }

    /**
     * Closes all currently opened data channels.
     */
    closeAllDataChannels() {
        if (this.dataChannels) {
            this.dataChannels.closeAllChannels();
            this.dataChannelsOpen = false;
        }
    }

    setAudioLevel(resource, audioLevel) {
        if (!resource) {
            return;
        }
        const audioTrack = this.getRemoteAudioTrack(resource);

        if (audioTrack) {
            audioTrack.setAudioLevel(audioLevel);
        }
    }

    /**
     * Searches in localTracks (the session stores the ssrc for audio and
     * video) and remoteTracks for the ssrc and returns the corresponding
     * resource.
     * @param ssrc the ssrc to check.
     */
    getResourceBySSRC(ssrc) {
        // FIXME: Convert the SSRCs in the whole project to use the same type.
        // Now we are using number and string.
        if (this.getLocalTracks().find(
                // eslint-disable-next-line eqeqeq
                localTrack => localTrack.getSSRC() == ssrc)) {
            return this.conference.myUserId();
        }

        const track = this.getRemoteTrackBySSRC(ssrc);

        return track ? track.getParticipantId() : null;
    }

    /**
     * Searches in remoteTracks for the ssrc and returns the corresponding
     * track.
     * @param ssrc the ssrc to check.
     * @return {JitsiRemoteTrack|undefined} the first remote track that
     * matches the given SSRC or <tt>undefined</tt> if no such track was
     * found.
     */
    getRemoteTrackBySSRC(ssrc) {
        // FIXME: Convert the SSRCs in the whole project to use the same type.
        // Now we are using number and string.
        // eslint-disable-next-line eqeqeq
        return this.getRemoteTracks().find(t => ssrc == t.getSSRC());
    }

    /**
     * Handles remote track mute / unmute events.
     * @param type {string} "audio" or "video"
     * @param isMuted {boolean} the new mute state
     * @param from {string} user id
     */
    handleRemoteTrackMute(type, isMuted, from) {
        const track = this.getRemoteTrackByType(type, from);

        if (track) {
            track.setMute(isMuted);
        }
    }

    /**
     * Handles remote track video type events.
     * @param value {string} the new video type
     * @param from {string} user id
     */
    handleRemoteTrackVideoTypeChanged(value, from) {
        const videoTrack = this.getRemoteVideoTrack(from);

        if (videoTrack) {
            videoTrack._setVideoType(value);
        }
    }

    /**
     * Sends a message via the data channels.
     * @param to {string} the id of the endpoint that should receive the
     * message. If "" the message will be sent to all participants.
     * @param payload {object} the payload of the message.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails or there is no data channel created
     */
    sendDataChannelMessage(to, payload) {
        if (this.dataChannels) {
            this.dataChannels.sendDataChannelMessage(to, payload);
        } else {
            throw new Error('Data channels support is disabled!');
        }
    }

    /**
     * Selects a new value for "lastN". The requested number of videos will be
     * delivered after the value is in effect. Set to -1 for unlimited or all
     * available videos.
     * @param value {int} the new value for lastN.
     * @throws Error if there is no data channel created.
     */
    setLastN(value) {
        if (this.dataChannels) {
            this.dataChannels.sendSetLastNMessage(value);
        } else {
            throw new Error('Data channels support is disabled!');
        }
    }
}
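
/*
 * Illustrative sketch (hypothetical calling code) of the data channel
 * messaging surface: each of these calls throws if data channel support is
 * disabled, so callers typically guard or catch.
 *
 *     try {
 *         rtc.setLastN(3);                   // limit the number of received videos
 *         rtc.pinEndpoint('endpointA');      // always receive this endpoint
 *         rtc.sendDataChannelMessage('', { type: 'hello' }); // broadcast to all
 *     } catch (error) {
 *         console.error('Data channels are not available', error);
 *     }
 */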