
RTC.js

/* global __filename */
import DataChannels from './DataChannels';
import { getLogger } from 'jitsi-meet-logger';
import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
import * as JitsiConferenceEvents from '../../JitsiConferenceEvents';
import JitsiLocalTrack from './JitsiLocalTrack.js';
import JitsiTrackError from '../../JitsiTrackError';
import * as JitsiTrackErrors from '../../JitsiTrackErrors';
import Listenable from '../util/Listenable';
import * as MediaType from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents.js';
import RTCUtils from './RTCUtils.js';
import TraceablePeerConnection from './TraceablePeerConnection';
import VideoType from '../../service/RTC/VideoType';

const logger = getLogger(__filename);

let rtcTrackIdCounter = 0;

/**
 * Creates <tt>JitsiLocalTrack</tt> instances from the descriptions of the
 * streams which have just been acquired.
 * @param {Array<Object>} tracksInfo the track descriptions (stream, track,
 * mediaType, videoType, resolution) returned by RTCUtils.
 * @param {Object} options the options passed to obtainAudioAndVideoPermissions
 * (cameraDeviceId, micDeviceId, facingMode).
 * @returns {Array<JitsiLocalTrack>} the newly created local tracks.
 */
function createLocalTracks(tracksInfo, options) {
    const newTracks = [];
    let deviceId = null;

    tracksInfo.forEach(trackInfo => {
        if (trackInfo.mediaType === MediaType.AUDIO) {
            deviceId = options.micDeviceId;
        } else if (trackInfo.videoType === VideoType.CAMERA) {
            deviceId = options.cameraDeviceId;
        }
        rtcTrackIdCounter += 1;
        const localTrack
            = new JitsiLocalTrack(
                rtcTrackIdCounter,
                trackInfo.stream,
                trackInfo.track,
                trackInfo.mediaType,
                trackInfo.videoType,
                trackInfo.resolution,
                deviceId,
                options.facingMode);

        newTracks.push(localTrack);
    });

    return newTracks;
}
/**
 * The RTC module of a conference. Manages the local and remote tracks, the
 * <tt>TraceablePeerConnection</tt>s and the bridge data channels.
 */
export default class RTC extends Listenable {
    /**
     * Creates the RTC module for the given conference.
     * @param conference the conference which owns this RTC module.
     * @param {Object} [options] the RTC options.
     */
    constructor(conference, options = {}) {
        super();
        this.conference = conference;

        /**
         * A map of active <tt>TraceablePeerConnection</tt>.
         * @type {Map.<number, TraceablePeerConnection>}
         */
        this.peerConnections = new Map();

        /**
         * The counter used to generate id numbers assigned to peer connections
         * @type {number}
         */
        this.peerConnectionIdCounter = 1;

        this.localTracks = [];
        this.options = options;

        // A flag which indicates whether the data channels have been opened.
        // The flag can get out of sync if the data channel is closed by the
        // server. That is a desired behaviour, so that errors surface when
        // this happens.
        this.dataChannelsOpen = false;

        /**
         * The value specified to the last invocation of setLastN before the
         * data channels completed opening. If non-null, the value will be sent
         * through a data channel (once) as soon as it opens and will then be
         * discarded.
         *
         * @private
         * @type {number}
         */
        this._lastN = null;

        // Defines the last N endpoints list. It can be null or an array once
        // initialised with a datachannel last N event.
        // @type {Array<string>|null}
        this._lastNEndpoints = null;

        // The last N change listener.
        this._lastNChangeListener = this._onLastNChanged.bind(this);

        // Switch audio output device on all remote audio tracks. Local audio
        // tracks handle this event by themselves.
        if (RTCUtils.isDeviceChangeAvailable('output')) {
            RTCUtils.addListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                deviceId => {
                    const remoteAudioTracks
                        = this.getRemoteTracks(MediaType.AUDIO);

                    for (const track of remoteAudioTracks) {
                        track.setAudioOutput(deviceId);
                    }
                });
        }
    }
    /**
     * Creates the local MediaStreams.
     * @param {Object} [options] optional parameters
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {boolean} options.dontCreateJitsiTrack if <tt>true</tt> objects
     * with the following structure {stream: the Media Stream, type: "audio" or
     * "video", videoType: "camera" or "desktop"} will be returned through the
     * Promise, otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    static obtainAudioAndVideoPermissions(options) {
        return RTCUtils.obtainAudioAndVideoPermissions(options).then(
            tracksInfo => {
                const tracks = createLocalTracks(tracksInfo, options);

                return tracks.some(track => !track._isReceivingData())
                    ? Promise.reject(
                        new JitsiTrackError(
                            JitsiTrackErrors.NO_DATA_FROM_SOURCE))
                    : tracks;
            });
    }
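
    // Usage sketch (added for illustration, not part of the original source):
    // conference setup code would typically request both device types and
    // keep the resulting JitsiLocalTracks, e.g.:
    //
    //     RTC.obtainAudioAndVideoPermissions({
    //         devices: [ 'audio', 'video' ], // assumed device names
    //         cameraDeviceId: preferredCameraId, // assumed caller variables
    //         micDeviceId: preferredMicId
    //     }).then(tracks => tracks.forEach(track => rtc.addLocalTrack(track)));
    //
    // The option values above are assumptions; only the option names come from
    // the JSDoc of obtainAudioAndVideoPermissions.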
    /**
     * Initializes the data channels of this instance.
     * @param peerconnection the associated PeerConnection.
     */
    initializeDataChannels(peerconnection) {
        if (this.options.config.openSctp) {
            this.dataChannels = new DataChannels(peerconnection,
                this.eventEmitter);
            this._dataChannelOpenListener = () => {
                // Mark that the data channels have opened.
                this.dataChannelsOpen = true;

                // When the data channel becomes available, tell the bridge
                // about the video selection so that it can do adaptive
                // simulcast. We want the notification to trigger even if
                // userJid is undefined, or null.
                // XXX why do we not do the same for pinned endpoints?
                try {
                    this.dataChannels.sendSelectedEndpointMessage(
                        this.selectedEndpoint);
                } catch (error) {
                    GlobalOnErrorHandler.callErrorHandler(error);
                    logger.error('Cannot sendSelectedEndpointMessage ',
                        this.selectedEndpoint, '. Error: ', error);
                }

                this.removeListener(RTCEvents.DATA_CHANNEL_OPEN,
                    this._dataChannelOpenListener);
                this._dataChannelOpenListener = null;

                // If setLastN was invoked before the data channels completed
                // opening, apply the specified value now that the data
                // channels are open.
                if (this._lastN !== null) {
                    this.setLastN(this._lastN);
                    this._lastN = null;
                }
            };
            this.addListener(RTCEvents.DATA_CHANNEL_OPEN,
                this._dataChannelOpenListener);

            // Add Last N change listener.
            this.addListener(RTCEvents.LASTN_ENDPOINT_CHANGED,
                this._lastNChangeListener);
        }
    }
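
    // Note (illustrative, not part of the original source): data channels are
    // only created when the deployment config enables SCTP, i.e. the module
    // was constructed with something along the lines of
    //
    //     const rtc = new RTC(conference, { config: { openSctp: true } });
    //     rtc.initializeDataChannels(peerconnection);
    //
    // where `peerconnection` is assumed to be the bridge peer connection that
    // carries the SCTP channel.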
    /**
     * Receives events when Last N has changed.
     * @param {array} lastNEndpoints the new Last N endpoints.
     * @private
     */
    _onLastNChanged(lastNEndpoints = []) {
        const oldLastNEndpoints = this._lastNEndpoints || [];
        let leavingLastNEndpoints = [];
        let enteringLastNEndpoints = [];

        this._lastNEndpoints = lastNEndpoints;

        leavingLastNEndpoints = oldLastNEndpoints.filter(
            id => !this.isInLastN(id));

        enteringLastNEndpoints = lastNEndpoints.filter(
            id => oldLastNEndpoints.indexOf(id) === -1);

        this.conference.eventEmitter.emit(
            JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED,
            leavingLastNEndpoints,
            enteringLastNEndpoints);
    }

    /**
     * Should be called when the current media session ends and after the
     * PeerConnection has been closed using PeerConnection.close() method.
     */
    onCallEnded() {
        if (this.dataChannels) {
            // DataChannels are not explicitly closed as the PeerConnection
            // is closed on call ended which triggers data channel onclose
            // events. The reference is cleared to disable any logic related
            // to the data channels.
            this.dataChannels = null;
            this.dataChannelsOpen = false;
        }
    }

    /**
     * Elects the participant with the given id to be the selected participant
     * in order to always receive video for this participant (even when last n
     * is enabled).
     * If there is no data channel yet, the id is stored and sent through the
     * channel once it is created.
     * @param id {string} the user id.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    selectEndpoint(id) {
        // Cache the value while the channel is missing; it will be sent once
        // the channel opens.
        this.selectedEndpoint = id;
        if (this.dataChannels && this.dataChannelsOpen) {
            this.dataChannels.sendSelectedEndpointMessage(id);
        }
    }
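
    // Usage sketch (illustrative): the UI layer would call this when the user
    // selects a participant's video, e.g.:
    //
    //     rtc.selectEndpoint(participantId); // participantId is assumed here
    //
    // If the bridge data channel is not open yet, the id is only cached and is
    // re-sent from the DATA_CHANNEL_OPEN handler in initializeDataChannels().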
    /**
     * Elects the participant with the given id to be the pinned participant in
     * order to always receive video for this participant (even when last n is
     * enabled).
     * @param id {string} the user id
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    pinEndpoint(id) {
        if (this.dataChannels) {
            this.dataChannels.sendPinnedEndpointMessage(id);
        } else {
            // FIXME: cache value while there is no data channel created
            // and send the cached state once channel is created
            throw new Error('Data channels support is disabled!');
        }
    }

    /**
     * Registers a listener with RTCUtils for the given event type.
     * @param eventType the RTCEvents type.
     * @param listener the callback to be notified.
     */
    static addListener(eventType, listener) {
        RTCUtils.addListener(eventType, listener);
    }

    /**
     * Removes a listener previously registered with RTCUtils for the given
     * event type.
     * @param eventType the RTCEvents type.
     * @param listener the callback to be removed.
     */
    static removeListener(eventType, listener) {
        RTCUtils.removeListener(eventType, listener);
    }

    /**
     * Indicates whether the underlying WebRTC implementation has been
     * initialized and is ready for use.
     */
    static isRTCReady() {
        return RTCUtils.isRTCReady();
    }

    /**
     * Initializes RTCUtils with the given options.
     * @param {Object} [options] the initialization options.
     */
    static init(options = {}) {
        this.options = options;

        return RTCUtils.init(this.options);
    }

    /**
     * Returns the device availability information reported by RTCUtils.
     */
    static getDeviceAvailability() {
        return RTCUtils.getDeviceAvailability();
    }

    /* eslint-disable max-params */

    /**
     * Creates a new <tt>TraceablePeerConnection</tt>.
     * @param {SignalingLayer} signaling the signaling layer that will
     * provide information about the media or participants which is not carried
     * over SDP.
     * @param {Object} iceConfig an object describing the ICE config like
     * defined in the WebRTC specification.
     * @param {boolean} isP2P indicates whether or not the new TPC will be used
     * in a peer to peer type of session
     * @param {Object} options the config options
     * @param {boolean} options.disableSimulcast if set to 'true' will disable
     * the simulcast
     * @param {boolean} options.disableRtx if set to 'true' will disable the RTX
     * @param {boolean} options.preferH264 if set to 'true' H264 will be
     * preferred over other video codecs.
     * @return {TraceablePeerConnection}
     */
    createPeerConnection(signaling, iceConfig, isP2P, options) {
        const newConnection
            = new TraceablePeerConnection(
                this,
                this.peerConnectionIdCounter,
                signaling, iceConfig, RTC.getPCConstraints(), isP2P, options);

        this.peerConnections.set(newConnection.id, newConnection);
        this.peerConnectionIdCounter += 1;

        return newConnection;
    }
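
    // Usage sketch (illustrative; the ICE server and option values are made
    // up for the example): a session would create its peer connection roughly
    // like
    //
    //     const tpc = rtc.createPeerConnection(
    //         signalingLayer,
    //         { iceServers: [ { urls: 'stun:stun.example.com:3478' } ] },
    //         false /* isP2P */,
    //         { disableSimulcast: false, disableRtx: false, preferH264: true });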
    /* eslint-enable max-params */

    /**
     * Removes the given peer connection from this RTC module instance.
     * @param {TraceablePeerConnection} traceablePeerConnection
     * @return {boolean} <tt>true</tt> if the given peer connection was removed
     * successfully or <tt>false</tt> if there was no peer connection mapped in
     * this RTC instance.
     */
    _removePeerConnection(traceablePeerConnection) {
        const id = traceablePeerConnection.id;

        if (this.peerConnections.has(id)) {
            // NOTE Remote tracks are not removed here.
            this.peerConnections.delete(id);

            return true;
        }

        return false;
    }

    /**
     * Adds a local track to this RTC module instance and associates it with
     * the conference.
     * @param track the JitsiLocalTrack to be added.
     * @throws Error if the track is null or undefined.
     */
    addLocalTrack(track) {
        if (!track) {
            throw new Error('track must not be null nor undefined');
        }

        this.localTracks.push(track);

        track.conference = this.conference;
    }

    /**
     * Get local video track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalVideoTrack() {
        const localVideo = this.getLocalTracks(MediaType.VIDEO);

        return localVideo.length ? localVideo[0] : undefined;
    }

    /**
     * Get local audio track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalAudioTrack() {
        const localAudio = this.getLocalTracks(MediaType.AUDIO);

        return localAudio.length ? localAudio[0] : undefined;
    }

    /**
     * Returns the local tracks of the given media type, or all local tracks if
     * no specific type is given.
     * @param {MediaType} [mediaType] optional media type filter
     * (audio or video).
     */
    getLocalTracks(mediaType) {
        let tracks = this.localTracks.slice();

        if (mediaType !== undefined) {
            tracks = tracks.filter(
                track => track.getType() === mediaType);
        }

        return tracks;
    }

    /**
     * Obtains all remote tracks currently known to this RTC module instance.
     * @param {MediaType} [mediaType] the remote tracks will be filtered
     * by their media type if this argument is specified.
     * @return {Array<JitsiRemoteTrack>}
     */
    getRemoteTracks(mediaType) {
        let remoteTracks = [];

        for (const tpc of this.peerConnections.values()) {
            const pcRemoteTracks = tpc.getRemoteTracks(undefined, mediaType);

            if (pcRemoteTracks) {
                remoteTracks = remoteTracks.concat(pcRemoteTracks);
            }
        }

        return remoteTracks;
    }

    /**
     * Sets mute for all local audio streams attached to the conference.
     * @param value the mute value
     * @returns {Promise}
     */
    setAudioMute(value) {
        const mutePromises = [];

        this.getLocalTracks(MediaType.AUDIO).forEach(audioTrack => {
            // this is a Promise
            mutePromises.push(value ? audioTrack.mute() : audioTrack.unmute());
        });

        // We return a Promise from all Promises so we can wait for their
        // execution.
        return Promise.all(mutePromises);
    }
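
    // Usage sketch (illustrative): muting every local audio track and waiting
    // for all of the mute operations to settle, e.g.
    //
    //     rtc.setAudioMute(true).then(() => logger.info('Local audio muted'));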
    /**
     * Removes the given local track from this RTC module instance, if it was
     * previously added.
     * @param track the JitsiLocalTrack to be removed.
     */
    removeLocalTrack(track) {
        const pos = this.localTracks.indexOf(track);

        if (pos === -1) {
            return;
        }

        this.localTracks.splice(pos, 1);
    }

    /**
     * Removes all JitsiRemoteTracks associated with given MUC nickname
     * (resource part of the JID). Returns array of removed tracks.
     *
     * @param {string} owner - The resource part of the MUC JID.
     * @returns {JitsiRemoteTrack[]}
     */
    removeRemoteTracks(owner) {
        let removedTracks = [];

        for (const tpc of this.peerConnections.values()) {
            const pcRemovedTracks = tpc.removeRemoteTracks(owner);

            removedTracks = removedTracks.concat(pcRemovedTracks);
        }

        logger.debug(
            `Removed remote tracks for ${owner}`
                + ` count: ${removedTracks.length}`);

        return removedTracks;
    }

    /**
     * Returns the peer connection constraints provided by RTCUtils.
     */
    static getPCConstraints() {
        return RTCUtils.pcConstraints;
    }

    /**
     * Attaches a media stream to the element matched by the given selector.
     * @param elSelector the selector of the target element.
     * @param stream the MediaStream to attach.
     */
    static attachMediaStream(elSelector, stream) {
        return RTCUtils.attachMediaStream(elSelector, stream);
    }

    /**
     * Returns the id of the given MediaStream.
     * @param stream the MediaStream whose id is to be returned.
     */
    static getStreamID(stream) {
        return RTCUtils.getStreamID(stream);
    }

    /**
     * Returns true if retrieving the list of input devices is supported
     * and false if not.
     */
    static isDeviceListAvailable() {
        return RTCUtils.isDeviceListAvailable();
    }

    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] - type of device to change. Default is
     * undefined or 'input', 'output' - for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    static isDeviceChangeAvailable(deviceType) {
        return RTCUtils.isDeviceChangeAvailable(deviceType);
    }

    /**
     * Returns currently used audio output device id, '' stands for default
     * device
     * @returns {string}
     */
    static getAudioOutputDevice() {
        return RTCUtils.getAudioOutputDevice();
    }

    /**
     * Returns the list of available media devices if it has been obtained,
     * otherwise an empty array is returned.
     * @returns {Array} list of available media devices.
     */
    static getCurrentlyAvailableMediaDevices() {
        return RTCUtils.getCurrentlyAvailableMediaDevices();
    }

    /**
     * Returns event data for device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    static getEventDataForActiveDevice(device) {
        return RTCUtils.getEventDataForActiveDevice(device);
    }

    /**
     * Sets current audio output device.
     * @param {string} deviceId - id of 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices()
     * @returns {Promise} - resolves when audio output is changed, is rejected
     * otherwise
     */
    static setAudioOutputDevice(deviceId) {
        return RTCUtils.setAudioOutputDevice(deviceId);
    }

    /**
     * Returns <tt>true</tt> if given WebRTC MediaStream is considered a valid
     * "user" stream which means that it's not a "receive only" stream nor a
     * "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * @param {MediaStream} stream the WebRTC MediaStream instance
     * @returns {boolean}
     */
    static isUserStream(stream) {
        return RTC.isUserStreamById(RTCUtils.getStreamID(stream));
    }

    /**
     * Returns <tt>true</tt> if a WebRTC MediaStream identified by given stream
     * ID is considered a valid "user" stream which means that it's not a
     * "receive only" stream nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * @param {string} streamId the id of WebRTC MediaStream
     * @returns {boolean}
     */
    static isUserStreamById(streamId) {
        return streamId && streamId !== 'mixedmslabel'
            && streamId !== 'default';
    }

    /**
     * Allows receiving the list of available cameras/microphones.
     * @param {function} callback would receive array of devices as an argument
     */
    static enumerateDevices(callback) {
        RTCUtils.enumerateDevices(callback);
    }

    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    static stopMediaStream(mediaStream) {
        RTCUtils.stopMediaStream(mediaStream);
    }

    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    static isDesktopSharingEnabled() {
        return RTCUtils.isDesktopSharingEnabled();
    }

    /**
     * Closes all currently opened data channels.
     */
    closeAllDataChannels() {
        if (this.dataChannels) {
            this.dataChannels.closeAllChannels();
            this.dataChannelsOpen = false;

            this.removeListener(RTCEvents.LASTN_ENDPOINT_CHANGED,
                this._lastNChangeListener);
        }
    }

    /**
     * Sets the audio level of the audio track which runs on the given SSRC.
     * @param ssrc the SSRC identifying the track.
     * @param audioLevel the new audio level value.
     */
    setAudioLevel(ssrc, audioLevel) {
        const track = this._getTrackBySSRC(ssrc);

        if (!track) {
            return;
        }
        if (!track.isAudioTrack()) {
            logger.warn(`Received audio level for non-audio track: ${ssrc}`);

            return;
        }
        track.setAudioLevel(audioLevel);
    }

    /**
     * Searches in localTracks (the session stores the ssrc for audio and
     * video) and remoteTracks for the ssrc and returns the corresponding
     * resource.
     * @param ssrc the ssrc to check.
     */
    getResourceBySSRC(ssrc) {
        const track = this._getTrackBySSRC(ssrc);

        return track ? track.getParticipantId() : null;
    }

    /**
     * Finds a track (either local or remote) which runs on the given SSRC.
     * @param {string|number} ssrc
     * @return {JitsiTrack|undefined}
     *
     * FIXME figure out where SSRC is stored as a string and convert to number
     * @private
     */
    _getTrackBySSRC(ssrc) {
        let track
            = this.getLocalTracks().find(
                localTrack =>
                    // It is important that SSRC is not compared with ===,
                    // because the code calling this method is inconsistent
                    // about string vs number types
                    Array.from(this.peerConnections.values())
                        .find(pc => pc.getLocalSSRC(localTrack) == ssrc) // eslint-disable-line eqeqeq, max-len
            );

        if (!track) {
            track = this._getRemoteTrackBySSRC(ssrc);
        }

        return track;
    }

    /**
     * Searches in remoteTracks for the ssrc and returns the corresponding
     * track.
     * @param ssrc the ssrc to check.
     * @return {JitsiRemoteTrack|undefined} return the first remote track that
     * matches given SSRC or <tt>undefined</tt> if no such track was found.
     * @private
     */
    _getRemoteTrackBySSRC(ssrc) {
        /* eslint-disable eqeqeq */

        // FIXME: Convert the SSRCs in whole project to use the same type.
        // Now we are using number and string.
        return this.getRemoteTracks().find(
            remoteTrack => ssrc == remoteTrack.getSSRC());

        /* eslint-enable eqeqeq */
    }

    /**
     * Sends a message via the data channels.
     * @param to {string} the id of the endpoint that should receive the
     * message. If "" the message will be sent to all participants.
     * @param payload {object} the payload of the message.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails or there is no data channel created
     */
    sendDataChannelMessage(to, payload) {
        if (this.dataChannels) {
            this.dataChannels.sendDataChannelMessage(to, payload);
        } else {
            throw new Error('Data channels support is disabled!');
        }
    }
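
    // Usage sketch (illustrative): broadcasting an application-defined payload
    // to all participants over the bridge data channels, e.g.
    //
    //     try {
    //         rtc.sendDataChannelMessage('', { type: 'hello', text: 'hi all' });
    //     } catch (e) {
    //         // Thrown when data channel support is disabled or the channel
    //         // has not been created yet.
    //     }
    //
    // The payload shape is an assumption; this module only forwards it.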
    /**
     * Selects a new value for "lastN". The requested number of videos will be
     * delivered after the value is in effect. Set to -1 for unlimited or
     * all available videos.
     * @param value {number} the new value for lastN.
     */
    setLastN(value) {
        if (this.dataChannels && this.dataChannelsOpen) {
            this.dataChannels.sendSetLastNMessage(value);
        } else {
            // No data channel has been initialized or has completed opening
            // yet. Remember the specified value and apply it as soon as a data
            // channel opens.
            this._lastN = value;
        }
    }
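
    // Usage sketch (illustrative): limiting received video to the three most
    // recently active endpoints, or lifting the limit again:
    //
    //     rtc.setLastN(3);
    //     rtc.setLastN(-1); // -1 means unlimited, per the JSDoc above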
    /**
     * Indicates if the endpoint id is currently included in the last N.
     *
     * @param {string} id the endpoint id that we check for last N.
     * @returns {boolean} true if the endpoint id is in the last N or if we
     * don't have data channel support, otherwise we return false.
     */
    isInLastN(id) {
        return !this._lastNEndpoints // lastNEndpoints not initialised yet
            || this._lastNEndpoints.indexOf(id) > -1;
    }
}