Du kan inte välja fler än 25 ämnen. Ämnen måste starta med en bokstav eller siffra, kan innehålla bindestreck ('-') och vara max 35 tecken långa.

RTC.js 27KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882
  1. /* global __filename */
  2. import DataChannels from './DataChannels';
  3. import { getLogger } from 'jitsi-meet-logger';
  4. import GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
  5. import JitsiLocalTrack from './JitsiLocalTrack.js';
  6. import JitsiRemoteTrack from './JitsiRemoteTrack.js';
  7. import JitsiTrackError from '../../JitsiTrackError';
  8. import * as JitsiTrackErrors from '../../JitsiTrackErrors';
  9. import Listenable from '../util/Listenable';
  10. import * as MediaType from '../../service/RTC/MediaType';
  11. import RTCEvents from '../../service/RTC/RTCEvents.js';
  12. import RTCUtils from './RTCUtils.js';
  13. import TraceablePeerConnection from './TraceablePeerConnection';
  14. import VideoType from '../../service/RTC/VideoType';
  15. const logger = getLogger(__filename);
  16. /**
  17. *
  18. * @param tracksInfo
  19. * @param options
  20. */
  21. function createLocalTracks(tracksInfo, options) {
  22. const newTracks = [];
  23. let deviceId = null;
  24. tracksInfo.forEach(trackInfo => {
  25. if (trackInfo.mediaType === MediaType.AUDIO) {
  26. deviceId = options.micDeviceId;
  27. } else if (trackInfo.videoType === VideoType.CAMERA) {
  28. deviceId = options.cameraDeviceId;
  29. }
  30. const localTrack
  31. = new JitsiLocalTrack(
  32. trackInfo.stream,
  33. trackInfo.track,
  34. trackInfo.mediaType,
  35. trackInfo.videoType,
  36. trackInfo.resolution,
  37. deviceId,
  38. options.facingMode);
  39. newTracks.push(localTrack);
  40. });
  41. return newTracks;
  42. }
/**
 * The RTC module of a conference. Manages the local and remote media tracks
 * and the active <tt>TraceablePeerConnection</tt> instances, and bridges
 * data channel events to the rest of the library.
 */
export default class RTC extends Listenable {
    /**
     * Creates a new RTC module bound to the given conference.
     *
     * @param conference the JitsiConference instance which owns this module.
     * @param options the conference configuration options (optional).
     */
    constructor(conference, options = {}) {
        super();
        this.conference = conference;

        /**
         * A map of active <tt>TraceablePeerConnection</tt>.
         * @type {Map.<number, TraceablePeerConnection>}
         */
        this.peerConnections = new Map();

        /**
         * The counter used to generated id numbers assigned to peer connections
         * @type {number}
         */
        this.peerConnectionIdCounter = 1;

        // All local tracks registered via addLocalTrack.
        this.localTracks = [];

        // FIXME: We should support multiple streams per jid.
        // Maps owner endpoint id -> { mediaType: JitsiRemoteTrack }.
        this.remoteTracks = {};
        this.options = options;

        // A flag whether we had received that the data channel had opened
        // we can get this flag out of sync if for some reason data channel got
        // closed from server, a desired behaviour so we can see errors when
        // this happen
        this.dataChannelsOpen = false;

        // Defines the last N endpoints list. It can be null or an array once
        // initialised with a datachannel last N event.
        // @type {Array<string>|null}
        this._lastNEndpoints = null;

        // The last N change listener.
        this._lastNChangeListener = null;

        // Switch audio output device on all remote audio tracks. Local audio
        // tracks handle this event by themselves.
        if (RTCUtils.isDeviceChangeAvailable('output')) {
            RTCUtils.addListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                deviceId => {
                    const remoteAudioTracks
                        = this.getRemoteTracks(MediaType.AUDIO);

                    for (const track of remoteAudioTracks) {
                        track.setAudioOutput(deviceId);
                    }
                });
        }
    }
  93. /**
  94. * Creates the local MediaStreams.
  95. * @param {Object} [options] optional parameters
  96. * @param {Array} options.devices the devices that will be requested
  97. * @param {string} options.resolution resolution constraints
  98. * @param {bool} options.dontCreateJitsiTrack if <tt>true</tt> objects with
  99. * the following structure {stream: the Media Stream, type: "audio" or
  100. * "video", videoType: "camera" or "desktop"} will be returned trough the
  101. * Promise, otherwise JitsiTrack objects will be returned.
  102. * @param {string} options.cameraDeviceId
  103. * @param {string} options.micDeviceId
  104. * @returns {*} Promise object that will receive the new JitsiTracks
  105. */
  106. static obtainAudioAndVideoPermissions(options) {
  107. return RTCUtils.obtainAudioAndVideoPermissions(options).then(
  108. tracksInfo => {
  109. const tracks = createLocalTracks(tracksInfo, options);
  110. return tracks.some(track => !track._isReceivingData())
  111. ? Promise.reject(
  112. new JitsiTrackError(
  113. JitsiTrackErrors.NO_DATA_FROM_SOURCE))
  114. : tracks;
  115. });
  116. }
    /**
     * Initializes the data channels of this instance.
     * Does nothing unless the 'openSctp' config option is enabled.
     * @param peerconnection the associated PeerConnection.
     */
    initializeDataChannels(peerconnection) {
        if (this.options.config.openSctp) {
            this.dataChannels = new DataChannels(peerconnection,
                this.eventEmitter);

            // One-shot listener: fires when the data channel opens, replays
            // the cached selected endpoint and then deregisters itself.
            this._dataChannelOpenListener = () => {
                // mark that dataChannel is opened
                this.dataChannelsOpen = true;

                // when the data channel becomes available, tell the bridge
                // about video selections so that it can do adaptive simulcast,
                // we want the notification to trigger even if userJid
                // is undefined, or null.
                // XXX why do we not do the same for pinned endpoints?
                try {
                    this.dataChannels.sendSelectedEndpointMessage(
                        this.selectedEndpoint);
                } catch (error) {
                    // Best-effort: report the failure but keep the channel
                    // usable.
                    GlobalOnErrorHandler.callErrorHandler(error);
                    logger.error('Cannot sendSelectedEndpointMessage ',
                        this.selectedEndpoint, '. Error: ', error);
                }
                this.removeListener(RTCEvents.DATA_CHANNEL_OPEN,
                    this._dataChannelOpenListener);
                this._dataChannelOpenListener = null;
            };
            this.addListener(RTCEvents.DATA_CHANNEL_OPEN,
                this._dataChannelOpenListener);

            // Add Last N change listener.
            this._lastNChangeListener = lastNEndpoints => {
                this._lastNEndpoints = lastNEndpoints;
            };
            this.addListener(RTCEvents.LASTN_ENDPOINT_CHANGED,
                this._lastNChangeListener);
        }
    }
  155. /**
  156. * Should be called when current media session ends and after the
  157. * PeerConnection has been closed using PeerConnection.close() method.
  158. */
  159. onCallEnded() {
  160. if (this.dataChannels) {
  161. // DataChannels are not explicitly closed as the PeerConnection
  162. // is closed on call ended which triggers data channel onclose
  163. // events. The reference is cleared to disable any logic related
  164. // to the data channels.
  165. this.dataChannels = null;
  166. this.dataChannelsOpen = false;
  167. }
  168. }
  169. /**
  170. * Elects the participant with the given id to be the selected participant
  171. * in order to always receive video for this participant (even when last n
  172. * is enabled).
  173. * If there is no data channel we store it and send it through the channel
  174. * once it is created.
  175. * @param id {string} the user id.
  176. * @throws NetworkError or InvalidStateError or Error if the operation
  177. * fails.
  178. */
  179. selectEndpoint(id) {
  180. // cache the value if channel is missing, till we open it
  181. this.selectedEndpoint = id;
  182. if (this.dataChannels && this.dataChannelsOpen) {
  183. this.dataChannels.sendSelectedEndpointMessage(id);
  184. }
  185. }
  186. /**
  187. * Elects the participant with the given id to be the pinned participant in
  188. * order to always receive video for this participant (even when last n is
  189. * enabled).
  190. * @param id {string} the user id
  191. * @throws NetworkError or InvalidStateError or Error if the operation
  192. * fails.
  193. */
  194. pinEndpoint(id) {
  195. if (this.dataChannels) {
  196. this.dataChannels.sendPinnedEndpointMessage(id);
  197. } else {
  198. // FIXME: cache value while there is no data channel created
  199. // and send the cached state once channel is created
  200. throw new Error('Data channels support is disabled!');
  201. }
  202. }
    /**
     * Subscribes the given listener to an RTCUtils event.
     * @param eventType one of the RTCEvents types.
     * @param listener the callback invoked when the event is emitted.
     */
    static addListener(eventType, listener) {
        RTCUtils.addListener(eventType, listener);
    }

    /**
     * Unsubscribes a previously registered listener from an RTCUtils event.
     * @param eventType one of the RTCEvents types.
     * @param listener the callback previously passed to addListener.
     */
    static removeListener(eventType, listener) {
        RTCUtils.removeListener(eventType, listener);
    }

    /**
     * Tells whether the underlying WebRTC utilities (RTCUtils) have finished
     * their initialization and are ready to be used.
     */
    static isRTCReady() {
        return RTCUtils.isRTCReady();
    }

    /**
     * Initializes RTCUtils with the given options. The options are also
     * cached on the RTC class itself (static, shared by all instances).
     * @param options the initialization options.
     */
    static init(options = {}) {
        this.options = options;
        return RTCUtils.init(this.options);
    }

    /**
     * Returns the device availability information reported by RTCUtils.
     */
    static getDeviceAvailability() {
        return RTCUtils.getDeviceAvailability();
    }
  239. /**
  240. * Creates new <tt>TraceablePeerConnection</tt>
  241. * @param {SignalingLayer} signaling the signaling layer that will
  242. * provide information about the media or participants which is not carried
  243. * over SDP.
  244. * @param {Object} iceConfig an object describing the ICE config like
  245. * defined in the WebRTC specification.
  246. * @param {Object} options the config options
  247. * @param {boolean} options.disableSimulcast if set to 'true' will disable
  248. * the simulcast
  249. * @param {boolean} options.disableRtx if set to 'true' will disable the RTX
  250. * @param {boolean} options.preferH264 if set to 'true' H264 will be
  251. * preferred over other video codecs.
  252. * @return {TraceablePeerConnection}
  253. */
  254. createPeerConnection(signaling, iceConfig, options) {
  255. const newConnection
  256. = new TraceablePeerConnection(
  257. this,
  258. this.peerConnectionIdCounter,
  259. signaling, iceConfig, RTC.getPCConstraints(), options);
  260. this.peerConnections.set(newConnection.id, newConnection);
  261. this.peerConnectionIdCounter += 1;
  262. return newConnection;
  263. }
  264. /**
  265. * Removed given peer connection from this RTC module instance.
  266. * @param {TraceablePeerConnection} traceablePeerConnection
  267. * @return {boolean} <tt>true</tt> if the given peer connection was removed
  268. * successfully or <tt>false</tt> if there was no peer connection mapped in
  269. * this RTC instance.
  270. */
  271. _removePeerConnection(traceablePeerConnection) {
  272. const id = traceablePeerConnection.id;
  273. if (this.peerConnections.has(id)) {
  274. // NOTE Remote tracks are not removed here.
  275. this.peerConnections.delete(id);
  276. return true;
  277. }
  278. return false;
  279. }
  280. /**
  281. *
  282. * @param track
  283. */
  284. addLocalTrack(track) {
  285. if (!track) {
  286. throw new Error('track must not be null nor undefined');
  287. }
  288. this.localTracks.push(track);
  289. track.conference = this.conference;
  290. }
  291. /**
  292. * Get local video track.
  293. * @returns {JitsiLocalTrack|undefined}
  294. */
  295. getLocalVideoTrack() {
  296. const localVideo = this.getLocalTracks(MediaType.VIDEO);
  297. return localVideo.length ? localVideo[0] : undefined;
  298. }
  299. /**
  300. * Get local audio track.
  301. * @returns {JitsiLocalTrack|undefined}
  302. */
  303. getLocalAudioTrack() {
  304. const localAudio = this.getLocalTracks(MediaType.AUDIO);
  305. return localAudio.length ? localAudio[0] : undefined;
  306. }
  307. /**
  308. * Returns the local tracks of the given media type, or all local tracks if
  309. * no specific type is given.
  310. * @param {MediaType} [mediaType] optional media type filter
  311. * (audio or video).
  312. */
  313. getLocalTracks(mediaType) {
  314. let tracks = this.localTracks.slice();
  315. if (mediaType !== undefined) {
  316. tracks = tracks.filter(
  317. track => track.getType() === mediaType);
  318. }
  319. return tracks;
  320. }
  321. /**
  322. * Obtains all remote tracks currently known to this RTC module instance.
  323. * @param {MediaType} [mediaType] the remote tracks will be filtered
  324. * by their media type if this argument is specified.
  325. * @return {Array<JitsiRemoteTrack>}
  326. */
  327. getRemoteTracks(mediaType) {
  328. const remoteTracks = [];
  329. const remoteEndpoints = Object.keys(this.remoteTracks);
  330. for (const endpoint of remoteEndpoints) {
  331. const endpointMediaTypes = Object.keys(this.remoteTracks[endpoint]);
  332. for (const trackMediaType of endpointMediaTypes) {
  333. // per media type filtering
  334. if (!mediaType || mediaType === trackMediaType) {
  335. const mediaTrack
  336. = this.remoteTracks[endpoint][trackMediaType];
  337. if (mediaTrack) {
  338. remoteTracks.push(mediaTrack);
  339. }
  340. }
  341. }
  342. }
  343. return remoteTracks;
  344. }
  345. /**
  346. * Gets JitsiRemoteTrack for the passed MediaType associated with given MUC
  347. * nickname (resource part of the JID).
  348. * @param type audio or video.
  349. * @param resource the resource part of the MUC JID
  350. * @returns {JitsiRemoteTrack|null}
  351. */
  352. getRemoteTrackByType(type, resource) {
  353. if (this.remoteTracks[resource]) {
  354. return this.remoteTracks[resource][type];
  355. }
  356. return null;
  357. }
  358. /**
  359. * Gets JitsiRemoteTrack for AUDIO MediaType associated with given MUC
  360. * nickname (resource part of the JID).
  361. * @param resource the resource part of the MUC JID
  362. * @returns {JitsiRemoteTrack|null}
  363. */
  364. getRemoteAudioTrack(resource) {
  365. return this.getRemoteTrackByType(MediaType.AUDIO, resource);
  366. }
  367. /**
  368. * Gets JitsiRemoteTrack for VIDEO MediaType associated with given MUC
  369. * nickname (resource part of the JID).
  370. * @param resource the resource part of the MUC JID
  371. * @returns {JitsiRemoteTrack|null}
  372. */
  373. getRemoteVideoTrack(resource) {
  374. return this.getRemoteTrackByType(MediaType.VIDEO, resource);
  375. }
  376. /**
  377. * Set mute for all local audio streams attached to the conference.
  378. * @param value the mute value
  379. * @returns {Promise}
  380. */
  381. setAudioMute(value) {
  382. const mutePromises = [];
  383. this.getLocalTracks(MediaType.AUDIO).forEach(audioTrack => {
  384. // this is a Promise
  385. mutePromises.push(value ? audioTrack.mute() : audioTrack.unmute());
  386. });
  387. // We return a Promise from all Promises so we can wait for their
  388. // execution.
  389. return Promise.all(mutePromises);
  390. }
  391. /**
  392. *
  393. * @param track
  394. */
  395. removeLocalTrack(track) {
  396. const pos = this.localTracks.indexOf(track);
  397. if (pos === -1) {
  398. return;
  399. }
  400. this.localTracks.splice(pos, 1);
  401. }
  402. /* eslint-disable max-params */
  403. /**
  404. * Initializes a new JitsiRemoteTrack instance with the data provided by
  405. * the signaling layer and SDP.
  406. *
  407. * @param {string} ownerEndpointId
  408. * @param {MediaStream} stream
  409. * @param {MediaStreamTrack} track
  410. * @param {MediaType} mediaType
  411. * @param {VideoType|undefined} videoType
  412. * @param {string} ssrc
  413. * @param {boolean} muted
  414. */
  415. _createRemoteTrack(
  416. ownerEndpointId,
  417. stream,
  418. track,
  419. mediaType,
  420. videoType,
  421. ssrc,
  422. muted) {
  423. const remoteTrack
  424. = new JitsiRemoteTrack(
  425. this,
  426. this.conference,
  427. ownerEndpointId,
  428. stream,
  429. track,
  430. mediaType,
  431. videoType,
  432. ssrc,
  433. muted);
  434. const remoteTracks
  435. = this.remoteTracks[ownerEndpointId]
  436. || (this.remoteTracks[ownerEndpointId] = {});
  437. if (remoteTracks[mediaType]) {
  438. logger.error(
  439. 'Overwriting remote track!',
  440. ownerEndpointId,
  441. mediaType);
  442. }
  443. remoteTracks[mediaType] = remoteTrack;
  444. this.eventEmitter.emit(RTCEvents.REMOTE_TRACK_ADDED, remoteTrack);
  445. }
  446. /* eslint-enable max-params */
  447. /**
  448. * Removes all JitsiRemoteTracks associated with given MUC nickname
  449. * (resource part of the JID). Returns array of removed tracks.
  450. *
  451. * @param {string} owner - The resource part of the MUC JID.
  452. * @returns {JitsiRemoteTrack[]}
  453. */
  454. removeRemoteTracks(owner) {
  455. const removedTracks = [];
  456. if (this.remoteTracks[owner]) {
  457. const removedAudioTrack
  458. = this.remoteTracks[owner][MediaType.AUDIO];
  459. const removedVideoTrack
  460. = this.remoteTracks[owner][MediaType.VIDEO];
  461. removedAudioTrack && removedTracks.push(removedAudioTrack);
  462. removedVideoTrack && removedTracks.push(removedVideoTrack);
  463. delete this.remoteTracks[owner];
  464. }
  465. return removedTracks;
  466. }
  467. /**
  468. * Finds remote track by it's stream and track ids.
  469. * @param {string} streamId the media stream id as defined by the WebRTC
  470. * @param {string} trackId the media track id as defined by the WebRTC
  471. * @return {JitsiRemoteTrack|undefined}
  472. * @private
  473. */
  474. _getRemoteTrackById(streamId, trackId) {
  475. let result;
  476. // .find will break the loop once the first match is found
  477. Object.keys(this.remoteTracks).find(endpoint => {
  478. const endpointTracks = this.remoteTracks[endpoint];
  479. return endpointTracks && Object.keys(endpointTracks).find(
  480. mediaType => {
  481. const mediaTrack = endpointTracks[mediaType];
  482. if (mediaTrack
  483. && mediaTrack.getStreamId() === streamId
  484. && mediaTrack.getTrackId() === trackId) {
  485. result = mediaTrack;
  486. return true;
  487. }
  488. return false;
  489. });
  490. });
  491. return result;
  492. }
  493. /**
  494. * Removes <tt>JitsiRemoteTrack</tt> identified by given stream and track
  495. * ids.
  496. *
  497. * @param {string} streamId media stream id as defined by the WebRTC
  498. * @param {string} trackId media track id as defined by the WebRTC
  499. * @returns {JitsiRemoteTrack|undefined} the track which has been removed or
  500. * <tt>undefined</tt> if no track matching given stream and track ids was
  501. * found.
  502. */
  503. _removeRemoteTrack(streamId, trackId) {
  504. const toBeRemoved = this._getRemoteTrackById(streamId, trackId);
  505. if (toBeRemoved) {
  506. toBeRemoved.dispose();
  507. delete this.remoteTracks[
  508. toBeRemoved.getParticipantId()][toBeRemoved.getType()];
  509. this.eventEmitter.emit(
  510. RTCEvents.REMOTE_TRACK_REMOVED, toBeRemoved);
  511. }
  512. return toBeRemoved;
  513. }
    /**
     * Returns the peer connection constraints cached by RTCUtils.
     */
    static getPCConstraints() {
        return RTCUtils.pcConstraints;
    }

    /**
     * Attaches the given WebRTC MediaStream to the element identified by the
     * selector, delegating to RTCUtils which handles browser differences.
     * @param elSelector the target element (selector).
     * @param stream the WebRTC MediaStream to attach.
     */
    static attachMediaStream(elSelector, stream) {
        return RTCUtils.attachMediaStream(elSelector, stream);
    }

    /**
     * Returns the id of the given WebRTC MediaStream (as determined by
     * RTCUtils).
     * @param stream the WebRTC MediaStream.
     */
    static getStreamID(stream) {
        return RTCUtils.getStreamID(stream);
    }

    /**
     * Returns true if retrieving the the list of input devices is supported
     * and false if not.
     */
    static isDeviceListAvailable() {
        return RTCUtils.isDeviceListAvailable();
    }

    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @params {string} [deviceType] - type of device to change. Default is
     * undefined or 'input', 'output' - for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    static isDeviceChangeAvailable(deviceType) {
        return RTCUtils.isDeviceChangeAvailable(deviceType);
    }

    /**
     * Returns currently used audio output device id, '' stands for default
     * device
     * @returns {string}
     */
    static getAudioOutputDevice() {
        return RTCUtils.getAudioOutputDevice();
    }

    /**
     * Returns list of available media devices if its obtained, otherwise an
     * empty array is returned/
     * @returns {Array} list of available media devices.
     */
    static getCurrentlyAvailableMediaDevices() {
        return RTCUtils.getCurrentlyAvailableMediaDevices();
    }

    /**
     * Returns event data for device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    static getEventDataForActiveDevice(device) {
        return RTCUtils.getEventDataForActiveDevice(device);
    }

    /**
     * Sets current audio output device.
     * @param {string} deviceId - id of 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices()
     * @returns {Promise} - resolves when audio output is changed, is rejected
     * otherwise
     */
    static setAudioOutputDevice(deviceId) {
        return RTCUtils.setAudioOutputDevice(deviceId);
    }

    /**
     * Returns <tt>true<tt/> if given WebRTC MediaStream is considered a valid
     * "user" stream which means that it's not a "receive only" stream nor a
     * "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * @param {MediaStream} stream the WebRTC MediaStream instance
     * @returns {boolean}
     */
    static isUserStream(stream) {
        return RTC.isUserStreamById(RTCUtils.getStreamID(stream));
    }

    /**
     * Returns <tt>true<tt/> if a WebRTC MediaStream identified by given stream
     * ID is considered a valid "user" stream which means that it's not a
     * "receive only" stream nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * NOTE(review): despite the {boolean} doc, this actually returns a
     * truthy/falsy value (the streamId itself when non-empty and not one of
     * the special ids) - callers appear to rely on truthiness only.
     *
     * @param {string} streamId the id of WebRTC MediaStream
     * @returns {boolean}
     */
    static isUserStreamById(streamId) {
        return streamId && streamId !== 'mixedmslabel'
            && streamId !== 'default';
    }

    /**
     * Allows to receive list of available cameras/microphones.
     * @param {function} callback would receive array of devices as an argument
     */
    static enumerateDevices(callback) {
        RTCUtils.enumerateDevices(callback);
    }

    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    static stopMediaStream(mediaStream) {
        RTCUtils.stopMediaStream(mediaStream);
    }

    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    static isDesktopSharingEnabled() {
        return RTCUtils.isDesktopSharingEnabled();
    }
  638. /**
  639. * Closes all currently opened data channels.
  640. */
  641. closeAllDataChannels() {
  642. if (this.dataChannels) {
  643. this.dataChannels.closeAllChannels();
  644. this.dataChannelsOpen = false;
  645. if (this._lastNChangeListener) {
  646. this.removeListener(RTCEvents.LASTN_ENDPOINT_CHANGED,
  647. this._lastNChangeListener);
  648. }
  649. }
  650. }
  651. /**
  652. *
  653. * @param resource
  654. * @param audioLevel
  655. */
  656. setAudioLevel(resource, audioLevel) {
  657. if (!resource) {
  658. return;
  659. }
  660. const audioTrack = this.getRemoteAudioTrack(resource);
  661. if (audioTrack) {
  662. audioTrack.setAudioLevel(audioLevel);
  663. }
  664. }
    /**
     * Searches in localTracks(session stores ssrc for audio and video) and
     * remoteTracks for the ssrc and returns the corresponding resource.
     * @param ssrc the ssrc to check.
     * @returns {string|null} the local user id when a local track matches,
     * the owning participant id when a remote track matches, or null.
     */
    getResourceBySSRC(ssrc) {
        // FIXME: Convert the SSRCs in whole project to use the same type.
        // Now we are using number and string.
        // Loose equality is deliberate here (number vs string SSRCs).
        if (this.getLocalTracks().find(
                // eslint-disable-next-line eqeqeq
                localTrack => localTrack.getSSRC() == ssrc)) {
            return this.conference.myUserId();
        }
        const track = this.getRemoteTrackBySSRC(ssrc);
        return track ? track.getParticipantId() : null;
    }

    /**
     * Searches in remoteTracks for the ssrc and returns the corresponding
     * track.
     * @param ssrc the ssrc to check.
     * @return {JitsiRemoteTrack|undefined} return the first remote track that
     * matches given SSRC or <tt>undefined</tt> if no such track was found.
     */
    getRemoteTrackBySSRC(ssrc) {
        // FIXME: Convert the SSRCs in whole project to use the same type.
        // Now we are using number and string.
        // eslint-disable-next-line eqeqeq
        return this.getRemoteTracks().find(t => ssrc == t.getSSRC());
    }
  694. /**
  695. * Handles remote track mute / unmute events.
  696. * @param type {string} "audio" or "video"
  697. * @param isMuted {boolean} the new mute state
  698. * @param from {string} user id
  699. */
  700. handleRemoteTrackMute(type, isMuted, from) {
  701. const track = this.getRemoteTrackByType(type, from);
  702. if (track) {
  703. track.setMute(isMuted);
  704. }
  705. }
  706. /**
  707. * Handles remote track video type events
  708. * @param value {string} the new video type
  709. * @param from {string} user id
  710. */
  711. handleRemoteTrackVideoTypeChanged(value, from) {
  712. const videoTrack = this.getRemoteVideoTrack(from);
  713. if (videoTrack) {
  714. videoTrack._setVideoType(value);
  715. }
  716. }
  717. /**
  718. * Sends message via the datachannels.
  719. * @param to {string} the id of the endpoint that should receive the
  720. * message. If "" the message will be sent to all participants.
  721. * @param payload {object} the payload of the message.
  722. * @throws NetworkError or InvalidStateError or Error if the operation
  723. * fails or there is no data channel created
  724. */
  725. sendDataChannelMessage(to, payload) {
  726. if (this.dataChannels) {
  727. this.dataChannels.sendDataChannelMessage(to, payload);
  728. } else {
  729. throw new Error('Data channels support is disabled!');
  730. }
  731. }
  732. /**
  733. * Selects a new value for "lastN". The requested amount of videos are going
  734. * to be delivered after the value is in effect. Set to -1 for unlimited or
  735. * all available videos.
  736. * @param value {int} the new value for lastN.
  737. * @trows Error if there is no data channel created.
  738. */
  739. setLastN(value) {
  740. if (this.dataChannels) {
  741. this.dataChannels.sendSetLastNMessage(value);
  742. } else {
  743. throw new Error('Data channels support is disabled!');
  744. }
  745. }
  746. /**
  747. * Indicates if the endpoint id is currently included in the last N.
  748. *
  749. * @param {string} id the endpoint id that we check for last N.
  750. * @returns {boolean} true if the endpoint id is in the last N or if we
  751. * don't have data channel support, otherwise we return false.
  752. */
  753. isInLastN(id) {
  754. return !this._lastNEndpoints // lastNEndpoints not initialised yet
  755. || this._lastNEndpoints.indexOf(id) > -1;
  756. }
  757. }