
RTC.js

/* global Strophe */
var logger = require("jitsi-meet-logger").getLogger(__filename);
var RTCEvents = require("../../service/RTC/RTCEvents.js");
import RTCUtils from "./RTCUtils.js";
var JitsiLocalTrack = require("./JitsiLocalTrack.js");
import JitsiTrackError from "../../JitsiTrackError";
import * as JitsiTrackErrors from "../../JitsiTrackErrors";
var DataChannels = require("./DataChannels");
var JitsiRemoteTrack = require("./JitsiRemoteTrack.js");
var MediaType = require("../../service/RTC/MediaType");
var VideoType = require("../../service/RTC/VideoType");
var GlobalOnErrorHandler = require("../util/GlobalOnErrorHandler");
import Listenable from "../util/Listenable";
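/**
 * Creates JitsiLocalTrack instances from the track info objects produced by
 * RTCUtils.obtainAudioAndVideoPermissions.
 * @param {Object[]} tracksInfo the track descriptions (stream, track,
 * mediaType, videoType, resolution).
 * @param {Object} options the options passed to obtainAudioAndVideoPermissions
 * (micDeviceId, cameraDeviceId, facingMode, ...).
 * @returns {JitsiLocalTrack[]} the new local tracks.
 */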
function createLocalTracks(tracksInfo, options) {
    var newTracks = [];
    var deviceId = null;
    tracksInfo.forEach(function(trackInfo) {
        if (trackInfo.mediaType === MediaType.AUDIO) {
            deviceId = options.micDeviceId;
        } else if (trackInfo.videoType === VideoType.CAMERA) {
            deviceId = options.cameraDeviceId;
        }
        var localTrack
            = new JitsiLocalTrack(
                trackInfo.stream,
                trackInfo.track,
                trackInfo.mediaType,
                trackInfo.videoType,
                trackInfo.resolution,
                deviceId,
                options.facingMode);
        newTracks.push(localTrack);
    });
    return newTracks;
}
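/**
 * Manages the local and remote tracks of a single conference and exposes
 * static wrappers around the WebRTC helpers in RTCUtils.
 *
 * Illustrative usage (assuming <tt>rtc</tt> is the instance created by the
 * conference):
 * <pre>
 *   RTC.init(config);
 *   RTC.obtainAudioAndVideoPermissions({ devices: ["audio", "video"] })
 *       .then(tracks => tracks.forEach(track => rtc.addLocalTrack(track)));
 * </pre>
 */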
export default class RTC extends Listenable {
    constructor(conference, options = {}) {
        super();
        this.conference = conference;
        this.localTracks = [];
        //FIXME: We should support multiple streams per jid.
        this.remoteTracks = {};
        this.localAudio = null;
        this.localVideo = null;
        this.options = options;
        // Whether we have been notified that the data channel has opened.
        // This flag can get out of sync if the data channel is closed by the
        // server; that is intentional so that errors surface when it happens.
        this.dataChannelsOpen = false;
        // Switch audio output device on all remote audio tracks. Local audio
        // tracks handle this event by themselves.
        if (RTCUtils.isDeviceChangeAvailable('output')) {
            RTCUtils.addListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                (deviceId) => {
                    for (var key in this.remoteTracks) {
                        if (this.remoteTracks.hasOwnProperty(key)
                            && this.remoteTracks[key].audio) {
                            this.remoteTracks[key].audio
                                .setAudioOutput(deviceId);
                        }
                    }
                });
        }
    }
    /**
     * Creates the local MediaStreams.
     * @param {Object} [options] optional parameters
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {bool} options.dontCreateJitsiTrack if <tt>true</tt>, objects with
     * the following structure {stream: the Media Stream, type: "audio" or
     * "video", videoType: "camera" or "desktop"} will be returned through the
     * Promise; otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    static obtainAudioAndVideoPermissions (options) {
        return RTCUtils.obtainAudioAndVideoPermissions(options).then(
            function (tracksInfo) {
                var tracks = createLocalTracks(tracksInfo, options);
                return !tracks.some(track => !track._isReceivingData())
                    ? tracks
                    : Promise.reject(new JitsiTrackError(
                        JitsiTrackErrors.NO_DATA_FROM_SOURCE));
            });
    }
    /**
     * Initializes the data channels of this instance.
     * @param peerconnection the associated PeerConnection.
     */
    initializeDataChannels (peerconnection) {
        if(this.options.config.openSctp) {
            this.dataChannels = new DataChannels(peerconnection,
                this.eventEmitter);
            this._dataChannelOpenListener = () => {
                // mark that dataChannel is opened
                this.dataChannelsOpen = true;
                // When the data channel becomes available, tell the bridge
                // about the currently selected endpoint so that it can do
                // adaptive simulcast. We want the notification to trigger
                // even if the selected endpoint is undefined or null.
                // XXX why do we not do the same for pinned endpoints?
                try {
                    this.dataChannels.sendSelectedEndpointMessage(
                        this.selectedEndpoint);
                } catch (error) {
                    GlobalOnErrorHandler.callErrorHandler(error);
                    logger.error("Cannot sendSelectedEndpointMessage ",
                        this.selectedEndpoint, ". Error: ", error);
                }
                this.removeListener(RTCEvents.DATA_CHANNEL_OPEN,
                    this._dataChannelOpenListener);
                this._dataChannelOpenListener = null;
            };
            this.addListener(RTCEvents.DATA_CHANNEL_OPEN,
                this._dataChannelOpenListener);
        }
    }
    /**
     * Should be called when the current media session ends and after the
     * PeerConnection has been closed using PeerConnection.close() method.
     */
    onCallEnded () {
        if (this.dataChannels) {
            // DataChannels are not explicitly closed as the PeerConnection
            // is closed on call ended which triggers data channel onclose
            // events. The reference is cleared to disable any logic related
            // to the data channels.
            this.dataChannels = null;
            this.dataChannelsOpen = false;
        }
    }
    /**
     * Elects the participant with the given id to be the selected participant
     * in order to always receive video for this participant (even when last n
     * is enabled).
     * If there is no data channel, the id is cached and sent through the
     * channel once it is created.
     * @param id {string} the user id.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    selectEndpoint (id) {
        // cache the value if channel is missing, till we open it
        this.selectedEndpoint = id;
        if(this.dataChannels && this.dataChannelsOpen)
            this.dataChannels.sendSelectedEndpointMessage(id);
    }
    /**
     * Elects the participant with the given id to be the pinned participant in
     * order to always receive video for this participant (even when last n is
     * enabled).
     * @param id {string} the user id
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    pinEndpoint (id) {
        if(this.dataChannels) {
            this.dataChannels.sendPinnedEndpointMessage(id);
        } else {
            // FIXME: cache value while there is no data channel created
            // and send the cached state once channel is created
            throw new Error("Data channels support is disabled!");
        }
    }
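    /**
     * Registers a listener on RTCUtils for the given RTC event type.
     * @param {string} eventType the event to subscribe to.
     * @param {Function} listener the callback to invoke.
     */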
    static addListener (eventType, listener) {
        RTCUtils.addListener(eventType, listener);
    }
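    /**
     * Removes a listener previously registered via RTC.addListener.
     * @param {string} eventType the event the listener was subscribed to.
     * @param {Function} listener the callback to remove.
     */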
    static removeListener (eventType, listener) {
        RTCUtils.removeListener(eventType, listener);
    }
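    /**
     * Returns whether the underlying RTCUtils layer reports WebRTC as ready.
     * @returns {boolean}
     */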
    static isRTCReady () {
        return RTCUtils.isRTCReady();
    }
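    /**
     * Stores the given options and initializes RTCUtils with them.
     * @param {Object} [options]
     * @returns {*} the value returned by RTCUtils.init.
     */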
    static init (options = {}) {
        this.options = options;
        return RTCUtils.init(this.options);
    }
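    /**
     * Returns the device availability information reported by RTCUtils.
     */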
    static getDeviceAvailability () {
        return RTCUtils.getDeviceAvailability();
    }
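    /**
     * Adds a local track to this RTC instance, associates it with the
     * conference and makes it the current local audio or video track.
     * @param {JitsiLocalTrack} track the track to add.
     * @throws {Error} if the track is null or undefined.
     */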
    addLocalTrack (track) {
        if (!track)
            throw new Error('track must not be null nor undefined');
        this.localTracks.push(track);
        track.conference = this.conference;
        if (track.isAudioTrack()) {
            this.localAudio = track;
        } else {
            this.localVideo = track;
        }
    }
    /**
     * Get local video track.
     * @returns {JitsiLocalTrack}
     */
    getLocalVideoTrack () {
        return this.localVideo;
    }
    /**
     * Gets JitsiRemoteTrack for the passed MediaType associated with given MUC
     * nickname (resource part of the JID).
     * @param type audio or video.
     * @param resource the resource part of the MUC JID
     * @returns {JitsiRemoteTrack|null}
     */
    getRemoteTrackByType (type, resource) {
        if (this.remoteTracks[resource])
            return this.remoteTracks[resource][type];
        else
            return null;
    }
    /**
     * Gets JitsiRemoteTrack for AUDIO MediaType associated with given MUC
     * nickname (resource part of the JID).
     * @param resource the resource part of the MUC JID
     * @returns {JitsiRemoteTrack|null}
     */
    getRemoteAudioTrack (resource) {
        return this.getRemoteTrackByType(MediaType.AUDIO, resource);
    }
    /**
     * Gets JitsiRemoteTrack for VIDEO MediaType associated with given MUC
     * nickname (resource part of the JID).
     * @param resource the resource part of the MUC JID
     * @returns {JitsiRemoteTrack|null}
     */
    getRemoteVideoTrack (resource) {
        return this.getRemoteTrackByType(MediaType.VIDEO, resource);
    }
    /**
     * Set mute for all local audio streams attached to the conference.
     * @param value the mute value
     * @returns {Promise}
     */
    setAudioMute (value) {
        var mutePromises = [];
        for(var i = 0; i < this.localTracks.length; i++) {
            var track = this.localTracks[i];
            if(track.getType() !== MediaType.AUDIO) {
                continue;
            }
            // this is a Promise
            mutePromises.push(value ? track.mute() : track.unmute());
        }
        // Return a Promise over all the mute/unmute Promises so the caller
        // can wait for their completion.
        return Promise.all(mutePromises);
    }
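    /**
     * Removes the given local track from this RTC instance and clears the
     * corresponding localAudio/localVideo reference. Does nothing if the
     * track is not found.
     * @param {JitsiLocalTrack} track the track to remove.
     */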
    removeLocalTrack (track) {
        var pos = this.localTracks.indexOf(track);
        if (pos === -1) {
            return;
        }
        this.localTracks.splice(pos, 1);
        if (track.isAudioTrack()) {
            this.localAudio = null;
        } else {
            this.localVideo = null;
        }
    }
    /**
     * Initializes a new JitsiRemoteTrack instance with the data provided by
     * (a) ChatRoom to XMPPEvents.REMOTE_TRACK_ADDED.
     *
     * @param {Object} event the data provided by (a) ChatRoom to
     * XMPPEvents.REMOTE_TRACK_ADDED.
     */
    createRemoteTrack (event) {
        var ownerJid = event.owner;
        var remoteTrack = new JitsiRemoteTrack(
            this, this.conference, ownerJid, event.stream, event.track,
            event.mediaType, event.videoType, event.ssrc, event.muted);
        var resource = Strophe.getResourceFromJid(ownerJid);
        var remoteTracks
            = this.remoteTracks[resource]
                || (this.remoteTracks[resource] = {});
        var mediaType = remoteTrack.getType();
        if (remoteTracks[mediaType]) {
            logger.warn("Overwriting remote track!", resource, mediaType);
        }
        remoteTracks[mediaType] = remoteTrack;
        return remoteTrack;
    }
    /**
     * Removes all JitsiRemoteTracks associated with given MUC nickname
     * (resource part of the JID). Returns array of removed tracks.
     *
     * @param {string} resource - The resource part of the MUC JID.
     * @returns {JitsiRemoteTrack[]}
     */
    removeRemoteTracks (resource) {
        var removedTracks = [];
        var removedAudioTrack
            = this.removeRemoteTrack(resource, MediaType.AUDIO);
        var removedVideoTrack
            = this.removeRemoteTrack(resource, MediaType.VIDEO);
        removedAudioTrack && removedTracks.push(removedAudioTrack);
        removedVideoTrack && removedTracks.push(removedVideoTrack);
        delete this.remoteTracks[resource];
        return removedTracks;
    }
    /**
     * Removes specified track type associated with given MUC nickname
     * (resource part of the JID). Returns removed track if any.
     *
     * @param {string} resource - The resource part of the MUC JID.
     * @param {string} mediaType - Type of track to remove.
     * @returns {JitsiRemoteTrack|undefined}
     */
    removeRemoteTrack (resource, mediaType) {
        var remoteTracksForResource = this.remoteTracks[resource];
        if (remoteTracksForResource && remoteTracksForResource[mediaType]) {
            var track = remoteTracksForResource[mediaType];
            track.dispose();
            delete remoteTracksForResource[mediaType];
            return track;
        }
    }
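    /**
     * Returns the peer connection constraints used by RTCUtils.
     */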
    static getPCConstraints () {
        return RTCUtils.pc_constraints;
    }
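    /**
     * Attaches the given WebRTC MediaStream to the element matched by the
     * passed selector, delegating to RTCUtils.
     * @param elSelector the element (or selector) to attach the stream to.
     * @param stream the WebRTC MediaStream.
     */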
    static attachMediaStream (elSelector, stream) {
        return RTCUtils.attachMediaStream(elSelector, stream);
    }
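    /**
     * Returns the id of the given WebRTC MediaStream, as reported by
     * RTCUtils.getStreamID.
     * @param stream the WebRTC MediaStream.
     */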
    static getStreamID (stream) {
        return RTCUtils.getStreamID(stream);
    }
    /**
     * Returns true if retrieving the list of input devices is supported
     * and false if not.
     */
    static isDeviceListAvailable () {
        return RTCUtils.isDeviceListAvailable();
    }
    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] - type of device to change. Default is
     * undefined or 'input', 'output' - for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    static isDeviceChangeAvailable (deviceType) {
        return RTCUtils.isDeviceChangeAvailable(deviceType);
    }
    /**
     * Returns currently used audio output device id, '' stands for default
     * device.
     * @returns {string}
     */
    static getAudioOutputDevice () {
        return RTCUtils.getAudioOutputDevice();
    }
    /**
     * Returns the list of available media devices if it has been obtained;
     * otherwise an empty array is returned.
     * @returns {Array} list of available media devices.
     */
    static getCurrentlyAvailableMediaDevices () {
        return RTCUtils.getCurrentlyAvailableMediaDevices();
    }
    /**
     * Returns event data for device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    static getEventDataForActiveDevice (device) {
        return RTCUtils.getEventDataForActiveDevice(device);
    }
    /**
     * Sets current audio output device.
     * @param {string} deviceId - id of 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices()
     * @returns {Promise} - resolves when audio output is changed, is rejected
     * otherwise
     */
    static setAudioOutputDevice (deviceId) {
        return RTCUtils.setAudioOutputDevice(deviceId);
    }
    /**
     * Returns <tt>true</tt> if the given WebRTC MediaStream is considered a
     * valid "user" stream, which means that it is neither a "receive only"
     * stream nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox, use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * @param stream WebRTC MediaStream instance
     * @returns {boolean}
     */
    static isUserStream (stream) {
        var streamId = RTCUtils.getStreamID(stream);
        return (streamId && streamId !== "mixedmslabel"
            && streamId !== "default");
    }
    /**
     * Retrieves the list of available cameras/microphones.
     * @param {function} callback receives the array of devices as an argument
     */
    static enumerateDevices (callback) {
        RTCUtils.enumerateDevices(callback);
    }
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    static stopMediaStream (mediaStream) {
        RTCUtils.stopMediaStream(mediaStream);
    }
    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    static isDesktopSharingEnabled() {
        return RTCUtils.isDesktopSharingEnabled();
    }
    /**
     * Closes all currently opened data channels.
     */
    closeAllDataChannels () {
        if(this.dataChannels) {
            this.dataChannels.closeAllChannels();
            this.dataChannelsOpen = false;
        }
    }
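    /**
     * Releases the resources held by this instance. Currently a no-op.
     */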
    dispose () { }
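    /**
     * Sets the audio level of the remote audio track associated with the
     * given MUC resource.
     * @param {string} resource the resource part of the MUC JID.
     * @param {number} audioLevel the new audio level.
     */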
    setAudioLevel (resource, audioLevel) {
        if(!resource)
            return;
        var audioTrack = this.getRemoteAudioTrack(resource);
        if(audioTrack) {
            audioTrack.setAudioLevel(audioLevel);
        }
    }
    /**
     * Searches the local tracks (the session stores SSRCs for audio and
     * video) and the remote tracks for the given ssrc and returns the
     * corresponding resource.
     * @param ssrc the ssrc to check.
     */
    getResourceBySSRC (ssrc) {
        if((this.localVideo && ssrc == this.localVideo.getSSRC())
            || (this.localAudio && ssrc == this.localAudio.getSSRC())) {
            return this.conference.myUserId();
        }
        var track = this.getRemoteTrackBySSRC(ssrc);
        return track ? track.getParticipantId() : null;
    }
    /**
     * Searches in remoteTracks for the ssrc and returns the corresponding
     * track.
     * @param ssrc the ssrc to check.
     */
    getRemoteTrackBySSRC (ssrc) {
        for (var resource in this.remoteTracks) {
            var track = this.getRemoteAudioTrack(resource);
            if(track && track.getSSRC() == ssrc) {
                return track;
            }
            track = this.getRemoteVideoTrack(resource);
            if(track && track.getSSRC() == ssrc) {
                return track;
            }
        }
        return null;
    }
    /**
     * Handles remote track mute / unmute events.
     * @param type {string} "audio" or "video"
     * @param isMuted {boolean} the new mute state
     * @param from {string} user id
     */
    handleRemoteTrackMute (type, isMuted, from) {
        var track = this.getRemoteTrackByType(type, from);
        if (track) {
            track.setMute(isMuted);
        }
    }
    /**
     * Handles remote track video type events
     * @param value {string} the new video type
     * @param from {string} user id
     */
    handleRemoteTrackVideoTypeChanged (value, from) {
        var videoTrack = this.getRemoteVideoTrack(from);
        if (videoTrack) {
            videoTrack._setVideoType(value);
        }
    }
    /**
     * Sends message via the datachannels.
     * @param to {string} the id of the endpoint that should receive the
     * message. If "" the message will be sent to all participants.
     * @param payload {object} the payload of the message.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails or there is no data channel created
     */
    sendDataChannelMessage (to, payload) {
        if(this.dataChannels) {
            this.dataChannels.sendDataChannelMessage(to, payload);
        } else {
            throw new Error("Data channels support is disabled!");
        }
    }
    /**
     * Selects a new value for "lastN". The requested amount of videos are
     * going to be delivered after the value is in effect. Set to -1 for
     * unlimited or all available videos.
     * @param value {int} the new value for lastN.
     * @throws Error if there is no data channel created.
     */
    setLastN (value) {
        if (this.dataChannels) {
            this.dataChannels.sendSetLastNMessage(value);
        } else {
            throw new Error("Data channels support is disabled!");
        }
    }
}