RTC.js

/* global Strophe */
var logger = require("jitsi-meet-logger").getLogger(__filename);
var RTCEvents = require("../../service/RTC/RTCEvents.js");
import RTCUtils from "./RTCUtils.js";
var JitsiLocalTrack = require("./JitsiLocalTrack.js");
import JitsiTrackError from "../../JitsiTrackError";
import * as JitsiTrackErrors from "../../JitsiTrackErrors";
var DataChannels = require("./DataChannels");
var JitsiRemoteTrack = require("./JitsiRemoteTrack.js");
var MediaType = require("../../service/RTC/MediaType");
var VideoType = require("../../service/RTC/VideoType");
var GlobalOnErrorHandler = require("../util/GlobalOnErrorHandler");
import Listenable from "../util/Listenable";
/**
 * Creates JitsiLocalTrack instances from the track descriptions provided by
 * RTCUtils.obtainAudioAndVideoPermissions.
 * @param {Object[]} tracksInfo the track descriptions.
 * @param {Object} options the options passed to obtainAudioAndVideoPermissions.
 * @returns {JitsiLocalTrack[]}
 */
function createLocalTracks(tracksInfo, options) {
    var newTracks = [];
    var deviceId = null;
    tracksInfo.forEach(function(trackInfo) {
        if (trackInfo.mediaType === MediaType.AUDIO) {
            deviceId = options.micDeviceId;
        } else if (trackInfo.videoType === VideoType.CAMERA) {
            deviceId = options.cameraDeviceId;
        }
        var localTrack
            = new JitsiLocalTrack(
                trackInfo.stream,
                trackInfo.track,
                trackInfo.mediaType,
                trackInfo.videoType,
                trackInfo.resolution,
                deviceId,
                options.facingMode);
        newTracks.push(localTrack);
    });
    return newTracks;
}
export default class RTC extends Listenable {
    constructor(conference, options = {}) {
        super();
        this.conference = conference;
        this.localTracks = [];
        //FIXME: We should support multiple streams per jid.
        this.remoteTracks = {};
        this.localAudio = null;
        this.localVideo = null;
        this.options = options;
        // A flag which indicates that the data channel has been opened. It can
        // get out of sync if the server closes the data channel; that is the
        // desired behaviour, so that errors surface when this happens.
        this.dataChannelsOpen = false;
        // Switch audio output device on all remote audio tracks. Local audio
        // tracks handle this event by themselves.
        if (RTCUtils.isDeviceChangeAvailable('output')) {
            RTCUtils.addListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                (deviceId) => {
                    for (var key in this.remoteTracks) {
                        if (this.remoteTracks.hasOwnProperty(key)
                            && this.remoteTracks[key].audio) {
                            this.remoteTracks[key].audio
                                .setAudioOutput(deviceId);
                        }
                    }
                });
        }
    }
    /**
     * Creates the local MediaStreams.
     * @param {Object} [options] optional parameters
     * @param {Array} options.devices the devices that will be requested
     * @param {string} options.resolution resolution constraints
     * @param {bool} options.dontCreateJitsiTrack if <tt>true</tt>, objects with
     * the following structure {stream: the Media Stream, type: "audio" or
     * "video", videoType: "camera" or "desktop"} will be returned through the
     * Promise, otherwise JitsiTrack objects will be returned.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    static obtainAudioAndVideoPermissions (options) {
        return RTCUtils.obtainAudioAndVideoPermissions(options).then(
            function (tracksInfo) {
                var tracks = createLocalTracks(tracksInfo, options);
                return !tracks.some(track => !track._isReceivingData())
                    ? tracks
                    : Promise.reject(new JitsiTrackError(
                        JitsiTrackErrors.NO_DATA_FROM_SOURCE));
            });
    }
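    // A minimal usage sketch, illustrative only (the option values and the
    // logging below are assumptions, not taken from this module):
    //
    //     RTC.obtainAudioAndVideoPermissions({ devices: [ 'audio', 'video' ] })
    //         .then(function (tracks) {
    //             tracks.forEach(function (track) {
    //                 logger.info('obtained local track: ' + track.getType());
    //             });
    //         });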
    /**
     * Should be called when an incoming call is received. Creates the
     * DataChannels instance for the call's peerconnection if SCTP is enabled
     * in the config.
     * @param event the incoming call event, expected to carry the
     * peerconnection used for the call.
     */
    onIncommingCall (event) {
        if (this.options.config.openSctp) {
            this.dataChannels = new DataChannels(event.peerconnection,
                this.eventEmitter);
            this._dataChannelOpenListener = () => {
                // mark that the data channel is opened
                this.dataChannelsOpen = true;
                // When the data channel becomes available, tell the bridge
                // about the video selection so that it can do adaptive
                // simulcast. We want the notification to trigger even if
                // userJid is undefined or null.
                // XXX why do we not do the same for pinned endpoints?
                try {
                    this.dataChannels.sendSelectedEndpointMessage(
                        this.selectedEndpoint);
                } catch (error) {
                    GlobalOnErrorHandler.callErrorHandler(error);
                    logger.error("Cannot sendSelectedEndpointMessage ",
                        this.selectedEndpoint, ". Error: ", error);
                }
                this.removeListener(RTCEvents.DATA_CHANNEL_OPEN,
                    this._dataChannelOpenListener);
                this._dataChannelOpenListener = null;
            };
            this.addListener(RTCEvents.DATA_CHANNEL_OPEN,
                this._dataChannelOpenListener);
        }
    }
    /**
     * Should be called when the current media session ends and after the
     * PeerConnection has been closed using PeerConnection.close() method.
     */
    onCallEnded () {
        if (this.dataChannels) {
            // The DataChannels are not explicitly closed, as closing the
            // PeerConnection when the call ends triggers the data channel
            // onclose events. The reference is cleared to disable any logic
            // related to the data channels.
            this.dataChannels = null;
            this.dataChannelsOpen = false;
        }
    }
    /**
     * Elects the participant with the given id to be the selected participant
     * in order to always receive video for this participant (even when last n
     * is enabled).
     * If there is no open data channel, the id is cached and sent through the
     * channel once it opens.
     * @param id {string} the user id.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    selectEndpoint (id) {
        // cache the value in case the channel is missing, until it opens
        this.selectedEndpoint = id;
        if (this.dataChannels && this.dataChannelsOpen) {
            this.dataChannels.sendSelectedEndpointMessage(id);
        }
    }
    /**
     * Elects the participant with the given id to be the pinned participant in
     * order to always receive video for this participant (even when last n is
     * enabled).
     * @param id {string} the user id
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     */
    pinEndpoint (id) {
        if (this.dataChannels) {
            this.dataChannels.sendPinnedEndpointMessage(id);
        } else {
            // FIXME: cache the value while there is no data channel created
            // and send the cached state once the channel is created
            throw new Error("Data channels support is disabled!");
        }
    }
    static addListener (eventType, listener) {
        RTCUtils.addListener(eventType, listener);
    }
    static removeListener (eventType, listener) {
        RTCUtils.removeListener(eventType, listener);
    }
    static isRTCReady () {
        return RTCUtils.isRTCReady();
    }
    static init (options = {}) {
        this.options = options;
        return RTCUtils.init(this.options);
    }
    static getDeviceAvailability () {
        return RTCUtils.getDeviceAvailability();
    }
    /**
     * Adds the given track to the list of local tracks and stores it as the
     * current local audio or video track.
     * @param track the JitsiLocalTrack to add.
     * @throws Error if the track is null or undefined.
     */
    addLocalTrack (track) {
        if (!track) {
            throw new Error('track must not be null nor undefined');
        }
        this.localTracks.push(track);
        track.conference = this.conference;
        if (track.isAudioTrack()) {
            this.localAudio = track;
        } else {
            this.localVideo = track;
        }
    }
    /**
     * Gets the local video track.
     * @returns {JitsiLocalTrack}
     */
    getLocalVideoTrack () {
        return this.localVideo;
    }
    /**
     * Gets JitsiRemoteTrack for the passed MediaType associated with given MUC
     * nickname (resource part of the JID).
     * @param type audio or video.
     * @param resource the resource part of the MUC JID
     * @returns {JitsiRemoteTrack|null}
     */
    getRemoteTrackByType (type, resource) {
        if (this.remoteTracks[resource]) {
            return this.remoteTracks[resource][type];
        }
        return null;
    }
    /**
     * Gets JitsiRemoteTrack for AUDIO MediaType associated with given MUC
     * nickname (resource part of the JID).
     * @param resource the resource part of the MUC JID
     * @returns {JitsiRemoteTrack|null}
     */
    getRemoteAudioTrack (resource) {
        return this.getRemoteTrackByType(MediaType.AUDIO, resource);
    }
    /**
     * Gets JitsiRemoteTrack for VIDEO MediaType associated with given MUC
     * nickname (resource part of the JID).
     * @param resource the resource part of the MUC JID
     * @returns {JitsiRemoteTrack|null}
     */
    getRemoteVideoTrack (resource) {
        return this.getRemoteTrackByType(MediaType.VIDEO, resource);
    }
    /**
     * Sets mute for all local audio streams attached to the conference.
     * @param value the mute value
     * @returns {Promise}
     */
    setAudioMute (value) {
        var mutePromises = [];
        for (var i = 0; i < this.localTracks.length; i++) {
            var track = this.localTracks[i];
            if (track.getType() !== MediaType.AUDIO) {
                continue;
            }
            // this is a Promise
            mutePromises.push(value ? track.mute() : track.unmute());
        }
        // return a Promise of all the Promises so callers can wait for all of
        // them to complete
        return Promise.all(mutePromises);
    }
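    // Hedged example, illustrative only ("rtc" stands for an already
    // constructed RTC instance):
    //
    //     rtc.setAudioMute(true).then(function () {
    //         logger.info('all local audio tracks are muted');
    //     });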
    /**
     * Removes the given track from the list of local tracks and clears the
     * corresponding localAudio/localVideo reference.
     * @param track the JitsiLocalTrack to remove.
     */
    removeLocalTrack (track) {
        var pos = this.localTracks.indexOf(track);
        if (pos === -1) {
            return;
        }
        this.localTracks.splice(pos, 1);
        if (track.isAudioTrack()) {
            this.localAudio = null;
        } else {
            this.localVideo = null;
        }
    }
    /**
     * Initializes a new JitsiRemoteTrack instance with the data provided by
     * (a) ChatRoom to XMPPEvents.REMOTE_TRACK_ADDED.
     *
     * @param {Object} event the data provided by (a) ChatRoom to
     * XMPPEvents.REMOTE_TRACK_ADDED.
     */
    createRemoteTrack (event) {
        var ownerJid = event.owner;
        var remoteTrack = new JitsiRemoteTrack(
            this, this.conference, ownerJid, event.stream, event.track,
            event.mediaType, event.videoType, event.ssrc, event.muted);
        var resource = Strophe.getResourceFromJid(ownerJid);
        var remoteTracks
            = this.remoteTracks[resource] || (this.remoteTracks[resource] = {});
        var mediaType = remoteTrack.getType();
        if (remoteTracks[mediaType]) {
            logger.warn("Overwriting remote track!", resource, mediaType);
        }
        remoteTracks[mediaType] = remoteTrack;
        return remoteTrack;
    }
    /**
     * Removes all JitsiRemoteTracks associated with given MUC nickname
     * (resource part of the JID). Returns array of removed tracks.
     *
     * @param {string} resource - The resource part of the MUC JID.
     * @returns {JitsiRemoteTrack[]}
     */
    removeRemoteTracks (resource) {
        var removedTracks = [];
        var removedAudioTrack
            = this.removeRemoteTrack(resource, MediaType.AUDIO);
        var removedVideoTrack
            = this.removeRemoteTrack(resource, MediaType.VIDEO);
        removedAudioTrack && removedTracks.push(removedAudioTrack);
        removedVideoTrack && removedTracks.push(removedVideoTrack);
        delete this.remoteTracks[resource];
        return removedTracks;
    }
    /**
     * Removes the specified track type associated with given MUC nickname
     * (resource part of the JID). Returns the removed track, if any.
     *
     * @param {string} resource - The resource part of the MUC JID.
     * @param {string} mediaType - Type of track to remove.
     * @returns {JitsiRemoteTrack|undefined}
     */
    removeRemoteTrack (resource, mediaType) {
        var remoteTracksForResource = this.remoteTracks[resource];
        if (remoteTracksForResource && remoteTracksForResource[mediaType]) {
            var track = remoteTracksForResource[mediaType];
            track.dispose();
            delete remoteTracksForResource[mediaType];
            return track;
        }
    }
    static getPCConstraints () {
        return RTCUtils.pc_constraints;
    }
    static attachMediaStream (elSelector, stream) {
        return RTCUtils.attachMediaStream(elSelector, stream);
    }
    static getStreamID (stream) {
        return RTCUtils.getStreamID(stream);
    }
    /**
     * Returns true if retrieving the list of input devices is supported
     * and false if not.
     */
    static isDeviceListAvailable () {
        return RTCUtils.isDeviceListAvailable();
    }
    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] - type of device to change. Default is
     * undefined or 'input'; use 'output' for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    static isDeviceChangeAvailable (deviceType) {
        return RTCUtils.isDeviceChangeAvailable(deviceType);
    }
    /**
     * Returns the currently used audio output device id, where '' stands for
     * the default device.
     * @returns {string}
     */
    static getAudioOutputDevice () {
        return RTCUtils.getAudioOutputDevice();
    }
    /**
     * Returns the list of available media devices if it has been obtained,
     * otherwise an empty array is returned.
     * @returns {Array} list of available media devices.
     */
    static getCurrentlyAvailableMediaDevices () {
        return RTCUtils.getCurrentlyAvailableMediaDevices();
    }
    /**
     * Returns event data for a device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    static getEventDataForActiveDevice (device) {
        return RTCUtils.getEventDataForActiveDevice(device);
    }
    /**
     * Sets the current audio output device.
     * @param {string} deviceId - id of an 'audiooutput' device from
     * navigator.mediaDevices.enumerateDevices()
     * @returns {Promise} - resolves when the audio output is changed, rejected
     * otherwise
     */
    static setAudioOutputDevice (deviceId) {
        return RTCUtils.setAudioOutputDevice(deviceId);
    }
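    // Illustrative only: "someDeviceId" is an assumed id of an 'audiooutput'
    // device obtained via enumerateDevices():
    //
    //     RTC.setAudioOutputDevice(someDeviceId).then(function () {
    //         logger.info('audio output device changed');
    //     });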
    /**
     * Returns <tt>true</tt> if a given WebRTC MediaStream is considered a
     * valid "user" stream, which means that it's not a "receive only" stream
     * nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox, use recvonly
     * "streams/channels/tracks" for receiving remote streams/tracks, as
     * opposed to Plan B where there are only 3 channels: audio, video and
     * data.
     *
     * @param stream WebRTC MediaStream instance
     * @returns {boolean}
     */
    static isUserStream (stream) {
        var streamId = RTCUtils.getStreamID(stream);
        return (streamId && streamId !== "mixedmslabel"
            && streamId !== "default");
    }
    /**
     * Allows receiving the list of available cameras/microphones.
     * @param {function} callback receives an array of devices as an argument
     */
    static enumerateDevices (callback) {
        RTCUtils.enumerateDevices(callback);
    }
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    static stopMediaStream (mediaStream) {
        RTCUtils.stopMediaStream(mediaStream);
    }
    /**
     * Returns whether desktop sharing is enabled or not.
     * @returns {boolean}
     */
    static isDesktopSharingEnabled() {
        return RTCUtils.isDesktopSharingEnabled();
    }
    /**
     * Closes all currently opened data channels.
     */
    closeAllDataChannels () {
        if (this.dataChannels) {
            this.dataChannels.closeAllChannels();
            this.dataChannelsOpen = false;
        }
    }
    dispose () { }
    /**
     * Sets the audio level of the remote audio track associated with the given
     * MUC resource.
     * @param resource the resource part of the MUC JID
     * @param audioLevel the new audio level
     */
    setAudioLevel (resource, audioLevel) {
        if (!resource) {
            return;
        }
        var audioTrack = this.getRemoteAudioTrack(resource);
        if (audioTrack) {
            audioTrack.setAudioLevel(audioLevel);
        }
    }
    /**
     * Searches the local tracks (the session stores an ssrc for audio and
     * video) and the remote tracks for the given ssrc and returns the
     * corresponding resource.
     * @param ssrc the ssrc to check.
     */
    getResourceBySSRC (ssrc) {
        if ((this.localVideo && ssrc == this.localVideo.getSSRC())
            || (this.localAudio && ssrc == this.localAudio.getSSRC())) {
            return this.conference.myUserId();
        }
        var track = this.getRemoteTrackBySSRC(ssrc);
        return track ? track.getParticipantId() : null;
    }
    /**
     * Searches the remote tracks for the given ssrc and returns the
     * corresponding track.
     * @param ssrc the ssrc to check.
     */
    getRemoteTrackBySSRC (ssrc) {
        for (var resource in this.remoteTracks) {
            var track = this.getRemoteAudioTrack(resource);
            if (track && track.getSSRC() == ssrc) {
                return track;
            }
            track = this.getRemoteVideoTrack(resource);
            if (track && track.getSSRC() == ssrc) {
                return track;
            }
        }
        return null;
    }
    /**
     * Handles remote track mute / unmute events.
     * @param type {string} "audio" or "video"
     * @param isMuted {boolean} the new mute state
     * @param from {string} user id
     */
    handleRemoteTrackMute (type, isMuted, from) {
        var track = this.getRemoteTrackByType(type, from);
        if (track) {
            track.setMute(isMuted);
        }
    }
    /**
     * Handles remote track video type events.
     * @param value {string} the new video type
     * @param from {string} user id
     */
    handleRemoteTrackVideoTypeChanged (value, from) {
        var videoTrack = this.getRemoteVideoTrack(from);
        if (videoTrack) {
            videoTrack._setVideoType(value);
        }
    }
    /**
     * Sends a message via the data channels.
     * @param to {string} the id of the endpoint that should receive the
     * message. If "", the message will be sent to all participants.
     * @param payload {object} the payload of the message.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails or there is no data channel created
     */
    sendDataChannelMessage (to, payload) {
        if (this.dataChannels) {
            this.dataChannels.sendDataChannelMessage(to, payload);
        } else {
            throw new Error("Data channels support is disabled!");
        }
    }
    /**
     * Selects a new value for "lastN". The requested number of videos is going
     * to be delivered after the value is in effect. Set to -1 for unlimited or
     * all available videos.
     * @param value {int} the new value for lastN.
     * @throws Error if there is no data channel created.
     */
    setLastN (value) {
        if (this.dataChannels) {
            this.dataChannels.sendSetLastNMessage(value);
        } else {
            throw new Error("Data channels support is disabled!");
        }
    }
}
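
// A rough lifecycle sketch, for orientation only. The "conference" object and
// the option values below are assumptions for illustration, not part of this
// module:
//
//     RTC.init({});
//     RTC.obtainAudioAndVideoPermissions({ devices: [ 'audio', 'video' ] })
//         .then(function (tracks) {
//             var rtc = new RTC(conference, { config: { openSctp: true } });
//             tracks.forEach(function (track) {
//                 rtc.addLocalTrack(track);
//             });
//         });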