
JitsiLocalTrack.js

/* global __filename, Promise */
var logger = require("jitsi-meet-logger").getLogger(__filename);
var JitsiTrack = require("./JitsiTrack");
var RTCBrowserType = require("./RTCBrowserType");
import JitsiTrackError from "../../JitsiTrackError";
import * as JitsiTrackErrors from "../../JitsiTrackErrors";
import * as JitsiTrackEvents from "../../JitsiTrackEvents";
var RTCEvents = require("../../service/RTC/RTCEvents");
var RTCUtils = require("./RTCUtils");
var MediaType = require('../../service/RTC/MediaType');
var VideoType = require('../../service/RTC/VideoType');
var CameraFacingMode = require('../../service/RTC/CameraFacingMode');

/**
 * Represents a single media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 * @param stream WebRTC MediaStream, parent of the track
 * @param track underlying WebRTC MediaStreamTrack for the new JitsiLocalTrack
 * @param mediaType the MediaType of the JitsiLocalTrack
 * @param videoType the VideoType of the JitsiLocalTrack
 * @param resolution the video resolution if it's a video track
 * @param deviceId the ID of the local device for this track
 * @param facingMode the camera facing mode used in the getUserMedia call
 * @constructor
 */
function JitsiLocalTrack(stream, track, mediaType, videoType, resolution,
                         deviceId, facingMode) {
    var self = this;

    JitsiTrack.call(this,
        null /* RTC */, stream, track,
        function () {
            if(!this.dontFireRemoveEvent)
                this.eventEmitter.emit(
                    JitsiTrackEvents.LOCAL_TRACK_STOPPED);
            this.dontFireRemoveEvent = false;
        }.bind(this) /* inactiveHandler */,
        mediaType, videoType, null /* ssrc */);

    this.dontFireRemoveEvent = false;
    this.resolution = resolution;
    this.deviceId = deviceId;
    this.startMuted = false;
    this.initialMSID = this.getMSID();
    this.inMuteOrUnmuteProgress = false;

    /**
     * The facing mode of the camera from which this JitsiLocalTrack instance
     * was obtained.
     */
    this._facingMode = facingMode;

    // Currently there is no way to know that a MediaStreamTrack ended due to
    // a device disconnect in Firefox through e.g. the "readyState" property.
    // Instead we will compare the current track's label with the device
    // labels from the enumerateDevices() list.
    this._trackEnded = false;

    /**
     * The value of bytes sent, received from the statistics module.
     */
    this._bytesSent = null;

    /**
     * Used only for detection of audio problems. We want to check only once
     * whether the track is sending bytes or not. This flag is set to false
     * after the check.
     */
    this._testByteSent = true;

    // Currently there is no way to determine which device a track was created
    // with (until getConstraints() is supported), however we can associate
    // tracks with real devices obtained from the enumerateDevices() call as
    // soon as it's called.
    this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

    /**
     * Indicates that we have called RTCUtils.stopMediaStream for the
     * MediaStream related to this JitsiTrack object.
     */
    this.stopStreamInProgress = false;

    this._onDeviceListChanged = function (devices) {
        self._setRealDeviceIdFromDeviceList(devices);

        // Mark the track as ended for those browsers that do not support the
        // "readyState" property. We do not touch tracks created with the
        // default device ID "".
        if (typeof self.getTrack().readyState === 'undefined'
            && typeof self._realDeviceId !== 'undefined'
            && !devices.find(function (d) {
                return d.deviceId === self._realDeviceId;
            })) {
            self._trackEnded = true;
        }
    };

    // Subscribe each created local audio track to the
    // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
    // handling this event for remote tracks (which are handled in RTC.js),
    // because there might be local tracks not attached to a conference.
    if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
        this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);
        RTCUtils.addListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
            this._onAudioOutputDeviceChanged);
    }

    RTCUtils.addListener(RTCEvents.DEVICE_LIST_CHANGED,
        this._onDeviceListChanged);

    // FIXME: Removed temporarily until we verify that we don't fire the event
    // when the camera is working.
    // if(this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
    //     this._setHandler("track_mute", () => {
    //         if(this._checkForCameraIssues())
    //             this.eventEmitter.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
    //     });
    //     this._setHandler("track_ended", () => {
    //         if(this._checkForCameraIssues())
    //             this.eventEmitter.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
    //     });
    // }
}

JitsiLocalTrack.prototype = Object.create(JitsiTrack.prototype);
JitsiLocalTrack.prototype.constructor = JitsiLocalTrack;

/**
 * Returns whether the associated MediaStreamTrack is in the 'ended' state.
 * @returns {boolean}
 */
JitsiLocalTrack.prototype.isEnded = function () {
    return this.getTrack().readyState === 'ended' || this._trackEnded;
};

/**
 * Sets the real device ID by comparing track information with device
 * information. This is a temporary solution until the getConstraints() method
 * is implemented in browsers.
 * @param {MediaDeviceInfo[]} devices - list of devices obtained from the
 * enumerateDevices() call
 */
JitsiLocalTrack.prototype._setRealDeviceIdFromDeviceList = function (devices) {
    var track = this.getTrack(),
        device = devices.find(function (d) {
            return d.kind === track.kind + 'input' && d.label === track.label;
        });

    if (device) {
        this._realDeviceId = device.deviceId;
    }
};

/**
 * Mutes the track. Will reject the Promise if there is a mute/unmute operation
 * in progress.
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.mute = function () {
    return createMuteUnmutePromise(this, true);
};

/**
 * Unmutes the track. Will reject the Promise if there is a mute/unmute
 * operation in progress.
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.unmute = function () {
    return createMuteUnmutePromise(this, false);
};

/**
 * Creates a Promise for the mute/unmute operation.
 *
 * @param {JitsiLocalTrack} track - The track that will be muted/unmuted.
 * @param {boolean} mute - Whether to mute or unmute the track.
 * @returns {Promise}
 */
function createMuteUnmutePromise(track, mute) {
    if (track.inMuteOrUnmuteProgress) {
        return Promise.reject(
            new JitsiTrackError(JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS)
        );
    }

    track.inMuteOrUnmuteProgress = true;

    return track._setMute(mute)
        .then(function() {
            track.inMuteOrUnmuteProgress = false;
        })
        .catch(function(status) {
            track.inMuteOrUnmuteProgress = false;
            throw status;
        });
}

/**
 * Mutes / unmutes the track.
 *
 * @param {boolean} mute - If true the track will be muted. Otherwise the track
 * will be unmuted.
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._setMute = function (mute) {
    if (this.isMuted() === mute) {
        return Promise.resolve();
    }

    var promise = Promise.resolve();
    var self = this;

    // A local track can be used out of conference, so we need to handle that
    // case and mark whether the track should start muted or not when added to
    // a conference.
    if(!this.conference || !this.conference.room) {
        this.startMuted = mute;
    }

    this.dontFireRemoveEvent = false;

    // FIXME FF does not support the 'removeStream' method used to mute
    if (window.location.protocol !== "https:" ||
        this.isAudioTrack() ||
        this.videoType === VideoType.DESKTOP ||
        RTCBrowserType.isFirefox()) {
        if(this.track)
            this.track.enabled = !mute;
    } else {
        if(mute) {
            this.dontFireRemoveEvent = true;
            promise = new Promise( (resolve, reject) => {
                this._removeStreamFromConferenceAsMute(() => {
                    // FIXME: Maybe here we should set the SRC for the
                    // containers to something
                    this._stopMediaStream();
                    this._setStream(null);
                    resolve();
                }, (err) => {
                    reject(err);
                });
            });
        } else {
            // This path is only for camera.
            var streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                facingMode: this.getCameraFacingMode(),
                resolution: this.resolution
            };

            promise = RTCUtils.obtainAudioAndVideoPermissions(streamOptions)
                .then(function (streamsInfo) {
                    var mediaType = self.getType();
                    var streamInfo = streamsInfo.find(function(info) {
                        return info.mediaType === mediaType;
                    });

                    if(!streamInfo) {
                        throw new JitsiTrackError(
                            JitsiTrackErrors.TRACK_NO_STREAM_FOUND);
                    } else {
                        self._setStream(streamInfo.stream);
                        self.track = streamInfo.track;
                        // This is not good when the video type changes after
                        // unmute, but let's not crash here.
                        if (self.videoType !== streamInfo.videoType) {
                            logger.warn(
                                "Video type has changed after unmute!",
                                self.videoType, streamInfo.videoType);
                            self.videoType = streamInfo.videoType;
                        }
                    }

                    // Re-attach the new stream to any containers the old
                    // stream was attached to.
                    self.containers = self.containers.map(function(cont) {
                        return RTCUtils.attachMediaStream(cont, self.stream);
                    });

                    return self._addStreamToConferenceAsUnmute();
                });
        }
    }

    return promise
        .then(function() {
            return self._sendMuteStatus(mute);
        })
        .then(function() {
            self.eventEmitter.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED);
        });
};

/**
 * Adds the stream to the conference and marks it as an "unmute" operation.
 *
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._addStreamToConferenceAsUnmute = function () {
    if (!this.conference || !this.conference.room) {
        return Promise.resolve();
    }

    var self = this;

    return new Promise(function(resolve, reject) {
        self.conference.room.addStream(
            self.stream,
            resolve,
            reject,
            {
                mtype: self.type,
                type: "unmute",
                ssrc: self.ssrc,
                msid: self.getMSID()
            });
    });
};

/**
 * Removes the stream from the conference and marks it as a "mute" operation.
 * @param {Function} successCallback will be called on success
 * @param {Function} errorCallback will be called on error
 * @private
 */
JitsiLocalTrack.prototype._removeStreamFromConferenceAsMute =
function (successCallback, errorCallback) {
    if (!this.conference || !this.conference.room) {
        successCallback();
        return;
    }

    this.conference.room.removeStream(
        this.stream,
        successCallback,
        errorCallback,
        {
            mtype: this.type,
            type: "mute",
            ssrc: this.ssrc
        });
};

/**
 * Sends the mute status of the track to the conference, if any.
 *
 * @param {boolean} mute - Whether the track is muted.
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._sendMuteStatus = function(mute) {
    if (!this.conference || !this.conference.room) {
        return Promise.resolve();
    }

    var self = this;

    return new Promise(function(resolve) {
        self.conference.room[
            self.isAudioTrack()
                ? 'setAudioMute'
                : 'setVideoMute'](mute, resolve);
    });
};

/**
 * @inheritdoc
 *
 * Stops sending the media track and removes it from the HTML.
 * NOTE: Works for local tracks only.
 *
 * @extends JitsiTrack#dispose
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.dispose = function () {
    var self = this;
    var promise = Promise.resolve();

    if (this.conference){
        promise = this.conference.removeTrack(this);
    }

    if (this.stream) {
        this._stopMediaStream();
        this.detach();
    }

    RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED,
        this._onDeviceListChanged);

    if (this._onAudioOutputDeviceChanged) {
        RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
            this._onAudioOutputDeviceChanged);
    }

    return promise
        .then(function() {
            return JitsiTrack.prototype.dispose.call(self); // super.dispose();
        });
};

/**
 * Returns <tt>true</tt> if the stream is muted and <tt>false</tt> otherwise.
 * @returns {boolean} <tt>true</tt> if the stream is muted and <tt>false</tt>
 * otherwise.
 */
JitsiLocalTrack.prototype.isMuted = function () {
    // this.stream will be null when we mute local video on Chrome.
    if (!this.stream)
        return true;
    if (this.isVideoTrack() && !this.isActive()) {
        return true;
    } else {
        return !this.track || !this.track.enabled;
    }
};

/**
 * Updates the SSRC associated with the MediaStream in this JitsiLocalTrack
 * object.
 * @param ssrc the new ssrc
 */
JitsiLocalTrack.prototype._setSSRC = function (ssrc) {
    this.ssrc = ssrc;
};

/**
 * Sets the JitsiConference object associated with the track. This is a
 * temporary solution.
 * @param conference the JitsiConference object
 */
JitsiLocalTrack.prototype._setConference = function(conference) {
    this.conference = conference;

    // We want to keep up with postponed events which should have been fired
    // on the "attach" call, but for a local track we do not always have the
    // conference before attaching. However this may result in duplicated
    // events if they have been triggered on "attach" already.
    for(var i = 0; i < this.containers.length; i++) {
        this._maybeFireTrackAttached(this.containers[i]);
    }
};

/**
 * Gets the SSRC of this local track if it's available already or <tt>null</tt>
 * otherwise. That's because we don't know the SSRC until the local description
 * is created.
 * In case of video and simulcast, returns the primarySSRC.
 * @returns {string|null}
 */
JitsiLocalTrack.prototype.getSSRC = function () {
    if(this.ssrc && this.ssrc.groups && this.ssrc.groups.length)
        return this.ssrc.groups[0].primarySSRC;
    else if(this.ssrc && this.ssrc.ssrcs && this.ssrc.ssrcs.length)
        return this.ssrc.ssrcs[0];
    else
        return null;
};

/**
 * Returns <tt>true</tt>.
 * @returns {boolean} <tt>true</tt>
 */
JitsiLocalTrack.prototype.isLocal = function () {
    return true;
};

/**
 * Returns the device id associated with the track.
 * @returns {string}
 */
JitsiLocalTrack.prototype.getDeviceId = function () {
    return this._realDeviceId || this.deviceId;
};

/**
 * Sets the value of the bytes sent statistic.
 * @param bytesSent {number} the new value
 * NOTE: used only for audio tracks to detect audio issues.
 */
JitsiLocalTrack.prototype._setByteSent = function (bytesSent) {
    this._bytesSent = bytesSent;

    if(this._testByteSent) {
        // Check only once, ~3 seconds after the first statistics update.
        setTimeout(function () {
            if(this._bytesSent <= 0){
                // We are not receiving anything from the microphone.
                this.eventEmitter.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
            }
        }.bind(this), 3000);
        this._testByteSent = false;
    }
};

/**
 * Returns facing mode for a video track from the camera. For other cases
 * (e.g. audio track or 'desktop' video track) returns undefined.
 *
 * @returns {CameraFacingMode|undefined}
 */
JitsiLocalTrack.prototype.getCameraFacingMode = function () {
    if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
        // MediaStreamTrack#getSettings() is not implemented in many browsers,
        // so we need feature checking here. Progress on the respective
        // browser's implementation can be tracked at
        // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
        // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
        // for Firefox. Even if a browser implements getSettings() already, it
        // might still not return anything for 'facingMode'.
        var trackSettings;
        try {
            trackSettings = this.track.getSettings();
        } catch (e) {
            // XXX React-native-webrtc, for example, defines
            // MediaStreamTrack#getSettings() but the implementation throws a
            // "Not implemented" Error.
        }
        if (trackSettings && 'facingMode' in trackSettings) {
            return trackSettings.facingMode;
        }

        if (typeof this._facingMode !== 'undefined') {
            return this._facingMode;
        }

        // In most cases we are showing a webcam. So if we've gotten here, it
        // should be relatively safe to assume that we are probably showing
        // the user-facing camera.
        return CameraFacingMode.USER;
    }

    return undefined;
};
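
// Illustrative sketch (not part of this module): a UI layer could use
// getCameraFacingMode() to decide whether to mirror the local video preview.
// The helper name below is hypothetical; only isVideoTrack(),
// getCameraFacingMode() and the imported CameraFacingMode enum are assumed.
//
//     function shouldMirrorLocalVideo(localTrack) {
//         return localTrack.isVideoTrack()
//             && localTrack.getCameraFacingMode() === CameraFacingMode.USER;
//     }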

/**
 * Stops the associated MediaStream.
 */
JitsiLocalTrack.prototype._stopMediaStream = function () {
    this.stopStreamInProgress = true;
    RTCUtils.stopMediaStream(this.stream);
    this.stopStreamInProgress = false;
};

/**
 * Detects camera issues on "ended" and "mute" events from the
 * MediaStreamTrack.
 * @returns {boolean} true if an issue is detected and false otherwise
 */
JitsiLocalTrack.prototype._checkForCameraIssues = function () {
    if(!this.isVideoTrack() || this.stopStreamInProgress ||
        this.videoType === VideoType.DESKTOP)
        return false;

    return !this._isReceivingData();
};

/**
 * Checks whether the attached MediaStream is receiving data from its source
 * or not. If the stream property is null (because of a mute or another reason)
 * this method will return false.
 * NOTE: This method doesn't indicate a problem with the streams directly.
 * For example, in case of a video mute, or if the user has disposed the track,
 * the method will return false.
 * @returns {boolean} true if the stream is receiving data and false otherwise.
 */
JitsiLocalTrack.prototype._isReceivingData = function () {
    if(!this.stream)
        return false;

    // In older versions of the spec there is no "muted" property and
    // "readyState" can have the value "muted". In the latest versions
    // "readyState" can have the values "live" and "ended" and there is a
    // "muted" boolean property. If the stream is muted that means that we
    // aren't receiving any data from the source. We want to notify the users
    // of an error if the stream is muted or ended on its creation.
    return this.stream.getTracks().some(track =>
        ((!("readyState" in track) || track.readyState === "live")
            && (!("muted" in track) || track.muted === false)));
};

module.exports = JitsiLocalTrack;
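
// Usage sketch (illustrative only, assuming a JitsiLocalTrack instance named
// localTrack has already been obtained from the library's track factory):
// mute() / unmute() return Promises and reject with
// JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS while another mute/unmute
// operation is still pending; dispose() stops the underlying MediaStream and
// detaches the track from its containers.
//
//     localTrack.mute()
//         .then(function () {
//             // TRACK_MUTE_CHANGED has been emitted; unmute again.
//             return localTrack.unmute();
//         })
//         .catch(function (error) {
//             // e.g. JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS
//             logger.error("Mute/unmute failed:", error);
//         });
//
//     // When the track is no longer needed:
//     localTrack.dispose();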