
JitsiLocalTrack.js

/* global __filename, Promise */
var logger = require("jitsi-meet-logger").getLogger(__filename);
var JitsiTrack = require("./JitsiTrack");
var RTCBrowserType = require("./RTCBrowserType");
import JitsiTrackError from "../../JitsiTrackError";
import * as JitsiTrackErrors from "../../JitsiTrackErrors";
import * as JitsiTrackEvents from "../../JitsiTrackEvents";
var RTCEvents = require("../../service/RTC/RTCEvents");
var RTCUtils = require("./RTCUtils");
var MediaType = require('../../service/RTC/MediaType');
var VideoType = require('../../service/RTC/VideoType');
var CameraFacingMode = require('../../service/RTC/CameraFacingMode');

/**
 * Represents a single media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 * @param stream WebRTC MediaStream, parent of the track
 * @param track underlying WebRTC MediaStreamTrack for the new JitsiLocalTrack
 * @param mediaType the MediaType of the JitsiLocalTrack
 * @param videoType the VideoType of the JitsiLocalTrack
 * @param resolution the video resolution if it's a video track
 * @param deviceId the ID of the local device for this track
 * @param facingMode the camera facing mode used in the getUserMedia call
 * @constructor
 */
function JitsiLocalTrack(stream, track, mediaType, videoType, resolution,
                         deviceId, facingMode) {
    var self = this;
    JitsiTrack.call(this,
        null /* RTC */, stream, track,
        function () {
            if(!this.dontFireRemoveEvent)
                this.eventEmitter.emit(
                    JitsiTrackEvents.LOCAL_TRACK_STOPPED);
            this.dontFireRemoveEvent = false;
        }.bind(this) /* inactiveHandler */,
        mediaType, videoType, null /* ssrc */);
    this.dontFireRemoveEvent = false;
    this.resolution = resolution;
    this.deviceId = deviceId;
    this.startMuted = false;
    this.initialMSID = this.getMSID();
    this.inMuteOrUnmuteProgress = false;

    /**
     * The facing mode of the camera from which this JitsiLocalTrack instance
     * was obtained.
     */
    this._facingMode = facingMode;

    // Currently there is no way to know that a MediaStreamTrack ended due to
    // device disconnect in Firefox through e.g. the "readyState" property.
    // Instead we will compare the current track's label with the device
    // labels from the enumerateDevices() list.
    this._trackEnded = false;

    /**
     * The value of bytes sent received from the statistics module.
     */
    this._bytesSent = null;

    /**
     * Used only for detection of audio problems. We want to check only once
     * whether the track is sending bytes or not. This flag is set to false
     * after the check.
     */
    this._testByteSent = true;

    // Currently there is no way to determine with what device the track was
    // created (until getConstraints() is supported), however we can associate
    // tracks with real devices obtained from the enumerateDevices() call as
    // soon as it's called.
    this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

    /**
     * Indicates that we have called RTCUtils.stopMediaStream for the
     * MediaStream related to this JitsiTrack object.
     */
    this.stopStreamInProgress = false;

    this._onDeviceListChanged = function (devices) {
        self._setRealDeviceIdFromDeviceList(devices);

        // Mark the track as ended for those browsers that do not support the
        // "readyState" property. We do not touch tracks created with the
        // default device ID "".
        if (typeof self.getTrack().readyState === 'undefined'
            && typeof self._realDeviceId !== 'undefined'
            && !devices.find(function (d) {
                return d.deviceId === self._realDeviceId;
            })) {
            self._trackEnded = true;
        }
    };

    // Subscribe each created local audio track to the
    // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
    // handling this event for remote tracks (which are handled in RTC.js),
    // because there might be local tracks not attached to a conference.
    if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
        this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);

        RTCUtils.addListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
            this._onAudioOutputDeviceChanged);
    }

    RTCUtils.addListener(RTCEvents.DEVICE_LIST_CHANGED,
        this._onDeviceListChanged);

    if(this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
        this._setHandler("track_mute", () => {
            if(this._checkForCameraIssues())
                this.eventEmitter.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
        });
        this._setHandler("track_ended", () => {
            if(this._checkForCameraIssues())
                this.eventEmitter.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
        });
    }
}

JitsiLocalTrack.prototype = Object.create(JitsiTrack.prototype);
JitsiLocalTrack.prototype.constructor = JitsiLocalTrack;

/**
 * Returns true if the associated MediaStreamTrack is in the 'ended' state.
 * @returns {boolean}
 */
JitsiLocalTrack.prototype.isEnded = function () {
    return this.getTrack().readyState === 'ended' || this._trackEnded;
};

/**
 * Sets the real device ID by comparing track information with device
 * information. This is a temporary solution until the getConstraints() method
 * is implemented in browsers.
 * @param {MediaDeviceInfo[]} devices - list of devices obtained from the
 * enumerateDevices() call
 */
JitsiLocalTrack.prototype._setRealDeviceIdFromDeviceList = function (devices) {
    var track = this.getTrack(),
        device = devices.find(function (d) {
            return d.kind === track.kind + 'input' && d.label === track.label;
        });

    if (device) {
        this._realDeviceId = device.deviceId;
    }
};

/**
 * Mutes the track. Will reject the Promise if there is a mute/unmute operation
 * in progress.
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.mute = function () {
    return createMuteUnmutePromise(this, true);
};

/**
 * Unmutes the track. Will reject the Promise if there is a mute/unmute
 * operation in progress.
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.unmute = function () {
    return createMuteUnmutePromise(this, false);
};
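
/*
 * Usage sketch (illustration only, not part of this module): mute() and
 * unmute() return Promises and reject while a previous mute/unmute operation
 * is still pending, so callers should chain the calls. "localTrack" below is
 * a hypothetical, already created JitsiLocalTrack instance, and checking the
 * rejection reason against JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS via
 * the error's "name" property is an assumption about how the JitsiTrackError
 * is inspected.
 *
 *     localTrack.mute()
 *         .then(function () {
 *             // localTrack.isMuted() === true at this point.
 *             return localTrack.unmute();
 *         })
 *         .catch(function (error) {
 *             if (error.name
 *                     === JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS) {
 *                 console.warn('Previous mute/unmute still in progress');
 *             }
 *         });
 */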

/**
 * Creates a Promise for the mute/unmute operation.
 *
 * @param {JitsiLocalTrack} track - The track that will be muted/unmuted.
 * @param {boolean} mute - Whether to mute or unmute the track.
 * @returns {Promise}
 */
function createMuteUnmutePromise(track, mute) {
    if (track.inMuteOrUnmuteProgress) {
        return Promise.reject(
            new JitsiTrackError(JitsiTrackErrors.TRACK_MUTE_UNMUTE_IN_PROGRESS)
        );
    }

    track.inMuteOrUnmuteProgress = true;

    return track._setMute(mute)
        .then(function() {
            track.inMuteOrUnmuteProgress = false;
        })
        .catch(function(status) {
            track.inMuteOrUnmuteProgress = false;
            throw status;
        });
}

/**
 * Mutes / unmutes the track.
 *
 * @param {boolean} mute - If true the track will be muted. Otherwise the track
 * will be unmuted.
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._setMute = function (mute) {
    if (this.isMuted() === mute) {
        return Promise.resolve();
    }

    var promise = Promise.resolve();
    var self = this;

    // Local track can be used out of conference, so we need to handle that
    // case and mark that track should start muted or not when added to
    // conference.
    if(!this.conference || !this.conference.room) {
        this.startMuted = mute;
    }

    this.dontFireRemoveEvent = false;

    // FIXME FF does not support 'removeStream' method used to mute
    if (window.location.protocol !== "https:" ||
        this.isAudioTrack() ||
        this.videoType === VideoType.DESKTOP ||
        RTCBrowserType.isFirefox()) {
        if(this.track)
            this.track.enabled = !mute;
    } else {
        if(mute) {
            this.dontFireRemoveEvent = true;
            promise = new Promise( (resolve, reject) => {
                this._removeStreamFromConferenceAsMute(() => {
                    // FIXME: Maybe here we should set the SRC for the
                    // containers to something
                    this._stopMediaStream();
                    this._setStream(null);
                    resolve();
                }, (err) => {
                    reject(err);
                });
            });
        } else {
            // This path is only for camera.
            var streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                facingMode: this.getCameraFacingMode(),
                resolution: this.resolution
            };

            promise = RTCUtils.obtainAudioAndVideoPermissions(streamOptions)
                .then(function (streamsInfo) {
                    var mediaType = self.getType();
                    var streamInfo = streamsInfo.find(function(info) {
                        return info.mediaType === mediaType;
                    });

                    if(!streamInfo) {
                        throw new JitsiTrackError(
                            JitsiTrackErrors.TRACK_NO_STREAM_FOUND);
                    } else {
                        self._setStream(streamInfo.stream);
                        self.track = streamInfo.track;
                        // This is not good when video type changes after
                        // unmute, but let's not crash here
                        if (self.videoType !== streamInfo.videoType) {
                            logger.warn(
                                "Video type has changed after unmute!",
                                self.videoType, streamInfo.videoType);
                            self.videoType = streamInfo.videoType;
                        }
                    }

                    self.containers = self.containers.map(function(cont) {
                        return RTCUtils.attachMediaStream(cont, self.stream);
                    });

                    return self._addStreamToConferenceAsUnmute();
                });
        }
    }

    return promise
        .then(function() {
            return self._sendMuteStatus(mute);
        })
        .then(function() {
            self.eventEmitter.emit(JitsiTrackEvents.TRACK_MUTE_CHANGED);
        });
};

/**
 * Adds the stream to the conference and marks it as an "unmute" operation.
 *
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._addStreamToConferenceAsUnmute = function () {
    if (!this.conference || !this.conference.room) {
        return Promise.resolve();
    }

    var self = this;

    return new Promise(function(resolve, reject) {
        self.conference.room.addStream(
            self.stream,
            resolve,
            reject,
            {
                mtype: self.type,
                type: "unmute",
                ssrc: self.ssrc,
                msid: self.getMSID()
            });
    });
};

/**
 * Removes the stream from the conference and marks it as a "mute" operation.
 * @param {Function} successCallback will be called on success
 * @param {Function} errorCallback will be called on error
 * @private
 */
JitsiLocalTrack.prototype._removeStreamFromConferenceAsMute =
function (successCallback, errorCallback) {
    if (!this.conference || !this.conference.room) {
        successCallback();
        return;
    }

    this.conference.room.removeStream(
        this.stream,
        successCallback,
        errorCallback,
        {
            mtype: this.type,
            type: "mute",
            ssrc: this.ssrc
        });
};

/**
 * Sends the mute status for the track to the conference, if any.
 *
 * @param {boolean} mute - If the track is muted.
 * @private
 * @returns {Promise}
 */
JitsiLocalTrack.prototype._sendMuteStatus = function(mute) {
    if (!this.conference || !this.conference.room) {
        return Promise.resolve();
    }

    var self = this;

    return new Promise(function(resolve) {
        self.conference.room[
            self.isAudioTrack()
                ? 'setAudioMute'
                : 'setVideoMute'](mute, resolve);
    });
};

/**
 * @inheritdoc
 *
 * Stops sending the media track and removes it from the HTML.
 * NOTE: Works for local tracks only.
 *
 * @extends JitsiTrack#dispose
 * @returns {Promise}
 */
JitsiLocalTrack.prototype.dispose = function () {
    var self = this;
    var promise = Promise.resolve();

    if (this.conference) {
        promise = this.conference.removeTrack(this);
    }

    if (this.stream) {
        this._stopMediaStream();
        this.detach();
    }

    RTCUtils.removeListener(RTCEvents.DEVICE_LIST_CHANGED,
        this._onDeviceListChanged);

    if (this._onAudioOutputDeviceChanged) {
        RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
            this._onAudioOutputDeviceChanged);
    }

    return promise
        .then(function() {
            return JitsiTrack.prototype.dispose.call(self); // super.dispose();
        });
};
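
/*
 * Usage sketch (illustration only, not part of this module): dispose()
 * returns a Promise, so a caller that wants to release a capture device
 * before requesting a new one should wait for it to resolve. "localTrack"
 * is a hypothetical JitsiLocalTrack instance.
 *
 *     localTrack.dispose()
 *         .then(function () {
 *             // The MediaStream has been stopped, containers detached and
 *             // the track removed from its conference (if it was in one).
 *         });
 */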

/**
 * Returns <tt>true</tt> if the stream is muted and <tt>false</tt> otherwise.
 * @returns {boolean} <tt>true</tt> if the stream is muted and <tt>false</tt>
 * otherwise.
 */
JitsiLocalTrack.prototype.isMuted = function () {
    // this.stream will be null when we mute local video on Chrome
    if (!this.stream)
        return true;
    if (this.isVideoTrack() && !this.isActive()) {
        return true;
    } else {
        return !this.track || !this.track.enabled;
    }
};

/**
 * Updates the SSRC associated with the MediaStream in the JitsiLocalTrack
 * object.
 * @param ssrc the new ssrc
 */
JitsiLocalTrack.prototype._setSSRC = function (ssrc) {
    this.ssrc = ssrc;
};

/**
 * Sets the JitsiConference object associated with the track. This is a
 * temporary solution.
 * @param conference the JitsiConference object
 */
JitsiLocalTrack.prototype._setConference = function(conference) {
    this.conference = conference;

    // We want to keep up with postponed events which should have been fired
    // on the "attach" call, but for a local track we do not always have the
    // conference before attaching. However, this may result in duplicated
    // events if they have already been triggered on "attach".
    for(var i = 0; i < this.containers.length; i++)
    {
        this._maybeFireTrackAttached(this.containers[i]);
    }
};

/**
 * Gets the SSRC of this local track if it's available already or <tt>null</tt>
 * otherwise. That's because we don't know the SSRC until the local description
 * is created. In case of video and simulcast, returns the primarySSRC.
 * @returns {string} or {null}
 */
JitsiLocalTrack.prototype.getSSRC = function () {
    if(this.ssrc && this.ssrc.groups && this.ssrc.groups.length)
        return this.ssrc.groups[0].primarySSRC;
    else if(this.ssrc && this.ssrc.ssrcs && this.ssrc.ssrcs.length)
        return this.ssrc.ssrcs[0];
    else
        return null;
};

/**
 * Returns <tt>true</tt>.
 * @returns {boolean} <tt>true</tt>
 */
JitsiLocalTrack.prototype.isLocal = function () {
    return true;
};

/**
 * Returns the device id associated with the track.
 * @returns {string}
 */
JitsiLocalTrack.prototype.getDeviceId = function () {
    return this._realDeviceId || this.deviceId;
};

/**
 * Sets the value of the bytes sent statistic.
 * @param bytesSent {integer} the new value
 * NOTE: used only for audio tracks to detect audio issues.
 */
JitsiLocalTrack.prototype._setByteSent = function (bytesSent) {
    this._bytesSent = bytesSent;

    if(this._testByteSent) {
        setTimeout(function () {
            if(this._bytesSent <= 0){
                // We are not receiving anything from the microphone.
                this.eventEmitter.emit(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
            }
        }.bind(this), 3000);
        this._testByteSent = false;
    }
};
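
/*
 * Usage sketch (illustration only, not part of this module): both
 * _setByteSent() above and the camera "mute"/"ended" handlers installed in
 * the constructor report problems through the NO_DATA_FROM_SOURCE event, so
 * an application would typically just subscribe to that event. This assumes
 * the addEventListener helper inherited from JitsiTrack; "localTrack" is a
 * hypothetical JitsiLocalTrack instance.
 *
 *     localTrack.addEventListener(
 *         JitsiTrackEvents.NO_DATA_FROM_SOURCE,
 *         function () {
 *             console.warn(
 *                 'No data from the ' + localTrack.getType() + ' source');
 *         });
 */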

/**
 * Returns the facing mode for a video track from the camera. For other cases
 * (e.g. an audio track or a 'desktop' video track) returns undefined.
 *
 * @returns {CameraFacingMode|undefined}
 */
JitsiLocalTrack.prototype.getCameraFacingMode = function () {
    if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
        // MediaStreamTrack#getSettings() is not implemented in many browsers,
        // so we need feature checking here. Progress on the respective
        // browsers' implementations can be tracked at
        // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
        // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
        // for Firefox. Even if a browser implements getSettings() already, it
        // might still not return anything for 'facingMode'.
        var trackSettings;

        try {
            trackSettings = this.track.getSettings();
        } catch (e) {
            // XXX React-native-webrtc, for example, defines
            // MediaStreamTrack#getSettings() but the implementation throws a
            // "Not implemented" Error.
        }
        if (trackSettings && 'facingMode' in trackSettings) {
            return trackSettings.facingMode;
        }

        if (typeof this._facingMode !== 'undefined') {
            return this._facingMode;
        }

        // In most cases we are showing a webcam. So if we've gotten here, it
        // should be relatively safe to assume that we are probably showing
        // the user-facing camera.
        return CameraFacingMode.USER;
    }

    return undefined;
};
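
/*
 * Usage sketch (illustration only, not part of this module): a UI can use
 * getCameraFacingMode() to decide whether to mirror the local video preview,
 * e.g. mirroring only the user-facing camera. "videoTrack" is a hypothetical
 * camera JitsiLocalTrack instance.
 *
 *     var shouldMirror
 *         = videoTrack.getCameraFacingMode() === CameraFacingMode.USER;
 */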

/**
 * Stops the associated MediaStream.
 */
JitsiLocalTrack.prototype._stopMediaStream = function () {
    this.stopStreamInProgress = true;
    RTCUtils.stopMediaStream(this.stream);
    this.stopStreamInProgress = false;
};

/**
 * Detects camera issues on "ended" and "mute" events from the
 * MediaStreamTrack.
 * @returns {boolean} true if an issue is detected and false otherwise
 */
JitsiLocalTrack.prototype._checkForCameraIssues = function () {
    if(!this.isVideoTrack() || this.stopStreamInProgress ||
        this.videoType === VideoType.DESKTOP)
        return false;

    return !this._isReceivingData();
};

/**
 * Checks whether the attached MediaStream is receiving data from its source.
 * If the stream property is null (because of mute or another reason), this
 * method will return false.
 * NOTE: This method doesn't directly indicate a problem with the stream. For
 * example, it will also return false in case of video mute or if the user has
 * disposed the track.
 * @returns {boolean} true if the stream is receiving data and false otherwise.
 */
JitsiLocalTrack.prototype._isReceivingData = function () {
    if(!this.stream)
        return false;

    // In an older version of the spec there is no "muted" property and
    // "readyState" can have the value "muted". In the latest versions
    // "readyState" can have the values "live" and "ended" and there is a
    // "muted" boolean property. If the stream is muted, that means that we
    // aren't receiving any data from the source. We want to notify the users
    // of an error if the stream is muted or ended on its creation.
    return this.stream.getTracks().some(track =>
        ((!("readyState" in track) || track.readyState === "live")
            && (!("muted" in track) || track.muted === false)));
};

module.exports = JitsiLocalTrack;
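
/*
 * End-to-end usage sketch (illustration only, not part of this module).
 * Applications normally do not construct JitsiLocalTrack directly; instances
 * are assumed here to come from the public lib-jitsi-meet entry point
 * JitsiMeetJS.createLocalTracks and are then attached, muted and disposed via
 * the methods defined above. The "localVideo" element id is hypothetical.
 *
 *     JitsiMeetJS.createLocalTracks({ devices: [ 'audio', 'video' ] })
 *         .then(function (tracks) {
 *             tracks.forEach(function (track) {
 *                 if (track.getType() === 'video') {
 *                     track.attach(document.getElementById('localVideo'));
 *                 }
 *             });
 *         });
 */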