
JitsiLocalTrack.ts

import { getLogger } from '@jitsi/logger';

import JitsiConference from '../../JitsiConference';
import JitsiTrackError from '../../JitsiTrackError';
import {
    TRACK_IS_DISPOSED,
    TRACK_NO_STREAM_FOUND
} from '../../JitsiTrackErrors';
import {
    LOCAL_TRACK_STOPPED,
    NO_DATA_FROM_SOURCE,
    TRACK_MUTE_CHANGED
} from '../../JitsiTrackEvents';
import { CameraFacingMode } from '../../service/RTC/CameraFacingMode';
import { MediaType } from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import { VideoType } from '../../service/RTC/VideoType';
import {
    NO_BYTES_SENT,
    TRACK_UNMUTED,
    createNoDataFromSourceEvent
} from '../../service/statistics/AnalyticsEvents';
import browser from '../browser';
import Statistics from '../statistics/statistics';
import { isValidNumber } from '../util/MathUtil';

import JitsiTrack from './JitsiTrack';
import RTCUtils from './RTCUtils';
import TraceablePeerConnection from './TraceablePeerConnection';

const logger = getLogger('modules/RTC/JitsiLocalTrack');

export interface IStreamEffect {
    isEnabled: (track: JitsiLocalTrack) => boolean;
    isMuted?: () => boolean;
    setMuted?: (muted: boolean) => void;
    startEffect: (stream: MediaStream) => MediaStream;
    stopEffect: () => void;
}
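
// Illustrative sketch (not part of this module): a minimal IStreamEffect
// implementation that passes the stream through unchanged. A real effect
// (e.g. a virtual background) would return a processed MediaStream from
// startEffect() and release its resources in stopEffect().
//
//     class PassThroughEffect implements IStreamEffect {
//         isEnabled(track: JitsiLocalTrack): boolean {
//             return track.isVideoTrack();
//         }
//         startEffect(stream: MediaStream): MediaStream {
//             return stream;
//         }
//         stopEffect(): void {
//             // Nothing to clean up for a pass-through effect.
//         }
//     }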

export interface ITrackMetadata {
    displaySurface?: string;
    timestamp: number;
}

export interface ITrackConstraints {
    [key: string]: any;
    height?: any;
    width?: any;
}

export interface ITrackInfo {
    constraints: ITrackConstraints;
    deviceId: string;
    effects?: IStreamEffect[];
    facingMode?: CameraFacingMode;
    mediaType: MediaType;
    rtcId: number;
    sourceId?: string;
    sourceType?: string;
    stream: MediaStream;
    track: MediaStreamTrack;
    videoType?: VideoType;
}

export interface IStreamInfo {
    stream: MediaStream;
    track: MediaStreamTrack;
    videoType?: VideoType;
}

/**
 * Represents a single media track (either audio or video).
 * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
 */
export default class JitsiLocalTrack extends JitsiTrack {
    private _setEffectInProgress: boolean;
    private _constraints: ITrackConstraints;
    private _prevSetMuted: Promise<void>;
    private _facingMode?: CameraFacingMode;
    private _trackEnded: boolean;
    private _hasSentData: boolean;
    private _testDataSent: boolean;
    private _realDeviceId?: string;
    private _sourceName: string | null;
    private _ssrc: number | null;
    private _trackMutedTS: number;
    private _onDeviceListWillChange: (devices: MediaDeviceInfo[]) => void;
    private _onAudioOutputDeviceChanged?: () => void;
    private _streamEffect?: IStreamEffect;
    private _originalStream?: MediaStream | null;
    private _stopStreamInProgress: boolean;
    private _effectEnabled?: boolean;

    public metadata: ITrackMetadata;
    public rtcId: number;
    public sourceId?: string;
    public sourceType?: string;
    public deviceId: string;
    public resolution?: number;
    public maxEnabledResolution?: number;
    public conference: JitsiConference | null;

    /**
     * Constructs a new JitsiLocalTrack instance.
     *
     * @constructor
     * @param {Object} trackInfo
     * @param {Object} trackInfo.constraints - The constraints used for creating the track.
     * @param {number} trackInfo.rtcId - The ID assigned by the RTC module.
     * @param {Object} trackInfo.stream - The WebRTC MediaStream, parent of the track.
     * @param {Object} trackInfo.track - The underlying WebRTC MediaStreamTrack for the new JitsiLocalTrack.
     * @param {string} trackInfo.mediaType - The MediaType of the JitsiLocalTrack.
     * @param {string} trackInfo.videoType - The VideoType of the JitsiLocalTrack.
     * @param {Array<Object>} trackInfo.effects - The effects to be applied to the JitsiLocalTrack.
     * @param {string} trackInfo.deviceId - The ID of the local device for this track.
     * @param {string} trackInfo.facingMode - The camera facing mode used in the getUserMedia call (mobile only).
     * @param {string} trackInfo.sourceId - The id of the desktop sharing source, which is the Chrome media source ID,
     * returned by Desktop Picker on Electron. NOTE: defined for desktop sharing tracks only.
     * @param {string} trackInfo.sourceType - The type of source the track originates from.
     */
    constructor({
        constraints,
        deviceId,
        facingMode,
        mediaType,
        rtcId,
        sourceId,
        sourceType,
        stream,
        track,
        videoType,
        effects = []
    }: ITrackInfo) {
        super(
            /* conference */ null,
            stream,
            track,
            /* streamInactiveHandler */ () => this.emit(LOCAL_TRACK_STOPPED, this),
            mediaType,
            videoType);

        this._setEffectInProgress = false;

        const effect = effects.find(e => e.isEnabled(this));

        if (effect) {
            this._startStreamEffect(effect);
        }

        const displaySurface = videoType === VideoType.DESKTOP
            ? track.getSettings().displaySurface
            : null;

        /**
         * Track metadata.
         */
        this.metadata = {
            timestamp: Date.now(),
            ...displaySurface ? { displaySurface } : {}
        };

        /**
         * The ID assigned by the RTC module on instance creation.
         *
         * @type {number}
         */
        this.rtcId = rtcId;
        this.sourceId = sourceId;
        this.sourceType = sourceType ?? displaySurface;

        // Cache the constraints of the track in case this track model needs
        // to call getUserMedia again, such as when unmuting.
        this._constraints = track.getConstraints();

        if (mediaType === MediaType.VIDEO) {
            if (videoType === VideoType.CAMERA) {
                // Safari returns an empty constraints object; construct the constraints using getSettings.
                // Firefox in "fingerprint resistance mode" does a similar thing, except a `mediaSource` key is set.
                if (!this._constraints.height || !this._constraints.width) {
                    this._constraints = {
                        height: { ideal: this.getHeight() },
                        width: { ideal: this.getWidth() }
                    };
                }

                // If the constraints are still empty, fall back to the constraints used for the initial gUM call.
                if (!isValidNumber(this._constraints.height.ideal) && !isValidNumber(this._constraints.width.ideal)) {
                    this._constraints.height = { ideal: constraints.height.ideal };
                    this._constraints.width = { ideal: constraints.width.ideal };
                }
            }

            // Get the resolution from the track itself since we do not know what camera capability the browser has
            // picked for the given constraints; fall back to the constraints if MediaStreamTrack.getSettings()
            // doesn't return the height.
            this.resolution = this.getHeight();
            if (!isValidNumber(this.resolution) && this._constraints.height?.ideal) {
                this.resolution = this._constraints.height.ideal;
            }
            this.maxEnabledResolution = this.resolution;
        }

        this.deviceId = deviceId;

        /**
         * The <tt>Promise</tt> which represents the progress of a previously
         * queued/scheduled {@link _setMuted} (from the point of view of
         * {@link _queueSetMuted}).
         *
         * @private
         * @type {Promise}
         */
        this._prevSetMuted = Promise.resolve();

        /**
         * The facing mode of the camera from which this JitsiLocalTrack
         * instance was obtained.
         *
         * @private
         * @type {CameraFacingMode|undefined}
         */
        this._facingMode = facingMode;

        // Currently there is no way to know that a MediaStreamTrack ended due
        // to a device disconnect in Firefox through e.g. the "readyState"
        // property. Instead we compare the current track's label with the
        // device labels from the enumerateDevices() list.
        this._trackEnded = false;

        /**
         * Indicates whether data has been sent or not.
         */
        this._hasSentData = false;
  210. * Used only for detection of audio problems. We want to check only once
  211. * whether the track is sending data ot not. This flag is set to false
  212. * after the check.
  213. */
        this._testDataSent = true;

        // Currently there is no way to determine with what device the track was
        // created (until getConstraints() is supported), however we can associate
        // tracks with real devices obtained from the enumerateDevices() call as
        // soon as it's called.
        // NOTE: this.deviceId corresponds to the device id specified in the gUM constraints and this._realDeviceId
        // seems to correspond to the id of a matching device from the available device list.
        this._realDeviceId = this.deviceId === '' ? undefined : this.deviceId;

        // The source name that will be signaled for this track.
        this._sourceName = null;

        // The primary SSRC associated with the local media track. This will be set after the local description
        // is processed, once the track is added to the peerconnection.
        this._ssrc = null;

        this._trackMutedTS = 0;

        this._onDeviceListWillChange = (devices: MediaDeviceInfo[]) => {
            const oldRealDeviceId = this._realDeviceId;

            this._setRealDeviceIdFromDeviceList(devices);

            if (
                // Mark the track as ended for those browsers that do not support
                // the "readyState" property. We do not touch tracks created with
                // the default device ID "".
                (typeof this.getTrack().readyState === 'undefined'
                    && typeof this._realDeviceId !== 'undefined'
                    && !devices.find(d => d.deviceId === this._realDeviceId))

                // If there was an associated realDeviceId and after the device change the realDeviceId is undefined,
                // then the associated device has been disconnected and the _trackEnded flag needs to be set. In
                // addition, on some Chrome versions the readyState property is set after the device change event is
                // triggered, which causes issues in jitsi-meet with the selection of a new device because we don't
                // detect that the old one was removed.
                || (typeof oldRealDeviceId !== 'undefined' && typeof this._realDeviceId === 'undefined')
            ) {
                this._trackEnded = true;
            }
        };

        // Subscribe each created local audio track to the
        // RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED event. This is different from
        // handling this event for remote tracks (which are handled in RTC.js),
        // because there might be local tracks not attached to a conference.
        if (this.isAudioTrack() && RTCUtils.isDeviceChangeAvailable('output')) {
            this._onAudioOutputDeviceChanged = this.setAudioOutput.bind(this);
            RTCUtils.addListener(
                RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        RTCUtils.addListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);

        this._initNoDataFromSourceHandlers();
    }
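
    // Usage sketch (an assumption for illustration: applications normally obtain
    // instances through the library's track-creation helpers rather than calling
    // this constructor directly; the option names below mirror the ITrackInfo
    // fields declared above):
    //
    //     const trackInfo: ITrackInfo = {
    //         constraints: { height: { ideal: 720 }, width: { ideal: 1280 } },
    //         deviceId: 'default',
    //         mediaType: MediaType.VIDEO,
    //         rtcId: 1,
    //         stream,
    //         track: stream.getVideoTracks()[0],
    //         videoType: VideoType.CAMERA
    //     };
    //     const localTrack = new JitsiLocalTrack(trackInfo);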

    /**
     * Adds the stream to the conference and marks it as an "unmute" operation.
     *
     * @private
     * @returns {Promise}
     */
    private _addStreamToConferenceAsUnmute(): Promise<void> {
        if (!this.conference) {
            return Promise.resolve();
        }

        // FIXME it would be good to not include the conference as part of this process. Only the
        // TraceablePeerConnections to which the track is attached should care about this action. The TPCs to which
        // the track is not attached can sync up when the track is re-attached. A problem with that is that the
        // "modify sources" queue is part of the JingleSessionPC and it would be excluded from the process. One
        // solution would be to extract a class between TPC and JingleSessionPC which would contain the queue and
        // would notify the signaling layer when local SSRCs are changed. This would help to separate XMPP from the
        // RTC module.
        return new Promise((resolve, reject) => {
            this.conference._addLocalTrackToPc(this)
                .then(resolve, (error: any) => reject(new Error(error)));
        });
    }

    /**
     * Fires the NO_DATA_FROM_SOURCE event and logs it to analytics.
     *
     * @private
     * @returns {void}
     */
    private _fireNoDataFromSourceEvent(): void {
        const value = !this.isReceivingData();

        this.emit(NO_DATA_FROM_SOURCE, value);
        logger.debug(`NO_DATA_FROM_SOURCE event with value ${value} detected for track: ${this}`);

        // FIXME: Should we report all of those events?
        Statistics.sendAnalytics(createNoDataFromSourceEvent(this.getType(), value));
    }
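
    // Consumers can observe the signal emitted above; a sketch, assuming the usual
    // EventEmitter-style on() method inherited through JitsiTrack:
    //
    //     track.on(NO_DATA_FROM_SOURCE, (hasNoData: boolean) => {
    //         hasNoData && logger.warn(`${track} stopped receiving data from its source`);
    //     });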

    /**
     * Sets handlers on the MediaStreamTrack object that will detect camera issues.
     *
     * @private
     * @returns {void}
     */
    private _initNoDataFromSourceHandlers(): void {
        if (!this._isNoDataFromSourceEventsEnabled()) {
            return;
        }

        this._setHandler('track_mute', () => {
            this._trackMutedTS = window.performance.now();
            this._fireNoDataFromSourceEvent();
        });

        this._setHandler('track_unmute', () => {
            this._fireNoDataFromSourceEvent();
            Statistics.sendAnalyticsAndLog(
                TRACK_UNMUTED,
                {
                    'media_type': this.getType(),
                    'track_type': 'local',
                    value: window.performance.now() - this._trackMutedTS
                });
        });

        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            this._setHandler('track_ended', () => {
                if (!this.isReceivingData()) {
                    this._fireNoDataFromSourceEvent();
                }
            });
        }
    }

    /**
     * Returns true if "no data from source" events are enabled for this JitsiLocalTrack and false otherwise.
     *
     * @private
     * @returns {boolean} - True if the events are enabled and false otherwise.
     */
    private _isNoDataFromSourceEventsEnabled(): boolean {
        // Disable the events for screen sharing.
        return !this.isVideoTrack() || this.videoType !== VideoType.DESKTOP;
    }

    /**
     * Initializes a new Promise to execute {@link #_setMuted}. May be called multiple times in a row and the
     * invocations of {@link #_setMuted} and, consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
     * serialized fashion.
     *
     * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
     * @private
     * @returns {Promise}
     */
    private _queueSetMuted(muted: boolean): Promise<void> {
        const setMuted = this._setMuted.bind(this, muted);

        this._prevSetMuted = this._prevSetMuted.then(setMuted, setMuted);

        return this._prevSetMuted;
    }
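
    // Serialization sketch: each call chains onto _prevSetMuted with the same
    // handler for both fulfillment and rejection, so rapid successive calls are
    // applied in order even if an earlier one fails:
    //
    //     track.mute();        // queued first
    //     track.unmute();      // runs after mute() settles
    //     await track.mute();  // runs after both of the above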

    /**
     * Removes the stream from the conference and marks it as a "mute" operation.
     *
     * @param {Function} successCallback - Callback that will be called when the operation is successful.
     * @param {Function} errorCallback - Callback that will be called when the operation fails.
     * @private
     * @returns {void}
     */
    private _removeStreamFromConferenceAsMute(successCallback: () => void, errorCallback: (error: Error) => void): void {
        if (!this.conference) {
            successCallback();

            return;
        }
        this.conference._removeLocalTrackFromPc(this).then(
            successCallback,
            (error: any) => errorCallback(new Error(error)));
    }

    /**
     * Sends the mute status for the track to the conference, if any.
     *
     * @param {boolean} mute - If the track is muted.
     * @private
     * @returns {void}
     */
    private _sendMuteStatus(mute: boolean): void {
        if (this.conference) {
            this.conference._setTrackMuteStatus(this.getType(), this, mute) && this.conference.room.sendPresence();
        }
    }

    /**
     * Mutes / unmutes this track.
     *
     * @param {boolean} muted - If <tt>true</tt>, this track will be muted; otherwise, this track will be unmuted.
     * @private
     * @returns {Promise}
     */
    private _setMuted(muted: boolean): Promise<void> {
        if (this.isMuted() === muted && this.videoType !== VideoType.DESKTOP) {
            return Promise.resolve();
        }
        if (this.disposed) {
            return Promise.reject(new JitsiTrackError(TRACK_IS_DISPOSED));
        }

        let promise: Promise<void | IStreamInfo[]> = Promise.resolve();

        // A function that will print info about the muted status transition.
        const logMuteInfo = () => logger.info(`Mute ${this}: ${muted}`);

        // In React Native we mute the camera by setting track.enabled but that doesn't
        // work for screen-share tracks, so do the remove-as-mute for those.
        const doesVideoMuteByStreamRemove
            = browser.isReactNative() ? this.videoType === VideoType.DESKTOP : browser.doesVideoMuteByStreamRemove();

        // In multi-stream mode, desktop tracks are muted from jitsi-meet instead of being removed from the
        // conference. This is needed because we don't want the client to signal a source-remove to the remote peer
        // for the desktop track when screenshare is stopped. Later, when screenshare is started again, the same
        // sender will be re-used without the need to signal a new ssrc through source-add.
        if (this.isAudioTrack() || !doesVideoMuteByStreamRemove) {
            logMuteInfo();

            // If we have a stream effect that implements its own mute functionality, prioritize it before the
            // normal mute, e.g. the stream effect that implements system audio sharing has a custom
            // mute state in which, if the user mutes, system audio still has to go through.
            if (this._streamEffect?.setMuted) {
                this._streamEffect.setMuted(muted);
            } else if (this.track) {
                this.track.enabled = !muted;
            }
        } else if (muted) {
            promise = new Promise((resolve, reject) => {
                logMuteInfo();
                this._removeStreamFromConferenceAsMute(
                    () => {
                        if (this._streamEffect) {
                            this._stopStreamEffect();
                        }

                        // FIXME: Maybe here we should set the SRC for the containers to something.
                        // We don't want any events to be fired on this stream.
                        this._unregisterHandlers();
                        this.stopStream();
                        this._setStream(null);
                        resolve();
                    },
                    reject);
            });
        } else {
            logMuteInfo();

            // This path is only for camera.
            const streamOptions = {
                cameraDeviceId: this.getDeviceId(),
                devices: [ MediaType.VIDEO ],
                effects: this._streamEffect ? [ this._streamEffect ] : [],
                facingMode: this.getCameraFacingMode()
            };

            promise
                = RTCUtils.obtainAudioAndVideoPermissions({
                    ...streamOptions,
                    constraints: { video: this._constraints } } as any) as Promise<IStreamInfo[]>;

            promise = promise.then((streamsInfo: IStreamInfo[]) => {
                const streamInfo = streamsInfo.find(info => info.track.kind === this.getType());

                if (streamInfo) {
                    this._setStream(streamInfo.stream);
                    this.track = streamInfo.track;

                    // This is not good when the video type changes after
                    // unmute, but let's not crash here.
                    if (this.videoType !== streamInfo.videoType) {
                        logger.warn(
                            `${this}: video type has changed after unmute!`,
                            this.videoType, streamInfo.videoType);
                        this.videoType = streamInfo.videoType;
                    }
                } else {
                    throw new JitsiTrackError(TRACK_NO_STREAM_FOUND);
                }

                if (this._streamEffect) {
                    this._startStreamEffect(this._streamEffect);
                }

                this.containers.map(cont => RTCUtils.attachMediaStream(cont, this.stream).catch(() => {
                    logger.error(`Attach media failed for ${this} on video unmute!`);
                }));

                return this._addStreamToConferenceAsUnmute();
            });
        }

        return promise
            .then(() => {
                this._sendMuteStatus(muted);

                // Send the videoType message to the bridge.
                this.isVideoTrack() && this.conference?._sendBridgeVideoTypeMessage(this);
                this.emit(TRACK_MUTE_CHANGED, this);
            });
    }

    /**
     * Sets the real device ID by comparing track information with device information. This is a temporary solution
     * until the getConstraints() method is implemented in browsers.
     *
     * @param {MediaDeviceInfo[]} devices - The list of devices obtained from an enumerateDevices() call.
     * @private
     * @returns {void}
     */
    private _setRealDeviceIdFromDeviceList(devices: MediaDeviceInfo[]): void {
        const track = this.getTrack();
        const kind = `${track.kind}input`;

        // We need to match by deviceId as well, in case of multiple devices with the same label.
        let device = devices.find(d => d.kind === kind && d.label === track.label && d.deviceId === this.deviceId);

        if (!device && this._realDeviceId === 'default') { // the default device has been changed.
            // If the default device was 'A' and the default device is changed to 'B', the label for the track will
            // remain 'Default - A' but the label for the device in the device list will be updated to 'A'. That's
            // why, in order to match it, we need to remove the 'Default - ' part.
            const label = (track.label || '').replace('Default - ', '');

            device = devices.find(d => d.kind === kind && d.label === label);
        }

        if (device) {
            this._realDeviceId = device.deviceId;
        } else {
            this._realDeviceId = undefined;
        }
    }

    /**
     * Starts the effect process and switches the track to the modified stream.
     *
     * @param {Object} effect - Represents the effect instance.
     * @private
     * @returns {void}
     */
    private _startStreamEffect(effect: IStreamEffect): void {
        this._streamEffect = effect;
        this._originalStream = this.stream;
        this._setStream(this._streamEffect.startEffect(this._originalStream));
        this.track = this.stream.getTracks()[0];
    }

    /**
     * Stops the effect process and restores the original stream.
     *
     * @private
     * @returns {void}
     */
    private _stopStreamEffect(): void {
        if (this._streamEffect) {
            this._streamEffect.stopEffect();
            this._setStream(this._originalStream);
            this._originalStream = null;
            this.track = this.stream ? this.stream.getTracks()[0] : null;
        }
    }

    /**
     * Switches the camera facing mode if the WebRTC implementation supports the custom MediaStreamTrack._switchCamera
     * method. Currently, the method in question is implemented in react-native-webrtc only. When such a WebRTC
     * implementation is executing, the method is the preferred way to switch between the front/user-facing and the
     * back/environment-facing cameras because it will likely be (as is the case of react-native-webrtc) noticeably
     * faster than creating a new MediaStreamTrack via a new getUserMedia call with the switched facingMode constraint
     * value. Moreover, the approach with a new getUserMedia call may not even work: WebRTC on Android and iOS is
     * either very slow to open the camera a second time or plainly freezes attempting to do that.
     *
     * @returns {void}
     */
    private _switchCamera(): void {
        if (this.isVideoTrack()
                && this.videoType === VideoType.CAMERA
                && typeof this.track._switchCamera === 'function') {
            this.track._switchCamera();
            this._facingMode
                = this._facingMode === CameraFacingMode.ENVIRONMENT
                    ? CameraFacingMode.USER
                    : CameraFacingMode.ENVIRONMENT;
        }
    }

    /**
     * Stops the currently used effect (if there is one) and starts the passed effect (if there is one).
     *
     * @param {Object|undefined} effect - The new effect to be set.
     * @private
     * @returns {void}
     */
    private _switchStreamEffect(effect?: IStreamEffect): void {
        if (this._streamEffect) {
            this._stopStreamEffect();
            this._streamEffect = undefined;
        }
        if (effect) {
            this._startStreamEffect(effect);
        }
    }

    /**
     * Sets the stream property of the JitsiLocalTrack object and sets all stored handlers on it.
     *
     * @param {MediaStream} stream - The new MediaStream.
     * @protected
     * @returns {void}
     */
    protected _setStream(stream: MediaStream | null): void {
        super._setStream(stream);
    }

    /**
     * @inheritdoc
     *
     * Stops sending the media track and removes it from the HTML. NOTE: Works for local tracks only.
     *
     * @extends JitsiTrack#dispose
     * @returns {Promise}
     */
    async dispose(): Promise<void> {
        if (this.disposed) {
            return;
        }

        // Remove the effect instead of stopping it so that the original stream is restored
        // on both the local track and on the peerconnection.
        if (this._streamEffect) {
            await this.setEffect(undefined);
        }

        if (this.conference) {
            await this.conference.removeTrack(this);
        }

        if (this.stream) {
            this.stopStream();
        }

        RTCUtils.removeListener(RTCEvents.DEVICE_LIST_WILL_CHANGE, this._onDeviceListWillChange);

        if (this._onAudioOutputDeviceChanged) {
            RTCUtils.removeListener(RTCEvents.AUDIO_OUTPUT_DEVICE_CHANGED,
                this._onAudioOutputDeviceChanged);
        }

        return super.dispose();
    }

    /**
     * Returns the facing mode for a video track from the camera. For other cases (e.g. an audio track or a 'desktop'
     * video track) returns undefined.
     *
     * @returns {CameraFacingMode|undefined}
     */
    getCameraFacingMode(): CameraFacingMode | undefined {
        if (this.isVideoTrack() && this.videoType === VideoType.CAMERA) {
            // MediaStreamTrack#getSettings() is not implemented in many
            // browsers, so we need feature checking here. Progress on the
            // respective browser's implementation can be tracked at
            // https://bugs.chromium.org/p/webrtc/issues/detail?id=2481 for
            // Chromium and https://bugzilla.mozilla.org/show_bug.cgi?id=1213517
            // for Firefox. Even if a browser implements getSettings() already,
            // it might still not return anything for 'facingMode'.
            const trackSettings = this.track.getSettings?.();

            if (trackSettings && 'facingMode' in trackSettings) {
                return trackSettings.facingMode as CameraFacingMode;
            }

            if (typeof this._facingMode !== 'undefined') {
                return this._facingMode;
            }

            // In most cases we are showing a webcam. So if we've gotten here,
            // it should be relatively safe to assume that we are probably
            // showing the user-facing camera.
            return CameraFacingMode.USER;
        }

        return undefined;
    }

    /**
     * Returns the capture resolution of the video track.
     *
     * @returns {Number}
     */
    getCaptureResolution(): number {
        if (this.videoType === VideoType.CAMERA || !browser.isWebKitBased()) {
            return this.resolution!;
        }

        return this.getHeight();
    }

    /**
     * Returns the device id associated with the track.
     *
     * @returns {string}
     */
    getDeviceId(): string {
        return this._realDeviceId || this.deviceId;
    }

    /**
     * Gets the duration of the track.
     *
     * @returns {Number} the duration of the track in seconds
     */
    getDuration(): number {
        return (Date.now() / 1000) - (this.metadata.timestamp / 1000);
    }

    /**
     * Returns the participant id which owns the track.
     *
     * @returns {string} the id of the participant. It corresponds to the
     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
     */
    getParticipantId(): string {
        return this.conference?.myUserId();
    }

    /**
     * Returns the source name associated with the jitsi track.
     *
     * @returns {string | null} source name
     */
    getSourceName(): string | null {
        return this._sourceName;
    }

    /**
     * Returns the primary SSRC associated with the track.
     *
     * @returns {number}
     */
    getSsrc(): number | null {
        return this._ssrc;
    }

    /**
     * Returns whether the associated MediaStreamTrack is in the 'ended' state.
     *
     * @returns {boolean}
     */
    isEnded(): boolean {
        if (this.isVideoTrack() && this.isMuted()) {
            // If a video track is muted the readyState will be 'ended', that's why we need to rely only on the
            // _trackEnded flag.
            return this._trackEnded;
        }

        return this.getTrack().readyState === 'ended' || this._trackEnded;
    }

    /**
     * Returns <tt>true</tt>.
     *
     * @returns {boolean} <tt>true</tt>
     */
    isLocal(): boolean {
        return true;
    }

    /**
     * Returns <tt>true</tt> if the stream is muted and <tt>false</tt> otherwise.
     *
     * @returns {boolean} <tt>true</tt> if the stream is muted and <tt>false</tt> otherwise.
     */
    isMuted(): boolean {
        // this.stream will be null when we mute local video on Chrome.
        if (!this.stream) {
            return true;
        }
        if (this.isVideoTrack() && !this.isActive()) {
            return true;
        }

        // If the currently used stream effect has its own muted state, use that.
        if (this._streamEffect?.isMuted) {
            return this._streamEffect.isMuted();
        }

        return !this.track?.enabled;
    }

    /**
     * Checks whether the attached MediaStream is receiving data from the source or not. If the stream property is
     * null (because of mute or another reason) this method will return false.
     * NOTE: This method doesn't indicate a problem with the streams directly. For example, it will return false in
     * the case of video mute or if the user has disposed the track.
     *
     * @returns {boolean} true if the stream is receiving data and false otherwise.
     */
    isReceivingData(): boolean {
        if (this.isVideoTrack()
            && (this.isMuted() || this._stopStreamInProgress || this.videoType === VideoType.DESKTOP)) {
            return true;
        }

        if (!this.stream) {
            return false;
        }

        // In an older version of the spec there is no muted property and readyState can have the value 'muted'. In
        // the latest versions readyState can have the values 'live' and 'ended' and there is a muted boolean
        // property. If the stream is muted, that means that we aren't receiving any data from the source. We want to
        // notify the users of an error if the stream is muted or ended on its creation.

        // When a video effect (e.g. blur) is enabled, use the original video stream.
        const stream = this._effectEnabled ? this._originalStream : this.stream;

        return stream.getTracks().some(track =>
            (!('readyState' in track) || track.readyState === 'live')
                && (!('muted' in track) || track.muted !== true));
    }

    /**
     * Asynchronously mutes this track.
     *
     * @returns {Promise}
     */
    mute(): Promise<void> {
        return this._queueSetMuted(true);
    }
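
    // Usage sketch for the public mute API (unmute() below is symmetrical):
    //
    //     await track.mute();
    //     console.assert(track.isMuted());
    //     await track.unmute();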

    /**
     * Handles "bytes sent" statistics. NOTE: used only for audio tracks to detect audio issues.
     *
     * @param {TraceablePeerConnection} tpc - The peerconnection that is reporting the bytes sent stat.
     * @param {number} bytesSent - The new value.
     * @returns {void}
     */
    onByteSentStatsReceived(tpc: TraceablePeerConnection, bytesSent: number): void {
        if (bytesSent > 0) {
            this._hasSentData = true;
        }
        const iceConnectionState = tpc.getConnectionState();

        if (this._testDataSent && iceConnectionState === 'connected') {
            setTimeout(() => {
                if (!this._hasSentData) {
                    logger.warn(`${this} 'bytes sent' <= 0: ${bytesSent}`);
                    Statistics.analytics.sendEvent(NO_BYTES_SENT, { 'media_type': this.getType() });
                }
            }, 3000);
            this._testDataSent = false;
        }
    }

    /**
     * Sets the JitsiConference object associated with the track. This is a temporary solution.
     *
     * @param conference - The JitsiConference object.
     * @returns {void}
     */
    setConference(conference: JitsiConference): void {
        this.conference = conference;
    }

    /**
     * Sets the effect and switches between the modified stream and the original one.
     *
     * @param {Object} effect - Represents the effect instance to be used.
     * @returns {Promise}
     */
    setEffect(effect?: IStreamEffect): Promise<void> {
        if (typeof this._streamEffect === 'undefined' && typeof effect === 'undefined') {
            return Promise.resolve();
        }

        if (typeof effect !== 'undefined' && !effect.isEnabled(this)) {
            return Promise.reject(new Error('Incompatible effect instance!'));
        }

        if (this._setEffectInProgress === true) {
            return Promise.reject(new Error('setEffect already in progress!'));
        }

        // In case we have an audio track that is being enhanced with an effect, we still want it to be applied,
        // even if the track is muted. Whereas for video, the actual track doesn't exist if it's muted.
        if (this.isMuted() && !this.isAudioTrack()) {
            this._streamEffect = effect;

            return Promise.resolve();
        }

        const conference = this.conference;

        if (!conference) {
            this._switchStreamEffect(effect);
            if (this.isVideoTrack()) {
                this.containers.forEach(cont => {
                    RTCUtils.attachMediaStream(cont, this.stream).catch(() => {
                        logger.error(`Attach media failed for ${this} when trying to set effect.`);
                    });
                });
            }

            return Promise.resolve();
        }

        this._setEffectInProgress = true;

        return conference._removeLocalTrackFromPc(this)
            .then(() => {
                this._switchStreamEffect(effect);
                if (this.isVideoTrack()) {
                    this.containers.forEach(cont => {
                        RTCUtils.attachMediaStream(cont, this.stream).catch(() => {
                            logger.error(`Attach media failed for ${this} when trying to set effect.`);
                        });
                    });
                }

                return conference._addLocalTrackToPc(this);
            })
            .then(() => {
                this._setEffectInProgress = false;
            })
            .catch((error: any) => {
                // Any error here is unrecoverable and will trigger the CONFERENCE_FAILED event, but let's try to
                // clean up everything related to the effect functionality.
                this._setEffectInProgress = false;
                this._switchStreamEffect();
                logger.error('Failed to switch to the new stream!', error);
                throw error;
            });
    }
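
    // Usage sketch (assuming an effect instance that implements IStreamEffect,
    // e.g. a hypothetical virtual-background effect):
    //
    //     await track.setEffect(backgroundEffect); // swap the modified stream in
    //     await track.setEffect(undefined);        // restore the original stream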

    /**
     * Sets the source name to be used for signaling the jitsi track.
     *
     * @param {string} name - The source name.
     */
    setSourceName(name: string): void {
        this._sourceName = name;
    }

    /**
     * Sets the primary SSRC for the track.
     *
     * @param {number} ssrc - The SSRC.
     */
    setSsrc(ssrc: number): void {
        if (isValidNumber(ssrc)) {
            this._ssrc = ssrc;
        }
    }

    /**
     * Stops the associated MediaStream.
     *
     * @returns {void}
     */
    stopStream(): void {
        /**
         * Indicates that we are executing {@link #stopStream} i.e.
         * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
         * associated with this <tt>JitsiTrack</tt> instance.
         *
         * @private
         * @type {boolean}
         */
        this._stopStreamInProgress = true;

        try {
            RTCUtils.stopMediaStream(this.stream);
        } finally {
            this._stopStreamInProgress = false;
        }
    }

    /**
     * Creates a text representation of this local track instance.
     *
     * @return {string}
     */
    toString(): string {
        return `LocalTrack[${this.rtcId},${this.getType()}]`;
    }

    /**
     * Asynchronously unmutes this track.
     *
     * @returns {Promise}
     */
    unmute(): Promise<void> {
        return this._queueSetMuted(false);
    }
}