Browse Source

feat(ts) add auto-generated type declarations

This should allow us to monitor the progress when comparing them to hand-crafted
ones.
tags/v0.0.2
Saúl Ibarra Corretgé 3 years ago
parent
commit
e66c280524
100 changed files with 10837 additions and 1 deletions
  1. 1
    0
      .eslintignore
  2. 2
    1
      tsconfig.json
  3. 1179
    0
      types/auto/JitsiConference.d.ts
  4. 88
    0
      types/auto/JitsiConferenceErrors.d.ts
  5. 40
    0
      types/auto/JitsiConferenceEventManager.d.ts
  6. 383
    0
      types/auto/JitsiConferenceEvents.d.ts
  7. 106
    0
      types/auto/JitsiConnection.d.ts
  8. 28
    0
      types/auto/JitsiConnectionErrors.d.ts
  9. 42
    0
      types/auto/JitsiConnectionEvents.d.ts
  10. 100
    0
      types/auto/JitsiMediaDevices.d.ts
  11. 29
    0
      types/auto/JitsiMediaDevicesEvents.d.ts
  12. 2
    0
      types/auto/JitsiMeetJS.d.ts
  13. 201
    0
      types/auto/JitsiParticipant.d.ts
  14. 0
    0
      types/auto/JitsiParticipantEvents.d.ts
  15. 53
    0
      types/auto/JitsiTrackError.d.ts
  16. 58
    0
      types/auto/JitsiTrackErrors.d.ts
  17. 39
    0
      types/auto/JitsiTrackEvents.d.ts
  18. 12
    0
      types/auto/JitsiTranscriptionStatus.d.ts
  19. 81
    0
      types/auto/authenticateAndUpgradeRole.d.ts
  20. 23
    0
      types/auto/connection_optimization/external_connect.d.ts
  21. 2
    0
      types/auto/index.d.ts
  22. 133
    0
      types/auto/modules/RTC/BridgeChannel.d.ts
  23. 106
    0
      types/auto/modules/RTC/CodecSelection.d.ts
  24. 295
    0
      types/auto/modules/RTC/JitsiLocalTrack.d.ts
  25. 109
    0
      types/auto/modules/RTC/JitsiRemoteTrack.d.ts
  26. 240
    0
      types/auto/modules/RTC/JitsiTrack.d.ts
  27. 68
    0
      types/auto/modules/RTC/MockClasses.d.ts
  28. 435
    0
      types/auto/modules/RTC/RTC.d.ts
  29. 181
    0
      types/auto/modules/RTC/RTCUtils.d.ts
  30. 118
    0
      types/auto/modules/RTC/ScreenObtainer.d.ts
  31. 161
    0
      types/auto/modules/RTC/TPCUtils.d.ts
  32. 785
    0
      types/auto/modules/RTC/TraceablePeerConnection.d.ts
  33. 197
    0
      types/auto/modules/browser/BrowserCapabilities.d.ts
  34. 3
    0
      types/auto/modules/browser/index.d.ts
  35. 97
    0
      types/auto/modules/connectivity/ConnectionQuality.d.ts
  36. 36
    0
      types/auto/modules/connectivity/IceFailedHandling.d.ts
  37. 33
    0
      types/auto/modules/connectivity/NetworkInfo.d.ts
  38. 350
    0
      types/auto/modules/connectivity/ParticipantConnectionStatus.d.ts
  39. 6
    0
      types/auto/modules/detection/ActiveDeviceDetector.d.ts
  40. 54
    0
      types/auto/modules/detection/DetectionEvents.d.ts
  41. 57
    0
      types/auto/modules/detection/NoAudioSignalDetection.d.ts
  42. 25
    0
      types/auto/modules/detection/P2PDominantSpeakerDetection.d.ts
  43. 129
    0
      types/auto/modules/detection/TrackVADEmitter.d.ts
  44. 105
    0
      types/auto/modules/detection/VADAudioAnalyser.d.ts
  45. 85
    0
      types/auto/modules/detection/VADNoiseDetection.d.ts
  46. 96
    0
      types/auto/modules/detection/VADReportingService.d.ts
  47. 70
    0
      types/auto/modules/detection/VADTalkMutedDetection.d.ts
  48. 91
    0
      types/auto/modules/e2ee/Context.d.ts
  49. 58
    0
      types/auto/modules/e2ee/E2EEContext.d.ts
  50. 42
    0
      types/auto/modules/e2ee/E2EEncryption.d.ts
  51. 19
    0
      types/auto/modules/e2ee/ExternallyManagedKeyHandler.d.ts
  52. 69
    0
      types/auto/modules/e2ee/KeyHandler.d.ts
  53. 73
    0
      types/auto/modules/e2ee/ManagedKeyHandler.d.ts
  54. 166
    0
      types/auto/modules/e2ee/OlmAdapter.d.ts
  55. 1
    0
      types/auto/modules/e2ee/Worker.d.ts
  56. 25
    0
      types/auto/modules/e2ee/crypto-utils.d.ts
  57. 4
    0
      types/auto/modules/e2ee/utils.d.ts
  58. 75
    0
      types/auto/modules/e2eping/e2eping.d.ts
  59. 26
    0
      types/auto/modules/event/Jvb121EventGenerator.d.ts
  60. 20
    0
      types/auto/modules/flags/FeatureFlags.d.ts
  61. 167
    0
      types/auto/modules/proxyconnection/ProxyConnectionPC.d.ts
  62. 141
    0
      types/auto/modules/proxyconnection/ProxyConnectionService.d.ts
  63. 8
    0
      types/auto/modules/proxyconnection/constants.d.ts
  64. 123
    0
      types/auto/modules/qualitycontrol/ReceiveVideoController.d.ts
  65. 53
    0
      types/auto/modules/qualitycontrol/SendVideoController.d.ts
  66. 165
    0
      types/auto/modules/recording/JibriSession.d.ts
  67. 112
    0
      types/auto/modules/recording/RecordingManager.d.ts
  68. 19
    0
      types/auto/modules/recording/recordingConstants.d.ts
  69. 77
    0
      types/auto/modules/recording/recordingXMLUtils.d.ts
  70. 91
    0
      types/auto/modules/sdp/LocalSdpMunger.d.ts
  71. 49
    0
      types/auto/modules/sdp/RtxModifier.d.ts
  72. 48
    0
      types/auto/modules/sdp/SDP.d.ts
  73. 25
    0
      types/auto/modules/sdp/SDPDiffer.d.ts
  74. 335
    0
      types/auto/modules/sdp/SDPUtil.d.ts
  75. 2
    0
      types/auto/modules/sdp/SampleSdpStrings.d.ts
  76. 50
    0
      types/auto/modules/sdp/SdpConsistency.d.ts
  77. 218
    0
      types/auto/modules/sdp/SdpTransformUtil.d.ts
  78. 18
    0
      types/auto/modules/settings/Settings.d.ts
  79. 155
    0
      types/auto/modules/statistics/AnalyticsAdapter.d.ts
  80. 46
    0
      types/auto/modules/statistics/AudioOutputProblemDetector.d.ts
  81. 386
    0
      types/auto/modules/statistics/AvgRTPStatsReporter.d.ts
  82. 249
    0
      types/auto/modules/statistics/CallStats.d.ts
  83. 43
    0
      types/auto/modules/statistics/LocalStatsCollector.d.ts
  84. 42
    0
      types/auto/modules/statistics/PerformanceObserverStats.d.ts
  85. 75
    0
      types/auto/modules/statistics/PrecallTest.d.ts
  86. 151
    0
      types/auto/modules/statistics/RTPStatsCollector.d.ts
  87. 114
    0
      types/auto/modules/statistics/SpeakerStats.d.ts
  88. 79
    0
      types/auto/modules/statistics/SpeakerStatsCollector.d.ts
  89. 7
    0
      types/auto/modules/statistics/constants.d.ts
  90. 353
    0
      types/auto/modules/statistics/statistics.d.ts
  91. 82
    0
      types/auto/modules/transcription/audioRecorder.d.ts
  92. 18
    0
      types/auto/modules/transcription/recordingResult.d.ts
  93. 16
    0
      types/auto/modules/transcription/trackRecorder.d.ts
  94. 79
    0
      types/auto/modules/transcription/transcriber.d.ts
  95. 3
    0
      types/auto/modules/transcription/transcriberHolder.d.ts
  96. 49
    0
      types/auto/modules/transcription/transcriptionServices/AbstractTranscriptionService.d.ts
  97. 7
    0
      types/auto/modules/transcription/transcriptionServices/SphinxTranscriptionService.d.ts
  98. 32
    0
      types/auto/modules/transcription/word.d.ts
  99. 38
    0
      types/auto/modules/util/AsyncQueue.d.ts
  100. 0
    0
      types/auto/modules/util/AuthUtil.d.ts

+ 1
- 0
.eslintignore View File

@@ -1,6 +1,7 @@
1 1
 # The build artifacts of the lib-jitsi-meet project.
2 2
 lib-jitsi-meet.*
3 3
 dist/
4
+types/
4 5
 
5 6
 # Third-party source code which we (1) do not want to modify or (2) try to
6 7
 # modify as little as possible.

+ 2
- 1
tsconfig.json View File

@@ -2,7 +2,8 @@
2 2
   "compilerOptions": {
3 3
     "target": "es6",
4 4
     "module": "es6",
5
-    "declaration": false,
5
+    "declaration": true,
6
+    "declarationDir": "types/auto/",
6 7
     "sourceMap": true,
7 8
     "allowJs": true,
8 9
     "skipLibCheck": true,

+ 1179
- 0
types/auto/JitsiConference.d.ts
File diff suppressed because it is too large
View File


+ 88
- 0
types/auto/JitsiConferenceErrors.d.ts View File

@@ -0,0 +1,88 @@
1
+/**
2
+ * The errors for the conference.
3
+ */
4
+/**
5
+ * Indicates that client must be authenticated to create the conference.
6
+ */
7
+export const AUTHENTICATION_REQUIRED: "conference.authenticationRequired";
8
+/**
9
+ * Indicates that chat error occurred.
10
+ */
11
+export const CHAT_ERROR: "conference.chatError";
12
+/**
13
+ * Indicates that a settings error occurred.
14
+ */
15
+export const SETTINGS_ERROR: "conference.settingsError";
16
+/**
17
+ * Indicates that conference has been destroyed.
18
+ */
19
+export const CONFERENCE_DESTROYED: "conference.destroyed";
20
+/**
21
+ * Indicates that max users limit has been reached.
22
+ */
23
+export const CONFERENCE_MAX_USERS: "conference.max_users";
24
+/**
25
+ * Indicates that a connection error occurred when trying to join a conference.
26
+ */
27
+export const CONNECTION_ERROR: "conference.connectionError";
28
+/**
29
+ * Indicates that the client has been forced to restart by jicofo when the
30
+ * conference was migrated from one bridge to another.
31
+ */
32
+export const CONFERENCE_RESTARTED: "conference.restarted";
33
+/**
34
+ * Indicates that a connection error is due to not allowed,
35
+ * occurred when trying to join a conference.
36
+ */
37
+export const NOT_ALLOWED_ERROR: "conference.connectionError.notAllowed";
38
+/**
39
+ * Indicates that a connection error is due to not allowed,
40
+ * occurred when trying to join a conference, only approved members are allowed to join.
41
+ */
42
+export const MEMBERS_ONLY_ERROR: "conference.connectionError.membersOnly";
43
+/**
44
+ * Indicates that a connection error is due to denied access to the room,
45
+ * occurred after joining a lobby room and access is denied by the room moderators.
46
+ */
47
+export const CONFERENCE_ACCESS_DENIED: "conference.connectionError.accessDenied";
48
+/**
49
+ * Indicates that focus error happened.
50
+ */
51
+export const FOCUS_DISCONNECTED: "conference.focusDisconnected";
52
+/**
53
+ * Indicates that focus left the conference.
54
+ */
55
+export const FOCUS_LEFT: "conference.focusLeft";
56
+/**
57
+ * Indicates that graceful shutdown happened.
58
+ */
59
+export const GRACEFUL_SHUTDOWN: "conference.gracefulShutdown";
60
+/**
61
+ * Indicates that the media connection has failed.
62
+ */
63
+export const ICE_FAILED: "conference.iceFailed";
64
+/**
65
+ * Indicates that the versions of the server side components are incompatible
66
+ * with the client side.
67
+ */
68
+export const INCOMPATIBLE_SERVER_VERSIONS: "conference.incompatible_server_versions";
69
+/**
70
+ * Indicates that offer/answer had failed.
71
+ */
72
+export const OFFER_ANSWER_FAILED: "conference.offerAnswerFailed";
73
+/**
74
+ * Indicates that password cannot be set for this conference.
75
+ */
76
+export const PASSWORD_NOT_SUPPORTED: "conference.passwordNotSupported";
77
+/**
78
+ * Indicates that a password is required in order to join the conference.
79
+ */
80
+export const PASSWORD_REQUIRED: "conference.passwordRequired";
81
+/**
82
+ * Indicates that reservation system returned error.
83
+ */
84
+export const RESERVATION_ERROR: "conference.reservationError";
85
+/**
86
+ * Indicates that there is no available videobridge.
87
+ */
88
+export const VIDEOBRIDGE_NOT_AVAILABLE: "conference.videobridgeNotAvailable";

+ 40
- 0
types/auto/JitsiConferenceEventManager.d.ts View File

@@ -0,0 +1,40 @@
1
+/**
2
+ * Setups all event listeners related to conference
3
+ * @param conference {JitsiConference} the conference
4
+ */
5
+export default function JitsiConferenceEventManager(conference: any): void;
6
+export default class JitsiConferenceEventManager {
7
+    /**
8
+     * Setups all event listeners related to conference
9
+     * @param conference {JitsiConference} the conference
10
+     */
11
+    constructor(conference: any);
12
+    conference: any;
13
+    xmppListeners: {};
14
+    /**
15
+     * Setups event listeners related to conference.chatRoom
16
+     */
17
+    setupChatRoomListeners(): void;
18
+    chatRoomForwarder: EventEmitterForwarder;
19
+    /**
20
+     * Setups event listeners related to conference.rtc
21
+     */
22
+    setupRTCListeners(): void;
23
+    /**
24
+     * Removes event listeners related to conference.xmpp
25
+     */
26
+    removeXMPPListeners(): void;
27
+    /**
28
+     * Setups event listeners related to conference.xmpp
29
+     */
30
+    setupXMPPListeners(): void;
31
+    /**
32
+     * Add XMPP listener and save its reference for remove on leave conference.
33
+     */
34
+    _addConferenceXMPPListener(eventName: any, listener: any): void;
35
+    /**
36
+     * Setups event listeners related to conference.statistics
37
+     */
38
+    setupStatisticsListeners(): void;
39
+}
40
+import EventEmitterForwarder from "./modules/util/EventEmitterForwarder";

+ 383
- 0
types/auto/JitsiConferenceEvents.d.ts View File

@@ -0,0 +1,383 @@
1
+/**
2
+ * The events for the conference.
3
+ */
4
+/**
5
+ * Event indicates that the current conference audio input switched between audio
6
+ * input states,i.e. with or without audio input.
7
+ */
8
+export const AUDIO_INPUT_STATE_CHANGE: "conference.audio_input_state_changed";
9
+/**
10
+ * Event indicates that the permission for unmuting audio has changed based on the number of audio senders in the call
11
+ * and the audio sender limit configured in Jicofo.
12
+ */
13
+export const AUDIO_UNMUTE_PERMISSIONS_CHANGED: "conference.audio_unmute_permissions_changed";
14
+/**
15
+ * Indicates that authentication status changed.
16
+ */
17
+export const AUTH_STATUS_CHANGED: "conference.auth_status_changed";
18
+/**
19
+ * Fired just before the statistics module is disposed and it's the last chance
20
+ * to submit some logs to the statistics service (ex. CallStats if enabled),
21
+ * before it's disconnected.
22
+ */
23
+export const BEFORE_STATISTICS_DISPOSED: "conference.beforeStatisticsDisposed";
24
+/**
25
+ * Indicates that an error occurred.
26
+ */
27
+export const CONFERENCE_ERROR: "conference.error";
28
+/**
29
+ * Indicates that conference failed.
30
+ */
31
+export const CONFERENCE_FAILED: "conference.failed";
32
+/**
33
+ * Indicates that conference is in progress of joining.
34
+ */
35
+export const CONFERENCE_JOIN_IN_PROGRESS: "conference.join_in_progress";
36
+/**
37
+ * Indicates that conference has been joined. The event does NOT provide any
38
+ * parameters to its listeners.
39
+ */
40
+export const CONFERENCE_JOINED: "conference.joined";
41
+/**
42
+ * Indicates that conference has been left.
43
+ */
44
+export const CONFERENCE_LEFT: "conference.left";
45
+/**
46
+ * Indicates that the conference unique identifier has been set.
47
+ */
48
+export const CONFERENCE_UNIQUE_ID_SET: "conference.unique_id_set";
49
+/**
50
+ * Indicates that the connection to the conference has been established
51
+ * XXX This is currently fired when the *ICE* connection enters 'connected'
52
+ * state for the first time.
53
+ */
54
+export const CONNECTION_ESTABLISHED: "conference.connectionEstablished";
55
+/**
56
+ * Indicates that the connection to the conference has been interrupted for some
57
+ * reason.
58
+ * XXX This is currently fired when the *ICE* connection is interrupted.
59
+ */
60
+export const CONNECTION_INTERRUPTED: "conference.connectionInterrupted";
61
+/**
62
+ * Indicates that the connection to the conference has been restored.
63
+ * XXX This is currently fired when the *ICE* connection is restored.
64
+ */
65
+export const CONNECTION_RESTORED: "conference.connectionRestored";
66
+/**
67
+ * A connection to the video bridge's data channel has been established.
68
+ */
69
+export const DATA_CHANNEL_OPENED: "conference.dataChannelOpened";
70
+/**
71
+ * A user has changed its display name
72
+ */
73
+export const DISPLAY_NAME_CHANGED: "conference.displayNameChanged";
74
+/**
75
+ * The dominant speaker was changed.
76
+ */
77
+export const DOMINANT_SPEAKER_CHANGED: "conference.dominantSpeaker";
78
+/**
79
+ * UTC conference timestamp when first participant joined.
80
+ */
81
+export const CONFERENCE_CREATED_TIMESTAMP: "conference.createdTimestamp";
82
+/**
83
+ * Indicates that DTMF support changed.
84
+ */
85
+export const DTMF_SUPPORT_CHANGED: "conference.dtmfSupportChanged";
86
+/**
87
+ * Indicates that a message from another participant is received on data
88
+ * channel.
89
+ */
90
+export const ENDPOINT_MESSAGE_RECEIVED: "conference.endpoint_message_received";
91
+/**
92
+ * Indicates that a message for the remote endpoint statistics has been received on the bridge channel.
93
+ */
94
+export const ENDPOINT_STATS_RECEIVED: "conference.endpoint_stats_received";
95
+/**
96
+ * NOTE This is lib-jitsi-meet internal event and can be removed at any time !
97
+ *
98
+ * Event emitted when conference transits, between one to one and multiparty JVB
99
+ * conference. If the conference switches to P2P it's neither one to one nor
100
+ * a multiparty JVB conference, but P2P (the status argument of this event will
101
+ * be <tt>false</tt>).
102
+ *
103
+ * The first argument is a boolean which carries the previous value and
104
+ * the second argument is a boolean with the new status. The event is emitted
105
+ * only if the previous and the new values are different.
106
+ *
107
+ * @type {string}
108
+ */
109
+export const JVB121_STATUS: string;
110
+/**
111
+ * You are kicked from the conference.
112
+ * @param {JitsiParticipant} the participant that initiated the kick.
113
+ */
114
+export const KICKED: "conference.kicked";
115
+/**
116
+ * Participant was kicked from the conference.
117
+ * @param {JitsiParticipant} the participant that initiated the kick.
118
+ * @param {JitsiParticipant} the participant that was kicked.
119
+ */
120
+export const PARTICIPANT_KICKED: "conference.participant_kicked";
121
+/**
122
+ * The Last N set is changed.
123
+ *
124
+ * @param {Array<string>|null} leavingEndpointIds the ids of all the endpoints
125
+ * which are leaving Last N
126
+ * @param {Array<string>|null} enteringEndpointIds the ids of all the endpoints
127
+ * which are entering Last N
128
+ */
129
+export const LAST_N_ENDPOINTS_CHANGED: "conference.lastNEndpointsChanged";
130
+/**
131
+ * Indicates that the room has been locked or unlocked.
132
+ */
133
+export const LOCK_STATE_CHANGED: "conference.lock_state_changed";
134
+/**
135
+ * Indicates that the region of the media server (jitsi-videobridge) that we
136
+ * are connected to changed (or was initially set).
137
+ * @type {string} the region.
138
+ */
139
+export const SERVER_REGION_CHANGED: string;
140
+/**
141
+ * An event(library-private) fired when a new media session is added to the conference.
142
+ * @type {string}
143
+ * @private
144
+ */
145
+export const _MEDIA_SESSION_STARTED: string;
146
+/**
147
+ * An event(library-private) fired when the conference switches the currently active media session.
148
+ * @type {string}
149
+ * @private
150
+ */
151
+export const _MEDIA_SESSION_ACTIVE_CHANGED: string;
152
+/**
153
+ * Indicates that the conference had changed to members only enabled/disabled.
154
+ * The first argument of this event is a <tt>boolean</tt> which when set to
155
+ * <tt>true</tt> means that the conference is running in members only mode.
156
+ * You may need to use Lobby if supported to ask for permissions to enter the conference.
157
+ */
158
+export const MEMBERS_ONLY_CHANGED: "conference.membersOnlyChanged";
159
+/**
160
+ * New text message was received.
161
+ */
162
+export const MESSAGE_RECEIVED: "conference.messageReceived";
163
+/**
164
+ * Event indicates that the current selected input device has no signal
165
+ */
166
+export const NO_AUDIO_INPUT: "conference.no_audio_input";
167
+/**
168
+ * Event indicates that the current microphone used by the conference is noisy.
169
+ */
170
+export const NOISY_MIC: "conference.noisy_mic";
171
+/**
172
+ * Indicates that a message from the local user or from the Prosody backend
173
+ * was received on the data channel.
174
+ */
175
+export const NON_PARTICIPANT_MESSAGE_RECEIVED: "conference.non_participant_message_received";
176
+/**
177
+ * New private text message was received.
178
+ */
179
+export const PRIVATE_MESSAGE_RECEIVED: "conference.privateMessageReceived";
180
+/**
181
+ * Event fired when JVB sends notification about interrupted/restored user's
182
+ * ICE connection status or we detect local problem with the video track.
183
+ * First argument is the ID of the participant and
184
+ * the second is a string indicating if the connection is currently
185
+ * - active - the connection is active
186
+ * - inactive - the connection is inactive, was intentionally interrupted by
187
+ * the bridge
188
+ * - interrupted - a network problem occurred
189
+ * - restoring - the connection was inactive and is restoring now
190
+ *
191
+ * The current status value can be obtained by calling
192
+ * JitsiParticipant.getConnectionStatus().
193
+ */
194
+export const PARTICIPANT_CONN_STATUS_CHANGED: "conference.participant_conn_status_changed";
195
+/**
196
+ * Indicates that the features of the participant has been changed.
197
+ */
198
+export const PARTCIPANT_FEATURES_CHANGED: "conference.partcipant_features_changed";
199
+/**
200
+ * Indicates that the value of a specific property of a specific participant
201
+ * has changed.
202
+ */
203
+export const PARTICIPANT_PROPERTY_CHANGED: "conference.participant_property_changed";
204
+/**
205
+ * Indicates that the conference has switched between JVB and P2P connections.
206
+ * The first argument of this event is a <tt>boolean</tt> which when set to
207
+ * <tt>true</tt> means that the conference is running on the P2P connection.
208
+ */
209
+export const P2P_STATUS: "conference.p2pStatus";
210
+/**
211
+ * Indicates that phone number changed.
212
+ */
213
+export const PHONE_NUMBER_CHANGED: "conference.phoneNumberChanged";
214
+/**
215
+ * The conference properties changed.
216
+ * @type {string}
217
+ */
218
+export const PROPERTIES_CHANGED: string;
219
+/**
220
+ * Indicates that recording state changed.
221
+ */
222
+export const RECORDER_STATE_CHANGED: "conference.recorderStateChanged";
223
+/**
224
+ * Indicates that video SIP GW state changed.
225
+ * @param {VideoSIPGWConstants} status.
226
+ */
227
+export const VIDEO_SIP_GW_AVAILABILITY_CHANGED: "conference.videoSIPGWAvailabilityChanged";
228
+/**
229
+ * Indicates that video SIP GW Session state changed.
230
+ * @param {options} event - {
231
+ *     {string} address,
232
+ *     {VideoSIPGWConstants} oldState,
233
+ *     {VideoSIPGWConstants} newState,
234
+ *     {string} displayName}
235
+ * }.
236
+ */
237
+export const VIDEO_SIP_GW_SESSION_STATE_CHANGED: "conference.videoSIPGWSessionStateChanged";
238
+/**
239
+ * Indicates that start muted settings changed.
240
+ */
241
+export const START_MUTED_POLICY_CHANGED: "conference.start_muted_policy_changed";
242
+/**
243
+ * Indicates that the local user has started muted.
244
+ */
245
+export const STARTED_MUTED: "conference.started_muted";
246
+/**
247
+ * Indicates that subject of the conference has changed.
248
+ */
249
+export const SUBJECT_CHANGED: "conference.subjectChanged";
250
+/**
251
+ * Indicates that a suspend event was detected.
252
+ */
253
+export const SUSPEND_DETECTED: "conference.suspendDetected";
254
+/**
255
+ * Event indicates that local user is talking while he muted himself
256
+ */
257
+export const TALK_WHILE_MUTED: "conference.talk_while_muted";
258
+/**
259
+ * A new media track was added to the conference. The event provides the
260
+ * following parameters to its listeners:
261
+ *
262
+ * @param {JitsiTrack} track the added JitsiTrack
263
+ */
264
+export const TRACK_ADDED: "conference.trackAdded";
265
+/**
266
+ * Audio levels of a media track ( attached to the conference) was changed.
267
+ */
268
+export const TRACK_AUDIO_LEVEL_CHANGED: "conference.audioLevelsChanged";
269
+/**
270
+ * A media track ( attached to the conference) mute status was changed.
271
+ * @param {JitsiParticipant|null} the participant that initiated the mute
272
+ * if it is a remote mute.
273
+ */
274
+export const TRACK_MUTE_CHANGED: "conference.trackMuteChanged";
275
+/**
276
+ * The media track was removed from the conference. The event provides the
277
+ * following parameters to its listeners:
278
+ *
279
+ * @param {JitsiTrack} track the removed JitsiTrack
280
+ */
281
+export const TRACK_REMOVED: "conference.trackRemoved";
282
+/**
283
+ * The source-add for unmuting of a media track was rejected by Jicofo.
284
+ *
285
+ */
286
+export const TRACK_UNMUTE_REJECTED: "conference.trackUnmuteRejected";
287
+/**
288
+ * Notifies for transcription status changes. The event provides the
289
+ * following parameters to its listeners:
290
+ *
291
+ * @param {String} status - The new status.
292
+ */
293
+export const TRANSCRIPTION_STATUS_CHANGED: "conference.transcriptionStatusChanged";
294
+/**
295
+ * A new user joined the conference.
296
+ */
297
+export const USER_JOINED: "conference.userJoined";
298
+/**
299
+ * A user has left the conference.
300
+ */
301
+export const USER_LEFT: "conference.userLeft";
302
+/**
303
+ * User role changed.
304
+ */
305
+export const USER_ROLE_CHANGED: "conference.roleChanged";
306
+/**
307
+ * User status changed.
308
+ */
309
+export const USER_STATUS_CHANGED: "conference.statusChanged";
310
+/**
311
+ * Event indicates that the permission for unmuting video has changed based on the number of video senders in the call
312
+ * and the video sender limit configured in Jicofo.
313
+ */
314
+export const VIDEO_UNMUTE_PERMISSIONS_CHANGED: "conference.video_unmute_permissions_changed";
315
+/**
316
+ * Event indicates that the bot participant type changed.
317
+ */
318
+export const BOT_TYPE_CHANGED: "conference.bot_type_changed";
319
+/**
320
+ * A new user joined the lobby room.
321
+ */
322
+export const LOBBY_USER_JOINED: "conference.lobby.userJoined";
323
+/**
324
+ * A user from the lobby room has been updated.
325
+ */
326
+export const LOBBY_USER_UPDATED: "conference.lobby.userUpdated";
327
+/**
328
+ * A user left the lobby room.
329
+ */
330
+export const LOBBY_USER_LEFT: "conference.lobby.userLeft";
331
+/**
332
+ * The local participant was approved to be able to unmute.
333
+ * @param {options} event - {
334
+ *     {MediaType} mediaType
335
+ * }.
336
+ */
337
+export const AV_MODERATION_APPROVED: "conference.av_moderation.approved";
338
+/**
339
+ * The local participant was blocked to be able to unmute.
340
+ * @param {options} event - {
341
+ *     {MediaType} mediaType
342
+ * }.
343
+ */
344
+export const AV_MODERATION_REJECTED: "conference.av_moderation.rejected";
345
+/**
346
+ * AV Moderation was enabled/disabled. The actor is the participant that is currently in the meeting,
347
+ * or undefined if that participant has left the meeting.
348
+ *
349
+ * @param {options} event - {
350
+ *     {boolean} enabled,
351
+ *     {MediaType} mediaType,
352
+ *     {JitsiParticipant} actor
353
+ * }.
354
+ */
355
+export const AV_MODERATION_CHANGED: "conference.av_moderation.changed";
356
+/**
357
+ * AV Moderation, report for user being approved to unmute.
358
+ * @param {options} event - {
359
+ *     {JitsiParticipant} participant,
360
+ *     {MediaType} mediaType
361
+ * }.
362
+ */
363
+export const AV_MODERATION_PARTICIPANT_APPROVED: "conference.av_moderation.participant.approved";
364
+/**
365
+ * AV Moderation, report for user being blocked to unmute.
366
+ * @param {options} event - {
367
+ *     {JitsiParticipant} participant,
368
+ *     {MediaType} mediaType
369
+ * }.
370
+ */
371
+export const AV_MODERATION_PARTICIPANT_REJECTED: "conference.av_moderation.participant.rejected";
372
+/**
373
+ * A new facial expression is added with its duration for a participant
374
+ */
375
+export const FACIAL_EXPRESSION_ADDED: "conference.facial_expression.added";
376
+/**
377
+ * Event fired when a participant is requested to join a given (breakout) room.
378
+ */
379
+export const BREAKOUT_ROOMS_MOVE_TO_ROOM: "conference.breakout-rooms.move-to-room";
380
+/**
381
+ * Event fired when the breakout rooms data was updated.
382
+ */
383
+export const BREAKOUT_ROOMS_UPDATED: "conference.breakout-rooms.updated";

+ 106
- 0
types/auto/JitsiConnection.d.ts View File

@@ -0,0 +1,106 @@
1
+/**
2
+ * Creates a new connection object for the Jitsi Meet server side video
3
+ * conferencing service. Provides access to the JitsiConference interface.
4
+ * @param appID identification for the provider of Jitsi Meet video conferencing
5
+ * services.
6
+ * @param token the JWT token used to authenticate with the server(optional)
7
+ * @param options Object with properties / settings related to connection with
8
+ * the server.
9
+ * @constructor
10
+ */
11
+export default function JitsiConnection(appID: any, token: any, options: any): void;
12
+export default class JitsiConnection {
13
+    /**
14
+     * Creates a new connection object for the Jitsi Meet server side video
15
+     * conferencing service. Provides access to the JitsiConference interface.
16
+     * @param appID identification for the provider of Jitsi Meet video conferencing
17
+     * services.
18
+     * @param token the JWT token used to authenticate with the server(optional)
19
+     * @param options Object with properties / settings related to connection with
20
+     * the server.
21
+     * @constructor
22
+     */
23
+    constructor(appID: any, token: any, options: any);
24
+    appID: any;
25
+    token: any;
26
+    options: any;
27
+    xmpp: XMPP;
28
+    /**
29
+     * Connect the client with the server.
30
+     * @param options {object} connecting options
31
+     * (for example authentications parameters).
32
+     */
33
+    connect(options?: object): void;
34
+    /**
35
+     * Attach to existing connection. Can be used for optimizations. For example:
36
+     * if the connection is created on the server we can attach to it and start
37
+     * using it.
38
+     *
39
+     * @param options {object} connecting options - rid, sid and jid.
40
+     */
41
+    attach(options: object): void;
42
+    /**
43
+     * Disconnect the client from the server.
44
+     * @returns {Promise} - Resolves when the disconnect process is finished or rejects with an error.
45
+     */
46
+    disconnect(...args: any[]): Promise<any>;
47
+    /**
48
+     * Returns the jid of the participant associated with the XMPP connection.
49
+     *
50
+     * @returns {string} The jid of the participant.
51
+     */
52
+    getJid(): string;
53
+    /**
54
+     * This method allows renewal of the tokens if they are expiring.
55
+     * @param token the new token.
56
+     */
57
+    setToken(token: any): void;
58
+    /**
59
+     * Creates and joins new conference.
60
+     * @param name the name of the conference; if null - a generated name will be
61
+     * provided from the api
62
+     * @param options Object with properties / settings related to the conference
63
+     * that will be created.
64
+     * @returns {JitsiConference} returns the new conference object.
65
+     */
66
+    initJitsiConference(name: any, options: any): JitsiConference;
67
+    /**
68
+     * Subscribes the passed listener to the event.
69
+     * @param event {JitsiConnectionEvents} the connection event.
70
+     * @param listener {Function} the function that will receive the event
71
+     */
72
+    addEventListener(event: typeof JitsiConnectionEvents, listener: Function): void;
73
+    /**
74
+     * Unsubscribes the passed handler.
75
+     * @param event {JitsiConnectionEvents} the connection event.
76
+     * @param listener {Function} the function that will receive the event
77
+     */
78
+    removeEventListener(event: typeof JitsiConnectionEvents, listener: Function): void;
79
+    /**
80
+     * Returns measured connectionTimes.
81
+     */
82
+    getConnectionTimes(): {};
83
+    /**
84
+     * Adds new feature to the list of supported features for the local
85
+     * participant.
86
+     * @param {String} feature the name of the feature.
87
+     * @param {boolean} submit if true - the new list of features will be
88
+     * immediately submitted to the others.
89
+     */
90
+    addFeature(feature: string, submit?: boolean): void;
91
+    /**
92
+     * Removes a feature from the list of supported features for the local
93
+     * participant
94
+     * @param {String} feature the name of the feature.
95
+     * @param {boolean} submit if true - the new list of features will be
96
+     * immediately submitted to the others.
97
+     */
98
+    removeFeature(feature: string, submit?: boolean): void;
99
+    /**
100
+     * Get object with internal logs.
101
+     */
102
+    getLogs(): any;
103
+}
104
+import XMPP from "./modules/xmpp/xmpp";
105
+import JitsiConference from "./JitsiConference";
106
+import * as JitsiConnectionEvents from "./JitsiConnectionEvents";

+ 28
- 0
types/auto/JitsiConnectionErrors.d.ts View File

@@ -0,0 +1,28 @@
1
/**
 * The errors for the connection.
 */
/**
 * Indicates that the connection was dropped with an error which was most likely
 * caused by some networking issues. The dropped term in this context means that
 * the connection was closed unexpectedly (not on user's request).
 *
 * One example is 'item-not-found' error thrown by Prosody when the BOSH session
 * times out after 60 seconds of inactivity. On the other hand 'item-not-found'
 * could also happen when a BOSH request is sent to the server with a session-id
 * that is not known to the server. But this should not happen in lib-jitsi-meet
 * case as long as the service is configured correctly (there is no bug).
 */
export const CONNECTION_DROPPED_ERROR: "connection.droppedError";
/**
 * Not specified errors.
 */
export const OTHER_ERROR: "connection.otherError";
/**
 * Indicates that a password is required in order to join the conference.
 */
export const PASSWORD_REQUIRED: "connection.passwordRequired";
/**
 * Indicates that the connection was dropped, because of too many 5xx HTTP
 * errors on BOSH requests.
 */
export const SERVER_ERROR: "connection.serverError";

+ 42
- 0
types/auto/JitsiConnectionEvents.d.ts View File

@@ -0,0 +1,42 @@
1
/**
 * The events for the connection.
 */
/**
 * Indicates that the connection has been disconnected. The event provides
 * the following parameters to its listeners:
 *
 * @param msg {string} a message associated with the disconnect such as the
 * last (known) error message
 */
export const CONNECTION_DISCONNECTED: "connection.connectionDisconnected";
/**
 * Indicates that the connection has been established. The event provides
 * the following parameters to its listeners:
 *
 * @param id {string} the ID of the local endpoint/participant/peer (within
 * the context of the established connection)
 */
export const CONNECTION_ESTABLISHED: "connection.connectionEstablished";
/**
 * Indicates that the connection has failed for some reason. The event
 * provides the following parameters to its listeners:
 *
 * @param errType {JitsiConnectionErrors} the type of error associated with
 * the failure
 * @param errReason {string} the error (message) associated with the failure
 * @param credentials {object} the credentials used to connect (if any)
 * @param errReasonDetails {object} an optional object with details about
 * the error, like shard moving, suspending. Used for analytics purposes.
 */
export const CONNECTION_FAILED: "connection.connectionFailed";
/**
 * Indicates that the performed action cannot be executed because the
 * connection is not in the correct state (connected, disconnected, etc.).
 */
export const WRONG_STATE: "connection.wrongState";
/**
 * Indicates that the display name is required over this connection and needs
 * to be supplied when joining the room.
 * There are cases like lobby room where display name is required.
 */
export const DISPLAY_NAME_REQUIRED: "connection.display_name_required";

+ 100
- 0
types/auto/JitsiMediaDevices.d.ts View File

@@ -0,0 +1,100 @@
1
/// <reference types="node" />
declare var _default: JitsiMediaDevices;
export default _default;
/**
 * Media devices utilities for Jitsi.
 */
declare class JitsiMediaDevices {
    _eventEmitter: EventEmitter;
    _permissions: {};
    _permissionsApiSupported: Promise<any>;
    /**
     * Parses a PermissionState object and returns true for granted and false otherwise.
     *
     * @param {PermissionState} permissionStatus - The PermissionState object retrieved from the Permissions API.
     * @returns {boolean} - True for granted and false for denied.
     * @throws {TypeError}
     */
    _parsePermissionState(permissionStatus?: PermissionState): boolean;
    /**
     * Updates the local granted/denied permissions cache. A permission might be
     * granted, denied, or undefined. This is represented by having its media
     * type key set to {@code true} or {@code false} respectively.
     *
     * @param {Object} permissions - Object with the permissions.
     */
    _handlePermissionsChange(permissions: any): void;
    /**
     * Gathers data and sends it to statistics.
     * @param deviceID the device id to log
     * @param devices list of devices
     */
    _logOutputDevice(deviceID: any, devices: any): void;
    /**
     * Executes callback with list of media devices connected.
     * @param {function} callback
     */
    enumerateDevices(callback: Function): void;
    /**
     * Checks if it's possible to enumerate available cameras/microphones.
     * @returns {Promise<boolean>} a Promise which will be resolved only once
     * the WebRTC stack is ready, either with true if the device listing is
     * available or with false otherwise.
     */
    isDeviceListAvailable(): Promise<boolean>;
    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] - type of device to change. Default is
     *      undefined or 'input', 'output' - for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    isDeviceChangeAvailable(deviceType?: string): boolean;
    /**
     * Checks if the permission for the given device was granted.
     *
     * @param {'audio'|'video'} [type] - type of devices to check,
     *      undefined stands for both 'audio' and 'video' together
     * @returns {Promise<boolean>}
     */
    isDevicePermissionGranted(type?: 'audio' | 'video'): Promise<boolean>;
    /**
     * Returns true if it is possible to be simultaneously capturing audio from more than one device.
     *
     * @returns {boolean}
     */
    isMultipleAudioInputSupported(): boolean;
    /**
     * Returns currently used audio output device id, 'default' stands
     * for default device.
     * @returns {string}
     */
    getAudioOutputDevice(): string;
    /**
     * Sets current audio output device.
     * @param {string} deviceId - id of 'audiooutput' device from
     *      navigator.mediaDevices.enumerateDevices(), 'default' is for
     *      default device
     * @returns {Promise} - resolves when audio output is changed, is rejected
     *      otherwise
     */
    setAudioOutputDevice(deviceId: string): Promise<any>;
    /**
     * Adds an event handler.
     * @param {string} event - event name
     * @param {function} handler - event handler
     */
    addEventListener(event: string, handler: Function): void;
    /**
     * Removes event handler.
     * @param {string} event - event name
     * @param {function} handler - event handler
     */
    removeEventListener(event: string, handler: Function): void;
    /**
     * Emits an event.
     * @param {string} event - event name
     * @param {...*} args - arguments forwarded to the listeners.
     */
    emitEvent(event: string, ...args: any[]): void;
}
import EventEmitter from "events";

+ 29
- 0
types/auto/JitsiMediaDevicesEvents.d.ts View File

@@ -0,0 +1,29 @@
1
/**
 * The events for the media devices.
 */
/**
 * Indicates that the list of available media devices has been changed. The
 * event provides the following parameters to its listeners:
 *
 * @param {MediaDeviceInfo[]} devices - array of MediaDeviceInfo or
 *  MediaDeviceInfo-like objects that are currently connected.
 *  @see https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo
 */
export const DEVICE_LIST_CHANGED: "mediaDevices.devicechange";
/**
 * Event emitted when the user granted/blocked a permission for the camera / mic.
 * Used to keep track of the granted permissions on browsers which don't
 * support the Permissions API.
 */
export const PERMISSIONS_CHANGED: "rtc.permissions_changed";
/**
 * Indicates that the environment is currently showing permission prompt to
 * access camera and/or microphone. The event provides the following
 * parameters to its listeners:
 *
 * @param {'chrome'|'opera'|'firefox'|'safari'|'nwjs'
 *  |'react-native'|'android'} environmentType - type of browser or
 *  other execution environment.
 */
export const PERMISSION_PROMPT_IS_SHOWN: "mediaDevices.permissionPromptIsShown";
/**
 * NOTE(review): undocumented in the generated output; the name suggests it is
 * emitted when getUserMedia takes unusually long to resolve — confirm against
 * the emitting code before relying on this.
 */
export const SLOW_GET_USER_MEDIA: "mediaDevices.slowGetUserMedia";

+ 2
- 0
types/auto/JitsiMeetJS.d.ts View File

@@ -0,0 +1,2 @@
1
// Auto-generated placeholder: the JitsiMeetJS entry point is currently typed
// as `any`; the real shape must be looked up in the implementation.
declare var _default: any;
export default _default;

+ 201
- 0
types/auto/JitsiParticipant.d.ts View File

@@ -0,0 +1,201 @@
1
+/**
2
+ * Represents a participant in (i.e. a member of) a conference.
3
+ */
4
+export default class JitsiParticipant {
5
+    /**
6
+     * Initializes a new JitsiParticipant instance.
7
+     *
8
+     * @constructor
9
+     * @param jid the conference XMPP jid
10
+     * @param conference
11
+     * @param displayName
12
+     * @param {Boolean} hidden - True if the new JitsiParticipant instance is to
13
+     * represent a hidden participant; otherwise, false.
14
+     * @param {string} statsID - optional participant statsID
15
+     * @param {string} status - the initial status if any.
16
+     * @param {object} identity - the xmpp identity
17
+     * @param {boolean?} isReplacing - whether this is a participant replacing another into the meeting.
18
+     * @param {boolean?} isReplaced - whether this is a participant to be kicked and replaced into the meeting.
19
+     */
20
+    constructor(jid: any, conference: any, displayName: any, hidden: boolean, statsID: string, status: string, identity: object, isReplacing: boolean | null, isReplaced: boolean | null);
21
+    _jid: any;
22
+    _id: any;
23
+    _conference: any;
24
+    _displayName: any;
25
+    _supportsDTMF: boolean;
26
+    _tracks: any[];
27
+    _role: string;
28
+    _status: string;
29
+    _hidden: boolean;
30
+    _statsID: string;
31
+    _connectionStatus: string;
32
+    _properties: {};
33
+    _identity: any;
34
+    _isReplacing: boolean;
35
+    _isReplaced: boolean;
36
+    _features: Set<any>;
37
+    /**
38
+     * @returns {JitsiConference} The conference that this participant belongs
39
+     * to.
40
+     */
41
+    getConference(): any;
42
+    /**
43
+     * Gets the value of a property of this participant.
44
+     */
45
+    getProperty(name: any): any;
46
+    /**
47
+     * Checks whether this <tt>JitsiParticipant</tt> has any video tracks which
48
+     * are muted according to their underlying WebRTC <tt>MediaStreamTrack</tt>
49
+     * muted status.
50
+     * @return {boolean} <tt>true</tt> if this <tt>participant</tt> contains any
51
+     * video <tt>JitsiTrack</tt>s which are muted as defined in
52
+     * {@link JitsiTrack.isWebRTCTrackMuted}.
53
+     */
54
+    hasAnyVideoTrackWebRTCMuted(): boolean;
55
+    /**
56
+     * Updates participant's connection status.
57
+     * @param {string} state the current participant connection state.
58
+     * {@link ParticipantConnectionStatus}.
59
+     * @private
60
+     */
61
+    private _setConnectionStatus;
62
+    /**
63
+     * Return participant's connectivity status.
64
+     *
65
+     * @returns {string} the connection status
66
+     * <tt>ParticipantConnectionStatus</tt> of the user.
67
+     * {@link ParticipantConnectionStatus}.
68
+     */
69
+    getConnectionStatus(): string;
70
+    /**
71
+     * Sets the value of a property of this participant, and fires an event if
72
+     * the value has changed.
73
+     * @name the name of the property.
74
+     * @value the value to set.
75
+     */
76
+    setProperty(name: any, value: any): void;
77
+    /**
78
+     * @returns {Array.<JitsiTrack>} The list of media tracks for this
79
+     * participant.
80
+     */
81
+    getTracks(): Array<any>;
82
+    /**
83
+     * @param {MediaType} mediaType
84
+     * @returns {Array.<JitsiTrack>} an array of media tracks for this
85
+     * participant, for given media type.
86
+     */
87
+    getTracksByMediaType(mediaType: typeof MediaType): Array<any>;
88
+    /**
89
+     * @returns {String} The ID of this participant.
90
+     */
91
+    getId(): string;
92
+    /**
93
+     * @returns {String} The JID of this participant.
94
+     */
95
+    getJid(): string;
96
+    /**
97
+     * @returns {String} The human-readable display name of this participant.
98
+     */
99
+    getDisplayName(): string;
100
+    /**
101
+     * @returns {String} The stats ID of this participant.
102
+     */
103
+    getStatsID(): string;
104
+    /**
105
+     * @returns {String} The status of the participant.
106
+     */
107
+    getStatus(): string;
108
+    /**
109
+     * @returns {Boolean} Whether this participant is a moderator or not.
110
+     */
111
+    isModerator(): boolean;
112
+    /**
113
+     * @returns {Boolean} Whether this participant is a hidden participant. Some
114
+     * special system participants may want to join hidden (like for example the
115
+     * recorder).
116
+     */
117
+    isHidden(): boolean;
118
+    /**
119
+     * @returns {Boolean} Wheter this participants replaces another participant
120
+     * from the meeting.
121
+     */
122
+    isReplacing(): boolean;
123
+    /**
124
+     * @returns {Boolean} Wheter this participants will be replaced by another
125
+     * participant in the meeting.
126
+     */
127
+    isReplaced(): boolean;
128
+    /**
129
+     * @returns {Boolean} Whether this participant has muted their audio.
130
+     */
131
+    isAudioMuted(): boolean;
132
+    /**
133
+     * Determines whether all JitsiTracks which are of a specific MediaType and
134
+     * which belong to this JitsiParticipant are muted.
135
+     *
136
+     * @param {MediaType} mediaType - The MediaType of the JitsiTracks to be
137
+     * checked.
138
+     * @private
139
+     * @returns {Boolean} True if all JitsiTracks which are of the specified
140
+     * mediaType and which belong to this JitsiParticipant are muted; otherwise,
141
+     * false.
142
+     */
143
+    private _isMediaTypeMuted;
144
+    /**
145
+     * @returns {Boolean} Whether this participant has muted their video.
146
+     */
147
+    isVideoMuted(): boolean;
148
+    /**
149
+     * @returns {String} The role of this participant.
150
+     */
151
+    getRole(): string;
152
+    /**
153
+     * Sets a new participant role.
154
+     * @param {String} newRole - the new role.
155
+     */
156
+    setRole(newRole: string): void;
157
+    /**
158
+     * Sets whether participant is replacing another based on jwt.
159
+     * @param {String} newIsReplacing - whether is replacing.
160
+     */
161
+    setIsReplacing(newIsReplacing: string): void;
162
+    /**
163
+     * Sets whether participant is being replaced by another based on jwt.
164
+     * @param {boolean} newIsReplaced - whether is being replaced.
165
+     */
166
+    setIsReplaced(newIsReplaced: boolean): void;
167
+    /**
168
+     *
169
+     */
170
+    supportsDTMF(): boolean;
171
+    /**
172
+     * Returns a set with the features for the participant.
173
+     * @returns {Promise<Set<String>, Error>}
174
+     */
175
+    getFeatures(): Promise<Set<string>, Error>;
176
+    /**
177
+     * Checks current set features.
178
+     * @param {String} feature - the feature to check.
179
+     * @return {boolean} <tt>true</tt> if this <tt>participant</tt> contains the
180
+     * <tt>feature</tt>.
181
+     */
182
+    hasFeature(feature: string): boolean;
183
+    /**
184
+     * Set new features.
185
+     * @param {Set<String>|undefined} newFeatures - Sets new features.
186
+     */
187
+    setFeatures(newFeatures: Set<string> | undefined): void;
188
+    /**
189
+     * Returns the bot type for the participant.
190
+     *
191
+     * @returns {string|undefined} - The bot type of the participant.
192
+     */
193
+    getBotType(): string | undefined;
194
+    /**
195
+     * Sets the bot type for the participant.
196
+     * @param {String} newBotType - The new bot type to set.
197
+     */
198
+    setBotType(newBotType: string): void;
199
+    _botType: string;
200
+}
201
+import * as MediaType from "./service/RTC/MediaType";

+ 0
- 0
types/auto/JitsiParticipantEvents.d.ts View File


+ 53
- 0
types/auto/JitsiTrackError.d.ts View File

@@ -0,0 +1,53 @@
1
export default JitsiTrackError;
/**
 *
 * Represents an error that occurred to a JitsiTrack. Can represent various
 * types of errors. For error descriptions (@see JitsiTrackErrors).
 *
 * @extends Error
 *
 *
 * @constructor
 * @param {Object|string} error - error object or error name
 * @param {Object|string} (options) - getUserMedia constraints object or
 * error message
 * @param {('audio'|'video'|'desktop'|'screen'|'audiooutput')[]} (devices) -
 * list of getUserMedia requested devices
 */
declare function JitsiTrackError(error: any | string, options: any, devices: any): void;
// NOTE(review): the paired function + class declarations below mirror tsc's
// generated output for an ES5-style constructor function; TypeScript rejects
// this duplicate identifier in some configurations — confirm against the
// declaration generator before hand-editing.
declare class JitsiTrackError {
    /**
     *
     * Represents an error that occurred to a JitsiTrack. Can represent various
     * types of errors. For error descriptions (@see JitsiTrackErrors).
     *
     * @extends Error
     *
     *
     * @constructor
     * @param {Object|string} error - error object or error name
     * @param {Object|string} (options) - getUserMedia constraints object or
     * error message
     * @param {('audio'|'video'|'desktop'|'screen'|'audiooutput')[]} (devices) -
     * list of getUserMedia requested devices
     */
    constructor(error: any | string, options: any, devices: any);
    /**
     * Additional information about original getUserMedia error
     * and constraints.
     * @type {{
     *     error: Object,
     *     constraints: Object,
     *     devices: Array.<'audio'|'video'|'desktop'|'screen'>
     * }}
     */
    gum: {
        error: any;
        constraints: any;
        devices: Array<'audio' | 'video' | 'desktop' | 'screen'>;
    };
    name: string;
    message: any;
    stack: any;
    constructor: typeof JitsiTrackError;
}

+ 58
- 0
types/auto/JitsiTrackErrors.d.ts View File

@@ -0,0 +1,58 @@
1
/**
 * The errors for the JitsiTrack objects.
 */
/**
 * An error which indicates that some of the requested constraints in the
 * getUserMedia call were not satisfied.
 */
export const CONSTRAINT_FAILED: "gum.constraint_failed";
/**
 * A generic error which indicates an error occurred while selecting
 * a DesktopCapturerSource from the electron app.
 */
export const ELECTRON_DESKTOP_PICKER_ERROR: "gum.electron_desktop_picker_error";
/**
 * An error which indicates a custom desktop picker could not be detected
 * for the electron app.
 */
export const ELECTRON_DESKTOP_PICKER_NOT_FOUND: "gum.electron_desktop_picker_not_found";
/**
 * Generic getUserMedia error.
 */
export const GENERAL: "gum.general";
/**
 * An error which indicates that the requested device was not found.
 */
export const NOT_FOUND: "gum.not_found";
/**
 * An error which indicates that the user denied permission to share the
 * requested device.
 */
export const PERMISSION_DENIED: "gum.permission_denied";
/**
 * Generic error for screensharing failure.
 */
export const SCREENSHARING_GENERIC_ERROR: "gum.screensharing_generic_error";
/**
 * An error which indicates that the user canceled the screen sharing window
 * selection dialog.
 */
export const SCREENSHARING_USER_CANCELED: "gum.screensharing_user_canceled";
/**
 * Indicates that the timeout passed to the obtainAudioAndVideoPermissions has expired without GUM resolving.
 */
export const TIMEOUT: "gum.timeout";
/**
 * An error which indicates that the track has already been disposed and can
 * no longer be used.
 */
export const TRACK_IS_DISPOSED: "track.track_is_disposed";
/**
 * An error which indicates that the track has no MediaStream associated.
 */
export const TRACK_NO_STREAM_FOUND: "track.no_stream_found";
/**
 * An error which indicates that the requested video resolution is not supported
 * by the webcam.
 */
export const UNSUPPORTED_RESOLUTION: "gum.unsupported_resolution";

+ 39
- 0
types/auto/JitsiTrackEvents.d.ts View File

@@ -0,0 +1,39 @@
1
/**
 * The media track was removed from the conference.
 */
export const LOCAL_TRACK_STOPPED: "track.stopped";
/**
 * The audio level of this track changed.
 * The first argument is a number with the audio level value in range [0, 1].
 * The second argument is a <tt>TraceablePeerConnection</tt> which is the peer
 * connection which measured the audio level (one audio track can be added
 * to multiple peer connections at the same time). This argument is optional for
 * local tracks for which we can measure the audio level without the peer
 * connection (the value will be <tt>undefined</tt>).
 *
 * NOTE The second argument should be treated as library internal and can be
 * removed at any time.
 */
export const TRACK_AUDIO_LEVEL_CHANGED: "track.audioLevelsChanged";
/**
 * The audio output of the track was changed.
 */
export const TRACK_AUDIO_OUTPUT_CHANGED: "track.audioOutputChanged";
/**
 * A media track mute status was changed.
 */
export const TRACK_MUTE_CHANGED: "track.trackMuteChanged";
/**
 * The video type ("camera" or "desktop") of the track was changed.
 */
export const TRACK_VIDEOTYPE_CHANGED: "track.videoTypeChanged";
/**
 * Indicates that the track is not receiving any data even though we expect it
 * to receive data (i.e. the stream is not stopped).
 */
export const NO_DATA_FROM_SOURCE: "track.no_data_from_source";
/**
 * Indicates that the local audio track is not receiving any audio input from
 * the microphone that is currently selected.
 */
export const NO_AUDIO_INPUT: "track.no_audio_input";

+ 12
- 0
types/auto/JitsiTranscriptionStatus.d.ts View File

@@ -0,0 +1,12 @@
1
/**
 * The transcription is on.
 *
 * @type {String}
 */
export const ON: string;
/**
 * The transcription is off.
 *
 * @type {String}
 */
export const OFF: string;

+ 81
- 0
types/auto/authenticateAndUpgradeRole.d.ts View File

@@ -0,0 +1,81 @@
1
+/**
2
+ * @typedef {Object} UpgradeRoleError
3
+ *
4
+ * @property {JitsiConnectionErrors} [connectionError] - One of
5
+ * {@link JitsiConnectionErrors} which occurred when trying to connect to the
6
+ * XMPP server.
7
+ * @property {String} [authenticationError] - One of XMPP error conditions
8
+ * returned by Jicofo on authentication attempt. See
9
+ * {@link https://xmpp.org/rfcs/rfc3920.html#streams-error}.
10
+ * @property {String} [message] - More details about the error.
11
+ * @property {Object} [credentials] - The credentials that failed the
12
+ * authentication.
13
+ * @property {String} [credentials.jid] - The XMPP ID part of the credentials
14
+ * that failed the authentication.
15
+ * @property {string} [credentials.password] - The password part of the
16
+ * credentials that failed the authentication.
17
+ *
18
+ * NOTE If neither one of the errors is present, then the operation has been
19
+ * canceled.
20
+ */
21
+/**
22
+ * Connects to the XMPP server using the specified credentials and contacts
23
+ * Jicofo in order to obtain a session ID (which is then stored in the local
24
+ * storage). The user's role of the parent conference will be upgraded to
25
+ * moderator (by Jicofo). It's also used to join the conference when starting
26
+ * from anonymous domain and only authenticated users are allowed to create new
27
+ * rooms.
28
+ *
29
+ * @param {Object} options
30
+ * @param {string} options.id - XMPP user's ID to log in. For example,
31
+ * user@xmpp-server.com.
32
+ * @param {string} options.password - XMPP user's password to log in with.
33
+ * @param {string} [options.roomPassword] - The password to join the MUC with.
34
+ * @param {Function} [options.onLoginSuccessful] - Callback called when logging
35
+ * into the XMPP server was successful. The next step will be to obtain a new
36
+ * session ID from Jicofo and join the MUC using it which will effectively
37
+ * upgrade the user's role to moderator.
38
+ * @returns {Object} A <tt>thenable</tt> which (1) settles when the process of
39
+ * authenticating and upgrading the role of the specified XMPP user finishes and
40
+ * (2) has a <tt>cancel</tt> method that allows the caller to interrupt the
41
+ * process. If the process finishes successfully, the session ID has been stored
42
+ * in the settings and the <tt>thenable</tt> is resolved. If the process
43
+ * finishes with failure, the <tt>thenable</tt> is rejected with reason of type
44
+ * {@link UpgradeRoleError} which will have either <tt>connectionError</tt> or
45
+ * <tt>authenticationError</tt> property set depending on which of the steps has
46
+ * failed. If <tt>cancel</tt> is called before the process finishes, then the
47
+ * thenable will be rejected with an empty object (i.e. no error property will
48
+ * be set on the rejection reason).
49
+ */
50
+export default function authenticateAndUpgradeRole({ id, password, onCreateResource, onLoginSuccessful, roomPassword }: {
51
+    id: string;
52
+    password: string;
53
+    roomPassword?: string;
54
+    onLoginSuccessful?: Function;
55
+}): any;
56
+export type UpgradeRoleError = {
57
+    /**
58
+     * - One of
59
+     * {@link JitsiConnectionErrors } which occurred when trying to connect to the
60
+     * XMPP server.
61
+     */
62
+    connectionError?: any;
63
+    /**
64
+     * - One of XMPP error conditions
65
+     * returned by Jicofo on authentication attempt. See
66
+     * {@link https ://xmpp.org/rfcs/rfc3920.html#streams-error}.
67
+     */
68
+    authenticationError?: string;
69
+    /**
70
+     * - More details about the error.
71
+     */
72
+    message?: string;
73
+    /**
74
+     * - The credentials that failed the
75
+     * authentication.
76
+     */
77
+    credentials?: {
78
+        jid?: string;
79
+        password?: string;
80
+    };
81
+};

+ 23
- 0
types/auto/connection_optimization/external_connect.d.ts View File

@@ -0,0 +1,23 @@
1
+/**
2
+ * Requests the given webservice that will create the connection and will return
3
+ * the necessary details(rid, sid and jid) to attach to this connection and
4
+ * start using it. This script can be used for optimizing the connection startup
5
+ * time. The function will send AJAX request to a webservice that should
6
+ * create the bosh session much faster than the client because the webservice
7
+ * can be started on the same machine as the XMPP serever.
8
+ *
9
+ * NOTE: It's vert important to execute this function as early as you can for
10
+ * optimal results.
11
+ *
12
+ * @param webserviceUrl the url for the web service that is going to create the
13
+ * connection.
14
+ * @param successCallback callback function called with the result of the AJAX
15
+ * request if the request was successfull. The callback will receive one
16
+ * parameter which will be JS Object with properties - rid, sid and jid. This
17
+ * result should be passed to JitsiConnection.attach method in order to use that
18
+ * connection.
19
+ * @param error_callback callback function called the AJAX request fail. This
20
+ * callback is going to receive one parameter which is going to be JS error
21
+ * object with a reason for failure in it.
22
+ */
23
+declare function createConnectionExternally(webserviceUrl: any, successCallback: any, error_callback: any): void;

+ 2
- 0
types/auto/index.d.ts View File

@@ -0,0 +1,2 @@
1
// Auto-generated entry point: the aggregate export is currently typed as
// `any`; consumers get no type information until this is hand-crafted.
declare const _exports: any;
export = _exports;

+ 133
- 0
types/auto/modules/RTC/BridgeChannel.d.ts View File

@@ -0,0 +1,133 @@
1
+/**
2
+ * Handles a WebRTC RTCPeerConnection or a WebSocket instance to communicate
3
+ * with the videobridge.
4
+ */
5
+export default class BridgeChannel {
6
+    /**
7
+     * Binds "ondatachannel" event listener on the given RTCPeerConnection
8
+     * instance, or creates a WebSocket connection with the videobridge.
9
+     * At least one of both, peerconnection or wsUrl parameters, must be
10
+     * given.
11
+     * @param {RTCPeerConnection} [peerconnection] WebRTC peer connection
12
+     * instance.
13
+     * @param {string} [wsUrl] WebSocket URL.
14
+     * @param {EventEmitter} emitter the EventEmitter instance to use for event emission.
15
+     */
16
+    constructor(peerconnection?: RTCPeerConnection, wsUrl?: string, emitter: any);
17
+    _channel: any;
18
+    _eventEmitter: any;
19
+    _mode: string;
20
+    _areRetriesEnabled: boolean;
21
+    _closedFromClient: boolean;
22
+    _wsUrl: string;
23
+    /**
24
+     * Initializes the web socket channel.
25
+     *
26
+     * @returns {void}
27
+     */
28
+    _initWebSocket(): void;
29
+    /**
30
+     * Starts the websocket connection retries.
31
+     *
32
+     * @returns {void}
33
+     */
34
+    _startConnectionRetries(): void;
35
+    _retryTimeout: NodeJS.Timeout;
36
+    /**
37
+     * Stops the websocket connection retries.
38
+     *
39
+     * @returns {void}
40
+     */
41
+    _stopConnectionRetries(): void;
42
+    /**
43
+     * Retries to establish the websocket connection after the connection was closed by the server.
44
+     *
45
+     * @param {CloseEvent} closeEvent - The close event that triggered the retries.
46
+     * @returns {void}
47
+     */
48
+    _retryWebSocketConnection(closeEvent: CloseEvent): void;
49
+    /**
50
+     * The channel mode.
51
+     * @return {string} "datachannel" or "websocket" (or null if not yet set).
52
+     */
53
+    get mode(): string;
54
+    /**
55
+     * Closes the currently opened channel.
56
+     */
57
+    close(): void;
58
+    /**
59
+     * Whether there is an underlying RTCDataChannel or WebSocket and it's
60
+     * open.
61
+     * @return {boolean}
62
+     */
63
+    isOpen(): boolean;
64
+    /**
65
+     * Sends local stats via the bridge channel.
66
+     * @param {Object} payload The payload of the message.
67
+     * @throws NetworkError/InvalidStateError/Error if the operation fails or if there is no data channel created.
68
+     */
69
+    sendEndpointStatsMessage(payload: any): void;
70
+    /**
71
+     * Sends message via the channel.
72
+     * @param {string} to The id of the endpoint that should receive the
73
+     * message. If "" the message will be sent to all participants.
74
+     * @param  {object} payload The payload of the message.
75
+     * @throws NetworkError or InvalidStateError from RTCDataChannel#send (@see
76
+     * {@link https://developer.mozilla.org/docs/Web/API/RTCDataChannel/send})
77
+     * or from WebSocket#send or Error with "No opened channel" message.
78
+     */
79
+    sendMessage(to: string, payload: object): void;
80
+    /**
81
+     * Sends a "lastN value changed" message via the channel.
82
+     * @param {number} value The new value for lastN. -1 means unlimited.
83
+     */
84
+    sendSetLastNMessage(value: number): void;
85
+    /**
86
+     * Sends a "selected endpoints changed" message via the channel.
87
+     *
88
+     * @param {Array<string>} endpointIds - The ids of the selected endpoints.
89
+     * @throws NetworkError or InvalidStateError from RTCDataChannel#send (@see
90
+     * {@link https://developer.mozilla.org/docs/Web/API/RTCDataChannel/send})
91
+     * or from WebSocket#send or Error with "No opened channel" message.
92
+     */
93
+    sendSelectedEndpointsMessage(endpointIds: Array<string>): void;
94
+    /**
95
+     * Sends a "receiver video constraint" message via the channel.
96
+     * @param {Number} maxFrameHeightPixels the maximum frame height,
97
+     * in pixels, this receiver is willing to receive
98
+     */
99
+    sendReceiverVideoConstraintMessage(maxFrameHeightPixels: number): void;
100
+    /**
101
+     * Sends a 'ReceiverVideoConstraints' message via the bridge channel.
102
+     *
103
+     * @param {ReceiverVideoConstraints} constraints video constraints.
104
+     */
105
+    sendNewReceiverVideoConstraintsMessage(constraints: any): void;
106
+    /**
107
+     * Sends a 'VideoTypeMessage' message via the bridge channel.
108
+     *
109
+     * @param {string} videoType 'camera', 'desktop' or 'none'.
110
+     * @deprecated to be replaced with sendSourceVideoTypeMessage
111
+     */
112
+    sendVideoTypeMessage(videoType: string): void;
113
+    /**
114
+     * Sends a 'VideoTypeMessage' message via the bridge channel.
115
+     *
116
+     * @param {BridgeVideoType} videoType - the video type.
117
+     * @param {SourceName} sourceName - the source name of the video track.
118
+     * @returns {void}
119
+     */
120
+    sendSourceVideoTypeMessage(sourceName: any, videoType: any): void;
121
+    /**
122
+     * Set events on the given RTCDataChannel or WebSocket instance.
123
+     */
124
+    _handleChannel(channel: any): void;
125
+    /**
126
+     * Sends passed object via the channel.
127
+     * @param {object} jsonObject The object that will be sent.
128
+     * @throws NetworkError or InvalidStateError from RTCDataChannel#send (@see
129
+     * {@link https://developer.mozilla.org/docs/Web/API/RTCDataChannel/send})
130
+     * or from WebSocket#send or Error with "No opened channel" message.
131
+     */
132
+    _send(jsonObject: object): void;
133
+}

+ 106
- 0
types/auto/modules/RTC/CodecSelection.d.ts View File

@@ -0,0 +1,106 @@
1
+/**
2
+ * This class handles the codec selection mechanism for the conference based on the config.js settings.
3
+ * The preferred codec is selected based on the settings and the list of codecs supported by the browser.
4
+ * The preferred codec is published in presence which is then used by the other endpoints in the
5
+ * conference to pick a supported codec at join time and when the call transitions between p2p and jvb
6
+ * connections.
7
+ */
8
+export class CodecSelection {
9
+    /**
10
+     * Creates a new instance for a given conference.
11
+     *
12
+     * @param {JitsiConference} conference the conference instance
13
+     * @param {*} options
14
+     * @param {string} options.disabledCodec the codec that needs to be disabled.
15
+     * @param {boolean} options.enforcePreferredCodec whether codec preference has to be
16
+     * enforced even when an endpoint that doesn't support the preferred codec joins the call.
17
+     * Falling back to the standard codec will be skipped when this option is true, endpoints
18
+     * that do not support the preferred codec may not be able to encode/decode video when this happens.
19
+     * @param {string} options.jvbCodec the codec that is preferred on jvb connection.
20
+     * @param {string} options.p2pCodec the codec that is preferred on p2p connection.
21
+     */
22
+    constructor(conference: any, options: any);
23
+    conference: any;
24
+    options: any;
25
+    disabledCodec: {
26
+        H264: string;
27
+        OPUS: string;
28
+        ULPFEC: string;
29
+        VP8: string;
30
+        VP9: string;
31
+    };
32
+    jvbPreferredCodec: string | {
33
+        H264: string;
34
+        OPUS: string;
35
+        ULPFEC: string;
36
+        VP8: string;
37
+        VP9: string;
38
+    };
39
+    p2pPreferredCodec: string | {
40
+        H264: string;
41
+        OPUS: string;
42
+        ULPFEC: string;
43
+        VP8: string;
44
+        VP9: string;
45
+    };
46
+    /**
47
+     * Checks if a given string is a valid video codec mime type.
48
+     *
49
+     * @param {string} codec the codec string that needs to be validated.
50
+     * @returns {CodecMimeType|null} mime type if valid, null otherwise.
51
+     * @private
52
+     */
53
+    private _getCodecMimeType;
54
+    /**
55
+     * Checks if the given codec is supported by the browser.
56
+     *
57
+     * @param {CodecMimeType} preferredCodec codec to be checked.
58
+     * @returns {boolean} true if the given codec is supported, false otherwise.
59
+     * @private
60
+     */
61
+    private _isCodecSupported;
62
+    /**
63
+     * Handles the {@link JitsiConferenceEvents._MEDIA_SESSION_STARTED} event. Codecs need to be
64
+     * configured on the media session that is newly created.
65
+     *
66
+     * @param {JingleSessionPC} mediaSession media session that started.
67
+     * @returns {void}
68
+     * @private
69
+     */
70
+    private _onMediaSessionStarted;
71
+    /**
72
+     * Sets the codec on the media session based on the preferred codec setting and the supported codecs
73
+     * published by the remote participants in their presence.
74
+     *
75
+     * @param {JingleSessionPC} mediaSession session for which the codec selection has to be made.
76
+     * @param {CodecMimeType} preferredCodec preferred codec.
77
+     * @param {CodecMimeType} disabledCodec codec that needs to be disabled.
78
+     */
79
+    _selectPreferredCodec(mediaSession?: any, preferredCodec?: {
80
+        H264: string;
81
+        OPUS: string;
82
+        ULPFEC: string;
83
+        VP8: string;
84
+        VP9: string;
85
+    }, disabledCodec?: {
86
+        H264: string;
87
+        OPUS: string;
88
+        ULPFEC: string;
89
+        VP8: string;
90
+        VP9: string;
91
+    }): void;
92
+    /**
93
+     * Returns the preferred codec for the conference. The preferred codec for the JVB media session
94
+     * is the one that gets published in presence and a comparison is made whenever a participant joins
95
+     * or leaves the call.
96
+     *
97
+     * @returns {CodecMimeType} preferred codec.
98
+     */
99
+    getPreferredCodec(): {
100
+        H264: string;
101
+        OPUS: string;
102
+        ULPFEC: string;
103
+        VP8: string;
104
+        VP9: string;
105
+    };
106
+}

+ 295
- 0
types/auto/modules/RTC/JitsiLocalTrack.d.ts View File

@@ -0,0 +1,295 @@
1
+/**
2
+ * Represents a single media track(either audio or video).
3
+ * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
4
+ */
5
+export default class JitsiLocalTrack extends JitsiTrack {
6
+    /**
7
+     * Constructs a new JitsiLocalTrack instance.
8
+     *
9
+     * @constructor
10
+     * @param {Object} trackInfo
11
+     * @param {number} trackInfo.rtcId - The ID assigned by the RTC module.
12
+     * @param {Object} trackInfo.stream - The WebRTC MediaStream, parent of the track.
13
+     * @param {Object} trackInfo.track - The underlying WebRTC MediaStreamTrack for new JitsiLocalTrack.
14
+     * @param {string} trackInfo.mediaType - The MediaType of the JitsiLocalTrack.
15
+     * @param {string} trackInfo.videoType - The VideoType of the JitsiLocalTrack.
16
+     * @param {Array<Object>} trackInfo.effects - The effects to be applied to the JitsiLocalTrack.
17
+     * @param {number} trackInfo.resolution - The video resolution if it's a video track
18
+     * @param {string} trackInfo.deviceId - The ID of the local device for this track.
19
+     * @param {string} trackInfo.facingMode - The camera facing mode used in getUserMedia call (for mobile only).
20
+     * @param {sourceId} trackInfo.sourceId - The id of the desktop sharing source. NOTE: defined for desktop sharing
21
+     * tracks only.
22
+     */
23
+    constructor({ deviceId, facingMode, mediaType, resolution, rtcId, sourceId, sourceType, stream, track, videoType, effects }: {
24
+        rtcId: number;
25
+        stream: any;
26
+        track: any;
27
+        mediaType: string;
28
+        videoType: string;
29
+        effects: Array<any>;
30
+        resolution: number;
31
+        deviceId: string;
32
+        facingMode: string;
33
+        sourceId: any;
34
+    });
35
+    _setEffectInProgress: boolean;
36
+    /**
37
+     * Track metadata.
38
+     */
39
+    metadata: {
40
+        displaySurface?: any;
41
+        timestamp: number;
42
+    };
43
+    /**
44
+     * The ID assigned by the RTC module on instance creation.
45
+     *
46
+     * @type {number}
47
+     */
48
+    rtcId: number;
49
+    sourceId: any;
50
+    sourceType: any;
51
+    resolution: any;
52
+    maxEnabledResolution: number;
53
+    _constraints: any;
54
+    deviceId: string;
55
+    /**
56
+     * The <tt>Promise</tt> which represents the progress of a previously
57
+     * queued/scheduled {@link _setMuted} (from the point of view of
58
+     * {@link _queueSetMuted}).
59
+     *
60
+     * @private
61
+     * @type {Promise}
62
+     */
63
+    private _prevSetMuted;
64
+    /**
65
+     * The facing mode of the camera from which this JitsiLocalTrack
66
+     * instance was obtained.
67
+     *
68
+     * @private
69
+     * @type {CameraFacingMode|undefined}
70
+     */
71
+    private _facingMode;
72
+    _trackEnded: boolean;
73
+    /**
74
+     * Indicates whether data has been sent or not.
75
+     */
76
+    _hasSentData: boolean;
77
+    /**
78
+     * Used only for detection of audio problems. We want to check only once
79
+     * whether the track is sending data or not. This flag is set to false
80
+     * after the check.
81
+     */
82
+    _testDataSent: boolean;
83
+    _realDeviceId: string;
84
+    _trackMutedTS: number;
85
+    _onDeviceListWillChange: (devices: any) => void;
86
+    _onAudioOutputDeviceChanged: any;
87
+    /**
88
+     * Adds stream to conference and marks it as "unmute" operation.
89
+     *
90
+     * @private
91
+     * @returns {Promise}
92
+     */
93
+    private _addStreamToConferenceAsUnmute;
94
+    /**
95
+     * Fires NO_DATA_FROM_SOURCE event and logs it to analytics and callstats.
96
+     *
97
+     * @private
98
+     * @returns {void}
99
+     */
100
+    private _fireNoDataFromSourceEvent;
101
+    /**
102
+     * Sets handlers to the MediaStreamTrack object that will detect camera issues.
103
+     *
104
+     * @private
105
+     * @returns {void}
106
+     */
107
+    private _initNoDataFromSourceHandlers;
108
+    /**
109
+     * Returns true if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
110
+     *
111
+     * @private
112
+     * @returns {boolean} - True if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
113
+     */
114
+    private _isNoDataFromSourceEventsEnabled;
115
+    /**
116
+     * Initializes a new Promise to execute {@link #_setMuted}. May be called multiple times in a row and the
117
+     * invocations of {@link #_setMuted} and, consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
118
+     * serialized fashion.
119
+     *
120
+     * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
121
+     * @private
122
+     * @returns {Promise}
123
+     */
124
+    private _queueSetMuted;
125
+    /**
126
+     * Removes stream from conference and marks it as "mute" operation.
127
+     *
128
+     * @param {Function} successCallback - Callback that will be called when the operation is successful.
129
+     * @param {Function} errorCallback - Callback that will be called when the operation fails.
130
+     * @private
131
+     * @returns {Promise}
132
+     */
133
+    private _removeStreamFromConferenceAsMute;
134
+    /**
135
+     * Sends mute status for a track to conference if any.
136
+     *
137
+     * @param {boolean} mute - If track is muted.
138
+     * @private
139
+     * @returns {void}
140
+     */
141
+    private _sendMuteStatus;
142
+    /**
143
+     * Mutes / unmutes this track.
144
+     *
145
+     * @param {boolean} muted - If <tt>true</tt>, this track will be muted; otherwise, this track will be unmuted.
146
+     * @private
147
+     * @returns {Promise}
148
+     */
149
+    private _setMuted;
150
+    /**
151
+     * Sets real device ID by comparing track information with device information. This is temporary solution until
152
+     * getConstraints() method will be implemented in browsers.
153
+     *
154
+     * @param {MediaDeviceInfo[]} devices - The list of devices obtained from enumerateDevices() call.
155
+     * @private
156
+     * @returns {void}
157
+     */
158
+    private _setRealDeviceIdFromDeviceList;
159
+    storedMSID: string;
160
+    /**
161
+     * Starts the effect process and returns the modified stream.
162
+     *
163
+     * @param {Object} effect - Represents effect instance
164
+     * @private
165
+     * @returns {void}
166
+     */
167
+    private _startStreamEffect;
168
+    _streamEffect: any;
169
+    _originalStream: any;
170
+    /**
171
+     * Stops the effect process and returns the original stream.
172
+     *
173
+     * @private
174
+     * @returns {void}
175
+     */
176
+    private _stopStreamEffect;
177
+    /**
178
+     * Switches the camera facing mode if the WebRTC implementation supports the custom MediaStreamTrack._switchCamera
179
+     * method. Currently, the method in question is implemented in react-native-webrtc only. When such a WebRTC
180
+     * implementation is executing, the method is the preferred way to switch between the front/user-facing and the
181
+     * back/environment-facing cameras because it will likely be (as is the case of react-native-webrtc) noticeably
182
+     * faster than creating a new MediaStreamTrack via a new getUserMedia call with the switched facingMode constraint
183
+     * value. Moreover, the approach with a new getUserMedia call may not even work: WebRTC on Android and iOS is
184
+     * either very slow to open the camera a second time or plainly freezes attempting to do that.
185
+     *
186
+     * @returns {void}
187
+     */
188
+    _switchCamera(): void;
189
+    /**
190
+     * Stops the currently used effect (if there is one) and starts the passed effect (if there is one).
191
+     *
192
+     * @param {Object|undefined} effect - The new effect to be set.
193
+     * @private
194
+     * @returns {void}
195
+     */
196
+    private _switchStreamEffect;
197
+    /**
198
+     * Returns facing mode for video track from camera. For other cases (e.g. audio track or 'desktop' video track)
199
+     * returns undefined.
200
+     *
201
+     * @returns {CameraFacingMode|undefined}
202
+     */
203
+    getCameraFacingMode(): CameraFacingMode | undefined;
204
+    /**
205
+     * Returns device id associated with track.
206
+     *
207
+     * @returns {string}
208
+     */
209
+    getDeviceId(): string;
210
+    /**
211
+     * Get the duration of the track.
212
+     *
213
+     * @returns {Number} the duration of the track in seconds
214
+     */
215
+    getDuration(): number;
216
+    /**
217
+     * Returns the participant id which owns the track.
218
+     *
219
+     * @returns {string} the id of the participants. It corresponds to the
220
+     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
221
+     */
222
+    getParticipantId(): string;
223
+    /**
224
+     * Returns if associated MediaStreamTrack is in the 'ended' state
225
+     *
226
+     * @returns {boolean}
227
+     */
228
+    isEnded(): boolean;
229
+    /**
230
+     * Returns <tt>true</tt> - if the stream is muted and <tt>false</tt> otherwise.
231
+     *
232
+     * @returns {boolean} <tt>true</tt> - if the stream is muted and <tt>false</tt> otherwise.
233
+     */
234
+    isMuted(): boolean;
235
+    /**
236
+     * Checks whether the attached MediaStream is receiving data from source or not. If the stream property is null
237
+     * (because of mute or another reason) this method will return false.
238
+     * NOTE: This method doesn't indicate problem with the streams directly. For example in case of video mute the
239
+     * method will return false or if the user has disposed the track.
240
+     *
241
+     * @returns {boolean} true if the stream is receiving data and false otherwise.
242
+     */
243
+    isReceivingData(): boolean;
244
+    /**
245
+     * Asynchronously mutes this track.
246
+     *
247
+     * @returns {Promise}
248
+     */
249
+    mute(): Promise<any>;
250
+    /**
251
+     * Handles bytes sent statistics. NOTE: used only for audio tracks to detect audio issues.
252
+     *
253
+     * @param {TraceablePeerConnection} tpc - The peerconnection that is reporting the bytes sent stat.
254
+     * @param {number} bytesSent - The new value.
255
+     * @returns {void}
256
+     */
257
+    onByteSentStatsReceived(tpc: any, bytesSent: number): void;
258
+    /**
259
+     * Sets the JitsiConference object associated with the track. This is temp solution.
260
+     *
261
+     * @param conference - JitsiConference object.
262
+     * @returns {void}
263
+     */
264
+    setConference(conference: any): void;
265
+    /**
266
+     * Sets the effect and switches between the modified stream and original one.
267
+     *
268
+     * @param {Object} effect - Represents the effect instance to be used.
269
+     * @returns {Promise}
270
+     */
271
+    setEffect(effect: any): Promise<any>;
272
+    /**
273
+     * Stops the associated MediaStream.
274
+     *
275
+     * @returns {void}
276
+     */
277
+    stopStream(): void;
278
+    /**
279
+     * Indicates that we are executing {@link #stopStream} i.e.
280
+     * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
281
+     * associated with this <tt>JitsiTrack</tt> instance.
282
+     *
283
+     * @private
284
+     * @type {boolean}
285
+     */
286
+    private _stopStreamInProgress;
287
+    /**
288
+     * Asynchronously unmutes this track.
289
+     *
290
+     * @returns {Promise}
291
+     */
292
+    unmute(): Promise<any>;
293
+}
294
+import JitsiTrack from "./JitsiTrack";
295
+import CameraFacingMode from "../../service/RTC/CameraFacingMode";

+ 109
- 0
types/auto/modules/RTC/JitsiRemoteTrack.d.ts View File

@@ -0,0 +1,109 @@
1
+/**
2
+ * Represents a single media track (either audio or video).
3
+ */
4
+export default class JitsiRemoteTrack extends JitsiTrack {
5
+    /**
6
+     * Creates new JitsiRemoteTrack instance.
7
+     * @param {RTC} rtc the RTC service instance.
8
+     * @param {JitsiConference} conference the conference to which this track
9
+     *        belongs to
10
+     * @param {string} ownerEndpointId the endpoint ID of the track owner
11
+     * @param {MediaStream} stream WebRTC MediaStream, parent of the track
12
+     * @param {MediaStreamTrack} track underlying WebRTC MediaStreamTrack for
13
+     *        the new JitsiRemoteTrack
14
+     * @param {MediaType} mediaType the type of the media
15
+     * @param {VideoType} videoType the type of the video if applicable
16
+     * @param {number} ssrc the SSRC number of the Media Stream
17
+     * @param {boolean} muted the initial muted state
18
+     * @param {boolean} isP2P indicates whether or not this track belongs to a
19
+     * P2P session
20
+     * @param {String} sourceName the source name signaled for the track
21
+     * @throws {TypeError} if <tt>ssrc</tt> is not a number.
22
+     * @constructor
23
+     */
24
+    constructor(rtc: any, conference: any, ownerEndpointId: string, stream: MediaStream, track: MediaStreamTrack, mediaType: any, videoType: any, ssrc: number, muted: boolean, isP2P: boolean, sourceName: string);
25
+    rtc: any;
26
+    ssrc: number;
27
+    ownerEndpointId: string;
28
+    muted: boolean;
29
+    isP2P: boolean;
30
+    _sourceName: string;
31
+    hasBeenMuted: boolean;
32
+    _containerHandlers: {};
33
+    /**
34
+     * Attaches the track handlers.
35
+     *
36
+     * @returns {void}
37
+     */
38
+    _bindTrackHandlers(): void;
39
+    /**
40
+     * Callback invoked when the track is muted. Emits an event notifying
41
+     * listeners of the mute event.
42
+     *
43
+     * @private
44
+     * @returns {void}
45
+     */
46
+    private _onTrackMute;
47
+    /**
48
+     * Callback invoked when the track is unmuted. Emits an event notifying
49
+     * listeners of the mute event.
50
+     *
51
+     * @private
52
+     * @returns {void}
53
+     */
54
+    private _onTrackUnmute;
55
+    /**
56
+     * Sets current muted status and fires an events for the change.
57
+     * @param value the muted status.
58
+     */
59
+    setMute(value: any): void;
60
+    /**
61
+     * Returns the current muted status of the track.
62
+     * @returns {boolean|*|JitsiRemoteTrack.muted} <tt>true</tt> if the track is
63
+     * muted and <tt>false</tt> otherwise.
64
+     */
65
+    isMuted(): boolean | any | any;
66
+    /**
67
+     * Returns the participant id which owns the track.
68
+     *
69
+     * @returns {string} the id of the participants. It corresponds to the
70
+     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
71
+     */
72
+    getParticipantId(): string;
73
+    /**
74
+     * Returns the synchronization source identifier (SSRC) of this remote
75
+     * track.
76
+     *
77
+     * @returns {number} the SSRC of this remote track.
78
+     */
79
+    getSSRC(): number;
80
+    /**
81
+     * Returns the track's source name
82
+     *
83
+     * @returns {string} the track's source name
84
+     */
85
+    getSourceName(): string;
86
+    /**
87
+     * Changes the video type of the track.
88
+     *
89
+     * @param {string} type - The new video type("camera", "desktop").
90
+     */
91
+    _setVideoType(type: string): void;
92
+    /**
93
+     * Handles track play events.
94
+     */
95
+    _playCallback(): void;
96
+    /**
97
+     * An event handler for events triggered by the attached container.
98
+     *
99
+     * @param {string} type - The type of the event.
100
+     */
101
+    _containerEventHandler(type: string): void;
102
+    /**
103
+     * Returns a string with a description of the current status of the track.
104
+     *
105
+     * @returns {string}
106
+     */
107
+    _getStatus(): string;
108
+}
109
+import JitsiTrack from "./JitsiTrack";

+ 240
- 0
types/auto/modules/RTC/JitsiTrack.d.ts View File

@@ -0,0 +1,240 @@
1
+/// <reference types="node" />
2
+/**
3
+ * Represents a single media track (either audio or video).
4
+ */
5
+export default class JitsiTrack extends EventEmitter {
6
+    /**
7
+     * Represents a single media track (either audio or video).
8
+     * @constructor
9
+     * @param conference the rtc instance
10
+     * @param stream the WebRTC MediaStream instance
11
+     * @param track the WebRTC MediaStreamTrack instance, must be part of
12
+     * the given <tt>stream</tt>.
13
+     * @param streamInactiveHandler the function that will handle
14
+     *        onended/oninactive events of the stream.
15
+     * @param trackMediaType the media type of the JitsiTrack
16
+     * @param videoType the VideoType for this track if any
17
+     */
18
+    constructor(conference: any, stream: any, track: any, streamInactiveHandler: any, trackMediaType: any, videoType: any);
19
+    addEventListener: (eventName: string | symbol, listener: (...args: any[]) => void) => JitsiTrack;
20
+    removeEventListener: (eventName: string | symbol, listener: (...args: any[]) => void) => JitsiTrack;
21
+    /**
22
+     * Array with the HTML elements that are displaying the streams.
23
+     * @type {Array}
24
+     */
25
+    containers: any[];
26
+    conference: any;
27
+    audioLevel: number;
28
+    type: any;
29
+    track: any;
30
+    videoType: any;
31
+    handlers: Map<any, any>;
32
+    /**
33
+     * Indicates whether this JitsiTrack has been disposed. If true, this
34
+     * JitsiTrack is to be considered unusable and operations involving it
35
+     * are to fail (e.g. {@link JitsiConference#addTrack(JitsiTrack)},
36
+     * {@link JitsiConference#removeTrack(JitsiTrack)}).
37
+     * @type {boolean}
38
+     */
39
+    disposed: boolean;
40
+    /**
41
+     * The inactive handler which will be triggered when the underlying
42
+     * <tt>MediaStream</tt> ends.
43
+     *
44
+     * @private
45
+     * @type {Function}
46
+     */
47
+    private _streamInactiveHandler;
48
+    /**
49
+     * Adds onended/oninactive handler to a MediaStream or a MediaStreamTrack.
50
+     * Firefox doesn't fire an inactive event on the MediaStream, instead it fires
51
+     * a onended event on the MediaStreamTrack.
52
+     * @param {Function} handler the handler
53
+     */
54
+    _addMediaStreamInactiveHandler(handler: Function): void;
55
+    /**
56
+     * Sets handler to the WebRTC MediaStream or MediaStreamTrack object
57
+     * depending on the passed type.
58
+     * @param {string} type the type of the handler that is going to be set
59
+     * @param {Function} handler the handler.
60
+     */
61
+    _setHandler(type: string, handler: Function): void;
62
+    /**
63
+     * Unregisters all event handlers bound to the underlying media stream/track
64
+     * @private
65
+     */
66
+    private _unregisterHandlers;
67
+    /**
68
+     * Sets the stream property of JitsiTrack object and sets all stored
69
+     * handlers to it.
70
+     *
71
+     * @param {MediaStream} stream the new stream.
72
+     * @protected
73
+     */
74
+    protected _setStream(stream: MediaStream): void;
75
+    stream: any;
76
+    /**
77
+     * Returns the video type (camera or desktop) of this track.
78
+     */
79
+    getVideoType(): any;
80
+    /**
81
+     * Returns the type (audio or video) of this track.
82
+     */
83
+    getType(): any;
84
+    /**
85
+     * Check if this is an audio track.
86
+     */
87
+    isAudioTrack(): boolean;
88
+    /**
89
+     * Checks whether the underlying WebRTC <tt>MediaStreamTrack</tt> is muted
90
+     * according to its 'muted' field status.
91
+     * @return {boolean} <tt>true</tt> if the underlying
92
+     * <tt>MediaStreamTrack</tt> is muted or <tt>false</tt> otherwise.
93
+     */
94
+    isWebRTCTrackMuted(): boolean;
95
+    /**
96
+     * Check if this is a video track.
97
+     */
98
+    isVideoTrack(): boolean;
99
+    /**
100
+     * Checks whether this is a local track.
101
+     * @abstract
102
+     * @return {boolean} 'true' if it's a local track or 'false' otherwise.
103
+     */
104
+    isLocal(): boolean;
105
+    /**
106
+     * Check whether this is a local audio track.
107
+     *
108
+     * @return {boolean} -  true if track represents a local audio track, false otherwise.
109
+     */
110
+    isLocalAudioTrack(): boolean;
111
+    /**
112
+     * Returns the WebRTC MediaStream instance.
113
+     */
114
+    getOriginalStream(): any;
115
+    /**
116
+     * Returns the ID of the underlying WebRTC Media Stream(if any)
117
+     * @returns {String|null}
118
+     */
119
+    getStreamId(): string | null;
120
+    /**
121
+     * Return the underlying WebRTC MediaStreamTrack
122
+     * @returns {MediaStreamTrack}
123
+     */
124
+    getTrack(): MediaStreamTrack;
125
+    /**
126
+     * Return the underlying WebRTC MediaStreamTrack label
127
+     * @returns {string}
128
+     */
129
+    getTrackLabel(): string;
130
+    /**
131
+     * Returns the ID of the underlying WebRTC MediaStreamTrack(if any)
132
+     * @returns {String|null}
133
+     */
134
+    getTrackId(): string | null;
135
+    /**
136
+     * Return meaningful usage label for this track depending on its media and
137
+     * eventual video type.
138
+     * @returns {string}
139
+     */
140
+    getUsageLabel(): string;
141
+    /**
142
+     * Eventually will trigger RTCEvents.TRACK_ATTACHED event.
143
+     * @param container the video/audio container to which this stream is
144
+     *        attached and for which event will be fired.
145
+     * @private
146
+     */
147
+    private _maybeFireTrackAttached;
148
+    /**
149
+     * Attaches the MediaStream of this track to an HTML container.
150
+     * Adds the container to the list of containers that are displaying the
151
+     * track.
152
+     *
153
+     * @param container the HTML container which can be 'video' or 'audio'
154
+     * element.
155
+     *
156
+     * @returns {void}
157
+     */
158
+    attach(container: any): void;
159
+    /**
160
+     * Removes this JitsiTrack from the passed HTML container.
161
+     *
162
+     * @param container the HTML container to detach from this JitsiTrack. If
163
+     * <tt>null</tt> or <tt>undefined</tt>, all containers are removed. A
164
+     * container can be a 'video', 'audio' or 'object' HTML element instance to
165
+     * which this JitsiTrack is currently attached.
166
+     */
167
+    detach(container: any): void;
168
+    /**
169
+     * Called when the track has been attached to a new container.
170
+     *
171
+     * @param {HTMLElement} container the HTML container which can be 'video' or
172
+     * 'audio' element.
173
+     * @private
174
+     */
175
+    private _onTrackAttach;
176
+    /**
177
+     * Called when the track has been detached from a container.
178
+     *
179
+     * @param {HTMLElement} container the HTML container which can be 'video' or
180
+     * 'audio' element.
181
+     * @private
182
+     */
183
+    private _onTrackDetach;
184
+    /**
185
+     * Attach time to first media tracker only if there is conference and only
186
+     * for the first element.
187
+     *
188
+     * @param {HTMLElement} container the HTML container which can be 'video' or
189
+     * 'audio' element.
190
+     * @private
191
+     */
192
+    private _attachTTFMTracker;
193
+    /**
194
+     * Removes attached event listeners.
195
+     *
196
+     * @returns {Promise}
197
+     */
198
+    dispose(): Promise<any>;
199
+    /**
200
+     * Returns true if this is a video track and the source of the video is a
201
+     * screen capture as opposed to a camera.
202
+     */
203
+    isScreenSharing(): void;
204
+    /**
205
+     * Returns id of the track.
206
+     * @returns {string|null} id of the track or null if this is fake track.
207
+     */
208
+    getId(): string | null;
209
+    /**
210
+     * Checks whether the MediaStream is active/not ended.
211
+     * When there is no check for active we don't have information and so
212
+     * will return that stream is active (in case of FF).
213
+     * @returns {boolean} whether MediaStream is active.
214
+     */
215
+    isActive(): boolean;
216
+    /**
217
+     * Sets the audio level for the stream
218
+     * @param {number} audioLevel value between 0 and 1
219
+     * @param {TraceablePeerConnection} [tpc] the peerconnection instance which
220
+     * is source for the audio level. It can be <tt>undefined</tt> for
221
+     * a local track if the audio level was measured outside of the
222
+     * peerconnection (see /modules/statistics/LocalStatsCollector.js).
223
+     */
224
+    setAudioLevel(audioLevel: number, tpc?: any): void;
225
+    /**
226
+     * Returns the msid of the stream attached to the JitsiTrack object or null
227
+     * if no stream is attached.
228
+     */
229
+    getMSID(): string;
230
+    /**
231
+     * Sets new audio output device for track's DOM elements. Video tracks are
232
+     * ignored.
233
+     * @param {string} audioOutputDeviceId - id of 'audiooutput' device from
234
+     *      navigator.mediaDevices.enumerateDevices(), '' for default device
235
+     * @emits JitsiTrackEvents.TRACK_AUDIO_OUTPUT_CHANGED
236
+     * @returns {Promise}
237
+     */
238
+    setAudioOutput(audioOutputDeviceId: string): Promise<any>;
239
+}
240
+import EventEmitter from "events";

+ 68
- 0
types/auto/modules/RTC/MockClasses.d.ts View File

@@ -0,0 +1,68 @@
1
+/**
2
+ * Mock {@link TraceablePeerConnection} - add things as needed, but only things useful for all tests.
3
+ */
4
+export class MockPeerConnection {
5
+    /**
6
+     * Constructor.
7
+     *
8
+     * @param {string} id RTC id
9
+     * @param {boolean} usesUnifiedPlan
10
+     */
11
+    constructor(id: string, usesUnifiedPlan: boolean);
12
+    id: string;
13
+    _usesUnifiedPlan: boolean;
14
+    /**
15
+     * {@link TraceablePeerConnection.localDescription}.
16
+     *
17
+     * @returns {Object}
18
+     */
19
+    get localDescription(): any;
20
+    /**
21
+     * {@link TraceablePeerConnection.remoteDescription}.
22
+     *
23
+     * @returns {Object}
24
+     */
25
+    get remoteDescription(): any;
26
+    /**
27
+     * {@link TraceablePeerConnection.createAnswer}.
28
+     *
29
+     * @returns {Promise<Object>}
30
+     */
31
+    createAnswer(): Promise<any>;
32
+    /**
33
+     * {@link TraceablePeerConnection.setLocalDescription}.
34
+     *
35
+     * @returns {Promise<void>}
36
+     */
37
+    setLocalDescription(): Promise<void>;
38
+    /**
39
+     * {@link TraceablePeerConnection.setRemoteDescription}.
40
+     *
41
+     * @returns {Promise<void>}
42
+     */
43
+    setRemoteDescription(): Promise<void>;
44
+    /**
45
+     * {@link TraceablePeerConnection.setSenderVideoConstraints}.
46
+     */
47
+    setSenderVideoConstraints(): void;
48
+    /**
49
+     * {@link TraceablePeerConnection.setVideoTransferActive}.
50
+     */
51
+    setVideoTransferActive(): boolean;
52
+    /**
53
+     * {@link TraceablePeerConnection.usesUnifiedPlan}.
54
+     */
55
+    usesUnifiedPlan(): boolean;
56
+}
57
+/**
58
+ * Mock {@link RTC} - add things as needed, but only things useful for all tests.
59
+ */
60
+export class MockRTC {
61
+    /**
62
+     * {@link RTC.createPeerConnection}.
63
+     *
64
+     * @returns {MockPeerConnection}
65
+     */
66
+    createPeerConnection(): MockPeerConnection;
67
+    pc: MockPeerConnection;
68
+}

+ 435
- 0
types/auto/modules/RTC/RTC.d.ts View File

@@ -0,0 +1,435 @@
1
/**
 * The RTC module for a conference: owns the set of active
 * {@link TraceablePeerConnection}s, the local tracks and the bridge channel,
 * and exposes static helpers for device/media handling.
 */
export default class RTC extends Listenable {
    /**
     * Exposes the private helper for converting a WebRTC MediaStream to a
     * JitsiLocalTrack.
     *
     * @param {Array<Object>} tracksInfo
     * @returns {Array<JitsiLocalTrack>}
     */
    static createLocalTracks(tracksInfo: Array<any>): Array<JitsiLocalTrack>;
    /**
     * Creates the local MediaStreams.
     * @param {object} [options] Optional parameters.
     * @param {array} options.devices The devices that will be requested.
     * @param {string} options.resolution Resolution constraints.
     * @param {string} options.cameraDeviceId
     * @param {string} options.micDeviceId
     * @returns {*} Promise object that will receive the new JitsiTracks
     */
    static obtainAudioAndVideoPermissions(options?: {
        devices: any[];
        resolution: string;
        cameraDeviceId: string;
        micDeviceId: string;
    }): any;
    /**
     * Registers a listener for an RTC event.
     *
     * @param eventType the event to subscribe to.
     * @param listener callback invoked when the event fires.
     */
    static addListener(eventType: any, listener: any): void;
    /**
     * Removes a listener previously registered with {@link addListener}.
     *
     * @param eventType the event the listener was subscribed to.
     * @param listener the callback to remove.
     */
    static removeListener(eventType: any, listener: any): void;
    /**
     * Initializes the static RTC machinery.
     *
     * @param options configuration options — shape not visible from this
     * declaration; see the implementation.
     */
    static init(options?: {}): void;
    /**
     * Attaches a media stream to a DOM element.
     *
     * @param elSelector element (or selector) to attach the stream to.
     * @param stream the MediaStream to attach.
     */
    static attachMediaStream(elSelector: any, stream: any): any;
    /**
     * Returns the id of the given stream.
     * @param {MediaStream} stream
     */
    static getStreamID(stream: MediaStream): any;
    /**
     * Returns the id of the given track.
     * @param {MediaStreamTrack} track
     */
    static getTrackID(track: MediaStreamTrack): any;
    /**
     * Returns true if retrieving the list of input devices is supported
     * and false if not.
     */
    static isDeviceListAvailable(): boolean;
    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] Type of device to change. Default is
     *      undefined or 'input', 'output' - for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    static isDeviceChangeAvailable(deviceType?: string): boolean;
    /**
     * Returns whether the current execution environment supports WebRTC (for
     * use within this library).
     *
     * @returns {boolean} {@code true} if WebRTC is supported in the current
     * execution environment (for use within this library); {@code false},
     * otherwise.
     */
    static isWebRtcSupported(): boolean;
    /**
     * Returns currently used audio output device id, '' stands for default
     * device
     * @returns {string}
     */
    static getAudioOutputDevice(): string;
    /**
     * Returns list of available media devices if its obtained, otherwise an
     * empty array is returned.
     * @returns {array} list of available media devices.
     */
    static getCurrentlyAvailableMediaDevices(): any[];
    /**
     * Returns whether available devices have permissions granted
     * @returns {Boolean}
     */
    static arePermissionsGrantedForAvailableDevices(): boolean;
    /**
     * Returns event data for device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    static getEventDataForActiveDevice(device: any): MediaDeviceInfo;
    /**
     * Sets current audio output device.
     * @param {string} deviceId Id of 'audiooutput' device from
     *      navigator.mediaDevices.enumerateDevices().
     * @returns {Promise} resolves when audio output is changed, is rejected
     *      otherwise
     */
    static setAudioOutputDevice(deviceId: string): Promise<any>;
    /**
     * Returns <tt>true<tt/> if given WebRTC MediaStream is considered a valid
     * "user" stream which means that it's not a "receive only" stream nor a
     * "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * @param {MediaStream} stream The WebRTC MediaStream instance.
     * @returns {boolean}
     */
    static isUserStream(stream: MediaStream): boolean;
    /**
     * Returns <tt>true<tt/> if a WebRTC MediaStream identified by given stream
     * ID is considered a valid "user" stream which means that it's not a
     * "receive only" stream nor a "mixed" JVB stream.
     *
     * Clients that implement Unified Plan, such as Firefox use recvonly
     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
     * to Plan B where there are only 3 channels: audio, video and data.
     *
     * @param {string} streamId The id of WebRTC MediaStream.
     * @returns {boolean}
     */
    static isUserStreamById(streamId: string): boolean;
    /**
     * Allows to receive list of available cameras/microphones.
     * @param {function} callback Would receive array of devices as an
     *      argument.
     */
    static enumerateDevices(callback: Function): void;
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param {MediaStream} mediaStream MediaStream object to stop.
     */
    static stopMediaStream(mediaStream: MediaStream): void;
    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    static isDesktopSharingEnabled(): boolean;
    /**
     * Creates an RTC module instance for the given conference.
     *
     * @param conference the conference this RTC instance belongs to.
     * @param options configuration options — shape not visible from this
     * declaration.
     */
    constructor(conference: any, options?: {});
    // The conference instance passed to the constructor.
    conference: any;
    /**
     * A map of active <tt>TraceablePeerConnection</tt>.
     * @type {Map.<number, TraceablePeerConnection>}
     */
    peerConnections: Map<number, TraceablePeerConnection>;
    // Local tracks known to this RTC instance.
    localTracks: any[];
    // Options passed to the constructor.
    options: {};
    // The bridge channel; set up via initializeBridgeChannel().
    _channel: BridgeChannel;
    /**
     * The value specified to the last invocation of setLastN before the
     * channel completed opening. If non-null, the value will be sent
     * through a channel (once) as soon as it opens and will then be
     * discarded.
     * @private
     * @type {number}
     */
    private _lastN;
    /**
     * Defines the last N endpoints list. It can be null or an array once
     * initialised with a channel last N event.
     * @type {Array<string>|null}
     * @private
     */
    private _lastNEndpoints;
    /**
     * The number representing the maximum video height the local client
     * should receive from the bridge.
     *
     * @type {number|undefined}
     * @private
     */
    private _maxFrameHeight;
    /**
     * The endpoint IDs of currently selected participants.
     *
     * @type {Array}
     * @private
     */
    private _selectedEndpoints;
    // Listener invoked when the last N set changes — presumably wired to
    // _onLastNChanged; confirm against the implementation.
    _lastNChangeListener: any;
    /**
     * Callback invoked when the list of known audio and video devices has
     * been updated. Attempts to update the known available audio output
     * devices.
     *
     * @private
     * @returns {void}
     */
    private _onDeviceListChanged;
    /**
     * Updates the target audio output device for all remote audio tracks.
     *
     * @param {string} deviceId - The device id of the audio ouput device to
     * use for all remote tracks.
     * @private
     * @returns {void}
     */
    private _updateAudioOutputForAudioTracks;
    /**
     * The default video type assumed by the bridge.
     * @deprecated this will go away with multiple streams support
     * @type {BridgeVideoType}
     * @private
     */
    private _videoType;
    /**
     * Removes any listeners and stored state from this {@code RTC} instance.
     *
     * @returns {void}
     */
    destroy(): void;
    /**
     * Initializes the bridge channel of this instance.
     * At least one of both, peerconnection or wsUrl parameters, must be
     * given.
     * @param {RTCPeerConnection} [peerconnection] WebRTC peer connection
     * instance.
     * @param {string} [wsUrl] WebSocket URL.
     */
    initializeBridgeChannel(peerconnection?: RTCPeerConnection, wsUrl?: string): void;
    // Listener invoked when the bridge channel opens; assigned during
    // initializeBridgeChannel() — confirm against the implementation.
    _channelOpenListener: any;
    /**
     * Receives events when Last N had changed.
     * @param {array} lastNEndpoints The new Last N endpoints.
     * @private
     */
    private _onLastNChanged;
    /**
     * Should be called when current media session ends and after the
     * PeerConnection has been closed using PeerConnection.close() method.
     */
    onCallEnded(): void;
    /**
     * Sets the capture frame rate to be used for desktop tracks.
     *
     * @param {number} maxFps framerate to be used for desktop track capture.
     */
    setDesktopSharingFrameRate(maxFps: number): void;
    /**
     * Sets the receiver video constraints that determine how bitrate is allocated to each of the video streams
     * requested from the bridge. The constraints are cached and sent through the bridge channel once the channel
     * is established.
     * @param {*} constraints
     */
    setNewReceiverVideoConstraints(constraints: any): void;
    // Cached receiver video constraints, set by setNewReceiverVideoConstraints().
    _receiverVideoConstraints: any;
    /**
     * Sets the maximum video size the local participant should receive from
     * remote participants. Will cache the value and send it through the channel
     * once it is created.
     *
     * @param {number} maxFrameHeight the maximum frame height, in pixels,
     * this receiver is willing to receive.
     * @returns {void}
     */
    setReceiverVideoConstraint(maxFrameHeight: any): void;
    /**
     * Sets the video type and availability for the local video source.
     *
     * @param {string} videoType 'camera' for camera, 'desktop' for screenshare and
     * 'none' for when local video source is muted or removed from the peerconnection.
     * @returns {void}
     */
    setVideoType(videoType: string): void;
    /**
     * Sends the track's video type to the JVB.
     * @param {SourceName} sourceName - the track's source name.
     * @param {BridgeVideoType} videoType - the track's video type (the inline
     * object type below is the auto-generated expansion of BridgeVideoType).
     */
    sendSourceVideoType(sourceName: any, videoType: {
        CAMERA: string;
        DESKTOP: string;
        DESKTOP_HIGH_FPS: string;
        NONE: string;
    }): void;
    /**
     * Elects the participants with the given ids to be the selected
     * participants in order to always receive video for this participant (even
     * when last n is enabled). If there is no channel we store it and send it
     * through the channel once it is created.
     *
     * @param {Array<string>} ids - The user ids.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails.
     * @returns {void}
     */
    selectEndpoints(ids: Array<string>): void;
    /**
     * Creates new <tt>TraceablePeerConnection</tt>
     * @param {SignalingLayer} signaling The signaling layer that will provide information about the media or
     * participants which is not carried over SDP.
     * @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
     * @param {boolean} isP2P Indicates whether or not the new TPC will be used in a peer to peer type of session.
     * @param {object} options The config options.
     * @param {boolean} options.enableInsertableStreams - Set to true when the insertable streams constraints is to be
     * enabled on the PeerConnection.
     * @param {boolean} options.disableSimulcast If set to 'true' will disable the simulcast.
     * @param {boolean} options.disableRtx If set to 'true' will disable the RTX.
     * @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
     * @return {TraceablePeerConnection}
     */
    createPeerConnection(signaling: any, pcConfig: object, isP2P: boolean, options: {
        enableInsertableStreams: boolean;
        disableSimulcast: boolean;
        disableRtx: boolean;
        startSilent: boolean;
    }): TraceablePeerConnection;
    /**
     * Removed given peer connection from this RTC module instance.
     * @param {TraceablePeerConnection} traceablePeerConnection
     * @return {boolean} <tt>true</tt> if the given peer connection was removed
     * successfully or <tt>false</tt> if there was no peer connection mapped in
     * this RTC instance.
     */
    _removePeerConnection(traceablePeerConnection: TraceablePeerConnection): boolean;
    /**
     * Adds a local track to this RTC instance.
     *
     * @param track the local track to add.
     */
    addLocalTrack(track: any): void;
    /**
     * Get local video track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalVideoTrack(): JitsiLocalTrack | undefined;
    /**
     * Get local audio track.
     * @returns {JitsiLocalTrack|undefined}
     */
    getLocalAudioTrack(): JitsiLocalTrack | undefined;
    /**
     * Returns the endpoint id for the local user.
     * @returns {string}
     */
    getLocalEndpointId(): string;
    /**
     * Returns the local tracks of the given media type, or all local tracks if
     * no specific type is given.
     * @param {MediaType} [mediaType] Optional media type filter.
     * (audio or video).
     */
    getLocalTracks(mediaType?: typeof MediaType): any[];
    /**
     * Obtains all remote tracks currently known to this RTC module instance.
     * @param {MediaType} [mediaType] The remote tracks will be filtered
     *      by their media type if this argument is specified.
     * @return {Array<JitsiRemoteTrack>}
     */
    getRemoteTracks(mediaType?: typeof MediaType): Array<any>;
    /**
     * Set mute for all local audio streams attached to the conference.
     * @param value The mute value.
     * @returns {Promise}
     */
    setAudioMute(value: any): Promise<any>;
    /**
     * Set mute for all local video streams attached to the conference.
     * @param value The mute value.
     * @returns {Promise}
     */
    setVideoMute(value: any): Promise<any>;
    /**
     * Removes a local track from this RTC instance.
     *
     * @param track the local track to remove.
     */
    removeLocalTrack(track: any): void;
    /**
     * Closes the currently opened bridge channel.
     */
    closeBridgeChannel(): void;
    /**
     * Handles an audio level report for the given SSRC on the given peer
     * connection — exact semantics live in the implementation.
     *
     * @param {TraceablePeerConnection} tpc
     * @param {number} ssrc
     * @param {number} audioLevel
     * @param {boolean} isLocal
     */
    setAudioLevel(tpc: TraceablePeerConnection, ssrc: number, audioLevel: number, isLocal: boolean): void;
    /**
     * Sends message via the bridge channel.
     * @param {string} to The id of the endpoint that should receive the
     *      message. If "" the message will be sent to all participants.
     * @param {object} payload The payload of the message.
     * @throws NetworkError or InvalidStateError or Error if the operation
     * fails or there is no data channel created.
     */
    sendChannelMessage(to: string, payload: object): void;
    /**
     * Sends the local stats via the bridge channel.
     * @param {Object} payload The payload of the message.
     * @throws NetworkError/InvalidStateError/Error if the operation fails or if there is no data channel created.
     */
    sendEndpointStatsMessage(payload: any): void;
    /**
     * Selects a new value for "lastN". The requested amount of videos are going
     * to be delivered after the value is in effect. Set to -1 for unlimited or
     * all available videos.
     * @param {number} value the new value for lastN.
     */
    setLastN(value: number): void;
    /**
     * Indicates if the endpoint id is currently included in the last N.
     * @param {string} id The endpoint id that we check for last N.
     * @returns {boolean} true if the endpoint id is in the last N or if we
     * don't have bridge channel support, otherwise we return false.
     */
    isInLastN(id: string): boolean;
}
import Listenable from "../util/Listenable";
import TraceablePeerConnection from "./TraceablePeerConnection";
import BridgeChannel from "./BridgeChannel";
import JitsiLocalTrack from "./JitsiLocalTrack";
import * as MediaType from "../../service/RTC/MediaType";

+ 181
- 0
types/auto/modules/RTC/RTCUtils.d.ts View File

@@ -0,0 +1,181 @@
1
// Singleton instance of RTCUtils is the module's default export.
export default rtcUtils;
declare const rtcUtils: RTCUtils;
/**
 * Utility singleton wrapping browser WebRTC APIs (getUserMedia, device
 * enumeration, audio output selection, desktop capture).
 */
declare class RTCUtils extends Listenable {
    /**
     * Creates the singleton instance; real initialization happens in init().
     */
    constructor();
    /**
     * Depending on the browser, sets difference instance methods for
     * interacting with user media and adds methods to native WebRTC-related
     * objects. Also creates an instance variable for peer connection
     * constraints.
     *
     * @param {Object} options
     * @returns {void}
     */
    init(options?: any): void;
    // The RTCPeerConnection constructor to use (assigned in init()).
    RTCPeerConnectionType: {
        new (configuration?: RTCConfiguration): RTCPeerConnection;
        prototype: RTCPeerConnection;
        generateCertificate(keygenAlgorithm: AlgorithmIdentifier): Promise<RTCCertificate>;
    };
    // Browser-specific function that attaches a stream to a DOM element.
    attachMediaStream: Function;
    // Extracts the id of a MediaStream.
    getStreamID: ({ id }: {
        id: any;
    }) => any;
    // Extracts the id of a MediaStreamTrack.
    getTrackID: ({ id }: {
        id: any;
    }) => any;
    // Peer connection constraints; the goog* options are legacy
    // Chrome-specific constraint keys.
    pcConstraints: {
        optional: ({
            googScreencastMinBitrate: number;
            googCpuOveruseDetection?: undefined;
        } | {
            googCpuOveruseDetection: boolean;
            googScreencastMinBitrate?: undefined;
        })[];
    } | {
        optional?: undefined;
    };
    /**
     * Enumerates the available media devices.
     *
     * @param {Function} callback receives the device list as an argument.
     */
    enumerateDevices(callback: Function): void;
    /**
     * Acquires a media stream via getUserMedia that
     * matches the given constraints
     *
     * @param {array} umDevices which devices to acquire (e.g. audio, video)
     * @param {Object} constraints - Stream specifications to use.
     * @param {number} timeout - The timeout in ms for GUM.
     * @returns {Promise}
     */
    _getUserMedia(umDevices: any[], constraints?: any, timeout?: number): Promise<any>;
    /**
     * Acquire a display stream via the screenObtainer. This requires extra
     * logic compared to use screenObtainer versus normal device capture logic
     * in RTCUtils#_getUserMedia.
     *
     * @returns {Promise} A promise which will be resolved with an object which
     * contains the acquired display stream. If desktop sharing is not supported
     * then a rejected promise will be returned.
     */
    _getDesktopMedia(): Promise<any>;
    /**
     * Private utility for determining if the passed in MediaStream contains
     * tracks of the type(s) specified in the requested devices.
     *
     * @param {string[]} requestedDevices - The track types that are expected to
     * be includes in the stream.
     * @param {MediaStream} stream - The MediaStream to check if it has the
     * expected track types.
     * @returns {string[]} An array of string with the missing track types. The
     * array will be empty if all requestedDevices are found in the stream.
     */
    _getMissingTracks(requestedDevices: string[], stream: MediaStream): string[];
    /**
     * Gets streams from specified device types. This function intentionally
     * ignores errors for upstream to catch and handle instead.
     *
     * @param {Object} options - A hash describing what devices to get and
     * relevant constraints.
     * @param {string[]} options.devices - The types of media to capture. Valid
     * values are "desktop", "audio", and "video".
     * @param {Object} options.desktopSharingFrameRate
     * @param {Object} options.desktopSharingFrameRate.min - Minimum fps
     * @param {Object} options.desktopSharingFrameRate.max - Maximum fps
     * @param {String} options.desktopSharingSourceDevice - The device id or
     * label for a video input source that should be used for screensharing.
     * @returns {Promise} The promise, when successful, will return an array of
     * meta data for the requested device type, which includes the stream and
     * track. If an error occurs, it will be deferred to the caller for
     * handling.
     */
    obtainAudioAndVideoPermissions(options: {
        devices: string[];
        desktopSharingFrameRate: {
            min: any;
            max: any;
        };
        desktopSharingSourceDevice: string;
    }): Promise<any>;
    /**
     * Checks whether it is possible to enumerate available cameras/microphones.
     *
     * @returns {boolean} {@code true} if the device listing is available;
     * {@code false}, otherwise.
     */
    isDeviceListAvailable(): boolean;
    /**
     * Returns true if changing the input (camera / microphone) or output
     * (audio) device is supported and false if not.
     * @param {string} [deviceType] - type of device to change. Default is
     *      undefined or 'input', 'output' - for audio output device change.
     * @returns {boolean} true if available, false otherwise.
     */
    isDeviceChangeAvailable(deviceType: any): boolean;
    /**
     * A method to handle stopping of the stream.
     * One point to handle the differences in various implementations.
     * @param mediaStream MediaStream object to stop.
     */
    stopMediaStream(mediaStream: any): void;
    /**
     * Returns whether the desktop sharing is enabled or not.
     * @returns {boolean}
     */
    isDesktopSharingEnabled(): boolean;
    /**
     * Sets current audio output device.
     * @param {string} deviceId - id of 'audiooutput' device from
     *      navigator.mediaDevices.enumerateDevices(), 'default' for default
     *      device
     * @returns {Promise} - resolves when audio output is changed, is rejected
     *      otherwise
     */
    setAudioOutputDevice(deviceId: string): Promise<any>;
    /**
     * Sets the capture frame rate for desktop tracks.
     *
     * @param {number} maxFps - max fps to be used as the capture frame rate.
     * @returns {void}
     */
    setDesktopSharingFrameRate(maxFps: number): void;
    /**
     * Returns currently used audio output device id, '' stands for default
     * device
     * @returns {string}
     */
    getAudioOutputDevice(): string;
    /**
     * Returns list of available media devices if its obtained, otherwise an
     * empty array is returned.
     * @returns {Array} list of available media devices.
     */
    getCurrentlyAvailableMediaDevices(): any[];
    /**
     * Returns whether available devices have permissions granted
     * @returns {Boolean}
     */
    arePermissionsGrantedForAvailableDevices(): boolean;
    /**
     * Returns event data for device to be reported to stats.
     * @returns {MediaDeviceInfo} device.
     */
    getEventDataForActiveDevice(device: any): MediaDeviceInfo;
    /**
     * Configures the given PeerConnection constraints to either enable or
     * disable (according to the value of the 'enable' parameter) the
     * 'googSuspendBelowMinBitrate' option.
     * @param constraints the constraints on which to operate.
     * @param enable {boolean} whether to enable or disable the suspend video
     * option.
     */
    setSuspendVideo(constraints: any, enable: boolean): void;
}
import Listenable from "../util/Listenable";

+ 118
- 0
types/auto/modules/RTC/ScreenObtainer.d.ts View File

@@ -0,0 +1,118 @@
1
+/**
2
+ * The default frame rate for Screen Sharing.
3
+ */
4
+export const SS_DEFAULT_FRAME_RATE: 5;
5
+export default ScreenObtainer;
6
+declare namespace ScreenObtainer {
7
+    const obtainStream: any;
8
+    /**
9
+     * Initializes the function used to obtain a screen capture
10
+     * (this.obtainStream).
11
+     *
12
+     * @param {object} options
13
+     */
14
+    function init(options?: any): void;
15
+    /**
16
+     * Initializes the function used to obtain a screen capture
17
+     * (this.obtainStream).
18
+     *
19
+     * @param {object} options
20
+     */
21
+    function init(options?: any): void;
22
+    /**
23
+     * Returns a method which will be used to obtain the screen sharing stream
24
+     * (based on the browser type).
25
+     *
26
+     * @returns {Function}
27
+     * @private
28
+     */
29
+    function _createObtainStreamMethod(): Function;
30
+    /**
31
+     * Returns a method which will be used to obtain the screen sharing stream
32
+     * (based on the browser type).
33
+     *
34
+     * @returns {Function}
35
+     * @private
36
+     */
37
+    function _createObtainStreamMethod(): Function;
38
+    /**
39
+     * Gets the appropriate constraints for audio sharing.
40
+     *
41
+     * @returns {Object|boolean}
42
+     */
43
+    function _getAudioConstraints(): any;
44
+    /**
45
+     * Gets the appropriate constraints for audio sharing.
46
+     *
47
+     * @returns {Object|boolean}
48
+     */
49
+    function _getAudioConstraints(): any;
50
+    /**
51
+     * Checks whether obtaining a screen capture is supported in the current
52
+     * environment.
53
+     * @returns {boolean}
54
+     */
55
+    function isSupported(): boolean;
56
+    /**
57
+     * Checks whether obtaining a screen capture is supported in the current
58
+     * environment.
59
+     * @returns {boolean}
60
+     */
61
+    function isSupported(): boolean;
62
+    /**
63
+     * Obtains a screen capture stream on Electron.
64
+     *
65
+     * @param onSuccess - Success callback.
66
+     * @param onFailure - Failure callback.
67
+     */
68
+    function obtainScreenOnElectron(onSuccess: any, onFailure: any): void;
69
+    /**
70
+     * Obtains a screen capture stream on Electron.
71
+     *
72
+     * @param onSuccess - Success callback.
73
+     * @param onFailure - Failure callback.
74
+     */
75
+    function obtainScreenOnElectron(onSuccess: any, onFailure: any): void;
76
+    /**
77
+     * Obtains a screen capture stream using getDisplayMedia.
78
+     *
79
+     * @param callback - The success callback.
80
+     * @param errorCallback - The error callback.
81
+     */
82
+    function obtainScreenFromGetDisplayMedia(callback: any, errorCallback: any): void;
83
+    /**
84
+     * Obtains a screen capture stream using getDisplayMedia.
85
+     *
86
+     * @param callback - The success callback.
87
+     * @param errorCallback - The error callback.
88
+     */
89
+    function obtainScreenFromGetDisplayMedia(callback: any, errorCallback: any): void;
90
+    /**
91
+     * Obtains a screen capture stream using getDisplayMedia.
92
+     *
93
+     * @param callback - The success callback.
94
+     * @param errorCallback - The error callback.
95
+     */
96
+    function obtainScreenFromGetDisplayMediaRN(callback: any, errorCallback: any): void;
97
+    /**
98
+     * Obtains a screen capture stream using getDisplayMedia.
99
+     *
100
+     * @param callback - The success callback.
101
+     * @param errorCallback - The error callback.
102
+     */
103
+    function obtainScreenFromGetDisplayMediaRN(callback: any, errorCallback: any): void;
104
+    /**
105
+     * Sets the max frame rate to be used for a desktop track capture.
106
+     *
107
+     * @param {number} maxFps capture frame rate to be used for desktop tracks.
108
+     * @returns {void}
109
+     */
110
+    function setDesktopSharingFrameRate(maxFps: number): void;
111
+    /**
112
+     * Sets the max frame rate to be used for a desktop track capture.
113
+     *
114
+     * @param {number} maxFps capture frame rate to be used for desktop tracks.
115
+     * @returns {void}
116
+     */
117
+    function setDesktopSharingFrameRate(maxFps: number): void;
118
+}

+ 161
- 0
types/auto/modules/RTC/TPCUtils.d.ts View File

@@ -0,0 +1,161 @@
1
/** Max bitrate applied to the high-definition simulcast encoding (value in bps — TODO confirm units against the implementation). */
export const HD_BITRATE: 2500000;
/** scaleResolutionDownBy factor for the high-definition simulcast layer (1 = no downscaling). */
export const HD_SCALE_FACTOR: 1;
/** scaleResolutionDownBy factor for the low-definition simulcast layer. */
export const LD_SCALE_FACTOR: 4;
/** scaleResolutionDownBy factor for the standard-definition simulcast layer. */
export const SD_SCALE_FACTOR: 2;
/** RTP stream IDs (rids) configured on the simulcast encodings. */
export const SIM_LAYER_RIDS: string[];
6
/**
 * Handles track related operations on TraceablePeerConnection when browser is
 * running in unified plan mode.
 */
export class TPCUtils {
    /**
     * Creates a new instance for a given TraceablePeerConnection
     *
     * @param peerconnection - the tpc instance for which we have utility functions.
     */
    constructor(peerconnection: any);
    // Presumably the peerconnection passed to the constructor — confirm against the implementation.
    pc: any;
    // NOTE(review): shape undocumented by the generator; looks like the configured per-layer video bitrates — confirm.
    videoBitrates: any;
    /**
     * The startup configuration for the stream encodings that are applicable to
     * the video stream when a new sender is created on the peerconnection. The initial
     * config takes into account the differences in browser's simulcast implementation.
     *
     * Encoding parameters:
     * active - determine the on/off state of a particular encoding.
     * maxBitrate - max. bitrate value to be applied to that particular encoding
     *  based on the encoding's resolution and config.js videoQuality settings if applicable.
     * rid - Rtp Stream ID that is configured for a particular simulcast stream.
     * scaleResolutionDownBy - the factor by which the encoding is scaled down from the
     *  original resolution of the captured video.
     */
    localStreamEncodingsConfig: {
        active: boolean;
        maxBitrate: any;
        rid: string;
        scaleResolutionDownBy: number;
    }[];
    /**
     * Obtains stream encodings that need to be configured on the given track based
     * on the track media type and the simulcast setting.
     * @param {JitsiLocalTrack} localTrack
     */
    _getStreamEncodings(localTrack: any): {
        active: boolean;
        maxBitrate: any;
        rid: string;
        scaleResolutionDownBy: number;
    }[] | {
        active: boolean;
        maxBitrate: any;
    }[] | {
        active: boolean;
    }[];
    /**
     * Ensures that the ssrcs associated with a FID ssrc-group appear in the correct order, i.e.,
     * the primary ssrc first and the secondary rtx ssrc later. This is important for unified
     * plan since we have only one FID group per media description.
     * @param {Object} description the webRTC session description instance for the remote
     * description.
     * @private
     */
    private ensureCorrectOrderOfSsrcs;
    /**
     * Returns the transceiver associated with a given RTCRtpSender/RTCRtpReceiver.
     *
     * @param {string} mediaType - type of track associated with the transceiver 'audio' or 'video'.
     * @param {JitsiLocalTrack} localTrack - local track to be used for lookup.
     * @returns {RTCRtpTransceiver}
     */
    findTransceiver(mediaType: string, localTrack?: any): RTCRtpTransceiver;
    /**
     * Takes in a *unified plan* offer and inserts the appropriate
     * parameters for adding simulcast receive support.
     * @param {Object} desc - A session description object
     * @param {String} desc.type - the type (offer/answer)
     * @param {String} desc.sdp - the sdp content
     *
     * @return {Object} A session description (same format as above) object
     * with its sdp field modified to advertise simulcast receive support
     */
    insertUnifiedPlanSimulcastReceive(desc: {
        type: string;
        sdp: string;
    }): any;
    /**
     * Adds {@link JitsiLocalTrack} to the WebRTC peerconnection for the first time.
     * @param {JitsiLocalTrack} localTrack - track to be added to the peerconnection.
     * @param {boolean} isInitiator - boolean that indicates if the endpoint is offerer in a p2p connection.
     * @returns {void}
     */
    addTrack(localTrack: any, isInitiator: boolean): void;
    /**
     * Returns the calculated active state of the simulcast encodings based on the frame height requested for the send
     * stream. All the encodings that have a resolution lower than the frame height requested will be enabled.
     *
     * @param {JitsiLocalTrack} localVideoTrack The local video track.
     * @param {number} newHeight The resolution requested for the video track.
     * @returns {Array<boolean>}
     */
    calculateEncodingsActiveState(localVideoTrack: any, newHeight: number): Array<boolean>;
    /**
     * Returns the calculated max bitrates that need to be configured on the simulcast encodings based on the video
     * type and other considerations associated with screenshare.
     *
     * @param {JitsiLocalTrack} localVideoTrack The local video track.
     * @returns {Array<number>}
     */
    calculateEncodingsBitrates(localVideoTrack: any): Array<number>;
    /**
     * Replaces the existing track on a RTCRtpSender with the given track.
     * @param {JitsiLocalTrack} oldTrack - existing track on the sender that needs to be removed.
     * @param {JitsiLocalTrack} newTrack - new track that needs to be added to the sender.
     * @returns {Promise<void>} - resolved when done.
     */
    replaceTrack(oldTrack: any, newTrack: any): Promise<void>;
    /**
     * Enables/disables audio transmission on the peer connection. When
     * disabled the audio transceiver direction will be set to 'inactive'
     * which means that no data will be sent nor accepted, but
     * the connection should be kept alive.
     * @param {boolean} active - true to enable audio media transmission or
     * false to disable.
     * @returns {void}
     */
    setAudioTransferActive(active: boolean): void;
    /**
     * Set the simulcast stream encoding properties on the RTCRtpSender.
     * @param {JitsiLocalTrack} track - the current track in use for which
     * the encodings are to be set.
     * @returns {Promise<void>} - resolved when done.
     */
    setEncodings(track: any): Promise<void>;
    /**
     * Enables/disables media transmission on the peerconnection by changing the direction
     * on the transceiver for the specified media type.
     * @param {String} mediaType - 'audio' or 'video'
     * @param {boolean} active - true to enable media transmission or false
     * to disable.
     * @returns {void}
     */
    setMediaTransferActive(mediaType: string, active: boolean): void;
    /**
     * Enables/disables video media transmission on the peer connection. When
     * disabled the SDP video media direction in the local SDP will be adjusted to
     * 'inactive' which means that no data will be sent nor accepted, but
     * the connection should be kept alive.
     * @param {boolean} active - true to enable video media transmission or
     * false to disable.
     * @returns {void}
     */
    setVideoTransferActive(active: boolean): void;
    /**
     * Ensures that the resolution of the stream encodings are consistent with the values
     * that were configured on the RTCRtpSender when the source was added to the peerconnection.
     * This should prevent us from overriding the default values if the browser returns
     * erroneous values when RTCRtpSender.getParameters is used for getting the encodings info.
     * @param {Object} parameters - the RTCRtpEncodingParameters obtained from the browser.
     * @returns {void}
     */
    updateEncodingsResolution(parameters: any): void;
}

+ 785
- 0
types/auto/modules/RTC/TraceablePeerConnection.d.ts View File

@@ -0,0 +1,785 @@
1
/**
 * Creates new instance of 'TraceablePeerConnection'.
 *
 * NOTE(review): this module ALSO declares `export default class
 * TraceablePeerConnection` immediately below — two default exports in one
 * module, which tsc rejects ("A module cannot have multiple default
 * exports"). This looks like an artifact of automatic declaration emit from
 * a JSDoc'd constructor function; confirm and keep only one of the two.
 *
 * @param {RTC} rtc the instance of <tt>RTC</tt> service
 * @param {number} id the peer connection id assigned by the parent RTC module.
 * @param {SignalingLayer} signalingLayer the signaling layer instance
 * @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
 * @param {object} constraints WebRTC 'PeerConnection' constraints
 * @param {boolean} isP2P indicates whether or not the new instance will be used in a peer to peer connection.
 * @param {object} options <tt>TracablePeerConnection</tt> config options.
 * @param {boolean} options.disableSimulcast if set to 'true' will disable the simulcast.
 * @param {boolean} options.disableRtx if set to 'true' will disable the RTX.
 * @param {string} options.disabledCodec the mime type of the codec that should not be negotiated on the peerconnection.
 * @param {string} options.preferredCodec the mime type of the codec that needs to be made the preferred codec for the
 * peerconnection.
 * @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
 * @param {boolean} options.usesUnifiedPlan Indicates if the  browser is running in unified plan mode.
 *
 * FIXME: initially the purpose of TraceablePeerConnection was to be able to
 * debug the peer connection. Since many other responsibilities have been added
 * it would make sense to extract a separate class from it and come up with
 * a more suitable name.
 *
 * @constructor
 */
export default function TraceablePeerConnection(rtc: RTC, id: number, signalingLayer: any, pcConfig: object, constraints: object, isP2P: boolean, options: {
    disableSimulcast: boolean;
    disableRtx: boolean;
    disabledCodec: string;
    preferredCodec: string;
    startSilent: boolean;
    usesUnifiedPlan: boolean;
}): void;
34
+export default class TraceablePeerConnection {
35
+    /**
36
+     * Creates new instance of 'TraceablePeerConnection'.
37
+     *
38
+     * @param {RTC} rtc the instance of <tt>RTC</tt> service
39
+     * @param {number} id the peer connection id assigned by the parent RTC module.
40
+     * @param {SignalingLayer} signalingLayer the signaling layer instance
41
+     * @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
42
+     * @param {object} constraints WebRTC 'PeerConnection' constraints
43
+     * @param {boolean} isP2P indicates whether or not the new instance will be used in a peer to peer connection.
44
+     * @param {object} options <tt>TracablePeerConnection</tt> config options.
45
+     * @param {boolean} options.disableSimulcast if set to 'true' will disable the simulcast.
46
+     * @param {boolean} options.disableRtx if set to 'true' will disable the RTX.
47
+     * @param {string} options.disabledCodec the mime type of the code that should not be negotiated on the peerconnection.
48
+     * @param {string} options.preferredCodec the mime type of the codec that needs to be made the preferred codec for the
49
+     * peerconnection.
50
+     * @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
51
+     * @param {boolean} options.usesUnifiedPlan Indicates if the  browser is running in unified plan mode.
52
+     *
53
+     * FIXME: initially the purpose of TraceablePeerConnection was to be able to
54
+     * debug the peer connection. Since many other responsibilities have been added
55
+     * it would make sense to extract a separate class from it and come up with
56
+     * a more suitable name.
57
+     *
58
+     * @constructor
59
+     */
60
+    constructor(rtc: RTC, id: number, signalingLayer: any, pcConfig: object, constraints: object, isP2P: boolean, options: {
61
+        disableSimulcast: boolean;
62
+        disableRtx: boolean;
63
+        disabledCodec: string;
64
+        preferredCodec: string;
65
+        startSilent: boolean;
66
+        usesUnifiedPlan: boolean;
67
+    });
68
+    /**
69
+     * Indicates whether or not this peer connection instance is actively
70
+     * sending/receiving audio media. When set to <tt>false</tt> the SDP audio
71
+     * media direction will be adjusted to 'inactive' in order to suspend
72
+     * the transmission.
73
+     * @type {boolean}
74
+     * @private
75
+     */
76
+    private audioTransferActive;
77
+    /**
78
+     * The DTMF sender instance used to send DTMF tones.
79
+     *
80
+     * @type {RTCDTMFSender|undefined}
81
+     * @private
82
+     */
83
+    private _dtmfSender;
84
+    /**
85
+     * @typedef {Object} TouchToneRequest
86
+     * @property {string} tones - The DTMF tones string as defined by
87
+     * {@code RTCDTMFSender.insertDTMF}, 'tones' argument.
88
+     * @property {number} duration - The amount of time in milliseconds that
89
+     * each DTMF should last.
90
+     * @property {string} interToneGap - The length of time in miliseconds to
91
+     * wait between tones.
92
+     */
93
+    /**
94
+     * TouchToneRequests which are waiting to be played. This queue is filled
95
+     * if there are touch tones currently being played.
96
+     *
97
+     * @type {Array<TouchToneRequest>}
98
+     * @private
99
+     */
100
+    private _dtmfTonesQueue;
101
+    /**
102
+     * Indicates whether or not this peer connection instance is actively
103
+     * sending/receiving video media. When set to <tt>false</tt> the SDP video
104
+     * media direction will be adjusted to 'inactive' in order to suspend
105
+     * the transmission.
106
+     * @type {boolean}
107
+     * @private
108
+     */
109
+    private videoTransferActive;
110
+    /**
111
+     * The parent instance of RTC service which created this
112
+     * <tt>TracablePeerConnection</tt>.
113
+     * @type {RTC}
114
+     */
115
+    rtc: RTC;
116
+    /**
117
+     * The peer connection identifier assigned by the RTC module.
118
+     * @type {number}
119
+     */
120
+    id: number;
121
+    /**
122
+     * Indicates whether or not this instance is used in a peer to peer
123
+     * connection.
124
+     * @type {boolean}
125
+     */
126
+    isP2P: boolean;
127
+    /**
128
+     * The map holds remote tracks associated with this peer connection.
129
+     * It maps user's JID to media type and remote track
130
+     * (one track per media type per user's JID).
131
+     * @type {Map<string, Map<MediaType, JitsiRemoteTrack>>}
132
+     */
133
+    remoteTracks: Map<string, Map<typeof MediaType, JitsiRemoteTrack>>;
134
+    /**
135
+     * A map which stores local tracks mapped by {@link JitsiLocalTrack.rtcId}
136
+     * @type {Map<number, JitsiLocalTrack>}
137
+     */
138
+    localTracks: Map<number, any>;
139
+    /**
140
+     * Keeps tracks of the WebRTC <tt>MediaStream</tt>s that have been added to
141
+     * the underlying WebRTC PeerConnection.
142
+     * @type {Array}
143
+     * @private
144
+     */
145
+    private _addedStreams;
146
+    /**
147
+     * @typedef {Object} TPCGroupInfo
148
+     * @property {string} semantics the SSRC groups semantics
149
+     * @property {Array<number>} ssrcs group's SSRCs in order where the first
150
+     * one is group's primary SSRC, the second one is secondary (RTX) and so
151
+     * on...
152
+     */
153
+    /**
154
+     * @typedef {Object} TPCSSRCInfo
155
+     * @property {Array<number>} ssrcs an array which holds all track's SSRCs
156
+     * @property {Array<TPCGroupInfo>} groups an array stores all track's SSRC
157
+     * groups
158
+     */
159
+    /**
160
+     * Holds the info about local track's SSRCs mapped per their
161
+     * {@link JitsiLocalTrack.rtcId}
162
+     * @type {Map<number, TPCSSRCInfo>}
163
+     */
164
+    localSSRCs: Map<number, {
165
+        /**
166
+         * an array which holds all track's SSRCs
167
+         */
168
+        ssrcs: Array<number>;
169
+        /**
170
+         * an array stores all track's SSRC
171
+         * groups
172
+         */
173
+        groups: {
174
+            /**
175
+             * the SSRC groups semantics
176
+             */
177
+            semantics: string;
178
+            /**
179
+             * group's SSRCs in order where the first
180
+             * one is group's primary SSRC, the second one is secondary (RTX) and so
181
+             * on...
182
+             */
183
+            ssrcs: Array<number>;
184
+        }[];
185
+    }>;
186
+    /**
187
+     * The local ICE username fragment for this session.
188
+     */
189
+    localUfrag: any;
190
+    /**
191
+     * The remote ICE username fragment for this session.
192
+     */
193
+    remoteUfrag: any;
194
+    /**
195
+     * The DTLS transport object for the PeerConnection.
196
+     * Note: this assume only one shared transport exists because we bundled
197
+     *       all streams on the same underlying transport.
198
+     */
199
+    _dtlsTransport: RTCDtlsTransport;
200
+    /**
201
+     * The signaling layer which operates this peer connection.
202
+     * @type {SignalingLayer}
203
+     */
204
+    signalingLayer: any;
205
+    _peerVideoTypeChanged: any;
206
+    _peerMutedChanged: any;
207
+    options: {
208
+        disableSimulcast: boolean;
209
+        disableRtx: boolean;
210
+        disabledCodec: string;
211
+        preferredCodec: string;
212
+        startSilent: boolean;
213
+        usesUnifiedPlan: boolean;
214
+    };
215
+    peerconnection: RTCPeerConnection;
216
+    tpcUtils: TPCUtils;
217
+    updateLog: any[];
218
+    stats: {};
219
+    statsinterval: number;
220
+    /**
221
+     * Flag used to indicate if simulcast is turned off and a cap of 500 Kbps is applied on screensharing.
222
+     */
223
+    _capScreenshareBitrate: any;
224
+    /**
225
+    * Flag used to indicate if the browser is running in unified  plan mode.
226
+    */
227
+    _usesUnifiedPlan: boolean;
228
+    /**
229
+     * Flag used to indicate if RTCRtpTransceiver#setCodecPreferences is to be used instead of SDP
230
+     * munging for codec selection.
231
+     */
232
+    _usesTransceiverCodecPreferences: boolean;
233
+    /**
234
+     * @type {number} The max number of stats to keep in this.stats. Limit to
235
+     * 300 values, i.e. 5 minutes; set to 0 to disable
236
+     */
237
+    maxstats: number;
238
+    interop: any;
239
+    simulcast: any;
240
+    sdpConsistency: SdpConsistency;
241
+    /**
242
+     * Munges local SDP provided to the Jingle Session in order to prevent from
243
+     * sending SSRC updates on attach/detach and mute/unmute (for video).
244
+     * @type {LocalSdpMunger}
245
+     */
246
+    localSdpMunger: LocalSdpMunger;
247
+    /**
248
+     * TracablePeerConnection uses RTC's eventEmitter
249
+     * @type {EventEmitter}
250
+     */
251
+    eventEmitter: any;
252
+    rtxModifier: RtxModifier;
253
+    /**
254
+     * The height constraint applied on the video sender. The default value is 2160 (4K) when layer suspension is
255
+     * explicitly disabled.
256
+     */
257
+    _senderVideoMaxHeight: number;
258
+    trace: (what: any, info: any) => void;
259
+    onicecandidate: any;
260
+    onTrack: (evt: any) => void;
261
+    onsignalingstatechange: any;
262
+    oniceconnectionstatechange: any;
263
+    onnegotiationneeded: any;
264
+    onconnectionstatechange: any;
265
+    ondatachannel: any;
266
+    private _processStat;
267
+    /**
268
+     * Forwards the {@link peerconnection.iceConnectionState} state except that it
269
+     * will convert "completed" into "connected" where both mean that the ICE has
270
+     * succeeded and is up and running. We never see "completed" state for
271
+     * the JVB connection, but it started appearing for the P2P one. This method
272
+     * allows to adapt old logic to this new situation.
273
+     * @return {string}
274
+     */
275
+    getConnectionState(): string;
276
+    private getDesiredMediaDirection;
277
+    /**
278
+     * Returns the list of RTCRtpReceivers created for the source of the given media type associated with
279
+     * the set of remote endpoints specified.
280
+     * @param {Array<string>} endpoints list of the endpoints
281
+     * @param {string} mediaType 'audio' or 'video'
282
+     * @returns {Array<RTCRtpReceiver>} list of receivers created by the peerconnection.
283
+     */
284
+    _getReceiversByEndpointIds(endpoints: Array<string>, mediaType: string): Array<RTCRtpReceiver>;
285
+    /**
286
+     * Tells whether or not this TPC instance is using Simulcast.
287
+     * @return {boolean} <tt>true</tt> if simulcast is enabled and active or
288
+     * <tt>false</tt> if it's turned off.
289
+     */
290
+    isSimulcastOn(): boolean;
291
+    /**
292
+     * Obtains audio levels of the remote audio tracks by getting the source information on the RTCRtpReceivers.
293
+     * The information relevant to the ssrc is updated each time a RTP packet constaining the ssrc is received.
294
+     * @param {Array<string>} speakerList list of endpoint ids for which audio levels are to be gathered.
295
+     * @returns {Object} containing ssrc and audio level information as a key-value pair.
296
+     */
297
+    getAudioLevels(speakerList?: Array<string>): any;
298
+    /**
299
+     * Obtains local tracks for given {@link MediaType}. If the <tt>mediaType</tt>
300
+     * argument is omitted the list of all local tracks will be returned.
301
+     * @param {MediaType} [mediaType]
302
+     * @return {Array<JitsiLocalTrack>}
303
+     */
304
+    getLocalTracks(mediaType?: typeof MediaType): Array<any>;
305
+    /**
306
+     * Retrieves the local video track.
307
+     *
308
+     * @returns {JitsiLocalTrack|undefined} - local video track.
309
+     */
310
+    getLocalVideoTrack(): any | undefined;
311
+    /**
312
+     * Checks whether or not this {@link TraceablePeerConnection} instance contains
313
+     * any local tracks for given <tt>mediaType</tt>.
314
+     * @param {MediaType} mediaType
315
+     * @return {boolean}
316
+     */
317
+    hasAnyTracksOfType(mediaType: typeof MediaType): boolean;
318
+    /**
319
+     * Obtains all remote tracks currently known to this PeerConnection instance.
320
+     * @param {string} [endpointId] the track owner's identifier (MUC nickname)
321
+     * @param {MediaType} [mediaType] the remote tracks will be filtered
322
+     * by their media type if this argument is specified.
323
+     * @return {Array<JitsiRemoteTrack>}
324
+     */
325
+    getRemoteTracks(endpointId?: string, mediaType?: typeof MediaType): Array<JitsiRemoteTrack>;
326
+    /**
327
+     * Parses the remote description and returns the sdp lines of the sources associated with a remote participant.
328
+     *
329
+     * @param {string} id Endpoint id of the remote participant.
330
+     * @returns {Array<string>} The sdp lines that have the ssrc information.
331
+     */
332
+    getRemoteSourceInfoByParticipant(id: string): Array<string>;
333
+    /**
334
+     * Returns the target bitrates configured for the local video source.
335
+     *
336
+     * @returns {Object}
337
+     */
338
+    getTargetVideoBitrates(): any;
339
+    /**
340
+     * Tries to find {@link JitsiTrack} for given SSRC number. It will search both
341
+     * local and remote tracks bound to this instance.
342
+     * @param {number} ssrc
343
+     * @return {JitsiTrack|null}
344
+     */
345
+    getTrackBySSRC(ssrc: number): any | null;
346
+    /**
347
+     * Tries to find SSRC number for given {@link JitsiTrack} id. It will search
348
+     * both local and remote tracks bound to this instance.
349
+     * @param {string} id
350
+     * @return {number|null}
351
+     */
352
+    getSsrcByTrackId(id: string): number | null;
353
+    /**
354
+     * Called when new remote MediaStream is added to the PeerConnection.
355
+     * @param {MediaStream} stream the WebRTC MediaStream for remote participant
356
+     */
357
+    _remoteStreamAdded(stream: MediaStream): void;
358
+    /**
359
+     * Called on "track added" and "stream added" PeerConnection events (because we
360
+     * handle streams on per track basis). Finds the owner and the SSRC for
361
+     * the track and passes that to ChatRoom for further processing.
362
+     * @param {MediaStream} stream the WebRTC MediaStream instance which is
363
+     * the parent of the track
364
+     * @param {MediaStreamTrack} track the WebRTC MediaStreamTrack added for remote
365
+     * participant.
366
+     * @param {RTCRtpTransceiver} transceiver the WebRTC transceiver that is created
367
+     * for the remote participant in unified plan.
368
+     */
369
+    _remoteTrackAdded(stream: MediaStream, track: MediaStreamTrack, transceiver?: RTCRtpTransceiver): void;
370
+    /**
371
+     * Initializes a new JitsiRemoteTrack instance with the data provided by
372
+     * the signaling layer and SDP.
373
+     *
374
+     * @param {string} ownerEndpointId the owner's endpoint ID (MUC nickname)
375
+     * @param {MediaStream} stream the WebRTC stream instance
376
+     * @param {MediaStreamTrack} track the WebRTC track instance
377
+     * @param {MediaType} mediaType the track's type of the media
378
+     * @param {VideoType} [videoType] the track's type of the video (if applicable)
379
+     * @param {number} ssrc the track's main SSRC number
380
+     * @param {boolean} muted the initial muted status
381
+     * @param {String} sourceName the track's source name
382
+     */
383
+    _createRemoteTrack(ownerEndpointId: string, stream: MediaStream, track: MediaStreamTrack, mediaType: typeof MediaType, videoType?: {
384
+        CAMERA: string;
385
+        DESKTOP: string;
386
+    }, ssrc: number, muted: boolean, sourceName: string): void;
387
+    /**
388
+     * Handles remote stream removal.
389
+     * @param stream the WebRTC MediaStream object which is being removed from the
390
+     * PeerConnection
391
+     */
392
+    _remoteStreamRemoved(stream: any): void;
393
+    /**
394
+     * Handles remote media track removal.
395
+     * @param {MediaStream} stream WebRTC MediaStream instance which is the parent
396
+     * of the track.
397
+     * @param {MediaStreamTrack} track the WebRTC MediaStreamTrack which has been
398
+     * removed from the PeerConnection.
399
+     */
400
+    _remoteTrackRemoved(stream: MediaStream, track: MediaStreamTrack): void;
401
+    private _getRemoteTrackById;
402
+    /**
403
+     * Removes all JitsiRemoteTracks associated with given MUC nickname
404
+     * (resource part of the JID). Returns array of removed tracks.
405
+     *
406
+     * @param {string} owner - The resource part of the MUC JID.
407
+     * @returns {JitsiRemoteTrack[]}
408
+     */
409
+    removeRemoteTracks(owner: string): JitsiRemoteTrack[];
410
+    /**
411
+     * Removes and disposes given <tt>JitsiRemoteTrack</tt> instance. Emits
412
+     * {@link RTCEvents.REMOTE_TRACK_REMOVED}.
413
+     * @param {JitsiRemoteTrack} toBeRemoved
414
+     */
415
+    _removeRemoteTrack(toBeRemoved: JitsiRemoteTrack): void;
416
+    /**
417
+     * Removes and disposes <tt>JitsiRemoteTrack</tt> identified by given stream and
418
+     * track ids.
419
+     *
420
+     * @param {string} streamId the media stream id as defined by the WebRTC
421
+     * @param {string} trackId the media track id as defined by the WebRTC
422
+     * @returns {JitsiRemoteTrack|undefined} the track which has been removed or
423
+     * <tt>undefined</tt> if no track matching given stream and track ids was
424
+     * found.
425
+     */
426
+    _removeRemoteTrackById(streamId: string, trackId: string): JitsiRemoteTrack | undefined;
427
+    /**
428
+     * Returns a map with keys msid/mediaType and <tt>TrackSSRCInfo</tt> values.
429
+     * @param {RTCSessionDescription} desc the local description.
430
+     * @return {Map<string,TrackSSRCInfo>}
431
+     */
432
+    _extractSSRCMap(desc: RTCSessionDescription): Map<string, any>;
433
+    /**
434
+     *
435
+     * @param {JitsiLocalTrack} localTrack
436
+     */
437
+    getLocalSSRC(localTrack: any): number;
438
+    /**
439
+     * When doing unified plan simulcast, we'll have a set of ssrcs with the
440
+     * same msid but no ssrc-group, since unified plan signals the simulcast
441
+     * group via the a=simulcast line.  Unfortunately, Jicofo will complain
442
+     * if it sees ssrcs with matching msids but no ssrc-group, so we'll inject
443
+     * an ssrc-group line to make Jicofo happy.
444
+     * @param desc A session description object (with 'type' and 'sdp' fields)
445
+     * @return A session description object with its sdp field modified to
446
+     * contain an inject ssrc-group for simulcast
447
+     */
448
+    _injectSsrcGroupForUnifiedSimulcast(desc: any): any;
449
+    _getSSRC(rtcId: any): {
450
+        /**
451
+         * an array which holds all track's SSRCs
452
+         */
453
+        ssrcs: Array<number>;
454
+        /**
455
+         * an array stores all track's SSRC
456
+         * groups
457
+         */
458
+        groups: {
459
+            /**
460
+             * the SSRC groups semantics
461
+             */
462
+            semantics: string;
463
+            /**
464
+             * group's SSRCs in order where the first
465
+             * one is group's primary SSRC, the second one is secondary (RTX) and so
466
+             * on...
467
+             */
468
+            ssrcs: Array<number>;
469
+        }[];
470
+    };
471
+    private isSharingLowFpsScreen;
472
+    /**
473
+     * Checks if screensharing is in progress.
474
+     *
475
+     * @returns {boolean}  Returns true if a desktop track has been added to the
476
+     * peerconnection, false otherwise.
477
+     */
478
+    _isSharingScreen(): boolean;
479
+    /**
480
+     * Munges the order of the codecs in the SDP passed based on the preference
481
+     * set through config.js settings. All instances of the specified codec are
482
+     * moved up to the top of the list when it is preferred. The specified codec
483
+     * is deleted from the list if the configuration specifies that the codec be
484
+     * disabled.
485
+     * @param {RTCSessionDescription} description that needs to be munged.
486
+     * @returns {RTCSessionDescription} the munged description.
487
+     */
488
+    _mungeCodecOrder(description: RTCSessionDescription): RTCSessionDescription;
489
+    /**
490
+     * Add {@link JitsiLocalTrack} to this TPC.
491
+     * @param {JitsiLocalTrack} track
492
+     * @param {boolean} isInitiator indicates if the endpoint is the offerer.
493
+     * @returns {Promise<void>} - resolved when done.
494
+     */
495
+    addTrack(track: any, isInitiator?: boolean): Promise<void>;
496
+    /**
497
+     * Adds local track as part of the unmute operation.
498
+     * @param {JitsiLocalTrack} track the track to be added as part of the unmute operation.
499
+     *
500
+     * @return {Promise<boolean>} Promise that resolves to true if the underlying PeerConnection's
501
+     * state has changed and renegotiation is required, false if no renegotiation is needed or
502
+     * Promise is rejected when something goes wrong.
503
+     */
504
+    addTrackUnmute(track: any): Promise<boolean>;
505
+    private _addStream;
506
+    /**
507
+     * Removes WebRTC media stream from the underlying PeerConection
508
+     * @param {MediaStream} mediaStream
509
+     */
510
+    _removeStream(mediaStream: MediaStream): void;
511
+    private _assertTrackBelongs;
512
+    /**
513
+     * Returns the codec that is configured on the client as the preferred video codec.
514
+     * This takes into account the current order of codecs in the local description sdp.
515
+     *
516
+     * @returns {CodecMimeType} The codec that is set as the preferred codec to receive
517
+     * video in the local SDP.
518
+     */
519
+    getConfiguredVideoCodec(): {
520
+        H264: string;
521
+        OPUS: string;
522
+        ULPFEC: string;
523
+        VP8: string;
524
+        VP9: string;
525
+    };
526
+    /**
527
+     * Enables or disables simulcast for screenshare based on the frame rate requested for desktop track capture.
528
+     *
529
+     * @param {number} maxFps framerate to be used for desktop track capture.
530
+     */
531
+    setDesktopSharingFrameRate(maxFps: number): void;
532
+    /**
533
+     * Sets the codec preference on the peerconnection. The codec preference goes into effect when
534
+     * the next renegotiation happens.
535
+     *
536
+     * @param {CodecMimeType} preferredCodec the preferred codec.
537
+     * @param {CodecMimeType} disabledCodec the codec that needs to be disabled.
538
+     * @returns {void}
539
+     */
540
+    setVideoCodecs(preferredCodec?: {
541
+        H264: string;
542
+        OPUS: string;
543
+        ULPFEC: string;
544
+        VP8: string;
545
+        VP9: string;
546
+    }, disabledCodec?: {
547
+        H264: string;
548
+        OPUS: string;
549
+        ULPFEC: string;
550
+        VP8: string;
551
+        VP9: string;
552
+    }): void;
553
+    codecPreference: {
554
+        enable: boolean;
555
+        mediaType: string;
556
+        mimeType: {
557
+            H264: string;
558
+            OPUS: string;
559
+            ULPFEC: string;
560
+            VP8: string;
561
+            VP9: string;
562
+        };
563
+    };
564
+    /**
565
+     * Tells if the given WebRTC <tt>MediaStream</tt> has been added to
566
+     * the underlying WebRTC PeerConnection.
567
+     * @param {MediaStream} mediaStream
568
+     * @returns {boolean}
569
+     */
570
+    isMediaStreamInPc(mediaStream: MediaStream): boolean;
571
+    /**
572
+     * Remove local track from this TPC.
573
+     * @param {JitsiLocalTrack} localTrack the track to be removed from this TPC.
574
+     *
575
+     * FIXME It should probably remove a boolean just like {@link removeTrackMute}
576
+     *       The same applies to addTrack.
577
+     */
578
+    removeTrack(localTrack: any): void;
579
+    /**
580
+     * Returns the sender corresponding to the given media type.
581
+     * @param {MEDIA_TYPE} mediaType - The media type 'audio' or 'video' to be used for the search.
582
+     * @returns {RTPSender|undefined} - The found sender or undefined if no sender
583
+     * was found.
584
+     */
585
+    findSenderByKind(mediaType: any): any | undefined;
586
+    /**
587
+     * Returns the receiver corresponding to the given MediaStreamTrack.
588
+     *
589
+     * @param {MediaSreamTrack} track - The media stream track used for the search.
590
+     * @returns {RTCRtpReceiver|undefined} - The found receiver or undefined if no receiver
591
+     * was found.
592
+     */
593
+    findReceiverForTrack(track: any): RTCRtpReceiver | undefined;
594
+    /**
595
+     * Returns the sender corresponding to the given MediaStreamTrack.
596
+     *
597
+     * @param {MediaSreamTrack} track - The media stream track used for the search.
598
+     * @returns {RTCRtpSender|undefined} - The found sender or undefined if no sender
599
+     * was found.
600
+     */
601
+    findSenderForTrack(track: any): RTCRtpSender | undefined;
602
+    /**
603
+     * Replaces <tt>oldTrack</tt> with <tt>newTrack</tt> from the peer connection.
604
+     * Either <tt>oldTrack</tt> or <tt>newTrack</tt> can be null; replacing a valid
605
+     * <tt>oldTrack</tt> with a null <tt>newTrack</tt> effectively just removes
606
+     * <tt>oldTrack</tt>
607
+     *
608
+     * @param {JitsiLocalTrack|null} oldTrack - The current track in use to be replaced on the pc.
609
+     * @param {JitsiLocalTrack|null} newTrack - The new track to be used.
610
+     *
611
+     * @returns {Promise<boolean>} - If the promise resolves with true, renegotiation will be needed.
612
+     * Otherwise no renegotiation is needed.
613
+     */
614
+    replaceTrack(oldTrack: any | null, newTrack: any | null): Promise<boolean>;
615
+    /**
616
+     * Removes local track as part of the mute operation.
617
+     * @param {JitsiLocalTrack} localTrack the local track to be remove as part of
618
+     * the mute operation.
619
+     * @return {Promise<boolean>} Promise that resolves to true if the underlying PeerConnection's
620
+     * state has changed and renegotiation is required, false if no renegotiation is needed or
621
+     * Promise is rejected when something goes wrong.
622
+     */
623
+    removeTrackMute(localTrack: any): Promise<boolean>;
624
+    createDataChannel(label: any, opts: any): RTCDataChannel;
625
+    private _ensureSimulcastGroupIsLast;
626
+    private _adjustLocalMediaDirection;
627
+    private _adjustRemoteMediaDirection;
628
+    /**
629
+     * Munges the stereo flag as well as the opusMaxAverageBitrate in the SDP, based
630
+     * on values set through config.js, if present.
631
+     *
632
+     * @param {RTCSessionDescription} description that needs to be munged.
633
+     * @returns {RTCSessionDescription} the munged description.
634
+     */
635
+    _mungeOpus(description: RTCSessionDescription): RTCSessionDescription;
636
+    /**
637
+     * Sets up the _dtlsTransport object and initializes callbacks for it.
638
+     */
639
+    _initializeDtlsTransport(): void;
640
+    /**
641
+     * Configures the stream encodings depending on the video type and the bitrates configured.
642
+     *
643
+     * @returns {Promise} promise that will be resolved when the operation is successful and rejected otherwise.
644
+     */
645
+    configureSenderVideoEncodings(): Promise<any>;
646
+    setLocalDescription(description: any): Promise<any>;
647
+    /**
648
+     * Enables/disables audio media transmission on this peer connection. When
649
+     * disabled the SDP audio media direction in the local SDP will be adjusted to
650
+     * 'inactive' which means that no data will be sent nor accepted, but
651
+     * the connection should be kept alive.
652
+     * @param {boolean} active <tt>true</tt> to enable audio media transmission or
653
+     * <tt>false</tt> to disable. If the value is not a boolean the call will have
654
+     * no effect.
655
+     * @return {boolean} <tt>true</tt> if the value has changed and sRD/sLD cycle
656
+     * needs to be executed in order for the changes to take effect or
657
+     * <tt>false</tt> if the given value was the same as the previous one.
658
+     * @public
659
+     */
660
+    public setAudioTransferActive(active: boolean): boolean;
661
+    setRemoteDescription(description: any): Promise<any>;
662
+    /**
663
+     * Changes the resolution of the video stream that is sent to the peer based on the resolution requested by the peer
664
+     * and user preference, sets the degradation preference on the sender based on the video type, configures the maximum
665
+     * bitrates on the send stream.
666
+     *
667
+     * @param {number} frameHeight - The max frame height to be imposed on the outgoing video stream.
668
+     * @returns {Promise} promise that will be resolved when the operation is successful and rejected otherwise.
669
+     */
670
+    setSenderVideoConstraints(frameHeight: number): Promise<any>;
671
+    encodingsEnabledState: boolean[];
672
+    /**
673
+     * Enables/disables video media transmission on this peer connection. When
674
+     * disabled the SDP video media direction in the local SDP will be adjusted to
675
+     * 'inactive' which means that no data will be sent nor accepted, but
676
+     * the connection should be kept alive.
677
+     * @param {boolean} active <tt>true</tt> to enable video media transmission or
678
+     * <tt>false</tt> to disable. If the value is not a boolean the call will have
679
+     * no effect.
680
+     * @return {boolean} <tt>true</tt> if the value has changed and sRD/sLD cycle
681
+     * needs to be executed in order for the changes to take effect or
682
+     * <tt>false</tt> if the given value was the same as the previous one.
683
+     * @public
684
+     */
685
+    public setVideoTransferActive(active: boolean): boolean;
686
+    /**
687
+     * Sends DTMF tones if possible.
688
+     *
689
+     * @param {string} tones - The DTMF tones string as defined by {@code RTCDTMFSender.insertDTMF}, 'tones' argument.
690
+     * @param {number} duration - The amount of time in milliseconds that each DTMF should last. It's 200ms by default.
691
+     * @param {number} interToneGap - The length of time in miliseconds to wait between tones. It's 200ms by default.
692
+     *
693
+     * @returns {void}
694
+     */
695
+    sendTones(tones: string, duration?: number, interToneGap?: number): void;
696
+    private _onToneChange;
697
+    /**
698
+     * Makes the underlying TraceablePeerConnection generate new SSRC for
699
+     * the recvonly video stream.
700
+     */
701
+    generateRecvonlySsrc(): void;
702
+    /**
703
+     * Makes the underlying TraceablePeerConnection forget the current primary video
704
+     * SSRC.
705
+     */
706
+    clearRecvonlySsrc(): void;
707
+    /**
708
+     * Closes underlying WebRTC PeerConnection instance and removes all remote
709
+     * tracks by emitting {@link RTCEvents.REMOTE_TRACK_REMOVED} for each one of
710
+     * them.
711
+     */
712
+    close(): void;
713
+    createAnswer(constraints: any): Promise<any>;
714
+    createOffer(constraints: any): Promise<any>;
715
+    _createOfferOrAnswer(isOffer: any, constraints: any): Promise<any>;
716
+    /**
717
+     * Extract primary SSRC from given {@link TrackSSRCInfo} object.
718
+     * @param {TrackSSRCInfo} ssrcObj
719
+     * @return {number|null} the primary SSRC or <tt>null</tt>
720
+     */
721
+    _extractPrimarySSRC(ssrcObj: any): number | null;
722
+    private _processLocalSSRCsMap;
723
+    addIceCandidate(candidate: any): Promise<void>;
724
+    /**
725
+     * Returns the number of simulcast streams that are currently enabled on the peerconnection.
726
+     *
727
+     * @returns {number} The number of simulcast streams currently enabled or 1 when simulcast is disabled.
728
+     */
729
+    getActiveSimulcastStreams(): number;
730
+    /**
731
+     * Obtains call-related stats from the peer connection.
732
+     *
733
+     * @returns {Promise<Object>} Promise which resolves with data providing statistics about
734
+     * the peerconnection.
735
+     */
736
+    getStats(): Promise<any>;
737
+    /**
738
+     * Generates and stores new SSRC info object for given local track.
739
+     * The method should be called only for a video track being added to this TPC
740
+     * in the muted state (given that the current browser uses this strategy).
741
+     * @param {JitsiLocalTrack} track
742
+     * @return {TPCSSRCInfo}
743
+     */
744
+    generateNewStreamSSRCInfo(track: any): {
745
+        /**
746
+         * an array which holds all track's SSRCs
747
+         */
748
+        ssrcs: Array<number>;
749
+        /**
750
+         * an array stores all track's SSRC
751
+         * groups
752
+         */
753
+        groups: {
754
+            /**
755
+             * the SSRC groups semantics
756
+             */
757
+            semantics: string;
758
+            /**
759
+             * group's SSRCs in order where the first
760
+             * one is group's primary SSRC, the second one is secondary (RTX) and so
761
+             * on...
762
+             */
763
+            ssrcs: Array<number>;
764
+        }[];
765
+    };
766
+    /**
767
+     * Returns if the peer connection uses Unified plan implementation.
768
+     *
769
+     * @returns {boolean} True if the pc uses Unified plan, false otherwise.
770
+     */
771
+    usesUnifiedPlan(): boolean;
772
+    /**
773
+     * Creates a text representation of this <tt>TraceablePeerConnection</tt>
774
+     * instance.
775
+     * @return {string}
776
+     */
777
+    toString(): string;
778
+}
779
+import RTC from "./RTC";
780
+import * as MediaType from "../../service/RTC/MediaType";
781
+import JitsiRemoteTrack from "./JitsiRemoteTrack";
782
+import { TPCUtils } from "./TPCUtils";
783
+import SdpConsistency from "../sdp/SdpConsistency";
784
+import LocalSdpMunger from "../sdp/LocalSdpMunger";
785
+import RtxModifier from "../sdp/RtxModifier";

+ 197
- 0
types/auto/modules/browser/BrowserCapabilities.d.ts View File

@@ -0,0 +1,197 @@
1
/**
 * Implements browser capabilities for lib-jitsi-meet.
 */
export default class BrowserCapabilities {
    /**
     * Tells whether or not the <tt>MediaStream</tt> is removed from the <tt>PeerConnection</tt> and disposed on video
     * mute (in order to turn off the camera device). This is needed on Firefox because of the following bug
     * https://bugzilla.mozilla.org/show_bug.cgi?id=1735951
     *
     * @return {boolean} <tt>true</tt> if the current browser supports this strategy or <tt>false</tt> otherwise.
     */
    doesVideoMuteByStreamRemove(): boolean;
    /**
     * Checks if the current browser is Chromium based, i.e., it's either Chrome / Chromium or uses it as its engine,
     * but doesn't identify as Chrome.
     *
     * This includes the following browsers:
     * - Chrome and Chromium.
     * - Other browsers which use the Chrome engine, but are detected as Chrome, such as Brave and Vivaldi.
     * - Browsers which are NOT Chrome but use it as their engine, and have custom detection code: Opera, Electron
     *   and NW.JS.
     * This excludes
     * - Chrome on iOS since it uses WKWebView.
     */
    isChromiumBased(): boolean;
    /**
     * Checks if the current platform is iOS.
     *
     * @returns {boolean}
     */
    isIosBrowser(): boolean;
    /**
     * Checks if the current browser is WebKit based. It's either
     * Safari or uses WebKit as its engine.
     *
     * This includes Chrome and Firefox on iOS.
     *
     * @returns {boolean}
     */
    isWebKitBased(): boolean;
    /**
     * Checks whether current running context is a Trusted Web Application.
     *
     * @returns {boolean} Whether the current context is a TWA.
     */
    isTwa(): boolean;
    /**
     * Checks if the current browser is supported.
     *
     * @returns {boolean} true if the browser is supported, false otherwise.
     */
    isSupported(): boolean;
    /**
     * Returns whether the browser is supported for Android.
     * @returns {boolean} true if the browser is supported for Android devices
     */
    isSupportedAndroidBrowser(): boolean;
    /**
     * Returns whether the browser is supported for iOS.
     * @returns {boolean} true if the browser is supported for iOS devices
     */
    isSupportedIOSBrowser(): boolean;
    /**
     * Returns whether or not the current environment needs a user interaction
     * with the page before any unmute can occur.
     *
     * @returns {boolean}
     */
    isUserInteractionRequiredForUnmute(): boolean;
    /**
     * Checks if the current browser triggers 'onmute'/'onunmute' events when
     * user's connection is interrupted and the video stops playback.
     * @returns {*|boolean} 'true' if the event is supported or 'false'
     * otherwise.
     */
    supportsVideoMuteOnConnInterrupted(): any | boolean;
    /**
     * Checks if the current browser reports upload and download bandwidth
     * statistics.
     * @return {boolean}
     */
    supportsBandwidthStatistics(): boolean;
    /**
     * Checks if the current browser supports setting codec preferences on the transceiver.
     * @returns {boolean}
     */
    supportsCodecPreferences(): boolean;
    /**
     * Checks if the current browser supports the device change event.
     * @return {boolean}
     */
    supportsDeviceChangeEvent(): boolean;
    /**
     * Checks if the current browser supports RTT statistics for srflx local
     * candidates through the legacy getStats() API.
     */
    supportsLocalCandidateRttStatistics(): any;
    /**
     * Checks if the current browser supports the Long Tasks API that lets us observe
     * performance measurement events and be notified of tasks that take longer than
     * 50ms to execute on the main thread.
     */
    supportsPerformanceObserver(): boolean;
    /**
     * Checks if the current browser supports audio level stats on the receivers.
     */
    supportsReceiverStats(): boolean;
    /**
     * Checks if the current browser reports round trip time statistics for
     * the ICE candidate pair.
     * @return {boolean}
     */
    supportsRTTStatistics(): boolean;
    /**
     * Returns true if VP9 is supported by the client on the browser. VP9 is currently disabled on Firefox and Safari
     * because of issues with rendering. Please check https://bugzilla.mozilla.org/show_bug.cgi?id=1492500,
     * https://bugs.webkit.org/show_bug.cgi?id=231071 and https://bugs.webkit.org/show_bug.cgi?id=231074 for details.
     */
    supportsVP9(): any;
    /**
     * Checks if the browser uses SDP munging for turning on simulcast.
     *
     * @returns {boolean}
     */
    usesSdpMungingForSimulcast(): boolean;
    /**
     * Checks if the browser uses webrtc-adapter. All browsers except React Native do.
     *
     * @returns {boolean}
     */
    usesAdapter(): boolean;
    /**
     * Checks if the browser uses RIDs/MIDs for signaling the simulcast streams
     * to the bridge instead of the ssrcs.
     */
    usesRidsForSimulcast(): boolean;
    /**
     * Checks if the browser supports getDisplayMedia.
     * @returns {boolean} {@code true} if the browser supports getDisplayMedia.
     */
    supportsGetDisplayMedia(): boolean;
    /**
     * Checks if the browser supports WebRTC Encoded Transform, an alternative
     * to insertable streams.
     *
     * NOTE: At the time of this writing the only browser supporting this is
     * Safari / WebKit, behind a flag.
     *
     * @returns {boolean} {@code true} if the browser supports it.
     */
    supportsEncodedTransform(): boolean;
    /**
     * Checks if the browser supports insertable streams, needed for E2EE.
     * @returns {boolean} {@code true} if the browser supports insertable streams.
     */
    supportsInsertableStreams(): boolean;
    /**
     * Whether the browser supports the RED format for audio.
     */
    supportsAudioRed(): boolean;
    /**
     * Checks if the browser supports unified plan.
     *
     * @returns {boolean}
     */
    supportsUnifiedPlan(): boolean;
    /**
     * Checks if the browser supports voice activity detection via the @type {VADAudioAnalyser} service.
     *
     * @returns {boolean}
     */
    supportsVADDetection(): boolean;
    /**
     * Check if the browser supports the RTP RTX feature (and it is usable).
     *
     * @returns {boolean}
     */
    supportsRTX(): boolean;
    /**
     * Returns the version of a Chromium based browser.
     *
     * @returns {Number}
     */
    _getChromiumBasedVersion(): number;
    /**
     * Returns the version of a Safari browser.
     *
     * @returns {Number}
     */
    _getSafariVersion(): number;
    /**
     * Returns the version of an iOS browser.
     *
     * @returns {Number}
     */
    _getIOSVersion(): number;
}

+ 3
- 0
types/auto/modules/browser/index.d.ts View File

@@ -0,0 +1,3 @@
1
+declare var _default: BrowserCapabilities;
2
+export default _default;
3
+import BrowserCapabilities from "./BrowserCapabilities";

+ 97
- 0
types/auto/modules/connectivity/ConnectionQuality.d.ts View File

@@ -0,0 +1,97 @@
1
+/**
2
+ * A class which monitors the local statistics coming from the RTC modules, and
3
+ * calculates a "connection quality" value, in percent, for the media
4
+ * connection. A value of 100% indicates a very good network connection, and a
5
+ * value of 0% indicates a poor connection.
6
+ */
7
+export default class ConnectionQuality {
8
+    /**
9
+     *
10
+     * @param conference
11
+     * @param eventEmitter
12
+     * @param options
13
+     */
14
+    constructor(conference: any, eventEmitter: any, options: any);
15
+    eventEmitter: any;
16
+    /**
17
+     * The owning JitsiConference.
18
+     */
19
+    _conference: any;
20
+    /**
21
+     * Holds statistics about the local connection quality.
22
+     */
23
+    _localStats: {
24
+        connectionQuality: number;
25
+        jvbRTT: any;
26
+    };
27
+    /**
28
+     * The time this._localStats.connectionQuality was last updated.
29
+     */
30
+    _lastConnectionQualityUpdate: number;
31
+    /**
32
+     * Conference options.
33
+     */
34
+    _options: any;
35
+    /**
36
+     * Maps a participant ID to an object holding connection quality
37
+     * statistics received from this participant.
38
+     */
39
+    _remoteStats: {};
40
+    /**
41
+     * The time that the ICE state last changed to CONNECTED. We use this
42
+     * to calculate how much time we as a sender have had to ramp-up.
43
+     */
44
+    _timeIceConnected: number;
45
+    /**
46
+     * The time that local video was unmuted. We use this to calculate how
47
+     * much time we as a sender have had to ramp-up.
48
+     */
49
+    _timeVideoUnmuted: number;
50
+    /**
51
+     * Sets _timeVideoUnmuted if it was previously unset. If it was already set,
52
+     * doesn't change it.
53
+     */
54
+    _maybeUpdateUnmuteTime(): void;
55
+    /**
56
+     * Calculates a new "connection quality" value.
57
+     * @param videoType {VideoType} the type of the video source (camera or a screen capture).
58
+     * @param isMuted {boolean} whether the local video is muted.
59
+     * @param resolutionName {Resolution} the input resolution used by the camera.
60
+     * @returns {*} the newly calculated connection quality.
61
+     */
62
+    _calculateConnectionQuality(videoType: {
63
+        CAMERA: string;
64
+        DESKTOP: string;
65
+    }, isMuted: boolean, resolutionName: any): any;
66
+    /**
67
+     * Updates the localConnectionQuality value
68
+     * @param values {number} the new value. Should be in [0, 100].
69
+     */
70
+    _updateLocalConnectionQuality(value: any): void;
71
+    /**
72
+     * Broadcasts the local statistics to all other participants in the
73
+     * conference.
74
+     */
75
+    _broadcastLocalStats(): void;
76
+    /**
77
+     * Updates the local statistics
78
+     * @param {TraceablePeerConnection} tpc the peerconnection which emitted
79
+     * the stats
80
+     * @param data new statistics
81
+     */
82
+    _updateLocalStats(tpc: any, data: any): void;
83
+    /**
84
+     * Updates remote statistics
85
+     * @param id the id of the remote participant
86
+     * @param data the statistics received
87
+     */
88
+    _updateRemoteStats(id: any, data: any): void;
89
+    /**
90
+     * Returns the local statistics.
91
+     * Exported only for use in jitsi-meet-torture.
92
+     */
93
+    getStats(): {
94
+        connectionQuality: number;
95
+        jvbRTT: any;
96
+    };
97
+}

+ 36
- 0
types/auto/modules/connectivity/IceFailedHandling.d.ts View File

@@ -0,0 +1,36 @@
1
/**
 * This class deals with shenanigans around JVB media session's ICE failed status handling.
 *
 * If ICE restarts are NOT explicitly enabled by the {@code enableIceRestart} config option, then the conference will
 * delay emitting the {@link JitsiConferenceErrors.ICE_FAILED} event by 15 seconds. If the network info module reports
 * the internet offline status then the time will start counting after the internet comes back online.
 *
 * If ICE restarts are enabled, then a delayed ICE failed notification to Jicofo will be sent, only if the ICE connection
 * does not recover soon after or before the XMPP connection is restored (if it was ever broken). If ICE fails while
 * the XMPP connection is not broken then the notifications will be sent after 2 seconds delay.
 */
export default class IceFailedHandling {
    /**
     * Creates new {@code IceFailedHandling} task.
     * @param {JitsiConference} conference
     */
    constructor(conference: any);
    // The conference this task acts upon.
    _conference: any;
    /**
     * After making sure there's no way for the ICE connection to recover this method either sends ICE failed
     * notification to Jicofo or emits the ice failed conference event.
     * @private
     * @returns {void}
     */
    private _actOnIceFailed;
    /**
     * Starts the task.
     */
    start(): void;
    // Handle of the pending delayed-action timeout, set by start().
    _iceFailedTimeout: any;
    /**
     * Cancels the task.
     */
    cancel(): void;
    // Set to true by cancel(); prevents the delayed action from firing.
    _canceled: boolean;
}

+ 33
- 0
types/auto/modules/connectivity/NetworkInfo.d.ts View File

@@ -0,0 +1,33 @@
1
+export const NETWORK_INFO_EVENT: "NETWORK_INFO_CHANGED";
2
+/**
3
+ * Module provides information about the current status of the internet
4
+ * connection. Lib-jitsi-meet doesn't have any logic for detecting internet
5
+ * online/offline, but rather it relies on the information supplied by the app
6
+ * that uses it. By default the online state is assumed and the lib acts as if
7
+ * it was connected. See {@link JitsiMeetJS.setNetworkInfo}.
8
+ */
9
+export class NetworkInfo extends Listenable {
10
+    /**
11
+     * Creates new {@link NetworkInfo} instance.
12
+     */
13
+    constructor();
14
+    _current: {
15
+        isOnline: boolean;
16
+    };
17
+    /**
18
+     * Updates the network info state.
19
+     * @param {boolean} isOnline - {@code true} if internet is online or {@code false} otherwise.
20
+     */
21
+    updateNetworkInfo({ isOnline }: boolean): void;
22
+    /**
23
+     * Returns the online/offline internet status. By default the value is {@code true} and changes only if
24
+     * the lib's user wires the state through {@link JitsiMeetJS.setNetworkInfo} like the jitsi-meet does. Because of
25
+     * that any logic should still assume that the internet may be offline and should handle the failure gracefully.
26
+     * It's only a good hint in the other way around: to pause internet operations until it comes back online.
27
+     * @returns {boolean}
28
+     */
29
+    isOnline(): boolean;
30
+}
31
+export default networkInfo;
32
+import Listenable from "../util/Listenable";
33
+declare const networkInfo: NetworkInfo;

+ 350
- 0
types/auto/modules/connectivity/ParticipantConnectionStatus.d.ts View File

@@ -0,0 +1,350 @@
1
/**
 * Participant connection statuses.
 *
 * @type {{
 *      ACTIVE: string,
 *      INACTIVE: string,
 *      INTERRUPTED: string,
 *      RESTORING: string
 * }}
 */
export const ParticipantConnectionStatus: {
    ACTIVE: string;
    INACTIVE: string;
    INTERRUPTED: string;
    RESTORING: string;
};
/**
 * Class is responsible for emitting
 * JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED events.
 */
export default class ParticipantConnectionStatusHandler {
    /**
     * Calculates the new {@link ParticipantConnectionStatus} based on
     * the values given for some specific remote user. It is assumed that
     * the conference is currently in the JVB mode (in contrary to the P2P mode)
     * @param {boolean} isConnectionActiveByJvb true if the JVB did not get any
     * data from the user for the last 15 seconds.
     * @param {boolean} isInLastN indicates whether the user is in the last N
     * set. When set to false it means that JVB is not sending any video for
     * the user.
     * @param {boolean} isRestoringTimedout if true it means that the user has
     * been outside of last N too long to be considered
     * {@link ParticipantConnectionStatus.RESTORING}.
     * @param {boolean} isVideoMuted true if the user is video muted and we
     * should not expect to receive any video.
     * @param {boolean} isVideoTrackFrozen if the current browser supports video
     * frozen detection then it will be set to true when the video track is
     * frozen. If the current browser does not support frozen detection then it's
     * always false.
     * @return {ParticipantConnectionStatus} the new connection status for
     * the user for whom the values above were provided.
     * @private
     */
    private static _getNewStateForJvbMode;
    /**
     * In P2P mode we don't care about any values coming from the JVB and
     * the connection status can be only active or interrupted.
     * @param {boolean} isVideoMuted true if the user is video muted
     * @param {boolean} isVideoTrackFrozen true if the video track for
     * the remote user is currently frozen. If the current browser does not
     * support video frozen detection then it's always false.
     * @return {ParticipantConnectionStatus}
     * @private
     */
    private static _getNewStateForP2PMode;
    /**
     * Creates new instance of <tt>ParticipantConnectionStatus</tt>.
     *
     * @constructor
     * @param {RTC} rtc the RTC service instance
     * @param {JitsiConference} conference parent conference instance
     * @param {Object} options
     * @param {number} [options.p2pRtcMuteTimeout=2500] custom value for
     * {@link ParticipantConnectionStatus.p2pRtcMuteTimeout}.
     * @param {number} [options.rtcMuteTimeout=2000] custom value for
     * {@link ParticipantConnectionStatus.rtcMuteTimeout}.
     * @param {number} [options.outOfLastNTimeout=500] custom value for
     * {@link ParticipantConnectionStatus.outOfLastNTimeout}.
     */
    constructor(rtc: any, conference: any, options: {
        p2pRtcMuteTimeout?: number;
        rtcMuteTimeout?: number;
        outOfLastNTimeout?: number;
    });
    // The RTC service instance passed to the constructor.
    rtc: any;
    // The parent JitsiConference instance passed to the constructor.
    conference: any;
    /**
     * A map of the "endpoint ID"(which corresponds to the resource part
     * of MUC JID(nickname)) to the timeout callback IDs scheduled using
     * window.setTimeout.
     * @type {Object.<string, number>}
     */
    trackTimers: {
        [x: string]: number;
    };
    /**
     * This map holds the endpoint connection status received from the JVB
     * (as it might be different than the one stored in JitsiParticipant).
     * Required for getting back in sync when remote video track is removed.
     * @type {Object.<string, boolean>}
     */
    connStatusFromJvb: {
        [x: string]: boolean;
    };
    /**
     * If video track frozen detection through RTC mute event is supported,
     * we wait some time until video track is considered frozen. But because
     * when the user falls out of last N it is expected for the video to
     * freeze this timeout must be significantly reduced in "out of last N"
     * case.
     *
     * Basically this value is used instead of {@link rtcMuteTimeout} when
     * user is not in last N.
     * @type {number}
     */
    outOfLastNTimeout: number;
    /**
     * How long we are going to wait for the corresponding signaling mute event after the RTC video track muted
     * event is fired on the Media stream, before the connection interrupted is fired. The default value is
     * {@link DEFAULT_P2P_RTC_MUTE_TIMEOUT}.
     *
     * @type {number} amount of time in milliseconds.
     */
    p2pRtcMuteTimeout: number;
    /**
     * How long we're going to wait after the RTC video track muted event
     * for the corresponding signalling mute event, before the connection
     * interrupted is fired. The default value is
     * {@link DEFAULT_RTC_MUTE_TIMEOUT}.
     *
     * @type {number} amount of time in milliseconds
     */
    rtcMuteTimeout: number;
    /**
     * This map holds a timestamp indicating when participant's video track
     * was RTC muted (it is assumed that each participant can have only 1
     * video track at a time). The purpose of storing the timestamp is to
     * avoid the transition to disconnected status in case of legitimate
     * video mute operation where the signalling video muted event can
     * arrive shortly after RTC muted event.
     *
     * The key is participant's ID which is the same as endpoint id in
     * the Colibri conference allocated on the JVB.
     *
     * The value is a timestamp measured in milliseconds obtained with
     * <tt>Date.now()</tt>.
     *
     * FIXME merge this logic with NO_DATA_FROM_SOURCE event
     *       implemented in JitsiLocalTrack by extending the event to
     *       the remote track and allowing to set different timeout for
     *       local and remote tracks.
     *
     * @type {Object.<string, number>}
     */
    rtcMutedTimestamp: {
        [x: string]: number;
    };
    /**
     * This map holds the timestamps indicating when participant's video
     * entered lastN set. Participants entering lastN will have connection
     * status restoring and when we start receiving video will become
     * active, but if video is not received for certain time
     * {@link DEFAULT_RESTORING_TIMEOUT} that participant connection status
     * will become interrupted.
     *
     * @type {Map<string, number>}
     */
    enteredLastNTimestamp: Map<string, number>;
    /**
     * A map of the "endpoint ID"(which corresponds to the resource part
     * of MUC JID(nickname)) to the restoring timeout callback IDs
     * scheduled using window.setTimeout.
     *
     * @type {Map<string, number>}
     */
    restoringTimers: Map<string, number>;
    /**
     * A map that holds the current connection status (along with all the internal events that happen
     * while in that state).
     *
     * The goal is to send this information to the analytics backend for post-mortem analysis.
     */
    connectionStatusMap: Map<any, any>;
    /**
     * Gets the video frozen timeout for given user.
     * @param {string} id endpoint/participant ID
     * @return {number} how long are we going to wait since RTC video muted
     * event, before a video track is considered frozen.
     * @private
     */
    private _getVideoFrozenTimeout;
    /**
     * Initializes <tt>ParticipantConnectionStatus</tt> and binds required event
     * listeners.
     */
    init(): void;
    // Bound event-handler references assigned in init() so that they can be
    // removed again in dispose(). Their semantics match the corresponding
    // public on*() methods below.
    _onEndpointConnStatusChanged: any;
    _onP2PStatus: any;
    _onUserLeft: any;
    _onTrackRtcMuted: any;
    _onTrackRtcUnmuted: any;
    _onRemoteTrackAdded: any;
    _onRemoteTrackRemoved: any;
    _onSignallingMuteChanged: any;
    _onTrackVideoTypeChanged: any;
    /**
     * On change in Last N set check all leaving and entering participants to
     * change their corresponding statuses.
     *
     * @param {Array<string>} leavingLastN - The array of ids leaving lastN.
     * @param {Array<string>} enteringLastN - The array of ids entering lastN.
     * @private
     */
    private _onLastNChanged;
    // Bound handler for lastN value changes; assigned in init().
    _onLastNValueChanged: any;
    /**
     * Removes all event listeners and disposes of all resources held by this
     * instance.
     */
    dispose(): void;
    /**
     * Handles RTCEvents.ENDPOINT_CONN_STATUS_CHANGED triggered when we receive
     * notification over the data channel from the bridge about endpoint's
     * connection status update.
     * @param {string} endpointId - The endpoint ID(MUC nickname/resource JID).
     * @param {boolean} isActive - true if the connection is OK or false otherwise.
     */
    onEndpointConnStatusChanged(endpointId: string, isActive: boolean): void;
    /**
     * Changes connection status.
     * @param {JitsiParticipant} participant
     * @param newStatus
     */
    _changeConnectionStatus(participant: any, newStatus: any): void;
    /**
     * Reset the postponed "connection interrupted" event which was previously
     * scheduled as a timeout on RTC 'onmute' event.
     *
     * @param {string} participantId - The participant for which the "connection
     * interrupted" timeout was scheduled.
     */
    clearTimeout(participantId: string): void;
    /**
     * Clears the timestamp of the RTC muted event for participant's video track
     * @param {string} participantId the id of the conference participant which
     * is the same as the Colibri endpoint ID of the video channel allocated for
     * the user on the videobridge.
     */
    clearRtcMutedTimestamp(participantId: string): void;
    /**
     * Bind signalling mute event listeners for video {JitsiRemoteTrack} when
     * a new one is added to the conference.
     *
     * @param {JitsiTrack} remoteTrack - The {JitsiTrack} which is being added to
     * the conference.
     */
    onRemoteTrackAdded(remoteTrack: any): void;
    /**
     * Removes all event listeners bound to the remote video track and clears
     * any related timeouts.
     *
     * @param {JitsiRemoteTrack} remoteTrack - The remote track which is being
     * removed from the conference.
     */
    onRemoteTrackRemoved(remoteTrack: any): void;
    /**
     * Checks if given participant's video is considered frozen.
     * @param {JitsiParticipant} participant - The participant.
     * @return {boolean} <tt>true</tt> if the video has frozen for given
     * participant or <tt>false</tt> when it's either not considered frozen
     * (yet) or if freeze detection is not supported by the current browser.
     *
     * FIXME merge this logic with NO_DATA_FROM_SOURCE event
     *       implemented in JitsiLocalTrack by extending the event to
     *       the remote track and allowing to set different timeout for
     *       local and remote tracks.
     *
     */
    isVideoTrackFrozen(participant: any): boolean;
    /**
     * Goes over every participant and updates connectivity status.
     * Should be called when a parameter which affects all of the participants
     * is changed (P2P for example).
     */
    refreshConnectionStatusForAll(): void;
    /**
     * Figures out (and updates) the current connectivity status for
     * the participant identified by the given id.
     *
     * @param {string} id - The participant's id (MUC nickname or Colibri endpoint ID).
     */
    figureOutConnectionStatus(id: string): void;
    /**
     * Computes the duration of the current connection status for the participant with the specified id (i.e. 15 seconds
     * in the INTERRUPTED state) and sends a participant connection status event.
     * @param {string} id - The jid of the participant.
     * @param {Number} nowMs - The current time (in millis).
     * @returns {void}
     */
    maybeSendParticipantConnectionStatusEvent(id: string, nowMs: number): void;
    /**
     * Clears the restoring timer for participant's video track and the
     * timestamp for entering lastN.
     *
     * @param {string} participantId - The id of the conference participant which
     * is the same as the Colibri endpoint ID of the video channel allocated for
     * the user on the videobridge.
     */
    _clearRestoringTimer(participantId: string): void;
    /**
     * Checks whether a track had stayed enough in restoring state, compares
     * current time and the time the track entered in lastN. If it hasn't
     * timedout and there is no timer added, add new timer in order to give it
     * more time to become active or mark it as interrupted on next check.
     *
     * @param {string} participantId - The id of the conference participant which
     * is the same as the Colibri endpoint ID of the video channel allocated for
     * the user on the videobridge.
     * @returns {boolean} <tt>true</tt> if the track was in restoring state
     * more than the timeout ({@link DEFAULT_RESTORING_TIMEOUT}.) in order to
     * set its status to interrupted.
     * @private
     */
    private _isRestoringTimedout;
    /**
     * Sends a last/final participant connection status event for the participant that left the conference.
     * @param {string} id - The id of the participant that left the conference.
     * @returns {void}
     */
    onUserLeft(id: string): void;
    /**
     * Handles RTC 'onmute' event for the video track.
     *
     * @param {JitsiRemoteTrack} track - The video track for which 'onmute' event
     * will be processed.
     */
    onTrackRtcMuted(track: any): void;
    /**
     * Handles RTC 'onunmute' event for the video track.
     *
     * @param {JitsiRemoteTrack} track - The video track for which 'onunmute'
     * event will be processed.
     */
    onTrackRtcUnmuted(track: any): void;
    /**
     * Here the signalling "mute"/"unmute" events are processed.
     *
     * @param {JitsiRemoteTrack} track - The remote video track for which
     * the signalling mute/unmute event will be processed.
     */
    onSignallingMuteChanged(track: any): void;
    /**
     * Sends a participant connection status event as a result of the video type
     * changing.
     * @param {JitsiRemoteTrack} track - The track.
     * @param {VideoType} type - The video type.
     * @returns {void}
     */
    onTrackVideoTypeChanged(track: any, type: any): void;
}

+ 6
- 0
types/auto/modules/detection/ActiveDeviceDetector.d.ts View File

@@ -0,0 +1,6 @@
1
/**
 * Go through all audio devices on the system and return one that is active, i.e. has audio signal.
 *
 * @returns {Promise<Object>} - Object containing information about the found device.
 */
export default function getActiveAudioDevice(): Promise<any>;

+ 54
- 0
types/auto/modules/detection/DetectionEvents.d.ts View File

@@ -0,0 +1,54 @@
1
+/**
2
+ * Event triggered by a audio detector indicating that its active state has changed from active to inactive or vice
3
+ * versa.
4
+ * @event
5
+ * @type {boolean} - true when service has changed to active false otherwise.
6
+ */
7
+export const DETECTOR_STATE_CHANGE: boolean;
8
+/** Event triggered by {@link NoAudioSignalDetector} when the local audio device associated with a JitsiConference
9
+ * starts receiving audio levels with the value of 0 meaning no audio is being captured on that device, or when
10
+ * it starts receiving audio levels !== 0 after being in a state of no audio.
11
+ * @event
12
+ * @type {boolean} - true when the current conference audio track has audio input false otherwise.
13
+ */
14
+export const AUDIO_INPUT_STATE_CHANGE: boolean;
15
+/** Event triggered by NoAudioSignalDetector when the local audio device associated with a JitsiConference goes silent
16
+ * for a period of time, meaning that the device is either broken or hardware/software muted.
17
+ * @event
18
+ * @type {void}
19
+ */
20
+export const NO_AUDIO_INPUT: void;
21
+/**
22
+ *  Event generated by {@link VADNoiseDetection} when the tracked device is considered noisy.
23
+ *  @event
24
+ *  @type {Object}
25
+ */
26
+export const VAD_NOISY_DEVICE: any;
27
+/**
28
+ * Event generated by VADReportingService when if finishes creating a VAD report for the monitored devices.
29
+ * The generated objects are of type Array<Object>, one score for each monitored device.
30
+ * @event VAD_REPORT_PUBLISHED
31
+ * @type Array<Object> with the following structure:
32
+ * @property {Date} timestamp - Timestamp at which the compute took place.
33
+ * @property {number} avgVAD - Average VAD score over monitored period of time.
34
+ * @property {string} deviceId - Associate local audio device ID.
35
+ */
36
+export const VAD_REPORT_PUBLISHED: Array<any>;
37
+/**
38
+ * Event generated by {@link TrackVADEmitter} when PCM sample VAD score is available.
39
+ *
40
+ * @event
41
+ * @type {Object}
42
+ * @property {Date}   timestamp - Exact time at which processed PCM sample was generated.
43
+ * @property {number} score - VAD score on a scale from 0 to 1 (i.e. 0.7)
44
+ * @property {Float32Array} pcmData - Raw PCM data with which the VAD score was calculated.
45
+ * @property {string} deviceId - Device id of the associated track.
46
+ */
47
+export const VAD_SCORE_PUBLISHED: any;
48
+/**
49
+ *  Event generated by {@link VADTalkMutedDetection} when a user is talking while the mic is muted.
50
+ *
51
+ *  @event
52
+ *  @type {Object}
53
+ */
54
+export const VAD_TALK_WHILE_MUTED: any;

+ 57
- 0
types/auto/modules/detection/NoAudioSignalDetection.d.ts View File

@@ -0,0 +1,57 @@
1
/// <reference types="node" />
/**
 * Detect if there is no audio input on the current TraceAblePeerConnection selected track. The no audio
 * state must be constant for a configured amount of time in order for the event to be triggered.
 * @fires DetectionEvents.AUDIO_INPUT_STATE_CHANGE
 * @fires DetectionEvents.NO_AUDIO_INPUT
 */
export default class NoAudioSignalDetection extends EventEmitter {
    /**
     * Creates new NoAudioSignalDetection.
     *
     * @param conference the JitsiConference instance that created us.
     * @constructor
     */
    constructor(conference: any);
    // The owning JitsiConference instance.
    _conference: any;
    // Timeout handle for the delayed NO_AUDIO_INPUT trigger.
    _timeoutTrigger: NodeJS.Timeout;
    // Whether the monitored track currently has audio input.
    _hasAudioInput: boolean;
    /**
     * Clear the timeout state.
     */
    _clearTriggerTimeout(): void;
    /**
     * Generated event triggered by a change in the current conference audio input state.
     *
     * @param {*} audioLevel - The audio level of the ssrc.
     * @fires DetectionEvents.AUDIO_INPUT_STATE_CHANGE
     */
    _handleAudioInputStateChange(audioLevel: any): void;
    /**
     * Generate event triggered by a prolonged period of no audio input.
     *
     * @param {number} audioLevel - The audio level of the ssrc.
     * @fires DetectionEvents.NO_AUDIO_INPUT
     */
    _handleNoAudioInputDetection(audioLevel: number): void;
    // Guards against emitting NO_AUDIO_INPUT more than once per silence period.
    _eventFired: boolean;
    /**
     * Receives audio level events for all send and receive streams on the current TraceablePeerConnection.
     *
     * @param {TraceablePeerConnection} tpc - TraceablePeerConnection of the owning conference.
     * @param {number} ssrc - The synchronization source identifier (SSRC) of the endpoint/participant/stream
     * being reported.
     * @param {number} audioLevel - The audio level of the ssrc.
     * @param {boolean} isLocal - true for local/send streams or false for remote/receive streams.
     */
    _audioLevel(tpc: any, ssrc: number, audioLevel: number, isLocal: boolean): void;
    /**
     * Notifies NoAudioSignalDetection that a JitsiTrack was added to the associated JitsiConference.
     * Only take into account local audio tracks.
     *
     * @param {JitsiTrack} track - The added JitsiTrack.
     */
    _trackAdded(track: any): void;
    // The local audio track currently being monitored.
    _audioTrack: any;
}
import EventEmitter from "events";

+ 25
- 0
types/auto/modules/detection/P2PDominantSpeakerDetection.d.ts View File

@@ -0,0 +1,25 @@
1
/**
 * The <tt>P2PDominantSpeakerDetection</tt> is activated only when p2p is
 * currently used.
 * Listens for changes in the audio level changes of the local p2p audio track
 * or remote p2p one and fires dominant speaker events to be able to use
 * features depending on those events (speaker stats), to make them work without
 * the video bridge.
 */
export default class P2PDominantSpeakerDetection {
    /**
     * Creates P2PDominantSpeakerDetection
     * @param conference the JitsiConference instance that created us.
     * @constructor
     */
    constructor(conference: any);
    // The owning JitsiConference instance.
    conference: any;
    // The local participant's user id, used to distinguish local audio levels.
    myUserID: any;
    /**
     * Receives audio level events for all streams in the conference.
     *
     * @param {String} id - The participant id
     * @param {number} audioLevel - The audio level.
     */
    _audioLevel(id: string, audioLevel: number): void;
}

+ 129
- 0
types/auto/modules/detection/TrackVADEmitter.d.ts View File

@@ -0,0 +1,129 @@
1
/// <reference types="node" />
/**
 * Connects an audio JitsiLocalTrack to a vadProcessor using WebAudio ScriptProcessorNode.
 * Once an object is created audio from the local track flows through the ScriptProcessorNode as raw PCM.
 * The PCM is processed by the injected vad module and a voice activity detection score is obtained, the
 * score is published to consumers via an EventEmitter.
 * After work is done with this service the destroy method needs to be called for a proper cleanup.
 *
 * @fires VAD_SCORE_PUBLISHED
 */
export default class TrackVADEmitter extends EventEmitter {
    /**
     * Factory method that sets up all the necessary components for the creation of the TrackVADEmitter.
     *
     * @param {string} micDeviceId - Target microphone device id.
     * @param {number} procNodeSampleRate - Sample rate of the proc node.
     * @param {Object} vadProcessor - Module that calculates the voice activity score for a certain audio PCM sample.
     * The processor needs to implement the following functions:
     * - <tt>getSampleLength()</tt> - Returns the sample size accepted by getSampleLength.
     * - <tt>getRequiredPCMFrequency()</tt> - Returns the PCM frequency at which the processor operates.
     * - <tt>calculateAudioFrameVAD(pcmSample)</tt> - Process a 32 float pcm sample of getSampleLength size.
     * @returns {Promise<TrackVADEmitter>} - Promise resolving in a new instance of TrackVADEmitter.
     */
    static create(micDeviceId: string, procNodeSampleRate: number, vadProcessor: any): Promise<TrackVADEmitter>;
    /**
     * Constructor.
     *
     * @param {number} procNodeSampleRate - Sample rate of the ScriptProcessorNode. Possible values  256, 512, 1024,
     *  2048, 4096, 8192, 16384. Passing other values will default to closest neighbor.
     * @param {Object} vadProcessor - VAD processor that allows us to calculate VAD score for PCM samples.
     * @param {JitsiLocalTrack} jitsiLocalTrack - JitsiLocalTrack corresponding to micDeviceId.
     */
    constructor(procNodeSampleRate: number, vadProcessor: any, jitsiLocalTrack: any);
    /**
     * Sample rate of the ScriptProcessorNode.
     */
    _procNodeSampleRate: number;
    /**
     * VAD Processor that allows us to calculate VAD score for PCM samples
     */
    _vadProcessor: any;
    /**
     * The JitsiLocalTrack instance.
     */
    _localTrack: any;
    /**
     * Buffer to hold residue PCM resulting after a ScriptProcessorNode callback
     */
    _bufferResidue: Float32Array;
    /**
     * The AudioContext instance with the preferred sample frequency.
     */
    _audioContext: AudioContext;
    /**
     * PCM Sample size expected by the VAD Processor instance. We cache it here as this value is used extensively,
     * saves a couple of function calls.
     */
    _vadSampleSize: any;
    /**
     * ScriptProcessorNode callback, the input parameters contains the PCM audio that is then sent to rnnoise.
     * Rnnoise only accepts PCM samples of 480 bytes whereas the webaudio processor node can't sample at a multiple
     * of 480 thus after each _onAudioProcess callback there will remain a PCM buffer residue equal
     * to _procNodeSampleRate / 480 which will be added to the next sample buffer and so on.
     *
     * @param {AudioProcessingEvent} audioEvent - Audio event.
     * @returns {void}
     * @fires VAD_SCORE_PUBLISHED
     */
    _onAudioProcess(audioEvent: AudioProcessingEvent): void;
    /**
     * Sets up the audio graph in the AudioContext.
     *
     * @returns {void}
     */
    _initializeAudioContext(): void;
    // WebAudio source node created from the local track's MediaStream.
    _audioSource: MediaStreamAudioSourceNode;
    // ScriptProcessorNode through which raw PCM flows to the VAD processor.
    _audioProcessingNode: ScriptProcessorNode;
    /**
     * Connects the nodes in the AudioContext to start the flow of audio data.
     *
     * @returns {void}
     */
    _connectAudioGraph(): void;
    /**
     * Disconnects the nodes in the AudioContext.
     *
     * @returns {void}
     */
    _disconnectAudioGraph(): void;
    /**
     * Cleanup potentially acquired resources.
     *
     * @returns {void}
     */
    _cleanupResources(): void;
    /**
     * Get the associated track device ID.
     *
     * @returns {string}
     */
    getDeviceId(): string;
    /**
     * Get the associated track label.
     *
     * @returns {string}
     */
    getTrackLabel(): string;
    /**
     * Start the emitter by connecting the audio graph.
     *
     * @returns {void}
     */
    start(): void;
    /**
     * Stops the emitter by disconnecting the audio graph.
     *
     * @returns {void}
     */
    stop(): void;
    /**
     * Destroy TrackVADEmitter instance (release resources and stop callbacks).
     *
     * @returns {void}
     */
    destroy(): void;
    // Set once destroy() has run; prevents double cleanup.
    _destroyed: boolean;
}
import EventEmitter from "events";

+ 105
- 0
types/auto/modules/detection/VADAudioAnalyser.d.ts View File

@@ -0,0 +1,105 @@
1
/**
 * Connects a TrackVADEmitter to the target conference local audio track and manages various services that use
 * the data to produce audio analytics (VADTalkMutedDetection and VADNoiseDetection).
 */
export default class VADAudioAnalyser extends EventEmitter {
    /**
     * Creates <tt>VADAudioAnalyser</tt>
     * @param {JitsiConference} conference - JitsiConference instance that created us.
     * @param {Object} createVADProcessor - Function that creates a Voice activity detection processor. The processor
     * needs to implement the following functions:
     * - <tt>getSampleLength()</tt> - Returns the sample size accepted by getSampleLength.
     * - <tt>getRequiredPCMFrequency()</tt> - Returns the PCM frequency at which the processor operates.
     * - <tt>calculateAudioFrameVAD(pcmSample)</tt> - Process a 32 float pcm sample of getSampleLength size.
     * @constructor
     */
    constructor(conference: any, createVADProcessor: any);
    /**
     * Member function that instantiates a VAD processor.
     */
    _createVADProcessor: any;
    /**
     * Current {@link TrackVADEmitter}. VAD Emitter uses a {@link JitsiLocalTrack} and VAD processor to generate
     * period voice probability scores.
     */
    _vadEmitter: TrackVADEmitter;
    /**
     * Current state of the _vadEmitter
     */
    _isVADEmitterRunning: boolean;
    /**
     * Array of currently attached VAD processing services.
     */
    _detectionServices: any[];
    /**
     * Promise used to chain create and destroy operations associated with TRACK_ADDED and TRACK_REMOVED events
     * coming from the conference.
     * Because we have an async created component (VAD Processor) we need to make sure that it's initialized before
     * we destroy it ( when changing the device for instance), or when we use it from an external point of entry
     * i.e. (TRACK_MUTE_CHANGED event callback).
     */
    _vadInitTracker: Promise<void>;
    /**
     * Listens for {@link TrackVADEmitter} events and directs them to attached services as needed.
     *
     * @param {Object} vadScore - VAD score emitted by {@link TrackVADEmitter}
     * @param {Date}   vadScore.timestamp - Exact time at which processed PCM sample was generated.
     * @param {number} vadScore.score - VAD score on a scale from 0 to 1 (i.e. 0.7)
     * @param {Float32Array} vadScore.pcmData - Raw PCM data with which the VAD score was calculated.
     * @param {string} vadScore.deviceId - Device id of the associated track.
     * @listens VAD_SCORE_PUBLISHED
     */
    _processVADScore(vadScore: {
        timestamp: Date;
        score: number;
    }): void;
    /**
     * Attach a VAD detector service to the analyser and handle its state changes.
     *
     * @param {Object} vadService
     */
    addVADDetectionService(vadService: any): void;
    /**
     * Start the {@link TrackVADEmitter} and attach the event listener.
     * @returns {void}
     */
    _startVADEmitter(): void;
    /**
     * Stop the {@link TrackVADEmitter} and detach the event listener.
     * @returns {void}
     */
    _stopVADEmitter(): void;
    /**
     * Change the isMuted state of all attached detection services.
     *
     * @param {boolean} isMuted
     */
    _changeDetectorsMuteState(isMuted: boolean): void;
    /**
     * Notifies the detector that a track was added to the associated {@link JitsiConference}.
     * Only take into account local audio tracks.
     * @param {JitsiTrack} track - The added track.
     * @returns {void}
     * @listens TRACK_ADDED
     */
    _trackAdded(track: any): void;
    /**
     * Notifies the detector that the mute state of a {@link JitsiConference} track has changed. Only takes into account
     * local audio tracks.
     * @param {JitsiTrack} track - The track whose mute state has changed.
     * @returns {void}
     * @listens TRACK_MUTE_CHANGED
     */
    _trackMuteChanged(track: any): void;
    /**
     * Notifies the detector that a track associated with the {@link JitsiConference} was removed. Only takes into
     * account local audio tracks. Cleans up resources associated with the track and resets the processing context.
     *
     * @param {JitsiTrack} track - The removed track.
     * @returns {void}
     * @listens TRACK_REMOVED
     */
    _trackRemoved(track: any): void;
}
import { EventEmitter } from "events";
import TrackVADEmitter from "./TrackVADEmitter";

+ 85
- 0
types/auto/modules/detection/VADNoiseDetection.d.ts View File

@@ -0,0 +1,85 @@
1
+/**
2
+ * Detect if the provided VAD score and PCM data are considered noise.
3
+ */
4
+export default class VADNoiseDetection extends EventEmitter {
5
+    /**
6
+     * Creates <tt>VADNoiseDetection</tt>
7
+     *
8
+     * @constructor
9
+     */
10
+    constructor();
11
+    /**
12
+     * Flag which denotes the current state of the detection service i.e. if there is already a processing operation
13
+     * ongoing.
14
+     */
15
+    _processing: boolean;
16
+    /**
17
+     * Buffer that keeps the VAD scores for a period of time.
18
+     */
19
+    _scoreArray: any[];
20
+    /**
21
+     * Buffer that keeps audio level samples for a period of time.
22
+     */
23
+    _audioLvlArray: any[];
24
+    /**
25
+     * Current state of the service, if it's not active no processing will occur.
26
+     */
27
+    _active: boolean;
28
+    /**
29
+     * Compute cumulative VAD score and PCM audio levels once the PROCESS_TIME_FRAME_SPAN_MS timeout has elapsed.
30
+     * If the score is above the set threshold fire the event.
31
+     * @returns {void}
32
+     * @fires VAD_NOISY_DEVICE
33
+     */
34
+    _calculateNoisyScore(): void;
35
+    /**
36
+     * Record the vad score and average volume in the appropriate buffers.
37
+     *
38
+     * @param {number} vadScore
39
+     * @param {number} avgAudioLvl - average audio level of the PCM sample associated with the VAD score.
40
+     */
41
+    _recordValues(vadScore: number, avgAudioLvl: number): void;
42
+    /**
43
+     * Set the active state of the detection service and notify any listeners.
44
+     *
45
+     * @param {boolean} active
46
+     * @fires DETECTOR_STATE_CHANGE
47
+     */
48
+    _setActiveState(active: boolean): void;
49
+    /**
50
+     * Change the state according to the muted status of the tracked device.
51
+     *
52
+     * @param {boolean} isMuted - Is the device muted or not.
53
+     */
54
+    changeMuteState(isMuted: boolean): void;
55
+    /**
56
+     * Check whether or not the service is active.
57
+     *
58
+     * @returns {boolean}
59
+     */
60
+    isActive(): boolean;
61
+    /**
62
+     * Reset the processing context, clear buffers, cancel the timeout trigger.
63
+     *
64
+     * @returns {void}
65
+     */
66
+    reset(): void;
67
+    /**
68
+     * Listens for {@link TrackVADEmitter} events and processes them.
69
+     *
70
+     * @param {Object} vadScore - VAD score emitted by {@link TrackVADEmitter}
71
+     * @param {Date}   vadScore.timestamp - Exact time at which processed PCM sample was generated.
72
+     * @param {number} vadScore.score - VAD score on a scale from 0 to 1 (i.e. 0.7)
73
+     * @param {Float32Array} vadScore.pcmData - Raw PCM Data associated with the VAD score.
74
+     * @param {string} vadScore.deviceId - Device id of the associated track.
75
+     * @listens VAD_SCORE_PUBLISHED
76
+     */
77
+    processVADScore(vadScore: {
78
+        timestamp: Date;
79
+        score: number;
80
+        pcmData: Float32Array;
81
+        deviceId: string;
82
+    }): void;
83
+    _processTimeout: NodeJS.Timeout;
84
+}
85
+import { EventEmitter } from "events";

+ 96
- 0
types/auto/modules/detection/VADReportingService.d.ts View File

@@ -0,0 +1,96 @@
1
+/// <reference types="node" />
2
+/**
3
+ * Voice activity detection reporting service. The service creates TrackVADEmitters for the provided devices and
4
+ * publishes an average of their VAD score over the specified interval via EventEmitter.
5
+ * The service is not reusable; if destroyed, a new one needs to be created, i.e. when a new device is added to the system
6
+ * a new service needs to be created and the old discarded.
7
+ */
8
+export default class VADReportingService extends EventEmitter {
9
+    /**
10
+     * Factory method that creates the TrackVADEmitters for the associated array of devices and instantiates
11
+     * a VADReportingService.
12
+     *
13
+     * @param {Array<MediaDeviceInfo>} micDeviceList - Device list that is monitored inside the service.
14
+     * @param {number} intervalDelay - Delay at which to publish VAD score for monitored devices.
15
+     * @param {Object} createVADProcessor - Function that creates a Voice activity detection processor. The processor
16
+     * needs to implement the following functions:
17
+     * - <tt>getSampleLength()</tt> - Returns the sample size accepted by calculateAudioFrameVAD.
18
+     * - <tt>getRequiredPCMFrequency()</tt> - Returns the PCM frequency at which the processor operates.
19
+     * - <tt>calculateAudioFrameVAD(pcmSample)</tt> - Process a 32 float pcm sample of getSampleLength size.
20
+     *
21
+     * @returns {Promise<VADReportingService>}
22
+     */
23
+    static create(micDeviceList: Array<MediaDeviceInfo>, intervalDelay: number, createVADProcessor: any): Promise<VADReportingService>;
24
+    /**
25
+     *
26
+     * @param {number} intervalDelay - Delay at which to publish VAD score for monitored devices.
27
+     *
28
+     * @constructor
29
+     */
30
+    constructor(intervalDelay: number);
31
+    /**
32
+     * Map containing context for devices currently being monitored by the reporting service.
33
+     */
34
+    _contextMap: Map<any, any>;
35
+    /**
36
+     * State flag, check if the instance was destroyed.
37
+     */
38
+    _destroyed: boolean;
39
+    /**
40
+     * Delay at which to publish VAD score for monitored devices.
41
+     */
42
+    _intervalDelay: number;
43
+    /**
44
+     * Identifier for the interval publishing stats on the set interval.
45
+     */
46
+    _intervalId: NodeJS.Timer;
47
+    /**
48
+     * Destroy TrackVADEmitters and clear the context map.
49
+     *
50
+     * @returns {void}
51
+     */
52
+    _clearContextMap(): void;
53
+    /**
54
+     * Set the watched device contexts.
55
+     *
56
+     * @param {Array<VADDeviceContext>} vadContextArray - List of mics.
57
+     * @returns {void}
58
+     */
59
+    _setVADContextArray(vadContextArray: Array<any>): void;
60
+    /**
61
+     * Start the setInterval reporting process.
62
+     *
63
+     * @returns {void}.
64
+     */
65
+    _startPublish(): void;
66
+    /**
67
+     * Function called at set interval with selected compute. The result will be published on the set callback.
68
+     *
69
+     * @returns {void}
70
+     * @fires VAD_REPORT_PUBLISHED
71
+     */
72
+    _reportVadScore(): void;
73
+    /**
74
+     * Callback method passed to vad emitters in order to publish their score.
75
+     *
76
+     * @param {Object} vadScore -VAD score emitted by.
77
+     * @param {Date}   vadScore.timestamp - Exact time at which processed PCM sample was generated.
78
+     * @param {number} vadScore.score - VAD score on a scale from 0 to 1 (i.e. 0.7).
79
+     * @param {string} vadScore.deviceId - Device id of the associated track.
80
+     * @returns {void}
81
+     * @listens VAD_SCORE_PUBLISHED
82
+     */
83
+    _devicePublishVADScore(vadScore: {
84
+        timestamp: Date;
85
+        score: number;
86
+        deviceId: string;
87
+    }): void;
88
+    /**
89
+     * Destroy the VADReportingService, stops the setInterval reporting, destroys the emitters and clears the map.
90
+     * After this call the instance is no longer usable.
91
+     *
92
+     * @returns {void}.
93
+     */
94
+    destroy(): void;
95
+}
96
+import EventEmitter from "events";

+ 70
- 0
types/auto/modules/detection/VADTalkMutedDetection.d.ts View File

@@ -0,0 +1,70 @@
1
+/**
2
+ * Detects if a VAD score which is generated on a muted device is voice and fires an event.
3
+ */
4
+export default class VADTalkMutedDetection extends EventEmitter {
5
+    /**
6
+     * Creates <tt>VADTalkMutedDetection</tt>
7
+     * @constructor
8
+     */
9
+    constructor();
10
+    /**
11
+     * Flag which denotes the current state of the detection service i.e. if there is already a processing operation
12
+     * ongoing.
13
+     */
14
+    _processing: boolean;
15
+    /**
16
+     * Buffer that keeps the VAD scores for a period of time.
17
+     */
18
+    _scoreArray: any[];
19
+    /**
20
+     * Current mute state of the audio track being monitored.
21
+     */
22
+    _active: boolean;
23
+    /**
24
+     * Compute cumulative VAD score function called once the PROCESS_TIME_FRAME_SPAN_MS timeout has elapsed.
25
+     * @returns {void}
26
+     * @fires VAD_TALK_WHILE_MUTED
27
+     */
28
+    _calculateVADScore(): void;
29
+    /**
30
+     * Set the active state of the detection service and notify any listeners.
31
+     *
32
+     * @param {boolean} active
33
+     * @fires DETECTOR_STATE_CHANGE
34
+     */
35
+    _setActiveState(active: boolean): void;
36
+    /**
37
+     * Change the state according to the muted status of the tracked device.
38
+     *
39
+     * @param {boolean} isMuted - Is the device muted or not.
40
+     */
41
+    changeMuteState(isMuted: boolean): void;
42
+    /**
43
+     * Check whether or not the service is active.
44
+     *
45
+     * @returns {boolean}
46
+     */
47
+    isActive(): boolean;
48
+    /**
49
+     * Listens for {@link TrackVADEmitter} events and processes them.
50
+     *
51
+     * @param {Object} vadScore - VAD score emitted by {@link TrackVADEmitter}
52
+     * @param {Date}   vadScore.timestamp - Exact time at which processed PCM sample was generated.
53
+     * @param {number} vadScore.score - VAD score on a scale from 0 to 1 (i.e. 0.7)
54
+     * @param {string} vadScore.deviceId - Device id of the associated track.
55
+     * @listens VAD_SCORE_PUBLISHED
56
+     */
57
+    processVADScore(vadScore: {
58
+        timestamp: Date;
59
+        score: number;
60
+        deviceId: string;
61
+    }): void;
62
+    _processTimeout: NodeJS.Timeout;
63
+    /**
64
+     * Reset the processing context, clear buffer, cancel the timeout trigger.
65
+     *
66
+     * @returns {void}
67
+     */
68
+    reset(): void;
69
+}
70
+import { EventEmitter } from "events";

+ 91
- 0
types/auto/modules/e2ee/Context.d.ts View File

@@ -0,0 +1,91 @@
1
+/**
2
+ * Per-participant context holding the cryptographic keys and
3
+ * encode/decode functions
4
+ */
5
+export class Context {
6
+    /**
7
+     * @param {Object} options
8
+     */
9
+    constructor({ sharedKey }?: any);
10
+    _cryptoKeyRing: any[];
11
+    _currentKeyIndex: number;
12
+    _sendCounts: Map<any, any>;
13
+    _sharedKey: any;
14
+    /**
15
+     * Derives the different subkeys and starts using them for encryption or
16
+     * decryption.
17
+     * @param {Uint8Array|false} key bytes. Pass false to disable.
18
+     * @param {Number} keyIndex
19
+     */
20
+    setKey(key: Uint8Array | false, keyIndex?: number): Promise<void>;
21
+    /**
22
+     * Sets a set of keys and resets the sendCount.
23
+     * decryption.
24
+     * @param {Object} keys set of keys.
25
+     * @param {Number} keyIndex optional
26
+     * @private
27
+     */
28
+    private _setKeys;
29
+    _sendCount: bigint;
30
+    /**
31
+     * Function that will be injected in a stream and will encrypt the given encoded frames.
32
+     *
33
+     * @param {RTCEncodedVideoFrame|RTCEncodedAudioFrame} encodedFrame - Encoded video frame.
34
+     * @param {TransformStreamDefaultController} controller - TransportStreamController.
35
+     *
36
+     * The VP8 payload descriptor described in
37
+     * https://tools.ietf.org/html/rfc7741#section-4.2
38
+     * is part of the RTP packet and not part of the frame and is not controllable by us.
39
+     * This is fine as the SFU keeps having access to it for routing.
40
+     *
41
+     * The encrypted frame is formed as follows:
42
+     * 1) Leave the first (10, 3, 1) bytes unencrypted, depending on the frame type and kind.
43
+     * 2) Form the GCM IV for the frame as described above.
44
+     * 3) Encrypt the rest of the frame using AES-GCM.
45
+     * 4) Allocate space for the encrypted frame.
46
+     * 5) Copy the unencrypted bytes to the start of the encrypted frame.
47
+     * 6) Append the ciphertext to the encrypted frame.
48
+     * 7) Append the IV.
49
+     * 8) Append a single byte for the key identifier.
50
+     * 9) Enqueue the encrypted frame for sending.
51
+     */
52
+    encodeFunction(encodedFrame: any | any, controller: TransformStreamDefaultController): Promise<void>;
53
+    /**
54
+     * Function that will be injected in a stream and will decrypt the given encoded frames.
55
+     *
56
+     * @param {RTCEncodedVideoFrame|RTCEncodedAudioFrame} encodedFrame - Encoded video frame.
57
+     * @param {TransformStreamDefaultController} controller - TransportStreamController.
58
+     */
59
+    decodeFunction(encodedFrame: any | any, controller: TransformStreamDefaultController): Promise<void>;
60
+    /**
61
+     * Function that will decrypt the given encoded frame. If the decryption fails, it will
62
+     * ratchet the key for up to RATCHET_WINDOW_SIZE times.
63
+     *
64
+     * @param {RTCEncodedVideoFrame|RTCEncodedAudioFrame} encodedFrame - Encoded video frame.
65
+     * @param {number} keyIndex - the index of the decryption data in _cryptoKeyRing array.
66
+     * @param {number} ratchetCount - the number of retries after ratcheting the key.
67
+     * @returns {RTCEncodedVideoFrame|RTCEncodedAudioFrame} - The decrypted frame.
68
+     * @private
69
+     */
70
+    private _decryptFrame;
71
+    /**
72
+     * Construct the IV used for AES-GCM and sent (in plain) with the packet similar to
73
+     * https://tools.ietf.org/html/rfc7714#section-8.1
74
+     * It concatenates
75
+     * - the 32 bit synchronization source (SSRC) given on the encoded frame,
76
+     * - the 32 bit rtp timestamp given on the encoded frame,
77
+     * - a send counter that is specific to the SSRC. Starts at a random number.
78
+     * The send counter is essentially the pictureId but we currently have to implement this ourselves.
79
+     * There is no XOR with a salt. Note that this IV leaks the SSRC to the receiver but since this is
80
+     * randomly generated and SFUs may not rewrite this is considered acceptable.
81
+     * The SSRC is used to allow demultiplexing multiple streams with the same key, as described in
82
+     *   https://tools.ietf.org/html/rfc3711#section-4.1.1
83
+     * The RTP timestamp is 32 bits and advances by the codec clock rate (90khz for video, 48khz for
84
+     * opus audio) every second. For video it rolls over roughly every 13 hours.
85
+     * The send counter will advance at the frame rate (30fps for video, 50fps for 20ms opus audio)
86
+     * every second. It will take a long time to roll over.
87
+     *
88
+     * See also https://developer.mozilla.org/en-US/docs/Web/API/AesGcmParams
89
+     */
90
+    _makeIV(synchronizationSource: any, timestamp: any): ArrayBuffer;
91
+}

+ 58
- 0
types/auto/modules/e2ee/E2EEContext.d.ts View File

@@ -0,0 +1,58 @@
1
+/**
2
+ * Context encapsulating the cryptography bits required for E2EE.
3
+ * This uses the WebRTC Insertable Streams API which is explained in
4
+ *   https://github.com/alvestrand/webrtc-media-streams/blob/master/explainer.md
5
+ * that provides access to the encoded frames and allows them to be transformed.
6
+ *
7
+ * The encoded frame format is explained below in the _encodeFunction method.
8
+ * High level design goals were:
9
+ * - do not require changes to existing SFUs and retain (VP8) metadata.
10
+ * - allow the SFU to rewrite SSRCs, timestamp, pictureId.
11
+ * - allow for the key to be rotated frequently.
12
+ */
13
+export default class E2EEcontext {
14
+    /**
15
+     * Build a new E2EE context instance, which will be used in a given conference.
16
+     * @param {boolean} [options.sharedKey] - whether there is a unique key shared among all participants.
17
+     */
18
+    constructor({ sharedKey }?: boolean);
19
+    _worker: Worker;
20
+    /**
21
+     * Cleans up all state associated with the given participant. This is needed when a
22
+     * participant leaves the current conference.
23
+     *
24
+     * @param {string} participantId - The participant that just left.
25
+     */
26
+    cleanup(participantId: string): void;
27
+    /**
28
+     * Cleans up all state associated with all participants in the conference. This is needed when disabling e2ee.
29
+     *
30
+     */
31
+    cleanupAll(): void;
32
+    /**
33
+     * Handles the given {@code RTCRtpReceiver} by creating a {@code TransformStream} which will inject
34
+     * a frame decoder.
35
+     *
36
+     * @param {RTCRtpReceiver} receiver - The receiver which will get the decoding function injected.
37
+     * @param {string} kind - The kind of track this receiver belongs to.
38
+     * @param {string} participantId - The participant id that this receiver belongs to.
39
+     */
40
+    handleReceiver(receiver: RTCRtpReceiver, kind: string, participantId: string): void;
41
+    /**
42
+     * Handles the given {@code RTCRtpSender} by creating a {@code TransformStream} which will inject
43
+     * a frame encoder.
44
+     *
45
+     * @param {RTCRtpSender} sender - The sender which will get the encoding function injected.
46
+     * @param {string} kind - The kind of track this sender belongs to.
47
+     * @param {string} participantId - The participant id that this sender belongs to.
48
+     */
49
+    handleSender(sender: RTCRtpSender, kind: string, participantId: string): void;
50
+    /**
51
+     * Set the E2EE key for the specified participant.
52
+     *
53
+     * @param {string} participantId - the ID of the participant whose key we are setting.
54
+     * @param {Uint8Array | boolean} key - the key for the given participant.
55
+     * @param {Number} keyIndex - the key index.
56
+     */
57
+    setKey(participantId: string, key: Uint8Array | boolean, keyIndex: number): void;
58
+}

+ 42
- 0
types/auto/modules/e2ee/E2EEncryption.d.ts View File

@@ -0,0 +1,42 @@
1
+/**
2
+ * This module integrates {@link KeyHandler} with {@link JitsiConference} in order to enable E2E encryption.
3
+ */
4
+export class E2EEncryption {
5
+    /**
6
+     * Indicates if E2EE is supported in the current platform.
7
+     *
8
+     * @param {object} config - Global configuration.
9
+     * @returns {boolean}
10
+     */
11
+    static isSupported(config: object): boolean;
12
+    /**
13
+     * A constructor.
14
+     * @param {JitsiConference} conference - The conference instance for which E2E encryption is to be enabled.
15
+     */
16
+    constructor(conference: any);
17
+    _externallyManaged: any;
18
+    _keyHandler: ExternallyManagedKeyHandler | ManagedKeyHandler;
19
+    /**
20
+     * Indicates whether E2EE is currently enabled or not.
21
+     *
22
+     * @returns {boolean}
23
+     */
24
+    isEnabled(): boolean;
25
+    /**
26
+     * Enables / disables End-To-End encryption.
27
+     *
28
+     * @param {boolean} enabled - whether E2EE should be enabled or not.
29
+     * @returns {void}
30
+     */
31
+    setEnabled(enabled: boolean): void;
32
+    /**
33
+     * Sets the key and index for End-to-End encryption.
34
+     *
35
+     * @param {CryptoKey} [keyInfo.encryptionKey] - encryption key.
36
+     * @param {Number} [keyInfo.index] - the index of the encryption key.
37
+     * @returns {void}
38
+     */
39
+    setEncryptionKey(keyInfo: any): void;
40
+}
41
+import { ExternallyManagedKeyHandler } from "./ExternallyManagedKeyHandler";
42
+import { ManagedKeyHandler } from "./ManagedKeyHandler";

+ 19
- 0
types/auto/modules/e2ee/ExternallyManagedKeyHandler.d.ts View File

@@ -0,0 +1,19 @@
1
+/**
2
+ * This module integrates {@link E2EEContext} with {external} in order to set the keys for encryption.
3
+ */
4
+export class ExternallyManagedKeyHandler extends KeyHandler {
5
+    /**
6
+     * Build a new ExternallyManagedKeyHandler instance, which will be used in a given conference.
7
+     * @param conference - the current conference.
8
+     */
9
+    constructor(conference: any);
10
+    /**
11
+     * Sets the key and index for End-to-End encryption.
12
+     *
13
+     * @param {CryptoKey} [keyInfo.encryptionKey] - encryption key.
14
+     * @param {Number} [keyInfo.index] - the index of the encryption key.
15
+     * @returns {void}
16
+     */
17
+    setKey(keyInfo: any): void;
18
+}
19
+import { KeyHandler } from "./KeyHandler";

+ 69
- 0
types/auto/modules/e2ee/KeyHandler.d.ts View File

@@ -0,0 +1,69 @@
1
+/**
2
+ * Abstract class that integrates {@link E2EEContext} with a key management system.
3
+ */
4
+export class KeyHandler extends Listenable {
5
+    /**
6
+     * Build a new KeyHandler instance, which will be used in a given conference.
7
+     * @param {JitsiConference} conference - the current conference.
8
+     * @param {object} options - the options passed to {E2EEContext}, see implementation.
9
+     */
10
+    constructor(conference: any, options?: object);
11
+    conference: any;
12
+    e2eeCtx: E2EEContext;
13
+    enabled: boolean;
14
+    _enabling: Deferred;
15
+    /**
16
+     * Indicates whether E2EE is currently enabled or not.
17
+     *
18
+     * @returns {boolean}
19
+     */
20
+    isEnabled(): boolean;
21
+    /**
22
+     * Enables / disables End-To-End encryption.
23
+     *
24
+     * @param {boolean} enabled - whether E2EE should be enabled or not.
25
+     * @returns {void}
26
+     */
27
+    setEnabled(enabled: boolean): void;
28
+    /**
29
+     * Sets the key for End-to-End encryption.
30
+     *
31
+     * @returns {void}
32
+     */
33
+    setEncryptionKey(): void;
34
+    /**
35
+     * Setup E2EE on the new track that has been added to the conference, apply it on all the open peerconnections.
36
+     * @param {JitsiLocalTrack} track - the new track that's being added to the conference.
37
+     * @private
38
+     */
39
+    private _onLocalTrackAdded;
40
+    /**
41
+     * Setups E2E encryption for the new session.
42
+     * @param {JingleSessionPC} session - the new media session.
43
+     * @private
44
+     */
45
+    private _onMediaSessionStarted;
46
+    /**
47
+     * Setup E2EE for the receiving side.
48
+     *
49
+     * @private
50
+     */
51
+    private _setupReceiverE2EEForTrack;
52
+    /**
53
+     * Setup E2EE for the sending side.
54
+     *
55
+     * @param {JingleSessionPC} session - the session which sends the media produced by the track.
56
+     * @param {JitsiLocalTrack} track - the local track for which e2e encoder will be configured.
57
+     * @private
58
+     */
59
+    private _setupSenderE2EEForTrack;
60
+    /**
61
+     * Setup E2EE on the sender that is created for the unmuted local video track.
62
+     * @param {JitsiLocalTrack} track - the track for which muted status has changed.
63
+     * @private
64
+     */
65
+    private _trackMuteChanged;
66
+}
67
+import Listenable from "../util/Listenable";
68
+import E2EEContext from "./E2EEContext";
69
+import Deferred from "../util/Deferred";

+ 73
- 0
types/auto/modules/e2ee/ManagedKeyHandler.d.ts View File

@@ -0,0 +1,73 @@
1
+/**
2
+ * This module integrates {@link E2EEContext} with {@link OlmAdapter} in order to distribute the keys for encryption.
3
+ */
4
+export class ManagedKeyHandler extends KeyHandler {
5
+    /**
6
+     * Build a new ManagedKeyHandler instance, which will be used in a given conference.
7
+     */
8
+    constructor(conference: any);
9
+    _key: any;
10
+    _conferenceJoined: boolean;
11
+    _olmAdapter: OlmAdapter;
12
+    _rotateKey: any;
13
+    _ratchetKey: any;
14
+    /**
15
+     * When E2EE is enabled it initializes sessions and sets the key.
16
+     * Cleans up the sessions when disabled.
17
+     *
18
+     * @param {boolean} enabled - whether E2EE should be enabled or not.
19
+     * @returns {void}
20
+     */
21
+    _setEnabled(enabled: boolean): void;
22
+    /**
23
+     * Handles an update in a participant's presence property.
24
+     *
25
+     * @param {JitsiParticipant} participant - The participant.
26
+     * @param {string} name - The name of the property that changed.
27
+     * @param {*} oldValue - The property's previous value.
28
+     * @param {*} newValue - The property's new value.
29
+     * @private
30
+     */
31
+    private _onParticipantPropertyChanged;
32
+    /**
33
+     * Advances (using ratcheting) the current key when a new participant joins the conference.
34
+     * @private
35
+     */
36
+    private _onParticipantJoined;
37
+    /**
38
+     * Rotates the current key when a participant leaves the conference.
39
+     * @private
40
+     */
41
+    private _onParticipantLeft;
42
+    /**
43
+     * Rotates the local key. Rotating the key implies creating a new one, then distributing it
44
+     * to all participants and once they all received it, start using it.
45
+     *
46
+     * @private
47
+     */
48
+    private _rotateKeyImpl;
49
+    /**
50
+     * Advances the current key by using ratcheting.
51
+     *
52
+     * @private
53
+     */
54
+    private _ratchetKeyImpl;
55
+    /**
56
+     * Handles an update in a participant's key.
57
+     *
58
+     * @param {string} id - The participant ID.
59
+     * @param {Uint8Array | boolean} key - The new key for the participant.
60
+     * @param {Number} index - The new key's index.
61
+     * @private
62
+     */
63
+    private _onParticipantKeyUpdated;
64
+    /**
65
+     * Generates a new 256 bit random key.
66
+     *
67
+     * @returns {Uint8Array}
68
+     * @private
69
+     */
70
+    private _generateKey;
71
+}
72
+import { KeyHandler } from "./KeyHandler";
73
+import { OlmAdapter } from "./OlmAdapter";

+ 166
- 0
types/auto/modules/e2ee/OlmAdapter.d.ts View File

@@ -0,0 +1,166 @@
1
+/**
2
+ * This class implements an End-to-End Encrypted communication channel between every two peers
3
+ * in the conference. This channel uses libolm to achieve E2EE.
4
+ *
5
+ * The created channel is then used to exchange the secret key that each participant will use
6
+ * to encrypt the actual media (see {@link E2EEContext}).
7
+ *
8
+ * A simple JSON message based protocol is implemented, which follows a request - response model:
9
+ * - session-init: Initiates an olm session establishment procedure. This message will be sent
10
+ *                 by the participant who just joined, to everyone else.
11
+ * - session-ack: Completes the olm session establishment. This message may contain ancillary
12
+ *                encrypted data, more specifically the sender's current key.
13
+ * - key-info: Includes the sender's most up to date key information.
14
+ * - key-info-ack: Acknowledges the reception of a key-info request. In addition, it may contain
15
+ *                 the sender's key information, if available.
16
+ * - error: Indicates a request processing error has occurred.
17
+ *
18
+ * These requests and responses are transport independent. Currently they are sent using XMPP
19
+ * MUC private messages.
20
+ */
21
+export class OlmAdapter extends Listenable {
22
+    /**
23
+     * Indicates if olm is supported on the current platform.
24
+     *
25
+     * @returns {boolean}
26
+     */
27
+    static isSupported(): boolean;
28
+    /**
29
+     * Creates an adapter instance for the given conference.
30
+     */
31
+    constructor(conference: any);
32
+    _conf: any;
33
+    _init: Deferred;
34
+    _key: boolean | Uint8Array;
35
+    _keyIndex: number;
36
+    _reqs: Map<any, any>;
37
+    _sessionInitialization: Deferred;
38
+    /**
39
+     * Starts new olm sessions with every other participant that has a participantId "smaller" than the localParticipantId.
40
+     */
41
+    initSessions(): Promise<void>;
42
+    /**
43
+     * Updates the current participant key and distributes it to all participants in the conference
44
+     * by sending a key-info message.
45
+     *
46
+     * @param {Uint8Array|boolean} key - The new key.
47
+     * @retrns {Promise<Number>}
48
+     */
49
+    updateKey(key: Uint8Array | boolean): Promise<number>;
50
+    /**
51
+     * Updates the current participant key.
52
+     * @param {Uint8Array|boolean} key - The new key.
53
+     * @returns {number}
54
+    */
55
+    updateCurrentKey(key: Uint8Array | boolean): number;
56
+    /**
57
+     * Frees the olmData session for the given participant.
58
+     *
59
+     */
60
+    clearParticipantSession(participant: any): void;
61
+    /**
62
+     * Frees the olmData sessions for all participants.
63
+     *
64
+     */
65
+    clearAllParticipantsSessions(): void;
66
+    /**
67
+     * Internal helper to bootstrap the olm library.
68
+     *
69
+     * @returns {Promise<void>}
70
+     * @private
71
+     */
72
+    private _bootstrapOlm;
73
+    _olmAccount: any;
74
+    _idKey: any;
75
+    /**
76
+     * Publishes our own Olm id key in presence.
77
+     * @private
78
+     */
79
+    private _onIdKeyReady;
80
+    /**
81
+     * Event posted when the E2EE signalling channel has been established with the given participant.
82
+     * @private
83
+     */
84
+    private _onParticipantE2EEChannelReady;
85
+    /**
86
+     * Internal helper for encrypting the current key information for a given participant.
87
+     *
88
+     * @param {Olm.Session} session - Participant's session.
89
+     * @returns {string} - The encrypted text with the key information.
90
+     * @private
91
+     */
92
+    private _encryptKeyInfo;
93
+    /**
94
+     * Internal helper for getting the olm related data associated with a participant.
95
+     *
96
+     * @param {JitsiParticipant} participant - Participant whose data wants to be extracted.
97
+     * @returns {Object}
98
+     * @private
99
+     */
100
+    private _getParticipantOlmData;
101
+    /**
102
+     * Handles leaving the conference, cleaning up olm sessions.
103
+     *
104
+     * @private
105
+     */
106
+    private _onConferenceLeft;
107
+    /**
108
+     * Main message handler. Handles 1-to-1 messages received from other participants
109
+     * and send the appropriate replies.
110
+     *
111
+     * @private
112
+     */
113
+    private _onEndpointMessageReceived;
114
+    /**
115
+     * Handles a participant leaving. When a participant leaves their olm session is destroyed.
116
+     *
117
+     * @private
118
+     */
119
+    private _onParticipantLeft;
120
+    /**
121
+    * Handles an update in a participant's presence property.
122
+    *
123
+    * @param {JitsiParticipant} participant - The participant.
124
+    * @param {string} name - The name of the property that changed.
125
+    * @param {*} oldValue - The property's previous value.
126
+    * @param {*} newValue - The property's new value.
127
+    * @private
128
+    */
129
+    private _onParticipantPropertyChanged;
130
+    /**
131
+     * Builds and sends an error message to the target participant.
132
+     *
133
+     * @param {JitsiParticipant} participant - The target participant.
134
+     * @param {string} error - The error message.
135
+     * @returns {void}
136
+     */
137
+    _sendError(participant: any, error: string): void;
138
+    /**
139
+     * Internal helper to send the given object to the given participant ID.
140
+     * This function merely exists so the transport can be easily swapped.
141
+     * Currently messages are transmitted via XMPP MUC private messages.
142
+     *
143
+     * @param {object} data - The data that will be sent to the target participant.
144
+     * @param {string} participantId - ID of the target participant.
145
+     */
146
+    _sendMessage(data: object, participantId: string): void;
147
+    /**
148
+     * Builds and sends the session-init request to the target participant.
149
+     *
150
+     * @param {JitsiParticipant} participant - Participant to whom we'll send the request.
151
+     * @returns {Promise} - The promise will be resolved when the session-ack is received.
152
+     * @private
153
+     */
154
+    private _sendSessionInit;
155
+}
156
+export namespace OlmAdapter {
157
+    export { OlmAdapterEvents as events };
158
+}
159
+import Listenable from "../util/Listenable";
160
+import Deferred from "../util/Deferred";
161
+declare namespace OlmAdapterEvents {
162
+    const OLM_ID_KEY_READY: string;
163
+    const PARTICIPANT_E2EE_CHANNEL_READY: string;
164
+    const PARTICIPANT_KEY_UPDATED: string;
165
+}
166
+export {};

+ 1
- 0
types/auto/modules/e2ee/Worker.d.ts View File

@@ -0,0 +1 @@
1
+export {};

+ 25
- 0
types/auto/modules/e2ee/crypto-utils.d.ts View File

@@ -0,0 +1,25 @@
1
/**
 * Derives a set of keys from the master key.
 * @param {CryptoKey} material - master key to derive from.
 * @returns the derived key material and encryption key.
 *
 * See https://tools.ietf.org/html/draft-omara-sframe-00#section-4.3.1
 */
export function deriveKeys(material: CryptoKey): Promise<{
    material: CryptoKey;
    encryptionKey: CryptoKey;
}>;
11
/**
 * Ratchets a key. See
 * https://tools.ietf.org/html/draft-omara-sframe-00#section-4.3.5.1
 * @param {CryptoKey} material - base key material
 * @returns {ArrayBuffer} - ratcheted key material
 *
 * NOTE(review): WebCrypto derivation primitives are asynchronous, so the
 * implementation likely returns Promise<ArrayBuffer>; this auto-generated
 * return type should be confirmed against the implementation.
 */
export function ratchet(material: CryptoKey): ArrayBuffer;
18
/**
 * Converts a raw key into a WebCrypto key object with default options
 * suitable for our usage.
 * @param {ArrayBuffer} keyBytes - raw key.
 * @returns {CryptoKey} - the WebCrypto key.
 *
 * NOTE(review): SubtleCrypto.importKey is asynchronous, so the implementation
 * likely returns Promise<CryptoKey>; confirm against the implementation.
 */
export function importKey(keyBytes: ArrayBuffer): CryptoKey;

+ 4
- 0
types/auto/modules/e2ee/utils.d.ts View File

@@ -0,0 +1,4 @@
1
/**
 * Compares two byteArrays for equality.
 * @param {*} a1 - the first array-like to compare.
 * @param {*} a2 - the second array-like to compare.
 * @returns {boolean} - whether the two arrays are equal.
 */
export function isArrayEqual(a1: any, a2: any): boolean;

+ 75
- 0
types/auto/modules/e2eping/e2eping.d.ts View File

@@ -0,0 +1,75 @@
1
/**
 * Implements end-to-end ping (from one conference participant to another) via
 * the jitsi-videobridge channel (either WebRTC data channel or web socket).
 *
 * TODO: use a broadcast message instead of individual pings to each remote
 * participant.
 *
 * This class:
 * 1. Sends periodic ping requests to all other participants in the
 * conference.
 * 2. Responds to ping requests from other participants.
 * 3. Fires events with the end-to-end RTT to each participant whenever a
 * response is received.
 * 4. Fires analytics events with the end-to-end RTT periodically.
 */
export default class E2ePing {
    /**
     * @param {JitsiConference} conference - The conference.
     * @param {Object} options
     * @param {Function} sendMessage - The function to use to send a message.
     */
    constructor(conference: any, options: any, sendMessage: Function);
    conference: any;
    eventEmitter: any;
    sendMessage: Function;
    // Interval between pings to each participant, in milliseconds.
    pingIntervalMs: any;
    // Interval between analytics events, in milliseconds.
    analyticsIntervalMs: any;
    // Map of per-participant ping state, keyed by participant ID.
    participants: {};
    isDataChannelOpen: boolean;
    /**
     * Handles a participant joining the conference. Starts to send ping
     * requests to the participant.
     *
     * @param {String} id - The ID of the participant.
     * @param {JitsiParticipant} participant - The participant that joined.
     */
    participantJoined(id: string, participant: any): void;
    /**
     * Handles a participant leaving the conference. Stops sending requests.
     *
     * @param {String} id - The ID of the participant.
     */
    participantLeft(id: string): void;
    /**
     * Handles a message that was received.
     *
     * @param participant - The message sender.
     * @param payload - The payload of the message.
     */
    messageReceived(participant: any, payload: any): void;
    /**
     * Notifies this instance that the communications channel has been opened
     * and it can now send messages via sendMessage.
     */
    dataChannelOpened(): void;
    /**
     * Handles a ping request coming from another participant.
     *
     * @param {string} participantId - The ID of the participant who sent the
     * request.
     * @param {Object} request - The request.
     */
    handleRequest(participantId: string, request: any): void;
    /**
     * Handles a ping response coming from another participant
     * @param {string} participantId - The ID of the participant who sent the
     * response.
     * @param {Object} response - The response.
     */
    handleResponse(participantId: string, response: any): void;
    /**
     * Stops this E2ePing (i.e. stop sending requests).
     */
    stop(): void;
}

+ 26
- 0
types/auto/modules/event/Jvb121EventGenerator.d.ts View File

@@ -0,0 +1,26 @@
1
/**
 * Emits {@link JitsiConferenceEvents.JVB121_STATUS} events based on the current
 * P2P status and the conference participants count. See the event description
 * for more info.
 */
export default class Jvb121EventGenerator {
    /**
     * Creates new <tt>Jvb121EventGenerator</tt> for the given conference.
     * @param {JitsiConference} conference
     */
    constructor(conference: any);
    // The conference whose P2P status and participant count are observed.
    _conference: any;
    /**
     * Indicates whether it's a one to one JVB conference (<tt>true</tt>)
     * or a multiparty (<tt>false</tt>). Will be also <tt>false</tt> if
     * the conference is currently in the P2P mode.
     * @type {boolean}
     * @private
     */
    private _jvb121;
    /**
     * Checks whether the JVB121 value should be updated and a new event
     * emitted.
     */
    evaluateStatus(): void;
}

+ 20
- 0
types/auto/modules/flags/FeatureFlags.d.ts View File

@@ -0,0 +1,20 @@
1
declare var _default: FeatureFlags;
export default _default;
/**
 * A global module for accessing information about different feature flags state.
 */
declare class FeatureFlags {
    /**
     * Configures the module.
     *
     * @param {Object} flags - The feature flags to apply.
     * @param {boolean} flags.sourceNameSignaling - Enables source names in the signaling.
     */
    init(flags: any): void;
    // Cached value of flags.sourceNameSignaling, set by init().
    _sourceNameSignaling: boolean;
    /**
     * Checks if the source name signaling is enabled.
     *
     * @returns {boolean}
     */
    isSourceNameSignalingEnabled(): boolean;
}

+ 167
- 0
types/auto/modules/proxyconnection/ProxyConnectionPC.d.ts View File

@@ -0,0 +1,167 @@
1
/**
 * An adapter around {@code JingleSessionPC} so its logic can be re-used without
 * an XMPP connection. It is being re-used for consistency with the rest of the
 * codebase and to leverage existing peer connection event handling. Also
 * this class provides a facade to hide most of the API for
 * {@code JingleSessionPC}.
 */
export default class ProxyConnectionPC {
    /**
     * Initializes a new {@code ProxyConnectionPC} instance.
     *
     * @param {Object} options - Values to initialize the instance with.
     * @param {Object} [options.pcConfig] - The {@code RTCConfiguration} to use for the WebRTC peer connection.
     * @param {boolean} [options.isInitiator] - If true, the local client should send offers. If false, the local
     * client should send answers. Defaults to false.
     * @param {Function} options.onRemoteStream - Callback to invoke when a remote media stream has been received
     * through the peer connection.
     * @param {string} options.peerJid - The jid of the remote client with which the peer connection is being
     * established and which should receive direct messages regarding peer connection updates.
     * @param {boolean} [options.receiveVideo] - Whether or not the peer connection should accept incoming video
     * streams. Defaults to false.
     * @param {Function} options.onSendMessage - Callback to invoke when a message has to be sent (signaled) out.
     */
    constructor(options?: {
        pcConfig?: any;
        isInitiator?: boolean;
        onRemoteStream: Function;
        peerJid: string;
        receiveVideo?: boolean;
        onSendMessage: Function;
    });
    _options: {
        pcConfig: any;
        isInitiator: boolean;
        onRemoteStream: Function;
        peerJid: string;
        receiveVideo: boolean;
        onSendMessage: Function;
        receiveAudio: boolean;
    };
    /**
     * Instances of {@code JitsiTrack} associated with this instance of
     * {@code ProxyConnectionPC}.
     *
     * @type {Array<JitsiTrack>}
     */
    _tracks: Array<any>;
    /**
     * The active instance of {@code JingleSessionPC}.
     *
     * @type {JingleSessionPC|null}
     */
    _peerConnection: JingleSessionPC | null;
    /**
     * Invoked when a connection related issue has been encountered.
     *
     * @param {string} errorType - The constant indicating the type of the error
     * that occurred.
     * @param {string} details - Optional additional data about the error.
     * @private
     * @returns {void}
     */
    private _onError;
    /**
     * Callback invoked when the peer connection has received a remote media
     * stream.
     *
     * @param {JitsiRemoteTrack} jitsiRemoteTrack - The remote media stream
     * wrapped in {@code JitsiRemoteTrack}.
     * @private
     * @returns {void}
     */
    private _onRemoteStream;
    /**
     * Callback invoked when {@code JingleSessionPC} needs to signal a message
     * out to the remote peer.
     *
     * @param {XML} iq - The message to signal out.
     * @private
     * @returns {void}
     */
    private _onSendMessage;
    /**
     * Returns the jid of the remote peer with which this peer connection should
     * be established.
     *
     * @returns {string}
     */
    getPeerJid(): string;
    /**
     * Updates the peer connection based on the passed in jingle.
     *
     * @param {Object} $jingle - An XML jingle element, wrapped in query,
     * describing how the peer connection should be updated.
     * @returns {void}
     */
    processMessage($jingle: any): void;
    /**
     * Instantiates a peer connection and starts the offer/answer cycle to
     * establish a connection with a remote peer.
     *
     * @param {Array<JitsiLocalTrack>} localTracks - Initial local tracks to add
     * to the peer connection.
     * @returns {void}
     */
    start(localTracks?: Array<any>): void;
    /**
     * Begins the process of disconnecting from a remote peer and cleaning up
     * the peer connection.
     *
     * @returns {void}
     */
    stop(): void;
    /**
     * Instantiates a new {@code JingleSessionPC} by stubbing out the various
     * dependencies of {@code JingleSessionPC}.
     *
     * @private
     * @returns {JingleSessionPC}
     */
    private _createPeerConnection;
    /**
     * Create an instance of {@code RTC} as it is required for peer
     * connection creation by {@code JingleSessionPC}. An existing instance
     * of {@code RTC} from elsewhere should not be re-used because it is
     * a stateful grouping of utilities.
     */
    _rtc: RTC;
    /**
     * Callback invoked in response to an agreement to start a proxy connection.
     * The passed in jingle element should contain an SDP answer to a previously
     * sent SDP offer.
     *
     * @param {Object} $jingle - The jingle element wrapped in jQuery.
     * @private
     * @returns {void}
     */
    private _onSessionAccept;
    /**
     * Callback invoked in response to a request to start a proxy connection.
     * The passed in jingle element should contain an SDP offer.
     *
     * @param {Object} $jingle - The jingle element wrapped in jQuery.
     * @private
     * @returns {void}
     */
    private _onSessionInitiate;
    /**
     * Callback invoked in response to a request to disconnect an active proxy
     * connection. Cleans up tracks and the peer connection.
     *
     * @private
     * @returns {void}
     */
    private _onSessionTerminate;
    /**
     * Callback invoked in response to ICE candidates from the remote peer.
     * The passed in jingle element should contain an ICE candidate.
     *
     * @param {Object} $jingle - The jingle element wrapped in jQuery.
     * @private
     * @returns {void}
     */
    private _onTransportInfo;
}
166
+import JingleSessionPC from "../xmpp/JingleSessionPC";
167
+import RTC from "../RTC/RTC";

+ 141
- 0
types/auto/modules/proxyconnection/ProxyConnectionService.d.ts View File

@@ -0,0 +1,141 @@
1
/**
 * Instantiates a new ProxyConnectionPC and ensures only one exists at a given
 * time. Currently it assumes ProxyConnectionPC is used only for screensharing
 * and assumes IQs to be used for communication.
 */
export default class ProxyConnectionService {
    /**
     * Initializes a new {@code ProxyConnectionService} instance.
     *
     * @param {Object} options - Values to initialize the instance with.
     * @param {boolean} [options.convertVideoToDesktop] - Whether or not proxied video should be returned as a desktop
     * stream. Defaults to false.
     * @param {Object} [options.pcConfig] - The {@code RTCConfiguration} to use for the WebRTC peer connection.
     * @param {JitsiConnection} [options.jitsiConnection] - The {@code JitsiConnection} which will be used to fetch
     * TURN credentials for the P2P connection.
     * @param {Function} options.onRemoteStream - Callback to invoke when a remote video stream has been received and
     * converted to a {@code JitsiLocalTrack}. The {@code JitsiLocalTrack} will be passed in.
     * @param {Function} options.onSendMessage - Callback to invoke when a message has to be sent (signaled) out. The
     * arguments passed in are the jid to send the message to and the message.
     */
    constructor(options?: {
        convertVideoToDesktop?: boolean;
        pcConfig?: any;
        jitsiConnection?: any;
        onRemoteStream: Function;
        onSendMessage: Function;
    });
    /**
     * Holds a reference to the collection of all callbacks.
     *
     * @type {Object}
     */
    _options: any;
    /**
     * The active instance of {@code ProxyConnectionService}.
     *
     * @type {ProxyConnectionPC|null}
     */
    _peerConnection: ProxyConnectionPC | null;
    /**
     * Callback invoked when an error occurs that should cause
     * {@code ProxyConnectionPC} to be closed if the peer is currently
     * connected. Sends an error message/reply back to the peer.
     *
     * @param {string} peerJid - The peer jid with which the connection was
     * attempted or started, and to which an iq with error details should be
     * sent.
     * @param {string} errorType - The constant indicating the type of the error
     * that occurred.
     * @param {string} details - Optional additional data about the error.
     * @private
     * @returns {void}
     */
    private _onFatalError;
    /**
     * Formats and forwards a message in an iq to be sent to a peer jid.
     *
     * @param {string} peerJid - The jid the iq should be sent to.
     * @param {Object} iq - The iq which would be sent to the peer jid.
     * @private
     * @returns {void}
     */
    private _onSendMessage;
    /**
     * Callback invoked when the remote peer of the {@code ProxyConnectionPC}
     * has offered a media stream. The stream is converted into a
     * {@code JitsiLocalTrack} for local usage if the {@code onRemoteStream}
     * callback is defined.
     *
     * @param {JitsiRemoteTrack} jitsiRemoteTrack - The {@code JitsiRemoteTrack}
     * for the peer's media stream.
     * @private
     * @returns {void}
     */
    private _onRemoteStream;
    /**
     * Parses a message object regarding a proxy connection to create a new
     * proxy connection or update an existing connection.
     *
     * @param {Object} message - A message object regarding establishing or
     * updating a proxy connection.
     * @param {Object} message.data - An object containing additional message
     * details.
     * @param {string} message.data.iq - The stringified iq which explains how
     * and what to update regarding the proxy connection.
     * @param {string} message.from - The message sender's full jid. Used for
     * sending replies.
     * @returns {void}
     */
    processMessage(message: {
        data: {
            iq: string;
        };
        from: string;
    }): void;
    /**
     * Instantiates and initiates a proxy peer connection.
     *
     * @param {string} peerJid - The jid of the remote client that should
     * receive messages.
     * @param {Array<JitsiLocalTrack>} localTracks - Initial media tracks to
     * send through to the peer.
     * @returns {void}
     */
    start(peerJid: string, localTracks?: Array<any>): void;
    /**
     * Terminates any active proxy peer connection.
     *
     * @returns {void}
     */
    stop(): void;
    /**
     * Transforms a stringified XML into XML wrapped in jQuery.
     *
     * @param {string} xml - The XML in string form.
     * @private
     * @returns {Object|null} A jQuery version of the xml. Null will be returned
     * if an error is encountered during transformation.
     */
    private _convertStringToXML;
    /**
     * Helper for creating an instance of {@code ProxyConnectionPC}.
     *
     * @param {string} peerJid - The jid of the remote peer with which the
     * {@code ProxyConnectionPC} will be established with.
     * @param {Object} options - Additional defaults to instantiate the
     * {@code ProxyConnectionPC} with. See the constructor of ProxyConnectionPC
     * for more details.
     * @private
     * @returns {ProxyConnectionPC}
     */
    private _createPeerConnection;
    /**
     * Invoked when preemptively closing the {@code ProxyConnectionPC}.
     *
     * @private
     * @returns {void}
     */
    private _selfCloseConnection;
}
141
+import ProxyConnectionPC from "./ProxyConnectionPC";

+ 8
- 0
types/auto/modules/proxyconnection/constants.d.ts View File

@@ -0,0 +1,8 @@
1
/**
 * The actions (verbs) used in proxy connection signaling messages.
 */
export namespace ACTIONS {
    const ACCEPT: string;
    const CONNECTION_ERROR: string;
    const INITIATE: string;
    const TERMINATE: string;
    const TRANSPORT_INFO: string;
    const UNAVAILABLE: string;
}

+ 123
- 0
types/auto/modules/qualitycontrol/ReceiveVideoController.d.ts View File

@@ -0,0 +1,123 @@
1
/**
 * This class translates the legacy signaling format between the client and the bridge (that affects bandwidth
 * allocation) to the new format described here https://github.com/jitsi/jitsi-videobridge/blob/master/doc/allocation.md
 */
export class ReceiverVideoConstraints {
    _defaultConstraints: {
        maxHeight: number;
    };
    _lastN: number;
    _maxFrameHeight: number;
    _selectedEndpoints: any[];
    _receiverVideoConstraints: {
        constraints: {};
        defaultConstraints: any;
        lastN: number;
        onStageEndpoints: any[];
        selectedEndpoints: any[];
    };
    /**
     * Returns the receiver video constraints that need to be sent on the bridge channel.
     */
    get constraints(): {
        constraints: {};
        defaultConstraints: any;
        lastN: number;
        onStageEndpoints: any[];
        selectedEndpoints: any[];
    };
    /**
     * Updates the lastN field of the ReceiverVideoConstraints sent to the bridge.
     *
     * @param {number} value
     * @returns {boolean} Returns true if the value has been updated, false otherwise.
     */
    updateLastN(value: number): boolean;
    /**
     * Updates the resolution (height requested) in the constraints field of the ReceiverVideoConstraints
     * sent to the bridge.
     *
     * @param {number} maxFrameHeight
     * @returns {boolean} Returns true if the value has been updated, false otherwise.
     */
    updateReceiveResolution(maxFrameHeight: number): boolean;
    /**
     * Updates the receiver constraints sent to the bridge.
     *
     * @param {Object} videoConstraints
     * @returns {boolean} Returns true if the value has been updated, false otherwise.
     */
    updateReceiverVideoConstraints(videoConstraints: any): boolean;
    /**
     * Updates the list of selected endpoints.
     *
     * @param {Array<string>} ids
     * @returns {void}
     */
    updateSelectedEndpoints(ids: Array<string>): void;
}
59
/**
 * This class manages the receive video constraints for a given {@link JitsiConference}. These constraints are
 * determined by the application based on how the remote video streams need to be displayed. This class is responsible
 * for communicating these constraints to the bridge over the bridge channel.
 */
export class ReceiveVideoController {
    /**
     * Creates a new instance for a given conference.
     *
     * @param {JitsiConference} conference the conference instance for which the new instance will be managing
     * the receive video quality constraints.
     * @param {RTC} rtc the rtc instance which is responsible for initializing the bridge channel.
     */
    constructor(conference: any, rtc: any);
    _conference: any;
    _rtc: any;
    _lastN: any;
    _maxFrameHeight: number;
    _receiverVideoConstraints: ReceiverVideoConstraints;
    _selectedEndpoints: any[];
    /**
     * Handles the {@link JitsiConferenceEvents.MEDIA_SESSION_STARTED}, that is when the conference creates new media
     * session. The preferred receive frameHeight is applied on the media session.
     *
     * @param {JingleSessionPC} mediaSession - the started media session.
     * @returns {void}
     * @private
     */
    private _onMediaSessionStarted;
    /**
     * Returns the lastN value for the conference.
     *
     * @returns {number}
     */
    getLastN(): number;
    /**
     * Elects the participants with the given ids to be the selected participants in order to always receive video
     * for this participant (even when last n is enabled).
     *
     * @param {Array<string>} ids - The user ids.
     * @returns {void}
     */
    selectEndpoints(ids: Array<string>): void;
    /**
     * Selects a new value for "lastN". The requested amount of videos are going to be delivered after the value is
     * in effect. Set to -1 for unlimited or all available videos.
     *
     * @param {number} value the new value for lastN.
     * @returns {void}
     */
    setLastN(value: number): void;
    /**
     * Sets the maximum video resolution the local participant should receive from remote participants.
     *
     * @param {number|undefined} maxFrameHeight - the new value.
     * @returns {void}
     */
    setPreferredReceiveMaxFrameHeight(maxFrameHeight: number | undefined): void;
    /**
     * Sets the receiver constraints for the conference.
     *
     * @param {Object} constraints The video constraints.
     */
    setReceiverConstraints(constraints: any): void;
}

+ 53
- 0
types/auto/modules/qualitycontrol/SendVideoController.d.ts View File

@@ -0,0 +1,53 @@
1
/**
 * The class manages send video constraints across media sessions({@link JingleSessionPC}) which belong to
 * {@link JitsiConference}. It finds the lowest common value, between the local user's send preference and
 * the remote party's receive preference. Also this module will consider only the active session's receive value,
 * because local tracks are shared and while JVB may have no preference, the remote p2p may have and they may be totally
 * different.
 */
export class SendVideoController {
    /**
     * Creates new instance for a given conference.
     *
     * @param {JitsiConference} conference - the conference instance for which the new instance will be managing
     * the send video quality constraints.
     * @param {RTC} rtc - the rtc instance that is responsible for sending the messages on the bridge channel.
     */
    constructor(conference: any, rtc: any);
    conference: any;
    layerSuspensionEnabled: any;
    rtc: any;
    // The latest send video constraints received from the remote side.
    _senderVideoConstraints: any;
    /**
     * Handles the {@link JitsiConferenceEvents.MEDIA_SESSION_STARTED}, that is when the conference creates new media
     * session. It doesn't mean it's already active though. For example the JVB connection may be created after
     * the conference has entered the p2p mode already.
     *
     * @param {JingleSessionPC} mediaSession - the started media session.
     * @private
     */
    private _onMediaSessionStarted;
    /**
     * Figures out the send video constraint as specified by {@link selectSendMaxFrameHeight} and sets it on all media
     * sessions for the reasons mentioned in this class description.
     *
     * @returns {Promise<void[]>}
     * @private
     */
    private _propagateSendMaxFrameHeight;
    /**
     * Selects the lowest common value for the local video send constraint by looking at local user's preference and
     * the active media session's receive preference set by the remote party.
     *
     * @returns {number|undefined}
     */
    selectSendMaxFrameHeight(): number | undefined;
    /**
     * Sets local preference for max send video frame height.
     *
     * @param {number} maxFrameHeight - the new value to set.
     * @returns {Promise<void[]>} - resolved when the operation is complete.
     */
    setPreferredSendMaxFrameHeight(maxFrameHeight: number): Promise<void[]>;
    // The locally preferred max send frame height, set by setPreferredSendMaxFrameHeight().
    preferredSendMaxFrameHeight: number;
}

+ 165
- 0
types/auto/modules/recording/JibriSession.d.ts View File

@@ -0,0 +1,165 @@
1
+/**
2
+ * Represents a recording session.
3
+ */
4
+export default class JibriSession {
5
+    /**
6
+     * Initializes a new JibriSession instance.
7
+     *
8
+     * @constructor
9
+     */
10
+    constructor(options?: {});
11
+    _connection: any;
12
+    _mode: any;
13
+    /**
14
+     * Returns the error related to the session instance, if any.
15
+     *
16
+     * @returns {string|undefined}
17
+     */
18
+    getError(): string | undefined;
19
+    /**
20
+     * Returns the session ID of the session instance.
21
+     *
22
+     * @returns {string|undefined}
23
+     */
24
+    getID(): string | undefined;
25
+    /**
26
+     * Returns the initiator of the session instance.
27
+     *
28
+     * @returns {JitsiParticipant|string} The participant that started the session.
29
+     */
30
+    getInitiator(): any | string;
31
+    /**
32
+     * Returns the streaming URL of the session.
33
+     *
34
+     * @returns {string|undefined}
35
+     */
36
+    getLiveStreamViewURL(): string | undefined;
37
+    /**
38
+     * Returns the current status of the session.
39
+     *
40
+     * @returns {string|undefined}
41
+     */
42
+    getStatus(): string | undefined;
43
+    /**
44
+     * Returns the jid of the participant that stopped the session.
45
+     *
46
+     * @returns {JitsiParticipant|string} The participant that stopped the session.
47
+     */
48
+    getTerminator(): any | string;
49
+    /**
50
+     * Returns the current recording mode of the session, such as "file".
51
+     *
52
+     * @returns {string}
53
+     */
54
+    getMode(): string;
55
+    /**
56
+     * Sets the last known error message related to the session.
57
+     *
58
+     * @param {string} error - The error string explaining why the session
59
+     * entered an error state.
60
+     * @returns {void}
61
+     */
62
+    setError(error: string): void;
63
+    _error: string;
64
+    /**
65
+     * Sets the last live stream URL for the session instance. Usually this is
66
+     * a YouTube URL and usually this is only set for "stream" sessions.
67
+     *
68
+     * @param {string} url - The live stream URL associated with the session.
69
+     * @returns {void}
70
+     */
71
+    setLiveStreamViewURL(url: string): void;
72
+    _liveStreamViewURL: string;
73
+    /**
74
+     * Sets the last known status for this recording session.
75
+     *
76
+     * @param {string} status - The new status to set.
77
+     * @returns {void}
78
+     */
79
+    setStatus(status: string): void;
80
+    _status: string;
81
+    /**
82
+     * Sets the participant that started the session.
83
+     * @param {JitsiParticipant | string} participant - The participant or resource id
84
+     * if local participant.
85
+     */
86
+    setInitiator(participant: any | string): void;
87
+    _initiator: any;
88
+    /**
89
+     * Sets the participant that stopped the session.
90
+     * @param {JitsiParticipant | string} participant - The participant or the resource id
91
+     * if local participant.
92
+     */
93
+    setTerminator(participant: any | string): void;
94
+    _terminator: any;
95
+    /**
96
+     * Sends a message to start the actual recording.
97
+     *
98
+     * @param {Object} options - Additional arguments for starting the
99
+     * recording.
100
+     * @param {string} [options.appData] - Data specific to the app/service that
101
+     * the result file will be uploaded.
102
+     * @param {string} [options.broadcastId] - The broadcast ID of an
103
+     * associated YouTube stream, used for knowing the URL from which the stream
104
+     * can be viewed.
105
+     * @param {string} options.focusMucJid - The JID of the focus participant
106
+     * that controls recording.
107
+     * @param {streamId} options.streamId - Necessary for live streaming, this
108
+     * is the stream key needed to start a live streaming session with the
109
+     * streaming service provider.
110
+     * @returns Promise
111
+     */
112
+    start({ appData, broadcastId, focusMucJid, streamId }: {
113
+        appData?: string;
114
+        broadcastId?: string;
115
+        focusMucJid: string;
116
+        streamId: any;
117
+    }): Promise<any>;
118
+    /**
119
+     * Sends a message to actually stop the recording session.
120
+     *
121
+     * @param {Object} options - Additional arguments for stopping the
122
+     * recording.
123
+     * @param {Object} options.focusMucJid - The JID of the focus participant
124
+     * that controls recording.
125
+     * @returns Promise
126
+     */
127
+    stop({ focusMucJid }: {
128
+        focusMucJid: any;
129
+    }): Promise<any>;
130
+    /**
131
+     * Generates the message to change the status of the recording session.
132
+     *
133
+     * @param {string} status - The new status to which the recording session
134
+     * should transition.
135
+     * @param {string} [options.appData] - Data specific to the app/service that
136
+     * the result file will be uploaded.
137
+     * @param {string} [options.broadcastId] - The broadcast ID of an
138
+     * associated YouTube stream, used for knowing the URL from which the stream
139
+     * can be viewed.
140
+     * @param {string} options.focusMucJid - The JID of the focus participant
141
+     * that controls recording.
142
+     * @param {streamId} options.streamId - Necessary for live streaming, this
143
+     * is the stream key needed to start a live streaming session with the
144
+     * streaming service provider.
145
+     * @returns Object - The XMPP IQ message.
146
+     */
147
+    _createIQ({ action, appData, broadcastId, focusMucJid, streamId }: string): any;
148
+    /**
149
+     * Handles the error from an iq and stores the error.
150
+     *
151
+     * @param {Node} errorIq - The error response from an Iq.
152
+     * @private
153
+     * @returns {void}
154
+     */
155
+    private _setErrorFromIq;
156
+    /**
157
+     * Sets the known session ID for this recording session.
158
+     *
159
+     * @param {string} sessionID
160
+     * @private
161
+     * @returns {void}
162
+     */
163
+    private _setSessionID;
164
+    _sessionID: string;
165
+}

+ 112
- 0
types/auto/modules/recording/RecordingManager.d.ts View File

@@ -0,0 +1,112 @@
1
+export default RecordingManager;
2
+/**
3
+ * A class responsible for starting and stopping recording sessions and emitting
4
+ * state updates for them.
5
+ */
6
+declare class RecordingManager {
7
+    /**
8
+     * Initialize {@code RecordingManager} with other objects that are necessary
9
+     * for starting a recording.
10
+     *
11
+     * @param {ChatRoom} chatRoom - The chat room to handle.
12
+     * @returns {void}
13
+     */
14
+    constructor(chatRoom: any);
15
+    /**
16
+     * All known recording sessions from the current conference.
17
+     */
18
+    _sessions: {};
19
+    _chatRoom: any;
20
+    /**
21
+     * Callback to invoke to parse through a presence update to find recording
22
+     * related updates (from Jibri participant doing the recording and the
23
+     * focus which controls recording).
24
+     *
25
+     * @param {Object} event - The presence data from the pubsub event.
26
+     * @param {Node} event.presence - An XMPP presence update.
27
+     * @param {boolean} event.fromHiddenDomain - Whether or not the update comes
28
+     * from a participant that is trusted but not visible, as would be the case
29
+     * with the Jibri recorder participant.
30
+     * @returns {void}
31
+     */
32
+    onPresence({ fromHiddenDomain, presence }: {
33
+        presence: Node;
34
+        fromHiddenDomain: boolean;
35
+    }): void;
36
+    /**
37
+     * Finds an existing recording session by session ID.
38
+     *
39
+     * @param {string} sessionID - The session ID associated with the recording.
40
+     * @returns {JibriSession|undefined}
41
+     */
42
+    getSession(sessionID: string): JibriSession | undefined;
43
+    /**
44
+     * Start a recording session.
45
+     *
46
+     * @param {Object} options - Configuration for the recording.
47
+     * @param {string} [options.appData] - Data specific to the app/service that
48
+     * the result file will be uploaded to.
49
+     * @param {string} [options.broadcastId] - The channel on which a
50
+     * live stream will occur.
51
+     * @param {string} options.mode - The mode in which recording should be
52
+     * started. Recognized values are "file" and "stream".
53
+     * @param {string} [options.streamId] - The stream key to be used
54
+     * for live stream broadcasting. Required for live streaming.
55
+     * @returns {Promise} A promise for starting a recording, which will pass
56
+     * back the session on success. The promise resolves after receiving an
57
+     * acknowledgment of the start request success or fail.
58
+     */
59
+    startRecording(options: {
60
+        appData?: string;
61
+    }): Promise<any>;
62
+    /**
63
+     * Stop a recording session.
64
+     *
65
+     * @param {string} sessionID - The ID associated with the recording session
66
+     * to be stopped.
67
+     * @returns {Promise} The promise resolves after receiving an
68
+     * acknowledgment of the stop request success or fail.
69
+     */
70
+    stopRecording(sessionID: string): Promise<any>;
71
+    /**
72
+     * Stores a reference to the passed in JibriSession.
73
+     *
74
+     * @param {JibriSession} session - The JibriSession instance to store.
75
+     * @returns {void}
76
+     */
77
+    _addSession(session: string): void;
78
+    /**
79
+     * Create a new instance of a recording session and stores a reference to
80
+     * it.
81
+     *
82
+     * @param {string} sessionID - The session ID of the recording in progress.
83
+     * @param {string} status - The current status of the recording session.
84
+     * @param {string} mode - The recording mode of the session.
85
+     * @returns {JibriSession}
86
+     */
87
+    _createSession(sessionID: string, status: string, mode: string): JibriSession;
88
+    /**
89
+     * Notifies listeners of an update to a recording session.
90
+     *
91
+     * @param {JibriSession} session - The session that has been updated.
92
+     * @param {string|undefined} initiator - The jid of the initiator of the update.
93
+     */
94
+    _emitSessionUpdate(session: JibriSession, initiator: string | undefined): void;
95
+    /**
96
+     * Parses presence to update an existing JibriSession or to create a new
97
+     * JibriSession.
98
+     *
99
+     * @param {Node} presence - An XMPP presence update.
100
+     * @returns {void}
101
+     */
102
+    _handleFocusPresence(presence: Node): void;
103
+    /**
104
+     * Handles updates from the Jibri which can broadcast a YouTube URL that
105
+     * needs to be updated in a JibriSession.
106
+     *
107
+     * @param {Node} presence - An XMPP presence update.
108
+     * @returns {void}
109
+     */
110
+    _handleJibriPresence(presence: Node): void;
111
+}
112
+import JibriSession from "./JibriSession";

+ 19
- 0
types/auto/modules/recording/recordingConstants.d.ts View File

@@ -0,0 +1,19 @@
1
+declare namespace _default {
2
+    namespace error {
3
+        const BUSY: string;
4
+        const ERROR: string;
5
+        const RESOURCE_CONSTRAINT: string;
6
+        const UNEXPECTED_REQUEST: string;
7
+        const SERVICE_UNAVAILABLE: string;
8
+    }
9
+    namespace mode {
10
+        const FILE: string;
11
+        const STREAM: string;
12
+    }
13
+    namespace status {
14
+        const OFF: string;
15
+        const ON: string;
16
+        const PENDING: string;
17
+    }
18
+}
19
+export default _default;

+ 77
- 0
types/auto/modules/recording/recordingXMLUtils.d.ts View File

@@ -0,0 +1,77 @@
1
+declare namespace _default {
2
+    /**
3
+     * Parses the presence update of the focus and returns an object with the
4
+     * statuses related to recording.
5
+     *
6
+     * @param {Node} presence - An XMPP presence update.
7
+     * @returns {Object} The current presence values related to recording.
8
+     */
9
+    function getFocusRecordingUpdate(presence: Node): any;
10
+    /**
11
+     * Parses the presence update of the focus and returns an object with the
12
+     * statuses related to recording.
13
+     *
14
+     * @param {Node} presence - An XMPP presence update.
15
+     * @returns {Object} The current presence values related to recording.
16
+     */
17
+    function getFocusRecordingUpdate(presence: Node): any;
18
+    /**
19
+     * Parses the presence update from a hidden domain participant and returns
20
+     * an object with the statuses related to recording.
21
+     *
22
+     * @param {Node} presence - An XMPP presence update.
23
+     * @returns {Object} The current presence values related to recording.
24
+     */
25
+    function getHiddenDomainUpdate(presence: Node): any;
26
+    /**
27
+     * Parses the presence update from a hidden domain participant and returns
28
+     * an object with the statuses related to recording.
29
+     *
30
+     * @param {Node} presence - An XMPP presence update.
31
+     * @returns {Object} The current presence values related to recording.
32
+     */
33
+    function getHiddenDomainUpdate(presence: Node): any;
34
+    /**
35
+     * Returns the recording session ID from a successful IQ.
36
+     *
37
+     * @param {Node} response - The response from the IQ.
38
+     * @returns {string} The session ID of the recording session.
39
+     */
40
+    function getSessionIdFromIq(response: Node): string;
41
+    /**
42
+     * Returns the recording session ID from a successful IQ.
43
+     *
44
+     * @param {Node} response - The response from the IQ.
45
+     * @returns {string} The session ID of the recording session.
46
+     */
47
+    function getSessionIdFromIq(response: Node): string;
48
+    /**
49
+     * Returns the recording session ID from a presence, if it exists.
50
+     *
51
+     * @param {Node} presence - An XMPP presence update.
52
+     * @returns {string|undefined} The session ID of the recording session.
53
+     */
54
+    function getSessionId(presence: Node): string;
55
+    /**
56
+     * Returns the recording session ID from a presence, if it exists.
57
+     *
58
+     * @param {Node} presence - An XMPP presence update.
59
+     * @returns {string|undefined} The session ID of the recording session.
60
+     */
61
+    function getSessionId(presence: Node): string;
62
+    /**
63
+     * Returns whether or not a presence is from the focus.
64
+     *
65
+     * @param {Node} presence - An XMPP presence update.
66
+     * @returns {boolean} True if the presence is from the focus.
67
+     */
68
+    function isFromFocus(presence: Node): boolean;
69
+    /**
70
+     * Returns whether or not a presence is from the focus.
71
+     *
72
+     * @param {Node} presence - An XMPP presence update.
73
+     * @returns {boolean} True if the presence is from the focus.
74
+     */
75
+    function isFromFocus(presence: Node): boolean;
76
+}
77
+export default _default;

+ 91
- 0
types/auto/modules/sdp/LocalSdpMunger.d.ts View File

@@ -0,0 +1,91 @@
1
+/**
2
+ * Fakes local SDP exposed to {@link JingleSessionPC} through the local
3
+ * description getter. Modifies the SDP, so that it will contain muted local
4
+ * video tracks description, even though their underlying {MediaStreamTrack}s
5
+ * are no longer in the WebRTC peerconnection. That prevents from SSRC updates
6
+ * being sent to Jicofo/remote peer and prevents sRD/sLD cycle on the remote
7
+ * side.
8
+ */
9
+export default class LocalSdpMunger {
10
+    /**
11
+     * Creates new <tt>LocalSdpMunger</tt> instance.
12
+     *
13
+     * @param {TraceablePeerConnection} tpc
14
+     * @param {string} localEndpointId - The endpoint id of the local user.
15
+     */
16
+    constructor(tpc: any, localEndpointId: string);
17
+    tpc: any;
18
+    localEndpointId: string;
19
+    /**
20
+     * Makes sure that muted local video tracks associated with the parent
21
+     * {@link TraceablePeerConnection} are described in the local SDP. It's done
22
+     * in order to prevent from sending 'source-remove'/'source-add' Jingle
23
+     * notifications when local video track is muted (<tt>MediaStream</tt> is
24
+     * removed from the peerconnection).
25
+     *
26
+     * NOTE 1 video track is assumed
27
+     *
28
+     * @param {SdpTransformWrap} transformer the transformer instance which will
29
+     * be used to process the SDP.
30
+     * @return {boolean} <tt>true</tt> if there were any modifications to
31
+     * the SDP wrapped by <tt>transformer</tt>.
32
+     * @private
33
+     */
34
+    private _addMutedLocalVideoTracksToSDP;
35
+    /**
36
+     * Returns a string that can be set as the MSID attribute for a source.
37
+     *
38
+     * @param {string} mediaType - Media type of the source.
39
+     * @param {string} trackId - Id of the MediaStreamTrack associated with the source.
40
+     * @param {string} streamId - Id of the MediaStream associated with the source.
41
+     * @returns {string|null}
42
+     */
43
+    _generateMsidAttribute(mediaType: string, trackId: string, streamId?: string): string | null;
44
+    /**
45
+     * Modifies 'cname', 'msid', 'label' and 'mslabel' by appending
46
+     * the id of {@link LocalSdpMunger#tpc} at the end, preceding by a dash
47
+     * sign.
48
+     *
49
+     * @param {MLineWrap} mediaSection - The media part (audio or video) of the
50
+     * session description which will be modified in place.
51
+     * @returns {void}
52
+     * @private
53
+     */
54
+    private _transformMediaIdentifiers;
55
+    /**
56
+     * Maybe modifies local description to fake local video tracks SDP when
57
+     * those are muted.
58
+     *
59
+     * @param {object} desc the WebRTC SDP object instance for the local
60
+     * description.
61
+     * @returns {RTCSessionDescription}
62
+     */
63
+    maybeAddMutedLocalVideoTracksToSDP(desc: object): RTCSessionDescription;
64
+    /**
65
+     * This transformation will make sure that stream identifiers are unique
66
+     * across all of the local PeerConnections even if the same stream is used
67
+     * by multiple instances at the same time.
68
+     * Each PeerConnection assigns different SSRCs to the same local
69
+     * MediaStream, but the MSID remains the same as it's used to identify
70
+     * the stream by the WebRTC backend. The transformation will append
71
+     * {@link TraceablePeerConnection#id} at the end of each stream's identifier
72
+     * ("cname", "msid", "label" and "mslabel").
73
+     *
74
+     * @param {RTCSessionDescription} sessionDesc - The local session
75
+     * description (this instance remains unchanged).
76
+     * @return {RTCSessionDescription} - Transformed local session description
77
+     * (a modified copy of the one given as the input).
78
+     */
79
+    transformStreamIdentifiers(sessionDesc: RTCSessionDescription): RTCSessionDescription;
80
+    /**
81
+     * Injects source names. Source names are needed for multiple streams per endpoint support. The final plan is to
82
+     * use the "mid" attribute for source names, but because the SDP to Jingle conversion still operates in the Plan-B
83
+     * semantics (one source name per media), a custom "name" attribute is injected into SSRC lines.
84
+     *
85
+     * @param {MLineWrap} mediaSection - The media part (audio or video) of the session description which will be
86
+     * modified in place.
87
+     * @returns {void}
88
+     * @private
89
+     */
90
+    private _injectSourceNames;
91
+}

+ 49
- 0
types/auto/modules/sdp/RtxModifier.d.ts View File

@@ -0,0 +1,49 @@
1
+/**
2
+ * End helper functions
3
+ */
4
+/**
5
+ * Adds any missing RTX streams for video streams
6
+ *  and makes sure that they remain consistent
7
+ */
8
+export default class RtxModifier {
9
+    /**
10
+     * Map of video ssrc to corresponding RTX
11
+     *  ssrc
12
+     */
13
+    correspondingRtxSsrcs: Map<any, any>;
14
+    /**
15
+     * Clear the cached map of primary video ssrcs to
16
+     *  their corresponding rtx ssrcs so that they will
17
+     *  not be used for the next call to modifyRtxSsrcs
18
+     */
19
+    clearSsrcCache(): void;
20
+    /**
21
+     * Explicitly set the primary video ssrc -> rtx ssrc
22
+     *  mapping to be used in modifyRtxSsrcs
23
+     * @param {Map} ssrcMapping a mapping of primary video
24
+     *  ssrcs to their corresponding rtx ssrcs
25
+     */
26
+    setSsrcCache(ssrcMapping: Map<any, any>): void;
27
+    /**
28
+     * Adds RTX ssrcs for any video ssrcs that don't
29
+     *  already have them.  If the video ssrc has been
30
+     *  seen before, and already had an RTX ssrc generated,
31
+     *  the same RTX ssrc will be used again.
32
+     * @param {string} sdpStr sdp in raw string format
33
+     */
34
+    modifyRtxSsrcs(sdpStr: string): string;
35
+    /**
36
+     * Does the same thing as {@link modifyRtxSsrcs}, but takes the
37
+     *  {@link MLineWrap} instance wrapping video media as an argument.
38
+     * @param {MLineWrap} videoMLine
39
+     * @return {boolean} <tt>true</tt> if the SDP wrapped by
40
+     *  {@link SdpTransformWrap} has been modified or <tt>false</tt> otherwise.
41
+     */
42
+    modifyRtxSsrcs2(videoMLine: any): boolean;
43
+    /**
44
+     * Strip all rtx streams from the given sdp
45
+     * @param {string} sdpStr sdp in raw string format
46
+     * @returns {string} sdp string with all rtx streams stripped
47
+     */
48
+    stripRtx(sdpStr: string): string;
49
+}

+ 48
- 0
types/auto/modules/sdp/SDP.d.ts View File

@@ -0,0 +1,48 @@
1
+/**
2
+ *
3
+ * @param sdp
4
+ */
5
+export default function SDP(sdp: any): void;
6
+export default class SDP {
7
+    /**
8
+     *
9
+     * @param sdp
10
+     */
11
+    constructor(sdp: any);
12
+    media: any;
13
+    raw: string;
14
+    session: string;
15
+    /**
16
+     * A flag will make {@link transportToJingle} and {@link jingle2media} replace
17
+     * ICE candidates IPs with invalid value of '1.1.1.1' which will cause ICE
18
+     * failure. The flag is used in the automated testing.
19
+     * @type {boolean}
20
+     */
21
+    failICE: boolean;
22
+    /**
23
+     * Whether or not to remove TCP ice candidates when translating from/to jingle.
24
+     * @type {boolean}
25
+     */
26
+    removeTcpCandidates: boolean;
27
+    /**
28
+     * Whether or not to remove UDP ice candidates when translating from/to jingle.
29
+     * @type {boolean}
30
+     */
31
+    removeUdpCandidates: boolean;
32
+    /**
33
+     * Returns map of MediaChannel mapped per channel idx.
34
+     */
35
+    getMediaSsrcMap(): {};
36
+    /**
37
+     * Returns <tt>true</tt> if this SDP contains given SSRC.
38
+     * @param ssrc the ssrc to check.
39
+     * @returns {boolean} <tt>true</tt> if this SDP contains given SSRC.
40
+     */
41
+    containsSSRC(ssrc: any): boolean;
42
+    toJingle(elem: any, thecreator: any): any;
43
+    transportToJingle(mediaindex: any, elem: any): void;
44
+    rtcpFbToJingle(mediaindex: any, elem: any, payloadtype: any): void;
45
+    rtcpFbFromJingle(elem: any, payloadtype: any): string;
46
+    fromJingle(jingle: any): void;
47
+    jingle2media(content: any): string;
48
+}

+ 25
- 0
types/auto/modules/sdp/SDPDiffer.d.ts View File

@@ -0,0 +1,25 @@
1
+/**
2
+ *
3
+ * @param mySDP
4
+ * @param otherSDP
5
+ */
6
+export default function SDPDiffer(mySDP: any, otherSDP: any): void;
7
+export default class SDPDiffer {
8
+    /**
9
+     *
10
+     * @param mySDP
11
+     * @param otherSDP
12
+     */
13
+    constructor(mySDP: any, otherSDP: any);
14
+    mySDP: any;
15
+    otherSDP: any;
16
+    /**
17
+     * Returns map of MediaChannel that contains media contained in
18
+     * 'mySDP', but not contained in 'otherSdp'. Mapped by channel idx.
19
+     */
20
+    getNewMedia(): {};
21
+    /**
22
+     * TODO: document!
23
+     */
24
+    toJingle(modify: any): boolean;
25
+}

+ 335
- 0
types/auto/modules/sdp/SDPUtil.d.ts View File

@@ -0,0 +1,335 @@
1
+export default SDPUtil;
2
+declare namespace SDPUtil {
3
+    function filterSpecialChars(text: any): any;
4
+    function filterSpecialChars(text: any): any;
5
+    function iceparams(mediadesc: any, sessiondesc: any): {
6
+        ufrag: any;
7
+        pwd: any;
8
+    };
9
+    function iceparams(mediadesc: any, sessiondesc: any): {
10
+        ufrag: any;
11
+        pwd: any;
12
+    };
13
+    function parseICEUfrag(line: any): any;
14
+    function parseICEUfrag(line: any): any;
15
+    function buildICEUfrag(frag: any): string;
16
+    function buildICEUfrag(frag: any): string;
17
+    function parseICEPwd(line: any): any;
18
+    function parseICEPwd(line: any): any;
19
+    function buildICEPwd(pwd: any): string;
20
+    function buildICEPwd(pwd: any): string;
21
+    function parseMID(line: any): any;
22
+    function parseMID(line: any): any;
23
+    /**
24
+     * Finds the MSID attribute in the given array of SSRC attribute lines and returns the value.
25
+     *
26
+     * @param {string[]} ssrcLines - an array of lines similar to 'a:213123 msid:stream-id track-id'.
27
+     * @returns {undefined|string}
28
+     */
29
+    function parseMSIDAttribute(ssrcLines: string[]): string;
30
+    /**
31
+     * Finds the MSID attribute in the given array of SSRC attribute lines and returns the value.
32
+     *
33
+     * @param {string[]} ssrcLines - an array of lines similar to 'a:213123 msid:stream-id track-id'.
34
+     * @returns {undefined|string}
35
+     */
36
+    function parseMSIDAttribute(ssrcLines: string[]): string;
37
+    function parseMLine(line: any): {
38
+        media: any;
39
+        port: any;
40
+        proto: any;
41
+        fmt: any;
42
+    };
43
+    function parseMLine(line: any): {
44
+        media: any;
45
+        port: any;
46
+        proto: any;
47
+        fmt: any;
48
+    };
49
+    function buildMLine(mline: any): string;
50
+    function buildMLine(mline: any): string;
51
+    function parseRTPMap(line: any): {
52
+        id: any;
53
+        name: any;
54
+        clockrate: any;
55
+        channels: any;
56
+    };
57
+    function parseRTPMap(line: any): {
58
+        id: any;
59
+        name: any;
60
+        clockrate: any;
61
+        channels: any;
62
+    };
63
+    /**
64
+     * Parses SDP line "a=sctpmap:..." and extracts SCTP port from it.
65
+     * @param line eg. "a=sctpmap:5000 webrtc-datachannel"
66
+     * @returns [SCTP port number, protocol, streams]
67
+     */
68
+    function parseSCTPMap(line: any): any[];
69
+    /**
70
+     * Parses SDP line "a=sctpmap:..." and extracts SCTP port from it.
71
+     * @param line eg. "a=sctpmap:5000 webrtc-datachannel"
72
+     * @returns [SCTP port number, protocol, streams]
73
+     */
74
+    function parseSCTPMap(line: any): any[];
75
+    function parseSCTPPort(line: any): any;
76
+    function parseSCTPPort(line: any): any;
77
+    function buildRTPMap(el: any): string;
78
+    function buildRTPMap(el: any): string;
79
+    function parseCrypto(line: any): {
80
+        tag: any;
81
+        'crypto-suite': any;
82
+        'key-params': any;
83
+        'session-params': any;
84
+    };
85
+    function parseCrypto(line: any): {
86
+        tag: any;
87
+        'crypto-suite': any;
88
+        'key-params': any;
89
+        'session-params': any;
90
+    };
91
+    function parseFingerprint(line: any): {
92
+        hash: any;
93
+        fingerprint: any;
94
+    };
95
+    function parseFingerprint(line: any): {
96
+        hash: any;
97
+        fingerprint: any;
98
+    };
99
+    function parseFmtp(line: any): {
100
+        name: any;
101
+        value: any;
102
+    }[];
103
+    function parseFmtp(line: any): {
104
+        name: any;
105
+        value: any;
106
+    }[];
107
+    function parseICECandidate(line: any): {
108
+        foundation: any;
109
+        component: any;
110
+        protocol: any;
111
+        priority: any;
112
+        ip: any;
113
+        port: any;
114
+        type: any;
115
+        generation: any;
116
+        'rel-addr': any;
117
+        'rel-port': any;
118
+        tcptype: any;
119
+        network: string;
120
+        id: string;
121
+    };
122
+    function parseICECandidate(line: any): {
123
+        foundation: any;
124
+        component: any;
125
+        protocol: any;
126
+        priority: any;
127
+        ip: any;
128
+        port: any;
129
+        type: any;
130
+        generation: any;
131
+        'rel-addr': any;
132
+        'rel-port': any;
133
+        tcptype: any;
134
+        network: string;
135
+        id: string;
136
+    };
137
+    function buildICECandidate(cand: any): string;
138
+    function buildICECandidate(cand: any): string;
139
+    function parseSSRC(desc: any): Map<any, any>;
140
+    function parseSSRC(desc: any): Map<any, any>;
141
+    /**
142
+     * Gets the source name out of the name attribute "a=ssrc:254321 name:name1".
143
+     *
144
+     * @param {string[]} ssrcLines
145
+     * @returns {string | undefined}
146
+     */
147
+    function parseSourceNameLine(ssrcLines: string[]): string;
148
+    /**
149
+     * Gets the source name out of the name attribute "a=ssrc:254321 name:name1".
150
+     *
151
+     * @param {string[]} ssrcLines
152
+     * @returns {string | undefined}
153
+     */
154
+    function parseSourceNameLine(ssrcLines: string[]): string;
155
+    function parseRTCPFB(line: any): {
156
+        pt: any;
157
+        type: any;
158
+        params: any;
159
+    };
160
+    function parseRTCPFB(line: any): {
161
+        pt: any;
162
+        type: any;
163
+        params: any;
164
+    };
165
+    function parseExtmap(line: any): {
166
+        value: any;
167
+        direction: any;
168
+        uri: any;
169
+        params: any;
170
+    };
171
+    function parseExtmap(line: any): {
172
+        value: any;
173
+        direction: any;
174
+        uri: any;
175
+        params: any;
176
+    };
177
+    function findLine(haystack: any, needle: any, sessionpart: any): any;
178
+    function findLine(haystack: any, needle: any, sessionpart: any): any;
179
+    function findLines(haystack: any, needle: any, sessionpart: any): any[];
180
+    function findLines(haystack: any, needle: any, sessionpart: any): any[];
181
+    function candidateToJingle(line: any): {
182
+        foundation: any;
183
+        component: any;
184
+        protocol: any;
185
+        priority: any;
186
+        ip: any;
187
+        port: any;
188
+        type: any;
189
+        generation: any;
190
+        'rel-addr': any;
191
+        'rel-port': any;
192
+        tcptype: any;
193
+        network: string;
194
+        id: string;
195
+    };
196
+    function candidateToJingle(line: any): {
197
+        foundation: any;
198
+        component: any;
199
+        protocol: any;
200
+        priority: any;
201
+        ip: any;
202
+        port: any;
203
+        type: any;
204
+        generation: any;
205
+        'rel-addr': any;
206
+        'rel-port': any;
207
+        tcptype: any;
208
+        network: string;
209
+        id: string;
210
+    };
211
+    function candidateFromJingle(cand: any): string;
212
+    function candidateFromJingle(cand: any): string;
213
+    /**
214
+     * Parse the 'most' primary video ssrc from the given m line
215
+     * @param {object} mLine object as parsed from transform.parse
216
+     * @return {number} the primary video ssrc from the given m line
217
+     */
218
+    function parsePrimaryVideoSsrc(videoMLine: any): number;
219
+    /**
220
+     * Parse the 'most' primary video ssrc from the given m line
221
+     * @param {object} mLine object as parsed from transform.parse
222
+     * @return {number} the primary video ssrc from the given m line
223
+     */
224
+    function parsePrimaryVideoSsrc(videoMLine: any): number;
225
+    /**
226
+     * Generate an ssrc
227
+     * @returns {number} an ssrc
228
+     */
229
+    function generateSsrc(): number;
230
+    /**
231
+     * Generate an ssrc
232
+     * @returns {number} an ssrc
233
+     */
234
+    function generateSsrc(): number;
235
+    /**
236
+     * Get an attribute for the given ssrc with the given attributeName
237
+     *  from the given mline
238
+     * @param {object} mLine an mLine object as parsed from transform.parse
239
+     * @param {number} ssrc the ssrc for which an attribute is desired
240
+     * @param {string} attributeName the name of the desired attribute
241
+     * @returns {string} the value corresponding to the given ssrc
242
+     *  and attributeName
243
+     */
244
+    function getSsrcAttribute(mLine: any, ssrc: number, attributeName: string): string;
245
+    /**
246
+     * Get an attribute for the given ssrc with the given attributeName
247
+     *  from the given mline
248
+     * @param {object} mLine an mLine object as parsed from transform.parse
249
+     * @param {number} ssrc the ssrc for which an attribute is desired
250
+     * @param {string} attributeName the name of the desired attribute
251
+     * @returns {string} the value corresponding to the given ssrc
252
+     *  and attributeName
253
+     */
254
+    function getSsrcAttribute(mLine: any, ssrc: number, attributeName: string): string;
255
+    /**
256
+     * Parses the ssrcs from the group sdp line and
257
+     *  returns them as a list of numbers
258
+     * @param {object} ssrcGroup the ssrcGroup object as parsed from
259
+     *  sdp-transform
260
+     * @returns {Array<number>} a list of the ssrcs in the group
261
+     *  parsed as numbers
262
+     */
263
+    function parseGroupSsrcs(ssrcGroup: any): any;
264
+    /**
265
+     * Parses the ssrcs from the group sdp line and
266
+     *  returns them as a list of numbers
267
+     * @param {object} ssrcGroup the ssrcGroup object as parsed from
268
+     *  sdp-transform
269
+     * @returns {Array<number>} a list of the ssrcs in the group
270
+     *  parsed as numbers
271
+     */
272
+    function parseGroupSsrcs(ssrcGroup: any): any;
273
+    /**
274
+     * Get the mline of the given type from the given sdp
275
+     * @param {object} sdp sdp as parsed from transform.parse
276
+     * @param {string} type the type of the desired mline (e.g. "video")
277
+     * @returns {object} a media object
278
+     */
279
+    function getMedia(sdp: any, type: string): any;
280
+    /**
281
+     * Get the mline of the given type from the given sdp
282
+     * @param {object} sdp sdp as parsed from transform.parse
283
+     * @param {string} type the type of the desired mline (e.g. "video")
284
+     * @returns {object} a media object
285
+     */
286
+    function getMedia(sdp: any, type: string): any;
287
+    /**
288
+     * Extracts the ICE username fragment from an SDP string.
289
+     * @param {string} sdp the SDP in raw text format
290
+     */
291
+    function getUfrag(sdp: string): string;
292
+    /**
293
+     * Extracts the ICE username fragment from an SDP string.
294
+     * @param {string} sdp the SDP in raw text format
295
+     */
296
+    function getUfrag(sdp: string): string;
297
+    /**
298
+     * Sets the given codecName as the preferred codec by moving it to the beginning
299
+     * of the payload types list (modifies the given mline in place). All instances
300
+     * of the codec are moved up.
301
+     * @param {object} mLine the mline object from an sdp as parsed by transform.parse
302
+     * @param {string} codecName the name of the preferred codec
303
+     */
304
+    function preferCodec(mline: any, codecName: string): void;
305
+    /**
306
+     * Sets the given codecName as the preferred codec by moving it to the beginning
307
+     * of the payload types list (modifies the given mline in place). All instances
308
+     * of the codec are moved up.
309
+     * @param {object} mLine the mline object from an sdp as parsed by transform.parse
310
+     * @param {string} codecName the name of the preferred codec
311
+     */
312
+    function preferCodec(mline: any, codecName: string): void;
313
+    /**
314
+     * Strips the given codec from the given mline. All related RTX payload
315
+     * types are also stripped. If the resulting mline would have no codecs,
316
+     * it's disabled.
317
+     *
318
+     * @param {object} mLine the mline object from an sdp as parsed by transform.parse.
319
+     * @param {string} codecName the name of the codec which will be stripped.
320
+     * @param {boolean} highProfile determines if only the high profile H264 codec needs to be
321
+     * stripped from the sdp when the passed codecName is H264.
322
+     */
323
+    function stripCodec(mLine: any, codecName: string, highProfile?: boolean): void;
324
+    /**
325
+     * Strips the given codec from the given mline. All related RTX payload
326
+     * types are also stripped. If the resulting mline would have no codecs,
327
+     * it's disabled.
328
+     *
329
+     * @param {object} mLine the mline object from an sdp as parsed by transform.parse.
330
+     * @param {string} codecName the name of the codec which will be stripped.
331
+     * @param {boolean} highProfile determines if only the high profile H264 codec needs to be
332
+     * stripped from the sdp when the passed codecName is H264.
333
+     */
334
+    function stripCodec(mLine: any, codecName: string, highProfile?: boolean): void;
335
+}

+ 2
- 0
types/auto/modules/sdp/SampleSdpStrings.d.ts View File

@@ -0,0 +1,2 @@
1
+declare namespace _default { }
2
+export default _default;

+ 50
- 0
types/auto/modules/sdp/SdpConsistency.d.ts View File

@@ -0,0 +1,50 @@
1
+/**
2
+ * Handles the work of keeping video ssrcs consistent across multiple
3
+ * o/a cycles, making it such that all stream operations can be
4
+ * kept local and do not need to be signaled.
5
+ * NOTE: This only keeps the 'primary' video ssrc consistent: meaning
6
+ * the primary video stream
7
+ */
8
+export default class SdpConsistency {
9
+    /**
10
+     * Constructor
11
+     * @param {string} logPrefix the log prefix appended to every logged
12
+     * message, currently used to distinguish for which
13
+     * <tt>TraceablePeerConnection</tt> the instance works.
14
+     */
15
+    constructor(logPrefix: string);
16
+    logPrefix: string;
17
+    /**
18
+     * Clear the cached video primary and primary rtx ssrcs so that
19
+     *  they will not be used for the next call to
20
+     *  makeVideoPrimarySsrcsConsistent
21
+     */
22
+    clearVideoSsrcCache(): void;
23
+    cachedPrimarySsrc: number;
24
+    injectRecvOnly: boolean;
25
+    /**
26
+     * Explicitly set the primary ssrc to be used in
27
+     *  makeVideoPrimarySsrcsConsistent
28
+     * @param {number} primarySsrc the primarySsrc to be used
29
+     *  in future calls to makeVideoPrimarySsrcsConsistent
30
+     * @throws Error if <tt>primarySsrc</tt> is not a number
31
+     */
32
+    setPrimarySsrc(primarySsrc: number): void;
33
+    /**
34
+     * Checks whether or not there is a primary video SSRC cached already.
35
+     * @return {boolean}
36
+     */
37
+    hasPrimarySsrcCached(): boolean;
38
+    /**
39
+     * Given an sdp string, either:
40
+     *  1) record the primary video and primary rtx ssrcs to be
41
+     *   used in future calls to makeVideoPrimarySsrcsConsistent or
42
+     *  2) change the primary and primary rtx ssrcs in the given sdp
43
+     *   to match the ones previously cached
44
+     * @param {string} sdpStr the sdp string to (potentially)
45
+     *  change to make the video ssrcs consistent
46
+     * @returns {string} a (potentially) modified sdp string
47
+     *  with ssrcs consistent with this class' cache
48
+     */
49
+    makeVideoPrimarySsrcsConsistent(sdpStr: string): string;
50
+}

+ 218
- 0
types/auto/modules/sdp/SdpTransformUtil.d.ts View File

@@ -0,0 +1,218 @@
1
+/**
2
+ * Parses the primary SSRC of given SSRC group.
3
+ * @param {object} group the SSRC group object as defined by the 'sdp-transform'
4
+ * @return {Number} the primary SSRC number
5
+ */
6
+export function parsePrimarySSRC(group: object): number;
7
+/**
8
+ * Parses the secondary SSRC of given SSRC group.
9
+ * @param {object} group the SSRC group object as defined by the 'sdp-transform'
10
+ * @return {Number} the secondary SSRC number
11
+ */
12
+export function parseSecondarySSRC(group: object): number;
13
+/**
14
+ * Utility class for SDP manipulation using the 'sdp-transform' library.
15
+ *
16
+ * Typical usage scenario:
17
+ *
18
+ * const transformer = new SdpTransformWrap(rawSdp);
19
+ * const videoMLine = transformer.selectMedia('video');
20
+ * if (videoMLine) {
21
+ *     videoMLine.addSSRCAttribute({
22
+ *         id: 2342343,
23
+ *         attribute: "cname",
24
+ *         value: "someCname"
25
+ *     });
26
+ *     rawSdp = transformer.toRawSDP();
27
+ * }
28
+ */
29
+export class SdpTransformWrap {
30
+    /**
31
+     * Creates new instance and parses the raw SDP into objects using
32
+     * 'sdp-transform' lib.
33
+     * @param {string} rawSDP the SDP in raw text format.
34
+     */
35
+    constructor(rawSDP: string);
36
+    parsedSDP: any;
37
+    /**
38
+     * Selects the first media SDP of given name.
39
+     * @param {string} mediaType the name of the media e.g. 'audio', 'video',
40
+     * 'data'.
41
+     * @return {MLineWrap|null} return {@link MLineWrap} instance for the media
42
+     * line or <tt>null</tt> if not found. The object returned references
43
+     * the underlying SDP state held by this <tt>SdpTransformWrap</tt> instance
44
+     * (it's not a copy).
45
+     */
46
+    selectMedia(mediaType: string): MLineWrap | null;
47
+    /**
48
+     * Converts the currently stored SDP state in this instance to raw text SDP
49
+     * format.
50
+     * @return {string}
51
+     */
52
+    toRawSDP(): string;
53
+}
54
+/**
55
+ * A wrapper around 'sdp-transform' media description object which provides
56
+ * utility methods for common SDP/SSRC related operations.
57
+ */
58
+declare class MLineWrap {
59
+    /**
60
+     * Creates new <tt>MLineWrap</tt>
61
+     * @param {Object} mLine the media line object as defined by 'sdp-transform'
62
+     * lib.
63
+     */
64
+    constructor(mLine: any);
65
+    mLine: any;
66
+    /**
67
+     * Setter for the mLine's "ssrcs" array.
68
+     *
69
+     * @param {Array<Object>} ssrcs an array of 'sdp-transform' SSRC attributes
70
+     * objects.
71
+     */
72
+    set ssrcs(arg: any[]);
73
+    /**
74
+     * Getter for the mLine's "ssrcs" array. If the array was undefined an empty
75
+     * one will be preassigned.
76
+     *
77
+     * @return {Array<Object>} an array of 'sdp-transform' SSRC attributes
78
+     * objects.
79
+     */
80
+    get ssrcs(): any[];
81
+    /**
82
+     * Modifies the direction of the underlying media description.
83
+     * @param {string} direction the new direction to be set
84
+     */
85
+    set direction(arg: string);
86
+    /**
87
+     * Returns the direction of the underlying media description.
88
+     * @return {string} the media direction name as defined in the SDP.
89
+     */
90
+    get direction(): string;
91
+    /**
92
+     * Modifies the SSRC groups array of the underlying media description
93
+     * object.
94
+     * @param {Array.<Object>} ssrcGroups
95
+     */
96
+    set ssrcGroups(arg: any[]);
97
+    /**
98
+     * Exposes the SSRC group array of the underlying media description object.
99
+     * @return {Array.<Object>}
100
+     */
101
+    get ssrcGroups(): any[];
102
+    /**
103
+     * Obtains value from SSRC attribute.
104
+     * @param {number} ssrcNumber the SSRC number for which attribute is to be
105
+     * found
106
+     * @param {string} attrName the name of the SSRC attribute to be found.
107
+     * @return {string|undefined} the value of SSRC attribute or
108
+     * <tt>undefined</tt> if no such attribute exists.
109
+     */
110
+    getSSRCAttrValue(ssrcNumber: number, attrName: string): string | undefined;
111
+    /**
112
+     * Removes all attributes for given SSRC number.
113
+     * @param {number} ssrcNum the SSRC number for which all attributes will be
114
+     * removed.
115
+     */
116
+    removeSSRC(ssrcNum: number): void;
117
+    /**
118
+     * Adds SSRC attribute
119
+     * @param {object} ssrcObj the SSRC attribute object as defined in
120
+     * the 'sdp-transform' lib.
121
+     */
122
+    addSSRCAttribute(ssrcObj: object): void;
123
+    /**
124
+     * Finds a SSRC group matching both semantics and SSRCs in order.
125
+     * @param {string} semantics the name of the semantics
126
+     * @param {string} [ssrcs] group SSRCs as a string (like it's defined in
127
+     * SSRC group object of the 'sdp-transform' lib) e.g. "1232546 342344 25434"
128
+     * @return {object|undefined} the SSRC group object or <tt>undefined</tt> if
129
+     * not found.
130
+     */
131
+    findGroup(semantics: string, ssrcs?: string): object | undefined;
132
+    /**
133
+     * Finds all groups matching given semantic's name.
134
+     * @param {string} semantics the name of the semantics
135
+     * @return {Array.<object>} an array of SSRC group objects as defined by
136
+     * the 'sdp-transform' lib.
137
+     */
138
+    findGroups(semantics: string): Array<object>;
139
+    /**
140
+     * Finds all groups matching given semantic's name and group's primary SSRC.
141
+     * @param {string} semantics the name of the semantics
142
+     * @param {number} primarySSRC the primary SSRC number to be matched
143
+     * @return {Object} SSRC group object as defined by the 'sdp-transform' lib.
144
+     */
145
+    findGroupByPrimarySSRC(semantics: string, primarySSRC: number): any;
146
+    /**
147
+     * @param {string|null} msid the media stream id or <tt>null</tt> to match
148
+     * the first SSRC object with any 'msid' value.
149
+     * @return {Object|undefined} the SSRC object as defined by 'sdp-transform'
150
+     * lib.
151
+     */
152
+    findSSRCByMSID(msid: string | null): any | undefined;
153
+    /**
154
+     * Gets the SSRC count for the underlying media description.
155
+     * @return {number}
156
+     */
157
+    getSSRCCount(): number;
158
+    /**
159
+     * Checks whether the underlying media description contains any SSRC groups.
160
+     * @return {boolean} <tt>true</tt> if there are any SSRC groups or
161
+     * <tt>false</tt> otherwise.
162
+     */
163
+    containsAnySSRCGroups(): boolean;
164
+    /**
165
+     * Finds the primary video SSRC.
166
+     * @returns {number|undefined} the primary video ssrc
167
+     * @throws Error if the underlying media description is not a video
168
+     */
169
+    getPrimaryVideoSsrc(): number | undefined;
170
+    /**
171
+     * Obtains RTX SSRC from the underlying video description (the
172
+     * secondary SSRC of the first "FID" group found)
173
+     * @param {number} primarySsrc the video ssrc for which to find the
174
+     * corresponding rtx ssrc
175
+     * @returns {number|undefined} the rtx ssrc (or undefined if there isn't
176
+     * one)
177
+     */
178
+    getRtxSSRC(primarySsrc: number): number | undefined;
179
+    /**
180
+     * Obtains all SSRCs contained in the underlying media description.
181
+     * @return {Array.<number>} an array with all SSRC as numbers.
182
+     */
183
+    getSSRCs(): Array<number>;
184
+    /**
185
+     * Obtains primary video SSRCs.
186
+     * @return {Array.<number>} an array of all primary video SSRCs as numbers.
187
+     * @throws Error if the wrapped media description is not a video.
188
+     */
189
+    getPrimaryVideoSSRCs(): Array<number>;
190
+    /**
191
+     * Dumps all SSRC groups of this media description to JSON.
192
+     */
193
+    dumpSSRCGroups(): string;
194
+    /**
195
+     * Removes all SSRC groups which contain given SSRC number at any position.
196
+     * @param {number} ssrc the SSRC for which all matching groups are to be
197
+     * removed.
198
+     */
199
+    removeGroupsWithSSRC(ssrc: number): void;
200
+    /**
201
+     * Removes groups that match given semantics.
202
+     * @param {string} semantics e.g. "SIM" or "FID"
203
+     */
204
+    removeGroupsBySemantics(semantics: string): void;
205
+    /**
206
+     * Replaces SSRC (does not affect SSRC groups, but only attributes).
207
+     * @param {number} oldSSRC the old SSRC number
208
+     * @param {number} newSSRC the new SSRC number
209
+     */
210
+    replaceSSRC(oldSSRC: number, newSSRC: number): void;
211
+    /**
212
+     * Adds given SSRC group to this media description.
213
+     * @param {object} group the SSRC group object as defined by
214
+     * the 'sdp-transform' lib.
215
+     */
216
+    addSSRCGroup(group: object): void;
217
+}
218
+export {};

+ 18
- 0
types/auto/modules/settings/Settings.d.ts View File

@@ -0,0 +1,18 @@
1
+declare namespace _default {
2
+    export { jitsiLocalStorage as _storage };
3
+    /**
4
+     * Initializes the Settings class.
5
+     *
6
+     * @param {Storage|undefined} externalStorage - Object that implements the Storage interface. This object will be
7
+     * used for storing data instead of jitsiLocalStorage if specified.
8
+     */
9
+    export function init(externalStorage: Storage): void;
10
+    /**
11
+     * Initializes the Settings class.
12
+     *
13
+     * @param {Storage|undefined} externalStorage - Object that implements the Storage interface. This object will be
14
+     * used for storing data instead of jitsiLocalStorage if specified.
15
+     */
16
+    export function init(externalStorage: Storage): void;
17
+}
18
+export default _default;

+ 155
- 0
types/auto/modules/statistics/AnalyticsAdapter.d.ts View File

@@ -0,0 +1,155 @@
1
+declare var _default: AnalyticsAdapter;
2
+export default _default;
3
+/**
4
+ * This class provides an API to lib-jitsi-meet and its users for sending
5
+ * analytics events. It serves as a bridge to different backend implementations
6
+ * ("analytics handlers") and a cache for events attempted to be sent before
7
+ * the analytics handlers were enabled.
8
+ *
9
+ * The API is designed to be an easy replacement for the previous version of
10
+ * this adapter, and is meant to be extended with more convenience methods.
11
+ *
12
+ *
13
+ * The API calls are translated to objects with the following structure, which
14
+ * are then passed to the sendEvent(event) function of the underlying handlers:
15
+ *
16
+ * {
17
+ *    type,
18
+ *
19
+ *    action,
20
+ *    actionSubject,
21
+ *    actionSubjectId,
22
+ *    attributes,
23
+ *    categories,
24
+ *    containerId,
25
+ *    containerType,
26
+ *    name,
27
+ *    objectId,
28
+ *    objectType,
29
+ *    source,
30
+ *    tags
31
+ * }
32
+ *
33
+ * The 'type' is one of 'operational', 'page', 'track' or 'ui', and some of the
34
+ * other properties are considered required according to the type.
35
+ *
36
+ * For events with type 'page', the required properties are: name.
37
+ *
38
+ * For events with type 'operational' and 'ui', the required properties are:
39
+ * action, actionSubject, source
40
+ *
41
+ * For events with type 'track', the required properties are:
42
+ * action, actionSubject, source, containerType, containerId, objectType,
43
+ * objectId
44
+ */
45
+declare class AnalyticsAdapter {
46
+    /**
47
+     * Reset the state to the initial one.
48
+     *
49
+     * @returns {void}
50
+     */
51
+    reset(): void;
52
+    /**
53
+     * Whether this AnalyticsAdapter has been disposed of or not. Once this
54
+     * is set to true, the AnalyticsAdapter is disabled and does not accept
55
+     * any more events, and it can not be re-enabled.
56
+     * @type {boolean}
57
+     */
58
+    disposed: boolean;
59
+    /**
60
+     * The set of handlers to which events will be sent.
61
+     * @type {Set<any>}
62
+     */
63
+    analyticsHandlers: Set<any>;
64
+    /**
65
+     * The cache of events which are not sent yet. The cache is enabled
66
+     * while this field is truthy, and disabled otherwise.
67
+     * @type {Array}
68
+     */
69
+    cache: any[];
70
+    /**
71
+     * Map of properties that will be added to every event. Note that the
72
+     * keys will be prefixed with "permanent.".
73
+     */
74
+    permanentProperties: any;
75
+    /**
76
+     * The name of the conference that this AnalyticsAdapter is associated
77
+     * with.
78
+     * @type {null}
79
+     */
80
+    conferenceName: any;
81
+    /**
82
+     * Dispose analytics. Clears all handlers.
83
+     */
84
+    dispose(): void;
85
+    /**
86
+     * Sets the handlers that are going to be used to send analytics. Sends any
87
+     * cached events.
88
+     * @param {Array} handlers the handlers
89
+     */
90
+    setAnalyticsHandlers(handlers: any[]): void;
91
+    /**
92
+     * Set the user properties to the analytics handlers.
93
+     *
94
+     * @returns {void}
95
+     */
96
+    _setUserProperties(): void;
97
+    /**
98
+     * Adds a set of permanent properties to this AnalyticsAdapter.
99
+     * Permanent properties will be added as "attributes" to events sent to
100
+     * the underlying "analytics handlers", and their keys will be prefixed
101
+     * by "permanent_", i.e. adding a permanent property {key: "value"} will
102
+     * result in {"permanent_key": "value"} object to be added to the
103
+     * "attributes" field of events.
104
+     *
105
+     * @param {Object} properties the properties to add
106
+     */
107
+    addPermanentProperties(properties: any): void;
108
+    /**
109
+     * Sets the name of the conference that this AnalyticsAdapter is associated
110
+     * with.
111
+     * @param name the name to set.
112
+     */
113
+    setConferenceName(name: any): void;
114
+    /**
115
+     * Sends an event with a given name and given properties. The first
116
+     * parameter is either a string or an object. If it is a string, it is used
117
+     * as the event name and the second parameter is used at the attributes to
118
+     * attach to the event. If it is an object, it represents the whole event,
119
+     * including any desired attributes, and the second parameter is ignored.
120
+     *
121
+     * @param {String|Object} eventName either a string to be used as the name
122
+     * of the event, or an event object. If an event object is passed, the
123
+     * properties parameters is ignored.
124
+     * @param {Object} properties the properties/attributes to attach to the
125
+     * event, if eventName is a string.
126
+     */
127
+    sendEvent(eventName: string | any, properties?: any): void;
128
+    /**
129
+     * Checks whether an event has all of the required fields set, and tries
130
+     * to fill in some of the missing fields with reasonable default values.
131
+     * Returns true if after this operation the event has all of the required
132
+     * fields set, and false otherwise (if some of the required fields were not
133
+     * set and the attempt to fill them in with a default failed).
134
+     *
135
+     * @param event the event object.
136
+     * @return {boolean} true if the event (after the call to this function)
137
+     * contains all of the required fields, and false otherwise.
138
+     * @private
139
+     */
140
+    private _verifyRequiredFields;
141
+    /**
142
+     * Saves an event to the cache, if the cache is enabled.
143
+     * @param event the event to save.
144
+     * @returns {boolean} true if the event was saved, and false otherwise (i.e.
145
+     * if the cache was disabled).
146
+     * @private
147
+     */
148
+    private _maybeCacheEvent;
149
+    /**
150
+     *
151
+     * @param event
152
+     * @private
153
+     */
154
+    private _sendEvent;
155
+}

+ 46
- 0
types/auto/modules/statistics/AudioOutputProblemDetector.d.ts View File

@@ -0,0 +1,46 @@
1
+/**
2
+ * Collects the average audio levels per participant from the local stats and the stats received by every remote
3
+ * participant and compares them to detect potential audio problem for a participant.
4
+ */
5
+export default class AudioOutputProblemDetector {
6
+    /**
7
+     * Creates new <tt>AudioOutputProblemDetector</tt> instance.
8
+     *
9
+     * @param {JitsiConference} conference - The conference instance to be monitored.
10
+     */
11
+    constructor(conference: any);
12
+    _conference: any;
13
+    _localAudioLevelCache: {};
14
+    _reportedParticipants: any[];
15
+    _audioProblemCandidates: {};
16
+    _numberOfRemoteAudioLevelsReceived: {};
17
+    /**
18
+     * A listener for audio level data retrieved by the local stats.
19
+     *
20
+     * @param {TraceablePeerConnection} tpc - The <tt>TraceablePeerConnection</tt> instance used to gather the data.
21
+     * @param {Object} avgAudioLevels - The average audio levels per participant.
22
+     * @returns {void}
23
+     */
24
+    _onLocalAudioLevelsReport(tpc: any, { avgAudioLevels }: any): void;
25
+    /**
26
+     * A listener for audio level data received by a remote participant.
27
+     *
28
+     * @param {string} userID - The user id of the participant that sent the data.
29
+     * @param {number} audioLevel - The average audio level value.
30
+     * @returns {void}
31
+     */
32
+    _onRemoteAudioLevelReceived(userID: string, { avgAudioLevels }: number): void;
33
+    /**
34
+     * Clears the data stored for a participant.
35
+     *
36
+     * @param {string} userID - The id of the participant.
37
+     * @returns {void}
38
+     */
39
+    _clearUserData(userID: string): void;
40
+    /**
41
+     * Disposes the allocated resources.
42
+     *
43
+     * @returns {void}
44
+     */
45
+    dispose(): void;
46
+}

+ 386
- 0
types/auto/modules/statistics/AvgRTPStatsReporter.d.ts View File

@@ -0,0 +1,386 @@
1
+/**
2
+ * Reports average RTP statistics values (arithmetic mean) to the analytics
3
+ * module for things like bit rate, bandwidth, packet loss etc. It keeps track
4
+ * of the P2P vs JVB conference modes and submits the values under different
5
+ * namespaces (the events for P2P mode have 'p2p.' prefix). Every switch between
6
+ * P2P mode resets the data collected so far and averages are calculated from
7
+ * scratch.
8
+ */
9
+export default class AvgRTPStatsReporter {
10
+    /**
11
+     * Creates new instance of <tt>AvgRTPStatsReporter</tt>
12
+     * @param {JitsiConference} conference
13
+     * @param {number} n the number of samples, before arithmetic mean is to be
14
+     * calculated and values submitted to the analytics module.
15
+     */
16
+    constructor(conference: any, n: number);
17
+    /**
18
+     * How many {@link ConnectionQualityEvents.LOCAL_STATS_UPDATED} samples
19
+     * are to be included in arithmetic mean calculation.
20
+     * @type {number}
21
+     * @private
22
+     */
23
+    private _n;
24
+    /**
25
+     * The current sample index. Starts from 0 and goes up to {@link _n})
26
+     * when analytics report will be submitted.
27
+     * @type {number}
28
+     * @private
29
+     */
30
+    private _sampleIdx;
31
+    /**
32
+     * The conference for which stats will be collected and reported.
33
+     * @type {JitsiConference}
34
+     * @private
35
+     */
36
+    private _conference;
37
+    /**
38
+     * Average audio upload bitrate
39
+     * XXX What are the units?
40
+     * @type {AverageStatReport}
41
+     * @private
42
+     */
43
+    private _avgAudioBitrateUp;
44
+    /**
45
+     * Average audio download bitrate
46
+     * XXX What are the units?
47
+     * @type {AverageStatReport}
48
+     * @private
49
+     */
50
+    private _avgAudioBitrateDown;
51
+    /**
52
+     * Average video upload bitrate
53
+     * XXX What are the units?
54
+     * @type {AverageStatReport}
55
+     * @private
56
+     */
57
+    private _avgVideoBitrateUp;
58
+    /**
59
+     * Average video download bitrate
60
+     * XXX What are the units?
61
+     * @type {AverageStatReport}
62
+     * @private
63
+     */
64
+    private _avgVideoBitrateDown;
65
+    /**
66
+     * Average upload bandwidth
67
+     * XXX What are the units?
68
+     * @type {AverageStatReport}
69
+     * @private
70
+     */
71
+    private _avgBandwidthUp;
72
+    /**
73
+     * Average download bandwidth
74
+     * XXX What are the units?
75
+     * @type {AverageStatReport}
76
+     * @private
77
+     */
78
+    private _avgBandwidthDown;
79
+    /**
80
+     * Average total packet loss
81
+     * XXX What are the units?
82
+     * @type {AverageStatReport}
83
+     * @private
84
+     */
85
+    private _avgPacketLossTotal;
86
+    /**
87
+     * Average upload packet loss
88
+     * XXX What are the units?
89
+     * @type {AverageStatReport}
90
+     * @private
91
+     */
92
+    private _avgPacketLossUp;
93
+    /**
94
+     * Average download packet loss
95
+     * XXX What are the units?
96
+     * @type {AverageStatReport}
97
+     * @private
98
+     */
99
+    private _avgPacketLossDown;
100
+    /**
101
+     * Average FPS for remote videos
102
+     * @type {AverageStatReport}
103
+     * @private
104
+     */
105
+    private _avgRemoteFPS;
106
+    /**
107
+     * Average FPS for remote screen streaming videos (reported only if not
108
+     * a <tt>NaN</tt>).
109
+     * @type {AverageStatReport}
110
+     * @private
111
+     */
112
+    private _avgRemoteScreenFPS;
113
+    /**
114
+     * Average FPS for local video (camera)
115
+     * @type {AverageStatReport}
116
+     * @private
117
+     */
118
+    private _avgLocalFPS;
119
+    /**
120
+     * Average FPS for local screen streaming video (reported only if not
121
+     * a <tt>NaN</tt>).
122
+     * @type {AverageStatReport}
123
+     * @private
124
+     */
125
+    private _avgLocalScreenFPS;
126
+    /**
127
+     * Average pixels for remote camera videos (reported only if
128
+     * not a <tt>NaN</tt>).
129
+     * @type {AverageStatReport}
130
+     * @private
131
+     */
132
+    private _avgRemoteCameraPixels;
133
+    /**
134
+     * Average pixels for remote screen streaming videos (reported only if
135
+     * not a <tt>NaN</tt>).
136
+     * @type {AverageStatReport}
137
+     * @private
138
+     */
139
+    private _avgRemoteScreenPixels;
140
+    /**
141
+     * Average pixels for local video (camera)
142
+     * @type {AverageStatReport}
143
+     * @private
144
+     */
145
+    private _avgLocalCameraPixels;
146
+    /**
147
+     * Average pixels for local screen streaming video (reported only if not
148
+     * a <tt>NaN</tt>).
149
+     * @type {AverageStatReport}
150
+     * @private
151
+     */
152
+    private _avgLocalScreenPixels;
153
+    /**
154
+     * Average connection quality as defined by
155
+     * the {@link ConnectionQuality} module.
156
+     * @type {AverageStatReport}
157
+     * @private
158
+     */
159
+    private _avgCQ;
160
+    _cachedTransportStats: {
161
+        p2p: any;
162
+        local_candidate_type: any;
163
+        remote_candidate_type: any;
164
+        transport_type: any;
165
+    };
166
+    _onLocalStatsUpdated: (data: any) => void;
167
+    _onP2PStatusChanged: () => void;
168
+    _onJvb121StatusChanged: (oldStatus: any, newStatus: any) => void;
169
+    jvbStatsMonitor: ConnectionAvgStats;
170
+    p2pStatsMonitor: ConnectionAvgStats;
171
+    /**
172
+     * Processes next batch of stats reported on
173
+     * {@link ConnectionQualityEvents.LOCAL_STATS_UPDATED}.
174
+     * @param {go figure} data
175
+     * @private
176
+     */
177
+    private _calculateAvgStats;
178
+    /**
179
+     * Calculates average number of pixels for the report
180
+     *
181
+     * @param {map} peerResolutions a map of peer resolutions
182
+     * @param {boolean} isLocal if the average is to be calculated for the local
183
+     * video or <tt>false</tt> if for remote videos.
184
+     * @param {VideoType} videoType
185
+     * @return {number|NaN} average number of pixels or <tt>NaN</tt> if there
186
+     * are no samples.
187
+     * @private
188
+     */
189
+    private _calculateAvgVideoPixels;
190
+    /**
191
+     * Calculate average pixels for either remote or local participant
192
+     * @param {object} videos maps resolution per video SSRC
193
+     * @param {JitsiParticipant|null} participant remote participant or
194
+     * <tt>null</tt> for local video pixels calculation.
195
+     * @param {VideoType} videoType the type of the video for which an average
196
+     * will be calculated.
197
+     * @return {number|NaN} average video pixels of all participant's videos or
198
+     * <tt>NaN</tt> if currently not available
199
+     * @private
200
+     */
201
+    private _calculatePeerAvgVideoPixels;
202
+    /**
203
+     * Calculates average FPS for the report
204
+     * @param {go figure} frameRate
205
+     * @param {boolean} isLocal if the average is to be calculated for the local
206
+     * video or <tt>false</tt> if for remote videos.
207
+     * @param {VideoType} videoType
208
+     * @return {number|NaN} average FPS or <tt>NaN</tt> if there are no samples.
209
+     * @private
210
+     */
211
+    private _calculateAvgVideoFps;
212
+    /**
213
+     * Calculate average FPS for either remote or local participant
214
+     * @param {object} videos maps FPS per video SSRC
215
+     * @param {JitsiParticipant|null} participant remote participant or
216
+     * <tt>null</tt> for local FPS calculation.
217
+     * @param {VideoType} videoType the type of the video for which an average
218
+     * will be calculated.
219
+     * @return {number|NaN} average FPS of all participant's videos or
220
+     * <tt>NaN</tt> if currently not available
221
+     * @private
222
+     */
223
+    private _calculatePeerAvgVideoFps;
224
+    /**
225
+     * Sends the 'transport.stats' analytics event whenever we detect that
226
+     * there is a change in the local or remote candidate type on the transport
227
+     * that is currently selected.
228
+     * @param {*} data
229
+     * @private
230
+     */
231
+    private _maybeSendTransportAnalyticsEvent;
232
+    /**
233
+     * Resets the stats related to JVB connection. Must not be called when in
234
+     * P2P mode, because then the {@link AverageStatReport} instances are
235
+     * tracking P2P stats. Note that this should never happen unless something
236
+     * is wrong with the P2P and JVB121 events.
237
+     * @private
238
+     */
239
+    private _resetAvgJvbStats;
240
+    /**
241
+     * Reset cache of all averages and {@link _sampleIdx}.
242
+     * @private
243
+     */
244
+    private _resetAvgStats;
245
+    /**
246
+     * Unregisters all event listeners and stops working.
247
+     */
248
+    dispose(): void;
249
+}
250
+/**
251
+ * Class gathers the stats that are calculated and reported for a
252
+ * {@link TraceablePeerConnection} even if it's not currently active. For
253
+ * example we want to monitor RTT for the JVB connection while in P2P mode.
254
+ */
255
+declare class ConnectionAvgStats {
256
+    /**
257
+     * Creates new <tt>ConnectionAvgStats</tt>
258
+     * @param {AvgRTPStatsReporter} avgRtpStatsReporter
259
+     * @param {boolean} isP2P
260
+     * @param {number} n the number of samples, before arithmetic mean is to be
261
+     * calculated and values submitted to the analytics module.
262
+     */
263
+    constructor(avgRtpStatsReporter: AvgRTPStatsReporter, isP2P: boolean, n: number);
264
+    /**
265
+     * Is this instance for JVB or P2P connection ?
266
+     * @type {boolean}
267
+     */
268
+    isP2P: boolean;
269
+    /**
270
+     * How many samples are to be included in arithmetic mean calculation.
271
+     * @type {number}
272
+     * @private
273
+     */
274
+    private _n;
275
+    /**
276
+     * The current sample index. Starts from 0 and goes up to {@link _n})
277
+     * when analytics report will be submitted.
278
+     * @type {number}
279
+     * @private
280
+     */
281
+    private _sampleIdx;
282
+    /**
283
+     * Average round trip time reported by the ICE candidate pair.
284
+     * @type {AverageStatReport}
285
+     */
286
+    _avgRTT: AverageStatReport;
287
+    /**
288
+     * Map stores average RTT to the JVB reported by remote participants.
289
+     * Mapped per participant id {@link JitsiParticipant.getId}.
290
+     *
291
+     * This is used only when {@link ConnectionAvgStats.isP2P} equals to
292
+     * <tt>false</tt>.
293
+     *
294
+     * @type {Map<string,AverageStatReport>}
295
+     * @private
296
+     */
297
+    private _avgRemoteRTTMap;
298
+    /**
299
+     * The conference for which stats will be collected and reported.
300
+     * @type {JitsiConference}
301
+     * @private
302
+     */
303
+    private _avgRtpStatsReporter;
304
+    /**
305
+     * The latest average E2E RTT for the JVB connection only.
306
+     *
307
+     * This is used only when {@link ConnectionAvgStats.isP2P} equals to
308
+     * <tt>false</tt>.
309
+     *
310
+     * @type {number}
311
+     */
312
+    _avgEnd2EndRTT: number;
313
+    _onConnectionStats: (tpc: any, stats: any) => void;
314
+    _onUserLeft: (id: any) => boolean;
315
+    _onRemoteStatsUpdated: (id: any, data: any) => void;
316
+    /**
317
+     * Processes next batch of stats.
318
+     * @param {go figure} data
319
+     * @private
320
+     */
321
+    private _calculateAvgStats;
322
+    /**
323
+     * Calculates arithmetic mean of all RTTs towards the JVB reported by
324
+     * participants.
325
+     * @return {number|NaN} NaN if not available (not enough data)
326
+     * @private
327
+     */
328
+    private _calculateAvgRemoteRTT;
329
+    /**
330
+     * Processes {@link ConnectionQualityEvents.REMOTE_STATS_UPDATED} to analyse
331
+     * RTT towards the JVB reported by each participant.
332
+     * @param {string} id {@link JitsiParticipant.getId}
333
+     * @param {go figure in ConnectionQuality.js} data
334
+     * @private
335
+     */
336
+    private _processRemoteStats;
337
+    /**
338
+     * Reset cache of all averages and {@link _sampleIdx}.
339
+     * @private
340
+     */
341
+    private _resetAvgStats;
342
+    /**
343
+     *
344
+     */
345
+    dispose(): void;
346
+}
347
+/**
348
+ * This will calculate an average for one, named stat and submit it to
349
+ * the analytics module when requested. It automatically counts the samples.
350
+ */
351
+declare class AverageStatReport {
352
+    /**
353
+     * Creates new <tt>AverageStatReport</tt> for given name.
354
+     * @param {string} name that's the name of the event that will be reported
355
+     * to the analytics module.
356
+     */
357
+    constructor(name: string);
358
+    name: string;
359
+    count: number;
360
+    sum: number;
361
+    samples: any[];
362
+    /**
363
+     * Adds the next value that will be included in the average when
364
+     * {@link calculate} is called.
365
+     * @param {number} nextValue
366
+     */
367
+    addNext(nextValue: number): void;
368
+    /**
369
+     * Calculates an average for the samples collected using {@link addNext}.
370
+     * @return {number|NaN} an average of all collected samples or <tt>NaN</tt>
371
+     * if no samples were collected.
372
+     */
373
+    calculate(): number | number;
374
+    /**
375
+     * Appends the report to the analytics "data" object. The object will be
376
+     * set under <tt>prefix</tt> + {@link this.name} key.
377
+     * @param {Object} report the analytics "data" object
378
+     */
379
+    appendReport(report: any): void;
380
+    /**
381
+     * Clears all memory of any samples collected, so that new average can be
382
+     * calculated using this instance.
383
+     */
384
+    reset(): void;
385
+}
386
+export {};

+ 249
- 0
types/auto/modules/statistics/CallStats.d.ts View File

@@ -0,0 +1,249 @@
1
/**
 * An instance of this class is a wrapper for the CallStats API fabric. A fabric
 * reports one peer connection to the CallStats backend and is allocated with
 * {@link callstats.addNewFabric}. It has a bunch of instance methods for
 * reporting various events. A fabric is considered disposed when
 * {@link CallStats.sendTerminateEvent} is executed.
 *
 * Currently only one backend instance can be created ever and it's done using
 * {@link CallStats.initBackend}. At the time of this writing there is no way to
 * explicitly shutdown the backend, but it's supposed to close its connection
 * automatically, after all fabrics have been terminated.
 */
declare class CallStats {
    /**
     * A callback passed to {@link callstats.addNewFabric}.
     * @param {string} error 'success' means ok
     * @param {string} msg some more details
     * @private
     */
    private static _addNewFabricCallback;
    /**
     * Callback passed to {@link callstats.initialize} (backend initialization)
     * @param {string} error 'success' means ok
     * @param {String} msg
     * @private
     */
    private static _initCallback;
    /**
     * Empties report queue.
     *
     * @param {CallStats} csInstance - The callstats instance.
     * @private
     */
    private static _emptyReportQueue;
    /**
     * Reports an error to callstats.
     *
     * @param {CallStats} [cs]
     * @param type the type of the error, which will be one of the wrtcFuncNames
     * @param error the error
     * @param pc the peerconnection
     * @private
     */
    private static _reportError;
    /**
     * Reports an event to callstats.
     *
     * @param {CallStats} cs
     * @param event the type of the event, which will be one of the fabricEvent
     * @param eventData additional data to pass to event
     * @private
     */
    private static _reportEvent;
    /**
     * Wraps some of the CallStats API method and logs their calls with
     * arguments on the debug logging level. Also wraps some of the backend
     * methods execution into try catch blocks to not crash the app in case
     * there is a problem with the backend itself.
     * @param {callstats} theBackend
     * @private
     */
    private static _traceAndCatchBackendCalls;
    /**
     * Returns the Set with the currently existing {@link CallStats} instances.
     * Lazily initializes the Set to allow any Set polyfills to be applied.
     * @type {Set<CallStats>}
     */
    static get fabrics(): Set<CallStats>;
    /**
     * Initializes the CallStats backend. Should be called only if
     * {@link CallStats.isBackendInitialized} returns <tt>false</tt>.
     * @param {object} options
     * @param {String} options.callStatsID CallStats credentials - ID
     * @param {String} options.callStatsSecret CallStats credentials - secret
     * @param {string} options.aliasName the <tt>aliasName</tt> part of
     * the <tt>userID</tt> aka endpoint ID, see CallStats docs for more info.
     * @param {string} options.userName the <tt>userName</tt> part of
     * the <tt>userID</tt> aka display name, see CallStats docs for more info.
     * @param {object} options.configParams the set of parameters
     * to enable/disable certain features in the library. See CallStats docs for more info.
     *
     */
    static initBackend(options: {
        callStatsID: string;
        callStatsSecret: string;
        aliasName: string;
        userName: string;
        configParams: object;
    }): boolean;
    /**
     * Checks if the CallStats backend has been created. It does not mean that
     * it has been initialized, but only that the API instance has been
     * allocated successfully.
     * @return {boolean} <tt>true</tt> if backend exists or <tt>false</tt>
     * otherwise
     */
    static isBackendInitialized(): boolean;
    /**
     * Notifies CallStats about active device.
     * @param {{deviceList: {String:String}}} devicesData list of devices with
     * their data
     * @param {CallStats} cs callstats instance related to the event
     */
    static sendActiveDeviceListEvent(devicesData: {
        deviceList: {
            String: string;
        };
    }, cs: CallStats): void;
    /**
     * Notifies CallStats that there is a log we want to report.
     *
     * @param {Error} e error to send or {String} message
     * @param {CallStats} cs callstats instance related to the error (optional)
     */
    static sendApplicationLog(e: Error, cs: CallStats): void;
    /**
     * Sends the given feedback through CallStats.
     *
     * @param {string} conferenceID the conference ID for which the feedback
     * will be reported.
     * @param overall an integer between 1 and 5 indicating the
     * user feedback
     * @param comment detailed feedback from the user.
     */
    static sendFeedback(conferenceID: string, overall: any, comment: any): Promise<any>;
    /**
     * Notifies CallStats that getUserMedia failed.
     *
     * @param {Error} e error to send
     * @param {CallStats} cs callstats instance related to the error (optional)
     */
    static sendGetUserMediaFailed(e: Error, cs: CallStats): void;
    /**
     * Notifies CallStats for mute events
     * @param mute {boolean} true for muted and false for not muted
     * @param type {String} "audio"/"video"
     * @param {CallStats} cs callstats instance related to the event
     */
    static sendMuteEvent(mute: boolean, type: string, cs: CallStats): void;
    /**
     * Creates new CallStats instance that handles all callstats API calls for
     * given {@link TraceablePeerConnection}. Each instance is meant to handle
     * one CallStats fabric added with 'addFabric' API method for the
     * {@link TraceablePeerConnection} instance passed in the constructor.
     * @param {TraceablePeerConnection} tpc
     * @param {Object} options
     * @param {string} options.confID the conference ID that will be used to
     * report the session.
     * @param {string} [options.remoteUserID='jitsi'] the remote user ID to
     * which given <tt>tpc</tt> is connected.
     */
    constructor(tpc: any, options: {
        confID: string;
        remoteUserID?: string;
    });
    confID: string;
    tpc: any;
    peerconnection: any;
    remoteUserID: string;
    hasFabric: boolean;
    /**
     * Initializes CallStats fabric by calling "addNewFabric" for
     * the peer connection associated with this instance.
     * @return {boolean} true if the call was successful or false otherwise.
     */
    _addNewFabric(): boolean;
    /**
     * Lets CallStats module know where is given SSRC rendered by providing
     * renderer tag ID.
     * If the lib is not initialized yet queue the call for later, when it's
     * ready.
     * @param {number} ssrc the SSRC of the stream
     * @param {boolean} isLocal indicates whether the stream is local
     * @param {string|null} streamEndpointId if the stream is not local then it
     * needs to contain the stream owner's ID
     * @param {string} usageLabel meaningful usage label of this stream like
     *        'microphone', 'camera' or 'screen'.
     * @param {string} containerId  the id of media 'audio' or 'video' tag which
     *        renders the stream.
     */
    associateStreamWithVideoTag(ssrc: number, isLocal: boolean, streamEndpointId: string | null, usageLabel: string, containerId: string): void;
    /**
     * Notifies CallStats that we are the new dominant speaker in the
     * conference.
     */
    sendDominantSpeakerEvent(): void;
    /**
     * Notifies CallStats that the fabric for the underlying peerconnection was
     * closed and no events should be reported, after this call.
     */
    sendTerminateEvent(): void;
    /**
     * Notifies CallStats for ice connection failed
     */
    sendIceConnectionFailedEvent(): void;
    /**
     * Notifies CallStats that peer connection failed to create offer.
     *
     * @param {Error} e error to send
     */
    sendCreateOfferFailed(e: Error): void;
    /**
     * Notifies CallStats that peer connection failed to create answer.
     *
     * @param {Error} e error to send
     */
    sendCreateAnswerFailed(e: Error): void;
    /**
     * Sends either resume or hold event for the fabric associated with
     * the underlying peerconnection.
     * @param {boolean} isResume true to resume or false to hold
     */
    sendResumeOrHoldEvent(isResume: boolean): void;
    /**
     * Notifies CallStats for screen sharing events
     * @param {boolean} start true for starting screen sharing and
     * false for stopping
     * @param {string|null} ssrc - optional ssrc value, used only when
     * starting screen sharing.
     */
    sendScreenSharingEvent(start: boolean, ssrc: string | null): void;
    /**
     * Notifies CallStats that peer connection failed to set local description.
     *
     * @param {Error} e error to send
     */
    sendSetLocalDescFailed(e: Error): void;
    /**
     * Notifies CallStats that peer connection failed to set remote description.
     *
     * @param {Error} e error to send
     */
    sendSetRemoteDescFailed(e: Error): void;
    /**
     * Notifies CallStats that peer connection failed to add ICE candidate.
     *
     * @param {Error} e error to send
     */
    sendAddIceCandidateFailed(e: Error): void;
}
241
/**
 * Static state shared by all {@link CallStats} instances: the backend handle,
 * the queue of reports buffered until initialization completes and the
 * credentials/user ID passed via {@link CallStats.initBackend}.
 */
declare namespace CallStats {
    const backend: any;
    const reportsQueue: any[];
    const backendInitialized: boolean;
    const callStatsID: string;
    const callStatsSecret: string;
    const userID: object;
}
export default CallStats;

+ 43
- 0
types/auto/modules/statistics/LocalStatsCollector.d.ts View File

@@ -0,0 +1,43 @@
1
/**
 * <tt>LocalStatsCollector</tt> calculates statistics for the local stream.
 *
 * @param stream the local stream
 * @param interval stats refresh interval given in ms.
 * @param callback function that receives the audio levels.
 * @constructor
 */
declare function LocalStatsCollector(stream: any, interval: any, callback: any): void;
declare class LocalStatsCollector {
    /**
     * <tt>LocalStatsCollector</tt> calculates statistics for the local stream.
     *
     * @param stream the local stream
     * @param interval stats refresh interval given in ms.
     * @param callback function that receives the audio levels.
     * @constructor
     */
    constructor(stream: any, interval: any, callback: any);
    // The local stream whose statistics are collected.
    stream: any;
    // Handle of the stats refresh interval timer.
    intervalId: NodeJS.Timer;
    // Stats refresh interval in ms ("Milis" spelling preserved from source).
    intervalMilis: any;
    // Most recently computed audio level.
    audioLevel: number;
    // Function that receives the audio levels.
    callback: any;
    /**
     * Starts collecting the statistics.
     */
    start(): void;
    /**
     * Stops collecting the statistics.
     */
    stop(): void;
}
declare namespace LocalStatsCollector {
    /**
     * Checks if the environment has the necessary conditions to support
     * collecting stats from local streams.
     *
     * @returns {boolean}
     */
    function isLocalStatsSupported(): boolean;
}
export default LocalStatsCollector;

+ 42
- 0
types/auto/modules/statistics/PerformanceObserverStats.d.ts View File

@@ -0,0 +1,42 @@
1
/**
 * This class creates an observer that monitors browser's performance measurement events
 * as they are recorded in the browser's performance timeline and computes an average and
 * a maximum value for the long task events. Tasks are classified as long tasks if they take
 * longer than 50ms to execute on the main thread.
 */
export class PerformanceObserverStats {
    /**
     * Creates a new instance of Performance observer statistics.
     *
     * @param {*} emitter Event emitter for emitting stats periodically
     * @param {*} statsInterval interval for calculating the stats
     */
    constructor(emitter: any, statsInterval: any);
    eventEmitter: any;
    // Count of long task events observed.
    longTasks: number;
    // Duration of the longest task recorded by the observer.
    maxDuration: number;
    // Interval for calculating the stats, as passed to the constructor.
    performanceStatsInterval: any;
    // Running-average helper used to aggregate the long task stats.
    stats: RunningAverage;
    /**
     * Obtains the average rate of long tasks observed per min and the
     * duration of the longest task recorded by the observer.
     * @returns {Object}
     */
    getLongTasksStats(): any;
    /**
     * Starts the performance observer by registering the callback function
     * that calculates the performance statistics periodically.
     * @returns {void}
     */
    startObserver(): void;
    // Callback invoked with the list of performance entries observed.
    longTaskEventHandler: (list: any) => void;
    observer: PerformanceObserver;
    longTasksIntervalId: NodeJS.Timer;
    _lastTimeStamp: number;
    /**
     * Stops the performance observer.
     * @returns {void}
     */
    stopObserver(): void;
}
42
+import { RunningAverage } from "../util/MathUtil";

+ 75
- 0
types/auto/modules/statistics/PrecallTest.d.ts View File

@@ -0,0 +1,75 @@
1
/**
 * Loads the callstats script and initializes the library.
 *
 * @param {PrecallTestOptions} options - The callstats credentials and user
 * identifiers to initialize the library with (see {@link PrecallTestOptions}).
 * @returns {Promise<void>}
 */
export function init(options: any): Promise<void>;
8
+/**
9
+ * Executes a pre call test.
10
+ *
11
+ * @typedef PrecallTestResults
12
+ * @type {Object}
13
+ * @property {boolean} mediaConnectivity - If there is media connectivity or not.
14
+ * @property {number} throughput  - The average throughput.
15
+ * @property {number} fractionalLoss - The packet loss.
16
+ * @property {number} rtt - The round trip time.
17
+ * @property {string} provider - It is usually 'callstats'.
18
+ *
19
+ * @returns {Promise<{PrecallTestResults}>}
20
+ */
21
+export function execute(): Promise<{
22
+    PrecallTestResults;
23
+}>;
24
/**
 * Default export bundling the {@link init} and {@link execute} functions.
 */
declare namespace _default {
    export { init };
    export { execute };
}
export default _default;
29
/**
 * The options used to initialize the callstats lib for the pre call test
 * (see {@link init}).
 */
export type PrecallTestOptions = {
    /**
     * - Callstats credentials - the id.
     */
    callStatsID: string;
    /**
     * - Callstats credentials - the secret.
     */
    callStatsSecret: string;
    /**
     * - The user name to use when initializing callstats.
     */
    statisticsId: string;
    /**
     * - The user display name.
     */
    statisticsDisplayName: string;
};
51
/**
 * The results produced by a pre call test (see {@link execute}).
 */
export type PrecallTestResults = {
    /**
     * - If there is media connectivity or not.
     */
    mediaConnectivity: boolean;
    /**
     * - The average throughput.
     */
    throughput: number;
    /**
     * - The packet loss.
     */
    fractionalLoss: number;
    /**
     * - The round trip time.
     */
    rtt: number;
    /**
     * - It is usually 'callstats'.
     */
    provider: string;
};

+ 151
- 0
types/auto/modules/statistics/RTPStatsCollector.d.ts View File

@@ -0,0 +1,151 @@
1
+/**
2
+ * <tt>StatsCollector</tt> registers for stats updates of given
3
+ * <tt>peerconnection</tt> in given <tt>interval</tt>. On each update particular
4
+ * stats are extracted and put in {@link SsrcStats} objects. Once the processing
5
+ * is done <tt>audioLevelsUpdateCallback</tt> is called with <tt>this</tt>
6
+ * instance as an event source.
7
+ *
8
+ * @param peerconnection WebRTC PeerConnection object.
9
+ * @param audioLevelsInterval
10
+ * @param statsInterval stats refresh interval given in ms.
11
+ * @param eventEmitter
12
+ * @constructor
13
+ */
14
+export default function StatsCollector(peerconnection: any, audioLevelsInterval: any, statsInterval: any, eventEmitter: any): void;
15
+export default class StatsCollector {
16
+    /**
17
+     * <tt>StatsCollector</tt> registers for stats updates of given
18
+     * <tt>peerconnection</tt> in given <tt>interval</tt>. On each update particular
19
+     * stats are extracted and put in {@link SsrcStats} objects. Once the processing
20
+     * is done <tt>audioLevelsUpdateCallback</tt> is called with <tt>this</tt>
21
+     * instance as an event source.
22
+     *
23
+     * @param peerconnection WebRTC PeerConnection object.
24
+     * @param audioLevelsInterval
25
+     * @param statsInterval stats refresh interval given in ms.
26
+     * @param eventEmitter
27
+     * @constructor
28
+     */
29
+    constructor(peerconnection: any, audioLevelsInterval: any, statsInterval: any, eventEmitter: any);
30
+    peerconnection: any;
31
+    baselineAudioLevelsReport: any;
32
+    currentAudioLevelsReport: any;
33
+    currentStatsReport: any;
34
+    previousStatsReport: any;
35
+    audioLevelReportHistory: {};
36
+    audioLevelsIntervalId: NodeJS.Timer;
37
+    eventEmitter: any;
38
+    conferenceStats: ConferenceStats;
39
+    audioLevelsIntervalMilis: any;
40
+    speakerList: any[];
41
+    statsIntervalId: NodeJS.Timer;
42
+    statsIntervalMilis: any;
43
+    /**
44
+     * Maps SSRC numbers to {@link SsrcStats}.
45
+     * @type {Map<number,SsrcStats}
46
+     */
47
+    ssrc2stats: Map<number, SsrcStats>;
48
+    /**
49
+     * Set the list of the remote speakers for which audio levels are to be calculated.
50
+     *
51
+     * @param {Array<string>} speakerList - Endpoint ids.
52
+     * @returns {void}
53
+     */
54
+    setSpeakerList(speakerList: Array<string>): void;
55
+    /**
56
+     * Stops stats updates.
57
+     */
58
+    stop(): void;
59
+    /**
60
+     * Callback passed to <tt>getStats</tt> method.
61
+     * @param error an error that occurred on <tt>getStats</tt> call.
62
+     */
63
+    errorCallback(error: any): void;
64
+    /**
65
+     * Starts stats updates.
66
+     */
67
+    start(startAudioLevelStats: any): void;
68
+    /**
69
+     *
70
+     */
71
+    _processAndEmitReport(): void;
72
+    private getNonNegativeValue;
73
+    private _calculateBitrate;
74
+    /**
75
+     * Stats processing for spec-compliant RTCPeerConnection#getStats.
76
+     */
77
+    processStatsReport(): void;
78
+    /**
79
+     * Stats processing logic.
80
+     */
81
+    processAudioLevelReport(): void;
82
+}
83
/**
 * Aggregate conference-wide statistics: bandwidth, bitrate, packet loss
 * and transport information.
 */
declare function ConferenceStats(): void;
declare class ConferenceStats {
    /**
     * The bandwidth
     * @type {{}}
     */
    bandwidth: {};
    /**
     * The bit rate
     * @type {{}}
     */
    bitrate: {};
    /**
     * The packet loss rate
     * @type {{}}
     */
    packetLoss: {};
    /**
     * Array with the transport information.
     * @type {Array}
     */
    transport: any[];
}
109
/**
 * Holds "statistics" for a single SSRC.
 * @constructor
 */
declare function SsrcStats(): void;
declare class SsrcStats {
    loss: {};
    bitrate: {
        download: number;
        upload: number;
    };
    resolution: {};
    framerate: number;
    codec: string;
    /**
     * Sets the "loss" object.
     * @param loss the value to set.
     */
    setLoss(loss: any): void;
    /**
     * Sets resolution that belong to the ssrc represented by this instance.
     * @param resolution new resolution value to be set.
     */
    setResolution(resolution: any): void;
    /**
     * Adds the "download" and "upload" fields from the "bitrate" parameter to
     * the respective fields of the "bitrate" field of this object.
     * @param bitrate an object holding the values to add.
     */
    addBitrate(bitrate: any): void;
    /**
     * Resets the bit rate for given <tt>ssrc</tt> that belong to the peer
     * represented by this instance.
     */
    resetBitrate(): void;
    /**
     * Sets the "framerate".
     * @param framerate the value to set.
     */
    setFramerate(framerate: any): void;
    /**
     * Sets the "codec".
     * @param codec the value to set.
     */
    setCodec(codec: any): void;
}
151
+export {};

+ 114
- 0
types/auto/modules/statistics/SpeakerStats.d.ts View File

@@ -0,0 +1,114 @@
1
export = SpeakerStats;
/**
 * A model for keeping track of each user's total
 * time as a dominant speaker. The model also
 * keeps track of the user's last known name
 * in case the user has left the meeting,
 * which is also tracked.
 */
declare class SpeakerStats {
    /**
     * Initializes a new SpeakerStats instance.
     *
     * @constructor
     * @param {string} userId - The id of the user being tracked.
     * @param {string} displayName - The name of the user being tracked.
     * @param {boolean} isLocalStats - True if the stats model tracks
     * the local user.
     * @returns {void}
     */
    constructor(userId: string, displayName: string, isLocalStats: boolean);
    _userId: string;
    _isLocalStats: boolean;
    totalDominantSpeakerTime: number;
    _dominantSpeakerStart: number;
    _hasLeft: boolean;
    _facialExpressions: {
        happy: number;
        neutral: number;
        surprised: number;
        angry: number;
        fearful: number;
        disgusted: number;
        sad: number;
    };
    /**
     * Get the user id being tracked.
     *
     * @returns {string} The user id.
     */
    getUserId(): string;
    /**
     * Get the name of the user being tracked.
     *
     * @returns {string} The user name.
     */
    getDisplayName(): string;
    /**
     * Updates the last known name of the user being tracked.
     *
     * @param {string} newName - The user name.
     * @returns {void}
     */
    setDisplayName(newName: any): void;
    displayName: any;
    /**
     * Returns true if the stats are tracking the local user.
     *
     * @returns {boolean}
     */
    isLocalStats(): boolean;
    /**
     * Returns true if the tracked user is currently a dominant speaker.
     *
     * @returns {boolean}
     */
    isDominantSpeaker(): boolean;
    /**
     * Sets whether the tracked user is currently a dominant speaker.
     *
     * @param {boolean} isNowDominantSpeaker - If true, the user will being accumulating time
     * as dominant speaker. If false, the user will not accumulate time
     * and will record any time accumulated since starting as dominant speaker.
     * @returns {void}
     */
    setDominantSpeaker(isNowDominantSpeaker: any): void;
    /**
     * Get how long the tracked user has been dominant speaker.
     *
     * @returns {number} - The speaker time in milliseconds.
     */
    getTotalDominantSpeakerTime(): number;
    /**
     * Get whether or not the user is still in the meeting.
     *
     * @returns {boolean} True if the user is no longer in the meeting.
     */
    hasLeft(): boolean;
    /**
     * Set the user as having left the meeting.
     *
     * @returns {void}
     */
    markAsHasLeft(): void;
    /**
     * Gets the facial expressions of the user.
     *
     * @returns {Object}
     */
    getFacialExpressions(): any;
    /**
     * Sets the facial expressions of the user.
     *
     * @param {Object} facialExpressions - object with facial expressions.
     * @returns {void}
     */
    setFacialExpressions(facialExpressions: any): void;
    /**
     * Adds a new facial expression to speaker stats.
     *
     * @param  {string} facialExpression
     * @param {number} duration
     */
    addFacialExpression(facialExpression: string, duration: number): void;
}

+ 79
- 0
types/auto/modules/statistics/SpeakerStatsCollector.d.ts View File

@@ -0,0 +1,79 @@
1
/**
 * A collection for tracking speaker stats. Attaches listeners
 * to the conference to automatically update on tracked events.
 */
export default class SpeakerStatsCollector {
    /**
     * Initializes a new SpeakerStatsCollector instance.
     *
     * @constructor
     * @param {JitsiConference} conference - The conference to track.
     * @returns {void}
     */
    constructor(conference: any);
    stats: {
        users: {};
        dominantSpeakerId: any;
    };
    conference: any;
    /**
     * Reacts to dominant speaker change events by changing its speaker stats
     * models to reflect the current dominant speaker.
     *
     * @param {string} dominantSpeakerId - The user id of the new
     * dominant speaker.
     * @returns {void}
     * @private
     */
    private _onDominantSpeaker;
    /**
     * Reacts to user join events by creating a new SpeakerStats model.
     *
     * @param {string} userId - The user id of the new user.
     * @param {JitsiParticipant} participant - The JitsiParticipant model for the new user.
     * @returns {void}
     * @private
     */
    private _onUserJoin;
    /**
     * Reacts to user leave events by updating the associated user's
     * SpeakerStats model.
     *
     * @param {string} userId - The user id of the user that left.
     * @returns {void}
     * @private
     */
    private _onUserLeave;
    /**
     * Reacts to user name change events by updating the last known name
     * tracked in the associated SpeakerStats model.
     *
     * @param {string} userId - The user id of the user whose name changed.
     * @returns {void}
     * @private
     */
    private _onDisplayNameChange;
    /**
     * Adds a new facial expression with its duration of a remote user.
     *
     * @param {string} userId - The user id of the remote user the facial
     * expression belongs to.
     * @param {Object} data - The facial expression with its duration.
     * @returns {void}
     * @private
     */
    private _onFacialExpressionAdd;
    /**
     * Return a copy of the tracked SpeakerStats models.
     *
     * @returns {Object} The keys are the user ids and the values are the
     * associated user's SpeakerStats model.
     */
    getStats(): any;
    /**
     * Updates the current stats with the new values passed in.
     *
     * @param {Object} newStats - The new values used to update current one.
     * @private
     */
    private _updateStats;
}

+ 7
- 0
types/auto/modules/statistics/constants.d.ts View File

@@ -0,0 +1,7 @@
1
/**
 * The URL from which the callstats (WebSocket) client library is loaded.
 */
export const CALLSTATS_SCRIPT_URL: "https://api.callstats.io/static/callstats-ws.min.js";
/**
 * The number of remote speakers for which the audio levels will be calculated using
 * RTCRtpReceiver#getSynchronizationSources. Limit the number of endpoints to save cpu on the client as this API call
 * is known to take longer to execute when there are many audio receivers.
 */
export const SPEAKERS_AUDIO_LEVELS: 5;

+ 353
- 0
types/auto/modules/statistics/statistics.d.ts View File

@@ -0,0 +1,353 @@
1
+/// <reference types="node" />
2
+/**
3
+ * The options to configure Statistics.
4
+ * @typedef {Object} StatisticsOptions
5
+ * @property {string} applicationName - The application name to pass to
6
+ * callstats.
7
+ * @property {string} aliasName - The alias name to use when initializing callstats.
8
+ * @property {string} userName - The user name to use when initializing callstats.
9
+ * @property {string} confID - The callstats conference ID to use.
10
+ * @property {string} callStatsID - Callstats credentials - the id.
11
+ * @property {string} callStatsSecret - Callstats credentials - the secret.
12
+ * @property {string} customScriptUrl - A custom lib url to use when downloading
13
+ * callstats library.
14
+ * @property {string} roomName - The room name we are currently in.
15
+ * @property {string} configParams - The set of parameters
16
+ * to enable/disable certain features in the library. See CallStats docs for more info.
17
+ */
18
+/**
19
+ *
20
+ * @param xmpp
21
+ * @param {StatisticsOptions} options - The options to use creating the
22
+ * Statistics.
23
+ */
24
+declare function Statistics(xmpp: any, options: StatisticsOptions): void;
25
+declare class Statistics {
26
+    /**
27
+     * The options to configure Statistics.
28
+     * @typedef {Object} StatisticsOptions
29
+     * @property {string} applicationName - The application name to pass to
30
+     * callstats.
31
+     * @property {string} aliasName - The alias name to use when initializing callstats.
32
+     * @property {string} userName - The user name to use when initializing callstats.
33
+     * @property {string} confID - The callstats conference ID to use.
34
+     * @property {string} callStatsID - Callstats credentials - the id.
35
+     * @property {string} callStatsSecret - Callstats credentials - the secret.
36
+     * @property {string} customScriptUrl - A custom lib url to use when downloading
37
+     * callstats library.
38
+     * @property {string} roomName - The room name we are currently in.
39
+     * @property {string} configParams - The set of parameters
40
+     * to enable/disable certain features in the library. See CallStats docs for more info.
41
+     */
42
+    /**
43
+     *
44
+     * @param xmpp
45
+     * @param {StatisticsOptions} options - The options to use creating the
46
+     * Statistics.
47
+     */
48
+    constructor(xmpp: any, options: StatisticsOptions);
49
+    /**
50
+     * {@link RTPStats} mapped by {@link TraceablePeerConnection.id} which
51
+     * collect RTP statistics for each peerconnection.
52
+     * @type {Map<string, RTPStats>}
53
+     */
54
+    rtpStatsMap: Map<string, RTPStats>;
55
+    eventEmitter: EventEmitter;
56
+    xmpp: any;
57
+    options: {};
58
+    callStatsIntegrationEnabled: boolean;
59
+    callStatsApplicationLogsDisabled: any;
60
+    /**
61
+     * Stores {@link CallStats} instances for each
62
+     * {@link TraceablePeerConnection} (one {@link CallStats} instance serves
63
+     * one TPC). The instances are mapped by {@link TraceablePeerConnection.id}.
64
+     * @type {Map<number, CallStats>}
65
+     */
66
+    callsStatsInstances: Map<number, CallStats>;
67
+    /**
68
+     * Starts collecting RTP stats for given peerconnection.
69
+     * @param {TraceablePeerConnection} peerconnection
70
+     */
71
+    startRemoteStats(peerconnection: any): void;
72
+    addAudioLevelListener(listener: any): void;
73
+    removeAudioLevelListener(listener: any): void;
74
+    addBeforeDisposedListener(listener: any): void;
75
+    removeBeforeDisposedListener(listener: any): void;
76
+    addConnectionStatsListener(listener: any): void;
77
+    removeConnectionStatsListener(listener: any): void;
78
+    addByteSentStatsListener(listener: any): void;
79
+    removeByteSentStatsListener(listener: any): void;
80
+    /**
81
+     * Add a listener that would be notified on a LONG_TASKS_STATS event.
82
+     *
83
+     * @param {Function} listener a function that would be called when notified.
84
+     * @returns {void}
85
+     */
86
+    addLongTasksStatsListener(listener: Function): void;
87
+    /**
88
+     * Creates an instance of {@link PerformanceObserverStats} and starts the
89
+     * observer that records the stats periodically.
90
+     *
91
+     * @returns {void}
92
+     */
93
+    attachLongTasksStats(conference: any): void;
94
+    performanceObserverStats: PerformanceObserverStats;
95
+    /**
96
+     * Obtains the current value of the LongTasks event statistics.
97
+     *
98
+     * @returns {Object|null} stats object if the observer has been
99
+     * created, null otherwise.
100
+     */
101
+    getLongTasksStats(): any | null;
102
+    /**
103
+     * Removes the given listener for the LONG_TASKS_STATS event.
104
+     *
105
+     * @param {Function} listener the listener we want to remove.
106
+     * @returns {void}
107
+     */
108
+    removeLongTasksStatsListener(listener: Function): void;
109
+    /**
110
+     * Updates the list of speakers for which the audio levels are to be calculated. This is needed for the jvb pc only.
111
+     *
112
+     * @param {Array<string>} speakerList The list of remote endpoint ids.
113
+     * @returns {void}
114
+     */
115
+    setSpeakerList(speakerList: Array<string>): void;
116
+    dispose(): void;
117
+    private _stopRemoteStats;
118
+    /**
119
+     * Stops collecting RTP stats for given peerconnection
120
+     * @param {TraceablePeerConnection} tpc
121
+     */
122
+    stopRemoteStats(tpc: any): void;
123
+    /**
124
+     * Initializes the callstats.io API.
125
+     * @param {TraceablePeerConnection} tpc the {@link TraceablePeerConnection}
126
+     * instance for which CallStats will be started.
127
+     * @param {string} remoteUserID
128
+     */
129
+    startCallStats(tpc: any, remoteUserID: string): void;
130
+    /**
131
+     * Removes the callstats.io instances.
132
+     */
133
+    stopCallStats(tpc: any): void;
134
+    /**
135
+     * Returns true if the callstats integration is enabled, otherwise returns
136
+     * false.
137
+     *
138
+     * @returns true if the callstats integration is enabled, otherwise returns
139
+     * false.
140
+     */
141
+    isCallstatsEnabled(): boolean;
142
+    /**
143
+     * Logs either resume or hold event for the given peer connection.
144
+     * @param {TraceablePeerConnection} tpc the connection for which event will be
145
+     * reported
146
+     * @param {boolean} isResume true for resume or false for hold
147
+     */
148
+    sendConnectionResumeOrHoldEvent(tpc: any, isResume: boolean): void;
149
+    /**
150
+     * Notifies CallStats and analytics (if present) for ice connection failed
151
+     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
152
+     */
153
+    sendIceConnectionFailedEvent(tpc: any): void;
154
+    /**
155
+     * Notifies CallStats for mute events
156
+     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
157
+     * @param {boolean} muted true for muted and false for not muted
158
+     * @param {String} type "audio"/"video"
159
+     */
160
+    sendMuteEvent(tpc: any, muted: boolean, type: string): void;
161
+    /**
162
+     * Notifies CallStats for screen sharing events
163
+     * @param start {boolean} true for starting screen sharing and
164
+     * false for stopping.
165
+     * @param {string|null} ssrc - optional ssrc value, used only when
166
+     * starting screen sharing.
167
+     */
168
+    sendScreenSharingEvent(start: boolean, ssrc: string | null): void;
169
+    /**
170
+     * Notifies the statistics module that we are now the dominant speaker of the
171
+     * conference.
172
+     * @param {String} roomJid - The room jid where the speaker event occurred.
173
+     */
174
+    sendDominantSpeakerEvent(roomJid: string): void;
175
+    /**
176
+     * Lets the underlying statistics module know where is given SSRC rendered by
177
+     * providing renderer tag ID.
178
+     * @param {TraceablePeerConnection} tpc the connection to which the stream
179
+     * belongs to
180
+     * @param {number} ssrc the SSRC of the stream
181
+     * @param {boolean} isLocal
182
+     * @param {string} userId
183
+     * @param {string} usageLabel  meaningful usage label of this stream like
184
+     *        'microphone', 'camera' or 'screen'.
185
+     * @param {string} containerId the id of media 'audio' or 'video' tag which
186
+     *        renders the stream.
187
+     */
188
+    associateStreamWithVideoTag(tpc: any, ssrc: number, isLocal: boolean, userId: string, usageLabel: string, containerId: string): void;
189
+    /**
190
+     * Notifies CallStats that peer connection failed to create offer.
191
+     *
192
+     * @param {Error} e error to send
193
+     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
194
+     */
195
+    sendCreateOfferFailed(e: Error, tpc: any): void;
196
+    /**
197
+     * Notifies CallStats that peer connection failed to create answer.
198
+     *
199
+     * @param {Error} e error to send
200
+     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
201
+     */
202
+    sendCreateAnswerFailed(e: Error, tpc: any): void;
203
+    /**
204
+     * Notifies CallStats that peer connection failed to set local description.
205
+     *
206
+     * @param {Error} e error to send
207
+     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
208
+     */
209
+    sendSetLocalDescFailed(e: Error, tpc: any): void;
210
+    /**
211
+     * Notifies CallStats that peer connection failed to set remote description.
212
+     *
213
+     * @param {Error} e error to send
214
+     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
215
+     */
216
+    sendSetRemoteDescFailed(e: Error, tpc: any): void;
217
+    /**
218
+     * Notifies CallStats that peer connection failed to add ICE candidate.
219
+     *
220
+     * @param {Error} e error to send
221
+     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
222
+     */
223
+    sendAddIceCandidateFailed(e: Error, tpc: any): void;
224
+    /**
225
+     * Sends the given feedback through CallStats.
226
+     *
227
+     * @param overall an integer between 1 and 5 indicating the user's rating.
228
+     * @param comment the comment from the user.
229
+     * @returns {Promise} Resolves when callstats feedback has been submitted
230
+     * successfully.
231
+     */
232
+    sendFeedback(overall: any, comment: any): Promise<any>;
233
+}
234
+declare namespace Statistics {
235
+    /**
236
+     * Init statistic options
237
+     * @param options
238
+     */
239
+    export function init(options: any): void;
240
+    export const audioLevelsEnabled: boolean;
241
+    export const audioLevelsInterval: number;
242
+    export const pcStatsInterval: number;
243
+    export const disableThirdPartyRequests: boolean;
244
+    export { analytics };
245
+    export const instances: any;
246
+    export const localStats: any[];
247
+    export function startLocalStats(stream: any, callback: any): void;
248
+    export function stopLocalStats(stream: any): void;
249
+    /**
250
+     * Obtains the list of *all* {@link CallStats} instances collected from every
251
+     * valid {@link Statistics} instance.
252
+     * @return {Set<CallStats>}
253
+     * @private
254
+     */
255
+    export function _getAllCallStatsInstances(): Set<CallStats>;
256
+    /**
257
+     * Notifies about active device.
258
+     * @param {{deviceList: {String:String}}} devicesData - list of devices with
259
+     *      their data
260
+     */
261
+    export function sendActiveDeviceListEvent(devicesData: {
262
+        deviceList: {
263
+            String: string;
264
+        };
265
+    }): void;
266
+    /**
267
+     * Notifies CallStats that getUserMedia failed.
268
+     *
269
+     * @param {Error} e error to send
270
+     */
271
+    export function sendGetUserMediaFailed(e: Error): void;
272
+    /**
273
+     * Adds to CallStats an application log.
274
+     *
275
+     * @param {String} m a log message to send or an {Error} object to be reported
276
+     */
277
+    export function sendLog(m: string): void;
278
+    export const LOCAL_JID: string;
279
+    /**
280
+     * Reports global error to CallStats.
281
+     *
282
+     * @param {Error} error
283
+     */
284
+    export function reportGlobalError(error: Error): void;
285
+    /**
286
+     * Sends event to analytics and logs a message to the logger/console. Console
287
+     * messages might also be logged to callstats automatically.
288
+     *
289
+     * @param {string | Object} event the event name, or an object which
290
+     * represents the entire event.
291
+     * @param {Object} properties properties to attach to the event (if an event
292
+     * name as opposed to an event object is provided).
293
+     */
294
+    export function sendAnalyticsAndLog(event: any, properties?: any): void;
295
+    /**
296
+     * Sends event to analytics.
297
+     *
298
+     * @param {string | Object} eventName the event name, or an object which
299
+     * represents the entire event.
300
+     * @param {Object} properties properties to attach to the event
301
+     */
302
+    export function sendAnalytics(eventName: any, properties?: any): void;
303
+}
304
+export default Statistics;
305
+/**
306
+ * The options to configure Statistics.
307
+ */
308
+export type StatisticsOptions = {
309
+    /**
310
+     * - The application name to pass to
311
+     * callstats.
312
+     */
313
+    applicationName: string;
314
+    /**
315
+     * - The alias name to use when initializing callstats.
316
+     */
317
+    aliasName: string;
318
+    /**
319
+     * - The user name to use when initializing callstats.
320
+     */
321
+    userName: string;
322
+    /**
323
+     * - The callstats conference ID to use.
324
+     */
325
+    confID: string;
326
+    /**
327
+     * - Callstats credentials - the id.
328
+     */
329
+    callStatsID: string;
330
+    /**
331
+     * - Callstats credentials - the secret.
332
+     */
333
+    callStatsSecret: string;
334
+    /**
335
+     * - A custom lib url to use when downloading
336
+     * callstats library.
337
+     */
338
+    customScriptUrl: string;
339
+    /**
340
+     * - The room name we are currently in.
341
+     */
342
+    roomName: string;
343
+    /**
344
+     * - The set of parameters
345
+     * to enable/disable certain features in the library. See CallStats docs for more info.
346
+     */
347
+    configParams: string;
348
+};
349
+import RTPStats from "./RTPStatsCollector";
350
+import EventEmitter from "events";
351
+import CallStats from "./CallStats";
352
+import { PerformanceObserverStats } from "./PerformanceObserverStats";
353
+import analytics from "./AnalyticsAdapter";

+ 82
- 0
types/auto/modules/transcription/audioRecorder.d.ts View File

@@ -0,0 +1,82 @@
1
+export default AudioRecorder;
2
+/**
3
+ * main exported object of the file, holding all
4
+ * relevant functions and variables for the outside world
5
+ * @param jitsiConference the jitsiConference which this object
6
+ * is going to record
7
+ */
8
+declare function AudioRecorder(jitsiConference: any): void;
9
+declare class AudioRecorder {
10
+    /**
11
+     * main exported object of the file, holding all
12
+     * relevant functions and variables for the outside world
13
+     * @param jitsiConference the jitsiConference which this object
14
+     * is going to record
15
+     */
16
+    constructor(jitsiConference: any);
17
+    recorders: any[];
18
+    fileType: string;
19
+    isRecording: boolean;
20
+    jitsiConference: any;
21
+    /**
22
+     * Adds a new TrackRecorder object to the array.
23
+     *
24
+     * @param track the track potentially holding an audio stream
25
+     */
26
+    addTrack(track: any): void;
27
+    /**
28
+     * Creates a TrackRecorder object. Also creates the MediaRecorder and
29
+     * data array for the trackRecorder.
30
+     * @param track the JitsiTrack holding the audio MediaStream(s)
31
+     */
32
+    instantiateTrackRecorder(track: any): TrackRecorder;
33
+    /**
34
+     * Notifies the module that a specific track has stopped, e.g. participant left
35
+     * the conference.
36
+     * if the recording has not started yet, the TrackRecorder will be removed from
37
+     * the array. If the recording has started, the recorder will stop recording
38
+     * but not removed from the array so that the recorded stream can still be
39
+     * accessed
40
+     *
41
+     * @param {JitsiTrack} track the JitsiTrack to remove from the recording session
42
+     */
43
+    removeTrack(track: any): void;
44
+    /**
45
+     * Tries to update the name value of all TrackRecorder in the array.
46
+     * If it hasn't changed, it will keep the existing name. If it changes to an
47
+     * undefined value, the old value will also be kept.
48
+     */
49
+    updateNames(): void;
50
+    /**
51
+     * Starts the audio recording of every local and remote track
52
+     */
53
+    start(): void;
54
+    /**
55
+     * Stops the audio recording of every local and remote track
56
+     */
57
+    stop(): void;
58
+    /**
59
+     * link hacking to download all recorded audio streams
60
+     */
61
+    download(): void;
62
+    /**
63
+     * returns the audio files of all recorders as an array of objects,
64
+     * which include the name of the owner of the track and the starting time stamp
65
+     * @returns {Array} an array of RecordingResult objects
66
+     */
67
+    getRecordingResults(): any[];
68
+    /**
69
+     * Gets the mime type of the recorded audio
71
+     * @returns {String} the mime type of the recorded audio
71
+     */
72
+    getFileType(): string;
73
+}
74
+declare namespace AudioRecorder {
75
+    export { determineCorrectFileType };
76
+}
77
+import TrackRecorder from "./trackRecorder";
78
+/**
79
+ * Determines which kind of audio recording the browser supports
80
+ * chrome supports "audio/webm" and firefox supports "audio/ogg"
81
+ */
82
+declare function determineCorrectFileType(): "audio/webm" | "audio/ogg";

+ 18
- 0
types/auto/modules/transcription/recordingResult.d.ts View File

@@ -0,0 +1,18 @@
1
+/**
2
+ * This object stores variables needed around the recording of an audio stream
3
+ * and passing this recording along with additional information along to
4
+ * different processes
5
+ */
6
+export default class RecordingResult {
7
+    /**
8
+     * @param blob the recording audio stream as a single blob
9
+     * @param name the name of the person of the audio stream
10
+     * @param startTime the time in UTC when recording of the audiostream started
11
+     * @param wordArray the recorder audio stream transcribed as an array of Word objects
12
+     */
13
+    constructor(blob: any, name: any, startTime: any, wordArray: any);
14
+    blob: any;
15
+    name: any;
16
+    startTime: any;
17
+    wordArray: any;
18
+}

+ 16
- 0
types/auto/modules/transcription/trackRecorder.d.ts View File

@@ -0,0 +1,16 @@
1
+/**
2
+ * A TrackRecorder object holds all the information needed for recording a
3
+ * single JitsiTrack (either remote or local)
4
+ * @param track The JitsiTrack the object is going to hold
5
+ */
6
+export default class TrackRecorder {
7
+    /**
8
+     * @param track The JitsiTrack the object is going to hold
9
+     */
10
+    constructor(track: any);
11
+    track: any;
12
+    recorder: any;
13
+    data: any;
14
+    name: any;
15
+    startTime: any;
16
+}

+ 79
- 0
types/auto/modules/transcription/transcriber.d.ts View File

@@ -0,0 +1,79 @@
1
+export default Transcriber;
2
+/**
3
+ * This is the main object for handling the Transcription. It interacts with
4
+ * the audioRecorder to record every person in a conference and sends the
5
+ * recorded audio to a transcriptionService. The returned speech-to-text result
6
+ * will be merged to create a transcript
7
+ * @param {AudioRecorder} audioRecorder An audioRecorder recording a conference
8
+ */
9
+declare function Transcriber(): void;
10
+declare class Transcriber {
11
+    audioRecorder: AudioRecorder;
12
+    transcriptionService: SphinxService;
13
+    counter: any;
14
+    startTime: Date;
15
+    transcription: string;
16
+    callback: any;
17
+    results: any[];
18
+    state: string;
19
+    lineLength: number;
20
+    /**
21
+     * Method to start the transcription process. It will tell the audioRecorder
22
+     * to start storing all audio streams and record the start time for merging
23
+     * purposes
24
+     */
25
+    start(): void;
26
+    /**
27
+     * Method to stop the transcription process. It will tell the audioRecorder to
28
+     * stop, and get all the recorded audio to send it to the transcription service
29
+    
30
+     * @param callback a callback which will receive the transcription
31
+     */
32
+    stop(callback: any): void;
33
+    /**
34
+     * this method will check if the counter is zero. If it is, it will call
35
+     * the merging method
36
+     */
37
+    maybeMerge(): void;
38
+    /**
39
+     * This method will merge all speech-to-text arrays together in one
40
+     * readable transcription string
41
+     */
42
+    merge(): void;
43
+    /**
44
+     * Appends a word object to the transcription. It will make a new line with a
45
+     * name if a name is specified
46
+     * @param {Word} word the Word object holding the word to append
47
+     * @param {String|null} name the name of a new speaker. Null if not applicable
48
+     */
49
+    updateTranscription(word: any, name: string | null): void;
50
+    /**
51
+     * Gives the transcriber a JitsiTrack holding an audioStream to transcribe.
52
+     * The JitsiTrack is given to the audioRecorder. If it doesn't hold an
53
+     * audiostream, it will not be added by the audioRecorder
54
+     * @param {JitsiTrack} track the track to give to the audioRecorder
55
+     */
56
+    addTrack(track: any): void;
57
+    /**
58
+     * Remove the given track from the audioRecorder
59
+     * @param track
60
+     */
61
+    removeTrack(track: any): void;
62
+    /**
63
+     * Will return the created transcription if it's available or throw an error
64
+     * when it's not done yet
65
+     * @returns {String} the transcription as a String
66
+     */
67
+    getTranscription(): string;
68
+    /**
69
+     * Returns the current state of the transcription process
70
+     */
71
+    getState(): string;
72
+    /**
73
+     * Resets the state to the "before" state, such that it's again possible to
74
+     * call the start method
75
+     */
76
+    reset(): void;
77
+}
78
+import AudioRecorder from "./audioRecorder";
79
+import SphinxService from "./transcriptionServices/SphinxTranscriptionService";

+ 3
- 0
types/auto/modules/transcription/transcriberHolder.d.ts View File

@@ -0,0 +1,3 @@
1
+export const transcribers: any[];
2
+export function add(transcriber: any): void;
3
+export function add(transcriber: any): void;

+ 49
- 0
types/auto/modules/transcription/transcriptionServices/AbstractTranscriptionService.d.ts View File

@@ -0,0 +1,49 @@
1
+/**
2
+ * Abstract class representing an interface to implement a speech-to-text
3
+ * service on.
4
+ */
5
+export default class TranscriptionService {
6
+    /**
7
+     * This method can be used to send the recorded audio stream and
8
+     * retrieve the answer from the transcription service from the callback
9
+     *
10
+     * @param {RecordingResult} recordingResult a recordingResult object which
11
+     * includes the recorded audio stream as a blob
12
+     * @param {Function} callback  which will retrieve the a RecordingResult with
13
+     *        the answer as a WordArray
14
+     */
15
+    send(recordingResult: any, callback: Function): void;
16
+    /**
17
+     * Abstract method which will send the recorded audio stream to the implemented
18
+     * transcription service and will retrieve an answer, which will be
19
+     * called on the given callback method
20
+     *
21
+     * @param {Blob} audioBlob the recorded audio stream as a single Blob
22
+     * @param {function} callback function which will retrieve the answer
23
+     *                            from the service
24
+     */
25
+    sendRequest(audioBlob: Blob, callback: Function): void;
26
+    /**
27
+     * Abstract method which will parse the output from the implemented
28
+     * transcription service to the expected format
29
+     *
30
+     * The transcriber class expect an array of word objects, where each word
31
+     * object is one transcribed word by the service.
32
+     *
33
+     * The expected output of this method is an array of word objects, in
34
+     * the correct order. That is, the first object in the array is the first word
35
+     * being said, and the last word in the array is the last word being said
36
+     *
37
+     * @param response the answer from the speech-to-text server which needs to be
38
+     *                 formatted
39
+     * @return {Array<Word>} an array of Word objects
40
+     */
41
+    formatResponse(response: any): Array<any>;
42
+    /**
43
+     * Abstract method which will verify that the response from the server is valid
44
+     *
45
+     * @param response the response from the server
46
+     * @return {boolean} true if response is valid, false otherwise
47
+     */
48
+    verify(response: any): boolean;
49
+}

+ 7
- 0
types/auto/modules/transcription/transcriptionServices/SphinxTranscriptionService.d.ts View File

@@ -0,0 +1,7 @@
1
+/**
2
+ * Implements a TranscriptionService for a Sphinx4 http server
3
+ */
4
+export default class SphinxService extends AbstractTranscriptionService {
5
+    url: string;
6
+}
7
+import AbstractTranscriptionService from "./AbstractTranscriptionService";

+ 32
- 0
types/auto/modules/transcription/word.d.ts View File

@@ -0,0 +1,32 @@
1
+/**
2
+ * An object representing a transcribed word, with some additional information
3
+ * @param word the word
4
+ * @param begin the time the word was started being uttered
5
+ * @param end the time the word stopped being uttered
6
+ */
7
+export default class Word {
8
+    /**
9
+     * @param word the word
10
+     * @param begin the time the word was started being uttered
11
+     * @param end the time the word stopped being uttered
12
+     */
13
+    constructor(word: any, begin: any, end: any);
14
+    word: any;
15
+    begin: any;
16
+    end: any;
17
+    /**
18
+     * Get the string representation of the word
19
+     * @returns {*} the word as a string
20
+     */
21
+    getWord(): any;
22
+    /**
23
+     * Get the time the word started being uttered
24
+     * @returns {*} the start time as an integer
25
+     */
26
+    getBeginTime(): any;
27
+    /**
28
+     * Get the time the word stopped being uttered
29
+     * @returns {*} the end time as an integer
30
+     */
31
+    getEndTime(): any;
32
+}

+ 38
- 0
types/auto/modules/util/AsyncQueue.d.ts View File

@@ -0,0 +1,38 @@
1
+/**
2
+ * A queue for async task execution.
3
+ */
4
+export default class AsyncQueue {
5
+    _queue: any;
6
+    _stopped: boolean;
7
+    /**
8
+     * Removes any pending tasks from the queue.
9
+     */
10
+    clear(): void;
11
+    /**
12
+     * Internal task processing implementation which makes things work.
13
+     */
14
+    _processQueueTasks(task: any, finishedCallback: any): void;
15
+    /**
16
+     * The 'task' function will be given a callback it MUST call with either:
17
+     *  1) No arguments if it was successful or
18
+     *  2) An error argument if there was an error
19
+     * If the task wants to process the success or failure of the task, it
20
+     * should pass the {@code callback} to the push function, e.g.:
21
+     * queue.push(task, (err) => {
22
+     *     if (err) {
23
+     *         // error handling
24
+     *     } else {
25
+     *         // success handling
26
+     *     }
27
+     * });
28
+     *
29
+     * @param {function} task - The task to be executed. See the description above.
30
+     * @param {function} [callback] - Optional callback to be called after the task has been executed.
31
+     */
32
+    push(task: Function, callback?: Function): void;
33
+    /**
34
+     * Shuts down the queue. All already queued tasks will execute, but no future tasks can be added. If a task is added
35
+     * after the queue has been shutdown then the callback will be called with an error.
36
+     */
37
+    shutdown(): void;
38
+}

+ 0
- 0
types/auto/modules/util/AuthUtil.d.ts View File


Some files were not shown because too many files changed in this diff

Loading…
Cancel
Save