Browse Source

feat(ts) generate autp types when packing

tags/v0.0.2
Saúl Ibarra Corretgé 2 years ago
parent
commit
0ed1e07df6
100 changed files with 3 additions and 11561 deletions
  1. 1
    0
      .gitignore
  2. 2
    1
      package.json
  3. 0
    2
      tsconfig.json
  4. 0
    1244
      types/auto/JitsiConference.d.ts
  5. 0
    110
      types/auto/JitsiConferenceErrors.d.ts
  6. 0
    40
      types/auto/JitsiConferenceEventManager.d.ts
  7. 0
    472
      types/auto/JitsiConferenceEvents.d.ts
  8. 0
    106
      types/auto/JitsiConnection.d.ts
  9. 0
    34
      types/auto/JitsiConnectionErrors.d.ts
  10. 0
    49
      types/auto/JitsiConnectionEvents.d.ts
  11. 0
    100
      types/auto/JitsiMediaDevices.d.ts
  12. 0
    35
      types/auto/JitsiMediaDevicesEvents.d.ts
  13. 0
    2
      types/auto/JitsiMeetJS.d.ts
  14. 0
    219
      types/auto/JitsiParticipant.d.ts
  15. 0
    0
      types/auto/JitsiParticipantEvents.d.ts
  16. 0
    53
      types/auto/JitsiTrackError.d.ts
  17. 0
    72
      types/auto/JitsiTrackErrors.d.ts
  18. 0
    61
      types/auto/JitsiTrackEvents.d.ts
  19. 0
    12
      types/auto/JitsiTranscriptionStatus.d.ts
  20. 0
    79
      types/auto/authenticateAndUpgradeRole.d.ts
  21. 0
    23
      types/auto/connection_optimization/external_connect.d.ts
  22. 0
    133
      types/auto/modules/RTC/BridgeChannel.d.ts
  23. 0
    106
      types/auto/modules/RTC/CodecSelection.d.ts
  24. 0
    308
      types/auto/modules/RTC/JitsiLocalTrack.d.ts
  25. 0
    173
      types/auto/modules/RTC/JitsiRemoteTrack.d.ts
  26. 0
    235
      types/auto/modules/RTC/JitsiTrack.d.ts
  27. 0
    78
      types/auto/modules/RTC/MockClasses.d.ts
  28. 0
    459
      types/auto/modules/RTC/RTC.d.ts
  29. 0
    155
      types/auto/modules/RTC/RTCUtils.d.ts
  30. 0
    120
      types/auto/modules/RTC/ScreenObtainer.d.ts
  31. 0
    162
      types/auto/modules/RTC/TPCUtils.d.ts
  32. 0
    824
      types/auto/modules/RTC/TraceablePeerConnection.d.ts
  33. 0
    186
      types/auto/modules/browser/BrowserCapabilities.d.ts
  34. 0
    3
      types/auto/modules/browser/index.d.ts
  35. 0
    95
      types/auto/modules/connectivity/ConnectionQuality.d.ts
  36. 0
    36
      types/auto/modules/connectivity/IceFailedHandling.d.ts
  37. 0
    33
      types/auto/modules/connectivity/NetworkInfo.d.ts
  38. 0
    350
      types/auto/modules/connectivity/ParticipantConnectionStatus.d.ts
  39. 0
    239
      types/auto/modules/connectivity/TrackStreamingStatus.d.ts
  40. 0
    6
      types/auto/modules/detection/ActiveDeviceDetector.d.ts
  41. 0
    63
      types/auto/modules/detection/DetectionEvents.d.ts
  42. 0
    57
      types/auto/modules/detection/NoAudioSignalDetection.d.ts
  43. 0
    25
      types/auto/modules/detection/P2PDominantSpeakerDetection.d.ts
  44. 0
    129
      types/auto/modules/detection/TrackVADEmitter.d.ts
  45. 0
    105
      types/auto/modules/detection/VADAudioAnalyser.d.ts
  46. 0
    85
      types/auto/modules/detection/VADNoiseDetection.d.ts
  47. 0
    96
      types/auto/modules/detection/VADReportingService.d.ts
  48. 0
    70
      types/auto/modules/detection/VADTalkMutedDetection.d.ts
  49. 0
    91
      types/auto/modules/e2ee/Context.d.ts
  50. 0
    58
      types/auto/modules/e2ee/E2EEContext.d.ts
  51. 0
    42
      types/auto/modules/e2ee/E2EEncryption.d.ts
  52. 0
    19
      types/auto/modules/e2ee/ExternallyManagedKeyHandler.d.ts
  53. 0
    69
      types/auto/modules/e2ee/KeyHandler.d.ts
  54. 0
    73
      types/auto/modules/e2ee/ManagedKeyHandler.d.ts
  55. 0
    166
      types/auto/modules/e2ee/OlmAdapter.d.ts
  56. 0
    1
      types/auto/modules/e2ee/Worker.d.ts
  57. 0
    25
      types/auto/modules/e2ee/crypto-utils.d.ts
  58. 0
    4
      types/auto/modules/e2ee/utils.d.ts
  59. 0
    79
      types/auto/modules/e2eping/e2eping.d.ts
  60. 0
    26
      types/auto/modules/event/Jvb121EventGenerator.d.ts
  61. 0
    52
      types/auto/modules/flags/FeatureFlags.d.ts
  62. 0
    25
      types/auto/modules/proxyconnection/CustomSignalingLayer.d.ts
  63. 0
    167
      types/auto/modules/proxyconnection/ProxyConnectionPC.d.ts
  64. 0
    141
      types/auto/modules/proxyconnection/ProxyConnectionService.d.ts
  65. 0
    12
      types/auto/modules/proxyconnection/constants.d.ts
  66. 0
    138
      types/auto/modules/qualitycontrol/ReceiveVideoController.d.ts
  67. 0
    75
      types/auto/modules/qualitycontrol/SendVideoController.d.ts
  68. 0
    165
      types/auto/modules/recording/JibriSession.d.ts
  69. 0
    112
      types/auto/modules/recording/RecordingManager.d.ts
  70. 0
    19
      types/auto/modules/recording/recordingConstants.d.ts
  71. 0
    77
      types/auto/modules/recording/recordingXMLUtils.d.ts
  72. 0
    31
      types/auto/modules/red/red.d.ts
  73. 0
    92
      types/auto/modules/sdp/LocalSdpMunger.d.ts
  74. 0
    50
      types/auto/modules/sdp/RtxModifier.d.ts
  75. 0
    55
      types/auto/modules/sdp/SDP.d.ts
  76. 0
    25
      types/auto/modules/sdp/SDPDiffer.d.ts
  77. 0
    351
      types/auto/modules/sdp/SDPUtil.d.ts
  78. 0
    2
      types/auto/modules/sdp/SampleSdpStrings.d.ts
  79. 0
    50
      types/auto/modules/sdp/SdpConsistency.d.ts
  80. 0
    75
      types/auto/modules/sdp/SdpSimulcast.d.ts
  81. 0
    218
      types/auto/modules/sdp/SdpTransformUtil.d.ts
  82. 0
    18
      types/auto/modules/settings/Settings.d.ts
  83. 0
    155
      types/auto/modules/statistics/AnalyticsAdapter.d.ts
  84. 0
    46
      types/auto/modules/statistics/AudioOutputProblemDetector.d.ts
  85. 0
    386
      types/auto/modules/statistics/AvgRTPStatsReporter.d.ts
  86. 0
    249
      types/auto/modules/statistics/CallStats.d.ts
  87. 0
    53
      types/auto/modules/statistics/LocalStatsCollector.d.ts
  88. 0
    42
      types/auto/modules/statistics/PerformanceObserverStats.d.ts
  89. 0
    75
      types/auto/modules/statistics/PrecallTest.d.ts
  90. 0
    151
      types/auto/modules/statistics/RTPStatsCollector.d.ts
  91. 0
    117
      types/auto/modules/statistics/SpeakerStats.d.ts
  92. 0
    80
      types/auto/modules/statistics/SpeakerStatsCollector.d.ts
  93. 0
    7
      types/auto/modules/statistics/constants.d.ts
  94. 0
    354
      types/auto/modules/statistics/statistics.d.ts
  95. 0
    82
      types/auto/modules/transcription/audioRecorder.d.ts
  96. 0
    18
      types/auto/modules/transcription/recordingResult.d.ts
  97. 0
    16
      types/auto/modules/transcription/trackRecorder.d.ts
  98. 0
    79
      types/auto/modules/transcription/transcriber.d.ts
  99. 0
    3
      types/auto/modules/transcription/transcriberHolder.d.ts
  100. 0
    0
      types/auto/modules/transcription/transcriptionServices/AbstractTranscriptionService.d.ts

+ 1
- 0
.gitignore View File

@@ -14,6 +14,7 @@ npm-*.log
14 14
 stats.json
15 15
 .vscode
16 16
 dist
17
+types/auto
17 18
 types/types-comparer/auto.json
18 19
 types/types-comparer/hand-crafted.json
19 20
 *.tgz

+ 2
- 1
package.json View File

@@ -65,9 +65,10 @@
65 65
     "build": "npm run build:webpack && npm run build:tsc",
66 66
     "build:webpack": "LIB_JITSI_MEET_COMMIT_HASH=$(git rev-parse --short HEAD 2>/dev/null) webpack",
67 67
     "build:tsc": "tsc --build --clean && tsc",
68
+    "gen-types": "tsc --declaration --declarationDir types/auto --emitDeclarationOnly",
68 69
     "lint": "eslint .",
69 70
     "lint-fix": "eslint . --fix",
70
-    "prepack": "npm run build",
71
+    "prepack": "npm run build && npm run gen-types",
71 72
     "test": "karma start karma.conf.js",
72 73
     "test-watch": "karma start karma.conf.js --no-single-run",
73 74
     "validate": "npm ls",

+ 0
- 2
tsconfig.json View File

@@ -2,8 +2,6 @@
2 2
   "compilerOptions": {
3 3
     "target": "es6",
4 4
     "module": "es6",
5
-    "declaration": true,
6
-    "declarationDir": "types/auto/",
7 5
     "sourceMap": true,
8 6
     "allowJs": true,
9 7
     "skipLibCheck": true,

+ 0
- 1244
types/auto/JitsiConference.d.ts
File diff suppressed because it is too large
View File


+ 0
- 110
types/auto/JitsiConferenceErrors.d.ts View File

@@ -1,110 +0,0 @@
1
-/**
2
- * The errors for the conference.
3
- */
4
-export declare enum JitsiConferenceErrors {
5
-    /**
6
-     * Indicates that client must be authenticated to create the conference.
7
-     */
8
-    AUTHENTICATION_REQUIRED = "conference.authenticationRequired",
9
-    /**
10
-     * Indicates that chat error occurred.
11
-     */
12
-    CHAT_ERROR = "conference.chatError",
13
-    /**
14
-     * Indicates that a settings error occurred.
15
-     */
16
-    SETTINGS_ERROR = "conference.settingsError",
17
-    /**
18
-     * Indicates that conference has been destroyed.
19
-     */
20
-    CONFERENCE_DESTROYED = "conference.destroyed",
21
-    /**
22
-     * Indicates that max users limit has been reached.
23
-     */
24
-    CONFERENCE_MAX_USERS = "conference.max_users",
25
-    /**
26
-     * Indicates that a connection error occurred when trying to join a conference.
27
-     */
28
-    CONNECTION_ERROR = "conference.connectionError",
29
-    /**
30
-     * Indicates that the client has been forced to restart by jicofo when the
31
-     * conference was migrated from one bridge to another.
32
-     */
33
-    CONFERENCE_RESTARTED = "conference.restarted",
34
-    /**
35
-     * Indicates that a connection error is due to not allowed,
36
-     * occurred when trying to join a conference.
37
-     */
38
-    NOT_ALLOWED_ERROR = "conference.connectionError.notAllowed",
39
-    /**
40
-     * Indicates that a connection error is due to not allowed,
41
-     * occurred when trying to join a conference, only approved members are allowed to join.
42
-     */
43
-    MEMBERS_ONLY_ERROR = "conference.connectionError.membersOnly",
44
-    /**
45
-     * Indicates that a connection error is due to denied access to the room,
46
-     * occurred after joining a lobby room and access is denied by the room moderators.
47
-     */
48
-    CONFERENCE_ACCESS_DENIED = "conference.connectionError.accessDenied",
49
-    /**
50
-     * Indicates that focus error happened.
51
-     */
52
-    FOCUS_DISCONNECTED = "conference.focusDisconnected",
53
-    /**
54
-     * Indicates that focus left the conference.
55
-     */
56
-    FOCUS_LEFT = "conference.focusLeft",
57
-    /**
58
-     * Indicates that graceful shutdown happened.
59
-     */
60
-    GRACEFUL_SHUTDOWN = "conference.gracefulShutdown",
61
-    /**
62
-     * Indicates that the media connection has failed.
63
-     */
64
-    ICE_FAILED = "conference.iceFailed",
65
-    /**
66
-     * Indicates that the versions of the server side components are incompatible
67
-     * with the client side.
68
-     */
69
-    INCOMPATIBLE_SERVER_VERSIONS = "conference.incompatible_server_versions",
70
-    /**
71
-     * Indicates that offer/answer had failed.
72
-     */
73
-    OFFER_ANSWER_FAILED = "conference.offerAnswerFailed",
74
-    /**
75
-     * Indicates that password cannot be set for this conference.
76
-     */
77
-    PASSWORD_NOT_SUPPORTED = "conference.passwordNotSupported",
78
-    /**
79
-     * Indicates that a password is required in order to join the conference.
80
-     */
81
-    PASSWORD_REQUIRED = "conference.passwordRequired",
82
-    /**
83
-     * Indicates that reservation system returned error.
84
-     */
85
-    RESERVATION_ERROR = "conference.reservationError",
86
-    /**
87
-     * Indicates that there is no available videobridge.
88
-     */
89
-    VIDEOBRIDGE_NOT_AVAILABLE = "conference.videobridgeNotAvailable"
90
-}
91
-export declare const AUTHENTICATION_REQUIRED = JitsiConferenceErrors.AUTHENTICATION_REQUIRED;
92
-export declare const CHAT_ERROR = JitsiConferenceErrors.CHAT_ERROR;
93
-export declare const SETTINGS_ERROR = JitsiConferenceErrors.SETTINGS_ERROR;
94
-export declare const CONFERENCE_DESTROYED = JitsiConferenceErrors.CONFERENCE_DESTROYED;
95
-export declare const CONFERENCE_MAX_USERS = JitsiConferenceErrors.CONFERENCE_MAX_USERS;
96
-export declare const CONNECTION_ERROR = JitsiConferenceErrors.CONNECTION_ERROR;
97
-export declare const CONFERENCE_RESTARTED = JitsiConferenceErrors.CONFERENCE_RESTARTED;
98
-export declare const NOT_ALLOWED_ERROR = JitsiConferenceErrors.NOT_ALLOWED_ERROR;
99
-export declare const MEMBERS_ONLY_ERROR = JitsiConferenceErrors.MEMBERS_ONLY_ERROR;
100
-export declare const CONFERENCE_ACCESS_DENIED = JitsiConferenceErrors.CONFERENCE_ACCESS_DENIED;
101
-export declare const FOCUS_DISCONNECTED = JitsiConferenceErrors.FOCUS_DISCONNECTED;
102
-export declare const FOCUS_LEFT = JitsiConferenceErrors.FOCUS_LEFT;
103
-export declare const GRACEFUL_SHUTDOWN = JitsiConferenceErrors.GRACEFUL_SHUTDOWN;
104
-export declare const ICE_FAILED = JitsiConferenceErrors.ICE_FAILED;
105
-export declare const INCOMPATIBLE_SERVER_VERSIONS = JitsiConferenceErrors.INCOMPATIBLE_SERVER_VERSIONS;
106
-export declare const OFFER_ANSWER_FAILED = JitsiConferenceErrors.OFFER_ANSWER_FAILED;
107
-export declare const PASSWORD_NOT_SUPPORTED = JitsiConferenceErrors.PASSWORD_NOT_SUPPORTED;
108
-export declare const PASSWORD_REQUIRED = JitsiConferenceErrors.PASSWORD_REQUIRED;
109
-export declare const RESERVATION_ERROR = JitsiConferenceErrors.RESERVATION_ERROR;
110
-export declare const VIDEOBRIDGE_NOT_AVAILABLE = JitsiConferenceErrors.VIDEOBRIDGE_NOT_AVAILABLE;

+ 0
- 40
types/auto/JitsiConferenceEventManager.d.ts View File

@@ -1,40 +0,0 @@
1
-/**
2
- * Setups all event listeners related to conference
3
- * @param conference {JitsiConference} the conference
4
- */
5
-export default function JitsiConferenceEventManager(conference: any): void;
6
-export default class JitsiConferenceEventManager {
7
-    /**
8
-     * Setups all event listeners related to conference
9
-     * @param conference {JitsiConference} the conference
10
-     */
11
-    constructor(conference: any);
12
-    conference: any;
13
-    xmppListeners: {};
14
-    /**
15
-     * Setups event listeners related to conference.chatRoom
16
-     */
17
-    setupChatRoomListeners(): void;
18
-    chatRoomForwarder: EventEmitterForwarder;
19
-    /**
20
-     * Setups event listeners related to conference.rtc
21
-     */
22
-    setupRTCListeners(): void;
23
-    /**
24
-     * Removes event listeners related to conference.xmpp
25
-     */
26
-    removeXMPPListeners(): void;
27
-    /**
28
-     * Setups event listeners related to conference.xmpp
29
-     */
30
-    setupXMPPListeners(): void;
31
-    /**
32
-     * Add XMPP listener and save its reference for remove on leave conference.
33
-     */
34
-    _addConferenceXMPPListener(eventName: any, listener: any): void;
35
-    /**
36
-     * Setups event listeners related to conference.statistics
37
-     */
38
-    setupStatisticsListeners(): void;
39
-}
40
-import EventEmitterForwarder from "./modules/util/EventEmitterForwarder";

+ 0
- 472
types/auto/JitsiConferenceEvents.d.ts View File

@@ -1,472 +0,0 @@
1
-/**
2
- * The events for the conference.
3
- */
4
-export declare enum JitsiConferenceEvents {
5
-    /**
6
-     * Event indicates that the current conference audio input switched between audio
7
-     * input states,i.e. with or without audio input.
8
-     */
9
-    AUDIO_INPUT_STATE_CHANGE = "conference.audio_input_state_changed",
10
-    /**
11
-     * Event indicates that the permission for unmuting audio has changed based on the number of audio senders in the call
12
-     * and the audio sender limit configured in Jicofo.
13
-     */
14
-    AUDIO_UNMUTE_PERMISSIONS_CHANGED = "conference.audio_unmute_permissions_changed",
15
-    /**
16
-     * Indicates that authentication status changed.
17
-     */
18
-    AUTH_STATUS_CHANGED = "conference.auth_status_changed",
19
-    /**
20
-     * Fired just before the statistics module is disposed and it's the last chance
21
-     * to submit some logs to the statistics service (ex. CallStats if enabled),
22
-     * before it's disconnected.
23
-     */
24
-    BEFORE_STATISTICS_DISPOSED = "conference.beforeStatisticsDisposed",
25
-    /**
26
-     * Indicates that an error occurred.
27
-     */
28
-    CONFERENCE_ERROR = "conference.error",
29
-    /**
30
-     * Indicates that conference failed.
31
-     */
32
-    CONFERENCE_FAILED = "conference.failed",
33
-    /**
34
-     * Indicates that conference is in progress of joining.
35
-     */
36
-    CONFERENCE_JOIN_IN_PROGRESS = "conference.join_in_progress",
37
-    /**
38
-     * Indicates that conference has been joined. The event does NOT provide any
39
-     * parameters to its listeners.
40
-     */
41
-    CONFERENCE_JOINED = "conference.joined",
42
-    /**
43
-     * Indicates that conference has been left.
44
-     */
45
-    CONFERENCE_LEFT = "conference.left",
46
-    /**
47
-     * Indicates that the conference unique identifier has been set.
48
-     */
49
-    CONFERENCE_UNIQUE_ID_SET = "conference.unique_id_set",
50
-    /**
51
-     * Indicates that the connection to the conference has been established
52
-     * XXX This is currently fired when the *ICE* connection enters 'connected'
53
-     * state for the first time.
54
-     */
55
-    CONNECTION_ESTABLISHED = "conference.connectionEstablished",
56
-    /**
57
-     * Indicates that the connection to the conference has been interrupted for some
58
-     * reason.
59
-     * XXX This is currently fired when the *ICE* connection is interrupted.
60
-     */
61
-    CONNECTION_INTERRUPTED = "conference.connectionInterrupted",
62
-    /**
63
-     * Indicates that the connection to the conference has been restored.
64
-     * XXX This is currently fired when the *ICE* connection is restored.
65
-     */
66
-    CONNECTION_RESTORED = "conference.connectionRestored",
67
-    /**
68
-     * A connection to the video bridge's data channel has been established.
69
-     */
70
-    DATA_CHANNEL_OPENED = "conference.dataChannelOpened",
71
-    /**
72
-     * A user has changed it display name
73
-     */
74
-    DISPLAY_NAME_CHANGED = "conference.displayNameChanged",
75
-    /**
76
-     * The dominant speaker was changed.
77
-     */
78
-    DOMINANT_SPEAKER_CHANGED = "conference.dominantSpeaker",
79
-    /**
80
-     * UTC conference timestamp when first participant joined.
81
-     */
82
-    CONFERENCE_CREATED_TIMESTAMP = "conference.createdTimestamp",
83
-    /**
84
-     * Indicates that DTMF support changed.
85
-     */
86
-    DTMF_SUPPORT_CHANGED = "conference.dtmfSupportChanged",
87
-    /**
88
-     * Indicates that a message from another participant is received on data
89
-     * channel.
90
-     */
91
-    ENDPOINT_MESSAGE_RECEIVED = "conference.endpoint_message_received",
92
-    /**
93
-     * Indicates that a message for the remote endpoint statistics has been received on the bridge channel.
94
-     */
95
-    ENDPOINT_STATS_RECEIVED = "conference.endpoint_stats_received",
96
-    /**
97
-     * NOTE This is lib-jitsi-meet internal event and can be removed at any time !
98
-     *
99
-     * Event emitted when conference transits, between one to one and multiparty JVB
100
-     * conference. If the conference switches to P2P it's neither one to one nor
101
-     * a multiparty JVB conference, but P2P (the status argument of this event will
102
-     * be <tt>false</tt>).
103
-     *
104
-     * The first argument is a boolean which carries the previous value and
105
-     * the seconds argument is a boolean with the new status. The event is emitted
106
-     * only if the previous and the new values are different.
107
-     *
108
-     * @type {string}
109
-     */
110
-    JVB121_STATUS = "conference.jvb121Status",
111
-    /**
112
-     * You are kicked from the conference.
113
-     * @param {JitsiParticipant} the participant that initiated the kick.
114
-     */
115
-    KICKED = "conference.kicked",
116
-    /**
117
-     * Participant was kicked from the conference.
118
-     * @param {JitsiParticipant} the participant that initiated the kick.
119
-     * @param {JitsiParticipant} the participant that was kicked.
120
-     */
121
-    PARTICIPANT_KICKED = "conference.participant_kicked",
122
-    /**
123
-     * The Last N set is changed.
124
-     *
125
-     * @param {Array<string>|null} leavingEndpointIds the ids of all the endpoints
126
-     * which are leaving Last N
127
-     * @param {Array<string>|null} enteringEndpointIds the ids of all the endpoints
128
-     * which are entering Last N
129
-     */
130
-    LAST_N_ENDPOINTS_CHANGED = "conference.lastNEndpointsChanged",
131
-    /**
132
-     * The forwarded sources set is changed.
133
-     *
134
-     * @param {Array<string>} leavingForwardedSources the sourceNames of all the tracks which are leaving forwarded
135
-     * sources
136
-     * @param {Array<string>} enteringForwardedSources the sourceNames of all the tracks which are entering forwarded
137
-     * sources
138
-     */
139
-    FORWARDED_SOURCES_CHANGED = "conference.forwardedSourcesChanged",
140
-    /**
141
-     * Indicates that the room has been locked or unlocked.
142
-     */
143
-    LOCK_STATE_CHANGED = "conference.lock_state_changed",
144
-    /**
145
-     * Indicates that the region of the media server (jitsi-videobridge) that we
146
-     * are connected to changed (or was initially set).
147
-     * @type {string} the region.
148
-     */
149
-    SERVER_REGION_CHANGED = "conference.server_region_changed",
150
-    /**
151
-     * An event(library-private) fired when a new media session is added to the conference.
152
-     * @type {string}
153
-     * @private
154
-     */
155
-    _MEDIA_SESSION_STARTED = "conference.media_session.started",
156
-    /**
157
-     * An event(library-private) fired when the conference switches the currently active media session.
158
-     * @type {string}
159
-     * @private
160
-     */
161
-    _MEDIA_SESSION_ACTIVE_CHANGED = "conference.media_session.active_changed",
162
-    /**
163
-     * Indicates that the conference had changed to members only enabled/disabled.
164
-     * The first argument of this event is a <tt>boolean</tt> which when set to
165
-     * <tt>true</tt> means that the conference is running in members only mode.
166
-     * You may need to use Lobby if supported to ask for permissions to enter the conference.
167
-     */
168
-    MEMBERS_ONLY_CHANGED = "conference.membersOnlyChanged",
169
-    /**
170
-     * New text message was received.
171
-     */
172
-    MESSAGE_RECEIVED = "conference.messageReceived",
173
-    /**
174
-     * Event indicates that the current selected input device has no signal
175
-     */
176
-    NO_AUDIO_INPUT = "conference.no_audio_input",
177
-    /**
178
-     * Event indicates that the current microphone used by the conference is noisy.
179
-     */
180
-    NOISY_MIC = "conference.noisy_mic",
181
-    /**
182
-     * Indicates that a message from the local user or from the Prosody backend
183
-     * was received on the data channel.
184
-     */
185
-    NON_PARTICIPANT_MESSAGE_RECEIVED = "conference.non_participant_message_received",
186
-    /**
187
-     * New private text message was received.
188
-     */
189
-    PRIVATE_MESSAGE_RECEIVED = "conference.privateMessageReceived",
190
-    /**
191
-     * Event fired when JVB sends notification about interrupted/restored user's
192
-     * ICE connection status or we detect local problem with the video track.
193
-     * First argument is the ID of the participant and
194
-     * the seconds is a string indicating if the connection is currently
195
-     * - active - the connection is active
196
-     * - inactive - the connection is inactive, was intentionally interrupted by
197
-     * the bridge
198
-     * - interrupted - a network problem occurred
199
-     * - restoring - the connection was inactive and is restoring now
200
-     *
201
-     * The current status value can be obtained by calling
202
-     * JitsiParticipant.getConnectionStatus().
203
-     */
204
-    PARTICIPANT_CONN_STATUS_CHANGED = "conference.participant_conn_status_changed",
205
-    /**
206
-     * Indicates that the features of the participant has been changed.
207
-     * TODO: there is a spelling mistake in this event name and associated constants
208
-     */
209
-    PARTCIPANT_FEATURES_CHANGED = "conference.partcipant_features_changed",
210
-    /**
211
-     * Indicates that a the value of a specific property of a specific participant
212
-     * has changed.
213
-     */
214
-    PARTICIPANT_PROPERTY_CHANGED = "conference.participant_property_changed",
215
-    /**
216
-     * Indicates that the conference has switched between JVB and P2P connections.
217
-     * The first argument of this event is a <tt>boolean</tt> which when set to
218
-     * <tt>true</tt> means that the conference is running on the P2P connection.
219
-     */
220
-    P2P_STATUS = "conference.p2pStatus",
221
-    /**
222
-     * Indicates that phone number changed.
223
-     */
224
-    PHONE_NUMBER_CHANGED = "conference.phoneNumberChanged",
225
-    /**
226
-     * The conference properties changed.
227
-     * @type {string}
228
-     */
229
-    PROPERTIES_CHANGED = "conference.propertiesChanged",
230
-    /**
231
-     * Indicates that recording state changed.
232
-     */
233
-    RECORDER_STATE_CHANGED = "conference.recorderStateChanged",
234
-    /**
235
-     * Indicates that video SIP GW state changed.
236
-     * @param {VideoSIPGWConstants} status.
237
-     */
238
-    VIDEO_SIP_GW_AVAILABILITY_CHANGED = "conference.videoSIPGWAvailabilityChanged",
239
-    /**
240
-     * Indicates that video SIP GW Session state changed.
241
-     * @param {options} event - {
242
-     *     {string} address,
243
-     *     {VideoSIPGWConstants} oldState,
244
-     *     {VideoSIPGWConstants} newState,
245
-     *     {string} displayName}
246
-     * }.
247
-     */
248
-    VIDEO_SIP_GW_SESSION_STATE_CHANGED = "conference.videoSIPGWSessionStateChanged",
249
-    /**
250
-     * Indicates that start muted settings changed.
251
-     */
252
-    START_MUTED_POLICY_CHANGED = "conference.start_muted_policy_changed",
253
-    /**
254
-     * Indicates that the local user has started muted.
255
-     */
256
-    STARTED_MUTED = "conference.started_muted",
257
-    /**
258
-     * Indicates that subject of the conference has changed.
259
-     */
260
-    SUBJECT_CHANGED = "conference.subjectChanged",
261
-    /**
262
-     * Indicates that DTMF support changed.
263
-     */
264
-    SUSPEND_DETECTED = "conference.suspendDetected",
265
-    /**
266
-     * Event indicates that local user is talking while he muted himself
267
-     */
268
-    TALK_WHILE_MUTED = "conference.talk_while_muted",
269
-    /**
270
-     * A new media track was added to the conference. The event provides the
271
-     * following parameters to its listeners:
272
-     *
273
-     * @param {JitsiTrack} track the added JitsiTrack
274
-     */
275
-    TRACK_ADDED = "conference.trackAdded",
276
-    /**
277
-     * Audio levels of a media track ( attached to the conference) was changed.
278
-     */
279
-    TRACK_AUDIO_LEVEL_CHANGED = "conference.audioLevelsChanged",
280
-    /**
281
-     * A media track ( attached to the conference) mute status was changed.
282
-     * @param {JitsiParticipant|null} the participant that initiated the mute
283
-     * if it is a remote mute.
284
-     */
285
-    TRACK_MUTE_CHANGED = "conference.trackMuteChanged",
286
-    /**
287
-     * The media track was removed from the conference. The event provides the
288
-     * following parameters to its listeners:
289
-     *
290
-     * @param {JitsiTrack} track the removed JitsiTrack
291
-     */
292
-    TRACK_REMOVED = "conference.trackRemoved",
293
-    /**
294
-     * The source-add for unmuting of a media track was rejected by Jicofo.
295
-     *
296
-     */
297
-    TRACK_UNMUTE_REJECTED = "conference.trackUnmuteRejected",
298
-    /**
299
-     * Notifies for transcription status changes. The event provides the
300
-     * following parameters to its listeners:
301
-     *
302
-     * @param {String} status - The new status.
303
-     */
304
-    TRANSCRIPTION_STATUS_CHANGED = "conference.transcriptionStatusChanged",
305
-    /**
306
-     * A new user joined the conference.
307
-     */
308
-    USER_JOINED = "conference.userJoined",
309
-    /**
310
-     * A user has left the conference.
311
-     */
312
-    USER_LEFT = "conference.userLeft",
313
-    /**
314
-     * User role changed.
315
-     */
316
-    USER_ROLE_CHANGED = "conference.roleChanged",
317
-    /**
318
-     * User status changed.
319
-     */
320
-    USER_STATUS_CHANGED = "conference.statusChanged",
321
-    /**
322
-     * Event indicates that the permission for unmuting video has changed based on the number of video senders in the call
323
-     * and the video sender limit configured in Jicofo.
324
-     */
325
-    VIDEO_UNMUTE_PERMISSIONS_CHANGED = "conference.video_unmute_permissions_changed",
326
-    /**
327
-     * Event indicates that the bot participant type changed.
328
-     */
329
-    BOT_TYPE_CHANGED = "conference.bot_type_changed",
330
-    /**
331
-     * A new user joined the lobby room.
332
-     */
333
-    LOBBY_USER_JOINED = "conference.lobby.userJoined",
334
-    /**
335
-     * A user from the lobby room has been update.
336
-     */
337
-    LOBBY_USER_UPDATED = "conference.lobby.userUpdated",
338
-    /**
339
-     * A user left the lobby room.
340
-     */
341
-    LOBBY_USER_LEFT = "conference.lobby.userLeft",
342
-    /**
343
-     * The local participant was approved to be able to unmute.
344
-     * @param {options} event - {
345
-     *     {MediaType} mediaType
346
-     * }.
347
-     */
348
-    AV_MODERATION_APPROVED = "conference.av_moderation.approved",
349
-    /**
350
-     * The local participant was blocked to be able to unmute.
351
-     * @param {options} event - {
352
-     *     {MediaType} mediaType
353
-     * }.
354
-     */
355
-    AV_MODERATION_REJECTED = "conference.av_moderation.rejected",
356
-    /**
357
-     * AV Moderation was enabled/disabled. The actor is the participant that is currently in the meeting,
358
-     * or undefined if that participant has left the meeting.
359
-     *
360
-     * @param {options} event - {
361
-     *     {boolean} enabled,
362
-     *     {MediaType} mediaType,
363
-     *     {JitsiParticipant} actor
364
-     * }.
365
-     */
366
-    AV_MODERATION_CHANGED = "conference.av_moderation.changed",
367
-    /**
368
-     * AV Moderation, report for user being approved to unmute.
369
-     * @param {options} event - {
370
-     *     {JitsiParticipant} participant,
371
-     *     {MediaType} mediaType
372
-     * }.
373
-     */
374
-    AV_MODERATION_PARTICIPANT_APPROVED = "conference.av_moderation.participant.approved",
375
-    /**
376
-     * AV Moderation, report for user being blocked to unmute.
377
-     * @param {options} event - {
378
-     *     {JitsiParticipant} participant,
379
-     *     {MediaType} mediaType
380
-     * }.
381
-     */
382
-    AV_MODERATION_PARTICIPANT_REJECTED = "conference.av_moderation.participant.rejected",
383
-    /**
384
-     * A new face landmark object is added for a participant
385
-     */
386
-    FACE_LANDMARK_ADDED = "conference.face_landmark.added",
387
-    /**
388
-     * Event fired when a participant is requested to join a given (breakout) room.
389
-     */
390
-    BREAKOUT_ROOMS_MOVE_TO_ROOM = "conference.breakout-rooms.move-to-room",
391
-    /**
392
-     * Event fired when the breakout rooms data was updated.
393
-     */
394
-    BREAKOUT_ROOMS_UPDATED = "conference.breakout-rooms.updated",
395
-    /**
396
-     * Event fired when the conference metadata is updated.
397
-     */
398
-    METADATA_UPDATED = "conference.metadata.updated"
399
-}
400
-export declare const AUDIO_INPUT_STATE_CHANGE = JitsiConferenceEvents.AUDIO_INPUT_STATE_CHANGE;
401
-export declare const AUDIO_UNMUTE_PERMISSIONS_CHANGED = JitsiConferenceEvents.AUDIO_UNMUTE_PERMISSIONS_CHANGED;
402
-export declare const AUTH_STATUS_CHANGED = JitsiConferenceEvents.AUTH_STATUS_CHANGED;
403
-export declare const BEFORE_STATISTICS_DISPOSED = JitsiConferenceEvents.BEFORE_STATISTICS_DISPOSED;
404
-export declare const CONFERENCE_ERROR = JitsiConferenceEvents.CONFERENCE_ERROR;
405
-export declare const CONFERENCE_FAILED = JitsiConferenceEvents.CONFERENCE_FAILED;
406
-export declare const CONFERENCE_JOIN_IN_PROGRESS = JitsiConferenceEvents.CONFERENCE_JOIN_IN_PROGRESS;
407
-export declare const CONFERENCE_JOINED = JitsiConferenceEvents.CONFERENCE_JOINED;
408
-export declare const CONFERENCE_LEFT = JitsiConferenceEvents.CONFERENCE_LEFT;
409
-export declare const CONFERENCE_UNIQUE_ID_SET = JitsiConferenceEvents.CONFERENCE_UNIQUE_ID_SET;
410
-export declare const CONNECTION_ESTABLISHED = JitsiConferenceEvents.CONNECTION_ESTABLISHED;
411
-export declare const CONNECTION_INTERRUPTED = JitsiConferenceEvents.CONNECTION_INTERRUPTED;
412
-export declare const CONNECTION_RESTORED = JitsiConferenceEvents.CONNECTION_RESTORED;
413
-export declare const DATA_CHANNEL_OPENED = JitsiConferenceEvents.DATA_CHANNEL_OPENED;
414
-export declare const DISPLAY_NAME_CHANGED = JitsiConferenceEvents.DISPLAY_NAME_CHANGED;
415
-export declare const DOMINANT_SPEAKER_CHANGED = JitsiConferenceEvents.DOMINANT_SPEAKER_CHANGED;
416
-export declare const CONFERENCE_CREATED_TIMESTAMP = JitsiConferenceEvents.CONFERENCE_CREATED_TIMESTAMP;
417
-export declare const DTMF_SUPPORT_CHANGED = JitsiConferenceEvents.DTMF_SUPPORT_CHANGED;
418
-export declare const ENDPOINT_MESSAGE_RECEIVED = JitsiConferenceEvents.ENDPOINT_MESSAGE_RECEIVED;
419
-export declare const ENDPOINT_STATS_RECEIVED = JitsiConferenceEvents.ENDPOINT_STATS_RECEIVED;
420
-export declare const JVB121_STATUS = JitsiConferenceEvents.JVB121_STATUS;
421
-export declare const KICKED = JitsiConferenceEvents.KICKED;
422
-export declare const PARTICIPANT_KICKED = JitsiConferenceEvents.PARTICIPANT_KICKED;
423
-export declare const LAST_N_ENDPOINTS_CHANGED = JitsiConferenceEvents.LAST_N_ENDPOINTS_CHANGED;
424
-export declare const FORWARDED_SOURCES_CHANGED = JitsiConferenceEvents.FORWARDED_SOURCES_CHANGED;
425
-export declare const LOCK_STATE_CHANGED = JitsiConferenceEvents.LOCK_STATE_CHANGED;
426
-export declare const SERVER_REGION_CHANGED = JitsiConferenceEvents.SERVER_REGION_CHANGED;
427
-export declare const _MEDIA_SESSION_STARTED = JitsiConferenceEvents._MEDIA_SESSION_STARTED;
428
-export declare const _MEDIA_SESSION_ACTIVE_CHANGED = JitsiConferenceEvents._MEDIA_SESSION_ACTIVE_CHANGED;
429
-export declare const MEMBERS_ONLY_CHANGED = JitsiConferenceEvents.MEMBERS_ONLY_CHANGED;
430
-export declare const MESSAGE_RECEIVED = JitsiConferenceEvents.MESSAGE_RECEIVED;
431
-export declare const NO_AUDIO_INPUT = JitsiConferenceEvents.NO_AUDIO_INPUT;
432
-export declare const NOISY_MIC = JitsiConferenceEvents.NOISY_MIC;
433
-export declare const NON_PARTICIPANT_MESSAGE_RECEIVED = JitsiConferenceEvents.NON_PARTICIPANT_MESSAGE_RECEIVED;
434
-export declare const PRIVATE_MESSAGE_RECEIVED = JitsiConferenceEvents.PRIVATE_MESSAGE_RECEIVED;
435
-export declare const PARTICIPANT_CONN_STATUS_CHANGED = JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED;
436
-export declare const PARTCIPANT_FEATURES_CHANGED = JitsiConferenceEvents.PARTCIPANT_FEATURES_CHANGED;
437
-export declare const PARTICIPANT_PROPERTY_CHANGED = JitsiConferenceEvents.PARTICIPANT_PROPERTY_CHANGED;
438
-export declare const P2P_STATUS = JitsiConferenceEvents.P2P_STATUS;
439
-export declare const PHONE_NUMBER_CHANGED = JitsiConferenceEvents.PHONE_NUMBER_CHANGED;
440
-export declare const PROPERTIES_CHANGED = JitsiConferenceEvents.PROPERTIES_CHANGED;
441
-export declare const RECORDER_STATE_CHANGED = JitsiConferenceEvents.RECORDER_STATE_CHANGED;
442
-export declare const VIDEO_SIP_GW_AVAILABILITY_CHANGED = JitsiConferenceEvents.VIDEO_SIP_GW_AVAILABILITY_CHANGED;
443
-export declare const VIDEO_SIP_GW_SESSION_STATE_CHANGED = JitsiConferenceEvents.VIDEO_SIP_GW_SESSION_STATE_CHANGED;
444
-export declare const START_MUTED_POLICY_CHANGED = JitsiConferenceEvents.START_MUTED_POLICY_CHANGED;
445
-export declare const STARTED_MUTED = JitsiConferenceEvents.STARTED_MUTED;
446
-export declare const SUBJECT_CHANGED = JitsiConferenceEvents.SUBJECT_CHANGED;
447
-export declare const SUSPEND_DETECTED = JitsiConferenceEvents.SUSPEND_DETECTED;
448
-export declare const TALK_WHILE_MUTED = JitsiConferenceEvents.TALK_WHILE_MUTED;
449
-export declare const TRACK_ADDED = JitsiConferenceEvents.TRACK_ADDED;
450
-export declare const TRACK_AUDIO_LEVEL_CHANGED = JitsiConferenceEvents.TRACK_AUDIO_LEVEL_CHANGED;
451
-export declare const TRACK_MUTE_CHANGED = JitsiConferenceEvents.TRACK_MUTE_CHANGED;
452
-export declare const TRACK_REMOVED = JitsiConferenceEvents.TRACK_REMOVED;
453
-export declare const TRACK_UNMUTE_REJECTED = JitsiConferenceEvents.TRACK_UNMUTE_REJECTED;
454
-export declare const TRANSCRIPTION_STATUS_CHANGED = JitsiConferenceEvents.TRANSCRIPTION_STATUS_CHANGED;
455
-export declare const USER_JOINED = JitsiConferenceEvents.USER_JOINED;
456
-export declare const USER_LEFT = JitsiConferenceEvents.USER_LEFT;
457
-export declare const USER_ROLE_CHANGED = JitsiConferenceEvents.USER_ROLE_CHANGED;
458
-export declare const USER_STATUS_CHANGED = JitsiConferenceEvents.USER_STATUS_CHANGED;
459
-export declare const VIDEO_UNMUTE_PERMISSIONS_CHANGED = JitsiConferenceEvents.VIDEO_UNMUTE_PERMISSIONS_CHANGED;
460
-export declare const BOT_TYPE_CHANGED = JitsiConferenceEvents.BOT_TYPE_CHANGED;
461
-export declare const LOBBY_USER_JOINED = JitsiConferenceEvents.LOBBY_USER_JOINED;
462
-export declare const LOBBY_USER_UPDATED = JitsiConferenceEvents.LOBBY_USER_UPDATED;
463
-export declare const LOBBY_USER_LEFT = JitsiConferenceEvents.LOBBY_USER_LEFT;
464
-export declare const AV_MODERATION_APPROVED = JitsiConferenceEvents.AV_MODERATION_APPROVED;
465
-export declare const AV_MODERATION_REJECTED = JitsiConferenceEvents.AV_MODERATION_REJECTED;
466
-export declare const AV_MODERATION_CHANGED = JitsiConferenceEvents.AV_MODERATION_CHANGED;
467
-export declare const AV_MODERATION_PARTICIPANT_APPROVED = JitsiConferenceEvents.AV_MODERATION_PARTICIPANT_APPROVED;
468
-export declare const AV_MODERATION_PARTICIPANT_REJECTED = JitsiConferenceEvents.AV_MODERATION_PARTICIPANT_REJECTED;
469
-export declare const FACE_LANDMARK_ADDED = JitsiConferenceEvents.FACE_LANDMARK_ADDED;
470
-export declare const BREAKOUT_ROOMS_MOVE_TO_ROOM = JitsiConferenceEvents.BREAKOUT_ROOMS_MOVE_TO_ROOM;
471
-export declare const BREAKOUT_ROOMS_UPDATED = JitsiConferenceEvents.BREAKOUT_ROOMS_UPDATED;
472
-export declare const METADATA_UPDATED = JitsiConferenceEvents.METADATA_UPDATED;

+ 0
- 106
types/auto/JitsiConnection.d.ts View File

@@ -1,106 +0,0 @@
1
-/**
2
- * Creates a new connection object for the Jitsi Meet server side video
3
- * conferencing service. Provides access to the JitsiConference interface.
4
- * @param appID identification for the provider of Jitsi Meet video conferencing
5
- * services.
6
- * @param token the JWT token used to authenticate with the server(optional)
7
- * @param options Object with properties / settings related to connection with
8
- * the server.
9
- * @constructor
10
- */
11
-export default function JitsiConnection(appID: any, token: any, options: any): void;
12
-export default class JitsiConnection {
13
-    /**
14
-     * Creates a new connection object for the Jitsi Meet server side video
15
-     * conferencing service. Provides access to the JitsiConference interface.
16
-     * @param appID identification for the provider of Jitsi Meet video conferencing
17
-     * services.
18
-     * @param token the JWT token used to authenticate with the server(optional)
19
-     * @param options Object with properties / settings related to connection with
20
-     * the server.
21
-     * @constructor
22
-     */
23
-    constructor(appID: any, token: any, options: any);
24
-    appID: any;
25
-    token: any;
26
-    options: any;
27
-    xmpp: XMPP;
28
-    /**
29
-     * Connect the client with the server.
30
-     * @param options {object} connecting options
31
-     * (for example authentications parameters).
32
-     */
33
-    connect(options?: object): void;
34
-    /**
35
-     * Attach to existing connection. Can be used for optimizations. For example:
36
-     * if the connection is created on the server we can attach to it and start
37
-     * using it.
38
-     *
39
-     * @param options {object} connecting options - rid, sid and jid.
40
-     */
41
-    attach(options: object): void;
42
-    /**
43
-     * Disconnect the client from the server.
44
-     * @returns {Promise} - Resolves when the disconnect process is finished or rejects with an error.
45
-     */
46
-    disconnect(...args: any[]): Promise<any>;
47
-    /**
48
-     * Returns the jid of the participant associated with the XMPP connection.
49
-     *
50
-     * @returns {string} The jid of the participant.
51
-     */
52
-    getJid(): string;
53
-    /**
54
-     * This method allows renewal of the tokens if they are expiring.
55
-     * @param token the new token.
56
-     */
57
-    setToken(token: any): void;
58
-    /**
59
-     * Creates and joins new conference.
60
-     * @param name the name of the conference; if null - a generated name will be
61
-     * provided from the api
62
-     * @param options Object with properties / settings related to the conference
63
-     * that will be created.
64
-     * @returns {JitsiConference} returns the new conference object.
65
-     */
66
-    initJitsiConference(name: any, options: any): JitsiConference;
67
-    /**
68
-     * Subscribes the passed listener to the event.
69
-     * @param event {JitsiConnectionEvents} the connection event.
70
-     * @param listener {Function} the function that will receive the event
71
-     */
72
-    addEventListener(event: typeof JitsiConnectionEvents, listener: Function): void;
73
-    /**
74
-     * Unsubscribes the passed handler.
75
-     * @param event {JitsiConnectionEvents} the connection event.
76
-     * @param listener {Function} the function that will receive the event
77
-     */
78
-    removeEventListener(event: typeof JitsiConnectionEvents, listener: Function): void;
79
-    /**
80
-     * Returns measured connectionTimes.
81
-     */
82
-    getConnectionTimes(): {};
83
-    /**
84
-     * Adds new feature to the list of supported features for the local
85
-     * participant.
86
-     * @param {String} feature the name of the feature.
87
-     * @param {boolean} submit if true - the new list of features will be
88
-     * immediately submitted to the others.
89
-     */
90
-    addFeature(feature: string, submit?: boolean): void;
91
-    /**
92
-     * Removes a feature from the list of supported features for the local
93
-     * participant
94
-     * @param {String} feature the name of the feature.
95
-     * @param {boolean} submit if true - the new list of features will be
96
-     * immediately submitted to the others.
97
-     */
98
-    removeFeature(feature: string, submit?: boolean): void;
99
-    /**
100
-     * Get object with internal logs.
101
-     */
102
-    getLogs(): any;
103
-}
104
-import XMPP from "./modules/xmpp/xmpp";
105
-import JitsiConference from "./JitsiConference";
106
-import * as JitsiConnectionEvents from "./JitsiConnectionEvents";

+ 0
- 34
types/auto/JitsiConnectionErrors.d.ts View File

@@ -1,34 +0,0 @@
1
-/**
2
- * The errors for the connection.
3
- */
4
-export declare enum JitsiConnectionErrors {
5
-    /**
6
-     * Indicates that the connection was dropped with an error which was most likely
7
-     * caused by some networking issues. The dropped term in this context means that
8
-     * the connection was closed unexpectedly (not on user's request).
9
-     *
10
-     * One example is 'item-not-found' error thrown by Prosody when the BOSH session
11
-     * times out after 60 seconds of inactivity. On the other hand 'item-not-found'
12
-     * could also happen when BOSH request is sent to the server with the session-id
13
-     * that is not know to the server. But this should not happen in lib-jitsi-meet
14
-     * case as long as the service is configured correctly (there is no bug).
15
-     */
16
-    CONNECTION_DROPPED_ERROR = "connection.droppedError",
17
-    /**
18
-     * Not specified errors.
19
-     */
20
-    OTHER_ERROR = "connection.otherError",
21
-    /**
22
-     * Indicates that a password is required in order to join the conference.
23
-     */
24
-    PASSWORD_REQUIRED = "connection.passwordRequired",
25
-    /**
26
-     * Indicates that the connection was dropped, because of too many 5xx HTTP
27
-     * errors on BOSH requests.
28
-     */
29
-    SERVER_ERROR = "connection.serverError"
30
-}
31
-export declare const CONNECTION_DROPPED_ERROR = JitsiConnectionErrors.CONNECTION_DROPPED_ERROR;
32
-export declare const OTHER_ERROR = JitsiConnectionErrors.OTHER_ERROR;
33
-export declare const PASSWORD_REQUIRED = JitsiConnectionErrors.PASSWORD_REQUIRED;
34
-export declare const SERVER_ERROR = JitsiConnectionErrors.SERVER_ERROR;

+ 0
- 49
types/auto/JitsiConnectionEvents.d.ts View File

@@ -1,49 +0,0 @@
1
-/**
2
- * The events for the connection.
3
- */
4
-export declare enum JitsiConnectionEvents {
5
-    /**
6
-     * Indicates that the connection has been disconnected. The event provides
7
-     * the following parameters to its listeners:
8
-     *
9
-     * @param msg {string} a message associated with the disconnect such as the
10
-     * last (known) error message
11
-     */
12
-    CONNECTION_DISCONNECTED = "connection.connectionDisconnected",
13
-    /**
14
-     * Indicates that the connection has been established. The event provides
15
-     * the following parameters to its listeners:
16
-     *
17
-     * @param id {string} the ID of the local endpoint/participant/peer (within
18
-     * the context of the established connection)
19
-     */
20
-    CONNECTION_ESTABLISHED = "connection.connectionEstablished",
21
-    /**
22
-     * Indicates that the connection has been failed for some reason. The event
23
-     * provides the following parameters to its listeners:
24
-     *
25
-     * @param errType {JitsiConnectionErrors} the type of error associated with
26
-     * the failure
27
-     * @param errReason {string} the error (message) associated with the failure
28
-     * @param credentials {object} the credentials used to connect (if any)
29
-     * @param errReasonDetails {object} an optional object with details about
30
-     * the error, like shard moving, suspending. Used for analytics purposes.
31
-     */
32
-    CONNECTION_FAILED = "connection.connectionFailed",
33
-    /**
34
-     * Indicates that the performed action cannot be executed because the
35
-     * connection is not in the correct state(connected, disconnected, etc.)
36
-     */
37
-    WRONG_STATE = "connection.wrongState",
38
-    /**
39
-     * Indicates that the display name is required over this connection and need to be supplied when
40
-     * joining the room.
41
-     * There are cases like lobby room where display name is required.
42
-     */
43
-    DISPLAY_NAME_REQUIRED = "connection.display_name_required"
44
-}
45
-export declare const CONNECTION_DISCONNECTED = JitsiConnectionEvents.CONNECTION_DISCONNECTED;
46
-export declare const CONNECTION_ESTABLISHED = JitsiConnectionEvents.CONNECTION_ESTABLISHED;
47
-export declare const CONNECTION_FAILED = JitsiConnectionEvents.CONNECTION_FAILED;
48
-export declare const WRONG_STATE = JitsiConnectionEvents.WRONG_STATE;
49
-export declare const DISPLAY_NAME_REQUIRED = JitsiConnectionEvents.DISPLAY_NAME_REQUIRED;

+ 0
- 100
types/auto/JitsiMediaDevices.d.ts View File

@@ -1,100 +0,0 @@
1
-/// <reference types="node" />
2
-declare var _default: JitsiMediaDevices;
3
-export default _default;
4
-/**
5
- * Media devices utilities for Jitsi.
6
- */
7
-declare class JitsiMediaDevices {
8
-    _eventEmitter: EventEmitter;
9
-    _permissions: {};
10
-    _permissionsApiSupported: Promise<any>;
11
-    /**
12
-     * Parses a PermissionState object and returns true for granted and false otherwise.
13
-     *
14
-     * @param {PermissionState} permissionStatus - The PermissionState object retrieved from the Permissions API.
15
-     * @returns {boolean} - True for granted and false for denied.
16
-     * @throws {TypeError}
17
-     */
18
-    _parsePermissionState(permissionStatus?: PermissionState): boolean;
19
-    /**
20
-     * Updates the local granted/denied permissions cache. A permissions might be
21
-     * granted, denied, or undefined. This is represented by having its media
22
-     * type key set to {@code true} or {@code false} respectively.
23
-     *
24
-     * @param {Object} permissions - Object with the permissions.
25
-     */
26
-    _handlePermissionsChange(permissions: any): void;
27
-    /**
28
-     * Gathers data and sends it to statistics.
29
-     * @param deviceID the device id to log
30
-     * @param devices list of devices
31
-     */
32
-    _logOutputDevice(deviceID: any, devices: any): void;
33
-    /**
34
-     * Executes callback with list of media devices connected.
35
-     * @param {function} callback
36
-     */
37
-    enumerateDevices(callback: Function): void;
38
-    /**
39
-     * Checks if its possible to enumerate available cameras/micropones.
40
-     * @returns {Promise<boolean>} a Promise which will be resolved only once
41
-     * the WebRTC stack is ready, either with true if the device listing is
42
-     * available available or with false otherwise.
43
-     */
44
-    isDeviceListAvailable(): Promise<boolean>;
45
-    /**
46
-     * Returns true if changing the input (camera / microphone) or output
47
-     * (audio) device is supported and false if not.
48
-     * @param {string} [deviceType] - type of device to change. Default is
49
-     *      undefined or 'input', 'output' - for audio output device change.
50
-     * @returns {boolean} true if available, false otherwise.
51
-     */
52
-    isDeviceChangeAvailable(deviceType?: string): boolean;
53
-    /**
54
-     * Checks if the permission for the given device was granted.
55
-     *
56
-     * @param {'audio'|'video'} [type] - type of devices to check,
57
-     *      undefined stands for both 'audio' and 'video' together
58
-     * @returns {Promise<boolean>}
59
-     */
60
-    isDevicePermissionGranted(type?: 'audio' | 'video'): Promise<boolean>;
61
-    /**
62
-     * Returns true if it is possible to be simultaneously capturing audio from more than one device.
63
-     *
64
-     * @returns {boolean}
65
-     */
66
-    isMultipleAudioInputSupported(): boolean;
67
-    /**
68
-     * Returns currently used audio output device id, 'default' stands
69
-     * for default device
70
-     * @returns {string}
71
-     */
72
-    getAudioOutputDevice(): string;
73
-    /**
74
-     * Sets current audio output device.
75
-     * @param {string} deviceId - id of 'audiooutput' device from
76
-     *      navigator.mediaDevices.enumerateDevices(), 'default' is for
77
-     *      default device
78
-     * @returns {Promise} - resolves when audio output is changed, is rejected
79
-     *      otherwise
80
-     */
81
-    setAudioOutputDevice(deviceId: string): Promise<any>;
82
-    /**
83
-     * Adds an event handler.
84
-     * @param {string} event - event name
85
-     * @param {function} handler - event handler
86
-     */
87
-    addEventListener(event: string, handler: Function): void;
88
-    /**
89
-     * Removes event handler.
90
-     * @param {string} event - event name
91
-     * @param {function} handler - event handler
92
-     */
93
-    removeEventListener(event: string, handler: Function): void;
94
-    /**
95
-     * Emits an event.
96
-     * @param {string} event - event name
97
-     */
98
-    emitEvent(event: string, ...args: any[]): void;
99
-}
100
-import EventEmitter from "events";

+ 0
- 35
types/auto/JitsiMediaDevicesEvents.d.ts View File

@@ -1,35 +0,0 @@
1
-/**
2
- * The events for the media devices.
3
- */
4
-export declare enum JitsiMediaDevicesEvents {
5
-    /**
6
-     * Indicates that the list of available media devices has been changed. The
7
-     * event provides the following parameters to its listeners:
8
-     *
9
-     * @param {MediaDeviceInfo[]} devices - array of MediaDeviceInfo or
10
-     *  MediaDeviceInfo-like objects that are currently connected.
11
-     *  @see https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo
12
-     */
13
-    DEVICE_LIST_CHANGED = "mediaDevices.devicechange",
14
-    /**
15
-     * Event emitted when the user granted/blocked a permission for the camera / mic.
16
-     * Used to keep track of the granted permissions on browsers which don't
17
-     * support the Permissions API.
18
-     */
19
-    PERMISSIONS_CHANGED = "rtc.permissions_changed",
20
-    /**
21
-     * Indicates that the environment is currently showing permission prompt to
22
-     * access camera and/or microphone. The event provides the following
23
-     * parameters to its listeners:
24
-     *
25
-     * @param {'chrome'|'opera'|'firefox'|'safari'|'nwjs'
26
-     *  |'react-native'|'android'} environmentType - type of browser or
27
-     *  other execution environment.
28
-     */
29
-    PERMISSION_PROMPT_IS_SHOWN = "mediaDevices.permissionPromptIsShown",
30
-    SLOW_GET_USER_MEDIA = "mediaDevices.slowGetUserMedia"
31
-}
32
-export declare const DEVICE_LIST_CHANGED = JitsiMediaDevicesEvents.DEVICE_LIST_CHANGED;
33
-export declare const PERMISSIONS_CHANGED = JitsiMediaDevicesEvents.PERMISSIONS_CHANGED;
34
-export declare const PERMISSION_PROMPT_IS_SHOWN = JitsiMediaDevicesEvents.PERMISSION_PROMPT_IS_SHOWN;
35
-export declare const SLOW_GET_USER_MEDIA = JitsiMediaDevicesEvents.SLOW_GET_USER_MEDIA;

+ 0
- 2
types/auto/JitsiMeetJS.d.ts View File

@@ -1,2 +0,0 @@
1
-declare var _default: any;
2
-export default _default;

+ 0
- 219
types/auto/JitsiParticipant.d.ts View File

@@ -1,219 +0,0 @@
1
-/**
2
- * Represents a participant in (i.e. a member of) a conference.
3
- */
4
-export default class JitsiParticipant {
5
-    /**
6
-     * Initializes a new JitsiParticipant instance.
7
-     *
8
-     * @constructor
9
-     * @param jid the conference XMPP jid
10
-     * @param conference
11
-     * @param displayName
12
-     * @param {Boolean} hidden - True if the new JitsiParticipant instance is to
13
-     * represent a hidden participant; otherwise, false.
14
-     * @param {string} statsID - optional participant statsID
15
-     * @param {string} status - the initial status if any.
16
-     * @param {object} identity - the xmpp identity
17
-     * @param {boolean?} isReplacing - whether this is a participant replacing another into the meeting.
18
-     * @param {boolean?} isReplaced - whether this is a participant to be kicked and replaced into the meeting.
19
-     */
20
-    constructor(jid: any, conference: any, displayName: any, hidden: boolean, statsID: string, status: string, identity: object, isReplacing: boolean | null, isReplaced: boolean | null);
21
-    _jid: any;
22
-    _id: any;
23
-    _conference: any;
24
-    _displayName: any;
25
-    _supportsDTMF: boolean;
26
-    _tracks: any[];
27
-    _role: string;
28
-    _status: string;
29
-    _hidden: boolean;
30
-    _statsID: string;
31
-    _connectionStatus: string;
32
-    _properties: {};
33
-    _identity: any;
34
-    _isReplacing: boolean;
35
-    _isReplaced: boolean;
36
-    _features: Set<any>;
37
-    /**
38
-     * @returns {JitsiConference} The conference that this participant belongs
39
-     * to.
40
-     */
41
-    getConference(): any;
42
-    /**
43
-     * Gets the value of a property of this participant.
44
-     */
45
-    getProperty(name: any): any;
46
-    /**
47
-     * Checks whether this <tt>JitsiParticipant</tt> has any video tracks which
48
-     * are muted according to their underlying WebRTC <tt>MediaStreamTrack</tt>
49
-     * muted status.
50
-     * @return {boolean} <tt>true</tt> if this <tt>participant</tt> contains any
51
-     * video <tt>JitsiTrack</tt>s which are muted as defined in
52
-     * {@link JitsiTrack.isWebRTCTrackMuted}.
53
-     */
54
-    hasAnyVideoTrackWebRTCMuted(): boolean;
55
-    /**
56
-     * Updates participant's connection status.
57
-     * @param {string} state the current participant connection state.
58
-     * {@link ParticipantConnectionStatus}.
59
-     * @private
60
-     */
61
-    private _setConnectionStatus;
62
-    /**
63
-     * Return participant's connectivity status.
64
-     *
65
-     * @returns {string} the connection status
66
-     * <tt>ParticipantConnectionStatus</tt> of the user.
67
-     * {@link ParticipantConnectionStatus}.
68
-     */
69
-    getConnectionStatus(): string;
70
-    /**
71
-     * Sets the value of a property of this participant, and fires an event if
72
-     * the value has changed.
73
-     * @name the name of the property.
74
-     * @value the value to set.
75
-     */
76
-    setProperty(name: any, value: any): void;
77
-    /**
78
-     * @returns {Array.<JitsiTrack>} The list of media tracks for this
79
-     * participant.
80
-     */
81
-    getTracks(): Array<any>;
82
-    /**
83
-     * @param {MediaType} mediaType
84
-     * @returns {Array.<JitsiTrack>} an array of media tracks for this
85
-     * participant, for given media type.
86
-     */
87
-    getTracksByMediaType(mediaType: MediaType): Array<any>;
88
-    /**
89
-     * @returns {String} The ID of this participant.
90
-     */
91
-    getId(): string;
92
-    /**
93
-     * @returns {String} The JID of this participant.
94
-     */
95
-    getJid(): string;
96
-    /**
97
-     * @returns {String} The human-readable display name of this participant.
98
-     */
99
-    getDisplayName(): string;
100
-    /**
101
-     * @returns {String} The stats ID of this participant.
102
-     */
103
-    getStatsID(): string;
104
-    /**
105
-     * @returns {String} The status of the participant.
106
-     */
107
-    getStatus(): string;
108
-    /**
109
-     * @returns {Boolean} Whether this participant is a moderator or not.
110
-     */
111
-    isModerator(): boolean;
112
-    /**
113
-     * @returns {Boolean} Whether this participant is a hidden participant. Some
114
-     * special system participants may want to join hidden (like for example the
115
-     * recorder).
116
-     */
117
-    isHidden(): boolean;
118
-    /**
119
-     * @returns {Boolean} Whether this participant is a hidden participant. Some
120
-     * special system participants may want to join hidden (like for example the
121
-     * recorder).
122
-     */
123
-    isHiddenFromRecorder(): boolean;
124
-    /**
125
-     * @returns {Boolean} Whether this participant replaces another participant
126
-     * from the meeting.
127
-     */
128
-    isReplacing(): boolean;
129
-    /**
130
-     * @returns {Boolean} Whether this participant will be replaced by another
131
-     * participant in the meeting.
132
-     */
133
-    isReplaced(): boolean;
134
-    /**
135
-     * @returns {Boolean} Whether this participant has muted their audio.
136
-     */
137
-    isAudioMuted(): boolean;
138
-    /**
139
-     * Determines whether all JitsiTracks which are of a specific MediaType and
140
-     * which belong to this JitsiParticipant are muted.
141
-     *
142
-     * @param {MediaType} mediaType - The MediaType of the JitsiTracks to be
143
-     * checked.
144
-     * @private
145
-     * @returns {Boolean} True if all JitsiTracks which are of the specified
146
-     * mediaType and which belong to this JitsiParticipant are muted; otherwise,
147
-     * false.
148
-     */
149
-    private _isMediaTypeMuted;
150
-    /**
151
-     * @returns {Boolean} Whether this participant has muted their video.
152
-     */
153
-    isVideoMuted(): boolean;
154
-    /**
155
-     * @returns {String} The role of this participant.
156
-     */
157
-    getRole(): string;
158
-    /**
159
-     * Sets a new participant role.
160
-     * @param {String} newRole - the new role.
161
-     */
162
-    setRole(newRole: string): void;
163
-    /**
164
-     * Sets whether participant is replacing another based on jwt.
165
-     * @param {String} newIsReplacing - whether is replacing.
166
-     */
167
-    setIsReplacing(newIsReplacing: string): void;
168
-    /**
169
-     * Sets whether participant is being replaced by another based on jwt.
170
-     * @param {boolean} newIsReplaced - whether is being replaced.
171
-     */
172
-    setIsReplaced(newIsReplaced: boolean): void;
173
-    /**
174
-     *
175
-     */
176
-    supportsDTMF(): boolean;
177
-    /**
178
-     * Returns a set with the features for the participant.
179
-     * @returns {Promise<Set<String>, Error>}
180
-     */
181
-    getFeatures(): Promise<Set<string>, Error>;
182
-    /**
183
-     * Checks current set features.
184
-     * @param {String} feature - the feature to check.
185
-     * @return {boolean} <tt>true</tt> if this <tt>participant</tt> contains the
186
-     * <tt>feature</tt>.
187
-     */
188
-    hasFeature(feature: string): boolean;
189
-    /**
190
-     * Set new features.
191
-     * @param {Set<String>|undefined} newFeatures - Sets new features.
192
-     */
193
-    setFeatures(newFeatures: Set<string> | undefined): void;
194
-    /**
195
-     * Returns the bot type for the participant.
196
-     *
197
-     * @returns {string|undefined} - The bot type of the participant.
198
-     */
199
-    getBotType(): string | undefined;
200
-    /**
201
-     * Sets the bot type for the participant.
202
-     * @param {String} newBotType - The new bot type to set.
203
-     */
204
-    setBotType(newBotType: string): void;
205
-    _botType: string;
206
-    /**
207
-     * Returns the connection jid for the participant.
208
-     *
209
-     * @returns {string|undefined} - The connection jid of the participant.
210
-     */
211
-    getConnectionJid(): string | undefined;
212
-    /**
213
-     * Sets the connection jid for the participant.
214
-     * @param {String} newJid - The connection jid to set.
215
-     */
216
-    setConnectionJid(newJid: string): void;
217
-    _connectionJid: string;
218
-}
219
-import { MediaType } from "./service/RTC/MediaType";
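
Most members here are typed as `any`, so a short usage sketch may help map the declaration back to the runtime API. The `conference.getParticipants()` accessor and the plain `'video'` media-type string are assumptions; the method names are taken from the declaration above.

    // Sketch: log basic info about every remote participant of a conference.
    function describeParticipants(conference: any): void {
        for (const p of conference.getParticipants()) {       // assumed accessor
            console.log(
                p.getId(),
                p.getDisplayName(),
                p.getRole(),
                p.isModerator() ? 'moderator' : 'participant',
                p.isAudioMuted() ? 'audio muted' : 'audio on');

            // Per-media-type tracks, as declared by getTracksByMediaType().
            const videoTracks = p.getTracksByMediaType('video');
            console.log(`${videoTracks.length} video track(s)`);
        }
    }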

+ 0
- 0
types/auto/JitsiParticipantEvents.d.ts


+ 0
- 53
types/auto/JitsiTrackError.d.ts

@@ -1,53 +0,0 @@
1
-export default JitsiTrackError;
2
-/**
3
- *
4
- * Represents an error that occurred to a JitsiTrack. Can represent various
5
- * types of errors. For error descriptions (@see JitsiTrackErrors).
6
- *
7
- * @extends Error
8
- *
9
- *
10
- * @constructor
11
- * @param {Object|string} error - error object or error name
12
- * @param {Object|string} (options) - getUserMedia constraints object or
13
- * error message
14
- * @param {('audio'|'video'|'desktop'|'screen'|'audiooutput')[]} (devices) -
15
- * list of getUserMedia requested devices
16
- */
17
-declare function JitsiTrackError(error: any | string, options: any, devices: any): void;
18
-declare class JitsiTrackError {
19
-    /**
20
-     *
21
-     * Represents an error that occurred to a JitsiTrack. Can represent various
22
-     * types of errors. For error descriptions (@see JitsiTrackErrors).
23
-     *
24
-     * @extends Error
25
-     *
26
-     *
27
-     * @constructor
28
-     * @param {Object|string} error - error object or error name
29
-     * @param {Object|string} (options) - getUserMedia constraints object or
30
-     * error message
31
-     * @param {('audio'|'video'|'desktop'|'screen'|'audiooutput')[]} (devices) -
32
-     * list of getUserMedia requested devices
33
-     */
34
-    constructor(error: any | string, options: any, devices: any);
35
-    /**
36
-     * Additional information about original getUserMedia error
37
-     * and constraints.
38
-     * @type {{
39
-     *     error: Object,
40
-     *     constraints: Object,
41
-     *     devices: Array.<'audio'|'video'|'desktop'|'screen'>
42
-     * }}
43
-     */
44
-    gum: {
45
-        error: any;
46
-        constraints: any;
47
-        devices: Array<'audio' | 'video' | 'desktop' | 'screen'>;
48
-    };
49
-    name: string;
50
-    message: any;
51
-    stack: any;
52
-    constructor: typeof JitsiTrackError;
53
-}
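
The interesting part of this class is the `gum` field, which keeps the original getUserMedia error, constraints and requested devices. A hedged handling sketch; how the error is produced (here, a rejecting track factory passed in by the caller) is an assumption.

    // Sketch: distinguish a permission problem from other gUM failures.
    async function getTracksOrExplain(
            createLocalTracks: (options: object) => Promise<unknown[]>): Promise<unknown[]> {
        try {
            return await createLocalTracks({ devices: [ 'audio', 'video' ] });
        } catch (err: any) {
            // err.name holds one of the JitsiTrackErrors string values.
            if (err.name === 'gum.permission_denied') {
                console.warn('Device access denied for:', err.gum?.devices);
            } else {
                console.error('Track creation failed:', err.message, err.gum?.constraints);
            }
            return [];
        }
    }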

+ 0
- 72
types/auto/JitsiTrackErrors.d.ts

@@ -1,72 +0,0 @@
1
-/**
2
- * The errors for the JitsiTrack objects.
3
- */
4
-export declare enum JitsiTrackErrors {
5
-    /**
6
-     * An error which indicates that some of the requested constraints in
7
-     * the getUserMedia call were not satisfied.
8
-     */
9
-    CONSTRAINT_FAILED = "gum.constraint_failed",
10
-    /**
11
-     * A generic error which indicates an error occurred while selecting
12
-     * a DesktopCapturerSource from the electron app.
13
-     */
14
-    ELECTRON_DESKTOP_PICKER_ERROR = "gum.electron_desktop_picker_error",
15
-    /**
16
-     * An error which indicates a custom desktop picker could not be detected
17
-     * for the electron app.
18
-     */
19
-    ELECTRON_DESKTOP_PICKER_NOT_FOUND = "gum.electron_desktop_picker_not_found",
20
-    /**
21
-     * Generic getUserMedia error.
22
-     */
23
-    GENERAL = "gum.general",
24
-    /**
25
-     * An error which indicates that the requested device was not found.
26
-     */
27
-    NOT_FOUND = "gum.not_found",
28
-    /**
29
-     * An error which indicates that the user denied permission to share the requested
30
-     * device.
31
-     */
32
-    PERMISSION_DENIED = "gum.permission_denied",
33
-    /**
34
-     * Generic error for screensharing failure.
35
-     */
36
-    SCREENSHARING_GENERIC_ERROR = "gum.screensharing_generic_error",
37
-    /**
38
-     * An error which indicates that the user canceled the screen sharing window
39
-     * selection dialog.
40
-     */
41
-    SCREENSHARING_USER_CANCELED = "gum.screensharing_user_canceled",
42
-    /**
43
-     * Indicates that the timeout passed to the obtainAudioAndVideoPermissions has expired without GUM resolving.
44
-     */
45
-    TIMEOUT = "gum.timeout",
46
-    /**
47
-     * An error which indicates that the track has already been disposed and can
48
-     * no longer be used.
49
-     */
50
-    TRACK_IS_DISPOSED = "track.track_is_disposed",
51
-    /**
52
-     * An error which indicates that the track has no associated MediaStream.
53
-     */
54
-    TRACK_NO_STREAM_FOUND = "track.no_stream_found",
55
-    /**
56
-     * An error which indicates that the requested video resolution is not supported
57
-     * by a webcam.
58
-     */
59
-    UNSUPPORTED_RESOLUTION = "gum.unsupported_resolution"
60
-}
61
-export declare const CONSTRAINT_FAILED = JitsiTrackErrors.CONSTRAINT_FAILED;
62
-export declare const ELECTRON_DESKTOP_PICKER_ERROR = JitsiTrackErrors.ELECTRON_DESKTOP_PICKER_ERROR;
63
-export declare const ELECTRON_DESKTOP_PICKER_NOT_FOUND = JitsiTrackErrors.ELECTRON_DESKTOP_PICKER_NOT_FOUND;
64
-export declare const GENERAL = JitsiTrackErrors.GENERAL;
65
-export declare const NOT_FOUND = JitsiTrackErrors.NOT_FOUND;
66
-export declare const PERMISSION_DENIED = JitsiTrackErrors.PERMISSION_DENIED;
67
-export declare const SCREENSHARING_GENERIC_ERROR = JitsiTrackErrors.SCREENSHARING_GENERIC_ERROR;
68
-export declare const SCREENSHARING_USER_CANCELED = JitsiTrackErrors.SCREENSHARING_USER_CANCELED;
69
-export declare const TIMEOUT = JitsiTrackErrors.TIMEOUT;
70
-export declare const TRACK_IS_DISPOSED = JitsiTrackErrors.TRACK_IS_DISPOSED;
71
-export declare const TRACK_NO_STREAM_FOUND = JitsiTrackErrors.TRACK_NO_STREAM_FOUND;
72
-export declare const UNSUPPORTED_RESOLUTION = JitsiTrackErrors.UNSUPPORTED_RESOLUTION;
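
Since the enum members are plain strings, a typical consumer maps them to user-facing copy. The mapping below is illustrative only; the keys are copied verbatim from the declaration.

    // Sketch: translate JitsiTrackErrors values into UI messages.
    const trackErrorMessages: Record<string, string> = {
        'gum.constraint_failed': 'The requested device settings are not supported.',
        'gum.not_found': 'No matching camera or microphone was found.',
        'gum.permission_denied': 'Permission to use the camera or microphone was denied.',
        'gum.screensharing_user_canceled': 'Screen sharing was cancelled.',
        'gum.timeout': 'Requesting access to your devices timed out.',
        'track.track_is_disposed': 'This track has already been disposed.'
    };

    export function describeTrackError(errorName: string): string {
        return trackErrorMessages[errorName] ?? `Unexpected media error (${errorName}).`;
    }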

+ 0
- 61
types/auto/JitsiTrackEvents.d.ts

@@ -1,61 +0,0 @@
1
-export declare enum JitsiTrackEvents {
2
-    /**
3
-     * The media track was removed from the conference.
4
-     */
5
-    LOCAL_TRACK_STOPPED = "track.stopped",
6
-    /**
7
-     * The audio level of this track has changed.
8
-     * The first argument is a number with audio level value in range [0, 1].
9
-     * The second argument is a <tt>TraceablePeerConnection</tt> which is the peer
10
-     * connection which measured the audio level (one audio track can be added
11
-     * to multiple peer connection at the same time). This argument is optional for
12
-     * local tracks for which we can measure audio level without the peer
13
-     * connection (the value will be <tt>undefined</tt>).
14
-     *
15
-     * NOTE The second argument should be treated as library internal and can be
16
-     * removed at any time.
17
-     */
18
-    TRACK_AUDIO_LEVEL_CHANGED = "track.audioLevelsChanged",
19
-    /**
20
-     * The audio output of the track was changed.
21
-     */
22
-    TRACK_AUDIO_OUTPUT_CHANGED = "track.audioOutputChanged",
23
-    /**
24
-     * A media track mute status was changed.
25
-     */
26
-    TRACK_MUTE_CHANGED = "track.trackMuteChanged",
27
-    /**
28
-     * The video type("camera" or "desktop") of the track was changed.
29
-     */
30
-    TRACK_VIDEOTYPE_CHANGED = "track.videoTypeChanged",
31
-    /**
32
-     * Indicates that the track is not receiving any data even though we expect it
33
-     * to receive data (i.e. the stream is not stopped).
34
-     */
35
-    NO_DATA_FROM_SOURCE = "track.no_data_from_source",
36
-    /**
37
-     * Indicates that the local audio track is not receiving any audio input from
38
-     * the microphone that is currently selected.
39
-     */
40
-    NO_AUDIO_INPUT = "track.no_audio_input",
41
-    /**
42
-     * Event fired whenever video track's streaming changes.
43
-     * First argument is the sourceName of the track and the second is a string indicating if the connection is currently
44
-     * - active - the connection is active.
45
-     * - inactive - the connection is inactive, was intentionally interrupted by the bridge because of low BWE or because
46
-     *   of the endpoint falling out of last N.
47
-     * - interrupted - a network problem occurred.
48
-     * - restoring - the connection was inactive and is restoring now.
49
-     *
50
-     * The current status value can be obtained by calling JitsiRemoteTrack.getTrackStreamingStatus().
51
-     */
52
-    TRACK_STREAMING_STATUS_CHANGED = "track.streaming_status_changed"
53
-}
54
-export declare const LOCAL_TRACK_STOPPED = JitsiTrackEvents.LOCAL_TRACK_STOPPED;
55
-export declare const TRACK_AUDIO_LEVEL_CHANGED = JitsiTrackEvents.TRACK_AUDIO_LEVEL_CHANGED;
56
-export declare const TRACK_AUDIO_OUTPUT_CHANGED = JitsiTrackEvents.TRACK_AUDIO_OUTPUT_CHANGED;
57
-export declare const TRACK_MUTE_CHANGED = JitsiTrackEvents.TRACK_MUTE_CHANGED;
58
-export declare const TRACK_VIDEOTYPE_CHANGED = JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED;
59
-export declare const NO_DATA_FROM_SOURCE = JitsiTrackEvents.NO_DATA_FROM_SOURCE;
60
-export declare const NO_AUDIO_INPUT = JitsiTrackEvents.NO_AUDIO_INPUT;
61
-export declare const TRACK_STREAMING_STATUS_CHANGED = JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED;
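
These, too, are string event names fired on individual track instances. Below is a sketch of wiring a simple audio-level meter; it assumes a track exposes the usual EventEmitter `on()` method (JitsiTrack extends EventEmitter, as seen further down in this diff).

    // Sketch: subscribe to per-track events using the string values removed above.
    type TrackLike = { on(event: string, cb: (...args: any[]) => void): unknown };

    function attachMeter(track: TrackLike): void {
        track.on('track.audioLevelsChanged',            // TRACK_AUDIO_LEVEL_CHANGED
            (level: number) => updateMeter(level));     // level is in [0, 1]

        track.on('track.trackMuteChanged',              // TRACK_MUTE_CHANGED
            () => console.log('mute state changed'));
    }

    function updateMeter(level: number): void {
        console.log(`audio level: ${(level * 100).toFixed(0)}%`);
    }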

+ 0
- 12
types/auto/JitsiTranscriptionStatus.d.ts

@@ -1,12 +0,0 @@
1
-export declare enum JitsiTranscriptionStatus {
2
-    /**
3
-     * The transcription is on.
4
-     */
5
-    ON = "on",
6
-    /**
7
-     * The transcription is off.
8
-     */
9
-    OFF = "off"
10
-}
11
-export declare const ON = JitsiTranscriptionStatus.ON;
12
-export declare const OFF = JitsiTranscriptionStatus.OFF;

+ 0
- 79
types/auto/authenticateAndUpgradeRole.d.ts

@@ -1,79 +0,0 @@
1
-/**
2
- * @typedef {Object} UpgradeRoleError
3
- *
4
- * @property {JitsiConnectionErrors} [connectionError] - One of
5
- * {@link JitsiConnectionErrors} which occurred when trying to connect to the
6
- * XMPP server.
7
- * @property {String} [authenticationError] - One of XMPP error conditions
8
- * returned by Jicofo on authentication attempt. See
9
- * {@link https://xmpp.org/rfcs/rfc3920.html#streams-error}.
10
- * @property {String} [message] - More details about the error.
11
- * @property {Object} [credentials] - The credentials that failed the
12
- * authentication.
13
- * @property {String} [credentials.jid] - The XMPP ID part of the credentials
14
- * that failed the authentication.
15
- * @property {string} [credentials.password] - The password part of the
16
- * credentials that failed the authentication.
17
- *
18
- * NOTE If neither one of the errors is present, then the operation has been
19
- * canceled.
20
- */
21
-/**
22
- * Connects to the XMPP server using the specified credentials and contacts
23
- * Jicofo in order to obtain a session ID (which is then stored in the local
24
- * storage). The user's role of the parent conference will be upgraded to
25
- * moderator (by Jicofo). It's also used to join the conference when starting
26
- * from anonymous domain and only authenticated users are allowed to create new
27
- * rooms.
28
- *
29
- * @param {Object} options
30
- * @param {string} options.id - XMPP user's ID to log in. For example,
31
- * user@xmpp-server.com.
32
- * @param {string} options.password - XMPP user's password to log in with.
33
- * @param {Function} [options.onLoginSuccessful] - Callback called when logging
34
- * into the XMPP server was successful. The next step will be to obtain a new
35
- * session ID from Jicofo and join the MUC using it which will effectively
36
- * upgrade the user's role to moderator.
37
- * @returns {Object} A <tt>thenable</tt> which (1) settles when the process of
38
- * authenticating and upgrading the role of the specified XMPP user finishes and
39
- * (2) has a <tt>cancel</tt> method that allows the caller to interrupt the
40
- * process. If the process finishes successfully, the session ID has been stored
41
- * in the settings and the <tt>thenable</tt> is resolved. If the process
42
- * finishes with failure, the <tt>thenable</tt> is rejected with reason of type
43
- * {@link UpgradeRoleError} which will have either <tt>connectionError</tt> or
44
- * <tt>authenticationError</tt> property set depending on which of the steps has
45
- * failed. If <tt>cancel</tt> is called before the process finishes, then the
46
- * thenable will be rejected with an empty object (i.e. no error property will
47
- * be set on the rejection reason).
48
- */
49
-export default function authenticateAndUpgradeRole({ id, password, onCreateResource, onLoginSuccessful }: {
50
-    id: string;
51
-    password: string;
52
-    onLoginSuccessful?: Function;
53
-}): any;
54
-export type UpgradeRoleError = {
55
-    /**
56
-     * - One of
57
-     * {@link JitsiConnectionErrors } which occurred when trying to connect to the
58
-     * XMPP server.
59
-     */
60
-    connectionError?: any;
61
-    /**
62
-     * - One of XMPP error conditions
63
-     * returned by Jicofo on authentication attempt. See
64
-     * {@link https ://xmpp.org/rfcs/rfc3920.html#streams-error}.
65
-     */
66
-    authenticationError?: string;
67
-    /**
68
-     * - More details about the error.
69
-     */
70
-    message?: string;
71
-    /**
72
-     * - The credentials that failed the
73
-     * authentication.
74
-     */
75
-    credentials?: {
76
-        jid?: string;
77
-        password?: string;
78
-    };
79
-};
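
The return value is a cancellable thenable rather than a plain Promise, so callers usually keep a handle on it to let the UI abort the upgrade. In the sketch below, exposing the function as `conference.authenticateAndUpgradeRole` is an assumption; the option names, the `cancel()` method and the shape of the rejection reason follow the JSDoc above.

    // Sketch: start the auth/role-upgrade flow and return a way to cancel it.
    function upgradeToModerator(conference: any, id: string, password: string): () => void {
        const process = conference.authenticateAndUpgradeRole({
            id,
            password,
            onLoginSuccessful: () => console.log('XMPP login ok, requesting a session from Jicofo')
        });

        process.then(
            () => console.log('Role upgraded to moderator'),
            (reason: { connectionError?: string; authenticationError?: string }) => {
                // An empty rejection reason means the operation was cancelled.
                console.warn('Upgrade failed:',
                    reason.connectionError ?? reason.authenticationError ?? 'cancelled');
            });

        return () => process.cancel();
    }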

+ 0
- 23
types/auto/connection_optimization/external_connect.d.ts

@@ -1,23 +0,0 @@
1
-/**
2
- * Requests the given webservice that will create the connection and will return
3
-     * the necessary details (rid, sid and jid) to attach to this connection and
4
- * start using it. This script can be used for optimizing the connection startup
5
-     * time. The function will send an AJAX request to a webservice that should
6
- * create the bosh session much faster than the client because the webservice
7
-     * can be started on the same machine as the XMPP server.
8
- *
9
-     * NOTE: It's very important to execute this function as early as you can for
10
- * optimal results.
11
- *
12
- * @param webserviceUrl the url for the web service that is going to create the
13
- * connection.
14
- * @param successCallback callback function called with the result of the AJAX
15
-     * request if the request was successful. The callback will receive one
16
- * parameter which will be JS Object with properties - rid, sid and jid. This
17
- * result should be passed to JitsiConnection.attach method in order to use that
18
- * connection.
19
-     * @param error_callback callback function called when the AJAX request fails. This
20
- * callback is going to receive one parameter which is going to be JS error
21
- * object with a reason for failure in it.
22
- */
23
-declare function createConnectionExternally(webserviceUrl: any, successCallback: any, error_callback: any): void;
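
This helper is meant to be loaded and executed before the rest of the library so the BOSH session is already warm when `JitsiConnection.attach()` is called. A sketch of the intended call pattern; the web-service URL is illustrative and the later `attach()` call is only indicated in a comment, as the JSDoc describes.

    // Sketch: pre-create a BOSH session as early as possible, then reuse it later.
    declare function createConnectionExternally(
        webserviceUrl: string,
        successCallback: (result: { rid: string; sid: string; jid: string }) => void,
        errorCallback: (error: Error) => void): void;

    createConnectionExternally(
        'https://example.com/http-pre-bind',                 // illustrative URL
        result => {
            // Once a JitsiConnection exists: connection.attach(result);
            console.log('Pre-created BOSH session', result.sid);
        },
        error => console.warn('External connect failed, falling back to a normal connect', error));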

+ 0
- 133
types/auto/modules/RTC/BridgeChannel.d.ts

@@ -1,133 +0,0 @@
1
-/**
2
- * Handles a WebRTC RTCPeerConnection or a WebSocket instance to communicate
3
- * with the videobridge.
4
- */
5
-export default class BridgeChannel {
6
-    /**
7
-     * Binds "ondatachannel" event listener on the given RTCPeerConnection
8
-     * instance, or creates a WebSocket connection with the videobridge.
9
-     * At least one of both, peerconnection or wsUrl parameters, must be
10
-     * given.
11
-     * @param {RTCPeerConnection} [peerconnection] WebRTC peer connection
12
-     * instance.
13
-     * @param {string} [wsUrl] WebSocket URL.
14
-     * @param {EventEmitter} emitter the EventEmitter instance to use for event emission.
15
-     */
16
-    constructor(peerconnection?: RTCPeerConnection, wsUrl?: string, emitter: any);
17
-    _channel: any;
18
-    _eventEmitter: any;
19
-    _mode: string;
20
-    _areRetriesEnabled: boolean;
21
-    _closedFromClient: boolean;
22
-    _wsUrl: string;
23
-    /**
24
-     * Initializes the web socket channel.
25
-     *
26
-     * @returns {void}
27
-     */
28
-    _initWebSocket(): void;
29
-    /**
30
-     * Starts the websocket connection retries.
31
-     *
32
-     * @returns {void}
33
-     */
34
-    _startConnectionRetries(): void;
35
-    _retryTimeout: NodeJS.Timeout;
36
-    /**
37
-     * Stops the websocket connection retries.
38
-     *
39
-     * @returns {void}
40
-     */
41
-    _stopConnectionRetries(): void;
42
-    /**
43
-     * Retries to establish the websocket connection after the connection was closed by the server.
44
-     *
45
-     * @param {CloseEvent} closeEvent - The close event that triggered the retries.
46
-     * @returns {void}
47
-     */
48
-    _retryWebSocketConnection(closeEvent: CloseEvent): void;
49
-    /**
50
-     * The channel mode.
51
-     * @return {string} "datachannel" or "websocket" (or null if not yet set).
52
-     */
53
-    get mode(): string;
54
-    /**
55
-     * Closes the currently opened channel.
56
-     */
57
-    close(): void;
58
-    /**
59
-     * Whether there is an underlying RTCDataChannel or WebSocket and it's
60
-     * open.
61
-     * @return {boolean}
62
-     */
63
-    isOpen(): boolean;
64
-    /**
65
-     * Sends local stats via the bridge channel.
66
-     * @param {Object} payload The payload of the message.
67
-     * @throws NetworkError/InvalidStateError/Error if the operation fails or if there is no data channel created.
68
-     */
69
-    sendEndpointStatsMessage(payload: any): void;
70
-    /**
71
-     * Sends message via the channel.
72
-     * @param {string} to The id of the endpoint that should receive the
73
-     * message. If "" the message will be sent to all participants.
74
-     * @param  {object} payload The payload of the message.
75
-     * @throws NetworkError or InvalidStateError from RTCDataChannel#send (@see
76
-     * {@link https://developer.mozilla.org/docs/Web/API/RTCDataChannel/send})
77
-     * or from WebSocket#send or Error with "No opened channel" message.
78
-     */
79
-    sendMessage(to: string, payload: object): void;
80
-    /**
81
-     * Sends a "lastN value changed" message via the channel.
82
-     * @param {number} value The new value for lastN. -1 means unlimited.
83
-     */
84
-    sendSetLastNMessage(value: number): void;
85
-    /**
86
-     * Sends a "selected endpoints changed" message via the channel.
87
-     *
88
-     * @param {Array<string>} endpointIds - The ids of the selected endpoints.
89
-     * @throws NetworkError or InvalidStateError from RTCDataChannel#send (@see
90
-     * {@link https://developer.mozilla.org/docs/Web/API/RTCDataChannel/send})
91
-     * or from WebSocket#send or Error with "No opened channel" message.
92
-     */
93
-    sendSelectedEndpointsMessage(endpointIds: Array<string>): void;
94
-    /**
95
-     * Sends a "receiver video constraint" message via the channel.
96
-     * @param {Number} maxFrameHeightPixels the maximum frame height,
97
-     * in pixels, this receiver is willing to receive
98
-     */
99
-    sendReceiverVideoConstraintMessage(maxFrameHeightPixels: number): void;
100
-    /**
101
-     * Sends a 'ReceiverVideoConstraints' message via the bridge channel.
102
-     *
103
-     * @param {ReceiverVideoConstraints} constraints video constraints.
104
-     */
105
-    sendNewReceiverVideoConstraintsMessage(constraints: any): void;
106
-    /**
107
-     * Sends a 'VideoTypeMessage' message via the bridge channel.
108
-     *
109
-     * @param {string} videoType 'camera', 'desktop' or 'none'.
110
-     * @deprecated to be replaced with sendSourceVideoTypeMessage
111
-     */
112
-    sendVideoTypeMessage(videoType: string): void;
113
-    /**
114
-     * Sends a 'VideoTypeMessage' message via the bridge channel.
115
-     *
116
-     * @param {BridgeVideoType} videoType - the video type.
117
-     * @param {SourceName} sourceName - the source name of the video track.
118
-     * @returns {void}
119
-     */
120
-    sendSourceVideoTypeMessage(sourceName: any, videoType: any): void;
121
-    /**
122
-     * Set events on the given RTCDataChannel or WebSocket instance.
123
-     */
124
-    _handleChannel(channel: any): void;
125
-    /**
126
-     * Sends passed object via the channel.
127
-     * @param {object} jsonObject The object that will be sent.
128
-     * @throws NetworkError or InvalidStateError from RTCDataChannel#send (@see
129
-     * {@link https://developer.mozilla.org/docs/Web/API/RTCDataChannel/send})
130
-     * or from WebSocket#send or Error with "No opened channel" message.
131
-     */
132
-    _send(jsonObject: object): void;
133
-}
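
BridgeChannel is internal, but it is worth noting that every send* method above can throw when neither the datachannel nor the websocket is open. A hedged guard sketch:

    // Sketch: guard bridge-channel sends; _send() throws "No opened channel"
    // (or a NetworkError/InvalidStateError from the underlying transport).
    function safeSendEndpointStats(channel: any, payload: object): boolean {
        if (!channel.isOpen()) {
            return false;                                 // nothing to send on yet
        }
        try {
            channel.sendEndpointStatsMessage(payload);
            return true;
        } catch (error) {
            console.warn('Bridge channel send failed', error);
            return false;
        }
    }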

+ 0
- 106
types/auto/modules/RTC/CodecSelection.d.ts

@@ -1,106 +0,0 @@
1
-/**
2
- * This class handles the codec selection mechanism for the conference based on the config.js settings.
3
- * The preferred codec is selected based on the settings and the list of codecs supported by the browser.
4
- * The preferred codec is published in presence which is then used by the other endpoints in the
5
- * conference to pick a supported codec at join time and when the call transitions between p2p and jvb
6
- * connections.
7
- */
8
-export class CodecSelection {
9
-    /**
10
-     * Creates a new instance for a given conference.
11
-     *
12
-     * @param {JitsiConference} conference the conference instance
13
-     * @param {*} options
14
-     * @param {string} options.disabledCodec the codec that needs to be disabled.
15
-     * @param {boolean} options.enforcePreferredCodec whether codec preference has to be
16
-     * enforced even when an endpoint that doesn't support the preferred codec joins the call.
17
-     * Falling back to the standard codec will be skipped when this option is true, endpoints
18
-     * that do not support the preferred codec may not be able to encode/decode video when this happens.
19
-     * @param {string} options.jvbCodec the codec that is preferred on jvb connection.
20
-     * @param {string} options.p2pCodec the codec that is preferred on p2p connection.
21
-     */
22
-    constructor(conference: any, options: any);
23
-    conference: any;
24
-    options: any;
25
-    disabledCodec: {
26
-        H264: string;
27
-        OPUS: string;
28
-        ULPFEC: string;
29
-        VP8: string;
30
-        VP9: string;
31
-    };
32
-    jvbPreferredCodec: string | {
33
-        H264: string;
34
-        OPUS: string;
35
-        ULPFEC: string;
36
-        VP8: string;
37
-        VP9: string;
38
-    };
39
-    p2pPreferredCodec: string | {
40
-        H264: string;
41
-        OPUS: string;
42
-        ULPFEC: string;
43
-        VP8: string;
44
-        VP9: string;
45
-    };
46
-    /**
47
-     * Checks if a given string is a valid video codec mime type.
48
-     *
49
-     * @param {string} codec the codec string that needs to be validated.
50
-     * @returns {CodecMimeType|null} mime type if valid, null otherwise.
51
-     * @private
52
-     */
53
-    private _getCodecMimeType;
54
-    /**
55
-     * Checks if the given codec is supported by the browser.
56
-     *
57
-     * @param {CodecMimeType} preferredCodec codec to be checked.
58
-     * @returns {boolean} true if the given codec is supported, false otherwise.
59
-     * @private
60
-     */
61
-    private _isCodecSupported;
62
-    /**
63
-     * Handles the {@link JitsiConferenceEvents._MEDIA_SESSION_STARTED} event. Codecs need to be
64
-     * configured on the media session that is newly created.
65
-     *
66
-     * @param {JingleSessionPC} mediaSession media session that started.
67
-     * @returns {void}
68
-     * @private
69
-     */
70
-    private _onMediaSessionStarted;
71
-    /**
72
-     * Sets the codec on the media session based on the preferred codec setting and the supported codecs
73
-     * published by the remote participants in their presence.
74
-     *
75
-     * @param {JingleSessionPC} mediaSession session for which the codec selection has to be made.
76
-     * @param {CodecMimeType} preferredCodec preferred codec.
77
-     * @param {CodecMimeType} disabledCodec codec that needs to be disabled.
78
-     */
79
-    _selectPreferredCodec(mediaSession?: any, preferredCodec?: {
80
-        H264: string;
81
-        OPUS: string;
82
-        ULPFEC: string;
83
-        VP8: string;
84
-        VP9: string;
85
-    }, disabledCodec?: {
86
-        H264: string;
87
-        OPUS: string;
88
-        ULPFEC: string;
89
-        VP8: string;
90
-        VP9: string;
91
-    }): void;
92
-    /**
93
-     * Returns the preferred codec for the conference. The preferred codec for the JVB media session
94
-     * is the one that gets published in presence and a comparison is made whenever a participant joins
95
-     * or leaves the call.
96
-     *
97
-     * @returns {CodecMimeType} preferred codec.
98
-     */
99
-    getPreferredCodec(): {
100
-        H264: string;
101
-        OPUS: string;
102
-        ULPFEC: string;
103
-        VP8: string;
104
-        VP9: string;
105
-    };
106
-}
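
The constructor options mirror config.js style settings. A construction sketch; the import path, the `conference` instance and the lowercase codec strings ('vp9', 'vp8', 'h264') are assumptions.

    // Sketch: prefer VP9 on the JVB connection and VP8 for P2P, without
    // enforcing the preference on endpoints that cannot decode it.
    import { CodecSelection } from './modules/RTC/CodecSelection';   // assumed path

    declare const conference: any;                                   // assumed JitsiConference

    const codecSelection = new CodecSelection(conference, {
        disabledCodec: 'h264',
        enforcePreferredCodec: false,
        jvbCodec: 'vp9',
        p2pCodec: 'vp8'
    });

    console.log('JVB preferred codec:', codecSelection.getPreferredCodec());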

+ 0
- 308
types/auto/modules/RTC/JitsiLocalTrack.d.ts

@@ -1,308 +0,0 @@
1
-/**
2
- * Represents a single media track(either audio or video).
3
- * One <tt>JitsiLocalTrack</tt> corresponds to one WebRTC MediaStreamTrack.
4
- */
5
-export default class JitsiLocalTrack extends JitsiTrack {
6
-    /**
7
-     * Constructs a new JitsiLocalTrack instance.
8
-     *
9
-     * @constructor
10
-     * @param {Object} trackInfo
11
-     * @param {number} trackInfo.rtcId - The ID assigned by the RTC module.
12
-     * @param {Object} trackInfo.stream - The WebRTC MediaStream, parent of the track.
13
-     * @param {Object} trackInfo.track - The underlying WebRTC MediaStreamTrack for new JitsiLocalTrack.
14
-     * @param {string} trackInfo.mediaType - The MediaType of the JitsiLocalTrack.
15
-     * @param {string} trackInfo.videoType - The VideoType of the JitsiLocalTrack.
16
-     * @param {Array<Object>} trackInfo.effects - The effects to be applied to the JitsiLocalTrack.
17
-     * @param {number} trackInfo.resolution - The video resolution if it's a video track.
18
-     * @param {string} trackInfo.deviceId - The ID of the local device for this track.
19
-     * @param {string} trackInfo.facingMode - The camera facing mode used in the getUserMedia call (for mobile only).
20
-     * @param {sourceId} trackInfo.sourceId - The id of the desktop sharing source. NOTE: defined for desktop sharing
21
-     * tracks only.
22
-     */
23
-    constructor({ deviceId, facingMode, mediaType, resolution, rtcId, sourceId, sourceType, stream, track, videoType, effects }: {
24
-        rtcId: number;
25
-        stream: any;
26
-        track: any;
27
-        mediaType: string;
28
-        videoType: string;
29
-        effects: Array<any>;
30
-        resolution: number;
31
-        deviceId: string;
32
-        facingMode: string;
33
-        sourceId: any;
34
-    });
35
-    _setEffectInProgress: boolean;
36
-    /**
37
-     * Track metadata.
38
-     */
39
-    metadata: {
40
-        displaySurface?: any;
41
-        timestamp: number;
42
-    };
43
-    /**
44
-     * The ID assigned by the RTC module on instance creation.
45
-     *
46
-     * @type {number}
47
-     */
48
-    rtcId: number;
49
-    sourceId: any;
50
-    sourceType: any;
51
-    resolution: any;
52
-    maxEnabledResolution: number;
53
-    _constraints: any;
54
-    deviceId: string;
55
-    /**
56
-     * The <tt>Promise</tt> which represents the progress of a previously
57
-     * queued/scheduled {@link _setMuted} (from the point of view of
58
-     * {@link _queueSetMuted}).
59
-     *
60
-     * @private
61
-     * @type {Promise}
62
-     */
63
-    private _prevSetMuted;
64
-    /**
65
-     * The facing mode of the camera from which this JitsiLocalTrack
66
-     * instance was obtained.
67
-     *
68
-     * @private
69
-     * @type {CameraFacingMode|undefined}
70
-     */
71
-    private _facingMode;
72
-    _trackEnded: boolean;
73
-    /**
74
-     * Indicates whether data has been sent or not.
75
-     */
76
-    _hasSentData: boolean;
77
-    /**
78
-     * Used only for detection of audio problems. We want to check only once
79
-     * whether the track is sending data or not. This flag is set to false
80
-     * after the check.
81
-     */
82
-    _testDataSent: boolean;
83
-    _realDeviceId: string;
84
-    _sourceName: string;
85
-    _trackMutedTS: number;
86
-    _onDeviceListWillChange: (devices: any) => void;
87
-    _onAudioOutputDeviceChanged: any;
88
-    /**
89
-     * Adds stream to conference and marks it as "unmute" operation.
90
-     *
91
-     * @private
92
-     * @returns {Promise}
93
-     */
94
-    private _addStreamToConferenceAsUnmute;
95
-    /**
96
-     * Fires NO_DATA_FROM_SOURCE event and logs it to analytics and callstats.
97
-     *
98
-     * @private
99
-     * @returns {void}
100
-     */
101
-    private _fireNoDataFromSourceEvent;
102
-    /**
103
-     * Sets handlers to the MediaStreamTrack object that will detect camera issues.
104
-     *
105
-     * @private
106
-     * @returns {void}
107
-     */
108
-    private _initNoDataFromSourceHandlers;
109
-    /**
110
-     * Returns true if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
111
-     *
112
-     * @private
113
-     * @returns {boolean} - True if no data from source events are enabled for this JitsiLocalTrack and false otherwise.
114
-     */
115
-    private _isNoDataFromSourceEventsEnabled;
116
-    /**
117
-     * Initializes a new Promise to execute {@link #_setMuted}. May be called multiple times in a row and the
118
-     * invocations of {@link #_setMuted} and, consequently, {@link #mute} and/or {@link #unmute} will be resolved in a
119
-     * serialized fashion.
120
-     *
121
-     * @param {boolean} muted - The value to invoke <tt>_setMuted</tt> with.
122
-     * @private
123
-     * @returns {Promise}
124
-     */
125
-    private _queueSetMuted;
126
-    /**
127
-     * Removes stream from conference and marks it as "mute" operation.
128
-     *
129
-     * @param {Function} successCallback - Callback that will be called when the operation is successful.
130
-     * @param {Function} errorCallback - Callback that will be called when the operation fails.
131
-     * @private
132
-     * @returns {Promise}
133
-     */
134
-    private _removeStreamFromConferenceAsMute;
135
-    /**
136
-     * Sends mute status for a track to conference if any.
137
-     *
138
-     * @param {boolean} mute - If track is muted.
139
-     * @private
140
-     * @returns {void}
141
-     */
142
-    private _sendMuteStatus;
143
-    /**
144
-     * Mutes / unmutes this track.
145
-     *
146
-     * @param {boolean} muted - If <tt>true</tt>, this track will be muted; otherwise, this track will be unmuted.
147
-     * @private
148
-     * @returns {Promise}
149
-     */
150
-    private _setMuted;
151
-    /**
152
-     * Sets real device ID by comparing track information with device information. This is a temporary solution until
153
-     * getConstraints() method will be implemented in browsers.
154
-     *
155
-     * @param {MediaDeviceInfo[]} devices - The list of devices obtained from enumerateDevices() call.
156
-     * @private
157
-     * @returns {void}
158
-     */
159
-    private _setRealDeviceIdFromDeviceList;
160
-    storedMSID: string;
161
-    /**
162
-     * Starts the effect process and returns the modified stream.
163
-     *
164
-     * @param {Object} effect - Represents effect instance
165
-     * @private
166
-     * @returns {void}
167
-     */
168
-    private _startStreamEffect;
169
-    _streamEffect: any;
170
-    _originalStream: any;
171
-    /**
172
-     * Stops the effect process and returns the original stream.
173
-     *
174
-     * @private
175
-     * @returns {void}
176
-     */
177
-    private _stopStreamEffect;
178
-    /**
179
-     * Switches the camera facing mode if the WebRTC implementation supports the custom MediaStreamTrack._switchCamera
180
-     * method. Currently, the method in question is implemented in react-native-webrtc only. When such a WebRTC
181
-     * implementation is executing, the method is the preferred way to switch between the front/user-facing and the
182
-     * back/environment-facing cameras because it will likely be (as is the case of react-native-webrtc) noticeably
183
-     * faster than creating a new MediaStreamTrack via a new getUserMedia call with the switched facingMode constraint
184
-     * value. Moreover, the approach with a new getUserMedia call may not even work: WebRTC on Android and iOS is
185
-     * either very slow to open the camera a second time or plainly freezes attempting to do that.
186
-     *
187
-     * @returns {void}
188
-     */
189
-    _switchCamera(): void;
190
-    /**
191
-     * Stops the currently used effect (if there is one) and starts the passed effect (if there is one).
192
-     *
193
-     * @param {Object|undefined} effect - The new effect to be set.
194
-     * @private
195
-     * @returns {void}
196
-     */
197
-    private _switchStreamEffect;
198
-    /**
199
-     * Returns facing mode for video track from camera. For other cases (e.g. audio track or 'desktop' video track)
200
-     * returns undefined.
201
-     *
202
-     * @returns {CameraFacingMode|undefined}
203
-     */
204
-    getCameraFacingMode(): CameraFacingMode | undefined;
205
-    /**
206
-     * Returns device id associated with track.
207
-     *
208
-     * @returns {string}
209
-     */
210
-    getDeviceId(): string;
211
-    /**
212
-     * Get the duration of the track.
213
-     *
214
-     * @returns {Number} the duration of the track in seconds
215
-     */
216
-    getDuration(): number;
217
-    /**
218
-     * Returns the participant id which owns the track.
219
-     *
220
-     * @returns {string} the id of the participant. It corresponds to the
221
-     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
222
-     */
223
-    getParticipantId(): string;
224
-    /**
225
-     * Returns the source name associated with the jitsi track.
226
-     *
227
-     * @returns {string | null} source name
228
-     */
229
-    getSourceName(): string | null;
230
-    /**
231
-     * Returns if associated MediaStreamTrack is in the 'ended' state
232
-     *
233
-     * @returns {boolean}
234
-     */
235
-    isEnded(): boolean;
236
-    /**
237
-     * Returns <tt>true</tt> - if the stream is muted and <tt>false</tt> otherwise.
238
-     *
239
-     * @returns {boolean} <tt>true</tt> - if the stream is muted and <tt>false</tt> otherwise.
240
-     */
241
-    isMuted(): boolean;
242
-    /**
243
-     * Checks whether the attached MediaStream is receiving data from source or not. If the stream property is null
244
-     * (because of mute or another reason) this method will return false.
245
-     * NOTE: This method doesn't indicate a problem with the streams directly. For example, the method will return
246
-     * false when the video is muted or when the user has disposed the track.
247
-     *
248
-     * @returns {boolean} true if the stream is receiving data and false otherwise.
249
-     */
250
-    isReceivingData(): boolean;
251
-    /**
252
-     * Asynchronously mutes this track.
253
-     *
254
-     * @returns {Promise}
255
-     */
256
-    mute(): Promise<any>;
257
-    /**
258
-     * Handles bytes sent statistics. NOTE: used only for audio tracks to detect audio issues.
259
-     *
260
-     * @param {TraceablePeerConnection} tpc - The peerconnection that is reporting the bytes sent stat.
261
-     * @param {number} bytesSent - The new value.
262
-     * @returns {void}
263
-     */
264
-    onByteSentStatsReceived(tpc: any, bytesSent: number): void;
265
-    /**
266
-     * Sets the JitsiConference object associated with the track. This is a temporary solution.
267
-     *
268
-     * @param conference - JitsiConference object.
269
-     * @returns {void}
270
-     */
271
-    setConference(conference: any): void;
272
-    /**
273
-     * Sets the effect and switches between the modified stream and original one.
274
-     *
275
-     * @param {Object} effect - Represents the effect instance to be used.
276
-     * @returns {Promise}
277
-     */
278
-    setEffect(effect: any): Promise<any>;
279
-    /**
280
-     * Sets the source name to be used for signaling the jitsi track.
281
-     *
282
-     * @param {string} name The source name.
283
-     */
284
-    setSourceName(name: string): void;
285
-    /**
286
-     * Stops the associated MediaStream.
287
-     *
288
-     * @returns {void}
289
-     */
290
-    stopStream(): void;
291
-    /**
292
-     * Indicates that we are executing {@link #stopStream} i.e.
293
-     * {@link RTCUtils#stopMediaStream} for the <tt>MediaStream</tt>
294
-     * associated with this <tt>JitsiTrack</tt> instance.
295
-     *
296
-     * @private
297
-     * @type {boolean}
298
-     */
299
-    private _stopStreamInProgress;
300
-    /**
301
-     * Asynchronously unmutes this track.
302
-     *
303
-     * @returns {Promise}
304
-     */
305
-    unmute(): Promise<any>;
306
-}
307
-import JitsiTrack from "./JitsiTrack";
308
-import CameraFacingMode from "../../service/RTC/CameraFacingMode";
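
mute(), unmute() and setEffect() are all asynchronous and internally serialised (see _queueSetMuted above), so callers should await them in order. A usage sketch; how the local track was created is left out.

    // Sketch: toggle a local track's mute state and swap its effect.
    async function toggleAndApplyEffect(localTrack: any, effect?: object): Promise<void> {
        if (localTrack.isMuted()) {
            await localTrack.unmute();
        } else {
            await localTrack.mute();
        }

        // Passing undefined switches back to the original stream
        // (see _switchStreamEffect above).
        await localTrack.setEffect(effect);

        console.log('device:', localTrack.getDeviceId(),
            'facing mode:', localTrack.getCameraFacingMode());
    }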

+ 0
- 173
types/auto/modules/RTC/JitsiRemoteTrack.d.ts

@@ -1,173 +0,0 @@
1
-/**
2
- * Represents a single media track (either audio or video).
3
- */
4
-export default class JitsiRemoteTrack extends JitsiTrack {
5
-    /**
6
-     * Creates new JitsiRemoteTrack instance.
7
-     * @param {RTC} rtc the RTC service instance.
8
-     * @param {JitsiConference} conference the conference to which this track
9
-     *        belongs to
10
-     * @param {string} ownerEndpointId the endpoint ID of the track owner
11
-     * @param {MediaStream} stream WebRTC MediaStream, parent of the track
12
-     * @param {MediaStreamTrack} track underlying WebRTC MediaStreamTrack for
13
-     *        the new JitsiRemoteTrack
14
-     * @param {MediaType} mediaType the type of the media
15
-     * @param {VideoType} videoType the type of the video if applicable
16
-     * @param {number} ssrc the SSRC number of the Media Stream
17
-     * @param {boolean} muted the initial muted state
18
-     * @param {boolean} isP2P indicates whether or not this track belongs to a
19
-     * P2P session
20
-     * @param {String} sourceName the source name signaled for the track
21
-     * @throws {TypeError} if <tt>ssrc</tt> is not a number.
22
-     * @constructor
23
-     */
24
-    constructor(rtc: any, conference: any, ownerEndpointId: string, stream: MediaStream, track: MediaStreamTrack, mediaType: any, videoType: any, ssrc: number, muted: boolean, isP2P: boolean, sourceName: string);
25
-    rtc: any;
26
-    ssrc: number;
27
-    ownerEndpointId: string;
28
-    muted: boolean;
29
-    isP2P: boolean;
30
-    _sourceName: string;
31
-    _trackStreamingStatus: any;
32
-    _trackStreamingStatusImpl: TrackStreamingStatusImpl;
33
-    /**
34
-     * This holds the timestamp indicating when remote video track entered forwarded sources set. Track entering
35
-     * forwardedSources will have streaming status restoring and when we start receiving video will become active,
36
-     * but if video is not received for certain time {@link DEFAULT_RESTORING_TIMEOUT} that track streaming status
37
-     * will become interrupted.
38
-     */
39
-    _enteredForwardedSourcesTimestamp: number;
40
-    hasBeenMuted: boolean;
41
-    _containerHandlers: {};
42
-    /**
43
-     * Attaches the track handlers.
44
-     *
45
-     * @returns {void}
46
-     */
47
-    _bindTrackHandlers(): void;
48
-    /**
49
-     * Overrides addEventListener method to init TrackStreamingStatus instance when there are listeners for the
50
-     * {@link JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED} event.
51
-     *
52
-     * @param {string} event - event name
53
-     * @param {function} handler - event handler
54
-     */
55
-    _addEventListener(event: string, handler: Function): void;
56
-    /**
57
-     * Overrides removeEventListener method to dispose TrackStreamingStatus instance.
58
-     *
59
-     * @param {string} event - event name
60
-     * @param {function} handler - event handler
61
-     */
62
-    _removeEventListener(event: string, handler: Function): void;
63
-    /**
64
-     * Callback invoked when the track is muted. Emits an event notifying
65
-     * listeners of the mute event.
66
-     *
67
-     * @private
68
-     * @returns {void}
69
-     */
70
-    private _onTrackMute;
71
-    /**
72
-     * Callback invoked when the track is unmuted. Emits an event notifying
73
-     * listeners of the mute event.
74
-     *
75
-     * @private
76
-     * @returns {void}
77
-     */
78
-    private _onTrackUnmute;
79
-    /**
80
-     * Sets current muted status and fires an events for the change.
81
-     * @param value the muted status.
82
-     */
83
-    setMute(value: any): void;
84
-    /**
85
-     * Returns the current muted status of the track.
86
-     * @returns {boolean|*|JitsiRemoteTrack.muted} <tt>true</tt> if the track is
87
-     * muted and <tt>false</tt> otherwise.
88
-     */
89
-    isMuted(): boolean | any | any;
90
-    /**
91
-     * Returns the participant id which owns the track.
92
-     *
93
-     * @returns {string} the id of the participant. It corresponds to the
94
-     * Colibri endpoint id/MUC nickname in case of Jitsi-meet.
95
-     */
96
-    getParticipantId(): string;
97
-    /**
98
-     * Returns the synchronization source identifier (SSRC) of this remote
99
-     * track.
100
-     *
101
-     * @returns {number} the SSRC of this remote track.
102
-     */
103
-    getSSRC(): number;
104
-    /**
105
-     * Returns the tracks source name
106
-     *
107
-     * @returns {string} the track's source name
108
-     */
109
-    getSourceName(): string;
110
-    /**
111
-     * Changes the video type of the track.
112
-     *
113
-     * @param {string} type - The new video type("camera", "desktop").
114
-     */
115
-    _setVideoType(type: string): void;
116
-    /**
117
-     * Handles track play events.
118
-     */
119
-    _playCallback(): void;
120
-    /**
121
-     * An event handler for events triggered by the attached container.
122
-     *
123
-     * @param {string} type - The type of the event.
124
-     */
125
-    _containerEventHandler(type: string): void;
126
-    /**
127
-     * Returns a string with a description of the current status of the track.
128
-     *
129
-     * @returns {string}
130
-     */
131
-    _getStatus(): string;
132
-    /**
133
-     * Initializes trackStreamingStatusImpl.
134
-     */
135
-    _initTrackStreamingStatus(): void;
136
-    /**
137
-     * Disposes trackStreamingStatusImpl and clears trackStreamingStatus.
138
-     */
139
-    _disposeTrackStreamingStatus(): void;
140
-    /**
141
-     * Updates track's streaming status.
142
-     *
143
-     * @param {string} state the current track streaming state. {@link TrackStreamingStatus}.
144
-     */
145
-    _setTrackStreamingStatus(status: any): void;
146
-    /**
147
-     * Returns track's streaming status.
148
-     *
149
-     * @returns {string} the streaming status <tt>TrackStreamingStatus</tt> of the track. Returns null
150
-     * if trackStreamingStatusImpl hasn't been initialized.
151
-     *
152
-     * {@link TrackStreamingStatus}.
153
-     */
154
-    getTrackStreamingStatus(): string;
155
-    /**
156
-     * Clears the timestamp of when the track entered forwarded sources.
157
-     */
158
-    _clearEnteredForwardedSourcesTimestamp(): void;
159
-    /**
160
-     * Updates the timestamp of when the track entered forwarded sources.
161
-     *
162
-     * @param {number} timestamp the time in millis
163
-     */
164
-    _setEnteredForwardedSourcesTimestamp(timestamp: number): void;
165
-    /**
166
-     * Returns the timestamp of when the track entered forwarded sources.
167
-     *
168
-     * @returns {number} the time in millis
169
-     */
170
-    _getEnteredForwardedSourcesTimestamp(): number;
171
-}
172
-import JitsiTrack from "./JitsiTrack";
173
-import TrackStreamingStatusImpl from "../connectivity/TrackStreamingStatus";
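
Track streaming status is only computed once a listener for TRACK_STREAMING_STATUS_CHANGED is attached (see _addEventListener above), so subscribe before reading it. The sketch assumes the remote track arrives from a conference TRACK_ADDED handler and that the event payload follows the JSDoc in JitsiTrackEvents above.

    // Sketch: watch a remote track's streaming status.
    function watchStreamingStatus(remoteTrack: any): void {
        if (remoteTrack.isLocal()) {
            return;                                       // only remote tracks carry this status
        }

        remoteTrack.on('track.streaming_status_changed',  // TRACK_STREAMING_STATUS_CHANGED
            (sourceName: string, status: string) => {
                // status is one of: active, inactive, interrupted, restoring.
                console.log(`source ${sourceName} is now ${status}`);
            });

        console.log('initial status:', remoteTrack.getTrackStreamingStatus());
    }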

+ 0
- 235
types/auto/modules/RTC/JitsiTrack.d.ts

@@ -1,235 +0,0 @@
1
-/// <reference types="node" />
2
-/**
3
- * Represents a single media track (either audio or video).
4
- */
5
-export default class JitsiTrack extends EventEmitter {
6
-    /**
7
-     * Represents a single media track (either audio or video).
8
-     * @constructor
9
-     * @param conference the rtc instance
10
-     * @param stream the WebRTC MediaStream instance
11
-     * @param track the WebRTC MediaStreamTrack instance, must be part of
12
-     * the given <tt>stream</tt>.
13
-     * @param streamInactiveHandler the function that will handle
14
-     *        onended/oninactive events of the stream.
15
-     * @param trackMediaType the media type of the JitsiTrack
16
-     * @param videoType the VideoType for this track if any
17
-     */
18
-    constructor(conference: any, stream: any, track: any, streamInactiveHandler: any, trackMediaType: any, videoType: any);
19
-    addEventListener: (eventName: string | symbol, listener: (...args: any[]) => void) => JitsiTrack;
20
-    removeEventListener: (eventName: string | symbol, listener: (...args: any[]) => void) => JitsiTrack;
21
-    /**
22
-     * Array with the HTML elements that are displaying the streams.
23
-     * @type {Array}
24
-     */
25
-    containers: any[];
26
-    conference: any;
27
-    audioLevel: number;
28
-    type: any;
29
-    track: any;
30
-    videoType: any;
31
-    handlers: Map<any, any>;
32
-    /**
33
-     * Indicates whether this JitsiTrack has been disposed. If true, this
34
-     * JitsiTrack is to be considered unusable and operations involving it
35
-     * are to fail (e.g. {@link JitsiConference#addTrack(JitsiTrack)},
36
-     * {@link JitsiConference#removeTrack(JitsiTrack)}).
37
-     * @type {boolean}
38
-     */
39
-    disposed: boolean;
40
-    /**
41
-     * The inactive handler which will be triggered when the underlying
42
-     * <tt>MediaStream</tt> ends.
43
-     *
44
-     * @private
45
-     * @type {Function}
46
-     */
47
-    private _streamInactiveHandler;
48
-    /**
49
-     * Adds onended/oninactive handler to a MediaStream or a MediaStreamTrack.
50
-     * Firefox doesn't fire an inactive event on the MediaStream; instead it fires
51
-     * an onended event on the MediaStreamTrack.
52
-     * @param {Function} handler the handler
53
-     */
54
-    _addMediaStreamInactiveHandler(handler: Function): void;
55
-    /**
56
-     * Sets handler to the WebRTC MediaStream or MediaStreamTrack object
57
-     * depending on the passed type.
58
-     * @param {string} type the type of the handler that is going to be set
59
-     * @param {Function} handler the handler.
60
-     */
61
-    _setHandler(type: string, handler: Function): void;
62
-    /**
63
-     * Unregisters all event handlers bound to the underlying media stream/track
64
-     * @private
65
-     */
66
-    private _unregisterHandlers;
67
-    /**
68
-     * Sets the stream property of JitsiTrack object and sets all stored
69
-     * handlers to it.
70
-     *
71
-     * @param {MediaStream} stream the new stream.
72
-     * @protected
73
-     */
74
-    protected _setStream(stream: MediaStream): void;
75
-    stream: any;
76
-    /**
77
-     * Returns the video type (camera or desktop) of this track.
78
-     */
79
-    getVideoType(): any;
80
-    /**
81
-     * Returns the type (audio or video) of this track.
82
-     */
83
-    getType(): any;
84
-    /**
85
-     * Check if this is an audio track.
86
-     */
87
-    isAudioTrack(): boolean;
88
-    /**
89
-     * Checks whether the underlying WebRTC <tt>MediaStreamTrack</tt> is muted
90
-     * according to its 'muted' field status.
91
-     * @return {boolean} <tt>true</tt> if the underlying
92
-     * <tt>MediaStreamTrack</tt> is muted or <tt>false</tt> otherwise.
93
-     */
94
-    isWebRTCTrackMuted(): boolean;
95
-    /**
96
-     * Check if this is a video track.
97
-     */
98
-    isVideoTrack(): boolean;
99
-    /**
100
-     * Checks whether this is a local track.
101
-     * @abstract
102
-     * @return {boolean} 'true' if it's a local track or 'false' otherwise.
103
-     */
104
-    isLocal(): boolean;
105
-    /**
106
-     * Check whether this is a local audio track.
107
-     *
108
-     * @return {boolean} -  true if track represents a local audio track, false otherwise.
109
-     */
110
-    isLocalAudioTrack(): boolean;
111
-    /**
112
-     * Returns the WebRTC MediaStream instance.
113
-     */
114
-    getOriginalStream(): any;
115
-    /**
116
-     * Returns the ID of the underlying WebRTC Media Stream(if any)
117
-     * @returns {String|null}
118
-     */
119
-    getStreamId(): string | null;
120
-    /**
121
-     * Return the underlying WebRTC MediaStreamTrack
122
-     * @returns {MediaStreamTrack}
123
-     */
124
-    getTrack(): MediaStreamTrack;
125
-    /**
126
-     * Return the underlying WebRTC MediaStreamTrack label
127
-     * @returns {string}
128
-     */
129
-    getTrackLabel(): string;
130
-    /**
131
-     * Returns the ID of the underlying WebRTC MediaStreamTrack(if any)
132
-     * @returns {String|null}
133
-     */
134
-    getTrackId(): string | null;
135
-    /**
136
-     * Return a meaningful usage label for this track depending on its media and
137
-     * eventual video type.
138
-     * @returns {string}
139
-     */
140
-    getUsageLabel(): string;
141
-    /**
142
-     * Eventually will trigger RTCEvents.TRACK_ATTACHED event.
143
-     * @param container the video/audio container to which this stream is
144
-     *        attached and for which event will be fired.
145
-     * @private
146
-     */
147
-    private _maybeFireTrackAttached;
148
-    /**
149
-     * Attaches the MediaStream of this track to an HTML container.
150
-     * Adds the container to the list of containers that are displaying the
151
-     * track.
152
-     *
153
-     * @param container the HTML container which can be 'video' or 'audio'
154
-     * element.
155
-     *
156
-     * @returns {void}
157
-     */
158
-    attach(container: any): void;
159
-    /**
160
-     * Removes this JitsiTrack from the passed HTML container.
161
-     *
162
-     * @param container the HTML container to detach from this JitsiTrack. If
163
-     * <tt>null</tt> or <tt>undefined</tt>, all containers are removed. A
164
-     * container can be a 'video', 'audio' or 'object' HTML element instance to
165
-     * which this JitsiTrack is currently attached.
166
-     */
167
-    detach(container: any): void;
168
-    /**
169
-     * Called when the track has been attached to a new container.
170
-     *
171
-     * @param {HTMLElement} container the HTML container which can be 'video' or
172
-     * 'audio' element.
173
-     * @private
174
-     */
175
-    private _onTrackAttach;
176
-    /**
177
-     * Called when the track has been detached from a container.
178
-     *
179
-     * @param {HTMLElement} container the HTML container which can be 'video' or
180
-     * 'audio' element.
181
-     * @private
182
-     */
183
-    private _onTrackDetach;
184
-    /**
185
-     * Attach time to first media tracker only if there is conference and only
186
-     * for the first element.
187
-     *
188
-     * @param {HTMLElement} container the HTML container which can be 'video' or
189
-     * 'audio' element.
190
-     * @private
191
-     */
192
-    private _attachTTFMTracker;
193
-    /**
194
-     * Removes attached event listeners.
195
-     *
196
-     * @returns {Promise}
197
-     */
198
-    dispose(): Promise<any>;
199
-    /**
200
-     * Returns id of the track.
201
-     * @returns {string|null} id of the track or null if this is fake track.
202
-     */
203
-    getId(): string | null;
204
-    /**
205
-     * Checks whether the MediaStream is active/not ended.
206
-     * When the active state cannot be checked we have no information, so
207
-     * the stream is reported as active (this is the case on Firefox).
208
-     * @returns {boolean} whether MediaStream is active.
209
-     */
210
-    isActive(): boolean;
211
-    /**
212
-     * Sets the audio level for the stream
213
-     * @param {number} audioLevel value between 0 and 1
214
-     * @param {TraceablePeerConnection} [tpc] the peerconnection instance which
215
-     * is source for the audio level. It can be <tt>undefined</tt> for
216
-     * a local track if the audio level was measured outside of the
217
-     * peerconnection (see /modules/statistics/LocalStatsCollector.js).
218
-     */
219
-    setAudioLevel(audioLevel: number, tpc?: any): void;
220
-    /**
221
-     * Returns the msid of the stream attached to the JitsiTrack object or null
222
-     * if no stream is attached.
223
-     */
224
-    getMSID(): string;
225
-    /**
226
-     * Sets new audio output device for track's DOM elements. Video tracks are
227
-     * ignored.
228
-     * @param {string} audioOutputDeviceId - id of 'audiooutput' device from
229
-     *      navigator.mediaDevices.enumerateDevices(), '' for default device
230
-     * @emits JitsiTrackEvents.TRACK_AUDIO_OUTPUT_CHANGED
231
-     * @returns {Promise}
232
-     */
233
-    setAudioOutput(audioOutputDeviceId: string): Promise<any>;
234
-}
235
-import EventEmitter from "events";
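For context, the JitsiTrack surface deleted above is mostly about wiring a track into the DOM and tearing it down again. A minimal usage sketch follows; the JitsiMeetJS global and its createLocalTracks() factory are assumptions used for illustration, not part of this declaration file.

// Hedged sketch: attach a local video track to a <video> element and return a cleanup callback.
async function showLocalVideo(container: HTMLVideoElement): Promise<() => Promise<void>> {
    const JitsiMeetJS: any = (window as any).JitsiMeetJS; // assumed global bundle
    const [ track ] = await JitsiMeetJS.createLocalTracks({ devices: [ 'video' ] });

    if (track.isLocal() && track.isVideoTrack()) {
        track.attach(container); // eventually fires RTCEvents.TRACK_ATTACHED
    }

    return async () => {
        track.detach(container); // drop this container only
        await track.dispose();   // removes listeners, resolves when done
    };
}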

+ 0  - 78    types/auto/modules/RTC/MockClasses.d.ts

@@ -1,78 +0,0 @@
1
-/**
2
- * Mock {@link TraceablePeerConnection} - add things as needed, but only things useful for all tests.
3
- */
4
-export class MockPeerConnection {
5
-    /**
6
-     * Constructor.
7
-     *
8
-     * @param {string} id RTC id
9
-     * @param {boolean} usesUnifiedPlan
10
-     */
11
-    constructor(id: string, usesUnifiedPlan: boolean);
12
-    id: string;
13
-    _usesUnifiedPlan: boolean;
14
-    /**
15
-     * {@link TraceablePeerConnection.localDescription}.
16
-     *
17
-     * @returns {Object}
18
-     */
19
-    get localDescription(): any;
20
-    /**
21
-     * {@link TraceablePeerConnection.remoteDescription}.
22
-     *
23
-     * @returns {Object}
24
-     */
25
-    get remoteDescription(): any;
26
-    /**
27
-     * {@link TraceablePeerConnection.createAnswer}.
28
-     *
29
-     * @returns {Promise<Object>}
30
-     */
31
-    createAnswer(): Promise<any>;
32
-    /**
33
-     * {@link TraceablePeerConnection.processLocalSdpForTransceiverInfo}.
34
-     *
35
-     * @returns {void}
36
-     */
37
-    processLocalSdpForTransceiverInfo(): void;
38
-    /**
39
-     * {@link TraceablePeerConnection.setLocalDescription}.
40
-     *
41
-     * @returns {Promise<void>}
42
-     */
43
-    setLocalDescription(): Promise<void>;
44
-    /**
45
-     * {@link TraceablePeerConnection.setRemoteDescription}.
46
-     *
47
-     * @returns {Promise<void>}
48
-     */
49
-    setRemoteDescription(): Promise<void>;
50
-    /**
51
-     * {@link TraceablePeerConnection.setSenderVideoConstraints}.
52
-     */
53
-    setSenderVideoConstraints(): void;
54
-    /**
55
-     * {@link TraceablePeerConnection.setVideoTransferActive}.
56
-     */
57
-    setVideoTransferActive(): boolean;
58
-    /**
59
-     * {@link TraceablePeerConnection.usesUnifiedPlan}.
60
-     */
61
-    usesUnifiedPlan(): boolean;
62
-    /**
63
-     * {@link TraceablePeerConnection.getLocalVideoTracks}.
64
-     */
65
-    getLocalVideoTracks(): any[];
66
-}
67
-/**
68
- * Mock {@link RTC} - add things as needed, but only things useful for all tests.
69
- */
70
-export class MockRTC {
71
-    /**
72
-     * {@link RTC.createPeerConnection}.
73
-     *
74
-     * @returns {MockPeerConnection}
75
-     */
76
-    createPeerConnection(): MockPeerConnection;
77
-    pc: MockPeerConnection;
78
-}
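The two mocks above are only meant for specs. The sketch below shows the minimal way they could be exercised; the resolved value of createAnswer() is not specified by the declaration and is treated as opaque here.

// Hedged sketch: exercising the mocks directly, without assuming a particular test framework.
import { MockPeerConnection, MockRTC } from './MockClasses';

const rtc = new MockRTC();
const pc = rtc.createPeerConnection();

console.assert(pc instanceof MockPeerConnection, 'createPeerConnection() should hand out the mock');

// A standalone mock can also be built directly; the id and the unified-plan flag
// are the only constructor inputs declared above.
const p2pPc = new MockPeerConnection('p2p-1', true);

p2pPc.createAnswer().then(answer => console.log('mock answer', answer));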

+ 0  - 459    types/auto/modules/RTC/RTC.d.ts

@@ -1,459 +0,0 @@
1
-/**
2
- *
3
- */
4
-export default class RTC extends Listenable {
5
-    /**
6
-     * Exposes the private helper for converting a WebRTC MediaStream to a
7
-     * JitsiLocalTrack.
8
-     *
9
-     * @param {Array<Object>} tracksInfo
10
-     * @returns {Array<JitsiLocalTrack>}
11
-     */
12
-    static createLocalTracks(tracksInfo: Array<any>): Array<JitsiLocalTrack>;
13
-    /**
14
-     * Creates the local MediaStreams.
15
-     * @param {object} [options] Optional parameters.
16
-     * @param {array} options.devices The devices that will be requested.
17
-     * @param {string} options.resolution Resolution constraints.
18
-     * @param {string} options.cameraDeviceId
19
-     * @param {string} options.micDeviceId
20
-     * @returns {*} Promise object that will receive the new JitsiTracks
21
-     */
22
-    static obtainAudioAndVideoPermissions(options?: {
23
-        devices: any[];
24
-        resolution: string;
25
-        cameraDeviceId: string;
26
-        micDeviceId: string;
27
-    }): any;
28
-    /**
29
-     *
30
-     * @param eventType
31
-     * @param listener
32
-     */
33
-    static addListener(eventType: any, listener: any): void;
34
-    /**
35
-     *
36
-     * @param eventType
37
-     * @param listener
38
-     */
39
-    static removeListener(eventType: any, listener: any): void;
40
-    /**
41
-     *
42
-     * @param options
43
-     */
44
-    static init(options?: {}): void;
45
-    /**
46
-     *
47
-     * @param elSelector
48
-     * @param stream
49
-     */
50
-    static attachMediaStream(elSelector: any, stream: any): any;
51
-    /**
52
-     * Returns true if retrieving the list of input devices is supported
53
-     * and false if not.
54
-     */
55
-    static isDeviceListAvailable(): boolean;
56
-    /**
57
-     * Returns true if changing the input (camera / microphone) or output
58
-     * (audio) device is supported and false if not.
59
-     * @param {string} [deviceType] Type of device to change. Default is
60
-     *      undefined or 'input', 'output' - for audio output device change.
61
-     * @returns {boolean} true if available, false otherwise.
62
-     */
63
-    static isDeviceChangeAvailable(deviceType?: string): boolean;
64
-    /**
65
-     * Returns whether the current execution environment supports WebRTC (for
66
-     * use within this library).
67
-     *
68
-     * @returns {boolean} {@code true} if WebRTC is supported in the current
69
-     * execution environment (for use within this library); {@code false},
70
-     * otherwise.
71
-     */
72
-    static isWebRtcSupported(): boolean;
73
-    /**
74
-     * Returns currently used audio output device id, '' stands for default
75
-     * device
76
-     * @returns {string}
77
-     */
78
-    static getAudioOutputDevice(): string;
79
-    /**
80
-     * Returns the list of available media devices if it has been obtained; otherwise an
81
-     * empty array is returned.
82
-     * @returns {array} list of available media devices.
83
-     */
84
-    static getCurrentlyAvailableMediaDevices(): any[];
85
-    /**
86
-     * Returns whether available devices have permissions granted
87
-     * @returns {Boolean}
88
-     */
89
-    static arePermissionsGrantedForAvailableDevices(): boolean;
90
-    /**
91
-     * Returns event data for device to be reported to stats.
92
-     * @returns {MediaDeviceInfo} device.
93
-     */
94
-    static getEventDataForActiveDevice(device: any): MediaDeviceInfo;
95
-    /**
96
-     * Sets current audio output device.
97
-     * @param {string} deviceId Id of 'audiooutput' device from
98
-     *      navigator.mediaDevices.enumerateDevices().
99
-     * @returns {Promise} resolves when audio output is changed, is rejected
100
-     *      otherwise
101
-     */
102
-    static setAudioOutputDevice(deviceId: string): Promise<any>;
103
-    /**
104
-     * Returns <tt>true</tt> if the given WebRTC MediaStream is considered a valid
105
-     * "user" stream which means that it's not a "receive only" stream nor a
106
-     * "mixed" JVB stream.
107
-     *
108
-     * Clients that implement Unified Plan, such as Firefox, use recvonly
109
-     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
110
-     * to Plan B where there are only 3 channels: audio, video and data.
111
-     *
112
-     * @param {MediaStream} stream The WebRTC MediaStream instance.
113
-     * @returns {boolean}
114
-     */
115
-    static isUserStream(stream: MediaStream): boolean;
116
-    /**
117
-     * Returns <tt>true</tt> if a WebRTC MediaStream identified by the given stream
118
-     * ID is considered a valid "user" stream which means that it's not a
119
-     * "receive only" stream nor a "mixed" JVB stream.
120
-     *
121
-     * Clients that implement Unified Plan, such as Firefox, use recvonly
122
-     * "streams/channels/tracks" for receiving remote stream/tracks, as opposed
123
-     * to Plan B where there are only 3 channels: audio, video and data.
124
-     *
125
-     * @param {string} streamId The id of WebRTC MediaStream.
126
-     * @returns {boolean}
127
-     */
128
-    static isUserStreamById(streamId: string): boolean;
129
-    /**
130
-     * Allows to receive list of available cameras/microphones.
131
-     * @param {function} callback Would receive array of devices as an
132
-     *      argument.
133
-     */
134
-    static enumerateDevices(callback: Function): void;
135
-    /**
136
-     * A method to handle stopping of the stream.
137
-     * One point to handle the differences in various implementations.
138
-     * @param {MediaStream} mediaStream MediaStream object to stop.
139
-     */
140
-    static stopMediaStream(mediaStream: MediaStream): void;
141
-    /**
142
-     * Returns whether the desktop sharing is enabled or not.
143
-     * @returns {boolean}
144
-     */
145
-    static isDesktopSharingEnabled(): boolean;
146
-    /**
147
-     *
148
-     * @param conference
149
-     * @param options
150
-     */
151
-    constructor(conference: any, options?: {});
152
-    conference: any;
153
-    /**
154
-     * A map of active <tt>TraceablePeerConnection</tt>.
155
-     * @type {Map.<number, TraceablePeerConnection>}
156
-     */
157
-    peerConnections: Map<number, TraceablePeerConnection>;
158
-    localTracks: any[];
159
-    options: {};
160
-    _channel: BridgeChannel;
161
-    /**
162
-     * The value specified to the last invocation of setLastN before the
163
-     * channel completed opening. If non-null, the value will be sent
164
-     * through a channel (once) as soon as it opens and will then be
165
-     * discarded.
166
-     * @private
167
-     * @type {number}
168
-     */
169
-    private _lastN;
170
-    /**
171
-     * Defines the last N endpoints list. It can be null or an array once
172
-     * initialised with a channel last N event.
173
-     * @type {Array<string>|null}
174
-     * @private
175
-     */
176
-    private _lastNEndpoints;
177
-    /**
178
-     * Defines the forwarded sources list. It can be null or an array once initialised with a channel forwarded
179
-     * sources event.
180
-     *
181
-     * @type {Array<string>|null}
182
-     * @private
183
-     */
184
-    private _forwardedSources;
185
-    /**
186
-     * The number representing the maximum video height the local client
187
-     * should receive from the bridge.
188
-     *
189
-     * @type {number|undefined}
190
-     * @private
191
-     */
192
-    private _maxFrameHeight;
193
-    /**
194
-     * The endpoint IDs of currently selected participants.
195
-     *
196
-     * @type {Array}
197
-     * @private
198
-     */
199
-    private _selectedEndpoints;
200
-    _lastNChangeListener: any;
201
-    _forwardedSourcesChangeListener: any;
202
-    /**
203
-     * Callback invoked when the list of known audio and video devices has
204
-     * been updated. Attempts to update the known available audio output
205
-     * devices.
206
-     *
207
-     * @private
208
-     * @returns {void}
209
-     */
210
-    private _onDeviceListChanged;
211
-    /**
212
-     * Updates the target audio output device for all remote audio tracks.
213
-     *
214
-     * @param {string} deviceId - The device id of the audio output device to
215
-     * use for all remote tracks.
216
-     * @private
217
-     * @returns {void}
218
-     */
219
-    private _updateAudioOutputForAudioTracks;
220
-    /**
221
-     * The default video type assumed by the bridge.
222
-     * @deprecated this will go away with multiple streams support
223
-     * @type {BridgeVideoType}
224
-     * @private
225
-     */
226
-    private _videoType;
227
-    /**
228
-     * Removes any listeners and stored state from this {@code RTC} instance.
229
-     *
230
-     * @returns {void}
231
-     */
232
-    destroy(): void;
233
-    /**
234
-     * Initializes the bridge channel of this instance.
235
-     * At least one of both, peerconnection or wsUrl parameters, must be
236
-     * given.
237
-     * @param {RTCPeerConnection} [peerconnection] WebRTC peer connection
238
-     * instance.
239
-     * @param {string} [wsUrl] WebSocket URL.
240
-     */
241
-    initializeBridgeChannel(peerconnection?: RTCPeerConnection, wsUrl?: string): void;
242
-    _channelOpenListener: () => void;
243
-    /**
244
-     * Receives events when Last N had changed.
245
-     * @param {array} lastNEndpoints The new Last N endpoints.
246
-     * @private
247
-     */
248
-    private _onLastNChanged;
249
-    /**
250
-     * Receives events when forwarded sources had changed.
251
-     *
252
-     * @param {array} forwardedSources The new forwarded sources.
253
-     * @private
254
-     */
255
-    private _onForwardedSourcesChanged;
256
-    /**
257
-     * Should be called when current media session ends and after the
258
-     * PeerConnection has been closed using PeerConnection.close() method.
259
-     */
260
-    onCallEnded(): void;
261
-    /**
262
-     * Sets the capture frame rate to be used for desktop tracks.
263
-     *
264
-     * @param {number} maxFps framerate to be used for desktop track capture.
265
-     */
266
-    setDesktopSharingFrameRate(maxFps: number): void;
267
-    /**
268
-     * Sets the receiver video constraints that determine how bitrate is allocated to each of the video streams
269
-     * requested from the bridge. The constraints are cached and sent through the bridge channel once the channel
270
-     * is established.
271
-     * @param {*} constraints
272
-     */
273
-    setNewReceiverVideoConstraints(constraints: any): void;
274
-    _receiverVideoConstraints: any;
275
-    /**
276
-     * Sets the maximum video size the local participant should receive from
277
-     * remote participants. Will cache the value and send it through the channel
278
-     * once it is created.
279
-     *
280
-     * @param {number} maxFrameHeightPixels the maximum frame height, in pixels,
281
-     * this receiver is willing to receive.
282
-     * @returns {void}
283
-     */
284
-    setReceiverVideoConstraint(maxFrameHeight: any): void;
285
-    /**
286
-     * Sets the video type and availability for the local video source.
287
-     *
288
-     * @param {string} videoType 'camera' for camera, 'desktop' for screenshare and
289
-     * 'none' for when local video source is muted or removed from the peerconnection.
290
-     * @returns {void}
291
-     */
292
-    setVideoType(videoType: string): void;
293
-    /**
294
-     * Sends the track's video type to the JVB.
295
-     * @param {SourceName} sourceName - the track's source name.
296
-     * @param {BridgeVideoType} videoType - the track's video type.
297
-     */
298
-    sendSourceVideoType(sourceName: any, videoType: {
299
-        CAMERA: string;
300
-        DESKTOP: string;
301
-        DESKTOP_HIGH_FPS: string;
302
-        NONE: string;
303
-    }): void;
304
-    /**
305
-     * Elects the participants with the given ids to be the selected
306
-     * participants in order to always receive video for this participant (even
307
-     * when last n is enabled). If there is no channel we store it and send it
308
-     * through the channel once it is created.
309
-     *
310
-     * @param {Array<string>} ids - The user ids.
311
-     * @throws NetworkError or InvalidStateError or Error if the operation
312
-     * fails.
313
-     * @returns {void}
314
-     */
315
-    selectEndpoints(ids: Array<string>): void;
316
-    /**
317
-     * Creates new <tt>TraceablePeerConnection</tt>
318
-     * @param {SignalingLayer} signaling The signaling layer that will provide information about the media or
319
-     * participants which is not carried over SDP.
320
-     * @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
321
-     * @param {boolean} isP2P Indicates whether or not the new TPC will be used in a peer to peer type of session.
322
-     * @param {object} options The config options.
323
-     * @param {boolean} options.enableInsertableStreams - Set to true when the insertable streams constraints is to be
324
-     * enabled on the PeerConnection.
325
-     * @param {boolean} options.disableSimulcast If set to 'true' will disable the simulcast.
326
-     * @param {boolean} options.disableRtx If set to 'true' will disable the RTX.
327
-     * @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
328
-     * @return {TraceablePeerConnection}
329
-     */
330
-    createPeerConnection(signaling: any, pcConfig: object, isP2P: boolean, options: {
331
-        enableInsertableStreams: boolean;
332
-        disableSimulcast: boolean;
333
-        disableRtx: boolean;
334
-        startSilent: boolean;
335
-    }): TraceablePeerConnection;
336
-    /**
337
-     * Removed given peer connection from this RTC module instance.
338
-     * @param {TraceablePeerConnection} traceablePeerConnection
339
-     * @return {boolean} <tt>true</tt> if the given peer connection was removed
340
-     * successfully or <tt>false</tt> if there was no peer connection mapped in
341
-     * this RTC instance.
342
-     */
343
-    _removePeerConnection(traceablePeerConnection: TraceablePeerConnection): boolean;
344
-    /**
345
-     *
346
-     * @param track
347
-     */
348
-    addLocalTrack(track: any): void;
349
-    /**
350
-     * Get forwarded sources list.
351
-     * @returns {Array<string>|null}
352
-     */
353
-    getForwardedSources(): Array<string> | null;
354
-    /**
355
-     * Get local video track.
356
-     * @returns {JitsiLocalTrack|undefined}
357
-     */
358
-    getLocalVideoTrack(): JitsiLocalTrack | undefined;
359
-    /**
360
-     * Returns all the local video tracks.
361
-     * @returns {Array<JitsiLocalTrack>}
362
-     */
363
-    getLocalVideoTracks(): Array<JitsiLocalTrack>;
364
-    /**
365
-     * Get local audio track.
366
-     * @returns {JitsiLocalTrack|undefined}
367
-     */
368
-    getLocalAudioTrack(): JitsiLocalTrack | undefined;
369
-    /**
370
-     * Returns the endpoint id for the local user.
371
-     * @returns {string}
372
-     */
373
-    getLocalEndpointId(): string;
374
-    /**
375
-     * Returns the local tracks of the given media type, or all local tracks if
376
-     * no specific type is given.
377
-     * @param {MediaType} [mediaType] Optional media type filter.
378
-     * (audio or video).
379
-     */
380
-    getLocalTracks(mediaType?: MediaType): any[];
381
-    /**
382
-     * Obtains all remote tracks currently known to this RTC module instance.
383
-     * @param {MediaType} [mediaType] The remote tracks will be filtered
384
-     *      by their media type if this argument is specified.
385
-     * @return {Array<JitsiRemoteTrack>}
386
-     */
387
-    getRemoteTracks(mediaType?: MediaType): Array<any>;
388
-    /**
389
-     * Set mute for all local audio streams attached to the conference.
390
-     * @param value The mute value.
391
-     * @returns {Promise}
392
-     */
393
-    setAudioMute(value: any): Promise<any>;
394
-    /**
395
-    * Set mute for all local video streams attached to the conference.
396
-    * @param value The mute value.
397
-    * @returns {Promise}
398
-    */
399
-    setVideoMute(value: any): Promise<any>;
400
-    /**
401
-     *
402
-     * @param track
403
-     */
404
-    removeLocalTrack(track: any): void;
405
-    /**
406
-     * Closes the currently opened bridge channel.
407
-     */
408
-    closeBridgeChannel(): void;
409
-    /**
410
-     *
411
-     * @param {TraceablePeerConnection} tpc
412
-     * @param {number} ssrc
413
-     * @param {number} audioLevel
414
-     * @param {boolean} isLocal
415
-     */
416
-    setAudioLevel(tpc: TraceablePeerConnection, ssrc: number, audioLevel: number, isLocal: boolean): void;
417
-    /**
418
-     * Sends message via the bridge channel.
419
-     * @param {string} to The id of the endpoint that should receive the
420
-     *      message. If "" the message will be sent to all participants.
421
-     * @param {object} payload The payload of the message.
422
-     * @throws NetworkError or InvalidStateError or Error if the operation
423
-     * fails or there is no data channel created.
424
-     */
425
-    sendChannelMessage(to: string, payload: object): void;
426
-    /**
427
-     * Sends the local stats via the bridge channel.
428
-     * @param {Object} payload The payload of the message.
429
-     * @throws NetworkError/InvalidStateError/Error if the operation fails or if there is no data channel created.
430
-     */
431
-    sendEndpointStatsMessage(payload: any): void;
432
-    /**
433
-     * Selects a new value for "lastN". The requested number of videos is going
434
-     * to be delivered after the value is in effect. Set to -1 for unlimited or
435
-     * all available videos.
436
-     * @param {number} value the new value for lastN.
437
-     */
438
-    setLastN(value: number): void;
439
-    /**
440
-     * Indicates if the endpoint id is currently included in the last N.
441
-     * @param {string} id The endpoint id that we check for last N.
442
-     * @returns {boolean} true if the endpoint id is in the last N or if we
443
-     * don't have bridge channel support, otherwise we return false.
444
-     */
445
-    isInLastN(id: string): boolean;
446
-    /**
447
-     * Indicates if the source name is currently included in the forwarded sources.
448
-     *
449
-     * @param {string} sourceName The source name that we check for forwarded sources.
450
-     * @returns {boolean} true if the source name is in the forwarded sources or if we don't have bridge channel
451
-     * support, otherwise we return false.
452
-     */
453
-    isInForwardedSources(sourceName: string): boolean;
454
-}
455
-import Listenable from "../util/Listenable";
456
-import TraceablePeerConnection from "./TraceablePeerConnection";
457
-import BridgeChannel from "./BridgeChannel";
458
-import JitsiLocalTrack from "./JitsiLocalTrack";
459
-import { MediaType } from "../../service/RTC/MediaType";
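RTC.d.ts above is the hub of the removed RTC-module declarations. The sketch below combines a few of its receiver-side controls; how the RTC instance is obtained from a conference is an assumption, and the 360p cap is purely illustrative.

// Hedged sketch: limit what the bridge sends us, based on what the UI can actually show.
import RTC from './RTC';

function limitReceivedVideo(rtc: RTC, visibleEndpoints: string[]): void {
    // Request at most as many video streams as there are visible tiles (-1 would mean unlimited).
    rtc.setLastN(visibleEndpoints.length);

    // Keep the visible endpoints flowing even when lastN kicks in.
    rtc.selectEndpoints(visibleEndpoints);

    // Cap the received resolution; the value is cached until the bridge channel opens.
    rtc.setReceiverVideoConstraint(360);
}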

+ 0  - 155    types/auto/modules/RTC/RTCUtils.d.ts

@@ -1,155 +0,0 @@
1
-export default rtcUtils;
2
-declare const rtcUtils: RTCUtils;
3
-/**
4
- *
5
- */
6
-declare class RTCUtils extends Listenable {
7
-    /**
8
-     *
9
-     */
10
-    constructor();
11
-    /**
12
-     * Depending on the browser, sets different instance methods for
13
-     * interacting with user media and adds methods to native WebRTC-related
14
-     * objects. Also creates an instance variable for peer connection
15
-     * constraints.
16
-     *
17
-     * @param {Object} options
18
-     * @returns {void}
19
-     */
20
-    init(options?: any): void;
21
-    attachMediaStream: Function;
22
-    pcConstraints: {};
23
-    /**
24
-     *
25
-     * @param {Function} callback
26
-     */
27
-    enumerateDevices(callback: Function): void;
28
-    /**
29
-     * Acquires a media stream via getUserMedia that
30
-     * matches the given constraints
31
-     *
32
-     * @param {array} umDevices which devices to acquire (e.g. audio, video)
33
-     * @param {Object} constraints - Stream specifications to use.
34
-     * @param {number} timeout - The timeout in ms for GUM.
35
-     * @returns {Promise}
36
-     */
37
-    _getUserMedia(umDevices: any[], constraints?: any, timeout?: number): Promise<any>;
38
-    /**
39
-     * Acquire a display stream via the screenObtainer. This requires extra
40
-     * logic compared to use screenObtainer versus normal device capture logic
41
-     * in RTCUtils#_getUserMedia.
42
-     *
43
-     * @param {Object} options - Optional parameters.
44
-     * @returns {Promise} A promise which will be resolved with an object which
45
-     * contains the acquired display stream. If desktop sharing is not supported
46
-     * then a rejected promise will be returned.
47
-     */
48
-    _getDesktopMedia(options: any): Promise<any>;
49
-    /**
50
-     * Private utility for determining if the passed in MediaStream contains
51
-     * tracks of the type(s) specified in the requested devices.
52
-     *
53
-     * @param {string[]} requestedDevices - The track types that are expected to
54
-     * be included in the stream.
55
-     * @param {MediaStream} stream - The MediaStream to check if it has the
56
-     * expected track types.
57
-     * @returns {string[]} An array of string with the missing track types. The
58
-     * array will be empty if all requestedDevices are found in the stream.
59
-     */
60
-    _getMissingTracks(requestedDevices: string[], stream: MediaStream): string[];
61
-    /**
62
-     * Gets streams from specified device types. This function intentionally
63
-     * ignores errors for upstream to catch and handle instead.
64
-     *
65
-     * @param {Object} options - A hash describing what devices to get and
66
-     * relevant constraints.
67
-     * @param {string[]} options.devices - The types of media to capture. Valid
68
-     * values are "desktop", "audio", and "video".
69
-     * @param {Object} options.desktopSharingFrameRate
70
-     * @param {Object} options.desktopSharingFrameRate.min - Minimum fps
71
-     * @param {Object} options.desktopSharingFrameRate.max - Maximum fps
72
-     * @param {String} options.desktopSharingSourceDevice - The device id or
73
-     * label for a video input source that should be used for screensharing.
74
-     * @param {Array<string>} options.desktopSharingSources - The types of sources ("screen", "window", etc)
75
-     * from which the user can select what to share.
76
-     * @returns {Promise} The promise, when successful, will return an array of
77
-     * meta data for the requested device type, which includes the stream and
78
-     * track. If an error occurs, it will be deferred to the caller for
79
-     * handling.
80
-     */
81
-    obtainAudioAndVideoPermissions(options: {
82
-        devices: string[];
83
-        desktopSharingFrameRate: {
84
-            min: any;
85
-            max: any;
86
-        };
87
-        desktopSharingSourceDevice: string;
88
-        desktopSharingSources: Array<string>;
89
-    }): Promise<any>;
90
-    /**
91
-     * Checks whether it is possible to enumerate available cameras/microphones.
92
-     *
93
-     * @returns {boolean} {@code true} if the device listing is available;
94
-     * {@code false}, otherwise.
95
-     */
96
-    isDeviceListAvailable(): boolean;
97
-    /**
98
-     * Returns true if changing the input (camera / microphone) or output
99
-     * (audio) device is supported and false if not.
100
-     * @param {string} [deviceType] - type of device to change. Default is
101
-     *      undefined or 'input', 'output' - for audio output device change.
102
-     * @returns {boolean} true if available, false otherwise.
103
-     */
104
-    isDeviceChangeAvailable(deviceType: any): boolean;
105
-    /**
106
-     * A method to handle stopping of the stream.
107
-     * One point to handle the differences in various implementations.
108
-     * @param mediaStream MediaStream object to stop.
109
-     */
110
-    stopMediaStream(mediaStream: any): void;
111
-    /**
112
-     * Returns whether the desktop sharing is enabled or not.
113
-     * @returns {boolean}
114
-     */
115
-    isDesktopSharingEnabled(): boolean;
116
-    /**
117
-     * Sets current audio output device.
118
-     * @param {string} deviceId - id of 'audiooutput' device from
119
-     *      navigator.mediaDevices.enumerateDevices(), 'default' for default
120
-     *      device
121
-     * @returns {Promise} - resolves when audio output is changed, is rejected
122
-     *      otherwise
123
-     */
124
-    setAudioOutputDevice(deviceId: string): Promise<any>;
125
-    /**
126
-     * Sets the capture frame rate for desktop tracks.
127
-     *
128
-     * @param {number} maxFps - max fps to be used as the capture frame rate.
129
-     * @returns {void}
130
-     */
131
-    setDesktopSharingFrameRate(maxFps: number): void;
132
-    /**
133
-     * Returns currently used audio output device id, '' stands for default
134
-     * device
135
-     * @returns {string}
136
-     */
137
-    getAudioOutputDevice(): string;
138
-    /**
139
-     * Returns list of available media devices if its obtained, otherwise an
140
-     * empty array is returned/
141
-     * @returns {Array} list of available media devices.
142
-     */
143
-    getCurrentlyAvailableMediaDevices(): any[];
144
-    /**
145
-     * Returns whether available devices have permissions granted
146
-     * @returns {Boolean}
147
-     */
148
-    arePermissionsGrantedForAvailableDevices(): boolean;
149
-    /**
150
-     * Returns event data for device to be reported to stats.
151
-     * @returns {MediaDeviceInfo} device.
152
-     */
153
-    getEventDataForActiveDevice(device: any): MediaDeviceInfo;
154
-}
155
-import Listenable from "../util/Listenable";
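RTCUtils is exposed as a singleton (rtcUtils). Below is a sketch of its device-management surface; typing the enumerateDevices() callback payload as MediaDeviceInfo[] is an assumption, since the declaration only promises a Function.

// Hedged sketch: switch the audio output to the first device that looks like a headset.
import rtcUtils from './RTCUtils';

function switchToFirstHeadset(): void {
    if (!rtcUtils.isDeviceChangeAvailable('output')) {
        return; // changing the audio sink is not supported here
    }

    rtcUtils.enumerateDevices((devices: MediaDeviceInfo[]) => {
        const headset = devices.find(d => d.kind === 'audiooutput' && /headset/i.test(d.label));

        if (headset) {
            // Resolves once the sink changes; '' selects the default device instead.
            rtcUtils.setAudioOutputDevice(headset.deviceId).catch(console.error);
        }
    });
}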

+ 0  - 120    types/auto/modules/RTC/ScreenObtainer.d.ts

@@ -1,120 +0,0 @@
1
-/**
2
- * The default frame rate for Screen Sharing.
3
- */
4
-export const SS_DEFAULT_FRAME_RATE: 5;
5
-export default ScreenObtainer;
6
-declare namespace ScreenObtainer {
7
-    const obtainStream: any;
8
-    /**
9
-     * Initializes the function used to obtain a screen capture
10
-     * (this.obtainStream).
11
-     *
12
-     * @param {object} options
13
-     */
14
-    function init(options?: any): void;
15
-    /**
16
-     * Initializes the function used to obtain a screen capture
17
-     * (this.obtainStream).
18
-     *
19
-     * @param {object} options
20
-     */
21
-    function init(options?: any): void;
22
-    /**
23
-     * Returns a method which will be used to obtain the screen sharing stream
24
-     * (based on the browser type).
25
-     *
26
-     * @returns {Function}
27
-     * @private
28
-     */
29
-    function _createObtainStreamMethod(): Function;
30
-    /**
31
-     * Returns a method which will be used to obtain the screen sharing stream
32
-     * (based on the browser type).
33
-     *
34
-     * @returns {Function}
35
-     * @private
36
-     */
37
-    function _createObtainStreamMethod(): Function;
38
-    /**
39
-     * Gets the appropriate constraints for audio sharing.
40
-     *
41
-     * @returns {Object|boolean}
42
-     */
43
-    function _getAudioConstraints(): any;
44
-    /**
45
-     * Gets the appropriate constraints for audio sharing.
46
-     *
47
-     * @returns {Object|boolean}
48
-     */
49
-    function _getAudioConstraints(): any;
50
-    /**
51
-     * Checks whether obtaining a screen capture is supported in the current
52
-     * environment.
53
-     * @returns {boolean}
54
-     */
55
-    function isSupported(): boolean;
56
-    /**
57
-     * Checks whether obtaining a screen capture is supported in the current
58
-     * environment.
59
-     * @returns {boolean}
60
-     */
61
-    function isSupported(): boolean;
62
-    /**
63
-     * Obtains a screen capture stream on Electron.
64
-     *
65
-     * @param onSuccess - Success callback.
66
-     * @param onFailure - Failure callback.
67
-     * @param {Object} options - Optional parameters.
68
-     */
69
-    function obtainScreenOnElectron(onSuccess: any, onFailure: any, options?: any): void;
70
-    /**
71
-     * Obtains a screen capture stream on Electron.
72
-     *
73
-     * @param onSuccess - Success callback.
74
-     * @param onFailure - Failure callback.
75
-     * @param {Object} options - Optional parameters.
76
-     */
77
-    function obtainScreenOnElectron(onSuccess: any, onFailure: any, options?: any): void;
78
-    /**
79
-     * Obtains a screen capture stream using getDisplayMedia.
80
-     *
81
-     * @param callback - The success callback.
82
-     * @param errorCallback - The error callback.
83
-     */
84
-    function obtainScreenFromGetDisplayMedia(callback: any, errorCallback: any): void;
85
-    /**
86
-     * Obtains a screen capture stream using getDisplayMedia.
87
-     *
88
-     * @param callback - The success callback.
89
-     * @param errorCallback - The error callback.
90
-     */
91
-    function obtainScreenFromGetDisplayMedia(callback: any, errorCallback: any): void;
92
-    /**
93
-     * Obtains a screen capture stream using getDisplayMedia.
94
-     *
95
-     * @param callback - The success callback.
96
-     * @param errorCallback - The error callback.
97
-     */
98
-    function obtainScreenFromGetDisplayMediaRN(callback: any, errorCallback: any): void;
99
-    /**
100
-     * Obtains a screen capture stream using getDisplayMedia.
101
-     *
102
-     * @param callback - The success callback.
103
-     * @param errorCallback - The error callback.
104
-     */
105
-    function obtainScreenFromGetDisplayMediaRN(callback: any, errorCallback: any): void;
106
-    /**
107
-     * Sets the max frame rate to be used for a desktop track capture.
108
-     *
109
-     * @param {number} maxFps capture frame rate to be used for desktop tracks.
110
-     * @returns {void}
111
-     */
112
-    function setDesktopSharingFrameRate(maxFps: number): void;
113
-    /**
114
-     * Sets the max frame rate to be used for a desktop track capture.
115
-     *
116
-     * @param {number} maxFps capture frame rate to be used for desktop tracks.
117
-     * @returns {void}
118
-     */
119
-    function setDesktopSharingFrameRate(maxFps: number): void;
120
-}
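ScreenObtainer above is a namespace of callback-style helpers rather than a class. A small sketch follows; the shape of the success payload handed to the callback is an assumption.

// Hedged sketch: start a screen capture with the declared callback-style API.
import ScreenObtainer, { SS_DEFAULT_FRAME_RATE } from './ScreenObtainer';

function startScreenShare(): void {
    if (!ScreenObtainer.isSupported()) {
        console.warn('Screen capture is not supported in this environment');
        return;
    }

    ScreenObtainer.setDesktopSharingFrameRate(SS_DEFAULT_FRAME_RATE); // 5 fps, per the constant above

    ScreenObtainer.obtainScreenFromGetDisplayMedia(
        (result: any) => console.log('got display stream', result), // payload shape assumed
        (error: any) => console.error('screen capture failed', error)
    );
}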

+ 0  - 162    types/auto/modules/RTC/TPCUtils.d.ts

@@ -1,162 +0,0 @@
1
-export const HD_BITRATE: 2500000;
2
-export const HD_SCALE_FACTOR: 1;
3
-export const LD_SCALE_FACTOR: 4;
4
-export const SD_SCALE_FACTOR: 2;
5
-export const SIM_LAYER_RIDS: string[];
6
-/**
7
- * Handles track related operations on TraceablePeerConnection when browser is
8
- * running in unified plan mode.
9
- */
10
-export class TPCUtils {
11
-    /**
12
-     * Creates a new instance for a given TraceablePeerConnection
13
-     *
14
-     * @param peerconnection - the tpc instance for which we have utility functions.
15
-     */
16
-    constructor(peerconnection: any);
17
-    pc: any;
18
-    videoBitrates: any;
19
-    /**
20
-     * The startup configuration for the stream encodings that are applicable to
21
-     * the video stream when a new sender is created on the peerconnection. The initial
22
-     * config takes into account the differences in browser's simulcast implementation.
23
-     *
24
-     * Encoding parameters:
25
-     * active - determine the on/off state of a particular encoding.
26
-     * maxBitrate - max. bitrate value to be applied to that particular encoding
27
-     *  based on the encoding's resolution and config.js videoQuality settings if applicable.
28
-     * rid - Rtp Stream ID that is configured for a particular simulcast stream.
29
-     * scaleResolutionDownBy - the factor by which the encoding is scaled down from the
30
-     *  original resolution of the captured video.
31
-     */
32
-    localStreamEncodingsConfig: {
33
-        active: boolean;
34
-        maxBitrate: any;
35
-        rid: string;
36
-        scaleResolutionDownBy: number;
37
-    }[];
38
-    /**
39
-     * Obtains stream encodings that need to be configured on the given track based
40
-     * on the track media type and the simulcast setting.
41
-     * @param {JitsiLocalTrack} localTrack
42
-     */
43
-    _getStreamEncodings(localTrack: any): {
44
-        active: boolean;
45
-        maxBitrate: any;
46
-        rid: string;
47
-        scaleResolutionDownBy: number;
48
-    }[] | {
49
-        active: boolean;
50
-        maxBitrate: any;
51
-    }[] | {
52
-        active: boolean;
53
-    }[];
54
-    /**
55
-     * Ensures that the ssrcs associated with a FID ssrc-group appear in the correct order, i.e.,
56
-     * the primary ssrc first and the secondary rtx ssrc later. This is important for unified
57
-     * plan since we have only one FID group per media description.
58
-     * @param {Object} description the webRTC session description instance for the remote
59
-     * description.
60
-     * @private
61
-     */
62
-    private ensureCorrectOrderOfSsrcs;
63
-    /**
64
-     * Returns the transceiver associated with a given RTCRtpSender/RTCRtpReceiver.
65
-     *
66
-     * @param {string} mediaType - type of track associated with the transceiver 'audio' or 'video'.
67
-     * @param {JitsiLocalTrack} localTrack - local track to be used for lookup.
68
-     * @returns {RTCRtpTransceiver}
69
-     */
70
-    findTransceiver(mediaType: string, localTrack?: any): RTCRtpTransceiver;
71
-    /**
72
-     * Takes in a *unified plan* offer and inserts the appropriate
73
-     * parameters for adding simulcast receive support.
74
-     * @param {Object} desc - A session description object
75
-     * @param {String} desc.type - the type (offer/answer)
76
-     * @param {String} desc.sdp - the sdp content
77
-     *
78
-     * @return {Object} A session description (same format as above) object
79
-     * with its sdp field modified to advertise simulcast receive support
80
-     */
81
-    insertUnifiedPlanSimulcastReceive(desc: {
82
-        type: string;
83
-        sdp: string;
84
-    }): any;
85
-    /**
86
-    * Adds {@link JitsiLocalTrack} to the WebRTC peerconnection for the first time.
87
-    * @param {JitsiLocalTrack} track - track to be added to the peerconnection.
88
-    * @param {boolean} isInitiator - boolean that indicates if the endpoint is offerer in a p2p connection.
89
-    * @returns {void}
90
-    */
91
-    addTrack(localTrack: any, isInitiator: boolean): void;
92
-    /**
93
-     * Returns the calculated active state of the simulcast encodings based on the frame height requested for the send
94
-     * stream. All the encodings that have a resolution lower than the frame height requested will be enabled.
95
-     *
96
-     * @param {JitsiLocalTrack} localVideoTrack The local video track.
97
-     * @param {number} newHeight The resolution requested for the video track.
98
-     * @returns {Array<boolean>}
99
-     */
100
-    calculateEncodingsActiveState(localVideoTrack: any, newHeight: number): Array<boolean>;
101
-    /**
102
-     * Returns the calculated max bitrates that need to be configured on the simulcast encodings based on the video
103
-     * type and other considerations associated with screenshare.
104
-     *
105
-     * @param {JitsiLocalTrack} localVideoTrack The local video track.
106
-     * @returns {Array<number>}
107
-     */
108
-    calculateEncodingsBitrates(localVideoTrack: any): Array<number>;
109
-    /**
110
-     * Replaces the existing track on a RTCRtpSender with the given track.
111
-     *
112
-     * @param {JitsiLocalTrack} oldTrack - existing track on the sender that needs to be removed.
113
-     * @param {JitsiLocalTrack} newTrack - new track that needs to be added to the sender.
114
-     * @returns {Promise<RTCRtpTransceiver>} - resolved with the associated transceiver when done, rejected otherwise.
115
-     */
116
-    replaceTrack(oldTrack: any, newTrack: any): Promise<RTCRtpTransceiver>;
117
-    /**
118
-    * Enables/disables audio transmission on the peer connection. When
119
-    * disabled the audio transceiver direction will be set to 'inactive'
120
-    * which means that no data will be sent nor accepted, but
121
-    * the connection should be kept alive.
122
-    * @param {boolean} active - true to enable audio media transmission or
123
-    * false to disable.
124
-    * @returns {void}
125
-    */
126
-    setAudioTransferActive(active: boolean): void;
127
-    /**
128
-     * Set the simulcast stream encoding properties on the RTCRtpSender.
129
-     * @param {JitsiLocalTrack} track - the current track in use for which
130
-     * the encodings are to be set.
131
-     * @returns {Promise<void>} - resolved when done.
132
-     */
133
-    setEncodings(track: any): Promise<void>;
134
-    /**
135
-     * Enables/disables media transmission on the peerconnection by changing the direction
136
-     * on the transceiver for the specified media type.
137
-     * @param {String} mediaType - 'audio' or 'video'
138
-     * @param {boolean} active - true to enable media transmission or false
139
-     * to disable.
140
-     * @returns {void}
141
-     */
142
-    setMediaTransferActive(mediaType: string, active: boolean): void;
143
-    /**
144
-    * Enables/disables video media transmission on the peer connection. When
145
-    * disabled the SDP video media direction in the local SDP will be adjusted to
146
-    * 'inactive' which means that no data will be sent nor accepted, but
147
-    * the connection should be kept alive.
148
-    * @param {boolean} active - true to enable video media transmission or
149
-    * false to disable.
150
-    * @returns {void}
151
-    */
152
-    setVideoTransferActive(active: boolean): void;
153
-    /**
154
-     * Ensures that the resolution of the stream encodings are consistent with the values
155
-     * that were configured on the RTCRtpSender when the source was added to the peerconnection.
156
-     * This should prevent us from overriding the default values if the browser returns
157
-     * erroneous values when RTCRtpSender.getParameters is used for getting the encodings info.
158
-     * @param {Object} parameters - the RTCRtpEncodingParameters obtained from the browser.
159
-     * @returns {void}
160
-     */
161
-    updateEncodingsResolution(parameters: any): void;
162
-}
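TPCUtils above carries the simulcast/encoding math for a TraceablePeerConnection. The sketch below strings three of its helpers together for a sender resolution change; the way the tpcUtils instance and the local video track are obtained is assumed, and the computed values are only logged here.

// Hedged sketch: recompute the simulcast encoding state when the requested send height changes.
import { TPCUtils } from './TPCUtils';

async function applySenderHeight(tpcUtils: TPCUtils, localVideoTrack: any, height: number): Promise<void> {
    // Which encodings should stay active for this height (lower-resolution layers only)...
    const active = tpcUtils.calculateEncodingsActiveState(localVideoTrack, height);

    // ...and the matching per-layer bitrate caps.
    const bitrates = tpcUtils.calculateEncodingsBitrates(localVideoTrack);

    console.log('desired simulcast state', { active, bitrates });

    // Push the simulcast parameters down to the RTCRtpSender for this track.
    await tpcUtils.setEncodings(localVideoTrack);
}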

+ 0  - 824    types/auto/modules/RTC/TraceablePeerConnection.d.ts

@@ -1,824 +0,0 @@
1
-/**
2
- * Creates new instance of 'TraceablePeerConnection'.
3
- *
4
- * @param {RTC} rtc the instance of <tt>RTC</tt> service
5
- * @param {number} id the peer connection id assigned by the parent RTC module.
6
- * @param {SignalingLayer} signalingLayer the signaling layer instance
7
- * @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
8
- * @param {object} constraints WebRTC 'PeerConnection' constraints
9
- * @param {boolean} isP2P indicates whether or not the new instance will be used in a peer to peer connection.
10
- * @param {object} options <tt>TraceablePeerConnection</tt> config options.
11
- * @param {boolean} options.disableSimulcast if set to 'true' will disable the simulcast.
12
- * @param {boolean} options.disableRtx if set to 'true' will disable the RTX.
13
- * @param {string} options.disabledCodec the mime type of the code that should not be negotiated on the peerconnection.
14
- * @param {string} options.preferredCodec the mime type of the codec that needs to be made the preferred codec for the
15
- * peerconnection.
16
- * @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
17
- * @param {boolean} options.usesUnifiedPlan Indicates if the browser is running in unified plan mode.
18
- *
19
- * FIXME: initially the purpose of TraceablePeerConnection was to be able to
20
- * debug the peer connection. Since many other responsibilities have been added
21
- * it would make sense to extract a separate class from it and come up with
22
- * a more suitable name.
23
- *
24
- * @constructor
25
- */
26
-export default function TraceablePeerConnection(rtc: RTC, id: number, signalingLayer: any, pcConfig: object, constraints: object, isP2P: boolean, options: {
27
-    disableSimulcast: boolean;
28
-    disableRtx: boolean;
29
-    disabledCodec: string;
30
-    preferredCodec: string;
31
-    startSilent: boolean;
32
-    usesUnifiedPlan: boolean;
33
-}): void;
34
-export default class TraceablePeerConnection {
35
-    /**
36
-     * Creates new instance of 'TraceablePeerConnection'.
37
-     *
38
-     * @param {RTC} rtc the instance of <tt>RTC</tt> service
39
-     * @param {number} id the peer connection id assigned by the parent RTC module.
40
-     * @param {SignalingLayer} signalingLayer the signaling layer instance
41
-     * @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
42
-     * @param {object} constraints WebRTC 'PeerConnection' constraints
43
-     * @param {boolean} isP2P indicates whether or not the new instance will be used in a peer to peer connection.
44
-     * @param {object} options <tt>TraceablePeerConnection</tt> config options.
45
-     * @param {boolean} options.disableSimulcast if set to 'true' will disable the simulcast.
46
-     * @param {boolean} options.disableRtx if set to 'true' will disable the RTX.
47
-     * @param {string} options.disabledCodec the mime type of the code that should not be negotiated on the peerconnection.
48
-     * @param {string} options.preferredCodec the mime type of the codec that needs to be made the preferred codec for the
49
-     * peerconnection.
50
-     * @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
51
-     * @param {boolean} options.usesUnifiedPlan Indicates if the browser is running in unified plan mode.
52
-     *
53
-     * FIXME: initially the purpose of TraceablePeerConnection was to be able to
54
-     * debug the peer connection. Since many other responsibilities have been added
55
-     * it would make sense to extract a separate class from it and come up with
56
-     * a more suitable name.
57
-     *
58
-     * @constructor
59
-     */
60
-    constructor(rtc: RTC, id: number, signalingLayer: any, pcConfig: object, constraints: object, isP2P: boolean, options: {
61
-        disableSimulcast: boolean;
62
-        disableRtx: boolean;
63
-        disabledCodec: string;
64
-        preferredCodec: string;
65
-        startSilent: boolean;
66
-        usesUnifiedPlan: boolean;
67
-    });
68
-    /**
69
-     * Indicates whether or not this peer connection instance is actively
70
-     * sending/receiving audio media. When set to <tt>false</tt> the SDP audio
71
-     * media direction will be adjusted to 'inactive' in order to suspend
72
-     * the transmission.
73
-     * @type {boolean}
74
-     * @private
75
-     */
76
-    private audioTransferActive;
77
-    /**
78
-     * The DTMF sender instance used to send DTMF tones.
79
-     *
80
-     * @type {RTCDTMFSender|undefined}
81
-     * @private
82
-     */
83
-    private _dtmfSender;
84
-    /**
85
-     * @typedef {Object} TouchToneRequest
86
-     * @property {string} tones - The DTMF tones string as defined by
87
-     * {@code RTCDTMFSender.insertDTMF}, 'tones' argument.
88
-     * @property {number} duration - The amount of time in milliseconds that
89
-     * each DTMF should last.
90
-     * @property {string} interToneGap - The length of time in milliseconds to
91
-     * wait between tones.
92
-     */
93
-    /**
94
-     * TouchToneRequests which are waiting to be played. This queue is filled
95
-     * if there are touch tones currently being played.
96
-     *
97
-     * @type {Array<TouchToneRequest>}
98
-     * @private
99
-     */
100
-    private _dtmfTonesQueue;
101
-    /**
102
-     * Indicates whether or not this peer connection instance is actively
103
-     * sending/receiving video media. When set to <tt>false</tt> the SDP video
104
-     * media direction will be adjusted to 'inactive' in order to suspend
105
-     * the transmission.
106
-     * @type {boolean}
107
-     * @private
108
-     */
109
-    private videoTransferActive;
110
-    /**
111
-     * The parent instance of RTC service which created this
112
-     * <tt>TraceablePeerConnection</tt>.
113
-     * @type {RTC}
114
-     */
115
-    rtc: RTC;
116
-    /**
117
-     * The peer connection identifier assigned by the RTC module.
118
-     * @type {number}
119
-     */
120
-    id: number;
121
-    /**
122
-     * Indicates whether or not this instance is used in a peer to peer
123
-     * connection.
124
-     * @type {boolean}
125
-     */
126
-    isP2P: boolean;
127
-    /**
128
-     * The map holds remote tracks associated with this peer connection. It maps user's JID to media type and a set of
129
-     * remote tracks.
130
-     * @type {Map<string, Map<MediaType, Set<JitsiRemoteTrack>>>}
131
-     */
132
-    remoteTracks: Map<string, Map<MediaType, Set<JitsiRemoteTrack>>>;
133
-    /**
134
-     * A map which stores local tracks mapped by {@link JitsiLocalTrack.rtcId}
135
-     * @type {Map<number, JitsiLocalTrack>}
136
-     */
137
-    localTracks: Map<number, any>;
138
-    /**
139
-     * Keeps tracks of the WebRTC <tt>MediaStream</tt>s that have been added to
140
-     * the underlying WebRTC PeerConnection.
141
-     * @type {Array}
142
-     * @private
143
-     */
144
-    private _addedStreams;
145
-    /**
146
-     * @typedef {Object} TPCGroupInfo
147
-     * @property {string} semantics the SSRC groups semantics
148
-     * @property {Array<number>} ssrcs group's SSRCs in order where the first
149
-     * one is group's primary SSRC, the second one is secondary (RTX) and so
150
-     * on...
151
-     */
152
-    /**
153
-     * @typedef {Object} TPCSSRCInfo
154
-     * @property {Array<number>} ssrcs an array which holds all track's SSRCs
155
-     * @property {Array<TPCGroupInfo>} groups an array stores all track's SSRC
156
-     * groups
157
-     */
158
-    /**
159
-     * Holds the info about local track's SSRCs mapped per their
160
-     * {@link JitsiLocalTrack.rtcId}
161
-     * @type {Map<number, TPCSSRCInfo>}
162
-     */
163
-    localSSRCs: Map<number, {
164
-        /**
165
-         * an array which holds all track's SSRCs
166
-         */
167
-        ssrcs: Array<number>;
168
-        /**
169
-         * an array stores all track's SSRC
170
-         * groups
171
-         */
172
-        groups: {
173
-            /**
174
-             * the SSRC groups semantics
175
-             */
176
-            semantics: string;
177
-            /**
178
-             * group's SSRCs in order where the first
179
-             * one is group's primary SSRC, the second one is secondary (RTX) and so
180
-             * on...
181
-             */
182
-            ssrcs: Array<number>;
183
-        }[];
184
-    }>;
185
-    /**
186
-     * The local ICE username fragment for this session.
187
-     */
188
-    localUfrag: any;
189
-    /**
190
-     * The remote ICE username fragment for this session.
191
-     */
192
-    remoteUfrag: any;
193
-    /**
194
-     * The DTLS transport object for the PeerConnection.
195
-     * Note: this assumes only one shared transport exists because we bundle
196
-     *       all streams on the same underlying transport.
197
-     */
198
-    _dtlsTransport: RTCDtlsTransport;
199
-    /**
200
-     * The signaling layer which operates this peer connection.
201
-     * @type {SignalingLayer}
202
-     */
203
-    signalingLayer: any;
204
-    _peerVideoTypeChanged: any;
205
-    _peerMutedChanged: any;
206
-    options: {
207
-        disableSimulcast: boolean;
208
-        disableRtx: boolean;
209
-        disabledCodec: string;
210
-        preferredCodec: string;
211
-        startSilent: boolean;
212
-        usesUnifiedPlan: boolean;
213
-    };
214
-    peerconnection: RTCPeerConnection;
215
-    tpcUtils: TPCUtils;
216
-    updateLog: any[];
217
-    stats: {};
218
-    statsinterval: number;
219
-    /**
220
-     * Flag used to indicate if simulcast is turned off and a cap of 500 Kbps is applied on screensharing.
221
-     */
222
-    _capScreenshareBitrate: any;
223
-    /**
224
-    * Flag used to indicate if the browser is running in unified plan mode.
225
-    */
226
-    _usesUnifiedPlan: boolean;
227
-    /**
228
-     * Flag used to indicate if RTCRtpTransceiver#setCodecPreferences is to be used instead of SDP
229
-     * munging for codec selection.
230
-     */
231
-    _usesTransceiverCodecPreferences: boolean;
232
-    /**
233
-     * Indicates whether an audio track has ever been added to the peer connection.
234
-     */
235
-    _hasHadAudioTrack: boolean;
236
-    /**
237
-     * Indicates whether a video track has ever been added to the peer connection.
238
-     */
239
-    _hasHadVideoTrack: boolean;
240
-    /**
241
-     * @type {number} The max number of stats to keep in this.stats. Limit to
242
-     * 300 values, i.e. 5 minutes; set to 0 to disable
243
-     */
244
-    maxstats: number;
245
-    interop: any;
246
-    simulcast: any;
247
-    sdpConsistency: SdpConsistency;
248
-    /**
249
-     * Munges local SDP provided to the Jingle Session in order to prevent from
250
-     * sending SSRC updates on attach/detach and mute/unmute (for video).
251
-     * @type {LocalSdpMunger}
252
-     */
253
-    localSdpMunger: LocalSdpMunger;
254
-    /**
255
-     * TracablePeerConnection uses RTC's eventEmitter
256
-     * @type {EventEmitter}
257
-     */
258
-    eventEmitter: any;
259
-    rtxModifier: RtxModifier;
260
-    /**
261
-     * The height constraint applied on the video sender. The default value is 2160 (4K) when layer suspension is
262
-     * explicitly disabled.
263
-     */
264
-    _senderVideoMaxHeight: number;
265
-    /**
266
-     * The height constraints to be applied on the sender per local video source (source name as the key).
267
-     * @type {Map<string, number>}
268
-     */
269
-    _senderMaxHeights: Map<string, number>;
270
-    /**
271
-     * Holds the RTCRtpTransceiver mids that the local tracks are attached to, mapped per their
272
-     * {@link JitsiLocalTrack.rtcId}.
273
-     * @type {Map<string, string>}
274
-     */
275
-    _localTrackTransceiverMids: Map<string, string>;
276
-    trace: (what: any, info: any) => void;
277
-    onicecandidate: any;
278
-    onTrack: (evt: any) => void;
279
-    onsignalingstatechange: any;
280
-    oniceconnectionstatechange: any;
281
-    onnegotiationneeded: any;
282
-    onconnectionstatechange: any;
283
-    ondatachannel: any;
284
-    private _processStat;
285
-    /**
286
-     * Forwards the {@link peerconnection.iceConnectionState} state except that it
287
-     * will convert "completed" into "connected" where both mean that the ICE has
288
-     * succeeded and is up and running. We never see "completed" state for
289
-     * the JVB connection, but it started appearing for the P2P one. This method
290
-     * allows to adapt old logic to this new situation.
291
-     * @return {string}
292
-     */
293
-    getConnectionState(): string;
294
-    private getDesiredMediaDirection;
295
-    /**
296
-     * Returns the list of RTCRtpReceivers created for the source of the given media type associated with
297
-     * the set of remote endpoints specified.
298
-     * @param {Array<string>} endpoints list of the endpoints
299
-     * @param {string} mediaType 'audio' or 'video'
300
-     * @returns {Array<RTCRtpReceiver>} list of receivers created by the peerconnection.
301
-     */
302
-    _getReceiversByEndpointIds(endpoints: Array<string>, mediaType: string): Array<RTCRtpReceiver>;
303
-    /**
304
-     * Tells whether or not this TPC instance is using Simulcast.
305
-     * @return {boolean} <tt>true</tt> if simulcast is enabled and active or
306
-     * <tt>false</tt> if it's turned off.
307
-     */
308
-    isSimulcastOn(): boolean;
309
-    /**
310
-     * Handles remote source mute and unmute changed events.
311
-     *
312
-     * @param {string} sourceName - The name of the remote source.
313
-     * @param {boolean} isMuted - The new mute state.
314
-     */
315
-    _sourceMutedChanged(sourceName: string, isMuted: boolean): void;
316
-    /**
317
-     * Handles remote source videoType changed events.
318
-     *
319
-     * @param {string} sourceName - The name of the remote source.
320
-     * @param {boolean} isMuted - The new value.
321
-     */
322
-    _sourceVideoTypeChanged(sourceName: string, videoType: any): void;
323
-    /**
324
-     * Obtains audio levels of the remote audio tracks by getting the source information on the RTCRtpReceivers.
325
-     * The information relevant to the ssrc is updated each time a RTP packet constaining the ssrc is received.
326
-     * @param {Array<string>} speakerList list of endpoint ids for which audio levels are to be gathered.
327
-     * @returns {Object} containing ssrc and audio level information as a key-value pair.
328
-     */
329
-    getAudioLevels(speakerList?: Array<string>): any;
330
-    /**
331
-     * Obtains local tracks for given {@link MediaType}. If the <tt>mediaType</tt>
332
-     * argument is omitted the list of all local tracks will be returned.
333
-     * @param {MediaType} [mediaType]
334
-     * @return {Array<JitsiLocalTrack>}
335
-     */
336
-    getLocalTracks(mediaType?: MediaType): Array<any>;
337
-    /**
338
-     * Retrieves the local video tracks.
339
-     *
340
-     * @returns {Array<JitsiLocalTrack>} - local video tracks.
341
-     */
342
-    getLocalVideoTracks(): Array<any>;
343
-    /**
344
-     * Checks whether or not this {@link TraceablePeerConnection} instance contains any local tracks for given
345
-     * <tt>mediaType</tt>.
346
-     *
347
-     * @param {MediaType} mediaType - The media type.
348
-     * @return {boolean}
349
-     */
350
-    hasAnyTracksOfType(mediaType: MediaType): boolean;
351
-    /**
352
-     * Obtains all remote tracks currently known to this PeerConnection instance.
353
-     *
354
-     * @param {string} [endpointId] - The track owner's identifier (MUC nickname)
355
-     * @param {MediaType} [mediaType] - The remote tracks will be filtered by their media type if this argument is
356
-     * specified.
357
-     * @return {Array<JitsiRemoteTrack>}
358
-     */
359
-    getRemoteTracks(endpointId?: string, mediaType?: MediaType): Array<JitsiRemoteTrack>;
360
-    /**
361
-     * Parses the remote description and returns the sdp lines of the sources associated with a remote participant.
362
-     *
363
-     * @param {string} id Endpoint id of the remote participant.
364
-     * @returns {Array<string>} The sdp lines that have the ssrc information.
365
-     */
366
-    getRemoteSourceInfoByParticipant(id: string): Array<string>;
367
-    /**
368
-     * Returns the target bitrates configured for the local video source.
369
-     *
370
-     * @returns {Object}
371
-     */
372
-    getTargetVideoBitrates(): any;
373
-    /**
374
-     * Tries to find {@link JitsiTrack} for given SSRC number. It will search both
375
-     * local and remote tracks bound to this instance.
376
-     * @param {number} ssrc
377
-     * @return {JitsiTrack|null}
378
-     */
379
-    getTrackBySSRC(ssrc: number): any | null;
380
-    /**
381
-     * Tries to find SSRC number for given {@link JitsiTrack} id. It will search
382
-     * both local and remote tracks bound to this instance.
383
-     * @param {string} id
384
-     * @return {number|null}
385
-     */
386
-    getSsrcByTrackId(id: string): number | null;
387
-    /**
388
-     * Called when new remote MediaStream is added to the PeerConnection.
389
-     * @param {MediaStream} stream the WebRTC MediaStream for remote participant
390
-     */
391
-    _remoteStreamAdded(stream: MediaStream): void;
392
-    /**
393
-     * Called on "track added" and "stream added" PeerConnection events (because we
394
-     * handle streams on per track basis). Finds the owner and the SSRC for
395
-     * the track and passes that to ChatRoom for further processing.
396
-     * @param {MediaStream} stream the WebRTC MediaStream instance which is
397
-     * the parent of the track
398
-     * @param {MediaStreamTrack} track the WebRTC MediaStreamTrack added for remote
399
-     * participant.
400
-     * @param {RTCRtpTransceiver} transceiver the WebRTC transceiver that is created
401
-     * for the remote participant in unified plan.
402
-     */
403
-    _remoteTrackAdded(stream: MediaStream, track: MediaStreamTrack, transceiver?: RTCRtpTransceiver): void;
404
-    /**
405
-     * Initializes a new JitsiRemoteTrack instance with the data provided by
406
-     * the signaling layer and SDP.
407
-     *
408
-     * @param {string} ownerEndpointId the owner's endpoint ID (MUC nickname)
409
-     * @param {MediaStream} stream the WebRTC stream instance
410
-     * @param {MediaStreamTrack} track the WebRTC track instance
411
-     * @param {MediaType} mediaType the track's type of the media
412
-     * @param {VideoType} [videoType] the track's type of the video (if applicable)
413
-     * @param {number} ssrc the track's main SSRC number
414
-     * @param {boolean} muted the initial muted status
415
-     * @param {String} sourceName the track's source name
416
-     */
417
-    _createRemoteTrack(ownerEndpointId: string, stream: MediaStream, track: MediaStreamTrack, mediaType: MediaType, videoType?: VideoType, ssrc: number, muted: boolean, sourceName: string): void;
418
-    /**
419
-     * Handles remote stream removal.
420
-     * @param stream the WebRTC MediaStream object which is being removed from the
421
-     * PeerConnection
422
-     */
423
-    _remoteStreamRemoved(stream: any): void;
424
-    /**
425
-     * Handles remote media track removal.
426
-     *
427
-     * @param {MediaStream} stream - WebRTC MediaStream instance which is the parent of the track.
428
-     * @param {MediaStreamTrack} track - WebRTC MediaStreamTrack which has been removed from the PeerConnection.
429
-     * @returns {void}
430
-     */
431
-    _remoteTrackRemoved(stream: MediaStream, track: MediaStreamTrack): void;
432
-    /**
433
-     * Removes all JitsiRemoteTracks associated with given MUC nickname (resource part of the JID).
434
-     *
435
-     * @param {string} owner - The resource part of the MUC JID.
436
-     * @returns {JitsiRemoteTrack[]} - The array of removed tracks.
437
-     */
438
-    removeRemoteTracks(owner: string): JitsiRemoteTrack[];
439
-    /**
440
-     * Removes and disposes given <tt>JitsiRemoteTrack</tt> instance. Emits {@link RTCEvents.REMOTE_TRACK_REMOVED}.
441
-     *
442
-     * @param {JitsiRemoteTrack} toBeRemoved - The remote track to be removed.
443
-     * @returns {void}
444
-     */
445
-    _removeRemoteTrack(toBeRemoved: JitsiRemoteTrack): void;
446
-    /**
447
-     * Returns a map with keys msid/mediaType and <tt>TrackSSRCInfo</tt> values.
448
-     * @param {RTCSessionDescription} desc the local description.
449
-     * @return {Map<string,TrackSSRCInfo>}
450
-     */
451
-    _extractSSRCMap(desc: RTCSessionDescription): Map<string, any>;
452
-    /**
453
-     *
454
-     * @param {JitsiLocalTrack} localTrack
455
-     */
456
-    getLocalSSRC(localTrack: any): number;
457
-    /**
458
-     * When doing unified plan simulcast, we'll have a set of ssrcs but no ssrc-groups on Firefox. Unfortunately, Jicofo
459
-     * will complain if it sees ssrcs with matching msids but no ssrc-group, so a ssrc-group line is injected to make
460
-     * Jicofo happy.
461
-     *
462
-     * @param desc A session description object (with 'type' and 'sdp' fields)
463
-     * @return A session description object with its sdp field modified to contain an inject ssrc-group for simulcast.
464
-     */
465
-    _injectSsrcGroupForUnifiedSimulcast(desc: any): any;
466
-    _getSSRC(rtcId: any): {
467
-        /**
468
-         * an array which holds all track's SSRCs
469
-         */
470
-        ssrcs: Array<number>;
471
-        /**
472
-         * an array stores all track's SSRC
473
-         * groups
474
-         */
475
-        groups: {
476
-            /**
477
-             * the SSRC groups semantics
478
-             */
479
-            semantics: string;
480
-            /**
481
-             * group's SSRCs in order where the first
482
-             * one is group's primary SSRC, the second one is secondary (RTX) and so
483
-             * on...
484
-             */
485
-            ssrcs: Array<number>;
486
-        }[];
487
-    };
488
-    private isSharingLowFpsScreen;
489
-    /**
490
-     * Checks if screensharing is in progress.
491
-     *
492
-     * @returns {boolean}  Returns true if a desktop track has been added to the peerconnection, false otherwise.
493
-     */
494
-    _isSharingScreen(): boolean;
495
-    /**
496
-     * Munges the order of the codecs in the SDP passed based on the preference
497
-     * set through config.js settings. All instances of the specified codec are
498
-     * moved up to the top of the list when it is preferred. The specified codec
499
-     * is deleted from the list if the configuration specifies that the codec be
500
-     * disabled.
501
-     * @param {RTCSessionDescription} description that needs to be munged.
502
-     * @returns {RTCSessionDescription} the munged description.
503
-     */
504
-    _mungeCodecOrder(description: RTCSessionDescription): RTCSessionDescription;
505
-    /**
506
-     * Add {@link JitsiLocalTrack} to this TPC.
507
-     * @param {JitsiLocalTrack} track
508
-     * @param {boolean} isInitiator indicates if the endpoint is the offerer.
509
-     * @returns {Promise<void>} - resolved when done.
510
-     */
511
-    addTrack(track: any, isInitiator?: boolean): Promise<void>;
512
-    /**
513
-     * Adds local track to the RTCPeerConnection.
514
-     *
515
-     * @param {JitsiLocalTrack} track the track to be added to the pc.
516
-     * @return {Promise<boolean>} Promise that resolves to true if the underlying PeerConnection's state has changed and
517
-     * renegotiation is required, false if no renegotiation is needed or Promise is rejected when something goes wrong.
518
-     */
519
-    addTrackToPc(track: any): Promise<boolean>;
520
-    private _addStream;
521
-    /**
522
-     * Removes WebRTC media stream from the underlying PeerConection
523
-     * @param {MediaStream} mediaStream
524
-     */
525
-    _removeStream(mediaStream: MediaStream): void;
526
-    private _assertTrackBelongs;
527
-    /**
528
-     * Returns the codec that is configured on the client as the preferred video codec.
529
-     * This takes into account the current order of codecs in the local description sdp.
530
-     *
531
-     * @returns {CodecMimeType} The codec that is set as the preferred codec to receive
532
-     * video in the local SDP.
533
-     */
534
-    getConfiguredVideoCodec(): {
535
-        H264: string;
536
-        OPUS: string;
537
-        ULPFEC: string;
538
-        VP8: string;
539
-        VP9: string;
540
-    };
541
-    /**
542
-     * Enables or disables simulcast for screenshare based on the frame rate requested for desktop track capture.
543
-     *
544
-     * @param {number} maxFps framerate to be used for desktop track capture.
545
-     */
546
-    setDesktopSharingFrameRate(maxFps: number): void;
547
-    /**
548
-     * Sets the codec preference on the peerconnection. The codec preference goes into effect when
549
-     * the next renegotiation happens.
550
-     *
551
-     * @param {CodecMimeType} preferredCodec the preferred codec.
552
-     * @param {CodecMimeType} disabledCodec the codec that needs to be disabled.
553
-     * @returns {void}
554
-     */
555
-    setVideoCodecs(preferredCodec?: {
556
-        H264: string;
557
-        OPUS: string;
558
-        ULPFEC: string;
559
-        VP8: string;
560
-        VP9: string;
561
-    }, disabledCodec?: {
562
-        H264: string;
563
-        OPUS: string;
564
-        ULPFEC: string;
565
-        VP8: string;
566
-        VP9: string;
567
-    }): void;
568
-    codecPreference: {
569
-        enable: boolean;
570
-        mediaType: MediaType;
571
-        mimeType: {
572
-            H264: string;
573
-            OPUS: string;
574
-            ULPFEC: string;
575
-            VP8: string;
576
-            VP9: string;
577
-        };
578
-    };
579
-    /**
580
-     * Tells if the given WebRTC <tt>MediaStream</tt> has been added to
581
-     * the underlying WebRTC PeerConnection.
582
-     * @param {MediaStream} mediaStream
583
-     * @returns {boolean}
584
-     */
585
-    isMediaStreamInPc(mediaStream: MediaStream): boolean;
586
-    /**
587
-     * Remove local track from this TPC.
588
-     * @param {JitsiLocalTrack} localTrack the track to be removed from this TPC.
589
-     *
590
-     * FIXME It should probably remove a boolean just like {@link removeTrackFromPc}
591
-     *       The same applies to addTrack.
592
-     */
593
-    removeTrack(localTrack: any): void;
594
-    /**
595
-     * Returns the sender corresponding to the given media type.
596
-     * @param {MEDIA_TYPE} mediaType - The media type 'audio' or 'video' to be used for the search.
597
-     * @returns {RTPSender|undefined} - The found sender or undefined if no sender
598
-     * was found.
599
-     */
600
-    findSenderByKind(mediaType: any): any | undefined;
601
-    /**
602
-     * Returns the receiver corresponding to the given MediaStreamTrack.
603
-     *
604
-     * @param {MediaSreamTrack} track - The media stream track used for the search.
605
-     * @returns {RTCRtpReceiver|undefined} - The found receiver or undefined if no receiver
606
-     * was found.
607
-     */
608
-    findReceiverForTrack(track: any): RTCRtpReceiver | undefined;
609
-    /**
610
-     * Returns the sender corresponding to the given MediaStreamTrack.
611
-     *
612
-     * @param {MediaSreamTrack} track - The media stream track used for the search.
613
-     * @returns {RTCRtpSender|undefined} - The found sender or undefined if no sender
614
-     * was found.
615
-     */
616
-    findSenderForTrack(track: any): RTCRtpSender | undefined;
617
-    /**
618
-     * Processes the local description SDP and caches the mids of the mlines associated with the given tracks.
619
-     *
620
-     * @param {Array<JitsiLocalTrack>} localTracks - local tracks that are added to the peerconnection.
621
-     * @returns {void}
622
-     */
623
-    processLocalSdpForTransceiverInfo(localTracks: Array<any>): void;
624
-    /**
625
-     * Replaces <tt>oldTrack</tt> with <tt>newTrack</tt> from the peer connection.
626
-     * Either <tt>oldTrack</tt> or <tt>newTrack</tt> can be null; replacing a valid
627
-     * <tt>oldTrack</tt> with a null <tt>newTrack</tt> effectively just removes
628
-     * <tt>oldTrack</tt>
629
-     *
630
-     * @param {JitsiLocalTrack|null} oldTrack - The current track in use to be replaced on the pc.
631
-     * @param {JitsiLocalTrack|null} newTrack - The new track to be used.
632
-     *
633
-     * @returns {Promise<boolean>} - If the promise resolves with true, renegotiation will be needed.
634
-     * Otherwise no renegotiation is needed.
635
-     */
636
-    replaceTrack(oldTrack: any | null, newTrack: any | null): Promise<boolean>;
637
-    /**
638
-     * Removes local track from the RTCPeerConnection.
639
-     *
640
-     * @param {JitsiLocalTrack} localTrack the local track to be removed.
641
-     * @return {Promise<boolean>} Promise that resolves to true if the underlying PeerConnection's state has changed and
642
-     * renegotiation is required, false if no renegotiation is needed or Promise is rejected when something goes wrong.
643
-     */
644
-    removeTrackFromPc(localTrack: any): Promise<boolean>;
645
-    createDataChannel(label: any, opts: any): RTCDataChannel;
646
-    private _ensureSimulcastGroupIsLast;
647
-    private _adjustLocalMediaDirection;
648
-    private _adjustRemoteMediaDirection;
649
-    /**
650
-     * Munges the stereo flag as well as the opusMaxAverageBitrate in the SDP, based
651
-     * on values set through config.js, if present.
652
-     *
653
-     * @param {RTCSessionDescription} description that needs to be munged.
654
-     * @returns {RTCSessionDescription} the munged description.
655
-     */
656
-    _mungeOpus(description: RTCSessionDescription): RTCSessionDescription;
657
-    /**
658
-     * Munges the SDP to set all directions to inactive and drop all ssrc and ssrc-groups.
659
-     *
660
-     * @param {RTCSessionDescription} description that needs to be munged.
661
-     * @returns {RTCSessionDescription} the munged description.
662
-     */
663
-    _mungeInactive(description: RTCSessionDescription): RTCSessionDescription;
664
-    /**
665
-     * Sets up the _dtlsTransport object and initializes callbacks for it.
666
-     */
667
-    _initializeDtlsTransport(): void;
668
-    /**
669
-     * Sets the max bitrates on the video m-lines when VP9 is the selected codec.
670
-     *
671
-     * @param {RTCSessionDescription} description - The local description that needs to be munged.
672
-     * @param {boolean} isLocalSdp - Whether the max bitrate (via b=AS line in SDP) is set on local SDP.
673
-     * @returns RTCSessionDescription
674
-     */
675
-    _setVp9MaxBitrates(description: RTCSessionDescription, isLocalSdp?: boolean): RTCSessionDescription;
676
-    /**
677
-     * Configures the stream encodings depending on the video type and the bitrates configured.
678
-     *
679
-     * @param {JitsiLocalTrack} - The local track for which the sender encodings have to configured.
680
-     * @returns {Promise} promise that will be resolved when the operation is successful and rejected otherwise.
681
-     */
682
-    configureSenderVideoEncodings(localVideoTrack?: any): Promise<any>;
683
-    setLocalDescription(description: any): Promise<any>;
684
-    /**
685
-     * Enables/disables audio media transmission on this peer connection. When
686
-     * disabled the SDP audio media direction in the local SDP will be adjusted to
687
-     * 'inactive' which means that no data will be sent nor accepted, but
688
-     * the connection should be kept alive.
689
-     * @param {boolean} active <tt>true</tt> to enable audio media transmission or
690
-     * <tt>false</tt> to disable. If the value is not a boolean the call will have
691
-     * no effect.
692
-     * @return {boolean} <tt>true</tt> if the value has changed and sRD/sLD cycle
693
-     * needs to be executed in order for the changes to take effect or
694
-     * <tt>false</tt> if the given value was the same as the previous one.
695
-     * @public
696
-     */
697
-    public setAudioTransferActive(active: boolean): boolean;
698
-    setRemoteDescription(description: any): Promise<any>;
699
-    /**
700
-     * Changes the resolution of the video stream that is sent to the peer based on the resolution requested by the peer
701
-     * and user preference, sets the degradation preference on the sender based on the video type, configures the maximum
702
-     * bitrates on the send stream.
703
-     *
704
-     * @param {number} frameHeight - The max frame height to be imposed on the outgoing video stream.
705
-     * @param {JitsiLocalTrack} - The local track for which the sender constraints have to be applied.
706
-     * @returns {Promise} promise that will be resolved when the operation is successful and rejected otherwise.
707
-     */
708
-    setSenderVideoConstraints(frameHeight: number, localVideoTrack: any): Promise<any>;
709
-    encodingsEnabledState: boolean[];
710
-    /**
711
-     * Enables/disables video media transmission on this peer connection. When
712
-     * disabled the SDP video media direction in the local SDP will be adjusted to
713
-     * 'inactive' which means that no data will be sent nor accepted, but
714
-     * the connection should be kept alive.
715
-     * @param {boolean} active <tt>true</tt> to enable video media transmission or
716
-     * <tt>false</tt> to disable. If the value is not a boolean the call will have
717
-     * no effect.
718
-     * @return {boolean} <tt>true</tt> if the value has changed and sRD/sLD cycle
719
-     * needs to be executed in order for the changes to take effect or
720
-     * <tt>false</tt> if the given value was the same as the previous one.
721
-     * @public
722
-     */
723
-    public setVideoTransferActive(active: boolean): boolean;
724
-    /**
725
-     * Sends DTMF tones if possible.
726
-     *
727
-     * @param {string} tones - The DTMF tones string as defined by {@code RTCDTMFSender.insertDTMF}, 'tones' argument.
728
-     * @param {number} duration - The amount of time in milliseconds that each DTMF should last. It's 200ms by default.
729
-     * @param {number} interToneGap - The length of time in miliseconds to wait between tones. It's 200ms by default.
730
-     *
731
-     * @returns {void}
732
-     */
733
-    sendTones(tones: string, duration?: number, interToneGap?: number): void;
734
-    private _onToneChange;
735
-    /**
736
-     * Makes the underlying TraceablePeerConnection generate new SSRC for
737
-     * the recvonly video stream.
738
-     */
739
-    generateRecvonlySsrc(): void;
740
-    /**
741
-     * Makes the underlying TraceablePeerConnection forget the current primary video
742
-     * SSRC.
743
-     */
744
-    clearRecvonlySsrc(): void;
745
-    /**
746
-     * Closes underlying WebRTC PeerConnection instance and removes all remote
747
-     * tracks by emitting {@link RTCEvents.REMOTE_TRACK_REMOVED} for each one of
748
-     * them.
749
-     */
750
-    close(): void;
751
-    createAnswer(constraints: any): Promise<any>;
752
-    createOffer(constraints: any): Promise<any>;
753
-    _createOfferOrAnswer(isOffer: any, constraints: any): Promise<any>;
754
-    /**
755
-     * Extract primary SSRC from given {@link TrackSSRCInfo} object.
756
-     * @param {TrackSSRCInfo} ssrcObj
757
-     * @return {number|null} the primary SSRC or <tt>null</tt>
758
-     */
759
-    _extractPrimarySSRC(ssrcObj: any): number | null;
760
-    private _processLocalSSRCsMap;
761
-    addIceCandidate(candidate: any): Promise<void>;
762
-    /**
763
-     * Returns the number of simulcast streams that are currently enabled on the peerconnection.
764
-     *
765
-     * @returns {number} The number of simulcast streams currently enabled or 1 when simulcast is disabled.
766
-     */
767
-    getActiveSimulcastStreams(): number;
768
-    /**
769
-     * Obtains call-related stats from the peer connection.
770
-     *
771
-     * @returns {Promise<Object>} Promise which resolves with data providing statistics about
772
-     * the peerconnection.
773
-     */
774
-    getStats(): Promise<any>;
775
-    /**
776
-     * Generates and stores new SSRC info object for given local track.
777
-     * The method should be called only for a video track being added to this TPC
778
-     * in the muted state (given that the current browser uses this strategy).
779
-     * @param {JitsiLocalTrack} track
780
-     * @return {TPCSSRCInfo}
781
-     */
782
-    generateNewStreamSSRCInfo(track: any): {
783
-        /**
784
-         * an array which holds all track's SSRCs
785
-         */
786
-        ssrcs: Array<number>;
787
-        /**
788
-         * an array stores all track's SSRC
789
-         * groups
790
-         */
791
-        groups: {
792
-            /**
793
-             * the SSRC groups semantics
794
-             */
795
-            semantics: string;
796
-            /**
797
-             * group's SSRCs in order where the first
798
-             * one is group's primary SSRC, the second one is secondary (RTX) and so
799
-             * on...
800
-             */
801
-            ssrcs: Array<number>;
802
-        }[];
803
-    };
804
-    /**
805
-     * Returns if the peer connection uses Unified plan implementation.
806
-     *
807
-     * @returns {boolean} True if the pc uses Unified plan, false otherwise.
808
-     */
809
-    usesUnifiedPlan(): boolean;
810
-    /**
811
-     * Creates a text representation of this <tt>TraceablePeerConnection</tt>
812
-     * instance.
813
-     * @return {string}
814
-     */
815
-    toString(): string;
816
-}
817
-import RTC from "./RTC";
818
-import { MediaType } from "../../service/RTC/MediaType";
819
-import JitsiRemoteTrack from "./JitsiRemoteTrack";
820
-import { TPCUtils } from "./TPCUtils";
821
-import SdpConsistency from "../sdp/SdpConsistency";
822
-import LocalSdpMunger from "../sdp/LocalSdpMunger";
823
-import RtxModifier from "../sdp/RtxModifier";
824
-import { VideoType } from "../../service/RTC/VideoType";

+ 0
- 186
types/auto/modules/browser/BrowserCapabilities.d.ts View File

@@ -1,186 +0,0 @@
1
-/**
2
- * Implements browser capabilities for lib-jitsi-meet.
3
- */
4
-export default class BrowserCapabilities {
5
-    /**
6
-     * Tells whether or not the <tt>MediaStream/tt> is removed from the <tt>PeerConnection</tt> and disposed on video
7
-     * mute (in order to turn off the camera device). This is needed on Firefox because of the following bug
8
-     * https://bugzilla.mozilla.org/show_bug.cgi?id=1735951
9
-     *
10
-     * @return {boolean} <tt>true</tt> if the current browser supports this strategy or <tt>false</tt> otherwise.
11
-     */
12
-    doesVideoMuteByStreamRemove(): boolean;
13
-    /**
14
-     * Checks if the current browser is Chromium based, i.e., it's either Chrome / Chromium or uses it as its engine,
15
-     * but doesn't identify as Chrome.
16
-     *
17
-     * This includes the following browsers:
18
-     * - Chrome and Chromium.
19
-     * - Other browsers which use the Chrome engine, but are detected as Chrome, such as Brave and Vivaldi.
20
-     * - Browsers which are NOT Chrome but use it as their engine, and have custom detection code: Opera, Electron
21
-     *   and NW.JS.
22
-     * This excludes
23
-     * - Chrome on iOS since it uses WKWebView.
24
-     */
25
-    isChromiumBased(): boolean;
26
-    /**
27
-     * Checks if the current platform is iOS.
28
-     *
29
-     * @returns {boolean}
30
-     */
31
-    isIosBrowser(): boolean;
32
-    /**
33
-     * Checks if the current browser is WebKit based. It's either
34
-     * Safari or uses WebKit as its engine.
35
-     *
36
-     * This includes Chrome and Firefox on iOS
37
-     *
38
-     * @returns {boolean}
39
-     */
40
-    isWebKitBased(): boolean;
41
-    /**
42
-     * Checks whether current running context is a Trusted Web Application.
43
-     *
44
-     * @returns {boolean} Whether the current context is a TWA.
45
-     */
46
-    isTwa(): boolean;
47
-    /**
48
-     * Checks if the current browser is supported.
49
-     *
50
-     * @returns {boolean} true if the browser is supported, false otherwise.
51
-     */
52
-    isSupported(): boolean;
53
-    /**
54
-     * Returns whether the browser is supported for Android
55
-     * @returns {boolean} true if the browser is supported for Android devices
56
-     */
57
-    isSupportedAndroidBrowser(): boolean;
58
-    /**
59
-     * Returns whether the browser is supported for iOS
60
-     * @returns {boolean} true if the browser is supported for iOS devices
61
-     */
62
-    isSupportedIOSBrowser(): boolean;
63
-    /**
64
-     * Returns whether or not the current environment needs a user interaction
65
-     * with the page before any unmute can occur.
66
-     *
67
-     * @returns {boolean}
68
-     */
69
-    isUserInteractionRequiredForUnmute(): boolean;
70
-    /**
71
-     * Checks if the current browser triggers 'onmute'/'onunmute' events when
72
-     * user's connection is interrupted and the video stops playback.
73
-     * @returns {*|boolean} 'true' if the event is supported or 'false'
74
-     * otherwise.
75
-     */
76
-    supportsVideoMuteOnConnInterrupted(): any | boolean;
77
-    /**
78
-     * Checks if the current browser reports upload and download bandwidth
79
-     * statistics.
80
-     * @return {boolean}
81
-     */
82
-    supportsBandwidthStatistics(): boolean;
83
-    /**
84
-     * Checks if the current browser supports setting codec preferences on the transceiver.
85
-     * @returns {boolean}
86
-     */
87
-    supportsCodecPreferences(): boolean;
88
-    /**
89
-     * Checks if the current browser support the device change event.
90
-     * @return {boolean}
91
-     */
92
-    supportsDeviceChangeEvent(): boolean;
93
-    /**
94
-     * Checks if the current browser supports the Long Tasks API that lets us observe
95
-     * performance measurement events and be notified of tasks that take longer than
96
-     * 50ms to execute on the main thread.
97
-     */
98
-    supportsPerformanceObserver(): boolean;
99
-    /**
100
-     * Checks if the current browser supports audio level stats on the receivers.
101
-     */
102
-    supportsReceiverStats(): boolean;
103
-    /**
104
-     * Checks if the current browser reports round trip time statistics for
105
-     * the ICE candidate pair.
106
-     * @return {boolean}
107
-     */
108
-    supportsRTTStatistics(): boolean;
109
-    /**
110
-     * Returns true if VP9 is supported by the client on the browser. VP9 is currently disabled on Firefox and Safari
111
-     * because of issues with rendering. Please check https://bugzilla.mozilla.org/show_bug.cgi?id=1492500,
112
-     * https://bugs.webkit.org/show_bug.cgi?id=231071 and https://bugs.webkit.org/show_bug.cgi?id=231074 for details.
113
-     */
114
-    supportsVP9(): any;
115
-    /**
116
-     * Checks if the browser uses SDP munging for turning on simulcast.
117
-     *
118
-     * @returns {boolean}
119
-     */
120
-    usesSdpMungingForSimulcast(): boolean;
121
-    /**
122
-     * Checks if the browser uses RIDs/MIDs for siganling the simulcast streams
123
-     * to the bridge instead of the ssrcs.
124
-     */
125
-    usesRidsForSimulcast(): boolean;
126
-    /**
127
-     * Checks if the browser supports getDisplayMedia.
128
-     * @returns {boolean} {@code true} if the browser supports getDisplayMedia.
129
-     */
130
-    supportsGetDisplayMedia(): boolean;
131
-    /**
132
-     * Checks if the browser supports WebRTC Encoded Transform, an alternative
133
-     * to insertable streams.
134
-     *
135
-     * NOTE: At the time of this writing the only browser supporting this is
136
-     * Safari / WebKit, behind a flag.
137
-     *
138
-     * @returns {boolean} {@code true} if the browser supports it.
139
-     */
140
-    supportsEncodedTransform(): boolean;
141
-    /**
142
-     * Checks if the browser supports insertable streams, needed for E2EE.
143
-     * @returns {boolean} {@code true} if the browser supports insertable streams.
144
-     */
145
-    supportsInsertableStreams(): boolean;
146
-    /**
147
-     * Whether the browser supports the RED format for audio.
148
-     */
149
-    supportsAudioRed(): boolean;
150
-    /**
151
-     * Checks if the browser supports unified plan.
152
-     *
153
-     * @returns {boolean}
154
-     */
155
-    supportsUnifiedPlan(): boolean;
156
-    /**
157
-     * Checks if the browser supports voice activity detection via the @type {VADAudioAnalyser} service.
158
-     *
159
-     * @returns {boolean}
160
-     */
161
-    supportsVADDetection(): boolean;
162
-    /**
163
-     * Check if the browser supports the RTP RTX feature (and it is usable).
164
-     *
165
-     * @returns {boolean}
166
-     */
167
-    supportsRTX(): boolean;
168
-    /**
169
-     * Returns the version of a Chromium based browser.
170
-     *
171
-     * @returns {Number}
172
-     */
173
-    _getChromiumBasedVersion(): number;
174
-    /**
175
-     * Returns the version of a Safari browser.
176
-     *
177
-     * @returns {Number}
178
-     */
179
-    _getSafariVersion(): number;
180
-    /**
181
-     * Returns the version of an ios browser.
182
-     *
183
-     * @returns {Number}
184
-     */
185
-    _getIOSVersion(): number;
186
-}

+ 0
- 3
types/auto/modules/browser/index.d.ts View File

@@ -1,3 +0,0 @@
1
-declare var _default: BrowserCapabilities;
2
-export default _default;
3
-import BrowserCapabilities from "./BrowserCapabilities";

+ 0
- 95
types/auto/modules/connectivity/ConnectionQuality.d.ts View File

@@ -1,95 +0,0 @@
1
-/**
2
- * A class which monitors the local statistics coming from the RTC modules, and
3
- * calculates a "connection quality" value, in percent, for the media
4
- * connection. A value of 100% indicates a very good network connection, and a
5
- * value of 0% indicates a poor connection.
6
- */
7
-export default class ConnectionQuality {
8
-    /**
9
-     *
10
-     * @param conference
11
-     * @param eventEmitter
12
-     * @param options
13
-     */
14
-    constructor(conference: any, eventEmitter: any, options: any);
15
-    eventEmitter: any;
16
-    /**
17
-     * The owning JitsiConference.
18
-     */
19
-    _conference: any;
20
-    /**
21
-     * Holds statistics about the local connection quality.
22
-     */
23
-    _localStats: {
24
-        connectionQuality: number;
25
-        jvbRTT: any;
26
-    };
27
-    /**
28
-     * The time this._localStats.connectionQuality was last updated.
29
-     */
30
-    _lastConnectionQualityUpdate: number;
31
-    /**
32
-     * Conference options.
33
-     */
34
-    _options: any;
35
-    /**
36
-     * Maps a participant ID to an object holding connection quality
37
-     * statistics received from this participant.
38
-     */
39
-    _remoteStats: {};
40
-    /**
41
-     * The time that the ICE state last changed to CONNECTED. We use this
42
-     * to calculate how much time we as a sender have had to ramp-up.
43
-     */
44
-    _timeIceConnected: number;
45
-    /**
46
-     * The time that local video was unmuted. We use this to calculate how
47
-     * much time we as a sender have had to ramp-up.
48
-     */
49
-    _timeVideoUnmuted: number;
50
-    /**
51
-     * Sets _timeVideoUnmuted if it was previously unset. If it was already set,
52
-     * doesn't change it.
53
-     */
54
-    _maybeUpdateUnmuteTime(): void;
55
-    /**
56
-     * Calculates a new "connection quality" value.
57
-     * @param videoType {VideoType} the type of the video source (camera or a screen capture).
58
-     * @param isMuted {boolean} whether the local video is muted.
59
-     * @param resolutionName {Resolution} the input resolution used by the camera.
60
-     * @returns {*} the newly calculated connection quality.
61
-     */
62
-    _calculateConnectionQuality(videoType: VideoType, isMuted: boolean, resolutionName: any): any;
63
-    /**
64
-     * Updates the localConnectionQuality value
65
-     * @param values {number} the new value. Should be in [0, 100].
66
-     */
67
-    _updateLocalConnectionQuality(value: any): void;
68
-    /**
69
-     * Broadcasts the local statistics to all other participants in the
70
-     * conference.
71
-     */
72
-    _broadcastLocalStats(): void;
73
-    /**
74
-     * Updates the local statistics
75
-     * @param {TraceablePeerConnection} tpc the peerconnection which emitted
76
-     * the stats
77
-     * @param data new statistics
78
-     */
79
-    _updateLocalStats(tpc: any, data: any): void;
80
-    /**
81
-     * Updates remote statistics
82
-     * @param id the id of the remote participant
83
-     * @param data the statistics received
84
-     */
85
-    _updateRemoteStats(id: any, data: any): void;
86
-    /**
87
-     * Returns the local statistics.
88
-     * Exported only for use in jitsi-meet-torture.
89
-     */
90
-    getStats(): {
91
-        connectionQuality: number;
92
-        jvbRTT: any;
93
-    };
94
-}
95
-import { VideoType } from "../../service/RTC/VideoType";

+ 0
- 36
types/auto/modules/connectivity/IceFailedHandling.d.ts View File

@@ -1,36 +0,0 @@
1
-/**
2
- * This class deals with shenanigans around JVB media session's ICE failed status handling.
3
- *
4
- * If ICE restarts are NOT explicitly enabled by the {@code enableIceRestart} config option, then the conference will
5
- * delay emitting the {@JitsiConferenceErrors.ICE_FAILED} event by 15 seconds. If the network info module reports
6
- * the internet offline status then the time will start counting after the internet comes back online.
7
- *
8
- * If ICE restart are enabled, then a delayed ICE failed notification to Jicofo will be sent, only if the ICE connection
9
- * does not recover soon after or before the XMPP connection is restored (if it was ever broken). If ICE fails while
10
- * the XMPP connection is not broken then the notifications will be sent after 2 seconds delay.
11
- */
12
-export default class IceFailedHandling {
13
-    /**
14
-     * Creates new {@code DelayedIceFailed} task.
15
-     * @param {JitsiConference} conference
16
-     */
17
-    constructor(conference: any);
18
-    _conference: any;
19
-    /**
20
-     * After making sure there's no way for the ICE connection to recover this method either sends ICE failed
21
-     * notification to Jicofo or emits the ice failed conference event.
22
-     * @private
23
-     * @returns {void}
24
-     */
25
-    private _actOnIceFailed;
26
-    /**
27
-     * Starts the task.
28
-     */
29
-    start(): void;
30
-    _iceFailedTimeout: any;
31
-    /**
32
-     * Cancels the task.
33
-     */
34
-    cancel(): void;
35
-    _canceled: boolean;
36
-}

+ 0
- 33
types/auto/modules/connectivity/NetworkInfo.d.ts View File

@@ -1,33 +0,0 @@
1
-export const NETWORK_INFO_EVENT: "NETWORK_INFO_CHANGED";
2
-/**
3
- * Module provides information about the current status of the internet
4
- * connection. Lib-jitsi-meet doesn't have any logic for detecting internet
5
- * online/offline, but rather it relies on the information supplied by the app
6
- * that uses it. By default the online state is assumed and the lib acts as if
7
- * it was connected. See {@link JitsiMeetJS.setNetworkInfo}.
8
- */
9
-export class NetworkInfo extends Listenable {
10
-    /**
11
-     * Creates new {@link NetworkInfo} instance.
12
-     */
13
-    constructor();
14
-    _current: {
15
-        isOnline: boolean;
16
-    };
17
-    /**
18
-     * Updates the network info state.
19
-     * @param {boolean} isOnline - {@code true} if internet is online or {@code false} otherwise.
20
-     */
21
-    updateNetworkInfo({ isOnline }: boolean): void;
22
-    /**
23
-     * Returns the online/offline internet status. By default the value is {@code true} and changes only if
24
-     * the lib's user wires the state through {@link JitsiMeetJS.setNetworkInfo} like the jitsi-meet does. Because of
25
-     * that any logic should still assume that the internet may be offline and should handle the failure gracefully.
26
-     * It's only a good hint in the other way around: to pause internet operations until it comes back online.
27
-     * @returns {boolean}
28
-     */
29
-    isOnline(): boolean;
30
-}
31
-export default networkInfo;
32
-import Listenable from "../util/Listenable";
33
-declare const networkInfo: NetworkInfo;

+ 0
- 350
types/auto/modules/connectivity/ParticipantConnectionStatus.d.ts View File

@@ -1,350 +0,0 @@
1
-/**
2
- * Participant connection statuses.
3
- *
4
- * @type {{
5
- *      ACTIVE: string,
6
- *      INACTIVE: string,
7
- *      INTERRUPTED: string,
8
- *      RESTORING: string
9
- * }}
10
- */
11
-export const ParticipantConnectionStatus: {
12
-    ACTIVE: string;
13
-    INACTIVE: string;
14
-    INTERRUPTED: string;
15
-    RESTORING: string;
16
-};
17
-/**
18
- * Class is responsible for emitting
19
- * JitsiConferenceEvents.PARTICIPANT_CONN_STATUS_CHANGED events.
20
- */
21
-export default class ParticipantConnectionStatusHandler {
22
-    /**
23
-     * Calculates the new {@link ParticipantConnectionStatus} based on
24
-     * the values given for some specific remote user. It is assumed that
25
-     * the conference is currently in the JVB mode (in contrary to the P2P mode)
26
-     * @param {boolean} isConnectionActiveByJvb true if the JVB did not get any
27
-     * data from the user for the last 15 seconds.
28
-     * @param {boolean} isInLastN indicates whether the user is in the last N
29
-     * set. When set to false it means that JVB is not sending any video for
30
-     * the user.
31
-     * @param {boolean} isRestoringTimedout if true it means that the user has
32
-     * been outside of last N too long to be considered
33
-     * {@link ParticipantConnectionStatus.RESTORING}.
34
-     * @param {boolean} isVideoMuted true if the user is video muted and we
35
-     * should not expect to receive any video.
36
-     * @param {boolean} isVideoTrackFrozen if the current browser support video
37
-     * frozen detection then it will be set to true when the video track is
38
-     * frozen. If the current browser does not support frozen detection the it's
39
-     * always false.
40
-     * @return {ParticipantConnectionStatus} the new connection status for
41
-     * the user for whom the values above were provided.
42
-     * @private
43
-     */
44
-    private static _getNewStateForJvbMode;
45
-    /**
46
-     * In P2P mode we don't care about any values coming from the JVB and
47
-     * the connection status can be only active or interrupted.
48
-     * @param {boolean} isVideoMuted the user if video muted
49
-     * @param {boolean} isVideoTrackFrozen true if the video track for
50
-     * the remote user is currently frozen. If the current browser does not
51
-     * support video frozen detection then it's always false.
52
-     * @return {ParticipantConnectionStatus}
53
-     * @private
54
-     */
55
-    private static _getNewStateForP2PMode;
56
-    /**
57
-     * Creates new instance of <tt>ParticipantConnectionStatus</tt>.
58
-     *
59
-     * @constructor
60
-     * @param {RTC} rtc the RTC service instance
61
-     * @param {JitsiConference} conference parent conference instance
62
-     * @param {Object} options
63
-     * @param {number} [options.p2pRtcMuteTimeout=2500] custom value for
64
-     * {@link ParticipantConnectionStatus.p2pRtcMuteTimeout}.
65
-     * @param {number} [options.rtcMuteTimeout=10000] custom value for
66
-     * {@link ParticipantConnectionStatus.rtcMuteTimeout}.
67
-     * @param {number} [options.outOfLastNTimeout=500] custom value for
68
-     * {@link ParticipantConnectionStatus.outOfLastNTimeout}.
69
-     */
70
-    constructor(rtc: any, conference: any, options: {
71
-        p2pRtcMuteTimeout?: number;
72
-        rtcMuteTimeout?: number;
73
-        outOfLastNTimeout?: number;
74
-    });
75
-    rtc: any;
76
-    conference: any;
77
-    /**
78
-     * A map of the "endpoint ID"(which corresponds to the resource part
79
-     * of MUC JID(nickname)) to the timeout callback IDs scheduled using
80
-     * window.setTimeout.
81
-     * @type {Object.<string, number>}
82
-     */
83
-    trackTimers: {
84
-        [x: string]: number;
85
-    };
86
-    /**
87
-     * This map holds the endpoint connection status received from the JVB
88
-     * (as it might be different than the one stored in JitsiParticipant).
89
-     * Required for getting back in sync when remote video track is removed.
90
-     * @type {Object.<string, boolean>}
91
-     */
92
-    connStatusFromJvb: {
93
-        [x: string]: boolean;
94
-    };
95
-    /**
96
-     * If video track frozen detection through RTC mute event is supported,
97
-     * we wait some time until video track is considered frozen. But because
98
-     * when the user falls out of last N it is expected for the video to
99
-     * freeze this timeout must be significantly reduced in "out of last N"
100
-     * case.
101
-     *
102
-     * Basically this value is used instead of {@link rtcMuteTimeout} when
103
-     * user is not in last N.
104
-     * @type {number}
105
-     */
106
-    outOfLastNTimeout: number;
107
-    /**
108
-     * How long we are going to wait for the corresponding signaling mute event after the RTC video track muted
109
-     * event is fired on the Media stream, before the connection interrupted is fired. The default value is
110
-     * {@link DEFAULT_P2P_RTC_MUTE_TIMEOUT}.
111
-     *
112
-     * @type {number} amount of time in milliseconds.
113
-     */
114
-    p2pRtcMuteTimeout: number;
115
-    /**
116
-     * How long we're going to wait after the RTC video track muted event
117
-     * for the corresponding signalling mute event, before the connection
118
-     * interrupted is fired. The default value is
119
-     * {@link DEFAULT_RTC_MUTE_TIMEOUT}.
120
-     *
121
-     * @type {number} amount of time in milliseconds
122
-     */
123
-    rtcMuteTimeout: number;
124
-    /**
125
-     * This map holds a timestamp indicating  when participant's video track
126
-     * was RTC muted (it is assumed that each participant can have only 1
127
-     * video track at a time). The purpose of storing the timestamp is to
128
-     * avoid the transition to disconnected status in case of legitimate
129
-     * video mute operation where the signalling video muted event can
130
-     * arrive shortly after RTC muted event.
131
-     *
132
-     * The key is participant's ID which is the same as endpoint id in
133
-     * the Colibri conference allocated on the JVB.
134
-     *
135
-     * The value is a timestamp measured in milliseconds obtained with
136
-     * <tt>Date.now()</tt>.
137
-     *
138
-     * FIXME merge this logic with NO_DATA_FROM_SOURCE event
139
-     *       implemented in JitsiLocalTrack by extending the event to
140
-     *       the remote track and allowing to set different timeout for
141
-     *       local and remote tracks.
142
-     *
143
-     * @type {Object.<string, number>}
144
-     */
145
-    rtcMutedTimestamp: {
146
-        [x: string]: number;
147
-    };
148
-    /**
149
-     * This map holds the timestamps indicating when participant's video
150
-     * entered lastN set. Participants entering lastN will have connection
151
-     * status restoring and when we start receiving video will become
152
-     * active, but if video is not received for certain time
153
-     * {@link DEFAULT_RESTORING_TIMEOUT} that participant connection status
154
-     * will become interrupted.
155
-     *
156
-     * @type {Map<string, number>}
157
-     */
158
-    enteredLastNTimestamp: Map<string, number>;
159
-    /**
160
-     * A map of the "endpoint ID"(which corresponds to the resource part
161
-     * of MUC JID(nickname)) to the restoring timeout callback IDs
162
-     * scheduled using window.setTimeout.
163
-     *
164
-     * @type {Map<string, number>}
165
-     */
166
-    restoringTimers: Map<string, number>;
167
-    /**
168
-     * A map that holds the current connection status (along with all the internal events that happen
169
-     * while in that state).
170
-     *
171
-     * The goal is to send this information to the analytics backend for post-mortem analysis.
172
-     */
173
-    connectionStatusMap: Map<any, any>;
174
-    /**
175
-     * Gets the video frozen timeout for given user.
176
-     * @param {string} id endpoint/participant ID
177
-     * @return {number} how long are we going to wait since RTC video muted
178
-     * even, before a video track is considered frozen.
179
-     * @private
180
-     */
181
-    private _getVideoFrozenTimeout;
182
-    /**
183
-     * Initializes <tt>ParticipantConnectionStatus</tt> and bind required event
184
-     * listeners.
185
-     */
186
-    init(): void;
187
-    _onEndpointConnStatusChanged: any;
188
-    _onP2PStatus: any;
189
-    _onUserLeft: any;
190
-    _onTrackRtcMuted: any;
191
-    _onTrackRtcUnmuted: any;
192
-    _onRemoteTrackAdded: any;
193
-    _onRemoteTrackRemoved: any;
194
-    _onSignallingMuteChanged: any;
195
-    _onTrackVideoTypeChanged: any;
196
-    /**
197
-     * On change in Last N set check all leaving and entering participants to
198
-     * change their corresponding statuses.
199
-     *
200
-     * @param {Array<string>} leavingLastN - The array of ids leaving lastN.
201
-     * @param {Array<string>} enteringLastN - The array of ids entering lastN.
202
-     * @private
203
-     */
204
-    private _onLastNChanged;
205
-    _onLastNValueChanged: any;
206
-    /**
207
-     * Removes all event listeners and disposes of all resources held by this
208
-     * instance.
209
-     */
210
-    dispose(): void;
211
-    /**
212
-     * Handles RTCEvents.ENDPOINT_CONN_STATUS_CHANGED triggered when we receive
213
-     * notification over the data channel from the bridge about endpoint's
214
-     * connection status update.
215
-     * @param {string} endpointId - The endpoint ID(MUC nickname/resource JID).
216
-     * @param {boolean} isActive - true if the connection is OK or false otherwise.
217
-     */
218
-    onEndpointConnStatusChanged(endpointId: string, isActive: boolean): void;
219
-    /**
220
-     * Changes connection status.
221
-     * @param {JitsiParticipant} participant
222
-     * @param newStatus
223
-     */
224
-    _changeConnectionStatus(participant: any, newStatus: any): void;
225
-    /**
226
-     * Reset the postponed "connection interrupted" event which was previously
227
-     * scheduled as a timeout on RTC 'onmute' event.
228
-     *
229
-     * @param {string} participantId - The participant for which the "connection
230
-     * interrupted" timeout was scheduled.
231
-     */
232
-    clearTimeout(participantId: string): void;
233
-    /**
234
-     * Clears the timestamp of the RTC muted event for participant's video track
235
-     * @param {string} participantId the id of the conference participant which
236
-     * is the same as the Colibri endpoint ID of the video channel allocated for
237
-     * the user on the videobridge.
238
-     */
239
-    clearRtcMutedTimestamp(participantId: string): void;
240
-    /**
241
-     * Bind signalling mute event listeners for video {JitsiRemoteTrack} when
242
-     * a new one is added to the conference.
243
-     *
244
-     * @param {JitsiTrack} remoteTrack - The {JitsiTrack} which is being added to
245
-     * the conference.
246
-     */
247
-    onRemoteTrackAdded(remoteTrack: any): void;
248
-    /**
249
-     * Removes all event listeners bound to the remote video track and clears
250
-     * any related timeouts.
251
-     *
252
-     * @param {JitsiRemoteTrack} remoteTrack - The remote track which is being
253
-     * removed from the conference.
254
-     */
255
-    onRemoteTrackRemoved(remoteTrack: any): void;
256
-    /**
257
-     * Checks if given participant's video is considered frozen.
258
-     * @param {JitsiParticipant} participant - The participant.
259
-     * @return {boolean} <tt>true</tt> if the video has frozen for given
260
-     * participant or <tt>false</tt> when it's either not considered frozen
261
-     * (yet) or if freeze detection is not supported by the current browser.
262
-     *
263
-     * FIXME merge this logic with NO_DATA_FROM_SOURCE event
264
-     *       implemented in JitsiLocalTrack by extending the event to
265
-     *       the remote track and allowing to set different timeout for
266
-     *       local and remote tracks.
267
-     *
268
-     */
269
-    isVideoTrackFrozen(participant: any): boolean;
270
-    /**
271
-     * Goes over every participant and updates connectivity status.
272
-     * Should be called when a parameter which affects all of the participants
273
-     * is changed (P2P for example).
274
-     */
275
-    refreshConnectionStatusForAll(): void;
276
-    /**
277
-     * Figures out (and updates) the current connectivity status for
278
-     * the participant identified by the given id.
279
-     *
280
-     * @param {string} id - The participant's id (MUC nickname or Colibri endpoint ID).
281
-     */
282
-    figureOutConnectionStatus(id: string): void;
283
-    /**
284
-     * Computes the duration of the current connection status for the participant with the specified id (i.e. 15 seconds
285
-     * in the INTERRUPTED state) and sends a participant connection status event.
286
-     * @param {string} id - The jid of the participant.
287
-     * @param {Number} nowMs - The current time (in millis).
288
-     * @returns {void}
289
-     */
290
-    maybeSendParticipantConnectionStatusEvent(id: string, nowMs: number): void;
291
-    /**
292
-     * Clears the restoring timer for participant's video track and the
293
-     * timestamp for entering lastN.
294
-     *
295
-     * @param {string} participantId - The id of the conference participant which
296
-     * is the same as the Colibri endpoint ID of the video channel allocated for
297
-     * the user on the videobridge.
298
-     */
299
-    _clearRestoringTimer(participantId: string): void;
300
-    /**
301
-     * Checks whether a track had stayed enough in restoring state, compares
302
-     * current time and the time the track entered in lastN. If it hasn't
303
-     * timedout and there is no timer added, add new timer in order to give it
304
-     * more time to become active or mark it as interrupted on next check.
305
-     *
306
-     * @param {string} participantId - The id of the conference participant which
307
-     * is the same as the Colibri endpoint ID of the video channel allocated for
308
-     * the user on the videobridge.
309
-     * @returns {boolean} <tt>true</tt> if the track was in restoring state
310
-     * more than the timeout ({@link DEFAULT_RESTORING_TIMEOUT}.) in order to
311
-     * set its status to interrupted.
312
-     * @private
313
-     */
314
-    private _isRestoringTimedout;
315
-    /**
316
-     * Sends a last/final participant connection status event for the participant that left the conference.
317
-     * @param {string} id - The id of the participant that left the conference.
318
-     * @returns {void}
319
-     */
320
-    onUserLeft(id: string): void;
321
-    /**
322
-     * Handles RTC 'onmute' event for the video track.
323
-     *
324
-     * @param {JitsiRemoteTrack} track - The video track for which 'onmute' event
325
-     * will be processed.
326
-     */
327
-    onTrackRtcMuted(track: any): void;
328
-    /**
329
-     * Handles RTC 'onunmute' event for the video track.
330
-     *
331
-     * @param {JitsiRemoteTrack} track - The video track for which 'onunmute'
332
-     * event will be processed.
333
-     */
334
-    onTrackRtcUnmuted(track: any): void;
335
-    /**
336
-     * Here the signalling "mute"/"unmute" events are processed.
337
-     *
338
-     * @param {JitsiRemoteTrack} track - The remote video track for which
339
-     * the signalling mute/unmute event will be processed.
340
-     */
341
-    onSignallingMuteChanged(track: any): void;
342
-    /**
343
-     * Sends a participant connection status event as a result of the video type
344
-     * changing.
345
-     * @param {JitsiRemoteTrack} track - The track.
346
-     * @param {VideoType} type - The video type.
347
-     * @returns {void}
348
-     */
349
-    onTrackVideoTypeChanged(track: any, type: any): void;
350
-}

+ 0
- 239
types/auto/modules/connectivity/TrackStreamingStatus.d.ts View File

@@ -1,239 +0,0 @@
1
-import JitsiConference from '../../types/hand-crafted/JitsiConference';
2
-import JitsiRemoteTrack from '../../types/hand-crafted/modules/RTC/JitsiRemoteTrack';
3
-import RTC from '../../types/hand-crafted/modules/RTC/RTC';
4
-import { VideoType } from '../../types/hand-crafted/service/RTC/VideoType';
5
-/** Track streaming statuses. */
6
-export declare enum TrackStreamingStatus {
7
-    /**
8
-     * Status indicating that streaming is currently active.
9
-     */
10
-    ACTIVE = "active",
11
-    /**
12
-     * Status indicating that streaming is currently inactive.
13
-     * Inactive means the streaming was stopped on purpose from the bridge, like exiting forwarded sources or
14
-     * adaptivity decided to drop video because of not enough bandwidth.
15
-     */
16
-    INACTIVE = "inactive",
17
-    /**
18
-     * Status indicating that streaming is currently interrupted.
19
-     */
20
-    INTERRUPTED = "interrupted",
21
-    /**
22
-     * Status indicating that streaming is currently restoring.
23
-     */
24
-    RESTORING = "restoring"
25
-}
26
-declare type StreamingStatusMap = {
27
-    videoType?: VideoType;
28
-    startedMs?: number;
29
-    p2p?: boolean;
30
-    streamingStatus?: string;
31
-    value?: number;
32
-};
33
-/**
34
- * Class is responsible for emitting JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED events.
35
- */
36
-export declare class TrackStreamingStatusImpl {
37
-    rtc: RTC;
38
-    conference: JitsiConference;
39
-    track: JitsiRemoteTrack;
40
-    /**  This holds the timeout callback ID scheduled using window.setTimeout. */
41
-    trackTimer: number | null;
42
-    /**
43
-     * If video track frozen detection through RTC mute event is supported, we wait some time until video track is
44
-     * considered frozen. But because when the track falls out of forwarded sources it is expected for the video to
45
-     * freeze this timeout must be significantly reduced in "out of forwarded sources" case.
46
-     *
47
-     * Basically this value is used instead of {@link rtcMuteTimeout} when track is not in forwarded sources.
48
-     */
49
-    outOfForwardedSourcesTimeout: number;
50
-    /**
51
-     * How long we are going to wait for the corresponding signaling mute event after the RTC video track muted
52
-     * event is fired on the Media stream, before the connection interrupted is fired. The default value is
53
-     * {@link DEFAULT_P2P_RTC_MUTE_TIMEOUT}.
54
-     */
55
-    p2pRtcMuteTimeout: number;
56
-    /**
57
-     * How long we're going to wait after the RTC video track muted event for the corresponding signalling mute
58
-     * event, before the connection interrupted is fired. The default value is {@link DEFAULT_RTC_MUTE_TIMEOUT}.
59
-     *
60
-     * @returns amount of time in milliseconds
61
-     */
62
-    rtcMuteTimeout: number;
63
-    /**
64
-     * This holds a timestamp indicating  when remote video track was RTC muted. The purpose of storing the
65
-     * timestamp is to avoid the transition to disconnected status in case of legitimate video mute operation where
66
-     * the signalling video muted event can arrive shortly after RTC muted event.
67
-     *
68
-     * The timestamp is measured in milliseconds obtained with <tt>Date.now()</tt>.
69
-     *
70
-     * FIXME merge this logic with NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the event
71
-     * to the remote track and allowing to set different timeout for local and remote tracks.
72
-     */
73
-    rtcMutedTimestamp: number | null;
74
-    /** This holds the restoring timeout callback ID scheduled using window.setTimeout. */
75
-    restoringTimer: ReturnType<typeof setTimeout> | null;
76
-    /**
77
-     * This holds the current streaming status (along with all the internal events that happen while in that
78
-     * state).
79
-     *
80
-     * The goal is to send this information to the analytics backend for post-mortem analysis.
81
-     */
82
-    streamingStatusMap: StreamingStatusMap;
83
-    _onP2PStatus: () => void;
84
-    _onUserLeft: () => void;
85
-    _onTrackRtcMuted: () => void;
86
-    _onTrackRtcUnmuted: () => void;
87
-    _onSignallingMuteChanged: () => void;
88
-    _onTrackVideoTypeChanged: () => void;
89
-    _onLastNValueChanged: () => void;
90
-    _onForwardedSourcesChanged: () => void;
91
-    /**
92
-     * Calculates the new {@link TrackStreamingStatus} based on the values given for some specific remote track. It is
93
-     * assumed that the conference is currently in the JVB mode (in contrary to the P2P mode)
94
-     * @param isInForwardedSources - indicates whether the track is in the forwarded sources set. When set to
95
-     * false it means that JVB is not sending any video for the track.
96
-     * @param isRestoringTimedout - if true it means that the track has been outside of forwarded sources too
97
-     * long to be considered {@link TrackStreamingStatus.RESTORING}.
98
-     * @param isVideoMuted - true if the track is video muted and we should not expect to receive any video.
99
-     * @param isVideoTrackFrozen - if the current browser supports video frozen detection then it will be set to
-     * true when the video track is frozen. If the current browser does not support frozen detection then it's always
-     * false.
102
-     * @return {TrackStreamingStatus} the new streaming status for the track for whom the values above were provided.
103
-     * @private
104
-     */
105
-    static _getNewStateForJvbMode(isInForwardedSources: boolean, isRestoringTimedout: boolean, isVideoMuted: boolean, isVideoTrackFrozen: boolean): TrackStreamingStatus;
106
-    /**
107
-     * In P2P mode we don't care about any values coming from the JVB and the streaming status can be only active or
108
-     * interrupted.
109
-     * @param isVideoMuted - true if video muted
110
-     * @param isVideoTrackFrozen - true if the video track for the remote track is currently frozen. If the
111
-     * current browser does not support video frozen detection then it's always false.
112
-     * @return {TrackStreamingStatus}
113
-     * @private
114
-     */
115
-    static _getNewStateForP2PMode(isVideoMuted: boolean, isVideoTrackFrozen: boolean): TrackStreamingStatus;
116
-    /**
117
-     * Creates new instance of <tt>TrackStreamingStatus</tt>.
118
-     *
119
-     * @constructor
120
-     * @param rtc - the RTC service instance
121
-     * @param conference - parent conference instance
122
-     * @param {Object} options
123
-     * @param {number} [options.p2pRtcMuteTimeout=2500] custom value for
124
-     * {@link TrackStreamingStatusImpl.p2pRtcMuteTimeout}.
125
-     * @param {number} [options.rtcMuteTimeout=2000] custom value for
126
-     * {@link TrackStreamingStatusImpl.rtcMuteTimeout}.
127
-     * @param {number} [options.outOfForwardedSourcesTimeout=500] custom value for
128
-     * {@link TrackStreamingStatusImpl.outOfForwardedSourcesTimeout}.
129
-     */
130
-    constructor(rtc: RTC, conference: JitsiConference, track: JitsiRemoteTrack, options: {
131
-        outOfForwardedSourcesTimeout: number;
132
-        p2pRtcMuteTimeout: number;
133
-        rtcMuteTimeout: number;
134
-    });
135
-    /**
136
-     * Gets the video frozen timeout for given source name.
137
-     * @return how long we are going to wait since the RTC video muted event, before a video track is considered
138
-     * frozen.
139
-     * @private
140
-     */
141
-    _getVideoFrozenTimeout(): number;
142
-    /**
143
-     * Initializes <tt>TrackStreamingStatus</tt> and bind required event listeners.
144
-     */
145
-    init(): void;
146
-    /**
147
-     * Removes all event listeners and disposes of all resources held by this instance.
148
-     */
149
-    dispose(): void;
150
-    /**
151
-     * Changes streaming status.
152
-     * @param newStatus
153
-     */
154
-    _changeStreamingStatus(newStatus: TrackStreamingStatus): void;
155
-    /**
156
-     * Reset the postponed "streaming interrupted" event which was previously scheduled as a timeout on RTC 'onmute'
157
-     * event.
158
-     */
159
-    clearTimeout(): void;
160
-    /**
161
-     * Clears the timestamp of the RTC muted event for remote video track.
162
-     */
163
-    clearRtcMutedTimestamp(): void;
164
-    /**
165
-     * Checks if track is considered frozen.
166
-     * @return <tt>true</tt> if the video has frozen or <tt>false</tt> when it's either not considered frozen
167
-     * (yet) or if freeze detection is not supported by the current browser.
168
-     *
169
-     * FIXME merge this logic with NO_DATA_FROM_SOURCE event implemented in JitsiLocalTrack by extending the event to
170
-     *       the remote track and allowing to set different timeout for local and remote tracks.
171
-     */
172
-    isVideoTrackFrozen(): boolean;
173
-    /**
174
-     * Figures out (and updates) the current streaming status for the track identified by the source name.
175
-     */
176
-    figureOutStreamingStatus(): void;
177
-    /**
178
-     * Computes the duration of the current streaming status for the track (i.e. 15 seconds in the INTERRUPTED state)
179
-     * and sends a track streaming status event.
180
-     * @param nowMs - The current time (in millis).
181
-     */
182
-    maybeSendTrackStreamingStatusEvent(nowMs: number): void;
183
-    /**
184
-     * On a change in the forwarded sources set, check all leaving and entering tracks and update their corresponding statuses.
185
-     *
186
-     * @param leavingForwardedSources - The array of sourceName leaving forwarded sources.
187
-     * @param enteringForwardedSources - The array of sourceName entering forwarded sources.
188
-     * @param timestamp - The time in millis
189
-     * @private
190
-     */
191
-    onForwardedSourcesChanged(leavingForwardedSources: string[], enteringForwardedSources: string[], timestamp: number): void;
192
-    /**
193
-     * Clears the restoring timer for video track and the timestamp for entering forwarded sources.
194
-     */
195
-    _clearRestoringTimer(): void;
196
-    /**
197
-     * Checks whether a track has stayed long enough in the restoring state, comparing the current time with the time
-     * the track entered forwarded sources. If it hasn't timed out and there is no timer added, add a new timer in
-     * order to give it more time to become active, or mark it as interrupted on the next check.
200
-     *
201
-     * @returns <tt>true</tt> if the track was in restoring state more than the timeout
202
-     * ({@link DEFAULT_RESTORING_TIMEOUT}.) in order to set its status to interrupted.
203
-     * @private
204
-     */
205
-    _isRestoringTimedout(): boolean;
206
-    /** Checks whether a track is the current track. */
207
-    _isCurrentTrack(track: JitsiRemoteTrack): boolean;
208
-    /**
209
-     * Sends a last/final track streaming status event for the track of the user that left the conference.
210
-     * @param id - The id of the participant that left the conference.
211
-     */
212
-    onUserLeft(id: string): void;
213
-    /**
214
-     * Handles RTC 'onmute' event for the video track.
215
-     *
216
-     * @param track - The video track for which 'onmute' event will be processed.
217
-     */
218
-    onTrackRtcMuted(track: JitsiRemoteTrack): void;
219
-    /**
220
-     * Handles RTC 'onunmute' event for the video track.
221
-     *
222
-     * @param track - The video track for which 'onunmute' event will be processed.
223
-     */
224
-    onTrackRtcUnmuted(track: JitsiRemoteTrack): void;
225
-    /**
226
-     * Here the signalling "mute"/"unmute" events are processed.
227
-     *
228
-     * @param track - The remote video track for which the signalling mute/unmute event will be
229
-     * processed.
230
-     */
231
-    onSignallingMuteChanged(track: JitsiRemoteTrack): void;
232
-    /**
233
-     * Sends a track streaming status event as a result of the video type changing.
234
-     * @deprecated this will go away with full multiple streams support
235
-     * @param type - The video type.
236
-     */
237
-    onTrackVideoTypeChanged(type: VideoType): void;
238
-}
239
-export default TrackStreamingStatusImpl;
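For reference, a minimal consumption sketch of the declarations above. The rtc, conference and track objects, the subscription call and the JitsiTrackEvents reference are assumptions for illustration; only the constructor options (shown with their documented defaults) and the init/dispose lifecycle come from the declaration itself.

// Sketch only: these stand in for real lib-jitsi-meet objects.
declare const rtc: any;
declare const conference: any;
declare const track: any;
declare const JitsiTrackEvents: any; // assumed to expose TRACK_STREAMING_STATUS_CHANGED

const trackStatus = new TrackStreamingStatusImpl(rtc, conference, track, {
    outOfForwardedSourcesTimeout: 500, // documented default
    p2pRtcMuteTimeout: 2500,           // documented default
    rtcMuteTimeout: 2000               // documented default
});
trackStatus.init();

// The class emits JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED; subscribing
// on the track itself is an assumption.
track.on(JitsiTrackEvents.TRACK_STREAMING_STATUS_CHANGED,
    (t: any, status: TrackStreamingStatus) => console.log('streaming status', status));

// Later, when the remote track goes away:
trackStatus.dispose();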

+ 0
- 6
types/auto/modules/detection/ActiveDeviceDetector.d.ts

@@ -1,6 +0,0 @@
1
-/**
2
- * Go through all audio devices on the system and return one that is active, i.e. has audio signal.
3
- *
4
- * @returns Promise<Object> - Object containing information about the found device.
5
- */
6
-export default function getActiveAudioDevice(): Promise<any>;
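Since the single export above is a promise-returning helper, a small usage sketch; the declaration only promises "information about the found device", so any concrete fields of the resolved object are assumptions.

getActiveAudioDevice().then(deviceInfo => {
    // Fields such as deviceId or deviceLabel are assumed, not guaranteed by the type.
    console.log('active audio device', deviceInfo);
});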

+ 0
- 63
types/auto/modules/detection/DetectionEvents.d.ts

@@ -1,63 +0,0 @@
1
-export declare enum DetectionEvents {
2
-    /**
3
-     * Event triggered by an audio detector indicating that its active state has changed from active to inactive or vice
4
-     * versa.
5
-     * @event
6
-     * @type {boolean} - true when service has changed to active false otherwise.
7
-     */
8
-    DETECTOR_STATE_CHANGE = "detector_state_change",
9
-    /** Event triggered by {@link NoAudioSignalDetector} when the local audio device associated with a JitsiConference
10
-     * starts receiving audio levels with the value of 0 meaning no audio is being captured on that device, or when
11
-     * it starts receiving audio levels !== 0 after being in a state of no audio.
12
-     * @event
13
-     * @type {boolean} - true when the current conference audio track has audio input false otherwise.
14
-     */
15
-    AUDIO_INPUT_STATE_CHANGE = "audio_input_state_changed",
16
-    /** Event triggered by NoAudioSignalDetector when the local audio device associated with a JitsiConference goes silent
17
-     * for a period of time, meaning that the device is either broken or hardware/software muted.
18
-     * @event
19
-     * @type {void}
20
-     */
21
-    NO_AUDIO_INPUT = "no_audio_input_detected",
22
-    /**
23
-     *  Event generated by {@link VADNoiseDetection} when the tracked device is considered noisy.
24
-     *  @event
25
-     *  @type {Object}
26
-     */
27
-    VAD_NOISY_DEVICE = "detection.vad_noise_device",
28
-    /**
29
-     * Event generated by VADReportingService when it finishes creating a VAD report for the monitored devices.
30
-     * The generated objects are of type Array<Object>, one score for each monitored device.
31
-     * @event VAD_REPORT_PUBLISHED
32
-     * @type Array<Object> with the following structure:
33
-     * @property {Date} timestamp - Timestamp at which the compute took place.
34
-     * @property {number} avgVAD - Average VAD score over monitored period of time.
35
-     * @property {string} deviceId - Associate local audio device ID.
36
-     */
37
-    VAD_REPORT_PUBLISHED = "vad-report-published",
38
-    /**
39
-     * Event generated by {@link TrackVADEmitter} when PCM sample VAD score is available.
40
-     *
41
-     * @event
42
-     * @type {Object}
43
-     * @property {Date}   timestamp - Exact time at which processed PCM sample was generated.
44
-     * @property {number} score - VAD score on a scale from 0 to 1 (i.e. 0.7)
45
-     * @property {Float32Array} pcmData - Raw PCM data with which the VAD score was calculated.
46
-     * @property {string} deviceId - Device id of the associated track.
47
-     */
48
-    VAD_SCORE_PUBLISHED = "detection.vad_score_published",
49
-    /**
50
-     *  Event generated by {@link VADTalkMutedDetection} when a user is talking while the mic is muted.
51
-     *
52
-     *  @event
53
-     *  @type {Object}
54
-     */
55
-    VAD_TALK_WHILE_MUTED = "detection.vad_talk_while_muted"
56
-}
57
-export declare const DETECTOR_STATE_CHANGE = DetectionEvents.DETECTOR_STATE_CHANGE;
58
-export declare const AUDIO_INPUT_STATE_CHANGE = DetectionEvents.AUDIO_INPUT_STATE_CHANGE;
59
-export declare const NO_AUDIO_INPUT = DetectionEvents.NO_AUDIO_INPUT;
60
-export declare const VAD_NOISY_DEVICE = DetectionEvents.VAD_NOISY_DEVICE;
61
-export declare const VAD_REPORT_PUBLISHED = DetectionEvents.VAD_REPORT_PUBLISHED;
62
-export declare const VAD_SCORE_PUBLISHED = DetectionEvents.VAD_SCORE_PUBLISHED;
63
-export declare const VAD_TALK_WHILE_MUTED = DetectionEvents.VAD_TALK_WHILE_MUTED;
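Because these are plain string-valued enum members, subscribing to them is ordinary EventEmitter usage. A sketch, assuming the emitting detector instance (here a placeholder) extends Node's EventEmitter as the detector declarations below do:

import { EventEmitter } from 'events';

declare const talkMutedDetector: EventEmitter; // e.g. a VADTalkMutedDetection instance, assumed

talkMutedDetector.on(DetectionEvents.VAD_TALK_WHILE_MUTED, () => {
    console.log('speech detected while the microphone is muted');
});
talkMutedDetector.on(DetectionEvents.DETECTOR_STATE_CHANGE, (active: boolean) => {
    console.log('detector active state changed:', active);
});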

+ 0
- 57
types/auto/modules/detection/NoAudioSignalDetection.d.ts

@@ -1,57 +0,0 @@
1
-/// <reference types="node" />
2
-/**
3
- * Detects if there is no audio input on the currently selected TraceablePeerConnection track. The no audio
4
- * state must be constant for a configured amount of time in order for the event to be triggered.
5
- * @fires DetectionEvents.AUDIO_INPUT_STATE_CHANGE
6
- * @fires DetectionEvents.NO_AUDIO_INPUT
7
- */
8
-export default class NoAudioSignalDetection extends EventEmitter {
9
-    /**
10
-     * Creates new NoAudioSignalDetection.
11
-     *
12
-     * @param conference the JitsiConference instance that created us.
13
-     * @constructor
14
-     */
15
-    constructor(conference: any);
16
-    _conference: any;
17
-    _timeoutTrigger: NodeJS.Timeout;
18
-    _hasAudioInput: boolean;
19
-    /**
20
-     * Clear the timeout state.
21
-     */
22
-    _clearTriggerTimeout(): void;
23
-    /**
24
-     * Generates an event triggered by a change in the current conference audio input state.
25
-     *
26
-     * @param {*} audioLevel - The audio level of the ssrc.
27
-     * @fires DetectionEvents.AUDIO_INPUT_STATE_CHANGE
28
-     */
29
-    _handleAudioInputStateChange(audioLevel: any): void;
30
-    /**
31
-     * Generate event triggered by a prolonged period of no audio input.
32
-     *
33
-     * @param {number} audioLevel - The audio level of the ssrc.
34
-     * @fires DetectionEvents.NO_AUDIO_INPUT
35
-     */
36
-    _handleNoAudioInputDetection(audioLevel: number): void;
37
-    _eventFired: boolean;
38
-    /**
39
-     * Receives audio level events for all send and receive streams on the current TraceablePeerConnection.
40
-     *
41
-     * @param {TraceablePeerConnection} tpc - TraceablePeerConnection of the owning conference.
42
-     * @param {number} ssrc - The synchronization source identifier (SSRC) of the endpoint/participant/stream
43
-     * being reported.
44
-     * @param {number} audioLevel - The audio level of the ssrc.
45
-     * @param {boolean} isLocal - true for local/send streams or false for remote/receive streams.
46
-     */
47
-    _audioLevel(tpc: any, ssrc: number, audioLevel: number, isLocal: boolean): void;
48
-    /**
49
-     * Notifies NoAudioSignalDetection that a JitsiTrack was added to the associated JitsiConference.
50
-     * Only take into account local audio tracks.
51
-     *
52
-     * @param {JitsiTrack} track - The added JitsiTrack.
53
-     */
54
-    _trackAdded(track: any): void;
55
-    _audioTrack: any;
56
-}
57
-import EventEmitter from "events";
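In practice this detector is created by the conference itself (the constructor comment says the JitsiConference instance "created us"), so constructing it directly as below is purely illustrative; the event names come from the DetectionEvents module shown earlier in this diff.

declare const conference: any; // a JitsiConference instance, assumed to exist

const noAudioDetector = new NoAudioSignalDetection(conference);
noAudioDetector.on('no_audio_input_detected', () => {          // DetectionEvents.NO_AUDIO_INPUT
    console.log('no audio captured from the selected device for a prolonged period');
});
noAudioDetector.on('audio_input_state_changed', (hasInput: boolean) => { // DetectionEvents.AUDIO_INPUT_STATE_CHANGE
    console.log('audio input state changed:', hasInput);
});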

+ 0
- 25
types/auto/modules/detection/P2PDominantSpeakerDetection.d.ts

@@ -1,25 +0,0 @@
1
-/**
2
- * The <tt>P2PDominantSpeakerDetection</tt> is activated only when p2p is
3
- * currently used.
4
- * Listens for changes in the audio level changes of the local p2p audio track
5
- * or remote p2p one and fires dominant speaker events to be able to use
6
- * features depending on those events (speaker stats), to make them work without
7
- * the video bridge.
8
- */
9
-export default class P2PDominantSpeakerDetection {
10
-    /**
11
-     * Creates P2PDominantSpeakerDetection
12
-     * @param conference the JitsiConference instance that created us.
13
-     * @constructor
14
-     */
15
-    constructor(conference: any);
16
-    conference: any;
17
-    myUserID: any;
18
-    /**
19
-     * Receives audio level events for all streams in the conference.
20
-     *
21
-     * @param {String} id - The participant id
22
-     * @param {number} audioLevel - The audio level.
23
-     */
24
-    _audioLevel(id: string, audioLevel: number): void;
25
-}

+ 0
- 129
types/auto/modules/detection/TrackVADEmitter.d.ts

@@ -1,129 +0,0 @@
1
-/// <reference types="node" />
2
-/**
3
- * Connects an audio JitsiLocalTrack to a vadProcessor using WebAudio ScriptProcessorNode.
4
- * Once an object is created audio from the local track flows through the ScriptProcessorNode as raw PCM.
5
- * The PCM is processed by the injected vad module and a voice activity detection score is obtained, the
6
- * score is published to consumers via an EventEmitter.
7
- * After work is done with this service the destroy method needs to be called for a proper cleanup.
8
- *
9
- * @fires VAD_SCORE_PUBLISHED
10
- */
11
-export default class TrackVADEmitter extends EventEmitter {
12
-    /**
13
-     * Factory method that sets up all the necessary components for the creation of the TrackVADEmitter.
14
-     *
15
-     * @param {string} micDeviceId - Target microphone device id.
16
-     * @param {number} procNodeSampleRate - Sample rate of the proc node.
17
-     * @param {Object} vadProcessor -Module that calculates the voice activity score for a certain audio PCM sample.
18
-     * The processor needs to implement the following functions:
19
-     * - <tt>getSampleLength()</tt> - Returns the sample size accepted by getSampleLength.
20
-     * - <tt>getRequiredPCMFrequency()</tt> - Returns the PCM frequency at which the processor operates.
21
-     * - <tt>calculateAudioFrameVAD(pcmSample)</tt> - Process a 32 float pcm sample of getSampleLength size.
22
-     * @returns {Promise<TrackVADEmitter>} - Promise resolving in a new instance of TrackVADEmitter.
23
-     */
24
-    static create(micDeviceId: string, procNodeSampleRate: number, vadProcessor: any): Promise<TrackVADEmitter>;
25
-    /**
26
-     * Constructor.
27
-     *
28
-     * @param {number} procNodeSampleRate - Sample rate of the ScriptProcessorNode. Possible values: 256, 512, 1024,
-     * 2048, 4096, 8192, 16384. Passing other values will default to the closest neighbor.
30
-     * @param {Object} vadProcessor - VAD processor that allows us to calculate VAD score for PCM samples.
31
-     * @param {JitsiLocalTrack} jitsiLocalTrack - JitsiLocalTrack corresponding to micDeviceId.
32
-     */
33
-    constructor(procNodeSampleRate: number, vadProcessor: any, jitsiLocalTrack: any);
34
-    /**
35
-     * Sample rate of the ScriptProcessorNode.
36
-     */
37
-    _procNodeSampleRate: number;
38
-    /**
39
-     * VAD Processor that allows us to calculate VAD score for PCM samples
40
-     */
41
-    _vadProcessor: any;
42
-    /**
43
-     * The JitsiLocalTrack instance.
44
-     */
45
-    _localTrack: any;
46
-    /**
47
-     * Buffer to hold residue PCM resulting after a ScriptProcessorNode callback
48
-     */
49
-    _bufferResidue: Float32Array;
50
-    /**
51
-     * The AudioContext instance with the preferred sample frequency.
52
-     */
53
-    _audioContext: AudioContext;
54
-    /**
55
-     * PCM Sample size expected by the VAD Processor instance. We cache it here as this value is used extensively,
56
-     * saves a couple of function calls.
57
-     */
58
-    _vadSampleSize: any;
59
-    /**
60
-     * ScriptProcessorNode callback; the input parameter contains the PCM audio that is then sent to rnnoise.
-     * Rnnoise only accepts PCM samples of 480 bytes, whereas the WebAudio processor node can't sample at a multiple
-     * of 480, thus after each _onAudioProcess callback there will remain a PCM buffer residue equal
-     * to _procNodeSampleRate / 480 which will be added to the next sample buffer and so on.
64
-     *
65
-     *
66
-     * @param {AudioProcessingEvent} audioEvent - Audio event.
67
-     * @returns {void}
68
-     * @fires VAD_SCORE_PUBLISHED
69
-     */
70
-    _onAudioProcess(audioEvent: AudioProcessingEvent): void;
71
-    /**
72
-     * Sets up the audio graph in the AudioContext.
73
-     *
74
-     * @returns {void}
75
-     */
76
-    _initializeAudioContext(): void;
77
-    _audioSource: MediaStreamAudioSourceNode;
78
-    _audioProcessingNode: ScriptProcessorNode;
79
-    /**
80
-     * Connects the nodes in the AudioContext to start the flow of audio data.
81
-     *
82
-     * @returns {void}
83
-     */
84
-    _connectAudioGraph(): void;
85
-    /**
86
-     * Disconnects the nodes in the AudioContext.
87
-     *
88
-     * @returns {void}
89
-     */
90
-    _disconnectAudioGraph(): void;
91
-    /**
92
-     * Cleanup potentially acquired resources.
93
-     *
94
-     * @returns {void}
95
-     */
96
-    _cleanupResources(): void;
97
-    /**
98
-     * Get the associated track device ID.
99
-     *
100
-     * @returns {string}
101
-     */
102
-    getDeviceId(): string;
103
-    /**
104
-     * Get the associated track label.
105
-     *
106
-     * @returns {string}
107
-     */
108
-    getTrackLabel(): string;
109
-    /**
110
-     * Start the emitter by connecting the audio graph.
111
-     *
112
-     * @returns {void}
113
-     */
114
-    start(): void;
115
-    /**
116
-     * Stops the emitter by disconnecting the audio graph.
117
-     *
118
-     * @returns {void}
119
-     */
120
-    stop(): void;
121
-    /**
122
-     * Destroy TrackVADEmitter instance (release resources and stop callbacks).
123
-     *
124
-     * @returns {void}
125
-     */
126
-    destroy(): void;
127
-    _destroyed: boolean;
128
-}
129
-import EventEmitter from "events";
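A sketch of the documented factory flow: the vadProcessor object implements the three functions listed in the create() comment, while the concrete numbers, the stand-in scoring function and the chosen device id are assumptions. The event name is the VAD_SCORE_PUBLISHED value declared in DetectionEvents above.

// Placeholder processor honouring the documented contract.
const vadProcessor = {
    getSampleLength: () => 480,                           // assumed sample size
    getRequiredPCMFrequency: () => 44100,                 // assumed PCM frequency
    calculateAudioFrameVAD: (pcm: Float32Array) => 0.5    // stand-in score
};

TrackVADEmitter.create('default', 4096, vadProcessor).then(emitter => {
    emitter.on('detection.vad_score_published', ({ score, deviceId }: any) => {
        console.log(`VAD score ${score} for device ${deviceId}`);
    });
    emitter.start();
    // ...later: emitter.stop(); emitter.destroy();
});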

+ 0
- 105
types/auto/modules/detection/VADAudioAnalyser.d.ts

@@ -1,105 +0,0 @@
1
-/**
2
- * Connects a TrackVADEmitter to the target conference local audio track and manages various services that use
3
- * the data to produce audio analytics (VADTalkMutedDetection and VADNoiseDetection).
4
- */
5
-export default class VADAudioAnalyser extends EventEmitter {
6
-    /**
7
-     * Creates <tt>VADAudioAnalyser</tt>
8
-     * @param {JitsiConference} conference - JitsiConference instance that created us.
9
-     * @param {Object} createVADProcessor - Function that creates a Voice activity detection processor. The processor
10
-     * needs to implement the following functions:
11
-     * - <tt>getSampleLength()</tt> - Returns the sample size accepted by getSampleLength.
12
-     * - <tt>getRequiredPCMFrequency()</tt> - Returns the PCM frequency at which the processor operates.
13
-     * - <tt>calculateAudioFrameVAD(pcmSample)</tt> - Process a 32 float pcm sample of getSampleLength size.
14
-     * @constructor
15
-     */
16
-    constructor(conference: any, createVADProcessor: any);
17
-    /**
18
-     * Member function that instantiates a VAD processor.
19
-     */
20
-    _createVADProcessor: any;
21
-    /**
22
-     * Current {@link TrackVADEmitter}. VAD Emitter uses a {@link JitsiLocalTrack} and VAD processor to generate
23
-     * periodic voice probability scores.
24
-     */
25
-    _vadEmitter: TrackVADEmitter;
26
-    /**
27
-     * Current state of the _vadEmitter
28
-     */
29
-    _isVADEmitterRunning: boolean;
30
-    /**
31
-     * Array of currently attached VAD processing services.
32
-     */
33
-    _detectionServices: any[];
34
-    /**
35
-     * Promise used to chain create and destroy operations associated with TRACK_ADDED and TRACK_REMOVED events
36
-     * coming from the conference.
37
-     * Because we have an async created component (VAD Processor) we need to make sure that it's initialized before
38
-     * we destroy it ( when changing the device for instance), or when we use it from an external point of entry
39
-     * i.e. (TRACK_MUTE_CHANGED event callback).
40
-     */
41
-    _vadInitTracker: Promise<void>;
42
-    /**
43
-     * Listens for {@link TrackVADEmitter} events and directs them to attached services as needed.
44
-     *
45
-     * @param {Object} vadScore -VAD score emitted by {@link TrackVADEmitter}
46
-     * @param {Date}   vadScore.timestamp - Exact time at which processed PCM sample was generated.
47
-     * @param {number} vadScore.score - VAD score on a scale from 0 to 1 (i.e. 0.7)
48
-     * @param {Float32Array} pcmData - Raw PCM data with which the VAD score was calculated.
49
-     * @param {string} vadScore.deviceId - Device id of the associated track.
50
-     * @listens VAD_SCORE_PUBLISHED
51
-     */
52
-    _processVADScore(vadScore: {
53
-        timestamp: Date;
54
-        score: number;
55
-    }): void;
56
-    /**
57
-     * Attach a VAD detector service to the analyser and handle its state changes.
58
-     *
59
-     * @param {Object} vadTMDetector
60
-     */
61
-    addVADDetectionService(vadService: any): void;
62
-    /**
63
-     * Start the {@link TrackVADEmitter} and attach the event listener.
64
-     * @returns {void}
65
-     */
66
-    _startVADEmitter(): void;
67
-    /**
68
-     * Stop the {@link TrackVADEmitter} and detach the event listener.
69
-     * @returns {void}
70
-     */
71
-    _stopVADEmitter(): void;
72
-    /**
73
-     * Change the isMuted state of all attached detection services.
74
-     *
75
-     * @param {boolean} isMuted
76
-     */
77
-    _changeDetectorsMuteState(isMuted: boolean): void;
78
-    /**
79
-     * Notifies the detector that a track was added to the associated {@link JitsiConference}.
80
-     * Only take into account local audio tracks.
81
-     * @param {JitsiTrack} track - The added track.
82
-     * @returns {void}
83
-     * @listens TRACK_ADDED
84
-     */
85
-    _trackAdded(track: any): void;
86
-    /**
87
-     * Notifies the detector that the mute state of a {@link JitsiConference} track has changed. Only takes into account
88
-     * local audio tracks.
89
-     * @param {JitsiTrack} track - The track whose mute state has changed.
90
-     * @returns {void}
91
-     * @listens TRACK_MUTE_CHANGED
92
-     */
93
-    _trackMuteChanged(track: any): void;
94
-    /**
95
-     * Notifies the detector that a track associated with the {@link JitsiConference} was removed. Only takes into
96
-     * account local audio tracks. Cleans up resources associated with the track and resets the processing context.
97
-     *
98
-     * @param {JitsiTrack} track - The removed track.
99
-     * @returns {void}
100
-     * @listens TRACK_REMOVED
101
-     */
102
-    _trackRemoved(track: any): void;
103
-}
104
-import { EventEmitter } from "events";
105
-import TrackVADEmitter from "./TrackVADEmitter";
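A short wiring sketch based on the constructor comment: the conference, the VAD-processor factory and the attached detection service are all assumed placeholders (the latter would typically be one of the detectors declared elsewhere in this diff).

declare const conference: any;                          // JitsiConference, assumed
declare const createVADProcessor: () => Promise<any>;   // e.g. an rnnoise-based factory, assumed
declare const talkWhileMutedService: any;               // e.g. a VADTalkMutedDetection instance, assumed

const analyser = new VADAudioAnalyser(conference, createVADProcessor);
analyser.addVADDetectionService(talkWhileMutedService);
// From here on the analyser reacts to TRACK_ADDED / TRACK_MUTE_CHANGED / TRACK_REMOVED
// conference events and feeds VAD scores to the attached service.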

+ 0
- 85
types/auto/modules/detection/VADNoiseDetection.d.ts

@@ -1,85 +0,0 @@
1
-/**
2
- * Detects if the provided VAD score and PCM data are considered noise.
3
- */
4
-export default class VADNoiseDetection extends EventEmitter {
5
-    /**
6
-     * Creates <tt>VADNoiseDetection</tt>
7
-     *
8
-     * @constructor
9
-     */
10
-    constructor();
11
-    /**
12
-     * Flag which denotes the current state of the detection service, i.e. if there is already a processing operation
13
-     * ongoing.
14
-     */
15
-    _processing: boolean;
16
-    /**
17
-     * Buffer that keeps the VAD scores for a period of time.
18
-     */
19
-    _scoreArray: any[];
20
-    /**
21
-     * Buffer that keeps audio level samples for a period of time.
22
-     */
23
-    _audioLvlArray: any[];
24
-    /**
25
-     * Current state of the service, if it's not active no processing will occur.
26
-     */
27
-    _active: boolean;
28
-    /**
29
-     * Compute cumulative VAD score and PCM audio levels once the PROCESS_TIME_FRAME_SPAN_MS timeout has elapsed.
30
-     * If the score is above the set threshold fire the event.
31
-     * @returns {void}
32
-     * @fires VAD_NOISY_DEVICE
33
-     */
34
-    _calculateNoisyScore(): void;
35
-    /**
36
-     * Record the vad score and average volume in the appropriate buffers.
37
-     *
38
-     * @param {number} vadScore
39
-     * @param {number} avgAudioLvl - average audio level of the PCM sample associated with the VAD score.
40
-     */
41
-    _recordValues(vadScore: number, avgAudioLvl: number): void;
42
-    /**
43
-     * Set the active state of the detection service and notify any listeners.
44
-     *
45
-     * @param {boolean} active
46
-     * @fires DETECTOR_STATE_CHANGE
47
-     */
48
-    _setActiveState(active: boolean): void;
49
-    /**
50
-     * Change the state according to the muted status of the tracked device.
51
-     *
52
-     * @param {boolean} isMuted - Is the device muted or not.
53
-     */
54
-    changeMuteState(isMuted: boolean): void;
55
-    /**
56
-     * Check whether or not the service is active.
57
-     *
58
-     * @returns {boolean}
59
-     */
60
-    isActive(): boolean;
61
-    /**
62
-     * Reset the processing context, clear buffers, cancel the timeout trigger.
63
-     *
64
-     * @returns {void}
65
-     */
66
-    reset(): void;
67
-    /**
68
-     * Listens for {@link TrackVADEmitter} events and processes them.
69
-     *
70
-     * @param {Object} vadScore -VAD score emitted by {@link TrackVADEmitter}
71
-     * @param {Date}   vadScore.timestamp - Exact time at which processed PCM sample was generated.
72
-     * @param {number} vadScore.score - VAD score on a scale from 0 to 1 (i.e. 0.7)
73
-     * @param {Float32Array} vadScore.pcmData - Raw PCM Data associated with the VAD score.
74
-     * @param {string} vadScore.deviceId - Device id of the associated track.
75
-     * @listens VAD_SCORE_PUBLISHED
76
-     */
77
-    processVADScore(vadScore: {
78
-        timestamp: Date;
79
-        score: number;
80
-        pcmData: Float32Array;
81
-        deviceId: string;
82
-    }): void;
83
-    _processTimeout: NodeJS.Timeout;
84
-}
85
-import { EventEmitter } from "events";

+ 0
- 96
types/auto/modules/detection/VADReportingService.d.ts

@@ -1,96 +0,0 @@
1
-/// <reference types="node" />
2
-/**
3
- * Voice activity detection reporting service. The service creates TrackVADEmitters for the provided devices and
- * publishes an average of their VAD scores over the specified interval via EventEmitter.
- * The service is not reusable: if destroyed, a new one needs to be created, i.e. when a new device is added to the
- * system a new service needs to be created and the old one discarded.
7
- */
8
-export default class VADReportingService extends EventEmitter {
9
-    /**
10
-     * Factory methods that creates the TrackVADEmitters for the associated array of devices and instantiates
11
-     * a VADReportingService.
12
-     *
13
-     * @param {Array<MediaDeviceInfo>} micDeviceList - Device list that is monitored inside the service.
14
-     * @param {number} intervalDelay - Delay at which to publish VAD score for monitored devices.
15
-     * @param {Object} createVADProcessor - Function that creates a Voice activity detection processor. The processor
16
-     * needs to implement the following functions:
17
-     * - <tt>getSampleLength()</tt> - Returns the sample size accepted by getSampleLength.
18
-     * - <tt>getRequiredPCMFrequency()</tt> - Returns the PCM frequency at which the processor operates.
19
-     * - <tt>calculateAudioFrameVAD(pcmSample)</tt> - Process a 32 float pcm sample of getSampleLength size.
20
-     *
21
-     * @returns {Promise<VADReportingService>}
22
-     */
23
-    static create(micDeviceList: Array<MediaDeviceInfo>, intervalDelay: number, createVADProcessor: any): Promise<VADReportingService>;
24
-    /**
25
-     *
26
-     * @param {number} intervalDelay - Delay at which to publish VAD score for monitored devices.
27
-     *
28
-     * @constructor
29
-     */
30
-    constructor(intervalDelay: number);
31
-    /**
32
-     * Map containing context for devices currently being monitored by the reporting service.
33
-     */
34
-    _contextMap: Map<any, any>;
35
-    /**
36
-     * State flag, check if the instance was destroyed.
37
-     */
38
-    _destroyed: boolean;
39
-    /**
40
-     * Delay at which to publish VAD score for monitored devices.
41
-     */
42
-    _intervalDelay: number;
43
-    /**
44
-     * Identifier for the interval publishing stats on the set interval.
45
-     */
46
-    _intervalId: NodeJS.Timer;
47
-    /**
48
-     * Destroy TrackVADEmitters and clear the context map.
49
-     *
50
-     * @returns {void}
51
-     */
52
-    _clearContextMap(): void;
53
-    /**
54
-     * Set the watched device contexts.
55
-     *
56
-     * @param {Array<VADDeviceContext>} vadContextArray - List of mics.
57
-     * @returns {void}
58
-     */
59
-    _setVADContextArray(vadContextArray: Array<any>): void;
60
-    /**
61
-     * Start the setInterval reporting process.
62
-     *
63
-     * @returns {void}.
64
-     */
65
-    _startPublish(): void;
66
-    /**
67
-     * Function called at set interval with selected compute. The result will be published on the set callback.
68
-     *
69
-     * @returns {void}
70
-     * @fires VAD_REPORT_PUBLISHED
71
-     */
72
-    _reportVadScore(): void;
73
-    /**
74
-     * Callback method passed to vad emitters in order to publish their score.
75
-     *
76
-     * @param {Object} vadScore -VAD score emitted by.
77
-     * @param {Date}   vadScore.timestamp - Exact time at which processed PCM sample was generated.
78
-     * @param {number} vadScore.score - VAD score on a scale from 0 to 1 (i.e. 0.7).
79
-     * @param {string} vadScore.deviceId - Device id of the associated track.
80
-     * @returns {void}
81
-     * @listens VAD_SCORE_PUBLISHED
82
-     */
83
-    _devicePublishVADScore(vadScore: {
84
-        timestamp: Date;
85
-        score: number;
86
-        deviceId: string;
87
-    }): void;
88
-    /**
89
-     * Destroy the VADReportingService, stops the setInterval reporting, destroys the emitters and clears the map.
90
-     * After this call the instance is no longer usable.
91
-     *
92
-     * @returns {void}.
93
-     */
94
-    destroy(): void;
95
-}
96
-import EventEmitter from "events";
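A consumption sketch for the factory above. The 5-second interval, the processor factory and the report handling are assumptions; the event name is the VAD_REPORT_PUBLISHED value from DetectionAvents as declared earlier, and the report fields mirror the documented structure.

declare const createVADProcessor: () => Promise<any>; // assumed VAD processor factory

navigator.mediaDevices.enumerateDevices()
    .then(devices => devices.filter(d => d.kind === 'audioinput'))
    .then(mics => VADReportingService.create(mics, 5000, createVADProcessor))
    .then(service => {
        service.on('vad-report-published',
            (reports: Array<{ timestamp: Date; avgVAD: number; deviceId: string }>) => {
                for (const report of reports) {
                    console.log(`avg VAD ${report.avgVAD} on ${report.deviceId}`);
                }
            });
    });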

+ 0
- 70
types/auto/modules/detection/VADTalkMutedDetection.d.ts

@@ -1,70 +0,0 @@
1
-/**
2
- * Detects if a VAD score generated on a muted device indicates voice and fires an event.
3
- */
4
-export default class VADTalkMutedDetection extends EventEmitter {
5
-    /**
6
-     * Creates <tt>VADTalkMutedDetection</tt>
7
-     * @constructor
8
-     */
9
-    constructor();
10
-    /**
11
-     * Flag which denotes the current state of the detection service, i.e. if there is already a processing operation
12
-     * ongoing.
13
-     */
14
-    _processing: boolean;
15
-    /**
16
-     * Buffer that keeps the VAD scores for a period of time.
17
-     */
18
-    _scoreArray: any[];
19
-    /**
20
-     * Current mute state of the audio track being monitored.
21
-     */
22
-    _active: boolean;
23
-    /**
24
-     * Compute cumulative VAD score function called once the PROCESS_TIME_FRAME_SPAN_MS timeout has elapsed.
25
-     * @returns {void}
26
-     * @fires VAD_TALK_WHILE_MUTED
27
-     */
28
-    _calculateVADScore(): void;
29
-    /**
30
-     * Set the active state of the detection service and notify any listeners.
31
-     *
32
-     * @param {boolean} active
33
-     * @fires DETECTOR_STATE_CHANGE
34
-     */
35
-    _setActiveState(active: boolean): void;
36
-    /**
37
-     * Change the state according to the muted status of the tracked device.
38
-     *
39
-     * @param {boolean} isMuted - Is the device muted or not.
40
-     */
41
-    changeMuteState(isMuted: boolean): void;
42
-    /**
43
-     * Check whether or not the service is active.
44
-     *
45
-     * @returns {boolean}
46
-     */
47
-    isActive(): boolean;
48
-    /**
49
-     * Listens for {@link TrackVADEmitter} events and processes them.
50
-     *
51
-     * @param {Object} vadScore -VAD score emitted by {@link TrackVADEmitter}
52
-     * @param {Date}   vadScore.timestamp - Exact time at which processed PCM sample was generated.
53
-     * @param {number} vadScore.score - VAD score on a scale from 0 to 1 (i.e. 0.7)
54
-     * @param {string} vadScore.deviceId - Device id of the associated track.
55
-     * @listens VAD_SCORE_PUBLISHED
56
-     */
57
-    processVADScore(vadScore: {
58
-        timestamp: Date;
59
-        score: number;
60
-        deviceId: string;
61
-    }): void;
62
-    _processTimeout: NodeJS.Timeout;
63
-    /**
64
-     * Reset the processing context, clear buffer, cancel the timeout trigger.
65
-     *
66
-     * @returns {void}
67
-     */
68
-    reset(): void;
69
-}
70
-import { EventEmitter } from "events";

+ 0
- 91
types/auto/modules/e2ee/Context.d.ts

@@ -1,91 +0,0 @@
1
-/**
2
- * Per-participant context holding the cryptographic keys and
3
- * encode/decode functions
4
- */
5
-export class Context {
6
-    /**
7
-     * @param {Object} options
8
-     */
9
-    constructor({ sharedKey }?: any);
10
-    _cryptoKeyRing: any[];
11
-    _currentKeyIndex: number;
12
-    _sendCounts: Map<any, any>;
13
-    _sharedKey: any;
14
-    /**
15
-     * Derives the different subkeys and starts using them for encryption or
16
-     * decryption.
17
-     * @param {Uint8Array|false} key bytes. Pass false to disable.
18
-     * @param {Number} keyIndex
19
-     */
20
-    setKey(key: Uint8Array | false, keyIndex?: number): Promise<void>;
21
-    /**
22
-     * Sets a set of keys and resets the sendCount.
24
-     * @param {Object} keys set of keys.
25
-     * @param {Number} keyIndex optional
26
-     * @private
27
-     */
28
-    private _setKeys;
29
-    _sendCount: bigint;
30
-    /**
31
-     * Function that will be injected in a stream and will encrypt the given encoded frames.
32
-     *
33
-     * @param {RTCEncodedVideoFrame|RTCEncodedAudioFrame} encodedFrame - Encoded video frame.
34
-     * @param {TransformStreamDefaultController} controller - TransportStreamController.
35
-     *
36
-     * The VP8 payload descriptor described in
37
-     * https://tools.ietf.org/html/rfc7741#section-4.2
38
-     * is part of the RTP packet and not part of the frame and is not controllable by us.
39
-     * This is fine as the SFU keeps having access to it for routing.
40
-     *
41
-     * The encrypted frame is formed as follows:
42
-     * 1) Leave the first (10, 3, 1) bytes unencrypted, depending on the frame type and kind.
43
-     * 2) Form the GCM IV for the frame as described above.
44
-     * 3) Encrypt the rest of the frame using AES-GCM.
45
-     * 4) Allocate space for the encrypted frame.
46
-     * 5) Copy the unencrypted bytes to the start of the encrypted frame.
47
-     * 6) Append the ciphertext to the encrypted frame.
48
-     * 7) Append the IV.
49
-     * 8) Append a single byte for the key identifier.
50
-     * 9) Enqueue the encrypted frame for sending.
51
-     */
52
-    encodeFunction(encodedFrame: any | any, controller: TransformStreamDefaultController): Promise<void>;
53
-    /**
54
-     * Function that will be injected in a stream and will decrypt the given encoded frames.
55
-     *
56
-     * @param {RTCEncodedVideoFrame|RTCEncodedAudioFrame} encodedFrame - Encoded video frame.
57
-     * @param {TransformStreamDefaultController} controller - TransportStreamController.
58
-     */
59
-    decodeFunction(encodedFrame: any | any, controller: TransformStreamDefaultController): Promise<void>;
60
-    /**
61
-     * Function that will decrypt the given encoded frame. If the decryption fails, it will
62
-     * ratchet the key for up to RATCHET_WINDOW_SIZE times.
63
-     *
64
-     * @param {RTCEncodedVideoFrame|RTCEncodedAudioFrame} encodedFrame - Encoded video frame.
65
-     * @param {number} keyIndex - the index of the decryption data in _cryptoKeyRing array.
66
-     * @param {number} ratchetCount - the number of retries after ratcheting the key.
67
-     * @returns {Promise<RTCEncodedVideoFrame|RTCEncodedAudioFrame>} - The decrypted frame.
68
-     * @private
69
-     */
70
-    private _decryptFrame;
71
-    /**
72
-     * Construct the IV used for AES-GCM and sent (in plain) with the packet similar to
73
-     * https://tools.ietf.org/html/rfc7714#section-8.1
74
-     * It concatenates
75
-     * - the 32 bit synchronization source (SSRC) given on the encoded frame,
76
-     * - the 32 bit rtp timestamp given on the encoded frame,
77
-     * - a send counter that is specific to the SSRC. Starts at a random number.
78
-     * The send counter is essentially the pictureId but we currently have to implement this ourselves.
79
-     * There is no XOR with a salt. Note that this IV leaks the SSRC to the receiver but since this is
80
-     * randomly generated and SFUs may not rewrite this is considered acceptable.
81
-     * The SSRC is used to allow demultiplexing multiple streams with the same key, as described in
82
-     *   https://tools.ietf.org/html/rfc3711#section-4.1.1
83
-     * The RTP timestamp is 32 bits and advances by the codec clock rate (90khz for video, 48khz for
84
-     * opus audio) every second. For video it rolls over roughly every 13 hours.
85
-     * The send counter will advance at the frame rate (30fps for video, 50fps for 20ms opus audio)
86
-     * every second. It will take a long time to roll over.
87
-     *
88
-     * See also https://developer.mozilla.org/en-US/docs/Web/API/AesGcmParams
89
-     */
90
-    _makeIV(synchronizationSource: any, timestamp: any): ArrayBuffer;
91
-}
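The encode/decode functions above are designed to be injected into a stream. A sender-side sketch, assuming an Insertable Streams readable/writable pair has already been obtained from an RTCRtpSender (that part is browser-specific and outside this declaration), and using a random 256-bit key purely for illustration:

const ctx = new Context({ sharedKey: true });

declare const encodedStreams: { readable: ReadableStream; writable: WritableStream }; // assumed

ctx.setKey(crypto.getRandomValues(new Uint8Array(32)), 0).then(() => {
    const encryptTransform = new TransformStream({
        // Each encoded frame is encrypted according to the layout described in encodeFunction().
        transform: (frame, controller) => ctx.encodeFunction(frame, controller)
    });
    encodedStreams.readable.pipeThrough(encryptTransform).pipeTo(encodedStreams.writable);
});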

+ 0
- 58
types/auto/modules/e2ee/E2EEContext.d.ts

@@ -1,58 +0,0 @@
1
-/**
2
- * Context encapsulating the cryptography bits required for E2EE.
3
- * This uses the WebRTC Insertable Streams API which is explained in
4
- *   https://github.com/alvestrand/webrtc-media-streams/blob/master/explainer.md
5
- * that provides access to the encoded frames and allows them to be transformed.
6
- *
7
- * The encoded frame format is explained below in the _encodeFunction method.
8
- * High level design goals were:
9
- * - do not require changes to existing SFUs and retain (VP8) metadata.
10
- * - allow the SFU to rewrite SSRCs, timestamp, pictureId.
11
- * - allow for the key to be rotated frequently.
12
- */
13
-export default class E2EEcontext {
14
-    /**
15
-     * Build a new E2EE context instance, which will be used in a given conference.
16
-     * @param {boolean} [options.sharedKey] - whether there is a unique key shared among all participants.
17
-     */
18
-    constructor({ sharedKey }?: boolean);
19
-    _worker: Worker;
20
-    /**
21
-     * Cleans up all state associated with the given participant. This is needed when a
22
-     * participant leaves the current conference.
23
-     *
24
-     * @param {string} participantId - The participant that just left.
25
-     */
26
-    cleanup(participantId: string): void;
27
-    /**
28
-     * Cleans up all state associated with all participants in the conference. This is needed when disabling e2ee.
29
-     *
30
-     */
31
-    cleanupAll(): void;
32
-    /**
33
-     * Handles the given {@code RTCRtpReceiver} by creating a {@code TransformStream} which will inject
34
-     * a frame decoder.
35
-     *
36
-     * @param {RTCRtpReceiver} receiver - The receiver which will get the decoding function injected.
37
-     * @param {string} kind - The kind of track this receiver belongs to.
38
-     * @param {string} participantId - The participant id that this receiver belongs to.
39
-     */
40
-    handleReceiver(receiver: RTCRtpReceiver, kind: string, participantId: string): void;
41
-    /**
42
-     * Handles the given {@code RTCRtpSender} by creating a {@code TransformStream} which will inject
43
-     * a frame encoder.
44
-     *
45
-     * @param {RTCRtpSender} sender - The sender which will get the encoding function injected.
46
-     * @param {string} kind - The kind of track this sender belongs to.
47
-     * @param {string} participantId - The participant id that this sender belongs to.
48
-     */
49
-    handleSender(sender: RTCRtpSender, kind: string, participantId: string): void;
50
-    /**
51
-     * Set the E2EE key for the specified participant.
52
-     *
53
-     * @param {string} participantId - the ID of the participant whose key we are setting.
-     * @param {Uint8Array | boolean} key - the key for the given participant.
55
-     * @param {Number} keyIndex - the key index.
56
-     */
57
-    setKey(participantId: string, key: Uint8Array | boolean, keyIndex: number): void;
58
-}
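A wiring sketch for the receiver/sender handlers above, assuming an RTCPeerConnection is available; the participant IDs and the key rotation at the end are placeholders.

declare const pc: RTCPeerConnection; // assumed to exist

const e2eeCtx = new E2EEcontext();

for (const receiver of pc.getReceivers()) {
    e2eeCtx.handleReceiver(receiver, receiver.track.kind, 'remote-participant-id');
}
for (const sender of pc.getSenders()) {
    if (sender.track) {
        e2eeCtx.handleSender(sender, sender.track.kind, 'local-participant-id');
    }
}

// Rotate the key for a given participant (key index 1).
e2eeCtx.setKey('remote-participant-id', crypto.getRandomValues(new Uint8Array(32)), 1);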

+ 0
- 42
types/auto/modules/e2ee/E2EEncryption.d.ts

@@ -1,42 +0,0 @@
1
-/**
2
- * This module integrates {@link KeyHandler} with {@link JitsiConference} in order to enable E2E encryption.
3
- */
4
-export class E2EEncryption {
5
-    /**
6
-     * Indicates if E2EE is supported in the current platform.
7
-     *
8
-     * @param {object} config - Global configuration.
9
-     * @returns {boolean}
10
-     */
11
-    static isSupported(config: object): boolean;
12
-    /**
13
-     * A constructor.
14
-     * @param {JitsiConference} conference - The conference instance for which E2E encryption is to be enabled.
15
-     */
16
-    constructor(conference: any);
17
-    _externallyManaged: any;
18
-    _keyHandler: ExternallyManagedKeyHandler | ManagedKeyHandler;
19
-    /**
20
-     * Indicates whether E2EE is currently enabled or not.
21
-     *
22
-     * @returns {boolean}
23
-     */
24
-    isEnabled(): boolean;
25
-    /**
26
-     * Enables / disables End-To-End encryption.
27
-     *
28
-     * @param {boolean} enabled - whether E2EE should be enabled or not.
29
-     * @returns {void}
30
-     */
31
-    setEnabled(enabled: boolean): void;
32
-    /**
33
-     * Sets the key and index for End-to-End encryption.
34
-     *
35
-     * @param {CryptoKey} [keyInfo.encryptionKey] - encryption key.
36
-     * @param {Number} [keyInfo.index] - the index of the encryption key.
37
-     * @returns {void}
38
-     */
39
-    setEncryptionKey(keyInfo: any): void;
40
-}
41
-import { ExternallyManagedKeyHandler } from "./ExternallyManagedKeyHandler";
42
-import { ManagedKeyHandler } from "./ManagedKeyHandler";
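A usage sketch tying the pieces together: a feature check, enabling E2EE, and, for the externally-managed case hinted at by setEncryptionKey, pushing in an AES-GCM CryptoKey. The conference and config objects are assumed, and whether a key must be supplied at all depends on which key handler is in use.

declare const conference: any; // JitsiConference, assumed
declare const config: object;  // global configuration, assumed

if (E2EEncryption.isSupported(config)) {
    const e2ee = new E2EEncryption(conference);
    e2ee.setEnabled(true);

    // Externally managed keys: hand over an AES-GCM key plus index.
    crypto.subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt'])
        .then(encryptionKey => e2ee.setEncryptionKey({ encryptionKey, index: 0 }));
}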

+ 0
- 19
types/auto/modules/e2ee/ExternallyManagedKeyHandler.d.ts

@@ -1,19 +0,0 @@
1
-/**
2
- * This module integrates {@link E2EEContext} with {external} in order to set the keys for encryption.
3
- */
4
-export class ExternallyManagedKeyHandler extends KeyHandler {
5
-    /**
6
-     * Build a new ExternallyManagedKeyHandler instance, which will be used in a given conference.
7
-     * @param conference - the current conference.
8
-     */
9
-    constructor(conference: any);
10
-    /**
11
-     * Sets the key and index for End-to-End encryption.
12
-     *
13
-     * @param {CryptoKey} [keyInfo.encryptionKey] - encryption key.
14
-     * @param {Number} [keyInfo.index] - the index of the encryption key.
15
-     * @returns {void}
16
-     */
17
-    setKey(keyInfo: any): void;
18
-}
19
-import { KeyHandler } from "./KeyHandler";

+ 0
- 69
types/auto/modules/e2ee/KeyHandler.d.ts

@@ -1,69 +0,0 @@
1
-/**
2
- * Abstract class that integrates {@link E2EEContext} with a key management system.
3
- */
4
-export class KeyHandler extends Listenable {
5
-    /**
6
-     * Build a new KeyHandler instance, which will be used in a given conference.
7
-     * @param {JitsiConference} conference - the current conference.
8
-     * @param {object} options - the options passed to {E2EEContext}, see implementation.
9
-     */
10
-    constructor(conference: any, options?: object);
11
-    conference: any;
12
-    e2eeCtx: E2EEContext;
13
-    enabled: boolean;
14
-    _enabling: Deferred;
15
-    /**
16
-     * Indicates whether E2EE is currently enabled or not.
17
-     *
18
-     * @returns {boolean}
19
-     */
20
-    isEnabled(): boolean;
21
-    /**
22
-     * Enables / disables End-To-End encryption.
23
-     *
24
-     * @param {boolean} enabled - whether E2EE should be enabled or not.
25
-     * @returns {void}
26
-     */
27
-    setEnabled(enabled: boolean): void;
28
-    /**
29
-     * Sets the key for End-to-End encryption.
30
-     *
31
-     * @returns {void}
32
-     */
33
-    setEncryptionKey(): void;
34
-    /**
35
-     * Setup E2EE on the new track that has been added to the conference, apply it on all the open peerconnections.
36
-     * @param {JitsiLocalTrack} track - the new track that's being added to the conference.
37
-     * @private
38
-     */
39
-    private _onLocalTrackAdded;
40
-    /**
41
-     * Sets up E2E encryption for the new session.
42
-     * @param {JingleSessionPC} session - the new media session.
43
-     * @private
44
-     */
45
-    private _onMediaSessionStarted;
46
-    /**
47
-     * Setup E2EE for the receiving side.
48
-     *
49
-     * @private
50
-     */
51
-    private _setupReceiverE2EEForTrack;
52
-    /**
53
-     * Setup E2EE for the sending side.
54
-     *
55
-     * @param {JingleSessionPC} session - the session which sends the media produced by the track.
56
-     * @param {JitsiLocalTrack} track - the local track for which e2e encoder will be configured.
57
-     * @private
58
-     */
59
-    private _setupSenderE2EEForTrack;
60
-    /**
61
-     * Setup E2EE on the sender that is created for the unmuted local video track.
62
-     * @param {JitsiLocalTrack} track - the track for which muted status has changed.
63
-     * @private
64
-     */
65
-    private _trackMuteChanged;
66
-}
67
-import Listenable from "../util/Listenable";
68
-import E2EEContext from "./E2EEContext";
69
-import Deferred from "../util/Deferred";

+ 0
- 73
types/auto/modules/e2ee/ManagedKeyHandler.d.ts

@@ -1,73 +0,0 @@
1
-/**
2
- * This module integrates {@link E2EEContext} with {@link OlmAdapter} in order to distribute the keys for encryption.
3
- */
4
-export class ManagedKeyHandler extends KeyHandler {
5
-    /**
6
-     * Build a new AutomaticKeyHandler instance, which will be used in a given conference.
7
-     */
8
-    constructor(conference: any);
9
-    _key: any;
10
-    _conferenceJoined: boolean;
11
-    _olmAdapter: OlmAdapter;
12
-    _rotateKey: any;
13
-    _ratchetKey: any;
14
-    /**
15
-     * When E2EE is enabled it initializes sessions and sets the key.
16
-     * Cleans up the sessions when disabled.
17
-     *
18
-     * @param {boolean} enabled - whether E2EE should be enabled or not.
19
-     * @returns {void}
20
-     */
21
-    _setEnabled(enabled: boolean): void;
22
-    /**
23
-     * Handles an update in a participant's presence property.
24
-     *
25
-     * @param {JitsiParticipant} participant - The participant.
26
-     * @param {string} name - The name of the property that changed.
27
-     * @param {*} oldValue - The property's previous value.
28
-     * @param {*} newValue - The property's new value.
29
-     * @private
30
-     */
31
-    private _onParticipantPropertyChanged;
32
-    /**
33
-     * Advances (using ratcheting) the current key when a new participant joins the conference.
34
-     * @private
35
-     */
36
-    private _onParticipantJoined;
37
-    /**
38
-     * Rotates the current key when a participant leaves the conference.
39
-     * @private
40
-     */
41
-    private _onParticipantLeft;
42
-    /**
43
-     * Rotates the local key. Rotating the key implies creating a new one, then distributing it
44
-     * to all participants and once they all received it, start using it.
45
-     *
46
-     * @private
47
-     */
48
-    private _rotateKeyImpl;
49
-    /**
50
-     * Advances the current key by using ratcheting.
51
-     *
52
-     * @private
53
-     */
54
-    private _ratchetKeyImpl;
55
-    /**
56
-     * Handles an update in a participant's key.
57
-     *
58
-     * @param {string} id - The participant ID.
59
-     * @param {Uint8Array | boolean} key - The new key for the participant.
60
-     * @param {Number} index - The new key's index.
61
-     * @private
62
-     */
63
-    private _onParticipantKeyUpdated;
64
-    /**
65
-     * Generates a new 256 bit random key.
66
-     *
67
-     * @returns {Uint8Array}
68
-     * @private
69
-     */
70
-    private _generateKey;
71
-}
72
-import { KeyHandler } from "./KeyHandler";
73
-import { OlmAdapter } from "./OlmAdapter";

+ 0
- 166
types/auto/modules/e2ee/OlmAdapter.d.ts

@@ -1,166 +0,0 @@
1
-/**
2
- * This class implements an End-to-End Encrypted communication channel between every two peers
3
- * in the conference. This channel uses libolm to achieve E2EE.
4
- *
5
- * The created channel is then used to exchange the secret key that each participant will use
6
- * to encrypt the actual media (see {@link E2EEContext}).
7
- *
8
- * A simple JSON message based protocol is implemented, which follows a request - response model:
9
- * - session-init: Initiates an olm session establishment procedure. This message will be sent
10
- *                 by the participant who just joined, to everyone else.
11
- * - session-ack: Completes the olm session establishment. This message may contain ancillary
12
- *                encrypted data, more specifically the sender's current key.
13
- * - key-info: Includes the sender's most up to date key information.
14
- * - key-info-ack: Acknowledges the reception of a key-info request. In addition, it may contain
15
- *                 the sender's key information, if available.
16
- * - error: Indicates a request processing error has occurred.
17
- *
18
- * These requests and responses are transport independent. Currently they are sent using XMPP
19
- * MUC private messages.
20
- */
21
-export class OlmAdapter extends Listenable {
22
-    /**
23
-     * Indicates if olm is supported on the current platform.
24
-     *
25
-     * @returns {boolean}
26
-     */
27
-    static isSupported(): boolean;
28
-    /**
29
-     * Creates an adapter instance for the given conference.
30
-     */
31
-    constructor(conference: any);
32
-    _conf: any;
33
-    _init: Deferred;
34
-    _key: boolean | Uint8Array;
35
-    _keyIndex: number;
36
-    _reqs: Map<any, any>;
37
-    _sessionInitialization: Deferred;
38
-    /**
39
-     * Starts new olm sessions with every other participant that has a participantId "smaller" than the localParticipantId.
40
-     */
41
-    initSessions(): Promise<void>;
42
-    /**
43
-     * Updates the current participant key and distributes it to all participants in the conference
44
-     * by sending a key-info message.
45
-     *
46
-     * @param {Uint8Array|boolean} key - The new key.
47
-     * @returns {Promise<Number>}
48
-     */
49
-    updateKey(key: Uint8Array | boolean): Promise<number>;
50
-    /**
51
-     * Updates the current participant key.
52
-     * @param {Uint8Array|boolean} key - The new key.
53
-     * @returns {number}
54
-    */
55
-    updateCurrentKey(key: Uint8Array | boolean): number;
56
-    /**
57
-     * Frees the olmData session for the given participant.
58
-     *
59
-     */
60
-    clearParticipantSession(participant: any): void;
61
-    /**
62
-     * Frees the olmData sessions for all participants.
63
-     *
64
-     */
65
-    clearAllParticipantsSessions(): void;
66
-    /**
67
-     * Internal helper to bootstrap the olm library.
68
-     *
69
-     * @returns {Promise<void>}
70
-     * @private
71
-     */
72
-    private _bootstrapOlm;
73
-    _olmAccount: any;
74
-    _idKey: any;
75
-    /**
76
-     * Publishes our own Olm id key in presence.
77
-     * @private
78
-     */
79
-    private _onIdKeyReady;
80
-    /**
81
-     * Event posted when the E2EE signalling channel has been established with the given participant.
82
-     * @private
83
-     */
84
-    private _onParticipantE2EEChannelReady;
85
-    /**
86
-     * Internal helper for encrypting the current key information for a given participant.
87
-     *
88
-     * @param {Olm.Session} session - Participant's session.
89
-     * @returns {string} - The encrypted text with the key information.
90
-     * @private
91
-     */
92
-    private _encryptKeyInfo;
93
-    /**
94
-     * Internal helper for getting the olm related data associated with a participant.
95
-     *
96
-     * @param {JitsiParticipant} participant - Participant whose data is to be extracted.
97
-     * @returns {Object}
98
-     * @private
99
-     */
100
-    private _getParticipantOlmData;
101
-    /**
102
-     * Handles leaving the conference, cleaning up olm sessions.
103
-     *
104
-     * @private
105
-     */
106
-    private _onConferenceLeft;
107
-    /**
108
-     * Main message handler. Handles 1-to-1 messages received from other participants
109
-     * and sends the appropriate replies.
110
-     *
111
-     * @private
112
-     */
113
-    private _onEndpointMessageReceived;
114
-    /**
115
-     * Handles a participant leaving. When a participant leaves, their olm session is destroyed.
116
-     *
117
-     * @private
118
-     */
119
-    private _onParticipantLeft;
120
-    /**
121
-    * Handles an update in a participant's presence property.
122
-    *
123
-    * @param {JitsiParticipant} participant - The participant.
124
-    * @param {string} name - The name of the property that changed.
125
-    * @param {*} oldValue - The property's previous value.
126
-    * @param {*} newValue - The property's new value.
127
-    * @private
128
-    */
129
-    private _onParticipantPropertyChanged;
130
-    /**
131
-     * Builds and sends an error message to the target participant.
132
-     *
133
-     * @param {JitsiParticipant} participant - The target participant.
134
-     * @param {string} error - The error message.
135
-     * @returns {void}
136
-     */
137
-    _sendError(participant: any, error: string): void;
138
-    /**
139
-     * Internal helper to send the given object to the given participant ID.
140
-     * This function merely exists so the transport can be easily swapped.
141
-     * Currently messages are transmitted via XMPP MUC private messages.
142
-     *
143
-     * @param {object} data - The data that will be sent to the target participant.
144
-     * @param {string} participantId - ID of the target participant.
145
-     */
146
-    _sendMessage(data: object, participantId: string): void;
147
-    /**
148
-     * Builds and sends the session-init request to the target participant.
149
-     *
150
-     * @param {JitsiParticipant} participant - Participant to whom we'll send the request.
151
-     * @returns {Promise} - The promise will be resolved when the session-ack is received.
152
-     * @private
153
-     */
154
-    private _sendSessionInit;
155
-}
156
-export namespace OlmAdapter {
157
-    export { OlmAdapterEvents as events };
158
-}
159
-import Listenable from "../util/Listenable";
160
-import Deferred from "../util/Deferred";
161
-declare namespace OlmAdapterEvents {
162
-    const OLM_ID_KEY_READY: string;
163
-    const PARTICIPANT_E2EE_CHANNEL_READY: string;
164
-    const PARTICIPANT_KEY_UPDATED: string;
165
-}
166
-export {};
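For orientation, the request/response protocol described in the OlmAdapter comment above could be modelled as a small discriminated union. This is a sketch only: the message type names come from the deleted doc comment, while the payload fields shown here are illustrative assumptions rather than the actual lib-jitsi-meet wire format.

    // Hypothetical shapes for the olm channel messages named above; field names are illustrative.
    type OlmMessage =
        | { type: 'session-init'; idKey: string; otKey: string }
        | { type: 'session-ack'; ciphertext?: string }   // may carry the sender's current key
        | { type: 'key-info'; ciphertext: string }       // sender's most up-to-date key
        | { type: 'key-info-ack'; ciphertext?: string }  // may include the receiver's key, if available
        | { type: 'error'; error: string };

    // The transport is swappable; currently XMPP MUC private messages are used.
    function sendOlmMessage(send: (data: object, to: string) => void, msg: OlmMessage, participantId: string): void {
        send(msg, participantId);
    }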

+ 0
- 1
types/auto/modules/e2ee/Worker.d.ts View File

@@ -1 +0,0 @@
1
-export {};

+ 0
- 25
types/auto/modules/e2ee/crypto-utils.d.ts View File

@@ -1,25 +0,0 @@
1
-/**
2
- * Derives a set of keys from the master key.
3
- * @param {CryptoKey} material - master key to derive from
4
- *
5
- * See https://tools.ietf.org/html/draft-omara-sframe-00#section-4.3.1
6
- */
7
-export function deriveKeys(material: CryptoKey): Promise<{
8
-    material: CryptoKey;
9
-    encryptionKey: CryptoKey;
10
-}>;
11
-/**
12
- * Ratchets a key. See
13
- * https://tools.ietf.org/html/draft-omara-sframe-00#section-4.3.5.1
14
- * @param {CryptoKey} material - base key material
15
- * @returns {Promise<ArrayBuffer>} - ratcheted key material
16
- */
17
-export function ratchet(material: CryptoKey): Promise<ArrayBuffer>;
18
-/**
19
- * Converts a raw key into a WebCrypto key object with default options
20
- * suitable for our usage.
21
- * @param {ArrayBuffer} keyBytes - raw key
22
- * @param {Array} keyUsages - key usages, see importKey documentation
23
- * @returns {Promise<CryptoKey>} - the WebCrypto key.
24
- */
25
-export function importKey(keyBytes: ArrayBuffer): Promise<CryptoKey>;
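Read together, the three signatures above imply a call order: import the raw key bytes, derive the per-frame encryption key, and ratchet the material when the key needs to advance. A minimal usage sketch based only on these declarations (the module path and the surrounding rotation logic are assumptions):

    import { deriveKeys, importKey, ratchet } from './crypto-utils';

    // Turn raw key bytes into an encryption key and pre-compute the next ratchet
    // step, per the SFrame draft sections referenced in the comments above.
    async function prepareKey(rawKey: ArrayBuffer) {
        const material = await importKey(rawKey);              // raw bytes -> CryptoKey
        const { encryptionKey } = await deriveKeys(material);  // key used for the media frames
        const nextMaterial = await ratchet(material);          // material for the next key epoch
        return { encryptionKey, nextMaterial };
    }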

+ 0
- 4
types/auto/modules/e2ee/utils.d.ts View File

@@ -1,4 +0,0 @@
1
-/**
2
- * Compares two byteArrays for equality.
3
- */
4
-export function isArrayEqual(a1: any, a2: any): boolean;

+ 0
- 79
types/auto/modules/e2eping/e2eping.d.ts View File

@@ -1,79 +0,0 @@
1
-/**
2
- * Implements end-to-end ping (from one conference participant to another) via
3
- * the jitsi-videobridge channel (either WebRTC data channel or web socket).
4
- *
5
- * TODO: use a broadcast message instead of individual pings to each remote
6
- * participant.
7
- *
8
- * This class:
9
- * 1. Sends periodic ping requests to all other participants in the
10
- * conference.
11
- * 2. Responds to ping requests from other participants.
12
- * 3. Fires events with the end-to-end RTT to each participant whenever a
13
- * response is received.
14
- * 4. Fires analytics events with the end-to-end RTT periodically.
15
- */
16
-export default class E2ePing {
17
-    /**
18
-     * @param {JitsiConference} conference - The conference.
19
-     * @param {Function} sendMessage - The function to use to send a message.
20
-     * @param {Object} options
21
-     */
22
-    constructor(conference: any, options: any, sendMessage: Function);
23
-    conference: any;
24
-    eventEmitter: any;
25
-    sendMessage: Function;
26
-    participants: {};
27
-    numRequests: any;
28
-    maxConferenceSize: any;
29
-    maxMessagesPerSecond: any;
30
-    /**
31
-     * Handles a participant joining the conference. Starts to send ping
32
-     * requests to the participant.
33
-     *
34
-     * @param {String} id - The ID of the participant.
35
-     * @param {JitsiParticipant} participant - The participant that joined.
36
-     */
37
-    participantJoined(id: string, participant: any): void;
38
-    /**
39
-     * Handles a participant leaving the conference. Stops sending requests.
40
-     *
41
-     * @param {String} id - The ID of the participant.
42
-     */
43
-    participantLeft(id: string): void;
44
-    /**
45
-     * Handles a message that was received.
46
-     *
47
-     * @param participant - The message sender.
48
-     * @param payload - The payload of the message.
49
-     */
50
-    messageReceived(participant: any, payload: any): void;
51
-    /**
52
-     * Delay processing USER_JOINED events until the MUC is fully joined,
53
-     * otherwise the apparent conference size will be wrong.
54
-     */
55
-    conferenceJoined(): void;
56
-    /**
57
-     * Remove a participant without calling "stop".
58
-     */
59
-    removeParticipant(id: any): void;
60
-    /**
61
-     * Handles a ping request coming from another participant.
62
-     *
63
-     * @param {string} participantId - The ID of the participant who sent the
64
-     * request.
65
-     * @param {Object} request - The request.
66
-     */
67
-    handleRequest(participantId: string, request: any): void;
68
-    /**
69
-     * Handles a ping response coming from another participant
70
-     * @param {string} participantId - The ID of the participant who sent the
71
-     * response.
72
-     * @param {Object} response - The response.
73
-     */
74
-    handleResponse(participantId: string, response: any): void;
75
-    /**
76
-     * Stops this E2ePing (i.e. stop sending requests).
77
-     */
78
-    stop(): void;
79
-}
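A rough sketch of how the E2ePing surface above might be wired up: construct it with a send function, forward incoming bridge-channel messages to messageReceived, and stop it when leaving. The conference object, options and delivery mechanism are placeholders; the declaration does not pin down the message payload, so none is assumed here.

    import E2ePing from './modules/e2eping/e2eping';

    declare const conference: any; // placeholder for a JitsiConference instance

    const sendMessage = (payload: object, participantId: string): void => {
        // deliver over the jitsi-videobridge channel (data channel or web socket)
    };

    const e2eping = new E2ePing(conference, /* options */ {}, sendMessage);

    // Forward incoming messages so requests are answered and responses become RTT samples.
    function onEndpointMessage(participant: any, payload: any): void {
        e2eping.messageReceived(participant, payload);
    }

    // Stop sending ping requests when the conference is left.
    function onLeft(): void {
        e2eping.stop();
    }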

+ 0
- 26
types/auto/modules/event/Jvb121EventGenerator.d.ts View File

@@ -1,26 +0,0 @@
1
-/**
2
- * Emits {@link JitsiConferenceEvents.JVB121_STATUS} events based on the current
3
- * P2P status and the conference participants count. See the event description
4
- * for more info.
5
- */
6
-export default class Jvb121EventGenerator {
7
-    /**
8
-     * Creates new <tt>Jvb121EventGenerator</tt> for the given conference.
9
-     * @param {JitsiConference} conference
10
-     */
11
-    constructor(conference: any);
12
-    _conference: any;
13
-    /**
14
-     * Indicates whether it's a one to one JVB conference (<tt>true</tt>)
15
-     * or a multiparty one (<tt>false</tt>). Will also be <tt>false</tt> if
16
-     * the conference is currently in the P2P mode.
17
-     * @type {boolean}
18
-     * @private
19
-     */
20
-    private _jvb121;
21
-    /**
22
-     * Checks whether the JVB121 value should be updated and a new event
23
-     * emitted.
24
-     */
25
-    evaluateStatus(): void;
26
-}

+ 0
- 52
types/auto/modules/flags/FeatureFlags.d.ts View File

@@ -1,52 +0,0 @@
1
-declare var _default: FeatureFlags;
2
-export default _default;
3
-/**
4
- * A global module for accessing information about different feature flags state.
5
- */
6
-declare class FeatureFlags {
7
-    /**
8
-     * Configures the module.
9
-     *
10
-     * @param {boolean} flags.runInLiteMode - Enables lite mode for testing to disable media decoding.
11
-     * @param {boolean} flags.sourceNameSignaling - Enables source names in the signaling.
12
-     * @param {boolean} flags.receiveMultipleVideoStreams - Signal support for receiving multiple video streams.
13
-     */
14
-    init(flags: any): void;
15
-    _receiveMultipleVideoStreams: any;
16
-    _runInLiteMode: boolean;
17
-    _sendMultipleVideoStreams: any;
18
-    _sourceNameSignaling: any;
19
-    _ssrcRewriting: boolean;
20
-    _usesUnifiedPlan: any;
21
-    /**
22
-     * Checks if multiple local video streams support is enabled.
23
-     *
24
-     * @returns {boolean}
25
-     */
26
-    isMultiStreamSupportEnabled(): boolean;
27
-    /**
28
-     * Checks if receiving multiple video streams is supported.
29
-     *
30
-     * @returns {boolean}
31
-     */
32
-    isReceiveMultipleVideoStreamsSupported(): boolean;
33
-    /**
34
-     * Checks if the run in lite mode is enabled.
35
-     * This will cause any media to be received but not decoded. (Directions are inactive and no ssrc and ssrc-groups
36
-     * are added to the remote description). This can be used for various test scenarios.
37
-     *
38
-     * @returns {boolean}
39
-     */
40
-    isRunInLiteModeEnabled(): boolean;
41
-    /**
42
-     * Checks if the source name signaling is enabled.
43
-     *
44
-     * @returns {boolean}
45
-     */
46
-    isSourceNameSignalingEnabled(): boolean;
47
-    /**
48
-     * Checks if the client supports re-writing of the SSRCs on the media streams by the bridge.
49
-     * @returns {boolean}
50
-     */
51
-    isSsrcRewritingSupported(): boolean;
52
-}
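Since the module is exported as a singleton (declare var _default: FeatureFlags), usage amounts to a one-time init() followed by boolean queries. A short sketch against the declared API; the flag values are arbitrary:

    import FeatureFlags from './modules/flags/FeatureFlags';

    // Configure once, early in the client's lifetime.
    FeatureFlags.init({
        runInLiteMode: false,
        sourceNameSignaling: true,
        receiveMultipleVideoStreams: true
    });

    // Later, branch on the cached flag state.
    if (FeatureFlags.isSourceNameSignalingEnabled()) {
        // attach source names to signaling
    }
    if (FeatureFlags.isRunInLiteModeEnabled()) {
        // media is received but not decoded; useful for test scenarios
    }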

+ 0
- 25
types/auto/modules/proxyconnection/CustomSignalingLayer.d.ts View File

@@ -1,25 +0,0 @@
1
-/**
2
- * Custom semi-mock implementation for the Proxy connection service.
3
- */
4
-export default class CustomSignalingLayer extends SignalingLayer {
5
-    /**
6
-     * Creates new instance.
7
-     */
8
-    constructor();
9
-    /**
10
-     * A map that stores SSRCs of remote streams.
11
-     * @type {Map<number, string>} maps SSRC number to jid
12
-     */
13
-    ssrcOwners: Map<number, string>;
14
-    /**
15
-     *
16
-     * @type {ChatRoom|null}
17
-     */
18
-    chatRoom: any | null;
19
-    /**
20
-     * Sets the <tt>ChatRoom</tt> instance used.
21
-     * @param {ChatRoom} room
22
-     */
23
-    setChatRoom(room: any): void;
24
-}
25
-import SignalingLayer from "../../service/RTC/SignalingLayer";

+ 0
- 167
types/auto/modules/proxyconnection/ProxyConnectionPC.d.ts View File

@@ -1,167 +0,0 @@
1
-/**
2
- * An adapter around {@code JingleSessionPC} so its logic can be re-used without
3
- * an XMPP connection. It is being re-used for consistency with the rest of the
4
- * codebase and to leverage existing peer connection event handling. Also
5
- * this class provides a facade to hide most of the API for
6
- * {@code JingleSessionPC}.
7
- */
8
-export default class ProxyConnectionPC {
9
-    /**
10
-     * Initializes a new {@code ProxyConnectionPC} instance.
11
-     *
12
-     * @param {Object} options - Values to initialize the instance with.
13
-     * @param {Object} [options.pcConfig] - The {@code RTCConfiguration} to use for the WebRTC peer connection.
14
-     * @param {boolean} [options.isInitiator] - If true, the local client should send offers. If false, the local
15
-     * client should send answers. Defaults to false.
16
-     * @param {Function} options.onRemoteStream - Callback to invoke when a remote media stream has been received
17
-     * through the peer connection.
18
-     * @param {string} options.peerJid - The jid of the remote client with which the peer connection is being established
19
-     * and which should receive direct messages regarding peer connection updates.
20
-     * @param {boolean} [options.receiveVideo] - Whether or not the peer connection should accept incoming video
21
-     * streams. Defaults to false.
22
-     * @param {Function} options.onSendMessage - Callback to invoke when a message has to be sent (signaled) out.
23
-     */
24
-    constructor(options?: {
25
-        pcConfig?: any;
26
-        isInitiator?: boolean;
27
-        onRemoteStream: Function;
28
-        peerJid: string;
29
-        receiveVideo?: boolean;
30
-        onSendMessage: Function;
31
-    });
32
-    _options: {
33
-        pcConfig: any;
34
-        isInitiator: boolean;
35
-        onRemoteStream: Function;
36
-        peerJid: string;
37
-        receiveVideo: boolean;
38
-        onSendMessage: Function;
39
-        receiveAudio: boolean;
40
-    };
41
-    /**
42
-     * Instances of {@code JitsiTrack} associated with this instance of
43
-     * {@code ProxyConnectionPC}.
44
-     *
45
-     * @type {Array<JitsiTrack>}
46
-     */
47
-    _tracks: Array<any>;
48
-    /**
49
-     * The active instance of {@code JingleSessionPC}.
50
-     *
51
-     * @type {JingleSessionPC|null}
52
-     */
53
-    _peerConnection: JingleSessionPC | null;
54
-    /**
55
-     * Invoked when a connection related issue has been encountered.
56
-     *
57
-     * @param {string} errorType - The constant indicating the type of the error
58
-     * that occurred.
59
-     * @param {string} details - Optional additional data about the error.
60
-     * @private
61
-     * @returns {void}
62
-     */
63
-    private _onError;
64
-    /**
65
-     * Callback invoked when the peer connection has received a remote media
66
-     * stream.
67
-     *
68
-     * @param {JitsiRemoteTrack} jitsiRemoteTrack - The remote media stream
69
-     * wrapped in {@code JitsiRemoteTrack}.
70
-     * @private
71
-     * @returns {void}
72
-     */
73
-    private _onRemoteStream;
74
-    /**
75
-     * Callback invoked when {@code JingleSessionPC} needs to signal a message
76
-     * out to the remote peer.
77
-     *
78
-     * @param {XML} iq - The message to signal out.
79
-     * @private
80
-     * @returns {void}
81
-     */
82
-    private _onSendMessage;
83
-    /**
84
-     * Returns the jid of the remote peer with which this peer connection should
85
-     * be established.
86
-     *
87
-     * @returns {string}
88
-     */
89
-    getPeerJid(): string;
90
-    /**
91
-     * Updates the peer connection based on the passed in jingle.
92
-     *
93
-     * @param {Object} $jingle - An XML jingle element, wrapped in query,
94
-     * describing how the peer connection should be updated.
95
-     * @returns {void}
96
-     */
97
-    processMessage($jingle: any): void;
98
-    /**
99
-     * Instantiates a peer connection and starts the offer/answer cycle to
100
-     * establish a connection with a remote peer.
101
-     *
102
-     * @param {Array<JitsiLocalTrack>} localTracks - Initial local tracks to add
103
-     * to the peer connection.
104
-     * @returns {void}
105
-     */
106
-    start(localTracks?: Array<any>): void;
107
-    /**
108
-     * Begins the process of disconnecting from a remote peer and cleaning up
109
-     * the peer connection.
110
-     *
111
-     * @returns {void}
112
-     */
113
-    stop(): void;
114
-    /**
115
-     * Instantiates a new {@code JingleSessionPC} by stubbing out the various
116
-     * dependencies of {@code JingleSessionPC}.
117
-     *
118
-     * @private
119
-     * @returns {JingleSessionPC}
120
-     */
121
-    private _createPeerConnection;
122
-    /**
123
-     * Create an instance of {@code RTC} as it is required for peer
124
-     * connection creation by {@code JingleSessionPC}. An existing instance
125
-     * of {@code RTC} from elsewhere should not be re-used because it is
126
-     * a stateful grouping of utilities.
127
-     */
128
-    _rtc: RTC;
129
-    /**
130
-     * Callback invoked in response to an agreement to start a proxy connection.
131
-     * The passed in jingle element should contain an SDP answer to a previously
132
-     * sent SDP offer.
133
-     *
134
-     * @param {Object} $jingle - The jingle element wrapped in jQuery.
135
-     * @private
136
-     * @returns {void}
137
-     */
138
-    private _onSessionAccept;
139
-    /**
140
-     * Callback invoked in response to a request to start a proxy connection.
141
-     * The passed in jingle element should contain an SDP offer.
142
-     *
143
-     * @param {Object} $jingle - The jingle element wrapped in jQuery.
144
-     * @private
145
-     * @returns {void}
146
-     */
147
-    private _onSessionInitiate;
148
-    /**
149
-     * Callback invoked in response to a request to disconnect an active proxy
150
-     * connection. Cleans up tracks and the peer connection.
151
-     *
152
-     * @private
153
-     * @returns {void}
154
-     */
155
-    private _onSessionTerminate;
156
-    /**
157
-     * Callback invoked in response to ICE candidates from the remote peer.
158
-     * The passed in jingle element should contain an ICE candidate.
159
-     *
160
-     * @param {Object} $jingle - The jingle element wrapped in jQuery.
161
-     * @private
162
-     * @returns {void}
163
-     */
164
-    private _onTransportInfo;
165
-}
166
-import JingleSessionPC from "../xmpp/JingleSessionPC";
167
-import RTC from "../RTC/RTC";

+ 0
- 141
types/auto/modules/proxyconnection/ProxyConnectionService.d.ts View File

@@ -1,141 +0,0 @@
1
-/**
2
- * Instantiates a new ProxyConnectionPC and ensures only one exists at a given
3
- * time. Currently it assumes ProxyConnectionPC is used only for screensharing
4
- * and assumes IQs to be used for communication.
5
- */
6
-export default class ProxyConnectionService {
7
-    /**
8
-     * Initializes a new {@code ProxyConnectionService} instance.
9
-     *
10
-     * @param {Object} options - Values to initialize the instance with.
11
-     * @param {boolean} [options.convertVideoToDesktop] - Whether or not proxied video should be returned as a desktop
12
-     * stream. Defaults to false.
13
-     * @param {Object} [options.pcConfig] - The {@code RTCConfiguration} to use for the WebRTC peer connection.
14
-     * @param {JitsiConnection} [options.jitsiConnection] - The {@code JitsiConnection} which will be used to fetch
15
-     * TURN credentials for the P2P connection.
16
-     * @param {Function} options.onRemoteStream - Callback to invoke when a remote video stream has been received and
17
-     * converted to a {@code JitsiLocalTrack}. The {@code JitsiLocalTrack} will be passed in.
18
-     * @param {Function} options.onSendMessage - Callback to invoke when a message has to be sent (signaled) out. The
19
-     * arguments passed in are the jid to send the message to and the message.
20
-     */
21
-    constructor(options?: {
22
-        convertVideoToDesktop?: boolean;
23
-        pcConfig?: any;
24
-        jitsiConnection?: any;
25
-        onRemoteStream: Function;
26
-        onSendMessage: Function;
27
-    });
28
-    /**
29
-     * Holds a reference to the collection of all callbacks.
30
-     *
31
-     * @type {Object}
32
-     */
33
-    _options: any;
34
-    /**
35
-     * The active instance of {@code ProxyConnectionService}.
36
-     *
37
-     * @type {ProxyConnectionPC|null}
38
-     */
39
-    _peerConnection: ProxyConnectionPC | null;
40
-    /**
41
-     * Callback invoked when an error occurs that should cause
42
-     * {@code ProxyConnectionPC} to be closed if the peer is currently
43
-     * connected. Sends an error message/reply back to the peer.
44
-     *
45
-     * @param {string} peerJid - The peer jid with which the connection was
46
-     * attempted or started, and to which an iq with error details should be
47
-     * sent.
48
-     * @param {string} errorType - The constant indicating the type of the error
49
-     * that occurred.
50
-     * @param {string} details - Optional additional data about the error.
51
-     * @private
52
-     * @returns {void}
53
-     */
54
-    private _onFatalError;
55
-    /**
56
-     * Formats and forwards a message as an iq to be sent to a peer jid.
57
-     *
58
-     * @param {string} peerJid - The jid the iq should be sent to.
59
-     * @param {Object} iq - The iq which would be sent to the peer jid.
60
-     * @private
61
-     * @returns {void}
62
-     */
63
-    private _onSendMessage;
64
-    /**
65
-     * Callback invoked when the remote peer of the {@code ProxyConnectionPC}
66
-     * has offered a media stream. The stream is converted into a
67
-     * {@code JitsiLocalTrack} for local usage if the {@code onRemoteStream}
68
-     * callback is defined.
69
-     *
70
-     * @param {JitsiRemoteTrack} jitsiRemoteTrack - The {@code JitsiRemoteTrack}
71
-     * for the peer's media stream.
72
-     * @private
73
-     * @returns {void}
74
-     */
75
-    private _onRemoteStream;
76
-    /**
77
-     * Parses a message object regarding a proxy connection to create a new
78
-     * proxy connection or update an existing connection.
79
-     *
80
-     * @param {Object} message - A message object regarding establishing or
81
-     * updating a proxy connection.
82
-     * @param {Object} message.data - An object containing additional message
83
-     * details.
84
-     * @param {string} message.data.iq - The stringified iq which explains how
85
-     * and what to update regarding the proxy connection.
86
-     * @param {string} message.from - The message sender's full jid. Used for
87
-     * sending replies.
88
-     * @returns {void}
89
-     */
90
-    processMessage(message: {
91
-        data: {
92
-            iq: string;
93
-        };
94
-        from: string;
95
-    }): void;
96
-    /**
97
-     * Instantiates and initiates a proxy peer connection.
98
-     *
99
-     * @param {string} peerJid - The jid of the remote client that should
100
-     * receive messages.
101
-     * @param {Array<JitsiLocalTrack>} localTracks - Initial media tracks to
102
-     * send through to the peer.
103
-     * @returns {void}
104
-     */
105
-    start(peerJid: string, localTracks?: Array<any>): void;
106
-    /**
107
-     * Terminates any active proxy peer connection.
108
-     *
109
-     * @returns {void}
110
-     */
111
-    stop(): void;
112
-    /**
113
-     * Transforms stringified XML into an XML object wrapped in jQuery.
114
-     *
115
-     * @param {string} xml - The XML in string form.
116
-     * @private
117
-     * @returns {Object|null} A jQuery version of the xml. Null will be returned
118
-     * if an error is encountered during transformation.
119
-     */
120
-    private _convertStringToXML;
121
-    /**
122
-     * Helper for creating an instance of {@code ProxyConnectionPC}.
123
-     *
124
-     * @param {string} peerJid - The jid of the remote peer with which the
125
-     * {@code ProxyConnectionPC} will be established.
126
-     * @param {Object} options - Additional defaults to instantiate the
127
-     * {@code ProxyConnectionPC} with. See the constructor of ProxyConnectionPC
128
-     * for more details.
129
-     * @private
130
-     * @returns {ProxyConnectionPC}
131
-     */
132
-    private _createPeerConnection;
133
-    /**
134
-     * Invoked when preemptively closing the {@code ProxyConnectionPC}.
135
-     *
136
-     * @private
137
-     * @returns {void}
138
-     */
139
-    private _selfCloseConnection;
140
-}
141
-import ProxyConnectionPC from "./ProxyConnectionPC";
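A sketch of how the constructor options and processMessage() above fit together; the callback bodies are placeholders, and the message envelope shape ({ data: { iq }, from }) is the one documented in the declaration:

    import ProxyConnectionService from './modules/proxyconnection/ProxyConnectionService';

    const service = new ProxyConnectionService({
        convertVideoToDesktop: true,
        onRemoteStream: (track: any) => {
            // receives the proxied stream converted to a JitsiLocalTrack
        },
        onSendMessage: (peerJid: string, iq: object) => {
            // signal the iq to peerJid, e.g. over an existing XMPP connection
        }
    });

    // Hand incoming proxy-connection signaling back to the service.
    function onProxyMessage(message: { data: { iq: string }; from: string }): void {
        service.processMessage(message);
    }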

+ 0
- 12
types/auto/modules/proxyconnection/constants.d.ts View File

@@ -1,12 +0,0 @@
1
-/**
2
- * The known jingle actions that can be sent and should be acted upon by
3
- * {@code ProxyConnectionService} and {@code ProxyConnectionPC}.
4
- */
5
-export declare enum ACTIONS {
6
-    ACCEPT = "session-accept",
7
-    CONNECTION_ERROR = "connection-error-encountered",
8
-    INITIATE = "session-initiate",
9
-    TERMINATE = "session-terminate",
10
-    TRANSPORT_INFO = "transport-info",
11
-    UNAVAILABLE = "unavailable"
12
-}

+ 0
- 138
types/auto/modules/qualitycontrol/ReceiveVideoController.d.ts View File

@@ -1,138 +0,0 @@
1
-/**
2
- * This class manages the receive video constraints for a given {@link JitsiConference}. These constraints are
3
- * determined by the application based on how the remote video streams need to be displayed. This class is responsible
4
- * for communicating these constraints to the bridge over the bridge channel.
5
- */
6
-export default class ReceiveVideoController {
7
-    /**
8
-     * Creates a new instance for a given conference.
9
-     *
10
-     * @param {JitsiConference} conference the conference instance for which the new instance will be managing
11
-     * the receive video quality constraints.
12
-     * @param {RTC} rtc the rtc instance which is responsible for initializing the bridge channel.
13
-     */
14
-    constructor(conference: any, rtc: any);
15
-    _conference: any;
16
-    _rtc: any;
17
-    _lastN: any;
18
-    _maxFrameHeight: number;
19
-    /**
20
-     * The map that holds the max frame height requested for each remote source when source-name signaling is
21
-     * enabled.
22
-     *
23
-     * @type Map<string, number>
24
-     */
25
-    _sourceReceiverConstraints: Map<string, number>;
26
-    _receiverVideoConstraints: ReceiverVideoConstraints;
27
-    _selectedEndpoints: any[];
28
-    /**
29
-     * Returns a map of all the remote source names and the corresponding max frame heights.
30
-     *
31
-     * @param {number} maxFrameHeight
32
-     * @returns
33
-     */
34
-    _getDefaultSourceReceiverConstraints(mediaSession: any, maxFrameHeight: number): Map<any, any>;
35
-    /**
36
-     * Handles the {@link JitsiConferenceEvents.MEDIA_SESSION_STARTED}, that is when the conference creates new media
37
-     * session. The preferred receive frameHeight is applied on the media session.
38
-     *
39
-     * @param {JingleSessionPC} mediaSession - the started media session.
40
-     * @returns {void}
41
-     * @private
42
-     */
43
-    private _onMediaSessionStarted;
44
-    /**
45
-     * Returns the lastN value for the conference.
46
-     *
47
-     * @returns {number}
48
-     */
49
-    getLastN(): number;
50
-    /**
51
-     * Elects the participants with the given ids to be the selected participants in order to always receive video
52
-     * for these participants (even when lastN is enabled).
53
-     *
54
-     * @param {Array<string>} ids - The user ids.
55
-     * @returns {void}
56
-     */
57
-    selectEndpoints(ids: Array<string>): void;
58
-    /**
59
-     * Selects a new value for "lastN". The requested number of videos will be delivered after the value is
60
-     * in effect. Set to -1 for unlimited or all available videos.
61
-     *
62
-     * @param {number} value the new value for lastN.
63
-     * @returns {void}
64
-     */
65
-    setLastN(value: number): void;
66
-    /**
67
-     * Sets the maximum video resolution the local participant should receive from remote participants.
68
-     *
69
-     * @param {number|undefined} maxFrameHeight - the new value.
70
-     * @returns {void}
71
-     */
72
-    setPreferredReceiveMaxFrameHeight(maxFrameHeight: number | undefined): void;
73
-    /**
74
-     * Sets the receiver constraints for the conference.
75
-     *
76
-     * @param {Object} constraints The video constraints.
77
-     */
78
-    setReceiverConstraints(constraints: any): void;
79
-}
80
-/**
81
- * This class translates the legacy signaling format between the client and the bridge (that affects bandwidth
82
- * allocation) to the new format described here https://github.com/jitsi/jitsi-videobridge/blob/master/doc/allocation.md
83
- */
84
-declare class ReceiverVideoConstraints {
85
-    _defaultConstraints: {
86
-        maxHeight: number;
87
-    };
88
-    _lastN: number;
89
-    _maxFrameHeight: number;
90
-    _selectedEndpoints: any[];
91
-    _receiverVideoConstraints: {
92
-        constraints: {};
93
-        defaultConstraints: any;
94
-        lastN: number;
95
-        onStageEndpoints: any[];
96
-        selectedEndpoints: any[];
97
-    };
98
-    /**
99
-     * Returns the receiver video constraints that need to be sent on the bridge channel.
100
-     */
101
-    get constraints(): {
102
-        constraints: {};
103
-        defaultConstraints: any;
104
-        lastN: number;
105
-        onStageEndpoints: any[];
106
-        selectedEndpoints: any[];
107
-    };
108
-    /**
109
-     * Updates the lastN field of the ReceiverVideoConstraints sent to the bridge.
110
-     *
111
-     * @param {number} value
112
-     * @returns {boolean} Returns true if the value has been updated, false otherwise.
113
-     */
114
-    updateLastN(value: number): boolean;
115
-    /**
116
-     * Updates the resolution (height requested) in the constraints field of the ReceiverVideoConstraints
117
-     * sent to the bridge.
118
-     *
119
-     * @param {number} maxFrameHeight
120
-     * @returns {boolean} Returns true if the value has been updated, false otherwise.
121
-     */
122
-    updateReceiveResolution(maxFrameHeight: number): boolean;
123
-    /**
124
-     * Updates the receiver constraints sent to the bridge.
125
-     *
126
-     * @param {Object} videoConstraints
127
-     * @returns {boolean} Returns true if the value has been updated, false otherwise.
128
-     */
129
-    updateReceiverVideoConstraints(videoConstraints: any): boolean;
130
-    /**
131
-     * Updates the list of selected endpoints.
132
-     *
133
-     * @param {Array<string>} ids
134
-     * @returns {void}
135
-     */
136
-    updateSelectedEndpoints(ids: Array<string>): void;
137
-}
138
-export {};
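To make the constraint flow concrete, a sketch of a setReceiverConstraints() call. The field names follow the jitsi-videobridge allocation document linked in the ReceiverVideoConstraints comment above, but the exact shape and the source name used as a key are assumptions, not the definitive API:

    import ReceiveVideoController from './modules/qualitycontrol/ReceiveVideoController';

    declare const receiveVideoController: ReceiveVideoController; // created by the conference

    // Ask for up to 25 videos at 180p by default, with one source raised to 720p.
    receiveVideoController.setReceiverConstraints({
        lastN: 25,
        defaultConstraints: { maxHeight: 180 },
        onStageEndpoints: [],
        selectedEndpoints: [],
        constraints: {
            'participant1-v0': { maxHeight: 720 } // hypothetical source name
        }
    });

    // The older, non-source-name helpers remain available as well.
    receiveVideoController.setLastN(25);
    receiveVideoController.setPreferredReceiveMaxFrameHeight(720);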

+ 0
- 75
types/auto/modules/qualitycontrol/SendVideoController.d.ts View File

@@ -1,75 +0,0 @@
1
-/**
2
- * The class manages send video constraints across media sessions({@link JingleSessionPC}) which belong to
3
- * {@link JitsiConference}. It finds the lowest common value between the local user's send preference and
4
- * the remote party's receive preference. Also this module will consider only the active session's receive value,
5
- * because local tracks are shared and, while the JVB may have no preference, the remote p2p peer may have one and they may be totally
6
- * different.
7
- */
8
-export default class SendVideoController {
9
-    /**
10
-     * Creates new instance for a given conference.
11
-     *
12
-     * @param {JitsiConference} conference - the conference instance for which the new instance will be managing
13
-     * the send video quality constraints.
14
-     * @param {RTC} rtc - the rtc instance that is responsible for sending the messages on the bridge channel.
15
-     */
16
-    constructor(conference: any, rtc: any);
17
-    _conference: any;
18
-    _preferredSendMaxFrameHeight: number;
19
-    _rtc: any;
20
-    /**
21
-     * Source name based sender constraints.
22
-     * @type {Map<string, number>};
23
-     */
24
-    _sourceSenderConstraints: Map<string, number>;
25
-    /**
26
-     * Configures the video encodings on the local sources when a media connection is established or becomes active.
27
-     *
28
-     * @returns {Promise<void[]>}
29
-     * @private
30
-     */
31
-    private _configureConstraintsForLocalSources;
32
-    /**
33
-     * Handles the {@link JitsiConferenceEvents.MEDIA_SESSION_STARTED}, that is when the conference creates new media
34
-     * session. It doesn't mean it's already active though. For example the JVB connection may be created after
35
-     * the conference has entered the p2p mode already.
36
-     *
37
-     * @param {JingleSessionPC} mediaSession - the started media session.
38
-     * @private
39
-     */
40
-    private _onMediaSessionStarted;
41
-    /**
42
-     * Propagates the video constraints if they have changed.
43
-     *
44
-     * @param {Object} videoConstraints - The sender video constraints received from the bridge.
45
-     * @returns {Promise<void[]>}
46
-     * @private
47
-     */
48
-    private _onSenderConstraintsReceived;
49
-    _senderVideoConstraints: any;
50
-    /**
51
-     * Figures out the send video constraint as specified by {@link _selectSendMaxFrameHeight} and sets it on all media
52
-     * sessions for the reasons mentioned in this class description.
53
-     *
54
-     * @param {string} sourceName - The source for which sender constraints have changed.
55
-     * @returns {Promise<void[]>}
56
-     * @private
57
-     */
58
-    private _propagateSendMaxFrameHeight;
59
-    /**
60
-     * Selects the lowest common value for the local video send constraint by looking at local user's preference and
61
-     * the active media session's receive preference set by the remote party.
62
-     *
63
-     * @param {string} sourceName - The source for which sender constraints have changed.
64
-     * @returns {number|undefined}
65
-     * @private
66
-     */
67
-    private _selectSendMaxFrameHeight;
68
-    /**
69
-     * Sets local preference for max send video frame height.
70
-     *
71
-     * @param {number} maxFrameHeight - the new value to set.
72
-     * @returns {Promise<void[]>} - resolved when the operation is complete.
73
-     */
74
-    setPreferredSendMaxFrameHeight(maxFrameHeight: number): Promise<void[]>;
75
-}
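The "lowest common value" rule described in the SendVideoController comment above reduces to taking the minimum of the local send preference and the active session's receive preference. A toy illustration of that rule, not the class's actual implementation:

    // Toy version of the selection rule: never send more than either side wants.
    function selectSendMaxFrameHeight(
            preferredSendMaxFrameHeight: number | undefined,
            remoteReceiveMaxFrameHeight: number | undefined): number | undefined {
        if (preferredSendMaxFrameHeight !== undefined && remoteReceiveMaxFrameHeight !== undefined) {
            return Math.min(preferredSendMaxFrameHeight, remoteReceiveMaxFrameHeight);
        }

        return preferredSendMaxFrameHeight ?? remoteReceiveMaxFrameHeight;
    }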

+ 0
- 165
types/auto/modules/recording/JibriSession.d.ts View File

@@ -1,165 +0,0 @@
1
-/**
2
- * Represents a recording session.
3
- */
4
-export default class JibriSession {
5
-    /**
6
-     * Initializes a new JibriSession instance.
7
-     *
8
-     * @constructor
9
-     */
10
-    constructor(options?: {});
11
-    _connection: any;
12
-    _mode: any;
13
-    /**
14
-     * Returns the error related to the session instance, if any.
15
-     *
16
-     * @returns {string|undefined}
17
-     */
18
-    getError(): string | undefined;
19
-    /**
20
-     * Returns the session ID of the session instance.
21
-     *
22
-     * @returns {string|undefined}
23
-     */
24
-    getID(): string | undefined;
25
-    /**
26
-     * Returns the initiator of the session instance.
27
-     *
28
-     * @returns {JitsiParticipant|string} The participant that started the session.
29
-     */
30
-    getInitiator(): any | string;
31
-    /**
32
-     * Returns the streaming URL of the session.
33
-     *
34
-     * @returns {string|undefined}
35
-     */
36
-    getLiveStreamViewURL(): string | undefined;
37
-    /**
38
-     * Returns the current status of the session.
39
-     *
40
-     * @returns {string|undefined}
41
-     */
42
-    getStatus(): string | undefined;
43
-    /**
44
-     * Returns the jid of the participant that stopped the session.
45
-     *
46
-     * @returns {JitsiParticipant|string} The participant that stopped the session.
47
-     */
48
-    getTerminator(): any | string;
49
-    /**
50
-     * Returns the current recording mode of the session, such as "file".
51
-     *
52
-     * @returns {string}
53
-     */
54
-    getMode(): string;
55
-    /**
56
-     * Sets the last known error message related to the session.
57
-     *
58
-     * @param {string} error - The error string explaining why the session
59
-     * entered an error state.
60
-     * @returns {void}
61
-     */
62
-    setError(error: string): void;
63
-    _error: string;
64
-    /**
65
-     * Sets the last live stream URL for the session instance. Usually this is
66
-     * a YouTube URL and is typically only set for "stream" sessions.
67
-     *
68
-     * @param {string} url - The live stream URL associated with the session.
69
-     * @returns {void}
70
-     */
71
-    setLiveStreamViewURL(url: string): void;
72
-    _liveStreamViewURL: string;
73
-    /**
74
-     * Sets the last known status for this recording session.
75
-     *
76
-     * @param {string} status - The new status to set.
77
-     * @returns {void}
78
-     */
79
-    setStatus(status: string): void;
80
-    _status: string;
81
-    /**
82
-     * Sets the participant that started the session.
83
-     * @param {JitsiParticipant | string} participant - The participant or resource id
84
-     * if local participant.
85
-     */
86
-    setInitiator(participant: any | string): void;
87
-    _initiator: any;
88
-    /**
89
-     * Sets the participant that stopped the session.
90
-     * @param {JitsiParticipant | string} participant - The participant or the resource id
91
-     * if local participant.
92
-     */
93
-    setTerminator(participant: any | string): void;
94
-    _terminator: any;
95
-    /**
96
-     * Sends a message to start the actual recording.
97
-     *
98
-     * @param {Object} options - Additional arguments for starting the
99
-     * recording.
100
-     * @param {string} [options.appData] - Data specific to the app/service that
101
-     * the result file will be uploaded to.
102
-     * @param {string} [options.broadcastId] - The broadcast ID of an
103
-     * associated YouTube stream, used for knowing the URL from which the stream
104
-     * can be viewed.
105
-     * @param {string} options.focusMucJid - The JID of the focus participant
106
-     * that controls recording.
107
-     * @param {streamId} options.streamId - Necessary for live streaming, this
108
-     * is the stream key needed to start a live streaming session with the
109
-     * streaming service provider.
110
-     * @returns Promise
111
-     */
112
-    start({ appData, broadcastId, focusMucJid, streamId }: {
113
-        appData?: string;
114
-        broadcastId?: string;
115
-        focusMucJid: string;
116
-        streamId: any;
117
-    }): Promise<any>;
118
-    /**
119
-     * Sends a message to actually stop the recording session.
120
-     *
121
-     * @param {Object} options - Additional arguments for stopping the
122
-     * recording.
123
-     * @param {Object} options.focusMucJid - The JID of the focus participant
124
-     * that controls recording.
125
-     * @returns Promise
126
-     */
127
-    stop({ focusMucJid }: {
128
-        focusMucJid: any;
129
-    }): Promise<any>;
130
-    /**
131
-     * Generates the message to change the status of the recording session.
132
-     *
133
-     * @param {string} status - The new status to which the recording session
134
-     * should transition.
135
-     * @param {string} [options.appData] - Data specific to the app/service that
136
-     * the result file will be uploaded to.
137
-     * @param {string} [options.broadcastId] - The broadcast ID of an
138
-     * associated YouTube stream, used for knowing the URL from which the stream
139
-     * can be viewed.
140
-     * @param {string} options.focusMucJid - The JID of the focus participant
141
-     * that controls recording.
142
-     * @param {streamId} options.streamId - Necessary for live streaming, this
143
-     * is the stream key needed to start a live streaming session with the
144
-     * streaming service provider.
145
-     * @returns Object - The XMPP IQ message.
146
-     */
147
-    _createIQ({ action, appData, broadcastId, focusMucJid, streamId }: string): any;
148
-    /**
149
-     * Handles the error from an iq and stores the error.
150
-     *
151
-     * @param {Node} errorIq - The error response from an Iq.
152
-     * @private
153
-     * @returns {void}
154
-     */
155
-    private _setErrorFromIq;
156
-    /**
157
-     * Sets the known session ID for this recording session.
158
-     *
159
-     * @param {string} sessionID
160
-     * @private
161
-     * @returns {void}
162
-     */
163
-    private _setSessionID;
164
-    _sessionID: string;
165
-}

+ 0
- 112
types/auto/modules/recording/RecordingManager.d.ts View File

@@ -1,112 +0,0 @@
1
-export default RecordingManager;
2
-/**
3
- * A class responsible for starting and stopping recording sessions and emitting
4
- * state updates for them.
5
- */
6
-declare class RecordingManager {
7
-    /**
8
-     * Initialize {@code RecordingManager} with other objects that are necessary
9
-     * for starting a recording.
10
-     *
11
-     * @param {ChatRoom} chatRoom - The chat room to handle.
12
-     * @returns {void}
13
-     */
14
-    constructor(chatRoom: any);
15
-    /**
16
-     * All known recording sessions from the current conference.
17
-     */
18
-    _sessions: {};
19
-    _chatRoom: any;
20
-    /**
21
-     * Callback to invoke to parse through a presence update to find recording
22
-     * related updates (from Jibri participant doing the recording and the
23
-     * focus which controls recording).
24
-     *
25
-     * @param {Object} event - The presence data from the pubsub event.
26
-     * @param {Node} event.presence - An XMPP presence update.
27
-     * @param {boolean} event.fromHiddenDomain - Whether or not the update comes
28
-     * from a participant that is trusted but not visible, as would be the case
29
-     * with the Jibri recorder participant.
30
-     * @returns {void}
31
-     */
32
-    onPresence({ fromHiddenDomain, presence }: {
33
-        presence: Node;
34
-        fromHiddenDomain: boolean;
35
-    }): void;
36
-    /**
37
-     * Finds an existing recording session by session ID.
38
-     *
39
-     * @param {string} sessionID - The session ID associated with the recording.
40
-     * @returns {JibriSession|undefined}
41
-     */
42
-    getSession(sessionID: string): JibriSession | undefined;
43
-    /**
44
-     * Start a recording session.
45
-     *
46
-     * @param {Object} options - Configuration for the recording.
47
-     * @param {string} [options.appData] - Data specific to the app/service that
48
-     * the result file will be uploaded to.
49
-     * @param {string} [optional] options.broadcastId - The channel on which a
50
-     * live stream will occur.
51
-     * @param {string} options.mode - The mode in which recording should be
52
-     * started. Recognized values are "file" and "stream".
53
-     * @param {string} [optional] options.streamId - The stream key to be used
54
-     * for live stream broadcasting. Required for live streaming.
55
-     * @returns {Promise} A promise for starting a recording, which will pass
56
-     * back the session on success. The promise resolves after receiving an
57
-     * acknowledgment of the start request success or fail.
58
-     */
59
-    startRecording(options: {
60
-        appData?: string;
61
-    }): Promise<any>;
62
-    /**
63
-     * Stop a recording session.
64
-     *
65
-     * @param {string} sessionID - The ID associated with the recording session
66
-     * to be stopped.
67
-     * @returns {Promise} The promise resolves after receiving an
68
-     * acknowledgment of the stop request success or fail.
69
-     */
70
-    stopRecording(sessionID: string): Promise<any>;
71
-    /**
72
-     * Stores a reference to the passed in JibriSession.
73
-     *
74
-     * @param {string} session - The JibriSession instance to store.
75
-     * @returns {void}
76
-     */
77
-    _addSession(session: string): void;
78
-    /**
79
-     * Create a new instance of a recording session and stores a reference to
80
-     * it.
81
-     *
82
-     * @param {string} sessionID - The session ID of the recording in progress.
83
-     * @param {string} status - The current status of the recording session.
84
-     * @param {string} mode - The recording mode of the session.
85
-     * @returns {JibriSession}
86
-     */
87
-    _createSession(sessionID: string, status: string, mode: string): JibriSession;
88
-    /**
89
-     * Notifies listeners of an update to a recording session.
90
-     *
91
-     * @param {JibriSession} session - The session that has been updated.
92
-     * @param {string|undefined} initiator - The jid of the initiator of the update.
93
-     */
94
-    _emitSessionUpdate(session: JibriSession, initiator: string | undefined): void;
95
-    /**
96
-     * Parses presence to update an existing JibriSession or to create a new
97
-     * JibriSession.
98
-     *
99
-     * @param {Node} presence - An XMPP presence update.
100
-     * @returns {void}
101
-     */
102
-    _handleFocusPresence(presence: Node): void;
103
-    /**
104
-     * Handles updates from the Jibri which can broadcast a YouTube URL that
105
-     * needs to be updated in a JibriSession.
106
-     *
107
-     * @param {Node} presence - An XMPP presence update.
108
-     * @returns {void}
109
-     */
110
-    _handleJibriPresence(presence: Node): void;
111
-}
112
-import JibriSession from "./JibriSession";
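A sketch of driving a session through the declared RecordingManager API. The declared options type only lists appData, so the extra fields below mirror the JSDoc and are cast loosely; the stream key and broadcast id are placeholders:

    import RecordingManager from './modules/recording/RecordingManager';

    async function startLiveStream(recordingManager: RecordingManager): Promise<void> {
        // Resolves once the start request is acknowledged (or rejected).
        const session = await recordingManager.startRecording({
            mode: 'stream',
            streamId: 'live-stream-key',        // placeholder stream key
            broadcastId: 'youtube-broadcast-id' // placeholder broadcast id
        } as any);

        console.log('recording session', session.getID(), session.getStatus());

        // Later: stop by session ID.
        await recordingManager.stopRecording(session.getID());
    }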

+ 0
- 19
types/auto/modules/recording/recordingConstants.d.ts View File

@@ -1,19 +0,0 @@
1
-declare namespace _default {
2
-    namespace error {
3
-        const BUSY: string;
4
-        const ERROR: string;
5
-        const RESOURCE_CONSTRAINT: string;
6
-        const UNEXPECTED_REQUEST: string;
7
-        const SERVICE_UNAVAILABLE: string;
8
-    }
9
-    namespace mode {
10
-        const FILE: string;
11
-        const STREAM: string;
12
-    }
13
-    namespace status {
14
-        const OFF: string;
15
-        const ON: string;
16
-        const PENDING: string;
17
-    }
18
-}
19
-export default _default;

+ 0
- 77
types/auto/modules/recording/recordingXMLUtils.d.ts View File

@@ -1,77 +0,0 @@
1
-declare namespace _default {
2
-    /**
3
-     * Parses the presence update of the focus and returns an object with the
4
-     * statuses related to recording.
5
-     *
6
-     * @param {Node} presence - An XMPP presence update.
7
-     * @returns {Object} The current presence values related to recording.
8
-     */
9
-    function getFocusRecordingUpdate(presence: Node): any;
10
-    /**
11
-     * Parses the presence update of the focus and returns an object with the
12
-     * statuses related to recording.
13
-     *
14
-     * @param {Node} presence - An XMPP presence update.
15
-     * @returns {Object} The current presence values related to recording.
16
-     */
17
-    function getFocusRecordingUpdate(presence: Node): any;
18
-    /**
19
-     * Parses the presence update from a hidden domain participant and returns
20
-     * an object with the statuses related to recording.
21
-     *
22
-     * @param {Node} presence - An XMPP presence update.
23
-     * @returns {Object} The current presence values related to recording.
24
-     */
25
-    function getHiddenDomainUpdate(presence: Node): any;
26
-    /**
27
-     * Parses the presence update from a hidden domain participant and returns
28
-     * an object with the statuses related to recording.
29
-     *
30
-     * @param {Node} presence - An XMPP presence update.
31
-     * @returns {Object} The current presence values related to recording.
32
-     */
33
-    function getHiddenDomainUpdate(presence: Node): any;
34
-    /**
35
-     * Returns the recording session ID from a successful IQ.
36
-     *
37
-     * @param {Node} response - The response from the IQ.
38
-     * @returns {string} The session ID of the recording session.
39
-     */
40
-    function getSessionIdFromIq(response: Node): string;
41
-    /**
42
-     * Returns the recording session ID from a successful IQ.
43
-     *
44
-     * @param {Node} response - The response from the IQ.
45
-     * @returns {string} The session ID of the recording session.
46
-     */
47
-    function getSessionIdFromIq(response: Node): string;
48
-    /**
49
-     * Returns the recording session ID from a presence, if it exists.
50
-     *
51
-     * @param {Node} presence - An XMPP presence update.
52
-     * @returns {string|undefined} The session ID of the recording session.
53
-     */
54
-    function getSessionId(presence: Node): string;
55
-    /**
56
-     * Returns the recording session ID from a presence, if it exists.
57
-     *
58
-     * @param {Node} presence - An XMPP presence update.
59
-     * @returns {string|undefined} The session ID of the recording session.
60
-     */
61
-    function getSessionId(presence: Node): string;
62
-    /**
63
-     * Returns whether or not a presence is from the focus.
64
-     *
65
-     * @param {Node} presence - An XMPP presence update.
66
-     * @returns {boolean} True if the presence is from the focus.
67
-     */
68
-    function isFromFocus(presence: Node): boolean;
69
-    /**
70
-     * Returns whether or not a presence is from the focus.
71
-     *
72
-     * @param {Node} presence - An XMPP presence update.
73
-     * @returns {boolean} True if the presence is from the focus.
74
-     */
75
-    function isFromFocus(presence: Node): boolean;
76
-}
77
-export default _default;

+ 0
- 31
types/auto/modules/red/red.d.ts View File

@@ -1,31 +0,0 @@
1
-/**
2
- * An encoder for RFC 2198 redundancy using WebRTC Insertable Streams.
3
- */
4
-export class RFC2198Encoder {
5
-    /**
6
-     * @param {Number} targetRedundancy the desired amount of redundancy.
7
-     */
8
-    constructor(targetRedundancy?: number);
9
-    targetRedundancy: number;
10
-    frameBuffer: any[];
11
-    payloadType: number;
12
-    /**
13
-     * Set the desired level of redundancy. 4 means "four redundant frames plus the current frame".
14
-     * It is possible to reduce this to 0 to minimize the overhead to one byte.
15
-     * @param {Number} targetRedundancy the desired amount of redundancy.
16
-     */
17
-    setRedundancy(targetRedundancy: number): void;
18
-    /**
19
-     * Set the "inner opus payload type". This is typically our RED payload type that we tell
20
-     * the other side as our opus payload type. Can be queried from the sender using getParameters()
21
-     * after setting the answer.
22
-     * @param {Number} payloadType the payload type to use for opus.
23
-     */
24
-    setPayloadType(payloadType: number): void;
25
-    /**
26
-     * This is the actual transform to add redundancy to a raw opus frame.
27
-     * @param {RTCEncodedAudioFrame} encodedFrame - Encoded audio frame.
28
-     * @param {TransformStreamDefaultController} controller - TransportStreamController.
29
-     */
30
-    addRedundancy(encodedFrame: any, controller: TransformStreamDefaultController): void;
31
-}
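A sketch of plugging the encoder into WebRTC Insertable Streams on an audio sender. createEncodedStreams() is the Chromium-specific entry point (hence the cast), and the payload-type lookup is assumed to happen elsewhere in the application:

    import { RFC2198Encoder } from './modules/red/red';

    function attachRedEncoder(sender: RTCRtpSender, opusPayloadType: number): void {
        const encoder = new RFC2198Encoder(2);   // current frame plus two redundant frames
        encoder.setPayloadType(opusPayloadType); // the "inner" opus payload type

        // Chromium-only API; typings usually require a cast.
        const { readable, writable } = (sender as any).createEncodedStreams();

        readable
            .pipeThrough(new TransformStream({
                transform: (frame: any, controller: TransformStreamDefaultController) =>
                    encoder.addRedundancy(frame, controller)
            }))
            .pipeTo(writable);
    }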

+ 0
- 92
types/auto/modules/sdp/LocalSdpMunger.d.ts View File

@@ -1,92 +0,0 @@
1
-/**
2
- * Fakes local SDP exposed to {@link JingleSessionPC} through the local
3
- * description getter. Modifies the SDP, so that it will contain muted local
4
- * video tracks description, even though their underlying {MediaStreamTrack}s
5
- * are no longer in the WebRTC peerconnection. That prevents SSRC updates from
6
- * being sent to Jicofo/the remote peer and prevents an sRD/sLD cycle on the remote
7
- * side.
8
- */
9
-export default class LocalSdpMunger {
10
-    /**
11
-     * Creates new <tt>LocalSdpMunger</tt> instance.
12
-     *
13
-     * @param {TraceablePeerConnection} tpc
14
-     * @param {string} localEndpointId - The endpoint id of the local user.
15
-     */
16
-    constructor(tpc: any, localEndpointId: string);
17
-    tpc: any;
18
-    localEndpointId: string;
19
-    audioSourcesToMsidMap: Map<any, any>;
20
-    videoSourcesToMsidMap: Map<any, any>;
21
-    /**
22
-     * Makes sure that muted local video tracks associated with the parent
23
-     * {@link TraceablePeerConnection} are described in the local SDP. It's done
24
-     * in order to prevent sending 'source-remove'/'source-add' Jingle
25
-     * notifications when local video track is muted (<tt>MediaStream</tt> is
26
-     * removed from the peerconnection).
27
-     *
28
-     * NOTE: 1 video track is assumed.
29
-     *
30
-     * @param {SdpTransformWrap} transformer the transformer instance which will
31
-     * be used to process the SDP.
32
-     * @return {boolean} <tt>true</tt> if there were any modifications to
33
-     * the SDP wrapped by <tt>transformer</tt>.
34
-     * @private
35
-     */
36
-    private _addMutedLocalVideoTracksToSDP;
37
-    /**
38
-     * Returns a string that can be set as the MSID attribute for a source.
39
-     *
40
-     * @param {string} mediaType - Media type of the source.
41
-     * @param {string} trackId - Id of the MediaStreamTrack associated with the source.
42
-     * @param {string} streamId - Id of the MediaStream associated with the source.
43
-     * @returns {string|null}
44
-     */
45
-    _generateMsidAttribute(mediaType: string, trackId: string, streamId?: string): string | null;
46
-    /**
47
-     * Modifies 'cname', 'msid', 'label' and 'mslabel' by appending the id of {@link LocalSdpMunger#tpc} at the end,
48
-     * preceding by a dash sign.
49
-     *
50
-     * @param {MLineWrap} mediaSection - The media part (audio or video) of the session description which will be
51
-     * modified in place.
52
-     * @returns {void}
53
-     * @private
54
-     */
55
-    private _transformMediaIdentifiers;
56
-    /**
57
-     * Maybe modifies local description to fake local video tracks SDP when
58
-     * those are muted.
59
-     *
60
-     * @param {object} desc the WebRTC SDP object instance for the local
61
-     * description.
62
-     * @returns {RTCSessionDescription}
63
-     */
64
-    maybeAddMutedLocalVideoTracksToSDP(desc: object): RTCSessionDescription;
65
-    /**
66
-     * This transformation will make sure that stream identifiers are unique
67
-     * across all of the local PeerConnections even if the same stream is used
68
-     * by multiple instances at the same time.
69
-     * Each PeerConnection assigns different SSRCs to the same local
70
-     * MediaStream, but the MSID remains the same as it's used to identify
71
-     * the stream by the WebRTC backend. The transformation will append
72
-     * {@link TraceablePeerConnection#id} at the end of each stream's identifier
73
-     * ("cname", "msid", "label" and "mslabel").
74
-     *
75
-     * @param {RTCSessionDescription} sessionDesc - The local session
76
-     * description (this instance remains unchanged).
77
-     * @return {RTCSessionDescription} - Transformed local session description
78
-     * (a modified copy of the one given as the input).
79
-     */
80
-    transformStreamIdentifiers(sessionDesc: RTCSessionDescription): RTCSessionDescription;
81
-    /**
82
-     * Injects source names. Source names are needed for multiple-streams-per-endpoint support. The final plan is to
83
-     * use the "mid" attribute for source names, but because the SDP to Jingle conversion still operates in the Plan-B
84
-     * semantics (one source name per media), a custom "name" attribute is injected into the SSRC lines.
85
-     *
86
-     * @param {MLineWrap} mediaSection - The media part (audio or video) of the session description which will be
87
-     * modified in place.
88
-     * @returns {void}
89
-     * @private
90
-     */
91
-    private _injectSourceNames;
92
-}

+ 0
- 50
types/auto/modules/sdp/RtxModifier.d.ts View File

@@ -1,50 +0,0 @@
-/**
- * End helper functions
- */
-/**
- * Adds any missing RTX streams for video streams
- *  and makes sure that they remain consistent
- */
-export default class RtxModifier {
-    /**
-     * Map of video ssrc to corresponding RTX
-     *  ssrc
-     */
-    correspondingRtxSsrcs: Map<any, any>;
-    /**
-     * Clear the cached map of primary video ssrcs to
-     *  their corresponding rtx ssrcs so that they will
-     *  not be used for the next call to modifyRtxSsrcs
-     */
-    clearSsrcCache(): void;
-    /**
-     * Explicitly set the primary video ssrc -> rtx ssrc
-     *  mapping to be used in modifyRtxSsrcs
-     * @param {Map} ssrcMapping a mapping of primary video
-     *  ssrcs to their corresponding rtx ssrcs
-     */
-    setSsrcCache(ssrcMapping: Map<any, any>): void;
-    /**
-     * Adds RTX ssrcs for any video ssrcs that don't already have them.  If the video ssrc has been seen before, and
-     * already had an RTX ssrc generated, the same RTX ssrc will be used again.
-     *
-     * @param {string} sdpStr sdp in raw string format
-     * @returns {string} The modified sdp in raw string format.
-     */
-    modifyRtxSsrcs(sdpStr: string): string;
-    /**
-     * Does the same thing as {@link modifyRtxSsrcs}, but takes the {@link MLineWrap} instance wrapping video media as
-     * an argument.
-     * @param {MLineWrap} videoMLine
-     * @return {boolean} <tt>true</tt> if the SDP wrapped by {@link SdpTransformWrap} has been modified or
-     * <tt>false</tt> otherwise.
-     */
-    modifyRtxSsrcs2(videoMLine: any): boolean;
-    /**
-     * Strip all rtx streams from the given sdp.
-     *
-     * @param {string} sdpStr sdp in raw string format
-     * @returns {string} sdp string with all rtx streams stripped
-     */
-    stripRtx(sdpStr: string): string;
-}
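A minimal usage sketch of the RtxModifier API declared above; the import path and the offerSdp variable are assumptions made for illustration:

    import RtxModifier from './modules/sdp/RtxModifier'; // path assumed

    declare const offerSdp: string; // raw SDP, e.g. from createOffer()

    const rtxModifier = new RtxModifier();
    // First call generates RTX SSRCs and caches the primary -> RTX mapping.
    const withRtx = rtxModifier.modifyRtxSsrcs(offerSdp);
    // Later calls reuse the cached mapping, so RTX SSRCs stay consistent.
    const stillConsistent = rtxModifier.modifyRtxSsrcs(withRtx);
    // Alternatively, remove every RTX stream from the SDP.
    const withoutRtx = rtxModifier.stripRtx(offerSdp);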

+ 0
- 55
types/auto/modules/sdp/SDP.d.ts

@@ -1,55 +0,0 @@
1
-/**
2
- *
3
- * @param sdp
4
- */
5
-export default function SDP(sdp: any): void;
6
-export default class SDP {
7
-    /**
8
-     *
9
-     * @param sdp
10
-     */
11
-    constructor(sdp: any);
12
-    media: any;
13
-    raw: string;
14
-    session: string;
15
-    /**
16
-     * A flag will make {@link transportToJingle} and {@link jingle2media} replace
17
-     * ICE candidates IPs with invalid value of '1.1.1.1' which will cause ICE
18
-     * failure. The flag is used in the automated testing.
19
-     * @type {boolean}
20
-     */
21
-    failICE: boolean;
22
-    /**
23
-     * Whether or not to remove TCP ice candidates when translating from/to jingle.
24
-     * @type {boolean}
25
-     */
26
-    removeTcpCandidates: boolean;
27
-    /**
28
-     * Whether or not to remove UDP ice candidates when translating from/to jingle.
29
-     * @type {boolean}
30
-     */
31
-    removeUdpCandidates: boolean;
32
-    /**
33
-     * Adds a new m-line to the description so that a new local source can then be attached to the transceiver that gets
34
-     * added after a reneogtiation cycle.
35
-     *
36
-     * @param {MediaType} mediaType media type of the new source that is being added.
37
-     */
38
-    addMlineForNewLocalSource(mediaType: any): void;
39
-    /**
40
-     * Returns map of MediaChannel mapped per channel idx.
41
-     */
42
-    getMediaSsrcMap(): {};
43
-    /**
44
-     * Returns <tt>true</tt> if this SDP contains given SSRC.
45
-     * @param ssrc the ssrc to check.
46
-     * @returns {boolean} <tt>true</tt> if this SDP contains given SSRC.
47
-     */
48
-    containsSSRC(ssrc: any): boolean;
49
-    toJingle(elem: any, thecreator: any): any;
50
-    transportToJingle(mediaindex: any, elem: any): void;
51
-    rtcpFbToJingle(mediaindex: any, elem: any, payloadtype: any): void;
52
-    rtcpFbFromJingle(elem: any, payloadtype: any): string;
53
-    fromJingle(jingle: any): void;
54
-    jingle2media(content: any): string;
55
-}
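A short sketch of the SDP wrapper declared above, which is the raw-SDP side of the SDP/Jingle conversion; the import path, the rawSdp string and the SSRC value are assumed for illustration:

    import SDP from './modules/sdp/SDP'; // path assumed

    declare const rawSdp: string;

    const sdp = new SDP(rawSdp);
    if (sdp.containsSSRC(1234)) {
        // Media channels keyed by channel index.
        const ssrcMap = sdp.getMediaSsrcMap();
        console.log(Object.keys(ssrcMap).length);
    }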

+ 0
- 25
types/auto/modules/sdp/SDPDiffer.d.ts

@@ -1,25 +0,0 @@
-/**
- *
- * @param mySDP
- * @param otherSDP
- */
-export default function SDPDiffer(mySDP: any, otherSDP: any): void;
-export default class SDPDiffer {
-    /**
-     *
-     * @param mySDP
-     * @param otherSDP
-     */
-    constructor(mySDP: any, otherSDP: any);
-    mySDP: any;
-    otherSDP: any;
-    /**
-     * Returns map of MediaChannel that contains media contained in
-     * 'mySDP', but not contained in 'otherSdp'. Mapped by channel idx.
-     */
-    getNewMedia(): {};
-    /**
-     * TODO: document!
-     */
-    toJingle(modify: any): boolean;
-}
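A sketch of how SDPDiffer is typically combined with the SDP wrapper above to find media added between two descriptions; the import paths and the two raw SDP strings are assumed:

    import SDP from './modules/sdp/SDP';             // paths assumed
    import SDPDiffer from './modules/sdp/SDPDiffer';

    declare const oldRawSdp: string;
    declare const newRawSdp: string;

    // Media present in the first argument ('mySDP') but not in the second.
    const differ = new SDPDiffer(new SDP(newRawSdp), new SDP(oldRawSdp));
    const addedMedia = differ.getNewMedia();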

+ 0
- 351
types/auto/modules/sdp/SDPUtil.d.ts

@@ -1,351 +0,0 @@
1
-export default SDPUtil;
2
-declare namespace SDPUtil {
3
-    function filterSpecialChars(text: any): any;
4
-    function filterSpecialChars(text: any): any;
5
-    function iceparams(mediadesc: any, sessiondesc: any): {
6
-        ufrag: any;
7
-        pwd: any;
8
-    };
9
-    function iceparams(mediadesc: any, sessiondesc: any): {
10
-        ufrag: any;
11
-        pwd: any;
12
-    };
13
-    function parseICEUfrag(line: any): any;
14
-    function parseICEUfrag(line: any): any;
15
-    function buildICEUfrag(frag: any): string;
16
-    function buildICEUfrag(frag: any): string;
17
-    function parseICEPwd(line: any): any;
18
-    function parseICEPwd(line: any): any;
19
-    function buildICEPwd(pwd: any): string;
20
-    function buildICEPwd(pwd: any): string;
21
-    function parseMID(line: any): any;
22
-    function parseMID(line: any): any;
23
-    /**
24
-     * Finds the MSID attribute in the given array of SSRC attribute lines and returns the value.
25
-     *
26
-     * @param {string[]} ssrcLines - an array of lines similar to 'a:213123 msid:stream-id track-id'.
27
-     * @returns {undefined|string}
28
-     */
29
-    function parseMSIDAttribute(ssrcLines: string[]): string;
30
-    /**
31
-     * Finds the MSID attribute in the given array of SSRC attribute lines and returns the value.
32
-     *
33
-     * @param {string[]} ssrcLines - an array of lines similar to 'a:213123 msid:stream-id track-id'.
34
-     * @returns {undefined|string}
35
-     */
36
-    function parseMSIDAttribute(ssrcLines: string[]): string;
37
-    function parseMLine(line: any): {
38
-        media: any;
39
-        port: any;
40
-        proto: any;
41
-        fmt: any;
42
-    };
43
-    function parseMLine(line: any): {
44
-        media: any;
45
-        port: any;
46
-        proto: any;
47
-        fmt: any;
48
-    };
49
-    function buildMLine(mline: any): string;
50
-    function buildMLine(mline: any): string;
51
-    function parseRTPMap(line: any): {
52
-        id: any;
53
-        name: any;
54
-        clockrate: any;
55
-        channels: any;
56
-    };
57
-    function parseRTPMap(line: any): {
58
-        id: any;
59
-        name: any;
60
-        clockrate: any;
61
-        channels: any;
62
-    };
63
-    /**
64
-     * Parses SDP line "a=sctpmap:..." and extracts SCTP port from it.
65
-     * @param line eg. "a=sctpmap:5000 webrtc-datachannel"
66
-     * @returns [SCTP port number, protocol, streams]
67
-     */
68
-    function parseSCTPMap(line: any): any[];
69
-    /**
70
-     * Parses SDP line "a=sctpmap:..." and extracts SCTP port from it.
71
-     * @param line eg. "a=sctpmap:5000 webrtc-datachannel"
72
-     * @returns [SCTP port number, protocol, streams]
73
-     */
74
-    function parseSCTPMap(line: any): any[];
75
-    function parseSCTPPort(line: any): any;
76
-    function parseSCTPPort(line: any): any;
77
-    function buildRTPMap(el: any): string;
78
-    function buildRTPMap(el: any): string;
79
-    function parseCrypto(line: any): {
80
-        tag: any;
81
-        'crypto-suite': any;
82
-        'key-params': any;
83
-        'session-params': any;
84
-    };
85
-    function parseCrypto(line: any): {
86
-        tag: any;
87
-        'crypto-suite': any;
88
-        'key-params': any;
89
-        'session-params': any;
90
-    };
91
-    function parseFingerprint(line: any): {
92
-        hash: any;
93
-        fingerprint: any;
94
-    };
95
-    function parseFingerprint(line: any): {
96
-        hash: any;
97
-        fingerprint: any;
98
-    };
99
-    function parseFmtp(line: any): {
100
-        name: any;
101
-        value: any;
102
-    }[];
103
-    function parseFmtp(line: any): {
104
-        name: any;
105
-        value: any;
106
-    }[];
107
-    function parseICECandidate(line: any): {
108
-        foundation: any;
109
-        component: any;
110
-        protocol: any;
111
-        priority: any;
112
-        ip: any;
113
-        port: any;
114
-        type: any;
115
-        generation: any;
116
-        'rel-addr': any;
117
-        'rel-port': any;
118
-        tcptype: any;
119
-        network: string;
120
-        id: string;
121
-    };
122
-    function parseICECandidate(line: any): {
123
-        foundation: any;
124
-        component: any;
125
-        protocol: any;
126
-        priority: any;
127
-        ip: any;
128
-        port: any;
129
-        type: any;
130
-        generation: any;
131
-        'rel-addr': any;
132
-        'rel-port': any;
133
-        tcptype: any;
134
-        network: string;
135
-        id: string;
136
-    };
137
-    function buildICECandidate(cand: any): string;
138
-    function buildICECandidate(cand: any): string;
139
-    function parseSSRC(desc: any): Map<any, any>;
140
-    function parseSSRC(desc: any): Map<any, any>;
141
-    /**
142
-     * Gets the source name out of the name attribute "a=ssrc:254321 name:name1".
143
-     *
144
-     * @param {string[]} ssrcLines
145
-     * @returns {string | undefined}
146
-     */
147
-    function parseSourceNameLine(ssrcLines: string[]): string;
148
-    /**
149
-     * Gets the source name out of the name attribute "a=ssrc:254321 name:name1".
150
-     *
151
-     * @param {string[]} ssrcLines
152
-     * @returns {string | undefined}
153
-     */
154
-    function parseSourceNameLine(ssrcLines: string[]): string;
155
-    /**
156
-     * Parse the "videoType" attribute encoded in a set of SSRC attributes (e.g.
157
-     * "a=ssrc:1234 videoType:desktop")
158
-     *
159
-     * @param {string[]} ssrcLines
160
-     * @returns {string | undefined}
161
-     */
162
-    function parseVideoTypeLine(ssrcLines: string[]): string;
163
-    /**
164
-     * Parse the "videoType" attribute encoded in a set of SSRC attributes (e.g.
165
-     * "a=ssrc:1234 videoType:desktop")
166
-     *
167
-     * @param {string[]} ssrcLines
168
-     * @returns {string | undefined}
169
-     */
170
-    function parseVideoTypeLine(ssrcLines: string[]): string;
171
-    function parseRTCPFB(line: any): {
172
-        pt: any;
173
-        type: any;
174
-        params: any;
175
-    };
176
-    function parseRTCPFB(line: any): {
177
-        pt: any;
178
-        type: any;
179
-        params: any;
180
-    };
181
-    function parseExtmap(line: any): {
182
-        value: any;
183
-        direction: any;
184
-        uri: any;
185
-        params: any;
186
-    };
187
-    function parseExtmap(line: any): {
188
-        value: any;
189
-        direction: any;
190
-        uri: any;
191
-        params: any;
192
-    };
193
-    function findLine(haystack: any, needle: any, sessionpart: any): any;
194
-    function findLine(haystack: any, needle: any, sessionpart: any): any;
195
-    function findLines(haystack: any, needle: any, sessionpart: any): any[];
196
-    function findLines(haystack: any, needle: any, sessionpart: any): any[];
197
-    function candidateToJingle(line: any): {
198
-        foundation: any;
199
-        component: any;
200
-        protocol: any;
201
-        priority: any;
202
-        ip: any;
203
-        port: any;
204
-        type: any;
205
-        generation: any;
206
-        'rel-addr': any;
207
-        'rel-port': any;
208
-        tcptype: any;
209
-        network: string;
210
-        id: string;
211
-    };
212
-    function candidateToJingle(line: any): {
213
-        foundation: any;
214
-        component: any;
215
-        protocol: any;
216
-        priority: any;
217
-        ip: any;
218
-        port: any;
219
-        type: any;
220
-        generation: any;
221
-        'rel-addr': any;
222
-        'rel-port': any;
223
-        tcptype: any;
224
-        network: string;
225
-        id: string;
226
-    };
227
-    function candidateFromJingle(cand: any): string;
228
-    function candidateFromJingle(cand: any): string;
229
-    /**
230
-     * Parse the 'most' primary video ssrc from the given m line
231
-     * @param {object} mLine object as parsed from transform.parse
232
-     * @return {number} the primary video ssrc from the given m line
233
-     */
234
-    function parsePrimaryVideoSsrc(videoMLine: any): number;
235
-    /**
236
-     * Parse the 'most' primary video ssrc from the given m line
237
-     * @param {object} mLine object as parsed from transform.parse
238
-     * @return {number} the primary video ssrc from the given m line
239
-     */
240
-    function parsePrimaryVideoSsrc(videoMLine: any): number;
241
-    /**
242
-     * Generate an ssrc
243
-     * @returns {number} an ssrc
244
-     */
245
-    function generateSsrc(): number;
246
-    /**
247
-     * Generate an ssrc
248
-     * @returns {number} an ssrc
249
-     */
250
-    function generateSsrc(): number;
251
-    /**
252
-     * Get an attribute for the given ssrc with the given attributeName
253
-     *  from the given mline
254
-     * @param {object} mLine an mLine object as parsed from transform.parse
255
-     * @param {number} ssrc the ssrc for which an attribute is desired
256
-     * @param {string} attributeName the name of the desired attribute
257
-     * @returns {string} the value corresponding to the given ssrc
258
-     *  and attributeName
259
-     */
260
-    function getSsrcAttribute(mLine: any, ssrc: number, attributeName: string): string;
261
-    /**
262
-     * Get an attribute for the given ssrc with the given attributeName
263
-     *  from the given mline
264
-     * @param {object} mLine an mLine object as parsed from transform.parse
265
-     * @param {number} ssrc the ssrc for which an attribute is desired
266
-     * @param {string} attributeName the name of the desired attribute
267
-     * @returns {string} the value corresponding to the given ssrc
268
-     *  and attributeName
269
-     */
270
-    function getSsrcAttribute(mLine: any, ssrc: number, attributeName: string): string;
271
-    /**
272
-     * Parses the ssrcs from the group sdp line and
273
-     *  returns them as a list of numbers
274
-     * @param {object} the ssrcGroup object as parsed from
275
-     *  sdp-transform
276
-     * @returns {list<number>} a list of the ssrcs in the group
277
-     *  parsed as numbers
278
-     */
279
-    function parseGroupSsrcs(ssrcGroup: any): any;
280
-    /**
281
-     * Parses the ssrcs from the group sdp line and
282
-     *  returns them as a list of numbers
283
-     * @param {object} the ssrcGroup object as parsed from
284
-     *  sdp-transform
285
-     * @returns {list<number>} a list of the ssrcs in the group
286
-     *  parsed as numbers
287
-     */
288
-    function parseGroupSsrcs(ssrcGroup: any): any;
289
-    /**
290
-     * Get the mline of the given type from the given sdp
291
-     * @param {object} sdp sdp as parsed from transform.parse
292
-     * @param {string} type the type of the desired mline (e.g. "video")
293
-     * @returns {object} a media object
294
-     */
295
-    function getMedia(sdp: any, type: string): any;
296
-    /**
297
-     * Get the mline of the given type from the given sdp
298
-     * @param {object} sdp sdp as parsed from transform.parse
299
-     * @param {string} type the type of the desired mline (e.g. "video")
300
-     * @returns {object} a media object
301
-     */
302
-    function getMedia(sdp: any, type: string): any;
303
-    /**
304
-     * Extracts the ICE username fragment from an SDP string.
305
-     * @param {string} sdp the SDP in raw text format
306
-     */
307
-    function getUfrag(sdp: string): string;
308
-    /**
309
-     * Extracts the ICE username fragment from an SDP string.
310
-     * @param {string} sdp the SDP in raw text format
311
-     */
312
-    function getUfrag(sdp: string): string;
313
-    /**
314
-     * Sets the given codecName as the preferred codec by moving it to the beginning
315
-     * of the payload types list (modifies the given mline in place). All instances
316
-     * of the codec are moved up.
317
-     * @param {object} mLine the mline object from an sdp as parsed by transform.parse
318
-     * @param {string} codecName the name of the preferred codec
319
-     */
320
-    function preferCodec(mline: any, codecName: string): void;
321
-    /**
322
-     * Sets the given codecName as the preferred codec by moving it to the beginning
323
-     * of the payload types list (modifies the given mline in place). All instances
324
-     * of the codec are moved up.
325
-     * @param {object} mLine the mline object from an sdp as parsed by transform.parse
326
-     * @param {string} codecName the name of the preferred codec
327
-     */
328
-    function preferCodec(mline: any, codecName: string): void;
329
-    /**
330
-     * Strips the given codec from the given mline. All related RTX payload
331
-     * types are also stripped. If the resulting mline would have no codecs,
332
-     * it's disabled.
333
-     *
334
-     * @param {object} mLine the mline object from an sdp as parsed by transform.parse.
335
-     * @param {string} codecName the name of the codec which will be stripped.
336
-     * @param {boolean} highProfile determines if only the high profile H264 codec needs to be
337
-     * stripped from the sdp when the passed codecName is H264.
338
-     */
339
-    function stripCodec(mLine: any, codecName: string, highProfile?: boolean): void;
340
-    /**
341
-     * Strips the given codec from the given mline. All related RTX payload
342
-     * types are also stripped. If the resulting mline would have no codecs,
343
-     * it's disabled.
344
-     *
345
-     * @param {object} mLine the mline object from an sdp as parsed by transform.parse.
346
-     * @param {string} codecName the name of the codec which will be stripped.
347
-     * @param {boolean} highProfile determines if only the high profile H264 codec needs to be
348
-     * stripped from the sdp when the passed codecName is H264.
349
-     */
350
-    function stripCodec(mLine: any, codecName: string, highProfile?: boolean): void;
351
-}
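A few of the SDPUtil helpers declared above in a self-contained sketch; the import path and the rawSdp variable are assumed:

    import SDPUtil from './modules/sdp/SDPUtil'; // path assumed

    declare const rawSdp: string;

    // Parse an m-line into its media/port/proto/fmt parts.
    const mline = SDPUtil.parseMLine('m=audio 9 UDP/TLS/RTP/SAVPF 111 103');
    console.log(mline.media, mline.port, mline.fmt);

    // Extract the ICE username fragment from a raw SDP blob.
    const ufrag = SDPUtil.getUfrag(rawSdp);

    // Generate a random SSRC, e.g. when synthesizing RTX or simulcast streams.
    const ssrc = SDPUtil.generateSsrc();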

+ 0
- 2
types/auto/modules/sdp/SampleSdpStrings.d.ts

@@ -1,2 +0,0 @@
-declare namespace _default { }
-export default _default;

+ 0
- 50
types/auto/modules/sdp/SdpConsistency.d.ts

@@ -1,50 +0,0 @@
-/**
- * Handles the work of keeping video ssrcs consistent across multiple
- * o/a cycles, making it such that all stream operations can be
- * kept local and do not need to be signaled.
- * NOTE: This only keeps the 'primary' video ssrc consistent: meaning
- * the primary video stream
- */
-export default class SdpConsistency {
-    /**
-     * Constructor
-     * @param {string} logPrefix the log prefix appended to every logged
-     * message, currently used to distinguish for which
-     * <tt>TraceablePeerConnection</tt> the instance works.
-     */
-    constructor(logPrefix: string);
-    logPrefix: string;
-    /**
-     * Clear the cached video primary and primary rtx ssrcs so that
-     *  they will not be used for the next call to
-     *  makeVideoPrimarySsrcsConsistent
-     */
-    clearVideoSsrcCache(): void;
-    cachedPrimarySsrc: any;
-    injectRecvOnly: boolean;
-    /**
-     * Explicitly set the primary ssrc to be used in
-     *  makeVideoPrimarySsrcsConsistent
-     * @param {number} primarySsrc the primarySsrc to be used
-     *  in future calls to makeVideoPrimarySsrcsConsistent
-     * @throws Error if <tt>primarySsrc</tt> is not a number
-     */
-    setPrimarySsrc(primarySsrc: number): void;
-    /**
-     * Checks whether or not there is a primary video SSRC cached already.
-     * @return {boolean}
-     */
-    hasPrimarySsrcCached(): boolean;
-    /**
-     * Given an sdp string, either:
-     *  1) record the primary video and primary rtx ssrcs to be
-     *   used in future calls to makeVideoPrimarySsrcsConsistent or
-     *  2) change the primary and primary rtx ssrcs in the given sdp
-     *   to match the ones previously cached
-     * @param {string} sdpStr the sdp string to (potentially)
-     *  change to make the video ssrcs consistent
-     * @returns {string} a (potentially) modified sdp string
-     *  with ssrcs consistent with this class' cache
-     */
-    makeVideoPrimarySsrcsConsistent(sdpStr: string): string;
-}
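A sketch of the intended SdpConsistency flow across two offer/answer cycles; the import path, the log prefix and the SDP strings are assumed:

    import SdpConsistency from './modules/sdp/SdpConsistency'; // path assumed

    declare const firstOfferSdp: string;
    declare const laterOfferSdp: string;

    const consistency = new SdpConsistency('[example]');
    // First call caches the primary (and primary RTX) video SSRC.
    const first = consistency.makeVideoPrimarySsrcsConsistent(firstOfferSdp);
    // Later calls rewrite the SDP so the cached primary SSRC is reused.
    const later = consistency.makeVideoPrimarySsrcsConsistent(laterOfferSdp);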

+ 0
- 75
types/auto/modules/sdp/SdpSimulcast.d.ts

@@ -1,75 +0,0 @@
1
-import * as transform from 'sdp-transform';
2
-interface Description {
3
-    type: RTCSdpType;
4
-    sdp: string;
5
-}
6
-interface Options {
7
-    numOfLayers?: number;
8
-}
9
-/**
10
- * This class handles SDP munging for enabling simulcast for local video streams in Unified plan. A set of random SSRCs
11
- * are generated for the higher layer streams and they are cached for a given mid. The cached SSRCs are then reused on
12
- * the subsequent iterations while munging the local description. This class also handles imploding of the simulcast
13
- * SSRCs for remote endpoints into the primary FID group in remote description since Jicofo signals all SSRCs relevant
14
- * to a given endpoint.
15
- */
16
-export default class SdpSimulcast {
17
-    private _options;
18
-    private _ssrcCache;
19
-    /**
20
-     * Creates a new instance.
21
-     *
22
-     * @param options
23
-     */
24
-    constructor(options: Options);
25
-    /**
26
-     * Updates the given media description using the SSRCs that were cached for the mid associated
27
-     * with the media description and returns the modified media description.
28
-     *
29
-     * @param mLine
30
-     * @returns
31
-     */
32
-    _fillSsrcsFromCache(mLine: transform.MediaDescription): any;
33
-    /**
34
-     * Generates a new set of SSRCs for the higher simulcast layers/streams and adds the attributes and SIM group to
35
-     * the given media description and returns the modified media description.
36
-     *
37
-     * @param mLine
38
-     * @param primarySsrc
39
-     * @returns
40
-     */
41
-    _generateNewSsrcsForSimulcast(mLine: transform.MediaDescription, primarySsrc: number): any;
42
-    /**
43
-     * Returns a random number to be used for the SSRC.
44
-     *
45
-     * @returns
46
-     */
47
-    _generateSsrc(): number;
48
-    /**
49
-     * Returns the requested attribute value for a SSRC from a given media description.
50
-     *
51
-     * @param mLine
52
-     * @param ssrc
53
-     * @param attributeName
54
-     * @returns
55
-     */
56
-    _getSsrcAttribute(mLine: transform.MediaDescription, ssrc: number, attributeName: string): string | undefined;
57
-    /**
58
-     * Returns an array of all the primary SSRCs in the SIM group for a given media description.
59
-     *
60
-     * @param mLine
61
-     * @returns
62
-     */
63
-    _parseSimLayers(mLine: transform.MediaDescription): Array<number> | null;
64
-    /**
65
-     * Munges the given media description to enable simulcast for the video media sections that are in either have
66
-     * SENDRECV or SENDONLY as the media direction thereby ignoring all the RECVONLY transceivers created for remote
67
-     * endpoints.
68
-     * NOTE: This needs to be called only when simulcast is enabled.
69
-     *
70
-     * @param description
71
-     * @returns
72
-     */
73
-    mungeLocalDescription(description: Description): Description;
74
-}
75
-export {};
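A sketch showing how the SdpSimulcast munger declared above would be driven; the import path and the offer object are assumed, with offer shaped like the Description interface above:

    import SdpSimulcast from './modules/sdp/SdpSimulcast'; // path assumed

    declare const offer: { type: RTCSdpType; sdp: string };

    const simulcast = new SdpSimulcast({ numOfLayers: 3 });
    // Adds the extra SSRCs and the SIM group to sendrecv/sendonly video
    // m-lines; recvonly transceivers created for remote endpoints are skipped.
    const mungedOffer = simulcast.mungeLocalDescription(offer);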

+ 0
- 218
types/auto/modules/sdp/SdpTransformUtil.d.ts

@@ -1,218 +0,0 @@
1
-/**
2
- * Parses the primary SSRC of given SSRC group.
3
- * @param {object} group the SSRC group object as defined by the 'sdp-transform'
4
- * @return {Number} the primary SSRC number
5
- */
6
-export function parsePrimarySSRC(group: object): number;
7
-/**
8
- * Parses the secondary SSRC of given SSRC group.
9
- * @param {object} group the SSRC group object as defined by the 'sdp-transform'
10
- * @return {Number} the secondary SSRC number
11
- */
12
-export function parseSecondarySSRC(group: object): number;
13
-/**
14
- * Utility class for SDP manipulation using the 'sdp-transform' library.
15
- *
16
- * Typical use usage scenario:
17
- *
18
- * const transformer = new SdpTransformWrap(rawSdp);
19
- * const videoMLine = transformer.selectMedia('video);
20
- * if (videoMLine) {
21
- *     videoMLiner.addSSRCAttribute({
22
- *         id: 2342343,
23
- *         attribute: "cname",
24
- *         value: "someCname"
25
- *     });
26
- *     rawSdp = transformer.toRawSdp();
27
- * }
28
- */
29
-export class SdpTransformWrap {
30
-    /**
31
-     * Creates new instance and parses the raw SDP into objects using
32
-     * 'sdp-transform' lib.
33
-     * @param {string} rawSDP the SDP in raw text format.
34
-     */
35
-    constructor(rawSDP: string);
36
-    parsedSDP: transform.SessionDescription;
37
-    /**
38
-     * Selects all the m-lines from the SDP for a given media type.
39
-     *
40
-     * @param {string} mediaType the name of the media e.g. 'audio', 'video', 'data'.
41
-     * @return {MLineWrap|null} return {@link MLineWrap} instance for the media line or <tt>null</tt> if not found. The
42
-     * object returned references the underlying SDP state held by this <tt>SdpTransformWrap</tt> instance (it's not a
43
-     * copy).
44
-     */
45
-    selectMedia(mediaType: string): MLineWrap | null;
46
-    /**
47
-     * Converts the currently stored SDP state in this instance to raw text SDP
48
-     * format.
49
-     * @return {string}
50
-     */
51
-    toRawSDP(): string;
52
-}
53
-import * as transform from "sdp-transform";
54
-/**
55
- * A wrapper around 'sdp-transform' media description object which provides
56
- * utility methods for common SDP/SSRC related operations.
57
- */
58
-declare class MLineWrap {
59
-    /**
60
-     * Creates new <tt>MLineWrap</t>>
61
-     * @param {Object} mLine the media line object as defined by 'sdp-transform'
62
-     * lib.
63
-     */
64
-    constructor(mLine: any);
65
-    mLine: any;
66
-    /**
67
-     * Setter for the mLine's "ssrcs" array.
68
-     *
69
-     * @param {Array<Object>} ssrcs an array of 'sdp-transform' SSRC attributes
70
-     * objects.
71
-     */
72
-    set ssrcs(arg: any[]);
73
-    /**
74
-     * Getter for the mLine's "ssrcs" array. If the array was undefined an empty
75
-     * one will be preassigned.
76
-     *
77
-     * @return {Array<Object>} an array of 'sdp-transform' SSRC attributes
78
-     * objects.
79
-     */
80
-    get ssrcs(): any[];
81
-    /**
82
-     * Modifies the direction of the underlying media description.
83
-     * @param {string} direction the new direction to be set
84
-     */
85
-    set direction(arg: string);
86
-    /**
87
-     * Returns the direction of the underlying media description.
88
-     * @return {string} the media direction name as defined in the SDP.
89
-     */
90
-    get direction(): string;
91
-    /**
92
-     * Modifies the SSRC groups array of the underlying media description
93
-     * object.
94
-     * @param {Array.<Object>} ssrcGroups
95
-     */
96
-    set ssrcGroups(arg: any[]);
97
-    /**
98
-     * Exposes the SSRC group array of the underlying media description object.
99
-     * @return {Array.<Object>}
100
-     */
101
-    get ssrcGroups(): any[];
102
-    /**
103
-     * Obtains value from SSRC attribute.
104
-     * @param {number} ssrcNumber the SSRC number for which attribute is to be
105
-     * found
106
-     * @param {string} attrName the name of the SSRC attribute to be found.
107
-     * @return {string|undefined} the value of SSRC attribute or
108
-     * <tt>undefined</tt> if no such attribute exists.
109
-     */
110
-    getSSRCAttrValue(ssrcNumber: number, attrName: string): string | undefined;
111
-    /**
112
-     * Removes all attributes for given SSRC number.
113
-     * @param {number} ssrcNum the SSRC number for which all attributes will be
114
-     * removed.
115
-     */
116
-    removeSSRC(ssrcNum: number): void;
117
-    /**
118
-     * Adds SSRC attribute
119
-     * @param {object} ssrcObj the SSRC attribute object as defined in
120
-     * the 'sdp-transform' lib.
121
-     */
122
-    addSSRCAttribute(ssrcObj: object): void;
123
-    /**
124
-     * Finds a SSRC group matching both semantics and SSRCs in order.
125
-     * @param {string} semantics the name of the semantics
126
-     * @param {string} [ssrcs] group SSRCs as a string (like it's defined in
127
-     * SSRC group object of the 'sdp-transform' lib) e.g. "1232546 342344 25434"
128
-     * @return {object|undefined} the SSRC group object or <tt>undefined</tt> if
129
-     * not found.
130
-     */
131
-    findGroup(semantics: string, ssrcs?: string): object | undefined;
132
-    /**
133
-     * Finds all groups matching given semantic's name.
134
-     * @param {string} semantics the name of the semantics
135
-     * @return {Array.<object>} an array of SSRC group objects as defined by
136
-     * the 'sdp-transform' lib.
137
-     */
138
-    findGroups(semantics: string): Array<object>;
139
-    /**
140
-     * Finds all groups matching given semantic's name and group's primary SSRC.
141
-     * @param {string} semantics the name of the semantics
142
-     * @param {number} primarySSRC the primary SSRC number to be matched
143
-     * @return {Object} SSRC group object as defined by the 'sdp-transform' lib.
144
-     */
145
-    findGroupByPrimarySSRC(semantics: string, primarySSRC: number): any;
146
-    /**
147
-     * @param {string|null} msid the media stream id or <tt>null</tt> to match
148
-     * the first SSRC object with any 'msid' value.
149
-     * @return {Object|undefined} the SSRC object as defined by 'sdp-transform'
150
-     * lib.
151
-     */
152
-    findSSRCByMSID(msid: string | null): any | undefined;
153
-    /**
154
-     * Gets the SSRC count for the underlying media description.
155
-     * @return {number}
156
-     */
157
-    getSSRCCount(): number;
158
-    /**
159
-     * Checks whether the underlying media description contains any SSRC groups.
160
-     * @return {boolean} <tt>true</tt> if there are any SSRC groups or
161
-     * <tt>false</tt> otherwise.
162
-     */
163
-    containsAnySSRCGroups(): boolean;
164
-    /**
165
-     * Finds the primary video SSRC.
166
-     * @returns {number|undefined} the primary video ssrc
167
-     * @throws Error if the underlying media description is not a video
168
-     */
169
-    getPrimaryVideoSsrc(): number | undefined;
170
-    /**
171
-     * Obtains RTX SSRC from the underlying video description (the
172
-     * secondary SSRC of the first "FID" group found)
173
-     * @param {number} primarySsrc the video ssrc for which to find the
174
-     * corresponding rtx ssrc
175
-     * @returns {number|undefined} the rtx ssrc (or undefined if there isn't
176
-     * one)
177
-     */
178
-    getRtxSSRC(primarySsrc: number): number | undefined;
179
-    /**
180
-     * Obtains all SSRCs contained in the underlying media description.
181
-     * @return {Array.<number>} an array with all SSRC as numbers.
182
-     */
183
-    getSSRCs(): Array<number>;
184
-    /**
185
-     * Obtains primary video SSRCs.
186
-     * @return {Array.<number>} an array of all primary video SSRCs as numbers.
187
-     * @throws Error if the wrapped media description is not a video.
188
-     */
189
-    getPrimaryVideoSSRCs(): Array<number>;
190
-    /**
191
-     * Dumps all SSRC groups of this media description to JSON.
192
-     */
193
-    dumpSSRCGroups(): string;
194
-    /**
195
-     * Removes all SSRC groups which contain given SSRC number at any position.
196
-     * @param {number} ssrc the SSRC for which all matching groups are to be
197
-     * removed.
198
-     */
199
-    removeGroupsWithSSRC(ssrc: number): void;
200
-    /**
201
-     * Removes groups that match given semantics.
202
-     * @param {string} semantics e.g. "SIM" or "FID"
203
-     */
204
-    removeGroupsBySemantics(semantics: string): void;
205
-    /**
206
-     * Replaces SSRC (does not affect SSRC groups, but only attributes).
207
-     * @param {number} oldSSRC the old SSRC number
208
-     * @param {number} newSSRC the new SSRC number
209
-     */
210
-    replaceSSRC(oldSSRC: number, newSSRC: number): void;
211
-    /**
212
-     * Adds given SSRC group to this media description.
213
-     * @param {object} group the SSRC group object as defined by
214
-     * the 'sdp-transform' lib.
215
-     */
216
-    addSSRCGroup(group: object): void;
217
-}
218
-export {};
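The "typical usage scenario" embedded in the SdpTransformWrap comment above contains a few slips (videoMLiner, an unterminated 'video string literal, and toRawSdp() versus the declared toRawSDP()); a corrected version of that same sketch, with the import path assumed:

    import { SdpTransformWrap } from './modules/sdp/SdpTransformUtil'; // path assumed

    declare let rawSdp: string;

    const transformer = new SdpTransformWrap(rawSdp);
    const videoMLine = transformer.selectMedia('video');

    if (videoMLine) {
        videoMLine.addSSRCAttribute({
            id: 2342343,
            attribute: 'cname',
            value: 'someCname'
        });
        rawSdp = transformer.toRawSDP();
    }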

+ 0
- 18
types/auto/modules/settings/Settings.d.ts

@@ -1,18 +0,0 @@
-declare namespace _default {
-    export { jitsiLocalStorage as _storage };
-    /**
-     * Initializes the Settings class.
-     *
-     * @param {Storage|undefined} externalStorage - Object that implements the Storage interface. This object will be
-     * used for storing data instead of jitsiLocalStorage if specified.
-     */
-    export function init(externalStorage: Storage): void;
-    /**
-     * Initializes the Settings class.
-     *
-     * @param {Storage|undefined} externalStorage - Object that implements the Storage interface. This object will be
-     * used for storing data instead of jitsiLocalStorage if specified.
-     */
-    export function init(externalStorage: Storage): void;
-}
-export default _default;
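A one-line sketch of the Settings initialization described above, assuming a browser context and an assumed import path:

    import Settings from './modules/settings/Settings'; // path assumed

    // Any object implementing the standard Storage interface can replace
    // jitsiLocalStorage, e.g. sessionStorage in a browser.
    Settings.init(window.sessionStorage);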

+ 0
- 155
types/auto/modules/statistics/AnalyticsAdapter.d.ts

@@ -1,155 +0,0 @@
1
-declare var _default: AnalyticsAdapter;
2
-export default _default;
3
-/**
4
- * This class provides an API to lib-jitsi-meet and its users for sending
5
- * analytics events. It serves as a bridge to different backend implementations
6
- * ("analytics handlers") and a cache for events attempted to be sent before
7
- * the analytics handlers were enabled.
8
- *
9
- * The API is designed to be an easy replacement for the previous version of
10
- * this adapter, and is meant to be extended with more convenience methods.
11
- *
12
- *
13
- * The API calls are translated to objects with the following structure, which
14
- * are then passed to the sendEvent(event) function of the underlying handlers:
15
- *
16
- * {
17
- *    type,
18
- *
19
- *    action,
20
- *    actionSubject,
21
- *    actionSubjectId,
22
- *    attributes,
23
- *    categories,
24
- *    containerId,
25
- *    containerType,
26
- *    name,
27
- *    objectId,
28
- *    objectType,
29
- *    source,
30
- *    tags
31
- * }
32
- *
33
- * The 'type' is one of 'operational', 'page', 'track' or 'ui', and some of the
34
- * other properties are considered required according to the type.
35
- *
36
- * For events with type 'page', the required properties are: name.
37
- *
38
- * For events with type 'operational' and 'ui', the required properties are:
39
- * action, actionSubject, source
40
- *
41
- * For events with type 'page', the required properties are:
42
- * action, actionSubject, source, containerType, containerId, objectType,
43
- * objectId
44
- */
45
-declare class AnalyticsAdapter {
46
-    /**
47
-     * Reset the state to the initial one.
48
-     *
49
-     * @returns {void}
50
-     */
51
-    reset(): void;
52
-    /**
53
-     * Whether this AnalyticsAdapter has been disposed of or not. Once this
54
-     * is set to true, the AnalyticsAdapter is disabled and does not accept
55
-     * any more events, and it can not be re-enabled.
56
-     * @type {boolean}
57
-     */
58
-    disposed: boolean;
59
-    /**
60
-     * The set of handlers to which events will be sent.
61
-     * @type {Set<any>}
62
-     */
63
-    analyticsHandlers: Set<any>;
64
-    /**
65
-     * The cache of events which are not sent yet. The cache is enabled
66
-     * while this field is truthy, and disabled otherwise.
67
-     * @type {Array}
68
-     */
69
-    cache: any[];
70
-    /**
71
-     * Map of properties that will be added to every event. Note that the
72
-     * keys will be prefixed with "permanent.".
73
-     */
74
-    permanentProperties: any;
75
-    /**
76
-     * The name of the conference that this AnalyticsAdapter is associated
77
-     * with.
78
-     * @type {null}
79
-     */
80
-    conferenceName: any;
81
-    /**
82
-     * Dispose analytics. Clears all handlers.
83
-     */
84
-    dispose(): void;
85
-    /**
86
-     * Sets the handlers that are going to be used to send analytics. Sends any
87
-     * cached events.
88
-     * @param {Array} handlers the handlers
89
-     */
90
-    setAnalyticsHandlers(handlers: any[]): void;
91
-    /**
92
-     * Set the user properties to the analytics handlers.
93
-     *
94
-     * @returns {void}
95
-     */
96
-    _setUserProperties(): void;
97
-    /**
98
-     * Adds a set of permanent properties to this this AnalyticsAdapter.
99
-     * Permanent properties will be added as "attributes" to events sent to
100
-     * the underlying "analytics handlers", and their keys will be prefixed
101
-     * by "permanent_", i.e. adding a permanent property {key: "value"} will
102
-     * result in {"permanent_key": "value"} object to be added to the
103
-     * "attributes" field of events.
104
-     *
105
-     * @param {Object} properties the properties to add
106
-     */
107
-    addPermanentProperties(properties: any): void;
108
-    /**
109
-     * Sets the name of the conference that this AnalyticsAdapter is associated
110
-     * with.
111
-     * @param name the name to set.
112
-     */
113
-    setConferenceName(name: any): void;
114
-    /**
115
-     * Sends an event with a given name and given properties. The first
116
-     * parameter is either a string or an object. If it is a string, it is used
117
-     * as the event name and the second parameter is used at the attributes to
118
-     * attach to the event. If it is an object, it represents the whole event,
119
-     * including any desired attributes, and the second parameter is ignored.
120
-     *
121
-     * @param {String|Object} eventName either a string to be used as the name
122
-     * of the event, or an event object. If an event object is passed, the
123
-     * properties parameters is ignored.
124
-     * @param {Object} properties the properties/attributes to attach to the
125
-     * event, if eventName is a string.
126
-     */
127
-    sendEvent(eventName: string | any, properties?: any): void;
128
-    /**
129
-     * Checks whether an event has all of the required fields set, and tries
130
-     * to fill in some of the missing fields with reasonable default values.
131
-     * Returns true if after this operation the event has all of the required
132
-     * fields set, and false otherwise (if some of the required fields were not
133
-     * set and the attempt to fill them in with a default failed).
134
-     *
135
-     * @param event the event object.
136
-     * @return {boolean} true if the event (after the call to this function)
137
-     * contains all of the required fields, and false otherwise.
138
-     * @private
139
-     */
140
-    private _verifyRequiredFields;
141
-    /**
142
-     * Saves an event to the cache, if the cache is enabled.
143
-     * @param event the event to save.
144
-     * @returns {boolean} true if the event was saved, and false otherwise (i.e.
145
-     * if the cache was disabled).
146
-     * @private
147
-     */
148
-    private _maybeCacheEvent;
149
-    /**
150
-     *
151
-     * @param event
152
-     * @private
153
-     */
154
-    private _sendEvent;
155
-}
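A sketch of the AnalyticsAdapter flow described in the comment above: register a handler, attach permanent properties and send an 'operational' event. The import path, the handler object and the event payload are assumptions, and real handlers may need more than a sendEvent method:

    import analytics from './modules/statistics/AnalyticsAdapter'; // path assumed

    // Minimal handler sketch: receives the translated event objects.
    const consoleHandler = {
        sendEvent(event: unknown) {
            console.log('analytics event', event);
        }
    };

    analytics.setAnalyticsHandlers([ consoleHandler ]);
    analytics.addPermanentProperties({ appVersion: '1.2.3' });
    analytics.sendEvent({
        type: 'operational',
        action: 'connection.failed',
        actionSubject: 'connection',
        source: 'example'
    });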

+ 0
- 46
types/auto/modules/statistics/AudioOutputProblemDetector.d.ts

@@ -1,46 +0,0 @@
1
-/**
2
- * Collects the average audio levels per participant from the local stats and the stats received by every remote
3
- * participant and compares them to detect potential audio problem for a participant.
4
- */
5
-export default class AudioOutputProblemDetector {
6
-    /**
7
-     * Creates new <tt>AudioOutputProblemDetector</tt> instance.
8
-     *
9
-     * @param {JitsiConference} conference - The conference instance to be monitored.
10
-     */
11
-    constructor(conference: any);
12
-    _conference: any;
13
-    _localAudioLevelCache: {};
14
-    _reportedParticipants: any[];
15
-    _audioProblemCandidates: {};
16
-    _numberOfRemoteAudioLevelsReceived: {};
17
-    /**
18
-     * A listener for audio level data retrieved by the local stats.
19
-     *
20
-     * @param {TraceablePeerConnection} tpc - The <tt>TraceablePeerConnection</tt> instance used to gather the data.
21
-     * @param {Object} avgAudioLevels - The average audio levels per participant.
22
-     * @returns {void}
23
-     */
24
-    _onLocalAudioLevelsReport(tpc: any, { avgAudioLevels }: any): void;
25
-    /**
26
-     * A listener for audio level data received by a remote participant.
27
-     *
28
-     * @param {string} userID - The user id of the participant that sent the data.
29
-     * @param {number} audioLevel - The average audio level value.
30
-     * @returns {void}
31
-     */
32
-    _onRemoteAudioLevelReceived(userID: string, { avgAudioLevels }: number): void;
33
-    /**
34
-     * Clears the data stored for a participant.
35
-     *
36
-     * @param {string} userID - The id of the participant.
37
-     * @returns {void}
38
-     */
39
-    _clearUserData(userID: string): void;
40
-    /**
41
-     * Disposes the allocated resources.
42
-     *
43
-     * @returns {void}
44
-     */
45
-    dispose(): void;
46
-}
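A sketch of the detector lifecycle; the import path and the conference instance are assumed:

    import AudioOutputProblemDetector from './modules/statistics/AudioOutputProblemDetector'; // path assumed

    declare const conference: any; // an existing JitsiConference

    const detector = new AudioOutputProblemDetector(conference);
    // ... when the conference ends, release the listeners it registered.
    detector.dispose();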

+ 0
- 386
types/auto/modules/statistics/AvgRTPStatsReporter.d.ts

@@ -1,386 +0,0 @@
1
-/**
2
- * Reports average RTP statistics values (arithmetic mean) to the analytics
3
- * module for things like bit rate, bandwidth, packet loss etc. It keeps track
4
- * of the P2P vs JVB conference modes and submits the values under different
5
- * namespaces (the events for P2P mode have 'p2p.' prefix). Every switch between
6
- * P2P mode resets the data collected so far and averages are calculated from
7
- * scratch.
8
- */
9
-export default class AvgRTPStatsReporter {
10
-    /**
11
-     * Creates new instance of <tt>AvgRTPStatsReporter</tt>
12
-     * @param {JitsiConference} conference
13
-     * @param {number} n the number of samples, before arithmetic mean is to be
14
-     * calculated and values submitted to the analytics module.
15
-     */
16
-    constructor(conference: any, n: number);
17
-    /**
18
-     * How many {@link ConnectionQualityEvents.LOCAL_STATS_UPDATED} samples
19
-     * are to be included in arithmetic mean calculation.
20
-     * @type {number}
21
-     * @private
22
-     */
23
-    private _n;
24
-    /**
25
-     * The current sample index. Starts from 0 and goes up to {@link _n})
26
-     * when analytics report will be submitted.
27
-     * @type {number}
28
-     * @private
29
-     */
30
-    private _sampleIdx;
31
-    /**
32
-     * The conference for which stats will be collected and reported.
33
-     * @type {JitsiConference}
34
-     * @private
35
-     */
36
-    private _conference;
37
-    /**
38
-     * Average audio upload bitrate
39
-     * XXX What are the units?
40
-     * @type {AverageStatReport}
41
-     * @private
42
-     */
43
-    private _avgAudioBitrateUp;
44
-    /**
45
-     * Average audio download bitrate
46
-     * XXX What are the units?
47
-     * @type {AverageStatReport}
48
-     * @private
49
-     */
50
-    private _avgAudioBitrateDown;
51
-    /**
52
-     * Average video upload bitrate
53
-     * XXX What are the units?
54
-     * @type {AverageStatReport}
55
-     * @private
56
-     */
57
-    private _avgVideoBitrateUp;
58
-    /**
59
-     * Average video download bitrate
60
-     * XXX What are the units?
61
-     * @type {AverageStatReport}
62
-     * @private
63
-     */
64
-    private _avgVideoBitrateDown;
65
-    /**
66
-     * Average upload bandwidth
67
-     * XXX What are the units?
68
-     * @type {AverageStatReport}
69
-     * @private
70
-     */
71
-    private _avgBandwidthUp;
72
-    /**
73
-     * Average download bandwidth
74
-     * XXX What are the units?
75
-     * @type {AverageStatReport}
76
-     * @private
77
-     */
78
-    private _avgBandwidthDown;
79
-    /**
80
-     * Average total packet loss
81
-     * XXX What are the units?
82
-     * @type {AverageStatReport}
83
-     * @private
84
-     */
85
-    private _avgPacketLossTotal;
86
-    /**
87
-     * Average upload packet loss
88
-     * XXX What are the units?
89
-     * @type {AverageStatReport}
90
-     * @private
91
-     */
92
-    private _avgPacketLossUp;
93
-    /**
94
-     * Average download packet loss
95
-     * XXX What are the units?
96
-     * @type {AverageStatReport}
97
-     * @private
98
-     */
99
-    private _avgPacketLossDown;
100
-    /**
101
-     * Average FPS for remote videos
102
-     * @type {AverageStatReport}
103
-     * @private
104
-     */
105
-    private _avgRemoteFPS;
106
-    /**
107
-     * Average FPS for remote screen streaming videos (reported only if not
108
-     * a <tt>NaN</tt>).
109
-     * @type {AverageStatReport}
110
-     * @private
111
-     */
112
-    private _avgRemoteScreenFPS;
113
-    /**
114
-     * Average FPS for local video (camera)
115
-     * @type {AverageStatReport}
116
-     * @private
117
-     */
118
-    private _avgLocalFPS;
119
-    /**
120
-     * Average FPS for local screen streaming video (reported only if not
121
-     * a <tt>NaN</tt>).
122
-     * @type {AverageStatReport}
123
-     * @private
124
-     */
125
-    private _avgLocalScreenFPS;
126
-    /**
127
-     * Average pixels for remote screen streaming videos (reported only if
128
-     * not a <tt>NaN</tt>).
129
-     * @type {AverageStatReport}
130
-     * @private
131
-     */
132
-    private _avgRemoteCameraPixels;
133
-    /**
134
-     * Average pixels for remote screen streaming videos (reported only if
135
-     * not a <tt>NaN</tt>).
136
-     * @type {AverageStatReport}
137
-     * @private
138
-     */
139
-    private _avgRemoteScreenPixels;
140
-    /**
141
-     * Average pixels for local video (camera)
142
-     * @type {AverageStatReport}
143
-     * @private
144
-     */
145
-    private _avgLocalCameraPixels;
146
-    /**
147
-     * Average pixels for local screen streaming video (reported only if not
148
-     * a <tt>NaN</tt>).
149
-     * @type {AverageStatReport}
150
-     * @private
151
-     */
152
-    private _avgLocalScreenPixels;
153
-    /**
154
-     * Average connection quality as defined by
155
-     * the {@link ConnectionQuality} module.
156
-     * @type {AverageStatReport}
157
-     * @private
158
-     */
159
-    private _avgCQ;
160
-    _cachedTransportStats: {
161
-        p2p: any;
162
-        local_candidate_type: any;
163
-        remote_candidate_type: any;
164
-        transport_type: any;
165
-    };
166
-    _onLocalStatsUpdated: (data: any) => void;
167
-    _onP2PStatusChanged: () => void;
168
-    _onJvb121StatusChanged: (oldStatus: any, newStatus: any) => void;
169
-    jvbStatsMonitor: ConnectionAvgStats;
170
-    p2pStatsMonitor: ConnectionAvgStats;
171
-    /**
172
-     * Processes next batch of stats reported on
173
-     * {@link ConnectionQualityEvents.LOCAL_STATS_UPDATED}.
174
-     * @param {go figure} data
175
-     * @private
176
-     */
177
-    private _calculateAvgStats;
178
-    /**
179
-     * Calculates average number of pixels for the report
180
-     *
181
-     * @param {map} peerResolutions a map of peer resolutions
182
-     * @param {boolean} isLocal if the average is to be calculated for the local
183
-     * video or <tt>false</tt> if for remote videos.
184
-     * @param {VideoType} videoType
185
-     * @return {number|NaN} average number of pixels or <tt>NaN</tt> if there
186
-     * are no samples.
187
-     * @private
188
-     */
189
-    private _calculateAvgVideoPixels;
190
-    /**
191
-     * Calculate average pixels for either remote or local participant
192
-     * @param {object} videos maps resolution per video SSRC
193
-     * @param {JitsiParticipant|null} participant remote participant or
194
-     * <tt>null</tt> for local video pixels calculation.
195
-     * @param {VideoType} videoType the type of the video for which an average
196
-     * will be calculated.
197
-     * @return {number|NaN} average video pixels of all participant's videos or
198
-     * <tt>NaN</tt> if currently not available
199
-     * @private
200
-     */
201
-    private _calculatePeerAvgVideoPixels;
202
-    /**
203
-     * Calculates average FPS for the report
204
-     * @param {go figure} frameRate
205
-     * @param {boolean} isLocal if the average is to be calculated for the local
206
-     * video or <tt>false</tt> if for remote videos.
207
-     * @param {VideoType} videoType
208
-     * @return {number|NaN} average FPS or <tt>NaN</tt> if there are no samples.
209
-     * @private
210
-     */
211
-    private _calculateAvgVideoFps;
212
-    /**
213
-     * Calculate average FPS for either remote or local participant
214
-     * @param {object} videos maps FPS per video SSRC
215
-     * @param {JitsiParticipant|null} participant remote participant or
216
-     * <tt>null</tt> for local FPS calculation.
217
-     * @param {VideoType} videoType the type of the video for which an average
218
-     * will be calculated.
219
-     * @return {number|NaN} average FPS of all participant's videos or
220
-     * <tt>NaN</tt> if currently not available
221
-     * @private
222
-     */
223
-    private _calculatePeerAvgVideoFps;
224
-    /**
225
-     * Sends the 'transport.stats' analytics event whenever we detect that
226
-     * there is a change in the local or remote candidate type on the transport
227
-     * that is currently selected.
228
-     * @param {*} data
229
-     * @private
230
-     */
231
-    private _maybeSendTransportAnalyticsEvent;
232
-    /**
233
-     * Resets the stats related to JVB connection. Must not be called when in
234
-     * P2P mode, because then the {@link AverageStatReport} instances are
235
-     * tracking P2P stats. Note that this should never happen unless something
236
-     * is wrong with the P2P and JVB121 events.
237
-     * @private
238
-     */
239
-    private _resetAvgJvbStats;
240
-    /**
241
-     * Reset cache of all averages and {@link _sampleIdx}.
242
-     * @private
243
-     */
244
-    private _resetAvgStats;
245
-    /**
246
-     * Unregisters all event listeners and stops working.
247
-     */
248
-    dispose(): void;
249
-}
250
-/**
251
- * Class gathers the stats that are calculated and reported for a
252
- * {@link TraceablePeerConnection} even if it's not currently active. For
253
- * example we want to monitor RTT for the JVB connection while in P2P mode.
254
- */
255
-declare class ConnectionAvgStats {
256
-    /**
257
-     * Creates new <tt>ConnectionAvgStats</tt>
258
-     * @param {AvgRTPStatsReporter} avgRtpStatsReporter
259
-     * @param {boolean} isP2P
260
-     * @param {number} n the number of samples, before arithmetic mean is to be
261
-     * calculated and values submitted to the analytics module.
262
-     */
263
-    constructor(avgRtpStatsReporter: AvgRTPStatsReporter, isP2P: boolean, n: number);
264
-    /**
265
-     * Is this instance for JVB or P2P connection ?
266
-     * @type {boolean}
267
-     */
268
-    isP2P: boolean;
269
-    /**
270
-     * How many samples are to be included in arithmetic mean calculation.
271
-     * @type {number}
272
-     * @private
273
-     */
274
-    private _n;
275
-    /**
276
-     * The current sample index. Starts from 0 and goes up to {@link _n})
277
-     * when analytics report will be submitted.
278
-     * @type {number}
279
-     * @private
280
-     */
281
-    private _sampleIdx;
282
-    /**
283
-     * Average round trip time reported by the ICE candidate pair.
284
-     * @type {AverageStatReport}
285
-     */
286
-    _avgRTT: AverageStatReport;
287
-    /**
288
-     * Map stores average RTT to the JVB reported by remote participants.
289
-     * Mapped per participant id {@link JitsiParticipant.getId}.
290
-     *
291
-     * This is used only when {@link ConnectionAvgStats.isP2P} equals to
292
-     * <tt>false</tt>.
293
-     *
294
-     * @type {Map<string,AverageStatReport>}
295
-     * @private
296
-     */
297
-    private _avgRemoteRTTMap;
298
-    /**
299
-     * The conference for which stats will be collected and reported.
300
-     * @type {JitsiConference}
301
-     * @private
302
-     */
303
-    private _avgRtpStatsReporter;
304
-    /**
305
-     * The latest average E2E RTT for the JVB connection only.
306
-     *
307
-     * This is used only when {@link ConnectionAvgStats.isP2P} equals to
308
-     * <tt>false</tt>.
309
-     *
310
-     * @type {number}
311
-     */
312
-    _avgEnd2EndRTT: number;
313
-    _onConnectionStats: (tpc: any, stats: any) => void;
314
-    _onUserLeft: (id: any) => boolean;
315
-    _onRemoteStatsUpdated: (id: any, data: any) => void;
316
-    /**
317
-     * Processes next batch of stats.
318
-     * @param {go figure} data
319
-     * @private
320
-     */
321
-    private _calculateAvgStats;
322
-    /**
323
-     * Calculates arithmetic mean of all RTTs towards the JVB reported by
324
-     * participants.
325
-     * @return {number|NaN} NaN if not available (not enough data)
326
-     * @private
327
-     */
328
-    private _calculateAvgRemoteRTT;
329
-    /**
330
-     * Processes {@link ConnectionQualityEvents.REMOTE_STATS_UPDATED} to analyse
331
-     * RTT towards the JVB reported by each participant.
332
-     * @param {string} id {@link JitsiParticipant.getId}
333
-     * @param {go figure in ConnectionQuality.js} data
334
-     * @private
335
-     */
336
-    private _processRemoteStats;
337
-    /**
338
-     * Reset cache of all averages and {@link _sampleIdx}.
339
-     * @private
340
-     */
341
-    private _resetAvgStats;
342
-    /**
343
-     *
344
-     */
345
-    dispose(): void;
346
-}
347
-/**
348
- * This will calculate an average for one, named stat and submit it to
349
- * the analytics module when requested. It automatically counts the samples.
350
- */
351
-declare class AverageStatReport {
352
-    /**
353
-     * Creates new <tt>AverageStatReport</tt> for given name.
354
-     * @param {string} name that's the name of the event that will be reported
355
-     * to the analytics module.
356
-     */
357
-    constructor(name: string);
358
-    name: string;
359
-    count: number;
360
-    sum: number;
361
-    samples: any[];
362
-    /**
363
-     * Adds the next value that will be included in the average when
364
-     * {@link calculate} is called.
365
-     * @param {number} nextValue
366
-     */
367
-    addNext(nextValue: number): void;
368
-    /**
369
-     * Calculates an average for the samples collected using {@link addNext}.
370
-     * @return {number|NaN} an average of all collected samples or <tt>NaN</tt>
371
-     * if no samples were collected.
372
-     */
373
-    calculate(): number | number;
374
-    /**
375
-     * Appends the report to the analytics "data" object. The object will be
376
-     * set under <tt>prefix</tt> + {@link this.name} key.
377
-     * @param {Object} report the analytics "data" object
378
-     */
379
-    appendReport(report: any): void;
380
-    /**
381
-     * Clears all memory of any samples collected, so that new average can be
382
-     * calculated using this instance.
383
-     */
384
-    reset(): void;
385
-}
386
-export {};
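A sketch of the reporter lifecycle with an assumed sample count; the import path and the conference instance are placeholders:

    import AvgRTPStatsReporter from './modules/statistics/AvgRTPStatsReporter'; // path assumed

    declare const conference: any; // an existing JitsiConference

    // Averages are computed and submitted every 15 LOCAL_STATS_UPDATED samples.
    const reporter = new AvgRTPStatsReporter(conference, 15);
    // ... on conference end:
    reporter.dispose();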

+ 0
- 249
types/auto/modules/statistics/CallStats.d.ts

@@ -1,249 +0,0 @@
1
-/**
2
- * An instance of this class is a wrapper for the CallStats API fabric. A fabric
3
- * reports one peer connection to the CallStats backend and is allocated with
4
- * {@link callstats.addNewFabric}. It has a bunch of instance methods for
5
- * reporting various events. A fabric is considered disposed when
6
- * {@link CallStats.sendTerminateEvent} is executed.
7
- *
8
- * Currently only one backend instance can be created ever and it's done using
9
- * {@link CallStats.initBackend}. At the time of this writing there is no way to
10
- * explicitly shutdown the backend, but it's supposed to close it's connection
11
- * automatically, after all fabrics have been terminated.
12
- */
13
-declare class CallStats {
14
-    /**
15
-     * A callback passed to {@link callstats.addNewFabric}.
16
-     * @param {string} error 'success' means ok
17
-     * @param {string} msg some more details
18
-     * @private
19
-     */
20
-    private static _addNewFabricCallback;
21
-    /**
22
-     * Callback passed to {@link callstats.initialize} (backend initialization)
23
-     * @param {string} error 'success' means ok
24
-     * @param {String} msg
25
-     * @private
26
-     */
27
-    private static _initCallback;
28
-    /**
29
-     * Empties report queue.
30
-     *
31
-     * @param {CallStats} csInstance - The callstats instance.
32
-     * @private
33
-     */
34
-    private static _emptyReportQueue;
35
-    /**
36
-     * Reports an error to callstats.
37
-     *
38
-     * @param {CallStats} [cs]
39
-     * @param type the type of the error, which will be one of the wrtcFuncNames
40
-     * @param error the error
41
-     * @param pc the peerconnection
42
-     * @private
43
-     */
44
-    private static _reportError;
45
-    /**
46
-     * Reports an error to callstats.
47
-     *
48
-     * @param {CallStats} cs
49
-     * @param event the type of the event, which will be one of the fabricEvent
50
-     * @param eventData additional data to pass to event
51
-     * @private
52
-     */
53
-    private static _reportEvent;
54
-    /**
55
-     * Wraps some of the CallStats API method and logs their calls with
56
-     * arguments on the debug logging level. Also wraps some of the backend
57
-     * methods execution into try catch blocks to not crash the app in case
58
-     * there is a problem with the backend itself.
59
-     * @param {callstats} theBackend
60
-     * @private
61
-     */
62
-    private static _traceAndCatchBackendCalls;
63
-    /**
64
-     * Returns the Set with the currently existing {@link CallStats} instances.
65
-     * Lazily initializes the Set to allow any Set polyfills to be applied.
66
-     * @type {Set<CallStats>}
67
-     */
68
-    static get fabrics(): Set<CallStats>;
69
-    /**
70
-     * Initializes the CallStats backend. Should be called only if
71
-     * {@link CallStats.isBackendInitialized} returns <tt>false</tt>.
72
-     * @param {object} options
73
-     * @param {String} options.callStatsID CallStats credentials - ID
74
-     * @param {String} options.callStatsSecret CallStats credentials - secret
75
-     * @param {string} options.aliasName the <tt>aliasName</tt> part of
76
-     * the <tt>userID</tt> aka endpoint ID, see CallStats docs for more info.
77
-     * @param {string} options.userName the <tt>userName</tt> part of
78
-     * the <tt>userID</tt> aka display name, see CallStats docs for more info.
79
-     * @param {object} options.configParams the set of parameters
80
-     * to enable/disable certain features in the library. See CallStats docs for more info.
81
-     *
82
-     */
83
-    static initBackend(options: {
84
-        callStatsID: string;
85
-        callStatsSecret: string;
86
-        aliasName: string;
87
-        userName: string;
88
-        configParams: object;
89
-    }): boolean;
90
-    /**
91
-     * Checks if the CallStats backend has been created. It does not mean that
92
-     * it has been initialized, but only that the API instance has been
93
-     * allocated successfully.
94
-     * @return {boolean} <tt>true</tt> if backend exists or <tt>false</tt>
95
-     * otherwise
96
-     */
97
-    static isBackendInitialized(): boolean;
98
-    /**
99
-     * Notifies CallStats about active device.
100
-     * @param {{deviceList: {String:String}}} devicesData list of devices with
101
-     * their data
102
-     * @param {CallStats} cs callstats instance related to the event
103
-     */
104
-    static sendActiveDeviceListEvent(devicesData: {
105
-        deviceList: {
106
-            String: string;
107
-        };
108
-    }, cs: CallStats): void;
109
-    /**
110
-     * Notifies CallStats that there is a log we want to report.
111
-     *
112
-     * @param {Error} e error to send or {String} message
113
-     * @param {CallStats} cs callstats instance related to the error (optional)
114
-     */
115
-    static sendApplicationLog(e: Error, cs: CallStats): void;
116
-    /**
117
-     * Sends the given feedback through CallStats.
118
-     *
119
-     * @param {string} conferenceID the conference ID for which the feedback
120
-     * will be reported.
121
-     * @param overall an integer between 1 and 5 indicating the
122
-     * user feedback
123
-     * @param comment detailed feedback from the user.
124
-     */
125
-    static sendFeedback(conferenceID: string, overall: any, comment: any): Promise<any>;
126
-    /**
127
-     * Notifies CallStats that getUserMedia failed.
128
-     *
129
-     * @param {Error} e error to send
130
-     * @param {CallStats} cs callstats instance related to the error (optional)
131
-     */
132
-    static sendGetUserMediaFailed(e: Error, cs: CallStats): void;
133
-    /**
134
-     * Notifies CallStats for mute events
135
-     * @param mute {boolean} true for muted and false for not muted
136
-     * @param type {String} "audio"/"video"
137
-     * @param {CallStats} cs callstats instance related to the event
138
-     */
139
-    static sendMuteEvent(mute: boolean, type: string, cs: CallStats): void;
140
-    /**
141
-     * Creates new CallStats instance that handles all callstats API calls for
142
-     * given {@link TraceablePeerConnection}. Each instance is meant to handle
143
-     * one CallStats fabric added with 'addFabric' API method for the
144
-     * {@link TraceablePeerConnection} instance passed in the constructor.
145
-     * @param {TraceablePeerConnection} tpc
146
-     * @param {Object} options
147
-     * @param {string} options.confID the conference ID that will be used to
148
-     * report the session.
149
-     * @param {string} [options.remoteUserID='jitsi'] the remote user ID to
150
-     * which given <tt>tpc</tt> is connected.
151
-     */
152
-    constructor(tpc: any, options: {
153
-        confID: string;
154
-        remoteUserID?: string;
155
-    });
156
-    confID: string;
157
-    tpc: any;
158
-    peerconnection: any;
159
-    remoteUserID: string;
160
-    hasFabric: boolean;
161
-    /**
162
-     * Initializes CallStats fabric by calling "addNewFabric" for
163
-     * the peer connection associated with this instance.
164
-     * @return {boolean} true if the call was successful or false otherwise.
165
-     */
166
-    _addNewFabric(): boolean;
167
-    /**
168
-     * Lets the CallStats module know where a given SSRC is rendered by providing
169
-     * renderer tag ID.
170
-     * If the lib is not initialized yet, the call is queued for later, when it's
171
-     * ready.
172
-     * @param {number} ssrc the SSRC of the stream
173
-     * @param {boolean} isLocal indicates whether the stream is local
174
-     * @param {string|null} streamEndpointId if the stream is not local then it
175
-     * needs to contain the stream owner's ID
176
-     * @param {string} usageLabel meaningful usage label of this stream like
177
-     *        'microphone', 'camera' or 'screen'.
178
-     * @param {string} containerId  the id of media 'audio' or 'video' tag which
179
-     *        renders the stream.
180
-     */
181
-    associateStreamWithVideoTag(ssrc: number, isLocal: boolean, streamEndpointId: string | null, usageLabel: string, containerId: string): void;
182
-    /**
183
-     * Notifies CallStats that we are the new dominant speaker in the
184
-     * conference.
185
-     */
186
-    sendDominantSpeakerEvent(): void;
187
-    /**
188
-     * Notifies CallStats that the fabric for the underlying peerconnection was
189
-     * closed and no events should be reported after this call.
190
-     */
191
-    sendTerminateEvent(): void;
192
-    /**
193
-     * Notifies CallStats for ice connection failed
194
-     */
195
-    sendIceConnectionFailedEvent(): void;
196
-    /**
197
-     * Notifies CallStats that peer connection failed to create offer.
198
-     *
199
-     * @param {Error} e error to send
200
-     */
201
-    sendCreateOfferFailed(e: Error): void;
202
-    /**
203
-     * Notifies CallStats that peer connection failed to create answer.
204
-     *
205
-     * @param {Error} e error to send
206
-     */
207
-    sendCreateAnswerFailed(e: Error): void;
208
-    /**
209
-     * Sends either resume or hold event for the fabric associated with
210
-     * the underlying peerconnection.
211
-     * @param {boolean} isResume true to resume or false to hold
212
-     */
213
-    sendResumeOrHoldEvent(isResume: boolean): void;
214
-    /**
215
-     * Notifies CallStats for screen sharing events
216
-     * @param {boolean} start true for starting screen sharing and
217
-     * false for stopping it
218
-     * @param {string|null} ssrc - optional ssrc value, used only when
219
-     * starting screen sharing.
220
-     */
221
-    sendScreenSharingEvent(start: boolean, ssrc: string | null): void;
222
-    /**
223
-     * Notifies CallStats that peer connection failed to set local description.
224
-     *
225
-     * @param {Error} e error to send
226
-     */
227
-    sendSetLocalDescFailed(e: Error): void;
228
-    /**
229
-     * Notifies CallStats that peer connection failed to set remote description.
230
-     *
231
-     * @param {Error} e error to send
232
-     */
233
-    sendSetRemoteDescFailed(e: Error): void;
234
-    /**
235
-     * Notifies CallStats that peer connection failed to add ICE candidate.
236
-     *
237
-     * @param {Error} e error to send
238
-     */
239
-    sendAddIceCandidateFailed(e: Error): void;
240
-}
241
-declare namespace CallStats {
242
-    const backend: any;
243
-    const reportsQueue: any[];
244
-    const backendInitialized: boolean;
245
-    const callStatsID: string;
246
-    const callStatsSecret: string;
247
-    const userID: object;
248
-}
249
-export default CallStats;
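
The comments above describe a two-step flow: initialize the backend once, then allocate one fabric per peer connection. A hedged sketch, where the credentials, the tpc object and the import path are placeholders rather than real values:

import CallStats from './modules/statistics/CallStats';

declare const tpc: any; // a TraceablePeerConnection supplied by the RTC layer

if (!CallStats.isBackendInitialized()) {
    CallStats.initBackend({
        callStatsID: 'example-id',          // placeholder credentials
        callStatsSecret: 'example-secret',
        aliasName: 'endpoint-1',
        userName: 'Alice',
        configParams: {}
    });
}

const cs = new CallStats(tpc, { confID: 'example.com/room1' });
CallStats.sendMuteEvent(true, 'audio', cs); // report a mute on this fabric
cs.sendTerminateEvent();                    // the fabric is considered disposed afterwards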

+ 0
- 53
types/auto/modules/statistics/LocalStatsCollector.d.ts View File

@@ -1,53 +0,0 @@
1
-/**
2
- * <tt>LocalStatsCollector</tt> calculates statistics for the local stream.
3
- *
4
- * @param stream the local stream
5
- * @param interval stats refresh interval given in ms.
6
- * @param callback function that receives the audio levels.
7
- * @constructor
8
- */
9
-declare function LocalStatsCollector(stream: any, interval: any, callback: any): void;
10
-declare class LocalStatsCollector {
11
-    /**
12
-     * <tt>LocalStatsCollector</tt> calculates statistics for the local stream.
13
-     *
14
-     * @param stream the local stream
15
-     * @param interval stats refresh interval given in ms.
16
-     * @param callback function that receives the audio levels.
17
-     * @constructor
18
-     */
19
-    constructor(stream: any, interval: any, callback: any);
20
-    stream: any;
21
-    intervalId: NodeJS.Timer;
22
-    intervalMilis: any;
23
-    audioLevel: number;
24
-    callback: any;
25
-    source: MediaStreamAudioSourceNode;
26
-    analyser: AnalyserNode;
27
-    /**
28
-     * Starts the collecting the statistics.
29
-     */
30
-    start(): void;
31
-    /**
32
-     * Stops collecting the statistics.
33
-     */
34
-    stop(): void;
35
-}
36
-declare namespace LocalStatsCollector {
37
-    /**
38
-     * Checks if the environment has the necessary conditions to support
39
-     * collecting stats from local streams.
40
-     *
41
-     * @returns {boolean}
42
-     */
43
-    function isLocalStatsSupported(): boolean;
44
-    /**
45
-     * Disconnects the audio context.
46
-     */
47
-    function disconnectAudioContext(): Promise<void>;
48
-    /**
49
-     * Connects the audio context.
50
-     */
51
-    function connectAudioContext(): void;
52
-}
53
-export default LocalStatsCollector;
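
A usage sketch of the collector declared above; localStream is assumed to be a MediaStream obtained elsewhere (for example via getUserMedia) and 200 ms is an arbitrary refresh interval.

import LocalStatsCollector from './modules/statistics/LocalStatsCollector';

declare const localStream: MediaStream;

if (LocalStatsCollector.isLocalStatsSupported()) {
    const collector = new LocalStatsCollector(localStream, 200, (audioLevel: number) => {
        console.log('local audio level:', audioLevel);
    });
    collector.start();
    // ... later, when the local track is disposed
    collector.stop();
}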

+ 0
- 42
types/auto/modules/statistics/PerformanceObserverStats.d.ts View File

@@ -1,42 +0,0 @@
1
-/**
2
- * This class creates an observer that monitors browser's performance measurement events
3
- * as they are recorded in the browser's performance timeline and computes an average and
4
- * a maximum value for the long task events. Tasks are classified as long tasks if they take
5
- * longer than 50ms to execute on the main thread.
6
- */
7
-export class PerformanceObserverStats {
8
-    /**
9
-     * Creates a new instance of Performance observer statistics.
10
-     *
11
-     * @param {*} emitter Event emitter for emitting stats periodically
12
-     * @param {*} statsInterval interval for calculating the stats
13
-     */
14
-    constructor(emitter: any, statsInterval: any);
15
-    eventEmitter: any;
16
-    longTasks: number;
17
-    maxDuration: number;
18
-    performanceStatsInterval: any;
19
-    stats: RunningAverage;
20
-    /**
21
-     * Obtains the average rate of long tasks observed per min and the
22
-     * duration of the longest task recorded by the observer.
23
-     * @returns {Object}
24
-     */
25
-    getLongTasksStats(): any;
26
-    /**
27
-     * Starts the performance observer by registering the callback function
28
-     * that calculates the performance statistics periodically.
29
-     * @returns {void}
30
-     */
31
-    startObserver(): void;
32
-    longTaskEventHandler: (list: any) => void;
33
-    observer: PerformanceObserver;
34
-    longTasksIntervalId: NodeJS.Timer;
35
-    _lastTimeStamp: number;
36
-    /**
37
-     * Stops the performance observer.
38
-     * @returns {void}
39
-     */
40
-    stopObserver(): void;
41
-}
42
-import { RunningAverage } from "../util/MathUtil";
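
A short sketch of how the long-task observer above is driven; the emitter and the 10000 ms interval are illustrative choices, not defaults taken from the library.

import { EventEmitter } from 'events';
import { PerformanceObserverStats } from './modules/statistics/PerformanceObserverStats';

const emitter = new EventEmitter();
const longTaskStats = new PerformanceObserverStats(emitter, 10000);

longTaskStats.startObserver();
// ... after the page has been busy for a while
console.log(longTaskStats.getLongTasksStats()); // avg long tasks/min and max duration
longTaskStats.stopObserver();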

+ 0
- 75
types/auto/modules/statistics/PrecallTest.d.ts View File

@@ -1,75 +0,0 @@
1
-/**
2
- * Loads the callstats script and initializes the library.
3
- *
4
- * @param {Function} onResult - The callback to be invoked when results are received.
5
- * @returns {Promise<void>}
6
- */
7
-export function init(options: any): Promise<void>;
8
-/**
9
- * Executes a pre call test.
10
- *
11
- * @typedef PrecallTestResults
12
- * @type {Object}
13
- * @property {boolean} mediaConnectivity - If there is media connectivity or not.
14
- * @property {number} throughput  - The average throughput.
15
- * @property {number} fractionalLoss - The packet loss.
16
- * @property {number} rtt - The round trip time.
17
- * @property {string} provider - It is usually 'callstats'.
18
- *
19
- * @returns {Promise<{PrecallTestResults}>}
20
- */
21
-export function execute(): Promise<{
22
-    PrecallTestResults;
23
-}>;
24
-declare namespace _default {
25
-    export { init };
26
-    export { execute };
27
-}
28
-export default _default;
29
-/**
30
- * Initializes the callstats lib and registers a callback to be invoked
31
- * when there are 'preCallTestResults'.
32
- */
33
-export type PrecallTestOptions = {
34
-    /**
35
-     * - Callstats credentials - the id.
36
-     */
37
-    callStatsID: string;
38
-    /**
39
-     * - Callstats credentials - the secret.
40
-     */
41
-    callStatsSecret: string;
42
-    /**
43
-     * - The user name to use when initializing callstats.
44
-     */
45
-    statisticsId: string;
46
-    /**
47
-     * - The user display name.
48
-     */
49
-    statisticsDisplayName: string;
50
-};
51
-/**
52
- * Executes a pre call test.
53
- */
54
-export type PrecallTestResults = {
55
-    /**
56
-     * - If there is media connectivity or not.
57
-     */
58
-    mediaConnectivity: boolean;
59
-    /**
60
-     * - The average throughput.
61
-     */
62
-    throughput: number;
63
-    /**
64
-     * - The packet loss.
65
-     */
66
-    fractionalLoss: number;
67
-    /**
68
-     * - The round trip time.
69
-     */
70
-    rtt: number;
71
-    /**
72
-     * - It is usually 'callstats'.
73
-     */
74
-    provider: string;
75
-};
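
Putting the two exported functions together, assuming valid callstats credentials are available; all option values below are placeholders.

import PrecallTest from './modules/statistics/PrecallTest';

async function runPrecallTest(): Promise<void> {
    await PrecallTest.init({
        callStatsID: 'example-id',          // placeholder credentials
        callStatsSecret: 'example-secret',
        statisticsId: 'endpoint-1',
        statisticsDisplayName: 'Alice'
    });

    const results: any = await PrecallTest.execute();
    console.log(results.mediaConnectivity, results.throughput, results.fractionalLoss, results.rtt);
}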

+ 0
- 151
types/auto/modules/statistics/RTPStatsCollector.d.ts View File

@@ -1,151 +0,0 @@
1
-/**
2
- * <tt>StatsCollector</tt> registers for stats updates of given
3
- * <tt>peerconnection</tt> in given <tt>interval</tt>. On each update particular
4
- * stats are extracted and put in {@link SsrcStats} objects. Once the processing
5
- * is done <tt>audioLevelsUpdateCallback</tt> is called with <tt>this</tt>
6
- * instance as an event source.
7
- *
8
- * @param peerconnection WebRTC PeerConnection object.
9
- * @param audioLevelsInterval
10
- * @param statsInterval stats refresh interval given in ms.
11
- * @param eventEmitter
12
- * @constructor
13
- */
14
-export default function StatsCollector(peerconnection: any, audioLevelsInterval: any, statsInterval: any, eventEmitter: any): void;
15
-export default class StatsCollector {
16
-    /**
17
-     * <tt>StatsCollector</tt> registers for stats updates of given
18
-     * <tt>peerconnection</tt> in given <tt>interval</tt>. On each update particular
19
-     * stats are extracted and put in {@link SsrcStats} objects. Once the processing
20
-     * is done <tt>audioLevelsUpdateCallback</tt> is called with <tt>this</tt>
21
-     * instance as an event source.
22
-     *
23
-     * @param peerconnection WebRTC PeerConnection object.
24
-     * @param audioLevelsInterval
25
-     * @param statsInterval stats refresh interval given in ms.
26
-     * @param eventEmitter
27
-     * @constructor
28
-     */
29
-    constructor(peerconnection: any, audioLevelsInterval: any, statsInterval: any, eventEmitter: any);
30
-    peerconnection: any;
31
-    baselineAudioLevelsReport: any;
32
-    currentAudioLevelsReport: any;
33
-    currentStatsReport: any;
34
-    previousStatsReport: any;
35
-    audioLevelReportHistory: {};
36
-    audioLevelsIntervalId: NodeJS.Timer;
37
-    eventEmitter: any;
38
-    conferenceStats: ConferenceStats;
39
-    audioLevelsIntervalMilis: any;
40
-    speakerList: any[];
41
-    statsIntervalId: NodeJS.Timer;
42
-    statsIntervalMilis: any;
43
-    /**
44
-     * Maps SSRC numbers to {@link SsrcStats}.
45
-     * @type {Map<number,SsrcStats}
46
-     */
47
-    ssrc2stats: Map<number, SsrcStats>;
48
-    /**
49
-     * Set the list of the remote speakers for which audio levels are to be calculated.
50
-     *
51
-     * @param {Array<string>} speakerList - Endpoint ids.
52
-     * @returns {void}
53
-     */
54
-    setSpeakerList(speakerList: Array<string>): void;
55
-    /**
56
-     * Stops stats updates.
57
-     */
58
-    stop(): void;
59
-    /**
60
-     * Callback passed to <tt>getStats</tt> method.
61
-     * @param error an error that occurred on <tt>getStats</tt> call.
62
-     */
63
-    errorCallback(error: any): void;
64
-    /**
65
-     * Starts stats updates.
66
-     */
67
-    start(startAudioLevelStats: any): void;
68
-    /**
69
-     *
70
-     */
71
-    _processAndEmitReport(): void;
72
-    private getNonNegativeValue;
73
-    private _calculateBitrate;
74
-    /**
75
-     * Stats processing for spec-compliant RTCPeerConnection#getStats.
76
-     */
77
-    processStatsReport(): void;
78
-    /**
79
-     * Stats processing logic.
80
-     */
81
-    processAudioLevelReport(): void;
82
-}
83
-/**
84
- *
85
- */
86
-declare function ConferenceStats(): void;
87
-declare class ConferenceStats {
88
-    /**
89
-     * The bandwidth
90
-     * @type {{}}
91
-     */
92
-    bandwidth: {};
93
-    /**
94
-     * The bit rate
95
-     * @type {{}}
96
-     */
97
-    bitrate: {};
98
-    /**
99
-     * The packet loss rate
100
-     * @type {{}}
101
-     */
102
-    packetLoss: {};
103
-    /**
104
-     * Array with the transport information.
105
-     * @type {Array}
106
-     */
107
-    transport: any[];
108
-}
109
-/**
110
- * Holds "statistics" for a single SSRC.
111
- * @constructor
112
- */
113
-declare function SsrcStats(): void;
114
-declare class SsrcStats {
115
-    loss: {};
116
-    bitrate: {
117
-        download: number;
118
-        upload: number;
119
-    };
120
-    resolution: {};
121
-    framerate: number;
122
-    codec: string;
123
-    /**
124
-     * Sets the "loss" object.
125
-     * @param loss the value to set.
126
-     */
127
-    setLoss(loss: any): void;
128
-    /**
129
-     * Sets the resolution that belongs to the ssrc represented by this instance.
130
-     * @param resolution new resolution value to be set.
131
-     */
132
-    setResolution(resolution: any): void;
133
-    /**
134
-     * Adds the "download" and "upload" fields from the "bitrate" parameter to
135
-     * the respective fields of the "bitrate" field of this object.
136
-     * @param bitrate an object holding the values to add.
137
-     */
138
-    addBitrate(bitrate: any): void;
139
-    /**
140
-     * Resets the bit rate for the given <tt>ssrc</tt> that belongs to the peer
141
-     * represented by this instance.
142
-     */
143
-    resetBitrate(): void;
144
-    /**
145
-     * Sets the "framerate".
146
-     * @param framerate the value to set.
147
-     */
148
-    setFramerate(framerate: any): void;
149
-    setCodec(codec: any): void;
150
-}
151
-export {};
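
An illustrative wiring of the StatsCollector declared above; peerconnection stands in for a TraceablePeerConnection and the 200/1000 ms intervals are arbitrary.

import { EventEmitter } from 'events';
import StatsCollector from './modules/statistics/RTPStatsCollector';

declare const peerconnection: any; // a TraceablePeerConnection in practice

const emitter = new EventEmitter();
const collector = new StatsCollector(peerconnection, 200, 1000, emitter);

collector.setSpeakerList([ 'endpointA', 'endpointB' ]);
collector.start(true);  // true also starts the audio-level polling
// ... on teardown
collector.stop();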

+ 0
- 117
types/auto/modules/statistics/SpeakerStats.d.ts View File

@@ -1,117 +0,0 @@
1
-export = SpeakerStats;
2
-/**
3
- * A model for keeping track of each user's total
4
- * time as a dominant speaker. The model also
5
- * keeps track of the user's last known name
6
- * in case the user has left the meeting,
7
- * which is also tracked.
8
- */
9
-declare class SpeakerStats {
10
-    /**
11
-     * Initializes a new SpeakerStats instance.
12
-     *
13
-     * @constructor
14
-     * @param {string} userId - The id of the user being tracked.
15
-     * @param {string} displayName - The name of the user being tracked.
16
-     * @param {boolean} isLocalStats - True if the stats model tracks
17
-     * the local user.
18
-     * @returns {void}
19
-     */
20
-    constructor(userId: string, displayName: string, isLocalStats: boolean);
21
-    _userId: string;
22
-    _isLocalStats: boolean;
23
-    totalDominantSpeakerTime: number;
24
-    _dominantSpeakerStart: number;
25
-    _isDominantSpeaker: boolean;
26
-    _isSilent: boolean;
27
-    _hasLeft: boolean;
28
-    _faceExpressions: {
29
-        happy: number;
30
-        neutral: number;
31
-        surprised: number;
32
-        angry: number;
33
-        fearful: number;
34
-        disgusted: number;
35
-        sad: number;
36
-    };
37
-    /**
38
-     * Get the user id being tracked.
39
-     *
40
-     * @returns {string} The user id.
41
-     */
42
-    getUserId(): string;
43
-    /**
44
-     * Get the name of the user being tracked.
45
-     *
46
-     * @returns {string} The user name.
47
-     */
48
-    getDisplayName(): string;
49
-    /**
50
-     * Updates the last known name of the user being tracked.
51
-     *
52
-     * @param {string} newName - The new user name.
53
-     * @returns {void}
54
-     */
55
-    setDisplayName(newName: any): void;
56
-    displayName: any;
57
-    /**
58
-     * Returns true if the stats are tracking the local user.
59
-     *
60
-     * @returns {boolean}
61
-     */
62
-    isLocalStats(): boolean;
63
-    /**
64
-     * Returns true if the tracked user is currently a dominant speaker.
65
-     *
66
-     * @returns {boolean}
67
-     */
68
-    isDominantSpeaker(): boolean;
69
-    /**
70
-     * Sets whether or not the tracked user is currently a dominant speaker.
71
-     *
72
-     * @param {boolean} isNowDominantSpeaker - If true, the user will begin accumulating time
73
-     * as dominant speaker. If false, the user will not accumulate time
74
-     * and will record any time accumulated since starting as dominant speaker.
75
-     * @param {boolean} silence - Indicates whether the dominant speaker is silent or not.
76
-     * @returns {void}
77
-     */
78
-    setDominantSpeaker(isNowDominantSpeaker: any, silence: boolean): void;
79
-    /**
80
-     * Get how long the tracked user has been dominant speaker.
81
-     *
82
-     * @returns {number} - The speaker time in milliseconds.
83
-     */
84
-    getTotalDominantSpeakerTime(): number;
85
-    /**
86
-     * Get whether or not the user is still in the meeting.
87
-     *
88
-     * @returns {boolean} True if the user is no longer in the meeting.
89
-     */
90
-    hasLeft(): boolean;
91
-    /**
92
-     * Set the user as having left the meeting.
93
-     *
94
-     * @returns {void}
95
-     */
96
-    markAsHasLeft(): void;
97
-    /**
98
-     * Gets the face expressions of the user.
99
-     *
100
-     * @returns {Object}
101
-     */
102
-    getFaceExpressions(): any;
103
-    /**
104
-     * Sets the face expressions of the user.
105
-     *
106
-     * @param {Object} faceExpressions - object with face expressions.
107
-     * @returns {void}
108
-     */
109
-    setFaceExpressions(faceExpressions: any): void;
110
-    /**
111
-     * Adds a new face expression to speaker stats.
112
-     *
113
-     * @param  {string} faceExpression
114
-     * @param {number} duration
115
-     */
116
-    addFaceExpression(faceExpression: string, duration: number): void;
117
-}
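
The module uses "export =", so the dominant-speaker time accounting can be sketched as follows; the id and name are placeholders.

import SpeakerStats = require('./modules/statistics/SpeakerStats');

const stats = new SpeakerStats('participant-1', 'Alice', /* isLocalStats */ false);

stats.setDominantSpeaker(true, false);   // start accumulating speaking time, not silent
// ... some time later, when the dominant speaker changes
stats.setDominantSpeaker(false, false);  // stop and record the accumulated time
console.log(stats.getTotalDominantSpeakerTime()); // milliseconds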

+ 0
- 80
types/auto/modules/statistics/SpeakerStatsCollector.d.ts View File

@@ -1,80 +0,0 @@
1
-/**
2
- * A collection for tracking speaker stats. Attaches listeners
3
- * to the conference to automatically update on tracked events.
4
- */
5
-export default class SpeakerStatsCollector {
6
-    /**
7
-     * Initializes a new SpeakerStatsCollector instance.
8
-     *
9
-     * @constructor
10
-     * @param {JitsiConference} conference - The conference to track.
11
-     * @returns {void}
12
-     */
13
-    constructor(conference: any);
14
-    stats: {
15
-        users: {};
16
-        dominantSpeakerId: any;
17
-    };
18
-    conference: any;
19
-    /**
20
-     * Reacts to dominant speaker change events by changing its speaker stats
21
-     * models to reflect the current dominant speaker.
22
-     *
23
-     * @param {string} dominantSpeakerId - The user id of the new dominant speaker.
24
-     * @param {Array<string>} previous - The array with previous speakers.
25
-     * @param {boolean} silence - Indicates whether the dominant speaker is silent or not.
26
-     * @returns {void}
27
-     * @private
28
-     */
29
-    private _onDominantSpeaker;
30
-    /**
31
-     * Reacts to user join events by creating a new SpeakerStats model.
32
-     *
33
-     * @param {string} userId - The user id of the new user.
34
-     * @param {JitsiParticipant} - The JitsiParticipant model for the new user.
35
-     * @returns {void}
36
-     * @private
37
-     */
38
-    private _onUserJoin;
39
-    /**
40
-     * Reacts to user leave events by updating the associated user's
41
-     * SpeakerStats model.
42
-     *
43
-     * @param {string} userId - The user id of the user that left.
44
-     * @returns {void}
45
-     * @private
46
-     */
47
-    private _onUserLeave;
48
-    /**
49
-     * Reacts to user name change events by updating the last known name
50
-     * tracked in the associated SpeakerStats model.
51
-     *
52
-     * @param {string} userId - The user id of the user whose name changed.
53
-     * @returns {void}
54
-     * @private
55
-     */
56
-    private _onDisplayNameChange;
57
-    /**
58
-     * Processes a new face landmark object of a remote user.
59
-     *
60
-     * @param {string} userId - The user id of the remote user.
61
-     * @param {Object} data - The face landmark object.
62
-     * @returns {void}
63
-     * @private
64
-     */
65
-    private _onFaceLandmarkAdd;
66
-    /**
67
-     * Return a copy of the tracked SpeakerStats models.
68
-     *
69
-     * @returns {Object} The keys are the user ids and the values are the
70
-     * associated user's SpeakerStats model.
71
-     */
72
-    getStats(): any;
73
-    /**
74
-     * Updates the current stats with the newly received values.
75
-     *
76
-     * @param {Object} newStats - The new values used to update the current ones.
77
-     * @private
78
-     */
79
-    private _updateStats;
80
-}
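
A minimal sketch, assuming an existing JitsiConference instance; the collector attaches its conference listeners in the constructor, so only construction and polling are needed.

import SpeakerStatsCollector from './modules/statistics/SpeakerStatsCollector';

declare const conference: any; // a JitsiConference

const speakerStats = new SpeakerStatsCollector(conference);
// Later, e.g. on a UI refresh timer:
const perUser = speakerStats.getStats(); // user id -> SpeakerStats model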

+ 0
- 7
types/auto/modules/statistics/constants.d.ts View File

@@ -1,7 +0,0 @@
1
-export const CALLSTATS_SCRIPT_URL: "https://api.callstats.io/static/callstats-ws.min.js";
2
-/**
3
- * The number of remote speakers for which the audio levels will be calculated using
4
- * RTCRtpReceiver#getSynchronizationSources. Limit the number of endpoints to save cpu on the client as this API call
5
- * is known to take longer to execute when there are many audio receivers.
6
- */
7
-export const SPEAKERS_AUDIO_LEVELS: 5;

+ 0
- 354
types/auto/modules/statistics/statistics.d.ts View File

@@ -1,354 +0,0 @@
1
-/// <reference types="node" />
2
-/**
3
- * The options to configure Statistics.
4
- * @typedef {Object} StatisticsOptions
5
- * @property {string} applicationName - The application name to pass to
6
- * callstats.
7
- * @property {string} aliasName - The alias name to use when initializing callstats.
8
- * @property {string} userName - The user name to use when initializing callstats.
9
- * @property {string} confID - The callstats conference ID to use.
10
- * @property {string} callStatsID - Callstats credentials - the id.
11
- * @property {string} callStatsSecret - Callstats credentials - the secret.
12
- * @property {string} customScriptUrl - A custom lib url to use when downloading
13
- * callstats library.
14
- * @property {string} roomName - The room name we are currently in.
15
- * @property {string} configParams - The set of parameters
16
- * to enable/disable certain features in the library. See CallStats docs for more info.
17
- */
18
-/**
19
- *
20
- * @param xmpp
21
- * @param {StatisticsOptions} options - The options to use creating the
22
- * Statistics.
23
- */
24
-declare function Statistics(xmpp: any, options: StatisticsOptions): void;
25
-declare class Statistics {
26
-    /**
27
-     * The options to configure Statistics.
28
-     * @typedef {Object} StatisticsOptions
29
-     * @property {string} applicationName - The application name to pass to
30
-     * callstats.
31
-     * @property {string} aliasName - The alias name to use when initializing callstats.
32
-     * @property {string} userName - The user name to use when initializing callstats.
33
-     * @property {string} confID - The callstats conference ID to use.
34
-     * @property {string} callStatsID - Callstats credentials - the id.
35
-     * @property {string} callStatsSecret - Callstats credentials - the secret.
36
-     * @property {string} customScriptUrl - A custom lib url to use when downloading
37
-     * callstats library.
38
-     * @property {string} roomName - The room name we are currently in.
39
-     * @property {string} configParams - The set of parameters
40
-     * to enable/disable certain features in the library. See CallStats docs for more info.
41
-     */
42
-    /**
43
-     *
44
-     * @param xmpp
45
-     * @param {StatisticsOptions} options - The options to use creating the
46
-     * Statistics.
47
-     */
48
-    constructor(xmpp: any, options: StatisticsOptions);
49
-    /**
50
-     * {@link RTPStats} mapped by {@link TraceablePeerConnection.id} which
51
-     * collect RTP statistics for each peerconnection.
52
-     * @type {Map<string, RTPStats}
53
-     */
54
-    rtpStatsMap: Map<string, RTPStats>;
55
-    eventEmitter: EventEmitter;
56
-    xmpp: any;
57
-    options: {};
58
-    callStatsIntegrationEnabled: boolean;
59
-    callStatsApplicationLogsDisabled: any;
60
-    /**
61
-     * Stores {@link CallStats} instances for each
62
-     * {@link TraceablePeerConnection} (one {@link CallStats} instance serves
63
-     * one TPC). The instances are mapped by {@link TraceablePeerConnection.id}.
64
-     * @type {Map<number, CallStats>}
65
-     */
66
-    callsStatsInstances: Map<number, CallStats>;
67
-    /**
68
-     * Starts collecting RTP stats for given peerconnection.
69
-     * @param {TraceablePeerConnection} peerconnection
70
-     */
71
-    startRemoteStats(peerconnection: any): void;
72
-    addAudioLevelListener(listener: any): void;
73
-    removeAudioLevelListener(listener: any): void;
74
-    addBeforeDisposedListener(listener: any): void;
75
-    removeBeforeDisposedListener(listener: any): void;
76
-    addConnectionStatsListener(listener: any): void;
77
-    removeConnectionStatsListener(listener: any): void;
78
-    addByteSentStatsListener(listener: any): void;
79
-    removeByteSentStatsListener(listener: any): void;
80
-    /**
81
-     * Add a listener that would be notified on a LONG_TASKS_STATS event.
82
-     *
83
-     * @param {Function} listener a function that would be called when notified.
84
-     * @returns {void}
85
-     */
86
-    addLongTasksStatsListener(listener: Function): void;
87
-    /**
88
-     * Creates an instance of {@link PerformanceObserverStats} and starts the
89
-     * observer that records the stats periodically.
90
-     *
91
-     * @returns {void}
92
-     */
93
-    attachLongTasksStats(conference: any): void;
94
-    performanceObserverStats: PerformanceObserverStats;
95
-    /**
96
-     * Obtains the current value of the LongTasks event statistics.
97
-     *
98
-     * @returns {Object|null} stats object if the observer has been
99
-     * created, null otherwise.
100
-     */
101
-    getLongTasksStats(): any | null;
102
-    /**
103
-     * Removes the given listener for the LONG_TASKS_STATS event.
104
-     *
105
-     * @param {Function} listener the listener we want to remove.
106
-     * @returns {void}
107
-     */
108
-    removeLongTasksStatsListener(listener: Function): void;
109
-    /**
110
-     * Updates the list of speakers for which the audio levels are to be calculated. This is needed for the jvb pc only.
111
-     *
112
-     * @param {Array<string>} speakerList The list of remote endpoint ids.
113
-     * @returns {void}
114
-     */
115
-    setSpeakerList(speakerList: Array<string>): void;
116
-    dispose(): void;
117
-    private _stopRemoteStats;
118
-    /**
119
-     * Stops collecting RTP stats for given peerconnection
120
-     * @param {TraceablePeerConnection} tpc
121
-     */
122
-    stopRemoteStats(tpc: any): void;
123
-    /**
124
-     * Initializes the callstats.io API.
125
-     * @param {TraceablePeerConnection} tpc the {@link TraceablePeerConnection}
126
-     * instance for which CalStats will be started.
127
-     * @param {string} remoteUserID
128
-     */
129
-    startCallStats(tpc: any, remoteUserID: string): void;
130
-    /**
131
-     * Removes the callstats.io instances.
132
-     */
133
-    stopCallStats(tpc: any): void;
134
-    /**
135
-     * Returns true if the callstats integration is enabled, otherwise returns
136
-     * false.
137
-     *
138
-     * @returns true if the callstats integration is enabled, otherwise returns
139
-     * false.
140
-     */
141
-    isCallstatsEnabled(): boolean;
142
-    /**
143
-     * Logs either resume or hold event for the given peer connection.
144
-     * @param {TraceablePeerConnection} tpc the connection for which event will be
145
-     * reported
146
-     * @param {boolean} isResume true for resume or false for hold
147
-     */
148
-    sendConnectionResumeOrHoldEvent(tpc: any, isResume: boolean): void;
149
-    /**
150
-     * Notifies CallStats and analytics (if present) for ice connection failed
151
-     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
152
-     */
153
-    sendIceConnectionFailedEvent(tpc: any): void;
154
-    /**
155
-     * Notifies CallStats for mute events
156
-     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
157
-     * @param {boolean} muted true for muted and false for not muted
158
-     * @param {String} type "audio"/"video"
159
-     */
160
-    sendMuteEvent(tpc: any, muted: boolean, type: string): void;
161
-    /**
162
-     * Notifies CallStats for screen sharing events
163
-     * @param start {boolean} true for starting screen sharing and
164
-     * false for stopping it
165
-     * @param {string|null} ssrc - optional ssrc value, used only when
166
-     * starting screen sharing.
167
-     */
168
-    sendScreenSharingEvent(start: boolean, ssrc: string | null): void;
169
-    /**
170
-     * Notifies the statistics module that we are now the dominant speaker of the
171
-     * conference.
172
-     * @param {String} roomJid - The room jid where the speaker event occurred.
173
-     * @param {boolean} silence - Whether the dominant speaker is silent or not.
174
-     */
175
-    sendDominantSpeakerEvent(roomJid: string, silence: boolean): void;
176
-    /**
177
-     * Lets the underlying statistics module know where a given SSRC is rendered by
178
-     * providing renderer tag ID.
179
-     * @param {TraceablePeerConnection} tpc the connection to which the stream
180
-     * belongs to
181
-     * @param {number} ssrc the SSRC of the stream
182
-     * @param {boolean} isLocal
183
-     * @param {string} userId
184
-     * @param {string} usageLabel  meaningful usage label of this stream like
185
-     *        'microphone', 'camera' or 'screen'.
186
-     * @param {string} containerId the id of media 'audio' or 'video' tag which
187
-     *        renders the stream.
188
-     */
189
-    associateStreamWithVideoTag(tpc: any, ssrc: number, isLocal: boolean, userId: string, usageLabel: string, containerId: string): void;
190
-    /**
191
-     * Notifies CallStats that peer connection failed to create offer.
192
-     *
193
-     * @param {Error} e error to send
194
-     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
195
-     */
196
-    sendCreateOfferFailed(e: Error, tpc: any): void;
197
-    /**
198
-     * Notifies CallStats that peer connection failed to create answer.
199
-     *
200
-     * @param {Error} e error to send
201
-     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
202
-     */
203
-    sendCreateAnswerFailed(e: Error, tpc: any): void;
204
-    /**
205
-     * Notifies CallStats that peer connection failed to set local description.
206
-     *
207
-     * @param {Error} e error to send
208
-     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
209
-     */
210
-    sendSetLocalDescFailed(e: Error, tpc: any): void;
211
-    /**
212
-     * Notifies CallStats that peer connection failed to set remote description.
213
-     *
214
-     * @param {Error} e error to send
215
-     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
216
-     */
217
-    sendSetRemoteDescFailed(e: Error, tpc: any): void;
218
-    /**
219
-     * Notifies CallStats that peer connection failed to add ICE candidate.
220
-     *
221
-     * @param {Error} e error to send
222
-     * @param {TraceablePeerConnection} tpc connection on which failure occurred.
223
-     */
224
-    sendAddIceCandidateFailed(e: Error, tpc: any): void;
225
-    /**
226
-     * Sends the given feedback through CallStats.
227
-     *
228
-     * @param overall an integer between 1 and 5 indicating the user's rating.
229
-     * @param comment the comment from the user.
230
-     * @returns {Promise} Resolves when callstats feedback has been submitted
231
-     * successfully.
232
-     */
233
-    sendFeedback(overall: any, comment: any): Promise<any>;
234
-}
235
-declare namespace Statistics {
236
-    /**
237
-     * Init statistic options
238
-     * @param options
239
-     */
240
-    export function init(options: any): void;
241
-    export const audioLevelsEnabled: boolean;
242
-    export const audioLevelsInterval: number;
243
-    export const pcStatsInterval: number;
244
-    export const disableThirdPartyRequests: boolean;
245
-    export { analytics };
246
-    export const instances: any;
247
-    export const localStats: any[];
248
-    export function startLocalStats(track: any, callback: any): void;
249
-    export function stopLocalStats(track: any): void;
250
-    /**
251
-     * Obtains the list of *all* {@link CallStats} instances collected from every
252
-     * valid {@link Statistics} instance.
253
-     * @return {Set<CallStats>}
254
-     * @private
255
-     */
256
-    export function _getAllCallStatsInstances(): Set<CallStats>;
257
-    /**
258
-     * Notifies about active device.
259
-     * @param {{deviceList: {String:String}}} devicesData - list of devices with
260
-     *      their data
261
-     */
262
-    export function sendActiveDeviceListEvent(devicesData: {
263
-        deviceList: {
264
-            String: string;
265
-        };
266
-    }): void;
267
-    /**
268
-     * Notifies CallStats that getUserMedia failed.
269
-     *
270
-     * @param {Error} e error to send
271
-     */
272
-    export function sendGetUserMediaFailed(e: Error): void;
273
-    /**
274
-     * Adds to CallStats an application log.
275
-     *
276
-     * @param {String} m a log message to send or an {Error} object to be reported
277
-     */
278
-    export function sendLog(m: string): void;
279
-    export const LOCAL_JID: string;
280
-    /**
281
-     * Reports global error to CallStats.
282
-     *
283
-     * @param {Error} error
284
-     */
285
-    export function reportGlobalError(error: Error): void;
286
-    /**
287
-     * Sends event to analytics and logs a message to the logger/console. Console
288
-     * messages might also be logged to callstats automatically.
289
-     *
290
-     * @param {string | Object} event the event name, or an object which
291
-     * represents the entire event.
292
-     * @param {Object} properties properties to attach to the event (if an event
293
-     * name as opposed to an event object is provided).
294
-     */
295
-    export function sendAnalyticsAndLog(event: any, properties?: any): void;
296
-    /**
297
-     * Sends event to analytics.
298
-     *
299
-     * @param {string | Object} eventName the event name, or an object which
300
-     * represents the entire event.
301
-     * @param {Object} properties properties to attach to the event
302
-     */
303
-    export function sendAnalytics(eventName: any, properties?: any): void;
304
-}
305
-export default Statistics;
306
-/**
307
- * The options to configure Statistics.
308
- */
309
-export type StatisticsOptions = {
310
-    /**
311
-     * - The application name to pass to
312
-     * callstats.
313
-     */
314
-    applicationName: string;
315
-    /**
316
-     * - The alias name to use when initializing callstats.
317
-     */
318
-    aliasName: string;
319
-    /**
320
-     * - The user name to use when initializing callstats.
321
-     */
322
-    userName: string;
323
-    /**
324
-     * - The callstats conference ID to use.
325
-     */
326
-    confID: string;
327
-    /**
328
-     * - Callstats credentials - the id.
329
-     */
330
-    callStatsID: string;
331
-    /**
332
-     * - Callstats credentials - the secret.
333
-     */
334
-    callStatsSecret: string;
335
-    /**
336
-     * - A custom lib url to use when downloading
337
-     * callstats library.
338
-     */
339
-    customScriptUrl: string;
340
-    /**
341
-     * - The room name we are currently in.
342
-     */
343
-    roomName: string;
344
-    /**
345
-     * - The set of parameters
346
-     * to enable/disable certain features in the library. See CallStats docs for more info.
347
-     */
348
-    configParams: string;
349
-};
350
-import RTPStats from "./RTPStatsCollector";
351
-import EventEmitter from "events";
352
-import CallStats from "./CallStats";
353
-import { PerformanceObserverStats } from "./PerformanceObserverStats";
354
-import analytics from "./AnalyticsAdapter";
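
A hedged end-to-end sketch of the Statistics facade declared above; the xmpp object, the peer connection and every option value are placeholders, and the partial options object is cast because StatisticsOptions declares all fields as required.

import Statistics from './modules/statistics/statistics';

declare const xmpp: any;           // the XMPP module instance
declare const peerconnection: any; // a TraceablePeerConnection

Statistics.init({ audioLevelsInterval: 200, pcStatsInterval: 10000 });

const stats = new Statistics(xmpp, {
    applicationName: 'ExampleApp',
    roomName: 'room1',
    confID: 'example.com/room1'
} as any);

stats.startRemoteStats(peerconnection);
Statistics.sendAnalyticsAndLog('example.event', { value: 1 });
// ... when the conference ends
stats.stopRemoteStats(peerconnection);
stats.dispose();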

+ 0
- 82
types/auto/modules/transcription/audioRecorder.d.ts View File

@@ -1,82 +0,0 @@
1
-export default AudioRecorder;
2
-/**
3
- * main exported object of the file, holding all
4
- * relevant functions and variables for the outside world
5
- * @param jitsiConference the jitsiConference which this object
6
- * is going to record
7
- */
8
-declare function AudioRecorder(jitsiConference: any): void;
9
-declare class AudioRecorder {
10
-    /**
11
-     * main exported object of the file, holding all
12
-     * relevant functions and variables for the outside world
13
-     * @param jitsiConference the jitsiConference which this object
14
-     * is going to record
15
-     */
16
-    constructor(jitsiConference: any);
17
-    recorders: any[];
18
-    fileType: string;
19
-    isRecording: boolean;
20
-    jitsiConference: any;
21
-    /**
22
-     * Adds a new TrackRecorder object to the array.
23
-     *
24
-     * @param track the track potentially holding an audio stream
25
-     */
26
-    addTrack(track: any): void;
27
-    /**
28
-     * Creates a TrackRecorder object. Also creates the MediaRecorder and
29
-     * data array for the trackRecorder.
30
-     * @param track the JitsiTrack holding the audio MediaStream(s)
31
-     */
32
-    instantiateTrackRecorder(track: any): TrackRecorder;
33
-    /**
34
-     * Notifies the module that a specific track has stopped, e.g. a participant left
35
-     * the conference.
36
-     * If the recording has not started yet, the TrackRecorder will be removed from
37
-     * the array. If the recording has started, the recorder will stop recording
38
-     * but not removed from the array so that the recorded stream can still be
39
-     * accessed
40
-     *
41
-     * @param {JitsiTrack} track the JitsiTrack to remove from the recording session
42
-     */
43
-    removeTrack(track: any): void;
44
-    /**
45
-     * Tries to update the name value of all TrackRecorder in the array.
46
-     * If it hasn't changed, it will keep the existing name. If it changes to an
47
-     * undefined value, the old value will also be kept.
48
-     */
49
-    updateNames(): void;
50
-    /**
51
-     * Starts the audio recording of every local and remote track
52
-     */
53
-    start(): void;
54
-    /**
55
-     * Stops the audio recording of every local and remote track
56
-     */
57
-    stop(): void;
58
-    /**
59
-     * Downloads all recorded audio streams (by programmatically clicking download links)
60
-     */
61
-    download(): void;
62
-    /**
63
-     * returns the audio files of all recorders as an array of objects,
64
-     * which include the name of the owner of the track and the starting time stamp
65
-     * @returns {Array} an array of RecordingResult objects
66
-     */
67
-    getRecordingResults(): any[];
68
-    /**
69
-     * Gets the mime type of the recorder audio
70
-     * @returns {String} the mime type of the recorder audio
71
-     */
72
-    getFileType(): string;
73
-}
74
-declare namespace AudioRecorder {
75
-    export { determineCorrectFileType };
76
-}
77
-import TrackRecorder from "./trackRecorder";
78
-/**
79
- * Determines which kind of audio recording the browser supports:
80
- * Chrome supports "audio/webm" and Firefox supports "audio/ogg".
81
- */
82
-declare function determineCorrectFileType(): "audio/webm" | "audio/ogg";
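
A usage sketch of the recorder declared above, assuming a JitsiConference and an audio JitsiTrack provided by the conference events.

import AudioRecorder from './modules/transcription/audioRecorder';

declare const jitsiConference: any; // the conference to record
declare const audioTrack: any;      // a JitsiTrack carrying an audio stream

const recorder = new AudioRecorder(jitsiConference);
recorder.addTrack(audioTrack);
recorder.start();
// ... when the session ends
recorder.stop();
console.log(recorder.getFileType());            // "audio/webm" or "audio/ogg"
const results = recorder.getRecordingResults(); // RecordingResult objects per track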

+ 0
- 18
types/auto/modules/transcription/recordingResult.d.ts View File

@@ -1,18 +0,0 @@
1
-/**
2
- * This object stores variables needed around the recording of an audio stream
3
- * and passing this recording along with additional information along to
4
- * different processes
5
- */
6
-export default class RecordingResult {
7
-    /**
8
-     * @param blob the recording audio stream as a single blob
9
-     * @param name the name of the person of the audio stream
10
-     * @param startTime the time in UTC when recording of the audiostream started
11
-     * @param wordArray the recorded audio stream transcribed as an array of Word objects
12
-     */
13
-    constructor(blob: any, name: any, startTime: any, wordArray: any);
14
-    blob: any;
15
-    name: any;
16
-    startTime: any;
17
-    wordArray: any;
18
-}

+ 0
- 16
types/auto/modules/transcription/trackRecorder.d.ts View File

@@ -1,16 +0,0 @@
1
-/**
2
- * A TrackRecorder object holds all the information needed for recording a
3
- * single JitsiTrack (either remote or local)
4
- * @param track The JitsiTrack the object is going to hold
5
- */
6
-export default class TrackRecorder {
7
-    /**
8
-     * @param track The JitsiTrack the object is going to hold
9
-     */
10
-    constructor(track: any);
11
-    track: any;
12
-    recorder: any;
13
-    data: any;
14
-    name: any;
15
-    startTime: any;
16
-}

+ 0
- 79
types/auto/modules/transcription/transcriber.d.ts View File

@@ -1,79 +0,0 @@
1
-export default Transcriber;
2
-/**
3
- * This is the main object for handling the transcription. It interacts with
4
- * the audioRecorder to record every person in a conference and sends the
5
- * recorded audio to a transcriptionService. The returned speech-to-text result
6
- * will be merged to create a transcript
7
- * @param {AudioRecorder} audioRecorder An audioRecorder recording a conference
8
- */
9
-declare function Transcriber(): void;
10
-declare class Transcriber {
11
-    audioRecorder: AudioRecorder;
12
-    transcriptionService: SphinxService;
13
-    counter: any;
14
-    startTime: Date;
15
-    transcription: string;
16
-    callback: any;
17
-    results: any[];
18
-    state: string;
19
-    lineLength: number;
20
-    /**
21
-     * Method to start the transcription process. It will tell the audioRecorder
22
-     * to start storing all audio streams and record the start time for merging
23
-     * purposes
24
-     */
25
-    start(): void;
26
-    /**
27
-     * Method to stop the transcription process. It will tell the audioRecorder to
28
-     * stop, and get all the recorded audio to send it to the transcription service
29
-    
30
-     * @param callback a callback which will receive the transcription
31
-     */
32
-    stop(callback: any): void;
33
-    /**
34
-     * this method will check if the counter is zero. If it is, it will call
35
-     * the merging method
36
-     */
37
-    maybeMerge(): void;
38
-    /**
39
-     * This method will merge all speech-to-text arrays together in one
40
-     * readable transcription string
41
-     */
42
-    merge(): void;
43
-    /**
44
-     * Appends a word object to the transcription. It will make a new line with a
45
-     * name if a name is specified
46
-     * @param {Word} word the Word object holding the word to append
47
-     * @param {String|null} name the name of a new speaker. Null if not applicable
48
-     */
49
-    updateTranscription(word: any, name: string | null): void;
50
-    /**
51
-     * Gives the transcriber a JitsiTrack holding an audioStream to transcribe.
52
-     * The JitsiTrack is given to the audioRecorder. If it doesn't hold an
53
-     * audiostream, it will not be added by the audioRecorder
54
-     * @param {JitsiTrack} track the track to give to the audioRecorder
55
-     */
56
-    addTrack(track: any): void;
57
-    /**
58
-     * Removes the given track from the audioRecorder
59
-     * @param track
60
-     */
61
-    removeTrack(track: any): void;
62
-    /**
63
-     * Will return the created transcription if it's available or throw an error
64
-     * when it's not done yet
65
-     * @returns {String} the transcription as a String
66
-     */
67
-    getTranscription(): string;
68
-    /**
69
-     * Returns the current state of the transcription process
70
-     */
71
-    getState(): string;
72
-    /**
73
-     * Resets the state to the "before" state, such that it's again possible to
74
-     * call the start method
75
-     */
76
-    reset(): void;
77
-}
78
-import AudioRecorder from "./audioRecorder";
79
-import SphinxService from "./transcriptionServices/SphinxTranscriptionService";
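
Finally, a sketch of driving the Transcriber; the track is assumed to come from a conference TRACK_ADDED event, and the callback shape follows the stop() doc comment above.

import Transcriber from './modules/transcription/transcriber';

declare const audioTrack: any; // a JitsiTrack holding an audio stream

const transcriber = new Transcriber();
transcriber.addTrack(audioTrack);
transcriber.start();
// ... when the conference ends
transcriber.stop((transcription: string) => {
    console.log(transcription);
});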

+ 0
- 3
types/auto/modules/transcription/transcriberHolder.d.ts View File

@@ -1,3 +0,0 @@
1
-export const transcribers: any[];
2
-export function add(transcriber: any): void;
3
-export function add(transcriber: any): void;

+ 0
- 0
types/auto/modules/transcription/transcriptionServices/AbstractTranscriptionService.d.ts View File


Some files were not shown because too many files changed in this diff
