You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

Worker.js 16KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381
  1. /* global TransformStream */
  2. // Worker for E2EE/Insertable streams.
  3. //
  4. /**
  5. * Polyfill RTCEncoded(Audio|Video)Frame.getMetadata() (not available in M83, available M84+).
  6. * The polyfill can not be done on the prototype since its not exposed in workers. Instead,
  7. * it is done as another transformation to keep it separate.
  8. */
  9. function polyFillEncodedFrameMetadata(encodedFrame, controller) {
  10. if (!encodedFrame.getMetadata) {
  11. encodedFrame.getMetadata = function() {
  12. return {
  13. // TODO: provide a more complete polyfill based on additionalData for video.
  14. synchronizationSource: this.synchronizationSource,
  15. contributingSources: this.contributingSources
  16. };
  17. };
  18. }
  19. controller.enqueue(encodedFrame);
  20. }
// We use a ringbuffer of keys so we can change them and still decode packets that were
// encrypted with an old key.
const keyRingSize = 3;

// We use a 96 bit IV for AES GCM. This is signalled in plain together with the
// packet. See https://developer.mozilla.org/en-US/docs/Web/API/AesGcmParams
const ivLength = 12;

// We use a 128 bit key for AES GCM.
const keyGenParameters = {
    name: 'AES-GCM',
    length: 128
};

// We copy the first bytes of the VP8 payload unencrypted.
// For keyframes this is 10 bytes, for non-keyframes (delta) 3. See
// https://tools.ietf.org/html/rfc6386#section-9.1
// This allows the bridge to continue detecting keyframes (only one byte needed in the JVB)
// and is also a bit easier for the VP8 decoder (i.e. it generates funny garbage pictures
// instead of being unable to decode).
// This is a bit for show and we might want to reduce to 1 unconditionally in the final version.
//
// For audio (where frame.type is not set) we do not encrypt the opus TOC byte:
// https://tools.ietf.org/html/rfc6716#section-3.1
// Note: the 'undefined' key is matched via property-name coercion when frame.type is undefined.
const unencryptedBytes = {
    key: 10,
    delta: 3,
    undefined: 1 // frame.type is not set on audio
};

// Salt used in key derivation, set by the 'initialize' message.
// FIXME: We currently use the MUC room name for this which has the same lifetime
// as this worker. While not (pseudo)random as recommended in
// https://developer.mozilla.org/en-US/docs/Web/API/Pbkdf2Params
// this is easily available and the same for all participants.
// We currently do not enforce a minimum length of 16 bytes either.
let _keySalt;
  54. /**
  55. * Derives a AES-GCM key from the input using PBKDF2
  56. * The key length can be configured above and should be either 128 or 256 bits.
  57. * @param {Uint8Array} keyBytes - Value to derive key from
  58. * @param {Uint8Array} salt - Salt used in key derivation
  59. */
  60. async function deriveKey(keyBytes, salt) {
  61. // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/importKey
  62. const material = await crypto.subtle.importKey('raw', keyBytes,
  63. 'PBKDF2', false, [ 'deriveBits', 'deriveKey' ]);
  64. // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/deriveKey#PBKDF2
  65. return crypto.subtle.deriveKey({
  66. name: 'PBKDF2',
  67. salt,
  68. iterations: 100000,
  69. hash: 'SHA-256'
  70. }, material, keyGenParameters, false, [ 'encrypt', 'decrypt' ]);
  71. }
/**
 * Per-participant context holding the cryptographic keys and
 * encode/decode functions
 */
class Context {
    /**
     * @param {string} id - local muc resourcepart
     */
    constructor(id) {
        // An array (ring) of keys that we use for sending and receiving.
        this._cryptoKeyRing = new Array(keyRingSize);

        // A pointer to the currently used key.
        this._currentKeyIndex = -1;

        // We keep track of how many frames we have sent per ssrc.
        // Starts with a random offset similar to the RTP sequence number.
        this._sendCounts = new Map();

        this._id = id;
    }

    /**
     * Derives a per-participant key by mixing the participant id into the salt.
     * @param {Uint8Array} keyBytes - Value to derive key from
     * @param {Uint8Array} salt - Salt used in key derivation
     * @returns {Promise<CryptoKey>} the derived AES-GCM key.
     */
    async deriveKey(keyBytes, salt) {
        const encoder = new TextEncoder();
        const idBytes = encoder.encode(this._id);

        // Separate both parts by a null byte to avoid ambiguity attacks.
        // Layout: [salt][0x00][idBytes] -- the extra byte at index salt.byteLength
        // is left as the zero the buffer is initialized with.
        const participantSalt = new Uint8Array(salt.byteLength + idBytes.byteLength + 1);

        participantSalt.set(salt);
        participantSalt.set(idBytes, salt.byteLength + 1);

        return deriveKey(keyBytes, participantSalt);
    }

    /**
     * Sets a key and starts using it for encrypting.
     * @param {CryptoKey} key - key to install (or a falsy value to clear the slot).
     * @param {Number} keyIndex - index, wrapped into the ring via modulo.
     */
    setKey(key, keyIndex) {
        this._currentKeyIndex = keyIndex % this._cryptoKeyRing.length;
        this._cryptoKeyRing[this._currentKeyIndex] = key;
    }

    /**
     * Construct the IV used for AES-GCM and sent (in plain) with the packet similar to
     * https://tools.ietf.org/html/rfc7714#section-8.1
     * It concatenates
     * - the 32 bit synchronization source (SSRC) given on the encoded frame,
     * - the 32 bit rtp timestamp given on the encoded frame,
     * - a send counter that is specific to the SSRC. Starts at a random number.
     * The send counter is essentially the pictureId but we currently have to implement this ourselves.
     * There is no XOR with a salt. Note that this IV leaks the SSRC to the receiver but since this is
     * randomly generated and SFUs may not rewrite this is considered acceptable.
     * The SSRC is used to allow demultiplexing multiple streams with the same key, as described in
     * https://tools.ietf.org/html/rfc3711#section-4.1.1
     * The RTP timestamp is 32 bits and advances by the codec clock rate (90khz for video, 48khz for
     * opus audio) every second. For video it rolls over roughly every 13 hours.
     * The send counter will advance at the frame rate (30fps for video, 50fps for 20ms opus audio)
     * every second. It will take a long time to roll over.
     *
     * See also https://developer.mozilla.org/en-US/docs/Web/API/AesGcmParams
     * @param {Number} synchronizationSource - SSRC of the frame.
     * @param {Number} timestamp - RTP timestamp of the frame.
     * @returns {ArrayBuffer} 12-byte IV.
     */
    makeIV(synchronizationSource, timestamp) {
        const iv = new ArrayBuffer(ivLength);
        const ivView = new DataView(iv);

        // having to keep our own send count (similar to a picture id) is not ideal.
        if (!this._sendCounts.has(synchronizationSource)) {
            // Initialize with a random offset, similar to the RTP sequence number.
            this._sendCounts.set(synchronizationSource, Math.floor(Math.random() * 0xFFFF));
        }
        const sendCount = this._sendCounts.get(synchronizationSource);

        ivView.setUint32(0, synchronizationSource);
        ivView.setUint32(4, timestamp);
        // NOTE(review): % 0xFFFF keeps the counter in 16 bits even though 32 are
        // available in the IV -- presumably to mirror pictureId semantics; confirm.
        ivView.setUint32(8, sendCount % 0xFFFF);

        this._sendCounts.set(synchronizationSource, sendCount + 1);

        return iv;
    }

    /**
     * Function that will be injected in a stream and will encrypt the given encoded frames.
     *
     * @param {RTCEncodedVideoFrame|RTCEncodedAudioFrame} encodedFrame - Encoded video frame.
     * @param {TransformStreamDefaultController} controller - TransportStreamController.
     *
     * The packet format is described below. One of the design goals was to not require
     * changes to the SFU which for video requires not encrypting the keyframe bit of VP8
     * as SFUs need to detect a keyframe (framemarking or the generic frame descriptor will
     * solve this eventually). This also "hides" that a client is using E2EE a bit.
     *
     * Note that this operates on the full frame, i.e. for VP8 the data described in
     * https://tools.ietf.org/html/rfc6386#section-9.1
     *
     * The VP8 payload descriptor described in
     * https://tools.ietf.org/html/rfc7741#section-4.2
     * is part of the RTP packet and not part of the frame and is not controllable by us.
     * This is fine as the SFU keeps having access to it for routing.
     *
     * The encrypted frame is formed as follows:
     * 1) Leave the first (10, 3, 1) bytes unencrypted, depending on the frame type and kind.
     * 2) Form the GCM IV for the frame as described above.
     * 3) Encrypt the rest of the frame using AES-GCM.
     * 4) Allocate space for the encrypted frame.
     * 5) Copy the unencrypted bytes to the start of the encrypted frame.
     * 6) Append the ciphertext to the encrypted frame.
     * 7) Append the IV.
     * 8) Append a single byte for the key identifier. TODO: we don't need all the bits.
     * 9) Enqueue the encrypted frame for sending.
     */
    encodeFunction(encodedFrame, controller) {
        const keyIndex = this._currentKeyIndex;

        if (this._cryptoKeyRing[keyIndex]) {
            const iv = this.makeIV(encodedFrame.getMetadata().synchronizationSource, encodedFrame.timestamp);

            // The unencrypted header bytes are authenticated via additionalData.
            return crypto.subtle.encrypt({
                name: 'AES-GCM',
                iv,
                additionalData: new Uint8Array(encodedFrame.data, 0, unencryptedBytes[encodedFrame.type])
            }, this._cryptoKeyRing[keyIndex], new Uint8Array(encodedFrame.data,
                unencryptedBytes[encodedFrame.type]))
            .then(cipherText => {
                // header + ciphertext (includes GCM tag) + IV + 1 key-index byte.
                const newData = new ArrayBuffer(unencryptedBytes[encodedFrame.type] + cipherText.byteLength
                    + iv.byteLength + 1);
                const newUint8 = new Uint8Array(newData);

                newUint8.set(
                    new Uint8Array(encodedFrame.data, 0, unencryptedBytes[encodedFrame.type])); // copy first bytes.
                newUint8.set(
                    new Uint8Array(cipherText), unencryptedBytes[encodedFrame.type]); // add ciphertext.
                newUint8.set(
                    new Uint8Array(iv), unencryptedBytes[encodedFrame.type] + cipherText.byteLength); // append IV.
                newUint8[unencryptedBytes[encodedFrame.type] + cipherText.byteLength + ivLength]
                    = keyIndex; // set key index.
                encodedFrame.data = newData;

                return controller.enqueue(encodedFrame);
            }, e => {
                console.error(e);

                // We are not enqueuing the frame here on purpose.
            });
        }

        /* NOTE WELL:
         * This will send unencrypted data (only protected by DTLS transport encryption) when no key is configured.
         * This is ok for demo purposes but should not be done once this becomes more relied upon.
         */
        controller.enqueue(encodedFrame);
    }

    /**
     * Function that will be injected in a stream and will decrypt the given encoded frames.
     *
     * @param {RTCEncodedVideoFrame|RTCEncodedAudioFrame} encodedFrame - Encoded video frame.
     * @param {TransformStreamDefaultController} controller - TransportStreamController.
     *
     * The decrypted frame is formed as follows:
     * 1) Extract the key index from the last byte of the encrypted frame.
     *    If there is no key associated with the key index, the frame is enqueued for decoding
     *    and these steps terminate.
     * 2) Determine the frame type in order to look up the number of unencrypted header bytes.
     * 3) Extract the 12-byte IV from its position near the end of the packet.
     *    Note: the IV is treated as opaque and not reconstructed from the input.
     * 4) Decrypt the encrypted frame content after the unencrypted bytes using AES-GCM.
     * 5) Allocate space for the decrypted frame.
     * 6) Copy the unencrypted bytes from the start of the encrypted frame.
     * 7) Append the plaintext to the decrypted frame.
     * 8) Enqueue the decrypted frame for decoding.
     */
    decodeFunction(encodedFrame, controller) {
        const data = new Uint8Array(encodedFrame.data);
        const keyIndex = data[encodedFrame.data.byteLength - 1];

        if (this._cryptoKeyRing[keyIndex]) {
            const iv = new Uint8Array(encodedFrame.data, encodedFrame.data.byteLength - ivLength - 1, ivLength);
            const cipherTextStart = unencryptedBytes[encodedFrame.type];
            const cipherTextLength = encodedFrame.data.byteLength - (unencryptedBytes[encodedFrame.type]
                + ivLength + 1);

            return crypto.subtle.decrypt({
                name: 'AES-GCM',
                iv,
                additionalData: new Uint8Array(encodedFrame.data, 0, unencryptedBytes[encodedFrame.type])
            }, this._cryptoKeyRing[keyIndex], new Uint8Array(encodedFrame.data, cipherTextStart, cipherTextLength))
            .then(plainText => {
                const newData = new ArrayBuffer(unencryptedBytes[encodedFrame.type] + plainText.byteLength);
                const newUint8 = new Uint8Array(newData);

                newUint8.set(new Uint8Array(encodedFrame.data, 0, unencryptedBytes[encodedFrame.type]));
                newUint8.set(new Uint8Array(plainText), unencryptedBytes[encodedFrame.type]);
                encodedFrame.data = newData;

                return controller.enqueue(encodedFrame);
            }, e => {
                console.error(e);

                // TODO: notify the application about error status.
                // TODO: For video we need a better strategy since we do not want to base any
                // non-error frames on a garbage keyframe.
                if (encodedFrame.type === undefined) {
                    // audio, replace with silence.
                    const newData = new ArrayBuffer(3);
                    const newUint8 = new Uint8Array(newData);

                    newUint8.set([ 0xd8, 0xff, 0xfe ]); // opus silence frame.
                    encodedFrame.data = newData;
                    controller.enqueue(encodedFrame);
                }
            });
        } else if (keyIndex >= this._cryptoKeyRing.length && this._cryptoKeyRing[this._currentKeyIndex]) {
            // If we are encrypting but don't have a key for the remote drop the frame.
            // This is a heuristic since we don't know whether a packet is encrypted,
            // do not have a checksum and do not have signaling for whether a remote participant does
            // encrypt or not.
            return;
        }

        // TODO: this just passes through to the decoder. Is that ok? If we don't know the key yet
        // we might want to buffer a bit but it is still unclear how to do that (and for how long etc).
        controller.enqueue(encodedFrame);
    }
}
  277. const contexts = new Map(); // Map participant id => context
  278. onmessage = async event => {
  279. const { operation } = event.data;
  280. if (operation === 'initialize') {
  281. _keySalt = event.data.salt;
  282. } else if (operation === 'encode') {
  283. const { readableStream, writableStream, participantId } = event.data;
  284. if (!contexts.has(participantId)) {
  285. contexts.set(participantId, new Context(participantId));
  286. }
  287. const context = contexts.get(participantId);
  288. const transformStream = new TransformStream({
  289. transform: context.encodeFunction.bind(context)
  290. });
  291. readableStream
  292. .pipeThrough(new TransformStream({
  293. transform: polyFillEncodedFrameMetadata // M83 polyfill.
  294. }))
  295. .pipeThrough(transformStream)
  296. .pipeTo(writableStream);
  297. } else if (operation === 'decode') {
  298. const { readableStream, writableStream, participantId } = event.data;
  299. if (!contexts.has(participantId)) {
  300. contexts.set(participantId, new Context(participantId));
  301. }
  302. const context = contexts.get(participantId);
  303. const transformStream = new TransformStream({
  304. transform: context.decodeFunction.bind(context)
  305. });
  306. readableStream
  307. .pipeThrough(new TransformStream({
  308. transform: polyFillEncodedFrameMetadata // M83 polyfill.
  309. }))
  310. .pipeThrough(transformStream)
  311. .pipeTo(writableStream);
  312. } else if (operation === 'setKey') {
  313. const { participantId, key, keyIndex } = event.data;
  314. if (!contexts.has(participantId)) {
  315. contexts.set(participantId, new Context(participantId));
  316. }
  317. const context = contexts.get(participantId);
  318. if (key) {
  319. context.setKey(await context.deriveKey(key, _keySalt), keyIndex);
  320. } else {
  321. context.setKey(false, keyIndex);
  322. }
  323. } else if (operation === 'cleanup') {
  324. const { participantId } = event.data;
  325. contexts.delete(participantId);
  326. } else {
  327. console.error('e2ee worker', operation);
  328. }
  329. };