You can't select more than 25 topics. Topics must start with a letter or number, can include hyphens ("-") and can be up to 35 characters long.

encode.ts 10KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379
  1. import { deflate, inflate } from "pako";
  2. import { encryptData, decryptData } from "./encryption";
  3. // -----------------------------------------------------------------------------
  4. // byte (binary) strings
  5. // -----------------------------------------------------------------------------
  6. // fast, Buffer-compatible implem
  7. export const toByteString = (
  8. data: string | Uint8Array | ArrayBuffer,
  9. ): Promise<string> => {
  10. return new Promise((resolve, reject) => {
  11. const blob =
  12. typeof data === "string"
  13. ? new Blob([new TextEncoder().encode(data)])
  14. : new Blob([data instanceof Uint8Array ? data : new Uint8Array(data)]);
  15. const reader = new FileReader();
  16. reader.onload = (event) => {
  17. if (!event.target || typeof event.target.result !== "string") {
  18. return reject(new Error("couldn't convert to byte string"));
  19. }
  20. resolve(event.target.result);
  21. };
  22. reader.readAsBinaryString(blob);
  23. });
  24. };
  25. const byteStringToArrayBuffer = (byteString: string) => {
  26. const buffer = new ArrayBuffer(byteString.length);
  27. const bufferView = new Uint8Array(buffer);
  28. for (let i = 0, len = byteString.length; i < len; i++) {
  29. bufferView[i] = byteString.charCodeAt(i);
  30. }
  31. return buffer;
  32. };
  33. const byteStringToString = (byteString: string) => {
  34. return new TextDecoder("utf-8").decode(byteStringToArrayBuffer(byteString));
  35. };
  36. // -----------------------------------------------------------------------------
  37. // base64
  38. // -----------------------------------------------------------------------------
  39. /**
  40. * @param isByteString set to true if already byte string to prevent bloat
  41. * due to reencoding
  42. */
  43. export const stringToBase64 = async (str: string, isByteString = false) => {
  44. return isByteString ? window.btoa(str) : window.btoa(await toByteString(str));
  45. };
  46. // async to align with stringToBase64
  47. export const base64ToString = async (base64: string, isByteString = false) => {
  48. return isByteString
  49. ? window.atob(base64)
  50. : byteStringToString(window.atob(base64));
  51. };
  52. // -----------------------------------------------------------------------------
  53. // text encoding
  54. // -----------------------------------------------------------------------------
/** envelope produced by `encode()` and consumed by `decode()` */
type EncodedData = {
  // the payload as a byte string (raw zlib bytes when compressed)
  encoded: string;
  // currently the only supported encoding
  encoding: "bstring";
  /** whether text is compressed (zlib) */
  compressed: boolean;
  /** version for potential migration purposes */
  version?: string;
};
  63. /**
  64. * Encodes (and potentially compresses via zlib) text to byte string
  65. */
  66. export const encode = async ({
  67. text,
  68. compress,
  69. }: {
  70. text: string;
  71. /** defaults to `true`. If compression fails, falls back to bstring alone. */
  72. compress?: boolean;
  73. }): Promise<EncodedData> => {
  74. let deflated!: string;
  75. if (compress !== false) {
  76. try {
  77. deflated = await toByteString(deflate(text));
  78. } catch (error) {
  79. console.error("encode: cannot deflate", error);
  80. }
  81. }
  82. return {
  83. version: "1",
  84. encoding: "bstring",
  85. compressed: !!deflated,
  86. encoded: deflated || (await toByteString(text)),
  87. };
  88. };
  89. export const decode = async (data: EncodedData): Promise<string> => {
  90. let decoded: string;
  91. switch (data.encoding) {
  92. case "bstring":
  93. // if compressed, do not double decode the bstring
  94. decoded = data.compressed
  95. ? data.encoded
  96. : await byteStringToString(data.encoded);
  97. break;
  98. default:
  99. throw new Error(`decode: unknown encoding "${data.encoding}"`);
  100. }
  101. if (data.compressed) {
  102. return inflate(new Uint8Array(byteStringToArrayBuffer(decoded)), {
  103. to: "string",
  104. });
  105. }
  106. return decoded;
  107. };
  108. // -----------------------------------------------------------------------------
  109. // binary encoding
  110. // -----------------------------------------------------------------------------
/** metadata describing how a binary file payload was encoded */
type FileEncodingInfo = {
  /* version 2 is the version we're shipping the initial image support with.
  version 1 was a PR version that a lot of people were using anyway.
  Thus, if there are issues we can check whether they're not using the
  unoffic version */
  version: 1 | 2;
  // zlib via pako (versioned so the algorithm can change later), or null
  // when the payload is uncompressed
  compression: "pako@1" | null;
  // set when the payload is encrypted via ./encryption's encryptData,
  // null when stored in the clear
  encryption: "AES-GCM" | null;
};
  120. // -----------------------------------------------------------------------------
  121. const CONCAT_BUFFERS_VERSION = 1;
  122. /** how many bytes we use to encode how many bytes the next chunk has.
  123. * Corresponds to DataView setter methods (setUint32, setUint16, etc).
  124. *
  125. * NOTE ! values must not be changed, which would be backwards incompatible !
  126. */
  127. const VERSION_DATAVIEW_BYTES = 4;
  128. const NEXT_CHUNK_SIZE_DATAVIEW_BYTES = 4;
  129. // -----------------------------------------------------------------------------
  130. const DATA_VIEW_BITS_MAP = { 1: 8, 2: 16, 4: 32 } as const;
  131. // getter
  132. function dataView(buffer: Uint8Array, bytes: 1 | 2 | 4, offset: number): number;
  133. // setter
  134. function dataView(
  135. buffer: Uint8Array,
  136. bytes: 1 | 2 | 4,
  137. offset: number,
  138. value: number,
  139. ): Uint8Array;
  140. /**
  141. * abstraction over DataView that serves as a typed getter/setter in case
  142. * you're using constants for the byte size and want to ensure there's no
  143. * discrepenancy in the encoding across refactors.
  144. *
  145. * DataView serves for an endian-agnostic handling of numbers in ArrayBuffers.
  146. */
  147. function dataView(
  148. buffer: Uint8Array,
  149. bytes: 1 | 2 | 4,
  150. offset: number,
  151. value?: number,
  152. ): Uint8Array | number {
  153. if (value != null) {
  154. if (value > Math.pow(2, DATA_VIEW_BITS_MAP[bytes]) - 1) {
  155. throw new Error(
  156. `attempting to set value higher than the allocated bytes (value: ${value}, bytes: ${bytes})`,
  157. );
  158. }
  159. const method = `setUint${DATA_VIEW_BITS_MAP[bytes]}` as const;
  160. new DataView(buffer.buffer)[method](offset, value);
  161. return buffer;
  162. }
  163. const method = `getUint${DATA_VIEW_BITS_MAP[bytes]}` as const;
  164. return new DataView(buffer.buffer)[method](offset);
  165. }
  166. // -----------------------------------------------------------------------------
  167. /**
  168. * Resulting concatenated buffer has this format:
  169. *
  170. * [
  171. * VERSION chunk (4 bytes)
  172. * LENGTH chunk 1 (4 bytes)
  173. * DATA chunk 1 (up to 2^32 bits)
  174. * LENGTH chunk 2 (4 bytes)
  175. * DATA chunk 2 (up to 2^32 bits)
  176. * ...
  177. * ]
  178. *
  179. * @param buffers each buffer (chunk) must be at most 2^32 bits large (~4GB)
  180. */
  181. const concatBuffers = (...buffers: Uint8Array[]) => {
  182. const bufferView = new Uint8Array(
  183. VERSION_DATAVIEW_BYTES +
  184. NEXT_CHUNK_SIZE_DATAVIEW_BYTES * buffers.length +
  185. buffers.reduce((acc, buffer) => acc + buffer.byteLength, 0),
  186. );
  187. let cursor = 0;
  188. // as the first chunk we'll encode the version for backwards compatibility
  189. dataView(bufferView, VERSION_DATAVIEW_BYTES, cursor, CONCAT_BUFFERS_VERSION);
  190. cursor += VERSION_DATAVIEW_BYTES;
  191. for (const buffer of buffers) {
  192. dataView(
  193. bufferView,
  194. NEXT_CHUNK_SIZE_DATAVIEW_BYTES,
  195. cursor,
  196. buffer.byteLength,
  197. );
  198. cursor += NEXT_CHUNK_SIZE_DATAVIEW_BYTES;
  199. bufferView.set(buffer, cursor);
  200. cursor += buffer.byteLength;
  201. }
  202. return bufferView;
  203. };
  204. /** can only be used on buffers created via `concatBuffers()` */
  205. const splitBuffers = (concatenatedBuffer: Uint8Array) => {
  206. const buffers = [];
  207. let cursor = 0;
  208. // first chunk is the version (ignored for now)
  209. cursor += VERSION_DATAVIEW_BYTES;
  210. while (true) {
  211. const chunkSize = dataView(
  212. concatenatedBuffer,
  213. NEXT_CHUNK_SIZE_DATAVIEW_BYTES,
  214. cursor,
  215. );
  216. cursor += NEXT_CHUNK_SIZE_DATAVIEW_BYTES;
  217. buffers.push(concatenatedBuffer.slice(cursor, cursor + chunkSize));
  218. cursor += chunkSize;
  219. if (cursor >= concatenatedBuffer.byteLength) {
  220. break;
  221. }
  222. }
  223. return buffers;
  224. };
  225. // helpers for (de)compressing data with JSON metadata including encryption
  226. // -----------------------------------------------------------------------------
  227. /** @private */
  228. const _encryptAndCompress = async (
  229. data: Uint8Array | string,
  230. encryptionKey: string,
  231. ) => {
  232. const { encryptedBuffer, iv } = await encryptData(
  233. encryptionKey,
  234. deflate(data),
  235. );
  236. return { iv, buffer: new Uint8Array(encryptedBuffer) };
  237. };
  238. /**
  239. * The returned buffer has following format:
  240. * `[]` refers to a buffers wrapper (see `concatBuffers`)
  241. *
  242. * [
  243. * encodingMetadataBuffer,
  244. * iv,
  245. * [
  246. * contentsMetadataBuffer
  247. * contentsBuffer
  248. * ]
  249. * ]
  250. */
  251. export const compressData = async <T extends Record<string, any> = never>(
  252. dataBuffer: Uint8Array,
  253. options: {
  254. encryptionKey: string;
  255. } & ([T] extends [never]
  256. ? {
  257. metadata?: T;
  258. }
  259. : {
  260. metadata: T;
  261. }),
  262. ): Promise<Uint8Array> => {
  263. const fileInfo: FileEncodingInfo = {
  264. version: 2,
  265. compression: "pako@1",
  266. encryption: "AES-GCM",
  267. };
  268. const encodingMetadataBuffer = new TextEncoder().encode(
  269. JSON.stringify(fileInfo),
  270. );
  271. const contentsMetadataBuffer = new TextEncoder().encode(
  272. JSON.stringify(options.metadata || null),
  273. );
  274. const { iv, buffer } = await _encryptAndCompress(
  275. concatBuffers(contentsMetadataBuffer, dataBuffer),
  276. options.encryptionKey,
  277. );
  278. return concatBuffers(encodingMetadataBuffer, iv, buffer);
  279. };
  280. /** @private */
  281. const _decryptAndDecompress = async (
  282. iv: Uint8Array,
  283. decryptedBuffer: Uint8Array,
  284. decryptionKey: string,
  285. isCompressed: boolean,
  286. ) => {
  287. decryptedBuffer = new Uint8Array(
  288. await decryptData(iv, decryptedBuffer, decryptionKey),
  289. );
  290. if (isCompressed) {
  291. return inflate(decryptedBuffer);
  292. }
  293. return decryptedBuffer;
  294. };
  295. export const decompressData = async <T extends Record<string, any>>(
  296. bufferView: Uint8Array,
  297. options: { decryptionKey: string },
  298. ) => {
  299. // first chunk is encoding metadata (ignored for now)
  300. const [encodingMetadataBuffer, iv, buffer] = splitBuffers(bufferView);
  301. const encodingMetadata: FileEncodingInfo = JSON.parse(
  302. new TextDecoder().decode(encodingMetadataBuffer),
  303. );
  304. try {
  305. const [contentsMetadataBuffer, contentsBuffer] = splitBuffers(
  306. await _decryptAndDecompress(
  307. iv,
  308. buffer,
  309. options.decryptionKey,
  310. !!encodingMetadata.compression,
  311. ),
  312. );
  313. const metadata = JSON.parse(
  314. new TextDecoder().decode(contentsMetadataBuffer),
  315. ) as T;
  316. return {
  317. /** metadata source is always JSON so we can decode it here */
  318. metadata,
  319. /** data can be anything so the caller must decode it */
  320. data: contentsBuffer,
  321. };
  322. } catch (error) {
  323. console.error(
  324. `Error during decompressing and decrypting the file.`,
  325. encodingMetadata,
  326. );
  327. throw error;
  328. }
  329. };
  330. // -----------------------------------------------------------------------------