diff --git a/.dockerignore b/.dockerignore
index 4f68a3b..6d312ec 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -12,6 +12,7 @@ node_modules
 /data
 /library
 /thumbnails
+/uploads
 
 # OS
 .DS_Store
diff --git a/.env.example b/.env.example
index e3b6365..4e8b20b 100644
--- a/.env.example
+++ b/.env.example
@@ -12,3 +12,4 @@ USER_CLIENT_CHALLENGE_EXPIRES=
 SESSION_UPGRADE_CHALLENGE_EXPIRES=
 LIBRARY_PATH=
 THUMBNAILS_PATH=
+UPLOADS_PATH=
diff --git a/.gitignore b/.gitignore
index 5078fa8..a200c74 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,6 +10,7 @@ node_modules
 /data
 /library
 /thumbnails
+/uploads
 
 # OS
 .DS_Store
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 2015066..a624d9f 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -20,6 +20,7 @@ services:
       - SESSION_UPGRADE_CHALLENGE_EXPIRES
       - LIBRARY_PATH=/app/data/library
       - THUMBNAILS_PATH=/app/data/thumbnails
+      - UPLOADS_PATH=/app/data/uploads
       # SvelteKit
       - ADDRESS_HEADER=${TRUST_PROXY:+X-Forwarded-For}
      - XFF_DEPTH=${TRUST_PROXY:-}
diff --git a/package.json b/package.json
index c16b700..952d53f 100644
--- a/package.json
+++ b/package.json
@@ -19,6 +19,7 @@
     "@eslint/compat": "^2.0.0",
     "@eslint/js": "^9.39.2",
     "@iconify-json/material-symbols": "^1.2.50",
+    "@noble/hashes": "^2.0.1",
     "@sveltejs/adapter-node": "^5.4.0",
     "@sveltejs/kit": "^2.49.2",
     "@sveltejs/vite-plugin-svelte": "^6.2.1",
@@ -55,7 +56,6 @@
     "vite": "^7.3.0"
   },
   "dependencies": {
-    "@fastify/busboy": "^3.2.0",
     "@trpc/server": "^11.8.1",
     "argon2": "^0.44.0",
     "kysely": "^0.28.9",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index e4e336f..f4c8e80 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -8,9 +8,6 @@ importers:
 
   .:
     dependencies:
-      '@fastify/busboy':
-        specifier: ^3.2.0
-        version: 3.2.0
       '@trpc/server':
         specifier: ^11.8.1
         version: 11.8.1(typescript@5.9.3)
@@ -48,6 +45,9 @@ importers:
       '@iconify-json/material-symbols':
         specifier: ^1.2.50
         version: 1.2.50
+      '@noble/hashes':
+        specifier: ^2.0.1
+        version: 2.0.1
       '@sveltejs/adapter-node':
         specifier: ^5.4.0
         version: 5.4.0(@sveltejs/kit@2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))
@@ -370,9 +370,6 @@ packages:
     resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==}
     engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
 
-  '@fastify/busboy@3.2.0':
-    resolution: {integrity: sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA==}
-
   '@humanfs/core@0.19.1':
     resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==}
     engines: {node: '>=18.18.0'}
@@ -414,6 +411,10 @@ packages:
   '@jridgewell/trace-mapping@0.3.31':
     resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==}
 
+  '@noble/hashes@2.0.1':
+    resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==}
+    engines: {node: '>= 20.19.0'}
+
   '@nodelib/fs.scandir@2.1.5':
     resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
     engines: {node: '>= 8'}
@@ -2173,8 +2174,6 @@ snapshots:
       '@eslint/core': 0.17.0
      levn: 0.4.1
 
-  '@fastify/busboy@3.2.0': {}
-
   '@humanfs/core@0.19.1': {}
 
   '@humanfs/node@0.16.7':
@@ -2217,6 +2216,8 @@ snapshots:
       '@jridgewell/resolve-uri': 3.1.2
       '@jridgewell/sourcemap-codec': 1.5.5
 
+  '@noble/hashes@2.0.1': {}
+
   '@nodelib/fs.scandir@2.1.5':
     dependencies:
       '@nodelib/fs.stat': 2.0.5
diff --git a/src/hooks.client.ts b/src/hooks.client.ts
index 99e11c9..a677d9f 100644
--- a/src/hooks.client.ts
+++ b/src/hooks.client.ts
@@ -1,7 +1,6 @@
 import type { ClientInit } from "@sveltejs/kit";
 import { cleanupDanglingInfos, getClientKey, getMasterKeys, getHmacSecrets } from "$lib/indexedDB";
 import { prepareFileCache } from "$lib/modules/file";
-import { prepareOpfs } from "$lib/modules/opfs";
 import { clientKeyStore, masterKeyStore, hmacSecretStore } from "$lib/stores";
 
 const requestPersistentStorage = async () => {
@@ -46,7 +45,6 @@ export const init: ClientInit = async () => {
     prepareClientKeyStore(),
     prepareMasterKeyStore(),
     prepareHmacSecretStore(),
-    prepareOpfs(),
   ]);
 
   cleanupDanglingInfos(); // Intended
diff --git a/src/hooks.server.ts b/src/hooks.server.ts
index 6f94a7e..b816f7f 100644
--- a/src/hooks.server.ts
+++ b/src/hooks.server.ts
@@ -7,6 +7,7 @@ import {
   cleanupExpiredSessions,
   cleanupExpiredSessionUpgradeChallenges,
 } from "$lib/server/db/session";
+import { cleanupExpiredUploadSessions } from "$lib/server/services/upload";
 import { authenticate, setAgentInfo } from "$lib/server/middlewares";
 
 export const init: ServerInit = async () => {
@@ -16,6 +17,7 @@ export const init: ServerInit = async () => {
     cleanupExpiredUserClientChallenges();
     cleanupExpiredSessions();
     cleanupExpiredSessionUpgradeChallenges();
+    cleanupExpiredUploadSessions();
   });
 };
diff --git a/src/lib/constants/index.ts b/src/lib/constants/index.ts
new file mode 100644
index 0000000..4983846
--- /dev/null
+++ b/src/lib/constants/index.ts
@@ -0,0 +1,2 @@
+export * from "./serviceWorker";
+export * from "./upload";
diff --git a/src/lib/constants/serviceWorker.ts b/src/lib/constants/serviceWorker.ts
new file mode 100644
index 0000000..abbaa3c
--- /dev/null
+++ b/src/lib/constants/serviceWorker.ts
@@ -0,0 +1 @@
+export const DECRYPTED_FILE_URL_PREFIX = "/_internal/decryptedFile/";
diff --git a/src/lib/constants/upload.ts b/src/lib/constants/upload.ts
new file mode 100644
index 0000000..57934d6
--- /dev/null
+++ b/src/lib/constants/upload.ts
@@ -0,0 +1,6 @@
+export const AES_GCM_IV_SIZE = 12;
+export const AES_GCM_TAG_SIZE = 16;
+export const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE;
+
+export const CHUNK_SIZE = 4 * 1024 * 1024; // 4 MiB
+export const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD;
diff --git a/src/lib/indexedDB/keyStore.ts b/src/lib/indexedDB/keyStore.ts
index 7a4c89e..86b8b79 100644
--- a/src/lib/indexedDB/keyStore.ts
+++ b/src/lib/indexedDB/keyStore.ts
@@ -70,12 +70,12 @@ export const storeMasterKeys = async (keys: MasterKey[]) => {
 };
 
 export const getHmacSecrets = async () => {
-  return await keyStore.hmacSecret.toArray();
+  return (await keyStore.hmacSecret.toArray()).filter(({ secret }) => secret.extractable);
 };
 
 export const storeHmacSecrets = async (secrets: HmacSecret[]) => {
-  if (secrets.some(({ secret }) => secret.extractable)) {
-    throw new Error("Hmac secrets must be nonextractable");
+  if (secrets.some(({ secret }) => !secret.extractable)) {
+    throw new Error("Hmac secrets must be extractable");
   }
   await keyStore.hmacSecret.bulkPut(secrets);
 };
diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts
index c911d26..35687e6 100644
--- a/src/lib/modules/crypto/aes.ts
+++ b/src/lib/modules/crypto/aes.ts
@@ -1,8 +1,15 @@
-import { encodeString, decodeString, encodeToBase64, decodeFromBase64 } from "./util";
from "$lib/constants"; +import { + encodeString, + decodeString, + encodeToBase64, + decodeFromBase64, + concatenateBuffers, +} from "./utils"; export const generateMasterKey = async () => { return { - masterKey: await window.crypto.subtle.generateKey( + masterKey: await crypto.subtle.generateKey( { name: "AES-KW", length: 256, @@ -15,7 +22,7 @@ export const generateMasterKey = async () => { export const generateDataKey = async () => { return { - dataKey: await window.crypto.subtle.generateKey( + dataKey: await crypto.subtle.generateKey( { name: "AES-GCM", length: 256, @@ -28,9 +35,9 @@ export const generateDataKey = async () => { }; export const makeAESKeyNonextractable = async (key: CryptoKey) => { - return await window.crypto.subtle.importKey( + return await crypto.subtle.importKey( "raw", - await window.crypto.subtle.exportKey("raw", key), + await crypto.subtle.exportKey("raw", key), key.algorithm, false, key.usages, @@ -38,12 +45,12 @@ export const makeAESKeyNonextractable = async (key: CryptoKey) => { }; export const wrapDataKey = async (dataKey: CryptoKey, masterKey: CryptoKey) => { - return encodeToBase64(await window.crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW")); + return encodeToBase64(await crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW")); }; export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey) => { return { - dataKey: await window.crypto.subtle.unwrapKey( + dataKey: await crypto.subtle.unwrapKey( "raw", decodeFromBase64(dataKeyWrapped), masterKey, @@ -56,12 +63,12 @@ export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey }; export const wrapHmacSecret = async (hmacSecret: CryptoKey, masterKey: CryptoKey) => { - return encodeToBase64(await window.crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW")); + return encodeToBase64(await crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW")); }; export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: CryptoKey) => { return { - hmacSecret: await window.crypto.subtle.unwrapKey( + hmacSecret: await crypto.subtle.unwrapKey( "raw", decodeFromBase64(hmacSecretWrapped), masterKey, @@ -70,15 +77,15 @@ export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: Cry name: "HMAC", hash: "SHA-256", } satisfies HmacImportParams, - false, // Nonextractable + true, // Extractable ["sign", "verify"], ), }; }; export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => { - const iv = window.crypto.getRandomValues(new Uint8Array(12)); - const ciphertext = await window.crypto.subtle.encrypt( + const iv = crypto.getRandomValues(new Uint8Array(12)); + const ciphertext = await crypto.subtle.encrypt( { name: "AES-GCM", iv, @@ -86,7 +93,7 @@ export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => { dataKey, data, ); - return { ciphertext, iv: encodeToBase64(iv.buffer) }; + return { ciphertext, iv: iv.buffer }; }; export const decryptData = async ( @@ -94,7 +101,7 @@ export const decryptData = async ( iv: string | BufferSource, dataKey: CryptoKey, ) => { - return await window.crypto.subtle.decrypt( + return await crypto.subtle.decrypt( { name: "AES-GCM", iv: typeof iv === "string" ? 
+export const encryptChunk = async (chunk: ArrayBuffer, dataKey: CryptoKey) => {
+  const { ciphertext, iv } = await encryptData(chunk, dataKey);
+  return concatenateBuffers(iv, ciphertext).buffer;
+};
+
+export const decryptChunk = async (encryptedChunk: ArrayBuffer, dataKey: CryptoKey) => {
+  return await decryptData(
+    encryptedChunk.slice(AES_GCM_IV_SIZE),
+    encryptedChunk.slice(0, AES_GCM_IV_SIZE),
+    dataKey,
+  );
+};
diff --git a/src/lib/modules/crypto/index.ts b/src/lib/modules/crypto/index.ts
index e6972ba..e3c27a7 100644
--- a/src/lib/modules/crypto/index.ts
+++ b/src/lib/modules/crypto/index.ts
@@ -1,4 +1,4 @@
 export * from "./aes";
 export * from "./rsa";
 export * from "./sha";
-export * from "./util";
+export * from "./utils";
diff --git a/src/lib/modules/crypto/rsa.ts b/src/lib/modules/crypto/rsa.ts
index 13dfd46..78e17db 100644
--- a/src/lib/modules/crypto/rsa.ts
+++ b/src/lib/modules/crypto/rsa.ts
@@ -1,7 +1,7 @@
-import { encodeString, encodeToBase64, decodeFromBase64 } from "./util";
+import { encodeString, encodeToBase64, decodeFromBase64 } from "./utils";
 
 export const generateEncryptionKeyPair = async () => {
-  const keyPair = await window.crypto.subtle.generateKey(
+  const keyPair = await crypto.subtle.generateKey(
     {
       name: "RSA-OAEP",
       modulusLength: 4096,
@@ -18,7 +18,7 @@ export const generateEncryptionKeyPair = async () => {
 };
 
 export const generateSigningKeyPair = async () => {
-  const keyPair = await window.crypto.subtle.generateKey(
+  const keyPair = await crypto.subtle.generateKey(
     {
       name: "RSA-PSS",
       modulusLength: 4096,
@@ -37,7 +37,7 @@ export const generateSigningKeyPair = async () => {
 
 export const exportRSAKey = async (key: CryptoKey) => {
   const format = key.type === "public" ? ("spki" as const) : ("pkcs8" as const);
("spki" as const) : ("pkcs8" as const); return { - key: await window.crypto.subtle.exportKey(format, key), + key: await crypto.subtle.exportKey(format, key), format, }; }; @@ -54,14 +54,14 @@ export const importEncryptionKeyPairFromBase64 = async ( name: "RSA-OAEP", hash: "SHA-256", }; - const encryptKey = await window.crypto.subtle.importKey( + const encryptKey = await crypto.subtle.importKey( "spki", decodeFromBase64(encryptKeyBase64), algorithm, true, ["encrypt", "wrapKey"], ); - const decryptKey = await window.crypto.subtle.importKey( + const decryptKey = await crypto.subtle.importKey( "pkcs8", decodeFromBase64(decryptKeyBase64), algorithm, @@ -79,14 +79,14 @@ export const importSigningKeyPairFromBase64 = async ( name: "RSA-PSS", hash: "SHA-256", }; - const signKey = await window.crypto.subtle.importKey( + const signKey = await crypto.subtle.importKey( "pkcs8", decodeFromBase64(signKeyBase64), algorithm, true, ["sign"], ); - const verifyKey = await window.crypto.subtle.importKey( + const verifyKey = await crypto.subtle.importKey( "spki", decodeFromBase64(verifyKeyBase64), algorithm, @@ -98,17 +98,11 @@ export const importSigningKeyPairFromBase64 = async ( export const makeRSAKeyNonextractable = async (key: CryptoKey) => { const { key: exportedKey, format } = await exportRSAKey(key); - return await window.crypto.subtle.importKey( - format, - exportedKey, - key.algorithm, - false, - key.usages, - ); + return await crypto.subtle.importKey(format, exportedKey, key.algorithm, false, key.usages); }; export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey) => { - return await window.crypto.subtle.decrypt( + return await crypto.subtle.decrypt( { name: "RSA-OAEP", } satisfies RsaOaepParams, @@ -119,7 +113,7 @@ export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey) export const wrapMasterKey = async (masterKey: CryptoKey, encryptKey: CryptoKey) => { return encodeToBase64( - await window.crypto.subtle.wrapKey("raw", masterKey, encryptKey, { + await crypto.subtle.wrapKey("raw", masterKey, encryptKey, { name: "RSA-OAEP", } satisfies RsaOaepParams), ); @@ -131,7 +125,7 @@ export const unwrapMasterKey = async ( extractable = false, ) => { return { - masterKey: await window.crypto.subtle.unwrapKey( + masterKey: await crypto.subtle.unwrapKey( "raw", decodeFromBase64(masterKeyWrapped), decryptKey, @@ -146,7 +140,7 @@ export const unwrapMasterKey = async ( }; export const signMessageRSA = async (message: BufferSource, signKey: CryptoKey) => { - return await window.crypto.subtle.sign( + return await crypto.subtle.sign( { name: "RSA-PSS", saltLength: 32, // SHA-256 @@ -161,7 +155,7 @@ export const verifySignatureRSA = async ( signature: BufferSource, verifyKey: CryptoKey, ) => { - return await window.crypto.subtle.verify( + return await crypto.subtle.verify( { name: "RSA-PSS", saltLength: 32, // SHA-256 diff --git a/src/lib/modules/crypto/sha.ts b/src/lib/modules/crypto/sha.ts index 3acb258..61c2ed7 100644 --- a/src/lib/modules/crypto/sha.ts +++ b/src/lib/modules/crypto/sha.ts @@ -1,10 +1,13 @@ +import { hmac } from "@noble/hashes/hmac.js"; +import { sha256 } from "@noble/hashes/sha2.js"; + export const digestMessage = async (message: BufferSource) => { - return await window.crypto.subtle.digest("SHA-256", message); + return await crypto.subtle.digest("SHA-256", message); }; export const generateHmacSecret = async () => { return { - hmacSecret: await window.crypto.subtle.generateKey( + hmacSecret: await crypto.subtle.generateKey( { name: "HMAC", hash: 
"SHA-256", @@ -15,6 +18,10 @@ export const generateHmacSecret = async () => { }; }; -export const signMessageHmac = async (message: BufferSource, hmacSecret: CryptoKey) => { - return await window.crypto.subtle.sign("HMAC", hmacSecret, message); +export const createHmacStream = async (hmacSecret: CryptoKey) => { + const h = hmac.create(sha256, new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret))); + return { + update: (data: Uint8Array) => h.update(data), + digest: () => h.digest(), + }; }; diff --git a/src/lib/modules/crypto/util.ts b/src/lib/modules/crypto/utils.ts similarity index 82% rename from src/lib/modules/crypto/util.ts rename to src/lib/modules/crypto/utils.ts index a3e3bc0..215eaf2 100644 --- a/src/lib/modules/crypto/util.ts +++ b/src/lib/modules/crypto/utils.ts @@ -9,8 +9,8 @@ export const decodeString = (data: ArrayBuffer) => { return textDecoder.decode(data); }; -export const encodeToBase64 = (data: ArrayBuffer) => { - return btoa(String.fromCharCode(...new Uint8Array(data))); +export const encodeToBase64 = (data: ArrayBuffer | Uint8Array) => { + return btoa(String.fromCharCode(...(data instanceof ArrayBuffer ? new Uint8Array(data) : data))); }; export const decodeFromBase64 = (data: string) => { diff --git a/src/lib/modules/file/download.svelte.ts b/src/lib/modules/file/download.svelte.ts index 97f42ea..d438e3f 100644 --- a/src/lib/modules/file/download.svelte.ts +++ b/src/lib/modules/file/download.svelte.ts @@ -1,6 +1,7 @@ import axios from "axios"; import { limitFunction } from "p-limit"; -import { decryptData } from "$lib/modules/crypto"; +import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants"; +import { decryptChunk, concatenateBuffers } from "$lib/modules/crypto"; export interface FileDownloadState { id: number; @@ -62,15 +63,24 @@ const requestFileDownload = limitFunction( ); const decryptFile = limitFunction( - async (state: FileDownloadState, fileEncrypted: ArrayBuffer, dataKey: CryptoKey) => { + async ( + state: FileDownloadState, + fileEncrypted: ArrayBuffer, + encryptedChunkSize: number, + dataKey: CryptoKey, + ) => { state.status = "decrypting"; - const fileBuffer = await decryptData( - fileEncrypted.slice(12), - fileEncrypted.slice(0, 12), - dataKey, - ); + const chunks: ArrayBuffer[] = []; + let offset = 0; + while (offset < fileEncrypted.byteLength) { + const nextOffset = Math.min(offset + encryptedChunkSize, fileEncrypted.byteLength); + chunks.push(await decryptChunk(fileEncrypted.slice(offset, nextOffset), dataKey)); + offset = nextOffset; + } + + const fileBuffer = concatenateBuffers(...chunks).buffer; state.status = "decrypted"; state.result = fileBuffer; return fileBuffer; @@ -78,7 +88,7 @@ const decryptFile = limitFunction( { concurrency: 4 }, ); -export const downloadFile = async (id: number, dataKey: CryptoKey) => { +export const downloadFile = async (id: number, dataKey: CryptoKey, isLegacy: boolean) => { downloadingFiles.push({ id, status: "download-pending", @@ -86,7 +96,13 @@ export const downloadFile = async (id: number, dataKey: CryptoKey) => { const state = downloadingFiles.at(-1)!; try { - return await decryptFile(state, await requestFileDownload(state, id), dataKey); + const fileEncrypted = await requestFileDownload(state, id); + return await decryptFile( + state, + fileEncrypted, + isLegacy ? 
+    return await decryptFile(
+      state,
+      fileEncrypted,
+      isLegacy ? fileEncrypted.byteLength : CHUNK_SIZE + ENCRYPTION_OVERHEAD,
+      dataKey,
+    );
   } catch (e) {
     state.status = "error";
     throw e;
diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts
index a632eb5..4bea638 100644
--- a/src/lib/modules/file/upload.svelte.ts
+++ b/src/lib/modules/file/upload.svelte.ts
@@ -1,24 +1,21 @@
-import axios from "axios";
 import ExifReader from "exifreader";
-import { limitFunction } from "p-limit";
+import pLimit, { limitFunction } from "p-limit";
+import { CHUNK_SIZE } from "$lib/constants";
 import {
   encodeToBase64,
   generateDataKey,
   wrapDataKey,
   encryptData,
   encryptString,
+  encryptChunk,
   digestMessage,
-  signMessageHmac,
+  createHmacStream,
 } from "$lib/modules/crypto";
 import { Scheduler } from "$lib/modules/scheduler";
-import { generateThumbnail } from "$lib/modules/thumbnail";
-import type {
-  FileThumbnailUploadRequest,
-  FileUploadRequest,
-  FileUploadResponse,
-} from "$lib/server/schemas";
+import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail";
 import type { MasterKey, HmacSecret } from "$lib/stores";
 import { trpc } from "$trpc/client";
+import type { RouterInputs } from "$trpc/router.server";
 
 export interface FileUploadState {
   name: string;
@@ -42,7 +39,7 @@ export type LiveFileUploadState = FileUploadState & {
 };
 
 const scheduler = new Scheduler<
-  { fileId: number; fileBuffer: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined
+  { fileId: number; fileBuffer?: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined
 >();
 
 let uploadingFiles: FileUploadState[] = $state([]);
@@ -62,15 +59,22 @@ export const clearUploadedFiles = () => {
 
 const requestDuplicateFileScan = limitFunction(
   async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise<boolean>) => {
-    const fileBuffer = await file.arrayBuffer();
-    const fileSigned = encodeToBase64(await signMessageHmac(fileBuffer, hmacSecret.secret));
+    const hmacStream = await createHmacStream(hmacSecret.secret);
+    const reader = file.stream().getReader();
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      hmacStream.update(value);
+    }
+
+    const fileSigned = encodeToBase64(hmacStream.digest());
     const files = await trpc().file.listByHash.query({
       hskVersion: hmacSecret.version,
       contentHmac: fileSigned,
     });
 
     if (files.length === 0 || (await onDuplicate())) {
-      return { fileBuffer, fileSigned };
+      return { fileSigned };
     } else {
       return {};
     }
@@ -110,29 +114,40 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
   return new Date(utcDate - offsetMs);
 };
 
-const encryptFile = limitFunction(
-  async (state: FileUploadState, file: File, fileBuffer: ArrayBuffer, masterKey: MasterKey) => {
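+// Encrypt the buffered file CHUNK_SIZE bytes at a time; each chunk gets its
+// own IV and auth tag, plus a SHA-256 digest for the Content-Digest header.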
+const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
+  const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = [];
+  let offset = 0;
+
+  while (offset < fileBuffer.byteLength) {
+    const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength);
+    const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey);
+    chunksEncrypted.push({
+      chunkEncrypted: chunkEncrypted,
+      chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)),
+    });
+    offset = nextOffset;
+  }
+
+  return chunksEncrypted;
+};
+
+const encryptImageFile = limitFunction(
+  async (state: FileUploadState, file: File, masterKey: MasterKey) => {
     state.status = "encrypting";
 
-    const fileType = getFileType(file);
-
-    let createdAt;
-    if (fileType.startsWith("image/")) {
-      createdAt = extractExifDateTime(fileBuffer);
-    }
+    const fileBuffer = await file.arrayBuffer();
+    const createdAt = extractExifDateTime(fileBuffer);
 
     const { dataKey, dataKeyVersion } = await generateDataKey();
     const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
-
-    const fileEncrypted = await encryptData(fileBuffer, dataKey);
-    const fileEncryptedHash = encodeToBase64(await digestMessage(fileEncrypted.ciphertext));
+    const chunksEncrypted = await encryptChunks(fileBuffer, dataKey);
 
     const nameEncrypted = await encryptString(file.name, dataKey);
     const createdAtEncrypted =
       createdAt && (await encryptString(createdAt.getTime().toString(), dataKey));
     const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey);
 
-    const thumbnail = await generateThumbnail(fileBuffer, fileType);
+    const thumbnail = await generateThumbnail(fileBuffer, getFileType(file));
     const thumbnailBuffer = await thumbnail?.arrayBuffer();
     const thumbnailEncrypted = thumbnailBuffer && (await encryptData(thumbnailBuffer, dataKey));
 
@@ -141,9 +156,7 @@ const encryptFile = limitFunction(
     return {
       dataKeyWrapped,
       dataKeyVersion,
-      fileType,
-      fileEncrypted,
-      fileEncryptedHash,
+      chunksEncrypted,
       nameEncrypted,
       createdAtEncrypted,
       lastModifiedAtEncrypted,
@@ -153,35 +166,238 @@ const encryptFile = limitFunction(
   { concurrency: 4 },
 );
 
-const requestFileUpload = limitFunction(
-  async (state: FileUploadState, form: FormData, thumbnailForm: FormData | null) => {
+const uploadThumbnail = async (
+  fileId: number,
+  thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer },
+  dataKeyVersion: Date,
+) => {
+  const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({
+    file: fileId,
+    dekVersion: dataKeyVersion,
+  });
+
+  const ivAndCiphertext = new Uint8Array(
+    thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength,
+  );
+  ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0);
+  ivAndCiphertext.set(
+    new Uint8Array(thumbnailEncrypted.ciphertext),
+    thumbnailEncrypted.iv.byteLength,
+  );
+
+  const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext));
+
+  const response = await fetch(`/api/upload/${uploadId}/chunks/0`, {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/octet-stream",
+      "Content-Digest": `sha-256=:${chunkHash}:`,
+    },
+    body: ivAndCiphertext,
+  });
+
+  if (!response.ok) {
+    throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`);
+  }
+
+  await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId });
+};
+
+const requestImageFileUpload = limitFunction(
+  async (
+    state: FileUploadState,
+    metadata: RouterInputs["upload"]["startFileUpload"],
+    chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[],
+    fileSigned: string | undefined,
+    thumbnailData: { ciphertext: ArrayBuffer; iv: ArrayBuffer; plaintext: ArrayBuffer } | null,
+    dataKeyVersion: Date,
+  ) => {
     state.status = "uploading";
 
-    const res = await axios.post("/api/file/upload", form, {
-      onUploadProgress: ({ progress, rate, estimated }) => {
-        state.progress = progress;
-        state.rate = rate;
-        state.estimated = estimated;
-      },
-    });
-    const { file }: FileUploadResponse = res.data;
+    const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata);
 
-    if (thumbnailForm) {
+    const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0);
+    let uploadedBytes = 0;
+    const startTime = Date.now();
+
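+    // Chunks are uploaded sequentially; each request carries a sha-256
+    // Content-Digest header so the server can verify the payload.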
+    for (let i = 0; i < chunksEncrypted.length; i++) {
+      const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!;
+
+      const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/octet-stream",
+          "Content-Digest": `sha-256=:${chunkEncryptedHash}:`,
+        },
+        body: chunkEncrypted,
+      });
+
+      if (!response.ok) {
+        throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
+      }
+
+      uploadedBytes += chunkEncrypted.byteLength;
+
+      const elapsed = (Date.now() - startTime) / 1000;
+      const rate = uploadedBytes / elapsed;
+      const remaining = totalBytes - uploadedBytes;
+      const estimated = rate > 0 ? remaining / rate : undefined;
+
+      state.progress = uploadedBytes / totalBytes;
+      state.rate = rate;
+      state.estimated = estimated;
+    }
+
+    const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
+      uploadId,
+      contentHmac: fileSigned,
+    });
+
+    if (thumbnailData) {
       try {
-        await axios.post(`/api/file/${file}/thumbnail/upload`, thumbnailForm);
+        await uploadThumbnail(fileId, thumbnailData, dataKeyVersion);
       } catch (e) {
-        // TODO
+        // TODO: Error handling for thumbnail upload
        console.error(e);
       }
     }
 
     state.status = "uploaded";
-    return { fileId: file };
+    return { fileId, thumbnailBuffer: thumbnailData?.plaintext };
   },
   { concurrency: 1 },
 );
 
+const requestFileUpload = async (
+  state: FileUploadState,
+  file: File,
+  masterKey: MasterKey,
+  hmacSecret: HmacSecret,
+  fileSigned: string,
+  parentId: DirectoryId,
+) => {
+  state.status = "uploading";
+
+  const fileType = getFileType(file);
+  const { dataKey, dataKeyVersion } = await generateDataKey();
+  const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
+
+  const nameEncrypted = await encryptString(file.name, dataKey);
+  const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey);
+
+  const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
+  const metadata = {
+    chunks: totalChunks,
+    parent: parentId,
+    mekVersion: masterKey.version,
+    dek: dataKeyWrapped,
+    dekVersion: dataKeyVersion,
+    hskVersion: hmacSecret.version,
+    contentType: fileType,
+    name: nameEncrypted.ciphertext,
+    nameIv: nameEncrypted.iv,
+    lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
+    lastModifiedAtIv: lastModifiedAtEncrypted.iv,
+  };
+
+  const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata);
+
+  const reader = file.stream().getReader();
+  const limit = pLimit(4);
+  let buffer = new Uint8Array(0);
+  let chunkIndex = 0;
+  const uploadPromises: Promise<void>[] = [];
+
+  const totalBytes = file.size;
+  let uploadedBytes = 0;
+  const startTime = Date.now();
+
+  const uploadChunk = async (
+    index: number,
+    encryptedChunk: ArrayBuffer,
+    chunkHash: string,
+    originalChunkSize: number,
+  ) => {
+    const response = await fetch(`/api/upload/${uploadId}/chunks/${index}`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/octet-stream",
+        "Content-Digest": `sha-256=:${chunkHash}:`,
+      },
+      body: encryptedChunk,
+    });
+
+    if (!response.ok) {
+      throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
+    }
+
+    uploadedBytes += originalChunkSize;
+    const elapsed = (Date.now() - startTime) / 1000;
+    const rate = uploadedBytes / elapsed;
+    const remaining = totalBytes - uploadedBytes;
+    const estimated = rate > 0 ? remaining / rate : undefined;
+
+    state.progress = uploadedBytes / totalBytes;
+    state.rate = rate;
+    state.estimated = estimated;
+  };
+
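+  // Accumulate stream reads until a full CHUNK_SIZE slice is available, then
+  // encrypt and upload it (at most 4 requests in flight); the final partial
+  // chunk is flushed once the reader reports done.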
+  while (true) {
+    const { done, value } = await reader.read();
+    if (done && buffer.length === 0) break;
+
+    if (value) {
+      const newBuffer = new Uint8Array(buffer.length + value.length);
+      newBuffer.set(buffer);
+      newBuffer.set(value, buffer.length);
+      buffer = newBuffer;
+    }
+
+    while (buffer.length >= CHUNK_SIZE || (done && buffer.length > 0)) {
+      const chunkSize = Math.min(CHUNK_SIZE, buffer.length);
+      const chunk = buffer.slice(0, chunkSize);
+      buffer = buffer.slice(chunkSize);
+
+      const encryptedChunk = await encryptChunk(chunk.buffer.slice(0, chunk.byteLength), dataKey);
+      const chunkHash = encodeToBase64(await digestMessage(encryptedChunk));
+      const currentIndex = chunkIndex++;
+
+      uploadPromises.push(
+        limit(() => uploadChunk(currentIndex, encryptedChunk, chunkHash, chunkSize)),
+      );
+    }
+
+    if (done) break;
+  }
+
+  await Promise.all(uploadPromises);
+
+  const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
+    uploadId,
+    contentHmac: fileSigned,
+  });
+
+  if (fileType.startsWith("video/")) {
+    try {
+      const thumbnail = await generateThumbnailFromFile(file);
+      if (thumbnail) {
+        const thumbnailBuffer = await thumbnail.arrayBuffer();
+        const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey);
+
+        await uploadThumbnail(fileId, thumbnailEncrypted, dataKeyVersion);
+      }
+    } catch (e) {
+      // Thumbnail upload failure is not critical
+      console.error(e);
+    }
+  }
+
+  state.status = "uploaded";
+
+  return { fileId };
+};
+
 export const uploadFile = async (
   file: File,
   parentId: "root" | number,
@@ -200,67 +416,62 @@ export const uploadFile = async (
   state.status = "encryption-pending";
 
   try {
-    const { fileBuffer, fileSigned } = await requestDuplicateFileScan(
-      file,
-      hmacSecret,
-      onDuplicate,
-    );
-    if (!fileBuffer || !fileSigned) {
+    const { fileSigned } = await requestDuplicateFileScan(file, hmacSecret, onDuplicate);
+    if (!fileSigned) {
       state.status = "canceled";
       uploadingFiles = uploadingFiles.filter((file) => file !== state);
-      return undefined;
+      return;
     }
 
-    const {
-      dataKeyWrapped,
-      dataKeyVersion,
-      fileType,
-      fileEncrypted,
-      fileEncryptedHash,
-      nameEncrypted,
-      createdAtEncrypted,
-      lastModifiedAtEncrypted,
-      thumbnail,
-    } = await encryptFile(state, file, fileBuffer, masterKey);
+    const fileType = getFileType(file);
+    if (fileType.startsWith("image/")) {
+      const fileBuffer = await file.arrayBuffer();
+      const {
+        dataKeyWrapped,
+        dataKeyVersion,
+        chunksEncrypted,
+        nameEncrypted,
+        createdAtEncrypted,
+        lastModifiedAtEncrypted,
+        thumbnail,
+      } = await encryptImageFile(state, file, masterKey);
 
-    const form = new FormData();
-    form.set(
-      "metadata",
-      JSON.stringify({
+      const metadata = {
+        chunks: chunksEncrypted.length,
         parent: parentId,
         mekVersion: masterKey.version,
         dek: dataKeyWrapped,
-        dekVersion: dataKeyVersion.toISOString(),
+        dekVersion: dataKeyVersion,
         hskVersion: hmacSecret.version,
-        contentHmac: fileSigned,
         contentType: fileType,
-        contentIv: fileEncrypted.iv,
         name: nameEncrypted.ciphertext,
         nameIv: nameEncrypted.iv,
         createdAt: createdAtEncrypted?.ciphertext,
         createdAtIv: createdAtEncrypted?.iv,
         lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
         lastModifiedAtIv: lastModifiedAtEncrypted.iv,
-      } satisfies FileUploadRequest),
-    );
-    form.set("content", new Blob([fileEncrypted.ciphertext]));
-    form.set("checksum", fileEncryptedHash);
+      };
 
-    let thumbnailForm = null;
-    if (thumbnail) {
-      thumbnailForm = new FormData();
-      thumbnailForm.set(
-        "metadata",
-        JSON.stringify({
-          dekVersion: dataKeyVersion.toISOString(),
-          contentIv: thumbnail.iv,
-        } satisfies FileThumbnailUploadRequest),
+      const { fileId, thumbnailBuffer } = await requestImageFileUpload(
+        state,
+        metadata,
+        chunksEncrypted,
+        fileSigned,
+        thumbnail ?? null,
+        dataKeyVersion,
       );
-      thumbnailForm.set("content", new Blob([thumbnail.ciphertext]));
+      return { fileId, fileBuffer, thumbnailBuffer };
+    } else {
+      const { fileId } = await requestFileUpload(
+        state,
+        file,
+        masterKey,
+        hmacSecret,
+        fileSigned,
+        parentId,
+      );
+      return { fileId };
     }
-
-    const { fileId } = await requestFileUpload(state, form, thumbnailForm);
-    return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext };
   } catch (e) {
     state.status = "error";
     throw e;
diff --git a/src/lib/modules/filesystem/file.ts b/src/lib/modules/filesystem/file.ts
index daf7fd6..d80a872 100644
--- a/src/lib/modules/filesystem/file.ts
+++ b/src/lib/modules/filesystem/file.ts
@@ -47,6 +47,7 @@ const cache = new FilesystemCache({
 
     return storeToIndexedDB({
       id,
+      isLegacy: file.isLegacy,
       parentId: file.parent,
       dataKey: metadata.dataKey,
       contentType: file.contentType,
@@ -115,6 +116,7 @@ const cache = new FilesystemCache({
     return {
       id,
       exists: true as const,
+      isLegacy: metadataRaw.isLegacy,
       parentId: metadataRaw.parent,
      contentType: metadataRaw.contentType,
       categories,
diff --git a/src/lib/modules/filesystem/types.ts b/src/lib/modules/filesystem/types.ts
index abac40c..f4ce9cf 100644
--- a/src/lib/modules/filesystem/types.ts
+++ b/src/lib/modules/filesystem/types.ts
@@ -28,6 +28,7 @@ export type SubDirectoryInfo = Omit
+type SerializedClientKeys = z.infer<typeof SerializedClientKeysSchema>;
 
 type DeserializedClientKeys = {
   encryptKeyBase64: string;
@@ -43,7 +43,7 @@ export const serializeClientKeys = ({
 };
 
 export const deserializeClientKeys = (serialized: string) => {
-  const zodRes = serializedClientKeysSchema.safeParse(JSON.parse(serialized));
+  const zodRes = SerializedClientKeysSchema.safeParse(JSON.parse(serialized));
   if (zodRes.success) {
     return {
       encryptKeyBase64: zodRes.data.encryptKey,
diff --git a/src/lib/modules/opfs.ts b/src/lib/modules/opfs.ts
index 41f1f72..a367aae 100644
--- a/src/lib/modules/opfs.ts
+++ b/src/lib/modules/opfs.ts
@@ -1,13 +1,5 @@
-let rootHandle: FileSystemDirectoryHandle | null = null;
-
-export const prepareOpfs = async () => {
-  rootHandle = await navigator.storage.getDirectory();
-};
-
 const getFileHandle = async (path: string, create = true) => {
-  if (!rootHandle) {
-    throw new Error("OPFS not prepared");
-  } else if (path[0] !== "/") {
+  if (path[0] !== "/") {
     throw new Error("Path must be absolute");
   }
 
@@ -17,7 +9,7 @@ const getFileHandle = async (path: string, create = true) => {
   }
 
   try {
-    let directoryHandle = rootHandle;
+    let directoryHandle = await navigator.storage.getDirectory();
     for (const part of parts.slice(0, -1)) {
       if (!part) continue;
       directoryHandle = await directoryHandle.getDirectoryHandle(part, { create });
@@ -34,12 +26,15 @@ const getFileHandle = async (path: string, create = true) => {
   }
 };
 
-export const readFile = async (path: string) => {
+export const getFile = async (path: string) => {
   const { fileHandle } = await getFileHandle(path, false);
   if (!fileHandle) return null;
 
-  const file = await fileHandle.getFile();
-  return await file.arrayBuffer();
+  return await fileHandle.getFile();
+};
+
+export const readFile = async (path: string) => {
+  return (await getFile(path))?.arrayBuffer() ?? null;
 };
 
 export const writeFile = async (path: string, data: ArrayBuffer) => {
@@ -61,9 +56,7 @@ export const deleteFile = async (path: string) => {
 };
 
 const getDirectoryHandle = async (path: string) => {
-  if (!rootHandle) {
-    throw new Error("OPFS not prepared");
-  } else if (path[0] !== "/") {
+  if (path[0] !== "/") {
     throw new Error("Path must be absolute");
   }
 
@@ -73,7 +66,7 @@ const getDirectoryHandle = async (path: string) => {
   }
 
   try {
-    let directoryHandle = rootHandle;
+    let directoryHandle = await navigator.storage.getDirectory();
     let parentHandle;
     for (const part of parts.slice(1)) {
       if (!part) continue;
diff --git a/src/lib/modules/thumbnail.ts b/src/lib/modules/thumbnail.ts
index d9a995b..75b0168 100644
--- a/src/lib/modules/thumbnail.ts
+++ b/src/lib/modules/thumbnail.ts
@@ -122,6 +122,22 @@ export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: strin
   }
 };
 
+export const generateThumbnailFromFile = async (file: File) => {
+  if (!file.type.startsWith("video/")) return null;
+
+  let url;
+  try {
+    url = URL.createObjectURL(file);
+    return await generateVideoThumbnail(url);
+  } catch {
+    return null;
+  } finally {
+    if (url) {
+      URL.revokeObjectURL(url);
+    }
+  }
+};
+
 export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => {
   return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`;
 };
diff --git a/src/lib/schemas/filesystem.ts b/src/lib/schemas/filesystem.ts
new file mode 100644
index 0000000..d3a45f4
--- /dev/null
+++ b/src/lib/schemas/filesystem.ts
@@ -0,0 +1,4 @@
+import { z } from "zod";
+
+export const DirectoryIdSchema = z.union([z.literal("root"), z.int().positive()]);
+export const CategoryIdSchema = z.union([z.literal("root"), z.int().positive()]);
diff --git a/src/lib/schemas/index.ts b/src/lib/schemas/index.ts
new file mode 100644
index 0000000..7d29e5d
--- /dev/null
+++ b/src/lib/schemas/index.ts
@@ -0,0 +1 @@
+export * from "./filesystem";
diff --git a/src/lib/server/db/file.ts b/src/lib/server/db/file.ts
index 472930a..0418bc6 100644
--- a/src/lib/server/db/file.ts
+++ b/src/lib/server/db/file.ts
@@ -15,8 +15,6 @@ interface Directory {
   encName: Ciphertext;
 }
 
-export type NewDirectory = Omit<Directory, "id">;
-
 interface File {
   id: number;
   parentId: DirectoryId;
@@ -28,15 +26,13 @@ interface File {
   hskVersion: number | null;
   contentHmac: string | null;
   contentType: string;
-  encContentIv: string;
+  encContentIv: string | null;
   encContentHash: string;
   encName: Ciphertext;
   encCreatedAt: Ciphertext | null;
   encLastModifiedAt: Ciphertext;
 }
 
-export type NewFile = Omit<File, "id">;
-
 interface FileCategory {
   id: number;
   parentId: CategoryId;
@@ -46,7 +42,7 @@ interface FileCategory {
   encName: Ciphertext;
 }
 
-export const registerDirectory = async (params: NewDirectory) => {
+export const registerDirectory = async (params: Omit<Directory, "id">) => {
   await db.transaction().execute(async (trx) => {
     const mek = await trx
       .selectFrom("master_encryption_key")
       .select("version")
       .where("user_id", "=", params.userId)
       .where("state", "=", "active")
       .limit(1)
       .forUpdate()
       .executeTakeFirst();
@@ -214,69 +210,41 @@ export const unregisterDirectory = async (userId: number, directoryId: number) =
   });
 };
 
-export const registerFile = async (params: NewFile) => {
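+// Runs inside the caller's transaction; the MEK/HSK freshness checks that used
+// to live here are performed when the upload session is created instead.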
+export const registerFile = async (trx: typeof db, params: Omit<File, "id">) => {
   if ((params.hskVersion && !params.contentHmac) || (!params.hskVersion && params.contentHmac)) {
     throw new Error("Invalid arguments");
   }
 
-  return await db.transaction().execute(async (trx) => {
-    const mek = await trx
-      .selectFrom("master_encryption_key")
-      .select("version")
-      .where("user_id", "=", params.userId)
-      .where("state", "=", "active")
-      .limit(1)
-      .forUpdate()
-      .executeTakeFirst();
-    if (mek?.version !== params.mekVersion) {
-      throw new IntegrityError("Inactive MEK version");
-    }
-
-    if (params.hskVersion) {
-      const hsk = await trx
-        .selectFrom("hmac_secret_key")
-        .select("version")
-        .where("user_id", "=", params.userId)
-        .where("state", "=", "active")
-        .limit(1)
-        .forUpdate()
-        .executeTakeFirst();
-      if (hsk?.version !== params.hskVersion) {
-        throw new IntegrityError("Inactive HSK version");
-      }
-    }
-
-    const { fileId } = await trx
-      .insertInto("file")
-      .values({
-        parent_id: params.parentId !== "root" ? params.parentId : null,
-        user_id: params.userId,
-        path: params.path,
-        master_encryption_key_version: params.mekVersion,
-        encrypted_data_encryption_key: params.encDek,
-        data_encryption_key_version: params.dekVersion,
-        hmac_secret_key_version: params.hskVersion,
-        content_hmac: params.contentHmac,
-        content_type: params.contentType,
-        encrypted_content_iv: params.encContentIv,
-        encrypted_content_hash: params.encContentHash,
-        encrypted_name: params.encName,
-        encrypted_created_at: params.encCreatedAt,
-        encrypted_last_modified_at: params.encLastModifiedAt,
-      })
-      .returning("id as fileId")
-      .executeTakeFirstOrThrow();
-    await trx
-      .insertInto("file_log")
-      .values({
-        file_id: fileId,
-        timestamp: new Date(),
-        action: "create",
-        new_name: params.encName,
-      })
-      .execute();
-    return { id: fileId };
-  });
+  const { fileId } = await trx
+    .insertInto("file")
+    .values({
+      parent_id: params.parentId !== "root" ? params.parentId : null,
+      user_id: params.userId,
+      path: params.path,
+      master_encryption_key_version: params.mekVersion,
+      encrypted_data_encryption_key: params.encDek,
+      data_encryption_key_version: params.dekVersion,
+      hmac_secret_key_version: params.hskVersion,
+      content_hmac: params.contentHmac,
+      content_type: params.contentType,
+      encrypted_content_iv: params.encContentIv,
+      encrypted_content_hash: params.encContentHash,
+      encrypted_name: params.encName,
+      encrypted_created_at: params.encCreatedAt,
+      encrypted_last_modified_at: params.encLastModifiedAt,
+    })
+    .returning("id as fileId")
+    .executeTakeFirstOrThrow();
+  await trx
+    .insertInto("file_log")
+    .values({
+      file_id: fileId,
+      timestamp: new Date(),
+      action: "create",
+      new_name: params.encName,
+    })
+    .execute();
+  return { id: fileId };
 };
 
 export const getAllFilesByParent = async (userId: number, parentId: DirectoryId) => {
diff --git a/src/lib/server/db/index.ts b/src/lib/server/db/index.ts
index 5c21deb..140cf7d 100644
--- a/src/lib/server/db/index.ts
+++ b/src/lib/server/db/index.ts
@@ -5,6 +5,7 @@ export * as HskRepo from "./hsk";
 export * as MediaRepo from "./media";
 export * as MekRepo from "./mek";
 export * as SessionRepo from "./session";
+export * as UploadRepo from "./upload";
 export * as UserRepo from "./user";
 
 export * from "./error";
diff --git a/src/lib/server/db/media.ts b/src/lib/server/db/media.ts
index 209e256..3e165c0 100644
--- a/src/lib/server/db/media.ts
+++ b/src/lib/server/db/media.ts
@@ -6,7 +6,7 @@ interface Thumbnail {
   id: number;
   path: string;
   updatedAt: Date;
-  encContentIv: string;
+  encContentIv: string | null;
 }
 
 interface FileThumbnail extends Thumbnail {
@@ -14,54 +14,53 @@ interface FileThumbnail extends Thumbnail {
 }
 
 export const updateFileThumbnail = async (
+  trx: typeof db,
   userId: number,
   fileId: number,
   dekVersion: Date,
   path: string,
-  encContentIv: string,
+  encContentIv: string | null,
 ) => {
-  return await db.transaction().execute(async (trx) => {
-    const file = await trx
-      .selectFrom("file")
.select("data_encryption_key_version") - .where("id", "=", fileId) - .where("user_id", "=", userId) - .limit(1) - .forUpdate() - .executeTakeFirst(); - if (!file) { - throw new IntegrityError("File not found"); - } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) { - throw new IntegrityError("Invalid DEK version"); - } + const file = await trx + .selectFrom("file") + .select("data_encryption_key_version") + .where("id", "=", fileId) + .where("user_id", "=", userId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (!file) { + throw new IntegrityError("File not found"); + } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) { + throw new IntegrityError("Invalid DEK version"); + } - const thumbnail = await trx - .selectFrom("thumbnail") - .select("path as oldPath") - .where("file_id", "=", fileId) - .limit(1) - .forUpdate() - .executeTakeFirst(); - const now = new Date(); + const thumbnail = await trx + .selectFrom("thumbnail") + .select("path as oldPath") + .where("file_id", "=", fileId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + const now = new Date(); - await trx - .insertInto("thumbnail") - .values({ - file_id: fileId, + await trx + .insertInto("thumbnail") + .values({ + file_id: fileId, + path, + updated_at: now, + encrypted_content_iv: encContentIv, + }) + .onConflict((oc) => + oc.column("file_id").doUpdateSet({ path, updated_at: now, encrypted_content_iv: encContentIv, - }) - .onConflict((oc) => - oc.column("file_id").doUpdateSet({ - path, - updated_at: now, - encrypted_content_iv: encContentIv, - }), - ) - .execute(); - return thumbnail?.oldPath ?? null; - }); + }), + ) + .execute(); + return thumbnail?.oldPath ?? null; }; export const getFileThumbnail = async (userId: number, fileId: number) => { diff --git a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts new file mode 100644 index 0000000..be6a900 --- /dev/null +++ b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts @@ -0,0 +1,63 @@ +import { Kysely, sql } from "kysely"; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export const up = async (db: Kysely) => { + // file.ts + await db.schema + .alterTable("file") + .alterColumn("encrypted_content_iv", (col) => col.dropNotNull()) + .execute(); + + // media.ts + await db.schema + .alterTable("thumbnail") + .alterColumn("encrypted_content_iv", (col) => col.dropNotNull()) + .execute(); + + // upload.ts + await db.schema + .createTable("upload_session") + .addColumn("id", "uuid", (col) => col.primaryKey()) + .addColumn("type", "text", (col) => col.notNull()) + .addColumn("user_id", "integer", (col) => col.references("user.id").notNull()) + .addColumn("path", "text", (col) => col.notNull()) + .addColumn("total_chunks", "integer", (col) => col.notNull()) + .addColumn("uploaded_chunks", sql`integer[]`, (col) => col.notNull().defaultTo(sql`'{}'`)) + .addColumn("expires_at", "timestamp(3)", (col) => col.notNull()) + .addColumn("parent_id", "integer", (col) => col.references("directory.id")) + .addColumn("master_encryption_key_version", "integer") + .addColumn("encrypted_data_encryption_key", "text") + .addColumn("data_encryption_key_version", "timestamp(3)") + .addColumn("hmac_secret_key_version", "integer") + .addColumn("content_type", "text") + .addColumn("encrypted_name", "json") + .addColumn("encrypted_created_at", "json") + .addColumn("encrypted_last_modified_at", "json") + .addColumn("file_id", "integer", (col) => 
col.references("file.id")) + .addForeignKeyConstraint( + "upload_session_fk01", + ["user_id", "master_encryption_key_version"], + "master_encryption_key", + ["user_id", "version"], + ) + .addForeignKeyConstraint( + "upload_session_fk02", + ["user_id", "hmac_secret_key_version"], + "hmac_secret_key", + ["user_id", "version"], + ) + .execute(); +}; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export const down = async (db: Kysely) => { + await db.schema.dropTable("upload_session").execute(); + await db.schema + .alterTable("thumbnail") + .alterColumn("encrypted_content_iv", (col) => col.setNotNull()) + .execute(); + await db.schema + .alterTable("file") + .alterColumn("encrypted_content_iv", (col) => col.setNotNull()) + .execute(); +}; diff --git a/src/lib/server/db/migrations/index.ts b/src/lib/server/db/migrations/index.ts index f58c2d0..ca3310a 100644 --- a/src/lib/server/db/migrations/index.ts +++ b/src/lib/server/db/migrations/index.ts @@ -1,9 +1,11 @@ import * as Initial1737357000 from "./1737357000-Initial"; import * as AddFileCategory1737422340 from "./1737422340-AddFileCategory"; import * as AddThumbnail1738409340 from "./1738409340-AddThumbnail"; +import * as AddChunkedUpload1768062380 from "./1768062380-AddChunkedUpload"; export default { "1737357000-Initial": Initial1737357000, "1737422340-AddFileCategory": AddFileCategory1737422340, "1738409340-AddThumbnail": AddThumbnail1738409340, + "1768062380-AddChunkedUpload": AddChunkedUpload1768062380, }; diff --git a/src/lib/server/db/schema/category.ts b/src/lib/server/db/schema/category.ts index 2304264..ccaba95 100644 --- a/src/lib/server/db/schema/category.ts +++ b/src/lib/server/db/schema/category.ts @@ -1,5 +1,5 @@ import type { Generated } from "kysely"; -import type { Ciphertext } from "./util"; +import type { Ciphertext } from "./utils"; interface CategoryTable { id: Generated; diff --git a/src/lib/server/db/schema/file.ts b/src/lib/server/db/schema/file.ts index a1bf9bd..0774082 100644 --- a/src/lib/server/db/schema/file.ts +++ b/src/lib/server/db/schema/file.ts @@ -1,5 +1,5 @@ import type { ColumnType, Generated } from "kysely"; -import type { Ciphertext } from "./util"; +import type { Ciphertext } from "./utils"; interface DirectoryTable { id: Generated; @@ -30,7 +30,7 @@ interface FileTable { hmac_secret_key_version: number | null; content_hmac: string | null; // Base64 content_type: string; - encrypted_content_iv: string; // Base64 + encrypted_content_iv: string | null; // Base64 encrypted_content_hash: string; // Base64 encrypted_name: Ciphertext; encrypted_created_at: Ciphertext | null; diff --git a/src/lib/server/db/schema/index.ts b/src/lib/server/db/schema/index.ts index 4e427fb..7a13395 100644 --- a/src/lib/server/db/schema/index.ts +++ b/src/lib/server/db/schema/index.ts @@ -5,8 +5,9 @@ export * from "./hsk"; export * from "./media"; export * from "./mek"; export * from "./session"; +export * from "./upload"; export * from "./user"; -export * from "./util"; +export * from "./utils"; // eslint-disable-next-line @typescript-eslint/no-empty-object-type export interface Database {} diff --git a/src/lib/server/db/schema/media.ts b/src/lib/server/db/schema/media.ts index ebfbf29..1fef90b 100644 --- a/src/lib/server/db/schema/media.ts +++ b/src/lib/server/db/schema/media.ts @@ -7,7 +7,7 @@ interface ThumbnailTable { category_id: number | null; path: string; updated_at: Date; - encrypted_content_iv: string; // Base64 + encrypted_content_iv: string | null; // Base64 } declare module "./index" { diff 
diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts
new file mode 100644
index 0000000..fccde36
--- /dev/null
+++ b/src/lib/server/db/schema/upload.ts
@@ -0,0 +1,32 @@
+import type { Generated } from "kysely";
+import type { Ciphertext } from "./utils";
+
+interface UploadSessionTable {
+  id: string;
+  type: "file" | "thumbnail";
+  user_id: number;
+  path: string;
+  total_chunks: number;
+  uploaded_chunks: Generated<number[]>;
+  expires_at: Date;
+
+  // For file uploads
+  parent_id: number | null;
+  master_encryption_key_version: number | null;
+  encrypted_data_encryption_key: string | null; // Base64
+  data_encryption_key_version: Date | null;
+  hmac_secret_key_version: number | null;
+  content_type: string | null;
+  encrypted_name: Ciphertext | null;
+  encrypted_created_at: Ciphertext | null;
+  encrypted_last_modified_at: Ciphertext | null;
+
+  // For thumbnail uploads
+  file_id: number | null;
+}
+
+declare module "./index" {
+  interface Database {
+    upload_session: UploadSessionTable;
+  }
+}
diff --git a/src/lib/server/db/schema/util.ts b/src/lib/server/db/schema/utils.ts
similarity index 100%
rename from src/lib/server/db/schema/util.ts
rename to src/lib/server/db/schema/utils.ts
diff --git a/src/lib/server/db/upload.ts b/src/lib/server/db/upload.ts
new file mode 100644
index 0000000..d506191
--- /dev/null
+++ b/src/lib/server/db/upload.ts
@@ -0,0 +1,185 @@
+import { sql } from "kysely";
+import { IntegrityError } from "./error";
+import db from "./kysely";
+import type { Ciphertext } from "./schema";
+
+interface BaseUploadSession {
+  id: string;
+  userId: number;
+  path: string;
+  totalChunks: number;
+  uploadedChunks: number[];
+  expiresAt: Date;
+}
+
+interface FileUploadSession extends BaseUploadSession {
+  type: "file";
+  parentId: DirectoryId;
+  mekVersion: number;
+  encDek: string;
+  dekVersion: Date;
+  hskVersion: number | null;
+  contentType: string;
+  encName: Ciphertext;
+  encCreatedAt: Ciphertext | null;
+  encLastModifiedAt: Ciphertext;
+}
+
+interface ThumbnailUploadSession extends BaseUploadSession {
+  type: "thumbnail";
+  fileId: number;
+  dekVersion: Date;
+}
+
+export const createFileUploadSession = async (
+  params: Omit<FileUploadSession, "type" | "uploadedChunks">,
+) => {
+  await db.transaction().execute(async (trx) => {
+    const mek = await trx
+      .selectFrom("master_encryption_key")
+      .select("version")
+      .where("user_id", "=", params.userId)
+      .where("state", "=", "active")
+      .limit(1)
+      .forUpdate()
+      .executeTakeFirst();
+    if (mek?.version !== params.mekVersion) {
+      throw new IntegrityError("Inactive MEK version");
+    }
+
+    if (params.hskVersion) {
+      const hsk = await trx
+        .selectFrom("hmac_secret_key")
+        .select("version")
+        .where("user_id", "=", params.userId)
+        .where("state", "=", "active")
+        .limit(1)
+        .forUpdate()
+        .executeTakeFirst();
+      if (hsk?.version !== params.hskVersion) {
+        throw new IntegrityError("Inactive HSK version");
+      }
+    }
+
+    await trx
+      .insertInto("upload_session")
+      .values({
+        id: params.id,
+        type: "file",
+        user_id: params.userId,
+        path: params.path,
+        total_chunks: params.totalChunks,
+        expires_at: params.expiresAt,
+        parent_id: params.parentId !== "root" ? params.parentId : null,
+        master_encryption_key_version: params.mekVersion,
+        encrypted_data_encryption_key: params.encDek,
+        data_encryption_key_version: params.dekVersion,
+        hmac_secret_key_version: params.hskVersion,
+        content_type: params.contentType,
+        encrypted_name: params.encName,
+        encrypted_created_at: params.encCreatedAt,
+        encrypted_last_modified_at: params.encLastModifiedAt,
+      })
+      .execute();
+  });
+};
+
+export const createThumbnailUploadSession = async (
+  params: Omit<ThumbnailUploadSession, "type" | "uploadedChunks">,
+) => {
+  await db.transaction().execute(async (trx) => {
+    const file = await trx
+      .selectFrom("file")
+      .select("data_encryption_key_version")
+      .where("id", "=", params.fileId)
+      .where("user_id", "=", params.userId)
+      .limit(1)
+      .forUpdate()
+      .executeTakeFirst();
+    if (!file) {
+      throw new IntegrityError("File not found");
+    } else if (file.data_encryption_key_version.getTime() !== params.dekVersion.getTime()) {
+      throw new IntegrityError("Invalid DEK version");
+    }
+
+    await trx
+      .insertInto("upload_session")
+      .values({
+        id: params.id,
+        type: "thumbnail",
+        user_id: params.userId,
+        path: params.path,
+        total_chunks: params.totalChunks,
+        expires_at: params.expiresAt,
+        file_id: params.fileId,
+        data_encryption_key_version: params.dekVersion,
+      })
+      .execute();
+  });
+};
+
+export const getUploadSession = async (sessionId: string, userId: number) => {
+  const session = await db
+    .selectFrom("upload_session")
+    .selectAll()
+    .where("id", "=", sessionId)
+    .where("user_id", "=", userId)
+    .where("expires_at", ">", new Date())
+    .limit(1)
+    .executeTakeFirst();
+  if (!session) {
+    return null;
+  } else if (session.type === "file") {
+    return {
+      type: "file",
+      id: session.id,
+      userId: session.user_id,
+      path: session.path,
+      totalChunks: session.total_chunks,
+      uploadedChunks: session.uploaded_chunks,
+      expiresAt: session.expires_at,
+      parentId: session.parent_id ?? "root",
+      mekVersion: session.master_encryption_key_version!,
+      encDek: session.encrypted_data_encryption_key!,
+      dekVersion: session.data_encryption_key_version!,
+      hskVersion: session.hmac_secret_key_version,
+      contentType: session.content_type!,
+      encName: session.encrypted_name!,
+      encCreatedAt: session.encrypted_created_at,
+      encLastModifiedAt: session.encrypted_last_modified_at!,
+    } satisfies FileUploadSession;
+  } else {
+    return {
+      type: "thumbnail",
+      id: session.id,
+      userId: session.user_id,
+      path: session.path,
+      totalChunks: session.total_chunks,
+      uploadedChunks: session.uploaded_chunks,
+      expiresAt: session.expires_at,
+      fileId: session.file_id!,
+      dekVersion: session.data_encryption_key_version!,
+    } satisfies ThumbnailUploadSession;
+  }
+};
+
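+// array_append does not deduplicate; uploadChunk refuses indexes already
+// present in uploadedChunks before this is called.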
"root", + mekVersion: session.master_encryption_key_version!, + encDek: session.encrypted_data_encryption_key!, + dekVersion: session.data_encryption_key_version!, + hskVersion: session.hmac_secret_key_version, + contentType: session.content_type!, + encName: session.encrypted_name!, + encCreatedAt: session.encrypted_created_at, + encLastModifiedAt: session.encrypted_last_modified_at!, + } satisfies FileUploadSession; + } else { + return { + type: "thumbnail", + id: session.id, + userId: session.user_id, + path: session.path, + totalChunks: session.total_chunks, + uploadedChunks: session.uploaded_chunks, + expiresAt: session.expires_at, + fileId: session.file_id!, + dekVersion: session.data_encryption_key_version!, + } satisfies ThumbnailUploadSession; + } +}; + +export const markChunkAsUploaded = async (sessionId: string, chunkIndex: number) => { + await db + .updateTable("upload_session") + .set({ uploaded_chunks: sql`array_append(uploaded_chunks, ${chunkIndex})` }) + .where("id", "=", sessionId) + .execute(); +}; + +export const deleteUploadSession = async (trx: typeof db, sessionId: string) => { + await trx.deleteFrom("upload_session").where("id", "=", sessionId).execute(); +}; + +export const cleanupExpiredUploadSessions = async () => { + const sessions = await db + .deleteFrom("upload_session") + .where("expires_at", "<=", new Date()) + .returning("path") + .execute(); + return sessions.map(({ path }) => path); +}; diff --git a/src/lib/server/loadenv.ts b/src/lib/server/loadenv.ts index 3a805d8..f8fd68f 100644 --- a/src/lib/server/loadenv.ts +++ b/src/lib/server/loadenv.ts @@ -26,4 +26,5 @@ export default { }, libraryPath: env.LIBRARY_PATH || "library", thumbnailsPath: env.THUMBNAILS_PATH || "thumbnails", + uploadsPath: env.UPLOADS_PATH || "uploads", }; diff --git a/src/lib/server/modules/filesystem.ts b/src/lib/server/modules/filesystem.ts index 65cb9ec..ade7d73 100644 --- a/src/lib/server/modules/filesystem.ts +++ b/src/lib/server/modules/filesystem.ts @@ -1,4 +1,10 @@ -import { unlink } from "fs/promises"; +import { rm, unlink } from "fs/promises"; + +export const safeRecursiveRm = async (path: string | null | undefined) => { + if (path) { + await rm(path, { recursive: true }).catch(console.error); + } +}; export const safeUnlink = async (path: string | null | undefined) => { if (path) { diff --git a/src/lib/server/schemas/category.ts b/src/lib/server/schemas/category.ts deleted file mode 100644 index 0bb07a7..0000000 --- a/src/lib/server/schemas/category.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { z } from "zod"; - -export const categoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/server/schemas/directory.ts b/src/lib/server/schemas/directory.ts deleted file mode 100644 index dba44b9..0000000 --- a/src/lib/server/schemas/directory.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { z } from "zod"; - -export const directoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/server/schemas/file.ts b/src/lib/server/schemas/file.ts deleted file mode 100644 index 811e590..0000000 --- a/src/lib/server/schemas/file.ts +++ /dev/null @@ -1,36 +0,0 @@ -import mime from "mime"; -import { z } from "zod"; -import { directoryIdSchema } from "./directory"; - -export const fileThumbnailUploadRequest = z.object({ - dekVersion: z.iso.datetime(), - contentIv: z.base64().nonempty(), -}); -export type FileThumbnailUploadRequest = z.input; - -export const fileUploadRequest = z.object({ - parent: directoryIdSchema, - mekVersion: 
diff --git a/src/lib/server/loadenv.ts b/src/lib/server/loadenv.ts
index 3a805d8..f8fd68f 100644
--- a/src/lib/server/loadenv.ts
+++ b/src/lib/server/loadenv.ts
@@ -26,4 +26,5 @@ export default {
   },
   libraryPath: env.LIBRARY_PATH || "library",
   thumbnailsPath: env.THUMBNAILS_PATH || "thumbnails",
+  uploadsPath: env.UPLOADS_PATH || "uploads",
 };
diff --git a/src/lib/server/modules/filesystem.ts b/src/lib/server/modules/filesystem.ts
index 65cb9ec..ade7d73 100644
--- a/src/lib/server/modules/filesystem.ts
+++ b/src/lib/server/modules/filesystem.ts
@@ -1,4 +1,10 @@
-import { unlink } from "fs/promises";
+import { rm, unlink } from "fs/promises";
+
+export const safeRecursiveRm = async (path: string | null | undefined) => {
+  if (path) {
+    await rm(path, { recursive: true }).catch(console.error);
+  }
+};
 
 export const safeUnlink = async (path: string | null | undefined) => {
   if (path) {
diff --git a/src/lib/server/schemas/category.ts b/src/lib/server/schemas/category.ts
deleted file mode 100644
index 0bb07a7..0000000
--- a/src/lib/server/schemas/category.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import { z } from "zod";
-
-export const categoryIdSchema = z.union([z.literal("root"), z.int().positive()]);
diff --git a/src/lib/server/schemas/directory.ts b/src/lib/server/schemas/directory.ts
deleted file mode 100644
index dba44b9..0000000
--- a/src/lib/server/schemas/directory.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import { z } from "zod";
-
-export const directoryIdSchema = z.union([z.literal("root"), z.int().positive()]);
diff --git a/src/lib/server/schemas/file.ts b/src/lib/server/schemas/file.ts
deleted file mode 100644
index 811e590..0000000
--- a/src/lib/server/schemas/file.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import mime from "mime";
-import { z } from "zod";
-import { directoryIdSchema } from "./directory";
-
-export const fileThumbnailUploadRequest = z.object({
-  dekVersion: z.iso.datetime(),
-  contentIv: z.base64().nonempty(),
-});
-export type FileThumbnailUploadRequest = z.input<typeof fileThumbnailUploadRequest>;
-
-export const fileUploadRequest = z.object({
-  parent: directoryIdSchema,
-  mekVersion: z.int().positive(),
-  dek: z.base64().nonempty(),
-  dekVersion: z.iso.datetime(),
-  hskVersion: z.int().positive(),
-  contentHmac: z.base64().nonempty(),
-  contentType: z
-    .string()
-    .trim()
-    .nonempty()
-    .refine((value) => mime.getExtension(value) !== null), // MIME type
-  contentIv: z.base64().nonempty(),
-  name: z.base64().nonempty(),
-  nameIv: z.base64().nonempty(),
-  createdAt: z.base64().nonempty().optional(),
-  createdAtIv: z.base64().nonempty().optional(),
-  lastModifiedAt: z.base64().nonempty(),
-  lastModifiedAtIv: z.base64().nonempty(),
-});
-export type FileUploadRequest = z.input<typeof fileUploadRequest>;
-
-export const fileUploadResponse = z.object({
-  file: z.int().positive(),
-});
-export type FileUploadResponse = z.output<typeof fileUploadResponse>;
diff --git a/src/lib/server/schemas/index.ts b/src/lib/server/schemas/index.ts
deleted file mode 100644
index f7a2bc1..0000000
--- a/src/lib/server/schemas/index.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export * from "./category";
-export * from "./directory";
-export * from "./file";
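Note: the deleted server-only schemas are replaced by a shared module; the routers below now import `CategoryIdSchema` and `DirectoryIdSchema` from `$lib/schemas`, which this diff does not include. Inferred from the deleted definitions above, it presumably looks roughly like:

```ts
// $lib/schemas (not part of this diff; reconstructed from the deleted files)
import { z } from "zod";

export const CategoryIdSchema = z.union([z.literal("root"), z.int().positive()]);
export const DirectoryIdSchema = z.union([z.literal("root"), z.int().positive()]);
```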
Buffer.from(thumbnail.encContentIv, "base64") : undefined, range, ); }; - -export const uploadFileThumbnail = async ( - userId: number, - fileId: number, - dekVersion: Date, - encContentIv: string, - encContentStream: Readable, -) => { - const path = `${env.thumbnailsPath}/${userId}/${uuidv4()}`; - await mkdir(dirname(path), { recursive: true }); - - try { - await pipeline(encContentStream, createWriteStream(path, { flags: "wx", mode: 0o600 })); - - const oldPath = await MediaRepo.updateFileThumbnail( - userId, - fileId, - dekVersion, - path, - encContentIv, - ); - safeUnlink(oldPath); // Intended - } catch (e) { - await safeUnlink(path); - - if (e instanceof IntegrityError) { - if (e.message === "File not found") { - error(404, "File not found"); - } else if (e.message === "Invalid DEK version") { - error(400, "Mismatched DEK version"); - } - } - throw e; - } -}; - -export const uploadFile = async ( - params: Omit, - encContentStream: Readable, - encContentHash: Promise, -) => { - const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000); - const oneMinuteLater = new Date(Date.now() + 60 * 1000); - if (params.dekVersion <= oneDayAgo || params.dekVersion >= oneMinuteLater) { - error(400, "Invalid DEK version"); - } - - const path = `${env.libraryPath}/${params.userId}/${uuidv4()}`; - await mkdir(dirname(path), { recursive: true }); - - try { - const hashStream = createHash("sha256"); - const [, hash] = await Promise.all([ - pipeline( - encContentStream, - async function* (source) { - for await (const chunk of source) { - hashStream.update(chunk); - yield chunk; - } - }, - createWriteStream(path, { flags: "wx", mode: 0o600 }), - ), - encContentHash, - ]); - if (hashStream.digest("base64") !== hash) { - throw new Error("Invalid checksum"); - } - - const { id: fileId } = await FileRepo.registerFile({ - ...params, - path, - encContentHash: hash, - }); - return { fileId }; - } catch (e) { - await safeUnlink(path); - - if (e instanceof IntegrityError && e.message === "Inactive MEK version") { - error(400, "Invalid MEK version"); - } else if ( - e instanceof Error && - (e.message === "Invalid request body" || e.message === "Invalid checksum") - ) { - error(400, "Invalid request body"); - } - throw e; - } -}; diff --git a/src/lib/server/services/upload.ts b/src/lib/server/services/upload.ts new file mode 100644 index 0000000..1f7043b --- /dev/null +++ b/src/lib/server/services/upload.ts @@ -0,0 +1,82 @@ +import { error } from "@sveltejs/kit"; +import { createHash } from "crypto"; +import { createWriteStream } from "fs"; +import { Readable } from "stream"; +import { ENCRYPTION_OVERHEAD, ENCRYPTED_CHUNK_SIZE } from "$lib/constants"; +import { UploadRepo } from "$lib/server/db"; +import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem"; + +const chunkLocks = new Set(); + +export const uploadChunk = async ( + userId: number, + sessionId: string, + chunkIndex: number, + encChunkStream: Readable, + encChunkHash: string, +) => { + const lockKey = `${sessionId}/${chunkIndex}`; + if (chunkLocks.has(lockKey)) { + error(409, "Chunk upload already in progress"); + } else { + chunkLocks.add(lockKey); + } + + let filePath; + + try { + const session = await UploadRepo.getUploadSession(sessionId, userId); + if (!session) { + error(404, "Invalid upload id"); + } else if (chunkIndex >= session.totalChunks) { + error(400, "Invalid chunk index"); + } else if (session.uploadedChunks.includes(chunkIndex)) { + error(409, "Chunk already uploaded"); + } + + const isLastChunk = chunkIndex === 
diff --git a/src/lib/server/services/upload.ts b/src/lib/server/services/upload.ts
new file mode 100644
index 0000000..1f7043b
--- /dev/null
+++ b/src/lib/server/services/upload.ts
@@ -0,0 +1,82 @@
+import { error } from "@sveltejs/kit";
+import { createHash } from "crypto";
+import { createWriteStream } from "fs";
+import { Readable } from "stream";
+import { ENCRYPTION_OVERHEAD, ENCRYPTED_CHUNK_SIZE } from "$lib/constants";
+import { UploadRepo } from "$lib/server/db";
+import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem";
+
+const chunkLocks = new Set<string>();
+
+export const uploadChunk = async (
+  userId: number,
+  sessionId: string,
+  chunkIndex: number,
+  encChunkStream: Readable,
+  encChunkHash: string,
+) => {
+  const lockKey = `${sessionId}/${chunkIndex}`;
+  if (chunkLocks.has(lockKey)) {
+    error(409, "Chunk upload already in progress");
+  } else {
+    chunkLocks.add(lockKey);
+  }
+
+  let filePath;
+
+  try {
+    const session = await UploadRepo.getUploadSession(sessionId, userId);
+    if (!session) {
+      error(404, "Invalid upload id");
+    } else if (chunkIndex >= session.totalChunks) {
+      error(400, "Invalid chunk index");
+    } else if (session.uploadedChunks.includes(chunkIndex)) {
+      error(409, "Chunk already uploaded");
+    }
+
+    const isLastChunk = chunkIndex === session.totalChunks - 1;
+    filePath = `${session.path}/${chunkIndex}`;
+
+    const hashStream = createHash("sha256");
+    const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
+    let writtenBytes = 0;
+
+    for await (const chunk of encChunkStream) {
+      hashStream.update(chunk);
+      writeStream.write(chunk);
+      writtenBytes += chunk.length;
+    }
+
+    await new Promise<void>((resolve, reject) => {
+      writeStream.end((e: any) => (e ? reject(e) : resolve()));
+    });
+
+    if (hashStream.digest("base64") !== encChunkHash) {
+      throw new Error("Invalid checksum");
+    } else if (
+      (!isLastChunk && writtenBytes !== ENCRYPTED_CHUNK_SIZE) ||
+      (isLastChunk && (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > ENCRYPTED_CHUNK_SIZE))
+    ) {
+      throw new Error("Invalid chunk size");
+    }
+
+    await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex);
+  } catch (e) {
+    await safeUnlink(filePath);
+
+    if (
+      e instanceof Error &&
+      (e.message === "Invalid checksum" || e.message === "Invalid chunk size")
+    ) {
+      error(400, "Invalid request body");
+    }
+    throw e;
+  } finally {
+    chunkLocks.delete(lockKey);
+  }
+};
+
+export const cleanupExpiredUploadSessions = async () => {
+  const paths = await UploadRepo.cleanupExpiredUploadSessions();
+  await Promise.all(paths.map(safeRecursiveRm));
+};
diff --git a/src/lib/serviceWorker/client.ts b/src/lib/serviceWorker/client.ts
new file mode 100644
index 0000000..771c15e
--- /dev/null
+++ b/src/lib/serviceWorker/client.ts
@@ -0,0 +1,39 @@
+import { DECRYPTED_FILE_URL_PREFIX } from "$lib/constants";
+import type { FileMetadata, ServiceWorkerMessage, ServiceWorkerResponse } from "./types";
+
+const PREPARE_TIMEOUT_MS = 5000;
+
+const getServiceWorker = async () => {
+  const registration = await navigator.serviceWorker.ready;
+  const sw = registration.active;
+  if (!sw) {
+    throw new Error("Service worker not activated");
+  }
+  return sw;
+};
+
+export const prepareFileDecryption = async (id: number, metadata: FileMetadata) => {
+  const sw = await getServiceWorker();
+  return new Promise<void>((resolve, reject) => {
+    const timeout = setTimeout(
+      () => reject(new Error("Service worker timeout")),
+      PREPARE_TIMEOUT_MS,
+    );
+    const handler = (event: MessageEvent<ServiceWorkerResponse>) => {
+      if (event.data.type === "decryption-ready" && event.data.fileId === id) {
+        clearTimeout(timeout);
+        navigator.serviceWorker.removeEventListener("message", handler);
+        resolve();
+      }
+    };
+    navigator.serviceWorker.addEventListener("message", handler);
+
+    sw.postMessage({
+      type: "decryption-prepare",
+      fileId: id,
+      ...metadata,
+    } satisfies ServiceWorkerMessage);
+  });
+};
+
+export const getDecryptedFileUrl = (id: number) => `${DECRYPTED_FILE_URL_PREFIX}${id}`;
diff --git a/src/lib/serviceWorker/index.ts b/src/lib/serviceWorker/index.ts
new file mode 100644
index 0000000..d2ec230
--- /dev/null
+++ b/src/lib/serviceWorker/index.ts
@@ -0,0 +1,2 @@
+export * from "./client";
+export * from "./types";
diff --git a/src/lib/serviceWorker/types.ts b/src/lib/serviceWorker/types.ts
new file mode 100644
index 0000000..97edd6d
--- /dev/null
+++ b/src/lib/serviceWorker/types.ts
@@ -0,0 +1,19 @@
+export interface FileMetadata {
+  isLegacy: boolean;
+  dataKey: CryptoKey;
+  encContentSize: number;
+  contentType: string;
+}
+
+export interface DecryptionPrepareMessage extends FileMetadata {
+  type: "decryption-prepare";
+  fileId: number;
+}
+
+export interface DecryptionReadyMessage {
+  type: "decryption-ready";
+  fileId: number;
+}
+
+export type ServiceWorkerMessage = DecryptionPrepareMessage;
+export type 
ServiceWorkerResponse = DecryptionReadyMessage; diff --git a/src/lib/services/file.ts b/src/lib/services/file.ts index a0e769b..2f37f52 100644 --- a/src/lib/services/file.ts +++ b/src/lib/services/file.ts @@ -1,4 +1,5 @@ import { getAllFileInfos } from "$lib/indexedDB/filesystem"; +import { encodeToBase64, digestMessage } from "$lib/modules/crypto"; import { getFileCache, storeFileCache, @@ -6,14 +7,17 @@ import { downloadFile, deleteFileThumbnailCache, } from "$lib/modules/file"; -import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; import { trpc } from "$trpc/client"; -export const requestFileDownload = async (fileId: number, dataKey: CryptoKey) => { +export const requestFileDownload = async ( + fileId: number, + dataKey: CryptoKey, + isLegacy: boolean, +) => { const cache = await getFileCache(fileId); if (cache) return cache; - const fileBuffer = await downloadFile(fileId, dataKey); + const fileBuffer = await downloadFile(fileId, dataKey, isLegacy); storeFileCache(fileId, fileBuffer); // Intended return fileBuffer; }; @@ -21,19 +25,40 @@ export const requestFileDownload = async (fileId: number, dataKey: CryptoKey) => export const requestFileThumbnailUpload = async ( fileId: number, dataKeyVersion: Date, - thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: string }, + thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer }, ) => { - const form = new FormData(); - form.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: thumbnailEncrypted.iv, - } satisfies FileThumbnailUploadRequest), - ); - form.set("content", new Blob([thumbnailEncrypted.ciphertext])); + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); - return await fetch(`/api/file/${fileId}/thumbnail/upload`, { method: "POST", body: form }); + // Prepend IV to ciphertext (consistent with file download format) + const ivAndCiphertext = new Uint8Array( + thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength, + ); + ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0); + ivAndCiphertext.set( + new Uint8Array(thumbnailEncrypted.ciphertext), + thumbnailEncrypted.iv.byteLength, + ); + + const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext)); + + const response = await fetch(`/api/upload/${uploadId}/chunks/0`, { + method: "POST", + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${chunkHash}:`, + }, + body: ivAndCiphertext, + }); + + if (!response.ok) { + throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`); + } + + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); + return response; }; export const requestDeletedFilesCleanup = async () => { diff --git a/src/routes/(fullscreen)/file/[id]/+page.svelte b/src/routes/(fullscreen)/file/[id]/+page.svelte index f325c5e..053d6bf 100644 --- a/src/routes/(fullscreen)/file/[id]/+page.svelte +++ b/src/routes/(fullscreen)/file/[id]/+page.svelte @@ -17,6 +17,7 @@ requestFileDownload, requestThumbnailUpload, requestFileAdditionToCategory, + requestVideoStream, } from "./service"; import TopBarMenu from "./TopBarMenu.svelte"; @@ -37,6 +38,7 @@ let viewerType: "image" | "video" | undefined = $state(); let fileBlob: Blob | undefined = $state(); let fileBlobUrl: string | undefined = $state(); + let videoStreamUrl: string | undefined = $state(); let videoElement: HTMLVideoElement | undefined = $state(); const updateViewer = 
async (buffer: ArrayBuffer, contentType: string) => { @@ -95,12 +97,27 @@ untrack(() => { if (!downloadState && !isDownloadRequested) { isDownloadRequested = true; - requestFileDownload(data.id, info!.dataKey!.key).then(async (buffer) => { - const blob = await updateViewer(buffer, contentType); - if (!viewerType) { - FileSaver.saveAs(blob, info!.name); - } - }); + + if (viewerType === "video" && !info!.isLegacy) { + requestVideoStream(data.id, info!.dataKey!.key, contentType).then((streamUrl) => { + if (streamUrl) { + videoStreamUrl = streamUrl; + } else { + requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then((buffer) => + updateViewer(buffer, contentType), + ); + } + }); + } else { + requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then( + async (buffer) => { + const blob = await updateViewer(buffer, contentType); + if (!viewerType) { + FileSaver.saveAs(blob, info!.name); + } + }, + ); + } } }); } @@ -137,6 +154,7 @@ ? info?.parentId : undefined} {fileBlob} + downloadUrl={videoStreamUrl} filename={info?.name} /> @@ -159,9 +177,10 @@ {@render viewerLoading("이미지를 불러오고 있어요.")} {/if} {:else if viewerType === "video"} - {#if fileBlobUrl} + {#if videoStreamUrl || fileBlobUrl}
- + updateThumbnail(info?.dataKey?.key!, info?.dataKey?.version!)} diff --git a/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte b/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte index a037b61..d713e8c 100644 --- a/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte +++ b/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte @@ -10,17 +10,29 @@ interface Props { directoryId?: "root" | number; + downloadUrl?: string; fileBlob?: Blob; filename?: string; isOpen: boolean; } - let { directoryId, fileBlob, filename, isOpen = $bindable() }: Props = $props(); + let { directoryId, downloadUrl, fileBlob, filename, isOpen = $bindable() }: Props = $props(); + + const handleDownload = () => { + if (fileBlob && filename) { + FileSaver.saveAs(fileBlob, filename); + } else if (downloadUrl && filename) { + // Use streaming download via Content-Disposition header + const url = new URL(downloadUrl, window.location.origin); + url.searchParams.set("download", filename); + window.open(url.toString(), "_blank"); + } + }; (isOpen = false)} /> -{#if isOpen && (directoryId || fileBlob)} +{#if isOpen && (directoryId || downloadUrl || fileBlob)}
{ - FileSaver.saveAs(fileBlob, filename); - })} + {#if fileBlob || downloadUrl} + {@render menuButton(IconCloudDownload, "다운로드", handleDownload)} {/if}
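Note: `prepareFileDecryption` (and therefore the streaming download path above) assumes an active service worker. SvelteKit picks up `src/service-worker` automatically and registers it in production builds; during development, registration has to be done manually, roughly like this (whether and where this project does so is not shown in the diff):

```ts
// Dev-only manual registration; in production SvelteKit registers the
// service worker automatically when src/service-worker exists.
import { dev } from "$app/environment";

if (dev && "serviceWorker" in navigator) {
  navigator.serviceWorker.register("/service-worker.js", { type: "module" });
}
```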
diff --git a/src/routes/(fullscreen)/file/[id]/service.ts b/src/routes/(fullscreen)/file/[id]/service.ts index 09ec86f..ea3e49c 100644 --- a/src/routes/(fullscreen)/file/[id]/service.ts +++ b/src/routes/(fullscreen)/file/[id]/service.ts @@ -1,11 +1,32 @@ import { encryptData } from "$lib/modules/crypto"; import { storeFileThumbnailCache } from "$lib/modules/file"; +import { prepareFileDecryption, getDecryptedFileUrl } from "$lib/serviceWorker"; import { requestFileThumbnailUpload } from "$lib/services/file"; import { trpc } from "$trpc/client"; export { requestCategoryCreation, requestFileRemovalFromCategory } from "$lib/services/category"; export { requestFileDownload } from "$lib/services/file"; +export const requestVideoStream = async ( + fileId: number, + dataKey: CryptoKey, + contentType: string, +) => { + const res = await fetch(`/api/file/${fileId}/download`, { method: "HEAD" }); + if (!res.ok) return null; + + const encContentSize = parseInt(res.headers.get("Content-Length") ?? "0", 10); + if (encContentSize <= 0) return null; + + try { + await prepareFileDecryption(fileId, { isLegacy: false, dataKey, encContentSize, contentType }); + return getDecryptedFileUrl(fileId); + } catch { + // TODO: Error Handling + return null; + } +}; + export const requestThumbnailUpload = async ( fileId: number, thumbnail: Blob, diff --git a/src/routes/(fullscreen)/settings/thumbnail/service.ts b/src/routes/(fullscreen)/settings/thumbnail/service.ts index 75c64b8..314cf5a 100644 --- a/src/routes/(fullscreen)/settings/thumbnail/service.ts +++ b/src/routes/(fullscreen)/settings/thumbnail/service.ts @@ -50,7 +50,7 @@ const requestThumbnailUpload = limitFunction( async ( fileId: number, dataKeyVersion: Date, - thumbnail: { plaintext: ArrayBuffer; ciphertext: ArrayBuffer; iv: string }, + thumbnail: { plaintext: ArrayBuffer; ciphertext: ArrayBuffer; iv: ArrayBuffer }, ) => { statuses.set(fileId, "uploading"); @@ -77,7 +77,7 @@ export const requestThumbnailGeneration = async (fileInfo: FileInfo) => { await scheduler.schedule( async () => { statuses.set(fileInfo.id, "generation-pending"); - file = await requestFileDownload(fileInfo.id, fileInfo.dataKey?.key!); + file = await requestFileDownload(fileInfo.id, fileInfo.dataKey?.key!, fileInfo.isLegacy!); return file.byteLength; }, async () => { diff --git a/src/routes/(main)/directory/[[id]]/service.svelte.ts b/src/routes/(main)/directory/[[id]]/service.svelte.ts index f83bbaf..ccd5b14 100644 --- a/src/routes/(main)/directory/[[id]]/service.svelte.ts +++ b/src/routes/(main)/directory/[[id]]/service.svelte.ts @@ -88,7 +88,9 @@ export const requestFileUpload = async ( const res = await uploadFile(file, parentId, hmacSecret, masterKey, onDuplicate); if (!res) return false; - storeFileCache(res.fileId, res.fileBuffer); // Intended + if (res.fileBuffer) { + storeFileCache(res.fileId, res.fileBuffer); // Intended + } if (res.thumbnailBuffer) { storeFileThumbnailCache(res.fileId, res.thumbnailBuffer); // Intended } diff --git a/src/routes/api/file/[id]/download/+server.ts b/src/routes/api/file/[id]/download/+server.ts index 974dd54..5324365 100644 --- a/src/routes/api/file/[id]/download/+server.ts +++ b/src/routes/api/file/[id]/download/+server.ts @@ -1,7 +1,7 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; +import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; import { authorize } from "$lib/server/modules/auth"; -import { parseRangeHeader, getContentRangeHeader } from "$lib/server/modules/http"; import { getFileStream } from 
"$lib/server/services/file"; import type { RequestHandler } from "./$types"; diff --git a/src/routes/api/file/[id]/thumbnail/download/+server.ts b/src/routes/api/file/[id]/thumbnail/download/+server.ts index 70d4cd3..85cdd8c 100644 --- a/src/routes/api/file/[id]/thumbnail/download/+server.ts +++ b/src/routes/api/file/[id]/thumbnail/download/+server.ts @@ -1,7 +1,7 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; +import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; import { authorize } from "$lib/server/modules/auth"; -import { parseRangeHeader, getContentRangeHeader } from "$lib/server/modules/http"; import { getFileThumbnailStream } from "$lib/server/services/file"; import type { RequestHandler } from "./$types"; diff --git a/src/routes/api/file/[id]/thumbnail/upload/+server.ts b/src/routes/api/file/[id]/thumbnail/upload/+server.ts deleted file mode 100644 index 62dfe42..0000000 --- a/src/routes/api/file/[id]/thumbnail/upload/+server.ts +++ /dev/null @@ -1,74 +0,0 @@ -import Busboy from "@fastify/busboy"; -import { error, text } from "@sveltejs/kit"; -import { Readable, Writable } from "stream"; -import { z } from "zod"; -import { authorize } from "$lib/server/modules/auth"; -import { fileThumbnailUploadRequest, type FileThumbnailUploadRequest } from "$lib/server/schemas"; -import { uploadFileThumbnail } from "$lib/server/services/file"; -import type { RequestHandler } from "./$types"; - -export const POST: RequestHandler = async ({ locals, params, request }) => { - const { userId } = await authorize(locals, "activeClient"); - - const zodRes = z - .object({ - id: z.coerce.number().int().positive(), - }) - .safeParse(params); - if (!zodRes.success) error(400, "Invalid path parameters"); - const { id } = zodRes.data; - - const contentType = request.headers.get("Content-Type"); - if (!contentType?.startsWith("multipart/form-data") || !request.body) { - error(400, "Invalid request body"); - } - - return new Promise((resolve, reject) => { - const bb = Busboy({ headers: { "content-type": contentType } }); - const handler = - (f: (...args: T) => Promise) => - (...args: T) => { - f(...args).catch(reject); - }; - - let metadata: FileThumbnailUploadRequest | null = null; - let content: Readable | null = null; - bb.on( - "field", - handler(async (fieldname, val) => { - if (fieldname === "metadata") { - // Ignore subsequent metadata fields - if (!metadata) { - const zodRes = fileThumbnailUploadRequest.safeParse(JSON.parse(val)); - if (!zodRes.success) error(400, "Invalid request body"); - metadata = zodRes.data; - } - } else { - error(400, "Invalid request body"); - } - }), - ); - bb.on( - "file", - handler(async (fieldname, file) => { - if (fieldname !== "content") error(400, "Invalid request body"); - if (!metadata || content) error(400, "Invalid request body"); - content = file; - - await uploadFileThumbnail( - userId, - id, - new Date(metadata.dekVersion), - metadata.contentIv, - content, - ); - resolve(text("Thumbnail uploaded", { headers: { "Content-Type": "text/plain" } })); - }), - ); - bb.on("error", (e) => { - content?.emit("error", e) ?? 
-    });
-
-    request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error
-  });
-};
diff --git a/src/routes/api/file/upload/+server.ts b/src/routes/api/file/upload/+server.ts
deleted file mode 100644
index f9cbd53..0000000
--- a/src/routes/api/file/upload/+server.ts
+++ /dev/null
@@ -1,108 +0,0 @@
-import Busboy from "@fastify/busboy";
-import { error, json } from "@sveltejs/kit";
-import { Readable, Writable } from "stream";
-import { authorize } from "$lib/server/modules/auth";
-import {
-  fileUploadRequest,
-  fileUploadResponse,
-  type FileUploadResponse,
-} from "$lib/server/schemas";
-import { uploadFile } from "$lib/server/services/file";
-import type { RequestHandler } from "./$types";
-
-type FileMetadata = Parameters<typeof uploadFile>[0];
-
-const parseFileMetadata = (userId: number, json: string) => {
-  const zodRes = fileUploadRequest.safeParse(JSON.parse(json));
-  if (!zodRes.success) error(400, "Invalid request body");
-  const {
-    parent,
-    mekVersion,
-    dek,
-    dekVersion,
-    hskVersion,
-    contentHmac,
-    contentType,
-    contentIv,
-    name,
-    nameIv,
-    createdAt,
-    createdAtIv,
-    lastModifiedAt,
-    lastModifiedAtIv,
-  } = zodRes.data;
-  if ((createdAt && !createdAtIv) || (!createdAt && createdAtIv))
-    error(400, "Invalid request body");
-
-  return {
-    userId,
-    parentId: parent,
-    mekVersion,
-    encDek: dek,
-    dekVersion: new Date(dekVersion),
-    hskVersion,
-    contentHmac,
-    contentType,
-    encContentIv: contentIv,
-    encName: { ciphertext: name, iv: nameIv },
-    encCreatedAt: createdAt && createdAtIv ? { ciphertext: createdAt, iv: createdAtIv } : null,
-    encLastModifiedAt: { ciphertext: lastModifiedAt, iv: lastModifiedAtIv },
-  } satisfies FileMetadata;
-};
-
-export const POST: RequestHandler = async ({ locals, request }) => {
-  const { userId } = await authorize(locals, "activeClient");
-
-  const contentType = request.headers.get("Content-Type");
-  if (!contentType?.startsWith("multipart/form-data") || !request.body) {
-    error(400, "Invalid request body");
-  }
-
-  return new Promise<Response>((resolve, reject) => {
-    const bb = Busboy({ headers: { "content-type": contentType } });
-    const handler =
-      <T extends unknown[]>(f: (...args: T) => Promise<unknown>) =>
-      (...args: T) => {
-        f(...args).catch(reject);
-      };
-
-    let metadata: FileMetadata | null = null;
-    let content: Readable | null = null;
-    const checksum = new Promise<string>((resolveChecksum, rejectChecksum) => {
-      bb.on(
-        "field",
-        handler(async (fieldname, val) => {
-          if (fieldname === "metadata") {
-            // Ignore subsequent metadata fields
-            if (!metadata) {
-              metadata = parseFileMetadata(userId, val);
-            }
-          } else if (fieldname === "checksum") {
-            // Ignore subsequent checksum fields
-            resolveChecksum(val);
-          } else {
-            error(400, "Invalid request body");
-          }
-        }),
-      );
-      bb.on(
-        "file",
-        handler(async (fieldname, file) => {
-          if (fieldname !== "content") error(400, "Invalid request body");
-          if (!metadata || content) error(400, "Invalid request body");
-          content = file;
-
-          const { fileId } = await uploadFile(metadata, content, checksum);
-          resolve(json(fileUploadResponse.parse({ file: fileId } satisfies FileUploadResponse)));
-        }),
-      );
-      bb.on("finish", () => rejectChecksum(new Error("Invalid request body")));
-      bb.on("error", (e) => {
-        content?.emit("error", e) ?? reject(e);
-        rejectChecksum(e);
-      });
-    });
-
-    request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error
-  });
-};
diff --git a/src/routes/api/upload/[id]/chunks/[index]/+server.ts b/src/routes/api/upload/[id]/chunks/[index]/+server.ts
new file mode 100644
index 0000000..47d6397
--- /dev/null
+++ b/src/routes/api/upload/[id]/chunks/[index]/+server.ts
@@ -0,0 +1,43 @@
+import { error, text } from "@sveltejs/kit";
+import { Readable } from "stream";
+import { z } from "zod";
+import { authorize } from "$lib/server/modules/auth";
+import { uploadChunk } from "$lib/server/services/upload";
+import type { RequestHandler } from "./$types";
+
+export const POST: RequestHandler = async ({ locals, params, request }) => {
+  const { userId } = await authorize(locals, "activeClient");
+
+  const zodRes = z
+    .object({
+      id: z.uuidv4(),
+      index: z.coerce.number().int().nonnegative(),
+    })
+    .safeParse(params);
+  if (!zodRes.success) error(400, "Invalid path parameters");
+  const { id: uploadId, index: chunkIndex } = zodRes.data;
+
+  // Parse Content-Digest header (RFC 9530)
+  // Expected format: sha-256=:base64hash:
+  const contentDigest = request.headers.get("Content-Digest");
+  if (!contentDigest) error(400, "Missing Content-Digest header");
+
+  const digestMatch = contentDigest.match(/^sha-256=:([A-Za-z0-9+/=]+):$/);
+  if (!digestMatch || !digestMatch[1])
+    error(400, "Invalid Content-Digest format, must be sha-256=:base64:");
+  const encChunkHash = digestMatch[1];
+
+  const contentType = request.headers.get("Content-Type");
+  if (contentType !== "application/octet-stream" || !request.body) {
+    error(400, "Invalid request body");
+  }
+
+  // Convert web ReadableStream to Node Readable
+  const nodeReadable = Readable.fromWeb(
+    request.body as unknown as Parameters<typeof Readable.fromWeb>[0],
+  );
+
+  await uploadChunk(userId, uploadId, chunkIndex, nodeReadable, encChunkHash);
+
+  return text("Chunk uploaded", { headers: { "Content-Type": "text/plain" } });
+};
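Note: the chunk endpoint validates an RFC 9530 `Content-Digest` header before trusting the body, and `uploadChunk` re-hashes the bytes as they stream to disk. The client side of this (see `requestFileThumbnailUpload` above) goes through `digestMessage`/`encodeToBase64` from `$lib/modules/crypto`; a dependency-free sketch of the same value using plain Web Crypto:

```ts
// Builds the `Content-Digest: sha-256=:<base64>:` value for a chunk body.
const contentDigest = async (body: Uint8Array) => {
  const hash = await crypto.subtle.digest("SHA-256", body); // 32 bytes
  const b64 = btoa(String.fromCharCode(...new Uint8Array(hash)));
  return `sha-256=:${b64}:`;
};
```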
diff --git a/src/service-worker/handlers/decryptFile.ts b/src/service-worker/handlers/decryptFile.ts
new file mode 100644
index 0000000..22aa118
--- /dev/null
+++ b/src/service-worker/handlers/decryptFile.ts
@@ -0,0 +1,153 @@
+import { DECRYPTED_FILE_URL_PREFIX, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "../modules/constants";
+import { decryptChunk, getEncryptedRange, getDecryptedSize } from "../modules/crypto";
+import { parseRangeHeader, getContentRangeHeader } from "../modules/http";
+import { getFile } from "../modules/opfs";
+import { fileMetadataStore } from "../stores";
+import type { FileMetadata } from "../types";
+
+const createResponse = (
+  stream: ReadableStream,
+  isRangeRequest: boolean,
+  range: { start: number; end: number; total: number },
+  contentType?: string,
+  downloadFilename?: string,
+) => {
+  const headers: Record<string, string> = {
+    "Accept-Ranges": "bytes",
+    "Content-Length": String(range.end - range.start + 1),
+    "Content-Type": contentType ?? "application/octet-stream",
+    ...(isRangeRequest ? getContentRangeHeader(range) : {}),
+  };
+
+  if (downloadFilename) {
+    headers["Content-Disposition"] =
+      `attachment; filename*=UTF-8''${encodeURIComponent(downloadFilename)}`;
+  }
+
+  return new Response(stream, {
+    status: isRangeRequest ? 206 : 200,
+    headers,
+  });
+};
+
+const streamFromOpfs = async (
+  file: File,
+  metadata?: FileMetadata,
+  range?: { start?: number; end?: number },
+  downloadFilename?: string,
+) => {
+  const start = range?.start ?? 0;
+  const end = range?.end ?? file.size - 1;
+  if (start > end || start < 0 || end >= file.size) {
+    return new Response("Invalid range", { status: 416 });
+  }
+
+  return createResponse(
+    file.slice(start, end + 1).stream(),
+    !!range,
+    { start, end, total: file.size },
+    metadata?.contentType,
+    downloadFilename,
+  );
+};
+
+const streamFromServer = async (
+  id: number,
+  metadata: FileMetadata,
+  range?: { start?: number; end?: number },
+  downloadFilename?: string,
+) => {
+  const totalSize = getDecryptedSize(metadata.encContentSize, metadata.isLegacy);
+  const start = range?.start ?? 0;
+  const end =
+    range?.end ??
+    (range && !metadata.isLegacy ? Math.min(start + CHUNK_SIZE, totalSize) : totalSize) - 1;
+  if (start > end || start < 0 || end >= totalSize) {
+    return new Response("Invalid range", { status: 416 });
+  }
+
+  const encryptedRange = getEncryptedRange(start, end, metadata.encContentSize, metadata.isLegacy);
+  const apiResponse = await fetch(`/api/file/${id}/download`, {
+    headers: { Range: `bytes=${encryptedRange.start}-${encryptedRange.end}` },
+  });
+  if (apiResponse.status !== 206 || !apiResponse.body) {
+    return new Response("Failed to fetch encrypted file", { status: 502 });
+  }
+
+  if (metadata.isLegacy) {
+    const fileEncrypted = await apiResponse.arrayBuffer();
+    const decrypted = await decryptChunk(fileEncrypted, metadata.dataKey);
+    return createResponse(
+      new ReadableStream({
+        start(controller) {
+          controller.enqueue(new Uint8Array(decrypted.slice(start, end + 1)));
+          controller.close();
+        },
+      }),
+      !!range,
+      { start, end, total: totalSize },
+      metadata.contentType,
+    );
+  }
+
+  const totalChunks = encryptedRange.lastChunkIndex - encryptedRange.firstChunkIndex + 1;
+  let currentChunkIndex = 0;
+  let buffer = new Uint8Array(0);
+
+  const decryptingStream = new TransformStream({
+    async transform(chunk, controller) {
+      const newBuffer = new Uint8Array(buffer.length + chunk.length);
+      newBuffer.set(buffer);
+      newBuffer.set(chunk, buffer.length);
+      buffer = newBuffer;
+
+      while (buffer.length >= ENCRYPTED_CHUNK_SIZE && currentChunkIndex < totalChunks - 1) {
+        const encryptedChunk = buffer.slice(0, ENCRYPTED_CHUNK_SIZE);
+        buffer = buffer.slice(ENCRYPTED_CHUNK_SIZE);
+
+        const decrypted = await decryptChunk(encryptedChunk.buffer, metadata.dataKey);
+        const sliceStart = currentChunkIndex === 0 ? start % CHUNK_SIZE : 0;
+        controller.enqueue(new Uint8Array(decrypted.slice(sliceStart)));
+        currentChunkIndex++;
+      }
+    },
+    async flush(controller) {
+      if (buffer.length > 0) {
+        const decrypted = await decryptChunk(buffer.buffer, metadata.dataKey);
+        const sliceStart = currentChunkIndex === 0 ? start % CHUNK_SIZE : 0;
+        const sliceEnd = (end % CHUNK_SIZE) + 1;
+        controller.enqueue(new Uint8Array(decrypted.slice(sliceStart, sliceEnd)));
+      }
+    },
+  });
+
+  return createResponse(
+    apiResponse.body.pipeThrough(decryptingStream),
+    !!range,
+    { start, end, total: totalSize },
+    metadata.contentType,
+    downloadFilename,
+  );
+};
+
+const decryptFileHandler = async (request: Request) => {
+  const url = new URL(request.url);
+  const fileId = parseInt(url.pathname.slice(DECRYPTED_FILE_URL_PREFIX.length), 10);
+  if (isNaN(fileId)) {
+    return new Response("Invalid file id", { status: 400 });
+  }
+
+  const downloadFilename = url.searchParams.get("download") ?? undefined;
+  const metadata = fileMetadataStore.get(fileId);
+  const range = parseRangeHeader(request.headers.get("Range"));
+  const cache = await getFile(`/cache/${fileId}`);
+  if (cache) {
+    return streamFromOpfs(cache, metadata, range, downloadFilename);
+  } else if (metadata) {
+    return streamFromServer(fileId, metadata, range, downloadFilename);
+  } else {
+    return new Response("Decryption not prepared", { status: 400 });
+  }
+};
+
+export default decryptFileHandler;
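Note: `getFile` comes from `$lib/modules/opfs` (re-exported below), whose implementation is not part of this diff. For orientation, a sketch of what such a helper typically looks like against the origin-private file system (names and layout assumed, not confirmed by this PR):

```ts
// Hypothetical sketch of $lib/modules/opfs#getFile: resolves a slash-separated
// path inside the origin-private file system, returning null when absent.
export const getFile = async (path: string): Promise<File | null> => {
  try {
    let dir = await navigator.storage.getDirectory();
    const segments = path.split("/").filter(Boolean);
    const name = segments.pop()!;
    for (const segment of segments) {
      dir = await dir.getDirectoryHandle(segment);
    }
    return await (await dir.getFileHandle(name)).getFile();
  } catch {
    return null;
  }
};
```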
diff --git a/src/service-worker/handlers/index.ts b/src/service-worker/handlers/index.ts
new file mode 100644
index 0000000..fe5b0f9
--- /dev/null
+++ b/src/service-worker/handlers/index.ts
@@ -0,0 +1 @@
+export { default as decryptFile } from "./decryptFile";
diff --git a/src/service-worker/index.ts b/src/service-worker/index.ts
new file mode 100644
index 0000000..051f8d9
--- /dev/null
+++ b/src/service-worker/index.ts
@@ -0,0 +1,43 @@
+/// <reference types="@sveltejs/kit" />
+/// <reference no-default-lib="true" />
+/// <reference lib="esnext" />
+/// <reference lib="webworker" />
+
+import { DECRYPTED_FILE_URL_PREFIX } from "./modules/constants";
+import { decryptFile } from "./handlers";
+import { fileMetadataStore } from "./stores";
+import type { ServiceWorkerMessage, ServiceWorkerResponse } from "./types";
+
+const self = globalThis.self as unknown as ServiceWorkerGlobalScope;
+
+self.addEventListener("message", (event) => {
+  const message: ServiceWorkerMessage = event.data;
+  switch (message.type) {
+    case "decryption-prepare":
+      fileMetadataStore.set(message.fileId, message);
+      event.source?.postMessage({
+        type: "decryption-ready",
+        fileId: message.fileId,
+      } satisfies ServiceWorkerResponse);
+      break;
+    default: {
+      const exhaustive: never = message.type;
+      return exhaustive;
+    }
+  }
+});
+
+self.addEventListener("fetch", (event) => {
+  const url = new URL(event.request.url);
+  if (url.pathname.startsWith(DECRYPTED_FILE_URL_PREFIX)) {
+    event.respondWith(decryptFile(event.request));
+  }
+});
+
+self.addEventListener("install", () => {
+  self.skipWaiting();
+});
+
+self.addEventListener("activate", (event) => {
+  event.waitUntil(self.clients.claim());
+});
diff --git a/src/service-worker/modules/constants.ts b/src/service-worker/modules/constants.ts
new file mode 100644
index 0000000..cca093e
--- /dev/null
+++ b/src/service-worker/modules/constants.ts
@@ -0,0 +1 @@
+export * from "../../lib/constants";
diff --git a/src/service-worker/modules/crypto.ts b/src/service-worker/modules/crypto.ts
new file mode 100644
index 0000000..1afee74
--- /dev/null
+++ b/src/service-worker/modules/crypto.ts
@@ -0,0 +1,40 @@
+import { ENCRYPTION_OVERHEAD, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "./constants";
+
+export * from "../../lib/modules/crypto";
+
+export const getEncryptedRange = (
+  start: number,
+  end: number,
+  totalEncryptedSize: number,
+  isLegacy: boolean,
+) => {
+  if (isLegacy) {
+    return {
+      firstChunkIndex: 0,
+      lastChunkIndex: 0,
+      start: 0,
+      end: totalEncryptedSize - 1,
+    };
+  }
+
+  const firstChunkIndex = Math.floor(start / CHUNK_SIZE);
+  const lastChunkIndex = Math.floor(end / CHUNK_SIZE);
+  return {
+    firstChunkIndex,
+    lastChunkIndex,
+    start: firstChunkIndex * ENCRYPTED_CHUNK_SIZE,
+    end: Math.min((lastChunkIndex + 1) * ENCRYPTED_CHUNK_SIZE - 1, totalEncryptedSize - 1),
+  };
+};
+
+export const getDecryptedSize = (encryptedSize: number, isLegacy: boolean) => {
+  if (isLegacy) {
+    return encryptedSize - ENCRYPTION_OVERHEAD;
+  }
+
+  const fullChunks = Math.floor(encryptedSize / ENCRYPTED_CHUNK_SIZE);
+  const lastChunkEncSize = encryptedSize % ENCRYPTED_CHUNK_SIZE;
+  return (
+    fullChunks * CHUNK_SIZE + (lastChunkEncSize > 0 ? lastChunkEncSize - ENCRYPTION_OVERHEAD : 0)
+  );
+};
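Note: `getEncryptedRange` widens a plaintext byte range to whole encrypted chunks, and the decrypting stream above trims the surplus back off. A worked example, assuming CHUNK_SIZE = 1 MiB and ENCRYPTION_OVERHEAD = 28 bytes (12-byte IV plus 16-byte GCM tag); the real values live in `$lib/constants` and are not shown in this diff:

```ts
const CHUNK_SIZE = 1_048_576; // assumed
const ENCRYPTION_OVERHEAD = 28; // assumed
const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD;

// Plaintext range 1_000_000..1_100_000 spans chunks 0 and 1:
Math.floor(1_000_000 / CHUNK_SIZE); // firstChunkIndex = 0
Math.floor(1_100_000 / CHUNK_SIZE); // lastChunkIndex = 1
// -> fetch encrypted bytes 0..(2 * ENCRYPTED_CHUNK_SIZE - 1), decrypt both
//    chunks, then slice off the leading and trailing surplus plaintext.
```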
lastChunkEncSize - ENCRYPTION_OVERHEAD : 0) + ); +}; diff --git a/src/service-worker/modules/http.ts b/src/service-worker/modules/http.ts new file mode 100644 index 0000000..0d1bf5e --- /dev/null +++ b/src/service-worker/modules/http.ts @@ -0,0 +1 @@ +export * from "../../lib/modules/http"; diff --git a/src/service-worker/modules/opfs.ts b/src/service-worker/modules/opfs.ts new file mode 100644 index 0000000..0ef5769 --- /dev/null +++ b/src/service-worker/modules/opfs.ts @@ -0,0 +1 @@ +export * from "../../lib/modules/opfs"; diff --git a/src/service-worker/stores.ts b/src/service-worker/stores.ts new file mode 100644 index 0000000..22d899e --- /dev/null +++ b/src/service-worker/stores.ts @@ -0,0 +1,3 @@ +import type { FileMetadata } from "./types"; + +export const fileMetadataStore = new Map(); diff --git a/src/service-worker/types.ts b/src/service-worker/types.ts new file mode 100644 index 0000000..f04ed39 --- /dev/null +++ b/src/service-worker/types.ts @@ -0,0 +1 @@ +export * from "../lib/serviceWorker/types"; diff --git a/src/trpc/router.server.ts b/src/trpc/router.server.ts index 64d25c7..d343fa6 100644 --- a/src/trpc/router.server.ts +++ b/src/trpc/router.server.ts @@ -9,6 +9,7 @@ import { fileRouter, hskRouter, mekRouter, + uploadRouter, userRouter, } from "./routers"; @@ -20,6 +21,7 @@ export const appRouter = router({ file: fileRouter, hsk: hskRouter, mek: mekRouter, + upload: uploadRouter, user: userRouter, }); diff --git a/src/trpc/routers/category.ts b/src/trpc/routers/category.ts index a292889..34887f7 100644 --- a/src/trpc/routers/category.ts +++ b/src/trpc/routers/category.ts @@ -1,14 +1,14 @@ import { TRPCError } from "@trpc/server"; import { z } from "zod"; +import { CategoryIdSchema } from "$lib/schemas"; import { CategoryRepo, FileRepo, IntegrityError } from "$lib/server/db"; -import { categoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; const categoryRouter = router({ get: roleProcedure["activeClient"] .input( z.object({ - id: categoryIdSchema, + id: CategoryIdSchema, recurse: z.boolean().default(false), }), ) @@ -65,7 +65,7 @@ const categoryRouter = router({ create: roleProcedure["activeClient"] .input( z.object({ - parent: categoryIdSchema, + parent: CategoryIdSchema, mekVersion: z.int().positive(), dek: z.base64().nonempty(), dekVersion: z.date(), diff --git a/src/trpc/routers/directory.ts b/src/trpc/routers/directory.ts index 6e1e358..15f16f3 100644 --- a/src/trpc/routers/directory.ts +++ b/src/trpc/routers/directory.ts @@ -1,15 +1,15 @@ import { TRPCError } from "@trpc/server"; import { z } from "zod"; +import { DirectoryIdSchema } from "$lib/schemas"; import { FileRepo, IntegrityError } from "$lib/server/db"; import { safeUnlink } from "$lib/server/modules/filesystem"; -import { directoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; const directoryRouter = router({ get: roleProcedure["activeClient"] .input( z.object({ - id: directoryIdSchema, + id: DirectoryIdSchema, }), ) .query(async ({ ctx, input }) => { @@ -59,7 +59,7 @@ const directoryRouter = router({ create: roleProcedure["activeClient"] .input( z.object({ - parent: directoryIdSchema, + parent: DirectoryIdSchema, mekVersion: z.int().positive(), dek: z.base64().nonempty(), dekVersion: z.date(), diff --git a/src/trpc/routers/file.ts b/src/trpc/routers/file.ts index a56a91f..294300c 100644 --- a/src/trpc/routers/file.ts +++ b/src/trpc/routers/file.ts @@ -19,6 +19,7 @@ const fileRouter = router({ const categories = 
diff --git a/src/trpc/routers/file.ts b/src/trpc/routers/file.ts
index a56a91f..294300c 100644
--- a/src/trpc/routers/file.ts
+++ b/src/trpc/routers/file.ts
@@ -19,6 +19,7 @@ const fileRouter = router({
       const categories = await FileRepo.getAllFileCategories(input.id);
 
       return {
+        isLegacy: !!file.encContentIv,
         parent: file.parentId,
         mekVersion: file.mekVersion,
         dek: file.encDek,
@@ -52,6 +53,7 @@ const fileRouter = router({
       const files = await FileRepo.getFilesWithCategories(ctx.session.userId, input.ids);
       return files.map((file) => ({
         id: file.id,
+        isLegacy: !!file.encContentIv,
         parent: file.parentId,
         mekVersion: file.mekVersion,
         dek: file.encDek,
diff --git a/src/trpc/routers/index.ts b/src/trpc/routers/index.ts
index ab5b6a0..5c8df24 100644
--- a/src/trpc/routers/index.ts
+++ b/src/trpc/routers/index.ts
@@ -5,4 +5,5 @@ export { default as directoryRouter } from "./directory";
 export { default as fileRouter } from "./file";
 export { default as hskRouter } from "./hsk";
 export { default as mekRouter } from "./mek";
+export { default as uploadRouter } from "./upload";
 export { default as userRouter } from "./user";
diff --git a/src/trpc/routers/upload.ts b/src/trpc/routers/upload.ts
new file mode 100644
index 0000000..168e957
--- /dev/null
+++ b/src/trpc/routers/upload.ts
@@ -0,0 +1,255 @@
+import { TRPCError } from "@trpc/server";
+import { createHash } from "crypto";
+import { createReadStream, createWriteStream } from "fs";
+import { mkdir, rename } from "fs/promises";
+import mime from "mime";
+import { dirname } from "path";
+import { v4 as uuidv4 } from "uuid";
+import { z } from "zod";
+import { DirectoryIdSchema } from "$lib/schemas";
+import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db";
+import db from "$lib/server/db/kysely";
+import env from "$lib/server/loadenv";
+import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem";
+import { router, roleProcedure } from "../init.server";
+
+const sessionLocks = new Set<string>();
+
+const generateSessionId = async () => {
+  const id = uuidv4();
+  const path = `${env.uploadsPath}/${id}`;
+  await mkdir(path, { recursive: true });
+  return { id, path };
+};
+
+const uploadRouter = router({
+  startFileUpload: roleProcedure["activeClient"]
+    .input(
+      z.object({
+        chunks: z.int().positive(),
+        parent: DirectoryIdSchema,
+        mekVersion: z.int().positive(),
+        dek: z.base64().nonempty(),
+        dekVersion: z.date(),
+        hskVersion: z.int().positive().optional(),
+        contentType: z
+          .string()
+          .trim()
+          .nonempty()
+          .refine((value) => mime.getExtension(value) !== null),
+        name: z.base64().nonempty(),
+        nameIv: z.base64().nonempty(),
+        createdAt: z.base64().nonempty().optional(),
+        createdAtIv: z.base64().nonempty().optional(),
+        lastModifiedAt: z.base64().nonempty(),
+        lastModifiedAtIv: z.base64().nonempty(),
+      }),
+    )
+    .mutation(async ({ ctx, input }) => {
+      const oneMinuteAgo = new Date(Date.now() - 60 * 1000);
+      const oneMinuteLater = new Date(Date.now() + 60 * 1000);
+      if (input.dekVersion <= oneMinuteAgo || input.dekVersion >= oneMinuteLater) {
+        throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" });
+      }
+
+      const { id, path } = await generateSessionId();
+
+      try {
+        await UploadRepo.createFileUploadSession({
+          id,
+          userId: ctx.session.userId,
+          path,
+          totalChunks: input.chunks,
+          expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours
+          parentId: input.parent,
+          mekVersion: input.mekVersion,
+          encDek: input.dek,
+          dekVersion: input.dekVersion,
+          hskVersion: input.hskVersion ?? null,
+          contentType: input.contentType,
+          encName: { ciphertext: input.name, iv: input.nameIv },
+          encCreatedAt:
+            input.createdAt && input.createdAtIv
+              ? { ciphertext: input.createdAt, iv: input.createdAtIv }
+              : null,
+          encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv },
+        });
+        return { uploadId: id };
+      } catch (e) {
+        await safeRecursiveRm(path);
+
+        if (e instanceof IntegrityError) {
+          if (e.message === "Inactive MEK version") {
+            throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" });
+          } else if (e.message === "Inactive HSK version") {
+            throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid HSK version" });
+          }
+        }
+        throw e;
+      }
+    }),
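Note: together with the chunk endpoint, this gives the client a three-step protocol: `startFileUpload`, then a POST per chunk to `/api/upload/:id/chunks/:index`, then `completeFileUpload`. A minimal sketch of the client side (encryption and metadata assembly elided; `StartFileUploadInput` and `uploadEncryptedChunk` are hypothetical stand-ins, the latter for the chunk POST shown earlier):

```ts
// StartFileUploadInput mirrors the startFileUpload input schema above,
// minus `chunks` (hypothetical type; not exported by this PR).
const uploadFileInChunks = async (encChunks: Uint8Array[], metadata: StartFileUploadInput) => {
  const { uploadId } = await trpc().upload.startFileUpload.mutate({
    ...metadata,
    chunks: encChunks.length,
  });
  for (let i = 0; i < encChunks.length; i++) {
    await uploadEncryptedChunk(uploadId, i, encChunks[i]); // POST with Content-Digest
  }
  return await trpc().upload.completeFileUpload.mutate({ uploadId }); // { file: number }
};
```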
+
+  startFileThumbnailUpload: roleProcedure["activeClient"]
+    .input(
+      z.object({
+        file: z.int().positive(),
+        dekVersion: z.date(),
+      }),
+    )
+    .mutation(async ({ ctx, input }) => {
+      const { id, path } = await generateSessionId();
+
+      try {
+        await UploadRepo.createThumbnailUploadSession({
+          id,
+          userId: ctx.session.userId,
+          path,
+          totalChunks: 1, // Up to 4 MiB
+          expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours
+          fileId: input.file,
+          dekVersion: input.dekVersion,
+        });
+        return { uploadId: id };
+      } catch (e) {
+        await safeRecursiveRm(path);
+
+        if (e instanceof IntegrityError) {
+          if (e.message === "File not found") {
+            throw new TRPCError({ code: "NOT_FOUND", message: "File not found" });
+          } else if (e.message === "Invalid DEK version") {
+            throw new TRPCError({ code: "BAD_REQUEST", message: "Mismatched DEK version" });
+          }
+        }
+        throw e;
+      }
+    }),
+
+  completeFileUpload: roleProcedure["activeClient"]
+    .input(
+      z.object({
+        uploadId: z.uuidv4(),
+        contentHmac: z.base64().nonempty().optional(),
+      }),
+    )
+    .mutation(async ({ ctx, input }) => {
+      const { uploadId } = input;
+      if (sessionLocks.has(uploadId)) {
+        throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" });
+      } else {
+        sessionLocks.add(uploadId);
+      }
+
+      let filePath = "";
+
+      try {
+        const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
+        if (!session || session.type !== "file") {
+          throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" });
+        } else if (
+          (session.hskVersion && !input.contentHmac) ||
+          (!session.hskVersion && input.contentHmac)
+        ) {
+          throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content HMAC" });
+        } else if (session.uploadedChunks.length < session.totalChunks) {
+          throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" });
+        }
+
+        filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`;
+        await mkdir(dirname(filePath), { recursive: true });
+
+        const hashStream = createHash("sha256");
+        const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
+
+        for (let i = 0; i < session.totalChunks; i++) {
+          for await (const chunk of createReadStream(`${session.path}/${i}`)) {
+            hashStream.update(chunk);
+            writeStream.write(chunk);
+          }
+        }
+
+        await new Promise<void>((resolve, reject) => {
+          writeStream.end((e: any) => (e ? reject(e) : resolve()));
+        });
+
+        const hash = hashStream.digest("base64");
+        const fileId = await db.transaction().execute(async (trx) => {
+          const { id: fileId } = await FileRepo.registerFile(trx, {
+            ...session,
+            userId: ctx.session.userId,
+            path: filePath,
+            contentHmac: input.contentHmac ??
null, + encContentHash: hash, + encContentIv: null, + }); + await UploadRepo.deleteUploadSession(trx, uploadId); + return fileId; + }); + + await safeRecursiveRm(session.path); + return { file: fileId }; + } catch (e) { + await safeUnlink(filePath); + throw e; + } finally { + sessionLocks.delete(uploadId); + } + }), + + completeFileThumbnailUpload: roleProcedure["activeClient"] + .input( + z.object({ + uploadId: z.uuidv4(), + }), + ) + .mutation(async ({ ctx, input }) => { + const { uploadId } = input; + if (sessionLocks.has(uploadId)) { + throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" }); + } else { + sessionLocks.add(uploadId); + } + + let thumbnailPath = ""; + + try { + const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); + if (!session || session.type !== "thumbnail") { + throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); + } else if (session.uploadedChunks.length < session.totalChunks) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); + } + + thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uploadId}`; + await mkdir(dirname(thumbnailPath), { recursive: true }); + await rename(`${session.path}/0`, thumbnailPath); + + const oldThumbnailPath = await db.transaction().execute(async (trx) => { + const oldPath = await MediaRepo.updateFileThumbnail( + trx, + ctx.session.userId, + session.fileId, + session.dekVersion, + thumbnailPath, + null, + ); + await UploadRepo.deleteUploadSession(trx, uploadId); + return oldPath; + }); + await Promise.all([safeUnlink(oldThumbnailPath), safeRecursiveRm(session.path)]); + } catch (e) { + await safeUnlink(thumbnailPath); + if (e instanceof IntegrityError) { + if (e.message === "File not found") { + throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); + } else if (e.message === "Invalid DEK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: "Mismatched DEK version" }); + } + } + throw e; + } finally { + sessionLocks.delete(uploadId); + } + }), +}); + +export default uploadRouter;
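Note: both download endpoints and the service worker now share `parseRangeHeader`/`getContentRangeHeader` from `$lib/modules/http`, which this diff only re-points imports at and does not show. From its call sites (`parseRangeHeader(request.headers.get("Range"))`, optional `start`/`end`), a plausible reconstruction:

```ts
// Sketch of $lib/modules/http#parseRangeHeader (implementation assumed, not
// part of this diff): handles single ranges like "bytes=0-1023", "bytes=1024-"
// and "bytes=-500", returning undefined for absent or malformed headers.
export const parseRangeHeader = (header: string | null) => {
  const match = header?.match(/^bytes=(\d*)-(\d*)$/);
  if (!match || (!match[1] && !match[2])) return undefined;
  return {
    start: match[1] ? parseInt(match[1], 10) : undefined,
    end: match[2] ? parseInt(match[2], 10) : undefined,
  };
};
```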