From 4b783a36e92efa9733fb36f1d0f7acfbb26311e0 Mon Sep 17 00:00:00 2001
From: static
Date: Sun, 11 Jan 2026 04:45:21 +0900
Subject: [PATCH 1/8] Change the file upload method to chunked uploads
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .dockerignore                                 |   1 +
 .env.example                                  |   1 +
 .gitignore                                    |   1 +
 docker-compose.yaml                           |   1 +
 src/hooks.server.ts                           |   2 +
 src/lib/constants/index.ts                    |   1 +
 src/lib/constants/upload.ts                   |   5 +
 src/lib/modules/crypto/aes.ts                 |  26 ++-
 src/lib/modules/file/download.svelte.ts       |  34 ++--
 src/lib/modules/file/upload.svelte.ts         | 147 +++++++++++------
 src/lib/modules/filesystem/file.ts            |   2 +
 src/lib/modules/filesystem/types.ts           |   1 +
 src/lib/server/db/file.ts                     |  98 ++++--------
 src/lib/server/db/index.ts                    |   1 +
 .../migrations/1768062380-AddChunkedUpload.ts |  50 ++++++
 src/lib/server/db/migrations/index.ts         |   2 +
 src/lib/server/db/schema/file.ts              |   2 +-
 src/lib/server/db/schema/index.ts             |   1 +
 src/lib/server/db/schema/upload.ts            |  26 +++
 src/lib/server/db/upload.ts                   | 122 +++++++++++++++
 src/lib/server/loadenv.ts                     |   1 +
 src/lib/server/modules/filesystem.ts          |   3 +
 src/lib/server/schemas/file.ts                |  29 ----
 src/lib/server/services/file.ts               | 107 ++++++++-----
 src/lib/services/file.ts                      |  13 +-
 .../(fullscreen)/file/[id]/+page.svelte       |   2 +-
 .../settings/thumbnail/service.ts             |   4 +-
 src/routes/api/file/upload/+server.ts         | 108 -------------
 .../upload/[id]/chunks/[index]/+server.ts     |  43 +++++
 src/trpc/routers/file.ts                      | 148 +++++++++++++++++-
 30 files changed, 667 insertions(+), 315 deletions(-)
 create mode 100644 src/lib/constants/index.ts
 create mode 100644 src/lib/constants/upload.ts
 create mode 100644 src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts
 create mode 100644 src/lib/server/db/schema/upload.ts
 create mode 100644 src/lib/server/db/upload.ts
 delete mode 100644 src/routes/api/file/upload/+server.ts
 create mode 100644 src/routes/api/file/upload/[id]/chunks/[index]/+server.ts

diff --git a/.dockerignore b/.dockerignore
index 4f68a3b..6d312ec 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -12,6 +12,7 @@ node_modules
 /data
 /library
 /thumbnails
+/uploads
 
 # OS
 .DS_Store
diff --git a/.env.example b/.env.example
index e3b6365..4e8b20b 100644
--- a/.env.example
+++ b/.env.example
@@ -12,3 +12,4 @@ USER_CLIENT_CHALLENGE_EXPIRES=
 SESSION_UPGRADE_CHALLENGE_EXPIRES=
 LIBRARY_PATH=
 THUMBNAILS_PATH=
+UPLOADS_PATH=
diff --git a/.gitignore b/.gitignore
index 5078fa8..a200c74 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,6 +10,7 @@ node_modules
 /data
 /library
 /thumbnails
+/uploads
 
 # OS
 .DS_Store
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 2015066..a624d9f 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -20,6 +20,7 @@ services:
       - SESSION_UPGRADE_CHALLENGE_EXPIRES
       - LIBRARY_PATH=/app/data/library
       - THUMBNAILS_PATH=/app/data/thumbnails
+      - UPLOADS_PATH=/app/data/uploads
       # SvelteKit
       - ADDRESS_HEADER=${TRUST_PROXY:+X-Forwarded-For}
       - XFF_DEPTH=${TRUST_PROXY:-}
diff --git a/src/hooks.server.ts b/src/hooks.server.ts
index 6f94a7e..1795ce3 100644
--- a/src/hooks.server.ts
+++ b/src/hooks.server.ts
@@ -7,6 +7,7 @@ import {
   cleanupExpiredSessions,
   cleanupExpiredSessionUpgradeChallenges,
 } from "$lib/server/db/session";
+import { cleanupExpiredUploadSessions } from "$lib/server/db/upload";
 import { authenticate, setAgentInfo } from "$lib/server/middlewares";
 
 export const init: ServerInit = async () => {
@@ -16,6 +17,7 @@ export const init: ServerInit = async () => {
     cleanupExpiredUserClientChallenges();
     cleanupExpiredSessions();
     cleanupExpiredSessionUpgradeChallenges();
+    cleanupExpiredUploadSessions();
   });
 };
diff --git a/src/lib/constants/index.ts b/src/lib/constants/index.ts
new file mode 100644
index 0000000..ab6125a
--- /dev/null
+++ b/src/lib/constants/index.ts
@@ -0,0 +1 @@
+export * from "./upload";
diff --git a/src/lib/constants/upload.ts b/src/lib/constants/upload.ts
new file mode 100644
index 0000000..337700d
--- /dev/null
+++ b/src/lib/constants/upload.ts
@@ -0,0 +1,5 @@
+export const CHUNK_SIZE = 4 * 1024 * 1024;
+
+export const AES_GCM_IV_SIZE = 12;
+export const AES_GCM_TAG_SIZE = 16;
+export const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE;
diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts
index c911d26..67f6a9f 100644
--- a/src/lib/modules/crypto/aes.ts
+++ b/src/lib/modules/crypto/aes.ts
@@ -1,4 +1,11 @@
-import { encodeString, decodeString, encodeToBase64, decodeFromBase64 } from "./util";
+import { AES_GCM_IV_SIZE } from "$lib/constants";
+import {
+  encodeString,
+  decodeString,
+  encodeToBase64,
+  decodeFromBase64,
+  concatenateBuffers,
+} from "./util";
 
 export const generateMasterKey = async () => {
   return {
@@ -86,7 +93,7 @@ export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => {
     dataKey,
     data,
   );
-  return { ciphertext, iv: encodeToBase64(iv.buffer) };
+  return { ciphertext, iv: iv.buffer };
 };
 
 export const decryptData = async (
@@ -106,9 +113,22 @@ export const decryptData = async (
 
 export const encryptString = async (plaintext: string, dataKey: CryptoKey) => {
   const { ciphertext, iv } = await encryptData(encodeString(plaintext), dataKey);
-  return { ciphertext: encodeToBase64(ciphertext), iv };
+  return { ciphertext: encodeToBase64(ciphertext), iv: encodeToBase64(iv) };
 };
 
 export const decryptString = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
   return decodeString(await decryptData(decodeFromBase64(ciphertext), iv, dataKey));
 };
+
+export const encryptChunk = async (chunk: ArrayBuffer, dataKey: CryptoKey) => {
+  const { ciphertext, iv } = await encryptData(chunk, dataKey);
+  return concatenateBuffers(iv, ciphertext).buffer;
+};
+
+export const decryptChunk = async (encryptedChunk: ArrayBuffer, dataKey: CryptoKey) => {
+  return await decryptData(
+    encryptedChunk.slice(AES_GCM_IV_SIZE),
+    encryptedChunk.slice(0, AES_GCM_IV_SIZE),
+    dataKey,
+  );
+};
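Note on the chunk format introduced above: every encrypted chunk is framed as [12-byte IV || ciphertext || 16-byte GCM tag], so a chunk costs exactly ENCRYPTION_OVERHEAD = 28 bytes on top of its plaintext. A minimal round-trip sketch of that framing using only WebCrypto (names here are illustrative; encryptChunk/decryptChunk above do the same via the project's util helpers):

    // Frame layout: IV (12 B) || ciphertext+tag (plaintext + 16 B)
    const IV_SIZE = 12;

    const sealChunk = async (plain: ArrayBuffer, key: CryptoKey) => {
      const iv = crypto.getRandomValues(new Uint8Array(IV_SIZE));
      const ct = await crypto.subtle.encrypt({ name: "AES-GCM", iv }, key, plain);
      const frame = new Uint8Array(IV_SIZE + ct.byteLength);
      frame.set(iv, 0);
      frame.set(new Uint8Array(ct), IV_SIZE);
      return frame.buffer;
    };

    const openChunk = (frame: ArrayBuffer, key: CryptoKey) =>
      crypto.subtle.decrypt(
        { name: "AES-GCM", iv: frame.slice(0, IV_SIZE) },
        key,
        frame.slice(IV_SIZE),
      );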
diff --git a/src/lib/modules/file/download.svelte.ts b/src/lib/modules/file/download.svelte.ts
index 97f42ea..d438e3f 100644
--- a/src/lib/modules/file/download.svelte.ts
+++ b/src/lib/modules/file/download.svelte.ts
@@ -1,6 +1,7 @@
 import axios from "axios";
 import { limitFunction } from "p-limit";
-import { decryptData } from "$lib/modules/crypto";
+import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants";
+import { decryptChunk, concatenateBuffers } from "$lib/modules/crypto";
 
 export interface FileDownloadState {
   id: number;
@@ -62,15 +63,24 @@ const requestFileDownload = limitFunction(
 );
 
 const decryptFile = limitFunction(
-  async (state: FileDownloadState, fileEncrypted: ArrayBuffer, dataKey: CryptoKey) => {
+  async (
+    state: FileDownloadState,
+    fileEncrypted: ArrayBuffer,
+    encryptedChunkSize: number,
+    dataKey: CryptoKey,
+  ) => {
     state.status = "decrypting";
 
-    const fileBuffer = await decryptData(
-      fileEncrypted.slice(12),
-      fileEncrypted.slice(0, 12),
-      dataKey,
-    );
+    const chunks: ArrayBuffer[] = [];
+    let offset = 0;
+    while (offset < fileEncrypted.byteLength) {
+      const nextOffset = Math.min(offset + encryptedChunkSize, fileEncrypted.byteLength);
+      chunks.push(await decryptChunk(fileEncrypted.slice(offset, nextOffset), dataKey));
+      offset = nextOffset;
+    }
+
+    const fileBuffer = concatenateBuffers(...chunks).buffer;
     state.status = "decrypted";
     state.result = fileBuffer;
     return fileBuffer;
@@ -78,7 +88,7 @@ const decryptFile = limitFunction(
   { concurrency: 4 },
 );
 
-export const downloadFile = async (id: number, dataKey: CryptoKey) => {
+export const downloadFile = async (id: number, dataKey: CryptoKey, isLegacy: boolean) => {
   downloadingFiles.push({
     id,
     status: "download-pending",
@@ -86,7 +96,13 @@ export const downloadFile = async (id: number, dataKey: CryptoKey) => {
   const state = downloadingFiles.at(-1)!;
 
   try {
-    return await decryptFile(state, await requestFileDownload(state, id), dataKey);
+    const fileEncrypted = await requestFileDownload(state, id);
+    return await decryptFile(
+      state,
+      fileEncrypted,
+      isLegacy ? fileEncrypted.byteLength : CHUNK_SIZE + ENCRYPTION_OVERHEAD,
+      dataKey,
+    );
   } catch (e) {
     state.status = "error";
     throw e;
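The decrypt loop above relies on one invariant: a legacy file is a single AES-GCM message (one slice spanning the whole body), while a chunked file is a sequence of slices of exactly CHUNK_SIZE + ENCRYPTION_OVERHEAD bytes, except for a shorter final slice. A small sketch of the boundary arithmetic (the helper name is ad hoc):

    const CHUNK_SIZE = 4 * 1024 * 1024;
    const ENCRYPTION_OVERHEAD = 12 + 16; // IV + GCM tag

    // e.g. a 9 MiB file encrypts to slices of 4 MiB + 28 B, 4 MiB + 28 B, 1 MiB + 28 B
    const sliceBounds = (encryptedLength: number, encryptedChunkSize: number) => {
      const bounds: [number, number][] = [];
      for (let start = 0; start < encryptedLength; start += encryptedChunkSize) {
        bounds.push([start, Math.min(start + encryptedChunkSize, encryptedLength)]);
      }
      return bounds;
    };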
diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts
index a632eb5..2bb6c7c 100644
--- a/src/lib/modules/file/upload.svelte.ts
+++ b/src/lib/modules/file/upload.svelte.ts
@@ -1,24 +1,23 @@
 import axios from "axios";
 import ExifReader from "exifreader";
 import { limitFunction } from "p-limit";
+import { CHUNK_SIZE } from "$lib/constants";
 import {
   encodeToBase64,
   generateDataKey,
   wrapDataKey,
   encryptData,
   encryptString,
+  encryptChunk,
   digestMessage,
   signMessageHmac,
 } from "$lib/modules/crypto";
 import { Scheduler } from "$lib/modules/scheduler";
 import { generateThumbnail } from "$lib/modules/thumbnail";
-import type {
-  FileThumbnailUploadRequest,
-  FileUploadRequest,
-  FileUploadResponse,
-} from "$lib/server/schemas";
+import type { FileThumbnailUploadRequest } from "$lib/server/schemas";
 import type { MasterKey, HmacSecret } from "$lib/stores";
 import { trpc } from "$trpc/client";
+import type { RouterInputs } from "$trpc/router.server";
 
 export interface FileUploadState {
   name: string;
@@ -110,6 +109,23 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
   return new Date(utcDate - offsetMs);
 };
 
+const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
+  const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = [];
+  let offset = 0;
+
+  while (offset < fileBuffer.byteLength) {
+    const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength);
+    const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey);
+    chunksEncrypted.push({
+      chunkEncrypted: chunkEncrypted,
+      chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)),
+    });
+    offset = nextOffset;
+  }
+
+  return chunksEncrypted;
+};
+
 const encryptFile = limitFunction(
   async (state: FileUploadState, file: File, fileBuffer: ArrayBuffer, masterKey: MasterKey) => {
     state.status = "encrypting";
@@ -123,9 +139,7 @@ const encryptFile = limitFunction(
 
     const { dataKey, dataKeyVersion } = await generateDataKey();
     const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
-
-    const fileEncrypted = await encryptData(fileBuffer, dataKey);
-    const fileEncryptedHash = encodeToBase64(await digestMessage(fileEncrypted.ciphertext));
+    const chunksEncrypted = await encryptChunks(fileBuffer, dataKey);
 
     const nameEncrypted = await encryptString(file.name, dataKey);
     const createdAtEncrypted =
@@ -142,8 +156,7 @@ const encryptFile = limitFunction(
       dataKeyWrapped,
       dataKeyVersion,
       fileType,
-      fileEncrypted,
-      fileEncryptedHash,
+      chunksEncrypted,
       nameEncrypted,
       createdAtEncrypted,
       lastModifiedAtEncrypted,
@@ -154,30 +167,70 @@ const encryptFile = limitFunction(
 );
 
 const requestFileUpload = limitFunction(
-  async (state: FileUploadState, form: FormData, thumbnailForm: FormData | null) => {
+  async (
+    state: FileUploadState,
+    metadata: RouterInputs["file"]["startUpload"],
+    chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[],
+    fileSigned: string | undefined,
+    thumbnailForm: FormData | null,
+  ) => {
     state.status = "uploading";
 
-    const res = await axios.post("/api/file/upload", form, {
-      onUploadProgress: ({ progress, rate, estimated }) => {
-        state.progress = progress;
-        state.rate = rate;
-        state.estimated = estimated;
-      },
-    });
-    const { file }: FileUploadResponse = res.data;
+    const { uploadId } = await trpc().file.startUpload.mutate(metadata);
 
+    // Upload chunks with progress tracking
+    const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0);
+    let uploadedBytes = 0;
+    const startTime = Date.now();
+
+    for (let i = 0; i < chunksEncrypted.length; i++) {
+      const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!;
+
+      const response = await fetch(`/api/file/upload/${uploadId}/chunks/${i}`, {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/octet-stream",
+          "Content-Digest": `sha-256=:${chunkEncryptedHash}:`,
+        },
+        body: chunkEncrypted,
+      });
+
+      if (!response.ok) {
+        throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
+      }
+
+      uploadedBytes += chunkEncrypted.byteLength;
+
+      // Calculate progress, rate, estimated
+      const elapsed = (Date.now() - startTime) / 1000; // seconds
+      const rate = uploadedBytes / elapsed; // bytes per second
+      const remaining = totalBytes - uploadedBytes;
+      const estimated = rate > 0 ? remaining / rate : undefined;
+
+      state.progress = uploadedBytes / totalBytes;
+      state.rate = rate;
+      state.estimated = estimated;
+    }
+
+    // Complete upload
+    const { file: fileId } = await trpc().file.completeUpload.mutate({
+      uploadId,
+      contentHmac: fileSigned,
+    });
+
+    // Upload thumbnail if exists
     if (thumbnailForm) {
       try {
-        await axios.post(`/api/file/${file}/thumbnail/upload`, thumbnailForm);
+        await axios.post(`/api/file/${fileId}/thumbnail/upload`, thumbnailForm);
       } catch (e) {
-        // TODO
+        // TODO: Error handling for thumbnail upload
        console.error(e);
       }
     }
 
     state.status = "uploaded";
-    return { fileId: file };
+    return { fileId };
   },
   { concurrency: 1 },
 );
@@ -215,36 +268,28 @@ export const uploadFile = async (
     dataKeyWrapped,
     dataKeyVersion,
     fileType,
-    fileEncrypted,
-    fileEncryptedHash,
+    chunksEncrypted,
     nameEncrypted,
     createdAtEncrypted,
     lastModifiedAtEncrypted,
     thumbnail,
   } = await encryptFile(state, file, fileBuffer, masterKey);
 
-  const form = new FormData();
-  form.set(
-    "metadata",
-    JSON.stringify({
-      parent: parentId,
-      mekVersion: masterKey.version,
-      dek: dataKeyWrapped,
-      dekVersion: dataKeyVersion.toISOString(),
-      hskVersion: hmacSecret.version,
-      contentHmac: fileSigned,
-      contentType: fileType,
-      contentIv: fileEncrypted.iv,
-      name: nameEncrypted.ciphertext,
-      nameIv: nameEncrypted.iv,
-      createdAt: createdAtEncrypted?.ciphertext,
-      createdAtIv: createdAtEncrypted?.iv,
-      lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
-      lastModifiedAtIv: lastModifiedAtEncrypted.iv,
-    } satisfies FileUploadRequest),
-  );
-  form.set("content", new Blob([fileEncrypted.ciphertext]));
-  form.set("checksum", fileEncryptedHash);
+  const metadata = {
+    chunks: chunksEncrypted.length,
+    parent: parentId,
+    mekVersion: masterKey.version,
+    dek: dataKeyWrapped,
+    dekVersion: dataKeyVersion,
+    hskVersion: hmacSecret.version,
+    contentType: fileType,
+    name: nameEncrypted.ciphertext,
+    nameIv: nameEncrypted.iv,
+    createdAt: createdAtEncrypted?.ciphertext,
+    createdAtIv: createdAtEncrypted?.iv,
+    lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
+    lastModifiedAtIv: lastModifiedAtEncrypted.iv,
+  };
 
   let thumbnailForm = null;
   if (thumbnail) {
@@ -253,13 +298,19 @@ export const uploadFile = async (
       "metadata",
       JSON.stringify({
         dekVersion: dataKeyVersion.toISOString(),
-        contentIv: thumbnail.iv,
+        contentIv: encodeToBase64(thumbnail.iv),
       } satisfies FileThumbnailUploadRequest),
     );
     thumbnailForm.set("content", new Blob([thumbnail.ciphertext]));
   }
 
-  const { fileId } = await requestFileUpload(state, form, thumbnailForm);
+  const { fileId } = await requestFileUpload(
+    state,
+    metadata,
+    chunksEncrypted,
+    fileSigned,
+    thumbnailForm,
+  );
   return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext };
 } catch (e) {
   state.status = "error";
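The chunk requests above carry each chunk's SHA-256 in an RFC 9530 Content-Digest header, with the Base64 value wrapped in colons. A sketch of producing that header value, plus the inverse parse the server performs (see the regex in the new chunks endpoint later in this patch); both helper names are illustrative:

    const contentDigest = async (body: ArrayBuffer) => {
      const digest = await crypto.subtle.digest("SHA-256", body);
      const base64 = btoa(String.fromCharCode(...new Uint8Array(digest)));
      return `sha-256=:${base64}:`; // e.g. sha-256=:RBNvo1WzZ...kRgB+c=:
    };

    const parseContentDigest = (header: string) =>
      header.match(/^sha-256=:([A-Za-z0-9+/=]+):$/)?.[1] ?? null;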
diff --git a/src/lib/modules/filesystem/file.ts b/src/lib/modules/filesystem/file.ts
index daf7fd6..d80a872 100644
--- a/src/lib/modules/filesystem/file.ts
+++ b/src/lib/modules/filesystem/file.ts
@@ -47,6 +47,7 @@ const cache = new FilesystemCache({
 
     return storeToIndexedDB({
       id,
+      isLegacy: file.isLegacy,
       parentId: file.parent,
       dataKey: metadata.dataKey,
       contentType: file.contentType,
@@ -115,6 +116,7 @@ const cache = new FilesystemCache({
     return {
       id,
       exists: true as const,
+      isLegacy: metadataRaw.isLegacy,
       parentId: metadataRaw.parent,
       contentType: metadataRaw.contentType,
       categories,
diff --git a/src/lib/modules/filesystem/types.ts b/src/lib/modules/filesystem/types.ts
index abac40c..f4ce9cf 100644
--- a/src/lib/modules/filesystem/types.ts
+++ b/src/lib/modules/filesystem/types.ts
@@ -28,6 +28,7 @@ export type SubDirectoryInfo = Omit;
diff --git a/src/lib/server/db/file.ts b/src/lib/server/db/file.ts
--- a/src/lib/server/db/file.ts
+++ b/src/lib/server/db/file.ts
-
 interface File {
   id: number;
   parentId: DirectoryId;
@@ -28,15 +26,13 @@ interface File {
   hskVersion: number | null;
   contentHmac: string | null;
   contentType: string;
-  encContentIv: string;
+  encContentIv: string | null;
   encContentHash: string;
   encName: Ciphertext;
   encCreatedAt: Ciphertext | null;
   encLastModifiedAt: Ciphertext;
 }
 
-export type NewFile = Omit;
-
 interface FileCategory {
   id: number;
   parentId: CategoryId;
@@ -46,7 +42,7 @@
   encName: Ciphertext;
 }
 
-export const registerDirectory = async (params: NewDirectory) => {
+export const registerDirectory = async (params: Omit) => {
   await db.transaction().execute(async (trx) => {
     const mek = await trx
       .selectFrom("master_encryption_key")
       .select("version")
@@ -214,69 +210,41 @@ export const unregisterDirectory = async (userId: number, directoryId: number) =
   });
 };
 
-export const registerFile = async (params: NewFile) => {
+export const registerFile = async (trx: typeof db, params: Omit) => {
   if ((params.hskVersion && !params.contentHmac) || (!params.hskVersion && params.contentHmac)) {
     throw new Error("Invalid arguments");
   }
 
-  return await db.transaction().execute(async (trx) => {
-    const mek = await trx
-      .selectFrom("master_encryption_key")
-      .select("version")
-      .where("user_id", "=", params.userId)
-      .where("state", "=", "active")
-      .limit(1)
-      .forUpdate()
-      .executeTakeFirst();
-    if (mek?.version !== params.mekVersion) {
-      throw new IntegrityError("Inactive MEK version");
-    }
-
-    if (params.hskVersion) {
-      const hsk = await trx
-        .selectFrom("hmac_secret_key")
-        .select("version")
-        .where("user_id", "=", params.userId)
-        .where("state", "=", "active")
-        .limit(1)
-        .forUpdate()
-        .executeTakeFirst();
-      if (hsk?.version !== params.hskVersion) {
-        throw new IntegrityError("Inactive HSK version");
-      }
-    }
-
-    const { fileId } = await trx
-      .insertInto("file")
-      .values({
-        parent_id: params.parentId !== "root" ? params.parentId : null,
-        user_id: params.userId,
-        path: params.path,
-        master_encryption_key_version: params.mekVersion,
-        encrypted_data_encryption_key: params.encDek,
-        data_encryption_key_version: params.dekVersion,
-        hmac_secret_key_version: params.hskVersion,
-        content_hmac: params.contentHmac,
-        content_type: params.contentType,
-        encrypted_content_iv: params.encContentIv,
-        encrypted_content_hash: params.encContentHash,
-        encrypted_name: params.encName,
-        encrypted_created_at: params.encCreatedAt,
-        encrypted_last_modified_at: params.encLastModifiedAt,
-      })
-      .returning("id as fileId")
-      .executeTakeFirstOrThrow();
-    await trx
-      .insertInto("file_log")
-      .values({
-        file_id: fileId,
-        timestamp: new Date(),
-        action: "create",
-        new_name: params.encName,
-      })
-      .execute();
-    return { id: fileId };
-  });
+  const { fileId } = await trx
+    .insertInto("file")
+    .values({
+      parent_id: params.parentId !== "root" ? params.parentId : null,
+      user_id: params.userId,
+      path: params.path,
+      master_encryption_key_version: params.mekVersion,
+      encrypted_data_encryption_key: params.encDek,
+      data_encryption_key_version: params.dekVersion,
+      hmac_secret_key_version: params.hskVersion,
+      content_hmac: params.contentHmac,
+      content_type: params.contentType,
+      encrypted_content_iv: params.encContentIv,
+      encrypted_content_hash: params.encContentHash,
+      encrypted_name: params.encName,
+      encrypted_created_at: params.encCreatedAt,
+      encrypted_last_modified_at: params.encLastModifiedAt,
+    })
+    .returning("id as fileId")
+    .executeTakeFirstOrThrow();
+  await trx
+    .insertInto("file_log")
+    .values({
+      file_id: fileId,
+      timestamp: new Date(),
+      action: "create",
+      new_name: params.encName,
+    })
+    .execute();
+  return { id: fileId };
 };
 
 export const getAllFilesByParent = async (userId: number, parentId: DirectoryId) => {
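registerFile now takes the caller's transaction handle instead of opening its own, so file registration can be composed with other writes atomically; the new completeUpload procedure (later in this patch) uses this to insert the file row and delete the upload session in one transaction. A sketch of the composition pattern, with the parameter type simplified:

    // Both repository calls share one Kysely transaction: either both commit or neither does.
    const registerAndCleanUp = (sessionId: string, params: Parameters<typeof registerFile>[1]) =>
      db.transaction().execute(async (trx) => {
        const { id } = await registerFile(trx, params); // file + file_log rows
        await deleteUploadSession(trx, sessionId);
        return id;
      });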
diff --git a/src/lib/server/db/index.ts b/src/lib/server/db/index.ts
index 5c21deb..140cf7d 100644
--- a/src/lib/server/db/index.ts
+++ b/src/lib/server/db/index.ts
@@ -5,6 +5,7 @@ export * as HskRepo from "./hsk";
 export * as MediaRepo from "./media";
 export * as MekRepo from "./mek";
 export * as SessionRepo from "./session";
+export * as UploadRepo from "./upload";
 export * as UserRepo from "./user";
 
 export * from "./error";
diff --git a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts
new file mode 100644
index 0000000..fe8abd4
--- /dev/null
+++ b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts
@@ -0,0 +1,50 @@
+import { Kysely, sql } from "kysely";
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export const up = async (db: Kysely<any>) => {
+  // file.ts
+  await db.schema
+    .alterTable("file")
+    .alterColumn("encrypted_content_iv", (col) => col.dropNotNull())
+    .execute();
+
+  // upload.ts
+  await db.schema
+    .createTable("upload_session")
+    .addColumn("id", "uuid", (col) => col.primaryKey().defaultTo(sql`gen_random_uuid()`))
+    .addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
+    .addColumn("total_chunks", "integer", (col) => col.notNull())
+    .addColumn("uploaded_chunks", sql`integer[]`, (col) => col.notNull().defaultTo(sql`'{}'`))
+    .addColumn("expires_at", "timestamp(3)", (col) => col.notNull())
+    .addColumn("parent_id", "integer", (col) => col.references("directory.id"))
+    .addColumn("master_encryption_key_version", "integer", (col) => col.notNull())
+    .addColumn("encrypted_data_encryption_key", "text", (col) => col.notNull())
+    .addColumn("data_encryption_key_version", "timestamp(3)", (col) => col.notNull())
+    .addColumn("hmac_secret_key_version", "integer")
+    .addColumn("content_type", "text", (col) => col.notNull())
+    .addColumn("encrypted_name", "json", (col) => col.notNull())
+    .addColumn("encrypted_created_at", "json")
+    .addColumn("encrypted_last_modified_at", "json", (col) => col.notNull())
+    .addForeignKeyConstraint(
+      "upload_session_fk01",
+      ["user_id", "master_encryption_key_version"],
+      "master_encryption_key",
+      ["user_id", "version"],
+    )
+    .addForeignKeyConstraint(
+      "upload_session_fk02",
+      ["user_id", "hmac_secret_key_version"],
+      "hmac_secret_key",
+      ["user_id", "version"],
+    )
+    .execute();
+};
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export const down = async (db: Kysely<any>) => {
+  await db.schema.dropTable("upload_session").execute();
+  await db.schema
+    .alterTable("file")
+    .alterColumn("encrypted_content_iv", (col) => col.setNotNull())
+    .execute();
+};
diff --git a/src/lib/server/db/migrations/index.ts b/src/lib/server/db/migrations/index.ts
index f58c2d0..ca3310a 100644
--- a/src/lib/server/db/migrations/index.ts
+++ b/src/lib/server/db/migrations/index.ts
@@ -1,9 +1,11 @@
 import * as Initial1737357000 from "./1737357000-Initial";
 import * as AddFileCategory1737422340 from "./1737422340-AddFileCategory";
 import * as AddThumbnail1738409340 from "./1738409340-AddThumbnail";
+import * as AddChunkedUpload1768062380 from "./1768062380-AddChunkedUpload";
 
 export default {
   "1737357000-Initial": Initial1737357000,
   "1737422340-AddFileCategory": AddFileCategory1737422340,
   "1738409340-AddThumbnail": AddThumbnail1738409340,
+  "1768062380-AddChunkedUpload": AddChunkedUpload1768062380,
 };
diff --git a/src/lib/server/db/schema/file.ts b/src/lib/server/db/schema/file.ts
index a1bf9bd..663aacd 100644
--- a/src/lib/server/db/schema/file.ts
+++ b/src/lib/server/db/schema/file.ts
@@ -30,7 +30,7 @@ interface FileTable {
   hmac_secret_key_version: number | null;
   content_hmac: string | null; // Base64
   content_type: string;
-  encrypted_content_iv: string; // Base64
+  encrypted_content_iv: string | null; // Base64
   encrypted_content_hash: string; // Base64
   encrypted_name: Ciphertext;
   encrypted_created_at: Ciphertext | null;
diff --git a/src/lib/server/db/schema/index.ts b/src/lib/server/db/schema/index.ts
index 4e427fb..dcc340b 100644
--- a/src/lib/server/db/schema/index.ts
+++ b/src/lib/server/db/schema/index.ts
@@ -5,6 +5,7 @@ export * from "./hsk";
 export * from "./media";
 export * from "./mek";
 export * from "./session";
+export * from "./upload";
 export * from "./user";
 
 export * from "./util";
diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts
new file mode 100644
index 0000000..3372955
--- /dev/null
+++ b/src/lib/server/db/schema/upload.ts
@@ -0,0 +1,26 @@
+import type { Generated } from "kysely";
+import type { Ciphertext } from "./util";
+
+interface UploadSessionTable {
+  id: Generated<string>;
+  user_id: number;
+  total_chunks: number;
+  uploaded_chunks: Generated<number[]>;
+  expires_at: Date;
+
+  parent_id: number | null;
+  master_encryption_key_version: number;
+  encrypted_data_encryption_key: string; // Base64
+  data_encryption_key_version: Date;
+  hmac_secret_key_version: number | null;
+  content_type: string;
+  encrypted_name: Ciphertext;
+  encrypted_created_at: Ciphertext | null;
+  encrypted_last_modified_at: Ciphertext;
+}
+
+declare module "./index" {
+  interface Database {
+    upload_session: UploadSessionTable;
+  }
+}
diff --git a/src/lib/server/db/upload.ts b/src/lib/server/db/upload.ts
new file mode 100644
index 0000000..935dc80
--- /dev/null
+++ b/src/lib/server/db/upload.ts
@@ -0,0 +1,122 @@
+import { sql } from "kysely";
+import { IntegrityError } from "./error";
+import db from "./kysely";
+import type { Ciphertext } from "./schema";
+
+interface UploadSession {
+  id: string;
+  userId: number;
+  totalChunks: number;
+  uploadedChunks: number[];
+  expiresAt: Date;
+
+  parentId: DirectoryId;
+  mekVersion: number;
+  encDek: string;
+  dekVersion: Date;
+  hskVersion: number | null;
+  contentType: string;
+  encName: Ciphertext;
+  encCreatedAt: Ciphertext | null;
+  encLastModifiedAt: Ciphertext;
+}
+
+export const createUploadSession = async (params: Omit) => {
+  return await db.transaction().execute(async (trx) => {
+    const mek = await trx
+      .selectFrom("master_encryption_key")
+      .select("version")
+      .where("user_id", "=", params.userId)
+      .where("state", "=", "active")
+      .limit(1)
+      .forUpdate()
+      .executeTakeFirst();
+    if (mek?.version !== params.mekVersion) {
+      throw new IntegrityError("Inactive MEK version");
+    }
+
+    if (params.hskVersion) {
+      const hsk = await trx
+        .selectFrom("hmac_secret_key")
+        .select("version")
+        .where("user_id", "=", params.userId)
+        .where("state", "=", "active")
+        .limit(1)
+        .forUpdate()
+        .executeTakeFirst();
+      if (hsk?.version !== params.hskVersion) {
+        throw new IntegrityError("Inactive HSK version");
+      }
+    }
+
+    const { sessionId } = await trx
+      .insertInto("upload_session")
+      .values({
+        user_id: params.userId,
+        total_chunks: params.totalChunks,
+        expires_at: params.expiresAt,
+        parent_id: params.parentId !== "root" ? params.parentId : null,
+        master_encryption_key_version: params.mekVersion,
+        encrypted_data_encryption_key: params.encDek,
+        data_encryption_key_version: params.dekVersion,
+        hmac_secret_key_version: params.hskVersion,
+        content_type: params.contentType,
+        encrypted_name: params.encName,
+        encrypted_created_at: params.encCreatedAt,
+        encrypted_last_modified_at: params.encLastModifiedAt,
+      })
+      .returning("id as sessionId")
+      .executeTakeFirstOrThrow();
+    return { id: sessionId };
+  });
+};
+
+export const getUploadSession = async (sessionId: string, userId: number) => {
+  const session = await db
+    .selectFrom("upload_session")
+    .selectAll()
+    .where("id", "=", sessionId)
+    .where("user_id", "=", userId)
+    .where("expires_at", ">", new Date())
+    .limit(1)
+    .executeTakeFirst();
+  return session
+    ? ({
+        id: session.id,
+        userId: session.user_id,
+        totalChunks: session.total_chunks,
+        uploadedChunks: session.uploaded_chunks,
+        expiresAt: session.expires_at,
+        parentId: session.parent_id ?? "root",
+        mekVersion: session.master_encryption_key_version,
+        encDek: session.encrypted_data_encryption_key,
+        dekVersion: session.data_encryption_key_version,
+        hskVersion: session.hmac_secret_key_version,
+        contentType: session.content_type,
+        encName: session.encrypted_name,
+        encCreatedAt: session.encrypted_created_at,
+        encLastModifiedAt: session.encrypted_last_modified_at,
+      } satisfies UploadSession)
+    : null;
+};
+
+export const markChunkAsUploaded = async (sessionId: string, chunkIndex: number) => {
+  await db
+    .updateTable("upload_session")
+    .set({ uploaded_chunks: sql`array_append(uploaded_chunks, ${chunkIndex})` })
+    .where("id", "=", sessionId)
+    .execute();
+};
+
+export const deleteUploadSession = async (trx: typeof db, sessionId: string) => {
+  await trx.deleteFrom("upload_session").where("id", "=", sessionId).execute();
+};
+
+export const cleanupExpiredUploadSessions = async () => {
+  const sessions = await db
+    .deleteFrom("upload_session")
+    .where("expires_at", "<", new Date())
+    .returning("id")
+    .execute();
+  return sessions.map(({ id }) => id);
+};
diff --git a/src/lib/server/loadenv.ts b/src/lib/server/loadenv.ts
index 3a805d8..f8fd68f 100644
--- a/src/lib/server/loadenv.ts
+++ b/src/lib/server/loadenv.ts
@@ -26,4 +26,5 @@ export default {
   },
   libraryPath: env.LIBRARY_PATH || "library",
   thumbnailsPath: env.THUMBNAILS_PATH || "thumbnails",
+  uploadsPath: env.UPLOADS_PATH || "uploads",
 };
diff --git a/src/lib/server/modules/filesystem.ts b/src/lib/server/modules/filesystem.ts
index 65cb9ec..b87fd65 100644
--- a/src/lib/server/modules/filesystem.ts
+++ b/src/lib/server/modules/filesystem.ts
@@ -1,4 +1,7 @@
 import { unlink } from "fs/promises";
+import env from "$lib/server/loadenv";
+
+export const getChunkDirectoryPath = (sessionId: string) => `${env.uploadsPath}/${sessionId}`;
 
 export const safeUnlink = async (path: string | null | undefined) => {
   if (path) {
diff --git a/src/lib/server/schemas/file.ts b/src/lib/server/schemas/file.ts
index 811e590..8ba14e7 100644
--- a/src/lib/server/schemas/file.ts
+++ b/src/lib/server/schemas/file.ts
@@ -1,36 +1,7 @@
-import mime from "mime";
 import { z } from "zod";
-import { directoryIdSchema } from "./directory";
 
 export const fileThumbnailUploadRequest = z.object({
   dekVersion: z.iso.datetime(),
   contentIv: z.base64().nonempty(),
 });
 export type FileThumbnailUploadRequest = z.input<typeof fileThumbnailUploadRequest>;
-
-export const fileUploadRequest = z.object({
-  parent: directoryIdSchema,
-  mekVersion: z.int().positive(),
-  dek: z.base64().nonempty(),
-  dekVersion: z.iso.datetime(),
-  hskVersion: z.int().positive(),
-  contentHmac: z.base64().nonempty(),
-  contentType: z
-    .string()
-    .trim()
-    .nonempty()
-    .refine((value) => mime.getExtension(value) !== null), // MIME type
-  contentIv: z.base64().nonempty(),
-  name: z.base64().nonempty(),
-  nameIv: z.base64().nonempty(),
-  createdAt: z.base64().nonempty().optional(),
-  createdAtIv: z.base64().nonempty().optional(),
-  lastModifiedAt: z.base64().nonempty(),
-  lastModifiedAtIv: z.base64().nonempty(),
-});
-export type FileUploadRequest = z.input<typeof fileUploadRequest>;
-
-export const fileUploadResponse = z.object({
-  file: z.int().positive(),
-});
-export type FileUploadResponse = z.output<typeof fileUploadResponse>;
diff --git a/src/lib/server/services/file.ts b/src/lib/server/services/file.ts
index e45b16e..9df6430 100644
--- a/src/lib/server/services/file.ts
+++ b/src/lib/server/services/file.ts
@@ -6,17 +6,20 @@ import { dirname } from "path";
 import { Readable } from "stream";
 import { pipeline } from "stream/promises";
 import { v4 as uuidv4 } from "uuid";
-import { FileRepo, MediaRepo, IntegrityError } from "$lib/server/db";
+import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants";
+import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db";
 import env from "$lib/server/loadenv";
-import { safeUnlink } from "$lib/server/modules/filesystem";
+import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem";
+
+const uploadLocks = new Set<string>();
 
 const createEncContentStream = async (
   path: string,
-  iv: Buffer,
+  iv?: Buffer,
   range?: { start?: number; end?: number },
 ) => {
   const { size: fileSize } = await stat(path);
-  const ivSize = iv.byteLength;
+  const ivSize = iv?.byteLength ?? 0;
   const totalSize = fileSize + ivSize;
 
   const start = range?.start ?? 0;
@@ -30,7 +33,7 @@ const createEncContentStream = async (
     Readable.from(
       (async function* () {
         if (start < ivSize) {
-          yield iv.subarray(start, Math.min(end + 1, ivSize));
+          yield iv!.subarray(start, Math.min(end + 1, ivSize));
         }
         if (end >= ivSize) {
           yield* createReadStream(path, {
@@ -55,7 +58,11 @@ export const getFileStream = async (
     error(404, "Invalid file id");
   }
 
-  return createEncContentStream(file.path, Buffer.from(file.encContentIv, "base64"), range);
+  return createEncContentStream(
+    file.path,
+    file.encContentIv ? Buffer.from(file.encContentIv, "base64") : undefined,
+    range,
+  );
 };
 
 export const getFileThumbnailStream = async (
@@ -110,56 +117,70 @@ export const uploadFileThumbnail = async (
   }
 };
 
-export const uploadFile = async (
-  params: Omit,
-  encContentStream: Readable,
-  encContentHash: Promise,
+export const uploadChunk = async (
+  userId: number,
+  sessionId: string,
+  chunkIndex: number,
+  encChunkStream: Readable,
+  encChunkHash: string,
 ) => {
-  const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
-  const oneMinuteLater = new Date(Date.now() + 60 * 1000);
-  if (params.dekVersion <= oneDayAgo || params.dekVersion >= oneMinuteLater) {
-    error(400, "Invalid DEK version");
+  const lockKey = `${sessionId}/${chunkIndex}`;
+  if (uploadLocks.has(lockKey)) {
+    error(409, "Chunk already uploaded"); // TODO: Message
+  } else {
+    uploadLocks.add(lockKey);
   }
 
-  const path = `${env.libraryPath}/${params.userId}/${uuidv4()}`;
-  await mkdir(dirname(path), { recursive: true });
+  const filePath = `${getChunkDirectoryPath(sessionId)}/${chunkIndex}`;
 
   try {
-    const hashStream = createHash("sha256");
-    const [, hash] = await Promise.all([
-      pipeline(
-        encContentStream,
-        async function* (source) {
-          for await (const chunk of source) {
-            hashStream.update(chunk);
-            yield chunk;
-          }
-        },
-        createWriteStream(path, { flags: "wx", mode: 0o600 }),
-      ),
-      encContentHash,
-    ]);
-    if (hashStream.digest("base64") !== hash) {
-      throw new Error("Invalid checksum");
+    const session = await UploadRepo.getUploadSession(sessionId, userId);
+    if (!session) {
+      error(404, "Invalid upload id");
+    } else if (chunkIndex >= session.totalChunks) {
+      error(400, "Invalid chunk index");
+    } else if (session.uploadedChunks.includes(chunkIndex)) {
+      error(409, "Chunk already uploaded");
     }
 
-    const { id: fileId } = await FileRepo.registerFile({
-      ...params,
-      path,
-      encContentHash: hash,
-    });
-    return { fileId };
-  } catch (e) {
-    await safeUnlink(path);
+    const isLastChunk = chunkIndex === session.totalChunks - 1;
+
+    let writtenBytes = 0;
+    const hashStream = createHash("sha256");
+    const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
+
+    for await (const chunk of encChunkStream) {
+      writtenBytes += chunk.length;
+      hashStream.update(chunk);
+      writeStream.write(chunk);
+    }
+
+    await new Promise<void>((resolve, reject) => {
+      writeStream.end((e: any) => (e ? reject(e) : resolve()));
+    });
 
-    if (e instanceof IntegrityError && e.message === "Inactive MEK version") {
-      error(400, "Invalid MEK version");
+    if (hashStream.digest("base64") !== encChunkHash) {
+      throw new Error("Invalid checksum");
     } else if (
+      (!isLastChunk && writtenBytes !== CHUNK_SIZE + ENCRYPTION_OVERHEAD) ||
+      (isLastChunk &&
+        (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > CHUNK_SIZE + ENCRYPTION_OVERHEAD))
+    ) {
+      throw new Error("Invalid chunk size");
+    }
+
+    await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex);
+  } catch (e) {
+    await safeUnlink(filePath);
+
+    if (
       e instanceof Error &&
-      (e.message === "Invalid request body" || e.message === "Invalid checksum")
+      (e.message === "Invalid checksum" || e.message === "Invalid chunk size")
     ) {
       error(400, "Invalid request body");
     }
 
     throw e;
+  } finally {
+    uploadLocks.delete(lockKey);
   }
 };
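uploadChunk accepts a chunk only if its size matches the protocol: every non-final chunk must be exactly CHUNK_SIZE + ENCRYPTION_OVERHEAD bytes, and the final chunk must carry at least one plaintext byte but no more than a full chunk. The predicate it enforces, extracted as a standalone sketch:

    const isValidChunkSize = (writtenBytes: number, isLastChunk: boolean) =>
      isLastChunk
        ? writtenBytes > ENCRYPTION_OVERHEAD && writtenBytes <= CHUNK_SIZE + ENCRYPTION_OVERHEAD
        : writtenBytes === CHUNK_SIZE + ENCRYPTION_OVERHEAD;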
diff --git a/src/lib/services/file.ts b/src/lib/services/file.ts
index a0e769b..5f95f42 100644
--- a/src/lib/services/file.ts
+++ b/src/lib/services/file.ts
@@ -1,4 +1,5 @@
 import { getAllFileInfos } from "$lib/indexedDB/filesystem";
+import { encodeToBase64 } from "$lib/modules/crypto";
 import {
   getFileCache,
   storeFileCache,
@@ -9,11 +10,15 @@ import {
 import type { FileThumbnailUploadRequest } from "$lib/server/schemas";
 import { trpc } from "$trpc/client";
 
-export const requestFileDownload = async (fileId: number, dataKey: CryptoKey) => {
+export const requestFileDownload = async (
+  fileId: number,
+  dataKey: CryptoKey,
+  isLegacy: boolean,
+) => {
   const cache = await getFileCache(fileId);
   if (cache) return cache;
 
-  const fileBuffer = await downloadFile(fileId, dataKey);
+  const fileBuffer = await downloadFile(fileId, dataKey, isLegacy);
   storeFileCache(fileId, fileBuffer); // Intended
   return fileBuffer;
 };
@@ -21,14 +26,14 @@ export const requestFileDownload = async (fileId: number, dataKey: CryptoKey) =>
 export const requestFileThumbnailUpload = async (
   fileId: number,
   dataKeyVersion: Date,
-  thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: string },
+  thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer },
 ) => {
   const form = new FormData();
   form.set(
     "metadata",
     JSON.stringify({
       dekVersion: dataKeyVersion.toISOString(),
-      contentIv: thumbnailEncrypted.iv,
+      contentIv: encodeToBase64(thumbnailEncrypted.iv),
     } satisfies FileThumbnailUploadRequest),
   );
   form.set("content", new Blob([thumbnailEncrypted.ciphertext]));
diff --git a/src/routes/(fullscreen)/file/[id]/+page.svelte b/src/routes/(fullscreen)/file/[id]/+page.svelte
index f325c5e..4aa6b42 100644
--- a/src/routes/(fullscreen)/file/[id]/+page.svelte
+++ b/src/routes/(fullscreen)/file/[id]/+page.svelte
@@ -95,7 +95,7 @@
     untrack(() => {
       if (!downloadState && !isDownloadRequested) {
         isDownloadRequested = true;
-        requestFileDownload(data.id, info!.dataKey!.key).then(async (buffer) => {
+        requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then(async (buffer) => {
           const blob = await updateViewer(buffer, contentType);
           if (!viewerType) {
             FileSaver.saveAs(blob, info!.name);
diff --git a/src/routes/(fullscreen)/settings/thumbnail/service.ts b/src/routes/(fullscreen)/settings/thumbnail/service.ts
index 75c64b8..314cf5a 100644
--- a/src/routes/(fullscreen)/settings/thumbnail/service.ts
+++ b/src/routes/(fullscreen)/settings/thumbnail/service.ts
@@ -50,7 +50,7 @@ const requestThumbnailUpload = limitFunction(
   async (
     fileId: number,
     dataKeyVersion: Date,
-    thumbnail: { plaintext: ArrayBuffer; ciphertext: ArrayBuffer; iv: string },
+    thumbnail: { plaintext: ArrayBuffer; ciphertext: ArrayBuffer; iv: ArrayBuffer },
   ) => {
     statuses.set(fileId, "uploading");
 
@@ -77,7 +77,7 @@ export const requestThumbnailGeneration = async (fileInfo: FileInfo) => {
   await scheduler.schedule(
     async () => {
       statuses.set(fileInfo.id, "generation-pending");
-      file = await requestFileDownload(fileInfo.id, fileInfo.dataKey?.key!);
+      file = await requestFileDownload(fileInfo.id, fileInfo.dataKey?.key!, fileInfo.isLegacy!);
       return file.byteLength;
     },
     async () => {
diff --git a/src/routes/api/file/upload/+server.ts b/src/routes/api/file/upload/+server.ts
deleted file mode 100644
index f9cbd53..0000000
--- a/src/routes/api/file/upload/+server.ts
+++ /dev/null
@@ -1,108 +0,0 @@
-import Busboy from "@fastify/busboy";
-import { error, json } from "@sveltejs/kit";
-import { Readable, Writable } from "stream";
-import { authorize } from "$lib/server/modules/auth";
-import {
-  fileUploadRequest,
-  fileUploadResponse,
-  type FileUploadResponse,
-} from "$lib/server/schemas";
-import { uploadFile } from "$lib/server/services/file";
-import type { RequestHandler } from "./$types";
-
-type FileMetadata = Parameters<typeof uploadFile>[0];
-
-const parseFileMetadata = (userId: number, json: string) => {
-  const zodRes = fileUploadRequest.safeParse(JSON.parse(json));
-  if (!zodRes.success) error(400, "Invalid request body");
-  const {
-    parent,
-    mekVersion,
-    dek,
-    dekVersion,
-    hskVersion,
-    contentHmac,
-    contentType,
-    contentIv,
-    name,
-    nameIv,
-    createdAt,
-    createdAtIv,
-    lastModifiedAt,
-    lastModifiedAtIv,
-  } = zodRes.data;
-  if ((createdAt && !createdAtIv) || (!createdAt && createdAtIv))
-    error(400, "Invalid request body");
-
-  return {
-    userId,
-    parentId: parent,
-    mekVersion,
-    encDek: dek,
-    dekVersion: new Date(dekVersion),
-    hskVersion,
-    contentHmac,
-    contentType,
-    encContentIv: contentIv,
-    encName: { ciphertext: name, iv: nameIv },
-    encCreatedAt: createdAt && createdAtIv ? { ciphertext: createdAt, iv: createdAtIv } : null,
-    encLastModifiedAt: { ciphertext: lastModifiedAt, iv: lastModifiedAtIv },
-  } satisfies FileMetadata;
-};
-
-export const POST: RequestHandler = async ({ locals, request }) => {
-  const { userId } = await authorize(locals, "activeClient");
-
-  const contentType = request.headers.get("Content-Type");
-  if (!contentType?.startsWith("multipart/form-data") || !request.body) {
-    error(400, "Invalid request body");
-  }
-
-  return new Promise((resolve, reject) => {
-    const bb = Busboy({ headers: { "content-type": contentType } });
-    const handler =
-      (f: (...args: T) => Promise) =>
-      (...args: T) => {
-        f(...args).catch(reject);
-      };
-
-    let metadata: FileMetadata | null = null;
-    let content: Readable | null = null;
-    const checksum = new Promise((resolveChecksum, rejectChecksum) => {
-      bb.on(
-        "field",
-        handler(async (fieldname, val) => {
-          if (fieldname === "metadata") {
-            // Ignore subsequent metadata fields
-            if (!metadata) {
-              metadata = parseFileMetadata(userId, val);
-            }
-          } else if (fieldname === "checksum") {
-            // Ignore subsequent checksum fields
-            resolveChecksum(val);
-          } else {
-            error(400, "Invalid request body");
-          }
-        }),
-      );
-      bb.on(
-        "file",
-        handler(async (fieldname, file) => {
-          if (fieldname !== "content") error(400, "Invalid request body");
-          if (!metadata || content) error(400, "Invalid request body");
-          content = file;
-
-          const { fileId } = await uploadFile(metadata, content, checksum);
-          resolve(json(fileUploadResponse.parse({ file: fileId } satisfies FileUploadResponse)));
-        }),
-      );
-      bb.on("finish", () => rejectChecksum(new Error("Invalid request body")));
-      bb.on("error", (e) => {
-        content?.emit("error", e) ?? reject(e);
-        rejectChecksum(e);
-      });
-    });
-
-    request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error
-  });
-};
diff --git a/src/routes/api/file/upload/[id]/chunks/[index]/+server.ts b/src/routes/api/file/upload/[id]/chunks/[index]/+server.ts
new file mode 100644
index 0000000..c44e425
--- /dev/null
+++ b/src/routes/api/file/upload/[id]/chunks/[index]/+server.ts
@@ -0,0 +1,43 @@
+import { error, text } from "@sveltejs/kit";
+import { Readable } from "stream";
+import { z } from "zod";
+import { authorize } from "$lib/server/modules/auth";
+import { uploadChunk } from "$lib/server/services/file";
+import type { RequestHandler } from "./$types";
+
+export const POST: RequestHandler = async ({ locals, params, request }) => {
+  const { userId } = await authorize(locals, "activeClient");
+
+  const zodRes = z
+    .object({
+      id: z.uuidv4(),
+      index: z.coerce.number().int().nonnegative(),
+    })
+    .safeParse(params);
+  if (!zodRes.success) error(400, "Invalid path parameters");
+  const { id: uploadId, index: chunkIndex } = zodRes.data;
+
+  // Parse Content-Digest header (RFC 9530)
+  // Expected format: sha-256=:base64hash:
+  const contentDigest = request.headers.get("Content-Digest");
+  if (!contentDigest) error(400, "Missing Content-Digest header");
+
+  const digestMatch = contentDigest.match(/^sha-256=:([A-Za-z0-9+/=]+):$/);
+  if (!digestMatch || !digestMatch[1])
+    error(400, "Invalid Content-Digest format, must be sha-256=:base64:");
+  const encChunkHash = digestMatch[1];
+
+  const contentType = request.headers.get("Content-Type");
+  if (contentType !== "application/octet-stream" || !request.body) {
+    error(400, "Invalid request body");
+  }
+
+  // Convert web ReadableStream to Node Readable
+  const nodeReadable = Readable.fromWeb(
+    request.body as unknown as Parameters<typeof Readable.fromWeb>[0],
+  );
+
+  await uploadChunk(userId, uploadId, chunkIndex, nodeReadable, encChunkHash);
+
+  return text("Chunk uploaded", { headers: { "Content-Type": "text/plain" } });
+};
diff --git a/src/trpc/routers/file.ts b/src/trpc/routers/file.ts
index a56a91f..eaf42ca 100644
--- a/src/trpc/routers/file.ts
+++ b/src/trpc/routers/file.ts
@@ -1,9 +1,20 @@
 import { TRPCError } from "@trpc/server";
+import { createHash } from "crypto";
+import { createReadStream, createWriteStream } from "fs";
+import { mkdir, rm } from "fs/promises";
+import mime from "mime";
+import { dirname } from "path";
+import { v4 as uuidv4 } from "uuid";
 import { z } from "zod";
-import { FileRepo, MediaRepo, IntegrityError } from "$lib/server/db";
-import { safeUnlink } from "$lib/server/modules/filesystem";
+import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db";
+import db from "$lib/server/db/kysely";
+import env from "$lib/server/loadenv";
+import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem";
+import { directoryIdSchema } from "$lib/server/schemas";
 import { router, roleProcedure } from "../init.server";
 
+const uploadLocks = new Set<string>();
+
 const fileRouter = router({
   get: roleProcedure["activeClient"]
     .input(
@@ -19,6 +30,7 @@ const fileRouter = router({
       const categories = await FileRepo.getAllFileCategories(input.id);
 
       return {
+        isLegacy: !!file.encContentIv,
         parent: file.parentId,
         mekVersion: file.mekVersion,
         dek: file.encDek,
@@ -52,6 +64,7 @@ const fileRouter = router({
       const files = await FileRepo.getFilesWithCategories(ctx.session.userId, input.ids);
       return files.map((file) => ({
         id: file.id,
+        isLegacy: !!file.encContentIv,
         parent: file.parentId,
         mekVersion: file.mekVersion,
         dek: file.encDek,
@@ -158,6 +171,137 @@ const fileRouter = router({
 
       return { updatedAt: thumbnail.updatedAt };
     }),
+
+  startUpload: roleProcedure["activeClient"]
+    .input(
+      z.object({
+        chunks: z.int().positive(),
+        parent: directoryIdSchema,
+        mekVersion: z.int().positive(),
+        dek: z.base64().nonempty(),
+        dekVersion: z.date(),
+        hskVersion: z.int().positive().optional(),
+        contentType: z
+          .string()
+          .trim()
+          .nonempty()
+          .refine((value) => mime.getExtension(value) !== null),
+        name: z.base64().nonempty(),
+        nameIv: z.base64().nonempty(),
+        createdAt: z.base64().nonempty().optional(),
+        createdAtIv: z.base64().nonempty().optional(),
+        lastModifiedAt: z.base64().nonempty(),
+        lastModifiedAtIv: z.base64().nonempty(),
+      }),
+    )
+    .mutation(async ({ ctx, input }) => {
+      const oneMinuteAgo = new Date(Date.now() - 60 * 1000);
+      const oneMinuteLater = new Date(Date.now() + 60 * 1000);
+      if (input.dekVersion <= oneMinuteAgo || input.dekVersion >= oneMinuteLater) {
+        throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" });
+      }
+
+      try {
+        const { id: sessionId } = await UploadRepo.createUploadSession({
+          userId: ctx.session.userId,
+          totalChunks: input.chunks,
+          expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours
+          parentId: input.parent,
+          mekVersion: input.mekVersion,
+          encDek: input.dek,
+          dekVersion: input.dekVersion,
+          hskVersion: input.hskVersion ?? null,
+          contentType: input.contentType,
+          encName: { ciphertext: input.name, iv: input.nameIv },
+          encCreatedAt:
+            input.createdAt && input.createdAtIv
+              ? { ciphertext: input.createdAt, iv: input.createdAtIv }
+              : null,
+          encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv },
+        });
+        await mkdir(getChunkDirectoryPath(sessionId), { recursive: true });
+        return { uploadId: sessionId };
+      } catch (e) {
+        if (e instanceof IntegrityError) {
+          if (e.message === "Inactive MEK version") {
+            throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" });
+          } else if (e.message === "Inactive HSK version") {
+            throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid HSK version" });
+          }
+        }
+        throw e;
+      }
+    }),
+
+  completeUpload: roleProcedure["activeClient"]
+    .input(
+      z.object({
+        uploadId: z.uuidv4(),
+        contentHmac: z.base64().nonempty().optional(),
+      }),
+    )
+    .mutation(async ({ ctx, input }) => {
+      const { uploadId } = input;
+      if (uploadLocks.has(uploadId)) {
+        throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); // TODO: Message
+      } else {
+        uploadLocks.add(uploadId);
+      }
+
+      const filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`;
+      await mkdir(dirname(filePath), { recursive: true });
+
+      try {
+        const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
+        if (!session) {
+          throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" });
+        } else if (
+          (session.hskVersion && !input.contentHmac) ||
+          (!session.hskVersion && input.contentHmac)
+        ) {
+          throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content hmac" }); // TODO: message
+        } else if (session.uploadedChunks.length < session.totalChunks) {
+          throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); // TODO: Message
+        }
+
+        const chunkDirectoryPath = getChunkDirectoryPath(uploadId);
+        const hashStream = createHash("sha256");
+        const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
+
+        for (let i = 0; i < session.totalChunks; i++) {
+          for await (const chunk of createReadStream(`${chunkDirectoryPath}/${i}`)) {
+            hashStream.update(chunk);
+            writeStream.write(chunk);
+          }
+        }
+
+        await new Promise<void>((resolve, reject) => {
+          writeStream.end((e: any) => (e ? reject(e) : resolve()));
+        });
+
+        const hash = hashStream.digest("base64");
+        const fileId = await db.transaction().execute(async (trx) => {
+          const { id: fileId } = await FileRepo.registerFile(trx, {
+            ...session,
+            userId: ctx.session.userId,
+            path: filePath,
+            contentHmac: input.contentHmac ?? null,
+            encContentHash: hash,
+            encContentIv: null,
+          });
+          await UploadRepo.deleteUploadSession(trx, uploadId);
+          return fileId;
+        });
+
+        await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e));
+        return { file: fileId };
+      } catch (e) {
+        await safeUnlink(filePath);
+        throw e;
+      } finally {
+        uploadLocks.delete(uploadId);
+      }
+    }),
 });
 
 export default fileRouter;
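Taken together, the first patch replaces the single multipart POST with a three-step protocol. From the client's perspective (condensed from upload.svelte.ts above; error handling omitted):

    const { uploadId } = await trpc().file.startUpload.mutate(metadata); // 1. open session
    for (let i = 0; i < chunks.length; i++) {
      // 2. upload each encrypted chunk, integrity-checked via Content-Digest
      await fetch(`/api/file/upload/${uploadId}/chunks/${i}`, {
        method: "POST",
        headers: {
          "Content-Type": "application/octet-stream",
          "Content-Digest": `sha-256=:${chunks[i].chunkEncryptedHash}:`,
        },
        body: chunks[i].chunkEncrypted,
      });
    }
    // 3. the server concatenates the chunks, registers the file row, drops the session
    const { file } = await trpc().file.completeUpload.mutate({ uploadId, contentHmac });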
From 0c295a2ffa275f4cf47f451fd6de0a21a0ad7020 Mon Sep 17 00:00:00 2001
From: static
Date: Sun, 11 Jan 2026 09:06:49 +0900
Subject: [PATCH 2/8] Implement streaming file decryption using a Service
 Worker
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/hooks.client.ts                           |   2 -
 src/lib/constants/index.ts                    |   1 +
 src/lib/constants/serviceWorker.ts            |   1 +
 src/lib/constants/upload.ts                   |   5 +-
 src/lib/modules/crypto/aes.ts                 |  22 ++--
 src/lib/modules/crypto/rsa.ts                 |  32 ++---
 src/lib/modules/crypto/sha.ts                 |   6 +-
 src/lib/{server => }/modules/http.ts          |   0
 src/lib/modules/opfs.ts                       |  27 ++--
 src/lib/serviceWorker/client.ts               |  39 ++++++
 src/lib/serviceWorker/index.ts                |   2 +
 src/lib/serviceWorker/types.ts                |  19 +++
 .../(fullscreen)/file/[id]/+page.svelte       |  34 +++--
 src/routes/(fullscreen)/file/[id]/service.ts  |  21 ++++
 src/routes/api/file/[id]/download/+server.ts  |   2 +-
 .../file/[id]/thumbnail/download/+server.ts   |   2 +-
 src/service-worker/handlers/decryptFile.ts    | 117 ++++++++++++++++++
 src/service-worker/handlers/index.ts          |   1 +
 src/service-worker/index.ts                   |  43 +++++++
 src/service-worker/modules/constants.ts       |   1 +
 src/service-worker/modules/crypto.ts          |  40 ++++++
 src/service-worker/modules/http.ts            |   1 +
 src/service-worker/modules/opfs.ts            |   1 +
 src/service-worker/stores.ts                  |   3 +
 src/service-worker/types.ts                   |   1 +
 25 files changed, 359 insertions(+), 64 deletions(-)
 create mode 100644 src/lib/constants/serviceWorker.ts
 rename src/lib/{server => }/modules/http.ts (100%)
 create mode 100644 src/lib/serviceWorker/client.ts
 create mode 100644 src/lib/serviceWorker/index.ts
 create mode 100644 src/lib/serviceWorker/types.ts
 create mode 100644 src/service-worker/handlers/decryptFile.ts
 create mode 100644 src/service-worker/handlers/index.ts
 create mode 100644 src/service-worker/index.ts
 create mode 100644 src/service-worker/modules/constants.ts
 create mode 100644 src/service-worker/modules/crypto.ts
 create mode 100644 src/service-worker/modules/http.ts
 create mode 100644 src/service-worker/modules/opfs.ts
 create mode 100644 src/service-worker/stores.ts
 create mode 100644 src/service-worker/types.ts

diff --git a/src/hooks.client.ts b/src/hooks.client.ts
index 99e11c9..a677d9f 100644
--- a/src/hooks.client.ts
+++ b/src/hooks.client.ts
@@ -1,7 +1,6 @@
 import type { ClientInit } from "@sveltejs/kit";
 import { cleanupDanglingInfos, getClientKey, getMasterKeys, getHmacSecrets } from "$lib/indexedDB";
 import { prepareFileCache } from "$lib/modules/file";
-import { prepareOpfs } from "$lib/modules/opfs";
 import { clientKeyStore, masterKeyStore, hmacSecretStore } from "$lib/stores";
 
 const requestPersistentStorage = async () => {
@@ -46,7 +45,6 @@ export const init: ClientInit = async () => {
     prepareClientKeyStore(),
     prepareMasterKeyStore(),
     prepareHmacSecretStore(),
-    prepareOpfs(),
   ]);
 
   cleanupDanglingInfos(); // Intended
diff --git a/src/lib/constants/index.ts b/src/lib/constants/index.ts
index ab6125a..4983846 100644
--- a/src/lib/constants/index.ts
+++ b/src/lib/constants/index.ts
@@ -1 +1,2 @@
+export * from "./serviceWorker";
 export * from "./upload";
diff --git a/src/lib/constants/serviceWorker.ts b/src/lib/constants/serviceWorker.ts
new file mode 100644
index 0000000..8c09d05
--- /dev/null
+++ b/src/lib/constants/serviceWorker.ts
@@ -0,0 +1 @@
+export const DECRYPTED_FILE_URL_PREFIX = "/_internal/decrypted-file/";
diff --git a/src/lib/constants/upload.ts b/src/lib/constants/upload.ts
index 337700d..99d94bb 100644
--- a/src/lib/constants/upload.ts
+++ b/src/lib/constants/upload.ts
@@ -1,5 +1,6 @@
-export const CHUNK_SIZE = 4 * 1024 * 1024;
-
 export const AES_GCM_IV_SIZE = 12;
 export const AES_GCM_TAG_SIZE = 16;
 export const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE;
+
+export const CHUNK_SIZE = 4 * 1024 * 1024;
+export const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD;
diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts
index 67f6a9f..fe11afb 100644
--- a/src/lib/modules/crypto/aes.ts
+++ b/src/lib/modules/crypto/aes.ts
@@ -9,7 +9,7 @@ import {
 
 export const generateMasterKey = async () => {
   return {
-    masterKey: await window.crypto.subtle.generateKey(
+    masterKey: await crypto.subtle.generateKey(
       {
         name: "AES-KW",
         length: 256,
@@ -22,7 +22,7 @@ export const generateMasterKey = async () => {
 
 export const generateDataKey = async () => {
   return {
-    dataKey: await window.crypto.subtle.generateKey(
+    dataKey: await crypto.subtle.generateKey(
       {
         name: "AES-GCM",
         length: 256,
@@ -35,9 +35,9 @@ export const generateDataKey = async () => {
 };
 
 export const makeAESKeyNonextractable = async (key: CryptoKey) => {
-  return await window.crypto.subtle.importKey(
+  return await crypto.subtle.importKey(
     "raw",
-    await window.crypto.subtle.exportKey("raw", key),
+    await crypto.subtle.exportKey("raw", key),
     key.algorithm,
     false,
     key.usages,
@@ -45,12 +45,12 @@ export const makeAESKeyNonextractable = async (key: CryptoKey) => {
 };
 
 export const wrapDataKey = async (dataKey: CryptoKey, masterKey: CryptoKey) => {
-  return encodeToBase64(await window.crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW"));
+  return encodeToBase64(await crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW"));
 };
 
 export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey) => {
   return {
-    dataKey: await window.crypto.subtle.unwrapKey(
+    dataKey: await crypto.subtle.unwrapKey(
       "raw",
       decodeFromBase64(dataKeyWrapped),
       masterKey,
@@ -63,12 +63,12 @@ export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey
 };
 
 export const wrapHmacSecret = async (hmacSecret: CryptoKey, masterKey: CryptoKey) => {
-  return encodeToBase64(await window.crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW"));
+  return encodeToBase64(await crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW"));
 };
 
 export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: CryptoKey) => {
   return {
-    hmacSecret: await window.crypto.subtle.unwrapKey(
+    hmacSecret: await crypto.subtle.unwrapKey(
       "raw",
       decodeFromBase64(hmacSecretWrapped),
       masterKey,
@@ -84,8 +84,8 @@ export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: Cry
 };
 
 export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => {
-  const iv = window.crypto.getRandomValues(new Uint8Array(12));
-  const ciphertext = await window.crypto.subtle.encrypt(
+  const iv = crypto.getRandomValues(new Uint8Array(12));
+  const ciphertext = await crypto.subtle.encrypt(
     {
       name: "AES-GCM",
       iv,
@@ -101,7 +101,7 @@ export const decryptData = async (
   iv: string | BufferSource,
   dataKey: CryptoKey,
 ) => {
-  return await window.crypto.subtle.decrypt(
+  return await crypto.subtle.decrypt(
     {
       name: "AES-GCM",
       iv: typeof iv === "string" ? decodeFromBase64(iv) : iv,
diff --git a/src/lib/modules/crypto/rsa.ts b/src/lib/modules/crypto/rsa.ts
index 13dfd46..11e136f 100644
--- a/src/lib/modules/crypto/rsa.ts
+++ b/src/lib/modules/crypto/rsa.ts
@@ -1,7 +1,7 @@
 import { encodeString, encodeToBase64, decodeFromBase64 } from "./util";
 
 export const generateEncryptionKeyPair = async () => {
-  const keyPair = await window.crypto.subtle.generateKey(
+  const keyPair = await crypto.subtle.generateKey(
     {
       name: "RSA-OAEP",
       modulusLength: 4096,
@@ -18,7 +18,7 @@ export const generateEncryptionKeyPair = async () => {
 };
 
 export const generateSigningKeyPair = async () => {
-  const keyPair = await window.crypto.subtle.generateKey(
+  const keyPair = await crypto.subtle.generateKey(
     {
       name: "RSA-PSS",
       modulusLength: 4096,
@@ -37,7 +37,7 @@ export const generateSigningKeyPair = async () => {
 export const exportRSAKey = async (key: CryptoKey) => {
   const format = key.type === "public" ? ("spki" as const) : ("pkcs8" as const);
   return {
-    key: await window.crypto.subtle.exportKey(format, key),
+    key: await crypto.subtle.exportKey(format, key),
     format,
   };
 };
@@ -54,14 +54,14 @@ export const importEncryptionKeyPairFromBase64 = async (
     name: "RSA-OAEP",
     hash: "SHA-256",
   };
-  const encryptKey = await window.crypto.subtle.importKey(
+  const encryptKey = await crypto.subtle.importKey(
     "spki",
     decodeFromBase64(encryptKeyBase64),
     algorithm,
     true,
     ["encrypt", "wrapKey"],
   );
-  const decryptKey = await window.crypto.subtle.importKey(
+  const decryptKey = await crypto.subtle.importKey(
     "pkcs8",
     decodeFromBase64(decryptKeyBase64),
     algorithm,
@@ -79,14 +79,14 @@ export const importSigningKeyPairFromBase64 = async (
     name: "RSA-PSS",
     hash: "SHA-256",
   };
-  const signKey = await window.crypto.subtle.importKey(
+  const signKey = await crypto.subtle.importKey(
     "pkcs8",
     decodeFromBase64(signKeyBase64),
     algorithm,
     true,
     ["sign"],
   );
-  const verifyKey = await window.crypto.subtle.importKey(
+  const verifyKey = await crypto.subtle.importKey(
     "spki",
     decodeFromBase64(verifyKeyBase64),
     algorithm,
@@ -98,17 +98,11 @@ export const importSigningKeyPairFromBase64 = async (
 
 export const makeRSAKeyNonextractable = async (key: CryptoKey) => {
   const { key: exportedKey, format } = await exportRSAKey(key);
-  return await window.crypto.subtle.importKey(
-    format,
-    exportedKey,
-    key.algorithm,
-    false,
-    key.usages,
-  );
+  return await crypto.subtle.importKey(format, exportedKey, key.algorithm, false, key.usages);
 };
 
 export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey) => {
-  return await window.crypto.subtle.decrypt(
+  return await crypto.subtle.decrypt(
     {
       name: "RSA-OAEP",
     } satisfies RsaOaepParams,
@@ -119,7 +113,7 @@ export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey)
 
 export const wrapMasterKey = async (masterKey: CryptoKey, encryptKey: CryptoKey) => {
   return encodeToBase64(
-    await window.crypto.subtle.wrapKey("raw", masterKey, encryptKey, {
+    await crypto.subtle.wrapKey("raw", masterKey, encryptKey, {
       name: "RSA-OAEP",
     } satisfies RsaOaepParams),
   );
@@ -131,7 +125,7 @@ export const unwrapMasterKey = async (
   extractable = false,
 ) => {
   return {
-    masterKey: await window.crypto.subtle.unwrapKey(
+    masterKey: await crypto.subtle.unwrapKey(
       "raw",
       decodeFromBase64(masterKeyWrapped),
       decryptKey,
@@ -146,7 +140,7 @@ export const unwrapMasterKey = async (
 };
 
 export const signMessageRSA = async (message: BufferSource, signKey: CryptoKey) => {
-  return await window.crypto.subtle.sign(
+  return await crypto.subtle.sign(
     {
       name: "RSA-PSS",
       saltLength: 32, // SHA-256
@@ -161,7 +155,7 @@ export const verifySignatureRSA = async (
   signature: BufferSource,
   verifyKey: CryptoKey,
 ) => {
-  return await window.crypto.subtle.verify(
+  return await crypto.subtle.verify(
     {
       name: "RSA-PSS",
       saltLength: 32, // SHA-256
diff --git a/src/lib/modules/crypto/sha.ts b/src/lib/modules/crypto/sha.ts
index 3acb258..9bf2dea 100644
--- a/src/lib/modules/crypto/sha.ts
+++ b/src/lib/modules/crypto/sha.ts
@@ -1,10 +1,10 @@
 export const digestMessage = async (message: BufferSource) => {
-  return await window.crypto.subtle.digest("SHA-256", message);
+  return await crypto.subtle.digest("SHA-256", message);
 };
 
 export const generateHmacSecret = async () => {
   return {
-    hmacSecret: await window.crypto.subtle.generateKey(
+    hmacSecret: await crypto.subtle.generateKey(
       {
         name: "HMAC",
         hash: "SHA-256",
@@ -16,5 +16,5 @@ export const generateHmacSecret = async () => {
 };
 
 export const signMessageHmac = async (message: BufferSource, hmacSecret: CryptoKey) => {
-  return await window.crypto.subtle.sign("HMAC", hmacSecret, message);
+  return await crypto.subtle.sign("HMAC", hmacSecret, message);
 };
diff --git a/src/lib/server/modules/http.ts b/src/lib/modules/http.ts
similarity index 100%
rename from src/lib/server/modules/http.ts
rename to src/lib/modules/http.ts
diff --git a/src/lib/modules/opfs.ts b/src/lib/modules/opfs.ts
index 41f1f72..a367aae 100644
--- a/src/lib/modules/opfs.ts
+++ b/src/lib/modules/opfs.ts
@@ -1,13 +1,5 @@
-let rootHandle: FileSystemDirectoryHandle | null = null;
-
-export const prepareOpfs = async () => {
-  rootHandle = await navigator.storage.getDirectory();
-};
-
 const getFileHandle = async (path: string, create = true) => {
-  if (!rootHandle) {
-    throw new Error("OPFS not prepared");
-  } else if (path[0] !== "/") {
+  if (path[0] !== "/") {
     throw new Error("Path must be absolute");
   }
 
@@ -17,7 +9,7 @@ const getFileHandle = async (path: string, create = true) => {
   }
 
   try {
-    let directoryHandle = rootHandle;
+    let directoryHandle = await navigator.storage.getDirectory();
     for (const part of parts.slice(0, -1)) {
       if (!part) continue;
       directoryHandle = await directoryHandle.getDirectoryHandle(part, { create });
@@ -34,12 +26,15 @@ const getFileHandle = async (path: string, create = true) => {
   }
 };
 
-export const readFile = async (path: string) => {
+export const getFile = async (path: string) => {
   const { fileHandle } = await getFileHandle(path, false);
   if (!fileHandle) return null;
 
-  const file = await fileHandle.getFile();
-  return await file.arrayBuffer();
+  return await fileHandle.getFile();
+};
+
+export const readFile = async (path: string) => {
+  return (await getFile(path))?.arrayBuffer() ?? null;
 };
 
 export const writeFile = async (path: string, data: ArrayBuffer) => {
@@ -61,9 +56,7 @@ export const deleteFile = async (path: string) => {
 };
 
 const getDirectoryHandle = async (path: string) => {
-  if (!rootHandle) {
-    throw new Error("OPFS not prepared");
-  } else if (path[0] !== "/") {
+  if (path[0] !== "/") {
     throw new Error("Path must be absolute");
   }
 
@@ -73,7 +66,7 @@ const getDirectoryHandle = async (path: string) => {
   }
 
   try {
-    let directoryHandle = rootHandle;
+    let directoryHandle = await navigator.storage.getDirectory();
     let parentHandle;
     for (const part of parts.slice(1)) {
       if (!part) continue;
null;
 };
 
 export const writeFile = async (path: string, data: ArrayBuffer) => {
@@ -61,9 +56,7 @@ export const deleteFile = async (path: string) => {
 };
 
 const getDirectoryHandle = async (path: string) => {
-  if (!rootHandle) {
-    throw new Error("OPFS not prepared");
-  } else if (path[0] !== "/") {
+  if (path[0] !== "/") {
     throw new Error("Path must be absolute");
   }
 
@@ -73,7 +66,7 @@ const getDirectoryHandle = async (path: string) => {
   }
 
   try {
-    let directoryHandle = rootHandle;
+    let directoryHandle = await navigator.storage.getDirectory();
     let parentHandle;
     for (const part of parts.slice(1)) {
       if (!part) continue;
diff --git a/src/lib/serviceWorker/client.ts b/src/lib/serviceWorker/client.ts
new file mode 100644
index 0000000..771c15e
--- /dev/null
+++ b/src/lib/serviceWorker/client.ts
@@ -0,0 +1,39 @@
+import { DECRYPTED_FILE_URL_PREFIX } from "$lib/constants";
+import type { FileMetadata, ServiceWorkerMessage, ServiceWorkerResponse } from "./types";
+
+const PREPARE_TIMEOUT_MS = 5000;
+
+const getServiceWorker = async () => {
+  const registration = await navigator.serviceWorker.ready;
+  const sw = registration.active;
+  if (!sw) {
+    throw new Error("Service worker not activated");
+  }
+  return sw;
+};
+
+export const prepareFileDecryption = async (id: number, metadata: FileMetadata) => {
+  const sw = await getServiceWorker();
+  return new Promise<void>((resolve, reject) => {
+    const timeout = setTimeout(
+      () => reject(new Error("Service worker timeout")),
+      PREPARE_TIMEOUT_MS,
+    );
+    const handler = (event: MessageEvent<ServiceWorkerResponse>) => {
+      if (event.data.type === "decryption-ready" && event.data.fileId === id) {
+        clearTimeout(timeout);
+        navigator.serviceWorker.removeEventListener("message", handler);
+        resolve();
+      }
+    };
+    navigator.serviceWorker.addEventListener("message", handler);
+
+    sw.postMessage({
+      type: "decryption-prepare",
+      fileId: id,
+      ...metadata,
+    } satisfies ServiceWorkerMessage);
+  });
+};
+
+export const getDecryptedFileUrl = (id: number) => `${DECRYPTED_FILE_URL_PREFIX}${id}`;
diff --git a/src/lib/serviceWorker/index.ts b/src/lib/serviceWorker/index.ts
new file mode 100644
index 0000000..d2ec230
--- /dev/null
+++ b/src/lib/serviceWorker/index.ts
@@ -0,0 +1,2 @@
+export * from "./client";
+export * from "./types";
diff --git a/src/lib/serviceWorker/types.ts b/src/lib/serviceWorker/types.ts
new file mode 100644
index 0000000..97edd6d
--- /dev/null
+++ b/src/lib/serviceWorker/types.ts
@@ -0,0 +1,19 @@
+export interface FileMetadata {
+  isLegacy: boolean;
+  dataKey: CryptoKey;
+  encContentSize: number;
+  contentType: string;
+}
+
+export interface DecryptionPrepareMessage extends FileMetadata {
+  type: "decryption-prepare";
+  fileId: number;
+}
+
+export interface DecryptionReadyMessage {
+  type: "decryption-ready";
+  fileId: number;
+}
+
+export type ServiceWorkerMessage = DecryptionPrepareMessage;
+export type ServiceWorkerResponse = DecryptionReadyMessage;
diff --git a/src/routes/(fullscreen)/file/[id]/+page.svelte b/src/routes/(fullscreen)/file/[id]/+page.svelte
index 4aa6b42..674bc22 100644
--- a/src/routes/(fullscreen)/file/[id]/+page.svelte
+++ b/src/routes/(fullscreen)/file/[id]/+page.svelte
@@ -17,6 +17,7 @@
     requestFileDownload,
     requestThumbnailUpload,
     requestFileAdditionToCategory,
+    requestVideoStream,
   } from "./service";
 
   import TopBarMenu from "./TopBarMenu.svelte";
@@ -37,6 +38,7 @@
   let viewerType: "image" | "video" | undefined = $state();
   let fileBlob: Blob | undefined = $state();
   let fileBlobUrl: string | undefined = $state();
+  let videoStreamUrl: string | undefined =
$state(); let videoElement: HTMLVideoElement | undefined = $state(); const updateViewer = async (buffer: ArrayBuffer, contentType: string) => { @@ -95,12 +97,27 @@ untrack(() => { if (!downloadState && !isDownloadRequested) { isDownloadRequested = true; - requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then(async (buffer) => { - const blob = await updateViewer(buffer, contentType); - if (!viewerType) { - FileSaver.saveAs(blob, info!.name); - } - }); + + if (viewerType === "video" && !info!.isLegacy) { + requestVideoStream(data.id, info!.dataKey!.key, contentType).then((streamUrl) => { + if (streamUrl) { + videoStreamUrl = streamUrl; + } else { + requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then((buffer) => + updateViewer(buffer, contentType), + ); + } + }); + } else { + requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then( + async (buffer) => { + const blob = await updateViewer(buffer, contentType); + if (!viewerType) { + FileSaver.saveAs(blob, info!.name); + } + }, + ); + } } }); } @@ -159,9 +176,10 @@ {@render viewerLoading("이미지를 불러오고 있어요.")} {/if} {:else if viewerType === "video"} - {#if fileBlobUrl} + {#if videoStreamUrl || fileBlobUrl}
- + updateThumbnail(info?.dataKey?.key!, info?.dataKey?.version!)} diff --git a/src/routes/(fullscreen)/file/[id]/service.ts b/src/routes/(fullscreen)/file/[id]/service.ts index 09ec86f..ea3e49c 100644 --- a/src/routes/(fullscreen)/file/[id]/service.ts +++ b/src/routes/(fullscreen)/file/[id]/service.ts @@ -1,11 +1,32 @@ import { encryptData } from "$lib/modules/crypto"; import { storeFileThumbnailCache } from "$lib/modules/file"; +import { prepareFileDecryption, getDecryptedFileUrl } from "$lib/serviceWorker"; import { requestFileThumbnailUpload } from "$lib/services/file"; import { trpc } from "$trpc/client"; export { requestCategoryCreation, requestFileRemovalFromCategory } from "$lib/services/category"; export { requestFileDownload } from "$lib/services/file"; +export const requestVideoStream = async ( + fileId: number, + dataKey: CryptoKey, + contentType: string, +) => { + const res = await fetch(`/api/file/${fileId}/download`, { method: "HEAD" }); + if (!res.ok) return null; + + const encContentSize = parseInt(res.headers.get("Content-Length") ?? "0", 10); + if (encContentSize <= 0) return null; + + try { + await prepareFileDecryption(fileId, { isLegacy: false, dataKey, encContentSize, contentType }); + return getDecryptedFileUrl(fileId); + } catch { + // TODO: Error Handling + return null; + } +}; + export const requestThumbnailUpload = async ( fileId: number, thumbnail: Blob, diff --git a/src/routes/api/file/[id]/download/+server.ts b/src/routes/api/file/[id]/download/+server.ts index 974dd54..68191ef 100644 --- a/src/routes/api/file/[id]/download/+server.ts +++ b/src/routes/api/file/[id]/download/+server.ts @@ -1,7 +1,7 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; import { authorize } from "$lib/server/modules/auth"; -import { parseRangeHeader, getContentRangeHeader } from "$lib/server/modules/http"; +import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; import { getFileStream } from "$lib/server/services/file"; import type { RequestHandler } from "./$types"; diff --git a/src/routes/api/file/[id]/thumbnail/download/+server.ts b/src/routes/api/file/[id]/thumbnail/download/+server.ts index 70d4cd3..4fc7c1a 100644 --- a/src/routes/api/file/[id]/thumbnail/download/+server.ts +++ b/src/routes/api/file/[id]/thumbnail/download/+server.ts @@ -1,7 +1,7 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; import { authorize } from "$lib/server/modules/auth"; -import { parseRangeHeader, getContentRangeHeader } from "$lib/server/modules/http"; +import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; import { getFileThumbnailStream } from "$lib/server/services/file"; import type { RequestHandler } from "./$types"; diff --git a/src/service-worker/handlers/decryptFile.ts b/src/service-worker/handlers/decryptFile.ts new file mode 100644 index 0000000..e374e5d --- /dev/null +++ b/src/service-worker/handlers/decryptFile.ts @@ -0,0 +1,117 @@ +import { DECRYPTED_FILE_URL_PREFIX, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "../modules/constants"; +import { decryptChunk, getEncryptedRange, getDecryptedSize } from "../modules/crypto"; +import { parseRangeHeader, getContentRangeHeader } from "../modules/http"; +import { getFile } from "../modules/opfs"; +import { fileMetadataStore } from "../stores"; +import type { FileMetadata } from "../types"; + +const createResponse = ( + stream: ReadableStream, + isRangeRequest: boolean, + range: { start: number; end: number; total: number }, + contentType?: string, +) => { + return new 
Response(stream, { + status: isRangeRequest ? 206 : 200, + headers: { + "Accept-Ranges": "bytes", + "Content-Length": String(range.end - range.start + 1), + "Content-Type": contentType ?? "application/octet-stream", + ...(isRangeRequest ? getContentRangeHeader(range) : {}), + }, + }); +}; + +const streamFromOpfs = async ( + file: File, + metadata?: FileMetadata, + range?: { start?: number; end?: number }, +) => { + const start = range?.start ?? 0; + const end = range?.end ?? file.size - 1; + if (start > end || start < 0 || end >= file.size) { + return new Response("Invalid range", { status: 416 }); + } + + return createResponse( + file.slice(start, end + 1).stream(), + !!range, + { start, end, total: file.size }, + metadata?.contentType, + ); +}; + +const streamFromServer = async ( + id: number, + metadata: FileMetadata, + range?: { start?: number; end?: number }, +) => { + const totalSize = getDecryptedSize(metadata.encContentSize, metadata.isLegacy); + const start = range?.start ?? 0; + const end = + range?.end ?? + (range && !metadata.isLegacy ? Math.min(start + CHUNK_SIZE, totalSize) : totalSize) - 1; + if (start > end || start < 0 || end >= totalSize) { + return new Response("Invalid range", { status: 416 }); + } + + const encryptedRange = getEncryptedRange(start, end, metadata.encContentSize, metadata.isLegacy); + const apiResponse = await fetch(`/api/file/${id}/download`, { + headers: { Range: `bytes=${encryptedRange.start}-${encryptedRange.end}` }, + }); + if (apiResponse.status !== 206) { + return new Response("Failed to fetch encrypted file", { status: 502 }); + } + + const fileEncrypted = await apiResponse.arrayBuffer(); + return createResponse( + new ReadableStream({ + async start(controller) { + if (metadata.isLegacy) { + const decrypted = await decryptChunk(fileEncrypted, metadata.dataKey); + controller.enqueue(new Uint8Array(decrypted.slice(start, end + 1))); + controller.close(); + return; + } + + const chunks = encryptedRange.lastChunkIndex - encryptedRange.firstChunkIndex + 1; + + for (let i = 0; i < chunks; i++) { + const chunk = await decryptChunk( + fileEncrypted.slice(i * ENCRYPTED_CHUNK_SIZE, (i + 1) * ENCRYPTED_CHUNK_SIZE), + metadata.dataKey, + ); + const sliceStart = i === 0 ? start % CHUNK_SIZE : 0; + const sliceEnd = i === chunks - 1 ? 
(end % CHUNK_SIZE) + 1 : chunk.byteLength;
+          controller.enqueue(new Uint8Array(chunk.slice(sliceStart, sliceEnd)));
+        }
+
+        controller.close();
+      },
+    }),
+    !!range,
+    { start, end, total: totalSize },
+    metadata.contentType,
+  );
+};
+
+const decryptFileHandler = async (request: Request) => {
+  const url = new URL(request.url);
+  const fileId = parseInt(url.pathname.slice(DECRYPTED_FILE_URL_PREFIX.length), 10);
+  if (isNaN(fileId)) {
+    throw new Response("Invalid file id", { status: 400 });
+  }
+
+  const metadata = fileMetadataStore.get(fileId);
+  const range = parseRangeHeader(request.headers.get("Range"));
+  const cache = await getFile(`/cache/${fileId}`);
+  if (cache) {
+    return streamFromOpfs(cache, metadata, range);
+  } else if (metadata) {
+    return streamFromServer(fileId, metadata, range);
+  } else {
+    return new Response("Decryption not prepared", { status: 400 });
+  }
+};
+
+export default decryptFileHandler;
diff --git a/src/service-worker/handlers/index.ts b/src/service-worker/handlers/index.ts
new file mode 100644
index 0000000..fe5b0f9
--- /dev/null
+++ b/src/service-worker/handlers/index.ts
@@ -0,0 +1 @@
+export { default as decryptFile } from "./decryptFile";
diff --git a/src/service-worker/index.ts b/src/service-worker/index.ts
new file mode 100644
index 0000000..051f8d9
--- /dev/null
+++ b/src/service-worker/index.ts
@@ -0,0 +1,43 @@
+/// <reference types="@sveltejs/kit" />
+/// <reference no-default-lib="true"/>
+/// <reference lib="esnext" />
+/// <reference lib="webworker" />
+
+import { DECRYPTED_FILE_URL_PREFIX } from "./modules/constants";
+import { decryptFile } from "./handlers";
+import { fileMetadataStore } from "./stores";
+import type { ServiceWorkerMessage, ServiceWorkerResponse } from "./types";
+
+const self = globalThis.self as unknown as ServiceWorkerGlobalScope;
+
+self.addEventListener("message", (event) => {
+  const message: ServiceWorkerMessage = event.data;
+  switch (message.type) {
+    case "decryption-prepare":
+      fileMetadataStore.set(message.fileId, message);
+      event.source?.postMessage({
+        type: "decryption-ready",
+        fileId: message.fileId,
+      } satisfies ServiceWorkerResponse);
+      break;
+    default: {
+      const exhaustive: never = message.type;
+      return exhaustive;
+    }
+  }
+});
+
+self.addEventListener("fetch", (event) => {
+  const url = new URL(event.request.url);
+  if (url.pathname.startsWith(DECRYPTED_FILE_URL_PREFIX)) {
+    event.respondWith(decryptFile(event.request));
+  }
+});
+
+self.addEventListener("install", () => {
+  self.skipWaiting();
+});
+
+self.addEventListener("activate", (event) => {
+  event.waitUntil(self.clients.claim());
+});
diff --git a/src/service-worker/modules/constants.ts b/src/service-worker/modules/constants.ts
new file mode 100644
index 0000000..cca093e
--- /dev/null
+++ b/src/service-worker/modules/constants.ts
@@ -0,0 +1 @@
+export * from "../../lib/constants";
diff --git a/src/service-worker/modules/crypto.ts b/src/service-worker/modules/crypto.ts
new file mode 100644
index 0000000..1afee74
--- /dev/null
+++ b/src/service-worker/modules/crypto.ts
@@ -0,0 +1,40 @@
+import { ENCRYPTION_OVERHEAD, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "./constants";
+
+export * from "../../lib/modules/crypto";
+
+export const getEncryptedRange = (
+  start: number,
+  end: number,
+  totalEncryptedSize: number,
+  isLegacy: boolean,
+) => {
+  if (isLegacy) {
+    return {
+      firstChunkIndex: 0,
+      lastChunkIndex: 0,
+      start: 0,
+      end: totalEncryptedSize - 1,
+    };
+  }
+
+  const firstChunkIndex = Math.floor(start / CHUNK_SIZE);
+  const lastChunkIndex = Math.floor(end / CHUNK_SIZE);
+  return {
+    firstChunkIndex,
+    lastChunkIndex,
+    start: firstChunkIndex * ENCRYPTED_CHUNK_SIZE,
+    end: Math.min((lastChunkIndex + 1) * ENCRYPTED_CHUNK_SIZE - 1, totalEncryptedSize - 1),
+  };
+};
+
+export const getDecryptedSize = (encryptedSize: number, isLegacy: boolean) => {
+  if (isLegacy) {
+    return encryptedSize - ENCRYPTION_OVERHEAD;
+  }
+
+  const fullChunks = Math.floor(encryptedSize / ENCRYPTED_CHUNK_SIZE);
+  const lastChunkEncSize = encryptedSize % ENCRYPTED_CHUNK_SIZE;
+  return (
+    fullChunks * CHUNK_SIZE + (lastChunkEncSize > 0 ? lastChunkEncSize - ENCRYPTION_OVERHEAD : 0)
+  );
+};
diff --git a/src/service-worker/modules/http.ts b/src/service-worker/modules/http.ts
new file mode 100644
index 0000000..0d1bf5e
--- /dev/null
+++ b/src/service-worker/modules/http.ts
@@ -0,0 +1 @@
+export * from "../../lib/modules/http";
diff --git a/src/service-worker/modules/opfs.ts b/src/service-worker/modules/opfs.ts
new file mode 100644
index 0000000..0ef5769
--- /dev/null
+++ b/src/service-worker/modules/opfs.ts
@@ -0,0 +1 @@
+export * from "../../lib/modules/opfs";
diff --git a/src/service-worker/stores.ts b/src/service-worker/stores.ts
new file mode 100644
index 0000000..22d899e
--- /dev/null
+++ b/src/service-worker/stores.ts
@@ -0,0 +1,3 @@
+import type { FileMetadata } from "./types";
+
+export const fileMetadataStore = new Map<number, FileMetadata>();
diff --git a/src/service-worker/types.ts b/src/service-worker/types.ts
new file mode 100644
index 0000000..f04ed39
--- /dev/null
+++ b/src/service-worker/types.ts
@@ -0,0 +1 @@
+export * from "../lib/serviceWorker/types";

From 1efcdd68f1d5364fe7283cdec020d02707d5f4fc Mon Sep 17 00:00:00 2001
From: static
Date: Sun, 11 Jan 2026 09:25:40 +0900
Subject: [PATCH 3/8] Fix a bug where the download menu was not displayed when
 a video is loaded via streaming
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../(fullscreen)/file/[id]/+page.svelte      |  1 +
 .../(fullscreen)/file/[id]/TopBarMenu.svelte | 22 +++--
 src/service-worker/handlers/decryptFile.ts   | 96 +++++++++++++------
 3 files changed, 83 insertions(+), 36 deletions(-)

diff --git a/src/routes/(fullscreen)/file/[id]/+page.svelte b/src/routes/(fullscreen)/file/[id]/+page.svelte
index 674bc22..053d6bf 100644
--- a/src/routes/(fullscreen)/file/[id]/+page.svelte
+++ b/src/routes/(fullscreen)/file/[id]/+page.svelte
@@ -154,6 +154,7 @@
         ? info?.parentId
         : undefined}
       {fileBlob}
+      downloadUrl={videoStreamUrl}
       filename={info?.name}
     />
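
The TopBarMenu change below wires the download menu to the service worker's decrypted-file URL: the client appends a `download` query parameter, and the `decryptFile` handler further down in this patch answers with an RFC 5987 `Content-Disposition` header so the browser saves the decrypted stream instead of rendering it. A minimal sketch of the two halves, using `getDecryptedFileUrl` from src/lib/serviceWorker/client.ts; the `buildStreamingDownloadUrl` helper name is illustrative and not part of this series:

  // Client side: turn a decrypted-file URL into a "save as" URL.
  // (Hypothetical helper; TopBarMenu.svelte inlines this logic.)
  const buildStreamingDownloadUrl = (fileId: number, filename: string) => {
    const url = new URL(getDecryptedFileUrl(fileId), window.location.origin);
    url.searchParams.set("download", filename); // read back by the service worker
    return url.toString();
  };

  // Service-worker side: the matching header emitted by createResponse().
  // RFC 5987 encoding keeps non-ASCII filenames (e.g. Korean) intact.
  const contentDisposition = (filename: string) =>
    `attachment; filename*=UTF-8''${encodeURIComponent(filename)}`;

Opening such a URL in a new tab streams the decrypted bytes straight to disk instead of buffering the whole file in page memory first.
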
diff --git a/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte b/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte index a037b61..d713e8c 100644 --- a/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte +++ b/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte @@ -10,17 +10,29 @@ interface Props { directoryId?: "root" | number; + downloadUrl?: string; fileBlob?: Blob; filename?: string; isOpen: boolean; } - let { directoryId, fileBlob, filename, isOpen = $bindable() }: Props = $props(); + let { directoryId, downloadUrl, fileBlob, filename, isOpen = $bindable() }: Props = $props(); + + const handleDownload = () => { + if (fileBlob && filename) { + FileSaver.saveAs(fileBlob, filename); + } else if (downloadUrl && filename) { + // Use streaming download via Content-Disposition header + const url = new URL(downloadUrl, window.location.origin); + url.searchParams.set("download", filename); + window.open(url.toString(), "_blank"); + } + }; (isOpen = false)} /> -{#if isOpen && (directoryId || fileBlob)} +{#if isOpen && (directoryId || downloadUrl || fileBlob)}
{ - FileSaver.saveAs(fileBlob, filename); - })} + {#if fileBlob || downloadUrl} + {@render menuButton(IconCloudDownload, "다운로드", handleDownload)} {/if}
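
The decryptFile.ts rewrite below drops the buffer-everything ReadableStream in favor of a TransformStream: downloaded bytes accumulate until a full encrypted chunk (ENCRYPTED_CHUNK_SIZE bytes) is available, each chunk is decrypted as it completes, and the plaintext flows downstream immediately. Reduced to a standalone sketch, assuming the document's decryptChunk and ENCRYPTED_CHUNK_SIZE:

  // Reassemble fixed-size encrypted chunks from an arbitrarily framed byte
  // stream, decrypting each one as soon as it is complete.
  const chunkedDecryptionStream = (dataKey: CryptoKey) => {
    let buffer = new Uint8Array(0);
    return new TransformStream<Uint8Array, Uint8Array>({
      async transform(chunk, controller) {
        const merged = new Uint8Array(buffer.length + chunk.length);
        merged.set(buffer);
        merged.set(chunk, buffer.length);
        buffer = merged;
        while (buffer.length >= ENCRYPTED_CHUNK_SIZE) {
          const encrypted = buffer.slice(0, ENCRYPTED_CHUNK_SIZE);
          buffer = buffer.slice(ENCRYPTED_CHUNK_SIZE);
          controller.enqueue(new Uint8Array(await decryptChunk(encrypted.buffer, dataKey)));
        }
      },
      async flush(controller) {
        // The final chunk of a file is usually shorter than ENCRYPTED_CHUNK_SIZE.
        if (buffer.length > 0) {
          controller.enqueue(new Uint8Array(await decryptChunk(buffer.buffer, dataKey)));
        }
      },
    });
  };

The real handler below additionally counts chunks so that the final (possibly shorter) chunk is always decrypted in flush, and trims the first and last plaintext chunks to the byte range the caller requested.
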
diff --git a/src/service-worker/handlers/decryptFile.ts b/src/service-worker/handlers/decryptFile.ts
index e374e5d..22aa118 100644
--- a/src/service-worker/handlers/decryptFile.ts
+++ b/src/service-worker/handlers/decryptFile.ts
@@ -10,15 +10,23 @@ const createResponse = (
   isRangeRequest: boolean,
   range: { start: number; end: number; total: number },
   contentType?: string,
+  downloadFilename?: string,
 ) => {
+  const headers: Record<string, string> = {
+    "Accept-Ranges": "bytes",
+    "Content-Length": String(range.end - range.start + 1),
+    "Content-Type": contentType ?? "application/octet-stream",
+    ...(isRangeRequest ? getContentRangeHeader(range) : {}),
+  };
+
+  if (downloadFilename) {
+    headers["Content-Disposition"] =
+      `attachment; filename*=UTF-8''${encodeURIComponent(downloadFilename)}`;
+  }
+
   return new Response(stream, {
     status: isRangeRequest ? 206 : 200,
-    headers: {
-      "Accept-Ranges": "bytes",
-      "Content-Length": String(range.end - range.start + 1),
-      "Content-Type": contentType ?? "application/octet-stream",
-      ...(isRangeRequest ? getContentRangeHeader(range) : {}),
-    },
+    headers,
   });
 };
 
@@ -26,6 +34,7 @@ const streamFromOpfs = async (
   file: File,
   metadata?: FileMetadata,
   range?: { start?: number; end?: number },
+  downloadFilename?: string,
 ) => {
   const start = range?.start ?? 0;
   const end = range?.end ?? file.size - 1;
@@ -38,6 +47,7 @@ const streamFromOpfs = async (
     !!range,
     { start, end, total: file.size },
     metadata?.contentType,
+    downloadFilename,
   );
 };
 
@@ -45,6 +55,7 @@ const streamFromServer = async (
   id: number,
   metadata: FileMetadata,
   range?: { start?: number; end?: number },
+  downloadFilename?: string,
 ) => {
   const totalSize = getDecryptedSize(metadata.encContentSize, metadata.isLegacy);
   const start = range?.start ?? 0;
@@ -59,39 +70,63 @@ const streamFromServer = async (
   const apiResponse = await fetch(`/api/file/${id}/download`, {
     headers: { Range: `bytes=${encryptedRange.start}-${encryptedRange.end}` },
   });
-  if (apiResponse.status !== 206) {
+  if (apiResponse.status !== 206 || !apiResponse.body) {
     return new Response("Failed to fetch encrypted file", { status: 502 });
   }
 
-  const fileEncrypted = await apiResponse.arrayBuffer();
-  return createResponse(
-    new ReadableStream({
-      async start(controller) {
-        if (metadata.isLegacy) {
-          const decrypted = await decryptChunk(fileEncrypted, metadata.dataKey);
+  if (metadata.isLegacy) {
+    const fileEncrypted = await apiResponse.arrayBuffer();
+    const decrypted = await decryptChunk(fileEncrypted, metadata.dataKey);
+    return createResponse(
+      new ReadableStream({
+        start(controller) {
           controller.enqueue(new Uint8Array(decrypted.slice(start, end + 1)));
           controller.close();
-          return;
-        }
+        },
+      }),
+      !!range,
+      { start, end, total: totalSize },
+      metadata.contentType,
+    );
+  }
 
-        const chunks = encryptedRange.lastChunkIndex - encryptedRange.firstChunkIndex + 1;
+  const totalChunks = encryptedRange.lastChunkIndex - encryptedRange.firstChunkIndex + 1;
+  let currentChunkIndex = 0;
+  let buffer = new Uint8Array(0);
 
-        for (let i = 0; i < chunks; i++) {
-          const chunk = await decryptChunk(
-            fileEncrypted.slice(i * ENCRYPTED_CHUNK_SIZE, (i + 1) * ENCRYPTED_CHUNK_SIZE),
-            metadata.dataKey,
-          );
-          const sliceStart = i === 0 ? start % CHUNK_SIZE : 0;
-          const sliceEnd = i === chunks - 1 ? (end % CHUNK_SIZE) + 1 : chunk.byteLength;
-          controller.enqueue(new Uint8Array(chunk.slice(sliceStart, sliceEnd)));
-        }
+  const decryptingStream = new TransformStream({
+    async transform(chunk, controller) {
+      const newBuffer = new Uint8Array(buffer.length + chunk.length);
+      newBuffer.set(buffer);
+      newBuffer.set(chunk, buffer.length);
+      buffer = newBuffer;
+
-        controller.close();
-      },
-    }),
+      while (buffer.length >= ENCRYPTED_CHUNK_SIZE && currentChunkIndex < totalChunks - 1) {
+        const encryptedChunk = buffer.slice(0, ENCRYPTED_CHUNK_SIZE);
+        buffer = buffer.slice(ENCRYPTED_CHUNK_SIZE);
+
+        const decrypted = await decryptChunk(encryptedChunk.buffer, metadata.dataKey);
+        const sliceStart = currentChunkIndex === 0 ? start % CHUNK_SIZE : 0;
+        controller.enqueue(new Uint8Array(decrypted.slice(sliceStart)));
+        currentChunkIndex++;
+      }
+    },
+    async flush(controller) {
+      if (buffer.length > 0) {
+        const decrypted = await decryptChunk(buffer.buffer, metadata.dataKey);
+        const sliceStart = currentChunkIndex === 0 ? start % CHUNK_SIZE : 0;
+        const sliceEnd = (end % CHUNK_SIZE) + 1;
+        controller.enqueue(new Uint8Array(decrypted.slice(sliceStart, sliceEnd)));
+      }
+    },
+  });
+
+  return createResponse(
+    apiResponse.body.pipeThrough(decryptingStream),
     !!range,
     { start, end, total: totalSize },
     metadata.contentType,
+    downloadFilename,
   );
 };
 
@@ -102,13 +137,14 @@ const decryptFileHandler = async (request: Request) => {
     throw new Response("Invalid file id", { status: 400 });
   }
 
+  const downloadFilename = url.searchParams.get("download") ?? undefined;
   const metadata = fileMetadataStore.get(fileId);
   const range = parseRangeHeader(request.headers.get("Range"));
   const cache = await getFile(`/cache/${fileId}`);
   if (cache) {
-    return streamFromOpfs(cache, metadata, range);
+    return streamFromOpfs(cache, metadata, range, downloadFilename);
   } else if (metadata) {
-    return streamFromServer(fileId, metadata, range);
+    return streamFromServer(fileId, metadata, range, downloadFilename);
   } else {
     return new Response("Decryption not prepared", { status: 400 });
   }

From 3628e6d21ae6a967a40770c5ad972861faa9eeb0 Mon Sep 17 00:00:00 2001
From: static
Date: Sun, 11 Jan 2026 13:19:54 +0900
Subject: [PATCH 4/8] Handle uploads in a streaming fashion as well
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 package.json                                  |   1 +
 pnpm-lock.yaml                                |   9 +
 src/lib/indexedDB/keyStore.ts                 |   6 +-
 src/lib/modules/crypto/aes.ts                 |   2 +-
 src/lib/modules/crypto/sha.ts                 |  13 +
 src/lib/modules/crypto/util.ts                |   4 +-
 src/lib/modules/file/upload.svelte.ts         | 314 +++++++++++++++---
 src/lib/modules/thumbnail.ts                  |  17 +
 .../(main)/directory/[[id]]/service.svelte.ts |   4 +-
 9 files changed, 308 insertions(+), 62 deletions(-)

diff --git a/package.json b/package.json
index c16b700..17dad8d 100644
--- a/package.json
+++ b/package.json
@@ -19,6 +19,7 @@
     "@eslint/compat": "^2.0.0",
     "@eslint/js": "^9.39.2",
     "@iconify-json/material-symbols": "^1.2.50",
+    "@noble/hashes": "^2.0.1",
     "@sveltejs/adapter-node": "^5.4.0",
     "@sveltejs/kit": "^2.49.2",
     "@sveltejs/vite-plugin-svelte": "^6.2.1",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index e4e336f..025aacd 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -48,6 +48,9 @@ importers:
       '@iconify-json/material-symbols':
         specifier: ^1.2.50
         version: 1.2.50
+      
'@noble/hashes': + specifier: ^2.0.1 + version: 2.0.1 '@sveltejs/adapter-node': specifier: ^5.4.0 version: 5.4.0(@sveltejs/kit@2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))) @@ -414,6 +417,10 @@ packages: '@jridgewell/trace-mapping@0.3.31': resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + '@noble/hashes@2.0.1': + resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==} + engines: {node: '>= 20.19.0'} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -2217,6 +2224,8 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@noble/hashes@2.0.1': {} + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 diff --git a/src/lib/indexedDB/keyStore.ts b/src/lib/indexedDB/keyStore.ts index 7a4c89e..86b8b79 100644 --- a/src/lib/indexedDB/keyStore.ts +++ b/src/lib/indexedDB/keyStore.ts @@ -70,12 +70,12 @@ export const storeMasterKeys = async (keys: MasterKey[]) => { }; export const getHmacSecrets = async () => { - return await keyStore.hmacSecret.toArray(); + return (await keyStore.hmacSecret.toArray()).filter(({ secret }) => secret.extractable); }; export const storeHmacSecrets = async (secrets: HmacSecret[]) => { - if (secrets.some(({ secret }) => secret.extractable)) { - throw new Error("Hmac secrets must be nonextractable"); + if (secrets.some(({ secret }) => !secret.extractable)) { + throw new Error("Hmac secrets must be extractable"); } await keyStore.hmacSecret.bulkPut(secrets); }; diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts index fe11afb..4035343 100644 --- a/src/lib/modules/crypto/aes.ts +++ b/src/lib/modules/crypto/aes.ts @@ -77,7 +77,7 @@ export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: Cry name: "HMAC", hash: "SHA-256", } satisfies HmacImportParams, - false, // Nonextractable + true, // Extractable ["sign", "verify"], ), }; diff --git a/src/lib/modules/crypto/sha.ts b/src/lib/modules/crypto/sha.ts index 9bf2dea..883ac10 100644 --- a/src/lib/modules/crypto/sha.ts +++ b/src/lib/modules/crypto/sha.ts @@ -1,7 +1,20 @@ +import { hmac } from "@noble/hashes/hmac.js"; +import { sha256 } from "@noble/hashes/sha2.js"; + export const digestMessage = async (message: BufferSource) => { return await crypto.subtle.digest("SHA-256", message); }; +export const createStreamingHmac = async (hmacSecret: CryptoKey) => { + const keyBytes = new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret)); + const h = hmac.create(sha256, keyBytes); + + return { + update: (data: Uint8Array) => h.update(data), + digest: () => h.digest(), + }; +}; + export const generateHmacSecret = async () => { return { hmacSecret: await crypto.subtle.generateKey( diff --git a/src/lib/modules/crypto/util.ts b/src/lib/modules/crypto/util.ts index a3e3bc0..215eaf2 100644 --- a/src/lib/modules/crypto/util.ts +++ b/src/lib/modules/crypto/util.ts @@ -9,8 +9,8 @@ export const decodeString = (data: ArrayBuffer) => { return textDecoder.decode(data); }; -export const encodeToBase64 = (data: ArrayBuffer) => { - return btoa(String.fromCharCode(...new Uint8Array(data))); +export const encodeToBase64 = (data: ArrayBuffer | Uint8Array) => { + 
return btoa(String.fromCharCode(...(data instanceof ArrayBuffer ? new Uint8Array(data) : data))); }; export const decodeFromBase64 = (data: string) => { diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index 2bb6c7c..ac3010e 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -1,6 +1,6 @@ import axios from "axios"; import ExifReader from "exifreader"; -import { limitFunction } from "p-limit"; +import pLimit, { limitFunction } from "p-limit"; import { CHUNK_SIZE } from "$lib/constants"; import { encodeToBase64, @@ -11,9 +11,10 @@ import { encryptChunk, digestMessage, signMessageHmac, + createStreamingHmac, } from "$lib/modules/crypto"; import { Scheduler } from "$lib/modules/scheduler"; -import { generateThumbnail } from "$lib/modules/thumbnail"; +import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail"; import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; import type { MasterKey, HmacSecret } from "$lib/stores"; import { trpc } from "$trpc/client"; @@ -41,7 +42,7 @@ export type LiveFileUploadState = FileUploadState & { }; const scheduler = new Scheduler< - { fileId: number; fileBuffer: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined + { fileId: number; fileBuffer?: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined >(); let uploadingFiles: FileUploadState[] = $state([]); @@ -77,6 +78,33 @@ const requestDuplicateFileScan = limitFunction( { concurrency: 1 }, ); +const isImageFile = (fileType: string) => fileType.startsWith("image/"); + +const requestDuplicateFileScanStreaming = limitFunction( + async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise) => { + const hmacStream = await createStreamingHmac(hmacSecret.secret); + const reader = file.stream().getReader(); + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + hmacStream.update(value); + } + + const fileSigned = encodeToBase64(hmacStream.digest()); + const files = await trpc().file.listByHash.query({ + hskVersion: hmacSecret.version, + contentHmac: fileSigned, + }); + if (files.length === 0 || (await onDuplicate())) { + return { fileSigned }; + } else { + return {}; + } + }, + { concurrency: 1 }, +); + const getFileType = (file: File) => { if (file.type) return file.type; if (file.name.endsWith(".heic")) return "image/heic"; @@ -235,6 +263,148 @@ const requestFileUpload = limitFunction( { concurrency: 1 }, ); +const uploadFileStreaming = async ( + state: FileUploadState, + file: File, + masterKey: MasterKey, + hmacSecret: HmacSecret, + fileSigned: string, + parentId: DirectoryId, +) => { + state.status = "uploading"; + + const fileType = getFileType(file); + const { dataKey, dataKeyVersion } = await generateDataKey(); + const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); + + const nameEncrypted = await encryptString(file.name, dataKey); + const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); + + // Calculate total chunks for metadata + const totalChunks = Math.ceil(file.size / CHUNK_SIZE); + const metadata = { + chunks: totalChunks, + parent: parentId, + mekVersion: masterKey.version, + dek: dataKeyWrapped, + dekVersion: dataKeyVersion, + hskVersion: hmacSecret.version, + contentType: fileType, + name: nameEncrypted.ciphertext, + nameIv: nameEncrypted.iv, + lastModifiedAt: lastModifiedAtEncrypted.ciphertext, + lastModifiedAtIv: lastModifiedAtEncrypted.iv, + }; + + const { uploadId } = await 
trpc().file.startUpload.mutate(metadata);
+
+  // Stream file, encrypt, and upload with concurrency limit
+  const reader = file.stream().getReader();
+  const limit = pLimit(4);
+  let buffer = new Uint8Array(0);
+  let chunkIndex = 0;
+  const uploadPromises: Promise<void>[] = [];
+
+  const totalBytes = file.size;
+  let uploadedBytes = 0;
+  const startTime = Date.now();
+
+  const uploadChunk = async (
+    index: number,
+    encryptedChunk: ArrayBuffer,
+    chunkHash: string,
+    originalChunkSize: number,
+  ) => {
+    const response = await fetch(`/api/file/upload/${uploadId}/chunks/${index}`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/octet-stream",
+        "Content-Digest": `sha-256=:${chunkHash}:`,
+      },
+      body: encryptedChunk,
+    });
+
+    if (!response.ok) {
+      throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
+    }
+
+    // Update progress after upload completes
+    uploadedBytes += originalChunkSize;
+    const elapsed = (Date.now() - startTime) / 1000;
+    const rate = uploadedBytes / elapsed;
+    const remaining = totalBytes - uploadedBytes;
+    const estimated = rate > 0 ? remaining / rate : undefined;
+
+    state.progress = uploadedBytes / totalBytes;
+    state.rate = rate;
+    state.estimated = estimated;
+  };
+
+  while (true) {
+    const { done, value } = await reader.read();
+    if (done && buffer.length === 0) break;
+
+    if (value) {
+      const newBuffer = new Uint8Array(buffer.length + value.length);
+      newBuffer.set(buffer);
+      newBuffer.set(value, buffer.length);
+      buffer = newBuffer;
+    }
+
+    while (buffer.length >= CHUNK_SIZE || (done && buffer.length > 0)) {
+      const chunkSize = Math.min(CHUNK_SIZE, buffer.length);
+      const chunk = buffer.slice(0, chunkSize);
+      buffer = buffer.slice(chunkSize);
+
+      const encryptedChunk = await encryptChunk(chunk.buffer.slice(0, chunk.byteLength), dataKey);
+      const chunkHash = encodeToBase64(await digestMessage(encryptedChunk));
+      const currentIndex = chunkIndex++;
+
+      uploadPromises.push(
+        limit(() => uploadChunk(currentIndex, encryptedChunk, chunkHash, chunkSize)),
+      );
+    }
+
+    if (done) break;
+  }
+
+  await Promise.all(uploadPromises);
+
+  const { file: fileId } = await trpc().file.completeUpload.mutate({
+    uploadId,
+    contentHmac: fileSigned,
+  });
+
+  // Generate and upload thumbnail for video files
+  if (fileType.startsWith("video/")) {
+    try {
+      const thumbnail = await generateThumbnailFromFile(file);
+      if (thumbnail) {
+        const thumbnailBuffer = await thumbnail.arrayBuffer();
+        const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey);
+
+        const thumbnailForm = new FormData();
+        thumbnailForm.set(
+          "metadata",
+          JSON.stringify({
+            dekVersion: dataKeyVersion.toISOString(),
+            contentIv: encodeToBase64(thumbnailEncrypted.iv),
+          } satisfies FileThumbnailUploadRequest),
+        );
+        thumbnailForm.set("content", new Blob([thumbnailEncrypted.ciphertext]));
+        await axios.post(`/api/file/${fileId}/thumbnail/upload`, thumbnailForm);
+      }
+    } catch (e) {
+      // Thumbnail upload failure is not critical
+      console.error(e);
+    }
+  }
+
+  state.status = "uploaded";
+
+  return { fileId };
+};
+
 export const uploadFile = async (
   file: File,
   parentId: "root" | number,
@@ -249,69 +419,103 @@ export const uploadFile = async (
   });
   const state = uploadingFiles.at(-1)!;
 
+  const fileType = getFileType(file);
+
+  // Image files: use buffer-based approach (need EXIF + thumbnail)
+  if (isImageFile(fileType)) {
+    return await scheduler.schedule(file.size, async () => {
+      state.status = "encryption-pending";
+
+      try {
+        const { fileBuffer, fileSigned } = await
requestDuplicateFileScan( + file, + hmacSecret, + onDuplicate, + ); + if (!fileBuffer || !fileSigned) { + state.status = "canceled"; + uploadingFiles = uploadingFiles.filter((file) => file !== state); + return undefined; + } + + const { + dataKeyWrapped, + dataKeyVersion, + fileType, + chunksEncrypted, + nameEncrypted, + createdAtEncrypted, + lastModifiedAtEncrypted, + thumbnail, + } = await encryptFile(state, file, fileBuffer, masterKey); + + const metadata = { + chunks: chunksEncrypted.length, + parent: parentId, + mekVersion: masterKey.version, + dek: dataKeyWrapped, + dekVersion: dataKeyVersion, + hskVersion: hmacSecret.version, + contentType: fileType, + name: nameEncrypted.ciphertext, + nameIv: nameEncrypted.iv, + createdAt: createdAtEncrypted?.ciphertext, + createdAtIv: createdAtEncrypted?.iv, + lastModifiedAt: lastModifiedAtEncrypted.ciphertext, + lastModifiedAtIv: lastModifiedAtEncrypted.iv, + }; + + let thumbnailForm = null; + if (thumbnail) { + thumbnailForm = new FormData(); + thumbnailForm.set( + "metadata", + JSON.stringify({ + dekVersion: dataKeyVersion.toISOString(), + contentIv: encodeToBase64(thumbnail.iv), + } satisfies FileThumbnailUploadRequest), + ); + thumbnailForm.set("content", new Blob([thumbnail.ciphertext])); + } + + const { fileId } = await requestFileUpload( + state, + metadata, + chunksEncrypted, + fileSigned, + thumbnailForm, + ); + return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext }; + } catch (e) { + state.status = "error"; + throw e; + } + }); + } + + // Video and other files: use streaming approach return await scheduler.schedule(file.size, async () => { state.status = "encryption-pending"; try { - const { fileBuffer, fileSigned } = await requestDuplicateFileScan( - file, - hmacSecret, - onDuplicate, - ); - if (!fileBuffer || !fileSigned) { + // 1st pass: streaming HMAC for duplicate check + const { fileSigned } = await requestDuplicateFileScanStreaming(file, hmacSecret, onDuplicate); + if (!fileSigned) { state.status = "canceled"; - uploadingFiles = uploadingFiles.filter((file) => file !== state); + uploadingFiles = uploadingFiles.filter((f) => f !== state); return undefined; } - const { - dataKeyWrapped, - dataKeyVersion, - fileType, - chunksEncrypted, - nameEncrypted, - createdAtEncrypted, - lastModifiedAtEncrypted, - thumbnail, - } = await encryptFile(state, file, fileBuffer, masterKey); - - const metadata = { - chunks: chunksEncrypted.length, - parent: parentId, - mekVersion: masterKey.version, - dek: dataKeyWrapped, - dekVersion: dataKeyVersion, - hskVersion: hmacSecret.version, - contentType: fileType, - name: nameEncrypted.ciphertext, - nameIv: nameEncrypted.iv, - createdAt: createdAtEncrypted?.ciphertext, - createdAtIv: createdAtEncrypted?.iv, - lastModifiedAt: lastModifiedAtEncrypted.ciphertext, - lastModifiedAtIv: lastModifiedAtEncrypted.iv, - }; - - let thumbnailForm = null; - if (thumbnail) { - thumbnailForm = new FormData(); - thumbnailForm.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: encodeToBase64(thumbnail.iv), - } satisfies FileThumbnailUploadRequest), - ); - thumbnailForm.set("content", new Blob([thumbnail.ciphertext])); - } - - const { fileId } = await requestFileUpload( + // 2nd pass: streaming encrypt + upload + const { fileId } = await uploadFileStreaming( state, - metadata, - chunksEncrypted, + file, + masterKey, + hmacSecret, fileSigned, - thumbnailForm, + parentId, ); - return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext }; + return { fileId, 
fileBuffer: undefined, thumbnailBuffer: undefined };
     } catch (e) {
       state.status = "error";
       throw e;
diff --git a/src/lib/modules/thumbnail.ts b/src/lib/modules/thumbnail.ts
index d9a995b..739c7af 100644
--- a/src/lib/modules/thumbnail.ts
+++ b/src/lib/modules/thumbnail.ts
@@ -125,3 +125,20 @@ export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: strin
 export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => {
   return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`;
 };
+
+export const generateThumbnailFromFile = async (file: File) => {
+  const fileType = file.type || (file.name.endsWith(".heic") ? "image/heic" : "");
+  if (!fileType.startsWith("video/")) return null;
+
+  let url;
+  try {
+    url = URL.createObjectURL(file);
+    return await generateVideoThumbnail(url);
+  } catch {
+    return null;
+  } finally {
+    if (url) {
+      URL.revokeObjectURL(url);
+    }
+  }
+};
diff --git a/src/routes/(main)/directory/[[id]]/service.svelte.ts b/src/routes/(main)/directory/[[id]]/service.svelte.ts
index f83bbaf..ccd5b14 100644
--- a/src/routes/(main)/directory/[[id]]/service.svelte.ts
+++ b/src/routes/(main)/directory/[[id]]/service.svelte.ts
@@ -88,7 +88,9 @@ export const requestFileUpload = async (
   const res = await uploadFile(file, parentId, hmacSecret, masterKey, onDuplicate);
   if (!res) return false;
 
-  storeFileCache(res.fileId, res.fileBuffer); // Intended
+  if (res.fileBuffer) {
+    storeFileCache(res.fileId, res.fileBuffer); // Intended
+  }
   if (res.thumbnailBuffer) {
     storeFileThumbnailCache(res.fileId, res.thumbnailBuffer); // Intended
   }

From 57c27b76bea9980756d52d609c6058bd14cd7690 Mon Sep 17 00:00:00 2001
From: static
Date: Sun, 11 Jan 2026 14:07:32 +0900
Subject: [PATCH 5/8] Switch thumbnail uploads to the new upload mechanism as
 well
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 package.json                                  |   1 -
 pnpm-lock.yaml                                |   8 -
 src/lib/modules/file/upload.svelte.ts         |  93 ++++---
 src/lib/server/db/media.ts                    |   4 +-
 .../migrations/1768062380-AddChunkedUpload.ts |  24 +-
 src/lib/server/db/schema/media.ts             |   2 +-
 src/lib/server/db/schema/upload.ts            |  17 +-
 src/lib/server/db/upload.ts                   | 101 ++++++--
 src/lib/server/services/file.ts               | 120 +--------
 src/lib/server/services/upload.ts             |  77 ++++++
 src/lib/services/file.ts                      |  44 +++-
 .../api/file/[id]/thumbnail/upload/+server.ts |  74 ------
 .../upload/[id]/chunks/[index]/+server.ts     |   2 +-
 src/trpc/router.server.ts                     |   2 +
 src/trpc/routers/file.ts                      | 146 +----------
 src/trpc/routers/index.ts                     |   1 +
 src/trpc/routers/upload.ts                    | 241 ++++++++++++++++++
 17 files changed, 527 insertions(+), 430 deletions(-)
 create mode 100644 src/lib/server/services/upload.ts
 delete mode 100644 src/routes/api/file/[id]/thumbnail/upload/+server.ts
 rename src/routes/api/{file => }/upload/[id]/chunks/[index]/+server.ts (96%)
 create mode 100644 src/trpc/routers/upload.ts

diff --git a/package.json b/package.json
index 17dad8d..952d53f 100644
--- a/package.json
+++ b/package.json
@@ -56,7 +56,6 @@
     "vite": "^7.3.0"
   },
   "dependencies": {
-    "@fastify/busboy": "^3.2.0",
     "@trpc/server": "^11.8.1",
     "argon2": "^0.44.0",
     "kysely": "^0.28.9",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 025aacd..f4c8e80 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -8,9 +8,6 @@ importers:
   .:
     dependencies:
-      '@fastify/busboy':
-        specifier: ^3.2.0
-        version: 3.2.0
       
'@trpc/server': specifier: ^11.8.1 version: 11.8.1(typescript@5.9.3) @@ -373,9 +370,6 @@ packages: resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@fastify/busboy@3.2.0': - resolution: {integrity: sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA==} - '@humanfs/core@0.19.1': resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} engines: {node: '>=18.18.0'} @@ -2180,8 +2174,6 @@ snapshots: '@eslint/core': 0.17.0 levn: 0.4.1 - '@fastify/busboy@3.2.0': {} - '@humanfs/core@0.19.1': {} '@humanfs/node@0.16.7': diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index ac3010e..eaa35df 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -1,4 +1,3 @@ -import axios from "axios"; import ExifReader from "exifreader"; import pLimit, { limitFunction } from "p-limit"; import { CHUNK_SIZE } from "$lib/constants"; @@ -15,7 +14,6 @@ import { } from "$lib/modules/crypto"; import { Scheduler } from "$lib/modules/scheduler"; import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail"; -import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; import type { MasterKey, HmacSecret } from "$lib/stores"; import { trpc } from "$trpc/client"; import type { RouterInputs } from "$trpc/router.server"; @@ -194,17 +192,55 @@ const encryptFile = limitFunction( { concurrency: 4 }, ); +const uploadThumbnail = async ( + fileId: number, + thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer }, + dataKeyVersion: Date, +) => { + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); + + const ivAndCiphertext = new Uint8Array( + thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength, + ); + ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0); + ivAndCiphertext.set( + new Uint8Array(thumbnailEncrypted.ciphertext), + thumbnailEncrypted.iv.byteLength, + ); + + const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext)); + + const response = await fetch(`/api/upload/${uploadId}/chunks/0`, { + method: "POST", + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${chunkHash}:`, + }, + body: ivAndCiphertext, + }); + + if (!response.ok) { + throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`); + } + + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); +}; + const requestFileUpload = limitFunction( async ( state: FileUploadState, - metadata: RouterInputs["file"]["startUpload"], + metadata: RouterInputs["upload"]["startFileUpload"], chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[], fileSigned: string | undefined, - thumbnailForm: FormData | null, + thumbnailData: { ciphertext: ArrayBuffer; iv: ArrayBuffer; plaintext: ArrayBuffer } | null, + dataKeyVersion: Date, ) => { state.status = "uploading"; - const { uploadId } = await trpc().file.startUpload.mutate(metadata); + const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); // Upload chunks with progress tracking const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0); @@ -214,7 +250,7 @@ const requestFileUpload = limitFunction( for (let i = 0; i 
< chunksEncrypted.length; i++) { const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!; - const response = await fetch(`/api/file/upload/${uploadId}/chunks/${i}`, { + const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, { method: "POST", headers: { "Content-Type": "application/octet-stream", @@ -241,15 +277,15 @@ const requestFileUpload = limitFunction( } // Complete upload - const { file: fileId } = await trpc().file.completeUpload.mutate({ + const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ uploadId, contentHmac: fileSigned, }); // Upload thumbnail if exists - if (thumbnailForm) { + if (thumbnailData) { try { - await axios.post(`/api/file/${fileId}/thumbnail/upload`, thumbnailForm); + await uploadThumbnail(fileId, thumbnailData, dataKeyVersion); } catch (e) { // TODO: Error handling for thumbnail upload console.error(e); @@ -258,7 +294,7 @@ const requestFileUpload = limitFunction( state.status = "uploaded"; - return { fileId }; + return { fileId, thumbnailBuffer: thumbnailData?.plaintext }; }, { concurrency: 1 }, ); @@ -296,7 +332,7 @@ const uploadFileStreaming = async ( lastModifiedAtIv: lastModifiedAtEncrypted.iv, }; - const { uploadId } = await trpc().file.startUpload.mutate(metadata); + const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); // Stream file, encrypt, and upload with concurrency limit const reader = file.stream().getReader(); @@ -315,7 +351,7 @@ const uploadFileStreaming = async ( chunkHash: string, originalChunkSize: number, ) => { - const response = await fetch(`/api/file/upload/${uploadId}/chunks/${index}`, { + const response = await fetch(`/api/upload/${uploadId}/chunks/${index}`, { method: "POST", headers: { "Content-Type": "application/octet-stream", @@ -370,7 +406,7 @@ const uploadFileStreaming = async ( await Promise.all(uploadPromises); - const { file: fileId } = await trpc().file.completeUpload.mutate({ + const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ uploadId, contentHmac: fileSigned, }); @@ -383,16 +419,7 @@ const uploadFileStreaming = async ( const thumbnailBuffer = await thumbnail.arrayBuffer(); const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey); - const thumbnailForm = new FormData(); - thumbnailForm.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: encodeToBase64(thumbnailEncrypted.iv), - } satisfies FileThumbnailUploadRequest), - ); - thumbnailForm.set("content", new Blob([thumbnailEncrypted.ciphertext])); - await axios.post(`/api/file/${fileId}/thumbnail/upload`, thumbnailForm); + await uploadThumbnail(fileId, thumbnailEncrypted, dataKeyVersion); } } catch (e) { // Thumbnail upload failure is not critical @@ -465,27 +492,15 @@ export const uploadFile = async ( lastModifiedAtIv: lastModifiedAtEncrypted.iv, }; - let thumbnailForm = null; - if (thumbnail) { - thumbnailForm = new FormData(); - thumbnailForm.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: encodeToBase64(thumbnail.iv), - } satisfies FileThumbnailUploadRequest), - ); - thumbnailForm.set("content", new Blob([thumbnail.ciphertext])); - } - - const { fileId } = await requestFileUpload( + const { fileId, thumbnailBuffer } = await requestFileUpload( state, metadata, chunksEncrypted, fileSigned, - thumbnailForm, + thumbnail ?? 
null, + dataKeyVersion, ); - return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext }; + return { fileId, fileBuffer, thumbnailBuffer }; } catch (e) { state.status = "error"; throw e; diff --git a/src/lib/server/db/media.ts b/src/lib/server/db/media.ts index 209e256..c4d2a34 100644 --- a/src/lib/server/db/media.ts +++ b/src/lib/server/db/media.ts @@ -6,7 +6,7 @@ interface Thumbnail { id: number; path: string; updatedAt: Date; - encContentIv: string; + encContentIv: string | null; } interface FileThumbnail extends Thumbnail { @@ -18,7 +18,7 @@ export const updateFileThumbnail = async ( fileId: number, dekVersion: Date, path: string, - encContentIv: string, + encContentIv: string | null, ) => { return await db.transaction().execute(async (trx) => { const file = await trx diff --git a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts index fe8abd4..cf18c05 100644 --- a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts +++ b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts @@ -8,23 +8,31 @@ export const up = async (db: Kysely) => { .alterColumn("encrypted_content_iv", (col) => col.dropNotNull()) .execute(); + // media.ts + await db.schema + .alterTable("thumbnail") + .alterColumn("encrypted_content_iv", (col) => col.dropNotNull()) + .execute(); + // upload.ts await db.schema .createTable("upload_session") .addColumn("id", "uuid", (col) => col.primaryKey().defaultTo(sql`gen_random_uuid()`)) + .addColumn("type", "text", (col) => col.notNull()) .addColumn("user_id", "integer", (col) => col.references("user.id").notNull()) .addColumn("total_chunks", "integer", (col) => col.notNull()) .addColumn("uploaded_chunks", sql`integer[]`, (col) => col.notNull().defaultTo(sql`'{}'`)) .addColumn("expires_at", "timestamp(3)", (col) => col.notNull()) .addColumn("parent_id", "integer", (col) => col.references("directory.id")) - .addColumn("master_encryption_key_version", "integer", (col) => col.notNull()) - .addColumn("encrypted_data_encryption_key", "text", (col) => col.notNull()) - .addColumn("data_encryption_key_version", "timestamp(3)", (col) => col.notNull()) + .addColumn("master_encryption_key_version", "integer") + .addColumn("encrypted_data_encryption_key", "text") + .addColumn("data_encryption_key_version", "timestamp(3)") .addColumn("hmac_secret_key_version", "integer") - .addColumn("content_type", "text", (col) => col.notNull()) - .addColumn("encrypted_name", "json", (col) => col.notNull()) + .addColumn("content_type", "text") + .addColumn("encrypted_name", "json") .addColumn("encrypted_created_at", "json") - .addColumn("encrypted_last_modified_at", "json", (col) => col.notNull()) + .addColumn("encrypted_last_modified_at", "json") + .addColumn("file_id", "integer", (col) => col.references("file.id")) .addForeignKeyConstraint( "upload_session_fk01", ["user_id", "master_encryption_key_version"], @@ -43,6 +51,10 @@ export const up = async (db: Kysely) => { // eslint-disable-next-line @typescript-eslint/no-explicit-any export const down = async (db: Kysely) => { await db.schema.dropTable("upload_session").execute(); + await db.schema + .alterTable("thumbnail") + .alterColumn("encrypted_content_iv", (col) => col.setNotNull()) + .execute(); await db.schema .alterTable("file") .alterColumn("encrypted_content_iv", (col) => col.setNotNull()) diff --git a/src/lib/server/db/schema/media.ts b/src/lib/server/db/schema/media.ts index ebfbf29..1fef90b 100644 --- a/src/lib/server/db/schema/media.ts +++ 
b/src/lib/server/db/schema/media.ts @@ -7,7 +7,7 @@ interface ThumbnailTable { category_id: number | null; path: string; updated_at: Date; - encrypted_content_iv: string; // Base64 + encrypted_content_iv: string | null; // Base64 } declare module "./index" { diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts index 3372955..26eaac2 100644 --- a/src/lib/server/db/schema/upload.ts +++ b/src/lib/server/db/schema/upload.ts @@ -3,20 +3,25 @@ import type { Ciphertext } from "./util"; interface UploadSessionTable { id: Generated; + type: "file" | "thumbnail"; user_id: number; total_chunks: number; uploaded_chunks: Generated; expires_at: Date; + // For file uploads parent_id: number | null; - master_encryption_key_version: number; - encrypted_data_encryption_key: string; // Base64 - data_encryption_key_version: Date; + master_encryption_key_version: number | null; + encrypted_data_encryption_key: string | null; // Base64 + data_encryption_key_version: Date | null; hmac_secret_key_version: number | null; - content_type: string; - encrypted_name: Ciphertext; + content_type: string | null; + encrypted_name: Ciphertext | null; encrypted_created_at: Ciphertext | null; - encrypted_last_modified_at: Ciphertext; + encrypted_last_modified_at: Ciphertext | null; + + // For thumbnail uploads + file_id: number | null; } declare module "./index" { diff --git a/src/lib/server/db/upload.ts b/src/lib/server/db/upload.ts index 935dc80..4c8da24 100644 --- a/src/lib/server/db/upload.ts +++ b/src/lib/server/db/upload.ts @@ -3,13 +3,16 @@ import { IntegrityError } from "./error"; import db from "./kysely"; import type { Ciphertext } from "./schema"; -interface UploadSession { +interface BaseUploadSession { id: string; userId: number; totalChunks: number; uploadedChunks: number[]; expiresAt: Date; +} +interface FileUploadSession extends BaseUploadSession { + type: "file"; parentId: DirectoryId; mekVersion: number; encDek: string; @@ -21,7 +24,15 @@ interface UploadSession { encLastModifiedAt: Ciphertext; } -export const createUploadSession = async (params: Omit) => { +interface ThumbnailUploadSession extends BaseUploadSession { + type: "thumbnail"; + fileId: number; + dekVersion: Date; +} + +export const createFileUploadSession = async ( + params: Omit, +) => { return await db.transaction().execute(async (trx) => { const mek = await trx .selectFrom("master_encryption_key") @@ -52,6 +63,7 @@ export const createUploadSession = async (params: Omit, +) => { + return await db.transaction().execute(async (trx) => { + const file = await trx + .selectFrom("file") + .select("data_encryption_key_version") + .where("id", "=", params.fileId) + .where("user_id", "=", params.userId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (!file) { + throw new IntegrityError("File not found"); + } else if (file.data_encryption_key_version.getTime() !== params.dekVersion.getTime()) { + throw new IntegrityError("Invalid DEK version"); + } + + const { sessionId } = await trx + .insertInto("upload_session") + .values({ + type: "thumbnail", + user_id: params.userId, + total_chunks: 1, + expires_at: params.expiresAt, + file_id: params.fileId, + data_encryption_key_version: params.dekVersion, + }) + .returning("id as sessionId") + .executeTakeFirstOrThrow(); + return { id: sessionId }; + }); +}; + export const getUploadSession = async (sessionId: string, userId: number) => { const session = await db .selectFrom("upload_session") @@ -80,24 +126,39 @@ export const getUploadSession = async (sessionId: string, 
userId: number) => { .where("expires_at", ">", new Date()) .limit(1) .executeTakeFirst(); - return session - ? ({ - id: session.id, - userId: session.user_id, - totalChunks: session.total_chunks, - uploadedChunks: session.uploaded_chunks, - expiresAt: session.expires_at, - parentId: session.parent_id ?? "root", - mekVersion: session.master_encryption_key_version, - encDek: session.encrypted_data_encryption_key, - dekVersion: session.data_encryption_key_version, - hskVersion: session.hmac_secret_key_version, - contentType: session.content_type, - encName: session.encrypted_name, - encCreatedAt: session.encrypted_created_at, - encLastModifiedAt: session.encrypted_last_modified_at, - } satisfies UploadSession) - : null; + + if (!session) return null; + + if (session.type === "file") { + return { + type: "file", + id: session.id, + userId: session.user_id, + totalChunks: session.total_chunks, + uploadedChunks: session.uploaded_chunks, + expiresAt: session.expires_at, + parentId: session.parent_id ?? "root", + mekVersion: session.master_encryption_key_version!, + encDek: session.encrypted_data_encryption_key!, + dekVersion: session.data_encryption_key_version!, + hskVersion: session.hmac_secret_key_version, + contentType: session.content_type!, + encName: session.encrypted_name!, + encCreatedAt: session.encrypted_created_at, + encLastModifiedAt: session.encrypted_last_modified_at!, + } satisfies FileUploadSession; + } else { + return { + type: "thumbnail", + id: session.id, + userId: session.user_id, + totalChunks: session.total_chunks, + uploadedChunks: session.uploaded_chunks, + expiresAt: session.expires_at, + fileId: session.file_id!, + dekVersion: session.data_encryption_key_version!, + } satisfies ThumbnailUploadSession; + } }; export const markChunkAsUploaded = async (sessionId: string, chunkIndex: number) => { diff --git a/src/lib/server/services/file.ts b/src/lib/server/services/file.ts index 9df6430..0d67303 100644 --- a/src/lib/server/services/file.ts +++ b/src/lib/server/services/file.ts @@ -1,17 +1,8 @@ import { error } from "@sveltejs/kit"; -import { createHash } from "crypto"; -import { createReadStream, createWriteStream } from "fs"; -import { mkdir, stat } from "fs/promises"; -import { dirname } from "path"; +import { createReadStream } from "fs"; +import { stat } from "fs/promises"; import { Readable } from "stream"; -import { pipeline } from "stream/promises"; -import { v4 as uuidv4 } from "uuid"; -import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants"; -import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; -import env from "$lib/server/loadenv"; -import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; - -const uploadLocks = new Set(); +import { FileRepo, MediaRepo } from "$lib/server/db"; const createEncContentStream = async ( path: string, @@ -77,110 +68,7 @@ export const getFileThumbnailStream = async ( return createEncContentStream( thumbnail.path, - Buffer.from(thumbnail.encContentIv, "base64"), + thumbnail.encContentIv ? 
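// Note: encContentIv may now be null — legacy thumbnails keep their IV in the
// DB, while chunk-uploaded ones prepend it to the stored file. A hypothetical
// reader-side helper under that assumption (AES_GCM_IV_SIZE = 12):
//
//   import { open } from "fs/promises";
//
//   const resolveContentIv = async (dbIv: string | null, path: string) => {
//     if (dbIv !== null) return Buffer.from(dbIv, "base64"); // legacy row
//     const fd = await open(path, "r"); // new format: IV is the file's first 12 bytes
//     try {
//       const { buffer } = await fd.read(Buffer.alloc(AES_GCM_IV_SIZE), 0, AES_GCM_IV_SIZE, 0);
//       return buffer;
//     } finally {
//       await fd.close();
//     }
//   };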
Buffer.from(thumbnail.encContentIv, "base64") : undefined, range, ); }; - -export const uploadFileThumbnail = async ( - userId: number, - fileId: number, - dekVersion: Date, - encContentIv: string, - encContentStream: Readable, -) => { - const path = `${env.thumbnailsPath}/${userId}/${uuidv4()}`; - await mkdir(dirname(path), { recursive: true }); - - try { - await pipeline(encContentStream, createWriteStream(path, { flags: "wx", mode: 0o600 })); - - const oldPath = await MediaRepo.updateFileThumbnail( - userId, - fileId, - dekVersion, - path, - encContentIv, - ); - safeUnlink(oldPath); // Intended - } catch (e) { - await safeUnlink(path); - - if (e instanceof IntegrityError) { - if (e.message === "File not found") { - error(404, "File not found"); - } else if (e.message === "Invalid DEK version") { - error(400, "Mismatched DEK version"); - } - } - throw e; - } -}; - -export const uploadChunk = async ( - userId: number, - sessionId: string, - chunkIndex: number, - encChunkStream: Readable, - encChunkHash: string, -) => { - const lockKey = `${sessionId}/${chunkIndex}`; - if (uploadLocks.has(lockKey)) { - error(409, "Chunk already uploaded"); // TODO: Message - } else { - uploadLocks.add(lockKey); - } - - const filePath = `${getChunkDirectoryPath(sessionId)}/${chunkIndex}`; - - try { - const session = await UploadRepo.getUploadSession(sessionId, userId); - if (!session) { - error(404, "Invalid upload id"); - } else if (chunkIndex >= session.totalChunks) { - error(400, "Invalid chunk index"); - } else if (session.uploadedChunks.includes(chunkIndex)) { - error(409, "Chunk already uploaded"); - } - - const isLastChunk = chunkIndex === session.totalChunks - 1; - - let writtenBytes = 0; - const hashStream = createHash("sha256"); - const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); - - for await (const chunk of encChunkStream) { - writtenBytes += chunk.length; - hashStream.update(chunk); - writeStream.write(chunk); - } - - await new Promise((resolve, reject) => { - writeStream.end((e: any) => (e ? 
reject(e) : resolve())); - }); - - if (hashStream.digest("base64") !== encChunkHash) { - throw new Error("Invalid checksum"); - } else if ( - (!isLastChunk && writtenBytes !== CHUNK_SIZE + ENCRYPTION_OVERHEAD) || - (isLastChunk && - (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > CHUNK_SIZE + ENCRYPTION_OVERHEAD)) - ) { - throw new Error("Invalid chunk size"); - } - - await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex); - } catch (e) { - await safeUnlink(filePath); - - if ( - e instanceof Error && - (e.message === "Invalid checksum" || e.message === "Invalid chunk size") - ) { - error(400, "Invalid request body"); - } - throw e; - } finally { - uploadLocks.delete(lockKey); - } -}; diff --git a/src/lib/server/services/upload.ts b/src/lib/server/services/upload.ts new file mode 100644 index 0000000..1be250d --- /dev/null +++ b/src/lib/server/services/upload.ts @@ -0,0 +1,77 @@ +import { error } from "@sveltejs/kit"; +import { createHash } from "crypto"; +import { createWriteStream } from "fs"; +import { Readable } from "stream"; +import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants"; +import { UploadRepo } from "$lib/server/db"; +import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; + +const chunkLocks = new Set(); + +export const uploadChunk = async ( + userId: number, + sessionId: string, + chunkIndex: number, + encChunkStream: Readable, + encChunkHash: string, +) => { + const lockKey = `${sessionId}/${chunkIndex}`; + if (chunkLocks.has(lockKey)) { + error(409, "Chunk already uploaded"); // TODO: Message + } else { + chunkLocks.add(lockKey); + } + + const filePath = `${getChunkDirectoryPath(sessionId)}/${chunkIndex}`; + + try { + const session = await UploadRepo.getUploadSession(sessionId, userId); + if (!session) { + error(404, "Invalid upload id"); + } else if (chunkIndex >= session.totalChunks) { + error(400, "Invalid chunk index"); + } else if (session.uploadedChunks.includes(chunkIndex)) { + error(409, "Chunk already uploaded"); + } + + const isLastChunk = chunkIndex === session.totalChunks - 1; + + let writtenBytes = 0; + const hashStream = createHash("sha256"); + const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); + + for await (const chunk of encChunkStream) { + writtenBytes += chunk.length; + hashStream.update(chunk); + writeStream.write(chunk); + } + + await new Promise((resolve, reject) => { + writeStream.end((e: any) => (e ? 
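// Note on the size invariant enforced below: every chunk except the last must
// be exactly CHUNK_SIZE + ENCRYPTION_OVERHEAD bytes (4 MiB of plaintext plus a
// 12-byte IV and a 16-byte GCM tag); the last chunk may be shorter, but never
// the IV and tag alone. Worked example for a 10 MiB file, under those constants:
//
//   plaintext chunks: 4 MiB, 4 MiB, 2 MiB
//   on the wire:      4_194_332, 4_194_332, 2_097_180 bytes
//
//   const encryptedSizeOf = (plaintextBytes: number) => plaintextBytes + ENCRYPTION_OVERHEAD;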
reject(e) : resolve())); + }); + + if (hashStream.digest("base64") !== encChunkHash) { + throw new Error("Invalid checksum"); + } else if ( + (!isLastChunk && writtenBytes !== CHUNK_SIZE + ENCRYPTION_OVERHEAD) || + (isLastChunk && + (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > CHUNK_SIZE + ENCRYPTION_OVERHEAD)) + ) { + throw new Error("Invalid chunk size"); + } + + await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex); + } catch (e) { + await safeUnlink(filePath); + + if ( + e instanceof Error && + (e.message === "Invalid checksum" || e.message === "Invalid chunk size") + ) { + error(400, "Invalid request body"); + } + throw e; + } finally { + chunkLocks.delete(lockKey); + } +}; diff --git a/src/lib/services/file.ts b/src/lib/services/file.ts index 5f95f42..2f37f52 100644 --- a/src/lib/services/file.ts +++ b/src/lib/services/file.ts @@ -1,5 +1,5 @@ import { getAllFileInfos } from "$lib/indexedDB/filesystem"; -import { encodeToBase64 } from "$lib/modules/crypto"; +import { encodeToBase64, digestMessage } from "$lib/modules/crypto"; import { getFileCache, storeFileCache, @@ -7,7 +7,6 @@ import { downloadFile, deleteFileThumbnailCache, } from "$lib/modules/file"; -import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; import { trpc } from "$trpc/client"; export const requestFileDownload = async ( @@ -28,17 +27,38 @@ export const requestFileThumbnailUpload = async ( dataKeyVersion: Date, thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer }, ) => { - const form = new FormData(); - form.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: encodeToBase64(thumbnailEncrypted.iv), - } satisfies FileThumbnailUploadRequest), - ); - form.set("content", new Blob([thumbnailEncrypted.ciphertext])); + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); - return await fetch(`/api/file/${fileId}/thumbnail/upload`, { method: "POST", body: form }); + // Prepend IV to ciphertext (consistent with file download format) + const ivAndCiphertext = new Uint8Array( + thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength, + ); + ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0); + ivAndCiphertext.set( + new Uint8Array(thumbnailEncrypted.ciphertext), + thumbnailEncrypted.iv.byteLength, + ); + + const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext)); + + const response = await fetch(`/api/upload/${uploadId}/chunks/0`, { + method: "POST", + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${chunkHash}:`, + }, + body: ivAndCiphertext, + }); + + if (!response.ok) { + throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`); + } + + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); + return response; }; export const requestDeletedFilesCleanup = async () => { diff --git a/src/routes/api/file/[id]/thumbnail/upload/+server.ts b/src/routes/api/file/[id]/thumbnail/upload/+server.ts deleted file mode 100644 index 62dfe42..0000000 --- a/src/routes/api/file/[id]/thumbnail/upload/+server.ts +++ /dev/null @@ -1,74 +0,0 @@ -import Busboy from "@fastify/busboy"; -import { error, text } from "@sveltejs/kit"; -import { Readable, Writable } from "stream"; -import { z } from "zod"; -import { authorize } from "$lib/server/modules/auth"; -import { fileThumbnailUploadRequest, type FileThumbnailUploadRequest } from "$lib/server/schemas"; -import { 
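// Note: the Content-Digest header sent above uses RFC 9530's byte-sequence
// form, `sha-256=:<base64 digest>:`. A sketch of the same construction,
// assuming digestMessage is the project's SHA-256 helper:
//
//   const contentDigestHeader = async (chunk: BufferSource) =>
//     `sha-256=:${encodeToBase64(await digestMessage(chunk))}:`;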
uploadFileThumbnail } from "$lib/server/services/file"; -import type { RequestHandler } from "./$types"; - -export const POST: RequestHandler = async ({ locals, params, request }) => { - const { userId } = await authorize(locals, "activeClient"); - - const zodRes = z - .object({ - id: z.coerce.number().int().positive(), - }) - .safeParse(params); - if (!zodRes.success) error(400, "Invalid path parameters"); - const { id } = zodRes.data; - - const contentType = request.headers.get("Content-Type"); - if (!contentType?.startsWith("multipart/form-data") || !request.body) { - error(400, "Invalid request body"); - } - - return new Promise((resolve, reject) => { - const bb = Busboy({ headers: { "content-type": contentType } }); - const handler = - (f: (...args: T) => Promise) => - (...args: T) => { - f(...args).catch(reject); - }; - - let metadata: FileThumbnailUploadRequest | null = null; - let content: Readable | null = null; - bb.on( - "field", - handler(async (fieldname, val) => { - if (fieldname === "metadata") { - // Ignore subsequent metadata fields - if (!metadata) { - const zodRes = fileThumbnailUploadRequest.safeParse(JSON.parse(val)); - if (!zodRes.success) error(400, "Invalid request body"); - metadata = zodRes.data; - } - } else { - error(400, "Invalid request body"); - } - }), - ); - bb.on( - "file", - handler(async (fieldname, file) => { - if (fieldname !== "content") error(400, "Invalid request body"); - if (!metadata || content) error(400, "Invalid request body"); - content = file; - - await uploadFileThumbnail( - userId, - id, - new Date(metadata.dekVersion), - metadata.contentIv, - content, - ); - resolve(text("Thumbnail uploaded", { headers: { "Content-Type": "text/plain" } })); - }), - ); - bb.on("error", (e) => { - content?.emit("error", e) ?? 
reject(e); - }); - - request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error - }); -}; diff --git a/src/routes/api/file/upload/[id]/chunks/[index]/+server.ts b/src/routes/api/upload/[id]/chunks/[index]/+server.ts similarity index 96% rename from src/routes/api/file/upload/[id]/chunks/[index]/+server.ts rename to src/routes/api/upload/[id]/chunks/[index]/+server.ts index c44e425..47d6397 100644 --- a/src/routes/api/file/upload/[id]/chunks/[index]/+server.ts +++ b/src/routes/api/upload/[id]/chunks/[index]/+server.ts @@ -2,7 +2,7 @@ import { error, text } from "@sveltejs/kit"; import { Readable } from "stream"; import { z } from "zod"; import { authorize } from "$lib/server/modules/auth"; -import { uploadChunk } from "$lib/server/services/file"; +import { uploadChunk } from "$lib/server/services/upload"; import type { RequestHandler } from "./$types"; export const POST: RequestHandler = async ({ locals, params, request }) => { diff --git a/src/trpc/router.server.ts b/src/trpc/router.server.ts index 64d25c7..d343fa6 100644 --- a/src/trpc/router.server.ts +++ b/src/trpc/router.server.ts @@ -9,6 +9,7 @@ import { fileRouter, hskRouter, mekRouter, + uploadRouter, userRouter, } from "./routers"; @@ -20,6 +21,7 @@ export const appRouter = router({ file: fileRouter, hsk: hskRouter, mek: mekRouter, + upload: uploadRouter, user: userRouter, }); diff --git a/src/trpc/routers/file.ts b/src/trpc/routers/file.ts index eaf42ca..294300c 100644 --- a/src/trpc/routers/file.ts +++ b/src/trpc/routers/file.ts @@ -1,20 +1,9 @@ import { TRPCError } from "@trpc/server"; -import { createHash } from "crypto"; -import { createReadStream, createWriteStream } from "fs"; -import { mkdir, rm } from "fs/promises"; -import mime from "mime"; -import { dirname } from "path"; -import { v4 as uuidv4 } from "uuid"; import { z } from "zod"; -import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; -import db from "$lib/server/db/kysely"; -import env from "$lib/server/loadenv"; -import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; -import { directoryIdSchema } from "$lib/server/schemas"; +import { FileRepo, MediaRepo, IntegrityError } from "$lib/server/db"; +import { safeUnlink } from "$lib/server/modules/filesystem"; import { router, roleProcedure } from "../init.server"; -const uploadLocks = new Set(); - const fileRouter = router({ get: roleProcedure["activeClient"] .input( @@ -171,137 +160,6 @@ const fileRouter = router({ return { updatedAt: thumbnail.updatedAt }; }), - - startUpload: roleProcedure["activeClient"] - .input( - z.object({ - chunks: z.int().positive(), - parent: directoryIdSchema, - mekVersion: z.int().positive(), - dek: z.base64().nonempty(), - dekVersion: z.date(), - hskVersion: z.int().positive().optional(), - contentType: z - .string() - .trim() - .nonempty() - .refine((value) => mime.getExtension(value) !== null), - name: z.base64().nonempty(), - nameIv: z.base64().nonempty(), - createdAt: z.base64().nonempty().optional(), - createdAtIv: z.base64().nonempty().optional(), - lastModifiedAt: z.base64().nonempty(), - lastModifiedAtIv: z.base64().nonempty(), - }), - ) - .mutation(async ({ ctx, input }) => { - const oneMinuteAgo = new Date(Date.now() - 60 * 1000); - const oneMinuteLater = new Date(Date.now() + 60 * 1000); - if (input.dekVersion <= oneMinuteAgo || input.dekVersion >= oneMinuteLater) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" }); - } - - try { - const { id: sessionId } = await 
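// Note: the freshness check above accepts a dekVersion only within ±60 s of
// server time, i.e. it bounds client/server clock skew. Equivalent predicate:
//
//   const isFreshDekVersion = (v: Date, skewMs = 60_000) =>
//     Math.abs(v.getTime() - Date.now()) < skewMs;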
UploadRepo.createUploadSession({ - userId: ctx.session.userId, - totalChunks: input.chunks, - expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours - parentId: input.parent, - mekVersion: input.mekVersion, - encDek: input.dek, - dekVersion: input.dekVersion, - hskVersion: input.hskVersion ?? null, - contentType: input.contentType, - encName: { ciphertext: input.name, iv: input.nameIv }, - encCreatedAt: - input.createdAt && input.createdAtIv - ? { ciphertext: input.createdAt, iv: input.createdAtIv } - : null, - encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv }, - }); - await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); - return { uploadId: sessionId }; - } catch (e) { - if (e instanceof IntegrityError) { - if (e.message === "Inactive MEK version") { - throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" }); - } else if (e.message === "Inactive HSK version") { - throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid HSK version" }); - } - } - throw e; - } - }), - - completeUpload: roleProcedure["activeClient"] - .input( - z.object({ - uploadId: z.uuidv4(), - contentHmac: z.base64().nonempty().optional(), - }), - ) - .mutation(async ({ ctx, input }) => { - const { uploadId } = input; - if (uploadLocks.has(uploadId)) { - throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); // TODO: Message - } else { - uploadLocks.add(uploadId); - } - - const filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`; - await mkdir(dirname(filePath), { recursive: true }); - - try { - const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); - if (!session) { - throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); - } else if ( - (session.hskVersion && !input.contentHmac) || - (!session.hskVersion && input.contentHmac) - ) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content hmac" }); // TODO: message - } else if (session.uploadedChunks.length < session.totalChunks) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); // TODO: Message - } - - const chunkDirectoryPath = getChunkDirectoryPath(uploadId); - const hashStream = createHash("sha256"); - const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); - - for (let i = 0; i < session.totalChunks; i++) { - for await (const chunk of createReadStream(`${chunkDirectoryPath}/${i}`)) { - hashStream.update(chunk); - writeStream.write(chunk); - } - } - - await new Promise((resolve, reject) => { - writeStream.end((e: any) => (e ? reject(e) : resolve())); - }); - - const hash = hashStream.digest("base64"); - const fileId = await db.transaction().execute(async (trx) => { - const { id: fileId } = await FileRepo.registerFile(trx, { - ...session, - userId: ctx.session.userId, - path: filePath, - contentHmac: input.contentHmac ?? 
null, - encContentHash: hash, - encContentIv: null, - }); - await UploadRepo.deleteUploadSession(trx, uploadId); - return fileId; - }); - - await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e)); - return { file: fileId }; - } catch (e) { - await safeUnlink(filePath); - throw e; - } finally { - uploadLocks.delete(uploadId); - } - }), }); export default fileRouter; diff --git a/src/trpc/routers/index.ts b/src/trpc/routers/index.ts index ab5b6a0..5c8df24 100644 --- a/src/trpc/routers/index.ts +++ b/src/trpc/routers/index.ts @@ -5,4 +5,5 @@ export { default as directoryRouter } from "./directory"; export { default as fileRouter } from "./file"; export { default as hskRouter } from "./hsk"; export { default as mekRouter } from "./mek"; +export { default as uploadRouter } from "./upload"; export { default as userRouter } from "./user"; diff --git a/src/trpc/routers/upload.ts b/src/trpc/routers/upload.ts new file mode 100644 index 0000000..08d483f --- /dev/null +++ b/src/trpc/routers/upload.ts @@ -0,0 +1,241 @@ +import { TRPCError } from "@trpc/server"; +import { createHash } from "crypto"; +import { createReadStream, createWriteStream } from "fs"; +import { mkdir, rename, rm } from "fs/promises"; +import mime from "mime"; +import { dirname } from "path"; +import { v4 as uuidv4 } from "uuid"; +import { z } from "zod"; +import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; +import db from "$lib/server/db/kysely"; +import env from "$lib/server/loadenv"; +import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; +import { directoryIdSchema } from "$lib/server/schemas"; +import { router, roleProcedure } from "../init.server"; + +const uploadLocks = new Set(); + +const uploadRouter = router({ + startFileUpload: roleProcedure["activeClient"] + .input( + z.object({ + chunks: z.int().positive(), + parent: directoryIdSchema, + mekVersion: z.int().positive(), + dek: z.base64().nonempty(), + dekVersion: z.date(), + hskVersion: z.int().positive().optional(), + contentType: z + .string() + .trim() + .nonempty() + .refine((value) => mime.getExtension(value) !== null), + name: z.base64().nonempty(), + nameIv: z.base64().nonempty(), + createdAt: z.base64().nonempty().optional(), + createdAtIv: z.base64().nonempty().optional(), + lastModifiedAt: z.base64().nonempty(), + lastModifiedAtIv: z.base64().nonempty(), + }), + ) + .mutation(async ({ ctx, input }) => { + const oneMinuteAgo = new Date(Date.now() - 60 * 1000); + const oneMinuteLater = new Date(Date.now() + 60 * 1000); + if (input.dekVersion <= oneMinuteAgo || input.dekVersion >= oneMinuteLater) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" }); + } + + try { + const { id: sessionId } = await UploadRepo.createFileUploadSession({ + userId: ctx.session.userId, + totalChunks: input.chunks, + expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours + parentId: input.parent, + mekVersion: input.mekVersion, + encDek: input.dek, + dekVersion: input.dekVersion, + hskVersion: input.hskVersion ?? null, + contentType: input.contentType, + encName: { ciphertext: input.name, iv: input.nameIv }, + encCreatedAt: + input.createdAt && input.createdAtIv + ? 
{ ciphertext: input.createdAt, iv: input.createdAtIv } + : null, + encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv }, + }); + await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); + return { uploadId: sessionId }; + } catch (e) { + if (e instanceof IntegrityError) { + if (e.message === "Inactive MEK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" }); + } else if (e.message === "Inactive HSK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid HSK version" }); + } + } + throw e; + } + }), + + startFileThumbnailUpload: roleProcedure["activeClient"] + .input( + z.object({ + file: z.int().positive(), + dekVersion: z.date(), + }), + ) + .mutation(async ({ ctx, input }) => { + try { + const { id: sessionId } = await UploadRepo.createThumbnailUploadSession({ + userId: ctx.session.userId, + expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours + fileId: input.file, + dekVersion: input.dekVersion, + }); + await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); + return { uploadId: sessionId }; + } catch (e) { + if (e instanceof IntegrityError) { + if (e.message === "File not found") { + throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); + } else if (e.message === "Invalid DEK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: "Mismatched DEK version" }); + } + } + throw e; + } + }), + + completeFileUpload: roleProcedure["activeClient"] + .input( + z.object({ + uploadId: z.uuidv4(), + contentHmac: z.base64().nonempty().optional(), + }), + ) + .mutation(async ({ ctx, input }) => { + const { uploadId } = input; + if (uploadLocks.has(uploadId)) { + throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); + } else { + uploadLocks.add(uploadId); + } + + const filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`; + await mkdir(dirname(filePath), { recursive: true }); + + try { + const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); + if (!session || session.type !== "file") { + throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); + } else if ( + (session.hskVersion && !input.contentHmac) || + (!session.hskVersion && input.contentHmac) + ) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content hmac" }); + } else if (session.uploadedChunks.length < session.totalChunks) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); + } + + const chunkDirectoryPath = getChunkDirectoryPath(uploadId); + const hashStream = createHash("sha256"); + const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); + + for (let i = 0; i < session.totalChunks; i++) { + for await (const chunk of createReadStream(`${chunkDirectoryPath}/${i}`)) { + hashStream.update(chunk); + writeStream.write(chunk); + } + } + + await new Promise((resolve, reject) => { + writeStream.end((e: any) => (e ? reject(e) : resolve())); + }); + + const hash = hashStream.digest("base64"); + const fileId = await db.transaction().execute(async (trx) => { + const { id: fileId } = await FileRepo.registerFile(trx, { + ...session, + userId: ctx.session.userId, + path: filePath, + contentHmac: input.contentHmac ?? 
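// Note: encContentIv is stored as null for chunked uploads because each 4 MiB
// chunk embeds its own 12-byte IV, so the legacy single-IV column no longer
// applies; encContentHash is the SHA-256 of the assembled ciphertext, i.e. of
// all encrypted chunks concatenated in index order.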
null, + encContentHash: hash, + encContentIv: null, + }); + await UploadRepo.deleteUploadSession(trx, uploadId); + return fileId; + }); + + await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e)); + return { file: fileId }; + } catch (e) { + await safeUnlink(filePath); + throw e; + } finally { + uploadLocks.delete(uploadId); + } + }), + + completeFileThumbnailUpload: roleProcedure["activeClient"] + .input( + z.object({ + uploadId: z.uuidv4(), + }), + ) + .mutation(async ({ ctx, input }) => { + const { uploadId } = input; + if (uploadLocks.has(uploadId)) { + throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); + } else { + uploadLocks.add(uploadId); + } + + const thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uuidv4()}`; + await mkdir(dirname(thumbnailPath), { recursive: true }); + + try { + const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); + if (!session || session.type !== "thumbnail") { + throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); + } else if (session.uploadedChunks.length < session.totalChunks) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); + } + + const chunkDirectoryPath = getChunkDirectoryPath(uploadId); + const chunkPath = `${chunkDirectoryPath}/0`; + + // Move chunk file to thumbnail path (IV is prepended to the content) + await rename(chunkPath, thumbnailPath); + + // Update thumbnail in database (null IV since it's prepended to the file) + const oldPath = await MediaRepo.updateFileThumbnail( + ctx.session.userId, + session.fileId, + session.dekVersion, + thumbnailPath, + null, + ); + safeUnlink(oldPath); // Intended + + await db.transaction().execute(async (trx) => { + await UploadRepo.deleteUploadSession(trx, uploadId); + }); + + await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e)); + } catch (e) { + await safeUnlink(thumbnailPath); + if (e instanceof IntegrityError) { + if (e.message === "File not found") { + throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); + } else if (e.message === "Invalid DEK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: "Mismatched DEK version" }); + } + } + throw e; + } finally { + uploadLocks.delete(uploadId); + } + }), +}); + +export default uploadRouter; From 2801eed556f2c9407d498bfe008e2c6a00ed05f7 Mon Sep 17 00:00:00 2001 From: static Date: Sun, 11 Jan 2026 14:35:30 +0900 Subject: [PATCH 6/8] =?UTF-8?q?=EC=82=AC=EC=86=8C=ED=95=9C=20=EB=A6=AC?= =?UTF-8?q?=ED=8C=A9=ED=86=A0=EB=A7=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/lib/constants/serviceWorker.ts | 2 +- src/lib/modules/crypto/aes.ts | 2 +- src/lib/modules/crypto/index.ts | 2 +- src/lib/modules/crypto/rsa.ts | 2 +- src/lib/modules/crypto/{util.ts => utils.ts} | 0 src/lib/modules/key.ts | 6 +++--- src/lib/schemas/filesystem.ts | 4 ++++ src/lib/schemas/index.ts | 1 + src/lib/server/db/schema/category.ts | 2 +- src/lib/server/db/schema/file.ts | 2 +- src/lib/server/db/schema/index.ts | 2 +- src/lib/server/db/schema/upload.ts | 2 +- src/lib/server/db/schema/{util.ts => utils.ts} | 0 src/lib/server/schemas/category.ts | 3 --- src/lib/server/schemas/directory.ts | 3 --- src/lib/server/schemas/file.ts | 7 ------- src/lib/server/schemas/index.ts | 3 --- src/routes/api/file/[id]/download/+server.ts | 2 +- src/routes/api/file/[id]/thumbnail/download/+server.ts | 2 +- src/trpc/routers/category.ts | 6 +++--- 
src/trpc/routers/directory.ts | 6 +++--- src/trpc/routers/upload.ts | 4 ++-- 22 files changed, 26 insertions(+), 37 deletions(-) rename src/lib/modules/crypto/{util.ts => utils.ts} (100%) create mode 100644 src/lib/schemas/filesystem.ts create mode 100644 src/lib/schemas/index.ts rename src/lib/server/db/schema/{util.ts => utils.ts} (100%) delete mode 100644 src/lib/server/schemas/category.ts delete mode 100644 src/lib/server/schemas/directory.ts delete mode 100644 src/lib/server/schemas/file.ts delete mode 100644 src/lib/server/schemas/index.ts diff --git a/src/lib/constants/serviceWorker.ts b/src/lib/constants/serviceWorker.ts index 8c09d05..abbaa3c 100644 --- a/src/lib/constants/serviceWorker.ts +++ b/src/lib/constants/serviceWorker.ts @@ -1 +1 @@ -export const DECRYPTED_FILE_URL_PREFIX = "/_internal/decrypted-file/"; +export const DECRYPTED_FILE_URL_PREFIX = "/_internal/decryptedFile/"; diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts index 4035343..35687e6 100644 --- a/src/lib/modules/crypto/aes.ts +++ b/src/lib/modules/crypto/aes.ts @@ -5,7 +5,7 @@ import { encodeToBase64, decodeFromBase64, concatenateBuffers, -} from "./util"; +} from "./utils"; export const generateMasterKey = async () => { return { diff --git a/src/lib/modules/crypto/index.ts b/src/lib/modules/crypto/index.ts index e6972ba..e3c27a7 100644 --- a/src/lib/modules/crypto/index.ts +++ b/src/lib/modules/crypto/index.ts @@ -1,4 +1,4 @@ export * from "./aes"; export * from "./rsa"; export * from "./sha"; -export * from "./util"; +export * from "./utils"; diff --git a/src/lib/modules/crypto/rsa.ts b/src/lib/modules/crypto/rsa.ts index 11e136f..78e17db 100644 --- a/src/lib/modules/crypto/rsa.ts +++ b/src/lib/modules/crypto/rsa.ts @@ -1,4 +1,4 @@ -import { encodeString, encodeToBase64, decodeFromBase64 } from "./util"; +import { encodeString, encodeToBase64, decodeFromBase64 } from "./utils"; export const generateEncryptionKeyPair = async () => { const keyPair = await crypto.subtle.generateKey( diff --git a/src/lib/modules/crypto/util.ts b/src/lib/modules/crypto/utils.ts similarity index 100% rename from src/lib/modules/crypto/util.ts rename to src/lib/modules/crypto/utils.ts diff --git a/src/lib/modules/key.ts b/src/lib/modules/key.ts index d5276a5..ca84477 100644 --- a/src/lib/modules/key.ts +++ b/src/lib/modules/key.ts @@ -2,7 +2,7 @@ import { z } from "zod"; import { storeClientKey } from "$lib/indexedDB"; import type { ClientKeys } from "$lib/stores"; -const serializedClientKeysSchema = z.intersection( +const SerializedClientKeysSchema = z.intersection( z.object({ generator: z.literal("ArkVault"), exportedAt: z.iso.datetime(), @@ -16,7 +16,7 @@ const serializedClientKeysSchema = z.intersection( }), ); -type SerializedClientKeys = z.infer; +type SerializedClientKeys = z.infer; type DeserializedClientKeys = { encryptKeyBase64: string; @@ -43,7 +43,7 @@ export const serializeClientKeys = ({ }; export const deserializeClientKeys = (serialized: string) => { - const zodRes = serializedClientKeysSchema.safeParse(JSON.parse(serialized)); + const zodRes = SerializedClientKeysSchema.safeParse(JSON.parse(serialized)); if (zodRes.success) { return { encryptKeyBase64: zodRes.data.encryptKey, diff --git a/src/lib/schemas/filesystem.ts b/src/lib/schemas/filesystem.ts new file mode 100644 index 0000000..d3a45f4 --- /dev/null +++ b/src/lib/schemas/filesystem.ts @@ -0,0 +1,4 @@ +import { z } from "zod"; + +export const DirectoryIdSchema = z.union([z.literal("root"), z.int().positive()]); +export const 
CategoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/schemas/index.ts b/src/lib/schemas/index.ts new file mode 100644 index 0000000..7d29e5d --- /dev/null +++ b/src/lib/schemas/index.ts @@ -0,0 +1 @@ +export * from "./filesystem"; diff --git a/src/lib/server/db/schema/category.ts b/src/lib/server/db/schema/category.ts index 2304264..ccaba95 100644 --- a/src/lib/server/db/schema/category.ts +++ b/src/lib/server/db/schema/category.ts @@ -1,5 +1,5 @@ import type { Generated } from "kysely"; -import type { Ciphertext } from "./util"; +import type { Ciphertext } from "./utils"; interface CategoryTable { id: Generated; diff --git a/src/lib/server/db/schema/file.ts b/src/lib/server/db/schema/file.ts index 663aacd..0774082 100644 --- a/src/lib/server/db/schema/file.ts +++ b/src/lib/server/db/schema/file.ts @@ -1,5 +1,5 @@ import type { ColumnType, Generated } from "kysely"; -import type { Ciphertext } from "./util"; +import type { Ciphertext } from "./utils"; interface DirectoryTable { id: Generated; diff --git a/src/lib/server/db/schema/index.ts b/src/lib/server/db/schema/index.ts index dcc340b..7a13395 100644 --- a/src/lib/server/db/schema/index.ts +++ b/src/lib/server/db/schema/index.ts @@ -7,7 +7,7 @@ export * from "./mek"; export * from "./session"; export * from "./upload"; export * from "./user"; -export * from "./util"; +export * from "./utils"; // eslint-disable-next-line @typescript-eslint/no-empty-object-type export interface Database {} diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts index 26eaac2..369c385 100644 --- a/src/lib/server/db/schema/upload.ts +++ b/src/lib/server/db/schema/upload.ts @@ -1,5 +1,5 @@ import type { Generated } from "kysely"; -import type { Ciphertext } from "./util"; +import type { Ciphertext } from "./utils"; interface UploadSessionTable { id: Generated; diff --git a/src/lib/server/db/schema/util.ts b/src/lib/server/db/schema/utils.ts similarity index 100% rename from src/lib/server/db/schema/util.ts rename to src/lib/server/db/schema/utils.ts diff --git a/src/lib/server/schemas/category.ts b/src/lib/server/schemas/category.ts deleted file mode 100644 index 0bb07a7..0000000 --- a/src/lib/server/schemas/category.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { z } from "zod"; - -export const categoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/server/schemas/directory.ts b/src/lib/server/schemas/directory.ts deleted file mode 100644 index dba44b9..0000000 --- a/src/lib/server/schemas/directory.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { z } from "zod"; - -export const directoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/server/schemas/file.ts b/src/lib/server/schemas/file.ts deleted file mode 100644 index 8ba14e7..0000000 --- a/src/lib/server/schemas/file.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { z } from "zod"; - -export const fileThumbnailUploadRequest = z.object({ - dekVersion: z.iso.datetime(), - contentIv: z.base64().nonempty(), -}); -export type FileThumbnailUploadRequest = z.input; diff --git a/src/lib/server/schemas/index.ts b/src/lib/server/schemas/index.ts deleted file mode 100644 index f7a2bc1..0000000 --- a/src/lib/server/schemas/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export * from "./category"; -export * from "./directory"; -export * from "./file"; diff --git a/src/routes/api/file/[id]/download/+server.ts b/src/routes/api/file/[id]/download/+server.ts index 68191ef..5324365 100644 --- 
a/src/routes/api/file/[id]/download/+server.ts +++ b/src/routes/api/file/[id]/download/+server.ts @@ -1,7 +1,7 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; -import { authorize } from "$lib/server/modules/auth"; import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; +import { authorize } from "$lib/server/modules/auth"; import { getFileStream } from "$lib/server/services/file"; import type { RequestHandler } from "./$types"; diff --git a/src/routes/api/file/[id]/thumbnail/download/+server.ts b/src/routes/api/file/[id]/thumbnail/download/+server.ts index 4fc7c1a..85cdd8c 100644 --- a/src/routes/api/file/[id]/thumbnail/download/+server.ts +++ b/src/routes/api/file/[id]/thumbnail/download/+server.ts @@ -1,7 +1,7 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; -import { authorize } from "$lib/server/modules/auth"; import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; +import { authorize } from "$lib/server/modules/auth"; import { getFileThumbnailStream } from "$lib/server/services/file"; import type { RequestHandler } from "./$types"; diff --git a/src/trpc/routers/category.ts b/src/trpc/routers/category.ts index a292889..34887f7 100644 --- a/src/trpc/routers/category.ts +++ b/src/trpc/routers/category.ts @@ -1,14 +1,14 @@ import { TRPCError } from "@trpc/server"; import { z } from "zod"; +import { CategoryIdSchema } from "$lib/schemas"; import { CategoryRepo, FileRepo, IntegrityError } from "$lib/server/db"; -import { categoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; const categoryRouter = router({ get: roleProcedure["activeClient"] .input( z.object({ - id: categoryIdSchema, + id: CategoryIdSchema, recurse: z.boolean().default(false), }), ) @@ -65,7 +65,7 @@ const categoryRouter = router({ create: roleProcedure["activeClient"] .input( z.object({ - parent: categoryIdSchema, + parent: CategoryIdSchema, mekVersion: z.int().positive(), dek: z.base64().nonempty(), dekVersion: z.date(), diff --git a/src/trpc/routers/directory.ts b/src/trpc/routers/directory.ts index 6e1e358..15f16f3 100644 --- a/src/trpc/routers/directory.ts +++ b/src/trpc/routers/directory.ts @@ -1,15 +1,15 @@ import { TRPCError } from "@trpc/server"; import { z } from "zod"; +import { DirectoryIdSchema } from "$lib/schemas"; import { FileRepo, IntegrityError } from "$lib/server/db"; import { safeUnlink } from "$lib/server/modules/filesystem"; -import { directoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; const directoryRouter = router({ get: roleProcedure["activeClient"] .input( z.object({ - id: directoryIdSchema, + id: DirectoryIdSchema, }), ) .query(async ({ ctx, input }) => { @@ -59,7 +59,7 @@ const directoryRouter = router({ create: roleProcedure["activeClient"] .input( z.object({ - parent: directoryIdSchema, + parent: DirectoryIdSchema, mekVersion: z.int().positive(), dek: z.base64().nonempty(), dekVersion: z.date(), diff --git a/src/trpc/routers/upload.ts b/src/trpc/routers/upload.ts index 08d483f..7a1680b 100644 --- a/src/trpc/routers/upload.ts +++ b/src/trpc/routers/upload.ts @@ -6,11 +6,11 @@ import mime from "mime"; import { dirname } from "path"; import { v4 as uuidv4 } from "uuid"; import { z } from "zod"; +import { DirectoryIdSchema } from "$lib/schemas"; import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; import db from "$lib/server/db/kysely"; import env from "$lib/server/loadenv"; import { 
getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; -import { directoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; const uploadLocks = new Set(); @@ -20,7 +20,7 @@ const uploadRouter = router({ .input( z.object({ chunks: z.int().positive(), - parent: directoryIdSchema, + parent: DirectoryIdSchema, mekVersion: z.int().positive(), dek: z.base64().nonempty(), dekVersion: z.date(), From 83369f83e3adba13051f1f5c1f14893042532b24 Mon Sep 17 00:00:00 2001 From: static Date: Sun, 11 Jan 2026 15:16:03 +0900 Subject: [PATCH 7/8] =?UTF-8?q?DB=EC=97=90=20=EC=B2=AD=ED=81=AC=20?= =?UTF-8?q?=EC=97=85=EB=A1=9C=EB=93=9C=20=EA=B2=BD=EB=A1=9C=EB=A5=BC=20?= =?UTF-8?q?=EC=A0=80=EC=9E=A5=ED=95=98=EB=8F=84=EB=A1=9D=20=EB=B3=80?= =?UTF-8?q?=EA=B2=BD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/hooks.server.ts | 2 +- src/lib/constants/upload.ts | 2 +- src/lib/server/db/media.ts | 73 ++++++------ .../migrations/1768062380-AddChunkedUpload.ts | 3 +- src/lib/server/db/schema/upload.ts | 3 +- src/lib/server/db/upload.ts | 42 +++---- src/lib/server/modules/filesystem.ts | 9 +- src/lib/server/services/upload.ts | 23 ++-- src/trpc/routers/upload.ts | 104 ++++++++++-------- 9 files changed, 143 insertions(+), 118 deletions(-) diff --git a/src/hooks.server.ts b/src/hooks.server.ts index 1795ce3..b816f7f 100644 --- a/src/hooks.server.ts +++ b/src/hooks.server.ts @@ -7,7 +7,7 @@ import { cleanupExpiredSessions, cleanupExpiredSessionUpgradeChallenges, } from "$lib/server/db/session"; -import { cleanupExpiredUploadSessions } from "$lib/server/db/upload"; +import { cleanupExpiredUploadSessions } from "$lib/server/services/upload"; import { authenticate, setAgentInfo } from "$lib/server/middlewares"; export const init: ServerInit = async () => { diff --git a/src/lib/constants/upload.ts b/src/lib/constants/upload.ts index 99d94bb..57934d6 100644 --- a/src/lib/constants/upload.ts +++ b/src/lib/constants/upload.ts @@ -2,5 +2,5 @@ export const AES_GCM_IV_SIZE = 12; export const AES_GCM_TAG_SIZE = 16; export const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE; -export const CHUNK_SIZE = 4 * 1024 * 1024; +export const CHUNK_SIZE = 4 * 1024 * 1024; // 4 MiB export const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD; diff --git a/src/lib/server/db/media.ts b/src/lib/server/db/media.ts index c4d2a34..3e165c0 100644 --- a/src/lib/server/db/media.ts +++ b/src/lib/server/db/media.ts @@ -14,54 +14,53 @@ interface FileThumbnail extends Thumbnail { } export const updateFileThumbnail = async ( + trx: typeof db, userId: number, fileId: number, dekVersion: Date, path: string, encContentIv: string | null, ) => { - return await db.transaction().execute(async (trx) => { - const file = await trx - .selectFrom("file") - .select("data_encryption_key_version") - .where("id", "=", fileId) - .where("user_id", "=", userId) - .limit(1) - .forUpdate() - .executeTakeFirst(); - if (!file) { - throw new IntegrityError("File not found"); - } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) { - throw new IntegrityError("Invalid DEK version"); - } + const file = await trx + .selectFrom("file") + .select("data_encryption_key_version") + .where("id", "=", fileId) + .where("user_id", "=", userId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (!file) { + throw new IntegrityError("File not found"); + } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) { + 
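// Note: updateFileThumbnail now runs inside a caller-supplied Kysely
// transaction, so the thumbnail upsert and the upload-session delete can
// commit together. Caller-side shape (mirrors completeFileThumbnailUpload
// further below; names are illustrative):
//
//   const oldPath = await db.transaction().execute(async (trx) => {
//     const old = await MediaRepo.updateFileThumbnail(trx, userId, fileId, dekVersion, newPath, null);
//     await UploadRepo.deleteUploadSession(trx, uploadId);
//     return old;
//   });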
throw new IntegrityError("Invalid DEK version"); + } - const thumbnail = await trx - .selectFrom("thumbnail") - .select("path as oldPath") - .where("file_id", "=", fileId) - .limit(1) - .forUpdate() - .executeTakeFirst(); - const now = new Date(); + const thumbnail = await trx + .selectFrom("thumbnail") + .select("path as oldPath") + .where("file_id", "=", fileId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + const now = new Date(); - await trx - .insertInto("thumbnail") - .values({ - file_id: fileId, + await trx + .insertInto("thumbnail") + .values({ + file_id: fileId, + path, + updated_at: now, + encrypted_content_iv: encContentIv, + }) + .onConflict((oc) => + oc.column("file_id").doUpdateSet({ path, updated_at: now, encrypted_content_iv: encContentIv, - }) - .onConflict((oc) => - oc.column("file_id").doUpdateSet({ - path, - updated_at: now, - encrypted_content_iv: encContentIv, - }), - ) - .execute(); - return thumbnail?.oldPath ?? null; - }); + }), + ) + .execute(); + return thumbnail?.oldPath ?? null; }; export const getFileThumbnail = async (userId: number, fileId: number) => { diff --git a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts index cf18c05..be6a900 100644 --- a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts +++ b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts @@ -17,9 +17,10 @@ export const up = async (db: Kysely) => { // upload.ts await db.schema .createTable("upload_session") - .addColumn("id", "uuid", (col) => col.primaryKey().defaultTo(sql`gen_random_uuid()`)) + .addColumn("id", "uuid", (col) => col.primaryKey()) .addColumn("type", "text", (col) => col.notNull()) .addColumn("user_id", "integer", (col) => col.references("user.id").notNull()) + .addColumn("path", "text", (col) => col.notNull()) .addColumn("total_chunks", "integer", (col) => col.notNull()) .addColumn("uploaded_chunks", sql`integer[]`, (col) => col.notNull().defaultTo(sql`'{}'`)) .addColumn("expires_at", "timestamp(3)", (col) => col.notNull()) diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts index 369c385..fccde36 100644 --- a/src/lib/server/db/schema/upload.ts +++ b/src/lib/server/db/schema/upload.ts @@ -2,9 +2,10 @@ import type { Generated } from "kysely"; import type { Ciphertext } from "./utils"; interface UploadSessionTable { - id: Generated; + id: string; type: "file" | "thumbnail"; user_id: number; + path: string; total_chunks: number; uploaded_chunks: Generated; expires_at: Date; diff --git a/src/lib/server/db/upload.ts b/src/lib/server/db/upload.ts index 4c8da24..d506191 100644 --- a/src/lib/server/db/upload.ts +++ b/src/lib/server/db/upload.ts @@ -6,6 +6,7 @@ import type { Ciphertext } from "./schema"; interface BaseUploadSession { id: string; userId: number; + path: string; totalChunks: number; uploadedChunks: number[]; expiresAt: Date; @@ -31,9 +32,9 @@ interface ThumbnailUploadSession extends BaseUploadSession { } export const createFileUploadSession = async ( - params: Omit, + params: Omit, ) => { - return await db.transaction().execute(async (trx) => { + await db.transaction().execute(async (trx) => { const mek = await trx .selectFrom("master_encryption_key") .select("version") @@ -60,11 +61,13 @@ export const createFileUploadSession = async ( } } - const { sessionId } = await trx + await trx .insertInto("upload_session") .values({ + id: params.id, type: "file", user_id: params.userId, + path: params.path, total_chunks: params.totalChunks, expires_at: 
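// Note: the session id now comes from application code (uuidv4) instead of a
// gen_random_uuid() column default, so the chunk directory can be created
// before the row exists and share its name. Sketch of the generator (as in
// the router's generateSessionId below):
//
//   const newSession = async () => {
//     const id = uuidv4();
//     const path = `${env.uploadsPath}/${id}`;
//     await mkdir(path, { recursive: true }); // directory first, DB row second
//     return { id, path };
//   };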
params.expiresAt, parent_id: params.parentId !== "root" ? params.parentId : null, @@ -77,16 +80,14 @@ export const createFileUploadSession = async ( encrypted_created_at: params.encCreatedAt, encrypted_last_modified_at: params.encLastModifiedAt, }) - .returning("id as sessionId") - .executeTakeFirstOrThrow(); - return { id: sessionId }; + .execute(); }); }; export const createThumbnailUploadSession = async ( - params: Omit, + params: Omit, ) => { - return await db.transaction().execute(async (trx) => { + await db.transaction().execute(async (trx) => { const file = await trx .selectFrom("file") .select("data_encryption_key_version") @@ -101,19 +102,19 @@ export const createThumbnailUploadSession = async ( throw new IntegrityError("Invalid DEK version"); } - const { sessionId } = await trx + await trx .insertInto("upload_session") .values({ + id: params.id, type: "thumbnail", user_id: params.userId, - total_chunks: 1, + path: params.path, + total_chunks: params.totalChunks, expires_at: params.expiresAt, file_id: params.fileId, data_encryption_key_version: params.dekVersion, }) - .returning("id as sessionId") - .executeTakeFirstOrThrow(); - return { id: sessionId }; + .execute(); }); }; @@ -126,14 +127,14 @@ export const getUploadSession = async (sessionId: string, userId: number) => { .where("expires_at", ">", new Date()) .limit(1) .executeTakeFirst(); - - if (!session) return null; - - if (session.type === "file") { + if (!session) { + return null; + } else if (session.type === "file") { return { type: "file", id: session.id, userId: session.user_id, + path: session.path, totalChunks: session.total_chunks, uploadedChunks: session.uploaded_chunks, expiresAt: session.expires_at, @@ -152,6 +153,7 @@ export const getUploadSession = async (sessionId: string, userId: number) => { type: "thumbnail", id: session.id, userId: session.user_id, + path: session.path, totalChunks: session.total_chunks, uploadedChunks: session.uploaded_chunks, expiresAt: session.expires_at, @@ -176,8 +178,8 @@ export const deleteUploadSession = async (trx: typeof db, sessionId: string) => export const cleanupExpiredUploadSessions = async () => { const sessions = await db .deleteFrom("upload_session") - .where("expires_at", "<", new Date()) - .returning("id") + .where("expires_at", "<=", new Date()) + .returning("path") .execute(); - return sessions.map(({ id }) => id); + return sessions.map(({ path }) => path); }; diff --git a/src/lib/server/modules/filesystem.ts b/src/lib/server/modules/filesystem.ts index b87fd65..ade7d73 100644 --- a/src/lib/server/modules/filesystem.ts +++ b/src/lib/server/modules/filesystem.ts @@ -1,7 +1,10 @@ -import { unlink } from "fs/promises"; -import env from "$lib/server/loadenv"; +import { rm, unlink } from "fs/promises"; -export const getChunkDirectoryPath = (sessionId: string) => `${env.uploadsPath}/${sessionId}`; +export const safeRecursiveRm = async (path: string | null | undefined) => { + if (path) { + await rm(path, { recursive: true }).catch(console.error); + } +}; export const safeUnlink = async (path: string | null | undefined) => { if (path) { diff --git a/src/lib/server/services/upload.ts b/src/lib/server/services/upload.ts index 1be250d..1f7043b 100644 --- a/src/lib/server/services/upload.ts +++ b/src/lib/server/services/upload.ts @@ -2,9 +2,9 @@ import { error } from "@sveltejs/kit"; import { createHash } from "crypto"; import { createWriteStream } from "fs"; import { Readable } from "stream"; -import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants"; +import { 
ENCRYPTION_OVERHEAD, ENCRYPTED_CHUNK_SIZE } from "$lib/constants"; import { UploadRepo } from "$lib/server/db"; -import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; +import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem"; const chunkLocks = new Set(); @@ -17,12 +17,12 @@ export const uploadChunk = async ( ) => { const lockKey = `${sessionId}/${chunkIndex}`; if (chunkLocks.has(lockKey)) { - error(409, "Chunk already uploaded"); // TODO: Message + error(409, "Chunk upload already in progress"); } else { chunkLocks.add(lockKey); } - const filePath = `${getChunkDirectoryPath(sessionId)}/${chunkIndex}`; + let filePath; try { const session = await UploadRepo.getUploadSession(sessionId, userId); @@ -35,15 +35,16 @@ export const uploadChunk = async ( } const isLastChunk = chunkIndex === session.totalChunks - 1; + filePath = `${session.path}/${chunkIndex}`; - let writtenBytes = 0; const hashStream = createHash("sha256"); const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); + let writtenBytes = 0; for await (const chunk of encChunkStream) { - writtenBytes += chunk.length; hashStream.update(chunk); writeStream.write(chunk); + writtenBytes += chunk.length; } await new Promise((resolve, reject) => { @@ -53,9 +54,8 @@ export const uploadChunk = async ( if (hashStream.digest("base64") !== encChunkHash) { throw new Error("Invalid checksum"); } else if ( - (!isLastChunk && writtenBytes !== CHUNK_SIZE + ENCRYPTION_OVERHEAD) || - (isLastChunk && - (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > CHUNK_SIZE + ENCRYPTION_OVERHEAD)) + (!isLastChunk && writtenBytes !== ENCRYPTED_CHUNK_SIZE) || + (isLastChunk && (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > ENCRYPTED_CHUNK_SIZE)) ) { throw new Error("Invalid chunk size"); } @@ -75,3 +75,8 @@ export const uploadChunk = async ( chunkLocks.delete(lockKey); } }; + +export const cleanupExpiredUploadSessions = async () => { + const paths = await UploadRepo.cleanupExpiredUploadSessions(); + await Promise.all(paths.map(safeRecursiveRm)); +}; diff --git a/src/trpc/routers/upload.ts b/src/trpc/routers/upload.ts index 7a1680b..168e957 100644 --- a/src/trpc/routers/upload.ts +++ b/src/trpc/routers/upload.ts @@ -1,7 +1,7 @@ import { TRPCError } from "@trpc/server"; import { createHash } from "crypto"; import { createReadStream, createWriteStream } from "fs"; -import { mkdir, rename, rm } from "fs/promises"; +import { mkdir, rename } from "fs/promises"; import mime from "mime"; import { dirname } from "path"; import { v4 as uuidv4 } from "uuid"; @@ -10,10 +10,17 @@ import { DirectoryIdSchema } from "$lib/schemas"; import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; import db from "$lib/server/db/kysely"; import env from "$lib/server/loadenv"; -import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; +import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem"; import { router, roleProcedure } from "../init.server"; -const uploadLocks = new Set(); +const sessionLocks = new Set(); + +const generateSessionId = async () => { + const id = uuidv4(); + const path = `${env.uploadsPath}/${id}`; + await mkdir(path, { recursive: true }); + return { id, path }; +}; const uploadRouter = router({ startFileUpload: roleProcedure["activeClient"] @@ -45,9 +52,13 @@ const uploadRouter = router({ throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" }); } + const { id, path } = await generateSessionId(); + try 
{ - const { id: sessionId } = await UploadRepo.createFileUploadSession({ + await UploadRepo.createFileUploadSession({ + id, userId: ctx.session.userId, + path, totalChunks: input.chunks, expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours parentId: input.parent, @@ -63,9 +74,10 @@ const uploadRouter = router({ : null, encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv }, }); - await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); - return { uploadId: sessionId }; + return { uploadId: id }; } catch (e) { + await safeRecursiveRm(path); + if (e instanceof IntegrityError) { if (e.message === "Inactive MEK version") { throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" }); @@ -85,16 +97,22 @@ const uploadRouter = router({ }), ) .mutation(async ({ ctx, input }) => { + const { id, path } = await generateSessionId(); + try { - const { id: sessionId } = await UploadRepo.createThumbnailUploadSession({ + await UploadRepo.createThumbnailUploadSession({ + id, userId: ctx.session.userId, + path, + totalChunks: 1, // Up to 4 MiB expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours fileId: input.file, dekVersion: input.dekVersion, }); - await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); - return { uploadId: sessionId }; + return { uploadId: id }; } catch (e) { + await safeRecursiveRm(path); + if (e instanceof IntegrityError) { if (e.message === "File not found") { throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); @@ -115,14 +133,13 @@ const uploadRouter = router({ ) .mutation(async ({ ctx, input }) => { const { uploadId } = input; - if (uploadLocks.has(uploadId)) { - throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); + if (sessionLocks.has(uploadId)) { + throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" }); } else { - uploadLocks.add(uploadId); + sessionLocks.add(uploadId); } - const filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`; - await mkdir(dirname(filePath), { recursive: true }); + let filePath = ""; try { const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); @@ -132,17 +149,19 @@ const uploadRouter = router({ (session.hskVersion && !input.contentHmac) || (!session.hskVersion && input.contentHmac) ) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content hmac" }); + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content HMAC" }); } else if (session.uploadedChunks.length < session.totalChunks) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); + throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); } - const chunkDirectoryPath = getChunkDirectoryPath(uploadId); + filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`; + await mkdir(dirname(filePath), { recursive: true }); + const hashStream = createHash("sha256"); const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); for (let i = 0; i < session.totalChunks; i++) { - for await (const chunk of createReadStream(`${chunkDirectoryPath}/${i}`)) { + for await (const chunk of createReadStream(`${session.path}/${i}`)) { hashStream.update(chunk); writeStream.write(chunk); } @@ -166,13 +185,13 @@ const uploadRouter = router({ return fileId; }); - await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e)); + await safeRecursiveRm(session.path); return { file: fileId }; } catch (e) { 
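// Note: completeFileThumbnailUpload (below) moves the single finished chunk
// into the thumbnails tree with rename(), which avoids a copy but assumes
// UPLOADS_PATH and THUMBNAILS_PATH sit on the same filesystem; cross-device
// renames fail with EXDEV. A hypothetical fallback:
//
//   import { copyFile, rename, unlink } from "fs/promises";
//
//   const moveFile = async (src: string, dst: string) => {
//     try {
//       await rename(src, dst);
//     } catch (e) {
//       if ((e as NodeJS.ErrnoException).code !== "EXDEV") throw e;
//       await copyFile(src, dst); // copy-then-delete across filesystems
//       await unlink(src);
//     }
//   };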
await safeUnlink(filePath); throw e; } finally { - uploadLocks.delete(uploadId); + sessionLocks.delete(uploadId); } }), @@ -184,44 +203,39 @@ const uploadRouter = router({ ) .mutation(async ({ ctx, input }) => { const { uploadId } = input; - if (uploadLocks.has(uploadId)) { - throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); + if (sessionLocks.has(uploadId)) { + throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" }); } else { - uploadLocks.add(uploadId); + sessionLocks.add(uploadId); } - const thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uuidv4()}`; - await mkdir(dirname(thumbnailPath), { recursive: true }); + let thumbnailPath = ""; try { const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); if (!session || session.type !== "thumbnail") { throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); } else if (session.uploadedChunks.length < session.totalChunks) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); + throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); } - const chunkDirectoryPath = getChunkDirectoryPath(uploadId); - const chunkPath = `${chunkDirectoryPath}/0`; + thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uploadId}`; + await mkdir(dirname(thumbnailPath), { recursive: true }); + await rename(`${session.path}/0`, thumbnailPath); - // Move chunk file to thumbnail path (IV is prepended to the content) - await rename(chunkPath, thumbnailPath); - - // Update thumbnail in database (null IV since it's prepended to the file) - const oldPath = await MediaRepo.updateFileThumbnail( - ctx.session.userId, - session.fileId, - session.dekVersion, - thumbnailPath, - null, - ); - safeUnlink(oldPath); // Intended - - await db.transaction().execute(async (trx) => { + const oldThumbnailPath = await db.transaction().execute(async (trx) => { + const oldPath = await MediaRepo.updateFileThumbnail( + trx, + ctx.session.userId, + session.fileId, + session.dekVersion, + thumbnailPath, + null, + ); await UploadRepo.deleteUploadSession(trx, uploadId); + return oldPath; }); - - await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e)); + await Promise.all([safeUnlink(oldThumbnailPath), safeRecursiveRm(session.path)]); } catch (e) { await safeUnlink(thumbnailPath); if (e instanceof IntegrityError) { @@ -233,7 +247,7 @@ const uploadRouter = router({ } throw e; } finally { - uploadLocks.delete(uploadId); + sessionLocks.delete(uploadId); } }), }); From 80368c3a295eb7ab7edfe3f544e6b2ba28cfc449 Mon Sep 17 00:00:00 2001 From: static Date: Sun, 11 Jan 2026 15:54:05 +0900 Subject: [PATCH 8/8] =?UTF-8?q?=EC=82=AC=EC=86=8C=ED=95=9C=20=EB=A6=AC?= =?UTF-8?q?=ED=8C=A9=ED=86=A0=EB=A7=81=202?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/lib/modules/crypto/sha.ts | 18 ++-- src/lib/modules/file/upload.svelte.ts | 129 +++++++------------------- src/lib/modules/thumbnail.ts | 11 +-- 3 files changed, 46 insertions(+), 112 deletions(-) diff --git a/src/lib/modules/crypto/sha.ts b/src/lib/modules/crypto/sha.ts index 883ac10..61c2ed7 100644 --- a/src/lib/modules/crypto/sha.ts +++ b/src/lib/modules/crypto/sha.ts @@ -5,16 +5,6 @@ export const digestMessage = async (message: BufferSource) => { return await crypto.subtle.digest("SHA-256", message); }; -export const createStreamingHmac = async (hmacSecret: CryptoKey) => { - const keyBytes = new Uint8Array(await 
crypto.subtle.exportKey("raw", hmacSecret)); - const h = hmac.create(sha256, keyBytes); - - return { - update: (data: Uint8Array) => h.update(data), - digest: () => h.digest(), - }; -}; - export const generateHmacSecret = async () => { return { hmacSecret: await crypto.subtle.generateKey( @@ -28,6 +18,10 @@ export const generateHmacSecret = async () => { }; }; -export const signMessageHmac = async (message: BufferSource, hmacSecret: CryptoKey) => { - return await crypto.subtle.sign("HMAC", hmacSecret, message); +export const createHmacStream = async (hmacSecret: CryptoKey) => { + const h = hmac.create(sha256, new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret))); + return { + update: (data: Uint8Array) => h.update(data), + digest: () => h.digest(), + }; }; diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index eaa35df..4bea638 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -9,8 +9,7 @@ import { encryptString, encryptChunk, digestMessage, - signMessageHmac, - createStreamingHmac, + createHmacStream, } from "$lib/modules/crypto"; import { Scheduler } from "$lib/modules/scheduler"; import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail"; @@ -60,27 +59,7 @@ export const clearUploadedFiles = () => { const requestDuplicateFileScan = limitFunction( async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise) => { - const fileBuffer = await file.arrayBuffer(); - const fileSigned = encodeToBase64(await signMessageHmac(fileBuffer, hmacSecret.secret)); - - const files = await trpc().file.listByHash.query({ - hskVersion: hmacSecret.version, - contentHmac: fileSigned, - }); - if (files.length === 0 || (await onDuplicate())) { - return { fileBuffer, fileSigned }; - } else { - return {}; - } - }, - { concurrency: 1 }, -); - -const isImageFile = (fileType: string) => fileType.startsWith("image/"); - -const requestDuplicateFileScanStreaming = limitFunction( - async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise) => { - const hmacStream = await createStreamingHmac(hmacSecret.secret); + const hmacStream = await createHmacStream(hmacSecret.secret); const reader = file.stream().getReader(); while (true) { @@ -152,16 +131,12 @@ const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { return chunksEncrypted; }; -const encryptFile = limitFunction( - async (state: FileUploadState, file: File, fileBuffer: ArrayBuffer, masterKey: MasterKey) => { +const encryptImageFile = limitFunction( + async (state: FileUploadState, file: File, masterKey: MasterKey) => { state.status = "encrypting"; - const fileType = getFileType(file); - - let createdAt; - if (fileType.startsWith("image/")) { - createdAt = extractExifDateTime(fileBuffer); - } + const fileBuffer = await file.arrayBuffer(); + const createdAt = extractExifDateTime(fileBuffer); const { dataKey, dataKeyVersion } = await generateDataKey(); const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); @@ -172,7 +147,7 @@ const encryptFile = limitFunction( createdAt && (await encryptString(createdAt.getTime().toString(), dataKey)); const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); - const thumbnail = await generateThumbnail(fileBuffer, fileType); + const thumbnail = await generateThumbnail(fileBuffer, getFileType(file)); const thumbnailBuffer = await thumbnail?.arrayBuffer(); const thumbnailEncrypted = thumbnailBuffer && (await 
encryptData(thumbnailBuffer, dataKey)); @@ -181,7 +156,6 @@ const encryptFile = limitFunction( return { dataKeyWrapped, dataKeyVersion, - fileType, chunksEncrypted, nameEncrypted, createdAtEncrypted, @@ -229,7 +203,7 @@ const uploadThumbnail = async ( await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); }; -const requestFileUpload = limitFunction( +const requestImageFileUpload = limitFunction( async ( state: FileUploadState, metadata: RouterInputs["upload"]["startFileUpload"], @@ -242,7 +216,6 @@ const requestFileUpload = limitFunction( const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); - // Upload chunks with progress tracking const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0); let uploadedBytes = 0; const startTime = Date.now(); @@ -265,9 +238,8 @@ const requestFileUpload = limitFunction( uploadedBytes += chunkEncrypted.byteLength; - // Calculate progress, rate, estimated - const elapsed = (Date.now() - startTime) / 1000; // seconds - const rate = uploadedBytes / elapsed; // bytes per second + const elapsed = (Date.now() - startTime) / 1000; + const rate = uploadedBytes / elapsed; const remaining = totalBytes - uploadedBytes; const estimated = rate > 0 ? remaining / rate : undefined; @@ -276,13 +248,11 @@ const requestFileUpload = limitFunction( state.estimated = estimated; } - // Complete upload const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ uploadId, contentHmac: fileSigned, }); - // Upload thumbnail if exists if (thumbnailData) { try { await uploadThumbnail(fileId, thumbnailData, dataKeyVersion); @@ -299,7 +269,7 @@ const requestFileUpload = limitFunction( { concurrency: 1 }, ); -const uploadFileStreaming = async ( +const requestFileUpload = async ( state: FileUploadState, file: File, masterKey: MasterKey, @@ -316,7 +286,6 @@ const uploadFileStreaming = async ( const nameEncrypted = await encryptString(file.name, dataKey); const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); - // Calculate total chunks for metadata const totalChunks = Math.ceil(file.size / CHUNK_SIZE); const metadata = { chunks: totalChunks, @@ -334,7 +303,6 @@ const uploadFileStreaming = async ( const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); - // Stream file, encrypt, and upload with concurrency limit const reader = file.stream().getReader(); const limit = pLimit(4); let buffer = new Uint8Array(0); @@ -364,7 +332,6 @@ const uploadFileStreaming = async ( throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`); } - // Update progress after upload completes uploadedBytes += originalChunkSize; const elapsed = (Date.now() - startTime) / 1000; const rate = uploadedBytes / elapsed; @@ -411,7 +378,6 @@ const uploadFileStreaming = async ( contentHmac: fileSigned, }); - // Generate and upload thumbnail for video files if (fileType.startsWith("video/")) { try { const thumbnail = await generateThumbnailFromFile(file); @@ -446,35 +412,29 @@ export const uploadFile = async ( }); const state = uploadingFiles.at(-1)!; - const fileType = getFileType(file); + return await scheduler.schedule(file.size, async () => { + state.status = "encryption-pending"; - // Image files: use buffer-based approach (need EXIF + thumbnail) - if (isImageFile(fileType)) { - return await scheduler.schedule(file.size, async () => { - state.status = "encryption-pending"; - - try { - const { fileBuffer, fileSigned } = await requestDuplicateFileScan( - file, - 
hmacSecret, - onDuplicate, - ); - if (!fileBuffer || !fileSigned) { - state.status = "canceled"; - uploadingFiles = uploadingFiles.filter((file) => file !== state); - return undefined; - } + try { + const { fileSigned } = await requestDuplicateFileScan(file, hmacSecret, onDuplicate); + if (!fileSigned) { + state.status = "canceled"; + uploadingFiles = uploadingFiles.filter((file) => file !== state); + return; + } + const fileType = getFileType(file); + if (fileType.startsWith("image/")) { + const fileBuffer = await file.arrayBuffer(); const { dataKeyWrapped, dataKeyVersion, - fileType, chunksEncrypted, nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnail, - } = await encryptFile(state, file, fileBuffer, masterKey); + } = await encryptImageFile(state, file, masterKey); const metadata = { chunks: chunksEncrypted.length, @@ -492,7 +452,7 @@ export const uploadFile = async ( lastModifiedAtIv: lastModifiedAtEncrypted.iv, }; - const { fileId, thumbnailBuffer } = await requestFileUpload( + const { fileId, thumbnailBuffer } = await requestImageFileUpload( state, metadata, chunksEncrypted, @@ -501,36 +461,17 @@ export const uploadFile = async ( dataKeyVersion, ); return { fileId, fileBuffer, thumbnailBuffer }; - } catch (e) { - state.status = "error"; - throw e; + } else { + const { fileId } = await requestFileUpload( + state, + file, + masterKey, + hmacSecret, + fileSigned, + parentId, + ); + return { fileId }; } - }); - } - - // Video and other files: use streaming approach - return await scheduler.schedule(file.size, async () => { - state.status = "encryption-pending"; - - try { - // 1st pass: streaming HMAC for duplicate check - const { fileSigned } = await requestDuplicateFileScanStreaming(file, hmacSecret, onDuplicate); - if (!fileSigned) { - state.status = "canceled"; - uploadingFiles = uploadingFiles.filter((f) => f !== state); - return undefined; - } - - // 2nd pass: streaming encrypt + upload - const { fileId } = await uploadFileStreaming( - state, - file, - masterKey, - hmacSecret, - fileSigned, - parentId, - ); - return { fileId, fileBuffer: undefined, thumbnailBuffer: undefined }; } catch (e) { state.status = "error"; throw e; diff --git a/src/lib/modules/thumbnail.ts b/src/lib/modules/thumbnail.ts index 739c7af..75b0168 100644 --- a/src/lib/modules/thumbnail.ts +++ b/src/lib/modules/thumbnail.ts @@ -122,13 +122,8 @@ export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: strin } }; -export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => { - return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`; -}; - export const generateThumbnailFromFile = async (file: File) => { - const fileType = file.type || (file.name.endsWith(".heic") ? "image/heic" : ""); - if (!fileType.startsWith("video/")) return null; + if (!file.type.startsWith("video/")) return null; let url; try { @@ -142,3 +137,7 @@ export const generateThumbnailFromFile = async (file: File) => { } } }; + +export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => { + return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`; +};
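
For reference, the on-disk chunk format this series introduces is IV-prefixed AES-GCM: each plaintext chunk of up to CHUNK_SIZE (4 MiB) is sealed as a 12-byte random IV followed by the ciphertext, which WebCrypto extends with a 16-byte authentication tag, so a sealed chunk is exactly ENCRYPTION_OVERHEAD (28) bytes larger than its plaintext. Below is a minimal round-trip sketch of the encryptChunk/decryptChunk helpers from patch 1/8; the inlined constants and the test harness are illustrative, not part of the patch.

// Round-trip sketch of the chunk format from patch 1/8: IV || ciphertext(+tag).
// Constants mirror src/lib/constants/upload.ts; the harness below is illustrative.
const CHUNK_SIZE = 4 * 1024 * 1024;
const AES_GCM_IV_SIZE = 12;
const AES_GCM_TAG_SIZE = 16;
const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE;

const encryptChunk = async (chunk: ArrayBuffer, dataKey: CryptoKey) => {
  const iv = crypto.getRandomValues(new Uint8Array(AES_GCM_IV_SIZE));
  const ciphertext = await crypto.subtle.encrypt({ name: "AES-GCM", iv }, dataKey, chunk);
  // Prepend the IV so each stored chunk is self-describing.
  const sealed = new Uint8Array(AES_GCM_IV_SIZE + ciphertext.byteLength);
  sealed.set(iv);
  sealed.set(new Uint8Array(ciphertext), AES_GCM_IV_SIZE);
  return sealed.buffer;
};

const decryptChunk = (sealed: ArrayBuffer, dataKey: CryptoKey) =>
  crypto.subtle.decrypt(
    { name: "AES-GCM", iv: sealed.slice(0, AES_GCM_IV_SIZE) },
    dataKey,
    sealed.slice(AES_GCM_IV_SIZE),
  );

// A sealed chunk is exactly ENCRYPTION_OVERHEAD bytes larger than its plaintext.
const dataKey = await crypto.subtle.generateKey({ name: "AES-GCM", length: 256 }, false, [
  "encrypt",
  "decrypt",
]);
const plaintext = crypto.getRandomValues(new Uint8Array(1024)).buffer;
const sealed = await encryptChunk(plaintext, dataKey);
console.assert(sealed.byteLength === plaintext.byteLength + ENCRYPTION_OVERHEAD);
console.assert(sealed.byteLength <= CHUNK_SIZE + ENCRYPTION_OVERHEAD);
const opened = await decryptChunk(sealed, dataKey);
console.assert(new Uint8Array(opened).toString() === new Uint8Array(plaintext).toString());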
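
The duplicate scan in patch 8/8 no longer buffers the whole file: createHmacStream exports the HMAC secret once, then the scan feeds file.stream() through it chunk by chunk, so only one chunk is resident at a time. The sketch below shows that pattern, assuming noble-hashes-style hmac/sha256 imports (the patch's hunk uses hmac.create(sha256, key), but the exact import paths are not visible in the diff); the hmacFile helper is a hypothetical stand-in for the scan's reader loop.

// Streaming HMAC sketch; import paths are assumed, not confirmed by the diff.
import { hmac } from "@noble/hashes/hmac";
import { sha256 } from "@noble/hashes/sha256";

const createHmacStream = async (hmacSecret: CryptoKey) => {
  const h = hmac.create(sha256, new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret)));
  return {
    update: (data: Uint8Array) => h.update(data),
    digest: () => h.digest(),
  };
};

// Hypothetical helper: HMAC a File without buffering it whole.
const hmacFile = async (file: File, hmacSecret: CryptoKey) => {
  const stream = await createHmacStream(hmacSecret);
  const reader = file.stream().getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    stream.update(value);
  }
  return stream.digest(); // Uint8Array; the patch base64-encodes this as contentHmac
};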
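
On completeFileUpload, the server stitches chunks 0 through totalChunks - 1 into the final library file while computing a SHA-256 digest in the same pass. Here is a minimal sketch of that assembly loop under Node's stream APIs; the assembleChunks name, the paths, and the finish-event handling are illustrative stand-ins for the session-driven code in the router above.

// Sketch of chunk assembly: sequential read of numbered chunk files into one
// output file, hashing as we go. Names and paths are illustrative.
import { createHash } from "node:crypto";
import { createReadStream, createWriteStream } from "node:fs";
import { once } from "node:events";

const assembleChunks = async (chunkDir: string, filePath: string, totalChunks: number) => {
  const hashStream = createHash("sha256");
  const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
  for (let i = 0; i < totalChunks; i++) {
    // Chunks are read strictly in index order so the digest matches the original stream.
    for await (const chunk of createReadStream(`${chunkDir}/${i}`)) {
      hashStream.update(chunk as Buffer);
      writeStream.write(chunk); // write() backpressure is not handled in this sketch
    }
  }
  writeStream.end();
  await once(writeStream, "finish");
  return hashStream.digest("base64");
};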