From b9e6f17b0c5c65279561f1a25ebf4359f035a775 Mon Sep 17 00:00:00 2001
From: static
Date: Sun, 11 Jan 2026 00:29:59 +0900
Subject: [PATCH 01/19] Prepend the IV to the encrypted file and thumbnail on
 transfer
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/lib/modules/crypto/aes.ts                 |  8 ++-
 src/lib/modules/file/download.svelte.ts       | 17 +++--
 src/lib/modules/file/thumbnail.ts             | 30 ++++-----
 src/lib/modules/filesystem/file.ts            |  2 -
 src/lib/modules/filesystem/types.ts           |  3 +-
 src/lib/server/modules/http.ts                | 14 +++++
 src/lib/server/services/file.ts               | 63 +++++++++++++++----
 src/lib/services/file.ts                      |  8 +--
 .../(fullscreen)/file/[id]/+page.svelte       | 20 +++---
 .../settings/thumbnail/service.ts             |  2 +-
 src/routes/api/file/[id]/download/+server.ts  | 33 ++++++++--
 .../file/[id]/thumbnail/download/+server.ts   | 33 ++++++++--
 src/trpc/routers/file.ts                      |  4 +-
 13 files changed, 161 insertions(+), 76 deletions(-)
 create mode 100644 src/lib/server/modules/http.ts

diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts
index 3c096ba..c911d26 100644
--- a/src/lib/modules/crypto/aes.ts
+++ b/src/lib/modules/crypto/aes.ts
@@ -89,11 +89,15 @@ export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => {
   return { ciphertext, iv: encodeToBase64(iv.buffer) };
 };
 
-export const decryptData = async (ciphertext: BufferSource, iv: string, dataKey: CryptoKey) => {
+export const decryptData = async (
+  ciphertext: BufferSource,
+  iv: string | BufferSource,
+  dataKey: CryptoKey,
+) => {
   return await window.crypto.subtle.decrypt(
     {
       name: "AES-GCM",
-      iv: decodeFromBase64(iv),
+      iv: typeof iv === "string" ? decodeFromBase64(iv) : iv,
     } satisfies AesGcmParams,
     dataKey,
     ciphertext,
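
The wire format this patch introduces is a single opaque body: the 12-byte AES-GCM IV followed immediately by the ciphertext (which WebCrypto terminates with the 16-byte GCM tag). A minimal sketch of both directions under that assumption — the helper names here are illustrative, not part of the codebase:

const IV_SIZE = 12; // AES-GCM IV length used throughout this series

// Encrypt: generate a random IV and prepend it to the ciphertext.
const sealWithIv = async (data: BufferSource, key: CryptoKey): Promise<ArrayBuffer> => {
  const iv = crypto.getRandomValues(new Uint8Array(IV_SIZE));
  const ciphertext = await crypto.subtle.encrypt({ name: "AES-GCM", iv }, key, data);
  const out = new Uint8Array(IV_SIZE + ciphertext.byteLength);
  out.set(iv, 0);
  out.set(new Uint8Array(ciphertext), IV_SIZE);
  return out.buffer;
};

// Decrypt: split the IV back off the front, as decryptFile and fetchFromServer now do.
const openWithIv = async (body: ArrayBuffer, key: CryptoKey): Promise<ArrayBuffer> => {
  return crypto.subtle.decrypt(
    { name: "AES-GCM", iv: body.slice(0, IV_SIZE) },
    key,
    body.slice(IV_SIZE),
  );
};
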
diff --git a/src/lib/modules/file/download.svelte.ts b/src/lib/modules/file/download.svelte.ts
index bea8316..97f42ea 100644
--- a/src/lib/modules/file/download.svelte.ts
+++ b/src/lib/modules/file/download.svelte.ts
@@ -62,15 +62,14 @@ const requestFileDownload = limitFunction(
 );
 
 const decryptFile = limitFunction(
-  async (
-    state: FileDownloadState,
-    fileEncrypted: ArrayBuffer,
-    fileEncryptedIv: string,
-    dataKey: CryptoKey,
-  ) => {
+  async (state: FileDownloadState, fileEncrypted: ArrayBuffer, dataKey: CryptoKey) => {
     state.status = "decrypting";
 
-    const fileBuffer = await decryptData(fileEncrypted, fileEncryptedIv, dataKey);
+    const fileBuffer = await decryptData(
+      fileEncrypted.slice(12),
+      fileEncrypted.slice(0, 12),
+      dataKey,
+    );
 
     state.status = "decrypted";
     state.result = fileBuffer;
@@ -79,7 +78,7 @@ const decryptFile = limitFunction(
   { concurrency: 4 },
 );
 
-export const downloadFile = async (id: number, fileEncryptedIv: string, dataKey: CryptoKey) => {
+export const downloadFile = async (id: number, dataKey: CryptoKey) => {
   downloadingFiles.push({
     id,
     status: "download-pending",
@@ -87,7 +86,7 @@ export const downloadFile = async (id: number, fileEncryptedIv: string, dataKey:
   const state = downloadingFiles.at(-1)!;
 
   try {
-    return await decryptFile(state, await requestFileDownload(state, id), fileEncryptedIv, dataKey);
+    return await decryptFile(state, await requestFileDownload(state, id), dataKey);
   } catch (e) {
     state.status = "error";
     throw e;
diff --git a/src/lib/modules/file/thumbnail.ts b/src/lib/modules/file/thumbnail.ts
index f923153..ed40e13 100644
--- a/src/lib/modules/file/thumbnail.ts
+++ b/src/lib/modules/file/thumbnail.ts
@@ -5,7 +5,6 @@ import { decryptData } from "$lib/modules/crypto";
 import type { SummarizedFileInfo } from "$lib/modules/filesystem";
 import { readFile, writeFile, deleteFile, deleteDirectory } from "$lib/modules/opfs";
 import { getThumbnailUrl } from "$lib/modules/thumbnail";
-import { isTRPCClientError, trpc } from "$trpc/client";
 
 const loadedThumbnails = new LRUCache<number, Promise<string>>({ max: 100 });
 const loadingThumbnails = new Map<number, Promise<string | null>>();
@@ -18,25 +17,18 @@ const fetchFromOpfs = async (fileId: number) => {
 };
 
 const fetchFromServer = async (fileId: number, dataKey: CryptoKey) => {
-  try {
-    const [thumbnailEncrypted, { contentIv: thumbnailEncryptedIv }] = await Promise.all([
-      fetch(`/api/file/${fileId}/thumbnail/download`),
-      trpc().file.thumbnail.query({ id: fileId }),
-    ]);
-    const thumbnailBuffer = await decryptData(
-      await thumbnailEncrypted.arrayBuffer(),
-      thumbnailEncryptedIv,
-      dataKey,
-    );
+  const res = await fetch(`/api/file/${fileId}/thumbnail/download`);
+  if (!res.ok) return null;
 
-    void writeFile(`/thumbnail/file/${fileId}`, thumbnailBuffer);
-    return getThumbnailUrl(thumbnailBuffer);
-  } catch (e) {
-    if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") {
-      return null;
-    }
-    throw e;
-  }
+  const thumbnailEncrypted = await res.arrayBuffer();
+  const thumbnailBuffer = await decryptData(
+    thumbnailEncrypted.slice(12),
+    thumbnailEncrypted.slice(0, 12),
+    dataKey,
+  );
+
+  void writeFile(`/thumbnail/file/${fileId}`, thumbnailBuffer);
+  return getThumbnailUrl(thumbnailBuffer);
 };
 
 export const getFileThumbnail = (file: SummarizedFileInfo) => {
diff --git a/src/lib/modules/filesystem/file.ts b/src/lib/modules/filesystem/file.ts
index 7d5feb9..daf7fd6 100644
--- a/src/lib/modules/filesystem/file.ts
+++ b/src/lib/modules/filesystem/file.ts
@@ -50,7 +50,6 @@ const cache =
new FilesystemCache({ parentId: file.parent, dataKey: metadata.dataKey, contentType: file.contentType, - contentIv: file.contentIv, name: metadata.name, createdAt: metadata.createdAt, lastModifiedAt: metadata.lastModifiedAt, @@ -118,7 +117,6 @@ const cache = new FilesystemCache({ exists: true as const, parentId: metadataRaw.parent, contentType: metadataRaw.contentType, - contentIv: metadataRaw.contentIv, categories, ...metadata, }; diff --git a/src/lib/modules/filesystem/types.ts b/src/lib/modules/filesystem/types.ts index 9f33113..abac40c 100644 --- a/src/lib/modules/filesystem/types.ts +++ b/src/lib/modules/filesystem/types.ts @@ -31,7 +31,6 @@ export interface FileInfo { parentId: DirectoryId; dataKey?: DataKey; contentType: string; - contentIv?: string; name: string; createdAt?: Date; lastModifiedAt: Date; @@ -42,7 +41,7 @@ export type MaybeFileInfo = | (FileInfo & { exists: true }) | ({ id: number; exists: false } & AllUndefined>); -export type SummarizedFileInfo = Omit; +export type SummarizedFileInfo = Omit; export type CategoryFileInfo = SummarizedFileInfo & { isRecursive: boolean }; interface LocalCategoryInfo { diff --git a/src/lib/server/modules/http.ts b/src/lib/server/modules/http.ts new file mode 100644 index 0000000..4f79ec5 --- /dev/null +++ b/src/lib/server/modules/http.ts @@ -0,0 +1,14 @@ +export const parseRangeHeader = (rangeHeader: string | null) => { + if (!rangeHeader) return undefined; + + const firstRange = rangeHeader.split(",")[0]!.trim(); + const parts = firstRange.replace(/bytes=/, "").split("-"); + return { + start: parts[0] ? parseInt(parts[0], 10) : undefined, + end: parts[1] ? parseInt(parts[1], 10) : undefined, + }; +}; + +export const getContentRangeHeader = (range?: { start: number; end: number; total: number }) => { + return range && { "Content-Range": `bytes ${range.start}-${range.end}/${range.total}` }; +}; diff --git a/src/lib/server/services/file.ts b/src/lib/server/services/file.ts index 9032ffb..e45b16e 100644 --- a/src/lib/server/services/file.ts +++ b/src/lib/server/services/file.ts @@ -10,30 +10,69 @@ import { FileRepo, MediaRepo, IntegrityError } from "$lib/server/db"; import env from "$lib/server/loadenv"; import { safeUnlink } from "$lib/server/modules/filesystem"; -export const getFileStream = async (userId: number, fileId: number) => { +const createEncContentStream = async ( + path: string, + iv: Buffer, + range?: { start?: number; end?: number }, +) => { + const { size: fileSize } = await stat(path); + const ivSize = iv.byteLength; + const totalSize = fileSize + ivSize; + + const start = range?.start ?? 0; + const end = range?.end ?? 
totalSize - 1; + if (start > end || start < 0 || end >= totalSize) { + error(416, "Invalid range"); + } + + return { + encContentStream: Readable.toWeb( + Readable.from( + (async function* () { + if (start < ivSize) { + yield iv.subarray(start, Math.min(end + 1, ivSize)); + } + if (end >= ivSize) { + yield* createReadStream(path, { + start: Math.max(0, start - ivSize), + end: end - ivSize, + }); + } + })(), + ), + ), + range: { start, end, total: totalSize }, + }; +}; + +export const getFileStream = async ( + userId: number, + fileId: number, + range?: { start?: number; end?: number }, +) => { const file = await FileRepo.getFile(userId, fileId); if (!file) { error(404, "Invalid file id"); } - const { size } = await stat(file.path); - return { - encContentStream: Readable.toWeb(createReadStream(file.path)), - encContentSize: size, - }; + return createEncContentStream(file.path, Buffer.from(file.encContentIv, "base64"), range); }; -export const getFileThumbnailStream = async (userId: number, fileId: number) => { +export const getFileThumbnailStream = async ( + userId: number, + fileId: number, + range?: { start?: number; end?: number }, +) => { const thumbnail = await MediaRepo.getFileThumbnail(userId, fileId); if (!thumbnail) { error(404, "File or its thumbnail not found"); } - const { size } = await stat(thumbnail.path); - return { - encContentStream: Readable.toWeb(createReadStream(thumbnail.path)), - encContentSize: size, - }; + return createEncContentStream( + thumbnail.path, + Buffer.from(thumbnail.encContentIv, "base64"), + range, + ); }; export const uploadFileThumbnail = async ( diff --git a/src/lib/services/file.ts b/src/lib/services/file.ts index 05a92e1..a0e769b 100644 --- a/src/lib/services/file.ts +++ b/src/lib/services/file.ts @@ -9,15 +9,11 @@ import { import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; import { trpc } from "$trpc/client"; -export const requestFileDownload = async ( - fileId: number, - fileEncryptedIv: string, - dataKey: CryptoKey, -) => { +export const requestFileDownload = async (fileId: number, dataKey: CryptoKey) => { const cache = await getFileCache(fileId); if (cache) return cache; - const fileBuffer = await downloadFile(fileId, fileEncryptedIv, dataKey); + const fileBuffer = await downloadFile(fileId, dataKey); storeFileCache(fileId, fileBuffer); // Intended return fileBuffer; }; diff --git a/src/routes/(fullscreen)/file/[id]/+page.svelte b/src/routes/(fullscreen)/file/[id]/+page.svelte index 0b344bc..f325c5e 100644 --- a/src/routes/(fullscreen)/file/[id]/+page.svelte +++ b/src/routes/(fullscreen)/file/[id]/+page.svelte @@ -5,7 +5,7 @@ import { page } from "$app/state"; import { FullscreenDiv } from "$lib/components/atoms"; import { Categories, IconEntryButton, TopBar } from "$lib/components/molecules"; - import { getFileInfo, type FileInfo, type MaybeFileInfo } from "$lib/modules/filesystem"; + import { getFileInfo, type MaybeFileInfo } from "$lib/modules/filesystem"; import { captureVideoThumbnail } from "$lib/modules/thumbnail"; import { getFileDownloadState } from "$lib/modules/file"; import { masterKeyStore } from "$lib/stores"; @@ -95,14 +95,12 @@ untrack(() => { if (!downloadState && !isDownloadRequested) { isDownloadRequested = true; - requestFileDownload(data.id, info!.contentIv!, info!.dataKey!.key).then( - async (buffer) => { - const blob = await updateViewer(buffer, contentType); - if (!viewerType) { - FileSaver.saveAs(blob, info!.name); - } - }, - ); + requestFileDownload(data.id, info!.dataKey!.key).then(async (buffer) 
=> { + const blob = await updateViewer(buffer, contentType); + if (!viewerType) { + FileSaver.saveAs(blob, info!.name); + } + }); } }); } @@ -110,7 +108,9 @@ $effect(() => { if (info?.exists && downloadState?.status === "decrypted") { - untrack(() => !isDownloadRequested && updateViewer(downloadState.result!, info!.contentIv!)); + untrack( + () => !isDownloadRequested && updateViewer(downloadState.result!, info!.contentType!), + ); } }); diff --git a/src/routes/(fullscreen)/settings/thumbnail/service.ts b/src/routes/(fullscreen)/settings/thumbnail/service.ts index 85226b0..75c64b8 100644 --- a/src/routes/(fullscreen)/settings/thumbnail/service.ts +++ b/src/routes/(fullscreen)/settings/thumbnail/service.ts @@ -77,7 +77,7 @@ export const requestThumbnailGeneration = async (fileInfo: FileInfo) => { await scheduler.schedule( async () => { statuses.set(fileInfo.id, "generation-pending"); - file = await requestFileDownload(fileInfo.id, fileInfo.contentIv!, fileInfo.dataKey?.key!); + file = await requestFileDownload(fileInfo.id, fileInfo.dataKey?.key!); return file.byteLength; }, async () => { diff --git a/src/routes/api/file/[id]/download/+server.ts b/src/routes/api/file/[id]/download/+server.ts index 5040c73..974dd54 100644 --- a/src/routes/api/file/[id]/download/+server.ts +++ b/src/routes/api/file/[id]/download/+server.ts @@ -1,10 +1,15 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; import { authorize } from "$lib/server/modules/auth"; +import { parseRangeHeader, getContentRangeHeader } from "$lib/server/modules/http"; import { getFileStream } from "$lib/server/services/file"; import type { RequestHandler } from "./$types"; -export const GET: RequestHandler = async ({ locals, params }) => { +const downloadHandler = async ( + locals: App.Locals, + params: Record, + request: Request, +) => { const { userId } = await authorize(locals, "activeClient"); const zodRes = z @@ -15,11 +20,29 @@ export const GET: RequestHandler = async ({ locals, params }) => { if (!zodRes.success) error(400, "Invalid path parameters"); const { id } = zodRes.data; - const { encContentStream, encContentSize } = await getFileStream(userId, id); - return new Response(encContentStream as ReadableStream, { + const { encContentStream, range } = await getFileStream( + userId, + id, + parseRangeHeader(request.headers.get("Range")), + ); + return { + stream: encContentStream, headers: { + "Accept-Ranges": "bytes", + "Content-Length": (range.end - range.start + 1).toString(), "Content-Type": "application/octet-stream", - "Content-Length": encContentSize.toString(), + ...getContentRangeHeader(range), }, - }); + isRangeRequest: !!range, + }; +}; + +export const GET: RequestHandler = async ({ locals, params, request }) => { + const { stream, headers, isRangeRequest } = await downloadHandler(locals, params, request); + return new Response(stream as ReadableStream, { status: isRangeRequest ? 206 : 200, headers }); +}; + +export const HEAD: RequestHandler = async ({ locals, params, request }) => { + const { headers, isRangeRequest } = await downloadHandler(locals, params, request); + return new Response(null, { status: isRangeRequest ? 
206 : 200, headers });
+};
diff --git a/src/routes/api/file/[id]/thumbnail/download/+server.ts b/src/routes/api/file/[id]/thumbnail/download/+server.ts
index addd800..70d4cd3 100644
--- a/src/routes/api/file/[id]/thumbnail/download/+server.ts
+++ b/src/routes/api/file/[id]/thumbnail/download/+server.ts
@@ -1,10 +1,15 @@
 import { error } from "@sveltejs/kit";
 import { z } from "zod";
 import { authorize } from "$lib/server/modules/auth";
+import { parseRangeHeader, getContentRangeHeader } from "$lib/server/modules/http";
 import { getFileThumbnailStream } from "$lib/server/services/file";
 import type { RequestHandler } from "./$types";
 
-export const GET: RequestHandler = async ({ locals, params }) => {
+const downloadHandler = async (
+  locals: App.Locals,
+  params: Record<string, string>,
+  request: Request,
+) => {
   const { userId } = await authorize(locals, "activeClient");
 
   const zodRes = z
@@ -15,11 +20,29 @@ export const GET: RequestHandler = async ({ locals, params }) => {
   if (!zodRes.success) error(400, "Invalid path parameters");
   const { id } = zodRes.data;
 
-  const { encContentStream, encContentSize } = await getFileThumbnailStream(userId, id);
-  return new Response(encContentStream as ReadableStream, {
+  const { encContentStream, range } = await getFileThumbnailStream(
+    userId,
+    id,
+    parseRangeHeader(request.headers.get("Range")),
+  );
+  return {
+    stream: encContentStream,
     headers: {
+      "Accept-Ranges": "bytes",
+      "Content-Length": (range.end - range.start + 1).toString(),
       "Content-Type": "application/octet-stream",
-      "Content-Length": encContentSize.toString(),
+      ...getContentRangeHeader(range),
     },
-  });
+    isRangeRequest: !!range,
+  };
+};
+
+export const GET: RequestHandler = async ({ locals, params, request }) => {
+  const { stream, headers, isRangeRequest } = await downloadHandler(locals, params, request);
+  return new Response(stream as ReadableStream, { status: isRangeRequest ? 206 : 200, headers });
+};
+
+export const HEAD: RequestHandler = async ({ locals, params, request }) => {
+  const { headers, isRangeRequest } = await downloadHandler(locals, params, request);
+  return new Response(null, { status: isRangeRequest ? 206 : 200, headers });
+};
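
With both download endpoints now honoring Range and answering HEAD, a client can probe the encrypted size without pulling the body and resume an interrupted transfer. A usage sketch against the endpoints above, assuming an authenticated session; resumeDownload is an illustrative helper, and byte offsets include the 12-byte IV prefix since it is part of the served stream:

const resumeDownload = async (fileId: number, received: number): Promise<ArrayBuffer> => {
  // Probe total encrypted size (IV + ciphertext) via HEAD.
  const head = await fetch(`/api/file/${fileId}/download`, { method: "HEAD" });
  const totalSize = Number(head.headers.get("Content-Length"));
  if (received >= totalSize) throw new Error("nothing left to fetch");

  // Ask for the remainder; the server answers 206 with a Content-Range of
  // the form "bytes <start>-<end>/<total>".
  const res = await fetch(`/api/file/${fileId}/download`, {
    headers: { Range: `bytes=${received}-` },
  });
  if (res.status !== 206) throw new Error(`expected 206, got ${res.status}`);
  return res.arrayBuffer();
};
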
diff --git a/src/trpc/routers/file.ts b/src/trpc/routers/file.ts
index c3f8159..a56a91f 100644
--- a/src/trpc/routers/file.ts
+++ b/src/trpc/routers/file.ts
@@ -24,7 +24,6 @@ const fileRouter = router({
         dek: file.encDek,
         dekVersion: file.dekVersion,
         contentType: file.contentType,
-        contentIv: file.encContentIv,
         name: file.encName.ciphertext,
         nameIv: file.encName.iv,
         createdAt: file.encCreatedAt?.ciphertext,
@@ -58,7 +57,6 @@ const fileRouter = router({
         dek: file.encDek,
         dekVersion: file.dekVersion,
         contentType: file.contentType,
-        contentIv: file.encContentIv,
         name: file.encName.ciphertext,
         nameIv: file.encName.iv,
         createdAt: file.encCreatedAt?.ciphertext,
@@ -158,7 +156,7 @@ const fileRouter = router({
       throw new TRPCError({ code: "NOT_FOUND", message: "File or its thumbnail not found" });
     }
 
-    return { updatedAt: thumbnail.updatedAt, contentIv: thumbnail.encContentIv };
+    return { updatedAt: thumbnail.updatedAt };
   }),
 });

From 4b783a36e92efa9733fb36f1d0f7acfbb26311e0 Mon Sep 17 00:00:00 2001
From: static
Date: Sun, 11 Jan 2026 04:45:21 +0900
Subject: [PATCH 02/19] Switch the file upload flow to chunked uploads
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .dockerignore                                 |   1 +
 .env.example                                  |   1 +
 .gitignore                                    |   1 +
 docker-compose.yaml                           |   1 +
 src/hooks.server.ts                           |   2 +
 src/lib/constants/index.ts                    |   1 +
 src/lib/constants/upload.ts                   |   5 +
 src/lib/modules/crypto/aes.ts                 |  26 ++-
 src/lib/modules/file/download.svelte.ts       |  34 ++--
 src/lib/modules/file/upload.svelte.ts         | 147 +++++++++++------
 src/lib/modules/filesystem/file.ts            |   2 +
 src/lib/modules/filesystem/types.ts           |   1 +
 src/lib/server/db/file.ts                     |  98 ++++--------
 src/lib/server/db/index.ts                    |   1 +
 .../migrations/1768062380-AddChunkedUpload.ts |  50 ++++++
 src/lib/server/db/migrations/index.ts         |   2 +
 src/lib/server/db/schema/file.ts              |   2 +-
 src/lib/server/db/schema/index.ts             |   1 +
 src/lib/server/db/schema/upload.ts            |  26 +++
 src/lib/server/db/upload.ts                   | 122 +++++++++++++++
 src/lib/server/loadenv.ts                     |   1 +
 src/lib/server/modules/filesystem.ts          |   3 +
 src/lib/server/schemas/file.ts                |  29 ----
 src/lib/server/services/file.ts               | 107 ++++++++-----
 src/lib/services/file.ts                      |  13 +-
 .../(fullscreen)/file/[id]/+page.svelte       |   2 +-
 .../settings/thumbnail/service.ts             |   4 +-
 src/routes/api/file/upload/+server.ts         | 108 -------------
 .../upload/[id]/chunks/[index]/+server.ts     |  43 +++++
 src/trpc/routers/file.ts                      | 148 +++++++++++++++++-
 30 files changed, 667 insertions(+), 315 deletions(-)
 create mode 100644 src/lib/constants/index.ts
 create mode 100644 src/lib/constants/upload.ts
 create mode 100644 src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts
 create mode 100644 src/lib/server/db/schema/upload.ts
 create mode 100644 src/lib/server/db/upload.ts
 delete mode 100644 src/routes/api/file/upload/+server.ts
 create mode 100644 src/routes/api/file/upload/[id]/chunks/[index]/+server.ts

diff --git a/.dockerignore b/.dockerignore
index 4f68a3b..6d312ec 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -12,6 +12,7 @@ node_modules
 /data
 /library
 /thumbnails
+/uploads
 
 # OS
 .DS_Store
diff --git a/.env.example b/.env.example
index e3b6365..4e8b20b 100644
--- a/.env.example
+++ b/.env.example
@@ -12,3 +12,4 @@ USER_CLIENT_CHALLENGE_EXPIRES=
 SESSION_UPGRADE_CHALLENGE_EXPIRES=
 LIBRARY_PATH=
 THUMBNAILS_PATH=
+UPLOADS_PATH=
diff --git a/.gitignore b/.gitignore
index
5078fa8..a200c74 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ node_modules /data /library /thumbnails +/uploads # OS .DS_Store diff --git a/docker-compose.yaml b/docker-compose.yaml index 2015066..a624d9f 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -20,6 +20,7 @@ services: - SESSION_UPGRADE_CHALLENGE_EXPIRES - LIBRARY_PATH=/app/data/library - THUMBNAILS_PATH=/app/data/thumbnails + - UPLOADS_PATH=/app/data/uploads # SvelteKit - ADDRESS_HEADER=${TRUST_PROXY:+X-Forwarded-For} - XFF_DEPTH=${TRUST_PROXY:-} diff --git a/src/hooks.server.ts b/src/hooks.server.ts index 6f94a7e..1795ce3 100644 --- a/src/hooks.server.ts +++ b/src/hooks.server.ts @@ -7,6 +7,7 @@ import { cleanupExpiredSessions, cleanupExpiredSessionUpgradeChallenges, } from "$lib/server/db/session"; +import { cleanupExpiredUploadSessions } from "$lib/server/db/upload"; import { authenticate, setAgentInfo } from "$lib/server/middlewares"; export const init: ServerInit = async () => { @@ -16,6 +17,7 @@ export const init: ServerInit = async () => { cleanupExpiredUserClientChallenges(); cleanupExpiredSessions(); cleanupExpiredSessionUpgradeChallenges(); + cleanupExpiredUploadSessions(); }); }; diff --git a/src/lib/constants/index.ts b/src/lib/constants/index.ts new file mode 100644 index 0000000..ab6125a --- /dev/null +++ b/src/lib/constants/index.ts @@ -0,0 +1 @@ +export * from "./upload"; diff --git a/src/lib/constants/upload.ts b/src/lib/constants/upload.ts new file mode 100644 index 0000000..337700d --- /dev/null +++ b/src/lib/constants/upload.ts @@ -0,0 +1,5 @@ +export const CHUNK_SIZE = 4 * 1024 * 1024; + +export const AES_GCM_IV_SIZE = 12; +export const AES_GCM_TAG_SIZE = 16; +export const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE; diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts index c911d26..67f6a9f 100644 --- a/src/lib/modules/crypto/aes.ts +++ b/src/lib/modules/crypto/aes.ts @@ -1,4 +1,11 @@ -import { encodeString, decodeString, encodeToBase64, decodeFromBase64 } from "./util"; +import { AES_GCM_IV_SIZE } from "$lib/constants"; +import { + encodeString, + decodeString, + encodeToBase64, + decodeFromBase64, + concatenateBuffers, +} from "./util"; export const generateMasterKey = async () => { return { @@ -86,7 +93,7 @@ export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => { dataKey, data, ); - return { ciphertext, iv: encodeToBase64(iv.buffer) }; + return { ciphertext, iv: iv.buffer }; }; export const decryptData = async ( @@ -106,9 +113,22 @@ export const decryptData = async ( export const encryptString = async (plaintext: string, dataKey: CryptoKey) => { const { ciphertext, iv } = await encryptData(encodeString(plaintext), dataKey); - return { ciphertext: encodeToBase64(ciphertext), iv }; + return { ciphertext: encodeToBase64(ciphertext), iv: encodeToBase64(iv) }; }; export const decryptString = async (ciphertext: string, iv: string, dataKey: CryptoKey) => { return decodeString(await decryptData(decodeFromBase64(ciphertext), iv, dataKey)); }; + +export const encryptChunk = async (chunk: ArrayBuffer, dataKey: CryptoKey) => { + const { ciphertext, iv } = await encryptData(chunk, dataKey); + return concatenateBuffers(iv, ciphertext).buffer; +}; + +export const decryptChunk = async (encryptedChunk: ArrayBuffer, dataKey: CryptoKey) => { + return await decryptData( + encryptedChunk.slice(AES_GCM_IV_SIZE), + encryptedChunk.slice(0, AES_GCM_IV_SIZE), + dataKey, + ); +}; diff --git a/src/lib/modules/file/download.svelte.ts 
b/src/lib/modules/file/download.svelte.ts index 97f42ea..d438e3f 100644 --- a/src/lib/modules/file/download.svelte.ts +++ b/src/lib/modules/file/download.svelte.ts @@ -1,6 +1,7 @@ import axios from "axios"; import { limitFunction } from "p-limit"; -import { decryptData } from "$lib/modules/crypto"; +import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants"; +import { decryptChunk, concatenateBuffers } from "$lib/modules/crypto"; export interface FileDownloadState { id: number; @@ -62,15 +63,24 @@ const requestFileDownload = limitFunction( ); const decryptFile = limitFunction( - async (state: FileDownloadState, fileEncrypted: ArrayBuffer, dataKey: CryptoKey) => { + async ( + state: FileDownloadState, + fileEncrypted: ArrayBuffer, + encryptedChunkSize: number, + dataKey: CryptoKey, + ) => { state.status = "decrypting"; - const fileBuffer = await decryptData( - fileEncrypted.slice(12), - fileEncrypted.slice(0, 12), - dataKey, - ); + const chunks: ArrayBuffer[] = []; + let offset = 0; + while (offset < fileEncrypted.byteLength) { + const nextOffset = Math.min(offset + encryptedChunkSize, fileEncrypted.byteLength); + chunks.push(await decryptChunk(fileEncrypted.slice(offset, nextOffset), dataKey)); + offset = nextOffset; + } + + const fileBuffer = concatenateBuffers(...chunks).buffer; state.status = "decrypted"; state.result = fileBuffer; return fileBuffer; @@ -78,7 +88,7 @@ const decryptFile = limitFunction( { concurrency: 4 }, ); -export const downloadFile = async (id: number, dataKey: CryptoKey) => { +export const downloadFile = async (id: number, dataKey: CryptoKey, isLegacy: boolean) => { downloadingFiles.push({ id, status: "download-pending", @@ -86,7 +96,13 @@ export const downloadFile = async (id: number, dataKey: CryptoKey) => { const state = downloadingFiles.at(-1)!; try { - return await decryptFile(state, await requestFileDownload(state, id), dataKey); + const fileEncrypted = await requestFileDownload(state, id); + return await decryptFile( + state, + fileEncrypted, + isLegacy ? 
fileEncrypted.byteLength : CHUNK_SIZE + ENCRYPTION_OVERHEAD, + dataKey, + ); } catch (e) { state.status = "error"; throw e; diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index a632eb5..2bb6c7c 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -1,24 +1,23 @@ import axios from "axios"; import ExifReader from "exifreader"; import { limitFunction } from "p-limit"; +import { CHUNK_SIZE } from "$lib/constants"; import { encodeToBase64, generateDataKey, wrapDataKey, encryptData, encryptString, + encryptChunk, digestMessage, signMessageHmac, } from "$lib/modules/crypto"; import { Scheduler } from "$lib/modules/scheduler"; import { generateThumbnail } from "$lib/modules/thumbnail"; -import type { - FileThumbnailUploadRequest, - FileUploadRequest, - FileUploadResponse, -} from "$lib/server/schemas"; +import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; import type { MasterKey, HmacSecret } from "$lib/stores"; import { trpc } from "$trpc/client"; +import type { RouterInputs } from "$trpc/router.server"; export interface FileUploadState { name: string; @@ -110,6 +109,23 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => { return new Date(utcDate - offsetMs); }; +const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { + const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = []; + let offset = 0; + + while (offset < fileBuffer.byteLength) { + const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength); + const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey); + chunksEncrypted.push({ + chunkEncrypted: chunkEncrypted, + chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)), + }); + offset = nextOffset; + } + + return chunksEncrypted; +}; + const encryptFile = limitFunction( async (state: FileUploadState, file: File, fileBuffer: ArrayBuffer, masterKey: MasterKey) => { state.status = "encrypting"; @@ -123,9 +139,7 @@ const encryptFile = limitFunction( const { dataKey, dataKeyVersion } = await generateDataKey(); const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); - - const fileEncrypted = await encryptData(fileBuffer, dataKey); - const fileEncryptedHash = encodeToBase64(await digestMessage(fileEncrypted.ciphertext)); + const chunksEncrypted = await encryptChunks(fileBuffer, dataKey); const nameEncrypted = await encryptString(file.name, dataKey); const createdAtEncrypted = @@ -142,8 +156,7 @@ const encryptFile = limitFunction( dataKeyWrapped, dataKeyVersion, fileType, - fileEncrypted, - fileEncryptedHash, + chunksEncrypted, nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, @@ -154,30 +167,70 @@ const encryptFile = limitFunction( ); const requestFileUpload = limitFunction( - async (state: FileUploadState, form: FormData, thumbnailForm: FormData | null) => { + async ( + state: FileUploadState, + metadata: RouterInputs["file"]["startUpload"], + chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[], + fileSigned: string | undefined, + thumbnailForm: FormData | null, + ) => { state.status = "uploading"; - const res = await axios.post("/api/file/upload", form, { - onUploadProgress: ({ progress, rate, estimated }) => { - state.progress = progress; - state.rate = rate; - state.estimated = estimated; - }, - }); - const { file }: FileUploadResponse = res.data; + const { uploadId } = await trpc().file.startUpload.mutate(metadata); + // 
Upload chunks with progress tracking + const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0); + let uploadedBytes = 0; + const startTime = Date.now(); + + for (let i = 0; i < chunksEncrypted.length; i++) { + const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!; + + const response = await fetch(`/api/file/upload/${uploadId}/chunks/${i}`, { + method: "POST", + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${chunkEncryptedHash}:`, + }, + body: chunkEncrypted, + }); + + if (!response.ok) { + throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`); + } + + uploadedBytes += chunkEncrypted.byteLength; + + // Calculate progress, rate, estimated + const elapsed = (Date.now() - startTime) / 1000; // seconds + const rate = uploadedBytes / elapsed; // bytes per second + const remaining = totalBytes - uploadedBytes; + const estimated = rate > 0 ? remaining / rate : undefined; + + state.progress = uploadedBytes / totalBytes; + state.rate = rate; + state.estimated = estimated; + } + + // Complete upload + const { file: fileId } = await trpc().file.completeUpload.mutate({ + uploadId, + contentHmac: fileSigned, + }); + + // Upload thumbnail if exists if (thumbnailForm) { try { - await axios.post(`/api/file/${file}/thumbnail/upload`, thumbnailForm); + await axios.post(`/api/file/${fileId}/thumbnail/upload`, thumbnailForm); } catch (e) { - // TODO + // TODO: Error handling for thumbnail upload console.error(e); } } state.status = "uploaded"; - return { fileId: file }; + return { fileId }; }, { concurrency: 1 }, ); @@ -215,36 +268,28 @@ export const uploadFile = async ( dataKeyWrapped, dataKeyVersion, fileType, - fileEncrypted, - fileEncryptedHash, + chunksEncrypted, nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnail, } = await encryptFile(state, file, fileBuffer, masterKey); - const form = new FormData(); - form.set( - "metadata", - JSON.stringify({ - parent: parentId, - mekVersion: masterKey.version, - dek: dataKeyWrapped, - dekVersion: dataKeyVersion.toISOString(), - hskVersion: hmacSecret.version, - contentHmac: fileSigned, - contentType: fileType, - contentIv: fileEncrypted.iv, - name: nameEncrypted.ciphertext, - nameIv: nameEncrypted.iv, - createdAt: createdAtEncrypted?.ciphertext, - createdAtIv: createdAtEncrypted?.iv, - lastModifiedAt: lastModifiedAtEncrypted.ciphertext, - lastModifiedAtIv: lastModifiedAtEncrypted.iv, - } satisfies FileUploadRequest), - ); - form.set("content", new Blob([fileEncrypted.ciphertext])); - form.set("checksum", fileEncryptedHash); + const metadata = { + chunks: chunksEncrypted.length, + parent: parentId, + mekVersion: masterKey.version, + dek: dataKeyWrapped, + dekVersion: dataKeyVersion, + hskVersion: hmacSecret.version, + contentType: fileType, + name: nameEncrypted.ciphertext, + nameIv: nameEncrypted.iv, + createdAt: createdAtEncrypted?.ciphertext, + createdAtIv: createdAtEncrypted?.iv, + lastModifiedAt: lastModifiedAtEncrypted.ciphertext, + lastModifiedAtIv: lastModifiedAtEncrypted.iv, + }; let thumbnailForm = null; if (thumbnail) { @@ -253,13 +298,19 @@ export const uploadFile = async ( "metadata", JSON.stringify({ dekVersion: dataKeyVersion.toISOString(), - contentIv: thumbnail.iv, + contentIv: encodeToBase64(thumbnail.iv), } satisfies FileThumbnailUploadRequest), ); thumbnailForm.set("content", new Blob([thumbnail.ciphertext])); } - const { fileId } = await requestFileUpload(state, form, thumbnailForm); + const { fileId } = await 
requestFileUpload( + state, + metadata, + chunksEncrypted, + fileSigned, + thumbnailForm, + ); return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext }; } catch (e) { state.status = "error"; diff --git a/src/lib/modules/filesystem/file.ts b/src/lib/modules/filesystem/file.ts index daf7fd6..d80a872 100644 --- a/src/lib/modules/filesystem/file.ts +++ b/src/lib/modules/filesystem/file.ts @@ -47,6 +47,7 @@ const cache = new FilesystemCache({ return storeToIndexedDB({ id, + isLegacy: file.isLegacy, parentId: file.parent, dataKey: metadata.dataKey, contentType: file.contentType, @@ -115,6 +116,7 @@ const cache = new FilesystemCache({ return { id, exists: true as const, + isLegacy: metadataRaw.isLegacy, parentId: metadataRaw.parent, contentType: metadataRaw.contentType, categories, diff --git a/src/lib/modules/filesystem/types.ts b/src/lib/modules/filesystem/types.ts index abac40c..f4ce9cf 100644 --- a/src/lib/modules/filesystem/types.ts +++ b/src/lib/modules/filesystem/types.ts @@ -28,6 +28,7 @@ export type SubDirectoryInfo = Omit; - interface File { id: number; parentId: DirectoryId; @@ -28,15 +26,13 @@ interface File { hskVersion: number | null; contentHmac: string | null; contentType: string; - encContentIv: string; + encContentIv: string | null; encContentHash: string; encName: Ciphertext; encCreatedAt: Ciphertext | null; encLastModifiedAt: Ciphertext; } -export type NewFile = Omit; - interface FileCategory { id: number; parentId: CategoryId; @@ -46,7 +42,7 @@ interface FileCategory { encName: Ciphertext; } -export const registerDirectory = async (params: NewDirectory) => { +export const registerDirectory = async (params: Omit) => { await db.transaction().execute(async (trx) => { const mek = await trx .selectFrom("master_encryption_key") @@ -214,69 +210,41 @@ export const unregisterDirectory = async (userId: number, directoryId: number) = }); }; -export const registerFile = async (params: NewFile) => { +export const registerFile = async (trx: typeof db, params: Omit) => { if ((params.hskVersion && !params.contentHmac) || (!params.hskVersion && params.contentHmac)) { throw new Error("Invalid arguments"); } - return await db.transaction().execute(async (trx) => { - const mek = await trx - .selectFrom("master_encryption_key") - .select("version") - .where("user_id", "=", params.userId) - .where("state", "=", "active") - .limit(1) - .forUpdate() - .executeTakeFirst(); - if (mek?.version !== params.mekVersion) { - throw new IntegrityError("Inactive MEK version"); - } - - if (params.hskVersion) { - const hsk = await trx - .selectFrom("hmac_secret_key") - .select("version") - .where("user_id", "=", params.userId) - .where("state", "=", "active") - .limit(1) - .forUpdate() - .executeTakeFirst(); - if (hsk?.version !== params.hskVersion) { - throw new IntegrityError("Inactive HSK version"); - } - } - - const { fileId } = await trx - .insertInto("file") - .values({ - parent_id: params.parentId !== "root" ? 
params.parentId : null, - user_id: params.userId, - path: params.path, - master_encryption_key_version: params.mekVersion, - encrypted_data_encryption_key: params.encDek, - data_encryption_key_version: params.dekVersion, - hmac_secret_key_version: params.hskVersion, - content_hmac: params.contentHmac, - content_type: params.contentType, - encrypted_content_iv: params.encContentIv, - encrypted_content_hash: params.encContentHash, - encrypted_name: params.encName, - encrypted_created_at: params.encCreatedAt, - encrypted_last_modified_at: params.encLastModifiedAt, - }) - .returning("id as fileId") - .executeTakeFirstOrThrow(); - await trx - .insertInto("file_log") - .values({ - file_id: fileId, - timestamp: new Date(), - action: "create", - new_name: params.encName, - }) - .execute(); - return { id: fileId }; - }); + const { fileId } = await trx + .insertInto("file") + .values({ + parent_id: params.parentId !== "root" ? params.parentId : null, + user_id: params.userId, + path: params.path, + master_encryption_key_version: params.mekVersion, + encrypted_data_encryption_key: params.encDek, + data_encryption_key_version: params.dekVersion, + hmac_secret_key_version: params.hskVersion, + content_hmac: params.contentHmac, + content_type: params.contentType, + encrypted_content_iv: params.encContentIv, + encrypted_content_hash: params.encContentHash, + encrypted_name: params.encName, + encrypted_created_at: params.encCreatedAt, + encrypted_last_modified_at: params.encLastModifiedAt, + }) + .returning("id as fileId") + .executeTakeFirstOrThrow(); + await trx + .insertInto("file_log") + .values({ + file_id: fileId, + timestamp: new Date(), + action: "create", + new_name: params.encName, + }) + .execute(); + return { id: fileId }; }; export const getAllFilesByParent = async (userId: number, parentId: DirectoryId) => { diff --git a/src/lib/server/db/index.ts b/src/lib/server/db/index.ts index 5c21deb..140cf7d 100644 --- a/src/lib/server/db/index.ts +++ b/src/lib/server/db/index.ts @@ -5,6 +5,7 @@ export * as HskRepo from "./hsk"; export * as MediaRepo from "./media"; export * as MekRepo from "./mek"; export * as SessionRepo from "./session"; +export * as UploadRepo from "./upload"; export * as UserRepo from "./user"; export * from "./error"; diff --git a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts new file mode 100644 index 0000000..fe8abd4 --- /dev/null +++ b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts @@ -0,0 +1,50 @@ +import { Kysely, sql } from "kysely"; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export const up = async (db: Kysely) => { + // file.ts + await db.schema + .alterTable("file") + .alterColumn("encrypted_content_iv", (col) => col.dropNotNull()) + .execute(); + + // upload.ts + await db.schema + .createTable("upload_session") + .addColumn("id", "uuid", (col) => col.primaryKey().defaultTo(sql`gen_random_uuid()`)) + .addColumn("user_id", "integer", (col) => col.references("user.id").notNull()) + .addColumn("total_chunks", "integer", (col) => col.notNull()) + .addColumn("uploaded_chunks", sql`integer[]`, (col) => col.notNull().defaultTo(sql`'{}'`)) + .addColumn("expires_at", "timestamp(3)", (col) => col.notNull()) + .addColumn("parent_id", "integer", (col) => col.references("directory.id")) + .addColumn("master_encryption_key_version", "integer", (col) => col.notNull()) + .addColumn("encrypted_data_encryption_key", "text", (col) => col.notNull()) + 
.addColumn("data_encryption_key_version", "timestamp(3)", (col) => col.notNull()) + .addColumn("hmac_secret_key_version", "integer") + .addColumn("content_type", "text", (col) => col.notNull()) + .addColumn("encrypted_name", "json", (col) => col.notNull()) + .addColumn("encrypted_created_at", "json") + .addColumn("encrypted_last_modified_at", "json", (col) => col.notNull()) + .addForeignKeyConstraint( + "upload_session_fk01", + ["user_id", "master_encryption_key_version"], + "master_encryption_key", + ["user_id", "version"], + ) + .addForeignKeyConstraint( + "upload_session_fk02", + ["user_id", "hmac_secret_key_version"], + "hmac_secret_key", + ["user_id", "version"], + ) + .execute(); +}; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export const down = async (db: Kysely) => { + await db.schema.dropTable("upload_session").execute(); + await db.schema + .alterTable("file") + .alterColumn("encrypted_content_iv", (col) => col.setNotNull()) + .execute(); +}; diff --git a/src/lib/server/db/migrations/index.ts b/src/lib/server/db/migrations/index.ts index f58c2d0..ca3310a 100644 --- a/src/lib/server/db/migrations/index.ts +++ b/src/lib/server/db/migrations/index.ts @@ -1,9 +1,11 @@ import * as Initial1737357000 from "./1737357000-Initial"; import * as AddFileCategory1737422340 from "./1737422340-AddFileCategory"; import * as AddThumbnail1738409340 from "./1738409340-AddThumbnail"; +import * as AddChunkedUpload1768062380 from "./1768062380-AddChunkedUpload"; export default { "1737357000-Initial": Initial1737357000, "1737422340-AddFileCategory": AddFileCategory1737422340, "1738409340-AddThumbnail": AddThumbnail1738409340, + "1768062380-AddChunkedUpload": AddChunkedUpload1768062380, }; diff --git a/src/lib/server/db/schema/file.ts b/src/lib/server/db/schema/file.ts index a1bf9bd..663aacd 100644 --- a/src/lib/server/db/schema/file.ts +++ b/src/lib/server/db/schema/file.ts @@ -30,7 +30,7 @@ interface FileTable { hmac_secret_key_version: number | null; content_hmac: string | null; // Base64 content_type: string; - encrypted_content_iv: string; // Base64 + encrypted_content_iv: string | null; // Base64 encrypted_content_hash: string; // Base64 encrypted_name: Ciphertext; encrypted_created_at: Ciphertext | null; diff --git a/src/lib/server/db/schema/index.ts b/src/lib/server/db/schema/index.ts index 4e427fb..dcc340b 100644 --- a/src/lib/server/db/schema/index.ts +++ b/src/lib/server/db/schema/index.ts @@ -5,6 +5,7 @@ export * from "./hsk"; export * from "./media"; export * from "./mek"; export * from "./session"; +export * from "./upload"; export * from "./user"; export * from "./util"; diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts new file mode 100644 index 0000000..3372955 --- /dev/null +++ b/src/lib/server/db/schema/upload.ts @@ -0,0 +1,26 @@ +import type { Generated } from "kysely"; +import type { Ciphertext } from "./util"; + +interface UploadSessionTable { + id: Generated; + user_id: number; + total_chunks: number; + uploaded_chunks: Generated; + expires_at: Date; + + parent_id: number | null; + master_encryption_key_version: number; + encrypted_data_encryption_key: string; // Base64 + data_encryption_key_version: Date; + hmac_secret_key_version: number | null; + content_type: string; + encrypted_name: Ciphertext; + encrypted_created_at: Ciphertext | null; + encrypted_last_modified_at: Ciphertext; +} + +declare module "./index" { + interface Database { + upload_session: UploadSessionTable; + } +} diff --git a/src/lib/server/db/upload.ts 
b/src/lib/server/db/upload.ts new file mode 100644 index 0000000..935dc80 --- /dev/null +++ b/src/lib/server/db/upload.ts @@ -0,0 +1,122 @@ +import { sql } from "kysely"; +import { IntegrityError } from "./error"; +import db from "./kysely"; +import type { Ciphertext } from "./schema"; + +interface UploadSession { + id: string; + userId: number; + totalChunks: number; + uploadedChunks: number[]; + expiresAt: Date; + + parentId: DirectoryId; + mekVersion: number; + encDek: string; + dekVersion: Date; + hskVersion: number | null; + contentType: string; + encName: Ciphertext; + encCreatedAt: Ciphertext | null; + encLastModifiedAt: Ciphertext; +} + +export const createUploadSession = async (params: Omit) => { + return await db.transaction().execute(async (trx) => { + const mek = await trx + .selectFrom("master_encryption_key") + .select("version") + .where("user_id", "=", params.userId) + .where("state", "=", "active") + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (mek?.version !== params.mekVersion) { + throw new IntegrityError("Inactive MEK version"); + } + + if (params.hskVersion) { + const hsk = await trx + .selectFrom("hmac_secret_key") + .select("version") + .where("user_id", "=", params.userId) + .where("state", "=", "active") + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (hsk?.version !== params.hskVersion) { + throw new IntegrityError("Inactive HSK version"); + } + } + + const { sessionId } = await trx + .insertInto("upload_session") + .values({ + user_id: params.userId, + total_chunks: params.totalChunks, + expires_at: params.expiresAt, + parent_id: params.parentId !== "root" ? params.parentId : null, + master_encryption_key_version: params.mekVersion, + encrypted_data_encryption_key: params.encDek, + data_encryption_key_version: params.dekVersion, + hmac_secret_key_version: params.hskVersion, + content_type: params.contentType, + encrypted_name: params.encName, + encrypted_created_at: params.encCreatedAt, + encrypted_last_modified_at: params.encLastModifiedAt, + }) + .returning("id as sessionId") + .executeTakeFirstOrThrow(); + return { id: sessionId }; + }); +}; + +export const getUploadSession = async (sessionId: string, userId: number) => { + const session = await db + .selectFrom("upload_session") + .selectAll() + .where("id", "=", sessionId) + .where("user_id", "=", userId) + .where("expires_at", ">", new Date()) + .limit(1) + .executeTakeFirst(); + return session + ? ({ + id: session.id, + userId: session.user_id, + totalChunks: session.total_chunks, + uploadedChunks: session.uploaded_chunks, + expiresAt: session.expires_at, + parentId: session.parent_id ?? 
"root", + mekVersion: session.master_encryption_key_version, + encDek: session.encrypted_data_encryption_key, + dekVersion: session.data_encryption_key_version, + hskVersion: session.hmac_secret_key_version, + contentType: session.content_type, + encName: session.encrypted_name, + encCreatedAt: session.encrypted_created_at, + encLastModifiedAt: session.encrypted_last_modified_at, + } satisfies UploadSession) + : null; +}; + +export const markChunkAsUploaded = async (sessionId: string, chunkIndex: number) => { + await db + .updateTable("upload_session") + .set({ uploaded_chunks: sql`array_append(uploaded_chunks, ${chunkIndex})` }) + .where("id", "=", sessionId) + .execute(); +}; + +export const deleteUploadSession = async (trx: typeof db, sessionId: string) => { + await trx.deleteFrom("upload_session").where("id", "=", sessionId).execute(); +}; + +export const cleanupExpiredUploadSessions = async () => { + const sessions = await db + .deleteFrom("upload_session") + .where("expires_at", "<", new Date()) + .returning("id") + .execute(); + return sessions.map(({ id }) => id); +}; diff --git a/src/lib/server/loadenv.ts b/src/lib/server/loadenv.ts index 3a805d8..f8fd68f 100644 --- a/src/lib/server/loadenv.ts +++ b/src/lib/server/loadenv.ts @@ -26,4 +26,5 @@ export default { }, libraryPath: env.LIBRARY_PATH || "library", thumbnailsPath: env.THUMBNAILS_PATH || "thumbnails", + uploadsPath: env.UPLOADS_PATH || "uploads", }; diff --git a/src/lib/server/modules/filesystem.ts b/src/lib/server/modules/filesystem.ts index 65cb9ec..b87fd65 100644 --- a/src/lib/server/modules/filesystem.ts +++ b/src/lib/server/modules/filesystem.ts @@ -1,4 +1,7 @@ import { unlink } from "fs/promises"; +import env from "$lib/server/loadenv"; + +export const getChunkDirectoryPath = (sessionId: string) => `${env.uploadsPath}/${sessionId}`; export const safeUnlink = async (path: string | null | undefined) => { if (path) { diff --git a/src/lib/server/schemas/file.ts b/src/lib/server/schemas/file.ts index 811e590..8ba14e7 100644 --- a/src/lib/server/schemas/file.ts +++ b/src/lib/server/schemas/file.ts @@ -1,36 +1,7 @@ -import mime from "mime"; import { z } from "zod"; -import { directoryIdSchema } from "./directory"; export const fileThumbnailUploadRequest = z.object({ dekVersion: z.iso.datetime(), contentIv: z.base64().nonempty(), }); export type FileThumbnailUploadRequest = z.input; - -export const fileUploadRequest = z.object({ - parent: directoryIdSchema, - mekVersion: z.int().positive(), - dek: z.base64().nonempty(), - dekVersion: z.iso.datetime(), - hskVersion: z.int().positive(), - contentHmac: z.base64().nonempty(), - contentType: z - .string() - .trim() - .nonempty() - .refine((value) => mime.getExtension(value) !== null), // MIME type - contentIv: z.base64().nonempty(), - name: z.base64().nonempty(), - nameIv: z.base64().nonempty(), - createdAt: z.base64().nonempty().optional(), - createdAtIv: z.base64().nonempty().optional(), - lastModifiedAt: z.base64().nonempty(), - lastModifiedAtIv: z.base64().nonempty(), -}); -export type FileUploadRequest = z.input; - -export const fileUploadResponse = z.object({ - file: z.int().positive(), -}); -export type FileUploadResponse = z.output; diff --git a/src/lib/server/services/file.ts b/src/lib/server/services/file.ts index e45b16e..9df6430 100644 --- a/src/lib/server/services/file.ts +++ b/src/lib/server/services/file.ts @@ -6,17 +6,20 @@ import { dirname } from "path"; import { Readable } from "stream"; import { pipeline } from "stream/promises"; import { v4 as uuidv4 } from 
"uuid"; -import { FileRepo, MediaRepo, IntegrityError } from "$lib/server/db"; +import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants"; +import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; import env from "$lib/server/loadenv"; -import { safeUnlink } from "$lib/server/modules/filesystem"; +import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; + +const uploadLocks = new Set(); const createEncContentStream = async ( path: string, - iv: Buffer, + iv?: Buffer, range?: { start?: number; end?: number }, ) => { const { size: fileSize } = await stat(path); - const ivSize = iv.byteLength; + const ivSize = iv?.byteLength ?? 0; const totalSize = fileSize + ivSize; const start = range?.start ?? 0; @@ -30,7 +33,7 @@ const createEncContentStream = async ( Readable.from( (async function* () { if (start < ivSize) { - yield iv.subarray(start, Math.min(end + 1, ivSize)); + yield iv!.subarray(start, Math.min(end + 1, ivSize)); } if (end >= ivSize) { yield* createReadStream(path, { @@ -55,7 +58,11 @@ export const getFileStream = async ( error(404, "Invalid file id"); } - return createEncContentStream(file.path, Buffer.from(file.encContentIv, "base64"), range); + return createEncContentStream( + file.path, + file.encContentIv ? Buffer.from(file.encContentIv, "base64") : undefined, + range, + ); }; export const getFileThumbnailStream = async ( @@ -110,56 +117,70 @@ export const uploadFileThumbnail = async ( } }; -export const uploadFile = async ( - params: Omit, - encContentStream: Readable, - encContentHash: Promise, +export const uploadChunk = async ( + userId: number, + sessionId: string, + chunkIndex: number, + encChunkStream: Readable, + encChunkHash: string, ) => { - const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000); - const oneMinuteLater = new Date(Date.now() + 60 * 1000); - if (params.dekVersion <= oneDayAgo || params.dekVersion >= oneMinuteLater) { - error(400, "Invalid DEK version"); + const lockKey = `${sessionId}/${chunkIndex}`; + if (uploadLocks.has(lockKey)) { + error(409, "Chunk already uploaded"); // TODO: Message + } else { + uploadLocks.add(lockKey); } - const path = `${env.libraryPath}/${params.userId}/${uuidv4()}`; - await mkdir(dirname(path), { recursive: true }); + const filePath = `${getChunkDirectoryPath(sessionId)}/${chunkIndex}`; try { - const hashStream = createHash("sha256"); - const [, hash] = await Promise.all([ - pipeline( - encContentStream, - async function* (source) { - for await (const chunk of source) { - hashStream.update(chunk); - yield chunk; - } - }, - createWriteStream(path, { flags: "wx", mode: 0o600 }), - ), - encContentHash, - ]); - if (hashStream.digest("base64") !== hash) { - throw new Error("Invalid checksum"); + const session = await UploadRepo.getUploadSession(sessionId, userId); + if (!session) { + error(404, "Invalid upload id"); + } else if (chunkIndex >= session.totalChunks) { + error(400, "Invalid chunk index"); + } else if (session.uploadedChunks.includes(chunkIndex)) { + error(409, "Chunk already uploaded"); } - const { id: fileId } = await FileRepo.registerFile({ - ...params, - path, - encContentHash: hash, - }); - return { fileId }; - } catch (e) { - await safeUnlink(path); + const isLastChunk = chunkIndex === session.totalChunks - 1; - if (e instanceof IntegrityError && e.message === "Inactive MEK version") { - error(400, "Invalid MEK version"); + let writtenBytes = 0; + const hashStream = createHash("sha256"); + const writeStream = createWriteStream(filePath, { flags: 
"wx", mode: 0o600 }); + + for await (const chunk of encChunkStream) { + writtenBytes += chunk.length; + hashStream.update(chunk); + writeStream.write(chunk); + } + + await new Promise((resolve, reject) => { + writeStream.end((e: any) => (e ? reject(e) : resolve())); + }); + + if (hashStream.digest("base64") !== encChunkHash) { + throw new Error("Invalid checksum"); } else if ( + (!isLastChunk && writtenBytes !== CHUNK_SIZE + ENCRYPTION_OVERHEAD) || + (isLastChunk && + (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > CHUNK_SIZE + ENCRYPTION_OVERHEAD)) + ) { + throw new Error("Invalid chunk size"); + } + + await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex); + } catch (e) { + await safeUnlink(filePath); + + if ( e instanceof Error && - (e.message === "Invalid request body" || e.message === "Invalid checksum") + (e.message === "Invalid checksum" || e.message === "Invalid chunk size") ) { error(400, "Invalid request body"); } throw e; + } finally { + uploadLocks.delete(lockKey); } }; diff --git a/src/lib/services/file.ts b/src/lib/services/file.ts index a0e769b..5f95f42 100644 --- a/src/lib/services/file.ts +++ b/src/lib/services/file.ts @@ -1,4 +1,5 @@ import { getAllFileInfos } from "$lib/indexedDB/filesystem"; +import { encodeToBase64 } from "$lib/modules/crypto"; import { getFileCache, storeFileCache, @@ -9,11 +10,15 @@ import { import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; import { trpc } from "$trpc/client"; -export const requestFileDownload = async (fileId: number, dataKey: CryptoKey) => { +export const requestFileDownload = async ( + fileId: number, + dataKey: CryptoKey, + isLegacy: boolean, +) => { const cache = await getFileCache(fileId); if (cache) return cache; - const fileBuffer = await downloadFile(fileId, dataKey); + const fileBuffer = await downloadFile(fileId, dataKey, isLegacy); storeFileCache(fileId, fileBuffer); // Intended return fileBuffer; }; @@ -21,14 +26,14 @@ export const requestFileDownload = async (fileId: number, dataKey: CryptoKey) => export const requestFileThumbnailUpload = async ( fileId: number, dataKeyVersion: Date, - thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: string }, + thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer }, ) => { const form = new FormData(); form.set( "metadata", JSON.stringify({ dekVersion: dataKeyVersion.toISOString(), - contentIv: thumbnailEncrypted.iv, + contentIv: encodeToBase64(thumbnailEncrypted.iv), } satisfies FileThumbnailUploadRequest), ); form.set("content", new Blob([thumbnailEncrypted.ciphertext])); diff --git a/src/routes/(fullscreen)/file/[id]/+page.svelte b/src/routes/(fullscreen)/file/[id]/+page.svelte index f325c5e..4aa6b42 100644 --- a/src/routes/(fullscreen)/file/[id]/+page.svelte +++ b/src/routes/(fullscreen)/file/[id]/+page.svelte @@ -95,7 +95,7 @@ untrack(() => { if (!downloadState && !isDownloadRequested) { isDownloadRequested = true; - requestFileDownload(data.id, info!.dataKey!.key).then(async (buffer) => { + requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then(async (buffer) => { const blob = await updateViewer(buffer, contentType); if (!viewerType) { FileSaver.saveAs(blob, info!.name); diff --git a/src/routes/(fullscreen)/settings/thumbnail/service.ts b/src/routes/(fullscreen)/settings/thumbnail/service.ts index 75c64b8..314cf5a 100644 --- a/src/routes/(fullscreen)/settings/thumbnail/service.ts +++ b/src/routes/(fullscreen)/settings/thumbnail/service.ts @@ -50,7 +50,7 @@ const requestThumbnailUpload = limitFunction( async ( fileId: 
number, dataKeyVersion: Date, - thumbnail: { plaintext: ArrayBuffer; ciphertext: ArrayBuffer; iv: string }, + thumbnail: { plaintext: ArrayBuffer; ciphertext: ArrayBuffer; iv: ArrayBuffer }, ) => { statuses.set(fileId, "uploading"); @@ -77,7 +77,7 @@ export const requestThumbnailGeneration = async (fileInfo: FileInfo) => { await scheduler.schedule( async () => { statuses.set(fileInfo.id, "generation-pending"); - file = await requestFileDownload(fileInfo.id, fileInfo.dataKey?.key!); + file = await requestFileDownload(fileInfo.id, fileInfo.dataKey?.key!, fileInfo.isLegacy!); return file.byteLength; }, async () => {
diff --git a/src/routes/api/file/upload/+server.ts b/src/routes/api/file/upload/+server.ts deleted file mode 100644 index f9cbd53..0000000 --- a/src/routes/api/file/upload/+server.ts +++ /dev/null @@ -1,108 +0,0 @@ -import Busboy from "@fastify/busboy"; -import { error, json } from "@sveltejs/kit"; -import { Readable, Writable } from "stream"; -import { authorize } from "$lib/server/modules/auth"; -import { - fileUploadRequest, - fileUploadResponse, - type FileUploadResponse, -} from "$lib/server/schemas"; -import { uploadFile } from "$lib/server/services/file"; -import type { RequestHandler } from "./$types"; - -type FileMetadata = Parameters<typeof uploadFile>[0]; - -const parseFileMetadata = (userId: number, json: string) => { - const zodRes = fileUploadRequest.safeParse(JSON.parse(json)); - if (!zodRes.success) error(400, "Invalid request body"); - const { - parent, - mekVersion, - dek, - dekVersion, - hskVersion, - contentHmac, - contentType, - contentIv, - name, - nameIv, - createdAt, - createdAtIv, - lastModifiedAt, - lastModifiedAtIv, - } = zodRes.data; - if ((createdAt && !createdAtIv) || (!createdAt && createdAtIv)) - error(400, "Invalid request body"); - - return { - userId, - parentId: parent, - mekVersion, - encDek: dek, - dekVersion: new Date(dekVersion), - hskVersion, - contentHmac, - contentType, - encContentIv: contentIv, - encName: { ciphertext: name, iv: nameIv }, - encCreatedAt: createdAt && createdAtIv ? { ciphertext: createdAt, iv: createdAtIv } : null, - encLastModifiedAt: { ciphertext: lastModifiedAt, iv: lastModifiedAtIv }, - } satisfies FileMetadata; -}; - -export const POST: RequestHandler = async ({ locals, request }) => { - const { userId } = await authorize(locals, "activeClient"); - - const contentType = request.headers.get("Content-Type"); - if (!contentType?.startsWith("multipart/form-data") || !request.body) { - error(400, "Invalid request body"); - } - - return new Promise<Response>((resolve, reject) => { - const bb = Busboy({ headers: { "content-type": contentType } }); - const handler = - <T extends unknown[]>(f: (...args: T) => Promise<void>) => - (...args: T) => { - f(...args).catch(reject); - }; - - let metadata: FileMetadata | null = null; - let content: Readable | null = null; - const checksum = new Promise<string>((resolveChecksum, rejectChecksum) => { - bb.on( - "field", - handler(async (fieldname, val) => { - if (fieldname === "metadata") { - // Ignore subsequent metadata fields - if (!metadata) { - metadata = parseFileMetadata(userId, val); - } - } else if (fieldname === "checksum") { - // Ignore subsequent checksum fields - resolveChecksum(val); - } else { - error(400, "Invalid request body"); - } - }), - ); - bb.on( - "file", - handler(async (fieldname, file) => { - if (fieldname !== "content") error(400, "Invalid request body"); - if (!metadata || content) error(400, "Invalid request body"); - content = file; - - const { fileId } = await uploadFile(metadata, content, checksum); - resolve(json(fileUploadResponse.parse({ file: fileId } satisfies FileUploadResponse))); - }), - ); - bb.on("finish", () => rejectChecksum(new Error("Invalid request body"))); - bb.on("error", (e) => { - content?.emit("error", e) ?? reject(e); - rejectChecksum(e); - }); - }); - - request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error - }); -};
diff --git a/src/routes/api/file/upload/[id]/chunks/[index]/+server.ts b/src/routes/api/file/upload/[id]/chunks/[index]/+server.ts new file mode 100644 index 0000000..c44e425 --- /dev/null +++ b/src/routes/api/file/upload/[id]/chunks/[index]/+server.ts @@ -0,0 +1,43 @@ +import { error, text } from "@sveltejs/kit"; +import { Readable } from "stream"; +import { z } from "zod"; +import { authorize } from "$lib/server/modules/auth"; +import { uploadChunk } from "$lib/server/services/file"; +import type { RequestHandler } from "./$types"; + +export const POST: RequestHandler = async ({ locals, params, request }) => { + const { userId } = await authorize(locals, "activeClient"); + + const zodRes = z + .object({ + id: z.uuidv4(), + index: z.coerce.number().int().nonnegative(), + }) + .safeParse(params); + if (!zodRes.success) error(400, "Invalid path parameters"); + const { id: uploadId, index: chunkIndex } = zodRes.data; + + // Parse Content-Digest header (RFC 9530) + // Expected format: sha-256=:base64hash: + const contentDigest = request.headers.get("Content-Digest"); + if (!contentDigest) error(400, "Missing Content-Digest header"); + + const digestMatch = contentDigest.match(/^sha-256=:([A-Za-z0-9+/=]+):$/); + if (!digestMatch || !digestMatch[1]) + error(400, "Invalid Content-Digest format, must be sha-256=:base64:"); + const encChunkHash = digestMatch[1]; + + const contentType = request.headers.get("Content-Type"); + if (contentType !== "application/octet-stream" || !request.body) { + error(400, "Invalid request body"); + } + + // Convert web ReadableStream to Node Readable + const nodeReadable = Readable.fromWeb( + request.body as unknown as Parameters<typeof Readable.fromWeb>[0],
); + + await uploadChunk(userId, uploadId, chunkIndex, nodeReadable, encChunkHash); + + return text("Chunk uploaded", { headers: { "Content-Type": "text/plain" } }); +}; diff --git a/src/trpc/routers/file.ts b/src/trpc/routers/file.ts index a56a91f..eaf42ca 100644 --- a/src/trpc/routers/file.ts +++ b/src/trpc/routers/file.ts @@ -1,9 +1,20 @@ import { TRPCError } from "@trpc/server"; +import { createHash } from "crypto"; +import { createReadStream, createWriteStream } from "fs"; +import { mkdir, rm } from "fs/promises"; +import mime from "mime"; +import { dirname } from "path"; +import { v4 as uuidv4 } from "uuid"; import { z } from "zod"; -import { FileRepo, MediaRepo, IntegrityError } from "$lib/server/db"; -import { safeUnlink } from "$lib/server/modules/filesystem"; +import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; +import db from "$lib/server/db/kysely"; +import env from "$lib/server/loadenv"; +import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; +import { directoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; +const uploadLocks = new Set(); + const fileRouter = router({ get: roleProcedure["activeClient"] .input( @@ -19,6 +30,7 @@ const fileRouter = router({ const categories = await FileRepo.getAllFileCategories(input.id); return { + isLegacy: !!file.encContentIv, parent: file.parentId, mekVersion: file.mekVersion, dek: file.encDek, @@ -52,6 +64,7 @@ const fileRouter = router({ const files = await FileRepo.getFilesWithCategories(ctx.session.userId, input.ids); return files.map((file) => ({ id: file.id, + isLegacy: !!file.encContentIv, parent: file.parentId, mekVersion: file.mekVersion, dek: file.encDek, @@ -158,6 +171,137 @@ const fileRouter = router({ return { updatedAt: thumbnail.updatedAt }; }), + + startUpload: roleProcedure["activeClient"] + .input( + z.object({ + chunks: z.int().positive(), + parent: directoryIdSchema, + mekVersion: z.int().positive(), + dek: z.base64().nonempty(), + dekVersion: z.date(), + hskVersion: z.int().positive().optional(), + contentType: z + .string() + .trim() + .nonempty() + .refine((value) => mime.getExtension(value) !== null), + name: z.base64().nonempty(), + nameIv: z.base64().nonempty(), + createdAt: z.base64().nonempty().optional(), + createdAtIv: z.base64().nonempty().optional(), + lastModifiedAt: z.base64().nonempty(), + lastModifiedAtIv: z.base64().nonempty(), + }), + ) + .mutation(async ({ ctx, input }) => { + const oneMinuteAgo = new Date(Date.now() - 60 * 1000); + const oneMinuteLater = new Date(Date.now() + 60 * 1000); + if (input.dekVersion <= oneMinuteAgo || input.dekVersion >= oneMinuteLater) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" }); + } + + try { + const { id: sessionId } = await UploadRepo.createUploadSession({ + userId: ctx.session.userId, + totalChunks: input.chunks, + expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours + parentId: input.parent, + mekVersion: input.mekVersion, + encDek: input.dek, + dekVersion: input.dekVersion, + hskVersion: input.hskVersion ?? null, + contentType: input.contentType, + encName: { ciphertext: input.name, iv: input.nameIv }, + encCreatedAt: + input.createdAt && input.createdAtIv + ? 
{ ciphertext: input.createdAt, iv: input.createdAtIv } + : null, + encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv }, + }); + await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); + return { uploadId: sessionId }; + } catch (e) { + if (e instanceof IntegrityError) { + if (e.message === "Inactive MEK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" }); + } else if (e.message === "Inactive HSK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid HSK version" }); + } + } + throw e; + } + }), + + completeUpload: roleProcedure["activeClient"] + .input( + z.object({ + uploadId: z.uuidv4(), + contentHmac: z.base64().nonempty().optional(), + }), + ) + .mutation(async ({ ctx, input }) => { + const { uploadId } = input; + if (uploadLocks.has(uploadId)) { + throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); // TODO: Message + } else { + uploadLocks.add(uploadId); + } + + const filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`; + await mkdir(dirname(filePath), { recursive: true }); + + try { + const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); + if (!session) { + throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); + } else if ( + (session.hskVersion && !input.contentHmac) || + (!session.hskVersion && input.contentHmac) + ) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content hmac" }); // TODO: message + } else if (session.uploadedChunks.length < session.totalChunks) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); // TODO: Message + } + + const chunkDirectoryPath = getChunkDirectoryPath(uploadId); + const hashStream = createHash("sha256"); + const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); + + for (let i = 0; i < session.totalChunks; i++) { + for await (const chunk of createReadStream(`${chunkDirectoryPath}/${i}`)) { + hashStream.update(chunk); + writeStream.write(chunk); + } + } + + await new Promise((resolve, reject) => { + writeStream.end((e: any) => (e ? reject(e) : resolve())); + }); + + const hash = hashStream.digest("base64"); + const fileId = await db.transaction().execute(async (trx) => { + const { id: fileId } = await FileRepo.registerFile(trx, { + ...session, + userId: ctx.session.userId, + path: filePath, + contentHmac: input.contentHmac ?? 
null, + encContentHash: hash, + encContentIv: null, + }); + await UploadRepo.deleteUploadSession(trx, uploadId); + return fileId; + }); + + await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e)); + return { file: fileId }; + } catch (e) { + await safeUnlink(filePath); + throw e; + } finally { + uploadLocks.delete(uploadId); + } + }), }); export default fileRouter;
From 0c295a2ffa275f4cf47f451fd6de0a21a0ad7020 Mon Sep 17 00:00:00 2001 From: static Date: Sun, 11 Jan 2026 09:06:49 +0900 Subject: Implement streaming file decryption using a Service Worker MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit
--- src/hooks.client.ts | 2 - src/lib/constants/index.ts | 1 + src/lib/constants/serviceWorker.ts | 1 + src/lib/constants/upload.ts | 5 +- src/lib/modules/crypto/aes.ts | 22 ++-- src/lib/modules/crypto/rsa.ts | 32 ++--- src/lib/modules/crypto/sha.ts | 6 +- src/lib/{server => }/modules/http.ts | 0 src/lib/modules/opfs.ts | 27 ++-- src/lib/serviceWorker/client.ts | 39 ++++++ src/lib/serviceWorker/index.ts | 2 + src/lib/serviceWorker/types.ts | 19 +++ .../(fullscreen)/file/[id]/+page.svelte | 34 +++-- src/routes/(fullscreen)/file/[id]/service.ts | 21 ++++ src/routes/api/file/[id]/download/+server.ts | 2 +- .../file/[id]/thumbnail/download/+server.ts | 2 +- src/service-worker/handlers/decryptFile.ts | 117 ++++++++++++++++++ src/service-worker/handlers/index.ts | 1 + src/service-worker/index.ts | 43 +++++++ src/service-worker/modules/constants.ts | 1 + src/service-worker/modules/crypto.ts | 40 ++++++ src/service-worker/modules/http.ts | 1 + src/service-worker/modules/opfs.ts | 1 + src/service-worker/stores.ts | 3 + src/service-worker/types.ts | 1 + 25 files changed, 359 insertions(+), 64 deletions(-) create mode 100644 src/lib/constants/serviceWorker.ts rename src/lib/{server => }/modules/http.ts (100%) create mode 100644 src/lib/serviceWorker/client.ts create mode 100644 src/lib/serviceWorker/index.ts create mode 100644 src/lib/serviceWorker/types.ts create mode 100644 src/service-worker/handlers/decryptFile.ts create mode 100644 src/service-worker/handlers/index.ts create mode 100644 src/service-worker/index.ts create mode 100644 src/service-worker/modules/constants.ts create mode 100644 src/service-worker/modules/crypto.ts create mode 100644 src/service-worker/modules/http.ts create mode 100644 src/service-worker/modules/opfs.ts create mode 100644 src/service-worker/stores.ts create mode 100644 src/service-worker/types.ts
diff --git a/src/hooks.client.ts b/src/hooks.client.ts index 99e11c9..a677d9f 100644 --- a/src/hooks.client.ts +++ b/src/hooks.client.ts @@ -1,7 +1,6 @@ import type { ClientInit } from "@sveltejs/kit"; import { cleanupDanglingInfos, getClientKey, getMasterKeys, getHmacSecrets } from "$lib/indexedDB"; import { prepareFileCache } from "$lib/modules/file"; -import { prepareOpfs } from "$lib/modules/opfs"; import { clientKeyStore, masterKeyStore, hmacSecretStore } from "$lib/stores"; const requestPersistentStorage = async () => { @@ -46,7 +45,6 @@ export const init: ClientInit = async () => { prepareClientKeyStore(), prepareMasterKeyStore(), prepareHmacSecretStore(), - prepareOpfs(), ]); cleanupDanglingInfos(); // Intended
diff --git a/src/lib/constants/index.ts b/src/lib/constants/index.ts index ab6125a..4983846 100644 ---
a/src/lib/constants/index.ts +++ b/src/lib/constants/index.ts @@ -1 +1,2 @@ +export * from "./serviceWorker"; export * from "./upload"; diff --git a/src/lib/constants/serviceWorker.ts b/src/lib/constants/serviceWorker.ts new file mode 100644 index 0000000..8c09d05 --- /dev/null +++ b/src/lib/constants/serviceWorker.ts @@ -0,0 +1 @@ +export const DECRYPTED_FILE_URL_PREFIX = "/_internal/decrypted-file/"; diff --git a/src/lib/constants/upload.ts b/src/lib/constants/upload.ts index 337700d..99d94bb 100644 --- a/src/lib/constants/upload.ts +++ b/src/lib/constants/upload.ts @@ -1,5 +1,6 @@ -export const CHUNK_SIZE = 4 * 1024 * 1024; - export const AES_GCM_IV_SIZE = 12; export const AES_GCM_TAG_SIZE = 16; export const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE; + +export const CHUNK_SIZE = 4 * 1024 * 1024; +export const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD; diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts index 67f6a9f..fe11afb 100644 --- a/src/lib/modules/crypto/aes.ts +++ b/src/lib/modules/crypto/aes.ts @@ -9,7 +9,7 @@ import { export const generateMasterKey = async () => { return { - masterKey: await window.crypto.subtle.generateKey( + masterKey: await crypto.subtle.generateKey( { name: "AES-KW", length: 256, @@ -22,7 +22,7 @@ export const generateMasterKey = async () => { export const generateDataKey = async () => { return { - dataKey: await window.crypto.subtle.generateKey( + dataKey: await crypto.subtle.generateKey( { name: "AES-GCM", length: 256, @@ -35,9 +35,9 @@ export const generateDataKey = async () => { }; export const makeAESKeyNonextractable = async (key: CryptoKey) => { - return await window.crypto.subtle.importKey( + return await crypto.subtle.importKey( "raw", - await window.crypto.subtle.exportKey("raw", key), + await crypto.subtle.exportKey("raw", key), key.algorithm, false, key.usages, @@ -45,12 +45,12 @@ export const makeAESKeyNonextractable = async (key: CryptoKey) => { }; export const wrapDataKey = async (dataKey: CryptoKey, masterKey: CryptoKey) => { - return encodeToBase64(await window.crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW")); + return encodeToBase64(await crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW")); }; export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey) => { return { - dataKey: await window.crypto.subtle.unwrapKey( + dataKey: await crypto.subtle.unwrapKey( "raw", decodeFromBase64(dataKeyWrapped), masterKey, @@ -63,12 +63,12 @@ export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey }; export const wrapHmacSecret = async (hmacSecret: CryptoKey, masterKey: CryptoKey) => { - return encodeToBase64(await window.crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW")); + return encodeToBase64(await crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW")); }; export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: CryptoKey) => { return { - hmacSecret: await window.crypto.subtle.unwrapKey( + hmacSecret: await crypto.subtle.unwrapKey( "raw", decodeFromBase64(hmacSecretWrapped), masterKey, @@ -84,8 +84,8 @@ export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: Cry }; export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => { - const iv = window.crypto.getRandomValues(new Uint8Array(12)); - const ciphertext = await window.crypto.subtle.encrypt( + const iv = crypto.getRandomValues(new Uint8Array(12)); + const ciphertext = await crypto.subtle.encrypt( { name: 
"AES-GCM", iv, @@ -101,7 +101,7 @@ export const decryptData = async ( iv: string | BufferSource, dataKey: CryptoKey, ) => { - return await window.crypto.subtle.decrypt( + return await crypto.subtle.decrypt( { name: "AES-GCM", iv: typeof iv === "string" ? decodeFromBase64(iv) : iv, diff --git a/src/lib/modules/crypto/rsa.ts b/src/lib/modules/crypto/rsa.ts index 13dfd46..11e136f 100644 --- a/src/lib/modules/crypto/rsa.ts +++ b/src/lib/modules/crypto/rsa.ts @@ -1,7 +1,7 @@ import { encodeString, encodeToBase64, decodeFromBase64 } from "./util"; export const generateEncryptionKeyPair = async () => { - const keyPair = await window.crypto.subtle.generateKey( + const keyPair = await crypto.subtle.generateKey( { name: "RSA-OAEP", modulusLength: 4096, @@ -18,7 +18,7 @@ export const generateEncryptionKeyPair = async () => { }; export const generateSigningKeyPair = async () => { - const keyPair = await window.crypto.subtle.generateKey( + const keyPair = await crypto.subtle.generateKey( { name: "RSA-PSS", modulusLength: 4096, @@ -37,7 +37,7 @@ export const generateSigningKeyPair = async () => { export const exportRSAKey = async (key: CryptoKey) => { const format = key.type === "public" ? ("spki" as const) : ("pkcs8" as const); return { - key: await window.crypto.subtle.exportKey(format, key), + key: await crypto.subtle.exportKey(format, key), format, }; }; @@ -54,14 +54,14 @@ export const importEncryptionKeyPairFromBase64 = async ( name: "RSA-OAEP", hash: "SHA-256", }; - const encryptKey = await window.crypto.subtle.importKey( + const encryptKey = await crypto.subtle.importKey( "spki", decodeFromBase64(encryptKeyBase64), algorithm, true, ["encrypt", "wrapKey"], ); - const decryptKey = await window.crypto.subtle.importKey( + const decryptKey = await crypto.subtle.importKey( "pkcs8", decodeFromBase64(decryptKeyBase64), algorithm, @@ -79,14 +79,14 @@ export const importSigningKeyPairFromBase64 = async ( name: "RSA-PSS", hash: "SHA-256", }; - const signKey = await window.crypto.subtle.importKey( + const signKey = await crypto.subtle.importKey( "pkcs8", decodeFromBase64(signKeyBase64), algorithm, true, ["sign"], ); - const verifyKey = await window.crypto.subtle.importKey( + const verifyKey = await crypto.subtle.importKey( "spki", decodeFromBase64(verifyKeyBase64), algorithm, @@ -98,17 +98,11 @@ export const importSigningKeyPairFromBase64 = async ( export const makeRSAKeyNonextractable = async (key: CryptoKey) => { const { key: exportedKey, format } = await exportRSAKey(key); - return await window.crypto.subtle.importKey( - format, - exportedKey, - key.algorithm, - false, - key.usages, - ); + return await crypto.subtle.importKey(format, exportedKey, key.algorithm, false, key.usages); }; export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey) => { - return await window.crypto.subtle.decrypt( + return await crypto.subtle.decrypt( { name: "RSA-OAEP", } satisfies RsaOaepParams, @@ -119,7 +113,7 @@ export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey) export const wrapMasterKey = async (masterKey: CryptoKey, encryptKey: CryptoKey) => { return encodeToBase64( - await window.crypto.subtle.wrapKey("raw", masterKey, encryptKey, { + await crypto.subtle.wrapKey("raw", masterKey, encryptKey, { name: "RSA-OAEP", } satisfies RsaOaepParams), ); @@ -131,7 +125,7 @@ export const unwrapMasterKey = async ( extractable = false, ) => { return { - masterKey: await window.crypto.subtle.unwrapKey( + masterKey: await crypto.subtle.unwrapKey( "raw", 
decodeFromBase64(masterKeyWrapped), decryptKey, @@ -146,7 +140,7 @@ export const unwrapMasterKey = async ( }; export const signMessageRSA = async (message: BufferSource, signKey: CryptoKey) => { - return await window.crypto.subtle.sign( + return await crypto.subtle.sign( { name: "RSA-PSS", saltLength: 32, // SHA-256 @@ -161,7 +155,7 @@ export const verifySignatureRSA = async ( signature: BufferSource, verifyKey: CryptoKey, ) => { - return await window.crypto.subtle.verify( + return await crypto.subtle.verify( { name: "RSA-PSS", saltLength: 32, // SHA-256 diff --git a/src/lib/modules/crypto/sha.ts b/src/lib/modules/crypto/sha.ts index 3acb258..9bf2dea 100644 --- a/src/lib/modules/crypto/sha.ts +++ b/src/lib/modules/crypto/sha.ts @@ -1,10 +1,10 @@ export const digestMessage = async (message: BufferSource) => { - return await window.crypto.subtle.digest("SHA-256", message); + return await crypto.subtle.digest("SHA-256", message); }; export const generateHmacSecret = async () => { return { - hmacSecret: await window.crypto.subtle.generateKey( + hmacSecret: await crypto.subtle.generateKey( { name: "HMAC", hash: "SHA-256", @@ -16,5 +16,5 @@ export const generateHmacSecret = async () => { }; export const signMessageHmac = async (message: BufferSource, hmacSecret: CryptoKey) => { - return await window.crypto.subtle.sign("HMAC", hmacSecret, message); + return await crypto.subtle.sign("HMAC", hmacSecret, message); }; diff --git a/src/lib/server/modules/http.ts b/src/lib/modules/http.ts similarity index 100% rename from src/lib/server/modules/http.ts rename to src/lib/modules/http.ts diff --git a/src/lib/modules/opfs.ts b/src/lib/modules/opfs.ts index 41f1f72..a367aae 100644 --- a/src/lib/modules/opfs.ts +++ b/src/lib/modules/opfs.ts @@ -1,13 +1,5 @@ -let rootHandle: FileSystemDirectoryHandle | null = null; - -export const prepareOpfs = async () => { - rootHandle = await navigator.storage.getDirectory(); -}; - const getFileHandle = async (path: string, create = true) => { - if (!rootHandle) { - throw new Error("OPFS not prepared"); - } else if (path[0] !== "/") { + if (path[0] !== "/") { throw new Error("Path must be absolute"); } @@ -17,7 +9,7 @@ const getFileHandle = async (path: string, create = true) => { } try { - let directoryHandle = rootHandle; + let directoryHandle = await navigator.storage.getDirectory(); for (const part of parts.slice(0, -1)) { if (!part) continue; directoryHandle = await directoryHandle.getDirectoryHandle(part, { create }); @@ -34,12 +26,15 @@ const getFileHandle = async (path: string, create = true) => { } }; -export const readFile = async (path: string) => { +export const getFile = async (path: string) => { const { fileHandle } = await getFileHandle(path, false); if (!fileHandle) return null; - const file = await fileHandle.getFile(); - return await file.arrayBuffer(); + return await fileHandle.getFile(); +}; + +export const readFile = async (path: string) => { + return (await getFile(path))?.arrayBuffer() ?? 
null; }; export const writeFile = async (path: string, data: ArrayBuffer) => { @@ -61,9 +56,7 @@ export const deleteFile = async (path: string) => { }; const getDirectoryHandle = async (path: string) => { - if (!rootHandle) { - throw new Error("OPFS not prepared"); - } else if (path[0] !== "/") { + if (path[0] !== "/") { throw new Error("Path must be absolute"); } @@ -73,7 +66,7 @@ const getDirectoryHandle = async (path: string) => { } try { - let directoryHandle = rootHandle; + let directoryHandle = await navigator.storage.getDirectory(); let parentHandle; for (const part of parts.slice(1)) { if (!part) continue; diff --git a/src/lib/serviceWorker/client.ts b/src/lib/serviceWorker/client.ts new file mode 100644 index 0000000..771c15e --- /dev/null +++ b/src/lib/serviceWorker/client.ts @@ -0,0 +1,39 @@ +import { DECRYPTED_FILE_URL_PREFIX } from "$lib/constants"; +import type { FileMetadata, ServiceWorkerMessage, ServiceWorkerResponse } from "./types"; + +const PREPARE_TIMEOUT_MS = 5000; + +const getServiceWorker = async () => { + const registration = await navigator.serviceWorker.ready; + const sw = registration.active; + if (!sw) { + throw new Error("Service worker not activated"); + } + return sw; +}; + +export const prepareFileDecryption = async (id: number, metadata: FileMetadata) => { + const sw = await getServiceWorker(); + return new Promise((resolve, reject) => { + const timeout = setTimeout( + () => reject(new Error("Service worker timeout")), + PREPARE_TIMEOUT_MS, + ); + const handler = (event: MessageEvent) => { + if (event.data.type === "decryption-ready" && event.data.fileId === id) { + clearTimeout(timeout); + navigator.serviceWorker.removeEventListener("message", handler); + resolve(); + } + }; + navigator.serviceWorker.addEventListener("message", handler); + + sw.postMessage({ + type: "decryption-prepare", + fileId: id, + ...metadata, + } satisfies ServiceWorkerMessage); + }); +}; + +export const getDecryptedFileUrl = (id: number) => `${DECRYPTED_FILE_URL_PREFIX}${id}`; diff --git a/src/lib/serviceWorker/index.ts b/src/lib/serviceWorker/index.ts new file mode 100644 index 0000000..d2ec230 --- /dev/null +++ b/src/lib/serviceWorker/index.ts @@ -0,0 +1,2 @@ +export * from "./client"; +export * from "./types"; diff --git a/src/lib/serviceWorker/types.ts b/src/lib/serviceWorker/types.ts new file mode 100644 index 0000000..97edd6d --- /dev/null +++ b/src/lib/serviceWorker/types.ts @@ -0,0 +1,19 @@ +export interface FileMetadata { + isLegacy: boolean; + dataKey: CryptoKey; + encContentSize: number; + contentType: string; +} + +export interface DecryptionPrepareMessage extends FileMetadata { + type: "decryption-prepare"; + fileId: number; +} + +export interface DecryptionReadyMessage { + type: "decryption-ready"; + fileId: number; +} + +export type ServiceWorkerMessage = DecryptionPrepareMessage; +export type ServiceWorkerResponse = DecryptionReadyMessage; diff --git a/src/routes/(fullscreen)/file/[id]/+page.svelte b/src/routes/(fullscreen)/file/[id]/+page.svelte index 4aa6b42..674bc22 100644 --- a/src/routes/(fullscreen)/file/[id]/+page.svelte +++ b/src/routes/(fullscreen)/file/[id]/+page.svelte @@ -17,6 +17,7 @@ requestFileDownload, requestThumbnailUpload, requestFileAdditionToCategory, + requestVideoStream, } from "./service"; import TopBarMenu from "./TopBarMenu.svelte"; @@ -37,6 +38,7 @@ let viewerType: "image" | "video" | undefined = $state(); let fileBlob: Blob | undefined = $state(); let fileBlobUrl: string | undefined = $state(); + let videoStreamUrl: string | undefined = 
$state(); let videoElement: HTMLVideoElement | undefined = $state(); const updateViewer = async (buffer: ArrayBuffer, contentType: string) => { @@ -95,12 +97,27 @@ untrack(() => { if (!downloadState && !isDownloadRequested) { isDownloadRequested = true; - requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then(async (buffer) => { - const blob = await updateViewer(buffer, contentType); - if (!viewerType) { - FileSaver.saveAs(blob, info!.name); - } - }); + + if (viewerType === "video" && !info!.isLegacy) { + requestVideoStream(data.id, info!.dataKey!.key, contentType).then((streamUrl) => { + if (streamUrl) { + videoStreamUrl = streamUrl; + } else { + requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then((buffer) => + updateViewer(buffer, contentType), + ); + } + }); + } else { + requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then( + async (buffer) => { + const blob = await updateViewer(buffer, contentType); + if (!viewerType) { + FileSaver.saveAs(blob, info!.name); + } + }, + ); + } } }); } @@ -159,9 +176,10 @@ {@render viewerLoading("이미지를 불러오고 있어요.")} {/if} {:else if viewerType === "video"} - {#if fileBlobUrl} + {#if videoStreamUrl || fileBlobUrl}
- + updateThumbnail(info?.dataKey?.key!, info?.dataKey?.version!)} diff --git a/src/routes/(fullscreen)/file/[id]/service.ts b/src/routes/(fullscreen)/file/[id]/service.ts index 09ec86f..ea3e49c 100644 --- a/src/routes/(fullscreen)/file/[id]/service.ts +++ b/src/routes/(fullscreen)/file/[id]/service.ts @@ -1,11 +1,32 @@ import { encryptData } from "$lib/modules/crypto"; import { storeFileThumbnailCache } from "$lib/modules/file"; +import { prepareFileDecryption, getDecryptedFileUrl } from "$lib/serviceWorker"; import { requestFileThumbnailUpload } from "$lib/services/file"; import { trpc } from "$trpc/client"; export { requestCategoryCreation, requestFileRemovalFromCategory } from "$lib/services/category"; export { requestFileDownload } from "$lib/services/file"; +export const requestVideoStream = async ( + fileId: number, + dataKey: CryptoKey, + contentType: string, +) => { + const res = await fetch(`/api/file/${fileId}/download`, { method: "HEAD" }); + if (!res.ok) return null; + + const encContentSize = parseInt(res.headers.get("Content-Length") ?? "0", 10); + if (encContentSize <= 0) return null; + + try { + await prepareFileDecryption(fileId, { isLegacy: false, dataKey, encContentSize, contentType }); + return getDecryptedFileUrl(fileId); + } catch { + // TODO: Error Handling + return null; + } +}; + export const requestThumbnailUpload = async ( fileId: number, thumbnail: Blob, diff --git a/src/routes/api/file/[id]/download/+server.ts b/src/routes/api/file/[id]/download/+server.ts index 974dd54..68191ef 100644 --- a/src/routes/api/file/[id]/download/+server.ts +++ b/src/routes/api/file/[id]/download/+server.ts @@ -1,7 +1,7 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; import { authorize } from "$lib/server/modules/auth"; -import { parseRangeHeader, getContentRangeHeader } from "$lib/server/modules/http"; +import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; import { getFileStream } from "$lib/server/services/file"; import type { RequestHandler } from "./$types"; diff --git a/src/routes/api/file/[id]/thumbnail/download/+server.ts b/src/routes/api/file/[id]/thumbnail/download/+server.ts index 70d4cd3..4fc7c1a 100644 --- a/src/routes/api/file/[id]/thumbnail/download/+server.ts +++ b/src/routes/api/file/[id]/thumbnail/download/+server.ts @@ -1,7 +1,7 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; import { authorize } from "$lib/server/modules/auth"; -import { parseRangeHeader, getContentRangeHeader } from "$lib/server/modules/http"; +import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; import { getFileThumbnailStream } from "$lib/server/services/file"; import type { RequestHandler } from "./$types"; diff --git a/src/service-worker/handlers/decryptFile.ts b/src/service-worker/handlers/decryptFile.ts new file mode 100644 index 0000000..e374e5d --- /dev/null +++ b/src/service-worker/handlers/decryptFile.ts @@ -0,0 +1,117 @@ +import { DECRYPTED_FILE_URL_PREFIX, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "../modules/constants"; +import { decryptChunk, getEncryptedRange, getDecryptedSize } from "../modules/crypto"; +import { parseRangeHeader, getContentRangeHeader } from "../modules/http"; +import { getFile } from "../modules/opfs"; +import { fileMetadataStore } from "../stores"; +import type { FileMetadata } from "../types"; + +const createResponse = ( + stream: ReadableStream, + isRangeRequest: boolean, + range: { start: number; end: number; total: number }, + contentType?: string, +) => { + return new 
Response(stream, { + status: isRangeRequest ? 206 : 200, + headers: { + "Accept-Ranges": "bytes", + "Content-Length": String(range.end - range.start + 1), + "Content-Type": contentType ?? "application/octet-stream", + ...(isRangeRequest ? getContentRangeHeader(range) : {}), + }, + }); +}; + +const streamFromOpfs = async ( + file: File, + metadata?: FileMetadata, + range?: { start?: number; end?: number }, +) => { + const start = range?.start ?? 0; + const end = range?.end ?? file.size - 1; + if (start > end || start < 0 || end >= file.size) { + return new Response("Invalid range", { status: 416 }); + } + + return createResponse( + file.slice(start, end + 1).stream(), + !!range, + { start, end, total: file.size }, + metadata?.contentType, + ); +}; + +const streamFromServer = async ( + id: number, + metadata: FileMetadata, + range?: { start?: number; end?: number }, +) => { + const totalSize = getDecryptedSize(metadata.encContentSize, metadata.isLegacy); + const start = range?.start ?? 0; + const end = + range?.end ?? + (range && !metadata.isLegacy ? Math.min(start + CHUNK_SIZE, totalSize) : totalSize) - 1; + if (start > end || start < 0 || end >= totalSize) { + return new Response("Invalid range", { status: 416 }); + } + + const encryptedRange = getEncryptedRange(start, end, metadata.encContentSize, metadata.isLegacy); + const apiResponse = await fetch(`/api/file/${id}/download`, { + headers: { Range: `bytes=${encryptedRange.start}-${encryptedRange.end}` }, + }); + if (apiResponse.status !== 206) { + return new Response("Failed to fetch encrypted file", { status: 502 }); + } + + const fileEncrypted = await apiResponse.arrayBuffer(); + return createResponse( + new ReadableStream({ + async start(controller) { + if (metadata.isLegacy) { + const decrypted = await decryptChunk(fileEncrypted, metadata.dataKey); + controller.enqueue(new Uint8Array(decrypted.slice(start, end + 1))); + controller.close(); + return; + } + + const chunks = encryptedRange.lastChunkIndex - encryptedRange.firstChunkIndex + 1; + + for (let i = 0; i < chunks; i++) { + const chunk = await decryptChunk( + fileEncrypted.slice(i * ENCRYPTED_CHUNK_SIZE, (i + 1) * ENCRYPTED_CHUNK_SIZE), + metadata.dataKey, + ); + const sliceStart = i === 0 ? start % CHUNK_SIZE : 0; + const sliceEnd = i === chunks - 1 ? 
(end % CHUNK_SIZE) + 1 : chunk.byteLength; + controller.enqueue(new Uint8Array(chunk.slice(sliceStart, sliceEnd))); + } + + controller.close(); + }, + }), + !!range, + { start, end, total: totalSize }, + metadata.contentType, + ); +}; + +const decryptFileHandler = async (request: Request) => { + const url = new URL(request.url); + const fileId = parseInt(url.pathname.slice(DECRYPTED_FILE_URL_PREFIX.length), 10); + if (isNaN(fileId)) { + throw new Response("Invalid file id", { status: 400 }); + } + + const metadata = fileMetadataStore.get(fileId); + const range = parseRangeHeader(request.headers.get("Range")); + const cache = await getFile(`/cache/${fileId}`); + if (cache) { + return streamFromOpfs(cache, metadata, range); + } else if (metadata) { + return streamFromServer(fileId, metadata, range); + } else { + return new Response("Decryption not prepared", { status: 400 }); + } +}; + +export default decryptFileHandler; diff --git a/src/service-worker/handlers/index.ts b/src/service-worker/handlers/index.ts new file mode 100644 index 0000000..fe5b0f9 --- /dev/null +++ b/src/service-worker/handlers/index.ts @@ -0,0 +1 @@ +export { default as decryptFile } from "./decryptFile"; diff --git a/src/service-worker/index.ts b/src/service-worker/index.ts new file mode 100644 index 0000000..051f8d9 --- /dev/null +++ b/src/service-worker/index.ts @@ -0,0 +1,43 @@ +/// +/// +/// +/// + +import { DECRYPTED_FILE_URL_PREFIX } from "./modules/constants"; +import { decryptFile } from "./handlers"; +import { fileMetadataStore } from "./stores"; +import type { ServiceWorkerMessage, ServiceWorkerResponse } from "./types"; + +const self = globalThis.self as unknown as ServiceWorkerGlobalScope; + +self.addEventListener("message", (event) => { + const message: ServiceWorkerMessage = event.data; + switch (message.type) { + case "decryption-prepare": + fileMetadataStore.set(message.fileId, message); + event.source?.postMessage({ + type: "decryption-ready", + fileId: message.fileId, + } satisfies ServiceWorkerResponse); + break; + default: { + const exhaustive: never = message.type; + return exhaustive; + } + } +}); + +self.addEventListener("fetch", (event) => { + const url = new URL(event.request.url); + if (url.pathname.startsWith(DECRYPTED_FILE_URL_PREFIX)) { + event.respondWith(decryptFile(event.request)); + } +}); + +self.addEventListener("install", () => { + self.skipWaiting(); +}); + +self.addEventListener("activate", (event) => { + event.waitUntil(self.clients.claim()); +}); diff --git a/src/service-worker/modules/constants.ts b/src/service-worker/modules/constants.ts new file mode 100644 index 0000000..cca093e --- /dev/null +++ b/src/service-worker/modules/constants.ts @@ -0,0 +1 @@ +export * from "../../lib/constants"; diff --git a/src/service-worker/modules/crypto.ts b/src/service-worker/modules/crypto.ts new file mode 100644 index 0000000..1afee74 --- /dev/null +++ b/src/service-worker/modules/crypto.ts @@ -0,0 +1,40 @@ +import { ENCRYPTION_OVERHEAD, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "./constants"; + +export * from "../../lib/modules/crypto"; + +export const getEncryptedRange = ( + start: number, + end: number, + totalEncryptedSize: number, + isLegacy: boolean, +) => { + if (isLegacy) { + return { + firstChunkIndex: 0, + lastChunkIndex: 0, + start: 0, + end: totalEncryptedSize - 1, + }; + } + + const firstChunkIndex = Math.floor(start / CHUNK_SIZE); + const lastChunkIndex = Math.floor(end / CHUNK_SIZE); + return { + firstChunkIndex, + lastChunkIndex, + start: firstChunkIndex * ENCRYPTED_CHUNK_SIZE, 
+ end: Math.min((lastChunkIndex + 1) * ENCRYPTED_CHUNK_SIZE - 1, totalEncryptedSize - 1), + }; +}; + +export const getDecryptedSize = (encryptedSize: number, isLegacy: boolean) => { + if (isLegacy) { + return encryptedSize - ENCRYPTION_OVERHEAD; + } + + const fullChunks = Math.floor(encryptedSize / ENCRYPTED_CHUNK_SIZE); + const lastChunkEncSize = encryptedSize % ENCRYPTED_CHUNK_SIZE; + return ( + fullChunks * CHUNK_SIZE + (lastChunkEncSize > 0 ? lastChunkEncSize - ENCRYPTION_OVERHEAD : 0) + ); +};
diff --git a/src/service-worker/modules/http.ts b/src/service-worker/modules/http.ts new file mode 100644 index 0000000..0d1bf5e --- /dev/null +++ b/src/service-worker/modules/http.ts @@ -0,0 +1 @@ +export * from "../../lib/modules/http";
diff --git a/src/service-worker/modules/opfs.ts b/src/service-worker/modules/opfs.ts new file mode 100644 index 0000000..0ef5769 --- /dev/null +++ b/src/service-worker/modules/opfs.ts @@ -0,0 +1 @@ +export * from "../../lib/modules/opfs";
diff --git a/src/service-worker/stores.ts b/src/service-worker/stores.ts new file mode 100644 index 0000000..22d899e --- /dev/null +++ b/src/service-worker/stores.ts @@ -0,0 +1,3 @@ +import type { FileMetadata } from "./types"; + +export const fileMetadataStore = new Map<number, FileMetadata>();
diff --git a/src/service-worker/types.ts b/src/service-worker/types.ts new file mode 100644 index 0000000..f04ed39 --- /dev/null +++ b/src/service-worker/types.ts @@ -0,0 +1 @@ +export * from "../lib/serviceWorker/types";
From 1efcdd68f1d5364fe7283cdec020d02707d5f4fc Mon Sep 17 00:00:00 2001 From: static Date: Sun, 11 Jan 2026 09:25:40 +0900 Subject: Fix a bug where the download menu was not shown when a video is loaded via streaming MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit
--- .../(fullscreen)/file/[id]/+page.svelte | 1 + .../(fullscreen)/file/[id]/TopBarMenu.svelte | 22 +++-- src/service-worker/handlers/decryptFile.ts | 96 +++++++++++++------ 3 files changed, 83 insertions(+), 36 deletions(-)
diff --git a/src/routes/(fullscreen)/file/[id]/+page.svelte b/src/routes/(fullscreen)/file/[id]/+page.svelte index 674bc22..053d6bf 100644 --- a/src/routes/(fullscreen)/file/[id]/+page.svelte +++ b/src/routes/(fullscreen)/file/[id]/+page.svelte @@ -154,6 +154,7 @@ ? info?.parentId : undefined} {fileBlob} + downloadUrl={videoStreamUrl} filename={info?.name} />
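
(A quick sanity check of the chunk-range arithmetic in src/service-worker/modules/crypto.ts above — a sketch assuming only the constants from src/lib/constants/upload.ts: CHUNK_SIZE = 4,194,304 bytes and ENCRYPTION_OVERHEAD = 28 bytes, so ENCRYPTED_CHUNK_SIZE = 4,194,332 bytes. The concrete numbers below are illustrative, not taken from the patches.)

    // A 10,000,000-byte plaintext file is stored as three encrypted chunks
    // (4,194,332 + 4,194,332 + 1,611,420 = 10,000,084 bytes on disk).
    getDecryptedSize(10_000_084, false); // => 10_000_000

    // Plaintext bytes 5,000,000-6,000,000 both land in chunk 1, since
    // Math.floor(5_000_000 / CHUNK_SIZE) === Math.floor(6_000_000 / CHUNK_SIZE) === 1,
    // so the worker fetches encrypted bytes 4,194,332-8,388,663 (exactly chunk 1)
    // and slices the decrypted output locally.
    getEncryptedRange(5_000_000, 6_000_000, 10_000_084, false);
    // => { firstChunkIndex: 1, lastChunkIndex: 1, start: 4_194_332, end: 8_388_663 }
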
diff --git a/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte b/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte index a037b61..d713e8c 100644 --- a/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte +++ b/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte @@ -10,17 +10,29 @@ interface Props { directoryId?: "root" | number; + downloadUrl?: string; fileBlob?: Blob; filename?: string; isOpen: boolean; } - let { directoryId, fileBlob, filename, isOpen = $bindable() }: Props = $props(); + let { directoryId, downloadUrl, fileBlob, filename, isOpen = $bindable() }: Props = $props(); + + const handleDownload = () => { + if (fileBlob && filename) { + FileSaver.saveAs(fileBlob, filename); + } else if (downloadUrl && filename) { + // Use streaming download via Content-Disposition header + const url = new URL(downloadUrl, window.location.origin); + url.searchParams.set("download", filename); + window.open(url.toString(), "_blank"); + } + }; (isOpen = false)} /> -{#if isOpen && (directoryId || fileBlob)} +{#if isOpen && (directoryId || downloadUrl || fileBlob)}
{ - FileSaver.saveAs(fileBlob, filename); - })} + {#if fileBlob || downloadUrl} + {@render menuButton(IconCloudDownload, "다운로드", handleDownload)} {/if}
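
(A note on the Content-Disposition mechanism that handleDownload relies on, and that the decryptFile.ts change below implements: RFC 6266 with RFC 5987 encoding requires non-ASCII filenames to be sent as percent-encoded UTF-8 via the filename* parameter. A minimal sketch — the filename here is a hypothetical example value, not one from the patch:)

    const filename = "휴가 영상.mp4"; // hypothetical example value
    const contentDisposition = `attachment; filename*=UTF-8''${encodeURIComponent(filename)}`;
    // => attachment; filename*=UTF-8''%ED%9C%B4%EA%B0%80%20%EC%98%81%EC%83%81.mp4
    // Caveat: encodeURIComponent leaves ' ( ) * unescaped, which strict RFC 5987
    // producers also percent-encode; for typical filenames this makes no difference.
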
diff --git a/src/service-worker/handlers/decryptFile.ts b/src/service-worker/handlers/decryptFile.ts index e374e5d..22aa118 100644 --- a/src/service-worker/handlers/decryptFile.ts +++ b/src/service-worker/handlers/decryptFile.ts @@ -10,15 +10,23 @@ const createResponse = ( isRangeRequest: boolean, range: { start: number; end: number; total: number }, contentType?: string, + downloadFilename?: string, ) => { + const headers: Record = { + "Accept-Ranges": "bytes", + "Content-Length": String(range.end - range.start + 1), + "Content-Type": contentType ?? "application/octet-stream", + ...(isRangeRequest ? getContentRangeHeader(range) : {}), + }; + + if (downloadFilename) { + headers["Content-Disposition"] = + `attachment; filename*=UTF-8''${encodeURIComponent(downloadFilename)}`; + } + return new Response(stream, { status: isRangeRequest ? 206 : 200, - headers: { - "Accept-Ranges": "bytes", - "Content-Length": String(range.end - range.start + 1), - "Content-Type": contentType ?? "application/octet-stream", - ...(isRangeRequest ? getContentRangeHeader(range) : {}), - }, + headers, }); }; @@ -26,6 +34,7 @@ const streamFromOpfs = async ( file: File, metadata?: FileMetadata, range?: { start?: number; end?: number }, + downloadFilename?: string, ) => { const start = range?.start ?? 0; const end = range?.end ?? file.size - 1; @@ -38,6 +47,7 @@ const streamFromOpfs = async ( !!range, { start, end, total: file.size }, metadata?.contentType, + downloadFilename, ); }; @@ -45,6 +55,7 @@ const streamFromServer = async ( id: number, metadata: FileMetadata, range?: { start?: number; end?: number }, + downloadFilename?: string, ) => { const totalSize = getDecryptedSize(metadata.encContentSize, metadata.isLegacy); const start = range?.start ?? 0; @@ -59,39 +70,63 @@ const streamFromServer = async ( const apiResponse = await fetch(`/api/file/${id}/download`, { headers: { Range: `bytes=${encryptedRange.start}-${encryptedRange.end}` }, }); - if (apiResponse.status !== 206) { + if (apiResponse.status !== 206 || !apiResponse.body) { return new Response("Failed to fetch encrypted file", { status: 502 }); } - const fileEncrypted = await apiResponse.arrayBuffer(); - return createResponse( - new ReadableStream({ - async start(controller) { - if (metadata.isLegacy) { - const decrypted = await decryptChunk(fileEncrypted, metadata.dataKey); + if (metadata.isLegacy) { + const fileEncrypted = await apiResponse.arrayBuffer(); + const decrypted = await decryptChunk(fileEncrypted, metadata.dataKey); + return createResponse( + new ReadableStream({ + start(controller) { controller.enqueue(new Uint8Array(decrypted.slice(start, end + 1))); controller.close(); - return; - } + }, + }), + !!range, + { start, end, total: totalSize }, + metadata.contentType, + ); + } - const chunks = encryptedRange.lastChunkIndex - encryptedRange.firstChunkIndex + 1; + const totalChunks = encryptedRange.lastChunkIndex - encryptedRange.firstChunkIndex + 1; + let currentChunkIndex = 0; + let buffer = new Uint8Array(0); - for (let i = 0; i < chunks; i++) { - const chunk = await decryptChunk( - fileEncrypted.slice(i * ENCRYPTED_CHUNK_SIZE, (i + 1) * ENCRYPTED_CHUNK_SIZE), - metadata.dataKey, - ); - const sliceStart = i === 0 ? start % CHUNK_SIZE : 0; - const sliceEnd = i === chunks - 1 ? 
(end % CHUNK_SIZE) + 1 : chunk.byteLength; - controller.enqueue(new Uint8Array(chunk.slice(sliceStart, sliceEnd))); - } + const decryptingStream = new TransformStream({ + async transform(chunk, controller) { + const newBuffer = new Uint8Array(buffer.length + chunk.length); + newBuffer.set(buffer); + newBuffer.set(chunk, buffer.length); + buffer = newBuffer; - controller.close(); - }, - }), + while (buffer.length >= ENCRYPTED_CHUNK_SIZE && currentChunkIndex < totalChunks - 1) { + const encryptedChunk = buffer.slice(0, ENCRYPTED_CHUNK_SIZE); + buffer = buffer.slice(ENCRYPTED_CHUNK_SIZE); + + const decrypted = await decryptChunk(encryptedChunk.buffer, metadata.dataKey); + const sliceStart = currentChunkIndex === 0 ? start % CHUNK_SIZE : 0; + controller.enqueue(new Uint8Array(decrypted.slice(sliceStart))); + currentChunkIndex++; + } + }, + async flush(controller) { + if (buffer.length > 0) { + const decrypted = await decryptChunk(buffer.buffer, metadata.dataKey); + const sliceStart = currentChunkIndex === 0 ? start % CHUNK_SIZE : 0; + const sliceEnd = (end % CHUNK_SIZE) + 1; + controller.enqueue(new Uint8Array(decrypted.slice(sliceStart, sliceEnd))); + } + }, + }); + + return createResponse( + apiResponse.body.pipeThrough(decryptingStream), !!range, { start, end, total: totalSize }, metadata.contentType, + downloadFilename, ); }; @@ -102,13 +137,14 @@ const decryptFileHandler = async (request: Request) => { throw new Response("Invalid file id", { status: 400 }); } + const downloadFilename = url.searchParams.get("download") ?? undefined; const metadata = fileMetadataStore.get(fileId); const range = parseRangeHeader(request.headers.get("Range")); const cache = await getFile(`/cache/${fileId}`); if (cache) { - return streamFromOpfs(cache, metadata, range); + return streamFromOpfs(cache, metadata, range, downloadFilename); } else if (metadata) { - return streamFromServer(fileId, metadata, range); + return streamFromServer(fileId, metadata, range, downloadFilename); } else { return new Response("Decryption not prepared", { status: 400 }); }
From 3628e6d21ae6a967a40770c5ad972861faa9eeb0 Mon Sep 17 00:00:00 2001 From: static Date: Sun, 11 Jan 2026 13:19:54 +0900 Subject: Handle uploads in a streaming manner as well MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit
--- package.json | 1 + pnpm-lock.yaml | 9 + src/lib/indexedDB/keyStore.ts | 6 +- src/lib/modules/crypto/aes.ts | 2 +- src/lib/modules/crypto/sha.ts | 13 + src/lib/modules/crypto/util.ts | 4 +- src/lib/modules/file/upload.svelte.ts | 314 +++++++++++++++--- src/lib/modules/thumbnail.ts | 17 + .../(main)/directory/[[id]]/service.svelte.ts | 4 +- 9 files changed, 308 insertions(+), 62 deletions(-)
diff --git a/package.json b/package.json index c16b700..17dad8d 100644 --- a/package.json +++ b/package.json @@ -19,6 +19,7 @@ "@eslint/compat": "^2.0.0", "@eslint/js": "^9.39.2", "@iconify-json/material-symbols": "^1.2.50", + "@noble/hashes": "^2.0.1", "@sveltejs/adapter-node": "^5.4.0", "@sveltejs/kit": "^2.49.2", "@sveltejs/vite-plugin-svelte": "^6.2.1",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e4e336f..025aacd 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -48,6 +48,9 @@ importers: '@iconify-json/material-symbols': specifier: ^1.2.50 version: 1.2.50
'@noble/hashes': + specifier: ^2.0.1 + version: 2.0.1 '@sveltejs/adapter-node': specifier: ^5.4.0 version: 5.4.0(@sveltejs/kit@2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))) @@ -414,6 +417,10 @@ packages: '@jridgewell/trace-mapping@0.3.31': resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + '@noble/hashes@2.0.1': + resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==} + engines: {node: '>= 20.19.0'} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -2217,6 +2224,8 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@noble/hashes@2.0.1': {} + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 diff --git a/src/lib/indexedDB/keyStore.ts b/src/lib/indexedDB/keyStore.ts index 7a4c89e..86b8b79 100644 --- a/src/lib/indexedDB/keyStore.ts +++ b/src/lib/indexedDB/keyStore.ts @@ -70,12 +70,12 @@ export const storeMasterKeys = async (keys: MasterKey[]) => { }; export const getHmacSecrets = async () => { - return await keyStore.hmacSecret.toArray(); + return (await keyStore.hmacSecret.toArray()).filter(({ secret }) => secret.extractable); }; export const storeHmacSecrets = async (secrets: HmacSecret[]) => { - if (secrets.some(({ secret }) => secret.extractable)) { - throw new Error("Hmac secrets must be nonextractable"); + if (secrets.some(({ secret }) => !secret.extractable)) { + throw new Error("Hmac secrets must be extractable"); } await keyStore.hmacSecret.bulkPut(secrets); }; diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts index fe11afb..4035343 100644 --- a/src/lib/modules/crypto/aes.ts +++ b/src/lib/modules/crypto/aes.ts @@ -77,7 +77,7 @@ export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: Cry name: "HMAC", hash: "SHA-256", } satisfies HmacImportParams, - false, // Nonextractable + true, // Extractable ["sign", "verify"], ), }; diff --git a/src/lib/modules/crypto/sha.ts b/src/lib/modules/crypto/sha.ts index 9bf2dea..883ac10 100644 --- a/src/lib/modules/crypto/sha.ts +++ b/src/lib/modules/crypto/sha.ts @@ -1,7 +1,20 @@ +import { hmac } from "@noble/hashes/hmac.js"; +import { sha256 } from "@noble/hashes/sha2.js"; + export const digestMessage = async (message: BufferSource) => { return await crypto.subtle.digest("SHA-256", message); }; +export const createStreamingHmac = async (hmacSecret: CryptoKey) => { + const keyBytes = new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret)); + const h = hmac.create(sha256, keyBytes); + + return { + update: (data: Uint8Array) => h.update(data), + digest: () => h.digest(), + }; +}; + export const generateHmacSecret = async () => { return { hmacSecret: await crypto.subtle.generateKey( diff --git a/src/lib/modules/crypto/util.ts b/src/lib/modules/crypto/util.ts index a3e3bc0..215eaf2 100644 --- a/src/lib/modules/crypto/util.ts +++ b/src/lib/modules/crypto/util.ts @@ -9,8 +9,8 @@ export const decodeString = (data: ArrayBuffer) => { return textDecoder.decode(data); }; -export const encodeToBase64 = (data: ArrayBuffer) => { - return btoa(String.fromCharCode(...new Uint8Array(data))); +export const encodeToBase64 = (data: ArrayBuffer | Uint8Array) => { + 
return btoa(String.fromCharCode(...(data instanceof ArrayBuffer ? new Uint8Array(data) : data))); }; export const decodeFromBase64 = (data: string) => { diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index 2bb6c7c..ac3010e 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -1,6 +1,6 @@ import axios from "axios"; import ExifReader from "exifreader"; -import { limitFunction } from "p-limit"; +import pLimit, { limitFunction } from "p-limit"; import { CHUNK_SIZE } from "$lib/constants"; import { encodeToBase64, @@ -11,9 +11,10 @@ import { encryptChunk, digestMessage, signMessageHmac, + createStreamingHmac, } from "$lib/modules/crypto"; import { Scheduler } from "$lib/modules/scheduler"; -import { generateThumbnail } from "$lib/modules/thumbnail"; +import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail"; import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; import type { MasterKey, HmacSecret } from "$lib/stores"; import { trpc } from "$trpc/client"; @@ -41,7 +42,7 @@ export type LiveFileUploadState = FileUploadState & { }; const scheduler = new Scheduler< - { fileId: number; fileBuffer: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined + { fileId: number; fileBuffer?: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined >(); let uploadingFiles: FileUploadState[] = $state([]); @@ -77,6 +78,33 @@ const requestDuplicateFileScan = limitFunction( { concurrency: 1 }, ); +const isImageFile = (fileType: string) => fileType.startsWith("image/"); + +const requestDuplicateFileScanStreaming = limitFunction( + async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise) => { + const hmacStream = await createStreamingHmac(hmacSecret.secret); + const reader = file.stream().getReader(); + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + hmacStream.update(value); + } + + const fileSigned = encodeToBase64(hmacStream.digest()); + const files = await trpc().file.listByHash.query({ + hskVersion: hmacSecret.version, + contentHmac: fileSigned, + }); + if (files.length === 0 || (await onDuplicate())) { + return { fileSigned }; + } else { + return {}; + } + }, + { concurrency: 1 }, +); + const getFileType = (file: File) => { if (file.type) return file.type; if (file.name.endsWith(".heic")) return "image/heic"; @@ -235,6 +263,148 @@ const requestFileUpload = limitFunction( { concurrency: 1 }, ); +const uploadFileStreaming = async ( + state: FileUploadState, + file: File, + masterKey: MasterKey, + hmacSecret: HmacSecret, + fileSigned: string, + parentId: DirectoryId, +) => { + state.status = "uploading"; + + const fileType = getFileType(file); + const { dataKey, dataKeyVersion } = await generateDataKey(); + const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); + + const nameEncrypted = await encryptString(file.name, dataKey); + const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); + + // Calculate total chunks for metadata + const totalChunks = Math.ceil(file.size / CHUNK_SIZE); + const metadata = { + chunks: totalChunks, + parent: parentId, + mekVersion: masterKey.version, + dek: dataKeyWrapped, + dekVersion: dataKeyVersion, + hskVersion: hmacSecret.version, + contentType: fileType, + name: nameEncrypted.ciphertext, + nameIv: nameEncrypted.iv, + lastModifiedAt: lastModifiedAtEncrypted.ciphertext, + lastModifiedAtIv: lastModifiedAtEncrypted.iv, + }; + + const { uploadId } = await 
trpc().file.startUpload.mutate(metadata); + + // Stream file, encrypt, and upload with concurrency limit + const reader = file.stream().getReader(); + const limit = pLimit(4); + let buffer = new Uint8Array(0); + let chunkIndex = 0; + const uploadPromises: Promise[] = []; + + const totalBytes = file.size; + let uploadedBytes = 0; + const startTime = Date.now(); + + const uploadChunk = async ( + index: number, + encryptedChunk: ArrayBuffer, + chunkHash: string, + originalChunkSize: number, + ) => { + const response = await fetch(`/api/file/upload/${uploadId}/chunks/${index}`, { + method: "POST", + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${chunkHash}:`, + }, + body: encryptedChunk, + }); + + if (!response.ok) { + throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`); + } + + // Update progress after upload completes + uploadedBytes += originalChunkSize; + const elapsed = (Date.now() - startTime) / 1000; + const rate = uploadedBytes / elapsed; + const remaining = totalBytes - uploadedBytes; + const estimated = rate > 0 ? remaining / rate : undefined; + + state.progress = uploadedBytes / totalBytes; + state.rate = rate; + state.estimated = estimated; + }; + + while (true) { + const { done, value } = await reader.read(); + if (done && buffer.length === 0) break; + + if (value) { + const newBuffer = new Uint8Array(buffer.length + value.length); + newBuffer.set(buffer); + newBuffer.set(value, buffer.length); + buffer = newBuffer; + } + + while (buffer.length >= CHUNK_SIZE || (done && buffer.length > 0)) { + const chunkSize = Math.min(CHUNK_SIZE, buffer.length); + const chunk = buffer.slice(0, chunkSize); + buffer = buffer.slice(chunkSize); + + const encryptedChunk = await encryptChunk(chunk.buffer.slice(0, chunk.byteLength), dataKey); + const chunkHash = encodeToBase64(await digestMessage(encryptedChunk)); + const currentIndex = chunkIndex++; + + uploadPromises.push( + limit(() => uploadChunk(currentIndex, encryptedChunk, chunkHash, chunkSize)), + ); + } + + if (done) break; + } + + await Promise.all(uploadPromises); + + const { file: fileId } = await trpc().file.completeUpload.mutate({ + uploadId, + contentHmac: fileSigned, + }); + + // Generate and upload thumbnail for video files + if (fileType.startsWith("video/")) { + try { + const thumbnail = await generateThumbnailFromFile(file); + if (thumbnail) { + const thumbnailBuffer = await thumbnail.arrayBuffer(); + const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey); + + const thumbnailForm = new FormData(); + thumbnailForm.set( + "metadata", + JSON.stringify({ + dekVersion: dataKeyVersion.toISOString(), + contentIv: encodeToBase64(thumbnailEncrypted.iv), + } satisfies FileThumbnailUploadRequest), + ); + thumbnailForm.set("content", new Blob([thumbnailEncrypted.ciphertext])); + await axios.post(`/api/file/${fileId}/thumbnail/upload`, thumbnailForm); + } + } catch (e) { + // Thumbnail upload failure is not critical + console.error(e); + } + } + + state.status = "uploaded"; + + return { fileId }; +}; + export const uploadFile = async ( file: File, parentId: "root" | number, @@ -249,69 +419,103 @@ export const uploadFile = async ( }); const state = uploadingFiles.at(-1)!; + const fileType = getFileType(file); + + // Image files: use buffer-based approach (need EXIF + thumbnail) + if (isImageFile(fileType)) { + return await scheduler.schedule(file.size, async () => { + state.status = "encryption-pending"; + + try { + const { fileBuffer, fileSigned } = await 
requestDuplicateFileScan( + file, + hmacSecret, + onDuplicate, + ); + if (!fileBuffer || !fileSigned) { + state.status = "canceled"; + uploadingFiles = uploadingFiles.filter((file) => file !== state); + return undefined; + } + + const { + dataKeyWrapped, + dataKeyVersion, + fileType, + chunksEncrypted, + nameEncrypted, + createdAtEncrypted, + lastModifiedAtEncrypted, + thumbnail, + } = await encryptFile(state, file, fileBuffer, masterKey); + + const metadata = { + chunks: chunksEncrypted.length, + parent: parentId, + mekVersion: masterKey.version, + dek: dataKeyWrapped, + dekVersion: dataKeyVersion, + hskVersion: hmacSecret.version, + contentType: fileType, + name: nameEncrypted.ciphertext, + nameIv: nameEncrypted.iv, + createdAt: createdAtEncrypted?.ciphertext, + createdAtIv: createdAtEncrypted?.iv, + lastModifiedAt: lastModifiedAtEncrypted.ciphertext, + lastModifiedAtIv: lastModifiedAtEncrypted.iv, + }; + + let thumbnailForm = null; + if (thumbnail) { + thumbnailForm = new FormData(); + thumbnailForm.set( + "metadata", + JSON.stringify({ + dekVersion: dataKeyVersion.toISOString(), + contentIv: encodeToBase64(thumbnail.iv), + } satisfies FileThumbnailUploadRequest), + ); + thumbnailForm.set("content", new Blob([thumbnail.ciphertext])); + } + + const { fileId } = await requestFileUpload( + state, + metadata, + chunksEncrypted, + fileSigned, + thumbnailForm, + ); + return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext }; + } catch (e) { + state.status = "error"; + throw e; + } + }); + } + + // Video and other files: use streaming approach return await scheduler.schedule(file.size, async () => { state.status = "encryption-pending"; try { - const { fileBuffer, fileSigned } = await requestDuplicateFileScan( - file, - hmacSecret, - onDuplicate, - ); - if (!fileBuffer || !fileSigned) { + // 1st pass: streaming HMAC for duplicate check + const { fileSigned } = await requestDuplicateFileScanStreaming(file, hmacSecret, onDuplicate); + if (!fileSigned) { state.status = "canceled"; - uploadingFiles = uploadingFiles.filter((file) => file !== state); + uploadingFiles = uploadingFiles.filter((f) => f !== state); return undefined; } - const { - dataKeyWrapped, - dataKeyVersion, - fileType, - chunksEncrypted, - nameEncrypted, - createdAtEncrypted, - lastModifiedAtEncrypted, - thumbnail, - } = await encryptFile(state, file, fileBuffer, masterKey); - - const metadata = { - chunks: chunksEncrypted.length, - parent: parentId, - mekVersion: masterKey.version, - dek: dataKeyWrapped, - dekVersion: dataKeyVersion, - hskVersion: hmacSecret.version, - contentType: fileType, - name: nameEncrypted.ciphertext, - nameIv: nameEncrypted.iv, - createdAt: createdAtEncrypted?.ciphertext, - createdAtIv: createdAtEncrypted?.iv, - lastModifiedAt: lastModifiedAtEncrypted.ciphertext, - lastModifiedAtIv: lastModifiedAtEncrypted.iv, - }; - - let thumbnailForm = null; - if (thumbnail) { - thumbnailForm = new FormData(); - thumbnailForm.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: encodeToBase64(thumbnail.iv), - } satisfies FileThumbnailUploadRequest), - ); - thumbnailForm.set("content", new Blob([thumbnail.ciphertext])); - } - - const { fileId } = await requestFileUpload( + // 2nd pass: streaming encrypt + upload + const { fileId } = await uploadFileStreaming( state, - metadata, - chunksEncrypted, + file, + masterKey, + hmacSecret, fileSigned, - thumbnailForm, + parentId, ); - return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext }; + return { fileId, 
fileBuffer: undefined, thumbnailBuffer: undefined };
    } catch (e) {
      state.status = "error";
      throw e;
diff --git a/src/lib/modules/thumbnail.ts b/src/lib/modules/thumbnail.ts
index d9a995b..739c7af 100644
--- a/src/lib/modules/thumbnail.ts
+++ b/src/lib/modules/thumbnail.ts
@@ -125,3 +125,20 @@ export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: strin
 export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => {
   return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`;
 };
+
+export const generateThumbnailFromFile = async (file: File) => {
+  const fileType = file.type || (file.name.endsWith(".heic") ? "image/heic" : "");
+  if (!fileType.startsWith("video/")) return null;
+
+  let url;
+  try {
+    url = URL.createObjectURL(file);
+    return await generateVideoThumbnail(url);
+  } catch {
+    return null;
+  } finally {
+    if (url) {
+      URL.revokeObjectURL(url);
+    }
+  }
+};
diff --git a/src/routes/(main)/directory/[[id]]/service.svelte.ts b/src/routes/(main)/directory/[[id]]/service.svelte.ts
index f83bbaf..ccd5b14 100644
--- a/src/routes/(main)/directory/[[id]]/service.svelte.ts
+++ b/src/routes/(main)/directory/[[id]]/service.svelte.ts
@@ -88,7 +88,9 @@ export const requestFileUpload = async (
   const res = await uploadFile(file, parentId, hmacSecret, masterKey, onDuplicate);
   if (!res) return false;
 
-  storeFileCache(res.fileId, res.fileBuffer); // Intended
+  if (res.fileBuffer) {
+    storeFileCache(res.fileId, res.fileBuffer); // Intended
+  }
   if (res.thumbnailBuffer) {
     storeFileThumbnailCache(res.fileId, res.thumbnailBuffer); // Intended
   }

From 57c27b76bea9980756d52d609c6058bd14cd7690 Mon Sep 17 00:00:00 2001
From: static
Date: Sun, 11 Jan 2026 14:07:32 +0900
Subject: Switch thumbnail uploads to the new upload method as well
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 package.json                                  |   1 -
 pnpm-lock.yaml                                |   8 -
 src/lib/modules/file/upload.svelte.ts         |  93 ++++---
 src/lib/server/db/media.ts                    |   4 +-
 .../migrations/1768062380-AddChunkedUpload.ts |  24 +-
 src/lib/server/db/schema/media.ts             |   2 +-
 src/lib/server/db/schema/upload.ts            |  17 +-
 src/lib/server/db/upload.ts                   | 101 ++++++--
 src/lib/server/services/file.ts               | 120 +--------
 src/lib/server/services/upload.ts             |  77 ++++++
 src/lib/services/file.ts                      |  44 +++-
 .../api/file/[id]/thumbnail/upload/+server.ts |  74 ------
 .../upload/[id]/chunks/[index]/+server.ts     |   2 +-
 src/trpc/router.server.ts                     |   2 +
 src/trpc/routers/file.ts                      | 146 +----------
 src/trpc/routers/index.ts                     |   1 +
 src/trpc/routers/upload.ts                    | 241 ++++++++++++++++++
 17 files changed, 527 insertions(+), 430 deletions(-)
 create mode 100644 src/lib/server/services/upload.ts
 delete mode 100644 src/routes/api/file/[id]/thumbnail/upload/+server.ts
 rename src/routes/api/{file => }/upload/[id]/chunks/[index]/+server.ts (96%)
 create mode 100644 src/trpc/routers/upload.ts

diff --git a/package.json b/package.json
index 17dad8d..952d53f 100644
--- a/package.json
+++ b/package.json
@@ -56,7 +56,6 @@
     "vite": "^7.3.0"
   },
   "dependencies": {
-    "@fastify/busboy": "^3.2.0",
     "@trpc/server": "^11.8.1",
     "argon2": "^0.44.0",
     "kysely": "^0.28.9",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 025aacd..f4c8e80 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -8,9 +8,6 @@ importers:
 
   .:
     dependencies:
-      '@fastify/busboy':
-        specifier: ^3.2.0
-        version: 3.2.0
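
The multipart route that needed @fastify/busboy can go away because thumbnails now travel through the same chunk endpoint as file content: the client posts a single chunk whose first 12 bytes are the AES-GCM IV and whose remainder is the ciphertext (including the 16-byte GCM tag), integrity-protected by a Content-Digest header the server re-computes. A condensed TypeScript sketch of that client-side convention, using the repo's own encodeToBase64 and digestMessage helpers; the postIvPrefixedChunk wrapper itself is illustrative and not part of the patch:

import { encodeToBase64, digestMessage } from "$lib/modules/crypto";

// Post one encrypted blob as chunk 0 of an upload session.
const postIvPrefixedChunk = async (
  uploadId: string,
  iv: ArrayBuffer, // 12 bytes (AES_GCM_IV_SIZE)
  ciphertext: ArrayBuffer, // ciphertext followed by the 16-byte GCM tag
) => {
  // IV first, ciphertext second, so any reader can split at byte 12.
  const body = new Uint8Array(iv.byteLength + ciphertext.byteLength);
  body.set(new Uint8Array(iv), 0);
  body.set(new Uint8Array(ciphertext), iv.byteLength);

  // SHA-256 over the exact bytes sent, in RFC 9530 Content-Digest form.
  const hash = encodeToBase64(await digestMessage(body));

  const response = await fetch(`/api/upload/${uploadId}/chunks/0`, {
    method: "POST",
    headers: {
      "Content-Type": "application/octet-stream",
      "Content-Digest": `sha-256=:${hash}:`,
    },
    body,
  });
  if (!response.ok) {
    throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
  }
};

Because the IV rides along inside the stored blob, downloaders simply split the buffer at byte 12 again, which is why this patch series can relax encrypted_content_iv to NULL for content stored in this format.
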
'@trpc/server': specifier: ^11.8.1 version: 11.8.1(typescript@5.9.3) @@ -373,9 +370,6 @@ packages: resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@fastify/busboy@3.2.0': - resolution: {integrity: sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA==} - '@humanfs/core@0.19.1': resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} engines: {node: '>=18.18.0'} @@ -2180,8 +2174,6 @@ snapshots: '@eslint/core': 0.17.0 levn: 0.4.1 - '@fastify/busboy@3.2.0': {} - '@humanfs/core@0.19.1': {} '@humanfs/node@0.16.7': diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index ac3010e..eaa35df 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -1,4 +1,3 @@ -import axios from "axios"; import ExifReader from "exifreader"; import pLimit, { limitFunction } from "p-limit"; import { CHUNK_SIZE } from "$lib/constants"; @@ -15,7 +14,6 @@ import { } from "$lib/modules/crypto"; import { Scheduler } from "$lib/modules/scheduler"; import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail"; -import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; import type { MasterKey, HmacSecret } from "$lib/stores"; import { trpc } from "$trpc/client"; import type { RouterInputs } from "$trpc/router.server"; @@ -194,17 +192,55 @@ const encryptFile = limitFunction( { concurrency: 4 }, ); +const uploadThumbnail = async ( + fileId: number, + thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer }, + dataKeyVersion: Date, +) => { + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); + + const ivAndCiphertext = new Uint8Array( + thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength, + ); + ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0); + ivAndCiphertext.set( + new Uint8Array(thumbnailEncrypted.ciphertext), + thumbnailEncrypted.iv.byteLength, + ); + + const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext)); + + const response = await fetch(`/api/upload/${uploadId}/chunks/0`, { + method: "POST", + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${chunkHash}:`, + }, + body: ivAndCiphertext, + }); + + if (!response.ok) { + throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`); + } + + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); +}; + const requestFileUpload = limitFunction( async ( state: FileUploadState, - metadata: RouterInputs["file"]["startUpload"], + metadata: RouterInputs["upload"]["startFileUpload"], chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[], fileSigned: string | undefined, - thumbnailForm: FormData | null, + thumbnailData: { ciphertext: ArrayBuffer; iv: ArrayBuffer; plaintext: ArrayBuffer } | null, + dataKeyVersion: Date, ) => { state.status = "uploading"; - const { uploadId } = await trpc().file.startUpload.mutate(metadata); + const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); // Upload chunks with progress tracking const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0); @@ -214,7 +250,7 @@ const requestFileUpload = limitFunction( for (let i = 0; i 
< chunksEncrypted.length; i++) { const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!; - const response = await fetch(`/api/file/upload/${uploadId}/chunks/${i}`, { + const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, { method: "POST", headers: { "Content-Type": "application/octet-stream", @@ -241,15 +277,15 @@ const requestFileUpload = limitFunction( } // Complete upload - const { file: fileId } = await trpc().file.completeUpload.mutate({ + const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ uploadId, contentHmac: fileSigned, }); // Upload thumbnail if exists - if (thumbnailForm) { + if (thumbnailData) { try { - await axios.post(`/api/file/${fileId}/thumbnail/upload`, thumbnailForm); + await uploadThumbnail(fileId, thumbnailData, dataKeyVersion); } catch (e) { // TODO: Error handling for thumbnail upload console.error(e); @@ -258,7 +294,7 @@ const requestFileUpload = limitFunction( state.status = "uploaded"; - return { fileId }; + return { fileId, thumbnailBuffer: thumbnailData?.plaintext }; }, { concurrency: 1 }, ); @@ -296,7 +332,7 @@ const uploadFileStreaming = async ( lastModifiedAtIv: lastModifiedAtEncrypted.iv, }; - const { uploadId } = await trpc().file.startUpload.mutate(metadata); + const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); // Stream file, encrypt, and upload with concurrency limit const reader = file.stream().getReader(); @@ -315,7 +351,7 @@ const uploadFileStreaming = async ( chunkHash: string, originalChunkSize: number, ) => { - const response = await fetch(`/api/file/upload/${uploadId}/chunks/${index}`, { + const response = await fetch(`/api/upload/${uploadId}/chunks/${index}`, { method: "POST", headers: { "Content-Type": "application/octet-stream", @@ -370,7 +406,7 @@ const uploadFileStreaming = async ( await Promise.all(uploadPromises); - const { file: fileId } = await trpc().file.completeUpload.mutate({ + const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ uploadId, contentHmac: fileSigned, }); @@ -383,16 +419,7 @@ const uploadFileStreaming = async ( const thumbnailBuffer = await thumbnail.arrayBuffer(); const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey); - const thumbnailForm = new FormData(); - thumbnailForm.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: encodeToBase64(thumbnailEncrypted.iv), - } satisfies FileThumbnailUploadRequest), - ); - thumbnailForm.set("content", new Blob([thumbnailEncrypted.ciphertext])); - await axios.post(`/api/file/${fileId}/thumbnail/upload`, thumbnailForm); + await uploadThumbnail(fileId, thumbnailEncrypted, dataKeyVersion); } } catch (e) { // Thumbnail upload failure is not critical @@ -465,27 +492,15 @@ export const uploadFile = async ( lastModifiedAtIv: lastModifiedAtEncrypted.iv, }; - let thumbnailForm = null; - if (thumbnail) { - thumbnailForm = new FormData(); - thumbnailForm.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: encodeToBase64(thumbnail.iv), - } satisfies FileThumbnailUploadRequest), - ); - thumbnailForm.set("content", new Blob([thumbnail.ciphertext])); - } - - const { fileId } = await requestFileUpload( + const { fileId, thumbnailBuffer } = await requestFileUpload( state, metadata, chunksEncrypted, fileSigned, - thumbnailForm, + thumbnail ?? 
null, + dataKeyVersion, ); - return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext }; + return { fileId, fileBuffer, thumbnailBuffer }; } catch (e) { state.status = "error"; throw e; diff --git a/src/lib/server/db/media.ts b/src/lib/server/db/media.ts index 209e256..c4d2a34 100644 --- a/src/lib/server/db/media.ts +++ b/src/lib/server/db/media.ts @@ -6,7 +6,7 @@ interface Thumbnail { id: number; path: string; updatedAt: Date; - encContentIv: string; + encContentIv: string | null; } interface FileThumbnail extends Thumbnail { @@ -18,7 +18,7 @@ export const updateFileThumbnail = async ( fileId: number, dekVersion: Date, path: string, - encContentIv: string, + encContentIv: string | null, ) => { return await db.transaction().execute(async (trx) => { const file = await trx diff --git a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts index fe8abd4..cf18c05 100644 --- a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts +++ b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts @@ -8,23 +8,31 @@ export const up = async (db: Kysely) => { .alterColumn("encrypted_content_iv", (col) => col.dropNotNull()) .execute(); + // media.ts + await db.schema + .alterTable("thumbnail") + .alterColumn("encrypted_content_iv", (col) => col.dropNotNull()) + .execute(); + // upload.ts await db.schema .createTable("upload_session") .addColumn("id", "uuid", (col) => col.primaryKey().defaultTo(sql`gen_random_uuid()`)) + .addColumn("type", "text", (col) => col.notNull()) .addColumn("user_id", "integer", (col) => col.references("user.id").notNull()) .addColumn("total_chunks", "integer", (col) => col.notNull()) .addColumn("uploaded_chunks", sql`integer[]`, (col) => col.notNull().defaultTo(sql`'{}'`)) .addColumn("expires_at", "timestamp(3)", (col) => col.notNull()) .addColumn("parent_id", "integer", (col) => col.references("directory.id")) - .addColumn("master_encryption_key_version", "integer", (col) => col.notNull()) - .addColumn("encrypted_data_encryption_key", "text", (col) => col.notNull()) - .addColumn("data_encryption_key_version", "timestamp(3)", (col) => col.notNull()) + .addColumn("master_encryption_key_version", "integer") + .addColumn("encrypted_data_encryption_key", "text") + .addColumn("data_encryption_key_version", "timestamp(3)") .addColumn("hmac_secret_key_version", "integer") - .addColumn("content_type", "text", (col) => col.notNull()) - .addColumn("encrypted_name", "json", (col) => col.notNull()) + .addColumn("content_type", "text") + .addColumn("encrypted_name", "json") .addColumn("encrypted_created_at", "json") - .addColumn("encrypted_last_modified_at", "json", (col) => col.notNull()) + .addColumn("encrypted_last_modified_at", "json") + .addColumn("file_id", "integer", (col) => col.references("file.id")) .addForeignKeyConstraint( "upload_session_fk01", ["user_id", "master_encryption_key_version"], @@ -43,6 +51,10 @@ export const up = async (db: Kysely) => { // eslint-disable-next-line @typescript-eslint/no-explicit-any export const down = async (db: Kysely) => { await db.schema.dropTable("upload_session").execute(); + await db.schema + .alterTable("thumbnail") + .alterColumn("encrypted_content_iv", (col) => col.setNotNull()) + .execute(); await db.schema .alterTable("file") .alterColumn("encrypted_content_iv", (col) => col.setNotNull()) diff --git a/src/lib/server/db/schema/media.ts b/src/lib/server/db/schema/media.ts index ebfbf29..1fef90b 100644 --- a/src/lib/server/db/schema/media.ts +++ 
b/src/lib/server/db/schema/media.ts @@ -7,7 +7,7 @@ interface ThumbnailTable { category_id: number | null; path: string; updated_at: Date; - encrypted_content_iv: string; // Base64 + encrypted_content_iv: string | null; // Base64 } declare module "./index" { diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts index 3372955..26eaac2 100644 --- a/src/lib/server/db/schema/upload.ts +++ b/src/lib/server/db/schema/upload.ts @@ -3,20 +3,25 @@ import type { Ciphertext } from "./util"; interface UploadSessionTable { id: Generated; + type: "file" | "thumbnail"; user_id: number; total_chunks: number; uploaded_chunks: Generated; expires_at: Date; + // For file uploads parent_id: number | null; - master_encryption_key_version: number; - encrypted_data_encryption_key: string; // Base64 - data_encryption_key_version: Date; + master_encryption_key_version: number | null; + encrypted_data_encryption_key: string | null; // Base64 + data_encryption_key_version: Date | null; hmac_secret_key_version: number | null; - content_type: string; - encrypted_name: Ciphertext; + content_type: string | null; + encrypted_name: Ciphertext | null; encrypted_created_at: Ciphertext | null; - encrypted_last_modified_at: Ciphertext; + encrypted_last_modified_at: Ciphertext | null; + + // For thumbnail uploads + file_id: number | null; } declare module "./index" { diff --git a/src/lib/server/db/upload.ts b/src/lib/server/db/upload.ts index 935dc80..4c8da24 100644 --- a/src/lib/server/db/upload.ts +++ b/src/lib/server/db/upload.ts @@ -3,13 +3,16 @@ import { IntegrityError } from "./error"; import db from "./kysely"; import type { Ciphertext } from "./schema"; -interface UploadSession { +interface BaseUploadSession { id: string; userId: number; totalChunks: number; uploadedChunks: number[]; expiresAt: Date; +} +interface FileUploadSession extends BaseUploadSession { + type: "file"; parentId: DirectoryId; mekVersion: number; encDek: string; @@ -21,7 +24,15 @@ interface UploadSession { encLastModifiedAt: Ciphertext; } -export const createUploadSession = async (params: Omit) => { +interface ThumbnailUploadSession extends BaseUploadSession { + type: "thumbnail"; + fileId: number; + dekVersion: Date; +} + +export const createFileUploadSession = async ( + params: Omit, +) => { return await db.transaction().execute(async (trx) => { const mek = await trx .selectFrom("master_encryption_key") @@ -52,6 +63,7 @@ export const createUploadSession = async (params: Omit, +) => { + return await db.transaction().execute(async (trx) => { + const file = await trx + .selectFrom("file") + .select("data_encryption_key_version") + .where("id", "=", params.fileId) + .where("user_id", "=", params.userId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (!file) { + throw new IntegrityError("File not found"); + } else if (file.data_encryption_key_version.getTime() !== params.dekVersion.getTime()) { + throw new IntegrityError("Invalid DEK version"); + } + + const { sessionId } = await trx + .insertInto("upload_session") + .values({ + type: "thumbnail", + user_id: params.userId, + total_chunks: 1, + expires_at: params.expiresAt, + file_id: params.fileId, + data_encryption_key_version: params.dekVersion, + }) + .returning("id as sessionId") + .executeTakeFirstOrThrow(); + return { id: sessionId }; + }); +}; + export const getUploadSession = async (sessionId: string, userId: number) => { const session = await db .selectFrom("upload_session") @@ -80,24 +126,39 @@ export const getUploadSession = async (sessionId: string, 
userId: number) => { .where("expires_at", ">", new Date()) .limit(1) .executeTakeFirst(); - return session - ? ({ - id: session.id, - userId: session.user_id, - totalChunks: session.total_chunks, - uploadedChunks: session.uploaded_chunks, - expiresAt: session.expires_at, - parentId: session.parent_id ?? "root", - mekVersion: session.master_encryption_key_version, - encDek: session.encrypted_data_encryption_key, - dekVersion: session.data_encryption_key_version, - hskVersion: session.hmac_secret_key_version, - contentType: session.content_type, - encName: session.encrypted_name, - encCreatedAt: session.encrypted_created_at, - encLastModifiedAt: session.encrypted_last_modified_at, - } satisfies UploadSession) - : null; + + if (!session) return null; + + if (session.type === "file") { + return { + type: "file", + id: session.id, + userId: session.user_id, + totalChunks: session.total_chunks, + uploadedChunks: session.uploaded_chunks, + expiresAt: session.expires_at, + parentId: session.parent_id ?? "root", + mekVersion: session.master_encryption_key_version!, + encDek: session.encrypted_data_encryption_key!, + dekVersion: session.data_encryption_key_version!, + hskVersion: session.hmac_secret_key_version, + contentType: session.content_type!, + encName: session.encrypted_name!, + encCreatedAt: session.encrypted_created_at, + encLastModifiedAt: session.encrypted_last_modified_at!, + } satisfies FileUploadSession; + } else { + return { + type: "thumbnail", + id: session.id, + userId: session.user_id, + totalChunks: session.total_chunks, + uploadedChunks: session.uploaded_chunks, + expiresAt: session.expires_at, + fileId: session.file_id!, + dekVersion: session.data_encryption_key_version!, + } satisfies ThumbnailUploadSession; + } }; export const markChunkAsUploaded = async (sessionId: string, chunkIndex: number) => { diff --git a/src/lib/server/services/file.ts b/src/lib/server/services/file.ts index 9df6430..0d67303 100644 --- a/src/lib/server/services/file.ts +++ b/src/lib/server/services/file.ts @@ -1,17 +1,8 @@ import { error } from "@sveltejs/kit"; -import { createHash } from "crypto"; -import { createReadStream, createWriteStream } from "fs"; -import { mkdir, stat } from "fs/promises"; -import { dirname } from "path"; +import { createReadStream } from "fs"; +import { stat } from "fs/promises"; import { Readable } from "stream"; -import { pipeline } from "stream/promises"; -import { v4 as uuidv4 } from "uuid"; -import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants"; -import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; -import env from "$lib/server/loadenv"; -import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; - -const uploadLocks = new Set(); +import { FileRepo, MediaRepo } from "$lib/server/db"; const createEncContentStream = async ( path: string, @@ -77,110 +68,7 @@ export const getFileThumbnailStream = async ( return createEncContentStream( thumbnail.path, - Buffer.from(thumbnail.encContentIv, "base64"), + thumbnail.encContentIv ? 
Buffer.from(thumbnail.encContentIv, "base64") : undefined, range, ); }; - -export const uploadFileThumbnail = async ( - userId: number, - fileId: number, - dekVersion: Date, - encContentIv: string, - encContentStream: Readable, -) => { - const path = `${env.thumbnailsPath}/${userId}/${uuidv4()}`; - await mkdir(dirname(path), { recursive: true }); - - try { - await pipeline(encContentStream, createWriteStream(path, { flags: "wx", mode: 0o600 })); - - const oldPath = await MediaRepo.updateFileThumbnail( - userId, - fileId, - dekVersion, - path, - encContentIv, - ); - safeUnlink(oldPath); // Intended - } catch (e) { - await safeUnlink(path); - - if (e instanceof IntegrityError) { - if (e.message === "File not found") { - error(404, "File not found"); - } else if (e.message === "Invalid DEK version") { - error(400, "Mismatched DEK version"); - } - } - throw e; - } -}; - -export const uploadChunk = async ( - userId: number, - sessionId: string, - chunkIndex: number, - encChunkStream: Readable, - encChunkHash: string, -) => { - const lockKey = `${sessionId}/${chunkIndex}`; - if (uploadLocks.has(lockKey)) { - error(409, "Chunk already uploaded"); // TODO: Message - } else { - uploadLocks.add(lockKey); - } - - const filePath = `${getChunkDirectoryPath(sessionId)}/${chunkIndex}`; - - try { - const session = await UploadRepo.getUploadSession(sessionId, userId); - if (!session) { - error(404, "Invalid upload id"); - } else if (chunkIndex >= session.totalChunks) { - error(400, "Invalid chunk index"); - } else if (session.uploadedChunks.includes(chunkIndex)) { - error(409, "Chunk already uploaded"); - } - - const isLastChunk = chunkIndex === session.totalChunks - 1; - - let writtenBytes = 0; - const hashStream = createHash("sha256"); - const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); - - for await (const chunk of encChunkStream) { - writtenBytes += chunk.length; - hashStream.update(chunk); - writeStream.write(chunk); - } - - await new Promise((resolve, reject) => { - writeStream.end((e: any) => (e ? 
reject(e) : resolve())); - }); - - if (hashStream.digest("base64") !== encChunkHash) { - throw new Error("Invalid checksum"); - } else if ( - (!isLastChunk && writtenBytes !== CHUNK_SIZE + ENCRYPTION_OVERHEAD) || - (isLastChunk && - (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > CHUNK_SIZE + ENCRYPTION_OVERHEAD)) - ) { - throw new Error("Invalid chunk size"); - } - - await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex); - } catch (e) { - await safeUnlink(filePath); - - if ( - e instanceof Error && - (e.message === "Invalid checksum" || e.message === "Invalid chunk size") - ) { - error(400, "Invalid request body"); - } - throw e; - } finally { - uploadLocks.delete(lockKey); - } -}; diff --git a/src/lib/server/services/upload.ts b/src/lib/server/services/upload.ts new file mode 100644 index 0000000..1be250d --- /dev/null +++ b/src/lib/server/services/upload.ts @@ -0,0 +1,77 @@ +import { error } from "@sveltejs/kit"; +import { createHash } from "crypto"; +import { createWriteStream } from "fs"; +import { Readable } from "stream"; +import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants"; +import { UploadRepo } from "$lib/server/db"; +import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; + +const chunkLocks = new Set(); + +export const uploadChunk = async ( + userId: number, + sessionId: string, + chunkIndex: number, + encChunkStream: Readable, + encChunkHash: string, +) => { + const lockKey = `${sessionId}/${chunkIndex}`; + if (chunkLocks.has(lockKey)) { + error(409, "Chunk already uploaded"); // TODO: Message + } else { + chunkLocks.add(lockKey); + } + + const filePath = `${getChunkDirectoryPath(sessionId)}/${chunkIndex}`; + + try { + const session = await UploadRepo.getUploadSession(sessionId, userId); + if (!session) { + error(404, "Invalid upload id"); + } else if (chunkIndex >= session.totalChunks) { + error(400, "Invalid chunk index"); + } else if (session.uploadedChunks.includes(chunkIndex)) { + error(409, "Chunk already uploaded"); + } + + const isLastChunk = chunkIndex === session.totalChunks - 1; + + let writtenBytes = 0; + const hashStream = createHash("sha256"); + const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); + + for await (const chunk of encChunkStream) { + writtenBytes += chunk.length; + hashStream.update(chunk); + writeStream.write(chunk); + } + + await new Promise((resolve, reject) => { + writeStream.end((e: any) => (e ? 
reject(e) : resolve())); + }); + + if (hashStream.digest("base64") !== encChunkHash) { + throw new Error("Invalid checksum"); + } else if ( + (!isLastChunk && writtenBytes !== CHUNK_SIZE + ENCRYPTION_OVERHEAD) || + (isLastChunk && + (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > CHUNK_SIZE + ENCRYPTION_OVERHEAD)) + ) { + throw new Error("Invalid chunk size"); + } + + await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex); + } catch (e) { + await safeUnlink(filePath); + + if ( + e instanceof Error && + (e.message === "Invalid checksum" || e.message === "Invalid chunk size") + ) { + error(400, "Invalid request body"); + } + throw e; + } finally { + chunkLocks.delete(lockKey); + } +}; diff --git a/src/lib/services/file.ts b/src/lib/services/file.ts index 5f95f42..2f37f52 100644 --- a/src/lib/services/file.ts +++ b/src/lib/services/file.ts @@ -1,5 +1,5 @@ import { getAllFileInfos } from "$lib/indexedDB/filesystem"; -import { encodeToBase64 } from "$lib/modules/crypto"; +import { encodeToBase64, digestMessage } from "$lib/modules/crypto"; import { getFileCache, storeFileCache, @@ -7,7 +7,6 @@ import { downloadFile, deleteFileThumbnailCache, } from "$lib/modules/file"; -import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; import { trpc } from "$trpc/client"; export const requestFileDownload = async ( @@ -28,17 +27,38 @@ export const requestFileThumbnailUpload = async ( dataKeyVersion: Date, thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer }, ) => { - const form = new FormData(); - form.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: encodeToBase64(thumbnailEncrypted.iv), - } satisfies FileThumbnailUploadRequest), - ); - form.set("content", new Blob([thumbnailEncrypted.ciphertext])); + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); - return await fetch(`/api/file/${fileId}/thumbnail/upload`, { method: "POST", body: form }); + // Prepend IV to ciphertext (consistent with file download format) + const ivAndCiphertext = new Uint8Array( + thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength, + ); + ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0); + ivAndCiphertext.set( + new Uint8Array(thumbnailEncrypted.ciphertext), + thumbnailEncrypted.iv.byteLength, + ); + + const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext)); + + const response = await fetch(`/api/upload/${uploadId}/chunks/0`, { + method: "POST", + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${chunkHash}:`, + }, + body: ivAndCiphertext, + }); + + if (!response.ok) { + throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`); + } + + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); + return response; }; export const requestDeletedFilesCleanup = async () => { diff --git a/src/routes/api/file/[id]/thumbnail/upload/+server.ts b/src/routes/api/file/[id]/thumbnail/upload/+server.ts deleted file mode 100644 index 62dfe42..0000000 --- a/src/routes/api/file/[id]/thumbnail/upload/+server.ts +++ /dev/null @@ -1,74 +0,0 @@ -import Busboy from "@fastify/busboy"; -import { error, text } from "@sveltejs/kit"; -import { Readable, Writable } from "stream"; -import { z } from "zod"; -import { authorize } from "$lib/server/modules/auth"; -import { fileThumbnailUploadRequest, type FileThumbnailUploadRequest } from "$lib/server/schemas"; -import { 
uploadFileThumbnail } from "$lib/server/services/file"; -import type { RequestHandler } from "./$types"; - -export const POST: RequestHandler = async ({ locals, params, request }) => { - const { userId } = await authorize(locals, "activeClient"); - - const zodRes = z - .object({ - id: z.coerce.number().int().positive(), - }) - .safeParse(params); - if (!zodRes.success) error(400, "Invalid path parameters"); - const { id } = zodRes.data; - - const contentType = request.headers.get("Content-Type"); - if (!contentType?.startsWith("multipart/form-data") || !request.body) { - error(400, "Invalid request body"); - } - - return new Promise((resolve, reject) => { - const bb = Busboy({ headers: { "content-type": contentType } }); - const handler = - (f: (...args: T) => Promise) => - (...args: T) => { - f(...args).catch(reject); - }; - - let metadata: FileThumbnailUploadRequest | null = null; - let content: Readable | null = null; - bb.on( - "field", - handler(async (fieldname, val) => { - if (fieldname === "metadata") { - // Ignore subsequent metadata fields - if (!metadata) { - const zodRes = fileThumbnailUploadRequest.safeParse(JSON.parse(val)); - if (!zodRes.success) error(400, "Invalid request body"); - metadata = zodRes.data; - } - } else { - error(400, "Invalid request body"); - } - }), - ); - bb.on( - "file", - handler(async (fieldname, file) => { - if (fieldname !== "content") error(400, "Invalid request body"); - if (!metadata || content) error(400, "Invalid request body"); - content = file; - - await uploadFileThumbnail( - userId, - id, - new Date(metadata.dekVersion), - metadata.contentIv, - content, - ); - resolve(text("Thumbnail uploaded", { headers: { "Content-Type": "text/plain" } })); - }), - ); - bb.on("error", (e) => { - content?.emit("error", e) ?? 
reject(e); - }); - - request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error - }); -}; diff --git a/src/routes/api/file/upload/[id]/chunks/[index]/+server.ts b/src/routes/api/upload/[id]/chunks/[index]/+server.ts similarity index 96% rename from src/routes/api/file/upload/[id]/chunks/[index]/+server.ts rename to src/routes/api/upload/[id]/chunks/[index]/+server.ts index c44e425..47d6397 100644 --- a/src/routes/api/file/upload/[id]/chunks/[index]/+server.ts +++ b/src/routes/api/upload/[id]/chunks/[index]/+server.ts @@ -2,7 +2,7 @@ import { error, text } from "@sveltejs/kit"; import { Readable } from "stream"; import { z } from "zod"; import { authorize } from "$lib/server/modules/auth"; -import { uploadChunk } from "$lib/server/services/file"; +import { uploadChunk } from "$lib/server/services/upload"; import type { RequestHandler } from "./$types"; export const POST: RequestHandler = async ({ locals, params, request }) => { diff --git a/src/trpc/router.server.ts b/src/trpc/router.server.ts index 64d25c7..d343fa6 100644 --- a/src/trpc/router.server.ts +++ b/src/trpc/router.server.ts @@ -9,6 +9,7 @@ import { fileRouter, hskRouter, mekRouter, + uploadRouter, userRouter, } from "./routers"; @@ -20,6 +21,7 @@ export const appRouter = router({ file: fileRouter, hsk: hskRouter, mek: mekRouter, + upload: uploadRouter, user: userRouter, }); diff --git a/src/trpc/routers/file.ts b/src/trpc/routers/file.ts index eaf42ca..294300c 100644 --- a/src/trpc/routers/file.ts +++ b/src/trpc/routers/file.ts @@ -1,20 +1,9 @@ import { TRPCError } from "@trpc/server"; -import { createHash } from "crypto"; -import { createReadStream, createWriteStream } from "fs"; -import { mkdir, rm } from "fs/promises"; -import mime from "mime"; -import { dirname } from "path"; -import { v4 as uuidv4 } from "uuid"; import { z } from "zod"; -import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; -import db from "$lib/server/db/kysely"; -import env from "$lib/server/loadenv"; -import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; -import { directoryIdSchema } from "$lib/server/schemas"; +import { FileRepo, MediaRepo, IntegrityError } from "$lib/server/db"; +import { safeUnlink } from "$lib/server/modules/filesystem"; import { router, roleProcedure } from "../init.server"; -const uploadLocks = new Set(); - const fileRouter = router({ get: roleProcedure["activeClient"] .input( @@ -171,137 +160,6 @@ const fileRouter = router({ return { updatedAt: thumbnail.updatedAt }; }), - - startUpload: roleProcedure["activeClient"] - .input( - z.object({ - chunks: z.int().positive(), - parent: directoryIdSchema, - mekVersion: z.int().positive(), - dek: z.base64().nonempty(), - dekVersion: z.date(), - hskVersion: z.int().positive().optional(), - contentType: z - .string() - .trim() - .nonempty() - .refine((value) => mime.getExtension(value) !== null), - name: z.base64().nonempty(), - nameIv: z.base64().nonempty(), - createdAt: z.base64().nonempty().optional(), - createdAtIv: z.base64().nonempty().optional(), - lastModifiedAt: z.base64().nonempty(), - lastModifiedAtIv: z.base64().nonempty(), - }), - ) - .mutation(async ({ ctx, input }) => { - const oneMinuteAgo = new Date(Date.now() - 60 * 1000); - const oneMinuteLater = new Date(Date.now() + 60 * 1000); - if (input.dekVersion <= oneMinuteAgo || input.dekVersion >= oneMinuteLater) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" }); - } - - try { - const { id: sessionId } = await 
UploadRepo.createUploadSession({ - userId: ctx.session.userId, - totalChunks: input.chunks, - expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours - parentId: input.parent, - mekVersion: input.mekVersion, - encDek: input.dek, - dekVersion: input.dekVersion, - hskVersion: input.hskVersion ?? null, - contentType: input.contentType, - encName: { ciphertext: input.name, iv: input.nameIv }, - encCreatedAt: - input.createdAt && input.createdAtIv - ? { ciphertext: input.createdAt, iv: input.createdAtIv } - : null, - encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv }, - }); - await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); - return { uploadId: sessionId }; - } catch (e) { - if (e instanceof IntegrityError) { - if (e.message === "Inactive MEK version") { - throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" }); - } else if (e.message === "Inactive HSK version") { - throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid HSK version" }); - } - } - throw e; - } - }), - - completeUpload: roleProcedure["activeClient"] - .input( - z.object({ - uploadId: z.uuidv4(), - contentHmac: z.base64().nonempty().optional(), - }), - ) - .mutation(async ({ ctx, input }) => { - const { uploadId } = input; - if (uploadLocks.has(uploadId)) { - throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); // TODO: Message - } else { - uploadLocks.add(uploadId); - } - - const filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`; - await mkdir(dirname(filePath), { recursive: true }); - - try { - const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); - if (!session) { - throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); - } else if ( - (session.hskVersion && !input.contentHmac) || - (!session.hskVersion && input.contentHmac) - ) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content hmac" }); // TODO: message - } else if (session.uploadedChunks.length < session.totalChunks) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); // TODO: Message - } - - const chunkDirectoryPath = getChunkDirectoryPath(uploadId); - const hashStream = createHash("sha256"); - const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); - - for (let i = 0; i < session.totalChunks; i++) { - for await (const chunk of createReadStream(`${chunkDirectoryPath}/${i}`)) { - hashStream.update(chunk); - writeStream.write(chunk); - } - } - - await new Promise((resolve, reject) => { - writeStream.end((e: any) => (e ? reject(e) : resolve())); - }); - - const hash = hashStream.digest("base64"); - const fileId = await db.transaction().execute(async (trx) => { - const { id: fileId } = await FileRepo.registerFile(trx, { - ...session, - userId: ctx.session.userId, - path: filePath, - contentHmac: input.contentHmac ?? 
null, - encContentHash: hash, - encContentIv: null, - }); - await UploadRepo.deleteUploadSession(trx, uploadId); - return fileId; - }); - - await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e)); - return { file: fileId }; - } catch (e) { - await safeUnlink(filePath); - throw e; - } finally { - uploadLocks.delete(uploadId); - } - }), }); export default fileRouter; diff --git a/src/trpc/routers/index.ts b/src/trpc/routers/index.ts index ab5b6a0..5c8df24 100644 --- a/src/trpc/routers/index.ts +++ b/src/trpc/routers/index.ts @@ -5,4 +5,5 @@ export { default as directoryRouter } from "./directory"; export { default as fileRouter } from "./file"; export { default as hskRouter } from "./hsk"; export { default as mekRouter } from "./mek"; +export { default as uploadRouter } from "./upload"; export { default as userRouter } from "./user"; diff --git a/src/trpc/routers/upload.ts b/src/trpc/routers/upload.ts new file mode 100644 index 0000000..08d483f --- /dev/null +++ b/src/trpc/routers/upload.ts @@ -0,0 +1,241 @@ +import { TRPCError } from "@trpc/server"; +import { createHash } from "crypto"; +import { createReadStream, createWriteStream } from "fs"; +import { mkdir, rename, rm } from "fs/promises"; +import mime from "mime"; +import { dirname } from "path"; +import { v4 as uuidv4 } from "uuid"; +import { z } from "zod"; +import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; +import db from "$lib/server/db/kysely"; +import env from "$lib/server/loadenv"; +import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; +import { directoryIdSchema } from "$lib/server/schemas"; +import { router, roleProcedure } from "../init.server"; + +const uploadLocks = new Set(); + +const uploadRouter = router({ + startFileUpload: roleProcedure["activeClient"] + .input( + z.object({ + chunks: z.int().positive(), + parent: directoryIdSchema, + mekVersion: z.int().positive(), + dek: z.base64().nonempty(), + dekVersion: z.date(), + hskVersion: z.int().positive().optional(), + contentType: z + .string() + .trim() + .nonempty() + .refine((value) => mime.getExtension(value) !== null), + name: z.base64().nonempty(), + nameIv: z.base64().nonempty(), + createdAt: z.base64().nonempty().optional(), + createdAtIv: z.base64().nonempty().optional(), + lastModifiedAt: z.base64().nonempty(), + lastModifiedAtIv: z.base64().nonempty(), + }), + ) + .mutation(async ({ ctx, input }) => { + const oneMinuteAgo = new Date(Date.now() - 60 * 1000); + const oneMinuteLater = new Date(Date.now() + 60 * 1000); + if (input.dekVersion <= oneMinuteAgo || input.dekVersion >= oneMinuteLater) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" }); + } + + try { + const { id: sessionId } = await UploadRepo.createFileUploadSession({ + userId: ctx.session.userId, + totalChunks: input.chunks, + expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours + parentId: input.parent, + mekVersion: input.mekVersion, + encDek: input.dek, + dekVersion: input.dekVersion, + hskVersion: input.hskVersion ?? null, + contentType: input.contentType, + encName: { ciphertext: input.name, iv: input.nameIv }, + encCreatedAt: + input.createdAt && input.createdAtIv + ? 
{ ciphertext: input.createdAt, iv: input.createdAtIv } + : null, + encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv }, + }); + await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); + return { uploadId: sessionId }; + } catch (e) { + if (e instanceof IntegrityError) { + if (e.message === "Inactive MEK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" }); + } else if (e.message === "Inactive HSK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid HSK version" }); + } + } + throw e; + } + }), + + startFileThumbnailUpload: roleProcedure["activeClient"] + .input( + z.object({ + file: z.int().positive(), + dekVersion: z.date(), + }), + ) + .mutation(async ({ ctx, input }) => { + try { + const { id: sessionId } = await UploadRepo.createThumbnailUploadSession({ + userId: ctx.session.userId, + expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours + fileId: input.file, + dekVersion: input.dekVersion, + }); + await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); + return { uploadId: sessionId }; + } catch (e) { + if (e instanceof IntegrityError) { + if (e.message === "File not found") { + throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); + } else if (e.message === "Invalid DEK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: "Mismatched DEK version" }); + } + } + throw e; + } + }), + + completeFileUpload: roleProcedure["activeClient"] + .input( + z.object({ + uploadId: z.uuidv4(), + contentHmac: z.base64().nonempty().optional(), + }), + ) + .mutation(async ({ ctx, input }) => { + const { uploadId } = input; + if (uploadLocks.has(uploadId)) { + throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); + } else { + uploadLocks.add(uploadId); + } + + const filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`; + await mkdir(dirname(filePath), { recursive: true }); + + try { + const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); + if (!session || session.type !== "file") { + throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); + } else if ( + (session.hskVersion && !input.contentHmac) || + (!session.hskVersion && input.contentHmac) + ) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content hmac" }); + } else if (session.uploadedChunks.length < session.totalChunks) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); + } + + const chunkDirectoryPath = getChunkDirectoryPath(uploadId); + const hashStream = createHash("sha256"); + const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); + + for (let i = 0; i < session.totalChunks; i++) { + for await (const chunk of createReadStream(`${chunkDirectoryPath}/${i}`)) { + hashStream.update(chunk); + writeStream.write(chunk); + } + } + + await new Promise((resolve, reject) => { + writeStream.end((e: any) => (e ? reject(e) : resolve())); + }); + + const hash = hashStream.digest("base64"); + const fileId = await db.transaction().execute(async (trx) => { + const { id: fileId } = await FileRepo.registerFile(trx, { + ...session, + userId: ctx.session.userId, + path: filePath, + contentHmac: input.contentHmac ?? 
null,
+            encContentHash: hash,
+            encContentIv: null,
+          });
+          await UploadRepo.deleteUploadSession(trx, uploadId);
+          return fileId;
+        });
+
+        await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e));
+        return { file: fileId };
+      } catch (e) {
+        await safeUnlink(filePath);
+        throw e;
+      } finally {
+        uploadLocks.delete(uploadId);
+      }
+    }),
+
+  completeFileThumbnailUpload: roleProcedure["activeClient"]
+    .input(
+      z.object({
+        uploadId: z.uuidv4(),
+      }),
+    )
+    .mutation(async ({ ctx, input }) => {
+      const { uploadId } = input;
+      if (uploadLocks.has(uploadId)) {
+        throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" });
+      } else {
+        uploadLocks.add(uploadId);
+      }
+
+      const thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uuidv4()}`;
+      await mkdir(dirname(thumbnailPath), { recursive: true });
+
+      try {
+        const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
+        if (!session || session.type !== "thumbnail") {
+          throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" });
+        } else if (session.uploadedChunks.length < session.totalChunks) {
+          throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" });
+        }
+
+        const chunkDirectoryPath = getChunkDirectoryPath(uploadId);
+        const chunkPath = `${chunkDirectoryPath}/0`;
+
+        // Move chunk file to thumbnail path (IV is prepended to the content)
+        await rename(chunkPath, thumbnailPath);
+
+        // Update thumbnail in database (null IV since it's prepended to the file)
+        const oldPath = await MediaRepo.updateFileThumbnail(
+          ctx.session.userId,
+          session.fileId,
+          session.dekVersion,
+          thumbnailPath,
+          null,
+        );
+        safeUnlink(oldPath); // Intended
+
+        await db.transaction().execute(async (trx) => {
+          await UploadRepo.deleteUploadSession(trx, uploadId);
+        });
+
+        await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e));
+      } catch (e) {
+        await safeUnlink(thumbnailPath);
+        if (e instanceof IntegrityError) {
+          if (e.message === "File not found") {
+            throw new TRPCError({ code: "NOT_FOUND", message: "File not found" });
+          } else if (e.message === "Invalid DEK version") {
+            throw new TRPCError({ code: "BAD_REQUEST", message: "Mismatched DEK version" });
+          }
+        }
+        throw e;
+      } finally {
+        uploadLocks.delete(uploadId);
+      }
+    }),
+});
+
+export default uploadRouter;

From 2801eed556f2c9407d498bfe008e2c6a00ed05f7 Mon Sep 17 00:00:00 2001
From: static
Date: Sun, 11 Jan 2026 14:35:30 +0900
Subject: Minor refactoring
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/lib/constants/serviceWorker.ts                     | 2 +-
 src/lib/modules/crypto/aes.ts                          | 2 +-
 src/lib/modules/crypto/index.ts                        | 2 +-
 src/lib/modules/crypto/rsa.ts                          | 2 +-
 src/lib/modules/crypto/{util.ts => utils.ts}           | 0
 src/lib/modules/key.ts                                 | 6 +++---
 src/lib/schemas/filesystem.ts                          | 4 ++++
 src/lib/schemas/index.ts                               | 1 +
 src/lib/server/db/schema/category.ts                   | 2 +-
 src/lib/server/db/schema/file.ts                       | 2 +-
 src/lib/server/db/schema/index.ts                      | 2 +-
 src/lib/server/db/schema/upload.ts                     | 2 +-
 src/lib/server/db/schema/{util.ts => utils.ts}         | 0
 src/lib/server/schemas/category.ts                     | 3 ---
 src/lib/server/schemas/directory.ts                    | 3 ---
 src/lib/server/schemas/file.ts                         | 7 -------
 src/lib/server/schemas/index.ts                        | 3 ---
 src/routes/api/file/[id]/download/+server.ts           | 2 +-
 src/routes/api/file/[id]/thumbnail/download/+server.ts | 2 +-
 src/trpc/routers/category.ts                           | 6 +++---
src/trpc/routers/directory.ts | 6 +++--- src/trpc/routers/upload.ts | 4 ++-- 22 files changed, 26 insertions(+), 37 deletions(-) rename src/lib/modules/crypto/{util.ts => utils.ts} (100%) create mode 100644 src/lib/schemas/filesystem.ts create mode 100644 src/lib/schemas/index.ts rename src/lib/server/db/schema/{util.ts => utils.ts} (100%) delete mode 100644 src/lib/server/schemas/category.ts delete mode 100644 src/lib/server/schemas/directory.ts delete mode 100644 src/lib/server/schemas/file.ts delete mode 100644 src/lib/server/schemas/index.ts diff --git a/src/lib/constants/serviceWorker.ts b/src/lib/constants/serviceWorker.ts index 8c09d05..abbaa3c 100644 --- a/src/lib/constants/serviceWorker.ts +++ b/src/lib/constants/serviceWorker.ts @@ -1 +1 @@ -export const DECRYPTED_FILE_URL_PREFIX = "/_internal/decrypted-file/"; +export const DECRYPTED_FILE_URL_PREFIX = "/_internal/decryptedFile/"; diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts index 4035343..35687e6 100644 --- a/src/lib/modules/crypto/aes.ts +++ b/src/lib/modules/crypto/aes.ts @@ -5,7 +5,7 @@ import { encodeToBase64, decodeFromBase64, concatenateBuffers, -} from "./util"; +} from "./utils"; export const generateMasterKey = async () => { return { diff --git a/src/lib/modules/crypto/index.ts b/src/lib/modules/crypto/index.ts index e6972ba..e3c27a7 100644 --- a/src/lib/modules/crypto/index.ts +++ b/src/lib/modules/crypto/index.ts @@ -1,4 +1,4 @@ export * from "./aes"; export * from "./rsa"; export * from "./sha"; -export * from "./util"; +export * from "./utils"; diff --git a/src/lib/modules/crypto/rsa.ts b/src/lib/modules/crypto/rsa.ts index 11e136f..78e17db 100644 --- a/src/lib/modules/crypto/rsa.ts +++ b/src/lib/modules/crypto/rsa.ts @@ -1,4 +1,4 @@ -import { encodeString, encodeToBase64, decodeFromBase64 } from "./util"; +import { encodeString, encodeToBase64, decodeFromBase64 } from "./utils"; export const generateEncryptionKeyPair = async () => { const keyPair = await crypto.subtle.generateKey( diff --git a/src/lib/modules/crypto/util.ts b/src/lib/modules/crypto/utils.ts similarity index 100% rename from src/lib/modules/crypto/util.ts rename to src/lib/modules/crypto/utils.ts diff --git a/src/lib/modules/key.ts b/src/lib/modules/key.ts index d5276a5..ca84477 100644 --- a/src/lib/modules/key.ts +++ b/src/lib/modules/key.ts @@ -2,7 +2,7 @@ import { z } from "zod"; import { storeClientKey } from "$lib/indexedDB"; import type { ClientKeys } from "$lib/stores"; -const serializedClientKeysSchema = z.intersection( +const SerializedClientKeysSchema = z.intersection( z.object({ generator: z.literal("ArkVault"), exportedAt: z.iso.datetime(), @@ -16,7 +16,7 @@ const serializedClientKeysSchema = z.intersection( }), ); -type SerializedClientKeys = z.infer; +type SerializedClientKeys = z.infer; type DeserializedClientKeys = { encryptKeyBase64: string; @@ -43,7 +43,7 @@ export const serializeClientKeys = ({ }; export const deserializeClientKeys = (serialized: string) => { - const zodRes = serializedClientKeysSchema.safeParse(JSON.parse(serialized)); + const zodRes = SerializedClientKeysSchema.safeParse(JSON.parse(serialized)); if (zodRes.success) { return { encryptKeyBase64: zodRes.data.encryptKey, diff --git a/src/lib/schemas/filesystem.ts b/src/lib/schemas/filesystem.ts new file mode 100644 index 0000000..d3a45f4 --- /dev/null +++ b/src/lib/schemas/filesystem.ts @@ -0,0 +1,4 @@ +import { z } from "zod"; + +export const DirectoryIdSchema = z.union([z.literal("root"), z.int().positive()]); +export const 
CategoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/schemas/index.ts b/src/lib/schemas/index.ts new file mode 100644 index 0000000..7d29e5d --- /dev/null +++ b/src/lib/schemas/index.ts @@ -0,0 +1 @@ +export * from "./filesystem"; diff --git a/src/lib/server/db/schema/category.ts b/src/lib/server/db/schema/category.ts index 2304264..ccaba95 100644 --- a/src/lib/server/db/schema/category.ts +++ b/src/lib/server/db/schema/category.ts @@ -1,5 +1,5 @@ import type { Generated } from "kysely"; -import type { Ciphertext } from "./util"; +import type { Ciphertext } from "./utils"; interface CategoryTable { id: Generated; diff --git a/src/lib/server/db/schema/file.ts b/src/lib/server/db/schema/file.ts index 663aacd..0774082 100644 --- a/src/lib/server/db/schema/file.ts +++ b/src/lib/server/db/schema/file.ts @@ -1,5 +1,5 @@ import type { ColumnType, Generated } from "kysely"; -import type { Ciphertext } from "./util"; +import type { Ciphertext } from "./utils"; interface DirectoryTable { id: Generated; diff --git a/src/lib/server/db/schema/index.ts b/src/lib/server/db/schema/index.ts index dcc340b..7a13395 100644 --- a/src/lib/server/db/schema/index.ts +++ b/src/lib/server/db/schema/index.ts @@ -7,7 +7,7 @@ export * from "./mek"; export * from "./session"; export * from "./upload"; export * from "./user"; -export * from "./util"; +export * from "./utils"; // eslint-disable-next-line @typescript-eslint/no-empty-object-type export interface Database {} diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts index 26eaac2..369c385 100644 --- a/src/lib/server/db/schema/upload.ts +++ b/src/lib/server/db/schema/upload.ts @@ -1,5 +1,5 @@ import type { Generated } from "kysely"; -import type { Ciphertext } from "./util"; +import type { Ciphertext } from "./utils"; interface UploadSessionTable { id: Generated; diff --git a/src/lib/server/db/schema/util.ts b/src/lib/server/db/schema/utils.ts similarity index 100% rename from src/lib/server/db/schema/util.ts rename to src/lib/server/db/schema/utils.ts diff --git a/src/lib/server/schemas/category.ts b/src/lib/server/schemas/category.ts deleted file mode 100644 index 0bb07a7..0000000 --- a/src/lib/server/schemas/category.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { z } from "zod"; - -export const categoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/server/schemas/directory.ts b/src/lib/server/schemas/directory.ts deleted file mode 100644 index dba44b9..0000000 --- a/src/lib/server/schemas/directory.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { z } from "zod"; - -export const directoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/server/schemas/file.ts b/src/lib/server/schemas/file.ts deleted file mode 100644 index 8ba14e7..0000000 --- a/src/lib/server/schemas/file.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { z } from "zod"; - -export const fileThumbnailUploadRequest = z.object({ - dekVersion: z.iso.datetime(), - contentIv: z.base64().nonempty(), -}); -export type FileThumbnailUploadRequest = z.input; diff --git a/src/lib/server/schemas/index.ts b/src/lib/server/schemas/index.ts deleted file mode 100644 index f7a2bc1..0000000 --- a/src/lib/server/schemas/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export * from "./category"; -export * from "./directory"; -export * from "./file"; diff --git a/src/routes/api/file/[id]/download/+server.ts b/src/routes/api/file/[id]/download/+server.ts index 68191ef..5324365 100644 --- 
a/src/routes/api/file/[id]/download/+server.ts +++ b/src/routes/api/file/[id]/download/+server.ts @@ -1,7 +1,7 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; -import { authorize } from "$lib/server/modules/auth"; import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; +import { authorize } from "$lib/server/modules/auth"; import { getFileStream } from "$lib/server/services/file"; import type { RequestHandler } from "./$types"; diff --git a/src/routes/api/file/[id]/thumbnail/download/+server.ts b/src/routes/api/file/[id]/thumbnail/download/+server.ts index 4fc7c1a..85cdd8c 100644 --- a/src/routes/api/file/[id]/thumbnail/download/+server.ts +++ b/src/routes/api/file/[id]/thumbnail/download/+server.ts @@ -1,7 +1,7 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; -import { authorize } from "$lib/server/modules/auth"; import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; +import { authorize } from "$lib/server/modules/auth"; import { getFileThumbnailStream } from "$lib/server/services/file"; import type { RequestHandler } from "./$types"; diff --git a/src/trpc/routers/category.ts b/src/trpc/routers/category.ts index a292889..34887f7 100644 --- a/src/trpc/routers/category.ts +++ b/src/trpc/routers/category.ts @@ -1,14 +1,14 @@ import { TRPCError } from "@trpc/server"; import { z } from "zod"; +import { CategoryIdSchema } from "$lib/schemas"; import { CategoryRepo, FileRepo, IntegrityError } from "$lib/server/db"; -import { categoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; const categoryRouter = router({ get: roleProcedure["activeClient"] .input( z.object({ - id: categoryIdSchema, + id: CategoryIdSchema, recurse: z.boolean().default(false), }), ) @@ -65,7 +65,7 @@ const categoryRouter = router({ create: roleProcedure["activeClient"] .input( z.object({ - parent: categoryIdSchema, + parent: CategoryIdSchema, mekVersion: z.int().positive(), dek: z.base64().nonempty(), dekVersion: z.date(), diff --git a/src/trpc/routers/directory.ts b/src/trpc/routers/directory.ts index 6e1e358..15f16f3 100644 --- a/src/trpc/routers/directory.ts +++ b/src/trpc/routers/directory.ts @@ -1,15 +1,15 @@ import { TRPCError } from "@trpc/server"; import { z } from "zod"; +import { DirectoryIdSchema } from "$lib/schemas"; import { FileRepo, IntegrityError } from "$lib/server/db"; import { safeUnlink } from "$lib/server/modules/filesystem"; -import { directoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; const directoryRouter = router({ get: roleProcedure["activeClient"] .input( z.object({ - id: directoryIdSchema, + id: DirectoryIdSchema, }), ) .query(async ({ ctx, input }) => { @@ -59,7 +59,7 @@ const directoryRouter = router({ create: roleProcedure["activeClient"] .input( z.object({ - parent: directoryIdSchema, + parent: DirectoryIdSchema, mekVersion: z.int().positive(), dek: z.base64().nonempty(), dekVersion: z.date(), diff --git a/src/trpc/routers/upload.ts b/src/trpc/routers/upload.ts index 08d483f..7a1680b 100644 --- a/src/trpc/routers/upload.ts +++ b/src/trpc/routers/upload.ts @@ -6,11 +6,11 @@ import mime from "mime"; import { dirname } from "path"; import { v4 as uuidv4 } from "uuid"; import { z } from "zod"; +import { DirectoryIdSchema } from "$lib/schemas"; import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; import db from "$lib/server/db/kysely"; import env from "$lib/server/loadenv"; import { 
getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; -import { directoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; const uploadLocks = new Set(); @@ -20,7 +20,7 @@ const uploadRouter = router({ .input( z.object({ chunks: z.int().positive(), - parent: directoryIdSchema, + parent: DirectoryIdSchema, mekVersion: z.int().positive(), dek: z.base64().nonempty(), dekVersion: z.date(),
From 83369f83e3adba13051f1f5c1f14893042532b24 Mon Sep 17 00:00:00 2001
From: static Date: Sun, 11 Jan 2026 15:16:03 +0900
Subject: [PATCH 08/19] Store chunk upload paths in the DB
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit
--- src/hooks.server.ts | 2 +- src/lib/constants/upload.ts | 2 +- src/lib/server/db/media.ts | 73 ++++++------ .../migrations/1768062380-AddChunkedUpload.ts | 3 +- src/lib/server/db/schema/upload.ts | 3 +- src/lib/server/db/upload.ts | 42 +++---- src/lib/server/modules/filesystem.ts | 9 +- src/lib/server/services/upload.ts | 23 ++-- src/trpc/routers/upload.ts | 104 ++++++++++-------- 9 files changed, 143 insertions(+), 118 deletions(-)
diff --git a/src/hooks.server.ts b/src/hooks.server.ts index 1795ce3..b816f7f 100644 --- a/src/hooks.server.ts +++ b/src/hooks.server.ts @@ -7,7 +7,7 @@ import { cleanupExpiredSessions, cleanupExpiredSessionUpgradeChallenges, } from "$lib/server/db/session"; -import { cleanupExpiredUploadSessions } from "$lib/server/db/upload"; +import { cleanupExpiredUploadSessions } from "$lib/server/services/upload"; import { authenticate, setAgentInfo } from "$lib/server/middlewares"; export const init: ServerInit = async () => {
diff --git a/src/lib/constants/upload.ts b/src/lib/constants/upload.ts index 99d94bb..57934d6 100644 --- a/src/lib/constants/upload.ts +++ b/src/lib/constants/upload.ts @@ -2,5 +2,5 @@ export const AES_GCM_IV_SIZE = 12; export const AES_GCM_TAG_SIZE = 16; export const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE; -export const CHUNK_SIZE = 4 * 1024 * 1024; +export const CHUNK_SIZE = 4 * 1024 * 1024; // 4 MiB export const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD;
diff --git a/src/lib/server/db/media.ts b/src/lib/server/db/media.ts index c4d2a34..3e165c0 100644 --- a/src/lib/server/db/media.ts +++ b/src/lib/server/db/media.ts @@ -14,54 +14,53 @@ interface FileThumbnail extends Thumbnail { } export const updateFileThumbnail = async ( + trx: typeof db, userId: number, fileId: number, dekVersion: Date, path: string, encContentIv: string | null, ) => { - return await db.transaction().execute(async (trx) => { - const file = await trx - .selectFrom("file") - .select("data_encryption_key_version") - .where("id", "=", fileId) - .where("user_id", "=", userId) - .limit(1) - .forUpdate() - .executeTakeFirst(); - if (!file) { - throw new IntegrityError("File not found"); - } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) { - throw new IntegrityError("Invalid DEK version"); - } + const file = await trx + .selectFrom("file") + .select("data_encryption_key_version") + .where("id", "=", fileId) + .where("user_id", "=", userId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (!file) { + throw new IntegrityError("File not found"); + } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) { 
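/* [Editor's note -- worked arithmetic for the constants above; not part of the
   patch] With a 12-byte AES-GCM IV and a 16-byte auth tag:

     ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD
                          = 4 * 1024 * 1024 + (12 + 16)
                          = 4_194_332 bytes

   so every non-final encrypted chunk carries exactly 4 MiB of plaintext, and
   the size checks in services/upload.ts below can reject malformed chunks. */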
throw new IntegrityError("Invalid DEK version"); + } - const thumbnail = await trx - .selectFrom("thumbnail") - .select("path as oldPath") - .where("file_id", "=", fileId) - .limit(1) - .forUpdate() - .executeTakeFirst(); - const now = new Date(); + const thumbnail = await trx + .selectFrom("thumbnail") + .select("path as oldPath") + .where("file_id", "=", fileId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + const now = new Date(); - await trx - .insertInto("thumbnail") - .values({ - file_id: fileId, + await trx + .insertInto("thumbnail") + .values({ + file_id: fileId, + path, + updated_at: now, + encrypted_content_iv: encContentIv, + }) + .onConflict((oc) => + oc.column("file_id").doUpdateSet({ path, updated_at: now, encrypted_content_iv: encContentIv, - }) - .onConflict((oc) => - oc.column("file_id").doUpdateSet({ - path, - updated_at: now, - encrypted_content_iv: encContentIv, - }), - ) - .execute(); - return thumbnail?.oldPath ?? null; - }); + }), + ) + .execute(); + return thumbnail?.oldPath ?? null; }; export const getFileThumbnail = async (userId: number, fileId: number) => { diff --git a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts index cf18c05..be6a900 100644 --- a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts +++ b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts @@ -17,9 +17,10 @@ export const up = async (db: Kysely) => { // upload.ts await db.schema .createTable("upload_session") - .addColumn("id", "uuid", (col) => col.primaryKey().defaultTo(sql`gen_random_uuid()`)) + .addColumn("id", "uuid", (col) => col.primaryKey()) .addColumn("type", "text", (col) => col.notNull()) .addColumn("user_id", "integer", (col) => col.references("user.id").notNull()) + .addColumn("path", "text", (col) => col.notNull()) .addColumn("total_chunks", "integer", (col) => col.notNull()) .addColumn("uploaded_chunks", sql`integer[]`, (col) => col.notNull().defaultTo(sql`'{}'`)) .addColumn("expires_at", "timestamp(3)", (col) => col.notNull()) diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts index 369c385..fccde36 100644 --- a/src/lib/server/db/schema/upload.ts +++ b/src/lib/server/db/schema/upload.ts @@ -2,9 +2,10 @@ import type { Generated } from "kysely"; import type { Ciphertext } from "./utils"; interface UploadSessionTable { - id: Generated; + id: string; type: "file" | "thumbnail"; user_id: number; + path: string; total_chunks: number; uploaded_chunks: Generated; expires_at: Date; diff --git a/src/lib/server/db/upload.ts b/src/lib/server/db/upload.ts index 4c8da24..d506191 100644 --- a/src/lib/server/db/upload.ts +++ b/src/lib/server/db/upload.ts @@ -6,6 +6,7 @@ import type { Ciphertext } from "./schema"; interface BaseUploadSession { id: string; userId: number; + path: string; totalChunks: number; uploadedChunks: number[]; expiresAt: Date; @@ -31,9 +32,9 @@ interface ThumbnailUploadSession extends BaseUploadSession { } export const createFileUploadSession = async ( - params: Omit, + params: Omit, ) => { - return await db.transaction().execute(async (trx) => { + await db.transaction().execute(async (trx) => { const mek = await trx .selectFrom("master_encryption_key") .select("version") @@ -60,11 +61,13 @@ export const createFileUploadSession = async ( } } - const { sessionId } = await trx + await trx .insertInto("upload_session") .values({ + id: params.id, type: "file", user_id: params.userId, + path: params.path, total_chunks: params.totalChunks, expires_at: 
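/* [Editor's note] Persisting `path` on the session row is what lets expired
   sessions be swept together with their on-disk chunk directories; the patch
   does exactly that in src/lib/server/services/upload.ts further down:

     const paths = await UploadRepo.cleanupExpiredUploadSessions();
     await Promise.all(paths.map(safeRecursiveRm));
*/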
params.expiresAt, parent_id: params.parentId !== "root" ? params.parentId : null, @@ -77,16 +80,14 @@ export const createFileUploadSession = async ( encrypted_created_at: params.encCreatedAt, encrypted_last_modified_at: params.encLastModifiedAt, }) - .returning("id as sessionId") - .executeTakeFirstOrThrow(); - return { id: sessionId }; + .execute(); }); }; export const createThumbnailUploadSession = async ( - params: Omit, + params: Omit, ) => { - return await db.transaction().execute(async (trx) => { + await db.transaction().execute(async (trx) => { const file = await trx .selectFrom("file") .select("data_encryption_key_version") @@ -101,19 +102,19 @@ export const createThumbnailUploadSession = async ( throw new IntegrityError("Invalid DEK version"); } - const { sessionId } = await trx + await trx .insertInto("upload_session") .values({ + id: params.id, type: "thumbnail", user_id: params.userId, - total_chunks: 1, + path: params.path, + total_chunks: params.totalChunks, expires_at: params.expiresAt, file_id: params.fileId, data_encryption_key_version: params.dekVersion, }) - .returning("id as sessionId") - .executeTakeFirstOrThrow(); - return { id: sessionId }; + .execute(); }); }; @@ -126,14 +127,14 @@ export const getUploadSession = async (sessionId: string, userId: number) => { .where("expires_at", ">", new Date()) .limit(1) .executeTakeFirst(); - - if (!session) return null; - - if (session.type === "file") { + if (!session) { + return null; + } else if (session.type === "file") { return { type: "file", id: session.id, userId: session.user_id, + path: session.path, totalChunks: session.total_chunks, uploadedChunks: session.uploaded_chunks, expiresAt: session.expires_at, @@ -152,6 +153,7 @@ export const getUploadSession = async (sessionId: string, userId: number) => { type: "thumbnail", id: session.id, userId: session.user_id, + path: session.path, totalChunks: session.total_chunks, uploadedChunks: session.uploaded_chunks, expiresAt: session.expires_at, @@ -176,8 +178,8 @@ export const deleteUploadSession = async (trx: typeof db, sessionId: string) => export const cleanupExpiredUploadSessions = async () => { const sessions = await db .deleteFrom("upload_session") - .where("expires_at", "<", new Date()) - .returning("id") + .where("expires_at", "<=", new Date()) + .returning("path") .execute(); - return sessions.map(({ id }) => id); + return sessions.map(({ path }) => path); }; diff --git a/src/lib/server/modules/filesystem.ts b/src/lib/server/modules/filesystem.ts index b87fd65..ade7d73 100644 --- a/src/lib/server/modules/filesystem.ts +++ b/src/lib/server/modules/filesystem.ts @@ -1,7 +1,10 @@ -import { unlink } from "fs/promises"; -import env from "$lib/server/loadenv"; +import { rm, unlink } from "fs/promises"; -export const getChunkDirectoryPath = (sessionId: string) => `${env.uploadsPath}/${sessionId}`; +export const safeRecursiveRm = async (path: string | null | undefined) => { + if (path) { + await rm(path, { recursive: true }).catch(console.error); + } +}; export const safeUnlink = async (path: string | null | undefined) => { if (path) { diff --git a/src/lib/server/services/upload.ts b/src/lib/server/services/upload.ts index 1be250d..1f7043b 100644 --- a/src/lib/server/services/upload.ts +++ b/src/lib/server/services/upload.ts @@ -2,9 +2,9 @@ import { error } from "@sveltejs/kit"; import { createHash } from "crypto"; import { createWriteStream } from "fs"; import { Readable } from "stream"; -import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants"; +import { 
ENCRYPTION_OVERHEAD, ENCRYPTED_CHUNK_SIZE } from "$lib/constants"; import { UploadRepo } from "$lib/server/db"; -import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; +import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem"; const chunkLocks = new Set(); @@ -17,12 +17,12 @@ export const uploadChunk = async ( ) => { const lockKey = `${sessionId}/${chunkIndex}`; if (chunkLocks.has(lockKey)) { - error(409, "Chunk already uploaded"); // TODO: Message + error(409, "Chunk upload already in progress"); } else { chunkLocks.add(lockKey); } - const filePath = `${getChunkDirectoryPath(sessionId)}/${chunkIndex}`; + let filePath; try { const session = await UploadRepo.getUploadSession(sessionId, userId); @@ -35,15 +35,16 @@ export const uploadChunk = async ( } const isLastChunk = chunkIndex === session.totalChunks - 1; + filePath = `${session.path}/${chunkIndex}`; - let writtenBytes = 0; const hashStream = createHash("sha256"); const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); + let writtenBytes = 0; for await (const chunk of encChunkStream) { - writtenBytes += chunk.length; hashStream.update(chunk); writeStream.write(chunk); + writtenBytes += chunk.length; } await new Promise((resolve, reject) => { @@ -53,9 +54,8 @@ export const uploadChunk = async ( if (hashStream.digest("base64") !== encChunkHash) { throw new Error("Invalid checksum"); } else if ( - (!isLastChunk && writtenBytes !== CHUNK_SIZE + ENCRYPTION_OVERHEAD) || - (isLastChunk && - (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > CHUNK_SIZE + ENCRYPTION_OVERHEAD)) + (!isLastChunk && writtenBytes !== ENCRYPTED_CHUNK_SIZE) || + (isLastChunk && (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > ENCRYPTED_CHUNK_SIZE)) ) { throw new Error("Invalid chunk size"); } @@ -75,3 +75,8 @@ export const uploadChunk = async ( chunkLocks.delete(lockKey); } }; + +export const cleanupExpiredUploadSessions = async () => { + const paths = await UploadRepo.cleanupExpiredUploadSessions(); + await Promise.all(paths.map(safeRecursiveRm)); +}; diff --git a/src/trpc/routers/upload.ts b/src/trpc/routers/upload.ts index 7a1680b..168e957 100644 --- a/src/trpc/routers/upload.ts +++ b/src/trpc/routers/upload.ts @@ -1,7 +1,7 @@ import { TRPCError } from "@trpc/server"; import { createHash } from "crypto"; import { createReadStream, createWriteStream } from "fs"; -import { mkdir, rename, rm } from "fs/promises"; +import { mkdir, rename } from "fs/promises"; import mime from "mime"; import { dirname } from "path"; import { v4 as uuidv4 } from "uuid"; @@ -10,10 +10,17 @@ import { DirectoryIdSchema } from "$lib/schemas"; import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; import db from "$lib/server/db/kysely"; import env from "$lib/server/loadenv"; -import { getChunkDirectoryPath, safeUnlink } from "$lib/server/modules/filesystem"; +import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem"; import { router, roleProcedure } from "../init.server"; -const uploadLocks = new Set(); +const sessionLocks = new Set(); + +const generateSessionId = async () => { + const id = uuidv4(); + const path = `${env.uploadsPath}/${id}`; + await mkdir(path, { recursive: true }); + return { id, path }; +}; const uploadRouter = router({ startFileUpload: roleProcedure["activeClient"] @@ -45,9 +52,13 @@ const uploadRouter = router({ throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" }); } + const { id, path } = await generateSessionId(); + try 
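/* [Editor's note -- lifecycle summary using only names visible in this patch]
   generateSessionId() mints a UUID and creates `${env.uploadsPath}/${id}`; the
   session row is inserted with that id and path; chunks are then written to
   `${session.path}/${chunkIndex}` by uploadChunk(); and completion streams
   chunks 0..totalChunks-1 into a single library file. If the insert below
   fails, safeRecursiveRm(path) removes the fresh directory again. */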
{ - const { id: sessionId } = await UploadRepo.createFileUploadSession({ + await UploadRepo.createFileUploadSession({ + id, userId: ctx.session.userId, + path, totalChunks: input.chunks, expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours parentId: input.parent, @@ -63,9 +74,10 @@ const uploadRouter = router({ : null, encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv }, }); - await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); - return { uploadId: sessionId }; + return { uploadId: id }; } catch (e) { + await safeRecursiveRm(path); + if (e instanceof IntegrityError) { if (e.message === "Inactive MEK version") { throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" }); @@ -85,16 +97,22 @@ const uploadRouter = router({ }), ) .mutation(async ({ ctx, input }) => { + const { id, path } = await generateSessionId(); + try { - const { id: sessionId } = await UploadRepo.createThumbnailUploadSession({ + await UploadRepo.createThumbnailUploadSession({ + id, userId: ctx.session.userId, + path, + totalChunks: 1, // Up to 4 MiB expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours fileId: input.file, dekVersion: input.dekVersion, }); - await mkdir(getChunkDirectoryPath(sessionId), { recursive: true }); - return { uploadId: sessionId }; + return { uploadId: id }; } catch (e) { + await safeRecursiveRm(path); + if (e instanceof IntegrityError) { if (e.message === "File not found") { throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); @@ -115,14 +133,13 @@ const uploadRouter = router({ ) .mutation(async ({ ctx, input }) => { const { uploadId } = input; - if (uploadLocks.has(uploadId)) { - throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); + if (sessionLocks.has(uploadId)) { + throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" }); } else { - uploadLocks.add(uploadId); + sessionLocks.add(uploadId); } - const filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`; - await mkdir(dirname(filePath), { recursive: true }); + let filePath = ""; try { const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); @@ -132,17 +149,19 @@ const uploadRouter = router({ (session.hskVersion && !input.contentHmac) || (!session.hskVersion && input.contentHmac) ) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content hmac" }); + throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content HMAC" }); } else if (session.uploadedChunks.length < session.totalChunks) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); + throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); } - const chunkDirectoryPath = getChunkDirectoryPath(uploadId); + filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`; + await mkdir(dirname(filePath), { recursive: true }); + const hashStream = createHash("sha256"); const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); for (let i = 0; i < session.totalChunks; i++) { - for await (const chunk of createReadStream(`${chunkDirectoryPath}/${i}`)) { + for await (const chunk of createReadStream(`${session.path}/${i}`)) { hashStream.update(chunk); writeStream.write(chunk); } @@ -166,13 +185,13 @@ const uploadRouter = router({ return fileId; }); - await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e)); + await safeRecursiveRm(session.path); return { file: fileId }; } catch (e) { 
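/* [Editor's note] filePath is initialized to "" above, and safeUnlink() skips
   falsy paths (see src/lib/server/modules/filesystem.ts), so this cleanup is a
   safe no-op when the error occurred before the library file was created. */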
await safeUnlink(filePath); throw e; } finally { - uploadLocks.delete(uploadId); + sessionLocks.delete(uploadId); } }), @@ -184,44 +203,39 @@ ) .mutation(async ({ ctx, input }) => { const { uploadId } = input; - if (uploadLocks.has(uploadId)) { - throw new TRPCError({ code: "CONFLICT", message: "Upload already in progress" }); + if (sessionLocks.has(uploadId)) { + throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" }); } else { - uploadLocks.add(uploadId); + sessionLocks.add(uploadId); } - const thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uuidv4()}`; - await mkdir(dirname(thumbnailPath), { recursive: true }); + let thumbnailPath = ""; try { const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); if (!session || session.type !== "thumbnail") { throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); } else if (session.uploadedChunks.length < session.totalChunks) { - throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not complete" }); + throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); } - const chunkDirectoryPath = getChunkDirectoryPath(uploadId); - const chunkPath = `${chunkDirectoryPath}/0`; + thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uploadId}`; + await mkdir(dirname(thumbnailPath), { recursive: true }); + await rename(`${session.path}/0`, thumbnailPath); - // Move chunk file to thumbnail path (IV is prepended to the content) - await rename(chunkPath, thumbnailPath); - - // Update thumbnail in database (null IV since it's prepended to the file) - const oldPath = await MediaRepo.updateFileThumbnail( - ctx.session.userId, - session.fileId, - session.dekVersion, - thumbnailPath, - null, - ); - safeUnlink(oldPath); // Intended - - await db.transaction().execute(async (trx) => { + const oldThumbnailPath = await db.transaction().execute(async (trx) => { + const oldPath = await MediaRepo.updateFileThumbnail( + trx, + ctx.session.userId, + session.fileId, + session.dekVersion, + thumbnailPath, + null, + ); await UploadRepo.deleteUploadSession(trx, uploadId); + return oldPath; }); - - await rm(chunkDirectoryPath, { recursive: true }).catch((e) => console.error(e)); + await Promise.all([safeUnlink(oldThumbnailPath), safeRecursiveRm(session.path)]); } catch (e) { await safeUnlink(thumbnailPath); if (e instanceof IntegrityError) { @@ -233,7 +247,7 @@ } throw e; } finally { - uploadLocks.delete(uploadId); + sessionLocks.delete(uploadId); } }), });
From 80368c3a295eb7ab7edfe3f544e6b2ba28cfc449 Mon Sep 17 00:00:00 2001
From: static Date: Sun, 11 Jan 2026 15:54:05 +0900
Subject: [PATCH 09/19] Minor refactoring 2
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit
--- src/lib/modules/crypto/sha.ts | 18 ++-- src/lib/modules/file/upload.svelte.ts | 129 +++++++------------------- src/lib/modules/thumbnail.ts | 11 +-- 3 files changed, 46 insertions(+), 112 deletions(-)
diff --git a/src/lib/modules/crypto/sha.ts b/src/lib/modules/crypto/sha.ts index 883ac10..61c2ed7 100644 --- a/src/lib/modules/crypto/sha.ts +++ b/src/lib/modules/crypto/sha.ts @@ -5,16 +5,6 @@ export const digestMessage = async (message: BufferSource) => { return await crypto.subtle.digest("SHA-256", message); }; -export const createStreamingHmac = async (hmacSecret: CryptoKey) => { - const keyBytes = new Uint8Array(await 
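/* [Editor's note -- illustrative caller for the renamed helper, mirroring the
   duplicate-scan loop in upload.svelte.ts; `file` and `hmacSecret` are assumed
   bindings here, and the final encoding step is a sketch rather than the exact
   code from the patch:

     const hmacStream = await createHmacStream(hmacSecret.secret);
     const reader = file.stream().getReader();
     while (true) {
       const { done, value } = await reader.read();
       if (done) break;
       hmacStream.update(value);
     }
     const contentHmac = encodeToBase64(hmacStream.digest().buffer);
*/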
crypto.subtle.exportKey("raw", hmacSecret)); - const h = hmac.create(sha256, keyBytes); - - return { - update: (data: Uint8Array) => h.update(data), - digest: () => h.digest(), - }; -}; - export const generateHmacSecret = async () => { return { hmacSecret: await crypto.subtle.generateKey( @@ -28,6 +18,10 @@ export const generateHmacSecret = async () => { }; }; -export const signMessageHmac = async (message: BufferSource, hmacSecret: CryptoKey) => { - return await crypto.subtle.sign("HMAC", hmacSecret, message); +export const createHmacStream = async (hmacSecret: CryptoKey) => { + const h = hmac.create(sha256, new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret))); + return { + update: (data: Uint8Array) => h.update(data), + digest: () => h.digest(), + }; }; diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index eaa35df..4bea638 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -9,8 +9,7 @@ import { encryptString, encryptChunk, digestMessage, - signMessageHmac, - createStreamingHmac, + createHmacStream, } from "$lib/modules/crypto"; import { Scheduler } from "$lib/modules/scheduler"; import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail"; @@ -60,27 +59,7 @@ export const clearUploadedFiles = () => { const requestDuplicateFileScan = limitFunction( async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise) => { - const fileBuffer = await file.arrayBuffer(); - const fileSigned = encodeToBase64(await signMessageHmac(fileBuffer, hmacSecret.secret)); - - const files = await trpc().file.listByHash.query({ - hskVersion: hmacSecret.version, - contentHmac: fileSigned, - }); - if (files.length === 0 || (await onDuplicate())) { - return { fileBuffer, fileSigned }; - } else { - return {}; - } - }, - { concurrency: 1 }, -); - -const isImageFile = (fileType: string) => fileType.startsWith("image/"); - -const requestDuplicateFileScanStreaming = limitFunction( - async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise) => { - const hmacStream = await createStreamingHmac(hmacSecret.secret); + const hmacStream = await createHmacStream(hmacSecret.secret); const reader = file.stream().getReader(); while (true) { @@ -152,16 +131,12 @@ const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { return chunksEncrypted; }; -const encryptFile = limitFunction( - async (state: FileUploadState, file: File, fileBuffer: ArrayBuffer, masterKey: MasterKey) => { +const encryptImageFile = limitFunction( + async (state: FileUploadState, file: File, masterKey: MasterKey) => { state.status = "encrypting"; - const fileType = getFileType(file); - - let createdAt; - if (fileType.startsWith("image/")) { - createdAt = extractExifDateTime(fileBuffer); - } + const fileBuffer = await file.arrayBuffer(); + const createdAt = extractExifDateTime(fileBuffer); const { dataKey, dataKeyVersion } = await generateDataKey(); const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); @@ -172,7 +147,7 @@ const encryptFile = limitFunction( createdAt && (await encryptString(createdAt.getTime().toString(), dataKey)); const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); - const thumbnail = await generateThumbnail(fileBuffer, fileType); + const thumbnail = await generateThumbnail(fileBuffer, getFileType(file)); const thumbnailBuffer = await thumbnail?.arrayBuffer(); const thumbnailEncrypted = thumbnailBuffer && (await 
encryptData(thumbnailBuffer, dataKey)); @@ -181,7 +156,6 @@ const encryptFile = limitFunction( return { dataKeyWrapped, dataKeyVersion, - fileType, chunksEncrypted, nameEncrypted, createdAtEncrypted, @@ -229,7 +203,7 @@ const uploadThumbnail = async ( await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); }; -const requestFileUpload = limitFunction( +const requestImageFileUpload = limitFunction( async ( state: FileUploadState, metadata: RouterInputs["upload"]["startFileUpload"], @@ -242,7 +216,6 @@ const requestFileUpload = limitFunction( const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); - // Upload chunks with progress tracking const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0); let uploadedBytes = 0; const startTime = Date.now(); @@ -265,9 +238,8 @@ const requestFileUpload = limitFunction( uploadedBytes += chunkEncrypted.byteLength; - // Calculate progress, rate, estimated - const elapsed = (Date.now() - startTime) / 1000; // seconds - const rate = uploadedBytes / elapsed; // bytes per second + const elapsed = (Date.now() - startTime) / 1000; + const rate = uploadedBytes / elapsed; const remaining = totalBytes - uploadedBytes; const estimated = rate > 0 ? remaining / rate : undefined; @@ -276,13 +248,11 @@ const requestFileUpload = limitFunction( state.estimated = estimated; } - // Complete upload const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ uploadId, contentHmac: fileSigned, }); - // Upload thumbnail if exists if (thumbnailData) { try { await uploadThumbnail(fileId, thumbnailData, dataKeyVersion); @@ -299,7 +269,7 @@ const requestFileUpload = limitFunction( { concurrency: 1 }, ); -const uploadFileStreaming = async ( +const requestFileUpload = async ( state: FileUploadState, file: File, masterKey: MasterKey, @@ -316,7 +286,6 @@ const uploadFileStreaming = async ( const nameEncrypted = await encryptString(file.name, dataKey); const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); - // Calculate total chunks for metadata const totalChunks = Math.ceil(file.size / CHUNK_SIZE); const metadata = { chunks: totalChunks, @@ -334,7 +303,6 @@ const uploadFileStreaming = async ( const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); - // Stream file, encrypt, and upload with concurrency limit const reader = file.stream().getReader(); const limit = pLimit(4); let buffer = new Uint8Array(0); @@ -364,7 +332,6 @@ const uploadFileStreaming = async ( throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`); } - // Update progress after upload completes uploadedBytes += originalChunkSize; const elapsed = (Date.now() - startTime) / 1000; const rate = uploadedBytes / elapsed; @@ -411,7 +378,6 @@ const uploadFileStreaming = async ( contentHmac: fileSigned, }); - // Generate and upload thumbnail for video files if (fileType.startsWith("video/")) { try { const thumbnail = await generateThumbnailFromFile(file); @@ -446,35 +412,29 @@ export const uploadFile = async ( }); const state = uploadingFiles.at(-1)!; - const fileType = getFileType(file); + return await scheduler.schedule(file.size, async () => { + state.status = "encryption-pending"; - // Image files: use buffer-based approach (need EXIF + thumbnail) - if (isImageFile(fileType)) { - return await scheduler.schedule(file.size, async () => { - state.status = "encryption-pending"; - - try { - const { fileBuffer, fileSigned } = await requestDuplicateFileScan( - file, - 
hmacSecret, - onDuplicate, - ); - if (!fileBuffer || !fileSigned) { - state.status = "canceled"; - uploadingFiles = uploadingFiles.filter((file) => file !== state); - return undefined; - } + try { + const { fileSigned } = await requestDuplicateFileScan(file, hmacSecret, onDuplicate); + if (!fileSigned) { + state.status = "canceled"; + uploadingFiles = uploadingFiles.filter((file) => file !== state); + return; + } + const fileType = getFileType(file); + if (fileType.startsWith("image/")) { + const fileBuffer = await file.arrayBuffer(); const { dataKeyWrapped, dataKeyVersion, - fileType, chunksEncrypted, nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnail, - } = await encryptFile(state, file, fileBuffer, masterKey); + } = await encryptImageFile(state, file, masterKey); const metadata = { chunks: chunksEncrypted.length, @@ -492,7 +452,7 @@ export const uploadFile = async ( lastModifiedAtIv: lastModifiedAtEncrypted.iv, }; - const { fileId, thumbnailBuffer } = await requestFileUpload( + const { fileId, thumbnailBuffer } = await requestImageFileUpload( state, metadata, chunksEncrypted, @@ -501,36 +461,17 @@ export const uploadFile = async ( dataKeyVersion, ); return { fileId, fileBuffer, thumbnailBuffer }; - } catch (e) { - state.status = "error"; - throw e; + } else { + const { fileId } = await requestFileUpload( + state, + file, + masterKey, + hmacSecret, + fileSigned, + parentId, + ); + return { fileId }; } - }); - } - - // Video and other files: use streaming approach - return await scheduler.schedule(file.size, async () => { - state.status = "encryption-pending"; - - try { - // 1st pass: streaming HMAC for duplicate check - const { fileSigned } = await requestDuplicateFileScanStreaming(file, hmacSecret, onDuplicate); - if (!fileSigned) { - state.status = "canceled"; - uploadingFiles = uploadingFiles.filter((f) => f !== state); - return undefined; - } - - // 2nd pass: streaming encrypt + upload - const { fileId } = await uploadFileStreaming( - state, - file, - masterKey, - hmacSecret, - fileSigned, - parentId, - ); - return { fileId, fileBuffer: undefined, thumbnailBuffer: undefined }; } catch (e) { state.status = "error"; throw e; diff --git a/src/lib/modules/thumbnail.ts b/src/lib/modules/thumbnail.ts index 739c7af..75b0168 100644 --- a/src/lib/modules/thumbnail.ts +++ b/src/lib/modules/thumbnail.ts @@ -122,13 +122,8 @@ export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: strin } }; -export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => { - return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`; -}; - export const generateThumbnailFromFile = async (file: File) => { - const fileType = file.type || (file.name.endsWith(".heic") ? 
"image/heic" : ""); - if (!fileType.startsWith("video/")) return null; + if (!file.type.startsWith("video/")) return null; let url; try { @@ -142,3 +137,7 @@ } } }; + +export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => { + return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`; +};
From 614d0e74b491c32942971119c912e34e4627289c Mon Sep 17 00:00:00 2001
From: static Date: Sun, 11 Jan 2026 16:01:02 +0900
Subject: [PATCH 10/19] Update package versions
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit
--- package.json | 22 +- pnpm-lock.yaml | 563 ++++++++++++++++++++++++++----------------- 2 files changed, 312 insertions(+), 273 deletions(-)
diff --git a/package.json b/package.json index 952d53f..02ed42d 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "arkvault", "private": true, - "version": "0.7.0", + "version": "0.8.0", "type": "module", "scripts": { "dev": "vite dev", @@ -16,14 +16,14 @@ "db:migrate": "kysely migrate" }, "devDependencies": { - "@eslint/compat": "^2.0.0", + "@eslint/compat": "^2.0.1", "@eslint/js": "^9.39.2", - "@iconify-json/material-symbols": "^1.2.50", + "@iconify-json/material-symbols": "^1.2.51", "@noble/hashes": "^2.0.1", "@sveltejs/adapter-node": "^5.4.0", - "@sveltejs/kit": "^2.49.2", - "@sveltejs/vite-plugin-svelte": "^6.2.1", - "@tanstack/svelte-virtual": "^3.13.16", + "@sveltejs/kit": "^2.49.4", + "@sveltejs/vite-plugin-svelte": "^6.2.4", + "@tanstack/svelte-virtual": "^3.13.18", "@trpc/client": "^11.8.1", "@types/file-saver": "^2.0.7", "@types/ms": "^0.7.34", @@ -34,11 +34,11 @@ "dexie": "^4.2.1", "eslint": "^9.39.2", "eslint-config-prettier": "^10.1.8", - "eslint-plugin-svelte": "^3.13.1", + "eslint-plugin-svelte": "^3.14.0", "eslint-plugin-tailwindcss": "^3.18.2", - "exifreader": "^4.33.1", + "exifreader": "^4.35.0", "file-saver": "^2.0.5", - "globals": "^16.5.0", + "globals": "^17.0.0", "heic2any": "^0.0.4", "kysely-ctl": "^0.19.0", "lru-cache": "^11.2.4", @@ -51,9 +51,9 @@ "svelte-check": "^4.3.5", "tailwindcss": "^3.4.19", "typescript": "^5.9.3", - "typescript-eslint": "^8.51.0", + "typescript-eslint": "^8.52.0", "unplugin-icons": "^22.5.0", - "vite": "^7.3.0" + "vite": "^7.3.1" }, "dependencies": { "@trpc/server": "^11.8.1",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f4c8e80..ac05c99 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -37,29 +37,29 @@ importers: version: 4.3.5 devDependencies: '@eslint/compat': - specifier: ^2.0.0 - version: 2.0.0(eslint@9.39.2(jiti@1.21.7)) + specifier: ^2.0.1 + version: 2.0.1(eslint@9.39.2(jiti@1.21.7)) '@eslint/js': specifier: ^9.39.2 version: 9.39.2 '@iconify-json/material-symbols': - specifier: ^1.2.50 - version: 1.2.50 + specifier: ^1.2.51 + version: 1.2.51 '@noble/hashes': specifier: ^2.0.1 version: 2.0.1 '@sveltejs/adapter-node': specifier: ^5.4.0 - version: 5.4.0(@sveltejs/kit@2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))) + version: 5.4.0(@sveltejs/kit@2.49.4(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0))) '@sveltejs/kit': specifier: ^2.49.2 version: 
2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) + specifier: ^2.49.4 + version: 2.49.4(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) '@sveltejs/vite-plugin-svelte': - specifier: ^6.2.1 - version: 6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) + specifier: ^6.2.4 + version: 6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) '@tanstack/svelte-virtual': - specifier: ^3.13.16 - version: 3.13.16(svelte@5.46.1) + specifier: ^3.13.18 + version: 3.13.18(svelte@5.46.1) '@trpc/client': specifier: ^11.8.1 version: 11.8.1(@trpc/server@11.8.1(typescript@5.9.3))(typescript@5.9.3) @@ -91,20 +91,20 @@ importers: specifier: ^10.1.8 version: 10.1.8(eslint@9.39.2(jiti@1.21.7)) eslint-plugin-svelte: - specifier: ^3.13.1 - version: 3.13.1(eslint@9.39.2(jiti@1.21.7))(svelte@5.46.1) + specifier: ^3.14.0 + version: 3.14.0(eslint@9.39.2(jiti@1.21.7))(svelte@5.46.1) eslint-plugin-tailwindcss: specifier: ^3.18.2 version: 3.18.2(tailwindcss@3.4.19(yaml@2.8.0)) exifreader: - specifier: ^4.33.1 - version: 4.33.1 + specifier: ^4.35.0 + version: 4.35.0 file-saver: specifier: ^2.0.5 version: 2.0.5 globals: - specifier: ^16.5.0 - version: 16.5.0 + specifier: ^17.0.0 + version: 17.0.0 heic2any: specifier: ^0.0.4 version: 0.0.4 @@ -142,14 +142,14 @@ importers: specifier: ^5.9.3 version: 5.9.3 typescript-eslint: - specifier: ^8.51.0 - version: 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + specifier: ^8.52.0 + version: 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) unplugin-icons: specifier: ^22.5.0 version: 22.5.0(svelte@5.46.1) vite: - specifier: ^7.3.0 - version: 7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0) + specifier: ^7.3.1 + version: 7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0) packages: @@ -329,8 +329,8 @@ packages: resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} - '@eslint/compat@2.0.0': - resolution: {integrity: sha512-T9AfE1G1uv4wwq94ozgTGio5EUQBqAVe1X9qsQtSNVEYW6j3hvtZVm8Smr4qL1qDPFg+lOB2cL5RxTRMzq4CTA==} + '@eslint/compat@2.0.1': + resolution: {integrity: sha512-yl/JsgplclzuvGFNqwNYV4XNPhP3l62ZOP9w/47atNAdmDtIFCx6X7CSk/SlWUuBGkT4Et/5+UD+WyvX2iiIWA==} engines: {node: ^20.19.0 || ^22.13.0 || >=24} peerDependencies: eslint: ^8.40 || 9 @@ -350,8 +350,8 @@ packages: resolution: {integrity: sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/core@1.0.0': - resolution: {integrity: sha512-PRfWP+8FOldvbApr6xL7mNCw4cJcSTq4GA7tYbgq15mRb0kWKO/wEB2jr+uwjFH3sZvEZneZyCUGTxsv4Sahyw==} + '@eslint/core@1.0.1': + resolution: {integrity: sha512-r18fEAj9uCk+VjzGt2thsbOmychS+4kxI14spVNibUO2vqKX7obOG+ymZljAwuPZl+S3clPGwCwTDtrdqTiY6Q==} engines: {node: ^20.19.0 || ^22.13.0 || >=24} '@eslint/eslintrc@3.3.3': @@ -386,8 +386,8 @@ packages: resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} engines: {node: '>=18.18'} - '@iconify-json/material-symbols@1.2.50': - resolution: {integrity: sha512-71tjHR70h46LHtBFab3fAd2V/wPTO7JMV5lKnRn3IcF303LaFgAlO0BZeTJDcmCv9d0snRZmnoLZAJVD7/eisw==} + 
'@iconify-json/material-symbols@1.2.51': + resolution: {integrity: sha512-GkxlK8ocHi3NVVozaW62jm3qR9fNY3xX2penFtIRvoe1OtNhJ2KD4KRzv8x34pugMOAZYK8sALMcU30gDgCi1A==} '@iconify/types@2.0.0': resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==} @@ -470,113 +470,128 @@ packages: rollup: optional: true - '@rollup/rollup-android-arm-eabi@4.54.0': - resolution: {integrity: sha512-OywsdRHrFvCdvsewAInDKCNyR3laPA2mc9bRYJ6LBp5IyvF3fvXbbNR0bSzHlZVFtn6E0xw2oZlyjg4rKCVcng==} + '@rollup/rollup-android-arm-eabi@4.55.1': + resolution: {integrity: sha512-9R0DM/ykwfGIlNu6+2U09ga0WXeZ9MRC2Ter8jnz8415VbuIykVuc6bhdrbORFZANDmTDvq26mJrEVTl8TdnDg==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.54.0': - resolution: {integrity: sha512-Skx39Uv+u7H224Af+bDgNinitlmHyQX1K/atIA32JP3JQw6hVODX5tkbi2zof/E69M1qH2UoN3Xdxgs90mmNYw==} + '@rollup/rollup-android-arm64@4.55.1': + resolution: {integrity: sha512-eFZCb1YUqhTysgW3sj/55du5cG57S7UTNtdMjCW7LwVcj3dTTcowCsC8p7uBdzKsZYa8J7IDE8lhMI+HX1vQvg==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.54.0': - resolution: {integrity: sha512-k43D4qta/+6Fq+nCDhhv9yP2HdeKeP56QrUUTW7E6PhZP1US6NDqpJj4MY0jBHlJivVJD5P8NxrjuobZBJTCRw==} + '@rollup/rollup-darwin-arm64@4.55.1': + resolution: {integrity: sha512-p3grE2PHcQm2e8PSGZdzIhCKbMCw/xi9XvMPErPhwO17vxtvCN5FEA2mSLgmKlCjHGMQTP6phuQTYWUnKewwGg==} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.54.0': - resolution: {integrity: sha512-cOo7biqwkpawslEfox5Vs8/qj83M/aZCSSNIWpVzfU2CYHa2G3P1UN5WF01RdTHSgCkri7XOlTdtk17BezlV3A==} + '@rollup/rollup-darwin-x64@4.55.1': + resolution: {integrity: sha512-rDUjG25C9qoTm+e02Esi+aqTKSBYwVTaoS1wxcN47/Luqef57Vgp96xNANwt5npq9GDxsH7kXxNkJVEsWEOEaQ==} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.54.0': - resolution: {integrity: sha512-miSvuFkmvFbgJ1BevMa4CPCFt5MPGw094knM64W9I0giUIMMmRYcGW/JWZDriaw/k1kOBtsWh1z6nIFV1vPNtA==} + '@rollup/rollup-freebsd-arm64@4.55.1': + resolution: {integrity: sha512-+JiU7Jbp5cdxekIgdte0jfcu5oqw4GCKr6i3PJTlXTCU5H5Fvtkpbs4XJHRmWNXF+hKmn4v7ogI5OQPaupJgOg==} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.54.0': - resolution: {integrity: sha512-KGXIs55+b/ZfZsq9aR026tmr/+7tq6VG6MsnrvF4H8VhwflTIuYh+LFUlIsRdQSgrgmtM3fVATzEAj4hBQlaqQ==} + '@rollup/rollup-freebsd-x64@4.55.1': + resolution: {integrity: sha512-V5xC1tOVWtLLmr3YUk2f6EJK4qksksOYiz/TCsFHu/R+woubcLWdC9nZQmwjOAbmExBIVKsm1/wKmEy4z4u4Bw==} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.54.0': - resolution: {integrity: sha512-EHMUcDwhtdRGlXZsGSIuXSYwD5kOT9NVnx9sqzYiwAc91wfYOE1g1djOEDseZJKKqtHAHGwnGPQu3kytmfaXLQ==} + '@rollup/rollup-linux-arm-gnueabihf@4.55.1': + resolution: {integrity: sha512-Rn3n+FUk2J5VWx+ywrG/HGPTD9jXNbicRtTM11e/uorplArnXZYsVifnPPqNNP5BsO3roI4n8332ukpY/zN7rQ==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm-musleabihf@4.54.0': - resolution: {integrity: sha512-+pBrqEjaakN2ySv5RVrj/qLytYhPKEUwk+e3SFU5jTLHIcAtqh2rLrd/OkbNuHJpsBgxsD8ccJt5ga/SeG0JmA==} + '@rollup/rollup-linux-arm-musleabihf@4.55.1': + resolution: {integrity: sha512-grPNWydeKtc1aEdrJDWk4opD7nFtQbMmV7769hiAaYyUKCT1faPRm2av8CX1YJsZ4TLAZcg9gTR1KvEzoLjXkg==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.54.0': - resolution: {integrity: sha512-NSqc7rE9wuUaRBsBp5ckQ5CVz5aIRKCwsoa6WMF7G01sX3/qHUw/z4pv+D+ahL1EIKy6Enpcnz1RY8pf7bjwng==} + '@rollup/rollup-linux-arm64-gnu@4.55.1': + resolution: {integrity: sha512-a59mwd1k6x8tXKcUxSyISiquLwB5pX+fJW9TkWU46lCqD/GRDe9uDN31jrMmVP3feI3mhAdvcCClhV8V5MhJFQ==} cpu: [arm64] 
os: [linux] - '@rollup/rollup-linux-arm64-musl@4.54.0': - resolution: {integrity: sha512-gr5vDbg3Bakga5kbdpqx81m2n9IX8M6gIMlQQIXiLTNeQW6CucvuInJ91EuCJ/JYvc+rcLLsDFcfAD1K7fMofg==} + '@rollup/rollup-linux-arm64-musl@4.55.1': + resolution: {integrity: sha512-puS1MEgWX5GsHSoiAsF0TYrpomdvkaXm0CofIMG5uVkP6IBV+ZO9xhC5YEN49nsgYo1DuuMquF9+7EDBVYu4uA==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-loong64-gnu@4.54.0': - resolution: {integrity: sha512-gsrtB1NA3ZYj2vq0Rzkylo9ylCtW/PhpLEivlgWe0bpgtX5+9j9EZa0wtZiCjgu6zmSeZWyI/e2YRX1URozpIw==} + '@rollup/rollup-linux-loong64-gnu@4.55.1': + resolution: {integrity: sha512-r3Wv40in+lTsULSb6nnoudVbARdOwb2u5fpeoOAZjFLznp6tDU8kd+GTHmJoqZ9lt6/Sys33KdIHUaQihFcu7g==} cpu: [loong64] os: [linux] - '@rollup/rollup-linux-ppc64-gnu@4.54.0': - resolution: {integrity: sha512-y3qNOfTBStmFNq+t4s7Tmc9hW2ENtPg8FeUD/VShI7rKxNW7O4fFeaYbMsd3tpFlIg1Q8IapFgy7Q9i2BqeBvA==} + '@rollup/rollup-linux-loong64-musl@4.55.1': + resolution: {integrity: sha512-MR8c0+UxAlB22Fq4R+aQSPBayvYa3+9DrwG/i1TKQXFYEaoW3B5b/rkSRIypcZDdWjWnpcvxbNaAJDcSbJU3Lw==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-ppc64-gnu@4.55.1': + resolution: {integrity: sha512-3KhoECe1BRlSYpMTeVrD4sh2Pw2xgt4jzNSZIIPLFEsnQn9gAnZagW9+VqDqAHgm1Xc77LzJOo2LdigS5qZ+gw==} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.54.0': - resolution: {integrity: sha512-89sepv7h2lIVPsFma8iwmccN7Yjjtgz0Rj/Ou6fEqg3HDhpCa+Et+YSufy27i6b0Wav69Qv4WBNl3Rs6pwhebQ==} + '@rollup/rollup-linux-ppc64-musl@4.55.1': + resolution: {integrity: sha512-ziR1OuZx0vdYZZ30vueNZTg73alF59DicYrPViG0NEgDVN8/Jl87zkAPu4u6VjZST2llgEUjaiNl9JM6HH1Vdw==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.55.1': + resolution: {integrity: sha512-uW0Y12ih2XJRERZ4jAfKamTyIHVMPQnTZcQjme2HMVDAHY4amf5u414OqNYC+x+LzRdRcnIG1YodLrrtA8xsxw==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-riscv64-musl@4.54.0': - resolution: {integrity: sha512-ZcU77ieh0M2Q8Ur7D5X7KvK+UxbXeDHwiOt/CPSBTI1fBmeDMivW0dPkdqkT4rOgDjrDDBUed9x4EgraIKoR2A==} + '@rollup/rollup-linux-riscv64-musl@4.55.1': + resolution: {integrity: sha512-u9yZ0jUkOED1BFrqu3BwMQoixvGHGZ+JhJNkNKY/hyoEgOwlqKb62qu+7UjbPSHYjiVy8kKJHvXKv5coH4wDeg==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-s390x-gnu@4.54.0': - resolution: {integrity: sha512-2AdWy5RdDF5+4YfG/YesGDDtbyJlC9LHmL6rZw6FurBJ5n4vFGupsOBGfwMRjBYH7qRQowT8D/U4LoSvVwOhSQ==} + '@rollup/rollup-linux-s390x-gnu@4.55.1': + resolution: {integrity: sha512-/0PenBCmqM4ZUd0190j7J0UsQ/1nsi735iPRakO8iPciE7BQ495Y6msPzaOmvx0/pn+eJVVlZrNrSh4WSYLxNg==} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.54.0': - resolution: {integrity: sha512-WGt5J8Ij/rvyqpFexxk3ffKqqbLf9AqrTBbWDk7ApGUzaIs6V+s2s84kAxklFwmMF/vBNGrVdYgbblCOFFezMQ==} + '@rollup/rollup-linux-x64-gnu@4.55.1': + resolution: {integrity: sha512-a8G4wiQxQG2BAvo+gU6XrReRRqj+pLS2NGXKm8io19goR+K8lw269eTrPkSdDTALwMmJp4th2Uh0D8J9bEV1vg==} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.54.0': - resolution: {integrity: sha512-JzQmb38ATzHjxlPHuTH6tE7ojnMKM2kYNzt44LO/jJi8BpceEC8QuXYA908n8r3CNuG/B3BV8VR3Hi1rYtmPiw==} + '@rollup/rollup-linux-x64-musl@4.55.1': + resolution: {integrity: sha512-bD+zjpFrMpP/hqkfEcnjXWHMw5BIghGisOKPj+2NaNDuVT+8Ds4mPf3XcPHuat1tz89WRL+1wbcxKY3WSbiT7w==} cpu: [x64] os: [linux] - '@rollup/rollup-openharmony-arm64@4.54.0': - resolution: {integrity: sha512-huT3fd0iC7jigGh7n3q/+lfPcXxBi+om/Rs3yiFxjvSxbSB6aohDFXbWvlspaqjeOh+hx7DDHS+5Es5qRkWkZg==} + '@rollup/rollup-openbsd-x64@4.55.1': + resolution: {integrity: 
sha512-eLXw0dOiqE4QmvikfQ6yjgkg/xDM+MdU9YJuP4ySTibXU0oAvnEWXt7UDJmD4UkYialMfOGFPJnIHSe/kdzPxg==} + cpu: [x64] + os: [openbsd] + + '@rollup/rollup-openharmony-arm64@4.55.1': + resolution: {integrity: sha512-xzm44KgEP11te3S2HCSyYf5zIzWmx3n8HDCc7EE59+lTcswEWNpvMLfd9uJvVX8LCg9QWG67Xt75AuHn4vgsXw==} cpu: [arm64] os: [openharmony] - '@rollup/rollup-win32-arm64-msvc@4.54.0': - resolution: {integrity: sha512-c2V0W1bsKIKfbLMBu/WGBz6Yci8nJ/ZJdheE0EwB73N3MvHYKiKGs3mVilX4Gs70eGeDaMqEob25Tw2Gb9Nqyw==} + '@rollup/rollup-win32-arm64-msvc@4.55.1': + resolution: {integrity: sha512-yR6Bl3tMC/gBok5cz/Qi0xYnVbIxGx5Fcf/ca0eB6/6JwOY+SRUcJfI0OpeTpPls7f194as62thCt/2BjxYN8g==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.54.0': - resolution: {integrity: sha512-woEHgqQqDCkAzrDhvDipnSirm5vxUXtSKDYTVpZG3nUdW/VVB5VdCYA2iReSj/u3yCZzXID4kuKG7OynPnB3WQ==} + '@rollup/rollup-win32-ia32-msvc@4.55.1': + resolution: {integrity: sha512-3fZBidchE0eY0oFZBnekYCfg+5wAB0mbpCBuofh5mZuzIU/4jIVkbESmd2dOsFNS78b53CYv3OAtwqkZZmU5nA==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-gnu@4.54.0': - resolution: {integrity: sha512-dzAc53LOuFvHwbCEOS0rPbXp6SIhAf2txMP5p6mGyOXXw5mWY8NGGbPMPrs4P1WItkfApDathBj/NzMLUZ9rtQ==} + '@rollup/rollup-win32-x64-gnu@4.55.1': + resolution: {integrity: sha512-xGGY5pXj69IxKb4yv/POoocPy/qmEGhimy/FoTpTSVju3FYXUQQMFCaZZXJVidsmGxRioZAwpThl/4zX41gRKg==} cpu: [x64] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.54.0': - resolution: {integrity: sha512-hYT5d3YNdSh3mbCU1gwQyPgQd3T2ne0A3KG8KSBdav5TiBg6eInVmV+TeR5uHufiIgSFg0XsOWGW5/RhNcSvPg==} + '@rollup/rollup-win32-x64-msvc@4.55.1': + resolution: {integrity: sha512-SPEpaL6DX4rmcXtnhdrQYgzQ5W2uW3SCJch88lB2zImhJRhIIK44fkUrgIV/Q8yUNfw5oyZ5vkeQsZLhCb06lw==} cpu: [x64] os: [win32] @@ -593,41 +608,44 @@ packages: peerDependencies: '@sveltejs/kit': ^2.4.0 - '@sveltejs/kit@2.49.2': - resolution: {integrity: sha512-Vp3zX/qlwerQmHMP6x0Ry1oY7eKKRcOWGc2P59srOp4zcqyn+etJyQpELgOi4+ZSUgteX8Y387NuwruLgGXLUQ==} + '@sveltejs/kit@2.49.4': + resolution: {integrity: sha512-JFtOqDoU0DI/+QSG8qnq5bKcehVb3tCHhOG4amsSYth5/KgO4EkJvi42xSAiyKmXAAULW1/Zdb6lkgGEgSxdZg==} engines: {node: '>=18.13'} hasBin: true peerDependencies: '@opentelemetry/api': ^1.0.0 '@sveltejs/vite-plugin-svelte': ^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0 svelte: ^4.0.0 || ^5.0.0-next.0 + typescript: ^5.3.3 vite: ^5.0.3 || ^6.0.0 || ^7.0.0-beta.0 peerDependenciesMeta: '@opentelemetry/api': optional: true + typescript: + optional: true - '@sveltejs/vite-plugin-svelte-inspector@5.0.1': - resolution: {integrity: sha512-ubWshlMk4bc8mkwWbg6vNvCeT7lGQojE3ijDh3QTR6Zr/R+GXxsGbyH4PExEPpiFmqPhYiVSVmHBjUcVc1JIrA==} + '@sveltejs/vite-plugin-svelte-inspector@5.0.2': + resolution: {integrity: sha512-TZzRTcEtZffICSAoZGkPSl6Etsj2torOVrx6Uw0KpXxrec9Gg6jFWQ60Q3+LmNGfZSxHRCZL7vXVZIWmuV50Ig==} engines: {node: ^20.19 || ^22.12 || >=24} peerDependencies: '@sveltejs/vite-plugin-svelte': ^6.0.0-next.0 svelte: ^5.0.0 vite: ^6.3.0 || ^7.0.0 - '@sveltejs/vite-plugin-svelte@6.2.1': - resolution: {integrity: sha512-YZs/OSKOQAQCnJvM/P+F1URotNnYNeU3P2s4oIpzm1uFaqUEqRxUB0g5ejMjEb5Gjb9/PiBI5Ktrq4rUUF8UVQ==} + '@sveltejs/vite-plugin-svelte@6.2.4': + resolution: {integrity: sha512-ou/d51QSdTyN26D7h6dSpusAKaZkAiGM55/AKYi+9AGZw7q85hElbjK3kEyzXHhLSnRISHOYzVge6x0jRZ7DXA==} engines: {node: ^20.19 || ^22.12 || >=24} peerDependencies: svelte: ^5.0.0 vite: ^6.3.0 || ^7.0.0 - '@tanstack/svelte-virtual@3.13.16': - resolution: {integrity: sha512-LRDPRzAPTIiDjiCA9lhNlFnZRLj/XsNhzNRsT5JEA8hzcBmZw8avdYYVjydPAy0ObFJgG1zBAm9Dtvwqju36sg==} + 
'@tanstack/svelte-virtual@3.13.18': + resolution: {integrity: sha512-BHh8WkFK58eE9KzLctPQkCkvCj46LnM9tIGkpwo5Unx5YaBPf0uBJBqvSdc2jMwdT8gLXLHFHtCnSujlZP69BA==} peerDependencies: svelte: ^3.48.0 || ^4.0.0 || ^5.0.0 - '@tanstack/virtual-core@3.13.16': - resolution: {integrity: sha512-njazUC8mDkrxWmyZmn/3eXrDcP8Msb3chSr4q6a65RmwdSbMlMCdnOphv6/8mLO7O3Fuza5s4M4DclmvAO5w0w==} + '@tanstack/virtual-core@3.13.18': + resolution: {integrity: sha512-Mx86Hqu1k39icq2Zusq+Ey2J6dDWTjDvEv43PJtRCoEYTLyfaPnxIQ6iy7YAOK0NV/qOEmZQ/uCufrppZxTgcg==} '@trpc/client@11.8.1': resolution: {integrity: sha512-L/SJFGanr9xGABmuDoeXR4xAdHJmsXsiF9OuH+apecJ+8sUITzVT1EPeqp0ebqA6lBhEl5pPfg3rngVhi/h60Q==} @@ -658,8 +676,8 @@ packages: '@types/node-schedule@2.1.8': resolution: {integrity: sha512-k00g6Yj/oUg/CDC+MeLHUzu0+OFxWbIqrFfDiLi6OPKxTujvpv29mHGM8GtKr7B+9Vv92FcK/8mRqi1DK5f3hA==} - '@types/node@25.0.3': - resolution: {integrity: sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==} + '@types/node@25.0.6': + resolution: {integrity: sha512-NNu0sjyNxpoiW3YuVFfNz7mxSQ+S4X2G28uqg2s+CzoqoQjLPsWSbsFFyztIAqt2vb8kfEAsJNepMGPTxFDx3Q==} '@types/pg@8.16.0': resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} @@ -667,63 +685,63 @@ packages: '@types/resolve@1.20.2': resolution: {integrity: sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==} - '@typescript-eslint/eslint-plugin@8.51.0': - resolution: {integrity: sha512-XtssGWJvypyM2ytBnSnKtHYOGT+4ZwTnBVl36TA4nRO2f4PRNGz5/1OszHzcZCvcBMh+qb7I06uoCmLTRdR9og==} + '@typescript-eslint/eslint-plugin@8.52.0': + resolution: {integrity: sha512-okqtOgqu2qmZJ5iN4TWlgfF171dZmx2FzdOv2K/ixL2LZWDStL8+JgQerI2sa8eAEfoydG9+0V96m7V+P8yE1Q==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.51.0 + '@typescript-eslint/parser': ^8.52.0 eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/parser@8.51.0': - resolution: {integrity: sha512-3xP4XzzDNQOIqBMWogftkwxhg5oMKApqY0BAflmLZiFYHqyhSOxv/cd/zPQLTcCXr4AkaKb25joocY0BD1WC6A==} + '@typescript-eslint/parser@8.52.0': + resolution: {integrity: sha512-iIACsx8pxRnguSYhHiMn2PvhvfpopO9FXHyn1mG5txZIsAaB6F0KwbFnUQN3KCiG3Jcuad/Cao2FAs1Wp7vAyg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.51.0': - resolution: {integrity: sha512-Luv/GafO07Z7HpiI7qeEW5NW8HUtZI/fo/kE0YbtQEFpJRUuR0ajcWfCE5bnMvL7QQFrmT/odMe8QZww8X2nfQ==} + '@typescript-eslint/project-service@8.52.0': + resolution: {integrity: sha512-xD0MfdSdEmeFa3OmVqonHi+Cciab96ls1UhIF/qX/O/gPu5KXD0bY9lu33jj04fjzrXHcuvjBcBC+D3SNSadaw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/scope-manager@8.51.0': - resolution: {integrity: sha512-JhhJDVwsSx4hiOEQPeajGhCWgBMBwVkxC/Pet53EpBVs7zHHtayKefw1jtPaNRXpI9RA2uocdmpdfE7T+NrizA==} + '@typescript-eslint/scope-manager@8.52.0': + resolution: {integrity: sha512-ixxqmmCcc1Nf8S0mS0TkJ/3LKcC8mruYJPOU6Ia2F/zUUR4pApW7LzrpU3JmtePbRUTes9bEqRc1Gg4iyRnDzA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/tsconfig-utils@8.51.0': - resolution: {integrity: sha512-Qi5bSy/vuHeWyir2C8u/uqGMIlIDu8fuiYWv48ZGlZ/k+PRPHtaAu7erpc7p5bzw2WNNSniuxoMSO4Ar6V9OXw==} + '@typescript-eslint/tsconfig-utils@8.52.0': + resolution: {integrity: 
sha512-jl+8fzr/SdzdxWJznq5nvoI7qn2tNYV/ZBAEcaFMVXf+K6jmXvAFrgo/+5rxgnL152f//pDEAYAhhBAZGrVfwg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.51.0': - resolution: {integrity: sha512-0XVtYzxnobc9K0VU7wRWg1yiUrw4oQzexCG2V2IDxxCxhqBMSMbjB+6o91A+Uc0GWtgjCa3Y8bi7hwI0Tu4n5Q==} + '@typescript-eslint/type-utils@8.52.0': + resolution: {integrity: sha512-JD3wKBRWglYRQkAtsyGz1AewDu3mTc7NtRjR/ceTyGoPqmdS5oCdx/oZMWD5Zuqmo6/MpsYs0wp6axNt88/2EQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/types@8.51.0': - resolution: {integrity: sha512-TizAvWYFM6sSscmEakjY3sPqGwxZRSywSsPEiuZF6d5GmGD9Gvlsv0f6N8FvAAA0CD06l3rIcWNbsN1e5F/9Ag==} + '@typescript-eslint/types@8.52.0': + resolution: {integrity: sha512-LWQV1V4q9V4cT4H5JCIx3481iIFxH1UkVk+ZkGGAV1ZGcjGI9IoFOfg3O6ywz8QqCDEp7Inlg6kovMofsNRaGg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/typescript-estree@8.51.0': - resolution: {integrity: sha512-1qNjGqFRmlq0VW5iVlcyHBbCjPB7y6SxpBkrbhNWMy/65ZoncXCEPJxkRZL8McrseNH6lFhaxCIaX+vBuFnRng==} + '@typescript-eslint/typescript-estree@8.52.0': + resolution: {integrity: sha512-XP3LClsCc0FsTK5/frGjolyADTh3QmsLp6nKd476xNI9CsSsLnmn4f0jrzNoAulmxlmNIpeXuHYeEQv61Q6qeQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/utils@8.51.0': - resolution: {integrity: sha512-11rZYxSe0zabiKaCP2QAwRf/dnmgFgvTmeDTtZvUvXG3UuAdg/GU02NExmmIXzz3vLGgMdtrIosI84jITQOxUA==} + '@typescript-eslint/utils@8.52.0': + resolution: {integrity: sha512-wYndVMWkweqHpEpwPhwqE2lnD2DxC6WVLupU/DOt/0/v+/+iQbbzO3jOHjmBMnhu0DgLULvOaU4h4pwHYi2oRQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/visitor-keys@8.51.0': - resolution: {integrity: sha512-mM/JRQOzhVN1ykejrvwnBRV3+7yTKK8tVANVN3o1O0t0v7o+jqdVu9crPy5Y9dov15TJk/FTIgoUGHrTOVL3Zg==} + '@typescript-eslint/visitor-keys@8.52.0': + resolution: {integrity: sha512-ink3/Zofus34nmBsPjow63FP5M7IGff0RKAgqR6+CFpdk22M7aLwC9gOcLGYqr7MczLPzZVERW9hRog3O4n1sQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@xmldom/xmldom@0.9.8': @@ -788,8 +806,8 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - baseline-browser-mapping@2.9.11: - resolution: {integrity: sha512-Sg0xJUNDU1sJNGdfGWhVHX0kkZ+HWcvmVymJbj6NSgZZmW/8S9Y2HQ5euytnIgakgxN6papOAWiwDo1ctFDcoQ==} + baseline-browser-mapping@2.9.14: + resolution: {integrity: sha512-B0xUquLkiGLgHhpPBqvl7GWegWBUNuujQ6kXd/r1U38ElPT6Ok8KZ8e+FpUGEc2ZoRQUzq/aUnaKFc/svWUGSg==} hasBin: true binary-extensions@2.3.0: @@ -831,8 +849,8 @@ packages: resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==} engines: {node: '>= 6'} - caniuse-lite@1.0.30001762: - resolution: {integrity: sha512-PxZwGNvH7Ak8WX5iXzoK1KPZttBXNPuaOvI2ZYU7NrlM+d9Ov+TUvlLOBNGzVXAntMSMMlJPd+jY6ovrVjSmUw==} + caniuse-lite@1.0.30001764: + resolution: {integrity: sha512-9JGuzl2M+vPL+pz70gtMF9sHdMFbY9FJaQBi186cHKH3pSzDvzoUJUPV6fqiKIMyXbud9ZLg4F3Yza1vJ1+93g==} chalk@4.1.2: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} @@ -998,8 +1016,8 @@ packages: peerDependencies: eslint: '>=7.0.0' - eslint-plugin-svelte@3.13.1: - resolution: 
{integrity: sha512-Ng+kV/qGS8P/isbNYVE3sJORtubB+yLEcYICMkUWNaDTb0SwZni/JhAYXh/Dz/q2eThUwWY0VMPZ//KYD1n3eQ==} + eslint-plugin-svelte@3.14.0: + resolution: {integrity: sha512-Isw0GvaMm0yHxAj71edAdGFh28ufYs+6rk2KlbbZphnqZAzrH3Se3t12IFh2H9+1F/jlDhBBL4oiOJmLqmYX0g==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.1 || ^9.0.0 @@ -1065,8 +1083,8 @@ packages: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: '>=0.10.0'} - exifreader@4.33.1: - resolution: {integrity: sha512-KsVc4bRfZW255PSst5Opt5jUeLp+SD2+q6fmXQkMMkphpFCDBFjzNAvswgQa1YcMrXq+9Na6HJ6gS3wo2x7RRw==} + exifreader@4.35.0: + resolution: {integrity: sha512-qiMONyOObmwI6sIXy13vRGqlcoi9VUKr70iGI1aefP+xJsbcXp+hcyL/4J6hov/yG9UhS7Hq1OQ1hAoSEZl+RA==} exsolve@1.0.8: resolution: {integrity: sha512-LmDxfWXwcTArk8fUEnOfSZpHOJ6zOMUJKOtFLFqJLoKJetuQG874Uc7/Kki7zFLzYybmZhp1M7+98pfMqeX8yA==} @@ -1170,6 +1188,10 @@ packages: resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==} engines: {node: '>=18'} + globals@17.0.0: + resolution: {integrity: sha512-gv5BeD2EssA793rlFWVPMMCqefTlpusw6/2TbAVMy0FzcG8wKJn4O+NqJ4+XWmmwrayJgw5TzrmWjFgmz1XPqw==} + engines: {node: '>=18'} + gopd@1.2.0: resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} engines: {node: '>= 0.4'} @@ -1432,6 +1454,9 @@ packages: resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==} engines: {node: '>= 6'} + obug@2.1.1: + resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + ofetch@1.5.1: resolution: {integrity: sha512-2W4oUZlVaqAPAil6FUg/difl6YhqhUR7x2eZY4bQCko22UXg3hptq9KLQdqFClV+Wu85UX7hNtdGTngi/1BxcA==} @@ -1742,8 +1767,8 @@ packages: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - rollup@4.54.0: - resolution: {integrity: sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==} + rollup@4.55.1: + resolution: {integrity: sha512-wDv/Ht1BNHB4upNbK74s9usvl7hObDnvVzknxqY/E/O3X6rW1U1rV1aENEfJ54eFZDTNo7zv1f5N4edCluH7+A==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -1881,8 +1906,8 @@ packages: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} - typescript-eslint@8.51.0: - resolution: {integrity: sha512-jh8ZuM5oEh2PSdyQG9YAEM1TCGuWenLSuSUhf/irbVUNW9O5FhbFVONviN2TgMTBnUmyHv7E56rYnfLZK6TkiA==} + typescript-eslint@8.52.0: + resolution: {integrity: sha512-atlQQJ2YkO4pfTVQmQ+wvYQwexPDOIgo+RaVcD7gHgzy/IQA+XTyuxNM9M9TVXvttkF7koBHmcwisKdOAf2EcA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -1893,8 +1918,8 @@ packages: engines: {node: '>=14.17'} hasBin: true - ufo@1.6.1: - resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==} + ufo@1.6.2: + resolution: {integrity: sha512-heMioaxBcG9+Znsda5Q8sQbWnLJSl98AFDXTO80wELWEzX3hordXsTdxrIfMQoO9IY1MEnoGoPjpoKpMj+Yx0Q==} undici-types@7.16.0: resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==} @@ -1942,8 +1967,8 @@ packages: 
resolution: {integrity: sha512-XQegIaBTVUjSHliKqcnFqYypAd4S+WCYt5NIeRs6w/UAry7z8Y9j5ZwRRL4kzq9U3sD6v+85er9FvkEaBpji2w==} hasBin: true - vite@7.3.0: - resolution: {integrity: sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==} + vite@7.3.1: + resolution: {integrity: sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: @@ -2125,9 +2150,9 @@ snapshots: '@eslint-community/regexpp@4.12.2': {} - '@eslint/compat@2.0.0(eslint@9.39.2(jiti@1.21.7))': + '@eslint/compat@2.0.1(eslint@9.39.2(jiti@1.21.7))': dependencies: - '@eslint/core': 1.0.0 + '@eslint/core': 1.0.1 optionalDependencies: eslint: 9.39.2(jiti@1.21.7) @@ -2147,7 +2172,7 @@ snapshots: dependencies: '@types/json-schema': 7.0.15 - '@eslint/core@1.0.0': + '@eslint/core@1.0.1': dependencies: '@types/json-schema': 7.0.15 @@ -2185,7 +2210,7 @@ snapshots: '@humanwhocodes/retry@0.4.3': {} - '@iconify-json/material-symbols@1.2.50': + '@iconify-json/material-symbols@1.2.51': dependencies: '@iconify/types': 2.0.0 @@ -2234,9 +2259,9 @@ snapshots: '@polka/url@1.0.0-next.29': {} - '@rollup/plugin-commonjs@28.0.9(rollup@4.54.0)': + '@rollup/plugin-commonjs@28.0.9(rollup@4.55.1)': dependencies: - '@rollup/pluginutils': 5.3.0(rollup@4.54.0) + '@rollup/pluginutils': 5.3.0(rollup@4.55.1) commondir: 1.0.1 estree-walker: 2.0.2 fdir: 6.5.0(picomatch@4.0.3) @@ -2244,96 +2269,105 @@ snapshots: magic-string: 0.30.21 picomatch: 4.0.3 optionalDependencies: - rollup: 4.54.0 + rollup: 4.55.1 - '@rollup/plugin-json@6.1.0(rollup@4.54.0)': + '@rollup/plugin-json@6.1.0(rollup@4.55.1)': dependencies: - '@rollup/pluginutils': 5.3.0(rollup@4.54.0) + '@rollup/pluginutils': 5.3.0(rollup@4.55.1) optionalDependencies: - rollup: 4.54.0 + rollup: 4.55.1 - '@rollup/plugin-node-resolve@16.0.3(rollup@4.54.0)': + '@rollup/plugin-node-resolve@16.0.3(rollup@4.55.1)': dependencies: - '@rollup/pluginutils': 5.3.0(rollup@4.54.0) + '@rollup/pluginutils': 5.3.0(rollup@4.55.1) '@types/resolve': 1.20.2 deepmerge: 4.3.1 is-module: 1.0.0 resolve: 1.22.11 optionalDependencies: - rollup: 4.54.0 + rollup: 4.55.1 - '@rollup/pluginutils@5.3.0(rollup@4.54.0)': + '@rollup/pluginutils@5.3.0(rollup@4.55.1)': dependencies: '@types/estree': 1.0.8 estree-walker: 2.0.2 picomatch: 4.0.3 optionalDependencies: - rollup: 4.54.0 + rollup: 4.55.1 - '@rollup/rollup-android-arm-eabi@4.54.0': + '@rollup/rollup-android-arm-eabi@4.55.1': optional: true - '@rollup/rollup-android-arm64@4.54.0': + '@rollup/rollup-android-arm64@4.55.1': optional: true - '@rollup/rollup-darwin-arm64@4.54.0': + '@rollup/rollup-darwin-arm64@4.55.1': optional: true - '@rollup/rollup-darwin-x64@4.54.0': + '@rollup/rollup-darwin-x64@4.55.1': optional: true - '@rollup/rollup-freebsd-arm64@4.54.0': + '@rollup/rollup-freebsd-arm64@4.55.1': optional: true - '@rollup/rollup-freebsd-x64@4.54.0': + '@rollup/rollup-freebsd-x64@4.55.1': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.54.0': + '@rollup/rollup-linux-arm-gnueabihf@4.55.1': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.54.0': + '@rollup/rollup-linux-arm-musleabihf@4.55.1': optional: true - '@rollup/rollup-linux-arm64-gnu@4.54.0': + '@rollup/rollup-linux-arm64-gnu@4.55.1': optional: true - '@rollup/rollup-linux-arm64-musl@4.54.0': + '@rollup/rollup-linux-arm64-musl@4.55.1': optional: true - '@rollup/rollup-linux-loong64-gnu@4.54.0': + '@rollup/rollup-linux-loong64-gnu@4.55.1': optional: true - 
'@rollup/rollup-linux-ppc64-gnu@4.54.0': + '@rollup/rollup-linux-loong64-musl@4.55.1': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.54.0': + '@rollup/rollup-linux-ppc64-gnu@4.55.1': optional: true - '@rollup/rollup-linux-riscv64-musl@4.54.0': + '@rollup/rollup-linux-ppc64-musl@4.55.1': optional: true - '@rollup/rollup-linux-s390x-gnu@4.54.0': + '@rollup/rollup-linux-riscv64-gnu@4.55.1': optional: true - '@rollup/rollup-linux-x64-gnu@4.54.0': + '@rollup/rollup-linux-riscv64-musl@4.55.1': optional: true - '@rollup/rollup-linux-x64-musl@4.54.0': + '@rollup/rollup-linux-s390x-gnu@4.55.1': optional: true - '@rollup/rollup-openharmony-arm64@4.54.0': + '@rollup/rollup-linux-x64-gnu@4.55.1': optional: true - '@rollup/rollup-win32-arm64-msvc@4.54.0': + '@rollup/rollup-linux-x64-musl@4.55.1': optional: true - '@rollup/rollup-win32-ia32-msvc@4.54.0': + '@rollup/rollup-openbsd-x64@4.55.1': optional: true - '@rollup/rollup-win32-x64-gnu@4.54.0': + '@rollup/rollup-openharmony-arm64@4.55.1': optional: true - '@rollup/rollup-win32-x64-msvc@4.54.0': + '@rollup/rollup-win32-arm64-msvc@4.55.1': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.55.1': + optional: true + + '@rollup/rollup-win32-x64-gnu@4.55.1': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.55.1': optional: true '@standard-schema/spec@1.1.0': {} @@ -2342,19 +2376,19 @@ snapshots: dependencies: acorn: 8.15.0 - '@sveltejs/adapter-node@5.4.0(@sveltejs/kit@2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))': + '@sveltejs/adapter-node@5.4.0(@sveltejs/kit@2.49.4(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))': dependencies: - '@rollup/plugin-commonjs': 28.0.9(rollup@4.54.0) - '@rollup/plugin-json': 6.1.0(rollup@4.54.0) - '@rollup/plugin-node-resolve': 16.0.3(rollup@4.54.0) - '@sveltejs/kit': 2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) - rollup: 4.54.0 + '@rollup/plugin-commonjs': 28.0.9(rollup@4.55.1) + '@rollup/plugin-json': 6.1.0(rollup@4.55.1) + '@rollup/plugin-node-resolve': 16.0.3(rollup@4.55.1) + '@sveltejs/kit': 2.49.4(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) + rollup: 4.55.1 - '@sveltejs/kit@2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))': + '@sveltejs/kit@2.49.4(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0))': dependencies: '@standard-schema/spec': 1.1.0 '@sveltejs/acorn-typescript': 1.0.8(acorn@8.15.0) - '@sveltejs/vite-plugin-svelte': 6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) + '@sveltejs/vite-plugin-svelte': 6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) '@types/cookie': 0.6.0 acorn: 8.15.0 cookie: 0.6.0 @@ -2367,35 +2401,33 @@ snapshots: set-cookie-parser: 2.7.2 sirv: 3.0.2 svelte: 5.46.1 
- vite: 7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0) + vite: 7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0) + optionalDependencies: + typescript: 5.9.3 - '@sveltejs/vite-plugin-svelte-inspector@5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))': + '@sveltejs/vite-plugin-svelte-inspector@5.0.2(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0))': dependencies: - '@sveltejs/vite-plugin-svelte': 6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) - debug: 4.4.3 + '@sveltejs/vite-plugin-svelte': 6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) + obug: 2.1.1 svelte: 5.46.1 - vite: 7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0) - transitivePeerDependencies: - - supports-color + vite: 7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0) - '@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))': + '@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0))': dependencies: - '@sveltejs/vite-plugin-svelte-inspector': 5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) - debug: 4.4.3 + '@sveltejs/vite-plugin-svelte-inspector': 5.0.2(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) deepmerge: 4.3.1 magic-string: 0.30.21 + obug: 2.1.1 svelte: 5.46.1 - vite: 7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0) - vitefu: 1.1.1(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) - transitivePeerDependencies: - - supports-color + vite: 7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0) + vitefu: 1.1.1(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) - '@tanstack/svelte-virtual@3.13.16(svelte@5.46.1)': + '@tanstack/svelte-virtual@3.13.18(svelte@5.46.1)': dependencies: - '@tanstack/virtual-core': 3.13.16 + '@tanstack/virtual-core': 3.13.18 svelte: 5.46.1 - '@tanstack/virtual-core@3.13.16': {} + '@tanstack/virtual-core@3.13.18': {} '@trpc/client@11.8.1(@trpc/server@11.8.1(typescript@5.9.3))(typescript@5.9.3)': dependencies: @@ -2418,28 +2450,28 @@ snapshots: '@types/node-schedule@2.1.8': dependencies: - '@types/node': 25.0.3 + '@types/node': 25.0.6 - '@types/node@25.0.3': + '@types/node@25.0.6': dependencies: undici-types: 7.16.0 '@types/pg@8.16.0': dependencies: - '@types/node': 25.0.3 + '@types/node': 25.0.6 pg-protocol: 1.10.3 pg-types: 2.2.0 '@types/resolve@1.20.2': {} - '@typescript-eslint/eslint-plugin@8.51.0(@typescript-eslint/parser@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/eslint-plugin@8.52.0(@typescript-eslint/parser@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.2 - '@typescript-eslint/parser': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.51.0 - '@typescript-eslint/type-utils': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/utils': 
8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.51.0 + '@typescript-eslint/parser': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.52.0 + '@typescript-eslint/type-utils': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/utils': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.52.0 eslint: 9.39.2(jiti@1.21.7) ignore: 7.0.5 natural-compare: 1.4.0 @@ -2448,41 +2480,41 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/parser@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@typescript-eslint/scope-manager': 8.51.0 - '@typescript-eslint/types': 8.51.0 - '@typescript-eslint/typescript-estree': 8.51.0(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.51.0 + '@typescript-eslint/scope-manager': 8.52.0 + '@typescript-eslint/types': 8.52.0 + '@typescript-eslint/typescript-estree': 8.52.0(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.52.0 debug: 4.4.3 eslint: 9.39.2(jiti@1.21.7) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/project-service@8.51.0(typescript@5.9.3)': + '@typescript-eslint/project-service@8.52.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.51.0(typescript@5.9.3) - '@typescript-eslint/types': 8.51.0 + '@typescript-eslint/tsconfig-utils': 8.52.0(typescript@5.9.3) + '@typescript-eslint/types': 8.52.0 debug: 4.4.3 typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/scope-manager@8.51.0': + '@typescript-eslint/scope-manager@8.52.0': dependencies: - '@typescript-eslint/types': 8.51.0 - '@typescript-eslint/visitor-keys': 8.51.0 + '@typescript-eslint/types': 8.52.0 + '@typescript-eslint/visitor-keys': 8.52.0 - '@typescript-eslint/tsconfig-utils@8.51.0(typescript@5.9.3)': + '@typescript-eslint/tsconfig-utils@8.52.0(typescript@5.9.3)': dependencies: typescript: 5.9.3 - '@typescript-eslint/type-utils@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/type-utils@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.51.0 - '@typescript-eslint/typescript-estree': 8.51.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/types': 8.52.0 + '@typescript-eslint/typescript-estree': 8.52.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) debug: 4.4.3 eslint: 9.39.2(jiti@1.21.7) ts-api-utils: 2.4.0(typescript@5.9.3) @@ -2490,14 +2522,14 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/types@8.51.0': {} + '@typescript-eslint/types@8.52.0': {} - '@typescript-eslint/typescript-estree@8.51.0(typescript@5.9.3)': + '@typescript-eslint/typescript-estree@8.52.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/project-service': 8.51.0(typescript@5.9.3) - '@typescript-eslint/tsconfig-utils': 8.51.0(typescript@5.9.3) - '@typescript-eslint/types': 8.51.0 - '@typescript-eslint/visitor-keys': 8.51.0 + '@typescript-eslint/project-service': 8.52.0(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.52.0(typescript@5.9.3) + '@typescript-eslint/types': 8.52.0 + '@typescript-eslint/visitor-keys': 8.52.0 debug: 4.4.3 minimatch: 9.0.5 semver: 7.7.3 @@ -2507,20 +2539,20 @@ 
snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/utils@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@1.21.7)) - '@typescript-eslint/scope-manager': 8.51.0 - '@typescript-eslint/types': 8.51.0 - '@typescript-eslint/typescript-estree': 8.51.0(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.52.0 + '@typescript-eslint/types': 8.52.0 + '@typescript-eslint/typescript-estree': 8.52.0(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/visitor-keys@8.51.0': + '@typescript-eslint/visitor-keys@8.52.0': dependencies: - '@typescript-eslint/types': 8.51.0 + '@typescript-eslint/types': 8.52.0 eslint-visitor-keys: 4.2.1 '@xmldom/xmldom@0.9.8': @@ -2568,7 +2600,7 @@ snapshots: autoprefixer@10.4.23(postcss@8.5.6): dependencies: browserslist: 4.28.1 - caniuse-lite: 1.0.30001762 + caniuse-lite: 1.0.30001764 fraction.js: 5.3.4 picocolors: 1.1.1 postcss: 8.5.6 @@ -2586,7 +2618,7 @@ snapshots: balanced-match@1.0.2: {} - baseline-browser-mapping@2.9.11: {} + baseline-browser-mapping@2.9.14: {} binary-extensions@2.3.0: {} @@ -2605,8 +2637,8 @@ snapshots: browserslist@4.28.1: dependencies: - baseline-browser-mapping: 2.9.11 - caniuse-lite: 1.0.30001762 + baseline-browser-mapping: 2.9.14 + caniuse-lite: 1.0.30001764 electron-to-chromium: 1.5.267 node-releases: 2.0.27 update-browserslist-db: 1.2.3(browserslist@4.28.1) @@ -2635,7 +2667,7 @@ snapshots: camelcase-css@2.0.1: {} - caniuse-lite@1.0.30001762: {} + caniuse-lite@1.0.30001764: {} chalk@4.1.2: dependencies: @@ -2797,7 +2829,7 @@ snapshots: dependencies: eslint: 9.39.2(jiti@1.21.7) - eslint-plugin-svelte@3.13.1(eslint@9.39.2(jiti@1.21.7))(svelte@5.46.1): + eslint-plugin-svelte@3.14.0(eslint@9.39.2(jiti@1.21.7))(svelte@5.46.1): dependencies: '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@1.21.7)) '@jridgewell/sourcemap-codec': 1.5.5 @@ -2897,7 +2929,7 @@ snapshots: esutils@2.0.3: {} - exifreader@4.33.1: + exifreader@4.35.0: optionalDependencies: '@xmldom/xmldom': 0.9.8 @@ -3003,6 +3035,8 @@ snapshots: globals@16.5.0: {} + globals@17.0.0: {} + gopd@1.2.0: {} has-flag@4.0.0: {} @@ -3167,7 +3201,7 @@ snapshots: acorn: 8.15.0 pathe: 2.0.3 pkg-types: 1.3.1 - ufo: 1.6.1 + ufo: 1.6.2 mri@1.2.0: {} @@ -3213,11 +3247,13 @@ snapshots: object-hash@3.0.0: {} + obug@2.1.1: {} + ofetch@1.5.1: dependencies: destr: 2.0.5 node-fetch-native: 1.6.7 - ufo: 1.6.1 + ufo: 1.6.2 ohash@2.0.11: {} @@ -3433,32 +3469,35 @@ snapshots: reusify@1.1.0: {} - rollup@4.54.0: + rollup@4.55.1: dependencies: '@types/estree': 1.0.8 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.54.0 - '@rollup/rollup-android-arm64': 4.54.0 - '@rollup/rollup-darwin-arm64': 4.54.0 - '@rollup/rollup-darwin-x64': 4.54.0 - '@rollup/rollup-freebsd-arm64': 4.54.0 - '@rollup/rollup-freebsd-x64': 4.54.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.54.0 - '@rollup/rollup-linux-arm-musleabihf': 4.54.0 - '@rollup/rollup-linux-arm64-gnu': 4.54.0 - '@rollup/rollup-linux-arm64-musl': 4.54.0 - '@rollup/rollup-linux-loong64-gnu': 4.54.0 - '@rollup/rollup-linux-ppc64-gnu': 4.54.0 - '@rollup/rollup-linux-riscv64-gnu': 4.54.0 - '@rollup/rollup-linux-riscv64-musl': 4.54.0 - '@rollup/rollup-linux-s390x-gnu': 4.54.0 - '@rollup/rollup-linux-x64-gnu': 4.54.0 - '@rollup/rollup-linux-x64-musl': 4.54.0 - '@rollup/rollup-openharmony-arm64': 
4.54.0 - '@rollup/rollup-win32-arm64-msvc': 4.54.0 - '@rollup/rollup-win32-ia32-msvc': 4.54.0 - '@rollup/rollup-win32-x64-gnu': 4.54.0 - '@rollup/rollup-win32-x64-msvc': 4.54.0 + '@rollup/rollup-android-arm-eabi': 4.55.1 + '@rollup/rollup-android-arm64': 4.55.1 + '@rollup/rollup-darwin-arm64': 4.55.1 + '@rollup/rollup-darwin-x64': 4.55.1 + '@rollup/rollup-freebsd-arm64': 4.55.1 + '@rollup/rollup-freebsd-x64': 4.55.1 + '@rollup/rollup-linux-arm-gnueabihf': 4.55.1 + '@rollup/rollup-linux-arm-musleabihf': 4.55.1 + '@rollup/rollup-linux-arm64-gnu': 4.55.1 + '@rollup/rollup-linux-arm64-musl': 4.55.1 + '@rollup/rollup-linux-loong64-gnu': 4.55.1 + '@rollup/rollup-linux-loong64-musl': 4.55.1 + '@rollup/rollup-linux-ppc64-gnu': 4.55.1 + '@rollup/rollup-linux-ppc64-musl': 4.55.1 + '@rollup/rollup-linux-riscv64-gnu': 4.55.1 + '@rollup/rollup-linux-riscv64-musl': 4.55.1 + '@rollup/rollup-linux-s390x-gnu': 4.55.1 + '@rollup/rollup-linux-x64-gnu': 4.55.1 + '@rollup/rollup-linux-x64-musl': 4.55.1 + '@rollup/rollup-openbsd-x64': 4.55.1 + '@rollup/rollup-openharmony-arm64': 4.55.1 + '@rollup/rollup-win32-arm64-msvc': 4.55.1 + '@rollup/rollup-win32-ia32-msvc': 4.55.1 + '@rollup/rollup-win32-x64-gnu': 4.55.1 + '@rollup/rollup-win32-x64-msvc': 4.55.1 fsevents: 2.3.3 run-parallel@1.2.0: @@ -3619,12 +3658,12 @@ snapshots: dependencies: prelude-ls: 1.2.1 - typescript-eslint@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): + typescript-eslint@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): dependencies: - '@typescript-eslint/eslint-plugin': 8.51.0(@typescript-eslint/parser@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/parser': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/typescript-estree': 8.51.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/eslint-plugin': 8.52.0(@typescript-eslint/parser@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/parser': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/typescript-estree': 8.52.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) typescript: 5.9.3 transitivePeerDependencies: @@ -3632,7 +3671,7 @@ snapshots: typescript@5.9.3: {} - ufo@1.6.1: {} + ufo@1.6.2: {} undici-types@7.16.0: {} @@ -3669,23 +3708,23 @@ snapshots: uuid@13.0.0: {} - vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0): + vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0): dependencies: esbuild: 0.27.2 fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 postcss: 8.5.6 - rollup: 4.54.0 + rollup: 4.55.1 tinyglobby: 0.2.15 optionalDependencies: - '@types/node': 25.0.3 + '@types/node': 25.0.6 fsevents: 2.3.3 jiti: 1.21.7 yaml: 2.8.0 - vitefu@1.1.1(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)): + vitefu@1.1.1(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)): optionalDependencies: - vite: 7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0) + vite: 7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0) webpack-virtual-modules@0.6.2: {} From 594c3654c92464c67fa35703608677abb116f5ba Mon Sep 17 00:00:00 2001 From: static Date: Mon, 12 Jan 2026 05:04:07 +0900 Subject: [PATCH 11/19] =?UTF-8?q?=ED=8C=8C=EC=9D=BC=20=EB=B0=8F=20?= =?UTF-8?q?=EC=8D=B8=EB=84=A4=EC=9D=BC=20=EB=8B=A4=EC=9A=B4=EB=A1=9C?= 
=?UTF-8?q?=EB=93=9C=20Endpoint=EC=9D=98=20=ED=95=B8=EB=93=A4=EB=9F=AC?= =?UTF-8?q?=EB=A5=BC=20=ED=95=98=EB=82=98=EB=A1=9C=20=ED=86=B5=ED=95=A9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/lib/modules/http.ts | 14 ++++-- src/params/thumbnail.ts | 5 ++ .../download/+server.ts | 24 ++++------ .../file/[id]/thumbnail/download/+server.ts | 48 ------------------- .../api/upload/[id]/chunks/[index]/+server.ts | 32 +++++-------- 5 files changed, 39 insertions(+), 84 deletions(-) create mode 100644 src/params/thumbnail.ts rename src/routes/api/file/[id]/{ => [[thumbnail=thumbnail]]}/download/+server.ts (54%) delete mode 100644 src/routes/api/file/[id]/thumbnail/download/+server.ts diff --git a/src/lib/modules/http.ts b/src/lib/modules/http.ts index 4f79ec5..4116c18 100644 --- a/src/lib/modules/http.ts +++ b/src/lib/modules/http.ts @@ -1,7 +1,7 @@ -export const parseRangeHeader = (rangeHeader: string | null) => { - if (!rangeHeader) return undefined; +export const parseRangeHeader = (value: string | null) => { + if (!value) return undefined; - const firstRange = rangeHeader.split(",")[0]!.trim(); + const firstRange = value.split(",")[0]!.trim(); const parts = firstRange.replace(/bytes=/, "").split("-"); return { start: parts[0] ? parseInt(parts[0], 10) : undefined, @@ -12,3 +12,11 @@ export const parseRangeHeader = (rangeHeader: string | null) => { export const getContentRangeHeader = (range?: { start: number; end: number; total: number }) => { return range && { "Content-Range": `bytes ${range.start}-${range.end}/${range.total}` }; }; + +export const parseContentDigestHeader = (value: string | null) => { + if (!value) return undefined; + + const firstDigest = value.split(",")[0]!.trim(); + const match = firstDigest.match(/^sha-256=:([A-Za-z0-9+/=]+):$/); + return match?.[1]; +}; diff --git a/src/params/thumbnail.ts b/src/params/thumbnail.ts new file mode 100644 index 0000000..3faf298 --- /dev/null +++ b/src/params/thumbnail.ts @@ -0,0 +1,5 @@ +import type { ParamMatcher } from "@sveltejs/kit"; + +export const match: ParamMatcher = (param) => { + return param === "thumbnail"; +}; diff --git a/src/routes/api/file/[id]/download/+server.ts b/src/routes/api/file/[id]/[[thumbnail=thumbnail]]/download/+server.ts similarity index 54% rename from src/routes/api/file/[id]/download/+server.ts rename to src/routes/api/file/[id]/[[thumbnail=thumbnail]]/download/+server.ts index 5324365..a79da41 100644 --- a/src/routes/api/file/[id]/download/+server.ts +++ b/src/routes/api/file/[id]/[[thumbnail=thumbnail]]/download/+server.ts @@ -2,14 +2,10 @@ import { error } from "@sveltejs/kit"; import { z } from "zod"; import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; import { authorize } from "$lib/server/modules/auth"; -import { getFileStream } from "$lib/server/services/file"; -import type { RequestHandler } from "./$types"; +import { getFileStream, getFileThumbnailStream } from "$lib/server/services/file"; +import type { RequestHandler, RouteParams } from "./$types"; -const downloadHandler = async ( - locals: App.Locals, - params: Record, - request: Request, -) => { +const downloadHandler = async (locals: App.Locals, params: RouteParams, request: Request) => { const { userId } = await authorize(locals, "activeClient"); const zodRes = z @@ -20,29 +16,29 @@ const downloadHandler = async ( if (!zodRes.success) error(400, "Invalid path parameters"); const { id } = zodRes.data; - const { encContentStream, range } = await getFileStream( + const 
getStream = params.thumbnail ? getFileThumbnailStream : getFileStream; + const { encContentStream, range } = await getStream( userId, id, parseRangeHeader(request.headers.get("Range")), ); return { stream: encContentStream, + status: range ? 206 : 200, headers: { "Accept-Ranges": "bytes", - "Content-Length": (range.end - range.start + 1).toString(), + "Content-Length": String(range.end - range.start + 1), "Content-Type": "application/octet-stream", ...getContentRangeHeader(range), }, - isRangeRequest: !!range, }; }; export const GET: RequestHandler = async ({ locals, params, request }) => { - const { stream, headers, isRangeRequest } = await downloadHandler(locals, params, request); - return new Response(stream as ReadableStream, { status: isRangeRequest ? 206 : 200, headers }); + const { stream, ...init } = await downloadHandler(locals, params, request); + return new Response(stream as ReadableStream, init); }; export const HEAD: RequestHandler = async ({ locals, params, request }) => { - const { headers, isRangeRequest } = await downloadHandler(locals, params, request); - return new Response(null, { status: isRangeRequest ? 206 : 200, headers }); + return new Response(null, await downloadHandler(locals, params, request)); }; diff --git a/src/routes/api/file/[id]/thumbnail/download/+server.ts b/src/routes/api/file/[id]/thumbnail/download/+server.ts deleted file mode 100644 index 85cdd8c..0000000 --- a/src/routes/api/file/[id]/thumbnail/download/+server.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { error } from "@sveltejs/kit"; -import { z } from "zod"; -import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; -import { authorize } from "$lib/server/modules/auth"; -import { getFileThumbnailStream } from "$lib/server/services/file"; -import type { RequestHandler } from "./$types"; - -const downloadHandler = async ( - locals: App.Locals, - params: Record, - request: Request, -) => { - const { userId } = await authorize(locals, "activeClient"); - - const zodRes = z - .object({ - id: z.coerce.number().int().positive(), - }) - .safeParse(params); - if (!zodRes.success) error(400, "Invalid path parameters"); - const { id } = zodRes.data; - - const { encContentStream, range } = await getFileThumbnailStream( - userId, - id, - parseRangeHeader(request.headers.get("Range")), - ); - return { - stream: encContentStream, - headers: { - "Accept-Ranges": "bytes", - "Content-Length": (range.end - range.start + 1).toString(), - "Content-Type": "application/octet-stream", - ...getContentRangeHeader(range), - }, - isRangeRequest: !!range, - }; -}; - -export const GET: RequestHandler = async ({ locals, params, request }) => { - const { stream, headers, isRangeRequest } = await downloadHandler(locals, params, request); - return new Response(stream as ReadableStream, { status: isRangeRequest ? 206 : 200, headers }); -}; - -export const HEAD: RequestHandler = async ({ locals, params, request }) => { - const { headers, isRangeRequest } = await downloadHandler(locals, params, request); - return new Response(null, { status: isRangeRequest ? 
206 : 200, headers }); -}; diff --git a/src/routes/api/upload/[id]/chunks/[index]/+server.ts b/src/routes/api/upload/[id]/chunks/[index]/+server.ts index 47d6397..689d313 100644 --- a/src/routes/api/upload/[id]/chunks/[index]/+server.ts +++ b/src/routes/api/upload/[id]/chunks/[index]/+server.ts @@ -1,6 +1,8 @@ import { error, text } from "@sveltejs/kit"; import { Readable } from "stream"; +import { ReadableStream } from "stream/web"; import { z } from "zod"; +import { parseContentDigestHeader } from "$lib/modules/http"; import { authorize } from "$lib/server/modules/auth"; import { uploadChunk } from "$lib/server/services/upload"; import type { RequestHandler } from "./$types"; @@ -15,29 +17,21 @@ export const POST: RequestHandler = async ({ locals, params, request }) => { }) .safeParse(params); if (!zodRes.success) error(400, "Invalid path parameters"); - const { id: uploadId, index: chunkIndex } = zodRes.data; + const { id: sessionId, index: chunkIndex } = zodRes.data; - // Parse Content-Digest header (RFC 9530) - // Expected format: sha-256=:base64hash: - const contentDigest = request.headers.get("Content-Digest"); - if (!contentDigest) error(400, "Missing Content-Digest header"); - - const digestMatch = contentDigest.match(/^sha-256=:([A-Za-z0-9+/=]+):$/); - if (!digestMatch || !digestMatch[1]) - error(400, "Invalid Content-Digest format, must be sha-256=:base64:"); - const encChunkHash = digestMatch[1]; - - const contentType = request.headers.get("Content-Type"); - if (contentType !== "application/octet-stream" || !request.body) { + const encContentHash = parseContentDigestHeader(request.headers.get("Content-Digest")); + if (!encContentHash) { + error(400, "Invalid request headers"); + } else if (!request.body) { error(400, "Invalid request body"); } - // Convert web ReadableStream to Node Readable - const nodeReadable = Readable.fromWeb( - request.body as unknown as Parameters[0], + await uploadChunk( + userId, + sessionId, + chunkIndex, + Readable.fromWeb(request.body as ReadableStream), + encContentHash, ); - - await uploadChunk(userId, uploadId, chunkIndex, nodeReadable, encChunkHash); - return text("Chunk uploaded", { headers: { "Content-Type": "text/plain" } }); }; From 27e90ef4d7972b8d673c141bda25ccd2bb4eb086 Mon Sep 17 00:00:00 2001 From: static Date: Mon, 12 Jan 2026 08:40:07 +0900 Subject: [PATCH 12/19] =?UTF-8?q?=EC=9D=B4=EC=A0=84=20=EB=B2=84=EC=A0=84?= =?UTF-8?q?=EC=97=90=EC=84=9C=20=EC=97=85=EB=A1=9C=EB=93=9C=EB=90=9C=20?= =?UTF-8?q?=ED=8C=8C=EC=9D=BC=EC=9D=84=20=EC=B2=AD=ED=81=AC=20=EC=97=85?= =?UTF-8?q?=EB=A1=9C=EB=93=9C=20=EB=B0=A9=EC=8B=9D=EC=9C=BC=EB=A1=9C=20?= =?UTF-8?q?=EB=A7=88=EC=9D=B4=EA=B7=B8=EB=A0=88=EC=9D=B4=EC=85=98=ED=95=A0?= =?UTF-8?q?=20=EC=88=98=20=EC=9E=88=EB=8A=94=20=EA=B8=B0=EB=8A=A5=20?= =?UTF-8?q?=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/lib/server/db/error.ts | 1 + src/lib/server/db/file.ts | 56 ++++++ src/lib/server/db/schema/file.ts | 2 +- src/lib/server/db/schema/upload.ts | 2 +- src/lib/server/db/upload.ts | 51 +++++- .../settings/migration/+page.server.ts | 7 + .../settings/migration/+page.svelte | 79 +++++++++ .../settings/migration/File.svelte | 55 ++++++ .../settings/migration/service.svelte.ts | 165 ++++++++++++++++++ src/routes/(main)/menu/+page.svelte | 8 + src/trpc/routers/file.ts | 4 + src/trpc/routers/upload.ts | 104 +++++++++++ 12 files changed, 531 insertions(+), 3 deletions(-) create mode 100644 src/routes/(fullscreen)/settings/migration/+page.server.ts 
create mode 100644 src/routes/(fullscreen)/settings/migration/+page.svelte create mode 100644 src/routes/(fullscreen)/settings/migration/File.svelte create mode 100644 src/routes/(fullscreen)/settings/migration/service.svelte.ts diff --git a/src/lib/server/db/error.ts b/src/lib/server/db/error.ts index a145f14..0d61d72 100644 --- a/src/lib/server/db/error.ts +++ b/src/lib/server/db/error.ts @@ -9,6 +9,7 @@ type IntegrityErrorMessages = // File | "Directory not found" | "File not found" + | "File is not legacy" | "File not found in category" | "File already added to category" | "Invalid DEK version" diff --git a/src/lib/server/db/file.ts b/src/lib/server/db/file.ts index 0418bc6..9314f4b 100644 --- a/src/lib/server/db/file.ts +++ b/src/lib/server/db/file.ts @@ -334,6 +334,16 @@ export const getAllFileIds = async (userId: number) => { return files.map(({ id }) => id); }; +export const getLegacyFileIds = async (userId: number) => { + const files = await db + .selectFrom("file") + .select("id") + .where("user_id", "=", userId) + .where("encrypted_content_iv", "is not", null) + .execute(); + return files.map(({ id }) => id); +}; + export const getAllFileIdsByContentHmac = async ( userId: number, hskVersion: number, @@ -482,6 +492,52 @@ export const unregisterFile = async (userId: number, fileId: number) => { }); }; +export const migrateFileContent = async ( + trx: typeof db, + userId: number, + fileId: number, + newPath: string, + encContentHash: string, +) => { + const file = await trx + .selectFrom("file") + .select(["path", "encrypted_content_iv"]) + .where("id", "=", fileId) + .where("user_id", "=", userId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + + if (!file) { + throw new IntegrityError("File not found"); + } + if (!file.encrypted_content_iv) { + throw new IntegrityError("File is not legacy"); + } + + await trx + .updateTable("file") + .set({ + path: newPath, + encrypted_content_iv: null, + encrypted_content_hash: encContentHash, + }) + .where("id", "=", fileId) + .where("user_id", "=", userId) + .execute(); + + await trx + .insertInto("file_log") + .values({ + file_id: fileId, + timestamp: new Date(), + action: "migrate", + }) + .execute(); + + return file.path; +}; + export const addFileToCategory = async (fileId: number, categoryId: number) => { await db.transaction().execute(async (trx) => { try { diff --git a/src/lib/server/db/schema/file.ts b/src/lib/server/db/schema/file.ts index 0774082..3680d1d 100644 --- a/src/lib/server/db/schema/file.ts +++ b/src/lib/server/db/schema/file.ts @@ -41,7 +41,7 @@ interface FileLogTable { id: Generated; file_id: number; timestamp: ColumnType; - action: "create" | "rename" | "add-to-category" | "remove-from-category"; + action: "create" | "rename" | "migrate" | "add-to-category" | "remove-from-category"; new_name: Ciphertext | null; category_id: number | null; } diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts index fccde36..e20227d 100644 --- a/src/lib/server/db/schema/upload.ts +++ b/src/lib/server/db/schema/upload.ts @@ -3,7 +3,7 @@ import type { Ciphertext } from "./utils"; interface UploadSessionTable { id: string; - type: "file" | "thumbnail"; + type: "file" | "thumbnail" | "migration"; user_id: number; path: string; total_chunks: number; diff --git a/src/lib/server/db/upload.ts b/src/lib/server/db/upload.ts index d506191..876c150 100644 --- a/src/lib/server/db/upload.ts +++ b/src/lib/server/db/upload.ts @@ -31,6 +31,11 @@ interface ThumbnailUploadSession extends BaseUploadSession { dekVersion: 
Date; } +interface MigrationUploadSession extends BaseUploadSession { + type: "migration"; + fileId: number; +} + export const createFileUploadSession = async ( params: Omit, ) => { @@ -118,6 +123,39 @@ export const createThumbnailUploadSession = async ( }); }; +export const createMigrationUploadSession = async ( + params: Omit, +) => { + await db.transaction().execute(async (trx) => { + const file = await trx + .selectFrom("file") + .select("encrypted_content_iv") + .where("id", "=", params.fileId) + .where("user_id", "=", params.userId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (!file) { + throw new IntegrityError("File not found"); + } else if (!file.encrypted_content_iv) { + throw new IntegrityError("File is not legacy"); + } + + await trx + .insertInto("upload_session") + .values({ + id: params.id, + type: "migration", + user_id: params.userId, + path: params.path, + total_chunks: params.totalChunks, + expires_at: params.expiresAt, + file_id: params.fileId, + }) + .execute(); + }); +}; + export const getUploadSession = async (sessionId: string, userId: number) => { const session = await db .selectFrom("upload_session") @@ -148,7 +186,7 @@ export const getUploadSession = async (sessionId: string, userId: number) => { encCreatedAt: session.encrypted_created_at, encLastModifiedAt: session.encrypted_last_modified_at!, } satisfies FileUploadSession; - } else { + } else if (session.type === "thumbnail") { return { type: "thumbnail", id: session.id, @@ -160,6 +198,17 @@ export const getUploadSession = async (sessionId: string, userId: number) => { fileId: session.file_id!, dekVersion: session.data_encryption_key_version!, } satisfies ThumbnailUploadSession; + } else { + return { + type: "migration", + id: session.id, + userId: session.user_id, + path: session.path, + totalChunks: session.total_chunks, + uploadedChunks: session.uploaded_chunks, + expiresAt: session.expires_at, + fileId: session.file_id!, + } satisfies MigrationUploadSession; } }; diff --git a/src/routes/(fullscreen)/settings/migration/+page.server.ts b/src/routes/(fullscreen)/settings/migration/+page.server.ts new file mode 100644 index 0000000..3e1c32a --- /dev/null +++ b/src/routes/(fullscreen)/settings/migration/+page.server.ts @@ -0,0 +1,7 @@ +import { createCaller } from "$trpc/router.server"; +import type { PageServerLoad } from "./$types"; + +export const load: PageServerLoad = async (event) => { + const files = await createCaller(event).file.listLegacy(); + return { files }; +}; diff --git a/src/routes/(fullscreen)/settings/migration/+page.svelte b/src/routes/(fullscreen)/settings/migration/+page.svelte new file mode 100644 index 0000000..b4bc9cb --- /dev/null +++ b/src/routes/(fullscreen)/settings/migration/+page.svelte @@ -0,0 +1,79 @@ + + + + 암호화 마이그레이션 + + + + + {#if files.length > 0} +
+  이전 버전의 ArkVault에서 업로드된 {files.length}개 파일을 다시 암호화할 수 있어요.
+  {#each files as { info, state } (info.id)}
+    {#if info.exists}
+      <File {info} {state} onclick={({ id }) => goto(`/file/${id}`)} onMigrateClick={requestFileMigration} />
+    {/if}
+  {/each}
+{:else}
+  {#if data.files.length === 0}
+    마이그레이션할 파일이 없어요.
+  {:else}
+    파일 목록을 불러오고 있어요.
+  {/if}
+{/if}
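For orientation before the two new files below: the page pairs each resolved FileInfo with the MigrationState tracked by service.svelte.ts, and each row's action button simply calls requestFileMigration. A minimal sketch of that glue, assuming a hypothetical resolveFileInfo helper for turning the IDs returned by file.listLegacy into FileInfo objects (the real page takes the IDs from the server load above and resolves them through the filesystem module):

    // Sketch only — resolveFileInfo is hypothetical; listLegacy, getMigrationState,
    // and requestFileMigration are the APIs added in this patch.
    const ids = await trpc().file.listLegacy.query(); // number[]
    const files = await Promise.all(ids.map((id) => resolveFileInfo(id)));

    for (const info of files) {
      if (!info.exists) continue; // requestFileMigration requires FileInfo & { exists: true }
      void requestFileMigration(info); // queued; progress is read back via getMigrationState(info.id)
    }

Each call then walks the download → re-encrypt → chunked-upload pipeline, updating state.status at every stage, which is what the File rows render.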
diff --git a/src/routes/(fullscreen)/settings/migration/File.svelte b/src/routes/(fullscreen)/settings/migration/File.svelte new file mode 100644 index 0000000..ec9d25b --- /dev/null +++ b/src/routes/(fullscreen)/settings/migration/File.svelte @@ -0,0 +1,55 @@ + + + + + onclick(info)} + actionButtonIcon={!state || state.status === "error" ? IconSync : undefined} + onActionButtonClick={() => onMigrateClick(info)} + actionButtonClass="text-gray-800" +> + + diff --git a/src/routes/(fullscreen)/settings/migration/service.svelte.ts b/src/routes/(fullscreen)/settings/migration/service.svelte.ts new file mode 100644 index 0000000..d59f46e --- /dev/null +++ b/src/routes/(fullscreen)/settings/migration/service.svelte.ts @@ -0,0 +1,165 @@ +import { limitFunction } from "p-limit"; +import { SvelteMap } from "svelte/reactivity"; +import { CHUNK_SIZE } from "$lib/constants"; +import { encodeToBase64, encryptChunk, digestMessage } from "$lib/modules/crypto"; +import { deleteFileCache } from "$lib/modules/file"; +import type { FileInfo } from "$lib/modules/filesystem"; +import { Scheduler } from "$lib/modules/scheduler"; +import { requestFileDownload } from "$lib/services/file"; +import { trpc } from "$trpc/client"; + +export type MigrationStatus = + | "queued" + | "download-pending" + | "downloading" + | "encryption-pending" + | "encrypting" + | "upload-pending" + | "uploading" + | "completed" + | "error"; + +export interface MigrationState { + status: MigrationStatus; + progress?: number; + rate?: number; +} + +const scheduler = new Scheduler(); +const states = new SvelteMap(); + +const createState = (status: MigrationStatus): MigrationState => { + const state = $state({ status }); + return state; +}; + +export const getMigrationState = (fileId: number) => { + return states.get(fileId); +}; + +export const clearMigrationStates = () => { + for (const [id, state] of states) { + if (state.status === "completed" || state.status === "error") { + states.delete(id); + } + } +}; + +const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { + const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = []; + let offset = 0; + + while (offset < fileBuffer.byteLength) { + const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength); + const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey); + chunksEncrypted.push({ + chunkEncrypted: chunkEncrypted, + chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)), + }); + offset = nextOffset; + } + + return chunksEncrypted; +}; + +const uploadMigrationChunks = limitFunction( + async ( + state: MigrationState, + fileId: number, + chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[], + ) => { + state.status = "uploading"; + + const { uploadId } = await trpc().upload.startMigrationUpload.mutate({ + file: fileId, + chunks: chunksEncrypted.length, + }); + + const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0); + let uploadedBytes = 0; + const startTime = Date.now(); + + for (let i = 0; i < chunksEncrypted.length; i++) { + const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!; + + const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, { + method: "POST", + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${chunkEncryptedHash}:`, + }, + body: chunkEncrypted, + }); + + if (!response.ok) { + throw new Error(`Chunk upload failed: ${response.status} 
${response.statusText}`); + } + + uploadedBytes += chunkEncrypted.byteLength; + + const elapsed = (Date.now() - startTime) / 1000; + const rate = uploadedBytes / elapsed; + + state.progress = uploadedBytes / totalBytes; + state.rate = rate; + } + + await trpc().upload.completeMigrationUpload.mutate({ uploadId }); + }, + { concurrency: 1 }, +); + +const encryptFile = limitFunction( + async (state: MigrationState, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { + state.status = "encrypting"; + const chunksEncrypted = await encryptChunks(fileBuffer, dataKey); + state.status = "upload-pending"; + return chunksEncrypted; + }, + { concurrency: 4 }, +); + +export const requestFileMigration = async (fileInfo: FileInfo & { exists: true }) => { + let state = states.get(fileInfo.id); + if (state) { + if (state.status !== "error") return; + state.status = "queued"; + state.progress = undefined; + state.rate = undefined; + } else { + state = createState("queued"); + states.set(fileInfo.id, state); + } + + try { + const dataKey = fileInfo.dataKey?.key; + if (!dataKey) { + throw new Error("Data key not available"); + } + + let fileBuffer: ArrayBuffer | undefined; + + await scheduler.schedule( + async () => { + state.status = "download-pending"; + state.status = "downloading"; + fileBuffer = await requestFileDownload(fileInfo.id, dataKey, true); + return fileBuffer.byteLength; + }, + async () => { + state.status = "encryption-pending"; + const chunksEncrypted = await encryptFile(state, fileBuffer!, dataKey); + + await uploadMigrationChunks(state, fileInfo.id, chunksEncrypted); + + // Clear file cache since the file format has changed + await deleteFileCache(fileInfo.id); + + state.status = "completed"; + }, + ); + } catch (e) { + state.status = "error"; + throw e; + } +}; diff --git a/src/routes/(main)/menu/+page.svelte b/src/routes/(main)/menu/+page.svelte index 40f4a26..2bfd3fc 100644 --- a/src/routes/(main)/menu/+page.svelte +++ b/src/routes/(main)/menu/+page.svelte @@ -5,6 +5,7 @@ import IconStorage from "~icons/material-symbols/storage"; import IconImage from "~icons/material-symbols/image"; + import IconLockReset from "~icons/material-symbols/lock-reset"; import IconPassword from "~icons/material-symbols/password"; import IconLogout from "~icons/material-symbols/logout"; @@ -41,6 +42,13 @@ > 썸네일 + goto("/settings/migration")} + icon={IconLockReset} + iconColor="text-teal-500" + > + 암호화 마이그레이션 +

보안

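One protocol note before the router changes below: every chunk POSTed to /api/upload/[id]/chunks/[index] — by the migration service above and by the refactored uploader in the next patch — must carry an RFC 9530 Content-Digest header in exactly the sha-256=:<base64>: shape that parseContentDigestHeader from PATCH 11 accepts; otherwise the endpoint answers 400 "Invalid request headers". A minimal client-side sketch with Web Crypto, restating what the services in this series already do (uploadId, chunkIndex, and chunkEncrypted are assumed to be in scope):

    // Build the structured-field digest for one encrypted chunk (SHA-256, base64).
    const hash = new Uint8Array(await crypto.subtle.digest("SHA-256", chunkEncrypted));
    const contentDigest = `sha-256=:${btoa(String.fromCharCode(...hash))}:`; // 32 bytes, safe to spread

    const res = await fetch(`/api/upload/${uploadId}/chunks/${chunkIndex}`, {
      method: "POST",
      headers: { "Content-Type": "application/octet-stream", "Content-Digest": contentDigest },
      body: chunkEncrypted,
    });
    if (!res.ok) throw new Error(`Chunk upload failed: ${res.status} ${res.statusText}`);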
diff --git a/src/trpc/routers/file.ts b/src/trpc/routers/file.ts index 294300c..d6d658c 100644 --- a/src/trpc/routers/file.ts +++ b/src/trpc/routers/file.ts @@ -100,6 +100,10 @@ const fileRouter = router({ return await MediaRepo.getMissingFileThumbnails(ctx.session.userId); }), + listLegacy: roleProcedure["activeClient"].query(async ({ ctx }) => { + return await FileRepo.getLegacyFileIds(ctx.session.userId); + }), + rename: roleProcedure["activeClient"] .input( z.object({ diff --git a/src/trpc/routers/upload.ts b/src/trpc/routers/upload.ts index 168e957..adc0a3e 100644 --- a/src/trpc/routers/upload.ts +++ b/src/trpc/routers/upload.ts @@ -250,6 +250,110 @@ const uploadRouter = router({ sessionLocks.delete(uploadId); } }), + + startMigrationUpload: roleProcedure["activeClient"] + .input( + z.object({ + file: z.int().positive(), + chunks: z.int().positive(), + }), + ) + .mutation(async ({ ctx, input }) => { + const { id, path } = await generateSessionId(); + + try { + await UploadRepo.createMigrationUploadSession({ + id, + userId: ctx.session.userId, + path, + totalChunks: input.chunks, + expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours + fileId: input.file, + }); + return { uploadId: id }; + } catch (e) { + await safeRecursiveRm(path); + + if (e instanceof IntegrityError) { + if (e.message === "File not found") { + throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); + } else if (e.message === "File is not legacy") { + throw new TRPCError({ code: "BAD_REQUEST", message: "File is not legacy" }); + } + } + throw e; + } + }), + + completeMigrationUpload: roleProcedure["activeClient"] + .input( + z.object({ + uploadId: z.uuidv4(), + }), + ) + .mutation(async ({ ctx, input }) => { + const { uploadId } = input; + if (sessionLocks.has(uploadId)) { + throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" }); + } else { + sessionLocks.add(uploadId); + } + + let filePath = ""; + + try { + const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); + if (!session || session.type !== "migration") { + throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); + } else if (session.uploadedChunks.length < session.totalChunks) { + throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); + } + + filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`; + await mkdir(dirname(filePath), { recursive: true }); + + const hashStream = createHash("sha256"); + const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); + + for (let i = 0; i < session.totalChunks; i++) { + for await (const chunk of createReadStream(`${session.path}/${i}`)) { + hashStream.update(chunk); + writeStream.write(chunk); + } + } + + await new Promise((resolve, reject) => { + writeStream.end((e: any) => (e ? 
reject(e) : resolve())); + }); + + const hash = hashStream.digest("base64"); + const oldPath = await db.transaction().execute(async (trx) => { + const oldPath = await FileRepo.migrateFileContent( + trx, + ctx.session.userId, + session.fileId, + filePath, + hash, + ); + await UploadRepo.deleteUploadSession(trx, uploadId); + return oldPath; + }); + + await Promise.all([safeUnlink(oldPath), safeRecursiveRm(session.path)]); + } catch (e) { + await safeUnlink(filePath); + if (e instanceof IntegrityError) { + if (e.message === "File not found") { + throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); + } else if (e.message === "File is not legacy") { + throw new TRPCError({ code: "BAD_REQUEST", message: "File is not legacy" }); + } + } + throw e; + } finally { + sessionLocks.delete(uploadId); + } + }), }); export default uploadRouter; From b636d75ea08554ec6b413043b8da86e11c3c2f32 Mon Sep 17 00:00:00 2001 From: static Date: Mon, 12 Jan 2026 12:02:20 +0900 Subject: [PATCH 13/19] =?UTF-8?q?=ED=8C=8C=EC=9D=BC=20=EC=97=85=EB=A1=9C?= =?UTF-8?q?=EB=93=9C=20=EB=A1=9C=EC=A7=81=20=EB=A6=AC=ED=8C=A9=ED=86=A0?= =?UTF-8?q?=EB=A7=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/lib/modules/file/upload.svelte.ts | 367 ++++-------------- src/lib/modules/thumbnail.ts | 35 +- src/lib/modules/upload.ts | 103 +++++ .../settings/migration/service.svelte.ts | 80 +--- .../(main)/directory/[[id]]/+page.svelte | 2 +- .../(main)/directory/[[id]]/service.svelte.ts | 4 +- 6 files changed, 193 insertions(+), 398 deletions(-) create mode 100644 src/lib/modules/upload.ts diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index 4bea638..9e9f784 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -1,21 +1,18 @@ import ExifReader from "exifreader"; -import pLimit, { limitFunction } from "p-limit"; +import { limitFunction } from "p-limit"; import { CHUNK_SIZE } from "$lib/constants"; import { encodeToBase64, generateDataKey, wrapDataKey, - encryptData, encryptString, - encryptChunk, - digestMessage, createHmacStream, } from "$lib/modules/crypto"; import { Scheduler } from "$lib/modules/scheduler"; -import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail"; +import { generateThumbnail } from "$lib/modules/thumbnail"; +import { uploadBlob } from "$lib/modules/upload"; import type { MasterKey, HmacSecret } from "$lib/stores"; import { trpc } from "$trpc/client"; -import type { RouterInputs } from "$trpc/router.server"; export interface FileUploadState { name: string; @@ -114,295 +111,83 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => { return new Date(utcDate - offsetMs); }; -const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { - const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = []; - let offset = 0; - - while (offset < fileBuffer.byteLength) { - const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength); - const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey); - chunksEncrypted.push({ - chunkEncrypted: chunkEncrypted, - chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)), - }); - offset = nextOffset; - } - - return chunksEncrypted; -}; - -const encryptImageFile = limitFunction( - async (state: FileUploadState, file: File, masterKey: MasterKey) => { - state.status = "encrypting"; - - const fileBuffer = await 
file.arrayBuffer(); - const createdAt = extractExifDateTime(fileBuffer); - - const { dataKey, dataKeyVersion } = await generateDataKey(); - const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); - const chunksEncrypted = await encryptChunks(fileBuffer, dataKey); - - const nameEncrypted = await encryptString(file.name, dataKey); - const createdAtEncrypted = - createdAt && (await encryptString(createdAt.getTime().toString(), dataKey)); - const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); - - const thumbnail = await generateThumbnail(fileBuffer, getFileType(file)); - const thumbnailBuffer = await thumbnail?.arrayBuffer(); - const thumbnailEncrypted = thumbnailBuffer && (await encryptData(thumbnailBuffer, dataKey)); - - state.status = "upload-pending"; - - return { - dataKeyWrapped, - dataKeyVersion, - chunksEncrypted, - nameEncrypted, - createdAtEncrypted, - lastModifiedAtEncrypted, - thumbnail: thumbnailEncrypted && { plaintext: thumbnailBuffer, ...thumbnailEncrypted }, - }; - }, - { concurrency: 4 }, -); - -const uploadThumbnail = async ( - fileId: number, - thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer }, - dataKeyVersion: Date, -) => { - const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ - file: fileId, - dekVersion: dataKeyVersion, - }); - - const ivAndCiphertext = new Uint8Array( - thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength, - ); - ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0); - ivAndCiphertext.set( - new Uint8Array(thumbnailEncrypted.ciphertext), - thumbnailEncrypted.iv.byteLength, - ); - - const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext)); - - const response = await fetch(`/api/upload/${uploadId}/chunks/0`, { - method: "POST", - headers: { - "Content-Type": "application/octet-stream", - "Content-Digest": `sha-256=:${chunkHash}:`, - }, - body: ivAndCiphertext, - }); - - if (!response.ok) { - throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`); - } - - await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); -}; - -const requestImageFileUpload = limitFunction( - async ( - state: FileUploadState, - metadata: RouterInputs["upload"]["startFileUpload"], - chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[], - fileSigned: string | undefined, - thumbnailData: { ciphertext: ArrayBuffer; iv: ArrayBuffer; plaintext: ArrayBuffer } | null, - dataKeyVersion: Date, - ) => { - state.status = "uploading"; - - const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); - - const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0); - let uploadedBytes = 0; - const startTime = Date.now(); - - for (let i = 0; i < chunksEncrypted.length; i++) { - const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!; - - const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, { - method: "POST", - headers: { - "Content-Type": "application/octet-stream", - "Content-Digest": `sha-256=:${chunkEncryptedHash}:`, - }, - body: chunkEncrypted, - }); - - if (!response.ok) { - throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`); - } - - uploadedBytes += chunkEncrypted.byteLength; - - const elapsed = (Date.now() - startTime) / 1000; - const rate = uploadedBytes / elapsed; - const remaining = totalBytes - uploadedBytes; - const estimated = rate > 0 ? 
remaining / rate : undefined; - - state.progress = uploadedBytes / totalBytes; - state.rate = rate; - state.estimated = estimated; - } - - const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ - uploadId, - contentHmac: fileSigned, - }); - - if (thumbnailData) { - try { - await uploadThumbnail(fileId, thumbnailData, dataKeyVersion); - } catch (e) { - // TODO: Error handling for thumbnail upload - console.error(e); - } - } - - state.status = "uploaded"; - - return { fileId, thumbnailBuffer: thumbnailData?.plaintext }; - }, - { concurrency: 1 }, -); - -const requestFileUpload = async ( +const requestFileUpload2 = async ( state: FileUploadState, - file: File, + file: Blob, + fileSigned: string, + fileMetadata: { + parentId: "root" | number; + name: string; + createdAt?: Date; + lastModifiedAt: Date; + }, masterKey: MasterKey, hmacSecret: HmacSecret, - fileSigned: string, - parentId: DirectoryId, ) => { - state.status = "uploading"; + state.status = "encrypting"; - const fileType = getFileType(file); const { dataKey, dataKeyVersion } = await generateDataKey(); const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); - const nameEncrypted = await encryptString(file.name, dataKey); - const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); + const [nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnailBuffer] = + await Promise.all([ + encryptString(fileMetadata.name, dataKey), + fileMetadata.createdAt && encryptString(fileMetadata.createdAt.getTime().toString(), dataKey), + encryptString(fileMetadata.lastModifiedAt.getTime().toString(), dataKey), + generateThumbnail(file).then((blob) => blob?.arrayBuffer()), + ]); - const totalChunks = Math.ceil(file.size / CHUNK_SIZE); - const metadata = { - chunks: totalChunks, - parent: parentId, + const { uploadId } = await trpc().upload.startFileUpload.mutate({ + chunks: Math.ceil(file.size / CHUNK_SIZE), + parent: fileMetadata.parentId, mekVersion: masterKey.version, dek: dataKeyWrapped, dekVersion: dataKeyVersion, hskVersion: hmacSecret.version, - contentType: fileType, + contentType: file.type, name: nameEncrypted.ciphertext, nameIv: nameEncrypted.iv, + createdAt: createdAtEncrypted?.ciphertext, + createdAtIv: createdAtEncrypted?.iv, lastModifiedAt: lastModifiedAtEncrypted.ciphertext, lastModifiedAtIv: lastModifiedAtEncrypted.iv, - }; + }); - const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); + state.status = "uploading"; - const reader = file.stream().getReader(); - const limit = pLimit(4); - let buffer = new Uint8Array(0); - let chunkIndex = 0; - const uploadPromises: Promise[] = []; - - const totalBytes = file.size; - let uploadedBytes = 0; - const startTime = Date.now(); - - const uploadChunk = async ( - index: number, - encryptedChunk: ArrayBuffer, - chunkHash: string, - originalChunkSize: number, - ) => { - const response = await fetch(`/api/upload/${uploadId}/chunks/${index}`, { - method: "POST", - headers: { - "Content-Type": "application/octet-stream", - "Content-Digest": `sha-256=:${chunkHash}:`, - }, - body: encryptedChunk, - }); - - if (!response.ok) { - throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`); - } - - uploadedBytes += originalChunkSize; - const elapsed = (Date.now() - startTime) / 1000; - const rate = uploadedBytes / elapsed; - const remaining = totalBytes - uploadedBytes; - const estimated = rate > 0 ? 
remaining / rate : undefined; - - state.progress = uploadedBytes / totalBytes; - state.rate = rate; - state.estimated = estimated; - }; - - while (true) { - const { done, value } = await reader.read(); - if (done && buffer.length === 0) break; - - if (value) { - const newBuffer = new Uint8Array(buffer.length + value.length); - newBuffer.set(buffer); - newBuffer.set(value, buffer.length); - buffer = newBuffer; - } - - while (buffer.length >= CHUNK_SIZE || (done && buffer.length > 0)) { - const chunkSize = Math.min(CHUNK_SIZE, buffer.length); - const chunk = buffer.slice(0, chunkSize); - buffer = buffer.slice(chunkSize); - - const encryptedChunk = await encryptChunk(chunk.buffer.slice(0, chunk.byteLength), dataKey); - const chunkHash = encodeToBase64(await digestMessage(encryptedChunk)); - const currentIndex = chunkIndex++; - - uploadPromises.push( - limit(() => uploadChunk(currentIndex, encryptedChunk, chunkHash, chunkSize)), - ); - } - - if (done) break; - } - - await Promise.all(uploadPromises); + await uploadBlob(uploadId, file, dataKey, { + onProgress(s) { + state.progress = s.progress; + state.rate = s.rateBps; + }, + }); const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ uploadId, contentHmac: fileSigned, }); - if (fileType.startsWith("video/")) { - try { - const thumbnail = await generateThumbnailFromFile(file); - if (thumbnail) { - const thumbnailBuffer = await thumbnail.arrayBuffer(); - const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey); + if (thumbnailBuffer) { + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); - await uploadThumbnail(fileId, thumbnailEncrypted, dataKeyVersion); - } - } catch (e) { - // Thumbnail upload failure is not critical - console.error(e); - } + await uploadBlob(uploadId, new Blob([thumbnailBuffer]), dataKey); + + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); } state.status = "uploaded"; - return { fileId }; + return { fileId, thumbnailBuffer }; }; export const uploadFile = async ( file: File, parentId: "root" | number, - hmacSecret: HmacSecret, masterKey: MasterKey, + hmacSecret: HmacSecret, onDuplicate: () => Promise, ) => { uploadingFiles.push({ @@ -426,51 +211,37 @@ export const uploadFile = async ( const fileType = getFileType(file); if (fileType.startsWith("image/")) { const fileBuffer = await file.arrayBuffer(); - const { - dataKeyWrapped, - dataKeyVersion, - chunksEncrypted, - nameEncrypted, - createdAtEncrypted, - lastModifiedAtEncrypted, - thumbnail, - } = await encryptImageFile(state, file, masterKey); + const fileCreatedAt = extractExifDateTime(fileBuffer); - const metadata = { - chunks: chunksEncrypted.length, - parent: parentId, - mekVersion: masterKey.version, - dek: dataKeyWrapped, - dekVersion: dataKeyVersion, - hskVersion: hmacSecret.version, - contentType: fileType, - name: nameEncrypted.ciphertext, - nameIv: nameEncrypted.iv, - createdAt: createdAtEncrypted?.ciphertext, - createdAtIv: createdAtEncrypted?.iv, - lastModifiedAt: lastModifiedAtEncrypted.ciphertext, - lastModifiedAtIv: lastModifiedAtEncrypted.iv, - }; - - const { fileId, thumbnailBuffer } = await requestImageFileUpload( + const { fileId, thumbnailBuffer } = await requestFileUpload2( state, - metadata, - chunksEncrypted, + new Blob([fileBuffer], { type: fileType }), fileSigned, - thumbnail ?? 
null,
-      dataKeyVersion,
-    );
-    return { fileId, fileBuffer, thumbnailBuffer };
-  } else {
-    const { fileId } = await requestFileUpload(
-      state,
-      file,
+      {
+        parentId,
+        name: file.name,
+        createdAt: fileCreatedAt,
+        lastModifiedAt: new Date(file.lastModified),
+      },
       masterKey,
       hmacSecret,
-      fileSigned,
-      parentId,
     );
-    return { fileId };
+
+    return { fileId, fileBuffer, thumbnailBuffer };
+  } else {
+    const { fileId, thumbnailBuffer } = await requestFileUpload2(
+      state,
+      file,
+      fileSigned,
+      {
+        parentId,
+        name: file.name,
+        lastModifiedAt: new Date(file.lastModified),
+      },
+      masterKey,
+      hmacSecret,
+    );
+    return { fileId, thumbnailBuffer };
   }
 } catch (e) {
   state.status = "error";
diff --git a/src/lib/modules/thumbnail.ts b/src/lib/modules/thumbnail.ts
index 75b0168..18b0745 100644
--- a/src/lib/modules/thumbnail.ts
+++ b/src/lib/modules/thumbnail.ts
@@ -52,7 +52,6 @@ const generateImageThumbnail = (imageUrl: string) => {
       .catch(reject);
   };
   image.onerror = reject;
-    image.src = imageUrl;
 });
};
@@ -85,31 +84,27 @@ const generateVideoThumbnail = (videoUrl: string, time = 0) => {
  });
};

-export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: string) => {
+export const generateThumbnail = async (blob: Blob) => {
   let url;
   try {
-    if (fileType.startsWith("image/")) {
-      const fileBlob = new Blob([fileBuffer], { type: fileType });
-      url = URL.createObjectURL(fileBlob);
-
+    if (blob.type.startsWith("image/")) {
+      url = URL.createObjectURL(blob);
       try {
         return await generateImageThumbnail(url);
       } catch {
         URL.revokeObjectURL(url);
         url = undefined;
 
-        if (fileType === "image/heic") {
+        if (blob.type === "image/heic") {
           const { default: heic2any } = await import("heic2any");
-          url = URL.createObjectURL(
-            (await heic2any({ blob: fileBlob, toType: "image/png" })) as Blob,
-          );
+          url = URL.createObjectURL((await heic2any({ blob, toType: "image/png" })) as Blob);
           return await generateImageThumbnail(url);
         } else {
           return null;
         }
       }
-    } else if (fileType.startsWith("video/")) {
-      url = URL.createObjectURL(new Blob([fileBuffer], { type: fileType }));
+    } else if (blob.type.startsWith("video/")) {
+      url = URL.createObjectURL(blob);
       return await generateVideoThumbnail(url);
     }
     return null;
@@ -122,22 +117,6 @@ export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: strin
   }
 };
 
-export const generateThumbnailFromFile = async (file: File) => {
-  if (!file.type.startsWith("video/")) return null;
-
-  let url;
-  try {
-    url = URL.createObjectURL(file);
-    return await generateVideoThumbnail(url);
-  } catch {
-    return null;
-  } finally {
-    if (url) {
-      URL.revokeObjectURL(url);
-    }
-  }
-};
-
 export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => {
   return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`;
 };
diff --git a/src/lib/modules/upload.ts b/src/lib/modules/upload.ts
new file mode 100644
index 0000000..231b54b
--- /dev/null
+++ b/src/lib/modules/upload.ts
@@ -0,0 +1,103 @@
+import axios from "axios";
+import pLimit from "p-limit";
+import { ENCRYPTION_OVERHEAD, CHUNK_SIZE } from "$lib/constants";
+import { encryptChunk, digestMessage, encodeToBase64 } from "$lib/modules/crypto";
+
+type UploadStats = {
+  progress: number; // 0..1 (relative to encrypted size)
+  rateBps: number; // bytes/sec
+  uploadedBytes: number;
+  totalBytes: number;
+};
+
+function createSpeedMeter(windowMs = 1500) {
+  const samples: Array<{ t: number; b: number }> = [];
+  return (bytesNow: number) => {
+    const now = performance.now();
+    samples.push({ t: now, b: bytesNow });
+    const cutoff = now - 
windowMs; + while (samples.length > 2 && samples[0]!.t < cutoff) samples.shift(); + + const first = samples[0]!; + const dt = now - first.t; + const db = bytesNow - first.b; + return dt > 0 ? (db / dt) * 1000 : 0; + }; +} + +const uploadChunk = async ( + uploadId: string, + chunkIndex: number, + chunk: Blob, + dataKey: CryptoKey, + onChunkProgress: (chunkIndex: number, loaded: number) => void, +) => { + const chunkEncrypted = await encryptChunk(await chunk.arrayBuffer(), dataKey); + const chunkEncryptedHash = encodeToBase64(await digestMessage(chunkEncrypted)); + + await axios.post(`/api/upload/${uploadId}/chunks/${chunkIndex}`, chunkEncrypted, { + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${chunkEncryptedHash}:`, + }, + onUploadProgress(e) { + onChunkProgress(chunkIndex, e.loaded ?? 0); + }, + }); + + onChunkProgress(chunkIndex, chunkEncrypted.byteLength); +}; + +export const uploadBlob = async ( + uploadId: string, + blob: Blob, + dataKey: CryptoKey, + options?: { concurrency?: number; onProgress?: (s: UploadStats) => void }, +) => { + const onProgress = options?.onProgress; + + const totalChunks = Math.ceil(blob.size / CHUNK_SIZE); + const totalBytes = blob.size + totalChunks * ENCRYPTION_OVERHEAD; + + const uploadedByChunk = new Array(totalChunks).fill(0); + const speedMeter = createSpeedMeter(1500); + + const emit = () => { + if (!onProgress) return; + + const uploadedBytes = uploadedByChunk.reduce((a, b) => a + b, 0); + const rateBps = speedMeter(uploadedBytes); + const progress = Math.min(1, uploadedBytes / totalBytes); + + onProgress({ progress, rateBps, uploadedBytes, totalBytes }); + }; + + const onChunkProgress = (idx: number, loaded: number) => { + uploadedByChunk[idx] = loaded; + emit(); + }; + + const limit = pLimit(options?.concurrency ?? 
4);
+
+  await Promise.all(
+    Array.from({ length: totalChunks }, (_, chunkIndex) =>
+      limit(() =>
+        uploadChunk(
+          uploadId,
+          chunkIndex,
+          blob.slice(chunkIndex * CHUNK_SIZE, (chunkIndex + 1) * CHUNK_SIZE),
+          dataKey,
+          onChunkProgress,
+        ),
+      ),
+    ),
+  );
+
+  // Final progress correction
+  onProgress?.({
+    progress: 1,
+    rateBps: 0,
+    uploadedBytes: totalBytes,
+    totalBytes,
+  });
+};
diff --git a/src/routes/(fullscreen)/settings/migration/service.svelte.ts b/src/routes/(fullscreen)/settings/migration/service.svelte.ts
index d59f46e..9d08db2 100644
--- a/src/routes/(fullscreen)/settings/migration/service.svelte.ts
+++ b/src/routes/(fullscreen)/settings/migration/service.svelte.ts
@@ -1,10 +1,9 @@
 import { limitFunction } from "p-limit";
 import { SvelteMap } from "svelte/reactivity";
 import { CHUNK_SIZE } from "$lib/constants";
-import { encodeToBase64, encryptChunk, digestMessage } from "$lib/modules/crypto";
-import { deleteFileCache } from "$lib/modules/file";
 import type { FileInfo } from "$lib/modules/filesystem";
 import { Scheduler } from "$lib/modules/scheduler";
+import { uploadBlob } from "$lib/modules/upload";
 import { requestFileDownload } from "$lib/services/file";
 import { trpc } from "$trpc/client";
 
@@ -45,81 +44,28 @@ export const clearMigrationStates = () => {
   }
 };
 
-const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
-  const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = [];
-  let offset = 0;
-
-  while (offset < fileBuffer.byteLength) {
-    const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength);
-    const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey);
-    chunksEncrypted.push({
-      chunkEncrypted: chunkEncrypted,
-      chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)),
-    });
-    offset = nextOffset;
-  }
-
-  return chunksEncrypted;
-};
-
 const uploadMigrationChunks = limitFunction(
-  async (
-    state: MigrationState,
-    fileId: number,
-    chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[],
-  ) => {
+  async (state: MigrationState, fileId: number, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
     state.status = "uploading";
 
     const { uploadId } = await trpc().upload.startMigrationUpload.mutate({
       file: fileId,
-      chunks: chunksEncrypted.length,
+      chunks: Math.ceil(fileBuffer.byteLength / CHUNK_SIZE),
     });
 
-    const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0);
-    let uploadedBytes = 0;
-    const startTime = Date.now();
-
-    for (let i = 0; i < chunksEncrypted.length; i++) {
-      const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!;
-
-      const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, {
-        method: "POST",
-        headers: {
-          "Content-Type": "application/octet-stream",
-          "Content-Digest": `sha-256=:${chunkEncryptedHash}:`,
-        },
-        body: chunkEncrypted,
-      });
-
-      if (!response.ok) {
-        throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
-      }
-
-      uploadedBytes += chunkEncrypted.byteLength;
-
-      const elapsed = (Date.now() - startTime) / 1000;
-      const rate = uploadedBytes / elapsed;
-
-      state.progress = uploadedBytes / totalBytes;
-      state.rate = rate;
-    }
+    await uploadBlob(uploadId, new Blob([fileBuffer]), dataKey, {
+      onProgress(s) {
+        state.progress = s.progress;
+        state.rate = s.rateBps;
+      },
+    });
 
     await trpc().upload.completeMigrationUpload.mutate({ uploadId });
   },
   { concurrency: 1 },
 );
 
-const encryptFile = limitFunction(
-  async (state: MigrationState, fileBuffer: 
ArrayBuffer, dataKey: CryptoKey) => {
-    state.status = "encrypting";
-    const chunksEncrypted = await encryptChunks(fileBuffer, dataKey);
-    state.status = "upload-pending";
-    return chunksEncrypted;
-  },
-  { concurrency: 4 },
-);
-
-export const requestFileMigration = async (fileInfo: FileInfo & { exists: true }) => {
+export const requestFileMigration = async (fileInfo: FileInfo) => {
   let state = states.get(fileInfo.id);
   if (state) {
     if (state.status !== "error") return;
@@ -148,12 +94,8 @@ export const requestFileMigration = async (fileInfo: FileInfo & { exists: true }
     },
     async () => {
       state.status = "encryption-pending";
-      const chunksEncrypted = await encryptFile(state, fileBuffer!, dataKey);
 
-      await uploadMigrationChunks(state, fileInfo.id, chunksEncrypted);
-
-      // Clear file cache since the file format has changed
-      await deleteFileCache(fileInfo.id);
+      await uploadMigrationChunks(state, fileInfo.id, fileBuffer!, dataKey);
 
       state.status = "completed";
     },
diff --git a/src/routes/(main)/directory/[[id]]/+page.svelte b/src/routes/(main)/directory/[[id]]/+page.svelte
index a0a4d53..f500f34 100644
--- a/src/routes/(main)/directory/[[id]]/+page.svelte
+++ b/src/routes/(main)/directory/[[id]]/+page.svelte
@@ -51,7 +51,7 @@
     if (!files || files.length === 0) return;
 
     for (const file of files) {
-      requestFileUpload(file, data.id, $hmacSecretStore?.get(1)!, $masterKeyStore?.get(1)!, () => {
+      requestFileUpload(file, data.id, $masterKeyStore?.get(1)!, $hmacSecretStore?.get(1)!, () => {
         return new Promise((resolve) => {
           duplicatedFile = file;
           resolveForDuplicateFileModal = resolve;
diff --git a/src/routes/(main)/directory/[[id]]/service.svelte.ts b/src/routes/(main)/directory/[[id]]/service.svelte.ts
index ccd5b14..be6392c 100644
--- a/src/routes/(main)/directory/[[id]]/service.svelte.ts
+++ b/src/routes/(main)/directory/[[id]]/service.svelte.ts
@@ -81,11 +81,11 @@ export const requestDirectoryCreation = async (
 export const requestFileUpload = async (
   file: File,
   parentId: "root" | number,
-  hmacSecret: HmacSecret,
   masterKey: MasterKey,
+  hmacSecret: HmacSecret,
   onDuplicate: () => Promise,
 ) => {
-  const res = await uploadFile(file, parentId, hmacSecret, masterKey, onDuplicate);
+  const res = await uploadFile(file, parentId, masterKey, hmacSecret, onDuplicate);
   if (!res) return false;
 
   if (res.fileBuffer) {

From e7dc96bb47d8cdb74ae8558ded9c8509022739e2 Mon Sep 17 00:00:00 2001
From: static
Date: Mon, 12 Jan 2026 15:16:43 +0900
Subject: [PATCH 14/19] Compute HMAC in a Web Worker
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/lib/modules/crypto/sha.ts         | 30 ++++++++++++++++++-------
 src/lib/modules/file/upload.svelte.ts | 21 ++++---------------
 src/workers/hmac.ts                   | 25 ++++++++++++++++++++++
 svelte.config.js                      |  1 +
 4 files changed, 52 insertions(+), 25 deletions(-)
 create mode 100644 src/workers/hmac.ts

diff --git a/src/lib/modules/crypto/sha.ts b/src/lib/modules/crypto/sha.ts
index 61c2ed7..5e9e3fa 100644
--- a/src/lib/modules/crypto/sha.ts
+++ b/src/lib/modules/crypto/sha.ts
@@ -1,5 +1,5 @@
-import { hmac } from "@noble/hashes/hmac.js";
-import { sha256 } from "@noble/hashes/sha2.js";
+import HmacWorker from "$workers/hmac?worker";
+import type { ComputeMessage, ResultMessage } from "$workers/hmac";
 
 export const digestMessage = async (message: BufferSource) => {
   return await crypto.subtle.digest("SHA-256", 
message); @@ -18,10 +18,24 @@ export const generateHmacSecret = async () => { }; }; -export const createHmacStream = async (hmacSecret: CryptoKey) => { - const h = hmac.create(sha256, new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret))); - return { - update: (data: Uint8Array) => h.update(data), - digest: () => h.digest(), - }; +export const signMessageHmac = async (message: Blob, hmacSecret: CryptoKey) => { + const worker = new HmacWorker(); + const stream = message.stream(); + const hmacSecretRaw = new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret)); + + return new Promise((resolve, reject) => { + worker.onmessage = (event: MessageEvent) => { + resolve(event.data.result); + worker.terminate(); + }; + + worker.onerror = ({ error }) => { + reject(error); + worker.terminate(); + }; + + worker.postMessage({ stream, key: hmacSecretRaw } satisfies ComputeMessage, { + transfer: [stream, hmacSecretRaw.buffer], + }); + }); }; diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index 9e9f784..d066d4f 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -1,13 +1,8 @@ import ExifReader from "exifreader"; import { limitFunction } from "p-limit"; import { CHUNK_SIZE } from "$lib/constants"; -import { - encodeToBase64, - generateDataKey, - wrapDataKey, - encryptString, - createHmacStream, -} from "$lib/modules/crypto"; +import { encodeToBase64, generateDataKey, wrapDataKey, encryptString } from "$lib/modules/crypto"; +import { signMessageHmac } from "$lib/modules/crypto"; import { Scheduler } from "$lib/modules/scheduler"; import { generateThumbnail } from "$lib/modules/thumbnail"; import { uploadBlob } from "$lib/modules/upload"; @@ -56,16 +51,8 @@ export const clearUploadedFiles = () => { const requestDuplicateFileScan = limitFunction( async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise) => { - const hmacStream = await createHmacStream(hmacSecret.secret); - const reader = file.stream().getReader(); - - while (true) { - const { done, value } = await reader.read(); - if (done) break; - hmacStream.update(value); - } - - const fileSigned = encodeToBase64(hmacStream.digest()); + const hmacResult = await signMessageHmac(file, hmacSecret.secret); + const fileSigned = encodeToBase64(hmacResult); const files = await trpc().file.listByHash.query({ hskVersion: hmacSecret.version, contentHmac: fileSigned, diff --git a/src/workers/hmac.ts b/src/workers/hmac.ts new file mode 100644 index 0000000..1b20235 --- /dev/null +++ b/src/workers/hmac.ts @@ -0,0 +1,25 @@ +import { hmac } from "@noble/hashes/hmac.js"; +import { sha256 } from "@noble/hashes/sha2.js"; + +export interface ComputeMessage { + stream: ReadableStream; + key: Uint8Array; +} + +export interface ResultMessage { + result: Uint8Array; +} + +self.onmessage = async (event: MessageEvent) => { + const h = hmac.create(sha256, event.data.key); + const reader = event.data.stream.getReader(); + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + h.update(value); + } + + const result = h.digest(); + self.postMessage({ result } satisfies ResultMessage, { transfer: [result.buffer] }); +}; diff --git a/svelte.config.js b/svelte.config.js index 4ffc844..6562b93 100644 --- a/svelte.config.js +++ b/svelte.config.js @@ -8,6 +8,7 @@ const config = { adapter: adapter(), alias: { $trpc: "./src/trpc", + $workers: "./src/workers", }, }, }; From c778a4fb9e342f7b77bf4be9c8f6d488c5c6ff3b Mon Sep 17 00:00:00 2001 From: 
static
Date: Mon, 12 Jan 2026 16:58:28 +0900
Subject: [PATCH 15/19] Refactor file upload logic 2
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/lib/modules/file/upload.svelte.ts         | 218 ++++++++++--------
 .../settings/migration/+page.svelte           |   2 +-
 .../settings/migration/File.svelte            |   5 +-
 .../settings/migration/service.svelte.ts      |  19 +-
 .../settings/thumbnail/service.ts             |   2 +-
 5 files changed, 129 insertions(+), 117 deletions(-)

diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts
index d066d4f..9bf043a 100644
--- a/src/lib/modules/file/upload.svelte.ts
+++ b/src/lib/modules/file/upload.svelte.ts
@@ -50,7 +50,14 @@ export const clearUploadedFiles = () => {
 };
 
 const requestDuplicateFileScan = limitFunction(
-  async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise) => {
+  async (
+    state: FileUploadState,
+    file: File,
+    hmacSecret: HmacSecret,
+    onDuplicate: () => Promise,
+  ) => {
+    state.status = "encryption-pending";
+
     const hmacResult = await signMessageHmac(file, hmacSecret.secret);
     const fileSigned = encodeToBase64(hmacResult);
     const files = await trpc().file.listByHash.query({
       hskVersion: hmacSecret.version,
       contentHmac: fileSigned,
@@ -98,77 +105,101 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
   return new Date(utcDate - offsetMs);
 };
 
-const requestFileUpload2 = async (
-  state: FileUploadState,
-  file: Blob,
-  fileSigned: string,
-  fileMetadata: {
-    parentId: "root" | number;
-    name: string;
-    createdAt?: Date;
-    lastModifiedAt: Date;
-  },
-  masterKey: MasterKey,
-  hmacSecret: HmacSecret,
-) => {
-  state.status = "encrypting";
+interface FileMetadata {
+  parentId: "root" | number;
+  name: string;
+  createdAt?: Date;
+  lastModifiedAt: Date;
+}
 
-  const { dataKey, dataKeyVersion } = await generateDataKey();
-  const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
+const requestFileMetadataEncryption = limitFunction(
+  async (
+    state: FileUploadState,
+    file: Blob,
+    fileMetadata: FileMetadata,
+    masterKey: MasterKey,
+    hmacSecret: HmacSecret,
+  ) => {
+    state.status = "encrypting";
 
-  const [nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnailBuffer] =
-    await Promise.all([
-      encryptString(fileMetadata.name, dataKey),
-      fileMetadata.createdAt && encryptString(fileMetadata.createdAt.getTime().toString(), dataKey),
-      encryptString(fileMetadata.lastModifiedAt.getTime().toString(), dataKey),
-      generateThumbnail(file).then((blob) => blob?.arrayBuffer()),
-    ]);
+    const { dataKey, dataKeyVersion } = await generateDataKey();
+    const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
 
-  const { uploadId } = await trpc().upload.startFileUpload.mutate({
-    chunks: Math.ceil(file.size / CHUNK_SIZE),
-    parent: fileMetadata.parentId,
-    mekVersion: masterKey.version,
-    dek: dataKeyWrapped,
-    dekVersion: dataKeyVersion,
-    hskVersion: hmacSecret.version,
-    contentType: file.type,
-    name: nameEncrypted.ciphertext,
-    nameIv: nameEncrypted.iv,
-    createdAt: createdAtEncrypted?.ciphertext,
-    createdAtIv: createdAtEncrypted?.iv,
-    lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
-    lastModifiedAtIv: lastModifiedAtEncrypted.iv,
-  });
+    const [nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnailBuffer] =
+      await Promise.all([
+        encryptString(fileMetadata.name, dataKey),
+        fileMetadata.createdAt &&
+          encryptString(fileMetadata.createdAt.getTime().toString(), dataKey),
+ 
encryptString(fileMetadata.lastModifiedAt.getTime().toString(), dataKey), + generateThumbnail(file).then((blob) => blob?.arrayBuffer()), + ]); - state.status = "uploading"; - - await uploadBlob(uploadId, file, dataKey, { - onProgress(s) { - state.progress = s.progress; - state.rate = s.rateBps; - }, - }); - - const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ - uploadId, - contentHmac: fileSigned, - }); - - if (thumbnailBuffer) { - const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ - file: fileId, + const { uploadId } = await trpc().upload.startFileUpload.mutate({ + chunks: Math.ceil(file.size / CHUNK_SIZE), + parent: fileMetadata.parentId, + mekVersion: masterKey.version, + dek: dataKeyWrapped, dekVersion: dataKeyVersion, + hskVersion: hmacSecret.version, + contentType: file.type, + name: nameEncrypted.ciphertext, + nameIv: nameEncrypted.iv, + createdAt: createdAtEncrypted?.ciphertext, + createdAtIv: createdAtEncrypted?.iv, + lastModifiedAt: lastModifiedAtEncrypted.ciphertext, + lastModifiedAtIv: lastModifiedAtEncrypted.iv, }); - await uploadBlob(uploadId, new Blob([thumbnailBuffer]), dataKey); + state.status = "upload-pending"; + return { uploadId, thumbnailBuffer, dataKey, dataKeyVersion }; + }, + { concurrency: 4 }, +); - await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); - } +const requestFileUpload = limitFunction( + async ( + state: FileUploadState, + uploadId: string, + file: Blob, + fileSigned: string, + thumbnailBuffer: ArrayBuffer | undefined, + dataKey: CryptoKey, + dataKeyVersion: Date, + ) => { + state.status = "uploading"; - state.status = "uploaded"; + await uploadBlob(uploadId, file, dataKey, { + onProgress(s) { + state.progress = s.progress; + state.rate = s.rateBps; + }, + }); - return { fileId, thumbnailBuffer }; -}; + const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ + uploadId, + contentHmac: fileSigned, + }); + + if (thumbnailBuffer) { + try { + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); + + await uploadBlob(uploadId, new Blob([thumbnailBuffer]), dataKey); + + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); + } catch (e) { + console.error(e); + } + } + + state.status = "uploaded"; + return { fileId }; + }, + { concurrency: 1 }, +); export const uploadFile = async ( file: File, @@ -185,51 +216,44 @@ export const uploadFile = async ( const state = uploadingFiles.at(-1)!; return await scheduler.schedule(file.size, async () => { - state.status = "encryption-pending"; - try { - const { fileSigned } = await requestDuplicateFileScan(file, hmacSecret, onDuplicate); + const { fileSigned } = await requestDuplicateFileScan(state, file, hmacSecret, onDuplicate); + if (!fileSigned) { state.status = "canceled"; uploadingFiles = uploadingFiles.filter((file) => file !== state); return; } + let fileBuffer; const fileType = getFileType(file); + const fileMetadata: FileMetadata = { + parentId, + name: file.name, + lastModifiedAt: new Date(file.lastModified), + }; + if (fileType.startsWith("image/")) { - const fileBuffer = await file.arrayBuffer(); - const fileCreatedAt = extractExifDateTime(fileBuffer); - - const { fileId, thumbnailBuffer } = await requestFileUpload2( - state, - new Blob([fileBuffer], { type: fileType }), - fileSigned, - { - parentId, - name: file.name, - createdAt: fileCreatedAt, - lastModifiedAt: new Date(file.lastModified), - }, - masterKey, - hmacSecret, - ); - - return { 
fileId, fileBuffer, thumbnailBuffer }; - } else { - const { fileId, thumbnailBuffer } = await requestFileUpload2( - state, - file, - fileSigned, - { - parentId, - name: file.name, - lastModifiedAt: new Date(file.lastModified), - }, - masterKey, - hmacSecret, - ); - return { fileId, thumbnailBuffer }; + fileBuffer = await file.arrayBuffer(); + fileMetadata.createdAt = extractExifDateTime(fileBuffer); } + + const blob = new Blob([file], { type: fileType }); + + const { uploadId, thumbnailBuffer, dataKey, dataKeyVersion } = + await requestFileMetadataEncryption(state, blob, fileMetadata, masterKey, hmacSecret); + + const { fileId } = await requestFileUpload( + state, + uploadId, + blob, + fileSigned, + thumbnailBuffer, + dataKey, + dataKeyVersion, + ); + + return { fileId, fileBuffer, thumbnailBuffer }; } catch (e) { state.status = "error"; throw e; diff --git a/src/routes/(fullscreen)/settings/migration/+page.svelte b/src/routes/(fullscreen)/settings/migration/+page.svelte index b4bc9cb..4db6a80 100644 --- a/src/routes/(fullscreen)/settings/migration/+page.svelte +++ b/src/routes/(fullscreen)/settings/migration/+page.svelte @@ -18,7 +18,7 @@ info, state: getMigrationState(info.id), })) - .filter((file) => file.state?.status !== "completed"), + .filter((file) => file.state?.status !== "uploaded"), ); const migrateAllFiles = () => { diff --git a/src/routes/(fullscreen)/settings/migration/File.svelte b/src/routes/(fullscreen)/settings/migration/File.svelte index ec9d25b..d16e800 100644 --- a/src/routes/(fullscreen)/settings/migration/File.svelte +++ b/src/routes/(fullscreen)/settings/migration/File.svelte @@ -1,12 +1,9 @@ diff --git a/src/routes/(fullscreen)/settings/migration/service.svelte.ts b/src/routes/(fullscreen)/settings/migration/service.svelte.ts index 9d08db2..67201b0 100644 --- a/src/routes/(fullscreen)/settings/migration/service.svelte.ts +++ b/src/routes/(fullscreen)/settings/migration/service.svelte.ts @@ -9,13 +9,10 @@ import { trpc } from "$trpc/client"; export type MigrationStatus = | "queued" - | "download-pending" | "downloading" - | "encryption-pending" - | "encrypting" | "upload-pending" | "uploading" - | "completed" + | "uploaded" | "error"; export interface MigrationState { @@ -38,13 +35,13 @@ export const getMigrationState = (fileId: number) => { export const clearMigrationStates = () => { for (const [id, state] of states) { - if (state.status === "completed" || state.status === "error") { + if (state.status === "uploaded" || state.status === "error") { states.delete(id); } } }; -const uploadMigrationChunks = limitFunction( +const requestFileUpload = limitFunction( async (state: MigrationState, fileId: number, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { state.status = "uploading"; @@ -61,6 +58,7 @@ const uploadMigrationChunks = limitFunction( }); await trpc().upload.completeMigrationUpload.mutate({ uploadId }); + state.status = "uploaded"; }, { concurrency: 1 }, ); @@ -87,18 +85,11 @@ export const requestFileMigration = async (fileInfo: FileInfo) => { await scheduler.schedule( async () => { - state.status = "download-pending"; state.status = "downloading"; fileBuffer = await requestFileDownload(fileInfo.id, dataKey, true); return fileBuffer.byteLength; }, - async () => { - state.status = "encryption-pending"; - - await uploadMigrationChunks(state, fileInfo.id, fileBuffer!, dataKey); - - state.status = "completed"; - }, + () => requestFileUpload(state, fileInfo.id, fileBuffer!, dataKey), ); } catch (e) { state.status = "error"; diff --git 
a/src/routes/(fullscreen)/settings/thumbnail/service.ts b/src/routes/(fullscreen)/settings/thumbnail/service.ts
index 314cf5a..381ed53 100644
--- a/src/routes/(fullscreen)/settings/thumbnail/service.ts
+++ b/src/routes/(fullscreen)/settings/thumbnail/service.ts
@@ -35,7 +35,7 @@ const generateThumbnail = limitFunction(
   async (fileId: number, fileBuffer: ArrayBuffer, fileType: string, dataKey: CryptoKey) => {
     statuses.set(fileId, "generating");
 
-    const thumbnail = await doGenerateThumbnail(fileBuffer, fileType);
+    const thumbnail = await doGenerateThumbnail(new Blob([fileBuffer], { type: fileType }));
     if (!thumbnail) return null;
 
     const thumbnailBuffer = await thumbnail.arrayBuffer();

From 00b9858db7199f78208803c46e72fc5c137e2b00 Mon Sep 17 00:00:00 2001
From: static
Date: Mon, 12 Jan 2026 18:37:36 +0900
Subject: [PATCH 16/19] Store the uploaded chunk list more efficiently using a
 bitmap
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/lib/modules/upload.ts                     |  6 +-
 .../migrations/1768062380-AddChunkedUpload.ts | 13 ++++-
 src/lib/server/db/schema/upload.ts            |  3 +-
 src/lib/server/db/upload.ts                   | 19 +++++--
 src/lib/server/services/upload.ts             | 12 +++-
 src/lib/services/file.ts                      | 45 +++++----------
 src/routes/(fullscreen)/file/[id]/service.ts  |  9 +--
 .../settings/thumbnail/service.ts             | 56 +++++++------------
 .../api/upload/[id]/chunks/[index]/+server.ts |  2 +-
 src/trpc/routers/upload.ts                    | 12 ++--
 10 files changed, 83 insertions(+), 94 deletions(-)

diff --git a/src/lib/modules/upload.ts b/src/lib/modules/upload.ts
index 231b54b..83f4fd3 100644
--- a/src/lib/modules/upload.ts
+++ b/src/lib/modules/upload.ts
@@ -80,12 +80,12 @@ export const uploadBlob = async (
   const limit = pLimit(options?.concurrency ?? 
4); await Promise.all( - Array.from({ length: totalChunks }, (_, chunkIndex) => + Array.from({ length: totalChunks }, (_, i) => limit(() => uploadChunk( uploadId, - chunkIndex, - blob.slice(chunkIndex * CHUNK_SIZE, (chunkIndex + 1) * CHUNK_SIZE), + i + 1, // 1-based chunk index + blob.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE), dataKey, onChunkProgress, ), diff --git a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts index be6a900..26e6ae8 100644 --- a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts +++ b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts @@ -21,8 +21,14 @@ export const up = async (db: Kysely) => { .addColumn("type", "text", (col) => col.notNull()) .addColumn("user_id", "integer", (col) => col.references("user.id").notNull()) .addColumn("path", "text", (col) => col.notNull()) + .addColumn("bitmap", "bytea", (col) => col.notNull()) .addColumn("total_chunks", "integer", (col) => col.notNull()) - .addColumn("uploaded_chunks", sql`integer[]`, (col) => col.notNull().defaultTo(sql`'{}'`)) + .addColumn("uploaded_chunks", "integer", (col) => + col + .generatedAlwaysAs(sql`bit_count(bitmap)`) + .stored() + .notNull(), + ) .addColumn("expires_at", "timestamp(3)", (col) => col.notNull()) .addColumn("parent_id", "integer", (col) => col.references("directory.id")) .addColumn("master_encryption_key_version", "integer") @@ -46,6 +52,11 @@ export const up = async (db: Kysely) => { "hmac_secret_key", ["user_id", "version"], ) + .addCheckConstraint("upload_session_ck01", sql`uploaded_chunks <= total_chunks`) + .addCheckConstraint( + "upload_session_ck02", + sql`length(bitmap) = ceil(total_chunks / 8.0)::integer`, + ) .execute(); }; diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts index e20227d..7aefc5d 100644 --- a/src/lib/server/db/schema/upload.ts +++ b/src/lib/server/db/schema/upload.ts @@ -6,8 +6,9 @@ interface UploadSessionTable { type: "file" | "thumbnail" | "migration"; user_id: number; path: string; + bitmap: Buffer; total_chunks: number; - uploaded_chunks: Generated; + uploaded_chunks: Generated; expires_at: Date; // For file uploads diff --git a/src/lib/server/db/upload.ts b/src/lib/server/db/upload.ts index 876c150..db19cbf 100644 --- a/src/lib/server/db/upload.ts +++ b/src/lib/server/db/upload.ts @@ -7,8 +7,9 @@ interface BaseUploadSession { id: string; userId: number; path: string; + bitmap: Buffer; totalChunks: number; - uploadedChunks: number[]; + uploadedChunks: number; expiresAt: Date; } @@ -37,7 +38,7 @@ interface MigrationUploadSession extends BaseUploadSession { } export const createFileUploadSession = async ( - params: Omit, + params: Omit, ) => { await db.transaction().execute(async (trx) => { const mek = await trx @@ -73,6 +74,7 @@ export const createFileUploadSession = async ( type: "file", user_id: params.userId, path: params.path, + bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)), total_chunks: params.totalChunks, expires_at: params.expiresAt, parent_id: params.parentId !== "root" ? 
params.parentId : null,
@@ -90,7 +92,7 @@ };
 
 export const createThumbnailUploadSession = async (
-  params: Omit,
+  params: Omit,
 ) => {
   await db.transaction().execute(async (trx) => {
     const file = await trx
@@ -114,6 +116,7 @@
       type: "thumbnail",
       user_id: params.userId,
       path: params.path,
+      bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)),
       total_chunks: params.totalChunks,
       expires_at: params.expiresAt,
       file_id: params.fileId,
@@ -124,7 +127,7 @@ };
 
 export const createMigrationUploadSession = async (
-  params: Omit,
+  params: Omit,
 ) => {
   await db.transaction().execute(async (trx) => {
     const file = await trx
@@ -148,6 +151,7 @@
       type: "migration",
      user_id: params.userId,
      path: params.path,
+      bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)),
      total_chunks: params.totalChunks,
      expires_at: params.expiresAt,
      file_id: params.fileId,
@@ -173,6 +177,7 @@ export const getUploadSession = async (sessionId: string, userId: number) => {
     id: session.id,
     userId: session.user_id,
     path: session.path,
+    bitmap: session.bitmap,
     totalChunks: session.total_chunks,
     uploadedChunks: session.uploaded_chunks,
     expiresAt: session.expires_at,
@@ -192,6 +197,7 @@
     id: session.id,
     userId: session.user_id,
     path: session.path,
+    bitmap: session.bitmap,
     totalChunks: session.total_chunks,
     uploadedChunks: session.uploaded_chunks,
     expiresAt: session.expires_at,
@@ -204,6 +210,7 @@
     id: session.id,
     userId: session.user_id,
     path: session.path,
+    bitmap: session.bitmap,
     totalChunks: session.total_chunks,
     uploadedChunks: session.uploaded_chunks,
     expiresAt: session.expires_at,
@@ -215,7 +222,9 @@
 export const markChunkAsUploaded = async (sessionId: string, chunkIndex: number) => {
   await db
     .updateTable("upload_session")
-    .set({ uploaded_chunks: sql`array_append(uploaded_chunks, ${chunkIndex})` })
+    .set({
+      bitmap: sql`set_bit(${sql.ref("bitmap")}, ${chunkIndex - 1}, 1)`,
+    })
     .where("id", "=", sessionId)
     .execute();
 };
diff --git a/src/lib/server/services/upload.ts b/src/lib/server/services/upload.ts
index 1f7043b..d654f42 100644
--- a/src/lib/server/services/upload.ts
+++ b/src/lib/server/services/upload.ts
@@ -8,6 +8,12 @@ import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem";
 
 const chunkLocks = new Set();
 
+const isChunkUploaded = (bitmap: Buffer, chunkIndex: number) => {
+  chunkIndex -= 1;
+  const byte = bitmap[Math.floor(chunkIndex / 8)];
+  return !!byte && (byte & (1 << (chunkIndex % 8))) !== 0; // Matches Postgres set_bit/get_bit numbering: LSB-first within each byte
+};
+
 export const uploadChunk = async (
   userId: number,
   sessionId: string,
@@ -28,13 +34,13 @@
   const session = await UploadRepo.getUploadSession(sessionId, userId);
   if (!session) {
     error(404, "Invalid upload id");
-  } else if (chunkIndex >= session.totalChunks) {
+  } else if (chunkIndex > session.totalChunks) {
     error(400, "Invalid chunk index");
-  } else if (session.uploadedChunks.includes(chunkIndex)) {
+  } else if (isChunkUploaded(session.bitmap, chunkIndex)) {
     error(409, "Chunk already uploaded");
   }
 
-  const isLastChunk = chunkIndex === session.totalChunks - 1;
+  const isLastChunk = chunkIndex === 
session.totalChunks; filePath = `${session.path}/${chunkIndex}`; const hashStream = createHash("sha256"); diff --git a/src/lib/services/file.ts b/src/lib/services/file.ts index 2f37f52..b8db243 100644 --- a/src/lib/services/file.ts +++ b/src/lib/services/file.ts @@ -1,5 +1,4 @@ import { getAllFileInfos } from "$lib/indexedDB/filesystem"; -import { encodeToBase64, digestMessage } from "$lib/modules/crypto"; import { getFileCache, storeFileCache, @@ -7,6 +6,7 @@ import { downloadFile, deleteFileThumbnailCache, } from "$lib/modules/file"; +import { uploadBlob } from "$lib/modules/upload"; import { trpc } from "$trpc/client"; export const requestFileDownload = async ( @@ -24,41 +24,24 @@ export const requestFileDownload = async ( export const requestFileThumbnailUpload = async ( fileId: number, + thumbnail: Blob, + dataKey: CryptoKey, dataKeyVersion: Date, - thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer }, ) => { - const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ - file: fileId, - dekVersion: dataKeyVersion, - }); + try { + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); - // Prepend IV to ciphertext (consistent with file download format) - const ivAndCiphertext = new Uint8Array( - thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength, - ); - ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0); - ivAndCiphertext.set( - new Uint8Array(thumbnailEncrypted.ciphertext), - thumbnailEncrypted.iv.byteLength, - ); + await uploadBlob(uploadId, thumbnail, dataKey); - const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext)); - - const response = await fetch(`/api/upload/${uploadId}/chunks/0`, { - method: "POST", - headers: { - "Content-Type": "application/octet-stream", - "Content-Digest": `sha-256=:${chunkHash}:`, - }, - body: ivAndCiphertext, - }); - - if (!response.ok) { - throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`); + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); + return true; + } catch { + // TODO: Error Handling + return false; } - - await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); - return response; }; export const requestDeletedFilesCleanup = async () => { diff --git a/src/routes/(fullscreen)/file/[id]/service.ts b/src/routes/(fullscreen)/file/[id]/service.ts index ea3e49c..598418b 100644 --- a/src/routes/(fullscreen)/file/[id]/service.ts +++ b/src/routes/(fullscreen)/file/[id]/service.ts @@ -1,4 +1,3 @@ -import { encryptData } from "$lib/modules/crypto"; import { storeFileThumbnailCache } from "$lib/modules/file"; import { prepareFileDecryption, getDecryptedFileUrl } from "$lib/serviceWorker"; import { requestFileThumbnailUpload } from "$lib/services/file"; @@ -33,12 +32,10 @@ export const requestThumbnailUpload = async ( dataKey: CryptoKey, dataKeyVersion: Date, ) => { - const thumbnailBuffer = await thumbnail.arrayBuffer(); - const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey); - const res = await requestFileThumbnailUpload(fileId, dataKeyVersion, thumbnailEncrypted); - if (!res.ok) return false; + const res = await requestFileThumbnailUpload(fileId, thumbnail, dataKey, dataKeyVersion); + if (!res) return false; - storeFileThumbnailCache(fileId, thumbnailBuffer); // Intended + void thumbnail.arrayBuffer().then((buffer) => storeFileThumbnailCache(fileId, buffer)); return true; }; diff --git 
a/src/routes/(fullscreen)/settings/thumbnail/service.ts b/src/routes/(fullscreen)/settings/thumbnail/service.ts index 381ed53..83b2890 100644 --- a/src/routes/(fullscreen)/settings/thumbnail/service.ts +++ b/src/routes/(fullscreen)/settings/thumbnail/service.ts @@ -1,10 +1,9 @@ import { limitFunction } from "p-limit"; import { SvelteMap } from "svelte/reactivity"; -import { encryptData } from "$lib/modules/crypto"; import { storeFileThumbnailCache } from "$lib/modules/file"; import type { FileInfo } from "$lib/modules/filesystem"; import { Scheduler } from "$lib/modules/scheduler"; -import { generateThumbnail as doGenerateThumbnail } from "$lib/modules/thumbnail"; +import { generateThumbnail } from "$lib/modules/thumbnail"; import { requestFileDownload, requestFileThumbnailUpload } from "$lib/services/file"; export type GenerationStatus = @@ -31,33 +30,25 @@ export const clearThumbnailGenerationStatuses = () => { } }; -const generateThumbnail = limitFunction( - async (fileId: number, fileBuffer: ArrayBuffer, fileType: string, dataKey: CryptoKey) => { - statuses.set(fileId, "generating"); - - const thumbnail = await doGenerateThumbnail(new Blob([fileBuffer], { type: fileType })); - if (!thumbnail) return null; - - const thumbnailBuffer = await thumbnail.arrayBuffer(); - const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey); - statuses.set(fileId, "upload-pending"); - return { plaintext: thumbnailBuffer, ...thumbnailEncrypted }; - }, - { concurrency: 4 }, -); - const requestThumbnailUpload = limitFunction( - async ( - fileId: number, - dataKeyVersion: Date, - thumbnail: { plaintext: ArrayBuffer; ciphertext: ArrayBuffer; iv: ArrayBuffer }, - ) => { - statuses.set(fileId, "uploading"); + async (fileInfo: FileInfo, fileBuffer: ArrayBuffer) => { + statuses.set(fileInfo.id, "generating"); - const res = await requestFileThumbnailUpload(fileId, dataKeyVersion, thumbnail); - if (!res.ok) return false; - statuses.set(fileId, "uploaded"); - storeFileThumbnailCache(fileId, thumbnail.plaintext); // Intended + const thumbnail = await generateThumbnail( + new Blob([fileBuffer], { type: fileInfo.contentType }), + ); + if (!thumbnail) return false; + + const res = await requestFileThumbnailUpload( + fileInfo.id, + thumbnail, + fileInfo.dataKey?.key!, + fileInfo.dataKey?.version!, + ); + if (!res) return false; + + statuses.set(fileInfo.id, "uploaded"); + void thumbnail.arrayBuffer().then((buffer) => storeFileThumbnailCache(fileInfo.id, buffer)); return true; }, { concurrency: 4 }, @@ -81,16 +72,7 @@ export const requestThumbnailGeneration = async (fileInfo: FileInfo) => { return file.byteLength; }, async () => { - const thumbnail = await generateThumbnail( - fileInfo.id, - file!, - fileInfo.contentType, - fileInfo.dataKey?.key!, - ); - if ( - !thumbnail || - !(await requestThumbnailUpload(fileInfo.id, fileInfo.dataKey?.version!, thumbnail)) - ) { + if (!(await requestThumbnailUpload(fileInfo, file!))) { statuses.set(fileInfo.id, "error"); } }, diff --git a/src/routes/api/upload/[id]/chunks/[index]/+server.ts b/src/routes/api/upload/[id]/chunks/[index]/+server.ts index 689d313..179030e 100644 --- a/src/routes/api/upload/[id]/chunks/[index]/+server.ts +++ b/src/routes/api/upload/[id]/chunks/[index]/+server.ts @@ -13,7 +13,7 @@ export const POST: RequestHandler = async ({ locals, params, request }) => { const zodRes = z .object({ id: z.uuidv4(), - index: z.coerce.number().int().nonnegative(), + index: z.coerce.number().int().positive(), }) .safeParse(params); if (!zodRes.success) error(400, 
"Invalid path parameters"); diff --git a/src/trpc/routers/upload.ts b/src/trpc/routers/upload.ts index adc0a3e..3289aad 100644 --- a/src/trpc/routers/upload.ts +++ b/src/trpc/routers/upload.ts @@ -150,7 +150,7 @@ const uploadRouter = router({ (!session.hskVersion && input.contentHmac) ) { throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content HMAC" }); - } else if (session.uploadedChunks.length < session.totalChunks) { + } else if (session.uploadedChunks < session.totalChunks) { throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); } @@ -160,7 +160,7 @@ const uploadRouter = router({ const hashStream = createHash("sha256"); const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); - for (let i = 0; i < session.totalChunks; i++) { + for (let i = 1; i <= session.totalChunks; i++) { for await (const chunk of createReadStream(`${session.path}/${i}`)) { hashStream.update(chunk); writeStream.write(chunk); @@ -215,13 +215,13 @@ const uploadRouter = router({ const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); if (!session || session.type !== "thumbnail") { throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); - } else if (session.uploadedChunks.length < session.totalChunks) { + } else if (session.uploadedChunks < session.totalChunks) { throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); } thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uploadId}`; await mkdir(dirname(thumbnailPath), { recursive: true }); - await rename(`${session.path}/0`, thumbnailPath); + await rename(`${session.path}/1`, thumbnailPath); const oldThumbnailPath = await db.transaction().execute(async (trx) => { const oldPath = await MediaRepo.updateFileThumbnail( @@ -305,7 +305,7 @@ const uploadRouter = router({ const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); if (!session || session.type !== "migration") { throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); - } else if (session.uploadedChunks.length < session.totalChunks) { + } else if (session.uploadedChunks < session.totalChunks) { throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); } @@ -315,7 +315,7 @@ const uploadRouter = router({ const hashStream = createHash("sha256"); const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); - for (let i = 0; i < session.totalChunks; i++) { + for (let i = 1; i <= session.totalChunks; i++) { for await (const chunk of createReadStream(`${session.path}/${i}`)) { hashStream.update(chunk); writeStream.write(chunk); From a4912c8952cf6b706571a767e7f5b297fe756d84 Mon Sep 17 00:00:00 2001 From: static Date: Mon, 12 Jan 2026 20:50:19 +0900 Subject: [PATCH 17/19] =?UTF-8?q?=EC=82=AC=EC=86=8C=ED=95=9C=20=EB=A6=AC?= =?UTF-8?q?=ED=8C=A9=ED=86=A0=EB=A7=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docker-compose.yaml | 1 + src/hooks.server.ts | 2 +- src/lib/modules/crypto/sha.ts | 6 +- src/lib/modules/file/download.svelte.ts | 4 +- src/lib/modules/file/thumbnail.ts | 9 +- src/lib/modules/file/upload.svelte.ts | 5 +- src/lib/modules/upload.ts | 43 ++++--- src/lib/server/db/file.ts | 13 +- .../migrations/1768062380-AddChunkedUpload.ts | 4 +- src/lib/server/db/schema/upload.ts | 3 - src/lib/server/db/upload.ts | 67 ++-------- .../settings/migration/service.svelte.ts | 17 ++- .../settings/thumbnail/File.svelte | 1 - .../settings/thumbnail/service.ts | 3 +- 
.../api/upload/[id]/chunks/[index]/+server.ts | 2 +- src/service-worker/constants.ts | 1 + src/service-worker/handlers/decryptFile.ts | 7 +- src/service-worker/index.ts | 2 +- src/service-worker/modules/constants.ts | 1 - src/service-worker/modules/crypto.ts | 2 +- src/trpc/routers/upload.ts | 119 +++++++++--------- 21 files changed, 132 insertions(+), 180 deletions(-) create mode 100644 src/service-worker/constants.ts delete mode 100644 src/service-worker/modules/constants.ts diff --git a/docker-compose.yaml b/docker-compose.yaml index a624d9f..3544f14 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -9,6 +9,7 @@ services: volumes: - ./data/library:/app/data/library - ./data/thumbnails:/app/data/thumbnails + - ./data/uploads:/app/data/uploads environment: # ArkVault - DATABASE_HOST=database diff --git a/src/hooks.server.ts b/src/hooks.server.ts index b816f7f..c670968 100644 --- a/src/hooks.server.ts +++ b/src/hooks.server.ts @@ -7,8 +7,8 @@ import { cleanupExpiredSessions, cleanupExpiredSessionUpgradeChallenges, } from "$lib/server/db/session"; -import { cleanupExpiredUploadSessions } from "$lib/server/services/upload"; import { authenticate, setAgentInfo } from "$lib/server/middlewares"; +import { cleanupExpiredUploadSessions } from "$lib/server/services/upload"; export const init: ServerInit = async () => { await migrateDB(); diff --git a/src/lib/modules/crypto/sha.ts b/src/lib/modules/crypto/sha.ts index 5e9e3fa..286e6f2 100644 --- a/src/lib/modules/crypto/sha.ts +++ b/src/lib/modules/crypto/sha.ts @@ -19,13 +19,13 @@ export const generateHmacSecret = async () => { }; export const signMessageHmac = async (message: Blob, hmacSecret: CryptoKey) => { - const worker = new HmacWorker(); const stream = message.stream(); const hmacSecretRaw = new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret)); + const worker = new HmacWorker(); return new Promise((resolve, reject) => { - worker.onmessage = (event: MessageEvent) => { - resolve(event.data.result); + worker.onmessage = ({ data }: MessageEvent) => { + resolve(data.result); worker.terminate(); }; diff --git a/src/lib/modules/file/download.svelte.ts b/src/lib/modules/file/download.svelte.ts index d438e3f..88f1e9e 100644 --- a/src/lib/modules/file/download.svelte.ts +++ b/src/lib/modules/file/download.svelte.ts @@ -1,6 +1,6 @@ import axios from "axios"; import { limitFunction } from "p-limit"; -import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants"; +import { ENCRYPTED_CHUNK_SIZE } from "$lib/constants"; import { decryptChunk, concatenateBuffers } from "$lib/modules/crypto"; export interface FileDownloadState { @@ -100,7 +100,7 @@ export const downloadFile = async (id: number, dataKey: CryptoKey, isLegacy: boo return await decryptFile( state, fileEncrypted, - isLegacy ? fileEncrypted.byteLength : CHUNK_SIZE + ENCRYPTION_OVERHEAD, + isLegacy ? 
fileEncrypted.byteLength : ENCRYPTED_CHUNK_SIZE,
       dataKey,
     );
   } catch (e) {
diff --git a/src/lib/modules/file/thumbnail.ts b/src/lib/modules/file/thumbnail.ts
index ed40e13..b33a4af 100644
--- a/src/lib/modules/file/thumbnail.ts
+++ b/src/lib/modules/file/thumbnail.ts
@@ -1,7 +1,7 @@
 import { LRUCache } from "lru-cache";
 import { writable, type Writable } from "svelte/store";
 import { browser } from "$app/environment";
-import { decryptData } from "$lib/modules/crypto";
+import { decryptChunk } from "$lib/modules/crypto";
 import type { SummarizedFileInfo } from "$lib/modules/filesystem";
 import { readFile, writeFile, deleteFile, deleteDirectory } from "$lib/modules/opfs";
 import { getThumbnailUrl } from "$lib/modules/thumbnail";
@@ -20,12 +20,7 @@ const fetchFromServer = async (fileId: number, dataKey: CryptoKey) => {
   const res = await fetch(`/api/file/${fileId}/thumbnail/download`);
   if (!res.ok) return null;
 
-  const thumbnailEncrypted = await res.arrayBuffer();
-  const thumbnailBuffer = await decryptData(
-    thumbnailEncrypted.slice(12),
-    thumbnailEncrypted.slice(0, 12),
-    dataKey,
-  );
+  const thumbnailBuffer = await decryptChunk(await res.arrayBuffer(), dataKey);
 
   void writeFile(`/thumbnail/file/${fileId}`, thumbnailBuffer);
   return getThumbnailUrl(thumbnailBuffer);
diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts
index 9bf043a..6deac1f 100644
--- a/src/lib/modules/file/upload.svelte.ts
+++ b/src/lib/modules/file/upload.svelte.ts
@@ -58,8 +58,7 @@ const requestDuplicateFileScan = limitFunction(
   ) => {
     state.status = "encryption-pending";
 
-    const hmacResult = await signMessageHmac(file, hmacSecret.secret);
-    const fileSigned = encodeToBase64(hmacResult);
+    const fileSigned = encodeToBase64(await signMessageHmac(file, hmacSecret.secret));
     const files = await trpc().file.listByHash.query({
       hskVersion: hmacSecret.version,
       contentHmac: fileSigned,
@@ -171,7 +170,7 @@ const requestFileUpload = limitFunction(
     await uploadBlob(uploadId, file, dataKey, {
       onProgress(s) {
         state.progress = s.progress;
-        state.rate = s.rateBps;
+        state.rate = s.rate;
       },
     });
 
diff --git a/src/lib/modules/upload.ts b/src/lib/modules/upload.ts
index 83f4fd3..a540e22 100644
--- a/src/lib/modules/upload.ts
+++ b/src/lib/modules/upload.ts
@@ -3,27 +3,32 @@ import pLimit from "p-limit";
 import { ENCRYPTION_OVERHEAD, CHUNK_SIZE } from "$lib/constants";
 import { encryptChunk, digestMessage, encodeToBase64 } from "$lib/modules/crypto";
 
-type UploadStats = {
-  progress: number; // 0..1 (relative to encrypted size)
-  rateBps: number; // bytes/sec
-  uploadedBytes: number;
-  totalBytes: number;
-};
+interface UploadStats {
+  progress: number;
+  rate: number;
+}
+
+const createSpeedMeter = (timeWindow = 1500) => {
+  const samples: { t: number; b: number }[] = [];
+  let lastSpeed = 0;
+
+  return (bytesNow?: number) => {
+    if (!bytesNow) return lastSpeed;
 
-function createSpeedMeter(windowMs = 1500) {
-  const samples: Array<{ t: number; b: number }> = [];
-  return (bytesNow: number) => {
     const now = performance.now();
     samples.push({ t: now, b: bytesNow });
-    const cutoff = now - windowMs;
+
+    const cutoff = now - timeWindow;
     while (samples.length > 2 && samples[0]!.t < cutoff) samples.shift();
 
     const first = samples[0]!;
     const dt = now - first.t;
     const db = bytesNow - first.b;
-    return dt > 0 ? (db / dt) * 1000 : 0;
+
+    lastSpeed = dt > 0 ? 
(db / dt) * 1000 : 0; + return lastSpeed; }; -} +}; const uploadChunk = async ( uploadId: string, @@ -66,10 +71,10 @@ export const uploadBlob = async ( if (!onProgress) return; const uploadedBytes = uploadedByChunk.reduce((a, b) => a + b, 0); - const rateBps = speedMeter(uploadedBytes); + const rate = speedMeter(uploadedBytes); const progress = Math.min(1, uploadedBytes / totalBytes); - onProgress({ progress, rateBps, uploadedBytes, totalBytes }); + onProgress({ progress, rate }); }; const onChunkProgress = (idx: number, loaded: number) => { @@ -84,7 +89,7 @@ export const uploadBlob = async ( limit(() => uploadChunk( uploadId, - i + 1, // 1-based chunk index + i + 1, blob.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE), dataKey, onChunkProgress, ), ), ), ); - // 완료 보정 - onProgress?.({ - progress: 1, - rateBps: 0, - uploadedBytes: totalBytes, - totalBytes, - }); + onProgress?.({ progress: 1, rate: speedMeter() }); }; diff --git a/src/lib/server/db/file.ts b/src/lib/server/db/file.ts index 9314f4b..d0c54cc 100644 --- a/src/lib/server/db/file.ts +++ b/src/lib/server/db/file.ts @@ -497,21 +497,22 @@ export const migrateFileContent = async ( userId: number, fileId: number, newPath: string, + dekVersion: Date, encContentHash: string, ) => { const file = await trx .selectFrom("file") - .select(["path", "encrypted_content_iv"]) + .select(["path", "data_encryption_key_version", "encrypted_content_iv"]) .where("id", "=", fileId) .where("user_id", "=", userId) .limit(1) .forUpdate() .executeTakeFirst(); - if (!file) { throw new IntegrityError("File not found"); - } - if (!file.encrypted_content_iv) { + } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) { + throw new IntegrityError("Invalid DEK version"); + } else if (!file.encrypted_content_iv) { throw new IntegrityError("File is not legacy"); } @@ -525,7 +526,6 @@ export const migrateFileContent = async ( .where("id", "=", fileId) .where("user_id", "=", userId) .execute(); - await trx .insertInto("file_log") .values({ @@ -534,8 +534,7 @@ export const migrateFileContent = async ( action: "migrate", }) .execute(); - - return file.path; + return { oldPath: file.path }; }; export const addFileToCategory = async (fileId: number, categoryId: number) => { diff --git a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts index 26e6ae8..22676aa 100644 --- a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts +++ b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts @@ -52,11 +52,11 @@ export const up = async (db: Kysely<any>) => { "hmac_secret_key", ["user_id", "version"], ) - .addCheckConstraint("upload_session_ck01", sql`uploaded_chunks <= total_chunks`) .addCheckConstraint( - "upload_session_ck02", + "upload_session_ck01", sql`length(bitmap) = ceil(total_chunks / 8.0)::integer`, ) + .addCheckConstraint("upload_session_ck02", sql`uploaded_chunks <= total_chunks`) .execute(); }; diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts index 7aefc5d..5635921 100644 --- a/src/lib/server/db/schema/upload.ts +++ b/src/lib/server/db/schema/upload.ts @@ -11,7 +11,6 @@ interface UploadSessionTable { uploaded_chunks: Generated<number>; expires_at: Date; - // For file uploads parent_id: number | null; master_encryption_key_version: number | null; encrypted_data_encryption_key: string | null; // Base64 @@ -21,8 +20,6 @@ encrypted_name: Ciphertext | null; 
encrypted_created_at: Ciphertext | null; encrypted_last_modified_at: Ciphertext | null; - - // For thumbnail uploads file_id: number | null; } diff --git a/src/lib/server/db/upload.ts b/src/lib/server/db/upload.ts index db19cbf..9dd85a0 100644 --- a/src/lib/server/db/upload.ts +++ b/src/lib/server/db/upload.ts @@ -26,17 +26,12 @@ interface FileUploadSession extends BaseUploadSession { encLastModifiedAt: Ciphertext; } -interface ThumbnailUploadSession extends BaseUploadSession { - type: "thumbnail"; +interface ThumbnailOrMigrationUploadSession extends BaseUploadSession { + type: "thumbnail" | "migration"; fileId: number; dekVersion: Date; } -interface MigrationUploadSession extends BaseUploadSession { - type: "migration"; - fileId: number; -} - export const createFileUploadSession = async ( params: Omit, ) => { @@ -91,8 +86,8 @@ export const createFileUploadSession = async ( }); }; -export const createThumbnailUploadSession = async ( - params: Omit, +export const createThumbnailOrMigrationUploadSession = async ( + params: Omit, ) => { await db.transaction().execute(async (trx) => { const file = await trx @@ -113,7 +108,7 @@ export const createThumbnailUploadSession = async ( .insertInto("upload_session") .values({ id: params.id, - type: "thumbnail", + type: params.type, user_id: params.userId, path: params.path, bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)), @@ -126,40 +121,6 @@ export const createThumbnailUploadSession = async ( }); }; -export const createMigrationUploadSession = async ( - params: Omit, -) => { - await db.transaction().execute(async (trx) => { - const file = await trx - .selectFrom("file") - .select("encrypted_content_iv") - .where("id", "=", params.fileId) - .where("user_id", "=", params.userId) - .limit(1) - .forUpdate() - .executeTakeFirst(); - if (!file) { - throw new IntegrityError("File not found"); - } else if (!file.encrypted_content_iv) { - throw new IntegrityError("File is not legacy"); - } - - await trx - .insertInto("upload_session") - .values({ - id: params.id, - type: "migration", - user_id: params.userId, - path: params.path, - bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)), - total_chunks: params.totalChunks, - expires_at: params.expiresAt, - file_id: params.fileId, - }) - .execute(); - }); -}; - export const getUploadSession = async (sessionId: string, userId: number) => { const session = await db .selectFrom("upload_session") @@ -191,9 +152,9 @@ export const getUploadSession = async (sessionId: string, userId: number) => { encCreatedAt: session.encrypted_created_at, encLastModifiedAt: session.encrypted_last_modified_at!, } satisfies FileUploadSession; - } else if (session.type === "thumbnail") { + } else { return { - type: "thumbnail", + type: session.type, id: session.id, userId: session.user_id, path: session.path, @@ -203,19 +164,7 @@ export const getUploadSession = async (sessionId: string, userId: number) => { expiresAt: session.expires_at, fileId: session.file_id!, dekVersion: session.data_encryption_key_version!, - } satisfies ThumbnailUploadSession; - } else { - return { - type: "migration", - id: session.id, - userId: session.user_id, - path: session.path, - bitmap: session.bitmap, - totalChunks: session.total_chunks, - uploadedChunks: session.uploaded_chunks, - expiresAt: session.expires_at, - fileId: session.file_id!, - } satisfies MigrationUploadSession; + } satisfies ThumbnailOrMigrationUploadSession; } }; diff --git a/src/routes/(fullscreen)/settings/migration/service.svelte.ts 
b/src/routes/(fullscreen)/settings/migration/service.svelte.ts index 67201b0..dfb0edd 100644 --- a/src/routes/(fullscreen)/settings/migration/service.svelte.ts +++ b/src/routes/(fullscreen)/settings/migration/service.svelte.ts @@ -42,18 +42,25 @@ export const clearMigrationStates = () => { }; const requestFileUpload = limitFunction( - async (state: MigrationState, fileId: number, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { + async ( + state: MigrationState, + fileId: number, + fileBuffer: ArrayBuffer, + dataKey: CryptoKey, + dataKeyVersion: Date, + ) => { state.status = "uploading"; const { uploadId } = await trpc().upload.startMigrationUpload.mutate({ file: fileId, chunks: Math.ceil(fileBuffer.byteLength / CHUNK_SIZE), + dekVersion: dataKeyVersion, }); await uploadBlob(uploadId, new Blob([fileBuffer]), dataKey, { onProgress(s) { state.progress = s.progress; - state.rate = s.rateBps; + state.rate = s.rate; }, }); @@ -76,7 +83,7 @@ export const requestFileMigration = async (fileInfo: FileInfo) => { } try { - const dataKey = fileInfo.dataKey?.key; + const dataKey = fileInfo.dataKey; if (!dataKey) { throw new Error("Data key not available"); } @@ -86,10 +93,10 @@ export const requestFileMigration = async (fileInfo: FileInfo) => { await scheduler.schedule( async () => { state.status = "downloading"; - fileBuffer = await requestFileDownload(fileInfo.id, dataKey, true); + fileBuffer = await requestFileDownload(fileInfo.id, dataKey.key, true); return fileBuffer.byteLength; }, - () => requestFileUpload(state, fileInfo.id, fileBuffer!, dataKey), + () => requestFileUpload(state, fileInfo.id, fileBuffer!, dataKey.key, dataKey.version), ); } catch (e) { state.status = "error"; diff --git a/src/routes/(fullscreen)/settings/thumbnail/File.svelte b/src/routes/(fullscreen)/settings/thumbnail/File.svelte index 4440cf2..edb7e91 100644 --- a/src/routes/(fullscreen)/settings/thumbnail/File.svelte +++ b/src/routes/(fullscreen)/settings/thumbnail/File.svelte @@ -3,7 +3,6 @@ queued: "대기 중", "generation-pending": "준비 중", generating: "생성하는 중", - "upload-pending": "업로드를 기다리는 중", uploading: "업로드하는 중", error: "실패", } as const; diff --git a/src/routes/(fullscreen)/settings/thumbnail/service.ts b/src/routes/(fullscreen)/settings/thumbnail/service.ts index 83b2890..fdf0303 100644 --- a/src/routes/(fullscreen)/settings/thumbnail/service.ts +++ b/src/routes/(fullscreen)/settings/thumbnail/service.ts @@ -10,7 +10,6 @@ export type GenerationStatus = | "queued" | "generation-pending" | "generating" - | "upload-pending" | "uploading" | "uploaded" | "error"; @@ -39,6 +38,8 @@ const requestThumbnailUpload = limitFunction( ); if (!thumbnail) return false; + statuses.set(fileInfo.id, "uploading"); + const res = await requestFileThumbnailUpload( fileInfo.id, thumbnail, diff --git a/src/routes/api/upload/[id]/chunks/[index]/+server.ts b/src/routes/api/upload/[id]/chunks/[index]/+server.ts index 179030e..3b2e85b 100644 --- a/src/routes/api/upload/[id]/chunks/[index]/+server.ts +++ b/src/routes/api/upload/[id]/chunks/[index]/+server.ts @@ -1,6 +1,6 @@ import { error, text } from "@sveltejs/kit"; import { Readable } from "stream"; -import { ReadableStream } from "stream/web"; +import type { ReadableStream } from "stream/web"; import { z } from "zod"; import { parseContentDigestHeader } from "$lib/modules/http"; import { authorize } from "$lib/server/modules/auth"; diff --git a/src/service-worker/constants.ts b/src/service-worker/constants.ts new file mode 100644 index 0000000..4938d61 --- /dev/null +++ 
b/src/service-worker/constants.ts @@ -0,0 +1 @@ +export * from "../lib/constants"; diff --git a/src/service-worker/handlers/decryptFile.ts b/src/service-worker/handlers/decryptFile.ts index 22aa118..9aa9717 100644 --- a/src/service-worker/handlers/decryptFile.ts +++ b/src/service-worker/handlers/decryptFile.ts @@ -1,4 +1,4 @@ -import { DECRYPTED_FILE_URL_PREFIX, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "../modules/constants"; +import { DECRYPTED_FILE_URL_PREFIX, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "../constants"; import { decryptChunk, getEncryptedRange, getDecryptedSize } from "../modules/crypto"; import { parseRangeHeader, getContentRangeHeader } from "../modules/http"; import { getFile } from "../modules/opfs"; @@ -15,10 +15,13 @@ const createResponse = ( const headers: Record<string, string> = { "Accept-Ranges": "bytes", "Content-Length": String(range.end - range.start + 1), - "Content-Type": contentType ?? "application/octet-stream", ...(isRangeRequest ? getContentRangeHeader(range) : {}), }; + if (contentType) { + headers["Content-Type"] = contentType; + } + if (downloadFilename) { headers["Content-Disposition"] = `attachment; filename*=UTF-8''${encodeURIComponent(downloadFilename)}`; diff --git a/src/service-worker/index.ts b/src/service-worker/index.ts index 051f8d9..2861166 100644 --- a/src/service-worker/index.ts +++ b/src/service-worker/index.ts @@ -3,7 +3,7 @@ /// /// -import { DECRYPTED_FILE_URL_PREFIX } from "./modules/constants"; +import { DECRYPTED_FILE_URL_PREFIX } from "./constants"; import { decryptFile } from "./handlers"; import { fileMetadataStore } from "./stores"; import type { ServiceWorkerMessage, ServiceWorkerResponse } from "./types"; diff --git a/src/service-worker/modules/constants.ts b/src/service-worker/modules/constants.ts deleted file mode 100644 index cca093e..0000000 --- a/src/service-worker/modules/constants.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "../../lib/constants"; diff --git a/src/service-worker/modules/crypto.ts b/src/service-worker/modules/crypto.ts index 1afee74..ed35094 100644 --- a/src/service-worker/modules/crypto.ts +++ b/src/service-worker/modules/crypto.ts @@ -1,4 +1,4 @@ -import { ENCRYPTION_OVERHEAD, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "./constants"; +import { ENCRYPTION_OVERHEAD, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "../constants"; export * from "../../lib/modules/crypto"; diff --git a/src/trpc/routers/upload.ts b/src/trpc/routers/upload.ts index 3289aad..11b0a84 100644 --- a/src/trpc/routers/upload.ts +++ b/src/trpc/routers/upload.ts @@ -1,7 +1,7 @@ import { TRPCError } from "@trpc/server"; import { createHash } from "crypto"; import { createReadStream, createWriteStream } from "fs"; -import { mkdir, rename } from "fs/promises"; +import { copyFile, mkdir } from "fs/promises"; import mime from "mime"; import { dirname } from "path"; import { v4 as uuidv4 } from "uuid"; @@ -13,6 +13,8 @@ import env from "$lib/server/loadenv"; import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem"; import { router, roleProcedure } from "../init.server"; +const UPLOADS_EXPIRES = 24 * 3600 * 1000; // 24 hours + const sessionLocks = new Set(); const generateSessionId = async () => { @@ -60,7 +62,7 @@ userId: ctx.session.userId, path, totalChunks: input.chunks, - expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours + expiresAt: new Date(Date.now() + UPLOADS_EXPIRES), parentId: input.parent, mekVersion: input.mekVersion, encDek: input.dek, @@ -89,41 +91,6 @@ } }), 
- startFileThumbnailUpload: roleProcedure["activeClient"] - .input( - z.object({ - file: z.int().positive(), - dekVersion: z.date(), - }), - ) - .mutation(async ({ ctx, input }) => { - const { id, path } = await generateSessionId(); - - try { - await UploadRepo.createThumbnailUploadSession({ - id, - userId: ctx.session.userId, - path, - totalChunks: 1, // Up to 4 MiB - expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours - fileId: input.file, - dekVersion: input.dekVersion, - }); - return { uploadId: id }; - } catch (e) { - await safeRecursiveRm(path); - - if (e instanceof IntegrityError) { - if (e.message === "File not found") { - throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); - } else if (e.message === "Invalid DEK version") { - throw new TRPCError({ code: "BAD_REQUEST", message: "Mismatched DEK version" }); - } - } - throw e; - } - }), - completeFileUpload: roleProcedure["activeClient"] .input( z.object({ @@ -143,7 +110,7 @@ const uploadRouter = router({ try { const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); - if (!session || session.type !== "file") { + if (session?.type !== "file") { throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); } else if ( (session.hskVersion && !input.contentHmac) || @@ -195,6 +162,42 @@ const uploadRouter = router({ } }), + startFileThumbnailUpload: roleProcedure["activeClient"] + .input( + z.object({ + file: z.int().positive(), + dekVersion: z.date(), + }), + ) + .mutation(async ({ ctx, input }) => { + const { id, path } = await generateSessionId(); + + try { + await UploadRepo.createThumbnailOrMigrationUploadSession({ + id, + type: "thumbnail", + userId: ctx.session.userId, + path, + totalChunks: 1, // Up to 4 MiB + expiresAt: new Date(Date.now() + UPLOADS_EXPIRES), + fileId: input.file, + dekVersion: input.dekVersion, + }); + return { uploadId: id }; + } catch (e) { + await safeRecursiveRm(path); + + if (e instanceof IntegrityError) { + if (e.message === "File not found") { + throw new TRPCError({ code: "NOT_FOUND", message: "Invalid file id" }); + } else if (e.message === "Invalid DEK version") { + throw new TRPCError({ code: "BAD_REQUEST", message: e.message }); + } + } + throw e; + } + }), + completeFileThumbnailUpload: roleProcedure["activeClient"] .input( z.object({ @@ -213,7 +216,7 @@ const uploadRouter = router({ try { const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); - if (!session || session.type !== "thumbnail") { + if (session?.type !== "thumbnail") { throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); } else if (session.uploadedChunks < session.totalChunks) { throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); @@ -221,7 +224,7 @@ const uploadRouter = router({ thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uploadId}`; await mkdir(dirname(thumbnailPath), { recursive: true }); - await rename(`${session.path}/1`, thumbnailPath); + await copyFile(`${session.path}/1`, thumbnailPath); const oldThumbnailPath = await db.transaction().execute(async (trx) => { const oldPath = await MediaRepo.updateFileThumbnail( @@ -238,12 +241,10 @@ const uploadRouter = router({ await Promise.all([safeUnlink(oldThumbnailPath), safeRecursiveRm(session.path)]); } catch (e) { await safeUnlink(thumbnailPath); - if (e instanceof IntegrityError) { - if (e.message === "File not found") { - throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); - } else if (e.message === 
"Invalid DEK version") { - throw new TRPCError({ code: "BAD_REQUEST", message: "Mismatched DEK version" }); - } + + if (e instanceof IntegrityError && e.message === "Invalid DEK version") { + // DEK rotated after this upload started + throw new TRPCError({ code: "CONFLICT", message: e.message }); } throw e; } finally { @@ -256,19 +257,22 @@ const uploadRouter = router({ z.object({ file: z.int().positive(), chunks: z.int().positive(), + dekVersion: z.date(), }), ) .mutation(async ({ ctx, input }) => { const { id, path } = await generateSessionId(); try { - await UploadRepo.createMigrationUploadSession({ + await UploadRepo.createThumbnailOrMigrationUploadSession({ id, + type: "migration", userId: ctx.session.userId, path, totalChunks: input.chunks, - expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours + expiresAt: new Date(Date.now() + UPLOADS_EXPIRES), fileId: input.file, + dekVersion: input.dekVersion, }); return { uploadId: id }; } catch (e) { @@ -276,9 +280,9 @@ const uploadRouter = router({ if (e instanceof IntegrityError) { if (e.message === "File not found") { - throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); + throw new TRPCError({ code: "NOT_FOUND", message: "Invalid file id" }); } else if (e.message === "File is not legacy") { - throw new TRPCError({ code: "BAD_REQUEST", message: "File is not legacy" }); + throw new TRPCError({ code: "BAD_REQUEST", message: e.message }); } } throw e; @@ -303,7 +307,7 @@ const uploadRouter = router({ try { const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); - if (!session || session.type !== "migration") { + if (session?.type !== "migration") { throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" }); } else if (session.uploadedChunks < session.totalChunks) { throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); @@ -328,11 +332,12 @@ const uploadRouter = router({ const hash = hashStream.digest("base64"); const oldPath = await db.transaction().execute(async (trx) => { - const oldPath = await FileRepo.migrateFileContent( + const { oldPath } = await FileRepo.migrateFileContent( trx, ctx.session.userId, session.fileId, filePath, + session.dekVersion!, hash, ); await UploadRepo.deleteUploadSession(trx, uploadId); @@ -342,12 +347,10 @@ const uploadRouter = router({ await Promise.all([safeUnlink(oldPath), safeRecursiveRm(session.path)]); } catch (e) { await safeUnlink(filePath); - if (e instanceof IntegrityError) { - if (e.message === "File not found") { - throw new TRPCError({ code: "NOT_FOUND", message: "File not found" }); - } else if (e.message === "File is not legacy") { - throw new TRPCError({ code: "BAD_REQUEST", message: "File is not legacy" }); - } + + if (e instanceof IntegrityError && e.message === "File is not legacy") { + // File migrated after this upload started + throw new TRPCError({ code: "CONFLICT", message: e.message }); } throw e; } finally { From 4cdf2b342f17e777f81ce22b9ef2835465c46c83 Mon Sep 17 00:00:00 2001 From: static Date: Mon, 12 Jan 2026 23:37:04 +0900 Subject: [PATCH 18/19] =?UTF-8?q?=EC=B2=AD=ED=81=AC=20=EC=97=85=EB=A1=9C?= =?UTF-8?q?=EB=93=9C=20=EC=84=B1=EB=8A=A5=20=EA=B0=9C=EC=84=A0=20=EB=B0=8F?= =?UTF-8?q?=20=EB=84=A4=ED=8A=B8=EC=9B=8C=ED=81=AC=20=EC=86=8D=EB=8F=84?= =?UTF-8?q?=EB=A5=BC=20=EB=8D=94=20=EC=A0=95=ED=99=95=ED=95=98=EA=B2=8C=20?= =?UTF-8?q?=EC=B8=A1=EC=A0=95=ED=95=98=EB=8F=84=EB=A1=9D=20=EA=B0=9C?= =?UTF-8?q?=EC=84=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 
8bit --- src/lib/modules/file/upload.svelte.ts | 2 +- src/lib/modules/upload.ts | 135 ++++++++++++++---- src/lib/utils/concurrency/BoundedQueue.ts | 44 ++++++ .../utils/{ => concurrency}/HybridPromise.ts | 0 .../concurrency/Scheduler.ts} | 0 src/lib/utils/concurrency/index.ts | 3 + src/lib/utils/index.ts | 2 +- .../settings/migration/service.svelte.ts | 2 +- .../settings/thumbnail/service.ts | 2 +- 9 files changed, 159 insertions(+), 31 deletions(-) create mode 100644 src/lib/utils/concurrency/BoundedQueue.ts rename src/lib/utils/{ => concurrency}/HybridPromise.ts (100%) rename src/lib/{modules/scheduler.ts => utils/concurrency/Scheduler.ts} (100%) create mode 100644 src/lib/utils/concurrency/index.ts diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index 6deac1f..7ac15ce 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -3,10 +3,10 @@ import { limitFunction } from "p-limit"; import { CHUNK_SIZE } from "$lib/constants"; import { encodeToBase64, generateDataKey, wrapDataKey, encryptString } from "$lib/modules/crypto"; import { signMessageHmac } from "$lib/modules/crypto"; -import { Scheduler } from "$lib/modules/scheduler"; import { generateThumbnail } from "$lib/modules/thumbnail"; import { uploadBlob } from "$lib/modules/upload"; import type { MasterKey, HmacSecret } from "$lib/stores"; +import { Scheduler } from "$lib/utils"; import { trpc } from "$trpc/client"; export interface FileUploadState { diff --git a/src/lib/modules/upload.ts b/src/lib/modules/upload.ts index a540e22..cab51b7 100644 --- a/src/lib/modules/upload.ts +++ b/src/lib/modules/upload.ts @@ -2,55 +2,99 @@ import axios from "axios"; import pLimit from "p-limit"; import { ENCRYPTION_OVERHEAD, CHUNK_SIZE } from "$lib/constants"; import { encryptChunk, digestMessage, encodeToBase64 } from "$lib/modules/crypto"; +import { BoundedQueue } from "$lib/utils"; interface UploadStats { progress: number; rate: number; } -const createSpeedMeter = (timeWindow = 1500) => { +interface EncryptedChunk { + index: number; + data: ArrayBuffer; + hash: string; +} + +const createSpeedMeter = (timeWindow = 3000, minInterval = 200, warmupPeriod = 500) => { const samples: { t: number; b: number }[] = []; let lastSpeed = 0; + let startTime: number | null = null; return (bytesNow?: number) => { - if (!bytesNow) return lastSpeed; + if (bytesNow === undefined) return lastSpeed; const now = performance.now(); + + // Initialize start time on first call + if (startTime === null) { + startTime = now; + } + + // Check if enough time has passed since the last sample + const lastSample = samples[samples.length - 1]; + if (lastSample && now - lastSample.t < minInterval) { + return lastSpeed; + } + samples.push({ t: now, b: bytesNow }); + // Remove old samples outside the time window const cutoff = now - timeWindow; while (samples.length > 2 && samples[0]!.t < cutoff) samples.shift(); + // Need at least 2 samples to calculate speed + if (samples.length < 2) { + return lastSpeed; + } + const first = samples[0]!; const dt = now - first.t; const db = bytesNow - first.b; - lastSpeed = dt > 0 ? (db / dt) * 1000 : 0; + if (dt >= minInterval) { + const instantSpeed = (db / dt) * 1000; + // Apply EMA for smoother speed transitions + const alpha = 0.3; + const rawSpeed = + lastSpeed === 0 ? 
instantSpeed : alpha * instantSpeed + (1 - alpha) * lastSpeed; + + // Apply warmup ramp to prevent initial overestimation + const elapsed = now - startTime; + const warmupWeight = Math.min(1, elapsed / warmupPeriod); + lastSpeed = rawSpeed * warmupWeight; + } + return lastSpeed; }; }; -const uploadChunk = async ( - uploadId: string, - chunkIndex: number, +const encryptChunkData = async ( chunk: Blob, dataKey: CryptoKey, +): Promise<{ data: ArrayBuffer; hash: string }> => { + const encrypted = await encryptChunk(await chunk.arrayBuffer(), dataKey); + const hash = encodeToBase64(await digestMessage(encrypted)); + return { data: encrypted, hash }; +}; + +const uploadEncryptedChunk = async ( + uploadId: string, + chunkIndex: number, + encrypted: ArrayBuffer, + hash: string, onChunkProgress: (chunkIndex: number, loaded: number) => void, ) => { - const chunkEncrypted = await encryptChunk(await chunk.arrayBuffer(), dataKey); - const chunkEncryptedHash = encodeToBase64(await digestMessage(chunkEncrypted)); - - await axios.post(`/api/upload/${uploadId}/chunks/${chunkIndex}`, chunkEncrypted, { + await axios.post(`/api/upload/${uploadId}/chunks/${chunkIndex + 1}`, encrypted, { headers: { "Content-Type": "application/octet-stream", - "Content-Digest": `sha-256=:${chunkEncryptedHash}:`, + "Content-Digest": `sha-256=:${hash}:`, }, onUploadProgress(e) { onChunkProgress(chunkIndex, e.loaded ?? 0); }, }); - onChunkProgress(chunkIndex, chunkEncrypted.byteLength); + onChunkProgress(chunkIndex, encrypted.byteLength); }; export const uploadBlob = async ( @@ -60,12 +104,14 @@ export const uploadBlob = async ( options?: { concurrency?: number; onProgress?: (s: UploadStats) => void }, ) => { const onProgress = options?.onProgress; + const networkConcurrency = options?.concurrency ?? 4; + const maxQueueSize = 8; const totalChunks = Math.ceil(blob.size / CHUNK_SIZE); const totalBytes = blob.size + totalChunks * ENCRYPTION_OVERHEAD; const uploadedByChunk = new Array(totalChunks).fill(0); - const speedMeter = createSpeedMeter(1500); + const speedMeter = createSpeedMeter(3000, 200); const emit = () => { if (!onProgress) return; @@ -82,21 +128,56 @@ export const uploadBlob = async ( emit(); }; - const limit = pLimit(options?.concurrency ?? 4); + const queue = new BoundedQueue<EncryptedChunk>(maxQueueSize); + let encryptionError: Error | null = null; - await Promise.all( - Array.from({ length: totalChunks }, (_, i) => - limit(() => - uploadChunk( - uploadId, - i + 1, - blob.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE), - dataKey, - onChunkProgress, - ), - ), - ), - ); + // Producer: encrypt chunks and push to queue + const encryptionProducer = async () => { + try { + for (let i = 0; i < totalChunks; i++) { + const chunk = blob.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE); + const { data, hash } = await encryptChunkData(chunk, dataKey); + await queue.push({ index: i, data, hash }); + } + } catch (e) { + encryptionError = e instanceof Error ? 
e : new Error(String(e)); } finally { queue.close(); } }; + + // Consumer: upload chunks from queue with concurrency limit + const uploadConsumer = async () => { + const limit = pLimit(networkConcurrency); + const activeTasks = new Set<Promise<void>>(); + + while (true) { + const item = await queue.pop(); + if (item === null) break; + if (encryptionError) throw encryptionError; + + const task = limit(async () => { + try { + await uploadEncryptedChunk(uploadId, item.index, item.data, item.hash, onChunkProgress); + } finally { + // @ts-ignore + item.data = null; + } + }); + + activeTasks.add(task); + task.finally(() => activeTasks.delete(task)); + + if (activeTasks.size >= networkConcurrency) { + await Promise.race(activeTasks); + } + } + + await Promise.all(activeTasks); + }; + + // Run producer and consumer concurrently + await Promise.all([encryptionProducer(), uploadConsumer()]); onProgress?.({ progress: 1, rate: speedMeter() }); }; diff --git a/src/lib/utils/concurrency/BoundedQueue.ts b/src/lib/utils/concurrency/BoundedQueue.ts new file mode 100644 index 0000000..5970914 --- /dev/null +++ b/src/lib/utils/concurrency/BoundedQueue.ts @@ -0,0 +1,44 @@ +export class BoundedQueue<T> { + private isClosed = false; + private reservedCount = 0; + private items: T[] = []; + + private waitersNotFull: (() => void)[] = []; + private waitersNotEmpty: (() => void)[] = []; + + constructor(private readonly maxSize: number) {} + + async push(item: T) { + if (this.isClosed) { + throw new Error("Queue closed"); + } + + while (this.reservedCount >= this.maxSize) { + await new Promise<void>((resolve) => this.waitersNotFull.push(resolve)); + if (this.isClosed) throw new Error("Queue closed"); + } + + this.reservedCount++; + this.items.push(item); + this.waitersNotEmpty.shift()?.(); + } + + async pop() { + while (this.items.length === 0) { + if (this.isClosed) return null; + await new Promise<void>((resolve) => this.waitersNotEmpty.push(resolve)); + } + + const item = this.items.shift()!; + this.reservedCount--; + this.waitersNotFull.shift()?.(); + + return item; + } + + close() { + this.isClosed = true; + while (this.waitersNotEmpty.length > 0) this.waitersNotEmpty.shift()!(); + while (this.waitersNotFull.length > 0) this.waitersNotFull.shift()!(); + } +} diff --git a/src/lib/utils/HybridPromise.ts b/src/lib/utils/concurrency/HybridPromise.ts similarity index 100% rename from src/lib/utils/HybridPromise.ts rename to src/lib/utils/concurrency/HybridPromise.ts diff --git a/src/lib/modules/scheduler.ts b/src/lib/utils/concurrency/Scheduler.ts similarity index 100% rename from src/lib/modules/scheduler.ts rename to src/lib/utils/concurrency/Scheduler.ts diff --git a/src/lib/utils/concurrency/index.ts b/src/lib/utils/concurrency/index.ts new file mode 100644 index 0000000..59fe81d --- /dev/null +++ b/src/lib/utils/concurrency/index.ts @@ -0,0 +1,3 @@ +export * from "./BoundedQueue"; +export * from "./HybridPromise"; +export * from "./Scheduler"; diff --git a/src/lib/utils/index.ts b/src/lib/utils/index.ts index 5d5b9d4..4c576d5 100644 --- a/src/lib/utils/index.ts +++ b/src/lib/utils/index.ts @@ -1,4 +1,4 @@ +export * from "./concurrency"; export * from "./format"; export * from "./gotoStateful"; -export * from "./HybridPromise"; export * from "./sort"; diff --git a/src/routes/(fullscreen)/settings/migration/service.svelte.ts b/src/routes/(fullscreen)/settings/migration/service.svelte.ts index dfb0edd..1bdf869 100644 --- a/src/routes/(fullscreen)/settings/migration/service.svelte.ts +++ 
b/src/routes/(fullscreen)/settings/migration/service.svelte.ts @@ -2,9 +2,9 @@ import { limitFunction } from "p-limit"; import { SvelteMap } from "svelte/reactivity"; import { CHUNK_SIZE } from "$lib/constants"; import type { FileInfo } from "$lib/modules/filesystem"; -import { Scheduler } from "$lib/modules/scheduler"; import { uploadBlob } from "$lib/modules/upload"; import { requestFileDownload } from "$lib/services/file"; +import { Scheduler } from "$lib/utils"; import { trpc } from "$trpc/client"; export type MigrationStatus = diff --git a/src/routes/(fullscreen)/settings/thumbnail/service.ts b/src/routes/(fullscreen)/settings/thumbnail/service.ts index fdf0303..5c4c61d 100644 --- a/src/routes/(fullscreen)/settings/thumbnail/service.ts +++ b/src/routes/(fullscreen)/settings/thumbnail/service.ts @@ -2,9 +2,9 @@ import { limitFunction } from "p-limit"; import { SvelteMap } from "svelte/reactivity"; import { storeFileThumbnailCache } from "$lib/modules/file"; import type { FileInfo } from "$lib/modules/filesystem"; -import { Scheduler } from "$lib/modules/scheduler"; import { generateThumbnail } from "$lib/modules/thumbnail"; import { requestFileDownload, requestFileThumbnailUpload } from "$lib/services/file"; +import { Scheduler } from "$lib/utils"; export type GenerationStatus = | "queued" From b952bfae86e5753a382edb34baf1e5c07f59e5a5 Mon Sep 17 00:00:00 2001 From: static Date: Tue, 13 Jan 2026 00:28:52 +0900 Subject: Add an Action that automatically builds the Docker image on every release MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/docker.yaml | 45 +++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 .github/workflows/docker.yaml diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml new file mode 100644 index 0000000..6f0627d --- /dev/null +++ b/.github/workflows/docker.yaml @@ -0,0 +1,45 @@ +name: Docker Image Build + +on: + release: + types: [published] + +jobs: + build-and-push: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract Docker metadata + uses: docker/metadata-action@v5 + id: meta + with: + images: ghcr.io/${{ github.repository }} + tags: | + type=semver,pattern={{version}} + type=raw,value=latest + type=sha + + - name: Build and push Docker image + uses: docker/build-push-action@v6 + with: + context: . + push: true + tags: ${{ steps.meta.outputs.tags }} + cache-from: type=gha + cache-to: type=gha,mode=max