Refactor file upload logic

This commit is contained in:
static
2026-01-12 12:02:20 +09:00
parent 27e90ef4d7
commit b636d75ea0
6 changed files with 193 additions and 398 deletions


@@ -1,10 +1,9 @@
import { limitFunction } from "p-limit";
import { SvelteMap } from "svelte/reactivity";
import { CHUNK_SIZE } from "$lib/constants";
import { encodeToBase64, encryptChunk, digestMessage } from "$lib/modules/crypto";
import { deleteFileCache } from "$lib/modules/file";
import type { FileInfo } from "$lib/modules/filesystem";
import { Scheduler } from "$lib/modules/scheduler";
import { uploadBlob } from "$lib/modules/upload";
import { requestFileDownload } from "$lib/services/file";
import { trpc } from "$trpc/client";
@@ -45,81 +44,28 @@ export const clearMigrationStates = () => {
  }
};
const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
  const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = [];
  let offset = 0;
  while (offset < fileBuffer.byteLength) {
    const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength);
    const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey);
    chunksEncrypted.push({
      chunkEncrypted: chunkEncrypted,
      chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)),
    });
    offset = nextOffset;
  }
  return chunksEncrypted;
};
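Note: the removed encryptChunks helper above sliced the plaintext into CHUNK_SIZE pieces and kept every encrypted chunk (plus its SHA-256 digest) in memory before any upload began. The new code further down in this hunk only needs the chunk count, which it derives directly from the plaintext size. A minimal sketch of that arithmetic, assuming the same CHUNK_SIZE constant from $lib/constants:

// Sketch only: the old chunksEncrypted.length and the new computation agree,
// because the old loop also advanced through the plaintext in CHUNK_SIZE steps.
const chunkCount = (plaintextBytes: number, chunkSize: number): number =>
  Math.ceil(plaintextBytes / chunkSize);

// e.g. chunkCount(10 * 1024 * 1024, 4 * 1024 * 1024) === 3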
const uploadMigrationChunks = limitFunction(
  async (
    state: MigrationState,
    fileId: number,
    chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[],
  ) => {
  async (state: MigrationState, fileId: number, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
    state.status = "uploading";
    const { uploadId } = await trpc().upload.startMigrationUpload.mutate({
      file: fileId,
      chunks: chunksEncrypted.length,
      chunks: Math.ceil(fileBuffer.byteLength / CHUNK_SIZE),
    });
    const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0);
    let uploadedBytes = 0;
    const startTime = Date.now();
    for (let i = 0; i < chunksEncrypted.length; i++) {
      const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!;
      const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, {
        method: "POST",
        headers: {
          "Content-Type": "application/octet-stream",
          "Content-Digest": `sha-256=:${chunkEncryptedHash}:`,
        },
        body: chunkEncrypted,
      });
      if (!response.ok) {
        throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
      }
      uploadedBytes += chunkEncrypted.byteLength;
      const elapsed = (Date.now() - startTime) / 1000;
      const rate = uploadedBytes / elapsed;
      state.progress = uploadedBytes / totalBytes;
      state.rate = rate;
    }
    await uploadBlob(uploadId, new Blob([fileBuffer]), dataKey, {
      onProgress(s) {
        state.progress = s.progress;
        state.rate = s.rateBps;
      },
    });
    await trpc().upload.completeMigrationUpload.mutate({ uploadId });
  },
  { concurrency: 1 },
);
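For orientation, this is roughly the call shape of uploadBlob as it can be inferred from the call site above. The actual implementation lives in $lib/modules/upload and is not part of this diff, so the interface name and the uploadId type are assumptions; only the onProgress fields come from the call site.

// Assumed call shape, inferred from the call site above (not the actual module).
interface UploadProgressSnapshot {
  progress: number; // fraction of the blob uploaded, 0..1
  rateBps: number; // observed upload rate in bytes per second
}

declare function uploadBlob(
  uploadId: number, // could equally be a string; the tRPC response type is not visible here
  blob: Blob,
  dataKey: CryptoKey,
  options?: { onProgress?: (s: UploadProgressSnapshot) => void },
): Promise<void>;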
const encryptFile = limitFunction(
  async (state: MigrationState, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
    state.status = "encrypting";
    const chunksEncrypted = await encryptChunks(fileBuffer, dataKey);
    state.status = "upload-pending";
    return chunksEncrypted;
  },
  { concurrency: 4 },
);
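Both wrappers here rely on p-limit's limitFunction, which caps how many invocations of the wrapped async function run at once: uploads are serialized at { concurrency: 1 }, while the now-removed encryption step was allowed to process up to four files at a time. A minimal sketch of the pattern, assuming nothing beyond the import already shown at the top of this file:

import { limitFunction } from "p-limit";

// At most one invocation of this function runs at a time; extra calls queue up.
const limitedUpload = limitFunction(
  async (label: string) => {
    // ...do the serialized async work for `label`...
    return label;
  },
  { concurrency: 1 },
);

// Callers can still fire these in parallel; p-limit enforces the concurrency cap.
await Promise.all([limitedUpload("a"), limitedUpload("b")]);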
export const requestFileMigration = async (fileInfo: FileInfo & { exists: true }) => {
export const requestFileMigration = async (fileInfo: FileInfo) => {
  let state = states.get(fileInfo.id);
  if (state) {
    if (state.status !== "error") return;
@@ -148,12 +94,8 @@ export const requestFileMigration = async (fileInfo: FileInfo & { exists: true }
    },
    async () => {
      state.status = "encryption-pending";
      const chunksEncrypted = await encryptFile(state, fileBuffer!, dataKey);
      await uploadMigrationChunks(state, fileInfo.id, chunksEncrypted);
      // Clear file cache since the file format has changed
      await deleteFileCache(fileInfo.id);
      await uploadMigrationChunks(state, fileInfo.id, fileBuffer!, dataKey);
      state.status = "completed";
    },
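The status strings touched in this diff sketch out the migration state machine. This is a hedged reconstruction from the values visible here only; the real MigrationState type is defined elsewhere in the file and may differ.

// Reconstructed from the statuses that appear in this diff; not the actual type.
type MigrationStatus =
  | "encryption-pending" // set at the start of the migration task
  | "encrypting" // only in the removed encryptFile path
  | "upload-pending" // only in the removed encryptFile path
  | "uploading" // set at the start of uploadMigrationChunks
  | "completed"
  | "error"; // checked when retrying an existing migration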