업로드된 청크 목록을 비트맵을 활용해 효율적으로 저장하도록 개선

This commit is contained in:
static
2026-01-12 18:37:36 +09:00
parent c778a4fb9e
commit 00b9858db7
10 changed files with 83 additions and 94 deletions

View File

@@ -21,8 +21,14 @@ export const up = async (db: Kysely<any>) => {
.addColumn("type", "text", (col) => col.notNull())
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("path", "text", (col) => col.notNull())
.addColumn("bitmap", "bytea", (col) => col.notNull())
.addColumn("total_chunks", "integer", (col) => col.notNull())
.addColumn("uploaded_chunks", sql`integer[]`, (col) => col.notNull().defaultTo(sql`'{}'`))
.addColumn("uploaded_chunks", "integer", (col) =>
col
.generatedAlwaysAs(sql`bit_count(bitmap)`)
.stored()
.notNull(),
)
.addColumn("expires_at", "timestamp(3)", (col) => col.notNull())
.addColumn("parent_id", "integer", (col) => col.references("directory.id"))
.addColumn("master_encryption_key_version", "integer")
@@ -46,6 +52,11 @@ export const up = async (db: Kysely<any>) => {
"hmac_secret_key",
["user_id", "version"],
)
.addCheckConstraint("upload_session_ck01", sql`uploaded_chunks <= total_chunks`)
.addCheckConstraint(
"upload_session_ck02",
sql`length(bitmap) = ceil(total_chunks / 8.0)::integer`,
)
.execute();
};

View File

@@ -6,8 +6,9 @@ interface UploadSessionTable {
type: "file" | "thumbnail" | "migration";
user_id: number;
path: string;
bitmap: Buffer;
total_chunks: number;
uploaded_chunks: Generated<number[]>;
uploaded_chunks: Generated<number>;
expires_at: Date;
// For file uploads

View File

@@ -7,8 +7,9 @@ interface BaseUploadSession {
id: string;
userId: number;
path: string;
bitmap: Buffer;
totalChunks: number;
uploadedChunks: number[];
uploadedChunks: number;
expiresAt: Date;
}
@@ -37,7 +38,7 @@ interface MigrationUploadSession extends BaseUploadSession {
}
export const createFileUploadSession = async (
params: Omit<FileUploadSession, "type" | "uploadedChunks">,
params: Omit<FileUploadSession, "type" | "bitmap" | "uploadedChunks">,
) => {
await db.transaction().execute(async (trx) => {
const mek = await trx
@@ -73,6 +74,7 @@ export const createFileUploadSession = async (
type: "file",
user_id: params.userId,
path: params.path,
bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)),
total_chunks: params.totalChunks,
expires_at: params.expiresAt,
parent_id: params.parentId !== "root" ? params.parentId : null,
@@ -90,7 +92,7 @@ export const createFileUploadSession = async (
};
export const createThumbnailUploadSession = async (
params: Omit<ThumbnailUploadSession, "type" | "uploadedChunks">,
params: Omit<ThumbnailUploadSession, "type" | "bitmap" | "uploadedChunks">,
) => {
await db.transaction().execute(async (trx) => {
const file = await trx
@@ -114,6 +116,7 @@ export const createThumbnailUploadSession = async (
type: "thumbnail",
user_id: params.userId,
path: params.path,
bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)),
total_chunks: params.totalChunks,
expires_at: params.expiresAt,
file_id: params.fileId,
@@ -124,7 +127,7 @@ export const createThumbnailUploadSession = async (
};
export const createMigrationUploadSession = async (
params: Omit<MigrationUploadSession, "type" | "uploadedChunks">,
params: Omit<MigrationUploadSession, "type" | "bitmap" | "uploadedChunks">,
) => {
await db.transaction().execute(async (trx) => {
const file = await trx
@@ -148,6 +151,7 @@ export const createMigrationUploadSession = async (
type: "migration",
user_id: params.userId,
path: params.path,
bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)),
total_chunks: params.totalChunks,
expires_at: params.expiresAt,
file_id: params.fileId,
@@ -173,6 +177,7 @@ export const getUploadSession = async (sessionId: string, userId: number) => {
id: session.id,
userId: session.user_id,
path: session.path,
bitmap: session.bitmap,
totalChunks: session.total_chunks,
uploadedChunks: session.uploaded_chunks,
expiresAt: session.expires_at,
@@ -192,6 +197,7 @@ export const getUploadSession = async (sessionId: string, userId: number) => {
id: session.id,
userId: session.user_id,
path: session.path,
bitmap: session.bitmap,
totalChunks: session.total_chunks,
uploadedChunks: session.uploaded_chunks,
expiresAt: session.expires_at,
@@ -204,6 +210,7 @@ export const getUploadSession = async (sessionId: string, userId: number) => {
id: session.id,
userId: session.user_id,
path: session.path,
bitmap: session.bitmap,
totalChunks: session.total_chunks,
uploadedChunks: session.uploaded_chunks,
expiresAt: session.expires_at,
@@ -215,7 +222,9 @@ export const getUploadSession = async (sessionId: string, userId: number) => {
export const markChunkAsUploaded = async (sessionId: string, chunkIndex: number) => {
await db
.updateTable("upload_session")
.set({ uploaded_chunks: sql`array_append(uploaded_chunks, ${chunkIndex})` })
.set({
bitmap: sql`set_bit(${sql.ref("bitmap")}, ${chunkIndex - 1}, 1)`,
})
.where("id", "=", sessionId)
.execute();
};

View File

@@ -8,6 +8,12 @@ import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem";
const chunkLocks = new Set<string>();
/**
 * Checks whether a chunk has already been uploaded by testing its bit in the
 * session's bytea bitmap.
 *
 * Chunk indices are 1-based at the API boundary, so we shift to 0-based before
 * indexing. Postgres `set_bit` on bytea numbers bits LSB-first within each
 * byte, and this lookup mirrors that layout exactly. A bitmap shorter than the
 * requested index (byte lookup yields `undefined`) reads as "not uploaded".
 */
const isChunkUploaded = (bitmap: Buffer, chunkIndex: number) => {
  const bitPos = chunkIndex - 1;
  const byteValue = bitmap[bitPos >> 3];
  if (byteValue === undefined) return false;
  return (byteValue & (1 << (bitPos & 7))) !== 0;
};
export const uploadChunk = async (
userId: number,
sessionId: string,
@@ -28,13 +34,13 @@ export const uploadChunk = async (
const session = await UploadRepo.getUploadSession(sessionId, userId);
if (!session) {
error(404, "Invalid upload id");
} else if (chunkIndex >= session.totalChunks) {
} else if (chunkIndex > session.totalChunks) {
error(400, "Invalid chunk index");
} else if (session.uploadedChunks.includes(chunkIndex)) {
} else if (isChunkUploaded(session.bitmap, chunkIndex)) {
error(409, "Chunk already uploaded");
}
const isLastChunk = chunkIndex === session.totalChunks - 1;
const isLastChunk = chunkIndex === session.totalChunks;
filePath = `${session.path}/${chunkIndex}`;
const hashStream = createHash("sha256");