Implement checksum verification on file upload
drizzle/0002_good_talisman.sql (new file, 1 line added)
@@ -0,0 +1 @@
+ALTER TABLE `file` ADD `encrypted_content_hash` text NOT NULL;
drizzle/meta/0002_snapshot.json (new file, 1308 lines added)
File diff suppressed because it is too large.
@@ -15,6 +15,13 @@
       "when": 1736720831242,
       "tag": "0001_blushing_alice",
       "breakpoints": true
+    },
+    {
+      "idx": 2,
+      "version": "6",
+      "when": 1737191517463,
+      "tag": "0002_good_talisman",
+      "breakpoints": true
     }
   ]
 }
@@ -8,6 +8,7 @@ import {
   wrapDataKey,
   encryptData,
   encryptString,
+  digestMessage,
   signMessageHmac,
 } from "$lib/modules/crypto";
 import type {
@@ -97,6 +98,8 @@ const encryptFile = limitFunction(
     const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
 
     const fileEncrypted = await encryptData(fileBuffer, dataKey);
+    const fileEncryptedHash = encodeToBase64(await digestMessage(fileEncrypted.ciphertext));
+
     const nameEncrypted = await encryptString(file.name, dataKey);
     const createdAtEncrypted =
       createdAt && (await encryptString(createdAt.getTime().toString(), dataKey));
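The hash is taken over the encrypted ciphertext, not the plaintext, so the server can verify the upload without any key material. digestMessage and encodeToBase64 are not part of this diff; assuming they are thin wrappers around the Web Crypto API (SHA-256, to match the server-side check later in this commit), a minimal sketch could look like:

// Hypothetical helpers: only the names come from the diff; the bodies below are
// illustrative, not the project's actual implementation in $lib/modules/crypto.
export const digestMessage = async (data: BufferSource): Promise<ArrayBuffer> =>
  await crypto.subtle.digest("SHA-256", data);

export const encodeToBase64 = (buffer: ArrayBuffer): string =>
  btoa(String.fromCharCode(...new Uint8Array(buffer)));

Base64 is the right output encoding here because the server-side check in this commit compares against a digest produced with digest("base64").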
@@ -110,8 +113,9 @@ const encryptFile = limitFunction(
     return {
       dataKeyWrapped,
       dataKeyVersion,
-      fileEncrypted,
       fileType,
+      fileEncrypted,
+      fileEncryptedHash,
       nameEncrypted,
       createdAtEncrypted,
       lastModifiedAtEncrypted,
@@ -184,8 +188,9 @@ export const uploadFile = async (
   const {
     dataKeyWrapped,
     dataKeyVersion,
-    fileEncrypted,
     fileType,
+    fileEncrypted,
+    fileEncryptedHash,
     nameEncrypted,
     createdAtEncrypted,
     lastModifiedAtEncrypted,
@@ -212,6 +217,7 @@ export const uploadFile = async (
     } as FileUploadRequest),
   );
   form.set("content", new Blob([fileEncrypted.ciphertext]));
+  form.set("checksum", fileEncryptedHash);
 
   await requestFileUpload(status, form);
   return true;
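With this change the multipart body carries three parts: the metadata JSON, the encrypted content, and the new checksum field. For illustration only, the assembled request is roughly equivalent to the following; the endpoint path and the surrounding setup are assumptions, since requestFileUpload's internals are not shown in this diff:

// Illustrative sketch, not project code: the real client delegates to
// requestFileUpload(status, form), and "/api/file/upload" is an assumed path.
// fileEncrypted and fileEncryptedHash are the values destructured above.
const form = new FormData();
form.set("metadata", JSON.stringify({ /* FileUploadRequest fields */ }));
form.set("content", new Blob([fileEncrypted.ciphertext]));
form.set("checksum", fileEncryptedHash); // Base64 SHA-256 of the ciphertext
await fetch("/api/file/upload", { method: "POST", body: form });

Part order matters for the metadata: the server handler later in this commit rejects a content part that arrives before the metadata field. The checksum field is more forgiving, since the server receives it through a Promise and can accept it after the content stream has started.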
@@ -26,6 +26,7 @@ export interface NewFileParams {
   contentHmac: string | null;
   contentType: string;
   encContentIv: string;
+  encContentHash: string;
   encName: string;
   encNameIv: string;
   encCreatedAt: string | null;
@@ -198,11 +199,12 @@ export const registerFile = async (params: NewFileParams) => {
     userId: params.userId,
     mekVersion: params.mekVersion,
     hskVersion: params.hskVersion,
-    contentHmac: params.contentHmac,
-    contentType: params.contentType,
     encDek: params.encDek,
     dekVersion: params.dekVersion,
+    contentHmac: params.contentHmac,
+    contentType: params.contentType,
     encContentIv: params.encContentIv,
+    encContentHash: params.encContentHash,
     encName: { ciphertext: params.encName, iv: params.encNameIv },
     encCreatedAt:
       params.encCreatedAt && params.encCreatedAtIv
@@ -60,6 +60,7 @@ export const file = sqliteTable(
     contentHmac: text("content_hmac"), // Base64
     contentType: text("content_type").notNull(),
     encContentIv: text("encrypted_content_iv").notNull(), // Base64
+    encContentHash: text("encrypted_content_hash").notNull(), // Base64
     encName: ciphertext("encrypted_name").notNull(),
     encCreatedAt: ciphertext("encrypted_created_at"),
     encLastModifiedAt: ciphertext("encrypted_last_modified_at").notNull(),
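Storing the ciphertext hash alongside the file makes later integrity checks possible, for example re-verifying a stored object before serving it. That is not part of this commit; a purely hypothetical Drizzle query against the new column might look like:

// Hypothetical downstream use, not in this commit. The import paths, db
// instance, id column, and fileId are assumptions for illustration only.
import { eq } from "drizzle-orm";
import { db } from "$lib/server/db";
import { file } from "$lib/server/db/schema";

declare const fileId: number; // assumed identifier type

const [row] = await db
  .select({ encContentHash: file.encContentHash })
  .from(file)
  .where(eq(file.id, fileId));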
@@ -1,4 +1,5 @@
 import { error } from "@sveltejs/kit";
+import { createHash } from "crypto";
 import { createReadStream, createWriteStream } from "fs";
 import { mkdir, stat, unlink } from "fs/promises";
 import { dirname } from "path";
@@ -95,8 +96,9 @@ const safeUnlink = async (path: string) => {
 };
 
 export const uploadFile = async (
-  params: Omit<NewFileParams, "path">,
+  params: Omit<NewFileParams, "path" | "encContentHash">,
   encContentStream: Readable,
+  encContentHash: Promise<string>,
 ) => {
   const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
   const oneMinuteLater = new Date(Date.now() + 60 * 1000);
@@ -108,16 +110,30 @@ export const uploadFile = async (
   await mkdir(dirname(path), { recursive: true });
 
   try {
-    await pipeline(encContentStream, createWriteStream(path, { flags: "wx", mode: 0o600 }));
+    const hashStream = createHash("sha256");
+    const [_, hash] = await Promise.all([
+      pipeline(encContentStream, hashStream, createWriteStream(path, { flags: "wx", mode: 0o600 })),
+      encContentHash,
+    ]);
+    if (hashStream.digest("base64") != hash) {
+      throw new Error("Invalid checksum");
+    }
+
     await registerFile({
       ...params,
       path,
+      encContentHash: hash,
     });
   } catch (e) {
     await safeUnlink(path);
 
     if (e instanceof IntegrityError && e.message === "Inactive MEK version") {
       error(400, "Invalid MEK version");
+    } else if (
+      e instanceof Error &&
+      (e.message === "Invalid request body" || e.message === "Invalid checksum")
+    ) {
+      error(400, "Invalid request body");
     }
     throw e;
   }
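One streaming detail worth calling out: crypto.createHash() returns a transform stream that consumes the bytes written to it and pushes only the final digest on its readable side, so whatever is piped after it receives the digest rather than the original data. When the original bytes must also reach a destination, a common pattern is to observe them from a pass-through stage, or to pipe the source into the hash and the write stream separately. A generic sketch of the pass-through variant, independent of this codebase:

// Generic sketch (not project code): hash a stream with SHA-256 while writing
// the same bytes to disk, using a pass-through Transform that feeds the hash.
import { createHash } from "crypto";
import { createWriteStream } from "fs";
import { Transform, type Readable } from "stream";
import { pipeline } from "stream/promises";

const hashWhileWriting = async (source: Readable, path: string): Promise<string> => {
  const hash = createHash("sha256");
  const tap = new Transform({
    transform(chunk, _encoding, callback) {
      hash.update(chunk); // observe the bytes...
      callback(null, chunk); // ...and pass them through unchanged
    },
  });
  await pipeline(source, tap, createWriteStream(path, { flags: "wx", mode: 0o600 }));
  return hash.digest("base64");
};

The Promise.all in the hunk above plays a complementary role: the disk write and the arrival of the client-supplied checksum (a Promise<string> resolved by the route handler) are awaited together, and the two digests are only compared once both have settled.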
@@ -67,27 +67,39 @@ export const POST: RequestHandler = async ({ locals, request }) => {
 
     let metadata: FileMetadata | null = null;
     let content: Readable | null = null;
+    const checksum = new Promise<string>((resolveChecksum, rejectChecksum) => {
+      bb.on(
+        "field",
+        handler(async (fieldname, val) => {
+          if (fieldname === "metadata") {
+            if (!metadata) {
+              // Ignore subsequent metadata fields
+              metadata = parseFileMetadata(userId, val);
+            }
+          } else if (fieldname === "checksum") {
+            resolveChecksum(val); // Ignore subsequent checksum fields
+          } else {
+            error(400, "Invalid request body");
+          }
+        }),
+      );
+      bb.on(
+        "file",
+        handler(async (fieldname, file) => {
+          if (fieldname !== "content") error(400, "Invalid request body");
+          if (!metadata || content) error(400, "Invalid request body");
+          content = file;
+
-    bb.on(
-      "field",
-      handler(async (fieldname, val) => {
-        if (fieldname !== "metadata") error(400, "Invalid request body");
-        if (metadata || content) error(400, "Invalid request body");
-        metadata = parseFileMetadata(userId, val);
-      }),
-    );
-    bb.on(
-      "file",
-      handler(async (fieldname, file) => {
-        if (fieldname !== "content") error(400, "Invalid request body");
-        if (!metadata || content) error(400, "Invalid request body");
-        content = file;
-
-        await uploadFile(metadata, content);
-        resolve(text("File uploaded", { headers: { "Content-Type": "text/plain" } }));
-      }),
-    );
-    bb.on("error", (e) => content?.emit("error", e) ?? reject(e));
+          await uploadFile(metadata, content, checksum);
+          resolve(text("File uploaded", { headers: { "Content-Type": "text/plain" } }));
+        }),
+      );
+      bb.on("finish", () => rejectChecksum(new Error("Invalid request body")));
+      bb.on("error", (e) => {
+        content?.emit("error", e) ?? reject(e);
+        rejectChecksum(e);
+      });
+    });
 
     request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error
   });
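The checksum is exposed to uploadFile as a Promise so the content stream can be consumed immediately, even though the checksum field may arrive later in the multipart body. Reduced to its core and detached from the SvelteKit handler, the pattern is roughly the following (bb stands in for the busboy instance created elsewhere in the handler; event names mirror the diff):

// Minimal sketch of the checksum-as-Promise wiring used above. Typing of the
// busboy instance is elided; handler() and the other fields are omitted.
declare const bb: any;

const checksum = new Promise<string>((resolveChecksum, rejectChecksum) => {
  bb.on("field", (fieldname: string, value: string) => {
    if (fieldname === "checksum") resolveChecksum(value); // first checksum wins
  });
  // If the form ends without ever sending a checksum, fail the pending promise.
  bb.on("finish", () => rejectChecksum(new Error("Invalid request body")));
  bb.on("error", (e: Error) => rejectChecksum(e));
});

Settling an already-resolved promise is a no-op, which is why the route above can both ignore duplicate checksum fields and safely reject on finish or error once a checksum has been received.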