Implement file upload scheduling

Encryption is now limited to at most 4 concurrent files, and uploads to 1 at a time.
This commit is contained in: static
2025-01-16 02:33:00 +09:00
parent 366f657113
commit 937c4e2453
14 changed files with 367 additions and 162 deletions
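
The limits are enforced with limitFunction from p-limit, which wraps an async function so that at most a fixed number of calls run at once while the rest wait in a queue. A rough, self-contained sketch of that pattern, with placeholder workers standing in for the real encryption and upload steps in the diff below:

import { limitFunction } from "p-limit";

// Placeholder workers for illustration only; the real ones live in the upload module below.
const encryptOne = async (name: string) => console.log(`encrypting ${name}`);
const uploadOne = async (name: string) => console.log(`uploading ${name}`);

// At most 4 encryptions run concurrently, but only 1 upload at a time.
const encryptLimited = limitFunction(encryptOne, { concurrency: 4 });
const uploadLimited = limitFunction(uploadOne, { concurrency: 1 });

const uploadMany = async (names: string[]) => {
  await Promise.all(
    names.map(async (name) => {
      await encryptLimited(name); // waits if 4 encryptions are already running
      await uploadLimited(name); // waits if another upload is already running
    }),
  );
};

void uploadMany(["a.jpg", "b.jpg", "c.jpg", "d.jpg", "e.jpg"]);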

@@ -0,0 +1,50 @@
import {
  getFileCacheIndex as getFileCacheIndexFromIndexedDB,
  storeFileCacheIndex,
  deleteFileCacheIndex,
  type FileCacheIndex,
} from "$lib/indexedDB";
import { readFile, writeFile, deleteFile } from "$lib/modules/opfs";

const fileCacheIndex = new Map<number, FileCacheIndex>();

export const prepareFileCache = async () => {
  for (const cache of await getFileCacheIndexFromIndexedDB()) {
    fileCacheIndex.set(cache.fileId, cache);
  }
};

export const getFileCacheIndex = () => {
  return Array.from(fileCacheIndex.values());
};

export const getFileCache = async (fileId: number) => {
  const cacheIndex = fileCacheIndex.get(fileId);
  if (!cacheIndex) return null;

  cacheIndex.lastRetrievedAt = new Date();
  storeFileCacheIndex(cacheIndex); // Intentionally not awaited
  return await readFile(`/cache/${fileId}`);
};

export const storeFileCache = async (fileId: number, fileBuffer: ArrayBuffer) => {
  const now = new Date();
  await writeFile(`/cache/${fileId}`, fileBuffer);

  const cacheIndex: FileCacheIndex = {
    fileId,
    cachedAt: now,
    lastRetrievedAt: now,
    size: fileBuffer.byteLength,
  };
  fileCacheIndex.set(fileId, cacheIndex);
  await storeFileCacheIndex(cacheIndex);
};

export const deleteFileCache = async (fileId: number) => {
  if (!fileCacheIndex.has(fileId)) return;

  fileCacheIndex.delete(fileId);
  await deleteFile(`/cache/${fileId}`);
  await deleteFileCacheIndex(fileId);
};
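
For orientation, a hypothetical caller of this cache module could look like the following. The fetchAndDecryptFile helper and the $lib/modules/file barrel path are assumptions for the example, not part of this commit, and prepareFileCache() is expected to have been called once at startup.

import { getFileCache, storeFileCache } from "$lib/modules/file"; // assumed barrel path

// Hypothetical helper that downloads and decrypts a file; not part of this commit.
declare const fetchAndDecryptFile: (fileId: number) => Promise<ArrayBuffer>;

export const loadFileThroughCache = async (fileId: number) => {
  const cached = await getFileCache(fileId); // also bumps lastRetrievedAt
  if (cached) return cached;

  const fileBuffer = await fetchAndDecryptFile(fileId);
  await storeFileCache(fileId, fileBuffer); // persist to OPFS and the IndexedDB index
  return fileBuffer;
};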

@@ -0,0 +1,3 @@
export * from "./cache";
export * from "./info";
export * from "./upload";

@@ -0,0 +1,104 @@
import { writable, type Writable } from "svelte/store";
import { callGetApi } from "$lib/hooks";
import { unwrapDataKey, decryptString } from "$lib/modules/crypto";
import type { DirectoryInfoResponse, FileInfoResponse } from "$lib/server/schemas";
import {
  directoryInfoStore,
  fileInfoStore,
  type DirectoryInfo,
  type FileInfo,
} from "$lib/stores/file";

const fetchDirectoryInfo = async (
  directoryId: "root" | number,
  masterKey: CryptoKey,
  infoStore: Writable<DirectoryInfo | null>,
) => {
  const res = await callGetApi(`/api/directory/${directoryId}`);
  if (!res.ok) throw new Error("Failed to fetch directory information");

  const { metadata, subDirectories, files }: DirectoryInfoResponse = await res.json();

  let newInfo: DirectoryInfo;
  if (directoryId === "root") {
    newInfo = {
      id: "root",
      subDirectoryIds: subDirectories,
      fileIds: files,
    };
  } else {
    const { dataKey } = await unwrapDataKey(metadata!.dek, masterKey);
    newInfo = {
      id: directoryId,
      dataKey,
      dataKeyVersion: new Date(metadata!.dekVersion),
      name: await decryptString(metadata!.name, metadata!.nameIv, dataKey),
      subDirectoryIds: subDirectories,
      fileIds: files,
    };
  }

  infoStore.update(() => newInfo);
};

export const getDirectoryInfo = (directoryId: "root" | number, masterKey: CryptoKey) => {
  // TODO: MEK rotation
  let info = directoryInfoStore.get(directoryId);
  if (!info) {
    info = writable(null);
    directoryInfoStore.set(directoryId, info);
  }

  fetchDirectoryInfo(directoryId, masterKey, info);
  return info;
};

const decryptDate = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
  return new Date(parseInt(await decryptString(ciphertext, iv, dataKey), 10));
};

const fetchFileInfo = async (
  fileId: number,
  masterKey: CryptoKey,
  infoStore: Writable<FileInfo | null>,
) => {
  const res = await callGetApi(`/api/file/${fileId}`);
  if (!res.ok) {
    if (res.status === 404) {
      infoStore.update(() => null);
      return;
    }
    throw new Error("Failed to fetch file information");
  }

  const metadata: FileInfoResponse = await res.json();
  const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);

  const newInfo: FileInfo = {
    id: fileId,
    dataKey,
    dataKeyVersion: new Date(metadata.dekVersion),
    contentType: metadata.contentType,
    contentIv: metadata.contentIv,
    name: await decryptString(metadata.name, metadata.nameIv, dataKey),
    createdAt:
      metadata.createdAt && metadata.createdAtIv
        ? await decryptDate(metadata.createdAt, metadata.createdAtIv, dataKey)
        : undefined,
    lastModifiedAt: await decryptDate(metadata.lastModifiedAt, metadata.lastModifiedAtIv, dataKey),
  };

  infoStore.update(() => newInfo);
};

export const getFileInfo = (fileId: number, masterKey: CryptoKey) => {
  // TODO: MEK rotation
  let info = fileInfoStore.get(fileId);
  if (!info) {
    info = writable(null);
    fileInfoStore.set(fileId, info);
  }

  fetchFileInfo(fileId, masterKey, info);
  return info;
};
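
Callers get a Svelte store back immediately and the fetch fills it in asynchronously, so consumption is subscription-based rather than awaited. A small usage sketch; the barrel path and the masterKey binding are assumptions:

import { getDirectoryInfo } from "$lib/modules/file"; // assumed barrel path

// Assumed to be provided by the app's session / key-management state.
declare const masterKey: CryptoKey;

const directoryInfo = getDirectoryInfo("root", masterKey);
const unsubscribe = directoryInfo.subscribe((info) => {
  if (!info) return; // not fetched yet
  console.log(`root directory contains ${info.fileIds.length} files`);
});
// Later, e.g. on component destroy:
unsubscribe();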

@@ -0,0 +1,221 @@
import axios from "axios";
import ExifReader from "exifreader";
import { limitFunction } from "p-limit";
import { writable, type Writable } from "svelte/store";
import {
  encodeToBase64,
  generateDataKey,
  wrapDataKey,
  encryptData,
  encryptString,
  signMessageHmac,
} from "$lib/modules/crypto";
import type {
  DuplicateFileScanRequest,
  DuplicateFileScanResponse,
  FileUploadRequest,
} from "$lib/server/schemas";
import {
  fileUploadStatusStore,
  type MasterKey,
  type HmacSecret,
  type FileUploadStatus,
} from "$lib/stores";

const requestDuplicateFileScan = limitFunction(
  async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise<boolean>) => {
    const fileBuffer = await file.arrayBuffer();
    const fileSigned = encodeToBase64(await signMessageHmac(fileBuffer, hmacSecret.secret));
    const res = await axios.post("/api/file/scanDuplicates", {
      hskVersion: hmacSecret.version,
      contentHmac: fileSigned,
    } satisfies DuplicateFileScanRequest);
    const { files }: DuplicateFileScanResponse = res.data;

    if (files.length === 0 || (await onDuplicate())) {
      return { fileBuffer, fileSigned };
    } else {
      return {};
    }
  },
  { concurrency: 1 },
);

const getFileType = (file: File) => {
  if (file.type) return file.type;
  if (file.name.endsWith(".heic")) return "image/heic";
  throw new Error("Unknown file type");
};

const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
  const exif = ExifReader.load(fileBuffer);
  const dateTimeOriginal = exif["DateTimeOriginal"]?.description;
  const offsetTimeOriginal = exif["OffsetTimeOriginal"]?.description;
  if (!dateTimeOriginal) return undefined;

  const [date, time] = dateTimeOriginal.split(" ");
  if (!date || !time) return undefined;
  const [year = NaN, month = NaN, day = NaN] = date.split(":").map(Number);
  const [hour = NaN, minute = NaN, second = NaN] = time.split(":").map(Number);
  // Validate via NaN checks rather than truthiness so legitimate zero values
  // (e.g. a capture time of HH:00:00) are not rejected.
  if ([year, month, day, hour, minute, second].some(Number.isNaN)) return undefined;

  if (!offsetTimeOriginal) {
    // No timezone information; assume the local timezone.
    return new Date(year, month - 1, day, hour, minute, second);
  }

  const offsetSign = offsetTimeOriginal[0] === "+" ? 1 : -1;
  const [offsetHour, offsetMinute] = offsetTimeOriginal.slice(1).split(":").map(Number);
  const utcDate = Date.UTC(year, month - 1, day, hour, minute, second);
  const offsetMs = offsetSign * ((offsetHour ?? 0) * 60 + (offsetMinute ?? 0)) * 60 * 1000;
  return new Date(utcDate - offsetMs);
};

const encryptFile = limitFunction(
  async (
    status: Writable<FileUploadStatus>,
    file: File,
    fileBuffer: ArrayBuffer,
    masterKey: MasterKey,
  ) => {
    status.update((value) => {
      value.status = "encrypting";
      return value;
    });

    const fileType = getFileType(file);
    let createdAt;
    if (fileType.startsWith("image/")) {
      createdAt = extractExifDateTime(fileBuffer);
    }

    const { dataKey, dataKeyVersion } = await generateDataKey();
    const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
    const fileEncrypted = await encryptData(fileBuffer, dataKey);
    const nameEncrypted = await encryptString(file.name, dataKey);
    const createdAtEncrypted =
      createdAt && (await encryptString(createdAt.getTime().toString(), dataKey));
    const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey);

    status.update((value) => {
      value.status = "upload-pending";
      return value;
    });

    return {
      dataKeyWrapped,
      dataKeyVersion,
      fileEncrypted,
      fileType,
      nameEncrypted,
      createdAtEncrypted,
      lastModifiedAtEncrypted,
    };
  },
  { concurrency: 4 },
);

const uploadFileInternal = limitFunction(
  async (status: Writable<FileUploadStatus>, form: FormData) => {
    status.update((value) => {
      value.status = "uploading";
      return value;
    });

    await axios.post("/api/file/upload", form, {
      onUploadProgress: ({ progress, rate, estimated }) => {
        status.update((value) => {
          value.progress = progress;
          value.rate = rate;
          value.estimated = estimated;
          return value;
        });
      },
    });

    status.update((value) => {
      value.status = "uploaded";
      return value;
    });
  },
  { concurrency: 1 },
);

export const uploadFile = async (
  file: File,
  parentId: "root" | number,
  hmacSecret: HmacSecret,
  masterKey: MasterKey,
  onDuplicate: () => Promise<boolean>,
) => {
  const status = writable<FileUploadStatus>({
    name: file.name,
    parentId,
    status: "encryption-pending",
  });
  fileUploadStatusStore.update((value) => {
    value.push(status);
    return value;
  });

  try {
    const { fileBuffer, fileSigned } = await requestDuplicateFileScan(
      file,
      hmacSecret,
      onDuplicate,
    );
    if (!fileBuffer || !fileSigned) {
      status.update((value) => {
        value.status = "canceled";
        return value;
      });
      return false;
    }

    const {
      dataKeyWrapped,
      dataKeyVersion,
      fileEncrypted,
      fileType,
      nameEncrypted,
      createdAtEncrypted,
      lastModifiedAtEncrypted,
    } = await encryptFile(status, file, fileBuffer, masterKey);

    const form = new FormData();
    form.set(
      "metadata",
      JSON.stringify({
        parentId,
        mekVersion: masterKey.version,
        dek: dataKeyWrapped,
        dekVersion: dataKeyVersion.toISOString(),
        hskVersion: hmacSecret.version,
        contentHmac: fileSigned,
        contentType: fileType,
        contentIv: fileEncrypted.iv,
        name: nameEncrypted.ciphertext,
        nameIv: nameEncrypted.iv,
        createdAt: createdAtEncrypted?.ciphertext,
        createdAtIv: createdAtEncrypted?.iv,
        lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
        lastModifiedAtIv: lastModifiedAtEncrypted.iv,
      } as FileUploadRequest),
    );
    form.set("content", new Blob([fileEncrypted.ciphertext]));

    await uploadFileInternal(status, form);
    return true;
  } catch (e) {
    status.update((value) => {
      value.status = "error";
      return value;
    });
    throw e;
  }
};
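
Finally, a caller-side sketch for uploadFile. The barrel path, the hmacSecret/masterKey bindings, and the confirm-based duplicate prompt are assumptions for illustration:

import { uploadFile } from "$lib/modules/file"; // assumed barrel path
import type { HmacSecret, MasterKey } from "$lib/stores";

// Assumed to be available from the session stores.
declare const hmacSecret: HmacSecret;
declare const masterKey: MasterKey;

export const uploadAll = async (fileList: FileList, parentId: "root" | number) => {
  // Kick everything off at once; the limitFunction wrappers above keep at most
  // 4 encryptions and 1 upload in flight while the rest wait in the queue.
  return await Promise.allSettled(
    Array.from(fileList).map((file) =>
      uploadFile(file, parentId, hmacSecret, masterKey, async () => {
        // onDuplicate: resolve true to upload anyway, false to cancel this file.
        return confirm(`"${file.name}" already exists. Upload it anyway?`);
      }),
    ),
  );
};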