3 Commits

19 changed files with 714 additions and 349 deletions

View File

@@ -0,0 +1,30 @@
import { hmac } from "@noble/hashes/hmac.js";
import { sha256 } from "@noble/hashes/sha2.js";
// Message sent from the main thread asking the worker to HMAC a file.
interface ComputeMessage {
  type: "compute";
  file: File;
  // Raw HMAC key bytes (transferred to the worker, not cloned).
  hmacSecret: ArrayBuffer;
}

// Union of every message the worker accepts (currently only "compute").
type WorkerMessage = ComputeMessage;
/**
 * Streams the file through an incremental HMAC-SHA256 and posts the digest
 * back to the main thread, transferring the digest's backing buffer.
 */
self.onmessage = async (event: MessageEvent<WorkerMessage>) => {
  const { type } = event.data;
  if (type !== "compute") return;

  const { file, hmacSecret } = event.data;
  const reader = file.stream().getReader();
  try {
    const h = hmac.create(sha256, new Uint8Array(hmacSecret));
    // Feed chunks incrementally so large files never have to be fully
    // resident in memory.
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      h.update(value);
    }
    const result = h.digest();
    self.postMessage({ type: "result", hmac: result }, { transfer: [result.buffer] });
  } catch (e) {
    // A rejection inside an async message handler does NOT fire the worker's
    // "error" event, which would leave the caller's promise pending forever.
    // reportError() dispatches a real error event so worker.onerror runs.
    reportError(e);
  } finally {
    reader.releaseLock();
  }
};

View File

@@ -0,0 +1,25 @@
import HmacWorker from "./hmac.worker?worker";
/**
 * Computes the HMAC-SHA256 of a file in a dedicated worker so hashing large
 * files does not block the main thread.
 *
 * @param file The file whose content is signed.
 * @param hmacSecret An extractable HMAC CryptoKey; its raw bytes are
 *   exported and transferred to the worker.
 * @returns The raw HMAC digest produced by the worker.
 */
export const computeFileHmac = async (file: File, hmacSecret: CryptoKey): Promise<Uint8Array> => {
  const worker = new HmacWorker();
  const hmacSecretRaw = await crypto.subtle.exportKey("raw", hmacSecret);
  return new Promise((resolve, reject) => {
    // Settle the promise and always terminate the worker in the same step so
    // no code path leaks a live worker.
    const finish = (settle: () => void) => {
      settle();
      worker.terminate();
    };
    worker.onmessage = (event: MessageEvent<{ type: "result"; hmac: Uint8Array }>) => {
      if (event.data.type === "result") {
        finish(() => resolve(event.data.hmac));
      }
    };
    // Without this, a message that fails to deserialize would leave the
    // promise pending forever.
    worker.onmessageerror = () => {
      finish(() => reject(new Error("Failed to deserialize worker message")));
    };
    worker.onerror = (error) => {
      finish(() => reject(error));
    };
    worker.postMessage(
      { type: "compute", file, hmacSecret: hmacSecretRaw },
      { transfer: [hmacSecretRaw] },
    );
  });
};

View File

@@ -1,21 +1,13 @@
import ExifReader from "exifreader";
import pLimit, { limitFunction } from "p-limit";
import { limitFunction } from "p-limit";
import { CHUNK_SIZE } from "$lib/constants";
import {
encodeToBase64,
generateDataKey,
wrapDataKey,
encryptData,
encryptString,
encryptChunk,
digestMessage,
createHmacStream,
} from "$lib/modules/crypto";
import { encodeToBase64, generateDataKey, wrapDataKey, encryptString } from "$lib/modules/crypto";
import { computeFileHmac } from "$lib/modules/crypto/hmacWorker";
import { Scheduler } from "$lib/modules/scheduler";
import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail";
import { generateThumbnail } from "$lib/modules/thumbnail";
import { uploadBlob } from "$lib/modules/upload";
import type { MasterKey, HmacSecret } from "$lib/stores";
import { trpc } from "$trpc/client";
import type { RouterInputs } from "$trpc/router.server";
export interface FileUploadState {
name: string;
@@ -59,16 +51,8 @@ export const clearUploadedFiles = () => {
const requestDuplicateFileScan = limitFunction(
async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise<boolean>) => {
const hmacStream = await createHmacStream(hmacSecret.secret);
const reader = file.stream().getReader();
while (true) {
const { done, value } = await reader.read();
if (done) break;
hmacStream.update(value);
}
const fileSigned = encodeToBase64(hmacStream.digest());
const hmacResult = await computeFileHmac(file, hmacSecret.secret);
const fileSigned = encodeToBase64(hmacResult);
const files = await trpc().file.listByHash.query({
hskVersion: hmacSecret.version,
contentHmac: fileSigned,
@@ -114,295 +98,83 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
return new Date(utcDate - offsetMs);
};
const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = [];
let offset = 0;
while (offset < fileBuffer.byteLength) {
const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength);
const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey);
chunksEncrypted.push({
chunkEncrypted: chunkEncrypted,
chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)),
});
offset = nextOffset;
}
return chunksEncrypted;
};
const encryptImageFile = limitFunction(
async (state: FileUploadState, file: File, masterKey: MasterKey) => {
const requestFileUpload2 = async (
state: FileUploadState,
file: Blob,
fileSigned: string,
fileMetadata: {
parentId: "root" | number;
name: string;
createdAt?: Date;
lastModifiedAt: Date;
},
masterKey: MasterKey,
hmacSecret: HmacSecret,
) => {
state.status = "encrypting";
const fileBuffer = await file.arrayBuffer();
const createdAt = extractExifDateTime(fileBuffer);
const { dataKey, dataKeyVersion } = await generateDataKey();
const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
const chunksEncrypted = await encryptChunks(fileBuffer, dataKey);
const nameEncrypted = await encryptString(file.name, dataKey);
const createdAtEncrypted =
createdAt && (await encryptString(createdAt.getTime().toString(), dataKey));
const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey);
const [nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnailBuffer] =
await Promise.all([
encryptString(fileMetadata.name, dataKey),
fileMetadata.createdAt && encryptString(fileMetadata.createdAt.getTime().toString(), dataKey),
encryptString(fileMetadata.lastModifiedAt.getTime().toString(), dataKey),
generateThumbnail(file).then((blob) => blob?.arrayBuffer()),
]);
const thumbnail = await generateThumbnail(fileBuffer, getFileType(file));
const thumbnailBuffer = await thumbnail?.arrayBuffer();
const thumbnailEncrypted = thumbnailBuffer && (await encryptData(thumbnailBuffer, dataKey));
const { uploadId } = await trpc().upload.startFileUpload.mutate({
chunks: Math.ceil(file.size / CHUNK_SIZE),
parent: fileMetadata.parentId,
mekVersion: masterKey.version,
dek: dataKeyWrapped,
dekVersion: dataKeyVersion,
hskVersion: hmacSecret.version,
contentType: file.type,
name: nameEncrypted.ciphertext,
nameIv: nameEncrypted.iv,
createdAt: createdAtEncrypted?.ciphertext,
createdAtIv: createdAtEncrypted?.iv,
lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
lastModifiedAtIv: lastModifiedAtEncrypted.iv,
});
state.status = "upload-pending";
state.status = "uploading";
return {
dataKeyWrapped,
dataKeyVersion,
chunksEncrypted,
nameEncrypted,
createdAtEncrypted,
lastModifiedAtEncrypted,
thumbnail: thumbnailEncrypted && { plaintext: thumbnailBuffer, ...thumbnailEncrypted },
};
await uploadBlob(uploadId, file, dataKey, {
onProgress(s) {
state.progress = s.progress;
state.rate = s.rateBps;
},
{ concurrency: 4 },
);
});
const uploadThumbnail = async (
fileId: number,
thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer },
dataKeyVersion: Date,
) => {
const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
uploadId,
contentHmac: fileSigned,
});
if (thumbnailBuffer) {
const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({
file: fileId,
dekVersion: dataKeyVersion,
});
const ivAndCiphertext = new Uint8Array(
thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength,
);
ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0);
ivAndCiphertext.set(
new Uint8Array(thumbnailEncrypted.ciphertext),
thumbnailEncrypted.iv.byteLength,
);
const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext));
const response = await fetch(`/api/upload/${uploadId}/chunks/0`, {
method: "POST",
headers: {
"Content-Type": "application/octet-stream",
"Content-Digest": `sha-256=:${chunkHash}:`,
},
body: ivAndCiphertext,
});
if (!response.ok) {
throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`);
}
await uploadBlob(uploadId, new Blob([thumbnailBuffer]), dataKey);
await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId });
};
const requestImageFileUpload = limitFunction(
async (
state: FileUploadState,
metadata: RouterInputs["upload"]["startFileUpload"],
chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[],
fileSigned: string | undefined,
thumbnailData: { ciphertext: ArrayBuffer; iv: ArrayBuffer; plaintext: ArrayBuffer } | null,
dataKeyVersion: Date,
) => {
state.status = "uploading";
const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata);
const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0);
let uploadedBytes = 0;
const startTime = Date.now();
for (let i = 0; i < chunksEncrypted.length; i++) {
const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!;
const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, {
method: "POST",
headers: {
"Content-Type": "application/octet-stream",
"Content-Digest": `sha-256=:${chunkEncryptedHash}:`,
},
body: chunkEncrypted,
});
if (!response.ok) {
throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
}
uploadedBytes += chunkEncrypted.byteLength;
const elapsed = (Date.now() - startTime) / 1000;
const rate = uploadedBytes / elapsed;
const remaining = totalBytes - uploadedBytes;
const estimated = rate > 0 ? remaining / rate : undefined;
state.progress = uploadedBytes / totalBytes;
state.rate = rate;
state.estimated = estimated;
}
const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
uploadId,
contentHmac: fileSigned,
});
if (thumbnailData) {
try {
await uploadThumbnail(fileId, thumbnailData, dataKeyVersion);
} catch (e) {
// TODO: Error handling for thumbnail upload
console.error(e);
}
}
state.status = "uploaded";
return { fileId, thumbnailBuffer: thumbnailData?.plaintext };
},
{ concurrency: 1 },
);
const requestFileUpload = async (
state: FileUploadState,
file: File,
masterKey: MasterKey,
hmacSecret: HmacSecret,
fileSigned: string,
parentId: DirectoryId,
) => {
state.status = "uploading";
const fileType = getFileType(file);
const { dataKey, dataKeyVersion } = await generateDataKey();
const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
const nameEncrypted = await encryptString(file.name, dataKey);
const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey);
const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
const metadata = {
chunks: totalChunks,
parent: parentId,
mekVersion: masterKey.version,
dek: dataKeyWrapped,
dekVersion: dataKeyVersion,
hskVersion: hmacSecret.version,
contentType: fileType,
name: nameEncrypted.ciphertext,
nameIv: nameEncrypted.iv,
lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
lastModifiedAtIv: lastModifiedAtEncrypted.iv,
};
const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata);
const reader = file.stream().getReader();
const limit = pLimit(4);
let buffer = new Uint8Array(0);
let chunkIndex = 0;
const uploadPromises: Promise<void>[] = [];
const totalBytes = file.size;
let uploadedBytes = 0;
const startTime = Date.now();
const uploadChunk = async (
index: number,
encryptedChunk: ArrayBuffer,
chunkHash: string,
originalChunkSize: number,
) => {
const response = await fetch(`/api/upload/${uploadId}/chunks/${index}`, {
method: "POST",
headers: {
"Content-Type": "application/octet-stream",
"Content-Digest": `sha-256=:${chunkHash}:`,
},
body: encryptedChunk,
});
if (!response.ok) {
throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
}
uploadedBytes += originalChunkSize;
const elapsed = (Date.now() - startTime) / 1000;
const rate = uploadedBytes / elapsed;
const remaining = totalBytes - uploadedBytes;
const estimated = rate > 0 ? remaining / rate : undefined;
state.progress = uploadedBytes / totalBytes;
state.rate = rate;
state.estimated = estimated;
};
while (true) {
const { done, value } = await reader.read();
if (done && buffer.length === 0) break;
if (value) {
const newBuffer = new Uint8Array(buffer.length + value.length);
newBuffer.set(buffer);
newBuffer.set(value, buffer.length);
buffer = newBuffer;
}
while (buffer.length >= CHUNK_SIZE || (done && buffer.length > 0)) {
const chunkSize = Math.min(CHUNK_SIZE, buffer.length);
const chunk = buffer.slice(0, chunkSize);
buffer = buffer.slice(chunkSize);
const encryptedChunk = await encryptChunk(chunk.buffer.slice(0, chunk.byteLength), dataKey);
const chunkHash = encodeToBase64(await digestMessage(encryptedChunk));
const currentIndex = chunkIndex++;
uploadPromises.push(
limit(() => uploadChunk(currentIndex, encryptedChunk, chunkHash, chunkSize)),
);
}
if (done) break;
}
await Promise.all(uploadPromises);
const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
uploadId,
contentHmac: fileSigned,
});
if (fileType.startsWith("video/")) {
try {
const thumbnail = await generateThumbnailFromFile(file);
if (thumbnail) {
const thumbnailBuffer = await thumbnail.arrayBuffer();
const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey);
await uploadThumbnail(fileId, thumbnailEncrypted, dataKeyVersion);
}
} catch (e) {
// Thumbnail upload failure is not critical
console.error(e);
}
}
state.status = "uploaded";
return { fileId };
return { fileId, thumbnailBuffer };
};
export const uploadFile = async (
file: File,
parentId: "root" | number,
hmacSecret: HmacSecret,
masterKey: MasterKey,
hmacSecret: HmacSecret,
onDuplicate: () => Promise<boolean>,
) => {
uploadingFiles.push({
@@ -426,51 +198,37 @@ export const uploadFile = async (
const fileType = getFileType(file);
if (fileType.startsWith("image/")) {
const fileBuffer = await file.arrayBuffer();
const {
dataKeyWrapped,
dataKeyVersion,
chunksEncrypted,
nameEncrypted,
createdAtEncrypted,
lastModifiedAtEncrypted,
thumbnail,
} = await encryptImageFile(state, file, masterKey);
const fileCreatedAt = extractExifDateTime(fileBuffer);
const metadata = {
chunks: chunksEncrypted.length,
parent: parentId,
mekVersion: masterKey.version,
dek: dataKeyWrapped,
dekVersion: dataKeyVersion,
hskVersion: hmacSecret.version,
contentType: fileType,
name: nameEncrypted.ciphertext,
nameIv: nameEncrypted.iv,
createdAt: createdAtEncrypted?.ciphertext,
createdAtIv: createdAtEncrypted?.iv,
lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
lastModifiedAtIv: lastModifiedAtEncrypted.iv,
};
const { fileId, thumbnailBuffer } = await requestImageFileUpload(
const { fileId, thumbnailBuffer } = await requestFileUpload2(
state,
metadata,
chunksEncrypted,
new Blob([fileBuffer], { type: fileType }),
fileSigned,
thumbnail ?? null,
dataKeyVersion,
);
return { fileId, fileBuffer, thumbnailBuffer };
} else {
const { fileId } = await requestFileUpload(
state,
file,
{
parentId,
name: file.name,
createdAt: fileCreatedAt,
lastModifiedAt: new Date(file.lastModified),
},
masterKey,
hmacSecret,
fileSigned,
parentId,
);
return { fileId };
return { fileId, fileBuffer, thumbnailBuffer };
} else {
const { fileId, thumbnailBuffer } = await requestFileUpload2(
state,
file,
fileSigned,
{
parentId,
name: file.name,
lastModifiedAt: new Date(file.lastModified),
},
masterKey,
hmacSecret,
);
return { fileId, thumbnailBuffer };
}
} catch (e) {
state.status = "error";

View File

@@ -52,7 +52,6 @@ const generateImageThumbnail = (imageUrl: string) => {
.catch(reject);
};
image.onerror = reject;
image.src = imageUrl;
});
};
@@ -85,31 +84,27 @@ const generateVideoThumbnail = (videoUrl: string, time = 0) => {
});
};
export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: string) => {
export const generateThumbnail = async (blob: Blob) => {
let url;
try {
if (fileType.startsWith("image/")) {
const fileBlob = new Blob([fileBuffer], { type: fileType });
url = URL.createObjectURL(fileBlob);
if (blob.type.startsWith("image/")) {
url = URL.createObjectURL(blob);
try {
return await generateImageThumbnail(url);
} catch {
URL.revokeObjectURL(url);
url = undefined;
if (fileType === "image/heic") {
if (blob.type === "image/heic") {
const { default: heic2any } = await import("heic2any");
url = URL.createObjectURL(
(await heic2any({ blob: fileBlob, toType: "image/png" })) as Blob,
);
url = URL.createObjectURL((await heic2any({ blob, toType: "image/png" })) as Blob);
return await generateImageThumbnail(url);
} else {
return null;
}
}
} else if (fileType.startsWith("video/")) {
url = URL.createObjectURL(new Blob([fileBuffer], { type: fileType }));
} else if (blob.type.startsWith("video/")) {
url = URL.createObjectURL(blob);
return await generateVideoThumbnail(url);
}
return null;
@@ -122,22 +117,6 @@ export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: strin
}
};
export const generateThumbnailFromFile = async (file: File) => {
if (!file.type.startsWith("video/")) return null;
let url;
try {
url = URL.createObjectURL(file);
return await generateVideoThumbnail(url);
} catch {
return null;
} finally {
if (url) {
URL.revokeObjectURL(url);
}
}
};
export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => {
return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`;
};

103
src/lib/modules/upload.ts Normal file
View File

@@ -0,0 +1,103 @@
import axios from "axios";
import pLimit from "p-limit";
import { ENCRYPTION_OVERHEAD, CHUNK_SIZE } from "$lib/constants";
import { encryptChunk, digestMessage, encodeToBase64 } from "$lib/modules/crypto";
// Progress snapshot reported to `onProgress` while a blob is being uploaded.
type UploadStats = {
  progress: number; // 0..1, measured against the post-encryption size
  rateBps: number; // bytes/sec
  uploadedBytes: number; // encrypted bytes sent so far
  totalBytes: number; // total encrypted bytes expected
};
/**
 * Creates a rolling-window throughput meter.
 *
 * The returned function is called with the cumulative byte count; it records
 * a timestamped sample and returns the average rate (bytes/sec) over roughly
 * the last `windowMs` milliseconds. The very first call returns 0.
 */
function createSpeedMeter(windowMs = 1500) {
  type Sample = { t: number; b: number };
  const window: Sample[] = [];
  return (bytesNow: number) => {
    const now = performance.now();
    window.push({ t: now, b: bytesNow });
    // Evict samples older than the window, but always keep at least two so
    // the rate is computed over a non-trivial interval.
    const oldestAllowed = now - windowMs;
    while (window.length > 2 && window[0]!.t < oldestAllowed) window.shift();
    const oldest = window[0]!;
    const elapsedMs = now - oldest.t;
    const deltaBytes = bytesNow - oldest.b;
    return elapsedMs > 0 ? (deltaBytes / elapsedMs) * 1000 : 0;
  };
}
/**
 * Encrypts one chunk and POSTs it to the chunk endpoint, reporting per-chunk
 * upload progress as the request body is transmitted.
 */
const uploadChunk = async (
  uploadId: string,
  chunkIndex: number,
  chunk: Blob,
  dataKey: CryptoKey,
  onChunkProgress: (chunkIndex: number, loaded: number) => void,
) => {
  const plaintext = await chunk.arrayBuffer();
  const encrypted = await encryptChunk(plaintext, dataKey);
  const encryptedHash = encodeToBase64(await digestMessage(encrypted));

  await axios.post(`/api/upload/${uploadId}/chunks/${chunkIndex}`, encrypted, {
    headers: {
      "Content-Type": "application/octet-stream",
      "Content-Digest": `sha-256=:${encryptedHash}:`,
    },
    onUploadProgress(e) {
      onChunkProgress(chunkIndex, e.loaded ?? 0);
    },
  });

  // Make sure the chunk counts as fully sent even if the final progress
  // event under-reported.
  onChunkProgress(chunkIndex, encrypted.byteLength);
};
/**
 * Uploads a blob in CHUNK_SIZE pieces, encrypting each chunk and sending up
 * to `concurrency` chunks in parallel (default 4). Progress is reported
 * against the expected post-encryption size: plaintext size plus a fixed
 * per-chunk encryption overhead.
 */
export const uploadBlob = async (
  uploadId: string,
  blob: Blob,
  dataKey: CryptoKey,
  options?: { concurrency?: number; onProgress?: (s: UploadStats) => void },
) => {
  const onProgress = options?.onProgress;
  const totalChunks = Math.ceil(blob.size / CHUNK_SIZE);
  // Every chunk grows by a fixed amount when encrypted, so the final upload
  // size is known up front.
  const totalBytes = blob.size + totalChunks * ENCRYPTION_OVERHEAD;
  const bytesSentPerChunk = new Array<number>(totalChunks).fill(0);
  const measureRate = createSpeedMeter(1500);

  const reportProgress = () => {
    if (!onProgress) return;
    let uploadedBytes = 0;
    for (const sent of bytesSentPerChunk) uploadedBytes += sent;
    const rateBps = measureRate(uploadedBytes);
    onProgress({
      progress: Math.min(1, uploadedBytes / totalBytes),
      rateBps,
      uploadedBytes,
      totalBytes,
    });
  };

  const handleChunkProgress = (idx: number, loaded: number) => {
    bytesSentPerChunk[idx] = loaded;
    reportProgress();
  };

  const limit = pLimit(options?.concurrency ?? 4);
  const tasks: Promise<void>[] = [];
  for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
    const piece = blob.slice(chunkIndex * CHUNK_SIZE, (chunkIndex + 1) * CHUNK_SIZE);
    tasks.push(
      limit(() => uploadChunk(uploadId, chunkIndex, piece, dataKey, handleChunkProgress)),
    );
  }
  await Promise.all(tasks);

  // Final correction: explicitly report completion.
  onProgress?.({
    progress: 1,
    rateBps: 0,
    uploadedBytes: totalBytes,
    totalBytes,
  });
};

View File

@@ -9,6 +9,7 @@ type IntegrityErrorMessages =
// File
| "Directory not found"
| "File not found"
| "File is not legacy"
| "File not found in category"
| "File already added to category"
| "Invalid DEK version"

View File

@@ -334,6 +334,16 @@ export const getAllFileIds = async (userId: number) => {
return files.map(({ id }) => id);
};
/**
 * Returns the ids of the user's "legacy" files — files whose content still
 * carries an `encrypted_content_iv` and has not yet been migrated.
 */
export const getLegacyFileIds = async (userId: number) => {
  const rows = await db
    .selectFrom("file")
    .select("id")
    .where("user_id", "=", userId)
    .where("encrypted_content_iv", "is not", null)
    .execute();
  return rows.map((row) => row.id);
};
export const getAllFileIdsByContentHmac = async (
userId: number,
hskVersion: number,
@@ -482,6 +492,52 @@ export const unregisterFile = async (userId: number, fileId: number) => {
});
};
/**
 * Swaps a legacy file's content for newly re-encrypted content, inside the
 * caller-provided transaction.
 *
 * @param trx Active Kysely transaction (typed as the db instance).
 * @param userId Owner of the file; also scopes the update.
 * @param fileId File being migrated.
 * @param newPath Path of the newly written, re-encrypted content.
 * @param encContentHash Hash of the new encrypted content.
 * @returns The file's previous content path so the caller can delete it.
 * @throws IntegrityError "File not found" / "File is not legacy".
 */
export const migrateFileContent = async (
  trx: typeof db,
  userId: number,
  fileId: number,
  newPath: string,
  encContentHash: string,
) => {
  // Lock the row (FOR UPDATE) so two concurrent migrations of the same file
  // cannot both pass the legacy check below.
  const file = await trx
    .selectFrom("file")
    .select(["path", "encrypted_content_iv"])
    .where("id", "=", fileId)
    .where("user_id", "=", userId)
    .limit(1)
    .forUpdate()
    .executeTakeFirst();
  if (!file) {
    throw new IntegrityError("File not found");
  }
  // Only files still carrying a content IV are considered legacy.
  if (!file.encrypted_content_iv) {
    throw new IntegrityError("File is not legacy");
  }

  await trx
    .updateTable("file")
    .set({
      path: newPath,
      encrypted_content_iv: null, // clearing the IV marks the file as migrated
      encrypted_content_hash: encContentHash,
    })
    .where("id", "=", fileId)
    .where("user_id", "=", userId)
    .execute();
  // Record the migration in the file's audit log.
  await trx
    .insertInto("file_log")
    .values({
      file_id: fileId,
      timestamp: new Date(),
      action: "migrate",
    })
    .execute();

  // Old content path, needed by the caller to unlink the stale blob.
  return file.path;
};
export const addFileToCategory = async (fileId: number, categoryId: number) => {
await db.transaction().execute(async (trx) => {
try {

View File

@@ -41,7 +41,7 @@ interface FileLogTable {
id: Generated<number>;
file_id: number;
timestamp: ColumnType<Date, Date, never>;
action: "create" | "rename" | "add-to-category" | "remove-from-category";
action: "create" | "rename" | "migrate" | "add-to-category" | "remove-from-category";
new_name: Ciphertext | null;
category_id: number | null;
}

View File

@@ -3,7 +3,7 @@ import type { Ciphertext } from "./utils";
interface UploadSessionTable {
id: string;
type: "file" | "thumbnail";
type: "file" | "thumbnail" | "migration";
user_id: number;
path: string;
total_chunks: number;

View File

@@ -31,6 +31,11 @@ interface ThumbnailUploadSession extends BaseUploadSession {
dekVersion: Date;
}
// Upload session used while re-encrypting ("migrating") an existing legacy
// file's content; `fileId` is the file whose content will be replaced.
interface MigrationUploadSession extends BaseUploadSession {
  type: "migration";
  fileId: number;
}
export const createFileUploadSession = async (
params: Omit<FileUploadSession, "type" | "uploadedChunks">,
) => {
@@ -118,6 +123,39 @@ export const createThumbnailUploadSession = async (
});
};
/**
 * Creates an upload session for migrating a legacy file's content.
 *
 * Validates inside a transaction — with the file row locked FOR UPDATE —
 * that the file exists, belongs to the user, and is still legacy, then
 * inserts the session row.
 *
 * @throws IntegrityError "File not found" / "File is not legacy".
 */
export const createMigrationUploadSession = async (
  params: Omit<MigrationUploadSession, "type" | "uploadedChunks">,
) => {
  await db.transaction().execute(async (trx) => {
    const file = await trx
      .selectFrom("file")
      .select("encrypted_content_iv")
      .where("id", "=", params.fileId)
      .where("user_id", "=", params.userId)
      .limit(1)
      .forUpdate()
      .executeTakeFirst();
    if (!file) {
      throw new IntegrityError("File not found");
    } else if (!file.encrypted_content_iv) {
      // A cleared content IV means the file was already migrated.
      throw new IntegrityError("File is not legacy");
    }

    await trx
      .insertInto("upload_session")
      .values({
        id: params.id,
        type: "migration",
        user_id: params.userId,
        path: params.path,
        total_chunks: params.totalChunks,
        expires_at: params.expiresAt,
        file_id: params.fileId,
      })
      .execute();
  });
};
export const getUploadSession = async (sessionId: string, userId: number) => {
const session = await db
.selectFrom("upload_session")
@@ -148,7 +186,7 @@ export const getUploadSession = async (sessionId: string, userId: number) => {
encCreatedAt: session.encrypted_created_at,
encLastModifiedAt: session.encrypted_last_modified_at!,
} satisfies FileUploadSession;
} else {
} else if (session.type === "thumbnail") {
return {
type: "thumbnail",
id: session.id,
@@ -160,6 +198,17 @@ export const getUploadSession = async (sessionId: string, userId: number) => {
fileId: session.file_id!,
dekVersion: session.data_encryption_key_version!,
} satisfies ThumbnailUploadSession;
} else {
return {
type: "migration",
id: session.id,
userId: session.user_id,
path: session.path,
totalChunks: session.total_chunks,
uploadedChunks: session.uploaded_chunks,
expiresAt: session.expires_at,
fileId: session.file_id!,
} satisfies MigrationUploadSession;
}
};

View File

@@ -0,0 +1,7 @@
import { createCaller } from "$trpc/router.server";
import type { PageServerLoad } from "./$types";
/** Loads the ids of the user's legacy files for the migration page. */
export const load: PageServerLoad = async (event) => {
  const caller = createCaller(event);
  return { files: await caller.file.listLegacy() };
};

View File

@@ -0,0 +1,79 @@
<script lang="ts">
  import { onMount } from "svelte";
  import { goto } from "$app/navigation";
  import { BottomDiv, Button, FullscreenDiv } from "$lib/components/atoms";
  import { TopBar } from "$lib/components/molecules";
  import { bulkGetFileInfo, type MaybeFileInfo } from "$lib/modules/filesystem";
  import { masterKeyStore } from "$lib/stores";
  import { sortEntries } from "$lib/utils";
  import File from "./File.svelte";
  import { getMigrationState, clearMigrationStates, requestFileMigration } from "./service.svelte";

  let { data } = $props();

  let fileInfos: MaybeFileInfo[] = $state([]);
  // Pair each file with its live migration state; hide files whose migration
  // has already completed.
  let files = $derived(
    fileInfos
      .map((info) => ({
        info,
        state: getMigrationState(info.id),
      }))
      .filter((file) => file.state?.status !== "completed"),
  );

  // Starts migration for every listed file that still exists.
  const migrateAllFiles = () => {
    files.forEach(({ info }) => {
      if (info.exists) {
        requestFileMigration(info);
      }
    });
  };

  onMount(async () => {
    // NOTE(review): assumes the version-1 master key is available (`!`) —
    // confirm this page is only reachable when the key store is populated.
    fileInfos = sortEntries(
      Array.from((await bulkGetFileInfo(data.files, $masterKeyStore?.get(1)?.key!)).values()),
    );
  });

  // The effect body is a no-op; returning the function registers it as the
  // effect's cleanup, so finished states are cleared when the page unmounts.
  $effect(() => clearMigrationStates);
</script>

<svelte:head>
  <title>암호화 마이그레이션</title>
</svelte:head>

<TopBar title="암호화 마이그레이션" />
<FullscreenDiv>
  {#if files.length > 0}
    <div class="space-y-4 pb-4">
      <p class="break-keep text-gray-800">
        이전 버전의 ArkVault에서 업로드된 {files.length}개 파일을 다시 암호화할 수 있어요.
      </p>
      <div class="space-y-2">
        {#each files as { info, state } (info.id)}
          {#if info.exists}
            <File
              {info}
              {state}
              onclick={({ id }) => goto(`/file/${id}`)}
              onMigrateClick={requestFileMigration}
            />
          {/if}
        {/each}
      </div>
    </div>
    <BottomDiv>
      <Button onclick={migrateAllFiles} class="w-full">모두 다시 암호화하기</Button>
    </BottomDiv>
  {:else}
    <div class="flex flex-grow items-center justify-center">
      <p class="text-gray-500">
        {#if data.files.length === 0}
          마이그레이션할 파일이 없어요.
        {:else}
          파일 목록을 불러오고 있어요.
        {/if}
      </p>
    </div>
  {/if}
</FullscreenDiv>

View File

@@ -0,0 +1,55 @@
<script module lang="ts">
  // Human-readable labels per migration status. "uploading" is rendered
  // separately so progress and transfer speed can be interpolated.
  const subtexts = {
    queued: "대기 중",
    "download-pending": "다운로드를 기다리는 중",
    downloading: "다운로드하는 중",
    "encryption-pending": "암호화를 기다리는 중",
    encrypting: "암호화하는 중",
    "upload-pending": "업로드를 기다리는 중",
    completed: "완료",
    error: "실패",
  } as const;
</script>

<script lang="ts">
  import { ActionEntryButton } from "$lib/components/atoms";
  import { DirectoryEntryLabel } from "$lib/components/molecules";
  import type { FileInfo } from "$lib/modules/filesystem";
  import { formatDateTime, formatNetworkSpeed } from "$lib/utils";
  import type { MigrationState } from "./service.svelte";
  import IconSync from "~icons/material-symbols/sync";

  type FileInfoWithExists = FileInfo & { exists: true };

  interface Props {
    info: FileInfoWithExists;
    onclick: (file: FileInfo) => void;
    onMigrateClick: (file: FileInfoWithExists) => void;
    state: MigrationState | undefined;
  }

  let { info, onclick, onMigrateClick, state }: Props = $props();

  // Secondary label: file date when idle, live progress while uploading,
  // otherwise the status label for the current migration state.
  let subtext = $derived.by(() => {
    if (!state) {
      return formatDateTime(info.createdAt ?? info.lastModifiedAt);
    }
    if (state.status === "uploading") {
      const progress = Math.floor((state.progress ?? 0) * 100);
      // rate is bytes/sec; multiplied by 8 to display bits/sec.
      const speed = formatNetworkSpeed((state.rate ?? 0) * 8);
      return `전송됨 ${progress}% · ${speed}`;
    }
    return subtexts[state.status] ?? state.status;
  });
</script>

<!-- The sync action icon is only offered before migration starts or after a
     failure, allowing a (re)try. -->
<ActionEntryButton
  class="h-14"
  onclick={() => onclick(info)}
  actionButtonIcon={!state || state.status === "error" ? IconSync : undefined}
  onActionButtonClick={() => onMigrateClick(info)}
  actionButtonClass="text-gray-800"
>
  <DirectoryEntryLabel type="file" name={info.name} {subtext} />
</ActionEntryButton>

View File

@@ -0,0 +1,107 @@
import { limitFunction } from "p-limit";
import { SvelteMap } from "svelte/reactivity";
import { CHUNK_SIZE } from "$lib/constants";
import type { FileInfo } from "$lib/modules/filesystem";
import { Scheduler } from "$lib/modules/scheduler";
import { uploadBlob } from "$lib/modules/upload";
import { requestFileDownload } from "$lib/services/file";
import { trpc } from "$trpc/client";
// Lifecycle of a single file migration, listed in the order states are
// normally entered; "error" may be entered from any state.
export type MigrationStatus =
  | "queued"
  | "download-pending"
  | "downloading"
  | "encryption-pending"
  | "encrypting"
  | "upload-pending"
  | "uploading"
  | "completed"
  | "error";

// Reactive per-file migration state rendered by the migration page.
export interface MigrationState {
  status: MigrationStatus;
  // Upload progress in [0, 1]; only set while uploading.
  progress?: number;
  // Upload rate in bytes/sec; only set while uploading.
  rate?: number;
}
// Schedules migration work; presumably pipelines the download phase against
// the upload phase — confirm against Scheduler's implementation.
const scheduler = new Scheduler();
// Per-file migration states keyed by file id; SvelteMap makes insertions and
// deletions reactive for the page rendering them.
const states = new SvelteMap<number, MigrationState>();

// Wraps the initial status in a $state rune so later field mutations on the
// returned object are reactive.
const createState = (status: MigrationStatus): MigrationState => {
  const state = $state({ status });
  return state;
};

// Returns the live migration state for a file, if a migration was started.
export const getMigrationState = (fileId: number) => {
  return states.get(fileId);
};

// Drops finished (completed or failed) entries, keeping in-flight ones.
export const clearMigrationStates = () => {
  for (const [id, state] of states) {
    if (state.status === "completed" || state.status === "error") {
      states.delete(id);
    }
  }
};
// Uploads the re-encrypted content of one file through a migration upload
// session. Limited to one concurrent upload so parallel migrations don't
// compete for bandwidth.
const uploadMigrationChunks = limitFunction(
  async (state: MigrationState, fileId: number, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
    state.status = "uploading";
    const { uploadId } = await trpc().upload.startMigrationUpload.mutate({
      file: fileId,
      chunks: Math.ceil(fileBuffer.byteLength / CHUNK_SIZE),
    });
    await uploadBlob(uploadId, new Blob([fileBuffer]), dataKey, {
      onProgress(s) {
        state.progress = s.progress;
        state.rate = s.rateBps;
      },
    });
    await trpc().upload.completeMigrationUpload.mutate({ uploadId });
  },
  { concurrency: 1 },
);
/**
 * Queues a file for encryption migration: downloads the content, then
 * re-uploads it through a migration session. Safe to call repeatedly —
 * a file already in flight is ignored, while a failed one is retried.
 */
export const requestFileMigration = async (fileInfo: FileInfo) => {
  let state = states.get(fileInfo.id);
  if (state) {
    // Only a failed migration may be restarted; anything else is in flight
    // (or already completed) and must not be re-entered.
    if (state.status !== "error") return;
    state.status = "queued";
    state.progress = undefined;
    state.rate = undefined;
  } else {
    state = createState("queued");
    states.set(fileInfo.id, state);
  }

  try {
    const dataKey = fileInfo.dataKey?.key;
    if (!dataKey) {
      throw new Error("Data key not available");
    }

    let fileBuffer: ArrayBuffer | undefined;
    // Mark as waiting BEFORE handing off to the scheduler; previously this
    // was set inside the scheduled callback immediately before "downloading",
    // so the pending state was never observable in the UI.
    state.status = "download-pending";
    await scheduler.schedule(
      async () => {
        state.status = "downloading";
        fileBuffer = await requestFileDownload(fileInfo.id, dataKey, true);
        return fileBuffer.byteLength;
      },
      async () => {
        state.status = "encryption-pending";
        // fileBuffer is guaranteed set: the scheduler runs this phase only
        // after the first callback has resolved.
        await uploadMigrationChunks(state, fileInfo.id, fileBuffer!, dataKey);
        state.status = "completed";
      },
    );
  } catch (e) {
    state.status = "error";
    throw e;
  }
};

View File

@@ -51,7 +51,7 @@
if (!files || files.length === 0) return;
for (const file of files) {
requestFileUpload(file, data.id, $hmacSecretStore?.get(1)!, $masterKeyStore?.get(1)!, () => {
requestFileUpload(file, data.id, $masterKeyStore?.get(1)!, $hmacSecretStore?.get(1)!, () => {
return new Promise((resolve) => {
duplicatedFile = file;
resolveForDuplicateFileModal = resolve;

View File

@@ -81,11 +81,11 @@ export const requestDirectoryCreation = async (
export const requestFileUpload = async (
file: File,
parentId: "root" | number,
hmacSecret: HmacSecret,
masterKey: MasterKey,
hmacSecret: HmacSecret,
onDuplicate: () => Promise<boolean>,
) => {
const res = await uploadFile(file, parentId, hmacSecret, masterKey, onDuplicate);
const res = await uploadFile(file, parentId, masterKey, hmacSecret, onDuplicate);
if (!res) return false;
if (res.fileBuffer) {

View File

@@ -5,6 +5,7 @@
import IconStorage from "~icons/material-symbols/storage";
import IconImage from "~icons/material-symbols/image";
import IconLockReset from "~icons/material-symbols/lock-reset";
import IconPassword from "~icons/material-symbols/password";
import IconLogout from "~icons/material-symbols/logout";
@@ -41,6 +42,13 @@
>
썸네일
</MenuEntryButton>
<MenuEntryButton
onclick={() => goto("/settings/migration")}
icon={IconLockReset}
iconColor="text-teal-500"
>
암호화 마이그레이션
</MenuEntryButton>
</div>
<div class="space-y-2">
<p class="font-semibold">보안</p>

View File

@@ -100,6 +100,10 @@ const fileRouter = router({
return await MediaRepo.getMissingFileThumbnails(ctx.session.userId);
}),
// Lists the ids of the user's legacy (not yet re-encrypted) files.
listLegacy: roleProcedure["activeClient"].query(async ({ ctx }) => {
  return await FileRepo.getLegacyFileIds(ctx.session.userId);
}),
rename: roleProcedure["activeClient"]
.input(
z.object({

View File

@@ -250,6 +250,110 @@ const uploadRouter = router({
sessionLocks.delete(uploadId);
}
}),
// Starts a migration upload session for a legacy file. A session directory
// is allocated up front and removed again if session creation fails.
startMigrationUpload: roleProcedure["activeClient"]
  .input(
    z.object({
      file: z.int().positive(),
      chunks: z.int().positive(),
    }),
  )
  .mutation(async ({ ctx, input }) => {
    const { id, path } = await generateSessionId();
    try {
      await UploadRepo.createMigrationUploadSession({
        id,
        userId: ctx.session.userId,
        path,
        totalChunks: input.chunks,
        expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours
        fileId: input.file,
      });
      return { uploadId: id };
    } catch (e) {
      // Best-effort cleanup of the pre-allocated session directory.
      await safeRecursiveRm(path);
      // Map known integrity violations onto proper TRPC error codes.
      if (e instanceof IntegrityError) {
        if (e.message === "File not found") {
          throw new TRPCError({ code: "NOT_FOUND", message: "File not found" });
        } else if (e.message === "File is not legacy") {
          throw new TRPCError({ code: "BAD_REQUEST", message: "File is not legacy" });
        }
      }
      throw e;
    }
  }),
// Completes a migration upload: concatenates the uploaded chunks into the
// final content file, hashes it, and atomically swaps it in for the file's
// legacy content.
completeMigrationUpload: roleProcedure["activeClient"]
  .input(
    z.object({
      uploadId: z.uuidv4(),
    }),
  )
  .mutation(async ({ ctx, input }) => {
    const { uploadId } = input;
    // Guard against concurrent completion calls for the same session.
    if (sessionLocks.has(uploadId)) {
      throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" });
    } else {
      sessionLocks.add(uploadId);
    }

    let filePath = "";
    try {
      const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
      if (!session || session.type !== "migration") {
        throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" });
      } else if (session.uploadedChunks.length < session.totalChunks) {
        throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" });
      }

      filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`;
      await mkdir(dirname(filePath), { recursive: true });

      // Concatenate the chunks in order into the final file while hashing.
      const hashStream = createHash("sha256");
      const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
      for (let i = 0; i < session.totalChunks; i++) {
        for await (const chunk of createReadStream(`${session.path}/${i}`)) {
          hashStream.update(chunk);
          // Honor backpressure: without waiting for 'drain', a large file
          // would be buffered entirely in the write stream's memory.
          if (!writeStream.write(chunk)) {
            await new Promise<void>((resolve) => writeStream.once("drain", resolve));
          }
        }
      }
      await new Promise<void>((resolve, reject) => {
        writeStream.end((e?: Error | null) => (e ? reject(e) : resolve()));
      });
      const hash = hashStream.digest("base64");

      // Swap content path and delete the session in one transaction; the old
      // path is returned so the stale blob can be unlinked afterwards.
      const oldPath = await db.transaction().execute(async (trx) => {
        const oldPath = await FileRepo.migrateFileContent(
          trx,
          ctx.session.userId,
          session.fileId,
          filePath,
          hash,
        );
        await UploadRepo.deleteUploadSession(trx, uploadId);
        return oldPath;
      });

      await Promise.all([safeUnlink(oldPath), safeRecursiveRm(session.path)]);
    } catch (e) {
      // Best-effort cleanup of the partially written file (filePath may
      // still be "" if we failed before allocating it).
      await safeUnlink(filePath);
      if (e instanceof IntegrityError) {
        if (e.message === "File not found") {
          throw new TRPCError({ code: "NOT_FOUND", message: "File not found" });
        } else if (e.message === "File is not legacy") {
          throw new TRPCError({ code: "BAD_REQUEST", message: "File is not legacy" });
        }
      }
      throw e;
    } finally {
      sessionLocks.delete(uploadId);
    }
  }),
});
export default uploadRouter;