10 Commits

Author SHA1 Message Date
static
ac6aaa18ca Merge pull request #20 from kmc7468/dev
v0.9.0
2026-01-18 13:30:15 +09:00
static
7b621d6e98 Merge pull request #19 from kmc7468/dev
v0.8.0
2026-01-13 00:29:14 +09:00
static
3906ec4371 Merge pull request #17 from kmc7468/dev
v0.7.0
2026-01-06 07:50:16 +09:00
static
90ac5ba4c3 Merge pull request #15 from kmc7468/dev
v0.6.0
2025-12-27 14:22:26 +09:00
static
dfffa004ac Merge pull request #13 from kmc7468/dev
v0.5.1
2025-07-12 19:56:12 +09:00
static
0cd55a413d Merge pull request #12 from kmc7468/dev
v0.5.0
2025-07-12 06:01:08 +09:00
static
361d966a59 Merge pull request #10 from kmc7468/dev
v0.4.0
2025-01-30 21:06:50 +09:00
static
aef43b8bfa Merge pull request #6 from kmc7468/dev
v0.3.0
2025-01-18 13:29:09 +09:00
static
7f128cccf6 Merge pull request #5 from kmc7468/dev
v0.2.0
2025-01-13 03:53:14 +09:00
static
a198e5f6dc Merge pull request #2 from kmc7468/dev
v0.1.0
2025-01-09 06:24:31 +09:00
33 changed files with 1379 additions and 1028 deletions

View File

@@ -12,3 +12,4 @@ USER_CLIENT_CHALLENGE_EXPIRES=
SESSION_UPGRADE_CHALLENGE_EXPIRES= SESSION_UPGRADE_CHALLENGE_EXPIRES=
LIBRARY_PATH= LIBRARY_PATH=
THUMBNAILS_PATH= THUMBNAILS_PATH=
UPLOADS_PATH=

View File

@@ -9,6 +9,7 @@ services:
volumes: volumes:
- ./data/library:/app/data/library - ./data/library:/app/data/library
- ./data/thumbnails:/app/data/thumbnails - ./data/thumbnails:/app/data/thumbnails
- ./data/uploads:/app/data/uploads
environment: environment:
# ArkVault # ArkVault
- DATABASE_HOST=database - DATABASE_HOST=database
@@ -20,6 +21,7 @@ services:
- SESSION_UPGRADE_CHALLENGE_EXPIRES - SESSION_UPGRADE_CHALLENGE_EXPIRES
- LIBRARY_PATH=/app/data/library - LIBRARY_PATH=/app/data/library
- THUMBNAILS_PATH=/app/data/thumbnails - THUMBNAILS_PATH=/app/data/thumbnails
- UPLOADS_PATH=/app/data/uploads
# SvelteKit # SvelteKit
- ADDRESS_HEADER=${TRUST_PROXY:+X-Forwarded-For} - ADDRESS_HEADER=${TRUST_PROXY:+X-Forwarded-For}
- XFF_DEPTH=${TRUST_PROXY:-} - XFF_DEPTH=${TRUST_PROXY:-}

View File

@@ -1,7 +1,7 @@
{ {
"name": "arkvault", "name": "arkvault",
"private": true, "private": true,
"version": "0.9.1", "version": "0.9.0",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "vite dev", "dev": "vite dev",
@@ -16,56 +16,56 @@
"db:migrate": "kysely migrate" "db:migrate": "kysely migrate"
}, },
"devDependencies": { "devDependencies": {
"@eslint/compat": "^2.0.3", "@eslint/compat": "^2.0.1",
"@eslint/js": "^9.39.4", "@eslint/js": "^9.39.2",
"@iconify-json/material-symbols": "^1.2.61", "@iconify-json/material-symbols": "^1.2.51",
"@noble/hashes": "^2.0.1", "@noble/hashes": "^2.0.1",
"@sveltejs/adapter-node": "^5.5.4", "@sveltejs/adapter-node": "^5.5.1",
"@sveltejs/kit": "^2.53.4", "@sveltejs/kit": "^2.49.5",
"@sveltejs/vite-plugin-svelte": "^6.2.4", "@sveltejs/vite-plugin-svelte": "^6.2.4",
"@tanstack/svelte-virtual": "^3.13.21", "@tanstack/svelte-virtual": "^3.13.18",
"@trpc/client": "^11.12.0", "@trpc/client": "^11.8.1",
"@types/file-saver": "^2.0.7", "@types/file-saver": "^2.0.7",
"@types/ms": "^0.7.34", "@types/ms": "^0.7.34",
"@types/node-schedule": "^2.1.8", "@types/node-schedule": "^2.1.8",
"@types/pg": "^8.18.0", "@types/pg": "^8.16.0",
"autoprefixer": "^10.4.27", "autoprefixer": "^10.4.23",
"axios": "^1.13.6", "axios": "^1.13.2",
"dexie": "^4.3.0", "dexie": "^4.2.1",
"es-hangul": "^2.3.8", "es-hangul": "^2.3.8",
"eslint": "^9.39.4", "eslint": "^9.39.2",
"eslint-config-prettier": "^10.1.8", "eslint-config-prettier": "^10.1.8",
"eslint-plugin-svelte": "^3.15.2", "eslint-plugin-svelte": "^3.14.0",
"eslint-plugin-tailwindcss": "^3.18.2", "eslint-plugin-tailwindcss": "^3.18.2",
"exifreader": "^4.36.2", "exifreader": "^4.36.0",
"file-saver": "^2.0.5", "file-saver": "^2.0.5",
"globals": "^17.4.0", "globals": "^17.0.0",
"heic2any": "^0.0.4", "heic2any": "^0.0.4",
"kysely-ctl": "^0.20.0", "kysely-ctl": "^0.20.0",
"lru-cache": "^11.2.6", "lru-cache": "^11.2.4",
"mime": "^4.1.0", "mime": "^4.1.0",
"p-limit": "^7.3.0", "p-limit": "^7.2.0",
"prettier": "^3.8.1", "prettier": "^3.8.0",
"prettier-plugin-svelte": "^3.5.1", "prettier-plugin-svelte": "^3.4.1",
"prettier-plugin-tailwindcss": "^0.7.2", "prettier-plugin-tailwindcss": "^0.7.2",
"svelte": "^5.53.9", "svelte": "^5.46.4",
"svelte-check": "^4.4.5", "svelte-check": "^4.3.5",
"tailwindcss": "^3.4.19", "tailwindcss": "^3.4.19",
"typescript": "^5.9.3", "typescript": "^5.9.3",
"typescript-eslint": "^8.57.0", "typescript-eslint": "^8.53.0",
"unplugin-icons": "^23.0.1", "unplugin-icons": "^23.0.1",
"vite": "^7.3.1" "vite": "^7.3.1"
}, },
"dependencies": { "dependencies": {
"@trpc/server": "^11.12.0", "@trpc/server": "^11.8.1",
"argon2": "^0.44.0", "argon2": "^0.44.0",
"kysely": "^0.28.11", "kysely": "^0.28.9",
"ms": "^2.1.3", "ms": "^2.1.3",
"node-schedule": "^2.1.1", "node-schedule": "^2.1.1",
"pg": "^8.20.0", "pg": "^8.17.1",
"superjson": "^2.2.6", "superjson": "^2.2.6",
"uuid": "^13.0.0", "uuid": "^13.0.0",
"zod": "^4.3.6" "zod": "^4.3.5"
}, },
"engines": { "engines": {
"node": "^22.0.0", "node": "^22.0.0",

1386
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -168,7 +168,7 @@ const requestFileUpload = limitFunction(
) => { ) => {
state.status = "uploading"; state.status = "uploading";
const { encContentHash } = await uploadBlob(uploadId, file, dataKey, { await uploadBlob(uploadId, file, dataKey, {
onProgress(s) { onProgress(s) {
state.progress = s.progress; state.progress = s.progress;
state.rate = s.rate; state.rate = s.rate;
@@ -178,7 +178,6 @@ const requestFileUpload = limitFunction(
const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
uploadId, uploadId,
contentHmac: fileSigned, contentHmac: fileSigned,
encContentHash,
}); });
if (thumbnailBuffer) { if (thumbnailBuffer) {

View File

@@ -34,7 +34,7 @@ export class FilesystemCache<K, V extends object> {
} }
(state.value (state.value
? Promise.resolve($state.snapshot(state.value) as V) ? Promise.resolve(state.value)
: this.options.fetchFromIndexedDB(key).then((loadedInfo) => { : this.options.fetchFromIndexedDB(key).then((loadedInfo) => {
if (loadedInfo) { if (loadedInfo) {
state.value = loadedInfo; state.value = loadedInfo;

View File

@@ -12,3 +12,11 @@ export const parseRangeHeader = (value: string | null) => {
export const getContentRangeHeader = (range?: { start: number; end: number; total: number }) => { export const getContentRangeHeader = (range?: { start: number; end: number; total: number }) => {
return range && { "Content-Range": `bytes ${range.start}-${range.end}/${range.total}` }; return range && { "Content-Range": `bytes ${range.start}-${range.end}/${range.total}` };
}; };
export const parseContentDigestHeader = (value: string | null) => {
if (!value) return undefined;
const firstDigest = value.split(",")[0]!.trim();
const match = firstDigest.match(/^sha-256=:([A-Za-z0-9+/=]+):$/);
return match?.[1];
};

View File

@@ -1,8 +1,7 @@
import { sha256 } from "@noble/hashes/sha2.js";
import axios from "axios"; import axios from "axios";
import pLimit from "p-limit"; import pLimit from "p-limit";
import { ENCRYPTION_OVERHEAD, CHUNK_SIZE } from "$lib/constants"; import { ENCRYPTION_OVERHEAD, CHUNK_SIZE } from "$lib/constants";
import { encodeToBase64, encryptChunk } from "$lib/modules/crypto"; import { encryptChunk, digestMessage, encodeToBase64 } from "$lib/modules/crypto";
import { BoundedQueue } from "$lib/utils"; import { BoundedQueue } from "$lib/utils";
interface UploadStats { interface UploadStats {
@@ -13,6 +12,7 @@ interface UploadStats {
interface EncryptedChunk { interface EncryptedChunk {
index: number; index: number;
data: ArrayBuffer; data: ArrayBuffer;
hash: string;
} }
const createSpeedMeter = (timeWindow = 3000, minInterval = 200, warmupPeriod = 500) => { const createSpeedMeter = (timeWindow = 3000, minInterval = 200, warmupPeriod = 500) => {
@@ -68,18 +68,27 @@ const createSpeedMeter = (timeWindow = 3000, minInterval = 200, warmupPeriod = 5
}; };
}; };
const encryptChunkData = async (chunk: Blob, dataKey: CryptoKey): Promise<ArrayBuffer> => { const encryptChunkData = async (
return await encryptChunk(await chunk.arrayBuffer(), dataKey); chunk: Blob,
dataKey: CryptoKey,
): Promise<{ data: ArrayBuffer; hash: string }> => {
const encrypted = await encryptChunk(await chunk.arrayBuffer(), dataKey);
const hash = encodeToBase64(await digestMessage(encrypted));
return { data: encrypted, hash };
}; };
const uploadEncryptedChunk = async ( const uploadEncryptedChunk = async (
uploadId: string, uploadId: string,
chunkIndex: number, chunkIndex: number,
encrypted: ArrayBuffer, encrypted: ArrayBuffer,
hash: string,
onChunkProgress: (chunkIndex: number, loaded: number) => void, onChunkProgress: (chunkIndex: number, loaded: number) => void,
) => { ) => {
await axios.post(`/api/upload/${uploadId}/chunks/${chunkIndex + 1}`, encrypted, { await axios.post(`/api/upload/${uploadId}/chunks/${chunkIndex + 1}`, encrypted, {
headers: { "Content-Type": "application/octet-stream" }, headers: {
"Content-Type": "application/octet-stream",
"Content-Digest": `sha-256=:${hash}:`,
},
onUploadProgress(e) { onUploadProgress(e) {
onChunkProgress(chunkIndex, e.loaded ?? 0); onChunkProgress(chunkIndex, e.loaded ?? 0);
}, },
@@ -103,7 +112,6 @@ export const uploadBlob = async (
const uploadedByChunk = new Array<number>(totalChunks).fill(0); const uploadedByChunk = new Array<number>(totalChunks).fill(0);
const speedMeter = createSpeedMeter(3000, 200); const speedMeter = createSpeedMeter(3000, 200);
const hash = sha256.create();
const emit = () => { const emit = () => {
if (!onProgress) return; if (!onProgress) return;
@@ -128,9 +136,8 @@ export const uploadBlob = async (
try { try {
for (let i = 0; i < totalChunks; i++) { for (let i = 0; i < totalChunks; i++) {
const chunk = blob.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE); const chunk = blob.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE);
const data = await encryptChunkData(chunk, dataKey); const { data, hash } = await encryptChunkData(chunk, dataKey);
hash.update(new Uint8Array(data)); await queue.push({ index: i, data, hash });
await queue.push({ index: i, data });
} }
} catch (e) { } catch (e) {
encryptionError = e instanceof Error ? e : new Error(String(e)); encryptionError = e instanceof Error ? e : new Error(String(e));
@@ -151,7 +158,7 @@ export const uploadBlob = async (
const task = limit(async () => { const task = limit(async () => {
try { try {
await uploadEncryptedChunk(uploadId, item.index, item.data, onChunkProgress); await uploadEncryptedChunk(uploadId, item.index, item.data, item.hash, onChunkProgress);
} finally { } finally {
// @ts-ignore // @ts-ignore
item.data = null; item.data = null;
@@ -173,5 +180,4 @@ export const uploadBlob = async (
await Promise.all([encryptionProducer(), uploadConsumer()]); await Promise.all([encryptionProducer(), uploadConsumer()]);
onProgress?.({ progress: 1, rate: speedMeter() }); onProgress?.({ progress: 1, rate: speedMeter() });
return { encContentHash: encodeToBase64(hash.digest()) };
}; };

View File

@@ -74,6 +74,28 @@ export const getAllDirectoriesByParent = async (userId: number, parentId: Direct
return directories.map(toDirectory); return directories.map(toDirectory);
}; };
export const getAllRecursiveDirectoriesByParent = async (userId: number, parentId: DirectoryId) => {
const directories = await db
.withRecursive("directory_tree", (db) =>
db
.selectFrom("directory")
.selectAll()
.$if(parentId === "root", (qb) => qb.where("parent_id", "is", null))
.$if(parentId !== "root", (qb) => qb.where("parent_id", "=", parentId as number))
.where("user_id", "=", userId)
.unionAll((db) =>
db
.selectFrom("directory")
.innerJoin("directory_tree", "directory.parent_id", "directory_tree.id")
.selectAll("directory"),
),
)
.selectFrom("directory_tree")
.selectAll()
.execute();
return directories.map(toDirectory);
};
export const getAllFavoriteDirectories = async (userId: number) => { export const getAllFavoriteDirectories = async (userId: number) => {
const directories = await db const directories = await db
.selectFrom("directory") .selectFrom("directory")
@@ -95,61 +117,6 @@ export const getDirectory = async (userId: number, directoryId: number) => {
return directory ? toDirectory(directory) : null; return directory ? toDirectory(directory) : null;
}; };
export const searchDirectories = async (
userId: number,
filters: {
parentId: DirectoryId;
inFavorites: boolean;
},
) => {
const directories = await db
.withRecursive("directory_tree", (db) =>
db
.selectFrom("directory")
.select("id")
.where("user_id", "=", userId)
.$if(filters.parentId === "root", (qb) => qb.where((eb) => eb.lit(false))) // directory_tree will be empty if parentId is "root"
.$if(filters.parentId !== "root", (qb) => qb.where("id", "=", filters.parentId as number))
.unionAll(
db
.selectFrom("directory as d")
.innerJoin("directory_tree as dt", "d.parent_id", "dt.id")
.select("d.id"),
),
)
.withRecursive("favorite_directory_tree", (db) =>
db
.selectFrom("directory")
.select("id")
.where("user_id", "=", userId)
.$if(!filters.inFavorites, (qb) => qb.where((eb) => eb.lit(false))) // favorite_directory_tree will be empty if inFavorites is false
.$if(filters.inFavorites, (qb) => qb.where("is_favorite", "=", true))
.unionAll((db) =>
db
.selectFrom("directory as d")
.innerJoin("favorite_directory_tree as dt", "d.parent_id", "dt.id")
.select("d.id"),
),
)
.selectFrom("directory")
.selectAll()
.where("user_id", "=", userId)
.$if(filters.parentId !== "root", (qb) =>
qb.where((eb) =>
eb.exists(eb.selectFrom("directory_tree as dt").whereRef("dt.id", "=", "parent_id")),
),
)
.$if(filters.inFavorites, (qb) =>
qb.where((eb) =>
eb.exists(
eb.selectFrom("favorite_directory_tree as dt").whereRef("dt.id", "=", "directory.id"),
),
),
)
.execute();
return directories.map(toDirectory);
};
export const setDirectoryEncName = async ( export const setDirectoryEncName = async (
userId: number, userId: number,
directoryId: number, directoryId: number,

View File

@@ -11,6 +11,7 @@ type IntegrityErrorMessages =
| "Directory already favorited" | "Directory already favorited"
| "Directory not favorited" | "Directory not favorited"
| "File not found" | "File not found"
| "File is not legacy"
| "File not found in category" | "File not found in category"
| "File already added to category" | "File already added to category"
| "File already favorited" | "File already favorited"

View File

@@ -141,6 +141,17 @@ export const getAllFileIds = async (userId: number) => {
return files.map(({ id }) => id); return files.map(({ id }) => id);
}; };
export const getLegacyFiles = async (userId: number, limit: number = 100) => {
const files = await db
.selectFrom("file")
.selectAll()
.where("user_id", "=", userId)
.where("encrypted_content_iv", "is not", null)
.limit(limit)
.execute();
return files.map(toFile);
};
export const getFilesWithoutThumbnail = async (userId: number, limit: number = 100) => { export const getFilesWithoutThumbnail = async (userId: number, limit: number = 100) => {
const files = await db const files = await db
.selectFrom("file") .selectFrom("file")
@@ -237,7 +248,6 @@ export const searchFiles = async (
userId: number, userId: number,
filters: { filters: {
parentId: DirectoryId; parentId: DirectoryId;
inFavorites: boolean;
includeCategoryIds: number[]; includeCategoryIds: number[];
excludeCategoryIds: number[]; excludeCategoryIds: number[];
}, },
@@ -248,7 +258,7 @@ export const searchFiles = async (
.selectFrom("directory") .selectFrom("directory")
.select("id") .select("id")
.where("user_id", "=", userId) .where("user_id", "=", userId)
.$if(filters.parentId === "root", (qb) => qb.where((eb) => eb.lit(false))) // directory_tree will be empty if parentId is "root" .where((eb) => eb.val(filters.parentId !== "root")) // directory_tree will be empty if parentId is "root"
.$if(filters.parentId !== "root", (qb) => qb.where("id", "=", filters.parentId as number)) .$if(filters.parentId !== "root", (qb) => qb.where("id", "=", filters.parentId as number))
.unionAll( .unionAll(
db db
@@ -257,20 +267,6 @@ export const searchFiles = async (
.select("d.id"), .select("d.id"),
), ),
) )
.withRecursive("favorite_directory_tree", (db) =>
db
.selectFrom("directory")
.select("id")
.where("user_id", "=", userId)
.$if(!filters.inFavorites, (qb) => qb.where((eb) => eb.lit(false))) // favorite_directory_tree will be empty if inFavorites is false
.$if(filters.inFavorites, (qb) => qb.where("is_favorite", "=", true))
.unionAll((db) =>
db
.selectFrom("directory as d")
.innerJoin("favorite_directory_tree as dt", "d.parent_id", "dt.id")
.select("d.id"),
),
)
.withRecursive("include_category_tree", (db) => .withRecursive("include_category_tree", (db) =>
db db
.selectFrom("category") .selectFrom("category")
@@ -299,31 +295,19 @@ export const searchFiles = async (
) )
.selectFrom("file") .selectFrom("file")
.selectAll("file") .selectAll("file")
.where("user_id", "=", userId) .$if(filters.parentId === "root", (qb) => qb.where("user_id", "=", userId)) // directory_tree isn't used if parentId is "root"
.$if(filters.parentId !== "root", (qb) => .$if(filters.parentId !== "root", (qb) =>
qb.where((eb) => qb.where("parent_id", "in", (eb) => eb.selectFrom("directory_tree").select("id")),
eb.exists(eb.selectFrom("directory_tree as dt").whereRef("dt.id", "=", "file.parent_id")),
),
) )
.$if(filters.inFavorites, (qb) => .where((eb) =>
qb.where((eb) => eb.not(
eb.or([ eb.exists(
eb("is_favorite", "=", true), eb
eb.exists( .selectFrom("file_category")
eb.selectFrom("favorite_directory_tree as dt").whereRef("dt.id", "=", "file.parent_id"), .whereRef("file_id", "=", "file.id")
), .where("category_id", "in", (eb) =>
]), eb.selectFrom("exclude_category_tree").select("id"),
), ),
)
.$if(filters.excludeCategoryIds.length > 0, (qb) =>
qb.where((eb) =>
eb.not(
eb.exists(
eb
.selectFrom("file_category")
.innerJoin("exclude_category_tree", "category_id", "exclude_category_tree.id")
.whereRef("file_id", "=", "file.id"),
),
), ),
), ),
); );
@@ -405,6 +389,51 @@ export const unregisterFile = async (userId: number, fileId: number) => {
}); });
}; };
export const migrateFileContent = async (
trx: typeof db,
userId: number,
fileId: number,
newPath: string,
dekVersion: Date,
encContentHash: string,
) => {
const file = await trx
.selectFrom("file")
.select(["path", "data_encryption_key_version", "encrypted_content_iv"])
.where("id", "=", fileId)
.where("user_id", "=", userId)
.limit(1)
.forUpdate()
.executeTakeFirst();
if (!file) {
throw new IntegrityError("File not found");
} else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) {
throw new IntegrityError("Invalid DEK version");
} else if (!file.encrypted_content_iv) {
throw new IntegrityError("File is not legacy");
}
await trx
.updateTable("file")
.set({
path: newPath,
encrypted_content_iv: null,
encrypted_content_hash: encContentHash,
})
.where("id", "=", fileId)
.where("user_id", "=", userId)
.execute();
await trx
.insertInto("file_log")
.values({
file_id: fileId,
timestamp: new Date(),
action: "migrate",
})
.execute();
return { oldPath: file.path };
};
export const addFileToCategory = async (fileId: number, categoryId: number) => { export const addFileToCategory = async (fileId: number, categoryId: number) => {
await db.transaction().execute(async (trx) => { await db.transaction().execute(async (trx) => {
try { try {

View File

@@ -3,7 +3,7 @@ import type { Ciphertext } from "./utils";
export interface UploadSessionTable { export interface UploadSessionTable {
id: string; id: string;
type: "file" | "thumbnail"; type: "file" | "thumbnail" | "migration";
user_id: number; user_id: number;
path: string; path: string;
bitmap: Buffer; bitmap: Buffer;

View File

@@ -26,8 +26,8 @@ interface FileUploadSession extends BaseUploadSession {
encLastModifiedAt: Ciphertext; encLastModifiedAt: Ciphertext;
} }
interface ThumbnailUploadSession extends BaseUploadSession { interface ThumbnailOrMigrationUploadSession extends BaseUploadSession {
type: "thumbnail"; type: "thumbnail" | "migration";
fileId: number; fileId: number;
dekVersion: Date; dekVersion: Date;
} }
@@ -86,8 +86,8 @@ export const createFileUploadSession = async (
}); });
}; };
export const createThumbnailUploadSession = async ( export const createThumbnailOrMigrationUploadSession = async (
params: Omit<ThumbnailUploadSession, "bitmap" | "uploadedChunks">, params: Omit<ThumbnailOrMigrationUploadSession, "bitmap" | "uploadedChunks">,
) => { ) => {
await db.transaction().execute(async (trx) => { await db.transaction().execute(async (trx) => {
const file = await trx const file = await trx
@@ -164,7 +164,7 @@ export const getUploadSession = async (sessionId: string, userId: number) => {
expiresAt: session.expires_at, expiresAt: session.expires_at,
fileId: session.file_id!, fileId: session.file_id!,
dekVersion: session.data_encryption_key_version!, dekVersion: session.data_encryption_key_version!,
} satisfies ThumbnailUploadSession; } satisfies ThumbnailOrMigrationUploadSession;
} }
}; };

View File

@@ -26,4 +26,5 @@ export default {
}, },
libraryPath: env.LIBRARY_PATH || "library", libraryPath: env.LIBRARY_PATH || "library",
thumbnailsPath: env.THUMBNAILS_PATH || "thumbnails", thumbnailsPath: env.THUMBNAILS_PATH || "thumbnails",
uploadsPath: env.UPLOADS_PATH || "uploads",
}; };

View File

@@ -1,4 +1,10 @@
import { unlink } from "fs/promises"; import { rm, unlink } from "fs/promises";
export const safeRecursiveRm = async (path: string | null | undefined) => {
if (path) {
await rm(path, { recursive: true }).catch(console.error);
}
};
export const safeUnlink = async (path: string | null | undefined) => { export const safeUnlink = async (path: string | null | undefined) => {
if (path) { if (path) {

View File

@@ -1,9 +1,10 @@
import { error } from "@sveltejs/kit"; import { error } from "@sveltejs/kit";
import { open } from "fs/promises"; import { createHash } from "crypto";
import { createWriteStream } from "fs";
import { Readable } from "stream"; import { Readable } from "stream";
import { ENCRYPTION_OVERHEAD, ENCRYPTED_CHUNK_SIZE } from "$lib/constants"; import { ENCRYPTION_OVERHEAD, ENCRYPTED_CHUNK_SIZE } from "$lib/constants";
import { UploadRepo } from "$lib/server/db"; import { UploadRepo } from "$lib/server/db";
import { safeUnlink } from "$lib/server/modules/filesystem"; import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem";
const chunkLocks = new Set<string>(); const chunkLocks = new Set<string>();
@@ -13,61 +14,12 @@ const isChunkUploaded = (bitmap: Buffer, chunkIndex: number) => {
return !!byte && (byte & (1 << (chunkIndex % 8))) !== 0; // Postgres sucks return !!byte && (byte & (1 << (chunkIndex % 8))) !== 0; // Postgres sucks
}; };
const writeChunkAtOffset = async (
path: string,
encChunkStream: Readable,
chunkIndex: number,
isLastChunk: boolean,
) => {
const offset = (chunkIndex - 1) * ENCRYPTED_CHUNK_SIZE;
const file = await open(path, "r+");
let written = 0;
try {
for await (const chunk of encChunkStream) {
const buffer = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
written += buffer.length;
if (written > ENCRYPTED_CHUNK_SIZE) {
throw new Error("Invalid chunk size");
}
let chunkOffset = 0;
while (chunkOffset < buffer.length) {
const { bytesWritten } = await file.write(
buffer,
chunkOffset,
buffer.length - chunkOffset,
offset + written - buffer.length + chunkOffset,
);
if (bytesWritten <= 0) {
throw new Error("Failed to write chunk");
}
chunkOffset += bytesWritten;
}
}
if (
(!isLastChunk && written !== ENCRYPTED_CHUNK_SIZE) ||
(isLastChunk && (written <= ENCRYPTION_OVERHEAD || written > ENCRYPTED_CHUNK_SIZE))
) {
throw new Error("Invalid chunk size");
}
if (isLastChunk) {
await file.truncate(offset + written);
}
return written;
} finally {
await file.close();
}
};
export const uploadChunk = async ( export const uploadChunk = async (
userId: number, userId: number,
sessionId: string, sessionId: string,
chunkIndex: number, chunkIndex: number,
encChunkStream: Readable, encChunkStream: Readable,
encChunkHash: string,
) => { ) => {
const lockKey = `${sessionId}/${chunkIndex}`; const lockKey = `${sessionId}/${chunkIndex}`;
if (chunkLocks.has(lockKey)) { if (chunkLocks.has(lockKey)) {
@@ -76,6 +28,8 @@ export const uploadChunk = async (
chunkLocks.add(lockKey); chunkLocks.add(lockKey);
} }
let filePath;
try { try {
const session = await UploadRepo.getUploadSession(sessionId, userId); const session = await UploadRepo.getUploadSession(sessionId, userId);
if (!session) { if (!session) {
@@ -87,10 +41,39 @@ export const uploadChunk = async (
} }
const isLastChunk = chunkIndex === session.totalChunks; const isLastChunk = chunkIndex === session.totalChunks;
await writeChunkAtOffset(session.path, encChunkStream, chunkIndex, isLastChunk); filePath = `${session.path}/${chunkIndex}`;
const hashStream = createHash("sha256");
const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
let writtenBytes = 0;
for await (const chunk of encChunkStream) {
hashStream.update(chunk);
writeStream.write(chunk);
writtenBytes += chunk.length;
}
await new Promise<void>((resolve, reject) => {
writeStream.end((e: any) => (e ? reject(e) : resolve()));
});
if (hashStream.digest("base64") !== encChunkHash) {
throw new Error("Invalid checksum");
} else if (
(!isLastChunk && writtenBytes !== ENCRYPTED_CHUNK_SIZE) ||
(isLastChunk && (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > ENCRYPTED_CHUNK_SIZE))
) {
throw new Error("Invalid chunk size");
}
await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex); await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex);
} catch (e) { } catch (e) {
if (e instanceof Error && e.message === "Invalid chunk size") { await safeUnlink(filePath);
if (
e instanceof Error &&
(e.message === "Invalid checksum" || e.message === "Invalid chunk size")
) {
error(400, "Invalid request body"); error(400, "Invalid request body");
} }
throw e; throw e;
@@ -101,5 +84,5 @@ export const uploadChunk = async (
export const cleanupExpiredUploadSessions = async () => { export const cleanupExpiredUploadSessions = async () => {
const paths = await UploadRepo.cleanupExpiredUploadSessions(); const paths = await UploadRepo.cleanupExpiredUploadSessions();
await Promise.all(paths.map(safeUnlink)); await Promise.all(paths.map(safeRecursiveRm));
}; };

View File

@@ -46,7 +46,7 @@
{#if isOpen && (directoryId || downloadUrl || fileBlob)} {#if isOpen && (directoryId || downloadUrl || fileBlob)}
<div <div
class="absolute right-2 top-full z-20 min-w-44 space-y-1 rounded-lg bg-white px-1 py-2 shadow-2xl" class="absolute right-2 top-full z-20 min-w-40 space-y-1 rounded-lg bg-white px-1 py-2 shadow-2xl"
transition:fly={{ y: -8, duration: 200 }} transition:fly={{ y: -8, duration: 200 }}
> >
<p class="px-3 pt-2 text-sm font-semibold text-gray-600">더보기</p> <p class="px-3 pt-2 text-sm font-semibold text-gray-600">더보기</p>
@@ -57,9 +57,7 @@
onclick: () => void, onclick: () => void,
)} )}
<button {onclick} class="rounded-xl active:bg-gray-100"> <button {onclick} class="rounded-xl active:bg-gray-100">
<div <div class="flex items-center gap-x-3 px-3 py-2 text-gray-700 transition active:scale-95">
class="flex items-center gap-x-3 px-3 py-2 text-lg text-gray-700 transition active:scale-95"
>
<Icon /> <Icon />
<p class="font-medium">{text}</p> <p class="font-medium">{text}</p>
</div> </div>

View File

@@ -37,8 +37,8 @@
includeImages: false, includeImages: false,
includeVideos: false, includeVideos: false,
includeDirectories: false, includeDirectories: false,
searchInDirectory: false,
searchInFavorites: false, searchInFavorites: false,
searchInDirectory: false,
categories: [], categories: [],
}); });
let hasCategoryFilter = $derived(filters.categories.length > 0); let hasCategoryFilter = $derived(filters.categories.length > 0);
@@ -47,6 +47,7 @@
filters.includeImages || filters.includeImages ||
filters.includeVideos || filters.includeVideos ||
filters.includeDirectories || filters.includeDirectories ||
filters.searchInFavorites ||
filters.name.trim().length > 0, filters.name.trim().length > 0,
); );
@@ -83,7 +84,9 @@
return sortEntries( return sortEntries(
[...directories, ...files].filter( [...directories, ...files].filter(
(entry) => !nameFilter || searchString(entry.name, nameFilter), (entry) =>
(!nameFilter || searchString(entry.name, nameFilter)) &&
(!filters.searchInFavorites || entry.isFavorite),
), ),
); );
}); });
@@ -142,7 +145,6 @@
// Svelte sucks // Svelte sucks
hasAnyFilter; hasAnyFilter;
filters.searchInDirectory; filters.searchInDirectory;
filters.searchInFavorites;
filters.categories.length; filters.categories.length;
if (untrack(() => isRestoredFromSnapshot)) { if (untrack(() => isRestoredFromSnapshot)) {
@@ -154,7 +156,6 @@
requestSearch( requestSearch(
{ {
ancestorId: filters.searchInDirectory ? data.directoryId! : "root", ancestorId: filters.searchInDirectory ? data.directoryId! : "root",
inFavorites: filters.searchInFavorites,
categories: filters.categories, categories: filters.categories,
}, },
$masterKeyStore?.get(1)?.key!, $masterKeyStore?.get(1)?.key!,
@@ -215,12 +216,7 @@
</div> </div>
{#if hasAnyFilter} {#if hasAnyFilter}
<div class="flex flex-grow flex-col space-y-2 bg-white p-4"> <div class="flex flex-grow flex-col space-y-2 bg-white p-4">
<p class="text-lg font-bold text-gray-800"> <p class="text-lg font-bold text-gray-800">검색 결과</p>
검색 결과
{#if result.length > 0}
{" "}({result.length}개)
{/if}
</p>
{#if result.length > 0} {#if result.length > 0}
<RowVirtualizer <RowVirtualizer
count={result.length} count={result.length}

View File

@@ -1,14 +1,17 @@
<script lang="ts"> <script lang="ts">
import type { ClassValue } from "svelte/elements";
import IconArrowBack from "~icons/material-symbols/arrow-back"; import IconArrowBack from "~icons/material-symbols/arrow-back";
interface Props { interface Props {
class?: ClassValue;
value: string; value: string;
} }
let { value = $bindable() }: Props = $props(); let { class: className, value = $bindable() }: Props = $props();
</script> </script>
<div class={"sticky top-0 z-10 flex items-center gap-x-2 px-2 py-3 backdrop-blur-2xl"}> <div class={["sticky top-0 z-10 flex items-center gap-x-2 px-2 py-3 backdrop-blur-2xl", className]}>
<button <button
onclick={() => history.back()} onclick={() => history.back()}
class="w-[2.3rem] flex-shrink-0 rounded-full p-1 active:bg-black active:bg-opacity-[0.04]" class="w-[2.3rem] flex-shrink-0 rounded-full p-1 active:bg-black active:bg-opacity-[0.04]"
@@ -19,6 +22,6 @@
bind:value bind:value
type="text" type="text"
placeholder="검색" placeholder="검색"
class="mr-1 h-[2.3rem] min-w-0 flex-grow rounded-lg bg-gray-100 px-3 py-1.5 placeholder-gray-600 focus:outline-none" class="mr-1 h-[2.3rem] flex-grow rounded-lg bg-gray-100 px-3 py-1.5 placeholder-gray-600 focus:outline-none"
/> />
</div> </div>

View File

@@ -10,7 +10,6 @@ import { trpc } from "$trpc/client";
export interface SearchFilter { export interface SearchFilter {
ancestorId: DirectoryId; ancestorId: DirectoryId;
inFavorites: boolean;
categories: { info: LocalCategoryInfo; type: "include" | "exclude" }[]; categories: { info: LocalCategoryInfo; type: "include" | "exclude" }[];
} }
@@ -22,7 +21,6 @@ export interface SearchResult {
export const requestSearch = async (filter: SearchFilter, masterKey: CryptoKey) => { export const requestSearch = async (filter: SearchFilter, masterKey: CryptoKey) => {
const { directories: directoriesRaw, files: filesRaw } = await trpc().search.search.query({ const { directories: directoriesRaw, files: filesRaw } = await trpc().search.search.query({
ancestor: filter.ancestorId, ancestor: filter.ancestorId,
inFavorites: filter.inFavorites,
includeCategories: filter.categories includeCategories: filter.categories
.filter(({ type }) => type === "include") .filter(({ type }) => type === "include")
.map(({ info }) => info.id), .map(({ info }) => info.id),

View File

@@ -0,0 +1,7 @@
import { createCaller } from "$trpc/router.server";
import type { PageServerLoad } from "./$types";
/**
 * Server-side loader for the encryption-migration page: fetches the
 * current user's legacy (not-yet-re-encrypted) files via the tRPC caller.
 */
export const load: PageServerLoad = async (event) => {
  const caller = createCaller(event);
  return { files: await caller.file.listLegacy() };
};

View File

@@ -0,0 +1,82 @@
<script lang="ts">
  import { onMount } from "svelte";
  import { goto } from "$app/navigation";
  import { BottomDiv, Button, FullscreenDiv } from "$lib/components/atoms";
  import { TopBar } from "$lib/components/molecules";
  import type { MaybeFileInfo } from "$lib/modules/filesystem";
  import { masterKeyStore } from "$lib/stores";
  import { sortEntries } from "$lib/utils";
  import File from "./File.svelte";
  import {
    getMigrationState,
    clearMigrationStates,
    requestLegacyFiles,
    requestFileMigration,
  } from "./service.svelte";

  // `data.files` comes from +page.server.ts (file.listLegacy tRPC query).
  let { data } = $props();

  // Decrypted file metadata; populated asynchronously in onMount below.
  let fileInfos: MaybeFileInfo[] = $state([]);

  // Pair each file with its (reactive) migration state and hide files that
  // have already finished migrating ("uploaded").
  let files = $derived(
    fileInfos
      .map((info) => ({
        info,
        state: getMigrationState(info.id),
      }))
      .filter((file) => file.state?.status !== "uploaded"),
  );

  // Queue every still-visible, existing file for migration. Note the
  // returned promise of requestFileMigration is intentionally not awaited;
  // failures surface through the per-file state ("error"), though the
  // rejection itself goes unhandled — NOTE(review): consider a .catch here.
  const migrateAllFiles = () => {
    files.forEach(({ info }) => {
      if (info.exists) {
        requestFileMigration(info);
      }
    });
  };

  onMount(async () => {
    // Decrypt the legacy file list with MEK version 1 and sort for display.
    // The `!` assumes the master key is present once this page is reachable
    // — TODO confirm the route guard guarantees this.
    fileInfos = sortEntries(await requestLegacyFiles(data.files, $masterKeyStore?.get(1)?.key!));
  });

  // The effect body returns `clearMigrationStates`, so it runs as the
  // effect's teardown: terminal states are pruned when the page unmounts.
  $effect(() => clearMigrationStates);
</script>

<svelte:head>
  <title>암호화 마이그레이션</title>
</svelte:head>

<TopBar title="암호화 마이그레이션" />
<FullscreenDiv>
  {#if files.length > 0}
    <div class="space-y-4 pb-4">
      <p class="break-keep text-gray-800">
        이전 버전의 ArkVault에서 업로드된 {files.length}개 파일을 다시 암호화할 수 있어요.
      </p>
      <div class="space-y-2">
        {#each files as { info, state } (info.id)}
          {#if info.exists}
            <File
              {info}
              {state}
              onclick={({ id }) => goto(`/file/${id}`)}
              onMigrateClick={requestFileMigration}
            />
          {/if}
        {/each}
      </div>
    </div>
    <BottomDiv>
      <Button onclick={migrateAllFiles} class="w-full">모두 다시 암호화하기</Button>
    </BottomDiv>
  {:else}
    <!-- Empty/loading placeholder: distinguishes "nothing to migrate" from
         "still decrypting the list" by whether the server sent any files. -->
    <div class="flex flex-grow items-center justify-center">
      <p class="text-gray-500">
        {#if data.files.length === 0}
          마이그레이션할 파일이 없어요.
        {:else}
          파일 목록을 불러오고 있어요.
        {/if}
      </p>
    </div>
  {/if}
</FullscreenDiv>

View File

@@ -0,0 +1,52 @@
<script module lang="ts">
  // Static subtext per migration status; "uploading" is handled separately
  // below because it interpolates live progress. Shared across instances.
  const subtexts = {
    queued: "대기 중",
    downloading: "다운로드하는 중",
    "upload-pending": "업로드를 기다리는 중",
    uploaded: "",
    error: "실패",
  } as const;
</script>

<script lang="ts">
  import { ActionEntryButton } from "$lib/components/atoms";
  import { DirectoryEntryLabel } from "$lib/components/molecules";
  import type { FileInfo } from "$lib/modules/filesystem";
  import { formatDateTime, formatNetworkSpeed } from "$lib/utils";
  import type { MigrationState } from "./service.svelte";

  import IconSync from "~icons/material-symbols/sync";

  // Narrowing: this row is only rendered for files that actually exist.
  type FileInfoWithExists = FileInfo & { exists: true };

  interface Props {
    info: FileInfoWithExists;
    // Invoked when the row itself is clicked (navigate to the file).
    onclick: (file: FileInfo) => void;
    // Invoked when the sync action button is clicked (start/retry migration).
    onMigrateClick: (file: FileInfoWithExists) => void;
    // Undefined until a migration has been requested for this file.
    state: MigrationState | undefined;
  }

  let { info, onclick, onMigrateClick, state }: Props = $props();

  // Secondary label: timestamp when idle, live progress while uploading,
  // otherwise the static status text.
  let subtext = $derived.by(() => {
    if (!state) {
      return formatDateTime(info.createdAt ?? info.lastModifiedAt);
    }
    if (state.status === "uploading") {
      const progress = Math.floor((state.progress ?? 0) * 100);
      // `* 8` suggests rate is tracked in bytes/s and shown in bits/s —
      // TODO confirm against formatNetworkSpeed's expected unit.
      const speed = formatNetworkSpeed((state.rate ?? 0) * 8);
      return `전송됨 ${progress}% · ${speed}`;
    }
    return subtexts[state.status] ?? state.status;
  });
</script>

<!-- Sync icon only when migration hasn't started or has failed (retry). -->
<ActionEntryButton
  class="h-14"
  onclick={() => onclick(info)}
  actionButtonIcon={!state || state.status === "error" ? IconSync : undefined}
  onActionButtonClick={() => onMigrateClick(info)}
  actionButtonClass="text-gray-800"
>
  <DirectoryEntryLabel type="file" name={info.name} {subtext} />
</ActionEntryButton>

View File

@@ -0,0 +1,116 @@
import { limitFunction } from "p-limit";
import { SvelteMap } from "svelte/reactivity";
import { CHUNK_SIZE } from "$lib/constants";
import { getFileInfo, type FileInfo } from "$lib/modules/filesystem";
import { uploadBlob } from "$lib/modules/upload";
import { requestFileDownload } from "$lib/services/file";
import { HybridPromise, Scheduler } from "$lib/utils";
import { trpc } from "$trpc/client";
import type { RouterOutputs } from "$trpc/router.server";
// Lifecycle of a single file migration, in chronological order; "error" is
// reachable from any non-terminal status.
export type MigrationStatus =
  | "queued"
  | "downloading"
  | "upload-pending"
  | "uploading"
  | "uploaded"
  | "error";

export interface MigrationState {
  status: MigrationStatus;
  // Upload completion in [0, 1]; only meaningful while status === "uploading".
  progress?: number;
  // Transfer rate reported by the upload progress callback — presumably
  // bytes/s (the UI multiplies by 8 before formatting); confirm.
  rate?: number;
}

// Pipelines the download stage against the serialized upload stage.
const scheduler = new Scheduler();
// fileId -> reactive migration state; SvelteMap so additions/removals are
// observed by components (entries persist across page navigations).
const states = new SvelteMap<number, MigrationState>();
/**
 * Decrypts the raw legacy-file list returned by the `file.listLegacy` tRPC
 * endpoint into client-side file info objects, resolving all of them in
 * parallel via HybridPromise.all.
 */
export const requestLegacyFiles = async (
  filesRaw: RouterOutputs["file"]["listLegacy"],
  masterKey: CryptoKey,
) => {
  const infoPromises = filesRaw.map((raw) =>
    getFileInfo(raw.id, masterKey, { serverResponse: raw }),
  );
  return await HybridPromise.all(infoPromises);
};
// Wraps the initial status in a Svelte 5 `$state` rune so later mutations
// (status/progress/rate) are deeply reactive in consuming components.
// The rune only compiles because this module is a `.svelte.ts` file.
const createState = (status: MigrationStatus): MigrationState => {
  const state = $state({ status });
  return state;
};
/** Looks up the reactive migration state for a file, if one exists. */
export const getMigrationState = (fileId: number) => states.get(fileId);
export const clearMigrationStates = () => {
for (const [id, state] of states) {
if (state.status === "uploaded" || state.status === "error") {
states.delete(id);
}
}
};
// Upload stage of a migration. `limitFunction` from p-limit with
// concurrency 1 queues concurrent calls so at most one file's ciphertext
// is being uploaded at any time.
const requestFileUpload = limitFunction(
  async (
    state: MigrationState,
    fileId: number,
    fileBuffer: ArrayBuffer,
    dataKey: CryptoKey,
    dataKeyVersion: Date,
  ) => {
    state.status = "uploading";

    // Open a migration upload session sized in CHUNK_SIZE chunks; the
    // server validates the DEK version against the file record.
    const { uploadId } = await trpc().upload.startMigrationUpload.mutate({
      file: fileId,
      chunks: Math.ceil(fileBuffer.byteLength / CHUNK_SIZE),
      dekVersion: dataKeyVersion,
    });
    await uploadBlob(uploadId, new Blob([fileBuffer]), dataKey, {
      onProgress(s) {
        // Mirrored into the reactive state so File.svelte can render
        // live progress for this row.
        state.progress = s.progress;
        state.rate = s.rate;
      },
    });
    // Finalize: server reassembles chunks and swaps the file content.
    await trpc().upload.completeMigrationUpload.mutate({ uploadId });
    state.status = "uploaded";
  },
  { concurrency: 1 },
);
/**
 * Queues a full migration (download, then re-encrypting upload) for one
 * legacy file. Calling it again is a no-op unless the previous attempt
 * ended in "error", in which case the state is reset and retried.
 * On failure the state is marked "error" and the error is rethrown.
 */
export const requestFileMigration = async (fileInfo: FileInfo) => {
  let state = states.get(fileInfo.id);
  if (state) {
    // Only failed migrations may be retried; anything queued, in flight,
    // or already uploaded is left untouched.
    if (state.status !== "error") return;
    state.status = "queued";
    state.progress = undefined;
    state.rate = undefined;
  } else {
    state = createState("queued");
    states.set(fileInfo.id, state);
  }

  try {
    const dataKey = fileInfo.dataKey;
    if (!dataKey) {
      throw new Error("Data key not available");
    }

    let fileBuffer: ArrayBuffer | undefined;
    // Scheduler appears to pipeline the download stage against the
    // (separately rate-limited) upload stage; the download callback
    // returns its byte length, presumably for scheduling decisions —
    // NOTE(review): exact semantics live in $lib/utils, confirm there.
    await scheduler.schedule(
      async () => {
        state.status = "downloading";
        fileBuffer = await requestFileDownload(fileInfo.id, dataKey.key, true);
        return fileBuffer.byteLength;
      },
      // fileBuffer is set by the time the second stage runs, hence the `!`.
      () => requestFileUpload(state, fileInfo.id, fileBuffer!, dataKey.key, dataKey.version),
    );
  } catch (e) {
    state.status = "error";
    throw e;
  }
};

View File

@@ -3,7 +3,6 @@
import { DirectoryEntryLabel } from "$lib/components/molecules"; import { DirectoryEntryLabel } from "$lib/components/molecules";
import { getFileThumbnail } from "$lib/modules/file"; import { getFileThumbnail } from "$lib/modules/file";
import type { CategoryFileInfo } from "$lib/modules/filesystem"; import type { CategoryFileInfo } from "$lib/modules/filesystem";
import { formatDateTime } from "$lib/utils";
import type { SelectedFile } from "./service.svelte"; import type { SelectedFile } from "./service.svelte";
import IconClose from "~icons/material-symbols/close"; import IconClose from "~icons/material-symbols/close";
@@ -20,7 +19,7 @@
</script> </script>
<ActionEntryButton <ActionEntryButton
class="h-14" class="h-12"
onclick={() => onclick(info)} onclick={() => onclick(info)}
actionButtonIcon={onRemoveClick && IconClose} actionButtonIcon={onRemoveClick && IconClose}
onActionButtonClick={() => onRemoveClick?.(info)} onActionButtonClick={() => onRemoveClick?.(info)}
@@ -29,7 +28,6 @@
type="file" type="file"
thumbnail={$thumbnail} thumbnail={$thumbnail}
name={info.name} name={info.name}
subtext={formatDateTime(info.createdAt ?? info.lastModifiedAt)}
isFavorite={info.isFavorite} isFavorite={info.isFavorite}
/> />
</ActionEntryButton> </ActionEntryButton>

View File

@@ -103,7 +103,7 @@
</script> </script>
<svelte:head> <svelte:head>
<title>파일</title> <title>파일</title>
</svelte:head> </svelte:head>
<input bind:this={fileInput} onchange={uploadFile} type="file" multiple class="hidden" /> <input bind:this={fileInput} onchange={uploadFile} type="file" multiple class="hidden" />

View File

@@ -44,50 +44,52 @@
<title>즐겨찾기</title> <title>즐겨찾기</title>
</svelte:head> </svelte:head>
<div class="flex h-full flex-col"> <TopBar title="즐겨찾기" showBackButton={false}>
<TopBar title="즐겨찾기" showBackButton={false}> <button
<button onclick={() => goto("/search?from=favorites")}
onclick={() => goto("/search?from=favorites")} class="w-full rounded-full p-1 text-2xl active:bg-black active:bg-opacity-[0.04]"
class="w-full rounded-full p-1 text-2xl active:bg-black active:bg-opacity-[0.04]" >
<IconSearch />
</button>
</TopBar>
<div class="flex h-full flex-col p-4 !pt-0">
{#if isLoading}
<div class="flex flex-grow items-center justify-center">
<p class="text-gray-500">
{#if data.favorites.files.length === 0 && data.favorites.directories.length === 0}
즐겨찾기한 항목이 없어요.
{:else}
즐겨찾기 목록을 불러오고 있어요.
{/if}
</p>
</div>
{:else if entries.length === 0}
<div class="flex flex-grow items-center justify-center">
<p class="text-gray-500">즐겨찾기한 항목이 없어요.</p>
</div>
{:else}
<RowVirtualizer
count={entries.length}
getItemKey={(index) => `${entries[index]!.type}-${entries[index]!.details.id}`}
estimateItemHeight={() => 56}
itemGap={4}
> >
<IconSearch /> {#snippet item(index)}
</button> {@const entry = entries[index]!}
</TopBar> {#if entry.type === "directory"}
<div class="flex flex-grow flex-col px-4 pb-4"> <Directory
{#if entries.length > 0} info={entry.details}
<RowVirtualizer onclick={() => handleClick(entry)}
count={entries.length} onRemoveClick={() => handleRemove(entry)}
getItemKey={(index) => `${entries[index]!.type}-${entries[index]!.details.id}`} />
estimateItemHeight={() => 56} {:else}
itemGap={4} <File
> info={entry.details}
{#snippet item(index)} onclick={() => handleClick(entry)}
{@const entry = entries[index]!} onRemoveClick={() => handleRemove(entry)}
{#if entry.type === "directory"} />
<Directory {/if}
info={entry.details} {/snippet}
onclick={() => handleClick(entry)} </RowVirtualizer>
onRemoveClick={() => handleRemove(entry)} {/if}
/>
{:else}
<File
info={entry.details}
onclick={() => handleClick(entry)}
onRemoveClick={() => handleRemove(entry)}
/>
{/if}
{/snippet}
</RowVirtualizer>
{:else}
<div class="flex flex-grow items-center justify-center">
<p class="text-gray-500">
{#if isLoading}
즐겨찾기 목록을 불러오고 있어요.
{:else}
즐겨찾기한 항목이 없어요.
{/if}
</p>
</div>
{/if}
</div>
</div> </div>

View File

@@ -3,7 +3,6 @@
import { DirectoryEntryLabel } from "$lib/components/molecules"; import { DirectoryEntryLabel } from "$lib/components/molecules";
import { getFileThumbnail } from "$lib/modules/file"; import { getFileThumbnail } from "$lib/modules/file";
import type { SummarizedFileInfo } from "$lib/modules/filesystem"; import type { SummarizedFileInfo } from "$lib/modules/filesystem";
import { formatDateTime } from "$lib/utils";
import IconClose from "~icons/material-symbols/close"; import IconClose from "~icons/material-symbols/close";
@@ -24,11 +23,5 @@
actionButtonIcon={IconClose} actionButtonIcon={IconClose}
onActionButtonClick={onRemoveClick} onActionButtonClick={onRemoveClick}
> >
<DirectoryEntryLabel <DirectoryEntryLabel type="file" thumbnail={$thumbnail} name={info.name} isFavorite />
type="file"
thumbnail={$thumbnail}
name={info.name}
subtext={formatDateTime(info.createdAt ?? info.lastModifiedAt)}
isFavorite
/>
</ActionEntryButton> </ActionEntryButton>

View File

@@ -5,6 +5,7 @@
import IconStorage from "~icons/material-symbols/storage"; import IconStorage from "~icons/material-symbols/storage";
import IconImage from "~icons/material-symbols/image"; import IconImage from "~icons/material-symbols/image";
import IconLockReset from "~icons/material-symbols/lock-reset";
import IconPassword from "~icons/material-symbols/password"; import IconPassword from "~icons/material-symbols/password";
import IconLogout from "~icons/material-symbols/logout"; import IconLogout from "~icons/material-symbols/logout";
@@ -41,6 +42,13 @@
> >
썸네일 썸네일
</MenuEntryButton> </MenuEntryButton>
<MenuEntryButton
onclick={() => goto("/settings/migration")}
icon={IconLockReset}
iconColor="text-teal-500"
>
암호화 마이그레이션
</MenuEntryButton>
</div> </div>
<div class="space-y-2"> <div class="space-y-2">
<p class="font-semibold">보안</p> <p class="font-semibold">보안</p>

View File

@@ -2,6 +2,7 @@ import { error, text } from "@sveltejs/kit";
import { Readable } from "stream"; import { Readable } from "stream";
import type { ReadableStream } from "stream/web"; import type { ReadableStream } from "stream/web";
import { z } from "zod"; import { z } from "zod";
import { parseContentDigestHeader } from "$lib/modules/http";
import { authorize } from "$lib/server/modules/auth"; import { authorize } from "$lib/server/modules/auth";
import { uploadChunk } from "$lib/server/services/upload"; import { uploadChunk } from "$lib/server/services/upload";
import type { RequestHandler } from "./$types"; import type { RequestHandler } from "./$types";
@@ -18,7 +19,10 @@ export const POST: RequestHandler = async ({ locals, params, request }) => {
if (!zodRes.success) error(400, "Invalid path parameters"); if (!zodRes.success) error(400, "Invalid path parameters");
const { id: sessionId, index: chunkIndex } = zodRes.data; const { id: sessionId, index: chunkIndex } = zodRes.data;
if (!request.body) { const encContentHash = parseContentDigestHeader(request.headers.get("Content-Digest"));
if (!encContentHash) {
error(400, "Invalid request headers");
} else if (!request.body) {
error(400, "Invalid request body"); error(400, "Invalid request body");
} }
@@ -27,6 +31,7 @@ export const POST: RequestHandler = async ({ locals, params, request }) => {
sessionId, sessionId,
chunkIndex, chunkIndex,
Readable.fromWeb(request.body as ReadableStream), Readable.fromWeb(request.body as ReadableStream),
encContentHash,
); );
return text("Chunk uploaded", { headers: { "Content-Type": "text/plain" } }); return text("Chunk uploaded", { headers: { "Content-Type": "text/plain" } });
}; };

View File

@@ -118,6 +118,26 @@ const fileRouter = router({
})); }));
}), }),
listLegacy: roleProcedure["activeClient"].query(async ({ ctx }) => {
const files = await FileRepo.getLegacyFiles(ctx.session.userId);
return files.map((file) => ({
id: file.id,
isLegacy: true,
parent: file.parentId,
mekVersion: file.mekVersion,
dek: file.encDek,
dekVersion: file.dekVersion,
contentType: file.contentType,
name: file.encName.ciphertext,
nameIv: file.encName.iv,
createdAt: file.encCreatedAt?.ciphertext,
createdAtIv: file.encCreatedAt?.iv,
lastModifiedAt: file.encLastModifiedAt.ciphertext,
lastModifiedAtIv: file.encLastModifiedAt.iv,
isFavorite: file.isFavorite,
}));
}),
rename: roleProcedure["activeClient"] rename: roleProcedure["activeClient"]
.input( .input(
z.object({ z.object({

View File

@@ -8,7 +8,6 @@ const searchRouter = router({
.input( .input(
z.object({ z.object({
ancestor: DirectoryIdSchema.default("root"), ancestor: DirectoryIdSchema.default("root"),
inFavorites: z.boolean().default(false),
includeCategories: z.number().positive().array().default([]), includeCategories: z.number().positive().array().default([]),
excludeCategories: z.number().positive().array().default([]), excludeCategories: z.number().positive().array().default([]),
}), }),
@@ -16,14 +15,10 @@ const searchRouter = router({
.query(async ({ ctx, input }) => { .query(async ({ ctx, input }) => {
const [directories, files] = await Promise.all([ const [directories, files] = await Promise.all([
input.includeCategories.length === 0 && input.excludeCategories.length === 0 input.includeCategories.length === 0 && input.excludeCategories.length === 0
? DirectoryRepo.searchDirectories(ctx.session.userId, { ? DirectoryRepo.getAllRecursiveDirectoriesByParent(ctx.session.userId, input.ancestor)
parentId: input.ancestor,
inFavorites: input.inFavorites,
})
: [], : [],
FileRepo.searchFiles(ctx.session.userId, { FileRepo.searchFiles(ctx.session.userId, {
parentId: input.ancestor, parentId: input.ancestor,
inFavorites: input.inFavorites,
includeCategoryIds: input.includeCategories, includeCategoryIds: input.includeCategories,
excludeCategoryIds: input.excludeCategories, excludeCategoryIds: input.excludeCategories,
}), }),

View File

@@ -1,7 +1,7 @@
import { TRPCError } from "@trpc/server"; import { TRPCError } from "@trpc/server";
import { createHash } from "crypto"; import { createHash } from "crypto";
import { createReadStream } from "fs"; import { createReadStream, createWriteStream } from "fs";
import { mkdir, open } from "fs/promises"; import { copyFile, mkdir } from "fs/promises";
import mime from "mime"; import mime from "mime";
import { dirname } from "path"; import { dirname } from "path";
import { v4 as uuidv4 } from "uuid"; import { v4 as uuidv4 } from "uuid";
@@ -10,30 +10,17 @@ import { DirectoryIdSchema } from "$lib/schemas";
import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db"; import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db";
import db from "$lib/server/db/kysely"; import db from "$lib/server/db/kysely";
import env from "$lib/server/loadenv"; import env from "$lib/server/loadenv";
import { safeUnlink } from "$lib/server/modules/filesystem"; import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem";
import { router, roleProcedure } from "../init.server"; import { router, roleProcedure } from "../init.server";
const UPLOADS_EXPIRES = 24 * 3600 * 1000; // 24 hours const UPLOADS_EXPIRES = 24 * 3600 * 1000; // 24 hours
const sessionLocks = new Set<string>(); const sessionLocks = new Set<string>();
const reserveUploadPath = async (path: string) => { const generateSessionId = async () => {
await mkdir(dirname(path), { recursive: true });
const file = await open(path, "wx", 0o600);
await file.close();
};
const generateFileUploadSession = async (userId: number) => {
const id = uuidv4(); const id = uuidv4();
const path = `${env.libraryPath}/${userId}/${uuidv4()}`; const path = `${env.uploadsPath}/${id}`;
await reserveUploadPath(path); await mkdir(path, { recursive: true });
return { id, path };
};
const generateThumbnailUploadSession = async (userId: number) => {
const id = uuidv4();
const path = `${env.thumbnailsPath}/${userId}/${id}`;
await reserveUploadPath(path);
return { id, path }; return { id, path };
}; };
@@ -67,7 +54,7 @@ const uploadRouter = router({
throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" }); throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" });
} }
const { id, path } = await generateFileUploadSession(ctx.session.userId); const { id, path } = await generateSessionId();
try { try {
await UploadRepo.createFileUploadSession({ await UploadRepo.createFileUploadSession({
@@ -91,7 +78,7 @@ const uploadRouter = router({
}); });
return { uploadId: id }; return { uploadId: id };
} catch (e) { } catch (e) {
await safeUnlink(path); await safeRecursiveRm(path);
if (e instanceof IntegrityError) { if (e instanceof IntegrityError) {
if (e.message === "Inactive MEK version") { if (e.message === "Inactive MEK version") {
@@ -109,7 +96,6 @@ const uploadRouter = router({
z.object({ z.object({
uploadId: z.uuidv4(), uploadId: z.uuidv4(),
contentHmac: z.base64().nonempty().optional(), contentHmac: z.base64().nonempty().optional(),
encContentHash: z.base64().nonempty(),
}), }),
) )
.mutation(async ({ ctx, input }) => { .mutation(async ({ ctx, input }) => {
@@ -120,6 +106,8 @@ const uploadRouter = router({
sessionLocks.add(uploadId); sessionLocks.add(uploadId);
} }
let filePath = "";
try { try {
const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
if (session?.type !== "file") { if (session?.type !== "file") {
@@ -133,24 +121,29 @@ const uploadRouter = router({
throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" });
} }
const hashStream = createHash("sha256"); filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`;
await mkdir(dirname(filePath), { recursive: true });
for await (const chunk of createReadStream(session.path)) { const hashStream = createHash("sha256");
hashStream.update(chunk); const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
for (let i = 1; i <= session.totalChunks; i++) {
for await (const chunk of createReadStream(`${session.path}/${i}`)) {
hashStream.update(chunk);
writeStream.write(chunk);
}
} }
await new Promise<void>((resolve, reject) => {
writeStream.end((e: any) => (e ? reject(e) : resolve()));
});
const hash = hashStream.digest("base64"); const hash = hashStream.digest("base64");
if (hash !== input.encContentHash) {
await UploadRepo.deleteUploadSession(db, uploadId);
await safeUnlink(session.path);
throw new TRPCError({ code: "CONFLICT", message: "Uploaded file corrupted" });
}
const fileId = await db.transaction().execute(async (trx) => { const fileId = await db.transaction().execute(async (trx) => {
const { id: fileId } = await FileRepo.registerFile(trx, { const { id: fileId } = await FileRepo.registerFile(trx, {
...session, ...session,
userId: ctx.session.userId, userId: ctx.session.userId,
path: session.path, path: filePath,
contentHmac: input.contentHmac ?? null, contentHmac: input.contentHmac ?? null,
encContentHash: hash, encContentHash: hash,
encContentIv: null, encContentIv: null,
@@ -159,7 +152,11 @@ const uploadRouter = router({
return fileId; return fileId;
}); });
await safeRecursiveRm(session.path);
return { file: fileId }; return { file: fileId };
} catch (e) {
await safeUnlink(filePath);
throw e;
} finally { } finally {
sessionLocks.delete(uploadId); sessionLocks.delete(uploadId);
} }
@@ -173,10 +170,10 @@ const uploadRouter = router({
}), }),
) )
.mutation(async ({ ctx, input }) => { .mutation(async ({ ctx, input }) => {
const { id, path } = await generateThumbnailUploadSession(ctx.session.userId); const { id, path } = await generateSessionId();
try { try {
await UploadRepo.createThumbnailUploadSession({ await UploadRepo.createThumbnailOrMigrationUploadSession({
id, id,
type: "thumbnail", type: "thumbnail",
userId: ctx.session.userId, userId: ctx.session.userId,
@@ -188,7 +185,7 @@ const uploadRouter = router({
}); });
return { uploadId: id }; return { uploadId: id };
} catch (e) { } catch (e) {
await safeUnlink(path); await safeRecursiveRm(path);
if (e instanceof IntegrityError) { if (e instanceof IntegrityError) {
if (e.message === "File not found") { if (e.message === "File not found") {
@@ -215,6 +212,8 @@ const uploadRouter = router({
sessionLocks.add(uploadId); sessionLocks.add(uploadId);
} }
let thumbnailPath = "";
try { try {
const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId); const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
if (session?.type !== "thumbnail") { if (session?.type !== "thumbnail") {
@@ -223,20 +222,26 @@ const uploadRouter = router({
throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" }); throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" });
} }
thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uploadId}`;
await mkdir(dirname(thumbnailPath), { recursive: true });
await copyFile(`${session.path}/1`, thumbnailPath);
const oldThumbnailPath = await db.transaction().execute(async (trx) => { const oldThumbnailPath = await db.transaction().execute(async (trx) => {
const oldPath = await MediaRepo.updateFileThumbnail( const oldPath = await MediaRepo.updateFileThumbnail(
trx, trx,
ctx.session.userId, ctx.session.userId,
session.fileId, session.fileId,
session.dekVersion, session.dekVersion,
session.path, thumbnailPath,
null, null,
); );
await UploadRepo.deleteUploadSession(trx, uploadId); await UploadRepo.deleteUploadSession(trx, uploadId);
return oldPath; return oldPath;
}); });
await safeUnlink(oldThumbnailPath); await Promise.all([safeUnlink(oldThumbnailPath), safeRecursiveRm(session.path)]);
} catch (e) { } catch (e) {
await safeUnlink(thumbnailPath);
if (e instanceof IntegrityError && e.message === "Invalid DEK version") { if (e instanceof IntegrityError && e.message === "Invalid DEK version") {
// DEK rotated after this upload started // DEK rotated after this upload started
throw new TRPCError({ code: "CONFLICT", message: e.message }); throw new TRPCError({ code: "CONFLICT", message: e.message });
@@ -246,6 +251,112 @@ const uploadRouter = router({
sessionLocks.delete(uploadId); sessionLocks.delete(uploadId);
} }
}), }),
startMigrationUpload: roleProcedure["activeClient"]
.input(
z.object({
file: z.int().positive(),
chunks: z.int().positive(),
dekVersion: z.date(),
}),
)
.mutation(async ({ ctx, input }) => {
const { id, path } = await generateSessionId();
try {
await UploadRepo.createThumbnailOrMigrationUploadSession({
id,
type: "migration",
userId: ctx.session.userId,
path,
totalChunks: input.chunks,
expiresAt: new Date(Date.now() + UPLOADS_EXPIRES),
fileId: input.file,
dekVersion: input.dekVersion,
});
return { uploadId: id };
} catch (e) {
await safeRecursiveRm(path);
if (e instanceof IntegrityError) {
if (e.message === "File not found") {
throw new TRPCError({ code: "NOT_FOUND", message: "Invalid file id" });
} else if (e.message === "File is not legacy") {
throw new TRPCError({ code: "BAD_REQUEST", message: e.message });
}
}
throw e;
}
}),
completeMigrationUpload: roleProcedure["activeClient"]
.input(
z.object({
uploadId: z.uuidv4(),
}),
)
.mutation(async ({ ctx, input }) => {
const { uploadId } = input;
if (sessionLocks.has(uploadId)) {
throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" });
} else {
sessionLocks.add(uploadId);
}
let filePath = "";
try {
const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
if (session?.type !== "migration") {
throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" });
} else if (session.uploadedChunks < session.totalChunks) {
throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" });
}
filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`;
await mkdir(dirname(filePath), { recursive: true });
const hashStream = createHash("sha256");
const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
for (let i = 1; i <= session.totalChunks; i++) {
for await (const chunk of createReadStream(`${session.path}/${i}`)) {
hashStream.update(chunk);
writeStream.write(chunk);
}
}
await new Promise<void>((resolve, reject) => {
writeStream.end((e: any) => (e ? reject(e) : resolve()));
});
const hash = hashStream.digest("base64");
const oldPath = await db.transaction().execute(async (trx) => {
const { oldPath } = await FileRepo.migrateFileContent(
trx,
ctx.session.userId,
session.fileId,
filePath,
session.dekVersion!,
hash,
);
await UploadRepo.deleteUploadSession(trx, uploadId);
return oldPath;
});
await Promise.all([safeUnlink(oldPath), safeRecursiveRm(session.path)]);
} catch (e) {
await safeUnlink(filePath);
if (e instanceof IntegrityError && e.message === "File is not legacy") {
// File migrated after this upload started
throw new TRPCError({ code: "CONFLICT", message: e.message });
}
throw e;
} finally {
sessionLocks.delete(uploadId);
}
}),
}); });
export default uploadRouter; export default uploadRouter;