8 Commits

Author  SHA1        Message                                           Date
static  3906ec4371  Merge pull request #17 from kmc7468/dev (v0.7.0)  2026-01-06 07:50:16 +09:00
static  90ac5ba4c3  Merge pull request #15 from kmc7468/dev (v0.6.0)  2025-12-27 14:22:26 +09:00
static  dfffa004ac  Merge pull request #13 from kmc7468/dev (v0.5.1)  2025-07-12 19:56:12 +09:00
static  0cd55a413d  Merge pull request #12 from kmc7468/dev (v0.5.0)  2025-07-12 06:01:08 +09:00
static  361d966a59  Merge pull request #10 from kmc7468/dev (v0.4.0)  2025-01-30 21:06:50 +09:00
static  aef43b8bfa  Merge pull request #6 from kmc7468/dev (v0.3.0)   2025-01-18 13:29:09 +09:00
static  7f128cccf6  Merge pull request #5 from kmc7468/dev (v0.2.0)   2025-01-13 03:53:14 +09:00
static  a198e5f6dc  Merge pull request #2 from kmc7468/dev (v0.1.0)   2025-01-09 06:24:31 +09:00
77 changed files with 961 additions and 2070 deletions

View File

@@ -12,7 +12,6 @@ node_modules
 /data
 /library
 /thumbnails
-/uploads
 
 # OS
 .DS_Store

View File

@@ -12,4 +12,3 @@ USER_CLIENT_CHALLENGE_EXPIRES=
 SESSION_UPGRADE_CHALLENGE_EXPIRES=
 LIBRARY_PATH=
 THUMBNAILS_PATH=
-UPLOADS_PATH=

.gitignore (vendored, 1 line changed)
View File

@@ -10,7 +10,6 @@ node_modules
 /data
 /library
 /thumbnails
-/uploads
 
 # OS
 .DS_Store

View File

@@ -20,7 +20,6 @@ services:
       - SESSION_UPGRADE_CHALLENGE_EXPIRES
       - LIBRARY_PATH=/app/data/library
       - THUMBNAILS_PATH=/app/data/thumbnails
-      - UPLOADS_PATH=/app/data/uploads
 
       # SvelteKit
       - ADDRESS_HEADER=${TRUST_PROXY:+X-Forwarded-For}
       - XFF_DEPTH=${TRUST_PROXY:-}

View File

@@ -1,7 +1,7 @@
 {
   "name": "arkvault",
   "private": true,
-  "version": "0.8.0",
+  "version": "0.7.0",
   "type": "module",
   "scripts": {
     "dev": "vite dev",
@@ -16,14 +16,13 @@
     "db:migrate": "kysely migrate"
   },
   "devDependencies": {
-    "@eslint/compat": "^2.0.1",
+    "@eslint/compat": "^2.0.0",
     "@eslint/js": "^9.39.2",
-    "@iconify-json/material-symbols": "^1.2.51",
+    "@iconify-json/material-symbols": "^1.2.50",
-    "@noble/hashes": "^2.0.1",
     "@sveltejs/adapter-node": "^5.4.0",
-    "@sveltejs/kit": "^2.49.4",
+    "@sveltejs/kit": "^2.49.2",
-    "@sveltejs/vite-plugin-svelte": "^6.2.4",
+    "@sveltejs/vite-plugin-svelte": "^6.2.1",
-    "@tanstack/svelte-virtual": "^3.13.18",
+    "@tanstack/svelte-virtual": "^3.13.16",
     "@trpc/client": "^11.8.1",
     "@types/file-saver": "^2.0.7",
     "@types/ms": "^0.7.34",
@@ -34,11 +33,11 @@
     "dexie": "^4.2.1",
     "eslint": "^9.39.2",
     "eslint-config-prettier": "^10.1.8",
-    "eslint-plugin-svelte": "^3.14.0",
+    "eslint-plugin-svelte": "^3.13.1",
     "eslint-plugin-tailwindcss": "^3.18.2",
-    "exifreader": "^4.35.0",
+    "exifreader": "^4.33.1",
     "file-saver": "^2.0.5",
-    "globals": "^17.0.0",
+    "globals": "^16.5.0",
     "heic2any": "^0.0.4",
     "kysely-ctl": "^0.19.0",
     "lru-cache": "^11.2.4",
@@ -51,11 +50,12 @@
     "svelte-check": "^4.3.5",
     "tailwindcss": "^3.4.19",
     "typescript": "^5.9.3",
-    "typescript-eslint": "^8.52.0",
+    "typescript-eslint": "^8.51.0",
     "unplugin-icons": "^22.5.0",
-    "vite": "^7.3.1"
+    "vite": "^7.3.0"
   },
   "dependencies": {
+    "@fastify/busboy": "^3.2.0",
     "@trpc/server": "^11.8.1",
     "argon2": "^0.44.0",
     "kysely": "^0.28.9",

pnpm-lock.yaml (generated, 580 lines changed)

File diff suppressed because it is too large

View File

@@ -1,6 +1,7 @@
 import type { ClientInit } from "@sveltejs/kit";
 import { cleanupDanglingInfos, getClientKey, getMasterKeys, getHmacSecrets } from "$lib/indexedDB";
 import { prepareFileCache } from "$lib/modules/file";
+import { prepareOpfs } from "$lib/modules/opfs";
 import { clientKeyStore, masterKeyStore, hmacSecretStore } from "$lib/stores";
 
 const requestPersistentStorage = async () => {
@@ -45,6 +46,7 @@ export const init: ClientInit = async () => {
     prepareClientKeyStore(),
     prepareMasterKeyStore(),
     prepareHmacSecretStore(),
+    prepareOpfs(),
   ]);
 
   cleanupDanglingInfos(); // Intended

View File

@@ -7,7 +7,6 @@ import {
   cleanupExpiredSessions,
   cleanupExpiredSessionUpgradeChallenges,
 } from "$lib/server/db/session";
-import { cleanupExpiredUploadSessions } from "$lib/server/services/upload";
 import { authenticate, setAgentInfo } from "$lib/server/middlewares";
 
 export const init: ServerInit = async () => {
@@ -17,7 +16,6 @@ export const init: ServerInit = async () => {
     cleanupExpiredUserClientChallenges();
     cleanupExpiredSessions();
     cleanupExpiredSessionUpgradeChallenges();
-    cleanupExpiredUploadSessions();
   });
 };
}; };

View File

@@ -1,2 +0,0 @@
-export * from "./serviceWorker";
-export * from "./upload";

View File

@@ -1 +0,0 @@
-export const DECRYPTED_FILE_URL_PREFIX = "/_internal/decryptedFile/";

View File

@@ -1,6 +0,0 @@
-export const AES_GCM_IV_SIZE = 12;
-export const AES_GCM_TAG_SIZE = 16;
-export const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE;
-
-export const CHUNK_SIZE = 4 * 1024 * 1024; // 4 MiB
-export const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD;
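
For reference, the removed constants pin down the layout of one encrypted chunk: a 12-byte AES-GCM IV in front, up to 4 MiB of ciphertext, and the 16-byte GCM tag that WebCrypto appends on encrypt. A minimal sketch of how the sizes relate (chunkCount is an illustrative helper, not from the repo):

const AES_GCM_IV_SIZE = 12; // bytes, prepended to each chunk
const AES_GCM_TAG_SIZE = 16; // bytes, appended by AES-GCM itself
const CHUNK_SIZE = 4 * 1024 * 1024; // 4 MiB of plaintext per chunk

// An encrypted chunk is IV ‖ ciphertext ‖ tag, so its size is fixed
// for every chunk of a file except the final, shorter one.
const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE;

// Hypothetical helper: number of chunks needed for a file of `size` bytes.
const chunkCount = (size: number) => Math.ceil(size / CHUNK_SIZE);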

View File

@@ -70,12 +70,12 @@ export const storeMasterKeys = async (keys: MasterKey[]) => {
 };
 
 export const getHmacSecrets = async () => {
-  return (await keyStore.hmacSecret.toArray()).filter(({ secret }) => secret.extractable);
+  return await keyStore.hmacSecret.toArray();
 };
 
 export const storeHmacSecrets = async (secrets: HmacSecret[]) => {
-  if (secrets.some(({ secret }) => !secret.extractable)) {
-    throw new Error("Hmac secrets must be extractable");
+  if (secrets.some(({ secret }) => secret.extractable)) {
+    throw new Error("Hmac secrets must be nonextractable");
   }
   await keyStore.hmacSecret.bulkPut(secrets);
 };

View File

@@ -1,15 +1,8 @@
-import { AES_GCM_IV_SIZE } from "$lib/constants";
-import {
-  encodeString,
-  decodeString,
-  encodeToBase64,
-  decodeFromBase64,
-  concatenateBuffers,
-} from "./utils";
+import { encodeString, decodeString, encodeToBase64, decodeFromBase64 } from "./util";
 
 export const generateMasterKey = async () => {
   return {
-    masterKey: await crypto.subtle.generateKey(
+    masterKey: await window.crypto.subtle.generateKey(
       {
         name: "AES-KW",
         length: 256,
@@ -22,7 +15,7 @@ export const generateMasterKey = async () => {
 export const generateDataKey = async () => {
   return {
-    dataKey: await crypto.subtle.generateKey(
+    dataKey: await window.crypto.subtle.generateKey(
       {
         name: "AES-GCM",
         length: 256,
@@ -35,9 +28,9 @@ export const generateDataKey = async () => {
 };
 
 export const makeAESKeyNonextractable = async (key: CryptoKey) => {
-  return await crypto.subtle.importKey(
+  return await window.crypto.subtle.importKey(
     "raw",
-    await crypto.subtle.exportKey("raw", key),
+    await window.crypto.subtle.exportKey("raw", key),
     key.algorithm,
     false,
     key.usages,
@@ -45,12 +38,12 @@ export const makeAESKeyNonextractable = async (key: CryptoKey) => {
 };
 
 export const wrapDataKey = async (dataKey: CryptoKey, masterKey: CryptoKey) => {
-  return encodeToBase64(await crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW"));
+  return encodeToBase64(await window.crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW"));
 };
 
 export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey) => {
   return {
-    dataKey: await crypto.subtle.unwrapKey(
+    dataKey: await window.crypto.subtle.unwrapKey(
       "raw",
       decodeFromBase64(dataKeyWrapped),
       masterKey,
@@ -63,12 +56,12 @@ export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey
 };
 
 export const wrapHmacSecret = async (hmacSecret: CryptoKey, masterKey: CryptoKey) => {
-  return encodeToBase64(await crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW"));
+  return encodeToBase64(await window.crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW"));
 };
 
 export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: CryptoKey) => {
   return {
-    hmacSecret: await crypto.subtle.unwrapKey(
+    hmacSecret: await window.crypto.subtle.unwrapKey(
       "raw",
       decodeFromBase64(hmacSecretWrapped),
       masterKey,
@@ -77,15 +70,15 @@ export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: Cry
         name: "HMAC",
         hash: "SHA-256",
       } satisfies HmacImportParams,
-      true, // Extractable
+      false, // Nonextractable
       ["sign", "verify"],
     ),
   };
 };
 
 export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => {
-  const iv = crypto.getRandomValues(new Uint8Array(12));
-  const ciphertext = await crypto.subtle.encrypt(
+  const iv = window.crypto.getRandomValues(new Uint8Array(12));
+  const ciphertext = await window.crypto.subtle.encrypt(
     {
       name: "AES-GCM",
       iv,
@@ -93,18 +86,14 @@ export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => {
     dataKey,
     data,
   );
-  return { ciphertext, iv: iv.buffer };
+  return { ciphertext, iv: encodeToBase64(iv.buffer) };
 };
 
-export const decryptData = async (
-  ciphertext: BufferSource,
-  iv: string | BufferSource,
-  dataKey: CryptoKey,
-) => {
-  return await crypto.subtle.decrypt(
+export const decryptData = async (ciphertext: BufferSource, iv: string, dataKey: CryptoKey) => {
+  return await window.crypto.subtle.decrypt(
     {
       name: "AES-GCM",
-      iv: typeof iv === "string" ? decodeFromBase64(iv) : iv,
+      iv: decodeFromBase64(iv),
     } satisfies AesGcmParams,
     dataKey,
     ciphertext,
@@ -113,22 +102,9 @@ export const decryptData = async (
 export const encryptString = async (plaintext: string, dataKey: CryptoKey) => {
   const { ciphertext, iv } = await encryptData(encodeString(plaintext), dataKey);
-  return { ciphertext: encodeToBase64(ciphertext), iv: encodeToBase64(iv) };
+  return { ciphertext: encodeToBase64(ciphertext), iv };
 };
 
 export const decryptString = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
   return decodeString(await decryptData(decodeFromBase64(ciphertext), iv, dataKey));
 };
-
-export const encryptChunk = async (chunk: ArrayBuffer, dataKey: CryptoKey) => {
-  const { ciphertext, iv } = await encryptData(chunk, dataKey);
-  return concatenateBuffers(iv, ciphertext).buffer;
-};
-
-export const decryptChunk = async (encryptedChunk: ArrayBuffer, dataKey: CryptoKey) => {
-  return await decryptData(
-    encryptedChunk.slice(AES_GCM_IV_SIZE),
-    encryptedChunk.slice(0, AES_GCM_IV_SIZE),
-    dataKey,
-  );
-};
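
The removed encryptChunk/decryptChunk pair frames each chunk as IV ‖ ciphertext, so every chunk carries its own IV and decrypts independently. A self-contained sketch of the same framing using bare WebCrypto (sealChunk/openChunk are illustrative names, not from the repo):

const IV_SIZE = 12;

// Encrypt one chunk and prepend the IV so the chunk is self-describing.
const sealChunk = async (chunk: ArrayBuffer, key: CryptoKey) => {
  const iv = crypto.getRandomValues(new Uint8Array(IV_SIZE));
  const ciphertext = await crypto.subtle.encrypt({ name: "AES-GCM", iv }, key, chunk);
  const framed = new Uint8Array(IV_SIZE + ciphertext.byteLength);
  framed.set(iv, 0);
  framed.set(new Uint8Array(ciphertext), IV_SIZE);
  return framed.buffer;
};

// Split the frame back apart; AES-GCM verifies the appended tag on decrypt.
const openChunk = async (framed: ArrayBuffer, key: CryptoKey) =>
  crypto.subtle.decrypt(
    { name: "AES-GCM", iv: framed.slice(0, IV_SIZE) },
    key,
    framed.slice(IV_SIZE),
  );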

View File

@@ -1,4 +1,4 @@
 export * from "./aes";
 export * from "./rsa";
 export * from "./sha";
-export * from "./utils";
+export * from "./util";

View File

@@ -1,7 +1,7 @@
-import { encodeString, encodeToBase64, decodeFromBase64 } from "./utils";
+import { encodeString, encodeToBase64, decodeFromBase64 } from "./util";
 
 export const generateEncryptionKeyPair = async () => {
-  const keyPair = await crypto.subtle.generateKey(
+  const keyPair = await window.crypto.subtle.generateKey(
     {
       name: "RSA-OAEP",
       modulusLength: 4096,
@@ -18,7 +18,7 @@ export const generateEncryptionKeyPair = async () => {
 };
 
 export const generateSigningKeyPair = async () => {
-  const keyPair = await crypto.subtle.generateKey(
+  const keyPair = await window.crypto.subtle.generateKey(
     {
       name: "RSA-PSS",
       modulusLength: 4096,
@@ -37,7 +37,7 @@ export const generateSigningKeyPair = async () => {
 export const exportRSAKey = async (key: CryptoKey) => {
   const format = key.type === "public" ? ("spki" as const) : ("pkcs8" as const);
   return {
-    key: await crypto.subtle.exportKey(format, key),
+    key: await window.crypto.subtle.exportKey(format, key),
     format,
   };
 };
@@ -54,14 +54,14 @@ export const importEncryptionKeyPairFromBase64 = async (
     name: "RSA-OAEP",
     hash: "SHA-256",
   };
-  const encryptKey = await crypto.subtle.importKey(
+  const encryptKey = await window.crypto.subtle.importKey(
     "spki",
     decodeFromBase64(encryptKeyBase64),
     algorithm,
     true,
     ["encrypt", "wrapKey"],
   );
-  const decryptKey = await crypto.subtle.importKey(
+  const decryptKey = await window.crypto.subtle.importKey(
    "pkcs8",
     decodeFromBase64(decryptKeyBase64),
     algorithm,
@@ -79,14 +79,14 @@ export const importSigningKeyPairFromBase64 = async (
     name: "RSA-PSS",
     hash: "SHA-256",
   };
-  const signKey = await crypto.subtle.importKey(
+  const signKey = await window.crypto.subtle.importKey(
     "pkcs8",
     decodeFromBase64(signKeyBase64),
     algorithm,
     true,
     ["sign"],
   );
-  const verifyKey = await crypto.subtle.importKey(
+  const verifyKey = await window.crypto.subtle.importKey(
     "spki",
     decodeFromBase64(verifyKeyBase64),
     algorithm,
@@ -98,11 +98,17 @@ export const importSigningKeyPairFromBase64 = async (
 export const makeRSAKeyNonextractable = async (key: CryptoKey) => {
   const { key: exportedKey, format } = await exportRSAKey(key);
-  return await crypto.subtle.importKey(format, exportedKey, key.algorithm, false, key.usages);
+  return await window.crypto.subtle.importKey(
+    format,
+    exportedKey,
+    key.algorithm,
+    false,
+    key.usages,
+  );
 };
 
 export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey) => {
-  return await crypto.subtle.decrypt(
+  return await window.crypto.subtle.decrypt(
     {
       name: "RSA-OAEP",
     } satisfies RsaOaepParams,
@@ -113,7 +119,7 @@ export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey)
 export const wrapMasterKey = async (masterKey: CryptoKey, encryptKey: CryptoKey) => {
   return encodeToBase64(
-    await crypto.subtle.wrapKey("raw", masterKey, encryptKey, {
+    await window.crypto.subtle.wrapKey("raw", masterKey, encryptKey, {
       name: "RSA-OAEP",
     } satisfies RsaOaepParams),
   );
@@ -125,7 +131,7 @@ export const unwrapMasterKey = async (
   extractable = false,
 ) => {
   return {
-    masterKey: await crypto.subtle.unwrapKey(
+    masterKey: await window.crypto.subtle.unwrapKey(
       "raw",
       decodeFromBase64(masterKeyWrapped),
       decryptKey,
@@ -140,7 +146,7 @@ export const unwrapMasterKey = async (
 };
 
 export const signMessageRSA = async (message: BufferSource, signKey: CryptoKey) => {
-  return await crypto.subtle.sign(
+  return await window.crypto.subtle.sign(
     {
       name: "RSA-PSS",
       saltLength: 32, // SHA-256
@@ -155,7 +161,7 @@ export const verifySignatureRSA = async (
   signature: BufferSource,
   verifyKey: CryptoKey,
 ) => {
-  return await crypto.subtle.verify(
+  return await window.crypto.subtle.verify(
     {
       name: "RSA-PSS",
       saltLength: 32, // SHA-256

View File

@@ -1,13 +1,10 @@
-import { hmac } from "@noble/hashes/hmac.js";
-import { sha256 } from "@noble/hashes/sha2.js";
-
 export const digestMessage = async (message: BufferSource) => {
-  return await crypto.subtle.digest("SHA-256", message);
+  return await window.crypto.subtle.digest("SHA-256", message);
 };
 
 export const generateHmacSecret = async () => {
   return {
-    hmacSecret: await crypto.subtle.generateKey(
+    hmacSecret: await window.crypto.subtle.generateKey(
       {
         name: "HMAC",
         hash: "SHA-256",
@@ -18,10 +15,6 @@ export const generateHmacSecret = async () => {
   };
 };
 
-export const createHmacStream = async (hmacSecret: CryptoKey) => {
-  const h = hmac.create(sha256, new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret)));
-  return {
-    update: (data: Uint8Array) => h.update(data),
-    digest: () => h.digest(),
-  };
-};
+export const signMessageHmac = async (message: BufferSource, hmacSecret: CryptoKey) => {
+  return await window.crypto.subtle.sign("HMAC", hmacSecret, message);
+};
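
WebCrypto's HMAC API is one-shot, which is why the streaming variant exports the raw secret into @noble/hashes' incremental hmac instead. A self-contained sketch of hashing a File chunk by chunk under that approach (assumes an extractable secret, as the companion indexedDB change enforces):

import { hmac } from "@noble/hashes/hmac.js";
import { sha256 } from "@noble/hashes/sha2.js";

// Incrementally HMAC a File's stream; only one chunk is in memory at a time.
const hmacFile = async (file: File, hmacSecret: CryptoKey) => {
  const raw = new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret));
  const h = hmac.create(sha256, raw);
  const reader = file.stream().getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    h.update(value);
  }
  return h.digest(); // Uint8Array(32)
};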

View File

@@ -9,8 +9,8 @@ export const decodeString = (data: ArrayBuffer) => {
   return textDecoder.decode(data);
 };
 
-export const encodeToBase64 = (data: ArrayBuffer | Uint8Array) => {
-  return btoa(String.fromCharCode(...(data instanceof ArrayBuffer ? new Uint8Array(data) : data)));
+export const encodeToBase64 = (data: ArrayBuffer) => {
+  return btoa(String.fromCharCode(...new Uint8Array(data)));
 };
 
 export const decodeFromBase64 = (data: string) => {

View File

@@ -1,7 +1,6 @@
 import axios from "axios";
 import { limitFunction } from "p-limit";
-import { CHUNK_SIZE, ENCRYPTION_OVERHEAD } from "$lib/constants";
-import { decryptChunk, concatenateBuffers } from "$lib/modules/crypto";
+import { decryptData } from "$lib/modules/crypto";
 
 export interface FileDownloadState {
   id: number;
@@ -66,21 +65,13 @@ const decryptFile = limitFunction(
   async (
     state: FileDownloadState,
     fileEncrypted: ArrayBuffer,
-    encryptedChunkSize: number,
+    fileEncryptedIv: string,
     dataKey: CryptoKey,
   ) => {
     state.status = "decrypting";
 
-    const chunks: ArrayBuffer[] = [];
-    let offset = 0;
-    while (offset < fileEncrypted.byteLength) {
-      const nextOffset = Math.min(offset + encryptedChunkSize, fileEncrypted.byteLength);
-      chunks.push(await decryptChunk(fileEncrypted.slice(offset, nextOffset), dataKey));
-      offset = nextOffset;
-    }
-    const fileBuffer = concatenateBuffers(...chunks).buffer;
+    const fileBuffer = await decryptData(fileEncrypted, fileEncryptedIv, dataKey);
 
     state.status = "decrypted";
     state.result = fileBuffer;
     return fileBuffer;
@@ -88,7 +79,7 @@ const decryptFile = limitFunction(
   { concurrency: 4 },
 );
 
-export const downloadFile = async (id: number, dataKey: CryptoKey, isLegacy: boolean) => {
+export const downloadFile = async (id: number, fileEncryptedIv: string, dataKey: CryptoKey) => {
   downloadingFiles.push({
     id,
     status: "download-pending",
@@ -96,13 +87,7 @@ export const downloadFile = async (id: number, dataKey: CryptoKey, isLegacy: boo
   const state = downloadingFiles.at(-1)!;
 
   try {
-    const fileEncrypted = await requestFileDownload(state, id);
-    return await decryptFile(
-      state,
-      fileEncrypted,
-      isLegacy ? fileEncrypted.byteLength : CHUNK_SIZE + ENCRYPTION_OVERHEAD,
-      dataKey,
-    );
+    return await decryptFile(state, await requestFileDownload(state, id), fileEncryptedIv, dataKey);
   } catch (e) {
     state.status = "error";
     throw e;
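
Because chunk framing is fixed-size (4 MiB of plaintext plus 28 bytes of IV and tag per chunk), the download side can split the encrypted blob by offset arithmetic alone; a legacy file is just one oversized chunk. A self-contained sketch of that loop (decryptBlob is an illustrative name):

const CHUNK_SIZE = 4 * 1024 * 1024;
const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + 12 + 16; // plaintext + IV + GCM tag

// Decrypt a downloaded blob chunk by chunk; each chunk is IV ‖ ciphertext‖tag.
// A legacy (pre-chunking) file is treated as one single chunk.
const decryptBlob = async (blob: ArrayBuffer, key: CryptoKey, isLegacy: boolean) => {
  const step = isLegacy ? blob.byteLength : ENCRYPTED_CHUNK_SIZE;
  const parts: ArrayBuffer[] = [];
  for (let offset = 0; offset < blob.byteLength; offset += step) {
    const chunk = blob.slice(offset, Math.min(offset + step, blob.byteLength));
    parts.push(
      await crypto.subtle.decrypt(
        { name: "AES-GCM", iv: chunk.slice(0, 12) },
        key,
        chunk.slice(12),
      ),
    );
  }
  return await new Blob(parts).arrayBuffer(); // concatenated plaintext
};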

View File

@@ -5,6 +5,7 @@ import { decryptData } from "$lib/modules/crypto";
 import type { SummarizedFileInfo } from "$lib/modules/filesystem";
 import { readFile, writeFile, deleteFile, deleteDirectory } from "$lib/modules/opfs";
 import { getThumbnailUrl } from "$lib/modules/thumbnail";
+import { isTRPCClientError, trpc } from "$trpc/client";
 
 const loadedThumbnails = new LRUCache<number, Writable<string>>({ max: 100 });
 const loadingThumbnails = new Map<number, Writable<string | undefined>>();
@@ -17,18 +18,25 @@ const fetchFromOpfs = async (fileId: number) => {
 };
 
 const fetchFromServer = async (fileId: number, dataKey: CryptoKey) => {
-  const res = await fetch(`/api/file/${fileId}/thumbnail/download`);
-  if (!res.ok) return null;
-
-  const thumbnailEncrypted = await res.arrayBuffer();
-
-  const thumbnailBuffer = await decryptData(
-    thumbnailEncrypted.slice(12),
-    thumbnailEncrypted.slice(0, 12),
-    dataKey,
-  );
-  void writeFile(`/thumbnail/file/${fileId}`, thumbnailBuffer);
-
-  return getThumbnailUrl(thumbnailBuffer);
+  try {
+    const [thumbnailEncrypted, { contentIv: thumbnailEncryptedIv }] = await Promise.all([
+      fetch(`/api/file/${fileId}/thumbnail/download`),
+      trpc().file.thumbnail.query({ id: fileId }),
+    ]);
+
+    const thumbnailBuffer = await decryptData(
+      await thumbnailEncrypted.arrayBuffer(),
+      thumbnailEncryptedIv,
+      dataKey,
+    );
+    void writeFile(`/thumbnail/file/${fileId}`, thumbnailBuffer);
+
+    return getThumbnailUrl(thumbnailBuffer);
+  } catch (e) {
+    if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") {
+      return null;
+    }
+    throw e;
+  }
 };
 
 export const getFileThumbnail = (file: SummarizedFileInfo) => {
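
The NOT_FOUND narrowing above is the usual tRPC client pattern: TRPCClientError exposes the server's error code at data.code. A generic sketch (orNull is an illustrative helper; the repo's isTRPCClientError is assumed to be a thin instanceof guard over the same class):

import { TRPCClientError } from "@trpc/client";

// Treat one specific server error code as an expected, recoverable outcome.
const orNull = async <T>(query: Promise<T>): Promise<T | null> => {
  try {
    return await query;
  } catch (e) {
    if (e instanceof TRPCClientError && e.data?.code === "NOT_FOUND") {
      return null; // expected: the resource simply doesn't exist
    }
    throw e; // anything else is a real failure
  }
};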

View File

@@ -1,21 +1,24 @@
+import axios from "axios";
 import ExifReader from "exifreader";
-import pLimit, { limitFunction } from "p-limit";
-import { CHUNK_SIZE } from "$lib/constants";
+import { limitFunction } from "p-limit";
 import {
   encodeToBase64,
   generateDataKey,
   wrapDataKey,
   encryptData,
   encryptString,
-  encryptChunk,
   digestMessage,
-  createHmacStream,
+  signMessageHmac,
 } from "$lib/modules/crypto";
 import { Scheduler } from "$lib/modules/scheduler";
-import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail";
+import { generateThumbnail } from "$lib/modules/thumbnail";
+import type {
+  FileThumbnailUploadRequest,
+  FileUploadRequest,
+  FileUploadResponse,
+} from "$lib/server/schemas";
 import type { MasterKey, HmacSecret } from "$lib/stores";
 import { trpc } from "$trpc/client";
-import type { RouterInputs } from "$trpc/router.server";
 
 export interface FileUploadState {
   name: string;
@@ -39,7 +42,7 @@ export type LiveFileUploadState = FileUploadState & {
 };
 
 const scheduler = new Scheduler<
-  { fileId: number; fileBuffer?: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined
+  { fileId: number; fileBuffer: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined
 >();
 
 let uploadingFiles: FileUploadState[] = $state([]);
@@ -59,22 +62,15 @@ export const clearUploadedFiles = () => {
 const requestDuplicateFileScan = limitFunction(
   async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise<boolean>) => {
-    const hmacStream = await createHmacStream(hmacSecret.secret);
-    const reader = file.stream().getReader();
-    while (true) {
-      const { done, value } = await reader.read();
-      if (done) break;
-      hmacStream.update(value);
-    }
-    const fileSigned = encodeToBase64(hmacStream.digest());
+    const fileBuffer = await file.arrayBuffer();
+    const fileSigned = encodeToBase64(await signMessageHmac(fileBuffer, hmacSecret.secret));
 
     const files = await trpc().file.listByHash.query({
       hskVersion: hmacSecret.version,
       contentHmac: fileSigned,
     });
     if (files.length === 0 || (await onDuplicate())) {
-      return { fileSigned };
+      return { fileBuffer, fileSigned };
     } else {
       return {};
     }
@@ -114,40 +110,29 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
   return new Date(utcDate - offsetMs);
 };
 
-const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
-  const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = [];
-  let offset = 0;
-  while (offset < fileBuffer.byteLength) {
-    const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength);
-    const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey);
-    chunksEncrypted.push({
-      chunkEncrypted: chunkEncrypted,
-      chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)),
-    });
-    offset = nextOffset;
-  }
-  return chunksEncrypted;
-};
-
-const encryptImageFile = limitFunction(
-  async (state: FileUploadState, file: File, masterKey: MasterKey) => {
+const encryptFile = limitFunction(
+  async (state: FileUploadState, file: File, fileBuffer: ArrayBuffer, masterKey: MasterKey) => {
     state.status = "encrypting";
 
-    const fileBuffer = await file.arrayBuffer();
-    const createdAt = extractExifDateTime(fileBuffer);
+    const fileType = getFileType(file);
+
+    let createdAt;
+    if (fileType.startsWith("image/")) {
+      createdAt = extractExifDateTime(fileBuffer);
+    }
 
     const { dataKey, dataKeyVersion } = await generateDataKey();
     const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
 
-    const chunksEncrypted = await encryptChunks(fileBuffer, dataKey);
+    const fileEncrypted = await encryptData(fileBuffer, dataKey);
+    const fileEncryptedHash = encodeToBase64(await digestMessage(fileEncrypted.ciphertext));
 
     const nameEncrypted = await encryptString(file.name, dataKey);
     const createdAtEncrypted =
       createdAt && (await encryptString(createdAt.getTime().toString(), dataKey));
     const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey);
 
-    const thumbnail = await generateThumbnail(fileBuffer, getFileType(file));
+    const thumbnail = await generateThumbnail(fileBuffer, fileType);
    const thumbnailBuffer = await thumbnail?.arrayBuffer();
    const thumbnailEncrypted = thumbnailBuffer && (await encryptData(thumbnailBuffer, dataKey));
@@ -156,7 +141,9 @@ const encryptImageFile = limitFunction(
     return {
       dataKeyWrapped,
       dataKeyVersion,
-      chunksEncrypted,
+      fileType,
+      fileEncrypted,
+      fileEncryptedHash,
       nameEncrypted,
       createdAtEncrypted,
       lastModifiedAtEncrypted,
@@ -166,238 +153,35 @@
   { concurrency: 4 },
 );
 
-const uploadThumbnail = async (
-  fileId: number,
-  thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer },
-  dataKeyVersion: Date,
-) => {
-  const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({
-    file: fileId,
-    dekVersion: dataKeyVersion,
-  });
-
-  const ivAndCiphertext = new Uint8Array(
-    thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength,
-  );
-  ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0);
-  ivAndCiphertext.set(
-    new Uint8Array(thumbnailEncrypted.ciphertext),
-    thumbnailEncrypted.iv.byteLength,
-  );
-  const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext));
-
-  const response = await fetch(`/api/upload/${uploadId}/chunks/0`, {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/octet-stream",
-      "Content-Digest": `sha-256=:${chunkHash}:`,
-    },
-    body: ivAndCiphertext,
-  });
-  if (!response.ok) {
-    throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`);
-  }
-
-  await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId });
-};
-
-const requestImageFileUpload = limitFunction(
-  async (
-    state: FileUploadState,
-    metadata: RouterInputs["upload"]["startFileUpload"],
-    chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[],
-    fileSigned: string | undefined,
-    thumbnailData: { ciphertext: ArrayBuffer; iv: ArrayBuffer; plaintext: ArrayBuffer } | null,
-    dataKeyVersion: Date,
-  ) => {
+const requestFileUpload = limitFunction(
+  async (state: FileUploadState, form: FormData, thumbnailForm: FormData | null) => {
     state.status = "uploading";
 
-    const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata);
-
-    const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0);
-    let uploadedBytes = 0;
-    const startTime = Date.now();
-
-    for (let i = 0; i < chunksEncrypted.length; i++) {
-      const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!;
-      const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, {
-        method: "POST",
-        headers: {
-          "Content-Type": "application/octet-stream",
-          "Content-Digest": `sha-256=:${chunkEncryptedHash}:`,
-        },
-        body: chunkEncrypted,
-      });
-      if (!response.ok) {
-        throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
-      }
-
-      uploadedBytes += chunkEncrypted.byteLength;
-      const elapsed = (Date.now() - startTime) / 1000;
-      const rate = uploadedBytes / elapsed;
-      const remaining = totalBytes - uploadedBytes;
-      const estimated = rate > 0 ? remaining / rate : undefined;
-      state.progress = uploadedBytes / totalBytes;
-      state.rate = rate;
-      state.estimated = estimated;
-    }
-
-    const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
-      uploadId,
-      contentHmac: fileSigned,
-    });
+    const res = await axios.post("/api/file/upload", form, {
+      onUploadProgress: ({ progress, rate, estimated }) => {
+        state.progress = progress;
+        state.rate = rate;
+        state.estimated = estimated;
+      },
+    });
+    const { file }: FileUploadResponse = res.data;
 
-    if (thumbnailData) {
+    if (thumbnailForm) {
       try {
-        await uploadThumbnail(fileId, thumbnailData, dataKeyVersion);
+        await axios.post(`/api/file/${file}/thumbnail/upload`, thumbnailForm);
       } catch (e) {
-        // TODO: Error handling for thumbnail upload
+        // TODO
         console.error(e);
       }
     }
 
     state.status = "uploaded";
-    return { fileId, thumbnailBuffer: thumbnailData?.plaintext };
+    return { fileId: file };
   },
   { concurrency: 1 },
 );
-
-const requestFileUpload = async (
-  state: FileUploadState,
-  file: File,
-  masterKey: MasterKey,
-  hmacSecret: HmacSecret,
-  fileSigned: string,
-  parentId: DirectoryId,
-) => {
-  state.status = "uploading";
-
-  const fileType = getFileType(file);
-
-  const { dataKey, dataKeyVersion } = await generateDataKey();
-  const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
-
-  const nameEncrypted = await encryptString(file.name, dataKey);
-  const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey);
-
-  const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
-  const metadata = {
-    chunks: totalChunks,
-    parent: parentId,
-    mekVersion: masterKey.version,
-    dek: dataKeyWrapped,
-    dekVersion: dataKeyVersion,
-    hskVersion: hmacSecret.version,
-    contentType: fileType,
-    name: nameEncrypted.ciphertext,
-    nameIv: nameEncrypted.iv,
-    lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
-    lastModifiedAtIv: lastModifiedAtEncrypted.iv,
-  };
-  const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata);
-
-  const reader = file.stream().getReader();
-  const limit = pLimit(4);
-
-  let buffer = new Uint8Array(0);
-  let chunkIndex = 0;
-  const uploadPromises: Promise<void>[] = [];
-
-  const totalBytes = file.size;
-  let uploadedBytes = 0;
-  const startTime = Date.now();
-
-  const uploadChunk = async (
-    index: number,
-    encryptedChunk: ArrayBuffer,
-    chunkHash: string,
-    originalChunkSize: number,
-  ) => {
-    const response = await fetch(`/api/upload/${uploadId}/chunks/${index}`, {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/octet-stream",
-        "Content-Digest": `sha-256=:${chunkHash}:`,
-      },
-      body: encryptedChunk,
-    });
-    if (!response.ok) {
-      throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
-    }
-
-    uploadedBytes += originalChunkSize;
-    const elapsed = (Date.now() - startTime) / 1000;
-    const rate = uploadedBytes / elapsed;
-    const remaining = totalBytes - uploadedBytes;
-    const estimated = rate > 0 ? remaining / rate : undefined;
-    state.progress = uploadedBytes / totalBytes;
-    state.rate = rate;
-    state.estimated = estimated;
-  };
-
-  while (true) {
-    const { done, value } = await reader.read();
-    if (done && buffer.length === 0) break;
-
-    if (value) {
-      const newBuffer = new Uint8Array(buffer.length + value.length);
-      newBuffer.set(buffer);
-      newBuffer.set(value, buffer.length);
-      buffer = newBuffer;
-    }
-
-    while (buffer.length >= CHUNK_SIZE || (done && buffer.length > 0)) {
-      const chunkSize = Math.min(CHUNK_SIZE, buffer.length);
-      const chunk = buffer.slice(0, chunkSize);
-      buffer = buffer.slice(chunkSize);
-
-      const encryptedChunk = await encryptChunk(chunk.buffer.slice(0, chunk.byteLength), dataKey);
-      const chunkHash = encodeToBase64(await digestMessage(encryptedChunk));
-
-      const currentIndex = chunkIndex++;
-      uploadPromises.push(
-        limit(() => uploadChunk(currentIndex, encryptedChunk, chunkHash, chunkSize)),
-      );
-    }
-
-    if (done) break;
-  }
-
-  await Promise.all(uploadPromises);
-
-  const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
-    uploadId,
-    contentHmac: fileSigned,
-  });
-
-  if (fileType.startsWith("video/")) {
-    try {
-      const thumbnail = await generateThumbnailFromFile(file);
-      if (thumbnail) {
-        const thumbnailBuffer = await thumbnail.arrayBuffer();
-        const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey);
-        await uploadThumbnail(fileId, thumbnailEncrypted, dataKeyVersion);
-      }
-    } catch (e) {
-      // Thumbnail upload failure is not critical
-      console.error(e);
-    }
-  }
-
-  state.status = "uploaded";
-  return { fileId };
-};
 
 export const uploadFile = async (
   file: File,
   parentId: "root" | number,
@@ -416,62 +200,67 @@ export const uploadFile = async (
   state.status = "encryption-pending";
 
   try {
-    const { fileSigned } = await requestDuplicateFileScan(file, hmacSecret, onDuplicate);
-    if (!fileSigned) {
+    const { fileBuffer, fileSigned } = await requestDuplicateFileScan(
+      file,
+      hmacSecret,
+      onDuplicate,
+    );
+    if (!fileBuffer || !fileSigned) {
       state.status = "canceled";
       uploadingFiles = uploadingFiles.filter((file) => file !== state);
-      return;
+      return undefined;
     }
 
-    const fileType = getFileType(file);
-    if (fileType.startsWith("image/")) {
-      const fileBuffer = await file.arrayBuffer();
-      const {
-        dataKeyWrapped,
-        dataKeyVersion,
-        chunksEncrypted,
-        nameEncrypted,
-        createdAtEncrypted,
-        lastModifiedAtEncrypted,
-        thumbnail,
-      } = await encryptImageFile(state, file, masterKey);
-
-      const metadata = {
-        chunks: chunksEncrypted.length,
-        parent: parentId,
-        mekVersion: masterKey.version,
-        dek: dataKeyWrapped,
-        dekVersion: dataKeyVersion,
-        hskVersion: hmacSecret.version,
-        contentType: fileType,
-        name: nameEncrypted.ciphertext,
-        nameIv: nameEncrypted.iv,
-        createdAt: createdAtEncrypted?.ciphertext,
-        createdAtIv: createdAtEncrypted?.iv,
-        lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
-        lastModifiedAtIv: lastModifiedAtEncrypted.iv,
-      };
-
-      const { fileId, thumbnailBuffer } = await requestImageFileUpload(
-        state,
-        metadata,
-        chunksEncrypted,
-        fileSigned,
-        thumbnail ?? null,
-        dataKeyVersion,
-      );
-      return { fileId, fileBuffer, thumbnailBuffer };
-    } else {
-      const { fileId } = await requestFileUpload(
-        state,
-        file,
-        masterKey,
-        hmacSecret,
-        fileSigned,
-        parentId,
-      );
-      return { fileId };
-    }
+    const {
+      dataKeyWrapped,
+      dataKeyVersion,
+      fileType,
+      fileEncrypted,
+      fileEncryptedHash,
+      nameEncrypted,
+      createdAtEncrypted,
+      lastModifiedAtEncrypted,
+      thumbnail,
+    } = await encryptFile(state, file, fileBuffer, masterKey);
+
+    const form = new FormData();
+    form.set(
+      "metadata",
+      JSON.stringify({
+        parent: parentId,
+        mekVersion: masterKey.version,
+        dek: dataKeyWrapped,
+        dekVersion: dataKeyVersion.toISOString(),
+        hskVersion: hmacSecret.version,
+        contentHmac: fileSigned,
+        contentType: fileType,
+        contentIv: fileEncrypted.iv,
+        name: nameEncrypted.ciphertext,
+        nameIv: nameEncrypted.iv,
+        createdAt: createdAtEncrypted?.ciphertext,
+        createdAtIv: createdAtEncrypted?.iv,
+        lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
+        lastModifiedAtIv: lastModifiedAtEncrypted.iv,
+      } satisfies FileUploadRequest),
+    );
+    form.set("content", new Blob([fileEncrypted.ciphertext]));
+    form.set("checksum", fileEncryptedHash);
+
+    let thumbnailForm = null;
+    if (thumbnail) {
+      thumbnailForm = new FormData();
+      thumbnailForm.set(
+        "metadata",
+        JSON.stringify({
+          dekVersion: dataKeyVersion.toISOString(),
+          contentIv: thumbnail.iv,
+        } satisfies FileThumbnailUploadRequest),
+      );
+      thumbnailForm.set("content", new Blob([thumbnail.ciphertext]));
+    }
+
+    const { fileId } = await requestFileUpload(state, form, thumbnailForm);
+    return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext };
   } catch (e) {
     state.status = "error";
     throw e;
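
The chunk endpoints carry an integrity check in the Content-Digest request header using the RFC 9530 dictionary form, sha-256=:<base64>:. A hedged sketch of how a server might verify it (Node, raw body already buffered; a minimal regex rather than a full structured-fields parser):

import { createHash } from "node:crypto";

// Verify an RFC 9530 `Content-Digest: sha-256=:<base64>:` header
// against the received body; returns false on mismatch or absence.
const verifyContentDigest = (header: string | null, body: Buffer): boolean => {
  const match = header?.match(/sha-256=:([A-Za-z0-9+/=]+):/);
  if (!match) return false;
  const actual = createHash("sha256").update(body).digest("base64");
  return actual === match[1];
};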

View File

@@ -47,10 +47,10 @@ const cache = new FilesystemCache<number, MaybeFileInfo>({
     return storeToIndexedDB({
       id,
-      isLegacy: file.isLegacy,
       parentId: file.parent,
       dataKey: metadata.dataKey,
       contentType: file.contentType,
+      contentIv: file.contentIv,
       name: metadata.name,
       createdAt: metadata.createdAt,
       lastModifiedAt: metadata.lastModifiedAt,
@@ -116,9 +116,9 @@ const cache = new FilesystemCache<number, MaybeFileInfo>({
     return {
       id,
       exists: true as const,
-      isLegacy: metadataRaw.isLegacy,
       parentId: metadataRaw.parent,
       contentType: metadataRaw.contentType,
+      contentIv: metadataRaw.contentIv,
       categories,
       ...metadata,
     };

View File

@@ -28,10 +28,10 @@ export type SubDirectoryInfo = Omit<LocalDirectoryInfo, "subDirectories" | "file
 export interface FileInfo {
   id: number;
-  isLegacy?: boolean;
   parentId: DirectoryId;
   dataKey?: DataKey;
   contentType: string;
+  contentIv?: string;
   name: string;
   createdAt?: Date;
   lastModifiedAt: Date;
@@ -42,7 +42,7 @@ export type MaybeFileInfo =
   | (FileInfo & { exists: true })
   | ({ id: number; exists: false } & AllUndefined<Omit<FileInfo, "id">>);
 
-export type SummarizedFileInfo = Omit<FileInfo, "categories">;
+export type SummarizedFileInfo = Omit<FileInfo, "contentIv" | "categories">;
 export type CategoryFileInfo = SummarizedFileInfo & { isRecursive: boolean };
 
 interface LocalCategoryInfo {

View File

@@ -1,14 +0,0 @@
-export const parseRangeHeader = (rangeHeader: string | null) => {
-  if (!rangeHeader) return undefined;
-
-  const firstRange = rangeHeader.split(",")[0]!.trim();
-  const parts = firstRange.replace(/bytes=/, "").split("-");
-  return {
-    start: parts[0] ? parseInt(parts[0], 10) : undefined,
-    end: parts[1] ? parseInt(parts[1], 10) : undefined,
-  };
-};
-
-export const getContentRangeHeader = (range?: { start: number; end: number; total: number }) => {
-  return range && { "Content-Range": `bytes ${range.start}-${range.end}/${range.total}` };
-};
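
These removed helpers cover the common single-range form of HTTP range requests: Range: bytes=start-end in, Content-Range: bytes start-end/total out. A usage sketch built on the two helpers above (the clamping policy is illustrative, not from the repo):

// Serve bytes [start, end] of a stored object for a request like
// "Range: bytes=0-1048575"; total is the full object size.
const resolveRange = (rangeHeader: string | null, total: number) => {
  const parsed = parseRangeHeader(rangeHeader); // e.g. { start: 0, end: 1048575 }
  if (!parsed) return { start: 0, end: total - 1, status: 200 };
  const start = parsed.start ?? 0;
  const end = Math.min(parsed.end ?? total - 1, total - 1);
  return { start, end, status: 206 }; // 206 Partial Content
};

// getContentRangeHeader({ start, end, total }) then yields the matching
// { "Content-Range": "bytes start-end/total" } response header.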

View File

@@ -2,7 +2,7 @@ import { z } from "zod";
 import { storeClientKey } from "$lib/indexedDB";
 import type { ClientKeys } from "$lib/stores";
 
-const SerializedClientKeysSchema = z.intersection(
+const serializedClientKeysSchema = z.intersection(
   z.object({
     generator: z.literal("ArkVault"),
     exportedAt: z.iso.datetime(),
@@ -16,7 +16,7 @@ const SerializedClientKeysSchema = z.intersection(
   }),
 );
-type SerializedClientKeys = z.infer<typeof SerializedClientKeysSchema>;
+type SerializedClientKeys = z.infer<typeof serializedClientKeysSchema>;
 
 type DeserializedClientKeys = {
   encryptKeyBase64: string;
@@ -43,7 +43,7 @@ export const serializeClientKeys = ({
 };
 
 export const deserializeClientKeys = (serialized: string) => {
-  const zodRes = SerializedClientKeysSchema.safeParse(JSON.parse(serialized));
+  const zodRes = serializedClientKeysSchema.safeParse(JSON.parse(serialized));
   if (zodRes.success) {
     return {
       encryptKeyBase64: zodRes.data.encryptKey,

View File

@@ -1,5 +1,13 @@
+let rootHandle: FileSystemDirectoryHandle | null = null;
+
+export const prepareOpfs = async () => {
+  rootHandle = await navigator.storage.getDirectory();
+};
+
 const getFileHandle = async (path: string, create = true) => {
-  if (path[0] !== "/") {
+  if (!rootHandle) {
+    throw new Error("OPFS not prepared");
+  } else if (path[0] !== "/") {
     throw new Error("Path must be absolute");
   }
 
@@ -9,7 +17,7 @@ const getFileHandle = async (path: string, create = true) => {
   }
 
   try {
-    let directoryHandle = await navigator.storage.getDirectory();
+    let directoryHandle = rootHandle;
     for (const part of parts.slice(0, -1)) {
       if (!part) continue;
       directoryHandle = await directoryHandle.getDirectoryHandle(part, { create });
@@ -26,15 +34,12 @@ const getFileHandle = async (path: string, create = true) => {
   }
 };
 
-export const getFile = async (path: string) => {
+export const readFile = async (path: string) => {
   const { fileHandle } = await getFileHandle(path, false);
   if (!fileHandle) return null;
-  return await fileHandle.getFile();
-};
-
-export const readFile = async (path: string) => {
-  return (await getFile(path))?.arrayBuffer() ?? null;
+  const file = await fileHandle.getFile();
+  return await file.arrayBuffer();
 };
 
 export const writeFile = async (path: string, data: ArrayBuffer) => {
@@ -56,7 +61,9 @@ export const deleteFile = async (path: string) => {
 };
 
 const getDirectoryHandle = async (path: string) => {
-  if (path[0] !== "/") {
+  if (!rootHandle) {
+    throw new Error("OPFS not prepared");
+  } else if (path[0] !== "/") {
     throw new Error("Path must be absolute");
   }
 
@@ -66,7 +73,7 @@ const getDirectoryHandle = async (path: string) => {
   try {
-    let directoryHandle = await navigator.storage.getDirectory();
+    let directoryHandle = rootHandle;
     let parentHandle;
     for (const part of parts.slice(1)) {
       if (!part) continue;
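
OPFS is an origin-private filesystem: the module resolves the root handle once at startup, then walks directory handles per path segment. A standalone sketch of a write through the same browser API (the path and names are illustrative):

// Write data to /cache/demo.bin inside the origin-private filesystem.
const writeDemo = async (data: ArrayBuffer) => {
  const root = await navigator.storage.getDirectory();
  const dir = await root.getDirectoryHandle("cache", { create: true });
  const handle = await dir.getFileHandle("demo.bin", { create: true });
  const writable = await handle.createWritable(); // FileSystemWritableFileStream
  await writable.write(data);
  await writable.close(); // contents are committed on close
};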

View File

@@ -122,22 +122,6 @@ export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: strin
   }
 };
 
-export const generateThumbnailFromFile = async (file: File) => {
-  if (!file.type.startsWith("video/")) return null;
-
-  let url;
-  try {
-    url = URL.createObjectURL(file);
-    return await generateVideoThumbnail(url);
-  } catch {
-    return null;
-  } finally {
-    if (url) {
-      URL.revokeObjectURL(url);
-    }
-  }
-};
-
 export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => {
   return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`;
 };

View File

@@ -1,4 +0,0 @@
-import { z } from "zod";
-
-export const DirectoryIdSchema = z.union([z.literal("root"), z.int().positive()]);
-export const CategoryIdSchema = z.union([z.literal("root"), z.int().positive()]);

View File

@@ -1 +0,0 @@
-export * from "./filesystem";

View File

@@ -15,6 +15,8 @@ interface Directory {
   encName: Ciphertext;
 }
 
+export type NewDirectory = Omit<Directory, "id">;
+
 interface File {
   id: number;
   parentId: DirectoryId;
@@ -26,13 +28,15 @@ interface File {
   hskVersion: number | null;
   contentHmac: string | null;
   contentType: string;
-  encContentIv: string | null;
+  encContentIv: string;
   encContentHash: string;
   encName: Ciphertext;
   encCreatedAt: Ciphertext | null;
   encLastModifiedAt: Ciphertext;
 }
 
+export type NewFile = Omit<File, "id">;
+
 interface FileCategory {
   id: number;
   parentId: CategoryId;
@@ -42,7 +46,7 @@ interface FileCategory {
   encName: Ciphertext;
 }
 
-export const registerDirectory = async (params: Omit<Directory, "id">) => {
+export const registerDirectory = async (params: NewDirectory) => {
   await db.transaction().execute(async (trx) => {
     const mek = await trx
       .selectFrom("master_encryption_key")
@@ -210,11 +214,38 @@ export const unregisterDirectory = async (userId: number, directoryId: number) =
   });
 };
 
-export const registerFile = async (trx: typeof db, params: Omit<File, "id">) => {
+export const registerFile = async (params: NewFile) => {
   if ((params.hskVersion && !params.contentHmac) || (!params.hskVersion && params.contentHmac)) {
     throw new Error("Invalid arguments");
   }
 
+  return await db.transaction().execute(async (trx) => {
+    const mek = await trx
+      .selectFrom("master_encryption_key")
+      .select("version")
+      .where("user_id", "=", params.userId)
+      .where("state", "=", "active")
+      .limit(1)
+      .forUpdate()
+      .executeTakeFirst();
+    if (mek?.version !== params.mekVersion) {
+      throw new IntegrityError("Inactive MEK version");
+    }
+
+    if (params.hskVersion) {
+      const hsk = await trx
+        .selectFrom("hmac_secret_key")
+        .select("version")
+        .where("user_id", "=", params.userId)
+        .where("state", "=", "active")
+        .limit(1)
+        .forUpdate()
+        .executeTakeFirst();
+      if (hsk?.version !== params.hskVersion) {
+        throw new IntegrityError("Inactive HSK version");
+      }
+    }
+
   const { fileId } = await trx
     .insertInto("file")
     .values({
@@ -245,6 +276,7 @@ export const registerFile = async (trx: typeof db, params: Omit<File, "id">) =>
     })
     .execute();
   return { id: fileId };
+  });
 };
 
 export const getAllFilesByParent = async (userId: number, parentId: DirectoryId) => {
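
The reworked registerFile performs the active-key check and the insert in one transaction, with forUpdate() locking the active MEK row so a concurrent key rotation cannot slip in between check and insert. A reduced sketch of the pattern (db, table names, and IntegrityError follow the diff above):

// Insert a row only if the caller's key version is still the active one;
// SELECT ... FOR UPDATE blocks a concurrent rotation until this commits.
const insertWithVersionCheck = async (userId: number, mekVersion: number) =>
  db.transaction().execute(async (trx) => {
    const active = await trx
      .selectFrom("master_encryption_key")
      .select("version")
      .where("user_id", "=", userId)
      .where("state", "=", "active")
      .forUpdate()
      .executeTakeFirst();
    if (active?.version !== mekVersion) {
      throw new IntegrityError("Inactive MEK version");
    }
    // ...insertInto("file") here, inside the same trx...
  });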

View File

@@ -5,7 +5,6 @@ export * as HskRepo from "./hsk";
 export * as MediaRepo from "./media";
 export * as MekRepo from "./mek";
 export * as SessionRepo from "./session";
-export * as UploadRepo from "./upload";
 export * as UserRepo from "./user";
 
 export * from "./error";

View File

@@ -6,7 +6,7 @@ interface Thumbnail {
   id: number;
   path: string;
   updatedAt: Date;
-  encContentIv: string | null;
+  encContentIv: string;
 }
 
 interface FileThumbnail extends Thumbnail {
@@ -14,13 +14,13 @@ interface FileThumbnail extends Thumbnail {
 }
 
 export const updateFileThumbnail = async (
-  trx: typeof db,
   userId: number,
   fileId: number,
   dekVersion: Date,
   path: string,
-  encContentIv: string | null,
+  encContentIv: string,
 ) => {
+  return await db.transaction().execute(async (trx) => {
   const file = await trx
     .selectFrom("file")
     .select("data_encryption_key_version")
@@ -61,6 +61,7 @@ export const updateFileThumbnail = async (
     )
     .execute();
   return thumbnail?.oldPath ?? null;
+  });
 };
 
 export const getFileThumbnail = async (userId: number, fileId: number) => {

View File

@@ -1,63 +0,0 @@
-import { Kysely, sql } from "kysely";
-
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-export const up = async (db: Kysely<any>) => {
-  // file.ts
-  await db.schema
-    .alterTable("file")
-    .alterColumn("encrypted_content_iv", (col) => col.dropNotNull())
-    .execute();
-
-  // media.ts
-  await db.schema
-    .alterTable("thumbnail")
-    .alterColumn("encrypted_content_iv", (col) => col.dropNotNull())
-    .execute();
-
-  // upload.ts
-  await db.schema
-    .createTable("upload_session")
-    .addColumn("id", "uuid", (col) => col.primaryKey())
-    .addColumn("type", "text", (col) => col.notNull())
-    .addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
-    .addColumn("path", "text", (col) => col.notNull())
-    .addColumn("total_chunks", "integer", (col) => col.notNull())
-    .addColumn("uploaded_chunks", sql`integer[]`, (col) => col.notNull().defaultTo(sql`'{}'`))
-    .addColumn("expires_at", "timestamp(3)", (col) => col.notNull())
-    .addColumn("parent_id", "integer", (col) => col.references("directory.id"))
-    .addColumn("master_encryption_key_version", "integer")
-    .addColumn("encrypted_data_encryption_key", "text")
-    .addColumn("data_encryption_key_version", "timestamp(3)")
-    .addColumn("hmac_secret_key_version", "integer")
-    .addColumn("content_type", "text")
-    .addColumn("encrypted_name", "json")
-    .addColumn("encrypted_created_at", "json")
-    .addColumn("encrypted_last_modified_at", "json")
-    .addColumn("file_id", "integer", (col) => col.references("file.id"))
-    .addForeignKeyConstraint(
-      "upload_session_fk01",
-      ["user_id", "master_encryption_key_version"],
-      "master_encryption_key",
-      ["user_id", "version"],
-    )
-    .addForeignKeyConstraint(
-      "upload_session_fk02",
-      ["user_id", "hmac_secret_key_version"],
-      "hmac_secret_key",
-      ["user_id", "version"],
-    )
-    .execute();
-};
-
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-export const down = async (db: Kysely<any>) => {
-  await db.schema.dropTable("upload_session").execute();
-
-  await db.schema
-    .alterTable("thumbnail")
-    .alterColumn("encrypted_content_iv", (col) => col.setNotNull())
-    .execute();
-
-  await db.schema
-    .alterTable("file")
-    .alterColumn("encrypted_content_iv", (col) => col.setNotNull())
-    .execute();
-};

View File

@@ -1,11 +1,9 @@
 import * as Initial1737357000 from "./1737357000-Initial";
 import * as AddFileCategory1737422340 from "./1737422340-AddFileCategory";
 import * as AddThumbnail1738409340 from "./1738409340-AddThumbnail";
-import * as AddChunkedUpload1768062380 from "./1768062380-AddChunkedUpload";
 
 export default {
   "1737357000-Initial": Initial1737357000,
   "1737422340-AddFileCategory": AddFileCategory1737422340,
   "1738409340-AddThumbnail": AddThumbnail1738409340,
-  "1768062380-AddChunkedUpload": AddChunkedUpload1768062380,
 };

View File

@@ -1,5 +1,5 @@
 import type { Generated } from "kysely";
-import type { Ciphertext } from "./utils";
+import type { Ciphertext } from "./util";
 
 interface CategoryTable {
   id: Generated<number>;

View File

@@ -1,5 +1,5 @@
 import type { ColumnType, Generated } from "kysely";
-import type { Ciphertext } from "./utils";
+import type { Ciphertext } from "./util";
 
 interface DirectoryTable {
   id: Generated<number>;
@@ -30,7 +30,7 @@ interface FileTable {
   hmac_secret_key_version: number | null;
   content_hmac: string | null; // Base64
   content_type: string;
-  encrypted_content_iv: string | null; // Base64
+  encrypted_content_iv: string; // Base64
   encrypted_content_hash: string; // Base64
   encrypted_name: Ciphertext;
   encrypted_created_at: Ciphertext | null;

View File

@@ -5,9 +5,8 @@ export * from "./hsk";
 export * from "./media";
 export * from "./mek";
 export * from "./session";
-export * from "./upload";
 export * from "./user";
-export * from "./utils";
+export * from "./util";
 
 // eslint-disable-next-line @typescript-eslint/no-empty-object-type
 export interface Database {}

View File

@@ -7,7 +7,7 @@ interface ThumbnailTable {
category_id: number | null; category_id: number | null;
path: string; path: string;
updated_at: Date; updated_at: Date;
encrypted_content_iv: string | null; // Base64 encrypted_content_iv: string; // Base64
} }
declare module "./index" { declare module "./index" {

View File

@@ -1,32 +0,0 @@
import type { Generated } from "kysely";
import type { Ciphertext } from "./utils";
interface UploadSessionTable {
id: string;
type: "file" | "thumbnail";
user_id: number;
path: string;
total_chunks: number;
uploaded_chunks: Generated<number[]>;
expires_at: Date;
// For file uploads
parent_id: number | null;
master_encryption_key_version: number | null;
encrypted_data_encryption_key: string | null; // Base64
data_encryption_key_version: Date | null;
hmac_secret_key_version: number | null;
content_type: string | null;
encrypted_name: Ciphertext | null;
encrypted_created_at: Ciphertext | null;
encrypted_last_modified_at: Ciphertext | null;
// For thumbnail uploads
file_id: number | null;
}
declare module "./index" {
interface Database {
upload_session: UploadSessionTable;
}
}
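
Note: the declare module block above is TypeScript declaration merging: each table file augments the Database interface exported from ./index, so the shared Kysely instance types queries against upload_session without a central table registry. A usage sketch (db being the instance from ./kysely):

const stale = await db
  .selectFrom("upload_session")
  .select(["id", "path", "uploaded_chunks", "total_chunks"])
  .where("expires_at", "<=", new Date())
  .execute(); // fully typed thanks to the merged UploadSessionTable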

View File

@@ -1,185 +0,0 @@
import { sql } from "kysely";
import { IntegrityError } from "./error";
import db from "./kysely";
import type { Ciphertext } from "./schema";
interface BaseUploadSession {
id: string;
userId: number;
path: string;
totalChunks: number;
uploadedChunks: number[];
expiresAt: Date;
}
interface FileUploadSession extends BaseUploadSession {
type: "file";
parentId: DirectoryId;
mekVersion: number;
encDek: string;
dekVersion: Date;
hskVersion: number | null;
contentType: string;
encName: Ciphertext;
encCreatedAt: Ciphertext | null;
encLastModifiedAt: Ciphertext;
}
interface ThumbnailUploadSession extends BaseUploadSession {
type: "thumbnail";
fileId: number;
dekVersion: Date;
}
export const createFileUploadSession = async (
params: Omit<FileUploadSession, "type" | "uploadedChunks">,
) => {
await db.transaction().execute(async (trx) => {
const mek = await trx
.selectFrom("master_encryption_key")
.select("version")
.where("user_id", "=", params.userId)
.where("state", "=", "active")
.limit(1)
.forUpdate()
.executeTakeFirst();
if (mek?.version !== params.mekVersion) {
throw new IntegrityError("Inactive MEK version");
}
if (params.hskVersion) {
const hsk = await trx
.selectFrom("hmac_secret_key")
.select("version")
.where("user_id", "=", params.userId)
.where("state", "=", "active")
.limit(1)
.forUpdate()
.executeTakeFirst();
if (hsk?.version !== params.hskVersion) {
throw new IntegrityError("Inactive HSK version");
}
}
await trx
.insertInto("upload_session")
.values({
id: params.id,
type: "file",
user_id: params.userId,
path: params.path,
total_chunks: params.totalChunks,
expires_at: params.expiresAt,
parent_id: params.parentId !== "root" ? params.parentId : null,
master_encryption_key_version: params.mekVersion,
encrypted_data_encryption_key: params.encDek,
data_encryption_key_version: params.dekVersion,
hmac_secret_key_version: params.hskVersion,
content_type: params.contentType,
encrypted_name: params.encName,
encrypted_created_at: params.encCreatedAt,
encrypted_last_modified_at: params.encLastModifiedAt,
})
.execute();
});
};
export const createThumbnailUploadSession = async (
params: Omit<ThumbnailUploadSession, "type" | "uploadedChunks">,
) => {
await db.transaction().execute(async (trx) => {
const file = await trx
.selectFrom("file")
.select("data_encryption_key_version")
.where("id", "=", params.fileId)
.where("user_id", "=", params.userId)
.limit(1)
.forUpdate()
.executeTakeFirst();
if (!file) {
throw new IntegrityError("File not found");
} else if (file.data_encryption_key_version.getTime() !== params.dekVersion.getTime()) {
throw new IntegrityError("Invalid DEK version");
}
await trx
.insertInto("upload_session")
.values({
id: params.id,
type: "thumbnail",
user_id: params.userId,
path: params.path,
total_chunks: params.totalChunks,
expires_at: params.expiresAt,
file_id: params.fileId,
data_encryption_key_version: params.dekVersion,
})
.execute();
});
};
export const getUploadSession = async (sessionId: string, userId: number) => {
const session = await db
.selectFrom("upload_session")
.selectAll()
.where("id", "=", sessionId)
.where("user_id", "=", userId)
.where("expires_at", ">", new Date())
.limit(1)
.executeTakeFirst();
if (!session) {
return null;
} else if (session.type === "file") {
return {
type: "file",
id: session.id,
userId: session.user_id,
path: session.path,
totalChunks: session.total_chunks,
uploadedChunks: session.uploaded_chunks,
expiresAt: session.expires_at,
parentId: session.parent_id ?? "root",
mekVersion: session.master_encryption_key_version!,
encDek: session.encrypted_data_encryption_key!,
dekVersion: session.data_encryption_key_version!,
hskVersion: session.hmac_secret_key_version,
contentType: session.content_type!,
encName: session.encrypted_name!,
encCreatedAt: session.encrypted_created_at,
encLastModifiedAt: session.encrypted_last_modified_at!,
} satisfies FileUploadSession;
} else {
return {
type: "thumbnail",
id: session.id,
userId: session.user_id,
path: session.path,
totalChunks: session.total_chunks,
uploadedChunks: session.uploaded_chunks,
expiresAt: session.expires_at,
fileId: session.file_id!,
dekVersion: session.data_encryption_key_version!,
} satisfies ThumbnailUploadSession;
}
};
export const markChunkAsUploaded = async (sessionId: string, chunkIndex: number) => {
await db
.updateTable("upload_session")
.set({ uploaded_chunks: sql`array_append(uploaded_chunks, ${chunkIndex})` })
.where("id", "=", sessionId)
.execute();
};
export const deleteUploadSession = async (trx: typeof db, sessionId: string) => {
await trx.deleteFrom("upload_session").where("id", "=", sessionId).execute();
};
export const cleanupExpiredUploadSessions = async () => {
const sessions = await db
.deleteFrom("upload_session")
.where("expires_at", "<=", new Date())
.returning("path")
.execute();
return sessions.map(({ path }) => path);
};
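
Note: taken together, this repository implements the session lifecycle: create (with MEK/HSK version checks under FOR UPDATE), mark chunks, read back, and reap expired rows. A lifecycle sketch with illustrative values (encName and friends are presumably { ciphertext, iv } pairs; the staging path is made up):

const id = crypto.randomUUID();
await createFileUploadSession({
  id,
  userId: 1,
  path: `uploads/${id}`, // illustrative staging directory
  totalChunks: 3,
  expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000),
  parentId: "root",
  mekVersion: 1,
  encDek: "ZGVr",
  dekVersion: new Date(),
  hskVersion: null,
  contentType: "video/mp4",
  encName: { ciphertext: "bmFtZQ==", iv: "aXY=" },
  encCreatedAt: null,
  encLastModifiedAt: { ciphertext: "dGltZQ==", iv: "aXY=" },
});
await markChunkAsUploaded(id, 0); // array_append alone would allow duplicates;
                                  // the service layer checks uploadedChunks first
const session = await getUploadSession(id, 1); // null once expires_at has passed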

View File

@@ -26,5 +26,4 @@ export default {
}, },
libraryPath: env.LIBRARY_PATH || "library", libraryPath: env.LIBRARY_PATH || "library",
thumbnailsPath: env.THUMBNAILS_PATH || "thumbnails", thumbnailsPath: env.THUMBNAILS_PATH || "thumbnails",
uploadsPath: env.UPLOADS_PATH || "uploads",
}; };

View File

@@ -1,10 +1,4 @@
import { rm, unlink } from "fs/promises"; import { unlink } from "fs/promises";
export const safeRecursiveRm = async (path: string | null | undefined) => {
if (path) {
await rm(path, { recursive: true }).catch(console.error);
}
};
export const safeUnlink = async (path: string | null | undefined) => { export const safeUnlink = async (path: string | null | undefined) => {
if (path) { if (path) {

View File

@@ -0,0 +1,3 @@
import { z } from "zod";
export const categoryIdSchema = z.union([z.literal("root"), z.int().positive()]);

View File

@@ -0,0 +1,3 @@
import { z } from "zod";
export const directoryIdSchema = z.union([z.literal("root"), z.int().positive()]);
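
Note: both schemas accept the literal "root" as a sentinel for the top-level container alongside a positive integer id:

directoryIdSchema.parse("root"); // → "root"
directoryIdSchema.parse(42);     // → 42
directoryIdSchema.parse(0);      // throws: not positive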

View File

@@ -0,0 +1,36 @@
import mime from "mime";
import { z } from "zod";
import { directoryIdSchema } from "./directory";
export const fileThumbnailUploadRequest = z.object({
dekVersion: z.iso.datetime(),
contentIv: z.base64().nonempty(),
});
export type FileThumbnailUploadRequest = z.input<typeof fileThumbnailUploadRequest>;
export const fileUploadRequest = z.object({
parent: directoryIdSchema,
mekVersion: z.int().positive(),
dek: z.base64().nonempty(),
dekVersion: z.iso.datetime(),
hskVersion: z.int().positive(),
contentHmac: z.base64().nonempty(),
contentType: z
.string()
.trim()
.nonempty()
.refine((value) => mime.getExtension(value) !== null), // MIME type
contentIv: z.base64().nonempty(),
name: z.base64().nonempty(),
nameIv: z.base64().nonempty(),
createdAt: z.base64().nonempty().optional(),
createdAtIv: z.base64().nonempty().optional(),
lastModifiedAt: z.base64().nonempty(),
lastModifiedAtIv: z.base64().nonempty(),
});
export type FileUploadRequest = z.input<typeof fileUploadRequest>;
export const fileUploadResponse = z.object({
file: z.int().positive(),
});
export type FileUploadResponse = z.output<typeof fileUploadResponse>;
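
For reference, a payload that passes fileUploadRequest (values illustrative; every base64 field must be non-empty, and contentType must map to a known extension via mime.getExtension):

const body = fileUploadRequest.parse({
  parent: "root",
  mekVersion: 1,
  dek: "ZGVrLWNpcGhlcnRleHQ=",
  dekVersion: new Date().toISOString(),
  hskVersion: 1,
  contentHmac: "aG1hYw==",
  contentType: "image/png",
  contentIv: "aXY=",
  name: "bmFtZQ==",
  nameIv: "aXY=",
  lastModifiedAt: "dGltZQ==",
  lastModifiedAtIv: "aXY=",
}); // createdAt/createdAtIv may be omitted, but only together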

View File

@@ -0,0 +1,3 @@
export * from "./category";
export * from "./directory";
export * from "./file";

View File

@@ -1,74 +1,126 @@
import { error } from "@sveltejs/kit"; import { error } from "@sveltejs/kit";
import { createReadStream } from "fs"; import { createHash } from "crypto";
import { stat } from "fs/promises"; import { createReadStream, createWriteStream } from "fs";
import { mkdir, stat } from "fs/promises";
import { dirname } from "path";
import { Readable } from "stream"; import { Readable } from "stream";
import { FileRepo, MediaRepo } from "$lib/server/db"; import { pipeline } from "stream/promises";
import { v4 as uuidv4 } from "uuid";
import { FileRepo, MediaRepo, IntegrityError } from "$lib/server/db";
import env from "$lib/server/loadenv";
import { safeUnlink } from "$lib/server/modules/filesystem";
const createEncContentStream = async ( export const getFileStream = async (userId: number, fileId: number) => {
path: string,
iv?: Buffer,
range?: { start?: number; end?: number },
) => {
const { size: fileSize } = await stat(path);
const ivSize = iv?.byteLength ?? 0;
const totalSize = fileSize + ivSize;
const start = range?.start ?? 0;
const end = range?.end ?? totalSize - 1;
if (start > end || start < 0 || end >= totalSize) {
error(416, "Invalid range");
}
return {
encContentStream: Readable.toWeb(
Readable.from(
(async function* () {
if (start < ivSize) {
yield iv!.subarray(start, Math.min(end + 1, ivSize));
}
if (end >= ivSize) {
yield* createReadStream(path, {
start: Math.max(0, start - ivSize),
end: end - ivSize,
});
}
})(),
),
),
range: { start, end, total: totalSize },
};
};
export const getFileStream = async (
userId: number,
fileId: number,
range?: { start?: number; end?: number },
) => {
const file = await FileRepo.getFile(userId, fileId); const file = await FileRepo.getFile(userId, fileId);
if (!file) { if (!file) {
error(404, "Invalid file id"); error(404, "Invalid file id");
} }
return createEncContentStream( const { size } = await stat(file.path);
file.path, return {
file.encContentIv ? Buffer.from(file.encContentIv, "base64") : undefined, encContentStream: Readable.toWeb(createReadStream(file.path)),
range, encContentSize: size,
); };
}; };
export const getFileThumbnailStream = async ( export const getFileThumbnailStream = async (userId: number, fileId: number) => {
userId: number,
fileId: number,
range?: { start?: number; end?: number },
) => {
const thumbnail = await MediaRepo.getFileThumbnail(userId, fileId); const thumbnail = await MediaRepo.getFileThumbnail(userId, fileId);
if (!thumbnail) { if (!thumbnail) {
error(404, "File or its thumbnail not found"); error(404, "File or its thumbnail not found");
} }
return createEncContentStream( const { size } = await stat(thumbnail.path);
thumbnail.path, return {
thumbnail.encContentIv ? Buffer.from(thumbnail.encContentIv, "base64") : undefined, encContentStream: Readable.toWeb(createReadStream(thumbnail.path)),
range, encContentSize: size,
); };
};
export const uploadFileThumbnail = async (
userId: number,
fileId: number,
dekVersion: Date,
encContentIv: string,
encContentStream: Readable,
) => {
const path = `${env.thumbnailsPath}/${userId}/${uuidv4()}`;
await mkdir(dirname(path), { recursive: true });
try {
await pipeline(encContentStream, createWriteStream(path, { flags: "wx", mode: 0o600 }));
const oldPath = await MediaRepo.updateFileThumbnail(
userId,
fileId,
dekVersion,
path,
encContentIv,
);
safeUnlink(oldPath); // Intended
} catch (e) {
await safeUnlink(path);
if (e instanceof IntegrityError) {
if (e.message === "File not found") {
error(404, "File not found");
} else if (e.message === "Invalid DEK version") {
error(400, "Mismatched DEK version");
}
}
throw e;
}
};
export const uploadFile = async (
params: Omit<FileRepo.NewFile, "path" | "encContentHash">,
encContentStream: Readable,
encContentHash: Promise<string>,
) => {
const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
const oneMinuteLater = new Date(Date.now() + 60 * 1000);
if (params.dekVersion <= oneDayAgo || params.dekVersion >= oneMinuteLater) {
error(400, "Invalid DEK version");
}
const path = `${env.libraryPath}/${params.userId}/${uuidv4()}`;
await mkdir(dirname(path), { recursive: true });
try {
const hashStream = createHash("sha256");
const [, hash] = await Promise.all([
pipeline(
encContentStream,
async function* (source) {
for await (const chunk of source) {
hashStream.update(chunk);
yield chunk;
}
},
createWriteStream(path, { flags: "wx", mode: 0o600 }),
),
encContentHash,
]);
if (hashStream.digest("base64") !== hash) {
throw new Error("Invalid checksum");
}
const { id: fileId } = await FileRepo.registerFile({
...params,
path,
encContentHash: hash,
});
return { fileId };
} catch (e) {
await safeUnlink(path);
if (e instanceof IntegrityError && e.message === "Inactive MEK version") {
error(400, "Invalid MEK version");
} else if (
e instanceof Error &&
(e.message === "Invalid request body" || e.message === "Invalid checksum")
) {
error(400, "Invalid request body");
}
throw e;
}
}; };
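
Note: uploadFile above tees the request stream through a SHA-256 hasher on its way to disk; Node's pipeline() accepts async generator transforms, so the whole file is never buffered. The pattern in isolation (file names illustrative):

import { createHash } from "crypto";
import { createReadStream, createWriteStream } from "fs";
import { pipeline } from "stream/promises";

const hash = createHash("sha256");
await pipeline(
  createReadStream("in.bin"),
  async function* (source) {
    for await (const chunk of source) {
      hash.update(chunk); // hash while streaming
      yield chunk;        // pass bytes through unchanged
    }
  },
  createWriteStream("out.bin"),
);
console.log(hash.digest("base64")); // compare against the client-sent checksum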

View File

@@ -1,82 +0,0 @@
import { error } from "@sveltejs/kit";
import { createHash } from "crypto";
import { createWriteStream } from "fs";
import { Readable } from "stream";
import { ENCRYPTION_OVERHEAD, ENCRYPTED_CHUNK_SIZE } from "$lib/constants";
import { UploadRepo } from "$lib/server/db";
import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem";
const chunkLocks = new Set<string>();
export const uploadChunk = async (
userId: number,
sessionId: string,
chunkIndex: number,
encChunkStream: Readable,
encChunkHash: string,
) => {
const lockKey = `${sessionId}/${chunkIndex}`;
if (chunkLocks.has(lockKey)) {
error(409, "Chunk upload already in progress");
} else {
chunkLocks.add(lockKey);
}
let filePath;
try {
const session = await UploadRepo.getUploadSession(sessionId, userId);
if (!session) {
error(404, "Invalid upload id");
} else if (chunkIndex >= session.totalChunks) {
error(400, "Invalid chunk index");
} else if (session.uploadedChunks.includes(chunkIndex)) {
error(409, "Chunk already uploaded");
}
const isLastChunk = chunkIndex === session.totalChunks - 1;
filePath = `${session.path}/${chunkIndex}`;
const hashStream = createHash("sha256");
const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
let writtenBytes = 0;
for await (const chunk of encChunkStream) {
hashStream.update(chunk);
writeStream.write(chunk);
writtenBytes += chunk.length;
}
await new Promise<void>((resolve, reject) => {
writeStream.end((e: any) => (e ? reject(e) : resolve()));
});
if (hashStream.digest("base64") !== encChunkHash) {
throw new Error("Invalid checksum");
} else if (
(!isLastChunk && writtenBytes !== ENCRYPTED_CHUNK_SIZE) ||
(isLastChunk && (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > ENCRYPTED_CHUNK_SIZE))
) {
throw new Error("Invalid chunk size");
}
await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex);
} catch (e) {
await safeUnlink(filePath);
if (
e instanceof Error &&
(e.message === "Invalid checksum" || e.message === "Invalid chunk size")
) {
error(400, "Invalid request body");
}
throw e;
} finally {
chunkLocks.delete(lockKey);
}
};
export const cleanupExpiredUploadSessions = async () => {
const paths = await UploadRepo.cleanupExpiredUploadSessions();
await Promise.all(paths.map(safeRecursiveRm));
};

View File

@@ -1,39 +0,0 @@
import { DECRYPTED_FILE_URL_PREFIX } from "$lib/constants";
import type { FileMetadata, ServiceWorkerMessage, ServiceWorkerResponse } from "./types";
const PREPARE_TIMEOUT_MS = 5000;
const getServiceWorker = async () => {
const registration = await navigator.serviceWorker.ready;
const sw = registration.active;
if (!sw) {
throw new Error("Service worker not activated");
}
return sw;
};
export const prepareFileDecryption = async (id: number, metadata: FileMetadata) => {
const sw = await getServiceWorker();
return new Promise<void>((resolve, reject) => {
const timeout = setTimeout(
() => reject(new Error("Service worker timeout")),
PREPARE_TIMEOUT_MS,
);
const handler = (event: MessageEvent<ServiceWorkerResponse>) => {
if (event.data.type === "decryption-ready" && event.data.fileId === id) {
clearTimeout(timeout);
navigator.serviceWorker.removeEventListener("message", handler);
resolve();
}
};
navigator.serviceWorker.addEventListener("message", handler);
sw.postMessage({
type: "decryption-prepare",
fileId: id,
...metadata,
} satisfies ServiceWorkerMessage);
});
};
export const getDecryptedFileUrl = (id: number) => `${DECRYPTED_FILE_URL_PREFIX}${id}`;
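
For context, this deleted client module was consumed roughly as follows (sketch; dataKey is an unwrapped AES CryptoKey and the metadata values come from the file record):

await prepareFileDecryption(42, {
  isLegacy: false,
  dataKey,                     // CryptoKey obtained elsewhere
  encContentSize: 12_345_678,  // encrypted size reported by the server
  contentType: "video/mp4",
});
videoElement.src = getDecryptedFileUrl(42); // service worker serves decrypted bytes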

View File

@@ -1,2 +0,0 @@
export * from "./client";
export * from "./types";

View File

@@ -1,19 +0,0 @@
export interface FileMetadata {
isLegacy: boolean;
dataKey: CryptoKey;
encContentSize: number;
contentType: string;
}
export interface DecryptionPrepareMessage extends FileMetadata {
type: "decryption-prepare";
fileId: number;
}
export interface DecryptionReadyMessage {
type: "decryption-ready";
fileId: number;
}
export type ServiceWorkerMessage = DecryptionPrepareMessage;
export type ServiceWorkerResponse = DecryptionReadyMessage;

View File

@@ -1,5 +1,4 @@
 import { getAllFileInfos } from "$lib/indexedDB/filesystem";
-import { encodeToBase64, digestMessage } from "$lib/modules/crypto";
 import {
   getFileCache,
   storeFileCache,
@@ -7,17 +6,18 @@
   downloadFile,
   deleteFileThumbnailCache,
 } from "$lib/modules/file";
+import type { FileThumbnailUploadRequest } from "$lib/server/schemas";
 import { trpc } from "$trpc/client";

 export const requestFileDownload = async (
   fileId: number,
+  fileEncryptedIv: string,
   dataKey: CryptoKey,
-  isLegacy: boolean,
 ) => {
   const cache = await getFileCache(fileId);
   if (cache) return cache;

-  const fileBuffer = await downloadFile(fileId, dataKey, isLegacy);
+  const fileBuffer = await downloadFile(fileId, fileEncryptedIv, dataKey);
   storeFileCache(fileId, fileBuffer); // Intended
   return fileBuffer;
 };
@@ -25,40 +25,19 @@
 export const requestFileThumbnailUpload = async (
   fileId: number,
   dataKeyVersion: Date,
-  thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer },
+  thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: string },
 ) => {
-  const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({
-    file: fileId,
-    dekVersion: dataKeyVersion,
-  });
-
-  // Prepend IV to ciphertext (consistent with file download format)
-  const ivAndCiphertext = new Uint8Array(
-    thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength,
-  );
-  ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0);
-  ivAndCiphertext.set(
-    new Uint8Array(thumbnailEncrypted.ciphertext),
-    thumbnailEncrypted.iv.byteLength,
-  );
-
-  const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext));
-  const response = await fetch(`/api/upload/${uploadId}/chunks/0`, {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/octet-stream",
-      "Content-Digest": `sha-256=:${chunkHash}:`,
-    },
-    body: ivAndCiphertext,
-  });
-  if (!response.ok) {
-    throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`);
-  }
-
-  await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId });
-  return response;
+  const form = new FormData();
+  form.set(
+    "metadata",
+    JSON.stringify({
+      dekVersion: dataKeyVersion.toISOString(),
+      contentIv: thumbnailEncrypted.iv,
+    } satisfies FileThumbnailUploadRequest),
+  );
+  form.set("content", new Blob([thumbnailEncrypted.ciphertext]));
+  return await fetch(`/api/file/${fileId}/thumbnail/upload`, { method: "POST", body: form });
 };

 export const requestDeletedFilesCleanup = async () => {

View File

@@ -5,7 +5,7 @@
import { page } from "$app/state"; import { page } from "$app/state";
import { FullscreenDiv } from "$lib/components/atoms"; import { FullscreenDiv } from "$lib/components/atoms";
import { Categories, IconEntryButton, TopBar } from "$lib/components/molecules"; import { Categories, IconEntryButton, TopBar } from "$lib/components/molecules";
import { getFileInfo, type MaybeFileInfo } from "$lib/modules/filesystem"; import { getFileInfo, type FileInfo, type MaybeFileInfo } from "$lib/modules/filesystem";
import { captureVideoThumbnail } from "$lib/modules/thumbnail"; import { captureVideoThumbnail } from "$lib/modules/thumbnail";
import { getFileDownloadState } from "$lib/modules/file"; import { getFileDownloadState } from "$lib/modules/file";
import { masterKeyStore } from "$lib/stores"; import { masterKeyStore } from "$lib/stores";
@@ -17,7 +17,6 @@
requestFileDownload, requestFileDownload,
requestThumbnailUpload, requestThumbnailUpload,
requestFileAdditionToCategory, requestFileAdditionToCategory,
requestVideoStream,
} from "./service"; } from "./service";
import TopBarMenu from "./TopBarMenu.svelte"; import TopBarMenu from "./TopBarMenu.svelte";
@@ -38,7 +37,6 @@
let viewerType: "image" | "video" | undefined = $state(); let viewerType: "image" | "video" | undefined = $state();
let fileBlob: Blob | undefined = $state(); let fileBlob: Blob | undefined = $state();
let fileBlobUrl: string | undefined = $state(); let fileBlobUrl: string | undefined = $state();
let videoStreamUrl: string | undefined = $state();
let videoElement: HTMLVideoElement | undefined = $state(); let videoElement: HTMLVideoElement | undefined = $state();
const updateViewer = async (buffer: ArrayBuffer, contentType: string) => { const updateViewer = async (buffer: ArrayBuffer, contentType: string) => {
@@ -97,19 +95,7 @@
untrack(() => { untrack(() => {
if (!downloadState && !isDownloadRequested) { if (!downloadState && !isDownloadRequested) {
isDownloadRequested = true; isDownloadRequested = true;
requestFileDownload(data.id, info!.contentIv!, info!.dataKey!.key).then(
if (viewerType === "video" && !info!.isLegacy) {
requestVideoStream(data.id, info!.dataKey!.key, contentType).then((streamUrl) => {
if (streamUrl) {
videoStreamUrl = streamUrl;
} else {
requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then((buffer) =>
updateViewer(buffer, contentType),
);
}
});
} else {
requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then(
async (buffer) => { async (buffer) => {
const blob = await updateViewer(buffer, contentType); const blob = await updateViewer(buffer, contentType);
if (!viewerType) { if (!viewerType) {
@@ -118,16 +104,13 @@
}, },
); );
} }
}
}); });
} }
}); });
$effect(() => { $effect(() => {
if (info?.exists && downloadState?.status === "decrypted") { if (info?.exists && downloadState?.status === "decrypted") {
untrack( untrack(() => !isDownloadRequested && updateViewer(downloadState.result!, info!.contentIv!));
() => !isDownloadRequested && updateViewer(downloadState.result!, info!.contentType!),
);
} }
}); });
@@ -154,7 +137,6 @@
? info?.parentId ? info?.parentId
: undefined} : undefined}
{fileBlob} {fileBlob}
downloadUrl={videoStreamUrl}
filename={info?.name} filename={info?.name}
/> />
</div> </div>
@@ -177,10 +159,9 @@
{@render viewerLoading("이미지를 불러오고 있어요.")} {@render viewerLoading("이미지를 불러오고 있어요.")}
{/if} {/if}
{:else if viewerType === "video"} {:else if viewerType === "video"}
{#if videoStreamUrl || fileBlobUrl} {#if fileBlobUrl}
<div class="flex flex-col space-y-2"> <div class="flex flex-col space-y-2">
<video bind:this={videoElement} src={videoStreamUrl ?? fileBlobUrl} controls muted <video bind:this={videoElement} src={fileBlobUrl} controls muted></video>
></video>
<IconEntryButton <IconEntryButton
icon={IconCamera} icon={IconCamera}
onclick={() => updateThumbnail(info?.dataKey?.key!, info?.dataKey?.version!)} onclick={() => updateThumbnail(info?.dataKey?.key!, info?.dataKey?.version!)}

View File

@@ -10,29 +10,17 @@
   interface Props {
     directoryId?: "root" | number;
-    downloadUrl?: string;
     fileBlob?: Blob;
     filename?: string;
     isOpen: boolean;
   }

-  let { directoryId, downloadUrl, fileBlob, filename, isOpen = $bindable() }: Props = $props();
-
-  const handleDownload = () => {
-    if (fileBlob && filename) {
-      FileSaver.saveAs(fileBlob, filename);
-    } else if (downloadUrl && filename) {
-      // Use streaming download via Content-Disposition header
-      const url = new URL(downloadUrl, window.location.origin);
-      url.searchParams.set("download", filename);
-      window.open(url.toString(), "_blank");
-    }
-  };
+  let { directoryId, fileBlob, filename, isOpen = $bindable() }: Props = $props();
 </script>

 <svelte:window onclick={() => (isOpen = false)} />

-{#if isOpen && (directoryId || downloadUrl || fileBlob)}
+{#if isOpen && (directoryId || fileBlob)}
   <div
     class="absolute right-2 top-full z-20 space-y-1 rounded-lg bg-white px-1 py-2 shadow-2xl"
     transition:fly={{ y: -8, duration: 200 }}
@@ -61,8 +49,10 @@
       ),
     )}
   {/if}
-  {#if fileBlob || downloadUrl}
-    {@render menuButton(IconCloudDownload, "다운로드", handleDownload)}
+  {#if fileBlob}
+    {@render menuButton(IconCloudDownload, "다운로드", () => {
+      FileSaver.saveAs(fileBlob, filename);
+    })}
   {/if}
   </div>
 </div>

View File

@@ -1,32 +1,11 @@
import { encryptData } from "$lib/modules/crypto"; import { encryptData } from "$lib/modules/crypto";
import { storeFileThumbnailCache } from "$lib/modules/file"; import { storeFileThumbnailCache } from "$lib/modules/file";
import { prepareFileDecryption, getDecryptedFileUrl } from "$lib/serviceWorker";
import { requestFileThumbnailUpload } from "$lib/services/file"; import { requestFileThumbnailUpload } from "$lib/services/file";
import { trpc } from "$trpc/client"; import { trpc } from "$trpc/client";
export { requestCategoryCreation, requestFileRemovalFromCategory } from "$lib/services/category"; export { requestCategoryCreation, requestFileRemovalFromCategory } from "$lib/services/category";
export { requestFileDownload } from "$lib/services/file"; export { requestFileDownload } from "$lib/services/file";
export const requestVideoStream = async (
fileId: number,
dataKey: CryptoKey,
contentType: string,
) => {
const res = await fetch(`/api/file/${fileId}/download`, { method: "HEAD" });
if (!res.ok) return null;
const encContentSize = parseInt(res.headers.get("Content-Length") ?? "0", 10);
if (encContentSize <= 0) return null;
try {
await prepareFileDecryption(fileId, { isLegacy: false, dataKey, encContentSize, contentType });
return getDecryptedFileUrl(fileId);
} catch {
// TODO: Error Handling
return null;
}
};
export const requestThumbnailUpload = async ( export const requestThumbnailUpload = async (
fileId: number, fileId: number,
thumbnail: Blob, thumbnail: Blob,

View File

@@ -50,7 +50,7 @@ const requestThumbnailUpload = limitFunction(
async ( async (
fileId: number, fileId: number,
dataKeyVersion: Date, dataKeyVersion: Date,
thumbnail: { plaintext: ArrayBuffer; ciphertext: ArrayBuffer; iv: ArrayBuffer }, thumbnail: { plaintext: ArrayBuffer; ciphertext: ArrayBuffer; iv: string },
) => { ) => {
statuses.set(fileId, "uploading"); statuses.set(fileId, "uploading");
@@ -77,7 +77,7 @@ export const requestThumbnailGeneration = async (fileInfo: FileInfo) => {
await scheduler.schedule( await scheduler.schedule(
async () => { async () => {
statuses.set(fileInfo.id, "generation-pending"); statuses.set(fileInfo.id, "generation-pending");
file = await requestFileDownload(fileInfo.id, fileInfo.dataKey?.key!, fileInfo.isLegacy!); file = await requestFileDownload(fileInfo.id, fileInfo.contentIv!, fileInfo.dataKey?.key!);
return file.byteLength; return file.byteLength;
}, },
async () => { async () => {

View File

@@ -88,9 +88,7 @@ export const requestFileUpload = async (
const res = await uploadFile(file, parentId, hmacSecret, masterKey, onDuplicate); const res = await uploadFile(file, parentId, hmacSecret, masterKey, onDuplicate);
if (!res) return false; if (!res) return false;
if (res.fileBuffer) {
storeFileCache(res.fileId, res.fileBuffer); // Intended storeFileCache(res.fileId, res.fileBuffer); // Intended
}
if (res.thumbnailBuffer) { if (res.thumbnailBuffer) {
storeFileThumbnailCache(res.fileId, res.thumbnailBuffer); // Intended storeFileThumbnailCache(res.fileId, res.thumbnailBuffer); // Intended
} }

View File

@@ -1,15 +1,10 @@
 import { error } from "@sveltejs/kit";
 import { z } from "zod";
-import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http";
 import { authorize } from "$lib/server/modules/auth";
 import { getFileStream } from "$lib/server/services/file";
 import type { RequestHandler } from "./$types";

-const downloadHandler = async (
-  locals: App.Locals,
-  params: Record<string, string>,
-  request: Request,
-) => {
+export const GET: RequestHandler = async ({ locals, params }) => {
   const { userId } = await authorize(locals, "activeClient");

   const zodRes = z
@@ -20,29 +15,11 @@ const downloadHandler = async (
   if (!zodRes.success) error(400, "Invalid path parameters");
   const { id } = zodRes.data;

-  const { encContentStream, range } = await getFileStream(
-    userId,
-    id,
-    parseRangeHeader(request.headers.get("Range")),
-  );
-  return {
-    stream: encContentStream,
-    headers: {
-      "Accept-Ranges": "bytes",
-      "Content-Length": (range.end - range.start + 1).toString(),
-      "Content-Type": "application/octet-stream",
-      ...getContentRangeHeader(range),
-    },
-    isRangeRequest: !!range,
-  };
-};
-
-export const GET: RequestHandler = async ({ locals, params, request }) => {
-  const { stream, headers, isRangeRequest } = await downloadHandler(locals, params, request);
-  return new Response(stream as ReadableStream, { status: isRangeRequest ? 206 : 200, headers });
-};
-
-export const HEAD: RequestHandler = async ({ locals, params, request }) => {
-  const { headers, isRangeRequest } = await downloadHandler(locals, params, request);
-  return new Response(null, { status: isRangeRequest ? 206 : 200, headers });
-};
+  const { encContentStream, encContentSize } = await getFileStream(userId, id);
+  return new Response(encContentStream as ReadableStream, {
+    headers: {
+      "Content-Type": "application/octet-stream",
+      "Content-Length": encContentSize.toString(),
+    },
+  });
+};

View File

@@ -1,15 +1,10 @@
 import { error } from "@sveltejs/kit";
 import { z } from "zod";
-import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http";
 import { authorize } from "$lib/server/modules/auth";
 import { getFileThumbnailStream } from "$lib/server/services/file";
 import type { RequestHandler } from "./$types";

-const downloadHandler = async (
-  locals: App.Locals,
-  params: Record<string, string>,
-  request: Request,
-) => {
+export const GET: RequestHandler = async ({ locals, params }) => {
   const { userId } = await authorize(locals, "activeClient");

   const zodRes = z
@@ -20,29 +15,11 @@ const downloadHandler = async (
   if (!zodRes.success) error(400, "Invalid path parameters");
   const { id } = zodRes.data;

-  const { encContentStream, range } = await getFileThumbnailStream(
-    userId,
-    id,
-    parseRangeHeader(request.headers.get("Range")),
-  );
-  return {
-    stream: encContentStream,
-    headers: {
-      "Accept-Ranges": "bytes",
-      "Content-Length": (range.end - range.start + 1).toString(),
-      "Content-Type": "application/octet-stream",
-      ...getContentRangeHeader(range),
-    },
-    isRangeRequest: !!range,
-  };
-};
-
-export const GET: RequestHandler = async ({ locals, params, request }) => {
-  const { stream, headers, isRangeRequest } = await downloadHandler(locals, params, request);
-  return new Response(stream as ReadableStream, { status: isRangeRequest ? 206 : 200, headers });
-};
-
-export const HEAD: RequestHandler = async ({ locals, params, request }) => {
-  const { headers, isRangeRequest } = await downloadHandler(locals, params, request);
-  return new Response(null, { status: isRangeRequest ? 206 : 200, headers });
-};
+  const { encContentStream, encContentSize } = await getFileThumbnailStream(userId, id);
+  return new Response(encContentStream as ReadableStream, {
+    headers: {
+      "Content-Type": "application/octet-stream",
+      "Content-Length": encContentSize.toString(),
+    },
+  });
+};

View File

@@ -0,0 +1,74 @@
import Busboy from "@fastify/busboy";
import { error, text } from "@sveltejs/kit";
import { Readable, Writable } from "stream";
import { z } from "zod";
import { authorize } from "$lib/server/modules/auth";
import { fileThumbnailUploadRequest, type FileThumbnailUploadRequest } from "$lib/server/schemas";
import { uploadFileThumbnail } from "$lib/server/services/file";
import type { RequestHandler } from "./$types";
export const POST: RequestHandler = async ({ locals, params, request }) => {
const { userId } = await authorize(locals, "activeClient");
const zodRes = z
.object({
id: z.coerce.number().int().positive(),
})
.safeParse(params);
if (!zodRes.success) error(400, "Invalid path parameters");
const { id } = zodRes.data;
const contentType = request.headers.get("Content-Type");
if (!contentType?.startsWith("multipart/form-data") || !request.body) {
error(400, "Invalid request body");
}
return new Promise<Response>((resolve, reject) => {
const bb = Busboy({ headers: { "content-type": contentType } });
const handler =
<T extends unknown[]>(f: (...args: T) => Promise<void>) =>
(...args: T) => {
f(...args).catch(reject);
};
let metadata: FileThumbnailUploadRequest | null = null;
let content: Readable | null = null;
bb.on(
"field",
handler(async (fieldname, val) => {
if (fieldname === "metadata") {
// Ignore subsequent metadata fields
if (!metadata) {
const zodRes = fileThumbnailUploadRequest.safeParse(JSON.parse(val));
if (!zodRes.success) error(400, "Invalid request body");
metadata = zodRes.data;
}
} else {
error(400, "Invalid request body");
}
}),
);
bb.on(
"file",
handler(async (fieldname, file) => {
if (fieldname !== "content") error(400, "Invalid request body");
if (!metadata || content) error(400, "Invalid request body");
content = file;
await uploadFileThumbnail(
userId,
id,
new Date(metadata.dekVersion),
metadata.contentIv,
content,
);
resolve(text("Thumbnail uploaded", { headers: { "Content-Type": "text/plain" } }));
}),
);
bb.on("error", (e) => {
content?.emit("error", e) ?? reject(e);
});
request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error
});
};
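
Note: part order matters here. Busboy emits multipart parts in wire order, and the handler above rejects a content file that arrives before metadata. Since FormData preserves insertion order, a compatible client call looks like this (sketch; variable names illustrative):

const form = new FormData();
form.set("metadata", JSON.stringify({ dekVersion, contentIv })); // must come first
form.set("content", new Blob([encryptedThumbnail]));
await fetch(`/api/file/${fileId}/thumbnail/upload`, { method: "POST", body: form });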

View File

@@ -0,0 +1,108 @@
import Busboy from "@fastify/busboy";
import { error, json } from "@sveltejs/kit";
import { Readable, Writable } from "stream";
import { authorize } from "$lib/server/modules/auth";
import {
fileUploadRequest,
fileUploadResponse,
type FileUploadResponse,
} from "$lib/server/schemas";
import { uploadFile } from "$lib/server/services/file";
import type { RequestHandler } from "./$types";
type FileMetadata = Parameters<typeof uploadFile>[0];
const parseFileMetadata = (userId: number, json: string) => {
const zodRes = fileUploadRequest.safeParse(JSON.parse(json));
if (!zodRes.success) error(400, "Invalid request body");
const {
parent,
mekVersion,
dek,
dekVersion,
hskVersion,
contentHmac,
contentType,
contentIv,
name,
nameIv,
createdAt,
createdAtIv,
lastModifiedAt,
lastModifiedAtIv,
} = zodRes.data;
if ((createdAt && !createdAtIv) || (!createdAt && createdAtIv))
error(400, "Invalid request body");
return {
userId,
parentId: parent,
mekVersion,
encDek: dek,
dekVersion: new Date(dekVersion),
hskVersion,
contentHmac,
contentType,
encContentIv: contentIv,
encName: { ciphertext: name, iv: nameIv },
encCreatedAt: createdAt && createdAtIv ? { ciphertext: createdAt, iv: createdAtIv } : null,
encLastModifiedAt: { ciphertext: lastModifiedAt, iv: lastModifiedAtIv },
} satisfies FileMetadata;
};
export const POST: RequestHandler = async ({ locals, request }) => {
const { userId } = await authorize(locals, "activeClient");
const contentType = request.headers.get("Content-Type");
if (!contentType?.startsWith("multipart/form-data") || !request.body) {
error(400, "Invalid request body");
}
return new Promise<Response>((resolve, reject) => {
const bb = Busboy({ headers: { "content-type": contentType } });
const handler =
<T extends unknown[]>(f: (...args: T) => Promise<void>) =>
(...args: T) => {
f(...args).catch(reject);
};
let metadata: FileMetadata | null = null;
let content: Readable | null = null;
const checksum = new Promise<string>((resolveChecksum, rejectChecksum) => {
bb.on(
"field",
handler(async (fieldname, val) => {
if (fieldname === "metadata") {
// Ignore subsequent metadata fields
if (!metadata) {
metadata = parseFileMetadata(userId, val);
}
} else if (fieldname === "checksum") {
// Ignore subsequent checksum fields
resolveChecksum(val);
} else {
error(400, "Invalid request body");
}
}),
);
bb.on(
"file",
handler(async (fieldname, file) => {
if (fieldname !== "content") error(400, "Invalid request body");
if (!metadata || content) error(400, "Invalid request body");
content = file;
const { fileId } = await uploadFile(metadata, content, checksum);
resolve(json(fileUploadResponse.parse({ file: fileId } satisfies FileUploadResponse)));
}),
);
bb.on("finish", () => rejectChecksum(new Error("Invalid request body")));
bb.on("error", (e) => {
content?.emit("error", e) ?? reject(e);
rejectChecksum(e);
});
});
request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error
});
};
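
A matching client request for this route (sketch; the route's URL is not visible in this diff, so the path below is a guess): metadata must precede content, while checksum, the base64 SHA-256 of the encrypted bytes, may trail the file part because the server consumes it as a promise resolved whenever the field arrives.

const form = new FormData();
form.set("metadata", JSON.stringify(meta)); // meta: a FileUploadRequest object
form.set("content", new Blob([encryptedBytes]));
form.set("checksum", encChecksumBase64);    // computed client-side before sending
const res = await fetch("/api/file/upload", { method: "POST", body: form }); // path assumed
const { file } = await res.json();          // FileUploadResponse: the new file id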

View File

@@ -1,43 +0,0 @@
import { error, text } from "@sveltejs/kit";
import { Readable } from "stream";
import { z } from "zod";
import { authorize } from "$lib/server/modules/auth";
import { uploadChunk } from "$lib/server/services/upload";
import type { RequestHandler } from "./$types";
export const POST: RequestHandler = async ({ locals, params, request }) => {
const { userId } = await authorize(locals, "activeClient");
const zodRes = z
.object({
id: z.uuidv4(),
index: z.coerce.number().int().nonnegative(),
})
.safeParse(params);
if (!zodRes.success) error(400, "Invalid path parameters");
const { id: uploadId, index: chunkIndex } = zodRes.data;
// Parse Content-Digest header (RFC 9530)
// Expected format: sha-256=:base64hash:
const contentDigest = request.headers.get("Content-Digest");
if (!contentDigest) error(400, "Missing Content-Digest header");
const digestMatch = contentDigest.match(/^sha-256=:([A-Za-z0-9+/=]+):$/);
if (!digestMatch || !digestMatch[1])
error(400, "Invalid Content-Digest format, must be sha-256=:base64:");
const encChunkHash = digestMatch[1];
const contentType = request.headers.get("Content-Type");
if (contentType !== "application/octet-stream" || !request.body) {
error(400, "Invalid request body");
}
// Convert web ReadableStream to Node Readable
const nodeReadable = Readable.fromWeb(
request.body as unknown as Parameters<typeof Readable.fromWeb>[0],
);
await uploadChunk(userId, uploadId, chunkIndex, nodeReadable, encChunkHash);
return text("Chunk uploaded", { headers: { "Content-Type": "text/plain" } });
};
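
Note: the deleted route expected an RFC 9530 structured digest of the form sha-256=:<base64>:. Constructing that header client-side (sketch):

const encChunk = new TextEncoder().encode("example-bytes"); // illustrative payload
const digest = await crypto.subtle.digest("SHA-256", encChunk);
const b64 = btoa(String.fromCharCode(...new Uint8Array(digest))); // 32 bytes, safe to spread
const headers = {
  "Content-Type": "application/octet-stream",
  "Content-Digest": `sha-256=:${b64}:`,
};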

View File

@@ -1,153 +0,0 @@
import { DECRYPTED_FILE_URL_PREFIX, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "../modules/constants";
import { decryptChunk, getEncryptedRange, getDecryptedSize } from "../modules/crypto";
import { parseRangeHeader, getContentRangeHeader } from "../modules/http";
import { getFile } from "../modules/opfs";
import { fileMetadataStore } from "../stores";
import type { FileMetadata } from "../types";
const createResponse = (
stream: ReadableStream<Uint8Array>,
isRangeRequest: boolean,
range: { start: number; end: number; total: number },
contentType?: string,
downloadFilename?: string,
) => {
const headers: Record<string, string> = {
"Accept-Ranges": "bytes",
"Content-Length": String(range.end - range.start + 1),
"Content-Type": contentType ?? "application/octet-stream",
...(isRangeRequest ? getContentRangeHeader(range) : {}),
};
if (downloadFilename) {
headers["Content-Disposition"] =
`attachment; filename*=UTF-8''${encodeURIComponent(downloadFilename)}`;
}
return new Response(stream, {
status: isRangeRequest ? 206 : 200,
headers,
});
};
const streamFromOpfs = async (
file: File,
metadata?: FileMetadata,
range?: { start?: number; end?: number },
downloadFilename?: string,
) => {
const start = range?.start ?? 0;
const end = range?.end ?? file.size - 1;
if (start > end || start < 0 || end >= file.size) {
return new Response("Invalid range", { status: 416 });
}
return createResponse(
file.slice(start, end + 1).stream(),
!!range,
{ start, end, total: file.size },
metadata?.contentType,
downloadFilename,
);
};
const streamFromServer = async (
id: number,
metadata: FileMetadata,
range?: { start?: number; end?: number },
downloadFilename?: string,
) => {
const totalSize = getDecryptedSize(metadata.encContentSize, metadata.isLegacy);
const start = range?.start ?? 0;
const end =
range?.end ??
(range && !metadata.isLegacy ? Math.min(start + CHUNK_SIZE, totalSize) : totalSize) - 1;
if (start > end || start < 0 || end >= totalSize) {
return new Response("Invalid range", { status: 416 });
}
const encryptedRange = getEncryptedRange(start, end, metadata.encContentSize, metadata.isLegacy);
const apiResponse = await fetch(`/api/file/${id}/download`, {
headers: { Range: `bytes=${encryptedRange.start}-${encryptedRange.end}` },
});
if (apiResponse.status !== 206 || !apiResponse.body) {
return new Response("Failed to fetch encrypted file", { status: 502 });
}
if (metadata.isLegacy) {
const fileEncrypted = await apiResponse.arrayBuffer();
const decrypted = await decryptChunk(fileEncrypted, metadata.dataKey);
return createResponse(
new ReadableStream<Uint8Array>({
start(controller) {
controller.enqueue(new Uint8Array(decrypted.slice(start, end + 1)));
controller.close();
},
}),
!!range,
{ start, end, total: totalSize },
metadata.contentType,
);
}
const totalChunks = encryptedRange.lastChunkIndex - encryptedRange.firstChunkIndex + 1;
let currentChunkIndex = 0;
let buffer = new Uint8Array(0);
const decryptingStream = new TransformStream<Uint8Array, Uint8Array>({
async transform(chunk, controller) {
const newBuffer = new Uint8Array(buffer.length + chunk.length);
newBuffer.set(buffer);
newBuffer.set(chunk, buffer.length);
buffer = newBuffer;
while (buffer.length >= ENCRYPTED_CHUNK_SIZE && currentChunkIndex < totalChunks - 1) {
const encryptedChunk = buffer.slice(0, ENCRYPTED_CHUNK_SIZE);
buffer = buffer.slice(ENCRYPTED_CHUNK_SIZE);
const decrypted = await decryptChunk(encryptedChunk.buffer, metadata.dataKey);
const sliceStart = currentChunkIndex === 0 ? start % CHUNK_SIZE : 0;
controller.enqueue(new Uint8Array(decrypted.slice(sliceStart)));
currentChunkIndex++;
}
},
async flush(controller) {
if (buffer.length > 0) {
const decrypted = await decryptChunk(buffer.buffer, metadata.dataKey);
const sliceStart = currentChunkIndex === 0 ? start % CHUNK_SIZE : 0;
const sliceEnd = (end % CHUNK_SIZE) + 1;
controller.enqueue(new Uint8Array(decrypted.slice(sliceStart, sliceEnd)));
}
},
});
return createResponse(
apiResponse.body.pipeThrough(decryptingStream),
!!range,
{ start, end, total: totalSize },
metadata.contentType,
downloadFilename,
);
};
const decryptFileHandler = async (request: Request) => {
const url = new URL(request.url);
const fileId = parseInt(url.pathname.slice(DECRYPTED_FILE_URL_PREFIX.length), 10);
if (isNaN(fileId)) {
throw new Response("Invalid file id", { status: 400 });
}
const downloadFilename = url.searchParams.get("download") ?? undefined;
const metadata = fileMetadataStore.get(fileId);
const range = parseRangeHeader(request.headers.get("Range"));
const cache = await getFile(`/cache/${fileId}`);
if (cache) {
return streamFromOpfs(cache, metadata, range, downloadFilename);
} else if (metadata) {
return streamFromServer(fileId, metadata, range, downloadFilename);
} else {
return new Response("Decryption not prepared", { status: 400 });
}
};
export default decryptFileHandler;

View File

@@ -1 +0,0 @@
export { default as decryptFile } from "./decryptFile";

View File

@@ -1,43 +0,0 @@
/// <reference no-default-lib="true"/>
/// <reference lib="esnext" />
/// <reference lib="webworker" />
/// <reference types="@sveltejs/kit" />
import { DECRYPTED_FILE_URL_PREFIX } from "./modules/constants";
import { decryptFile } from "./handlers";
import { fileMetadataStore } from "./stores";
import type { ServiceWorkerMessage, ServiceWorkerResponse } from "./types";
const self = globalThis.self as unknown as ServiceWorkerGlobalScope;
self.addEventListener("message", (event) => {
const message: ServiceWorkerMessage = event.data;
switch (message.type) {
case "decryption-prepare":
fileMetadataStore.set(message.fileId, message);
event.source?.postMessage({
type: "decryption-ready",
fileId: message.fileId,
} satisfies ServiceWorkerResponse);
break;
default: {
const exhaustive: never = message.type;
return exhaustive;
}
}
});
self.addEventListener("fetch", (event) => {
const url = new URL(event.request.url);
if (url.pathname.startsWith(DECRYPTED_FILE_URL_PREFIX)) {
event.respondWith(decryptFile(event.request));
}
});
self.addEventListener("install", () => {
self.skipWaiting();
});
self.addEventListener("activate", (event) => {
event.waitUntil(self.clients.claim());
});

View File

@@ -1 +0,0 @@
export * from "../../lib/constants";

View File

@@ -1,40 +0,0 @@
import { ENCRYPTION_OVERHEAD, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "./constants";
export * from "../../lib/modules/crypto";
export const getEncryptedRange = (
start: number,
end: number,
totalEncryptedSize: number,
isLegacy: boolean,
) => {
if (isLegacy) {
return {
firstChunkIndex: 0,
lastChunkIndex: 0,
start: 0,
end: totalEncryptedSize - 1,
};
}
const firstChunkIndex = Math.floor(start / CHUNK_SIZE);
const lastChunkIndex = Math.floor(end / CHUNK_SIZE);
return {
firstChunkIndex,
lastChunkIndex,
start: firstChunkIndex * ENCRYPTED_CHUNK_SIZE,
end: Math.min((lastChunkIndex + 1) * ENCRYPTED_CHUNK_SIZE - 1, totalEncryptedSize - 1),
};
};
export const getDecryptedSize = (encryptedSize: number, isLegacy: boolean) => {
if (isLegacy) {
return encryptedSize - ENCRYPTION_OVERHEAD;
}
const fullChunks = Math.floor(encryptedSize / ENCRYPTED_CHUNK_SIZE);
const lastChunkEncSize = encryptedSize % ENCRYPTED_CHUNK_SIZE;
return (
fullChunks * CHUNK_SIZE + (lastChunkEncSize > 0 ? lastChunkEncSize - ENCRYPTION_OVERHEAD : 0)
);
};
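
Plugging numbers into the deleted helpers above, assuming CHUNK_SIZE = 4 MiB and ENCRYPTION_OVERHEAD = 28 bytes (the real constants are not shown in this diff): a request for plaintext bytes 5,000,000 through 5,999,999 falls entirely inside chunk 1, so only that chunk's framed bytes are fetched.

const CHUNK_SIZE = 4 * 1024 * 1024;             // assumed
const ENCRYPTION_OVERHEAD = 28;                 // assumed
const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD;

const r = getEncryptedRange(5_000_000, 5_999_999, 3 * ENCRYPTED_CHUNK_SIZE, false);
// r.firstChunkIndex === 1, r.lastChunkIndex === 1
// r.start === ENCRYPTED_CHUNK_SIZE, r.end === 2 * ENCRYPTED_CHUNK_SIZE - 1

getDecryptedSize(3 * ENCRYPTED_CHUNK_SIZE, false); // === 3 * CHUNK_SIZE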

View File

@@ -1 +0,0 @@
export * from "../../lib/modules/http";

View File

@@ -1 +0,0 @@
export * from "../../lib/modules/opfs";

View File

@@ -1,3 +0,0 @@
import type { FileMetadata } from "./types";
export const fileMetadataStore = new Map<number, FileMetadata>();

View File

@@ -1 +0,0 @@
export * from "../lib/serviceWorker/types";

View File

@@ -9,7 +9,6 @@ import {
fileRouter, fileRouter,
hskRouter, hskRouter,
mekRouter, mekRouter,
uploadRouter,
userRouter, userRouter,
} from "./routers"; } from "./routers";
@@ -21,7 +20,6 @@ export const appRouter = router({
file: fileRouter, file: fileRouter,
hsk: hskRouter, hsk: hskRouter,
mek: mekRouter, mek: mekRouter,
upload: uploadRouter,
user: userRouter, user: userRouter,
}); });

View File

@@ -1,14 +1,14 @@
import { TRPCError } from "@trpc/server"; import { TRPCError } from "@trpc/server";
import { z } from "zod"; import { z } from "zod";
import { CategoryIdSchema } from "$lib/schemas";
import { CategoryRepo, FileRepo, IntegrityError } from "$lib/server/db"; import { CategoryRepo, FileRepo, IntegrityError } from "$lib/server/db";
import { categoryIdSchema } from "$lib/server/schemas";
import { router, roleProcedure } from "../init.server"; import { router, roleProcedure } from "../init.server";
const categoryRouter = router({ const categoryRouter = router({
get: roleProcedure["activeClient"] get: roleProcedure["activeClient"]
.input( .input(
z.object({ z.object({
id: CategoryIdSchema, id: categoryIdSchema,
recurse: z.boolean().default(false), recurse: z.boolean().default(false),
}), }),
) )
@@ -65,7 +65,7 @@ const categoryRouter = router({
create: roleProcedure["activeClient"] create: roleProcedure["activeClient"]
.input( .input(
z.object({ z.object({
parent: CategoryIdSchema, parent: categoryIdSchema,
mekVersion: z.int().positive(), mekVersion: z.int().positive(),
dek: z.base64().nonempty(), dek: z.base64().nonempty(),
dekVersion: z.date(), dekVersion: z.date(),

View File

@@ -1,15 +1,15 @@
import { TRPCError } from "@trpc/server"; import { TRPCError } from "@trpc/server";
import { z } from "zod"; import { z } from "zod";
import { DirectoryIdSchema } from "$lib/schemas";
import { FileRepo, IntegrityError } from "$lib/server/db"; import { FileRepo, IntegrityError } from "$lib/server/db";
import { safeUnlink } from "$lib/server/modules/filesystem"; import { safeUnlink } from "$lib/server/modules/filesystem";
import { directoryIdSchema } from "$lib/server/schemas";
import { router, roleProcedure } from "../init.server"; import { router, roleProcedure } from "../init.server";
const directoryRouter = router({ const directoryRouter = router({
get: roleProcedure["activeClient"] get: roleProcedure["activeClient"]
.input( .input(
z.object({ z.object({
id: DirectoryIdSchema, id: directoryIdSchema,
}), }),
) )
.query(async ({ ctx, input }) => { .query(async ({ ctx, input }) => {
@@ -59,7 +59,7 @@ const directoryRouter = router({
create: roleProcedure["activeClient"] create: roleProcedure["activeClient"]
.input( .input(
z.object({ z.object({
parent: DirectoryIdSchema, parent: directoryIdSchema,
mekVersion: z.int().positive(), mekVersion: z.int().positive(),
dek: z.base64().nonempty(), dek: z.base64().nonempty(),
dekVersion: z.date(), dekVersion: z.date(),

View File

@@ -19,12 +19,12 @@ const fileRouter = router({
const categories = await FileRepo.getAllFileCategories(input.id); const categories = await FileRepo.getAllFileCategories(input.id);
return { return {
isLegacy: !!file.encContentIv,
parent: file.parentId, parent: file.parentId,
mekVersion: file.mekVersion, mekVersion: file.mekVersion,
dek: file.encDek, dek: file.encDek,
dekVersion: file.dekVersion, dekVersion: file.dekVersion,
contentType: file.contentType, contentType: file.contentType,
contentIv: file.encContentIv,
name: file.encName.ciphertext, name: file.encName.ciphertext,
nameIv: file.encName.iv, nameIv: file.encName.iv,
createdAt: file.encCreatedAt?.ciphertext, createdAt: file.encCreatedAt?.ciphertext,
@@ -53,12 +53,12 @@ const fileRouter = router({
const files = await FileRepo.getFilesWithCategories(ctx.session.userId, input.ids); const files = await FileRepo.getFilesWithCategories(ctx.session.userId, input.ids);
return files.map((file) => ({ return files.map((file) => ({
id: file.id, id: file.id,
isLegacy: !!file.encContentIv,
parent: file.parentId, parent: file.parentId,
mekVersion: file.mekVersion, mekVersion: file.mekVersion,
dek: file.encDek, dek: file.encDek,
dekVersion: file.dekVersion, dekVersion: file.dekVersion,
contentType: file.contentType, contentType: file.contentType,
contentIv: file.encContentIv,
name: file.encName.ciphertext, name: file.encName.ciphertext,
nameIv: file.encName.iv, nameIv: file.encName.iv,
createdAt: file.encCreatedAt?.ciphertext, createdAt: file.encCreatedAt?.ciphertext,
@@ -158,7 +158,7 @@ const fileRouter = router({
throw new TRPCError({ code: "NOT_FOUND", message: "File or its thumbnail not found" }); throw new TRPCError({ code: "NOT_FOUND", message: "File or its thumbnail not found" });
} }
return { updatedAt: thumbnail.updatedAt }; return { updatedAt: thumbnail.updatedAt, contentIv: thumbnail.encContentIv };
}), }),
}); });

View File

@@ -5,5 +5,4 @@ export { default as directoryRouter } from "./directory";
export { default as fileRouter } from "./file"; export { default as fileRouter } from "./file";
export { default as hskRouter } from "./hsk"; export { default as hskRouter } from "./hsk";
export { default as mekRouter } from "./mek"; export { default as mekRouter } from "./mek";
export { default as uploadRouter } from "./upload";
export { default as userRouter } from "./user"; export { default as userRouter } from "./user";

View File

@@ -1,255 +0,0 @@
import { TRPCError } from "@trpc/server";
import { createHash } from "crypto";
import { createReadStream, createWriteStream } from "fs";
import { mkdir, rename } from "fs/promises";
import mime from "mime";
import { dirname } from "path";
import { v4 as uuidv4 } from "uuid";
import { z } from "zod";
import { DirectoryIdSchema } from "$lib/schemas";
import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db";
import db from "$lib/server/db/kysely";
import env from "$lib/server/loadenv";
import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem";
import { router, roleProcedure } from "../init.server";
const sessionLocks = new Set<string>();
const generateSessionId = async () => {
const id = uuidv4();
const path = `${env.uploadsPath}/${id}`;
await mkdir(path, { recursive: true });
return { id, path };
};
const uploadRouter = router({
startFileUpload: roleProcedure["activeClient"]
.input(
z.object({
chunks: z.int().positive(),
parent: DirectoryIdSchema,
mekVersion: z.int().positive(),
dek: z.base64().nonempty(),
dekVersion: z.date(),
hskVersion: z.int().positive().optional(),
contentType: z
.string()
.trim()
.nonempty()
.refine((value) => mime.getExtension(value) !== null),
name: z.base64().nonempty(),
nameIv: z.base64().nonempty(),
createdAt: z.base64().nonempty().optional(),
createdAtIv: z.base64().nonempty().optional(),
lastModifiedAt: z.base64().nonempty(),
lastModifiedAtIv: z.base64().nonempty(),
}),
)
.mutation(async ({ ctx, input }) => {
const oneMinuteAgo = new Date(Date.now() - 60 * 1000);
const oneMinuteLater = new Date(Date.now() + 60 * 1000);
if (input.dekVersion <= oneMinuteAgo || input.dekVersion >= oneMinuteLater) {
throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" });
}
const { id, path } = await generateSessionId();
try {
await UploadRepo.createFileUploadSession({
id,
userId: ctx.session.userId,
path,
totalChunks: input.chunks,
expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours
parentId: input.parent,
mekVersion: input.mekVersion,
encDek: input.dek,
dekVersion: input.dekVersion,
hskVersion: input.hskVersion ?? null,
contentType: input.contentType,
encName: { ciphertext: input.name, iv: input.nameIv },
encCreatedAt:
input.createdAt && input.createdAtIv
? { ciphertext: input.createdAt, iv: input.createdAtIv }
: null,
encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv },
});
return { uploadId: id };
} catch (e) {
await safeRecursiveRm(path);
if (e instanceof IntegrityError) {
if (e.message === "Inactive MEK version") {
throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" });
} else if (e.message === "Inactive HSK version") {
throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid HSK version" });
}
}
throw e;
}
}),
startFileThumbnailUpload: roleProcedure["activeClient"]
.input(
z.object({
file: z.int().positive(),
dekVersion: z.date(),
}),
)
.mutation(async ({ ctx, input }) => {
const { id, path } = await generateSessionId();
try {
await UploadRepo.createThumbnailUploadSession({
id,
userId: ctx.session.userId,
path,
totalChunks: 1, // Up to 4 MiB
expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours
fileId: input.file,
dekVersion: input.dekVersion,
});
return { uploadId: id };
} catch (e) {
await safeRecursiveRm(path);
if (e instanceof IntegrityError) {
if (e.message === "File not found") {
throw new TRPCError({ code: "NOT_FOUND", message: "File not found" });
} else if (e.message === "Invalid DEK version") {
throw new TRPCError({ code: "BAD_REQUEST", message: "Mismatched DEK version" });
}
}
throw e;
}
}),
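  // Validates the session, concatenates the uploaded chunks into the user's
  // library, registers the file, and removes the staging directory.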
completeFileUpload: roleProcedure["activeClient"]
.input(
z.object({
uploadId: z.uuidv4(),
contentHmac: z.base64().nonempty().optional(),
}),
)
.mutation(async ({ ctx, input }) => {
const { uploadId } = input;
if (sessionLocks.has(uploadId)) {
throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" });
} else {
sessionLocks.add(uploadId);
}
let filePath = "";
try {
const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
if (!session || session.type !== "file") {
throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" });
} else if (
(session.hskVersion && !input.contentHmac) ||
(!session.hskVersion && input.contentHmac)
) {
throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content HMAC" });
} else if (session.uploadedChunks.length < session.totalChunks) {
throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" });
}
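        // Assemble the encrypted content under a fresh random name in the
        // user's library directory ("wx": fail if the path already exists).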
filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`;
await mkdir(dirname(filePath), { recursive: true });
const hashStream = createHash("sha256");
const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
        // Stream the chunks, in order, through the hash and into the final
        // file, honoring backpressure so large uploads are not buffered in memory.
        for (let i = 0; i < session.totalChunks; i++) {
          for await (const chunk of createReadStream(`${session.path}/${i}`)) {
            hashStream.update(chunk);
            if (!writeStream.write(chunk)) {
              await once(writeStream, "drain");
            }
          }
        }
        await new Promise<void>((resolve, reject) => {
          writeStream.end((e?: Error | null) => (e ? reject(e) : resolve()));
        });
const hash = hashStream.digest("base64");
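        // Register the file and delete the upload session in a single
        // transaction so the database never sees one without the other.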
const fileId = await db.transaction().execute(async (trx) => {
const { id: fileId } = await FileRepo.registerFile(trx, {
...session,
userId: ctx.session.userId,
path: filePath,
contentHmac: input.contentHmac ?? null,
encContentHash: hash,
encContentIv: null,
});
await UploadRepo.deleteUploadSession(trx, uploadId);
return fileId;
});
await safeRecursiveRm(session.path);
return { file: fileId };
} catch (e) {
await safeUnlink(filePath);
throw e;
} finally {
sessionLocks.delete(uploadId);
}
}),
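  // Moves the single uploaded chunk into the thumbnails directory and points
  // the file's thumbnail record at it, unlinking any previous thumbnail.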
completeFileThumbnailUpload: roleProcedure["activeClient"]
.input(
z.object({
uploadId: z.uuidv4(),
}),
)
.mutation(async ({ ctx, input }) => {
const { uploadId } = input;
if (sessionLocks.has(uploadId)) {
throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" });
} else {
sessionLocks.add(uploadId);
}
let thumbnailPath = "";
try {
const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
if (!session || session.type !== "thumbnail") {
throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" });
} else if (session.uploadedChunks.length < session.totalChunks) {
throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" });
}
thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uploadId}`;
await mkdir(dirname(thumbnailPath), { recursive: true });
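        // rename() assumes the uploads and thumbnails directories share a
        // filesystem; a cross-device move would fail with EXDEV.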
await rename(`${session.path}/0`, thumbnailPath);
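        // Swap the thumbnail reference and drop the session atomically; the
        // old thumbnail file is unlinked only after the transaction commits.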
const oldThumbnailPath = await db.transaction().execute(async (trx) => {
const oldPath = await MediaRepo.updateFileThumbnail(
trx,
ctx.session.userId,
session.fileId,
session.dekVersion,
thumbnailPath,
null,
);
await UploadRepo.deleteUploadSession(trx, uploadId);
return oldPath;
});
await Promise.all([safeUnlink(oldThumbnailPath), safeRecursiveRm(session.path)]);
} catch (e) {
await safeUnlink(thumbnailPath);
if (e instanceof IntegrityError) {
if (e.message === "File not found") {
throw new TRPCError({ code: "NOT_FOUND", message: "File not found" });
} else if (e.message === "Invalid DEK version") {
throw new TRPCError({ code: "BAD_REQUEST", message: "Mismatched DEK version" });
}
}
throw e;
} finally {
sessionLocks.delete(uploadId);
}
}),
});
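
// Example client flow (a sketch, not part of this router): assumes a tRPC client
// `trpc` with this router mounted under `upload`, and a separate endpoint that
// receives the encrypted chunk bodies, indexed 0..chunks-1.
//
//   const { uploadId } = await trpc.upload.startFileUpload.mutate({ /* metadata */ });
//   // ...PUT each encrypted chunk to the chunk endpoint...
//   const { file } = await trpc.upload.completeFileUpload.mutate({ uploadId });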
export default uploadRouter;