Merge pull request #7 from kmc7468/migrate-to-postgresql

PostgreSQL + Kysely로 마이그레이션
This commit is contained in:
static
2025-01-20 21:51:23 +09:00
committed by GitHub
40 changed files with 1881 additions and 3097 deletions

View File

@@ -1,8 +1,12 @@
# Required environment variables # Required environment variables
DATABASE_PASSWORD=
SESSION_SECRET= SESSION_SECRET=
# Optional environment variables # Optional environment variables
DATABASE_URL= DATABASE_HOST=
DATABASE_PORT=
DATABASE_USER=
DATABASE_NAME=
SESSION_EXPIRES= SESSION_EXPIRES=
USER_CLIENT_CHALLENGE_EXPIRES= USER_CLIENT_CHALLENGE_EXPIRES=
SESSION_UPGRADE_CHALLENGE_EXPIRES= SESSION_UPGRADE_CHALLENGE_EXPIRES=

View File

@@ -2,6 +2,10 @@
FROM node:22-alpine AS base FROM node:22-alpine AS base
WORKDIR /app WORKDIR /app
RUN apk add --no-cache bash curl && \
curl -o /usr/local/bin/wait-for-it https://raw.githubusercontent.com/vishnubob/wait-for-it/master/wait-for-it.sh && \
chmod +x /usr/local/bin/wait-for-it
RUN npm install -g pnpm@9 RUN npm install -g pnpm@9
COPY pnpm-lock.yaml . COPY pnpm-lock.yaml .
@@ -10,10 +14,9 @@ FROM base AS build
RUN pnpm fetch RUN pnpm fetch
COPY . . COPY . .
RUN pnpm install --offline RUN pnpm install --offline && \
RUN pnpm build pnpm build && \
sed -i "s/http\.createServer()/http.createServer({ requestTimeout: 0 })/g" ./build/index.js
RUN sed -i "s/http\.createServer()/http.createServer({ requestTimeout: 0 })/g" ./build/index.js
# Deploy Stage # Deploy Stage
FROM base FROM base
@@ -23,9 +26,7 @@ COPY package.json .
RUN pnpm install --offline --prod RUN pnpm install --offline --prod
COPY --from=build /app/build ./build COPY --from=build /app/build ./build
COPY drizzle ./drizzle
EXPOSE 3000 EXPOSE 3000
ENV BODY_SIZE_LIMIT=Infinity ENV BODY_SIZE_LIMIT=Infinity
CMD ["bash", "-c", "wait-for-it ${DATABASE_HOST:-localhost}:${DATABASE_PORT:-5432} -- node ./build/index.js"]
CMD ["node", "./build/index.js"]

View File

@@ -23,7 +23,7 @@ vim .env # 아래를 참고하여 환경 변수를 설정해 주세요.
docker compose up --build -d docker compose up --build -d
``` ```
모든 데이터는 `./data` 디렉터리에 저장될 거예요. 모든 데이터는 `./data` 디렉터리 아래에 저장될 거예요.
### Environment Variables ### Environment Variables
@@ -31,7 +31,8 @@ docker compose up --build -d
|이름|필수|기본값|설명| |이름|필수|기본값|설명|
|:-|:-:|:-:|:-| |:-|:-:|:-:|:-|
|`SESSION_SECRET`|Y||Session ID의 서명을 위해 사용돼요. 안전한 값으로 설정해 주세요.| |`DATABASE_PASSWORD`|Y||데이터베이스에 접근하기 위해 필요한 비밀번호예요. 안전한 값으로 설정해 주세요.|
|`SESSION_SECRET`|Y||Session ID의 서명에 사용되는 비밀번호예요. 안전한 값으로 설정해 주세요.|
|`SESSION_EXPIRES`||`14d`|Session의 유효 시간이에요. Session은 마지막으로 사용된 후 설정된 유효 시간이 지나면 자동으로 삭제돼요.| |`SESSION_EXPIRES`||`14d`|Session의 유효 시간이에요. Session은 마지막으로 사용된 후 설정된 유효 시간이 지나면 자동으로 삭제돼요.|
|`USER_CLIENT_CHALLENGE_EXPIRES`||`5m`|암호 키를 서버에 처음 등록할 때 사용되는 챌린지의 유효 시간이에요.| |`USER_CLIENT_CHALLENGE_EXPIRES`||`5m`|암호 키를 서버에 처음 등록할 때 사용되는 챌린지의 유효 시간이에요.|
|`SESSION_UPGRADE_CHALLENGE_EXPIRES`||`5m`|암호 키와 함께 로그인할 때 사용되는 챌린지의 유효 시간이에요.| |`SESSION_UPGRADE_CHALLENGE_EXPIRES`||`5m`|암호 키와 함께 로그인할 때 사용되는 챌린지의 유효 시간이에요.|

15
docker-compose.dev.yaml Normal file
View File

@@ -0,0 +1,15 @@
# Development-only PostgreSQL database (started via `pnpm dev:db`,
# which runs: docker compose -f docker-compose.dev.yaml -p arkvault-dev up -d).
services:
  database:
    image: postgres:17.2
    restart: on-failure
    volumes:
      # Persist database files in a named volume across container restarts.
      - database:/var/lib/postgresql/data
    environment:
      # Empty expansion when unset — the postgres image then applies its own
      # defaults (presumably "postgres" for user/db) — TODO confirm.
      - POSTGRES_USER=${DATABASE_USER:-}
      - POSTGRES_PASSWORD=${DATABASE_PASSWORD:?} # Required
      - POSTGRES_DB=${DATABASE_NAME:-}
    ports:
      # Expose PostgreSQL on the host so the dev server can connect.
      - ${DATABASE_PORT:-5432}:5432
volumes:
  database:

View File

@@ -1,13 +1,17 @@
services: services:
server: server:
build: . build: .
restart: unless-stopped restart: on-failure
depends_on:
- database
user: ${CONTAINER_UID:-0}:${CONTAINER_GID:-0} user: ${CONTAINER_UID:-0}:${CONTAINER_GID:-0}
volumes: volumes:
- ./data:/app/data - ./data/library:/app/data/library
environment: environment:
# ArkVault # ArkVault
- DATABASE_URL=/app/data/database.sqlite - DATABASE_HOST=database
- DATABASE_USER=arkvault
- DATABASE_PASSWORD=${DATABASE_PASSWORD:?} # Required
- SESSION_SECRET=${SESSION_SECRET:?} # Required - SESSION_SECRET=${SESSION_SECRET:?} # Required
- SESSION_EXPIRES - SESSION_EXPIRES
- USER_CLIENT_CHALLENGE_EXPIRES - USER_CLIENT_CHALLENGE_EXPIRES
@@ -19,3 +23,13 @@ services:
- NODE_ENV=${NODE_ENV:-production} - NODE_ENV=${NODE_ENV:-production}
ports: ports:
- ${PORT:-80}:3000 - ${PORT:-80}:3000
database:
image: postgres:17.2-alpine
restart: on-failure
user: ${CONTAINER_UID:-0}:${CONTAINER_GID:-0}
volumes:
- ./data/database:/var/lib/postgresql/data
environment:
- POSTGRES_USER=arkvault
- POSTGRES_PASSWORD=${DATABASE_PASSWORD:?}

View File

@@ -1,13 +0,0 @@
// Drizzle Kit configuration for the (SQLite) database tooling:
// `db:push`, `db:generate`, `db:migrate`, and `db:studio` scripts.
import { defineConfig } from "drizzle-kit";

export default defineConfig({
  // Schema definitions live under the server-side db module.
  schema: "./src/lib/server/db/schema",
  dbCredentials: {
    // Falls back to a local database file when DATABASE_URL is unset.
    url: process.env.DATABASE_URL || "local.db",
  },
  // verbose/strict: print statements and require confirmation on pushes.
  verbose: true,
  strict: true,
  dialect: "sqlite",
});

View File

@@ -1,178 +0,0 @@
-- Initial SQLite schema generated by drizzle-kit (tag 0000_regular_the_watchers;
-- see meta/_journal.json). The `--> statement-breakpoint` markers are consumed
-- by the drizzle migrator to split this file into individual statements.

-- Client device: a pair of public keys (encryption + signature), each unique.
CREATE TABLE `client` (
	`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
	`encryption_public_key` text NOT NULL,
	`signature_public_key` text NOT NULL
);
--> statement-breakpoint
-- Association of a user with a client, with a registration state that
-- starts at 'challenging' (see user_client_challenge below).
CREATE TABLE `user_client` (
	`user_id` integer NOT NULL,
	`client_id` integer NOT NULL,
	`state` text DEFAULT 'challenging' NOT NULL,
	PRIMARY KEY(`client_id`, `user_id`),
	FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`client_id`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- One-shot challenge issued when a client registers its keys; bound to an
-- IP and an expiry, with a unique answer (indexed below).
CREATE TABLE `user_client_challenge` (
	`id` integer PRIMARY KEY NOT NULL,
	`user_id` integer NOT NULL,
	`client_id` integer NOT NULL,
	`answer` text NOT NULL,
	`allowed_ip` text NOT NULL,
	`expires_at` integer NOT NULL,
	FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`client_id`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`user_id`,`client_id`) REFERENCES `user_client`(`user_id`,`client_id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- Directory tree node; NULL parent_id denotes a root-level directory.
-- Name is stored encrypted; the DEK is itself encrypted under a MEK version.
CREATE TABLE `directory` (
	`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
	`parent_id` integer,
	`user_id` integer NOT NULL,
	`master_encryption_key_version` integer NOT NULL,
	`encrypted_data_encryption_key` text NOT NULL,
	`data_encryption_key_version` integer NOT NULL,
	`encrypted_name` text NOT NULL,
	FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`parent_id`) REFERENCES `directory`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`user_id`,`master_encryption_key_version`) REFERENCES `master_encryption_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- Audit log per directory; cascades away when the directory is deleted.
CREATE TABLE `directory_log` (
	`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
	`directory_id` integer NOT NULL,
	`timestamp` integer NOT NULL,
	`action` text NOT NULL,
	`new_name` text,
	FOREIGN KEY (`directory_id`) REFERENCES `directory`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
-- Encrypted file metadata; `path` locates the ciphertext blob on disk.
-- Optional HMAC columns tie content to an hmac_secret_key version.
CREATE TABLE `file` (
	`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
	`parent_id` integer,
	`user_id` integer NOT NULL,
	`path` text NOT NULL,
	`master_encryption_key_version` integer NOT NULL,
	`encrypted_data_encryption_key` text NOT NULL,
	`data_encryption_key_version` integer NOT NULL,
	`hmac_secret_key_version` integer,
	`content_hmac` text,
	`content_type` text NOT NULL,
	`encrypted_content_iv` text NOT NULL,
	`encrypted_content_hash` text NOT NULL,
	`encrypted_name` text NOT NULL,
	`encrypted_created_at` text,
	`encrypted_last_modified_at` text NOT NULL,
	FOREIGN KEY (`parent_id`) REFERENCES `directory`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`user_id`,`master_encryption_key_version`) REFERENCES `master_encryption_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`user_id`,`hmac_secret_key_version`) REFERENCES `hmac_secret_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- Audit log per file; cascades away when the file row is deleted.
CREATE TABLE `file_log` (
	`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
	`file_id` integer NOT NULL,
	`timestamp` integer NOT NULL,
	`action` text NOT NULL,
	`new_name` text,
	FOREIGN KEY (`file_id`) REFERENCES `file`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
-- Versioned per-user HMAC secret key, stored encrypted under a MEK version.
CREATE TABLE `hmac_secret_key` (
	`user_id` integer NOT NULL,
	`version` integer NOT NULL,
	`state` text NOT NULL,
	`master_encryption_key_version` integer NOT NULL,
	`encrypted_key` text NOT NULL,
	PRIMARY KEY(`user_id`, `version`),
	FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`user_id`,`master_encryption_key_version`) REFERENCES `master_encryption_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- Audit log for HMAC secret key lifecycle; action_by is the acting client.
CREATE TABLE `hmac_secret_key_log` (
	`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
	`user_id` integer NOT NULL,
	`hmac_secret_key_version` integer NOT NULL,
	`timestamp` integer NOT NULL,
	`action` text NOT NULL,
	`action_by` integer,
	FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`action_by`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`user_id`,`hmac_secret_key_version`) REFERENCES `hmac_secret_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- A MEK version wrapped for one specific client, plus the client's signature
-- over the wrapped key.
CREATE TABLE `client_master_encryption_key` (
	`user_id` integer NOT NULL,
	`client_id` integer NOT NULL,
	`version` integer NOT NULL,
	`encrypted_key` text NOT NULL,
	`encrypted_key_signature` text NOT NULL,
	PRIMARY KEY(`client_id`, `user_id`, `version`),
	FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`client_id`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`user_id`,`version`) REFERENCES `master_encryption_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- Versioned master encryption key (MEK) per user; retired_at set when rotated.
CREATE TABLE `master_encryption_key` (
	`user_id` integer NOT NULL,
	`version` integer NOT NULL,
	`state` text NOT NULL,
	`retired_at` integer,
	PRIMARY KEY(`user_id`, `version`),
	FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- Audit log for MEK lifecycle; action_by is the acting client.
CREATE TABLE `master_encryption_key_log` (
	`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
	`user_id` integer NOT NULL,
	`master_encryption_key_version` integer NOT NULL,
	`timestamp` integer NOT NULL,
	`action` text NOT NULL,
	`action_by` integer,
	FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`action_by`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`user_id`,`master_encryption_key_version`) REFERENCES `master_encryption_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- Login session; client_id is NULL until the session is upgraded with a key.
CREATE TABLE `session` (
	`id` text PRIMARY KEY NOT NULL,
	`user_id` integer NOT NULL,
	`client_id` integer,
	`created_at` integer NOT NULL,
	`last_used_at` integer NOT NULL,
	`last_used_by_ip` text,
	`last_used_by_user_agent` text,
	FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`client_id`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- One-shot challenge for upgrading a session with a client key; one per
-- session (unique index below), bound to an IP and an expiry.
CREATE TABLE `session_upgrade_challenge` (
	`id` integer PRIMARY KEY NOT NULL,
	`session_id` text NOT NULL,
	`client_id` integer NOT NULL,
	`answer` text NOT NULL,
	`allowed_ip` text NOT NULL,
	`expires_at` integer NOT NULL,
	FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`client_id`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- Account record; email is unique (index below).
CREATE TABLE `user` (
	`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
	`email` text NOT NULL,
	`password` text NOT NULL,
	`nickname` text NOT NULL
);
--> statement-breakpoint
-- Uniqueness constraints enforced via indexes.
CREATE UNIQUE INDEX `client_encryption_public_key_unique` ON `client` (`encryption_public_key`);--> statement-breakpoint
CREATE UNIQUE INDEX `client_signature_public_key_unique` ON `client` (`signature_public_key`);--> statement-breakpoint
CREATE UNIQUE INDEX `client_encryption_public_key_signature_public_key_unique` ON `client` (`encryption_public_key`,`signature_public_key`);--> statement-breakpoint
CREATE UNIQUE INDEX `user_client_challenge_answer_unique` ON `user_client_challenge` (`answer`);--> statement-breakpoint
CREATE UNIQUE INDEX `directory_encrypted_data_encryption_key_unique` ON `directory` (`encrypted_data_encryption_key`);--> statement-breakpoint
CREATE UNIQUE INDEX `file_path_unique` ON `file` (`path`);--> statement-breakpoint
CREATE UNIQUE INDEX `file_encrypted_data_encryption_key_unique` ON `file` (`encrypted_data_encryption_key`);--> statement-breakpoint
CREATE UNIQUE INDEX `hmac_secret_key_encrypted_key_unique` ON `hmac_secret_key` (`encrypted_key`);--> statement-breakpoint
CREATE UNIQUE INDEX `session_user_id_client_id_unique` ON `session` (`user_id`,`client_id`);--> statement-breakpoint
CREATE UNIQUE INDEX `session_upgrade_challenge_session_id_unique` ON `session_upgrade_challenge` (`session_id`);--> statement-breakpoint
CREATE UNIQUE INDEX `session_upgrade_challenge_answer_unique` ON `session_upgrade_challenge` (`answer`);--> statement-breakpoint
CREATE UNIQUE INDEX `user_email_unique` ON `user` (`email`);

File diff suppressed because it is too large Load Diff

View File

@@ -1,13 +0,0 @@
{
"version": "7",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1737219722656,
"tag": "0000_regular_the_watchers",
"breakpoints": true
}
]
}

18
kysely.config.ts Normal file
View File

@@ -0,0 +1,18 @@
/**
 * kysely-ctl configuration, used by the `db:migrate` script
 * (`kysely migrate`).
 *
 * Connection settings come from the DATABASE_* environment variables.
 * Any that are unset are passed through as `undefined`, letting the
 * `pg` Pool fall back to its own defaults.
 */
import { defineConfig } from "kysely-ctl";
import { Pool } from "pg";

export default defineConfig({
  dialect: "pg",
  dialectConfig: {
    pool: new Pool({
      host: process.env.DATABASE_HOST,
      // Explicit radix 10 — parseInt without a radix is a lint hazard and
      // can misparse values with a leading 0 in legacy environments.
      port: process.env.DATABASE_PORT ? parseInt(process.env.DATABASE_PORT, 10) : undefined,
      user: process.env.DATABASE_USER,
      password: process.env.DATABASE_PASSWORD,
      database: process.env.DATABASE_NAME,
    }),
  },
  migrations: {
    // Kysely migration files live alongside the server-side db code.
    migrationFolder: "./src/lib/server/db/migrations",
  },
});

View File

@@ -5,16 +5,14 @@
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "vite dev", "dev": "vite dev",
"dev:db": "docker compose -f docker-compose.dev.yaml -p arkvault-dev up -d",
"build": "vite build", "build": "vite build",
"preview": "vite preview", "preview": "vite preview",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"format": "prettier --write .", "format": "prettier --write .",
"lint": "prettier --check . && eslint .", "lint": "prettier --check . && eslint .",
"db:push": "drizzle-kit push", "db:migrate": "kysely migrate"
"db:generate": "drizzle-kit generate",
"db:migrate": "drizzle-kit migrate",
"db:studio": "drizzle-kit studio"
}, },
"devDependencies": { "devDependencies": {
"@eslint/compat": "^1.2.4", "@eslint/compat": "^1.2.4",
@@ -26,10 +24,10 @@
"@types/file-saver": "^2.0.7", "@types/file-saver": "^2.0.7",
"@types/ms": "^0.7.34", "@types/ms": "^0.7.34",
"@types/node-schedule": "^2.1.7", "@types/node-schedule": "^2.1.7",
"@types/pg": "^8.11.10",
"autoprefixer": "^10.4.20", "autoprefixer": "^10.4.20",
"axios": "^1.7.9", "axios": "^1.7.9",
"dexie": "^4.0.10", "dexie": "^4.0.10",
"drizzle-kit": "^0.22.8",
"eslint": "^9.17.0", "eslint": "^9.17.0",
"eslint-config-prettier": "^9.1.0", "eslint-config-prettier": "^9.1.0",
"eslint-plugin-svelte": "^2.46.1", "eslint-plugin-svelte": "^2.46.1",
@@ -38,6 +36,7 @@
"file-saver": "^2.0.5", "file-saver": "^2.0.5",
"globals": "^15.14.0", "globals": "^15.14.0",
"heic2any": "^0.0.4", "heic2any": "^0.0.4",
"kysely-ctl": "^0.10.1",
"mime": "^4.0.6", "mime": "^4.0.6",
"p-limit": "^6.2.0", "p-limit": "^6.2.0",
"prettier": "^3.4.2", "prettier": "^3.4.2",
@@ -54,10 +53,10 @@
"dependencies": { "dependencies": {
"@fastify/busboy": "^3.1.1", "@fastify/busboy": "^3.1.1",
"argon2": "^0.41.1", "argon2": "^0.41.1",
"better-sqlite3": "^11.7.2", "kysely": "^0.27.5",
"drizzle-orm": "^0.33.0",
"ms": "^2.1.3", "ms": "^2.1.3",
"node-schedule": "^2.1.1", "node-schedule": "^2.1.1",
"pg": "^8.13.1",
"uuid": "^11.0.4", "uuid": "^11.0.4",
"zod": "^3.24.1" "zod": "^3.24.1"
}, },

1449
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -2,15 +2,15 @@ import type { ServerInit } from "@sveltejs/kit";
import { sequence } from "@sveltejs/kit/hooks"; import { sequence } from "@sveltejs/kit/hooks";
import schedule from "node-schedule"; import schedule from "node-schedule";
import { cleanupExpiredUserClientChallenges } from "$lib/server/db/client"; import { cleanupExpiredUserClientChallenges } from "$lib/server/db/client";
import { migrateDB } from "$lib/server/db/drizzle"; import { migrateDB } from "$lib/server/db/kysely";
import { import {
cleanupExpiredSessions, cleanupExpiredSessions,
cleanupExpiredSessionUpgradeChallenges, cleanupExpiredSessionUpgradeChallenges,
} from "$lib/server/db/session"; } from "$lib/server/db/session";
import { authenticate, setAgentInfo } from "$lib/server/middlewares"; import { authenticate, setAgentInfo } from "$lib/server/middlewares";
export const init: ServerInit = () => { export const init: ServerInit = async () => {
migrateDB(); await migrateDB();
schedule.scheduleJob("0 * * * *", () => { schedule.scheduleJob("0 * * * *", () => {
cleanupExpiredUserClientChallenges(); cleanupExpiredUserClientChallenges();

View File

@@ -1,53 +1,97 @@
import { SqliteError } from "better-sqlite3"; import pg from "pg";
import { and, or, eq, gt, lte } from "drizzle-orm";
import db from "./drizzle";
import { IntegrityError } from "./error"; import { IntegrityError } from "./error";
import { client, userClient, userClientChallenge } from "./schema"; import db from "./kysely";
import type { UserClientState } from "./schema";
interface Client {
id: number;
encPubKey: string;
sigPubKey: string;
}
interface UserClient {
userId: number;
clientId: number;
state: UserClientState;
}
interface UserClientWithDetails extends UserClient {
encPubKey: string;
sigPubKey: string;
}
export const createClient = async (encPubKey: string, sigPubKey: string, userId: number) => { export const createClient = async (encPubKey: string, sigPubKey: string, userId: number) => {
return await db.transaction( return await db
async (tx) => { .transaction()
const clients = await tx .setIsolationLevel("serializable")
.select({ id: client.id }) .execute(async (trx) => {
.from(client) const client = await trx
.where(or(eq(client.encPubKey, sigPubKey), eq(client.sigPubKey, encPubKey))) .selectFrom("client")
.limit(1); .where((eb) =>
if (clients.length !== 0) { eb.or([
eb("encryption_public_key", "=", encPubKey),
eb("encryption_public_key", "=", sigPubKey),
eb("signature_public_key", "=", encPubKey),
eb("signature_public_key", "=", sigPubKey),
]),
)
.limit(1)
.executeTakeFirst();
if (client) {
throw new IntegrityError("Public key(s) already registered"); throw new IntegrityError("Public key(s) already registered");
} }
const newClients = await tx const { clientId } = await trx
.insert(client) .insertInto("client")
.values({ encPubKey, sigPubKey }) .values({ encryption_public_key: encPubKey, signature_public_key: sigPubKey })
.returning({ id: client.id }); .returning("id as clientId")
const { id: clientId } = newClients[0]!; .executeTakeFirstOrThrow();
await tx.insert(userClient).values({ userId, clientId }); await trx
.insertInto("user_client")
return clientId; .values({ user_id: userId, client_id: clientId })
}, .execute();
{ behavior: "exclusive" }, return { clientId };
); });
}; };
export const getClient = async (clientId: number) => { export const getClient = async (clientId: number) => {
const clients = await db.select().from(client).where(eq(client.id, clientId)).limit(1); const client = await db
return clients[0] ?? null; .selectFrom("client")
.selectAll()
.where("id", "=", clientId)
.limit(1)
.executeTakeFirst();
return client
? ({
id: client.id,
encPubKey: client.encryption_public_key,
sigPubKey: client.signature_public_key,
} satisfies Client)
: null;
}; };
export const getClientByPubKeys = async (encPubKey: string, sigPubKey: string) => { export const getClientByPubKeys = async (encPubKey: string, sigPubKey: string) => {
const clients = await db const client = await db
.select() .selectFrom("client")
.from(client) .selectAll()
.where(and(eq(client.encPubKey, encPubKey), eq(client.sigPubKey, sigPubKey))) .where("encryption_public_key", "=", encPubKey)
.limit(1); .where("signature_public_key", "=", sigPubKey)
return clients[0] ?? null; .limit(1)
.executeTakeFirst();
return client
? ({
id: client.id,
encPubKey: client.encryption_public_key,
sigPubKey: client.signature_public_key,
} satisfies Client)
: null;
}; };
export const createUserClient = async (userId: number, clientId: number) => { export const createUserClient = async (userId: number, clientId: number) => {
try { try {
await db.insert(userClient).values({ userId, clientId }); await db.insertInto("user_client").values({ user_id: userId, client_id: clientId }).execute();
} catch (e) { } catch (e) {
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_PRIMARYKEY") { if (e instanceof pg.DatabaseError && e.code === "23505") {
throw new IntegrityError("User client already exists"); throw new IntegrityError("User client already exists");
} }
throw e; throw e;
@@ -55,52 +99,76 @@ export const createUserClient = async (userId: number, clientId: number) => {
}; };
export const getAllUserClients = async (userId: number) => { export const getAllUserClients = async (userId: number) => {
return await db.select().from(userClient).where(eq(userClient.userId, userId)); const userClients = await db
.selectFrom("user_client")
.selectAll()
.where("user_id", "=", userId)
.execute();
return userClients.map(
({ user_id, client_id, state }) =>
({
userId: user_id,
clientId: client_id,
state,
}) satisfies UserClient,
);
}; };
export const getUserClient = async (userId: number, clientId: number) => { export const getUserClient = async (userId: number, clientId: number) => {
const userClients = await db const userClient = await db
.select() .selectFrom("user_client")
.from(userClient) .selectAll()
.where(and(eq(userClient.userId, userId), eq(userClient.clientId, clientId))) .where("user_id", "=", userId)
.limit(1); .where("client_id", "=", clientId)
return userClients[0] ?? null; .limit(1)
.executeTakeFirst();
return userClient
? ({
userId: userClient.user_id,
clientId: userClient.client_id,
state: userClient.state,
} satisfies UserClient)
: null;
}; };
export const getUserClientWithDetails = async (userId: number, clientId: number) => { export const getUserClientWithDetails = async (userId: number, clientId: number) => {
const userClients = await db const userClient = await db
.select() .selectFrom("user_client")
.from(userClient) .innerJoin("client", "user_client.client_id", "client.id")
.innerJoin(client, eq(userClient.clientId, client.id)) .selectAll()
.where(and(eq(userClient.userId, userId), eq(userClient.clientId, clientId))) .where("user_id", "=", userId)
.limit(1); .where("client_id", "=", clientId)
return userClients[0] ?? null; .limit(1)
.executeTakeFirst();
return userClient
? ({
userId: userClient.user_id,
clientId: userClient.client_id,
state: userClient.state,
encPubKey: userClient.encryption_public_key,
sigPubKey: userClient.signature_public_key,
} satisfies UserClientWithDetails)
: null;
}; };
export const setUserClientStateToPending = async (userId: number, clientId: number) => { export const setUserClientStateToPending = async (userId: number, clientId: number) => {
await db await db
.update(userClient) .updateTable("user_client")
.set({ state: "pending" }) .set({ state: "pending" })
.where( .where("user_id", "=", userId)
and( .where("client_id", "=", clientId)
eq(userClient.userId, userId), .where("state", "=", "challenging")
eq(userClient.clientId, clientId), .execute();
eq(userClient.state, "challenging"),
),
);
}; };
export const setUserClientStateToActive = async (userId: number, clientId: number) => { export const setUserClientStateToActive = async (userId: number, clientId: number) => {
await db await db
.update(userClient) .updateTable("user_client")
.set({ state: "active" }) .set({ state: "active" })
.where( .where("user_id", "=", userId)
and( .where("client_id", "=", clientId)
eq(userClient.userId, userId), .where("state", "=", "pending")
eq(userClient.clientId, clientId), .execute();
eq(userClient.state, "pending"),
),
);
}; };
export const registerUserClientChallenge = async ( export const registerUserClientChallenge = async (
@@ -110,30 +178,30 @@ export const registerUserClientChallenge = async (
allowedIp: string, allowedIp: string,
expiresAt: Date, expiresAt: Date,
) => { ) => {
await db.insert(userClientChallenge).values({ await db
userId, .insertInto("user_client_challenge")
clientId, .values({
answer, user_id: userId,
allowedIp, client_id: clientId,
expiresAt, answer,
}); allowed_ip: allowedIp,
expires_at: expiresAt,
})
.execute();
}; };
export const consumeUserClientChallenge = async (userId: number, answer: string, ip: string) => { export const consumeUserClientChallenge = async (userId: number, answer: string, ip: string) => {
const challenges = await db const challenge = await db
.delete(userClientChallenge) .deleteFrom("user_client_challenge")
.where( .where("user_id", "=", userId)
and( .where("answer", "=", answer)
eq(userClientChallenge.userId, userId), .where("allowed_ip", "=", ip)
eq(userClientChallenge.answer, answer), .where("expires_at", ">", new Date())
eq(userClientChallenge.allowedIp, ip), .returning("client_id")
gt(userClientChallenge.expiresAt, new Date()), .executeTakeFirst();
), return challenge ? { clientId: challenge.client_id } : null;
)
.returning({ clientId: userClientChallenge.clientId });
return challenges[0] ?? null;
}; };
export const cleanupExpiredUserClientChallenges = async () => { export const cleanupExpiredUserClientChallenges = async () => {
await db.delete(userClientChallenge).where(lte(userClientChallenge.expiresAt, new Date())); await db.deleteFrom("user_client_challenge").where("expires_at", "<=", new Date()).execute();
}; };

View File

@@ -1,15 +0,0 @@
// SQLite connection and Drizzle ORM instance shared by the server-side
// db modules.
import Database from "better-sqlite3";
import { drizzle } from "drizzle-orm/better-sqlite3";
import { migrate } from "drizzle-orm/better-sqlite3/migrator";
import env from "$lib/server/loadenv";

// Opens (or creates) the SQLite database file configured via env.databaseUrl.
const client = new Database(env.databaseUrl);
const db = drizzle(client);

// Applies any pending migrations from the ./drizzle folder.
// Deliberately a no-op outside production.
export const migrateDB = () => {
  if (process.env.NODE_ENV === "production") {
    migrate(db, { migrationsFolder: "./drizzle" });
  }
};

export default db;

View File

@@ -1,21 +1,23 @@
import { and, eq, isNull } from "drizzle-orm";
import db from "./drizzle";
import { IntegrityError } from "./error"; import { IntegrityError } from "./error";
import { directory, directoryLog, file, fileLog, hsk, mek } from "./schema"; import db from "./kysely";
import type { Ciphertext } from "./schema";
type DirectoryId = "root" | number; type DirectoryId = "root" | number;
export interface NewDirectoryParams { interface Directory {
id: number;
parentId: DirectoryId; parentId: DirectoryId;
userId: number; userId: number;
mekVersion: number; mekVersion: number;
encDek: string; encDek: string;
dekVersion: Date; dekVersion: Date;
encName: string; encName: Ciphertext;
encNameIv: string;
} }
export interface NewFileParams { export type NewDirectory = Omit<Directory, "id">;
interface File {
id: number;
parentId: DirectoryId; parentId: DirectoryId;
userId: number; userId: number;
path: string; path: string;
@@ -27,217 +29,264 @@ export interface NewFileParams {
contentType: string; contentType: string;
encContentIv: string; encContentIv: string;
encContentHash: string; encContentHash: string;
encName: string; encName: Ciphertext;
encNameIv: string; encCreatedAt: Ciphertext | null;
encCreatedAt: string | null; encLastModifiedAt: Ciphertext;
encCreatedAtIv: string | null;
encLastModifiedAt: string;
encLastModifiedAtIv: string;
} }
export const registerDirectory = async (params: NewDirectoryParams) => { export type NewFile = Omit<File, "id">;
await db.transaction(
async (tx) => {
const meks = await tx
.select({ version: mek.version })
.from(mek)
.where(and(eq(mek.userId, params.userId), eq(mek.state, "active")))
.limit(1);
if (meks[0]?.version !== params.mekVersion) {
throw new IntegrityError("Inactive MEK version");
}
const newDirectories = await tx export const registerDirectory = async (params: NewDirectory) => {
.insert(directory) await db.transaction().execute(async (trx) => {
.values({ const mek = await trx
parentId: params.parentId === "root" ? null : params.parentId, .selectFrom("master_encryption_key")
userId: params.userId, .select("version")
mekVersion: params.mekVersion, .where("user_id", "=", params.userId)
encDek: params.encDek, .where("state", "=", "active")
dekVersion: params.dekVersion, .limit(1)
encName: { ciphertext: params.encName, iv: params.encNameIv }, .forUpdate()
}) .executeTakeFirst();
.returning({ id: directory.id }); if (mek?.version !== params.mekVersion) {
const { id: directoryId } = newDirectories[0]!; throw new IntegrityError("Inactive MEK version");
await tx.insert(directoryLog).values({ }
directoryId,
const { directoryId } = await trx
.insertInto("directory")
.values({
parent_id: params.parentId !== "root" ? params.parentId : null,
user_id: params.userId,
master_encryption_key_version: params.mekVersion,
encrypted_data_encryption_key: params.encDek,
data_encryption_key_version: params.dekVersion,
encrypted_name: params.encName,
})
.returning("id as directoryId")
.executeTakeFirstOrThrow();
await trx
.insertInto("directory_log")
.values({
directory_id: directoryId,
timestamp: new Date(), timestamp: new Date(),
action: "create", action: "create",
newName: { ciphertext: params.encName, iv: params.encNameIv }, new_name: params.encName,
}); })
}, .execute();
{ behavior: "exclusive" }, });
);
}; };
export const getAllDirectoriesByParent = async (userId: number, parentId: DirectoryId) => { export const getAllDirectoriesByParent = async (userId: number, parentId: DirectoryId) => {
return await db let query = db.selectFrom("directory").selectAll().where("user_id", "=", userId);
.select() query =
.from(directory) parentId === "root"
.where( ? query.where("parent_id", "is", null)
and( : query.where("parent_id", "=", parentId);
eq(directory.userId, userId), const directories = await query.execute();
parentId === "root" ? isNull(directory.parentId) : eq(directory.parentId, parentId), return directories.map(
), (directory) =>
); ({
id: directory.id,
parentId: directory.parent_id ?? "root",
userId: directory.user_id,
mekVersion: directory.master_encryption_key_version,
encDek: directory.encrypted_data_encryption_key,
dekVersion: directory.data_encryption_key_version,
encName: directory.encrypted_name,
}) satisfies Directory,
);
}; };
export const getDirectory = async (userId: number, directoryId: number) => { export const getDirectory = async (userId: number, directoryId: number) => {
const res = await db const directory = await db
.select() .selectFrom("directory")
.from(directory) .selectAll()
.where(and(eq(directory.userId, userId), eq(directory.id, directoryId))) .where("id", "=", directoryId)
.limit(1); .where("user_id", "=", userId)
return res[0] ?? null; .limit(1)
.executeTakeFirst();
return directory
? ({
id: directory.id,
parentId: directory.parent_id ?? "root",
userId: directory.user_id,
mekVersion: directory.master_encryption_key_version,
encDek: directory.encrypted_data_encryption_key,
dekVersion: directory.data_encryption_key_version,
encName: directory.encrypted_name,
} satisfies Directory)
: null;
}; };
export const setDirectoryEncName = async ( export const setDirectoryEncName = async (
userId: number, userId: number,
directoryId: number, directoryId: number,
dekVersion: Date, dekVersion: Date,
encName: string, encName: Ciphertext,
encNameIv: string,
) => { ) => {
await db.transaction( await db.transaction().execute(async (trx) => {
async (tx) => { const directory = await trx
const directories = await tx .selectFrom("directory")
.select({ version: directory.dekVersion }) .select("data_encryption_key_version")
.from(directory) .where("id", "=", directoryId)
.where(and(eq(directory.userId, userId), eq(directory.id, directoryId))) .where("user_id", "=", userId)
.limit(1); .limit(1)
if (!directories[0]) { .forUpdate()
throw new IntegrityError("Directory not found"); .executeTakeFirst();
} else if (directories[0].version.getTime() !== dekVersion.getTime()) { if (!directory) {
throw new IntegrityError("Invalid DEK version"); throw new IntegrityError("Directory not found");
} } else if (directory.data_encryption_key_version.getTime() !== dekVersion.getTime()) {
throw new IntegrityError("Invalid DEK version");
}
await tx await trx
.update(directory) .updateTable("directory")
.set({ encName: { ciphertext: encName, iv: encNameIv } }) .set({ encrypted_name: encName })
.where(and(eq(directory.userId, userId), eq(directory.id, directoryId))); .where("id", "=", directoryId)
await tx.insert(directoryLog).values({ .where("user_id", "=", userId)
directoryId, .execute();
await trx
.insertInto("directory_log")
.values({
directory_id: directoryId,
timestamp: new Date(), timestamp: new Date(),
action: "rename", action: "rename",
newName: { ciphertext: encName, iv: encNameIv }, new_name: encName,
}); })
}, .execute();
{ behavior: "exclusive" }, });
);
}; };
export const unregisterDirectory = async (userId: number, directoryId: number) => { export const unregisterDirectory = async (userId: number, directoryId: number) => {
return await db.transaction( return await db
async (tx) => { .transaction()
.setIsolationLevel("repeatable read") // TODO: Sufficient?
.execute(async (trx) => {
const unregisterFiles = async (parentId: number) => { const unregisterFiles = async (parentId: number) => {
return await tx return await trx
.delete(file) .deleteFrom("file")
.where(and(eq(file.userId, userId), eq(file.parentId, parentId))) .where("parent_id", "=", parentId)
.returning({ id: file.id, path: file.path }); .where("user_id", "=", userId)
.returning(["id", "path"])
.execute();
}; };
const unregisterDirectoryRecursively = async ( const unregisterDirectoryRecursively = async (
directoryId: number, directoryId: number,
): Promise<{ id: number; path: string }[]> => { ): Promise<{ id: number; path: string }[]> => {
const files = await unregisterFiles(directoryId); const files = await unregisterFiles(directoryId);
const subDirectories = await tx const subDirectories = await trx
.select({ id: directory.id }) .selectFrom("directory")
.from(directory) .select("id")
.where(and(eq(directory.userId, userId), eq(directory.parentId, directoryId))); .where("parent_id", "=", directoryId)
.where("user_id", "=", userId)
.execute();
const subDirectoryFilePaths = await Promise.all( const subDirectoryFilePaths = await Promise.all(
subDirectories.map(async ({ id }) => await unregisterDirectoryRecursively(id)), subDirectories.map(async ({ id }) => await unregisterDirectoryRecursively(id)),
); );
const deleteRes = await tx.delete(directory).where(eq(directory.id, directoryId)); const deleteRes = await trx
if (deleteRes.changes === 0) { .deleteFrom("directory")
.where("id", "=", directoryId)
.where("user_id", "=", userId)
.executeTakeFirst();
if (deleteRes.numDeletedRows === 0n) {
throw new IntegrityError("Directory not found"); throw new IntegrityError("Directory not found");
} }
return files.concat(...subDirectoryFilePaths); return files.concat(...subDirectoryFilePaths);
}; };
return await unregisterDirectoryRecursively(directoryId); return await unregisterDirectoryRecursively(directoryId);
}, });
{ behavior: "exclusive" },
);
}; };
export const registerFile = async (params: NewFileParams) => { export const registerFile = async (params: NewFile) => {
if ( if ((params.hskVersion && !params.contentHmac) || (!params.hskVersion && params.contentHmac)) {
(params.hskVersion && !params.contentHmac) ||
(!params.hskVersion && params.contentHmac) ||
(params.encCreatedAt && !params.encCreatedAtIv) ||
(!params.encCreatedAt && params.encCreatedAtIv)
) {
throw new Error("Invalid arguments"); throw new Error("Invalid arguments");
} }
await db.transaction( await db.transaction().execute(async (trx) => {
async (tx) => { const mek = await trx
const meks = await tx .selectFrom("master_encryption_key")
.select({ version: mek.version }) .select("version")
.from(mek) .where("user_id", "=", params.userId)
.where(and(eq(mek.userId, params.userId), eq(mek.state, "active"))) .where("state", "=", "active")
.limit(1); .limit(1)
if (meks[0]?.version !== params.mekVersion) { .forUpdate()
throw new IntegrityError("Inactive MEK version"); .executeTakeFirst();
} if (mek?.version !== params.mekVersion) {
throw new IntegrityError("Inactive MEK version");
}
if (params.hskVersion) { if (params.hskVersion) {
const hsks = await tx const hsk = await trx
.select({ version: hsk.version }) .selectFrom("hmac_secret_key")
.from(hsk) .select("version")
.where(and(eq(hsk.userId, params.userId), eq(hsk.state, "active"))) .where("user_id", "=", params.userId)
.limit(1); .where("state", "=", "active")
if (hsks[0]?.version !== params.hskVersion) { .limit(1)
throw new IntegrityError("Inactive HSK version"); .forUpdate()
} .executeTakeFirst();
if (hsk?.version !== params.hskVersion) {
throw new IntegrityError("Inactive HSK version");
} }
}
const newFiles = await tx const { fileId } = await trx
.insert(file) .insertInto("file")
.values({ .values({
path: params.path, parent_id: params.parentId !== "root" ? params.parentId : null,
parentId: params.parentId === "root" ? null : params.parentId, user_id: params.userId,
userId: params.userId, path: params.path,
mekVersion: params.mekVersion, master_encryption_key_version: params.mekVersion,
hskVersion: params.hskVersion, encrypted_data_encryption_key: params.encDek,
encDek: params.encDek, data_encryption_key_version: params.dekVersion,
dekVersion: params.dekVersion, hmac_secret_key_version: params.hskVersion,
contentHmac: params.contentHmac, content_hmac: params.contentHmac,
contentType: params.contentType, content_type: params.contentType,
encContentIv: params.encContentIv, encrypted_content_iv: params.encContentIv,
encContentHash: params.encContentHash, encrypted_content_hash: params.encContentHash,
encName: { ciphertext: params.encName, iv: params.encNameIv }, encrypted_name: params.encName,
encCreatedAt: encrypted_created_at: params.encCreatedAt,
params.encCreatedAt && params.encCreatedAtIv encrypted_last_modified_at: params.encLastModifiedAt,
? { ciphertext: params.encCreatedAt, iv: params.encCreatedAtIv } })
: null, .returning("id as fileId")
encLastModifiedAt: { .executeTakeFirstOrThrow();
ciphertext: params.encLastModifiedAt, await trx
iv: params.encLastModifiedAtIv, .insertInto("file_log")
}, .values({
}) file_id: fileId,
.returning({ id: file.id });
const { id: fileId } = newFiles[0]!;
await tx.insert(fileLog).values({
fileId,
timestamp: new Date(), timestamp: new Date(),
action: "create", action: "create",
newName: { ciphertext: params.encName, iv: params.encNameIv }, new_name: params.encName,
}); })
}, .execute();
{ behavior: "exclusive" }, });
);
}; };
export const getAllFilesByParent = async (userId: number, parentId: DirectoryId) => { export const getAllFilesByParent = async (userId: number, parentId: DirectoryId) => {
return await db let query = db.selectFrom("file").selectAll().where("user_id", "=", userId);
.select() query =
.from(file) parentId === "root"
.where( ? query.where("parent_id", "is", null)
and( : query.where("parent_id", "=", parentId);
eq(file.userId, userId), const files = await query.execute();
parentId === "root" ? isNull(file.parentId) : eq(file.parentId, parentId), return files.map(
), (file) =>
); ({
id: file.id,
parentId: file.parent_id ?? "root",
userId: file.user_id,
path: file.path,
mekVersion: file.master_encryption_key_version,
encDek: file.encrypted_data_encryption_key,
dekVersion: file.data_encryption_key_version,
hskVersion: file.hmac_secret_key_version,
contentHmac: file.content_hmac,
contentType: file.content_type,
encContentIv: file.encrypted_content_iv,
encContentHash: file.encrypted_content_hash,
encName: file.encrypted_name,
encCreatedAt: file.encrypted_created_at,
encLastModifiedAt: file.encrypted_last_modified_at,
}) satisfies File,
);
}; };
export const getAllFileIdsByContentHmac = async ( export const getAllFileIdsByContentHmac = async (
@@ -245,69 +294,93 @@ export const getAllFileIdsByContentHmac = async (
hskVersion: number, hskVersion: number,
contentHmac: string, contentHmac: string,
) => { ) => {
return await db const files = await db
.select({ id: file.id }) .selectFrom("file")
.from(file) .select("id")
.where( .where("user_id", "=", userId)
and( .where("hmac_secret_key_version", "=", hskVersion)
eq(file.userId, userId), .where("content_hmac", "=", contentHmac)
eq(file.hskVersion, hskVersion), .execute();
eq(file.contentHmac, contentHmac), return files.map(({ id }) => ({ id }));
),
);
}; };
export const getFile = async (userId: number, fileId: number) => { export const getFile = async (userId: number, fileId: number) => {
const res = await db const file = await db
.select() .selectFrom("file")
.from(file) .selectAll()
.where(and(eq(file.userId, userId), eq(file.id, fileId))) .where("id", "=", fileId)
.limit(1); .where("user_id", "=", userId)
return res[0] ?? null; .limit(1)
.executeTakeFirst();
return file
? ({
id: file.id,
parentId: file.parent_id ?? "root",
userId: file.user_id,
path: file.path,
mekVersion: file.master_encryption_key_version,
encDek: file.encrypted_data_encryption_key,
dekVersion: file.data_encryption_key_version,
hskVersion: file.hmac_secret_key_version,
contentHmac: file.content_hmac,
contentType: file.content_type,
encContentIv: file.encrypted_content_iv,
encContentHash: file.encrypted_content_hash,
encName: file.encrypted_name,
encCreatedAt: file.encrypted_created_at,
encLastModifiedAt: file.encrypted_last_modified_at,
} satisfies File)
: null;
}; };
export const setFileEncName = async ( export const setFileEncName = async (
userId: number, userId: number,
fileId: number, fileId: number,
dekVersion: Date, dekVersion: Date,
encName: string, encName: Ciphertext,
encNameIv: string,
) => { ) => {
await db.transaction( await db.transaction().execute(async (trx) => {
async (tx) => { const file = await trx
const files = await tx .selectFrom("file")
.select({ version: file.dekVersion }) .select("data_encryption_key_version")
.from(file) .where("id", "=", fileId)
.where(and(eq(file.userId, userId), eq(file.id, fileId))) .where("user_id", "=", userId)
.limit(1); .limit(1)
if (!files[0]) { .forUpdate()
throw new IntegrityError("File not found"); .executeTakeFirst();
} else if (files[0].version.getTime() !== dekVersion.getTime()) { if (!file) {
throw new IntegrityError("Invalid DEK version"); throw new IntegrityError("File not found");
} } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) {
throw new IntegrityError("Invalid DEK version");
}
await tx await trx
.update(file) .updateTable("file")
.set({ encName: { ciphertext: encName, iv: encNameIv } }) .set({ encrypted_name: encName })
.where(and(eq(file.userId, userId), eq(file.id, fileId))); .where("id", "=", fileId)
await tx.insert(fileLog).values({ .where("user_id", "=", userId)
fileId, .execute();
await trx
.insertInto("file_log")
.values({
file_id: fileId,
timestamp: new Date(), timestamp: new Date(),
action: "rename", action: "rename",
newName: { ciphertext: encName, iv: encNameIv }, new_name: encName,
}); })
}, .execute();
{ behavior: "exclusive" }, });
);
}; };
export const unregisterFile = async (userId: number, fileId: number) => { export const unregisterFile = async (userId: number, fileId: number) => {
const files = await db const file = await db
.delete(file) .deleteFrom("file")
.where(and(eq(file.userId, userId), eq(file.id, fileId))) .where("id", "=", fileId)
.returning({ path: file.path }); .where("user_id", "=", userId)
if (!files[0]) { .returning("path")
.executeTakeFirst();
if (!file) {
throw new IntegrityError("File not found"); throw new IntegrityError("File not found");
} }
return files[0].path; return { path: file.path };
}; };

View File

@@ -1,8 +1,15 @@
import { SqliteError } from "better-sqlite3"; import pg from "pg";
import { and, eq } from "drizzle-orm";
import db from "./drizzle";
import { IntegrityError } from "./error"; import { IntegrityError } from "./error";
import { hsk, hskLog } from "./schema"; import db from "./kysely";
import type { HskState } from "./schema";
interface Hsk {
userId: number;
version: number;
state: HskState;
mekVersion: number;
encHsk: string;
}
export const registerInitialHsk = async ( export const registerInitialHsk = async (
userId: number, userId: number,
@@ -10,37 +17,52 @@ export const registerInitialHsk = async (
mekVersion: number, mekVersion: number,
encHsk: string, encHsk: string,
) => { ) => {
await db.transaction( await db.transaction().execute(async (trx) => {
async (tx) => { try {
try { await trx
await tx.insert(hsk).values({ .insertInto("hmac_secret_key")
userId, .values({
user_id: userId,
version: 1, version: 1,
state: "active", state: "active",
mekVersion, master_encryption_key_version: mekVersion,
encHsk, encrypted_key: encHsk,
}); })
await tx.insert(hskLog).values({ .execute();
userId, await trx
hskVersion: 1, .insertInto("hmac_secret_key_log")
.values({
user_id: userId,
hmac_secret_key_version: 1,
timestamp: new Date(), timestamp: new Date(),
action: "create", action: "create",
actionBy: createdBy, action_by: createdBy,
}); })
} catch (e) { .execute();
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_PRIMARYKEY") { } catch (e) {
throw new IntegrityError("HSK already registered"); if (e instanceof pg.DatabaseError && e.code === "23505") {
} throw new IntegrityError("HSK already registered");
throw e;
} }
}, throw e;
{ behavior: "exclusive" }, }
); });
}; };
export const getAllValidHsks = async (userId: number) => { export const getAllValidHsks = async (userId: number) => {
return await db const hsks = await db
.select() .selectFrom("hmac_secret_key")
.from(hsk) .selectAll()
.where(and(eq(hsk.userId, userId), eq(hsk.state, "active"))); .where("user_id", "=", userId)
.where("state", "=", "active")
.execute();
return hsks.map(
({ user_id, version, state, master_encryption_key_version, encrypted_key }) =>
({
userId: user_id,
version,
state: state as "active",
mekVersion: master_encryption_key_version,
encHsk: encrypted_key,
}) satisfies Hsk,
);
}; };

View File

@@ -0,0 +1,47 @@
import { Kysely, PostgresDialect, Migrator } from "kysely";
import pg from "pg";
import env from "$lib/server/loadenv";
import migrations from "./migrations";
import type { Database } from "./schema";
const dialect = new PostgresDialect({
pool: new pg.Pool({
host: env.database.host,
port: env.database.port,
user: env.database.user,
password: env.database.password,
database: env.database.name,
}),
});
const db = new Kysely<Database>({ dialect });
export const migrateDB = async () => {
if (env.nodeEnv !== "production") return;
const migrator = new Migrator({
db,
provider: {
async getMigrations() {
return migrations;
},
},
});
const { error, results } = await migrator.migrateToLatest();
if (error) {
const migration = results?.find(({ status }) => status === "Error");
if (migration) {
console.error(`Migration "${migration.migrationName}" failed.`);
}
console.error(error);
process.exit(1);
}
if (results?.length === 0) {
console.log("Database is up-to-date.");
} else {
console.log("Database migration completed.");
}
};
export default db;

View File

@@ -1,8 +1,19 @@
import { SqliteError } from "better-sqlite3"; import pg from "pg";
import { and, or, eq } from "drizzle-orm";
import db from "./drizzle";
import { IntegrityError } from "./error"; import { IntegrityError } from "./error";
import { mek, mekLog, clientMek } from "./schema"; import db from "./kysely";
import type { MekState } from "./schema";
interface Mek {
userId: number;
version: number;
state: MekState;
}
interface ClientMekWithDetails extends Mek {
clientId: number;
encMek: string;
encMekSig: string;
}
export const registerInitialMek = async ( export const registerInitialMek = async (
userId: number, userId: number,
@@ -10,58 +21,80 @@ export const registerInitialMek = async (
encMek: string, encMek: string,
encMekSig: string, encMekSig: string,
) => { ) => {
await db.transaction( await db.transaction().execute(async (trx) => {
async (tx) => { try {
try { await trx
await tx.insert(mek).values({ .insertInto("master_encryption_key")
userId, .values({
user_id: userId,
version: 1, version: 1,
state: "active", state: "active",
}); })
await tx.insert(clientMek).values({ .execute();
userId, await trx
clientId: createdBy, .insertInto("client_master_encryption_key")
mekVersion: 1, .values({
encMek, user_id: userId,
encMekSig, client_id: createdBy,
}); version: 1,
await tx.insert(mekLog).values({ encrypted_key: encMek,
userId, encrypted_key_signature: encMekSig,
mekVersion: 1, })
.execute();
await trx
.insertInto("master_encryption_key_log")
.values({
user_id: userId,
master_encryption_key_version: 1,
timestamp: new Date(), timestamp: new Date(),
action: "create", action: "create",
actionBy: createdBy, action_by: createdBy,
}); })
} catch (e) { .execute();
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_PRIMARYKEY") { } catch (e) {
throw new IntegrityError("MEK already registered"); if (e instanceof pg.DatabaseError && e.code === "23505") {
} throw new IntegrityError("MEK already registered");
throw e;
} }
}, throw e;
{ behavior: "exclusive" }, }
); });
}; };
export const getInitialMek = async (userId: number) => { export const getInitialMek = async (userId: number) => {
const meks = await db const mek = await db
.select() .selectFrom("master_encryption_key")
.from(mek) .selectAll()
.where(and(eq(mek.userId, userId), eq(mek.version, 1))) .where("user_id", "=", userId)
.limit(1); .where("version", "=", 1)
return meks[0] ?? null; .limit(1)
.executeTakeFirst();
return mek
? ({ userId: mek.user_id, version: mek.version, state: mek.state } satisfies Mek)
: null;
}; };
export const getAllValidClientMeks = async (userId: number, clientId: number) => { export const getAllValidClientMeks = async (userId: number, clientId: number) => {
return await db const clientMeks = await db
.select() .selectFrom("client_master_encryption_key")
.from(clientMek) .innerJoin("master_encryption_key", (join) =>
.innerJoin(mek, and(eq(clientMek.userId, mek.userId), eq(clientMek.mekVersion, mek.version))) join
.where( .onRef("client_master_encryption_key.user_id", "=", "master_encryption_key.user_id")
and( .onRef("client_master_encryption_key.version", "=", "master_encryption_key.version"),
eq(clientMek.userId, userId), )
eq(clientMek.clientId, clientId), .selectAll()
or(eq(mek.state, "active"), eq(mek.state, "retired")), .where("client_master_encryption_key.user_id", "=", userId)
), .where("client_master_encryption_key.client_id", "=", clientId)
); .where((eb) => eb.or([eb("state", "=", "active"), eb("state", "=", "retired")]))
.execute();
return clientMeks.map(
({ user_id, client_id, version, state, encrypted_key, encrypted_key_signature }) =>
({
userId: user_id,
version,
state: state as "active" | "retired",
clientId: client_id,
encMek: encrypted_key,
encMekSig: encrypted_key_signature,
}) satisfies ClientMekWithDetails,
);
}; };

View File

@@ -0,0 +1,224 @@
import { Kysely } from "kysely";
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const up = async (db: Kysely<any>) => {
// user.ts
await db.schema
.createTable("user")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("email", "text", (col) => col.unique().notNull())
.addColumn("nickname", "text", (col) => col.notNull())
.addColumn("password", "text", (col) => col.notNull())
.execute();
// client.ts
await db.schema
.createTable("client")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("encryption_public_key", "text", (col) => col.unique().notNull())
.addColumn("signature_public_key", "text", (col) => col.unique().notNull())
.addUniqueConstraint("client_ak01", ["encryption_public_key", "signature_public_key"])
.execute();
await db.schema
.createTable("user_client")
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("client_id", "integer", (col) => col.references("client.id").notNull())
.addColumn("state", "text", (col) => col.notNull().defaultTo("challenging"))
.addPrimaryKeyConstraint("user_client_pk", ["user_id", "client_id"])
.execute();
await db.schema
.createTable("user_client_challenge")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("client_id", "integer", (col) => col.references("client.id").notNull())
.addColumn("answer", "text", (col) => col.unique().notNull())
.addColumn("allowed_ip", "text", (col) => col.notNull())
.addColumn("expires_at", "timestamp(3)", (col) => col.notNull())
.addForeignKeyConstraint(
"user_client_challenge_fk01",
["user_id", "client_id"],
"user_client",
["user_id", "client_id"],
)
.execute();
// session.ts
await db.schema
.createTable("session")
.addColumn("id", "text", (col) => col.primaryKey())
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("client_id", "integer", (col) => col.references("client.id"))
.addColumn("created_at", "timestamp(3)", (col) => col.notNull())
.addColumn("last_used_at", "timestamp(3)", (col) => col.notNull())
.addColumn("last_used_by_ip", "text")
.addColumn("last_used_by_agent", "text")
.addUniqueConstraint("session_ak01", ["user_id", "client_id"])
.execute();
await db.schema
.createTable("session_upgrade_challenge")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("session_id", "text", (col) => col.references("session.id").unique().notNull())
.addColumn("client_id", "integer", (col) => col.references("client.id").notNull())
.addColumn("answer", "text", (col) => col.unique().notNull())
.addColumn("allowed_ip", "text", (col) => col.notNull())
.addColumn("expires_at", "timestamp(3)", (col) => col.notNull())
.execute();
// mek.ts
await db.schema
.createTable("master_encryption_key")
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("version", "integer", (col) => col.notNull())
.addColumn("state", "text", (col) => col.notNull())
.addPrimaryKeyConstraint("master_encryption_key_pk", ["user_id", "version"])
.execute();
await db.schema
.createTable("master_encryption_key_log")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("master_encryption_key_version", "integer", (col) => col.notNull())
.addColumn("timestamp", "timestamp(3)", (col) => col.notNull())
.addColumn("action", "text", (col) => col.notNull())
.addColumn("action_by", "integer", (col) => col.references("client.id"))
.addForeignKeyConstraint(
"master_encryption_key_log_fk01",
["user_id", "master_encryption_key_version"],
"master_encryption_key",
["user_id", "version"],
)
.execute();
await db.schema
.createTable("client_master_encryption_key")
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("client_id", "integer", (col) => col.references("client.id").notNull())
.addColumn("version", "integer", (col) => col.notNull())
.addColumn("encrypted_key", "text", (col) => col.notNull())
.addColumn("encrypted_key_signature", "text", (col) => col.notNull())
.addPrimaryKeyConstraint("client_master_encryption_key_pk", ["user_id", "client_id", "version"])
.addForeignKeyConstraint(
"client_master_encryption_key_fk01",
["user_id", "version"],
"master_encryption_key",
["user_id", "version"],
)
.execute();
// hsk.ts
await db.schema
.createTable("hmac_secret_key")
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("version", "integer", (col) => col.notNull())
.addColumn("state", "text", (col) => col.notNull())
.addColumn("master_encryption_key_version", "integer", (col) => col.notNull())
.addColumn("encrypted_key", "text", (col) => col.unique().notNull())
.addPrimaryKeyConstraint("hmac_secret_key_pk", ["user_id", "version"])
.addForeignKeyConstraint(
"hmac_secret_key_fk01",
["user_id", "master_encryption_key_version"],
"master_encryption_key",
["user_id", "version"],
)
.execute();
await db.schema
.createTable("hmac_secret_key_log")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("hmac_secret_key_version", "integer", (col) => col.notNull())
.addColumn("timestamp", "timestamp(3)", (col) => col.notNull())
.addColumn("action", "text", (col) => col.notNull())
.addColumn("action_by", "integer", (col) => col.references("client.id"))
.addForeignKeyConstraint(
"hmac_secret_key_log_fk01",
["user_id", "hmac_secret_key_version"],
"hmac_secret_key",
["user_id", "version"],
)
.execute();
// file.ts
await db.schema
.createTable("directory")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("parent_id", "integer", (col) => col.references("directory.id"))
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("master_encryption_key_version", "integer", (col) => col.notNull())
.addColumn("encrypted_data_encryption_key", "text", (col) => col.unique().notNull())
.addColumn("data_encryption_key_version", "timestamp(3)", (col) => col.notNull())
.addColumn("encrypted_name", "json", (col) => col.notNull())
.addForeignKeyConstraint(
"directory_fk01",
["user_id", "master_encryption_key_version"],
"master_encryption_key",
["user_id", "version"],
)
.execute();
await db.schema
.createTable("directory_log")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("directory_id", "integer", (col) =>
col.references("directory.id").onDelete("cascade").notNull(),
)
.addColumn("timestamp", "timestamp(3)", (col) => col.notNull())
.addColumn("action", "text", (col) => col.notNull())
.addColumn("new_name", "json")
.execute();
await db.schema
.createTable("file")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("parent_id", "integer", (col) => col.references("directory.id"))
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("path", "text", (col) => col.unique().notNull())
.addColumn("master_encryption_key_version", "integer", (col) => col.notNull())
.addColumn("encrypted_data_encryption_key", "text", (col) => col.unique().notNull())
.addColumn("data_encryption_key_version", "timestamp(3)", (col) => col.notNull())
.addColumn("hmac_secret_key_version", "integer")
.addColumn("content_hmac", "text")
.addColumn("content_type", "text", (col) => col.notNull())
.addColumn("encrypted_content_iv", "text", (col) => col.notNull())
.addColumn("encrypted_content_hash", "text", (col) => col.notNull())
.addColumn("encrypted_name", "json", (col) => col.notNull())
.addColumn("encrypted_created_at", "json")
.addColumn("encrypted_last_modified_at", "json", (col) => col.notNull())
.addForeignKeyConstraint(
"file_fk01",
["user_id", "master_encryption_key_version"],
"master_encryption_key",
["user_id", "version"],
)
.addForeignKeyConstraint(
"file_fk02",
["user_id", "hmac_secret_key_version"],
"hmac_secret_key",
["user_id", "version"],
)
.execute();
await db.schema
.createTable("file_log")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("file_id", "integer", (col) =>
col.references("file.id").onDelete("cascade").notNull(),
)
.addColumn("timestamp", "timestamp(3)", (col) => col.notNull())
.addColumn("action", "text", (col) => col.notNull())
.addColumn("new_name", "json")
.execute();
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const down = async (db: Kysely<any>) => {
await db.schema.dropTable("file_log").execute();
await db.schema.dropTable("file").execute();
await db.schema.dropTable("directory_log").execute();
await db.schema.dropTable("directory").execute();
await db.schema.dropTable("hmac_secret_key_log").execute();
await db.schema.dropTable("hmac_secret_key").execute();
await db.schema.dropTable("client_master_encryption_key").execute();
await db.schema.dropTable("master_encryption_key_log").execute();
await db.schema.dropTable("master_encryption_key").execute();
await db.schema.dropTable("session_upgrade_challenge").execute();
await db.schema.dropTable("session").execute();
await db.schema.dropTable("user_client_challenge").execute();
await db.schema.dropTable("user_client").execute();
await db.schema.dropTable("client").execute();
await db.schema.dropTable("user").execute();
};

View File

@@ -0,0 +1,5 @@
import * as Initial1737357000 from "./1737357000-Initial";
export default {
"1737357000-Initial": Initial1737357000,
};

View File

@@ -1,61 +1,32 @@
import { import type { ColumnType, Generated } from "kysely";
sqliteTable,
text,
integer,
primaryKey,
foreignKey,
unique,
} from "drizzle-orm/sqlite-core";
import { user } from "./user";
export const client = sqliteTable( interface ClientTable {
"client", id: Generated<number>;
{ encryption_public_key: string; // Base64
id: integer("id").primaryKey({ autoIncrement: true }), signature_public_key: string; // Base64
encPubKey: text("encryption_public_key").notNull().unique(), // Base64 }
sigPubKey: text("signature_public_key").notNull().unique(), // Base64
},
(t) => ({
unq: unique().on(t.encPubKey, t.sigPubKey),
}),
);
export const userClient = sqliteTable( export type UserClientState = "challenging" | "pending" | "active";
"user_client",
{
userId: integer("user_id")
.notNull()
.references(() => user.id),
clientId: integer("client_id")
.notNull()
.references(() => client.id),
state: text("state", { enum: ["challenging", "pending", "active"] })
.notNull()
.default("challenging"),
},
(t) => ({
pk: primaryKey({ columns: [t.userId, t.clientId] }),
}),
);
export const userClientChallenge = sqliteTable( interface UserClientTable {
"user_client_challenge", user_id: number;
{ client_id: number;
id: integer("id").primaryKey(), state: ColumnType<UserClientState, UserClientState | undefined>;
userId: integer("user_id") }
.notNull()
.references(() => user.id), interface UserClientChallengeTable {
clientId: integer("client_id") id: Generated<number>;
.notNull() user_id: number;
.references(() => client.id), client_id: number;
answer: text("answer").notNull().unique(), // Base64 answer: string; // Base64
allowedIp: text("allowed_ip").notNull(), allowed_ip: string;
expiresAt: integer("expires_at", { mode: "timestamp_ms" }).notNull(), expires_at: ColumnType<Date, Date, never>;
}, }
(t) => ({
ref: foreignKey({ declare module "./index" {
columns: [t.userId, t.clientId], interface Database {
foreignColumns: [userClient.userId, userClient.clientId], client: ClientTable;
}), user_client: UserClientTable;
}), user_client_challenge: UserClientChallengeTable;
); }
}

View File

@@ -1,88 +1,59 @@
import { sqliteTable, text, integer, foreignKey } from "drizzle-orm/sqlite-core"; import type { ColumnType, Generated } from "kysely";
import { hsk } from "./hsk";
import { mek } from "./mek";
import { user } from "./user";
const ciphertext = (name: string) => export type Ciphertext = {
text(name, { mode: "json" }).$type<{ ciphertext: string; // Base64
ciphertext: string; // Base64 iv: string; // Base64
iv: string; // Base64 };
}>();
export const directory = sqliteTable( interface DirectoryTable {
"directory", id: Generated<number>;
{ parent_id: number | null;
id: integer("id").primaryKey({ autoIncrement: true }), user_id: number;
parentId: integer("parent_id"), master_encryption_key_version: number;
userId: integer("user_id") encrypted_data_encryption_key: string; // Base64
.notNull() data_encryption_key_version: Date;
.references(() => user.id), encrypted_name: Ciphertext;
mekVersion: integer("master_encryption_key_version").notNull(), }
encDek: text("encrypted_data_encryption_key").notNull().unique(), // Base64
dekVersion: integer("data_encryption_key_version", { mode: "timestamp_ms" }).notNull(),
encName: ciphertext("encrypted_name").notNull(),
},
(t) => ({
ref1: foreignKey({
columns: [t.parentId],
foreignColumns: [t.id],
}),
ref2: foreignKey({
columns: [t.userId, t.mekVersion],
foreignColumns: [mek.userId, mek.version],
}),
}),
);
export const directoryLog = sqliteTable("directory_log", { interface DirectoryLogTable {
id: integer("id").primaryKey({ autoIncrement: true }), id: Generated<number>;
directoryId: integer("directory_id") directory_id: number;
.notNull() timestamp: ColumnType<Date, Date, never>;
.references(() => directory.id, { onDelete: "cascade" }), action: "create" | "rename";
timestamp: integer("timestamp", { mode: "timestamp_ms" }).notNull(), new_name: Ciphertext | null;
action: text("action", { enum: ["create", "rename"] }).notNull(), }
newName: ciphertext("new_name"),
});
export const file = sqliteTable( interface FileTable {
"file", id: Generated<number>;
{ parent_id: number | null;
id: integer("id").primaryKey({ autoIncrement: true }), user_id: number;
parentId: integer("parent_id").references(() => directory.id), path: string;
userId: integer("user_id") master_encryption_key_version: number;
.notNull() encrypted_data_encryption_key: string; // Base64
.references(() => user.id), data_encryption_key_version: Date;
path: text("path").notNull().unique(), hmac_secret_key_version: number | null;
mekVersion: integer("master_encryption_key_version").notNull(), content_hmac: string | null; // Base64
encDek: text("encrypted_data_encryption_key").notNull().unique(), // Base64 content_type: string;
dekVersion: integer("data_encryption_key_version", { mode: "timestamp_ms" }).notNull(), encrypted_content_iv: string; // Base64
hskVersion: integer("hmac_secret_key_version"), encrypted_content_hash: string; // Base64
contentHmac: text("content_hmac"), // Base64 encrypted_name: Ciphertext;
contentType: text("content_type").notNull(), encrypted_created_at: Ciphertext | null;
encContentIv: text("encrypted_content_iv").notNull(), // Base64 encrypted_last_modified_at: Ciphertext;
encContentHash: text("encrypted_content_hash").notNull(), // Base64 }
encName: ciphertext("encrypted_name").notNull(),
encCreatedAt: ciphertext("encrypted_created_at"),
encLastModifiedAt: ciphertext("encrypted_last_modified_at").notNull(),
},
(t) => ({
ref1: foreignKey({
columns: [t.userId, t.mekVersion],
foreignColumns: [mek.userId, mek.version],
}),
ref2: foreignKey({
columns: [t.userId, t.hskVersion],
foreignColumns: [hsk.userId, hsk.version],
}),
}),
);
export const fileLog = sqliteTable("file_log", { interface FileLogTable {
id: integer("id").primaryKey({ autoIncrement: true }), id: Generated<number>;
fileId: integer("file_id") file_id: number;
.notNull() timestamp: ColumnType<Date, Date, never>;
.references(() => file.id, { onDelete: "cascade" }), action: "create" | "rename";
timestamp: integer("timestamp", { mode: "timestamp_ms" }).notNull(), new_name: Ciphertext | null;
action: text("action", { enum: ["create", "rename"] }).notNull(), }
newName: ciphertext("new_name"),
}); declare module "./index" {
interface Database {
directory: DirectoryTable;
directory_log: DirectoryLogTable;
file: FileTable;
file_log: FileLogTable;
}
}

View File

@@ -1,44 +1,27 @@
import { sqliteTable, text, integer, primaryKey, foreignKey } from "drizzle-orm/sqlite-core"; import type { ColumnType, Generated } from "kysely";
import { client } from "./client";
import { mek } from "./mek";
import { user } from "./user";
export const hsk = sqliteTable( export type HskState = "active";
"hmac_secret_key",
{
userId: integer("user_id")
.notNull()
.references(() => user.id),
version: integer("version").notNull(),
state: text("state", { enum: ["active"] }).notNull(),
mekVersion: integer("master_encryption_key_version").notNull(),
encHsk: text("encrypted_key").notNull().unique(), // Base64
},
(t) => ({
pk: primaryKey({ columns: [t.userId, t.version] }),
ref: foreignKey({
columns: [t.userId, t.mekVersion],
foreignColumns: [mek.userId, mek.version],
}),
}),
);
export const hskLog = sqliteTable( interface HskTable {
"hmac_secret_key_log", user_id: number;
{ version: number;
id: integer("id").primaryKey({ autoIncrement: true }), state: HskState;
userId: integer("user_id") master_encryption_key_version: number;
.notNull() encrypted_key: string; // Base64
.references(() => user.id), }
hskVersion: integer("hmac_secret_key_version").notNull(),
timestamp: integer("timestamp", { mode: "timestamp_ms" }).notNull(), interface HskLogTable {
action: text("action", { enum: ["create"] }).notNull(), id: Generated<number>;
actionBy: integer("action_by").references(() => client.id), user_id: number;
}, hmac_secret_key_version: number;
(t) => ({ timestamp: ColumnType<Date, Date, never>;
ref: foreignKey({ action: "create";
columns: [t.userId, t.hskVersion], action_by: number | null;
foreignColumns: [hsk.userId, hsk.version], }
}),
}), declare module "./index" {
); interface Database {
hmac_secret_key: HskTable;
hmac_secret_key_log: HskLogTable;
}
}

View File

@@ -4,3 +4,6 @@ export * from "./hsk";
export * from "./mek";
export * from "./session";
export * from "./user";

// Intentionally empty: each schema file augments this interface with its own
// tables via `declare module "./index"`, so the full Database type is the
// union of all augmentations.
// eslint-disable-next-line @typescript-eslint/no-empty-object-type
export interface Database {}

View File

@@ -1,60 +1,34 @@
import { sqliteTable, text, integer, primaryKey, foreignKey } from "drizzle-orm/sqlite-core"; import type { ColumnType, Generated } from "kysely";
import { client } from "./client";
import { user } from "./user";
export const mek = sqliteTable( export type MekState = "active" | "retired" | "dead";
"master_encryption_key",
{
userId: integer("user_id")
.notNull()
.references(() => user.id),
version: integer("version").notNull(),
state: text("state", { enum: ["active", "retired", "dead"] }).notNull(),
retiredAt: integer("retired_at", { mode: "timestamp_ms" }),
},
(t) => ({
pk: primaryKey({ columns: [t.userId, t.version] }),
}),
);
export const mekLog = sqliteTable( interface MekTable {
"master_encryption_key_log", user_id: number;
{ version: number;
id: integer("id").primaryKey({ autoIncrement: true }), state: MekState;
userId: integer("user_id") }
.notNull()
.references(() => user.id),
mekVersion: integer("master_encryption_key_version").notNull(),
timestamp: integer("timestamp", { mode: "timestamp_ms" }).notNull(),
action: text("action", { enum: ["create"] }).notNull(),
actionBy: integer("action_by").references(() => client.id),
},
(t) => ({
ref: foreignKey({
columns: [t.userId, t.mekVersion],
foreignColumns: [mek.userId, mek.version],
}),
}),
);
export const clientMek = sqliteTable( interface MekLogTable {
"client_master_encryption_key", id: Generated<number>;
{ user_id: number;
userId: integer("user_id") master_encryption_key_version: number;
.notNull() timestamp: ColumnType<Date, Date, never>;
.references(() => user.id), action: "create";
clientId: integer("client_id") action_by: number | null;
.notNull() }
.references(() => client.id),
mekVersion: integer("version").notNull(), interface ClientMekTable {
encMek: text("encrypted_key").notNull(), // Base64 user_id: number;
encMekSig: text("encrypted_key_signature").notNull(), // Base64 client_id: number;
}, version: number;
(t) => ({ encrypted_key: string; // Base64
pk: primaryKey({ columns: [t.userId, t.clientId, t.mekVersion] }), encrypted_key_signature: string; // Base64
ref: foreignKey({ }
columns: [t.userId, t.mekVersion],
foreignColumns: [mek.userId, mek.version], declare module "./index" {
}), interface Database {
}), master_encryption_key: MekTable;
); master_encryption_key_log: MekLogTable;
client_master_encryption_key: ClientMekTable;
}
}

View File

@@ -1,35 +1,27 @@
import { sqliteTable, text, integer, unique } from "drizzle-orm/sqlite-core"; import type { ColumnType, Generated } from "kysely";
import { client } from "./client";
import { user } from "./user";
export const session = sqliteTable( interface SessionTable {
"session", id: string;
{ user_id: number;
id: text("id").notNull().primaryKey(), client_id: number | null;
userId: integer("user_id") created_at: ColumnType<Date, Date, never>;
.notNull() last_used_at: Date;
.references(() => user.id), last_used_by_ip: string | null;
clientId: integer("client_id").references(() => client.id), last_used_by_agent: string | null;
createdAt: integer("created_at", { mode: "timestamp_ms" }).notNull(), }
lastUsedAt: integer("last_used_at", { mode: "timestamp_ms" }).notNull(),
lastUsedByIp: text("last_used_by_ip"),
lastUsedByUserAgent: text("last_used_by_user_agent"),
},
(t) => ({
unq: unique().on(t.userId, t.clientId),
}),
);
export const sessionUpgradeChallenge = sqliteTable("session_upgrade_challenge", { interface SessionUpgradeChallengeTable {
id: integer("id").primaryKey(), id: Generated<number>;
sessionId: text("session_id") session_id: string;
.notNull() client_id: number;
.references(() => session.id) answer: string; // Base64
.unique(), allowed_ip: string;
clientId: integer("client_id") expires_at: ColumnType<Date, Date, never>;
.notNull() }
.references(() => client.id),
answer: text("answer").notNull().unique(), // Base64 declare module "./index" {
allowedIp: text("allowed_ip").notNull(), interface Database {
expiresAt: integer("expires_at", { mode: "timestamp_ms" }).notNull(), session: SessionTable;
}); session_upgrade_challenge: SessionUpgradeChallengeTable;
}
}

View File

@@ -1,8 +1,14 @@
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"; import type { Generated } from "kysely";
export const user = sqliteTable("user", { interface UserTable {
id: integer("id").primaryKey({ autoIncrement: true }), id: Generated<number>;
email: text("email").notNull().unique(), email: string;
password: text("password").notNull(), nickname: string;
nickname: text("nickname").notNull(), password: string;
}); }
declare module "./index" {
interface Database {
user: UserTable;
}
}

View File

@@ -1,30 +1,31 @@
import { SqliteError } from "better-sqlite3"; import pg from "pg";
import { and, eq, ne, gt, lte, isNull } from "drizzle-orm";
import env from "$lib/server/loadenv"; import env from "$lib/server/loadenv";
import db from "./drizzle";
import { IntegrityError } from "./error"; import { IntegrityError } from "./error";
import { session, sessionUpgradeChallenge } from "./schema"; import db from "./kysely";
export const createSession = async ( export const createSession = async (
userId: number, userId: number,
clientId: number | null, clientId: number | null,
sessionId: string, sessionId: string,
ip: string | null, ip: string | null,
userAgent: string | null, agent: string | null,
) => { ) => {
try { try {
const now = new Date(); const now = new Date();
await db.insert(session).values({ await db
id: sessionId, .insertInto("session")
userId, .values({
clientId, id: sessionId,
createdAt: now, user_id: userId,
lastUsedAt: now, client_id: clientId,
lastUsedByIp: ip || null, created_at: now,
lastUsedByUserAgent: userAgent || null, last_used_at: now,
}); last_used_by_ip: ip || null,
last_used_by_agent: agent || null,
})
.execute();
} catch (e) { } catch (e) {
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_UNIQUE") { if (e instanceof pg.DatabaseError && e.code === "23505") {
throw new IntegrityError("Session already exists"); throw new IntegrityError("Session already exists");
} }
throw e; throw e;
@@ -34,49 +35,55 @@ export const createSession = async (
export const refreshSession = async ( export const refreshSession = async (
sessionId: string, sessionId: string,
ip: string | null, ip: string | null,
userAgent: string | null, agent: string | null,
) => { ) => {
const now = new Date(); const now = new Date();
const sessions = await db const session = await db
.update(session) .updateTable("session")
.set({ .set({
lastUsedAt: now, last_used_at: now,
lastUsedByIp: ip || undefined, last_used_by_ip: ip !== "" ? ip : undefined, // Don't update if empty
lastUsedByUserAgent: userAgent || undefined, last_used_by_agent: agent !== "" ? agent : undefined, // Don't update if empty
}) })
.where( .where("id", "=", sessionId)
and( .where("last_used_at", ">", new Date(now.getTime() - env.session.exp))
eq(session.id, sessionId), .returning(["user_id", "client_id"])
gt(session.lastUsedAt, new Date(now.getTime() - env.session.exp)), .executeTakeFirst();
), if (!session) {
)
.returning({ userId: session.userId, clientId: session.clientId });
if (!sessions[0]) {
throw new IntegrityError("Session not found"); throw new IntegrityError("Session not found");
} }
return sessions[0]; return { userId: session.user_id, clientId: session.client_id };
}; };
export const upgradeSession = async (sessionId: string, clientId: number) => { export const upgradeSession = async (sessionId: string, clientId: number) => {
const res = await db const res = await db
.update(session) .updateTable("session")
.set({ clientId }) .set({ client_id: clientId })
.where(and(eq(session.id, sessionId), isNull(session.clientId))); .where("id", "=", sessionId)
if (res.changes === 0) { .where("client_id", "is", null)
.executeTakeFirst();
if (res.numUpdatedRows === 0n) {
throw new IntegrityError("Session not found"); throw new IntegrityError("Session not found");
} }
}; };
export const deleteSession = async (sessionId: string) => { export const deleteSession = async (sessionId: string) => {
await db.delete(session).where(eq(session.id, sessionId)); await db.deleteFrom("session").where("id", "=", sessionId).execute();
}; };
export const deleteAllOtherSessions = async (userId: number, sessionId: string) => { export const deleteAllOtherSessions = async (userId: number, sessionId: string) => {
await db.delete(session).where(and(eq(session.userId, userId), ne(session.id, sessionId))); await db
.deleteFrom("session")
.where("id", "!=", sessionId)
.where("user_id", "=", userId)
.execute();
}; };
export const cleanupExpiredSessions = async () => { export const cleanupExpiredSessions = async () => {
await db.delete(session).where(lte(session.lastUsedAt, new Date(Date.now() - env.session.exp))); await db
.deleteFrom("session")
.where("last_used_at", "<=", new Date(Date.now() - env.session.exp))
.execute();
}; };
export const registerSessionUpgradeChallenge = async ( export const registerSessionUpgradeChallenge = async (
@@ -87,15 +94,18 @@ export const registerSessionUpgradeChallenge = async (
expiresAt: Date, expiresAt: Date,
) => { ) => {
try { try {
await db.insert(sessionUpgradeChallenge).values({ await db
sessionId, .insertInto("session_upgrade_challenge")
clientId, .values({
answer, session_id: sessionId,
allowedIp, client_id: clientId,
expiresAt, answer,
}); allowed_ip: allowedIp,
expires_at: expiresAt,
})
.execute();
} catch (e) { } catch (e) {
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_UNIQUE") { if (e instanceof pg.DatabaseError && e.code === "23505") {
throw new IntegrityError("Challenge already registered"); throw new IntegrityError("Challenge already registered");
} }
throw e; throw e;
@@ -107,22 +117,17 @@ export const consumeSessionUpgradeChallenge = async (
answer: string, answer: string,
ip: string, ip: string,
) => { ) => {
const challenges = await db const challenge = await db
.delete(sessionUpgradeChallenge) .deleteFrom("session_upgrade_challenge")
.where( .where("session_id", "=", sessionId)
and( .where("answer", "=", answer)
eq(sessionUpgradeChallenge.sessionId, sessionId), .where("allowed_ip", "=", ip)
eq(sessionUpgradeChallenge.answer, answer), .where("expires_at", ">", new Date())
eq(sessionUpgradeChallenge.allowedIp, ip), .returning("client_id")
gt(sessionUpgradeChallenge.expiresAt, new Date()), .executeTakeFirst();
), return challenge ? { clientId: challenge.client_id } : null;
)
.returning({ clientId: sessionUpgradeChallenge.clientId });
return challenges[0] ?? null;
}; };
export const cleanupExpiredSessionUpgradeChallenges = async () => { export const cleanupExpiredSessionUpgradeChallenges = async () => {
await db await db.deleteFrom("session_upgrade_challenge").where("expires_at", "<=", new Date()).execute();
.delete(sessionUpgradeChallenge)
.where(lte(sessionUpgradeChallenge.expiresAt, new Date()));
}; };

View File

@@ -1,21 +1,36 @@
import { eq } from "drizzle-orm"; import db from "./kysely";
import db from "./drizzle";
import { user } from "./schema"; interface User {
id: number;
email: string;
nickname: string;
password: string;
}
export const getUser = async (userId: number) => { export const getUser = async (userId: number) => {
const users = await db.select().from(user).where(eq(user.id, userId)).limit(1); const user = await db
return users[0] ?? null; .selectFrom("user")
.selectAll()
.where("id", "=", userId)
.limit(1)
.executeTakeFirst();
return user ? (user satisfies User) : null;
}; };
export const getUserByEmail = async (email: string) => { export const getUserByEmail = async (email: string) => {
const users = await db.select().from(user).where(eq(user.email, email)).limit(1); const user = await db
return users[0] ?? null; .selectFrom("user")
}; .selectAll()
.where("email", "=", email)
export const setUserPassword = async (userId: number, password: string) => { .limit(1)
await db.update(user).set({ password }).where(eq(user.id, userId)); .executeTakeFirst();
return user ? (user satisfies User) : null;
}; };
export const setUserNickname = async (userId: number, nickname: string) => { export const setUserNickname = async (userId: number, nickname: string) => {
await db.update(user).set({ nickname }).where(eq(user.id, userId)); await db.updateTable("user").set({ nickname }).where("id", "=", userId).execute();
};
export const setUserPassword = async (userId: number, password: string) => {
await db.updateTable("user").set({ password }).where("id", "=", userId).execute();
}; };

View File

@@ -3,11 +3,19 @@ import { building } from "$app/environment";
import { env } from "$env/dynamic/private"; import { env } from "$env/dynamic/private";
if (!building) { if (!building) {
if (!env.DATABASE_PASSWORD) throw new Error("DATABASE_PASSWORD not set");
if (!env.SESSION_SECRET) throw new Error("SESSION_SECRET not set"); if (!env.SESSION_SECRET) throw new Error("SESSION_SECRET not set");
} }
export default { export default {
databaseUrl: env.DATABASE_URL || "local.db", nodeEnv: env.NODE_ENV || "development",
database: {
host: env.DATABASE_HOST,
port: env.DATABASE_PORT ? parseInt(env.DATABASE_PORT, 10) : undefined,
user: env.DATABASE_USER,
password: env.DATABASE_PASSWORD!,
name: env.DATABASE_NAME,
},
session: { session: {
secret: env.SESSION_SECRET!, secret: env.SESSION_SECRET!,
exp: ms(env.SESSION_EXPIRES || "14d"), exp: ms(env.SESSION_EXPIRES || "14d"),

View File

@@ -21,5 +21,5 @@ export const verifyClientEncMekSig = async (
} }
const data = JSON.stringify({ version, key: encMek }); const data = JSON.stringify({ version, key: encMek });
return verifySignature(Buffer.from(data), encMekSig, userClient.client.sigPubKey); return verifySignature(Buffer.from(data), encMekSig, userClient.sigPubKey);
}; };

View File

@@ -63,7 +63,7 @@ export const registerUserClient = async (
} }
try { try {
const clientId = await createClient(encPubKey, sigPubKey, userId); const { clientId } = await createClient(encPubKey, sigPubKey, userId);
return { challenge: await createUserClientChallenge(ip, userId, clientId, encPubKey) }; return { challenge: await createUserClientChallenge(ip, userId, clientId, encPubKey) };
} catch (e) { } catch (e) {
if (e instanceof IntegrityError && e.message === "Public key(s) already registered") { if (e instanceof IntegrityError && e.message === "Public key(s) already registered") {

View File

@@ -8,8 +8,9 @@ import {
setDirectoryEncName, setDirectoryEncName,
unregisterDirectory, unregisterDirectory,
getAllFilesByParent, getAllFilesByParent,
type NewDirectoryParams, type NewDirectory,
} from "$lib/server/db/file"; } from "$lib/server/db/file";
import type { Ciphertext } from "$lib/server/db/schema";
export const getDirectoryInformation = async (userId: number, directoryId: "root" | number) => { export const getDirectoryInformation = async (userId: number, directoryId: "root" | number) => {
const directory = directoryId !== "root" ? await getDirectory(userId, directoryId) : undefined; const directory = directoryId !== "root" ? await getDirectory(userId, directoryId) : undefined;
@@ -53,11 +54,10 @@ export const renameDirectory = async (
userId: number, userId: number,
directoryId: number, directoryId: number,
dekVersion: Date, dekVersion: Date,
newEncName: string, newEncName: Ciphertext,
newEncNameIv: string,
) => { ) => {
try { try {
await setDirectoryEncName(userId, directoryId, dekVersion, newEncName, newEncNameIv); await setDirectoryEncName(userId, directoryId, dekVersion, newEncName);
} catch (e) { } catch (e) {
if (e instanceof IntegrityError) { if (e instanceof IntegrityError) {
if (e.message === "Directory not found") { if (e.message === "Directory not found") {
@@ -70,7 +70,7 @@ export const renameDirectory = async (
} }
}; };
export const createDirectory = async (params: NewDirectoryParams) => { export const createDirectory = async (params: NewDirectory) => {
const oneMinuteAgo = new Date(Date.now() - 60 * 1000); const oneMinuteAgo = new Date(Date.now() - 60 * 1000);
const oneMinuteLater = new Date(Date.now() + 60 * 1000); const oneMinuteLater = new Date(Date.now() + 60 * 1000);
if (params.dekVersion <= oneMinuteAgo || params.dekVersion >= oneMinuteLater) { if (params.dekVersion <= oneMinuteAgo || params.dekVersion >= oneMinuteLater) {

View File

@@ -13,8 +13,9 @@ import {
getFile, getFile,
setFileEncName, setFileEncName,
unregisterFile, unregisterFile,
type NewFileParams, type NewFile,
} from "$lib/server/db/file"; } from "$lib/server/db/file";
import type { Ciphertext } from "$lib/server/db/schema";
import env from "$lib/server/loadenv"; import env from "$lib/server/loadenv";
export const getFileInformation = async (userId: number, fileId: number) => { export const getFileInformation = async (userId: number, fileId: number) => {
@@ -38,8 +39,8 @@ export const getFileInformation = async (userId: number, fileId: number) => {
export const deleteFile = async (userId: number, fileId: number) => { export const deleteFile = async (userId: number, fileId: number) => {
try { try {
const filePath = await unregisterFile(userId, fileId); const { path } = await unregisterFile(userId, fileId);
unlink(filePath); // Intended unlink(path); // Intended
} catch (e) { } catch (e) {
if (e instanceof IntegrityError && e.message === "File not found") { if (e instanceof IntegrityError && e.message === "File not found") {
error(404, "Invalid file id"); error(404, "Invalid file id");
@@ -65,11 +66,10 @@ export const renameFile = async (
userId: number, userId: number,
fileId: number, fileId: number,
dekVersion: Date, dekVersion: Date,
newEncName: string, newEncName: Ciphertext,
newEncNameIv: string,
) => { ) => {
try { try {
await setFileEncName(userId, fileId, dekVersion, newEncName, newEncNameIv); await setFileEncName(userId, fileId, dekVersion, newEncName);
} catch (e) { } catch (e) {
if (e instanceof IntegrityError) { if (e instanceof IntegrityError) {
if (e.message === "File not found") { if (e.message === "File not found") {
@@ -96,7 +96,7 @@ const safeUnlink = async (path: string) => {
}; };
export const uploadFile = async ( export const uploadFile = async (
params: Omit<NewFileParams, "path" | "encContentHash">, params: Omit<NewFile, "path" | "encContentHash">,
encContentStream: Readable, encContentStream: Readable,
encContentHash: Promise<string>, encContentHash: Promise<string>,
) => { ) => {

View File

@@ -7,11 +7,11 @@ import { verifyClientEncMekSig } from "$lib/server/modules/mek";
export const getClientMekList = async (userId: number, clientId: number) => { export const getClientMekList = async (userId: number, clientId: number) => {
const clientMeks = await getAllValidClientMeks(userId, clientId); const clientMeks = await getAllValidClientMeks(userId, clientId);
return { return {
encMeks: clientMeks.map((clientMek) => ({ encMeks: clientMeks.map(({ version, state, encMek, encMekSig }) => ({
version: clientMek.master_encryption_key.version, version,
state: clientMek.master_encryption_key.state as "active" | "retired", state,
encMek: clientMek.client_master_encryption_key.encMek, encMek,
encMekSig: clientMek.client_master_encryption_key.encMekSig, encMekSig,
})), })),
}; };
}; };

View File

@@ -20,6 +20,6 @@ export const POST: RequestHandler = async ({ locals, params, request }) => {
if (!bodyZodRes.success) error(400, "Invalid request body"); if (!bodyZodRes.success) error(400, "Invalid request body");
const { dekVersion, name, nameIv } = bodyZodRes.data; const { dekVersion, name, nameIv } = bodyZodRes.data;
await renameDirectory(userId, id, new Date(dekVersion), name, nameIv); await renameDirectory(userId, id, new Date(dekVersion), { ciphertext: name, iv: nameIv });
return text("Directory renamed", { headers: { "Content-Type": "text/plain" } }); return text("Directory renamed", { headers: { "Content-Type": "text/plain" } });
}; };

View File

@@ -17,8 +17,7 @@ export const POST: RequestHandler = async ({ locals, request }) => {
mekVersion, mekVersion,
encDek: dek, encDek: dek,
dekVersion: new Date(dekVersion), dekVersion: new Date(dekVersion),
encName: name, encName: { ciphertext: name, iv: nameIv },
encNameIv: nameIv,
}); });
return text("Directory created", { headers: { "Content-Type": "text/plain" } }); return text("Directory created", { headers: { "Content-Type": "text/plain" } });
}; };

View File

@@ -20,6 +20,6 @@ export const POST: RequestHandler = async ({ locals, params, request }) => {
if (!bodyZodRes.success) error(400, "Invalid request body"); if (!bodyZodRes.success) error(400, "Invalid request body");
const { dekVersion, name, nameIv } = bodyZodRes.data; const { dekVersion, name, nameIv } = bodyZodRes.data;
await renameFile(userId, id, new Date(dekVersion), name, nameIv); await renameFile(userId, id, new Date(dekVersion), { ciphertext: name, iv: nameIv });
return text("File renamed", { headers: { "Content-Type": "text/plain" } }); return text("File renamed", { headers: { "Content-Type": "text/plain" } });
}; };

View File

@@ -40,12 +40,9 @@ const parseFileMetadata = (userId: number, json: string) => {
contentHmac, contentHmac,
contentType, contentType,
encContentIv: contentIv, encContentIv: contentIv,
encName: name, encName: { ciphertext: name, iv: nameIv },
encNameIv: nameIv, encCreatedAt: createdAt && createdAtIv ? { ciphertext: createdAt, iv: createdAtIv } : null,
encCreatedAt: createdAt ?? null, encLastModifiedAt: { ciphertext: lastModifiedAt, iv: lastModifiedAtIv },
encCreatedAtIv: createdAtIv ?? null,
encLastModifiedAt: lastModifiedAt,
encLastModifiedAtIv: lastModifiedAtIv,
} satisfies FileMetadata; } satisfies FileMetadata;
}; };