Mirror of https://github.com/kmc7468/arkvault.git (synced 2025-12-12 21:08:46 +00:00)

Merge branch 'dev' into add-file-category
@@ -1,8 +1,12 @@
# Required environment variables
DATABASE_PASSWORD=
SESSION_SECRET=

# Optional environment variables
DATABASE_URL=
DATABASE_HOST=
DATABASE_PORT=
DATABASE_USER=
DATABASE_NAME=
SESSION_EXPIRES=
USER_CLIENT_CHALLENGE_EXPIRES=
SESSION_UPGRADE_CHALLENGE_EXPIRES=
15
Dockerfile
@@ -2,6 +2,10 @@
FROM node:22-alpine AS base
WORKDIR /app

RUN apk add --no-cache bash curl && \
    curl -o /usr/local/bin/wait-for-it https://raw.githubusercontent.com/vishnubob/wait-for-it/master/wait-for-it.sh && \
    chmod +x /usr/local/bin/wait-for-it

RUN npm install -g pnpm@9
COPY pnpm-lock.yaml .

@@ -10,10 +14,9 @@ FROM base AS build
RUN pnpm fetch

COPY . .
RUN pnpm install --offline
RUN pnpm build

RUN sed -i "s/http\.createServer()/http.createServer({ requestTimeout: 0 })/g" ./build/index.js
RUN pnpm install --offline && \
    pnpm build && \
    sed -i "s/http\.createServer()/http.createServer({ requestTimeout: 0 })/g" ./build/index.js

# Deploy Stage
FROM base
@@ -23,9 +26,7 @@ COPY package.json .
RUN pnpm install --offline --prod

COPY --from=build /app/build ./build
COPY drizzle ./drizzle

EXPOSE 3000
ENV BODY_SIZE_LIMIT=Infinity

CMD ["node", "./build/index.js"]
CMD ["bash", "-c", "wait-for-it ${DATABASE_HOST:-localhost}:${DATABASE_PORT:-5432} -- node ./build/index.js"]
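The `sed` line rewrites SvelteKit's generated `build/index.js` so the Node HTTP server is created with `requestTimeout: 0`; Node 18+ otherwise aborts a request whose body has not fully arrived within 300 seconds, which would cut off large encrypted uploads. A minimal sketch of what the patched call amounts to (the handler below is a placeholder, not ArkVault's real one):

```ts
import http from "node:http";

// Equivalent of the patched line in build/index.js:
// requestTimeout: 0 disables Node's default 300 s request timeout,
// so very large uploads are not aborted while the body is still streaming in.
const server = http.createServer({ requestTimeout: 0 }, (req, res) => {
  res.writeHead(200, { "content-type": "text/plain" });
  res.end("ok"); // placeholder handler; SvelteKit installs its own
});

server.listen(3000);
```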
@@ -23,7 +23,7 @@ vim .env # Set the environment variables by referring to the table below.
docker compose up --build -d
```

All data will be stored in the `./data` directory.
All data will be stored under the `./data` directory.

### Environment Variables

@@ -31,7 +31,8 @@ docker compose up --build -d

|Name|Required|Default|Description|
|:-|:-:|:-:|:-|
|`SESSION_SECRET`|Y||Used to sign session IDs. Set it to a secure value.|
|`DATABASE_PASSWORD`|Y||The password required to access the database. Set it to a secure value.|
|`SESSION_SECRET`|Y||The password used to sign session IDs. Set it to a secure value.|
|`SESSION_EXPIRES`||`14d`|The session lifetime. A session is deleted automatically once this period has passed since it was last used.|
|`USER_CLIENT_CHALLENGE_EXPIRES`||`5m`|The lifetime of the challenge used when a cryptographic key is first registered with the server.|
|`SESSION_UPGRADE_CHALLENGE_EXPIRES`||`5m`|The lifetime of the challenge used when logging in with a cryptographic key.|
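The duration-style defaults above (`14d`, `5m`) use the format of the `ms` package, which is listed in the project's dependencies; presumably `src/lib/server/loadenv` converts them to milliseconds along these lines (a hypothetical sketch, not the project's actual code):

```ts
import ms from "ms";

// Hypothetical sketch: turn expiry strings like those in the table above
// into millisecond counts, falling back to the documented defaults.
const sessionExpires = ms(process.env.SESSION_EXPIRES ?? "14d");
const challengeExpires = ms(process.env.USER_CLIENT_CHALLENGE_EXPIRES ?? "5m");

console.log(sessionExpires); // 1209600000 (14 days)
console.log(challengeExpires); // 300000 (5 minutes)
```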
15
docker-compose.dev.yaml
Normal file
@@ -0,0 +1,15 @@
services:
  database:
    image: postgres:17.2
    restart: on-failure
    volumes:
      - database:/var/lib/postgresql/data
    environment:
      - POSTGRES_USER=${DATABASE_USER:-}
      - POSTGRES_PASSWORD=${DATABASE_PASSWORD:?} # Required
      - POSTGRES_DB=${DATABASE_NAME:-}
    ports:
      - ${DATABASE_PORT:-5432}:5432

volumes:
  database:
@@ -1,13 +1,17 @@
services:
  server:
    build: .
    restart: unless-stopped
    restart: on-failure
    depends_on:
      - database
    user: ${CONTAINER_UID:-0}:${CONTAINER_GID:-0}
    volumes:
      - ./data:/app/data
      - ./data/library:/app/data/library
    environment:
      # ArkVault
      - DATABASE_URL=/app/data/database.sqlite
      - DATABASE_HOST=database
      - DATABASE_USER=arkvault
      - DATABASE_PASSWORD=${DATABASE_PASSWORD:?} # Required
      - SESSION_SECRET=${SESSION_SECRET:?} # Required
      - SESSION_EXPIRES
      - USER_CLIENT_CHALLENGE_EXPIRES
@@ -19,3 +23,13 @@ services:
      - NODE_ENV=${NODE_ENV:-production}
    ports:
      - ${PORT:-80}:3000

  database:
    image: postgres:17.2-alpine
    restart: on-failure
    user: ${CONTAINER_UID:-0}:${CONTAINER_GID:-0}
    volumes:
      - ./data/database:/var/lib/postgresql/data
    environment:
      - POSTGRES_USER=arkvault
      - POSTGRES_PASSWORD=${DATABASE_PASSWORD:?}
@@ -1,13 +0,0 @@
import { defineConfig } from "drizzle-kit";

export default defineConfig({
  schema: "./src/lib/server/db/schema",

  dbCredentials: {
    url: process.env.DATABASE_URL || "local.db",
  },

  verbose: true,
  strict: true,
  dialect: "sqlite",
});
@@ -1,178 +0,0 @@
|
||||
CREATE TABLE `client` (
|
||||
`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
`encryption_public_key` text NOT NULL,
|
||||
`signature_public_key` text NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `user_client` (
|
||||
`user_id` integer NOT NULL,
|
||||
`client_id` integer NOT NULL,
|
||||
`state` text DEFAULT 'challenging' NOT NULL,
|
||||
PRIMARY KEY(`client_id`, `user_id`),
|
||||
FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`client_id`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `user_client_challenge` (
|
||||
`id` integer PRIMARY KEY NOT NULL,
|
||||
`user_id` integer NOT NULL,
|
||||
`client_id` integer NOT NULL,
|
||||
`answer` text NOT NULL,
|
||||
`allowed_ip` text NOT NULL,
|
||||
`expires_at` integer NOT NULL,
|
||||
FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`client_id`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`user_id`,`client_id`) REFERENCES `user_client`(`user_id`,`client_id`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `directory` (
|
||||
`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
`parent_id` integer,
|
||||
`user_id` integer NOT NULL,
|
||||
`master_encryption_key_version` integer NOT NULL,
|
||||
`encrypted_data_encryption_key` text NOT NULL,
|
||||
`data_encryption_key_version` integer NOT NULL,
|
||||
`encrypted_name` text NOT NULL,
|
||||
FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`parent_id`) REFERENCES `directory`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`user_id`,`master_encryption_key_version`) REFERENCES `master_encryption_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `directory_log` (
|
||||
`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
`directory_id` integer NOT NULL,
|
||||
`timestamp` integer NOT NULL,
|
||||
`action` text NOT NULL,
|
||||
`new_name` text,
|
||||
FOREIGN KEY (`directory_id`) REFERENCES `directory`(`id`) ON UPDATE no action ON DELETE cascade
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `file` (
|
||||
`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
`parent_id` integer,
|
||||
`user_id` integer NOT NULL,
|
||||
`path` text NOT NULL,
|
||||
`master_encryption_key_version` integer NOT NULL,
|
||||
`encrypted_data_encryption_key` text NOT NULL,
|
||||
`data_encryption_key_version` integer NOT NULL,
|
||||
`hmac_secret_key_version` integer,
|
||||
`content_hmac` text,
|
||||
`content_type` text NOT NULL,
|
||||
`encrypted_content_iv` text NOT NULL,
|
||||
`encrypted_content_hash` text NOT NULL,
|
||||
`encrypted_name` text NOT NULL,
|
||||
`encrypted_created_at` text,
|
||||
`encrypted_last_modified_at` text NOT NULL,
|
||||
FOREIGN KEY (`parent_id`) REFERENCES `directory`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`user_id`,`master_encryption_key_version`) REFERENCES `master_encryption_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`user_id`,`hmac_secret_key_version`) REFERENCES `hmac_secret_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `file_log` (
|
||||
`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
`file_id` integer NOT NULL,
|
||||
`timestamp` integer NOT NULL,
|
||||
`action` text NOT NULL,
|
||||
`new_name` text,
|
||||
FOREIGN KEY (`file_id`) REFERENCES `file`(`id`) ON UPDATE no action ON DELETE cascade
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `hmac_secret_key` (
|
||||
`user_id` integer NOT NULL,
|
||||
`version` integer NOT NULL,
|
||||
`state` text NOT NULL,
|
||||
`master_encryption_key_version` integer NOT NULL,
|
||||
`encrypted_key` text NOT NULL,
|
||||
PRIMARY KEY(`user_id`, `version`),
|
||||
FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`user_id`,`master_encryption_key_version`) REFERENCES `master_encryption_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `hmac_secret_key_log` (
|
||||
`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
`user_id` integer NOT NULL,
|
||||
`hmac_secret_key_version` integer NOT NULL,
|
||||
`timestamp` integer NOT NULL,
|
||||
`action` text NOT NULL,
|
||||
`action_by` integer,
|
||||
FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`action_by`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`user_id`,`hmac_secret_key_version`) REFERENCES `hmac_secret_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `client_master_encryption_key` (
|
||||
`user_id` integer NOT NULL,
|
||||
`client_id` integer NOT NULL,
|
||||
`version` integer NOT NULL,
|
||||
`encrypted_key` text NOT NULL,
|
||||
`encrypted_key_signature` text NOT NULL,
|
||||
PRIMARY KEY(`client_id`, `user_id`, `version`),
|
||||
FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`client_id`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`user_id`,`version`) REFERENCES `master_encryption_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `master_encryption_key` (
|
||||
`user_id` integer NOT NULL,
|
||||
`version` integer NOT NULL,
|
||||
`state` text NOT NULL,
|
||||
`retired_at` integer,
|
||||
PRIMARY KEY(`user_id`, `version`),
|
||||
FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `master_encryption_key_log` (
|
||||
`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
`user_id` integer NOT NULL,
|
||||
`master_encryption_key_version` integer NOT NULL,
|
||||
`timestamp` integer NOT NULL,
|
||||
`action` text NOT NULL,
|
||||
`action_by` integer,
|
||||
FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`action_by`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`user_id`,`master_encryption_key_version`) REFERENCES `master_encryption_key`(`user_id`,`version`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `session` (
|
||||
`id` text PRIMARY KEY NOT NULL,
|
||||
`user_id` integer NOT NULL,
|
||||
`client_id` integer,
|
||||
`created_at` integer NOT NULL,
|
||||
`last_used_at` integer NOT NULL,
|
||||
`last_used_by_ip` text,
|
||||
`last_used_by_user_agent` text,
|
||||
FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`client_id`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `session_upgrade_challenge` (
|
||||
`id` integer PRIMARY KEY NOT NULL,
|
||||
`session_id` text NOT NULL,
|
||||
`client_id` integer NOT NULL,
|
||||
`answer` text NOT NULL,
|
||||
`allowed_ip` text NOT NULL,
|
||||
`expires_at` integer NOT NULL,
|
||||
FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE no action,
|
||||
FOREIGN KEY (`client_id`) REFERENCES `client`(`id`) ON UPDATE no action ON DELETE no action
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `user` (
|
||||
`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
`email` text NOT NULL,
|
||||
`password` text NOT NULL,
|
||||
`nickname` text NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `client_encryption_public_key_unique` ON `client` (`encryption_public_key`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `client_signature_public_key_unique` ON `client` (`signature_public_key`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `client_encryption_public_key_signature_public_key_unique` ON `client` (`encryption_public_key`,`signature_public_key`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `user_client_challenge_answer_unique` ON `user_client_challenge` (`answer`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `directory_encrypted_data_encryption_key_unique` ON `directory` (`encrypted_data_encryption_key`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `file_path_unique` ON `file` (`path`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `file_encrypted_data_encryption_key_unique` ON `file` (`encrypted_data_encryption_key`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `hmac_secret_key_encrypted_key_unique` ON `hmac_secret_key` (`encrypted_key`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `session_user_id_client_id_unique` ON `session` (`user_id`,`client_id`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `session_upgrade_challenge_session_id_unique` ON `session_upgrade_challenge` (`session_id`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `session_upgrade_challenge_answer_unique` ON `session_upgrade_challenge` (`answer`);--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `user_email_unique` ON `user` (`email`);
|
||||
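This SQLite schema is deleted because the same tables are recreated for PostgreSQL by the new Kysely migration later in this diff. Composite foreign keys such as `FOREIGN KEY (user_id, master_encryption_key_version) REFERENCES master_encryption_key(user_id, version)` map onto Kysely's `addForeignKeyConstraint`; a minimal sketch (columns reduced and constraint name hypothetical, not a statement that appears in the diff):

```ts
import { Kysely } from "kysely";

// Sketch only: expressing one of the deleted composite foreign keys with
// Kysely's schema builder while creating the table.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const createDirectoryTable = async (db: Kysely<any>) => {
  await db.schema
    .createTable("directory")
    .addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
    .addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
    .addColumn("master_encryption_key_version", "integer", (col) => col.notNull())
    .addForeignKeyConstraint(
      "directory_fk01", // hypothetical constraint name
      ["user_id", "master_encryption_key_version"],
      "master_encryption_key",
      ["user_id", "version"],
    )
    .execute();
};
```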
File diff suppressed because it is too large
@@ -1,13 +0,0 @@
{
  "version": "7",
  "dialect": "sqlite",
  "entries": [
    {
      "idx": 0,
      "version": "6",
      "when": 1737219722656,
      "tag": "0000_regular_the_watchers",
      "breakpoints": true
    }
  ]
}
18
kysely.config.ts
Normal file
@@ -0,0 +1,18 @@
import { defineConfig } from "kysely-ctl";
import { Pool } from "pg";

export default defineConfig({
  dialect: "pg",
  dialectConfig: {
    pool: new Pool({
      host: process.env.DATABASE_HOST,
      port: process.env.DATABASE_PORT ? parseInt(process.env.DATABASE_PORT) : undefined,
      user: process.env.DATABASE_USER,
      password: process.env.DATABASE_PASSWORD,
      database: process.env.DATABASE_NAME,
    }),
  },
  migrations: {
    migrationFolder: "./src/lib/server/db/migrations",
  },
});
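`kysely.config.ts` points kysely-ctl at `./src/lib/server/db/migrations`, the same folder the runtime `migrateDB()` (in the new `kysely.ts`, further down in this diff) loads through its migration provider. Each file in that folder is a Kysely migration module; a minimal sketch of the expected shape (the table name below is illustrative only, not part of ArkVault's schema):

```ts
import { Kysely } from "kysely";

// Sketch of a migration module under src/lib/server/db/migrations.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const up = async (db: Kysely<any>) => {
  await db.schema
    .createTable("example") // illustrative table name
    .addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
    .execute();
};

// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const down = async (db: Kysely<any>) => {
  await db.schema.dropTable("example").execute();
};
```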
13
package.json
@@ -5,16 +5,14 @@
  "type": "module",
  "scripts": {
    "dev": "vite dev",
    "dev:db": "docker compose -f docker-compose.dev.yaml -p arkvault-dev up -d",
    "build": "vite build",
    "preview": "vite preview",
    "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
    "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
    "format": "prettier --write .",
    "lint": "prettier --check . && eslint .",
    "db:push": "drizzle-kit push",
    "db:generate": "drizzle-kit generate",
    "db:migrate": "drizzle-kit migrate",
    "db:studio": "drizzle-kit studio"
    "db:migrate": "kysely migrate"
  },
  "devDependencies": {
    "@eslint/compat": "^1.2.4",
@@ -26,10 +24,10 @@
    "@types/file-saver": "^2.0.7",
    "@types/ms": "^0.7.34",
    "@types/node-schedule": "^2.1.7",
    "@types/pg": "^8.11.10",
    "autoprefixer": "^10.4.20",
    "axios": "^1.7.9",
    "dexie": "^4.0.10",
    "drizzle-kit": "^0.22.8",
    "eslint": "^9.17.0",
    "eslint-config-prettier": "^9.1.0",
    "eslint-plugin-svelte": "^2.46.1",
@@ -38,6 +36,7 @@
    "file-saver": "^2.0.5",
    "globals": "^15.14.0",
    "heic2any": "^0.0.4",
    "kysely-ctl": "^0.10.1",
    "mime": "^4.0.6",
    "p-limit": "^6.2.0",
    "prettier": "^3.4.2",
@@ -54,10 +53,10 @@
  "dependencies": {
    "@fastify/busboy": "^3.1.1",
    "argon2": "^0.41.1",
    "better-sqlite3": "^11.7.2",
    "drizzle-orm": "^0.33.0",
    "kysely": "^0.27.5",
    "ms": "^2.1.3",
    "node-schedule": "^2.1.1",
    "pg": "^8.13.1",
    "uuid": "^11.0.4",
    "zod": "^3.24.1"
  },
1449
pnpm-lock.yaml
generated
File diff suppressed because it is too large
@@ -2,15 +2,15 @@ import type { ServerInit } from "@sveltejs/kit";
import { sequence } from "@sveltejs/kit/hooks";
import schedule from "node-schedule";
import { cleanupExpiredUserClientChallenges } from "$lib/server/db/client";
import { migrateDB } from "$lib/server/db/drizzle";
import { migrateDB } from "$lib/server/db/kysely";
import {
  cleanupExpiredSessions,
  cleanupExpiredSessionUpgradeChallenges,
} from "$lib/server/db/session";
import { authenticate, setAgentInfo } from "$lib/server/middlewares";

export const init: ServerInit = () => {
  migrateDB();
export const init: ServerInit = async () => {
  await migrateDB();

  schedule.scheduleJob("0 * * * *", () => {
    cleanupExpiredUserClientChallenges();
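`init` becomes `async` so the server only starts serving requests after `migrateDB()` has finished against PostgreSQL (the old better-sqlite3 `migrate` call was synchronous), and the hourly `node-schedule` job then prunes expired rows. A condensed sketch of the pattern, using only the imports shown above:

```ts
import type { ServerInit } from "@sveltejs/kit";
import schedule from "node-schedule";
import { cleanupExpiredUserClientChallenges } from "$lib/server/db/client";
import { migrateDB } from "$lib/server/db/kysely";

export const init: ServerInit = async () => {
  // Run pending migrations before the first request is handled.
  await migrateDB();

  // "0 * * * *": at minute 0 of every hour, delete expired challenge rows.
  schedule.scheduleJob("0 * * * *", () => {
    cleanupExpiredUserClientChallenges();
  });
};
```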
@@ -1,53 +1,97 @@
|
||||
import { SqliteError } from "better-sqlite3";
|
||||
import { and, or, eq, gt, lte } from "drizzle-orm";
|
||||
import db from "./drizzle";
|
||||
import pg from "pg";
|
||||
import { IntegrityError } from "./error";
|
||||
import { client, userClient, userClientChallenge } from "./schema";
|
||||
import db from "./kysely";
|
||||
import type { UserClientState } from "./schema";
|
||||
|
||||
interface Client {
|
||||
id: number;
|
||||
encPubKey: string;
|
||||
sigPubKey: string;
|
||||
}
|
||||
|
||||
interface UserClient {
|
||||
userId: number;
|
||||
clientId: number;
|
||||
state: UserClientState;
|
||||
}
|
||||
|
||||
interface UserClientWithDetails extends UserClient {
|
||||
encPubKey: string;
|
||||
sigPubKey: string;
|
||||
}
|
||||
|
||||
export const createClient = async (encPubKey: string, sigPubKey: string, userId: number) => {
|
||||
return await db.transaction(
|
||||
async (tx) => {
|
||||
const clients = await tx
|
||||
.select({ id: client.id })
|
||||
.from(client)
|
||||
.where(or(eq(client.encPubKey, sigPubKey), eq(client.sigPubKey, encPubKey)))
|
||||
.limit(1);
|
||||
if (clients.length !== 0) {
|
||||
return await db
|
||||
.transaction()
|
||||
.setIsolationLevel("serializable")
|
||||
.execute(async (trx) => {
|
||||
const client = await trx
|
||||
.selectFrom("client")
|
||||
.where((eb) =>
|
||||
eb.or([
|
||||
eb("encryption_public_key", "=", encPubKey),
|
||||
eb("encryption_public_key", "=", sigPubKey),
|
||||
eb("signature_public_key", "=", encPubKey),
|
||||
eb("signature_public_key", "=", sigPubKey),
|
||||
]),
|
||||
)
|
||||
.limit(1)
|
||||
.executeTakeFirst();
|
||||
if (client) {
|
||||
throw new IntegrityError("Public key(s) already registered");
|
||||
}
|
||||
|
||||
const newClients = await tx
|
||||
.insert(client)
|
||||
.values({ encPubKey, sigPubKey })
|
||||
.returning({ id: client.id });
|
||||
const { id: clientId } = newClients[0]!;
|
||||
await tx.insert(userClient).values({ userId, clientId });
|
||||
|
||||
return clientId;
|
||||
},
|
||||
{ behavior: "exclusive" },
|
||||
);
|
||||
const { clientId } = await trx
|
||||
.insertInto("client")
|
||||
.values({ encryption_public_key: encPubKey, signature_public_key: sigPubKey })
|
||||
.returning("id as clientId")
|
||||
.executeTakeFirstOrThrow();
|
||||
await trx
|
||||
.insertInto("user_client")
|
||||
.values({ user_id: userId, client_id: clientId })
|
||||
.execute();
|
||||
return { clientId };
|
||||
});
|
||||
};
|
||||
|
||||
export const getClient = async (clientId: number) => {
|
||||
const clients = await db.select().from(client).where(eq(client.id, clientId)).limit(1);
|
||||
return clients[0] ?? null;
|
||||
const client = await db
|
||||
.selectFrom("client")
|
||||
.selectAll()
|
||||
.where("id", "=", clientId)
|
||||
.limit(1)
|
||||
.executeTakeFirst();
|
||||
return client
|
||||
? ({
|
||||
id: client.id,
|
||||
encPubKey: client.encryption_public_key,
|
||||
sigPubKey: client.signature_public_key,
|
||||
} satisfies Client)
|
||||
: null;
|
||||
};
|
||||
|
||||
export const getClientByPubKeys = async (encPubKey: string, sigPubKey: string) => {
|
||||
const clients = await db
|
||||
.select()
|
||||
.from(client)
|
||||
.where(and(eq(client.encPubKey, encPubKey), eq(client.sigPubKey, sigPubKey)))
|
||||
.limit(1);
|
||||
return clients[0] ?? null;
|
||||
const client = await db
|
||||
.selectFrom("client")
|
||||
.selectAll()
|
||||
.where("encryption_public_key", "=", encPubKey)
|
||||
.where("signature_public_key", "=", sigPubKey)
|
||||
.limit(1)
|
||||
.executeTakeFirst();
|
||||
return client
|
||||
? ({
|
||||
id: client.id,
|
||||
encPubKey: client.encryption_public_key,
|
||||
sigPubKey: client.signature_public_key,
|
||||
} satisfies Client)
|
||||
: null;
|
||||
};
|
||||
|
||||
export const createUserClient = async (userId: number, clientId: number) => {
|
||||
try {
|
||||
await db.insert(userClient).values({ userId, clientId });
|
||||
await db.insertInto("user_client").values({ user_id: userId, client_id: clientId }).execute();
|
||||
} catch (e) {
|
||||
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_PRIMARYKEY") {
|
||||
if (e instanceof pg.DatabaseError && e.code === "23505") {
|
||||
throw new IntegrityError("User client already exists");
|
||||
}
|
||||
throw e;
|
||||
@@ -55,52 +99,76 @@ export const createUserClient = async (userId: number, clientId: number) => {
|
||||
};
|
||||
|
||||
export const getAllUserClients = async (userId: number) => {
|
||||
return await db.select().from(userClient).where(eq(userClient.userId, userId));
|
||||
const userClients = await db
|
||||
.selectFrom("user_client")
|
||||
.selectAll()
|
||||
.where("user_id", "=", userId)
|
||||
.execute();
|
||||
return userClients.map(
|
||||
({ user_id, client_id, state }) =>
|
||||
({
|
||||
userId: user_id,
|
||||
clientId: client_id,
|
||||
state,
|
||||
}) satisfies UserClient,
|
||||
);
|
||||
};
|
||||
|
||||
export const getUserClient = async (userId: number, clientId: number) => {
|
||||
const userClients = await db
|
||||
.select()
|
||||
.from(userClient)
|
||||
.where(and(eq(userClient.userId, userId), eq(userClient.clientId, clientId)))
|
||||
.limit(1);
|
||||
return userClients[0] ?? null;
|
||||
const userClient = await db
|
||||
.selectFrom("user_client")
|
||||
.selectAll()
|
||||
.where("user_id", "=", userId)
|
||||
.where("client_id", "=", clientId)
|
||||
.limit(1)
|
||||
.executeTakeFirst();
|
||||
return userClient
|
||||
? ({
|
||||
userId: userClient.user_id,
|
||||
clientId: userClient.client_id,
|
||||
state: userClient.state,
|
||||
} satisfies UserClient)
|
||||
: null;
|
||||
};
|
||||
|
||||
export const getUserClientWithDetails = async (userId: number, clientId: number) => {
|
||||
const userClients = await db
|
||||
.select()
|
||||
.from(userClient)
|
||||
.innerJoin(client, eq(userClient.clientId, client.id))
|
||||
.where(and(eq(userClient.userId, userId), eq(userClient.clientId, clientId)))
|
||||
.limit(1);
|
||||
return userClients[0] ?? null;
|
||||
const userClient = await db
|
||||
.selectFrom("user_client")
|
||||
.innerJoin("client", "user_client.client_id", "client.id")
|
||||
.selectAll()
|
||||
.where("user_id", "=", userId)
|
||||
.where("client_id", "=", clientId)
|
||||
.limit(1)
|
||||
.executeTakeFirst();
|
||||
return userClient
|
||||
? ({
|
||||
userId: userClient.user_id,
|
||||
clientId: userClient.client_id,
|
||||
state: userClient.state,
|
||||
encPubKey: userClient.encryption_public_key,
|
||||
sigPubKey: userClient.signature_public_key,
|
||||
} satisfies UserClientWithDetails)
|
||||
: null;
|
||||
};
|
||||
|
||||
export const setUserClientStateToPending = async (userId: number, clientId: number) => {
|
||||
await db
|
||||
.update(userClient)
|
||||
.updateTable("user_client")
|
||||
.set({ state: "pending" })
|
||||
.where(
|
||||
and(
|
||||
eq(userClient.userId, userId),
|
||||
eq(userClient.clientId, clientId),
|
||||
eq(userClient.state, "challenging"),
|
||||
),
|
||||
);
|
||||
.where("user_id", "=", userId)
|
||||
.where("client_id", "=", clientId)
|
||||
.where("state", "=", "challenging")
|
||||
.execute();
|
||||
};
|
||||
|
||||
export const setUserClientStateToActive = async (userId: number, clientId: number) => {
|
||||
await db
|
||||
.update(userClient)
|
||||
.updateTable("user_client")
|
||||
.set({ state: "active" })
|
||||
.where(
|
||||
and(
|
||||
eq(userClient.userId, userId),
|
||||
eq(userClient.clientId, clientId),
|
||||
eq(userClient.state, "pending"),
|
||||
),
|
||||
);
|
||||
.where("user_id", "=", userId)
|
||||
.where("client_id", "=", clientId)
|
||||
.where("state", "=", "pending")
|
||||
.execute();
|
||||
};
|
||||
|
||||
export const registerUserClientChallenge = async (
|
||||
@@ -110,30 +178,30 @@ export const registerUserClientChallenge = async (
|
||||
allowedIp: string,
|
||||
expiresAt: Date,
|
||||
) => {
|
||||
await db.insert(userClientChallenge).values({
|
||||
userId,
|
||||
clientId,
|
||||
answer,
|
||||
allowedIp,
|
||||
expiresAt,
|
||||
});
|
||||
await db
|
||||
.insertInto("user_client_challenge")
|
||||
.values({
|
||||
user_id: userId,
|
||||
client_id: clientId,
|
||||
answer,
|
||||
allowed_ip: allowedIp,
|
||||
expires_at: expiresAt,
|
||||
})
|
||||
.execute();
|
||||
};
|
||||
|
||||
export const consumeUserClientChallenge = async (userId: number, answer: string, ip: string) => {
|
||||
const challenges = await db
|
||||
.delete(userClientChallenge)
|
||||
.where(
|
||||
and(
|
||||
eq(userClientChallenge.userId, userId),
|
||||
eq(userClientChallenge.answer, answer),
|
||||
eq(userClientChallenge.allowedIp, ip),
|
||||
gt(userClientChallenge.expiresAt, new Date()),
|
||||
),
|
||||
)
|
||||
.returning({ clientId: userClientChallenge.clientId });
|
||||
return challenges[0] ?? null;
|
||||
const challenge = await db
|
||||
.deleteFrom("user_client_challenge")
|
||||
.where("user_id", "=", userId)
|
||||
.where("answer", "=", answer)
|
||||
.where("allowed_ip", "=", ip)
|
||||
.where("expires_at", ">", new Date())
|
||||
.returning("client_id")
|
||||
.executeTakeFirst();
|
||||
return challenge ? { clientId: challenge.client_id } : null;
|
||||
};
|
||||
|
||||
export const cleanupExpiredUserClientChallenges = async () => {
|
||||
await db.delete(userClientChallenge).where(lte(userClientChallenge.expiresAt, new Date()));
|
||||
await db.deleteFrom("user_client_challenge").where("expires_at", "<=", new Date()).execute();
|
||||
};
|
||||
|
||||
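Throughout this migration the SQLite-specific error check (`SqliteError` with `SQLITE_CONSTRAINT_PRIMARYKEY`) is replaced by PostgreSQL's `unique_violation` SQLSTATE `23505`, which the `pg` driver surfaces as `pg.DatabaseError`. The recurring pattern, reduced to a sketch (function name and message are illustrative):

```ts
import pg from "pg";
import { IntegrityError } from "./error";
import db from "./kysely";

// Insert a row and translate a duplicate-key failure into a domain error.
export const createUserClientOnce = async (userId: number, clientId: number) => {
  try {
    await db.insertInto("user_client").values({ user_id: userId, client_id: clientId }).execute();
  } catch (e) {
    // 23505 = unique_violation; raised for both PRIMARY KEY and UNIQUE conflicts.
    if (e instanceof pg.DatabaseError && e.code === "23505") {
      throw new IntegrityError("User client already exists");
    }
    throw e; // anything else is unexpected and should propagate
  }
};
```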
@@ -1,15 +0,0 @@
import Database from "better-sqlite3";
import { drizzle } from "drizzle-orm/better-sqlite3";
import { migrate } from "drizzle-orm/better-sqlite3/migrator";
import env from "$lib/server/loadenv";

const client = new Database(env.databaseUrl);
const db = drizzle(client);

export const migrateDB = () => {
  if (process.env.NODE_ENV === "production") {
    migrate(db, { migrationsFolder: "./drizzle" });
  }
};

export default db;
@@ -1,22 +1,23 @@
|
||||
import { SqliteError } from "better-sqlite3";
|
||||
import { and, eq, isNull } from "drizzle-orm";
|
||||
import db from "./drizzle";
|
||||
import { IntegrityError } from "./error";
|
||||
import { directory, directoryLog, file, fileLog, fileCategory, hsk, mek } from "./schema";
|
||||
import db from "./kysely";
|
||||
import type { Ciphertext } from "./schema";
|
||||
|
||||
type DirectoryId = "root" | number;
|
||||
|
||||
export interface NewDirectoryParams {
|
||||
interface Directory {
|
||||
id: number;
|
||||
parentId: DirectoryId;
|
||||
userId: number;
|
||||
mekVersion: number;
|
||||
encDek: string;
|
||||
dekVersion: Date;
|
||||
encName: string;
|
||||
encNameIv: string;
|
||||
encName: Ciphertext;
|
||||
}
|
||||
|
||||
export interface NewFileParams {
|
||||
export type NewDirectory = Omit<Directory, "id">;
|
||||
|
||||
interface File {
|
||||
id: number;
|
||||
parentId: DirectoryId;
|
||||
userId: number;
|
||||
path: string;
|
||||
@@ -28,217 +29,264 @@ export interface NewFileParams {
|
||||
contentType: string;
|
||||
encContentIv: string;
|
||||
encContentHash: string;
|
||||
encName: string;
|
||||
encNameIv: string;
|
||||
encCreatedAt: string | null;
|
||||
encCreatedAtIv: string | null;
|
||||
encLastModifiedAt: string;
|
||||
encLastModifiedAtIv: string;
|
||||
encName: Ciphertext;
|
||||
encCreatedAt: Ciphertext | null;
|
||||
encLastModifiedAt: Ciphertext;
|
||||
}
|
||||
|
||||
export const registerDirectory = async (params: NewDirectoryParams) => {
|
||||
await db.transaction(
|
||||
async (tx) => {
|
||||
const meks = await tx
|
||||
.select({ version: mek.version })
|
||||
.from(mek)
|
||||
.where(and(eq(mek.userId, params.userId), eq(mek.state, "active")))
|
||||
.limit(1);
|
||||
if (meks[0]?.version !== params.mekVersion) {
|
||||
throw new IntegrityError("Inactive MEK version");
|
||||
}
|
||||
export type NewFile = Omit<File, "id">;
|
||||
|
||||
const newDirectories = await tx
|
||||
.insert(directory)
|
||||
.values({
|
||||
parentId: params.parentId === "root" ? null : params.parentId,
|
||||
userId: params.userId,
|
||||
mekVersion: params.mekVersion,
|
||||
encDek: params.encDek,
|
||||
dekVersion: params.dekVersion,
|
||||
encName: { ciphertext: params.encName, iv: params.encNameIv },
|
||||
})
|
||||
.returning({ id: directory.id });
|
||||
const { id: directoryId } = newDirectories[0]!;
|
||||
await tx.insert(directoryLog).values({
|
||||
directoryId,
|
||||
export const registerDirectory = async (params: NewDirectory) => {
|
||||
await db.transaction().execute(async (trx) => {
|
||||
const mek = await trx
|
||||
.selectFrom("master_encryption_key")
|
||||
.select("version")
|
||||
.where("user_id", "=", params.userId)
|
||||
.where("state", "=", "active")
|
||||
.limit(1)
|
||||
.forUpdate()
|
||||
.executeTakeFirst();
|
||||
if (mek?.version !== params.mekVersion) {
|
||||
throw new IntegrityError("Inactive MEK version");
|
||||
}
|
||||
|
||||
const { directoryId } = await trx
|
||||
.insertInto("directory")
|
||||
.values({
|
||||
parent_id: params.parentId !== "root" ? params.parentId : null,
|
||||
user_id: params.userId,
|
||||
master_encryption_key_version: params.mekVersion,
|
||||
encrypted_data_encryption_key: params.encDek,
|
||||
data_encryption_key_version: params.dekVersion,
|
||||
encrypted_name: params.encName,
|
||||
})
|
||||
.returning("id as directoryId")
|
||||
.executeTakeFirstOrThrow();
|
||||
await trx
|
||||
.insertInto("directory_log")
|
||||
.values({
|
||||
directory_id: directoryId,
|
||||
timestamp: new Date(),
|
||||
action: "create",
|
||||
newName: { ciphertext: params.encName, iv: params.encNameIv },
|
||||
});
|
||||
},
|
||||
{ behavior: "exclusive" },
|
||||
);
|
||||
new_name: params.encName,
|
||||
})
|
||||
.execute();
|
||||
});
|
||||
};
|
||||
|
||||
export const getAllDirectoriesByParent = async (userId: number, parentId: DirectoryId) => {
|
||||
return await db
|
||||
.select()
|
||||
.from(directory)
|
||||
.where(
|
||||
and(
|
||||
eq(directory.userId, userId),
|
||||
parentId === "root" ? isNull(directory.parentId) : eq(directory.parentId, parentId),
|
||||
),
|
||||
);
|
||||
let query = db.selectFrom("directory").selectAll().where("user_id", "=", userId);
|
||||
query =
|
||||
parentId === "root"
|
||||
? query.where("parent_id", "is", null)
|
||||
: query.where("parent_id", "=", parentId);
|
||||
const directories = await query.execute();
|
||||
return directories.map(
|
||||
(directory) =>
|
||||
({
|
||||
id: directory.id,
|
||||
parentId: directory.parent_id ?? "root",
|
||||
userId: directory.user_id,
|
||||
mekVersion: directory.master_encryption_key_version,
|
||||
encDek: directory.encrypted_data_encryption_key,
|
||||
dekVersion: directory.data_encryption_key_version,
|
||||
encName: directory.encrypted_name,
|
||||
}) satisfies Directory,
|
||||
);
|
||||
};
|
||||
|
||||
export const getDirectory = async (userId: number, directoryId: number) => {
|
||||
const res = await db
|
||||
.select()
|
||||
.from(directory)
|
||||
.where(and(eq(directory.userId, userId), eq(directory.id, directoryId)))
|
||||
.limit(1);
|
||||
return res[0] ?? null;
|
||||
const directory = await db
|
||||
.selectFrom("directory")
|
||||
.selectAll()
|
||||
.where("id", "=", directoryId)
|
||||
.where("user_id", "=", userId)
|
||||
.limit(1)
|
||||
.executeTakeFirst();
|
||||
return directory
|
||||
? ({
|
||||
id: directory.id,
|
||||
parentId: directory.parent_id ?? "root",
|
||||
userId: directory.user_id,
|
||||
mekVersion: directory.master_encryption_key_version,
|
||||
encDek: directory.encrypted_data_encryption_key,
|
||||
dekVersion: directory.data_encryption_key_version,
|
||||
encName: directory.encrypted_name,
|
||||
} satisfies Directory)
|
||||
: null;
|
||||
};
|
||||
|
||||
export const setDirectoryEncName = async (
|
||||
userId: number,
|
||||
directoryId: number,
|
||||
dekVersion: Date,
|
||||
encName: string,
|
||||
encNameIv: string,
|
||||
encName: Ciphertext,
|
||||
) => {
|
||||
await db.transaction(
|
||||
async (tx) => {
|
||||
const directories = await tx
|
||||
.select({ version: directory.dekVersion })
|
||||
.from(directory)
|
||||
.where(and(eq(directory.userId, userId), eq(directory.id, directoryId)))
|
||||
.limit(1);
|
||||
if (!directories[0]) {
|
||||
throw new IntegrityError("Directory not found");
|
||||
} else if (directories[0].version.getTime() !== dekVersion.getTime()) {
|
||||
throw new IntegrityError("Invalid DEK version");
|
||||
}
|
||||
await db.transaction().execute(async (trx) => {
|
||||
const directory = await trx
|
||||
.selectFrom("directory")
|
||||
.select("data_encryption_key_version")
|
||||
.where("id", "=", directoryId)
|
||||
.where("user_id", "=", userId)
|
||||
.limit(1)
|
||||
.forUpdate()
|
||||
.executeTakeFirst();
|
||||
if (!directory) {
|
||||
throw new IntegrityError("Directory not found");
|
||||
} else if (directory.data_encryption_key_version.getTime() !== dekVersion.getTime()) {
|
||||
throw new IntegrityError("Invalid DEK version");
|
||||
}
|
||||
|
||||
await tx
|
||||
.update(directory)
|
||||
.set({ encName: { ciphertext: encName, iv: encNameIv } })
|
||||
.where(and(eq(directory.userId, userId), eq(directory.id, directoryId)));
|
||||
await tx.insert(directoryLog).values({
|
||||
directoryId,
|
||||
await trx
|
||||
.updateTable("directory")
|
||||
.set({ encrypted_name: encName })
|
||||
.where("id", "=", directoryId)
|
||||
.where("user_id", "=", userId)
|
||||
.execute();
|
||||
await trx
|
||||
.insertInto("directory_log")
|
||||
.values({
|
||||
directory_id: directoryId,
|
||||
timestamp: new Date(),
|
||||
action: "rename",
|
||||
newName: { ciphertext: encName, iv: encNameIv },
|
||||
});
|
||||
},
|
||||
{ behavior: "exclusive" },
|
||||
);
|
||||
new_name: encName,
|
||||
})
|
||||
.execute();
|
||||
});
|
||||
};
|
||||
|
||||
export const unregisterDirectory = async (userId: number, directoryId: number) => {
|
||||
return await db.transaction(
|
||||
async (tx) => {
|
||||
return await db
|
||||
.transaction()
|
||||
.setIsolationLevel("repeatable read") // TODO: Sufficient?
|
||||
.execute(async (trx) => {
|
||||
const unregisterFiles = async (parentId: number) => {
|
||||
return await tx
|
||||
.delete(file)
|
||||
.where(and(eq(file.userId, userId), eq(file.parentId, parentId)))
|
||||
.returning({ id: file.id, path: file.path });
|
||||
return await trx
|
||||
.deleteFrom("file")
|
||||
.where("parent_id", "=", parentId)
|
||||
.where("user_id", "=", userId)
|
||||
.returning(["id", "path"])
|
||||
.execute();
|
||||
};
|
||||
const unregisterDirectoryRecursively = async (
|
||||
directoryId: number,
|
||||
): Promise<{ id: number; path: string }[]> => {
|
||||
const files = await unregisterFiles(directoryId);
|
||||
const subDirectories = await tx
|
||||
.select({ id: directory.id })
|
||||
.from(directory)
|
||||
.where(and(eq(directory.userId, userId), eq(directory.parentId, directoryId)));
|
||||
const subDirectories = await trx
|
||||
.selectFrom("directory")
|
||||
.select("id")
|
||||
.where("parent_id", "=", directoryId)
|
||||
.where("user_id", "=", userId)
|
||||
.execute();
|
||||
const subDirectoryFilePaths = await Promise.all(
|
||||
subDirectories.map(async ({ id }) => await unregisterDirectoryRecursively(id)),
|
||||
);
|
||||
|
||||
const deleteRes = await tx.delete(directory).where(eq(directory.id, directoryId));
|
||||
if (deleteRes.changes === 0) {
|
||||
const deleteRes = await trx
|
||||
.deleteFrom("directory")
|
||||
.where("id", "=", directoryId)
|
||||
.where("user_id", "=", userId)
|
||||
.executeTakeFirst();
|
||||
if (deleteRes.numDeletedRows === 0n) {
|
||||
throw new IntegrityError("Directory not found");
|
||||
}
|
||||
return files.concat(...subDirectoryFilePaths);
|
||||
};
|
||||
return await unregisterDirectoryRecursively(directoryId);
|
||||
},
|
||||
{ behavior: "exclusive" },
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
export const registerFile = async (params: NewFileParams) => {
|
||||
if (
|
||||
(params.hskVersion && !params.contentHmac) ||
|
||||
(!params.hskVersion && params.contentHmac) ||
|
||||
(params.encCreatedAt && !params.encCreatedAtIv) ||
|
||||
(!params.encCreatedAt && params.encCreatedAtIv)
|
||||
) {
|
||||
export const registerFile = async (params: NewFile) => {
|
||||
if ((params.hskVersion && !params.contentHmac) || (!params.hskVersion && params.contentHmac)) {
|
||||
throw new Error("Invalid arguments");
|
||||
}
|
||||
|
||||
await db.transaction(
|
||||
async (tx) => {
|
||||
const meks = await tx
|
||||
.select({ version: mek.version })
|
||||
.from(mek)
|
||||
.where(and(eq(mek.userId, params.userId), eq(mek.state, "active")))
|
||||
.limit(1);
|
||||
if (meks[0]?.version !== params.mekVersion) {
|
||||
throw new IntegrityError("Inactive MEK version");
|
||||
}
|
||||
await db.transaction().execute(async (trx) => {
|
||||
const mek = await trx
|
||||
.selectFrom("master_encryption_key")
|
||||
.select("version")
|
||||
.where("user_id", "=", params.userId)
|
||||
.where("state", "=", "active")
|
||||
.limit(1)
|
||||
.forUpdate()
|
||||
.executeTakeFirst();
|
||||
if (mek?.version !== params.mekVersion) {
|
||||
throw new IntegrityError("Inactive MEK version");
|
||||
}
|
||||
|
||||
if (params.hskVersion) {
|
||||
const hsks = await tx
|
||||
.select({ version: hsk.version })
|
||||
.from(hsk)
|
||||
.where(and(eq(hsk.userId, params.userId), eq(hsk.state, "active")))
|
||||
.limit(1);
|
||||
if (hsks[0]?.version !== params.hskVersion) {
|
||||
throw new IntegrityError("Inactive HSK version");
|
||||
}
|
||||
if (params.hskVersion) {
|
||||
const hsk = await trx
|
||||
.selectFrom("hmac_secret_key")
|
||||
.select("version")
|
||||
.where("user_id", "=", params.userId)
|
||||
.where("state", "=", "active")
|
||||
.limit(1)
|
||||
.forUpdate()
|
||||
.executeTakeFirst();
|
||||
if (hsk?.version !== params.hskVersion) {
|
||||
throw new IntegrityError("Inactive HSK version");
|
||||
}
|
||||
}
|
||||
|
||||
const newFiles = await tx
|
||||
.insert(file)
|
||||
.values({
|
||||
path: params.path,
|
||||
parentId: params.parentId === "root" ? null : params.parentId,
|
||||
userId: params.userId,
|
||||
mekVersion: params.mekVersion,
|
||||
hskVersion: params.hskVersion,
|
||||
encDek: params.encDek,
|
||||
dekVersion: params.dekVersion,
|
||||
contentHmac: params.contentHmac,
|
||||
contentType: params.contentType,
|
||||
encContentIv: params.encContentIv,
|
||||
encContentHash: params.encContentHash,
|
||||
encName: { ciphertext: params.encName, iv: params.encNameIv },
|
||||
encCreatedAt:
|
||||
params.encCreatedAt && params.encCreatedAtIv
|
||||
? { ciphertext: params.encCreatedAt, iv: params.encCreatedAtIv }
|
||||
: null,
|
||||
encLastModifiedAt: {
|
||||
ciphertext: params.encLastModifiedAt,
|
||||
iv: params.encLastModifiedAtIv,
|
||||
},
|
||||
})
|
||||
.returning({ id: file.id });
|
||||
const { id: fileId } = newFiles[0]!;
|
||||
await tx.insert(fileLog).values({
|
||||
fileId,
|
||||
const { fileId } = await trx
|
||||
.insertInto("file")
|
||||
.values({
|
||||
parent_id: params.parentId !== "root" ? params.parentId : null,
|
||||
user_id: params.userId,
|
||||
path: params.path,
|
||||
master_encryption_key_version: params.mekVersion,
|
||||
encrypted_data_encryption_key: params.encDek,
|
||||
data_encryption_key_version: params.dekVersion,
|
||||
hmac_secret_key_version: params.hskVersion,
|
||||
content_hmac: params.contentHmac,
|
||||
content_type: params.contentType,
|
||||
encrypted_content_iv: params.encContentIv,
|
||||
encrypted_content_hash: params.encContentHash,
|
||||
encrypted_name: params.encName,
|
||||
encrypted_created_at: params.encCreatedAt,
|
||||
encrypted_last_modified_at: params.encLastModifiedAt,
|
||||
})
|
||||
.returning("id as fileId")
|
||||
.executeTakeFirstOrThrow();
|
||||
await trx
|
||||
.insertInto("file_log")
|
||||
.values({
|
||||
file_id: fileId,
|
||||
timestamp: new Date(),
|
||||
action: "create",
|
||||
newName: { ciphertext: params.encName, iv: params.encNameIv },
|
||||
});
|
||||
},
|
||||
{ behavior: "exclusive" },
|
||||
);
|
||||
new_name: params.encName,
|
||||
})
|
||||
.execute();
|
||||
});
|
||||
};
|
||||
|
||||
export const getAllFilesByParent = async (userId: number, parentId: DirectoryId) => {
|
||||
return await db
|
||||
.select()
|
||||
.from(file)
|
||||
.where(
|
||||
and(
|
||||
eq(file.userId, userId),
|
||||
parentId === "root" ? isNull(file.parentId) : eq(file.parentId, parentId),
|
||||
),
|
||||
);
|
||||
let query = db.selectFrom("file").selectAll().where("user_id", "=", userId);
|
||||
query =
|
||||
parentId === "root"
|
||||
? query.where("parent_id", "is", null)
|
||||
: query.where("parent_id", "=", parentId);
|
||||
const files = await query.execute();
|
||||
return files.map(
|
||||
(file) =>
|
||||
({
|
||||
id: file.id,
|
||||
parentId: file.parent_id ?? "root",
|
||||
userId: file.user_id,
|
||||
path: file.path,
|
||||
mekVersion: file.master_encryption_key_version,
|
||||
encDek: file.encrypted_data_encryption_key,
|
||||
dekVersion: file.data_encryption_key_version,
|
||||
hskVersion: file.hmac_secret_key_version,
|
||||
contentHmac: file.content_hmac,
|
||||
contentType: file.content_type,
|
||||
encContentIv: file.encrypted_content_iv,
|
||||
encContentHash: file.encrypted_content_hash,
|
||||
encName: file.encrypted_name,
|
||||
encCreatedAt: file.encrypted_created_at,
|
||||
encLastModifiedAt: file.encrypted_last_modified_at,
|
||||
}) satisfies File,
|
||||
);
|
||||
};
|
||||
|
||||
export const getAllFilesByCategory = async (userId: number, categoryId: number) => {
|
||||
@@ -254,71 +302,95 @@ export const getAllFileIdsByContentHmac = async (
|
||||
hskVersion: number,
|
||||
contentHmac: string,
|
||||
) => {
|
||||
return await db
|
||||
.select({ id: file.id })
|
||||
.from(file)
|
||||
.where(
|
||||
and(
|
||||
eq(file.userId, userId),
|
||||
eq(file.hskVersion, hskVersion),
|
||||
eq(file.contentHmac, contentHmac),
|
||||
),
|
||||
);
|
||||
const files = await db
|
||||
.selectFrom("file")
|
||||
.select("id")
|
||||
.where("user_id", "=", userId)
|
||||
.where("hmac_secret_key_version", "=", hskVersion)
|
||||
.where("content_hmac", "=", contentHmac)
|
||||
.execute();
|
||||
return files.map(({ id }) => ({ id }));
|
||||
};
|
||||
|
||||
export const getFile = async (userId: number, fileId: number) => {
|
||||
const res = await db
|
||||
.select()
|
||||
.from(file)
|
||||
.where(and(eq(file.userId, userId), eq(file.id, fileId)))
|
||||
.limit(1);
|
||||
return res[0] ?? null;
|
||||
const file = await db
|
||||
.selectFrom("file")
|
||||
.selectAll()
|
||||
.where("id", "=", fileId)
|
||||
.where("user_id", "=", userId)
|
||||
.limit(1)
|
||||
.executeTakeFirst();
|
||||
return file
|
||||
? ({
|
||||
id: file.id,
|
||||
parentId: file.parent_id ?? "root",
|
||||
userId: file.user_id,
|
||||
path: file.path,
|
||||
mekVersion: file.master_encryption_key_version,
|
||||
encDek: file.encrypted_data_encryption_key,
|
||||
dekVersion: file.data_encryption_key_version,
|
||||
hskVersion: file.hmac_secret_key_version,
|
||||
contentHmac: file.content_hmac,
|
||||
contentType: file.content_type,
|
||||
encContentIv: file.encrypted_content_iv,
|
||||
encContentHash: file.encrypted_content_hash,
|
||||
encName: file.encrypted_name,
|
||||
encCreatedAt: file.encrypted_created_at,
|
||||
encLastModifiedAt: file.encrypted_last_modified_at,
|
||||
} satisfies File)
|
||||
: null;
|
||||
};
|
||||
|
||||
export const setFileEncName = async (
|
||||
userId: number,
|
||||
fileId: number,
|
||||
dekVersion: Date,
|
||||
encName: string,
|
||||
encNameIv: string,
|
||||
encName: Ciphertext,
|
||||
) => {
|
||||
await db.transaction(
|
||||
async (tx) => {
|
||||
const files = await tx
|
||||
.select({ version: file.dekVersion })
|
||||
.from(file)
|
||||
.where(and(eq(file.userId, userId), eq(file.id, fileId)))
|
||||
.limit(1);
|
||||
if (!files[0]) {
|
||||
throw new IntegrityError("File not found");
|
||||
} else if (files[0].version.getTime() !== dekVersion.getTime()) {
|
||||
throw new IntegrityError("Invalid DEK version");
|
||||
}
|
||||
await db.transaction().execute(async (trx) => {
|
||||
const file = await trx
|
||||
.selectFrom("file")
|
||||
.select("data_encryption_key_version")
|
||||
.where("id", "=", fileId)
|
||||
.where("user_id", "=", userId)
|
||||
.limit(1)
|
||||
.forUpdate()
|
||||
.executeTakeFirst();
|
||||
if (!file) {
|
||||
throw new IntegrityError("File not found");
|
||||
} else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) {
|
||||
throw new IntegrityError("Invalid DEK version");
|
||||
}
|
||||
|
||||
await tx
|
||||
.update(file)
|
||||
.set({ encName: { ciphertext: encName, iv: encNameIv } })
|
||||
.where(and(eq(file.userId, userId), eq(file.id, fileId)));
|
||||
await tx.insert(fileLog).values({
|
||||
fileId,
|
||||
await trx
|
||||
.updateTable("file")
|
||||
.set({ encrypted_name: encName })
|
||||
.where("id", "=", fileId)
|
||||
.where("user_id", "=", userId)
|
||||
.execute();
|
||||
await trx
|
||||
.insertInto("file_log")
|
||||
.values({
|
||||
file_id: fileId,
|
||||
timestamp: new Date(),
|
||||
action: "rename",
|
||||
newName: { ciphertext: encName, iv: encNameIv },
|
||||
});
|
||||
},
|
||||
{ behavior: "exclusive" },
|
||||
);
|
||||
new_name: encName,
|
||||
})
|
||||
.execute();
|
||||
});
|
||||
};
|
||||
|
||||
export const unregisterFile = async (userId: number, fileId: number) => {
|
||||
const files = await db
|
||||
.delete(file)
|
||||
.where(and(eq(file.userId, userId), eq(file.id, fileId)))
|
||||
.returning({ path: file.path });
|
||||
if (!files[0]) {
|
||||
const file = await db
|
||||
.deleteFrom("file")
|
||||
.where("id", "=", fileId)
|
||||
.where("user_id", "=", userId)
|
||||
.returning("path")
|
||||
.executeTakeFirst();
|
||||
if (!file) {
|
||||
throw new IntegrityError("File not found");
|
||||
}
|
||||
return files[0].path;
|
||||
return { path: file.path };
|
||||
};
|
||||
|
||||
export const addFileToCategory = async (fileId: number, categoryId: number) => {
|
||||
|
||||
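Where the SQLite code relied on `{ behavior: "exclusive" }` transactions to serialize writers, the PostgreSQL version locks only the rows it checks with `SELECT ... FOR UPDATE` (`.forUpdate()`), or raises the isolation level where that is not enough. The essential pattern behind `registerDirectory` and `registerFile`, as a sketch (the helper name is hypothetical):

```ts
import { IntegrityError } from "./error";
import db from "./kysely";

// Verify that the caller's MEK version is still the active one, holding a row
// lock so a concurrent key rotation cannot slip in before the insert commits.
export const assertActiveMekVersion = async (userId: number, mekVersion: number) => {
  await db.transaction().execute(async (trx) => {
    const mek = await trx
      .selectFrom("master_encryption_key")
      .select("version")
      .where("user_id", "=", userId)
      .where("state", "=", "active")
      .limit(1)
      .forUpdate() // row-level lock instead of SQLite's exclusive transaction
      .executeTakeFirst();
    if (mek?.version !== mekVersion) {
      throw new IntegrityError("Inactive MEK version");
    }
    // ...perform the dependent insert inside the same transaction...
  });
};
```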
@@ -1,8 +1,15 @@
|
||||
import { SqliteError } from "better-sqlite3";
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import db from "./drizzle";
|
||||
import pg from "pg";
|
||||
import { IntegrityError } from "./error";
|
||||
import { hsk, hskLog } from "./schema";
|
||||
import db from "./kysely";
|
||||
import type { HskState } from "./schema";
|
||||
|
||||
interface Hsk {
|
||||
userId: number;
|
||||
version: number;
|
||||
state: HskState;
|
||||
mekVersion: number;
|
||||
encHsk: string;
|
||||
}
|
||||
|
||||
export const registerInitialHsk = async (
|
||||
userId: number,
|
||||
@@ -10,37 +17,52 @@ export const registerInitialHsk = async (
|
||||
mekVersion: number,
|
||||
encHsk: string,
|
||||
) => {
|
||||
await db.transaction(
|
||||
async (tx) => {
|
||||
try {
|
||||
await tx.insert(hsk).values({
|
||||
userId,
|
||||
await db.transaction().execute(async (trx) => {
|
||||
try {
|
||||
await trx
|
||||
.insertInto("hmac_secret_key")
|
||||
.values({
|
||||
user_id: userId,
|
||||
version: 1,
|
||||
state: "active",
|
||||
mekVersion,
|
||||
encHsk,
|
||||
});
|
||||
await tx.insert(hskLog).values({
|
||||
userId,
|
||||
hskVersion: 1,
|
||||
master_encryption_key_version: mekVersion,
|
||||
encrypted_key: encHsk,
|
||||
})
|
||||
.execute();
|
||||
await trx
|
||||
.insertInto("hmac_secret_key_log")
|
||||
.values({
|
||||
user_id: userId,
|
||||
hmac_secret_key_version: 1,
|
||||
timestamp: new Date(),
|
||||
action: "create",
|
||||
actionBy: createdBy,
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_PRIMARYKEY") {
|
||||
throw new IntegrityError("HSK already registered");
|
||||
}
|
||||
throw e;
|
||||
action_by: createdBy,
|
||||
})
|
||||
.execute();
|
||||
} catch (e) {
|
||||
if (e instanceof pg.DatabaseError && e.code === "23505") {
|
||||
throw new IntegrityError("HSK already registered");
|
||||
}
|
||||
},
|
||||
{ behavior: "exclusive" },
|
||||
);
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
export const getAllValidHsks = async (userId: number) => {
|
||||
return await db
|
||||
.select()
|
||||
.from(hsk)
|
||||
.where(and(eq(hsk.userId, userId), eq(hsk.state, "active")));
|
||||
const hsks = await db
|
||||
.selectFrom("hmac_secret_key")
|
||||
.selectAll()
|
||||
.where("user_id", "=", userId)
|
||||
.where("state", "=", "active")
|
||||
.execute();
|
||||
return hsks.map(
|
||||
({ user_id, version, state, master_encryption_key_version, encrypted_key }) =>
|
||||
({
|
||||
userId: user_id,
|
||||
version,
|
||||
state: state as "active",
|
||||
mekVersion: master_encryption_key_version,
|
||||
encHsk: encrypted_key,
|
||||
}) satisfies Hsk,
|
||||
);
|
||||
};
|
||||
|
||||
47
src/lib/server/db/kysely.ts
Normal file
@@ -0,0 +1,47 @@
import { Kysely, PostgresDialect, Migrator } from "kysely";
import pg from "pg";
import env from "$lib/server/loadenv";
import migrations from "./migrations";
import type { Database } from "./schema";

const dialect = new PostgresDialect({
  pool: new pg.Pool({
    host: env.database.host,
    port: env.database.port,
    user: env.database.user,
    password: env.database.password,
    database: env.database.name,
  }),
});

const db = new Kysely<Database>({ dialect });

export const migrateDB = async () => {
  if (env.nodeEnv !== "production") return;

  const migrator = new Migrator({
    db,
    provider: {
      async getMigrations() {
        return migrations;
      },
    },
  });
  const { error, results } = await migrator.migrateToLatest();
  if (error) {
    const migration = results?.find(({ status }) => status === "Error");
    if (migration) {
      console.error(`Migration "${migration.migrationName}" failed.`);
    }
    console.error(error);
    process.exit(1);
  }

  if (results?.length === 0) {
    console.log("Database is up-to-date.");
  } else {
    console.log("Database migration completed.");
  }
};

export default db;
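`new Kysely<Database>({ dialect })` gets its type safety from the `Database` interface imported from `./schema`, which is not part of this diff. Presumably it maps the snake_case table and column names used in the queries above to TypeScript types roughly like this (a hypothetical fragment, not the actual schema module):

```ts
import type { ColumnType, Generated } from "kysely";

// Hypothetical fragment of src/lib/server/db/schema: one property per table,
// keyed by the physical table name used in the queries above.
interface UserTable {
  id: Generated<number>;
  email: string;
  password: string;
  nickname: string;
}

interface UserClientTable {
  user_id: number;
  client_id: number;
  // defaults to "challenging", so it may be omitted on insert
  state: ColumnType<"challenging" | "pending" | "active", string | undefined, string>;
}

export interface Database {
  user: UserTable;
  user_client: UserClientTable;
  // ...client, session, directory, file, master_encryption_key, ...
}
```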
@@ -1,8 +1,19 @@
|
||||
import { SqliteError } from "better-sqlite3";
|
||||
import { and, or, eq } from "drizzle-orm";
|
||||
import db from "./drizzle";
|
||||
import pg from "pg";
|
||||
import { IntegrityError } from "./error";
|
||||
import { mek, mekLog, clientMek } from "./schema";
|
||||
import db from "./kysely";
|
||||
import type { MekState } from "./schema";
|
||||
|
||||
interface Mek {
|
||||
userId: number;
|
||||
version: number;
|
||||
state: MekState;
|
||||
}
|
||||
|
||||
interface ClientMekWithDetails extends Mek {
|
||||
clientId: number;
|
||||
encMek: string;
|
||||
encMekSig: string;
|
||||
}
|
||||
|
||||
export const registerInitialMek = async (
|
||||
userId: number,
|
||||
@@ -10,58 +21,80 @@ export const registerInitialMek = async (
|
||||
encMek: string,
|
||||
encMekSig: string,
|
||||
) => {
|
||||
await db.transaction(
async (tx) => {
try {
await tx.insert(mek).values({
userId,
await db.transaction().execute(async (trx) => {
try {
await trx
.insertInto("master_encryption_key")
.values({
user_id: userId,
version: 1,
state: "active",
});
await tx.insert(clientMek).values({
userId,
clientId: createdBy,
mekVersion: 1,
encMek,
encMekSig,
});
await tx.insert(mekLog).values({
userId,
mekVersion: 1,
})
.execute();
await trx
.insertInto("client_master_encryption_key")
.values({
user_id: userId,
client_id: createdBy,
version: 1,
encrypted_key: encMek,
encrypted_key_signature: encMekSig,
})
.execute();
await trx
.insertInto("master_encryption_key_log")
.values({
user_id: userId,
master_encryption_key_version: 1,
timestamp: new Date(),
action: "create",
actionBy: createdBy,
});
} catch (e) {
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_PRIMARYKEY") {
throw new IntegrityError("MEK already registered");
}
throw e;
action_by: createdBy,
})
.execute();
} catch (e) {
if (e instanceof pg.DatabaseError && e.code === "23505") {
throw new IntegrityError("MEK already registered");
}
},
{ behavior: "exclusive" },
);
throw e;
}
});
};

export const getInitialMek = async (userId: number) => {
const meks = await db
.select()
.from(mek)
.where(and(eq(mek.userId, userId), eq(mek.version, 1)))
.limit(1);
return meks[0] ?? null;
const mek = await db
.selectFrom("master_encryption_key")
.selectAll()
.where("user_id", "=", userId)
.where("version", "=", 1)
.limit(1)
.executeTakeFirst();
return mek
? ({ userId: mek.user_id, version: mek.version, state: mek.state } satisfies Mek)
: null;
};

export const getAllValidClientMeks = async (userId: number, clientId: number) => {
return await db
.select()
.from(clientMek)
.innerJoin(mek, and(eq(clientMek.userId, mek.userId), eq(clientMek.mekVersion, mek.version)))
.where(
and(
eq(clientMek.userId, userId),
eq(clientMek.clientId, clientId),
or(eq(mek.state, "active"), eq(mek.state, "retired")),
),
);
const clientMeks = await db
.selectFrom("client_master_encryption_key")
.innerJoin("master_encryption_key", (join) =>
join
.onRef("client_master_encryption_key.user_id", "=", "master_encryption_key.user_id")
.onRef("client_master_encryption_key.version", "=", "master_encryption_key.version"),
)
.selectAll()
.where("client_master_encryption_key.user_id", "=", userId)
.where("client_master_encryption_key.client_id", "=", clientId)
.where((eb) => eb.or([eb("state", "=", "active"), eb("state", "=", "retired")]))
.execute();
return clientMeks.map(
({ user_id, client_id, version, state, encrypted_key, encrypted_key_signature }) =>
({
userId: user_id,
version,
state: state as "active" | "retired",
clientId: client_id,
encMek: encrypted_key,
encMekSig: encrypted_key_signature,
}) satisfies ClientMekWithDetails,
);
};
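A side note on the transaction above: the removed Drizzle/SQLite call opened it with { behavior: "exclusive" }, while the new Kysely transaction runs at PostgreSQL's default isolation level. If stricter isolation were ever wanted, Kysely's transaction builder can request it explicitly. A minimal sketch only, reusing the same db instance:

// Sketch: same inserts as registerInitialMek above, but with an explicit isolation level.
await db
  .transaction()
  .setIsolationLevel("serializable")
  .execute(async (trx) => {
    // ... master_encryption_key / client_master_encryption_key / log inserts ...
  });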

224
src/lib/server/db/migrations/1737357000-Initial.ts
Normal file
@@ -0,0 +1,224 @@
import { Kysely } from "kysely";

// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const up = async (db: Kysely<any>) => {
// user.ts
await db.schema
.createTable("user")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("email", "text", (col) => col.unique().notNull())
.addColumn("nickname", "text", (col) => col.notNull())
.addColumn("password", "text", (col) => col.notNull())
.execute();

// client.ts
await db.schema
.createTable("client")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("encryption_public_key", "text", (col) => col.unique().notNull())
.addColumn("signature_public_key", "text", (col) => col.unique().notNull())
.addUniqueConstraint("client_ak01", ["encryption_public_key", "signature_public_key"])
.execute();
await db.schema
.createTable("user_client")
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("client_id", "integer", (col) => col.references("client.id").notNull())
.addColumn("state", "text", (col) => col.notNull().defaultTo("challenging"))
.addPrimaryKeyConstraint("user_client_pk", ["user_id", "client_id"])
.execute();
await db.schema
.createTable("user_client_challenge")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("client_id", "integer", (col) => col.references("client.id").notNull())
.addColumn("answer", "text", (col) => col.unique().notNull())
.addColumn("allowed_ip", "text", (col) => col.notNull())
.addColumn("expires_at", "timestamp(3)", (col) => col.notNull())
.addForeignKeyConstraint(
"user_client_challenge_fk01",
["user_id", "client_id"],
"user_client",
["user_id", "client_id"],
)
.execute();

// session.ts
await db.schema
.createTable("session")
.addColumn("id", "text", (col) => col.primaryKey())
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("client_id", "integer", (col) => col.references("client.id"))
.addColumn("created_at", "timestamp(3)", (col) => col.notNull())
.addColumn("last_used_at", "timestamp(3)", (col) => col.notNull())
.addColumn("last_used_by_ip", "text")
.addColumn("last_used_by_agent", "text")
.addUniqueConstraint("session_ak01", ["user_id", "client_id"])
.execute();
await db.schema
.createTable("session_upgrade_challenge")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("session_id", "text", (col) => col.references("session.id").unique().notNull())
.addColumn("client_id", "integer", (col) => col.references("client.id").notNull())
.addColumn("answer", "text", (col) => col.unique().notNull())
.addColumn("allowed_ip", "text", (col) => col.notNull())
.addColumn("expires_at", "timestamp(3)", (col) => col.notNull())
.execute();

// mek.ts
await db.schema
.createTable("master_encryption_key")
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("version", "integer", (col) => col.notNull())
.addColumn("state", "text", (col) => col.notNull())
.addPrimaryKeyConstraint("master_encryption_key_pk", ["user_id", "version"])
.execute();
await db.schema
..createTable("master_encryption_key_log")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("master_encryption_key_version", "integer", (col) => col.notNull())
.addColumn("timestamp", "timestamp(3)", (col) => col.notNull())
.addColumn("action", "text", (col) => col.notNull())
.addColumn("action_by", "integer", (col) => col.references("client.id"))
.addForeignKeyConstraint(
"master_encryption_key_log_fk01",
["user_id", "master_encryption_key_version"],
"master_encryption_key",
["user_id", "version"],
)
.execute();
await db.schema
.createTable("client_master_encryption_key")
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("client_id", "integer", (col) => col.references("client.id").notNull())
.addColumn("version", "integer", (col) => col.notNull())
.addColumn("encrypted_key", "text", (col) => col.notNull())
.addColumn("encrypted_key_signature", "text", (col) => col.notNull())
.addPrimaryKeyConstraint("client_master_encryption_key_pk", ["user_id", "client_id", "version"])
.addForeignKeyConstraint(
"client_master_encryption_key_fk01",
["user_id", "version"],
"master_encryption_key",
["user_id", "version"],
)
.execute();

// hsk.ts
await db.schema
.createTable("hmac_secret_key")
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("version", "integer", (col) => col.notNull())
.addColumn("state", "text", (col) => col.notNull())
.addColumn("master_encryption_key_version", "integer", (col) => col.notNull())
.addColumn("encrypted_key", "text", (col) => col.unique().notNull())
.addPrimaryKeyConstraint("hmac_secret_key_pk", ["user_id", "version"])
.addForeignKeyConstraint(
"hmac_secret_key_fk01",
["user_id", "master_encryption_key_version"],
"master_encryption_key",
["user_id", "version"],
)
.execute();
await db.schema
.createTable("hmac_secret_key_log")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("hmac_secret_key_version", "integer", (col) => col.notNull())
.addColumn("timestamp", "timestamp(3)", (col) => col.notNull())
.addColumn("action", "text", (col) => col.notNull())
.addColumn("action_by", "integer", (col) => col.references("client.id"))
.addForeignKeyConstraint(
"hmac_secret_key_log_fk01",
["user_id", "hmac_secret_key_version"],
"hmac_secret_key",
["user_id", "version"],
)
.execute();

// file.ts
await db.schema
.createTable("directory")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("parent_id", "integer", (col) => col.references("directory.id"))
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("master_encryption_key_version", "integer", (col) => col.notNull())
.addColumn("encrypted_data_encryption_key", "text", (col) => col.unique().notNull())
.addColumn("data_encryption_key_version", "timestamp(3)", (col) => col.notNull())
.addColumn("encrypted_name", "json", (col) => col.notNull())
.addForeignKeyConstraint(
"directory_fk01",
["user_id", "master_encryption_key_version"],
"master_encryption_key",
["user_id", "version"],
)
.execute();
await db.schema
.createTable("directory_log")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("directory_id", "integer", (col) =>
col.references("directory.id").onDelete("cascade").notNull(),
)
.addColumn("timestamp", "timestamp(3)", (col) => col.notNull())
.addColumn("action", "text", (col) => col.notNull())
.addColumn("new_name", "json")
.execute();
await db.schema
.createTable("file")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("parent_id", "integer", (col) => col.references("directory.id"))
.addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
.addColumn("path", "text", (col) => col.unique().notNull())
.addColumn("master_encryption_key_version", "integer", (col) => col.notNull())
.addColumn("encrypted_data_encryption_key", "text", (col) => col.unique().notNull())
.addColumn("data_encryption_key_version", "timestamp(3)", (col) => col.notNull())
.addColumn("hmac_secret_key_version", "integer")
.addColumn("content_hmac", "text")
.addColumn("content_type", "text", (col) => col.notNull())
.addColumn("encrypted_content_iv", "text", (col) => col.notNull())
.addColumn("encrypted_content_hash", "text", (col) => col.notNull())
.addColumn("encrypted_name", "json", (col) => col.notNull())
.addColumn("encrypted_created_at", "json")
.addColumn("encrypted_last_modified_at", "json", (col) => col.notNull())
.addForeignKeyConstraint(
"file_fk01",
["user_id", "master_encryption_key_version"],
"master_encryption_key",
["user_id", "version"],
)
.addForeignKeyConstraint(
"file_fk02",
["user_id", "hmac_secret_key_version"],
"hmac_secret_key",
["user_id", "version"],
)
.execute();
await db.schema
.createTable("file_log")
.addColumn("id", "integer", (col) => col.primaryKey().generatedAlwaysAsIdentity())
.addColumn("file_id", "integer", (col) =>
col.references("file.id").onDelete("cascade").notNull(),
)
.addColumn("timestamp", "timestamp(3)", (col) => col.notNull())
.addColumn("action", "text", (col) => col.notNull())
.addColumn("new_name", "json")
.execute();
};

// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const down = async (db: Kysely<any>) => {
await db.schema.dropTable("file_log").execute();
await db.schema.dropTable("file").execute();
await db.schema.dropTable("directory_log").execute();
await db.schema.dropTable("directory").execute();
await db.schema.dropTable("hmac_secret_key_log").execute();
await db.schema.dropTable("hmac_secret_key").execute();
await db.schema.dropTable("client_master_encryption_key").execute();
await db.schema.dropTable("master_encryption_key_log").execute();
await db.schema.dropTable("master_encryption_key").execute();
await db.schema.dropTable("session_upgrade_challenge").execute();
await db.schema.dropTable("session").execute();
await db.schema.dropTable("user_client_challenge").execute();
await db.schema.dropTable("user_client").execute();
await db.schema.dropTable("client").execute();
await db.schema.dropTable("user").execute();
};
5
src/lib/server/db/migrations/index.ts
Normal file
@@ -0,0 +1,5 @@
import * as Initial1737357000 from "./1737357000-Initial";

export default {
"1737357000-Initial": Initial1737357000,
};
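How these migrations are executed is not shown in this commit. A typical Kysely setup wraps the exported record above in a MigrationProvider and runs the Migrator at startup; a minimal sketch under that assumption (file name and import paths here are illustrative, not part of the diff):

import { Migrator, type MigrationProvider } from "kysely";
import db from "../kysely";
import migrations from "./index";

// Serve the statically imported migration record to Kysely's Migrator.
const provider: MigrationProvider = {
  getMigrations: async () => migrations,
};

export const migrateToLatest = async () => {
  const migrator = new Migrator({ db, provider });
  const { error, results } = await migrator.migrateToLatest();
  results?.forEach((it) => console.log(`${it.migrationName}: ${it.status}`));
  if (error) throw error;
};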
@@ -1,61 +1,32 @@
import {
sqliteTable,
text,
integer,
primaryKey,
foreignKey,
unique,
} from "drizzle-orm/sqlite-core";
import { user } from "./user";
import type { ColumnType, Generated } from "kysely";

export const client = sqliteTable(
"client",
{
id: integer("id").primaryKey({ autoIncrement: true }),
encPubKey: text("encryption_public_key").notNull().unique(), // Base64
sigPubKey: text("signature_public_key").notNull().unique(), // Base64
},
(t) => ({
unq: unique().on(t.encPubKey, t.sigPubKey),
}),
);
interface ClientTable {
id: Generated<number>;
encryption_public_key: string; // Base64
signature_public_key: string; // Base64
}

export const userClient = sqliteTable(
"user_client",
{
userId: integer("user_id")
.notNull()
.references(() => user.id),
clientId: integer("client_id")
.notNull()
.references(() => client.id),
state: text("state", { enum: ["challenging", "pending", "active"] })
.notNull()
.default("challenging"),
},
(t) => ({
pk: primaryKey({ columns: [t.userId, t.clientId] }),
}),
);
export type UserClientState = "challenging" | "pending" | "active";

export const userClientChallenge = sqliteTable(
"user_client_challenge",
{
id: integer("id").primaryKey(),
userId: integer("user_id")
.notNull()
.references(() => user.id),
clientId: integer("client_id")
.notNull()
.references(() => client.id),
answer: text("answer").notNull().unique(), // Base64
allowedIp: text("allowed_ip").notNull(),
expiresAt: integer("expires_at", { mode: "timestamp_ms" }).notNull(),
},
(t) => ({
ref: foreignKey({
columns: [t.userId, t.clientId],
foreignColumns: [userClient.userId, userClient.clientId],
}),
}),
);
interface UserClientTable {
user_id: number;
client_id: number;
state: ColumnType<UserClientState, UserClientState | undefined>;
}

interface UserClientChallengeTable {
id: Generated<number>;
user_id: number;
client_id: number;
answer: string; // Base64
allowed_ip: string;
expires_at: ColumnType<Date, Date, never>;
}

declare module "./index" {
interface Database {
client: ClientTable;
user_client: UserClientTable;
user_client_challenge: UserClientChallengeTable;
}
}

@@ -1,95 +1,53 @@
import { sqliteTable, text, integer, primaryKey, foreignKey } from "drizzle-orm/sqlite-core";
import { category } from "./category";
import { hsk } from "./hsk";
import { mek } from "./mek";
import { user } from "./user";
import type { ColumnType, Generated } from "kysely";

const ciphertext = (name: string) =>
text(name, { mode: "json" }).$type<{
ciphertext: string; // Base64
iv: string; // Base64
}>();
export type Ciphertext = {
ciphertext: string; // Base64
iv: string; // Base64
};

export const directory = sqliteTable(
"directory",
{
id: integer("id").primaryKey({ autoIncrement: true }),
parentId: integer("parent_id"),
userId: integer("user_id")
.notNull()
.references(() => user.id),
mekVersion: integer("master_encryption_key_version").notNull(),
encDek: text("encrypted_data_encryption_key").notNull().unique(), // Base64
dekVersion: integer("data_encryption_key_version", { mode: "timestamp_ms" }).notNull(),
encName: ciphertext("encrypted_name").notNull(),
},
(t) => ({
ref1: foreignKey({
columns: [t.parentId],
foreignColumns: [t.id],
}),
ref2: foreignKey({
columns: [t.userId, t.mekVersion],
foreignColumns: [mek.userId, mek.version],
}),
}),
);
interface DirectoryTable {
id: Generated<number>;
parent_id: number | null;
user_id: number;
master_encryption_key_version: number;
encrypted_data_encryption_key: string; // Base64
data_encryption_key_version: Date;
encrypted_name: Ciphertext;
}

export const directoryLog = sqliteTable("directory_log", {
id: integer("id").primaryKey({ autoIncrement: true }),
directoryId: integer("directory_id")
.notNull()
.references(() => directory.id, { onDelete: "cascade" }),
timestamp: integer("timestamp", { mode: "timestamp_ms" }).notNull(),
action: text("action", { enum: ["create", "rename"] }).notNull(),
newName: ciphertext("new_name"),
});
interface DirectoryLogTable {
id: Generated<number>;
directory_id: number;
timestamp: ColumnType<Date, Date, never>;
action: "create" | "rename";
new_name: Ciphertext | null;
}

export const file = sqliteTable(
"file",
{
id: integer("id").primaryKey({ autoIncrement: true }),
parentId: integer("parent_id").references(() => directory.id),
userId: integer("user_id")
.notNull()
.references(() => user.id),
path: text("path").notNull().unique(),
mekVersion: integer("master_encryption_key_version").notNull(),
encDek: text("encrypted_data_encryption_key").notNull().unique(), // Base64
dekVersion: integer("data_encryption_key_version", { mode: "timestamp_ms" }).notNull(),
hskVersion: integer("hmac_secret_key_version"),
contentHmac: text("content_hmac"), // Base64
contentType: text("content_type").notNull(),
encContentIv: text("encrypted_content_iv").notNull(), // Base64
encContentHash: text("encrypted_content_hash").notNull(), // Base64
encName: ciphertext("encrypted_name").notNull(),
encCreatedAt: ciphertext("encrypted_created_at"),
encLastModifiedAt: ciphertext("encrypted_last_modified_at").notNull(),
},
(t) => ({
ref1: foreignKey({
columns: [t.userId, t.mekVersion],
foreignColumns: [mek.userId, mek.version],
}),
ref2: foreignKey({
columns: [t.userId, t.hskVersion],
foreignColumns: [hsk.userId, hsk.version],
}),
}),
);
interface FileTable {
id: Generated<number>;
parent_id: number | null;
user_id: number;
path: string;
master_encryption_key_version: number;
encrypted_data_encryption_key: string; // Base64
data_encryption_key_version: Date;
hmac_secret_key_version: number | null;
content_hmac: string | null; // Base64
content_type: string;
encrypted_content_iv: string; // Base64
encrypted_content_hash: string; // Base64
encrypted_name: Ciphertext;
encrypted_created_at: Ciphertext | null;
encrypted_last_modified_at: Ciphertext;
}

export const fileLog = sqliteTable("file_log", {
id: integer("id").primaryKey({ autoIncrement: true }),
fileId: integer("file_id")
.notNull()
.references(() => file.id, { onDelete: "cascade" }),
timestamp: integer("timestamp", { mode: "timestamp_ms" }).notNull(),
action: text("action", {
enum: ["create", "rename", "addToCategory", "removeFromCategory"],
}).notNull(),
newName: ciphertext("new_name"),
categoryId: integer("category_id").references(() => category.id, { onDelete: "set null" }),
});
interface FileLogTable {
id: Generated<number>;
file_id: number;
timestamp: ColumnType<Date, Date, never>;
action: "create" | "rename";
new_name: Ciphertext | null;
}

export const fileCategory = sqliteTable(
"file_category",
@@ -107,3 +65,12 @@ export const fileCategory = sqliteTable(
}),
}),
);

declare module "./index" {
interface Database {
directory: DirectoryTable;
directory_log: DirectoryLogTable;
file: FileTable;
file_log: FileLogTable;
}
}

@@ -1,44 +1,27 @@
import { sqliteTable, text, integer, primaryKey, foreignKey } from "drizzle-orm/sqlite-core";
import { client } from "./client";
import { mek } from "./mek";
import { user } from "./user";
import type { ColumnType, Generated } from "kysely";

export const hsk = sqliteTable(
"hmac_secret_key",
{
userId: integer("user_id")
.notNull()
.references(() => user.id),
version: integer("version").notNull(),
state: text("state", { enum: ["active"] }).notNull(),
mekVersion: integer("master_encryption_key_version").notNull(),
encHsk: text("encrypted_key").notNull().unique(), // Base64
},
(t) => ({
pk: primaryKey({ columns: [t.userId, t.version] }),
ref: foreignKey({
columns: [t.userId, t.mekVersion],
foreignColumns: [mek.userId, mek.version],
}),
}),
);
export type HskState = "active";

export const hskLog = sqliteTable(
"hmac_secret_key_log",
{
id: integer("id").primaryKey({ autoIncrement: true }),
userId: integer("user_id")
.notNull()
.references(() => user.id),
hskVersion: integer("hmac_secret_key_version").notNull(),
timestamp: integer("timestamp", { mode: "timestamp_ms" }).notNull(),
action: text("action", { enum: ["create"] }).notNull(),
actionBy: integer("action_by").references(() => client.id),
},
(t) => ({
ref: foreignKey({
columns: [t.userId, t.hskVersion],
foreignColumns: [hsk.userId, hsk.version],
}),
}),
);
interface HskTable {
user_id: number;
version: number;
state: HskState;
master_encryption_key_version: number;
encrypted_key: string; // Base64
}

interface HskLogTable {
id: Generated<number>;
user_id: number;
hmac_secret_key_version: number;
timestamp: ColumnType<Date, Date, never>;
action: "create";
action_by: number | null;
}

declare module "./index" {
interface Database {
hmac_secret_key: HskTable;
hmac_secret_key_log: HskLogTable;
}
}

@@ -5,3 +5,6 @@ export * from "./hsk";
export * from "./mek";
export * from "./session";
export * from "./user";

// eslint-disable-next-line @typescript-eslint/no-empty-object-type
export interface Database {}
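The ./kysely module that the query files import is not included in this diff. Wiring the module-augmented Database interface above to the PostgreSQL settings from loadenv would typically look something like the following sketch (an assumption about that file, not its actual contents):

import { Kysely, PostgresDialect } from "kysely";
import pg from "pg";
import env from "$lib/server/loadenv";
import type { Database } from "./schema";

// One pooled Kysely instance, typed by the Database interface each schema file augments.
export default new Kysely<Database>({
  dialect: new PostgresDialect({
    pool: new pg.Pool({
      host: env.database.host,
      port: env.database.port,
      user: env.database.user,
      password: env.database.password,
      database: env.database.name,
    }),
  }),
});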

@@ -1,60 +1,34 @@
import { sqliteTable, text, integer, primaryKey, foreignKey } from "drizzle-orm/sqlite-core";
import { client } from "./client";
import { user } from "./user";
import type { ColumnType, Generated } from "kysely";

export const mek = sqliteTable(
"master_encryption_key",
{
userId: integer("user_id")
.notNull()
.references(() => user.id),
version: integer("version").notNull(),
state: text("state", { enum: ["active", "retired", "dead"] }).notNull(),
retiredAt: integer("retired_at", { mode: "timestamp_ms" }),
},
(t) => ({
pk: primaryKey({ columns: [t.userId, t.version] }),
}),
);
export type MekState = "active" | "retired" | "dead";

export const mekLog = sqliteTable(
"master_encryption_key_log",
{
id: integer("id").primaryKey({ autoIncrement: true }),
userId: integer("user_id")
.notNull()
.references(() => user.id),
mekVersion: integer("master_encryption_key_version").notNull(),
timestamp: integer("timestamp", { mode: "timestamp_ms" }).notNull(),
action: text("action", { enum: ["create"] }).notNull(),
actionBy: integer("action_by").references(() => client.id),
},
(t) => ({
ref: foreignKey({
columns: [t.userId, t.mekVersion],
foreignColumns: [mek.userId, mek.version],
}),
}),
);
interface MekTable {
user_id: number;
version: number;
state: MekState;
}

export const clientMek = sqliteTable(
"client_master_encryption_key",
{
userId: integer("user_id")
.notNull()
.references(() => user.id),
clientId: integer("client_id")
.notNull()
.references(() => client.id),
mekVersion: integer("version").notNull(),
encMek: text("encrypted_key").notNull(), // Base64
encMekSig: text("encrypted_key_signature").notNull(), // Base64
},
(t) => ({
pk: primaryKey({ columns: [t.userId, t.clientId, t.mekVersion] }),
ref: foreignKey({
columns: [t.userId, t.mekVersion],
foreignColumns: [mek.userId, mek.version],
}),
}),
);
interface MekLogTable {
id: Generated<number>;
user_id: number;
master_encryption_key_version: number;
timestamp: ColumnType<Date, Date, never>;
action: "create";
action_by: number | null;
}

interface ClientMekTable {
user_id: number;
client_id: number;
version: number;
encrypted_key: string; // Base64
encrypted_key_signature: string; // Base64
}

declare module "./index" {
interface Database {
master_encryption_key: MekTable;
master_encryption_key_log: MekLogTable;
client_master_encryption_key: ClientMekTable;
}
}

@@ -1,35 +1,27 @@
import { sqliteTable, text, integer, unique } from "drizzle-orm/sqlite-core";
import { client } from "./client";
import { user } from "./user";
import type { ColumnType, Generated } from "kysely";

export const session = sqliteTable(
"session",
{
id: text("id").notNull().primaryKey(),
userId: integer("user_id")
.notNull()
.references(() => user.id),
clientId: integer("client_id").references(() => client.id),
createdAt: integer("created_at", { mode: "timestamp_ms" }).notNull(),
lastUsedAt: integer("last_used_at", { mode: "timestamp_ms" }).notNull(),
lastUsedByIp: text("last_used_by_ip"),
lastUsedByUserAgent: text("last_used_by_user_agent"),
},
(t) => ({
unq: unique().on(t.userId, t.clientId),
}),
);
interface SessionTable {
id: string;
user_id: number;
client_id: number | null;
created_at: ColumnType<Date, Date, never>;
last_used_at: Date;
last_used_by_ip: string | null;
last_used_by_agent: string | null;
}

export const sessionUpgradeChallenge = sqliteTable("session_upgrade_challenge", {
id: integer("id").primaryKey(),
sessionId: text("session_id")
.notNull()
.references(() => session.id)
.unique(),
clientId: integer("client_id")
.notNull()
.references(() => client.id),
answer: text("answer").notNull().unique(), // Base64
allowedIp: text("allowed_ip").notNull(),
expiresAt: integer("expires_at", { mode: "timestamp_ms" }).notNull(),
});
interface SessionUpgradeChallengeTable {
id: Generated<number>;
session_id: string;
client_id: number;
answer: string; // Base64
allowed_ip: string;
expires_at: ColumnType<Date, Date, never>;
}

declare module "./index" {
interface Database {
session: SessionTable;
session_upgrade_challenge: SessionUpgradeChallengeTable;
}
}

@@ -1,8 +1,14 @@
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";
import type { Generated } from "kysely";

export const user = sqliteTable("user", {
id: integer("id").primaryKey({ autoIncrement: true }),
email: text("email").notNull().unique(),
password: text("password").notNull(),
nickname: text("nickname").notNull(),
});
interface UserTable {
id: Generated<number>;
email: string;
nickname: string;
password: string;
}

declare module "./index" {
interface Database {
user: UserTable;
}
}

@@ -1,30 +1,31 @@
import { SqliteError } from "better-sqlite3";
import { and, eq, ne, gt, lte, isNull } from "drizzle-orm";
import pg from "pg";
import env from "$lib/server/loadenv";
import db from "./drizzle";
import { IntegrityError } from "./error";
import { session, sessionUpgradeChallenge } from "./schema";
import db from "./kysely";

export const createSession = async (
userId: number,
clientId: number | null,
sessionId: string,
ip: string | null,
userAgent: string | null,
agent: string | null,
) => {
try {
const now = new Date();
await db.insert(session).values({
id: sessionId,
userId,
clientId,
createdAt: now,
lastUsedAt: now,
lastUsedByIp: ip || null,
lastUsedByUserAgent: userAgent || null,
});
await db
.insertInto("session")
.values({
id: sessionId,
user_id: userId,
client_id: clientId,
created_at: now,
last_used_at: now,
last_used_by_ip: ip || null,
last_used_by_agent: agent || null,
})
.execute();
} catch (e) {
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_UNIQUE") {
if (e instanceof pg.DatabaseError && e.code === "23505") {
throw new IntegrityError("Session already exists");
}
throw e;
@@ -34,49 +35,55 @@ export const createSession = async (
export const refreshSession = async (
sessionId: string,
ip: string | null,
userAgent: string | null,
agent: string | null,
) => {
const now = new Date();
const sessions = await db
.update(session)
const session = await db
.updateTable("session")
.set({
lastUsedAt: now,
lastUsedByIp: ip || undefined,
lastUsedByUserAgent: userAgent || undefined,
last_used_at: now,
last_used_by_ip: ip !== "" ? ip : undefined, // Don't update if empty
last_used_by_agent: agent !== "" ? agent : undefined, // Don't update if empty
})
.where(
and(
eq(session.id, sessionId),
gt(session.lastUsedAt, new Date(now.getTime() - env.session.exp)),
),
)
.returning({ userId: session.userId, clientId: session.clientId });
if (!sessions[0]) {
.where("id", "=", sessionId)
.where("last_used_at", ">", new Date(now.getTime() - env.session.exp))
.returning(["user_id", "client_id"])
.executeTakeFirst();
if (!session) {
throw new IntegrityError("Session not found");
}
return sessions[0];
return { userId: session.user_id, clientId: session.client_id };
};

export const upgradeSession = async (sessionId: string, clientId: number) => {
const res = await db
.update(session)
.set({ clientId })
.where(and(eq(session.id, sessionId), isNull(session.clientId)));
if (res.changes === 0) {
.updateTable("session")
.set({ client_id: clientId })
.where("id", "=", sessionId)
.where("client_id", "is", null)
.executeTakeFirst();
if (res.numUpdatedRows === 0n) {
throw new IntegrityError("Session not found");
}
};

export const deleteSession = async (sessionId: string) => {
await db.delete(session).where(eq(session.id, sessionId));
await db.deleteFrom("session").where("id", "=", sessionId).execute();
};

export const deleteAllOtherSessions = async (userId: number, sessionId: string) => {
await db.delete(session).where(and(eq(session.userId, userId), ne(session.id, sessionId)));
await db
.deleteFrom("session")
.where("id", "!=", sessionId)
.where("user_id", "=", userId)
.execute();
};

export const cleanupExpiredSessions = async () => {
await db.delete(session).where(lte(session.lastUsedAt, new Date(Date.now() - env.session.exp)));
await db
.deleteFrom("session")
.where("last_used_at", "<=", new Date(Date.now() - env.session.exp))
.execute();
};

export const registerSessionUpgradeChallenge = async (
@@ -87,15 +94,18 @@ export const registerSessionUpgradeChallenge = async (
expiresAt: Date,
) => {
try {
await db.insert(sessionUpgradeChallenge).values({
sessionId,
clientId,
answer,
allowedIp,
expiresAt,
});
await db
.insertInto("session_upgrade_challenge")
.values({
session_id: sessionId,
client_id: clientId,
answer,
allowed_ip: allowedIp,
expires_at: expiresAt,
})
.execute();
} catch (e) {
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_UNIQUE") {
if (e instanceof pg.DatabaseError && e.code === "23505") {
throw new IntegrityError("Challenge already registered");
}
throw e;
@@ -107,22 +117,17 @@ export const consumeSessionUpgradeChallenge = async (
answer: string,
ip: string,
) => {
const challenges = await db
.delete(sessionUpgradeChallenge)
.where(
and(
eq(sessionUpgradeChallenge.sessionId, sessionId),
eq(sessionUpgradeChallenge.answer, answer),
eq(sessionUpgradeChallenge.allowedIp, ip),
gt(sessionUpgradeChallenge.expiresAt, new Date()),
),
)
.returning({ clientId: sessionUpgradeChallenge.clientId });
return challenges[0] ?? null;
const challenge = await db
.deleteFrom("session_upgrade_challenge")
.where("session_id", "=", sessionId)
.where("answer", "=", answer)
.where("allowed_ip", "=", ip)
.where("expires_at", ">", new Date())
.returning("client_id")
.executeTakeFirst();
return challenge ? { clientId: challenge.client_id } : null;
};

export const cleanupExpiredSessionUpgradeChallenges = async () => {
await db
.delete(sessionUpgradeChallenge)
.where(lte(sessionUpgradeChallenge.expiresAt, new Date()));
await db.deleteFrom("session_upgrade_challenge").where("expires_at", "<=", new Date()).execute();
};

@@ -1,21 +1,36 @@
import { eq } from "drizzle-orm";
import db from "./drizzle";
import { user } from "./schema";
import db from "./kysely";

interface User {
id: number;
email: string;
nickname: string;
password: string;
}

export const getUser = async (userId: number) => {
const users = await db.select().from(user).where(eq(user.id, userId)).limit(1);
return users[0] ?? null;
const user = await db
.selectFrom("user")
.selectAll()
.where("id", "=", userId)
.limit(1)
.executeTakeFirst();
return user ? (user satisfies User) : null;
};

export const getUserByEmail = async (email: string) => {
const users = await db.select().from(user).where(eq(user.email, email)).limit(1);
return users[0] ?? null;
};

export const setUserPassword = async (userId: number, password: string) => {
await db.update(user).set({ password }).where(eq(user.id, userId));
const user = await db
.selectFrom("user")
.selectAll()
.where("email", "=", email)
.limit(1)
.executeTakeFirst();
return user ? (user satisfies User) : null;
};

export const setUserNickname = async (userId: number, nickname: string) => {
await db.update(user).set({ nickname }).where(eq(user.id, userId));
await db.updateTable("user").set({ nickname }).where("id", "=", userId).execute();
};

export const setUserPassword = async (userId: number, password: string) => {
await db.updateTable("user").set({ password }).where("id", "=", userId).execute();
};

@@ -3,11 +3,19 @@ import { building } from "$app/environment";
import { env } from "$env/dynamic/private";

if (!building) {
if (!env.DATABASE_PASSWORD) throw new Error("DATABASE_PASSWORD not set");
if (!env.SESSION_SECRET) throw new Error("SESSION_SECRET not set");
}

export default {
databaseUrl: env.DATABASE_URL || "local.db",
nodeEnv: env.NODE_ENV || "development",
database: {
host: env.DATABASE_HOST,
port: env.DATABASE_PORT ? parseInt(env.DATABASE_PORT, 10) : undefined,
user: env.DATABASE_USER,
password: env.DATABASE_PASSWORD!,
name: env.DATABASE_NAME,
},
session: {
secret: env.SESSION_SECRET!,
exp: ms(env.SESSION_EXPIRES || "14d"),

@@ -21,5 +21,5 @@ export const verifyClientEncMekSig = async (
}

const data = JSON.stringify({ version, key: encMek });
return verifySignature(Buffer.from(data), encMekSig, userClient.client.sigPubKey);
return verifySignature(Buffer.from(data), encMekSig, userClient.sigPubKey);
};

@@ -63,7 +63,7 @@ export const registerUserClient = async (
}

try {
const clientId = await createClient(encPubKey, sigPubKey, userId);
const { clientId } = await createClient(encPubKey, sigPubKey, userId);
return { challenge: await createUserClientChallenge(ip, userId, clientId, encPubKey) };
} catch (e) {
if (e instanceof IntegrityError && e.message === "Public key(s) already registered") {

@@ -8,8 +8,9 @@ import {
setDirectoryEncName,
unregisterDirectory,
getAllFilesByParent,
type NewDirectoryParams,
type NewDirectory,
} from "$lib/server/db/file";
import type { Ciphertext } from "$lib/server/db/schema";

export const getDirectoryInformation = async (userId: number, directoryId: "root" | number) => {
const directory = directoryId !== "root" ? await getDirectory(userId, directoryId) : undefined;
@@ -53,11 +54,10 @@ export const renameDirectory = async (
userId: number,
directoryId: number,
dekVersion: Date,
newEncName: string,
newEncNameIv: string,
newEncName: Ciphertext,
) => {
try {
await setDirectoryEncName(userId, directoryId, dekVersion, newEncName, newEncNameIv);
await setDirectoryEncName(userId, directoryId, dekVersion, newEncName);
} catch (e) {
if (e instanceof IntegrityError) {
if (e.message === "Directory not found") {
@@ -70,7 +70,7 @@ export const renameDirectory = async (
}
};

export const createDirectory = async (params: NewDirectoryParams) => {
export const createDirectory = async (params: NewDirectory) => {
const oneMinuteAgo = new Date(Date.now() - 60 * 1000);
const oneMinuteLater = new Date(Date.now() + 60 * 1000);
if (params.dekVersion <= oneMinuteAgo || params.dekVersion >= oneMinuteLater) {

@@ -13,8 +13,9 @@ import {
getFile,
setFileEncName,
unregisterFile,
type NewFileParams,
type NewFile,
} from "$lib/server/db/file";
import type { Ciphertext } from "$lib/server/db/schema";
import env from "$lib/server/loadenv";

export const getFileInformation = async (userId: number, fileId: number) => {
@@ -38,8 +39,8 @@ export const getFileInformation = async (userId: number, fileId: number) => {

export const deleteFile = async (userId: number, fileId: number) => {
try {
const filePath = await unregisterFile(userId, fileId);
unlink(filePath); // Intended
const { path } = await unregisterFile(userId, fileId);
unlink(path); // Intended
} catch (e) {
if (e instanceof IntegrityError && e.message === "File not found") {
error(404, "Invalid file id");
@@ -65,11 +66,10 @@ export const renameFile = async (
userId: number,
fileId: number,
dekVersion: Date,
newEncName: string,
newEncNameIv: string,
newEncName: Ciphertext,
) => {
try {
await setFileEncName(userId, fileId, dekVersion, newEncName, newEncNameIv);
await setFileEncName(userId, fileId, dekVersion, newEncName);
} catch (e) {
if (e instanceof IntegrityError) {
if (e.message === "File not found") {
@@ -96,7 +96,7 @@ const safeUnlink = async (path: string) => {
};

export const uploadFile = async (
params: Omit<NewFileParams, "path" | "encContentHash">,
params: Omit<NewFile, "path" | "encContentHash">,
encContentStream: Readable,
encContentHash: Promise<string>,
) => {

@@ -7,11 +7,11 @@ import { verifyClientEncMekSig } from "$lib/server/modules/mek";
export const getClientMekList = async (userId: number, clientId: number) => {
const clientMeks = await getAllValidClientMeks(userId, clientId);
return {
encMeks: clientMeks.map((clientMek) => ({
version: clientMek.master_encryption_key.version,
state: clientMek.master_encryption_key.state as "active" | "retired",
encMek: clientMek.client_master_encryption_key.encMek,
encMekSig: clientMek.client_master_encryption_key.encMekSig,
encMeks: clientMeks.map(({ version, state, encMek, encMekSig }) => ({
version,
state,
encMek,
encMekSig,
})),
};
};

@@ -20,6 +20,6 @@ export const POST: RequestHandler = async ({ locals, params, request }) => {
if (!bodyZodRes.success) error(400, "Invalid request body");
const { dekVersion, name, nameIv } = bodyZodRes.data;

await renameDirectory(userId, id, new Date(dekVersion), name, nameIv);
await renameDirectory(userId, id, new Date(dekVersion), { ciphertext: name, iv: nameIv });
return text("Directory renamed", { headers: { "Content-Type": "text/plain" } });
};

@@ -17,8 +17,7 @@ export const POST: RequestHandler = async ({ locals, request }) => {
mekVersion,
encDek: dek,
dekVersion: new Date(dekVersion),
encName: name,
encNameIv: nameIv,
encName: { ciphertext: name, iv: nameIv },
});
return text("Directory created", { headers: { "Content-Type": "text/plain" } });
};

@@ -20,6 +20,6 @@ export const POST: RequestHandler = async ({ locals, params, request }) => {
if (!bodyZodRes.success) error(400, "Invalid request body");
const { dekVersion, name, nameIv } = bodyZodRes.data;

await renameFile(userId, id, new Date(dekVersion), name, nameIv);
await renameFile(userId, id, new Date(dekVersion), { ciphertext: name, iv: nameIv });
return text("File renamed", { headers: { "Content-Type": "text/plain" } });
};

@@ -40,12 +40,9 @@ const parseFileMetadata = (userId: number, json: string) => {
contentHmac,
contentType,
encContentIv: contentIv,
encName: name,
encNameIv: nameIv,
encCreatedAt: createdAt ?? null,
encCreatedAtIv: createdAtIv ?? null,
encLastModifiedAt: lastModifiedAt,
encLastModifiedAtIv: lastModifiedAtIv,
encName: { ciphertext: name, iv: nameIv },
encCreatedAt: createdAt && createdAtIv ? { ciphertext: createdAt, iv: createdAtIv } : null,
encLastModifiedAt: { ciphertext: lastModifiedAt, iv: lastModifiedAtIv },
} satisfies FileMetadata;
};