Mirror of https://github.com/kmc7468/arkvault.git
Synced 2025-12-14 22:08:45 +00:00
Implement automatic DB migration in the production environment
Dockerfile (15 changed lines)

@@ -2,6 +2,10 @@
 FROM node:22-alpine AS base
 WORKDIR /app
 
+RUN apk add --no-cache bash curl && \
+    curl -o /usr/local/bin/wait-for-it https://raw.githubusercontent.com/vishnubob/wait-for-it/master/wait-for-it.sh && \
+    chmod +x /usr/local/bin/wait-for-it
+
 RUN npm install -g pnpm@9
 COPY pnpm-lock.yaml .
 
@@ -10,10 +14,9 @@ FROM base AS build
 RUN pnpm fetch
 
 COPY . .
-RUN pnpm install --offline
-RUN pnpm build
-RUN sed -i "s/http\.createServer()/http.createServer({ requestTimeout: 0 })/g" ./build/index.js
+RUN pnpm install --offline && \
+    pnpm build && \
+    sed -i "s/http\.createServer()/http.createServer({ requestTimeout: 0 })/g" ./build/index.js
 
 # Deploy Stage
 FROM base
@@ -23,9 +26,7 @@ COPY package.json .
 RUN pnpm install --offline --prod
 
 COPY --from=build /app/build ./build
-COPY drizzle ./drizzle
 
 EXPOSE 3000
 ENV BODY_SIZE_LIMIT=Infinity
-CMD ["node", "./build/index.js"]
+CMD ["bash", "-c", "wait-for-it ${DATABASE_HOST:-localhost}:${DATABASE_PORT:-5432} -- node ./build/index.js"]
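
The new CMD blocks until the database's TCP port answers before launching Node, so the migration that now runs at startup does not race a freshly created Postgres container. For reference only, a minimal in-process sketch of what that gate does (an illustration assuming plain TCP polling; the commit relies on the wait-for-it shell script, not on code like this):

    // Hypothetical TypeScript equivalent of the wait-for-it gate (not part of this
    // commit): poll TCP connectivity to the database before continuing startup.
    import net from "node:net";
    import { setTimeout as sleep } from "node:timers/promises";

    export async function waitForPort(host: string, port: number, timeoutMs = 30_000) {
      const deadline = Date.now() + timeoutMs;
      while (Date.now() < deadline) {
        const reachable = await new Promise<boolean>((resolve) => {
          const socket = net.connect({ host, port });
          socket.once("connect", () => { socket.destroy(); resolve(true); });
          socket.once("error", () => { socket.destroy(); resolve(false); });
        });
        if (reachable) return;
        await sleep(1000); // retry once per second until the deadline
      }
      throw new Error(`Timed out waiting for ${host}:${port}`);
    }
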
@@ -27,6 +27,7 @@ services:
   database:
     image: postgres:17.2-alpine
     restart: on-failure
+    user: ${CONTAINER_UID:-0}:${CONTAINER_GID:-0}
     volumes:
       - ./data/database:/var/lib/postgresql/data
     environment:
@@ -2,15 +2,15 @@ import type { ServerInit } from "@sveltejs/kit";
 import { sequence } from "@sveltejs/kit/hooks";
 import schedule from "node-schedule";
 import { cleanupExpiredUserClientChallenges } from "$lib/server/db/client";
-import { migrateDB } from "$lib/server/db/drizzle";
+import { migrateDB } from "$lib/server/db/kysely";
 import {
   cleanupExpiredSessions,
   cleanupExpiredSessionUpgradeChallenges,
 } from "$lib/server/db/session";
 import { authenticate, setAgentInfo } from "$lib/server/middlewares";
 
-export const init: ServerInit = () => {
-  migrateDB();
+export const init: ServerInit = async () => {
+  await migrateDB();
 
   schedule.scheduleJob("0 * * * *", () => {
     cleanupExpiredUserClientChallenges();
@@ -1,4 +1,4 @@
-import { DatabaseError } from "pg";
+import pg from "pg";
 import { IntegrityError } from "./error";
 import db from "./kysely";
 import type { UserClientState } from "./schema";
@@ -91,7 +91,7 @@ export const createUserClient = async (userId: number, clientId: number) => {
   try {
     await db.insertInto("user_client").values({ user_id: userId, client_id: clientId }).execute();
   } catch (e) {
-    if (e instanceof DatabaseError && e.code === "23505") {
+    if (e instanceof pg.DatabaseError && e.code === "23505") {
       throw new IntegrityError("User client already exists");
     }
     throw e;
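
The same import change (the named DatabaseError export replaced by the pg namespace object) is repeated in the HSK, MEK, and session modules below. pg is a CommonJS package, so reaching DatabaseError through the default export is presumably what keeps the check working in the bundled production build; the unique-violation handling itself is unchanged. A small standalone sketch of the pattern (hypothetical helper, not part of the commit):

    import pg from "pg"; // CommonJS package: access DatabaseError via the default export

    // 23505 is PostgreSQL's unique_violation error code.
    export const isUniqueViolation = (e: unknown): boolean =>
      e instanceof pg.DatabaseError && e.code === "23505";
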
@@ -1,4 +1,4 @@
-import { DatabaseError } from "pg";
+import pg from "pg";
 import { IntegrityError } from "./error";
 import db from "./kysely";
 import type { HskState } from "./schema";
@@ -40,7 +40,7 @@ export const registerInitialHsk = async (
       })
       .execute();
   } catch (e) {
-    if (e instanceof DatabaseError && e.code === "23505") {
+    if (e instanceof pg.DatabaseError && e.code === "23505") {
       throw new IntegrityError("HSK already registered");
     }
     throw e;
@@ -1,10 +1,11 @@
-import { Kysely, PostgresDialect } from "kysely";
-import { Pool } from "pg";
+import { Kysely, PostgresDialect, Migrator } from "kysely";
+import pg from "pg";
 import env from "$lib/server/loadenv";
+import migrations from "./migrations";
 import type { Database } from "./schema";
 
 const dialect = new PostgresDialect({
-  pool: new Pool({
+  pool: new pg.Pool({
     host: env.database.host,
     port: env.database.port,
     user: env.database.user,
@@ -15,6 +16,32 @@ const dialect = new PostgresDialect({
 
 const db = new Kysely<Database>({ dialect });
 
-// TODO: Migration
+export const migrateDB = async () => {
+  if (env.nodeEnv !== "production") return;
+
+  const migrator = new Migrator({
+    db,
+    provider: {
+      async getMigrations() {
+        return migrations;
+      },
+    },
+  });
+  const { error, results } = await migrator.migrateToLatest();
+  if (error) {
+    const migration = results?.find(({ status }) => status === "Error");
+    if (migration) {
+      console.error(`Migration "${migration.migrationName}" failed.`);
+    }
+    console.error(error);
+    process.exit(1);
+  }
+
+  if (results?.length === 0) {
+    console.log("Database is up-to-date.");
+  } else {
+    console.log("Database migration completed.");
+  }
+};
 
 export default db;
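
migrateToLatest() is safe to call on every startup: Kysely's Migrator records applied migrations in its own bookkeeping table and only runs the ones that are still pending. A hypothetical helper (not in this commit) that logs the pending set before migrating, using the same Migrator instance:

    import { Migrator } from "kysely";

    // List migrations the Migrator has not executed yet (executedAt is unset).
    export const logPendingMigrations = async (migrator: Migrator) => {
      const all = await migrator.getMigrations();
      const pending = all.filter((m) => m.executedAt === undefined).map((m) => m.name);
      if (pending.length > 0) {
        console.log(`Pending migrations: ${pending.join(", ")}`);
      }
    };
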
@@ -1,4 +1,4 @@
-import { DatabaseError } from "pg";
+import pg from "pg";
 import { IntegrityError } from "./error";
 import db from "./kysely";
 import type { MekState } from "./schema";
@@ -52,7 +52,7 @@ export const registerInitialMek = async (
       })
       .execute();
   } catch (e) {
-    if (e instanceof DatabaseError && e.code === "23505") {
+    if (e instanceof pg.DatabaseError && e.code === "23505") {
       throw new IntegrityError("MEK already registered");
     }
     throw e;
src/lib/server/db/migrations/index.ts (new file, 5 lines)

@@ -0,0 +1,5 @@
+import * as Initial1737357000 from "./1737357000-Initial";
+
+export default {
+  "1737357000-Initial": Initial1737357000,
+};
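
This index maps migration names to modules and is handed to the Migrator through the custom provider in kysely.ts. The referenced 1737357000-Initial.ts is not shown in this diff; a minimal sketch of the module shape Kysely expects (the table and columns here are purely illustrative assumptions, not the project's actual schema):

    import { Kysely, sql } from "kysely";

    // Each migration module exports up() and optionally down().
    export async function up(db: Kysely<any>): Promise<void> {
      await db.schema
        .createTable("example")
        .addColumn("id", "serial", (col) => col.primaryKey())
        .addColumn("created_at", "timestamptz", (col) => col.notNull().defaultTo(sql`now()`))
        .execute();
    }

    export async function down(db: Kysely<any>): Promise<void> {
      await db.schema.dropTable("example").execute();
    }
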
@@ -1,4 +1,4 @@
-import { DatabaseError } from "pg";
+import pg from "pg";
 import env from "$lib/server/loadenv";
 import { IntegrityError } from "./error";
 import db from "./kysely";
@@ -25,7 +25,7 @@ export const createSession = async (
       })
       .execute();
   } catch (e) {
-    if (e instanceof DatabaseError && e.code === "23505") {
+    if (e instanceof pg.DatabaseError && e.code === "23505") {
       throw new IntegrityError("Session already exists");
     }
     throw e;
@@ -105,7 +105,7 @@ export const registerSessionUpgradeChallenge = async (
       })
       .execute();
   } catch (e) {
-    if (e instanceof DatabaseError && e.code === "23505") {
+    if (e instanceof pg.DatabaseError && e.code === "23505") {
       throw new IntegrityError("Challenge already registered");
     }
     throw e;
@@ -8,6 +8,7 @@ if (!building) {
 }
 
 export default {
+  nodeEnv: env.NODE_ENV || "development",
   database: {
     host: env.DATABASE_HOST,
     port: env.DATABASE_PORT ? parseInt(env.DATABASE_PORT, 10) : undefined,