diff --git a/.dockerignore b/.dockerignore index 4f68a3b..6d312ec 100644 --- a/.dockerignore +++ b/.dockerignore @@ -12,6 +12,7 @@ node_modules /data /library /thumbnails +/uploads # OS .DS_Store diff --git a/.env.example b/.env.example index e3b6365..4e8b20b 100644 --- a/.env.example +++ b/.env.example @@ -12,3 +12,4 @@ USER_CLIENT_CHALLENGE_EXPIRES= SESSION_UPGRADE_CHALLENGE_EXPIRES= LIBRARY_PATH= THUMBNAILS_PATH= +UPLOADS_PATH= diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml new file mode 100644 index 0000000..6f0627d --- /dev/null +++ b/.github/workflows/docker.yaml @@ -0,0 +1,45 @@ +name: Docker Image Build + +on: + release: + types: [published] + +jobs: + build-and-push: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract Docker metadata + uses: docker/metadata-action@v5 + id: meta + with: + images: ghcr.io/${{ github.repository }} + tags: | + type=semver,value={{version}} + type=raw,value=latest + type=sha + + - name: Build and push Docker image + uses: docker/build-push-action@v6 + with: + context: . 
+ push: true + tags: ${{ steps.meta.outputs.tags }} + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/.gitignore b/.gitignore index 5078fa8..a200c74 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ node_modules /data /library /thumbnails +/uploads # OS .DS_Store diff --git a/docker-compose.yaml b/docker-compose.yaml index 2015066..3544f14 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -9,6 +9,7 @@ services: volumes: - ./data/library:/app/data/library - ./data/thumbnails:/app/data/thumbnails + - ./data/uploads:/app/data/uploads environment: # ArkVault - DATABASE_HOST=database @@ -20,6 +21,7 @@ services: - SESSION_UPGRADE_CHALLENGE_EXPIRES - LIBRARY_PATH=/app/data/library - THUMBNAILS_PATH=/app/data/thumbnails + - UPLOADS_PATH=/app/data/uploads # SvelteKit - ADDRESS_HEADER=${TRUST_PROXY:+X-Forwarded-For} - XFF_DEPTH=${TRUST_PROXY:-} diff --git a/package.json b/package.json index c16b700..02ed42d 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "arkvault", "private": true, - "version": "0.7.0", + "version": "0.8.0", "type": "module", "scripts": { "dev": "vite dev", @@ -16,13 +16,14 @@ "db:migrate": "kysely migrate" }, "devDependencies": { - "@eslint/compat": "^2.0.0", + "@eslint/compat": "^2.0.1", "@eslint/js": "^9.39.2", - "@iconify-json/material-symbols": "^1.2.50", + "@iconify-json/material-symbols": "^1.2.51", + "@noble/hashes": "^2.0.1", "@sveltejs/adapter-node": "^5.4.0", - "@sveltejs/kit": "^2.49.2", - "@sveltejs/vite-plugin-svelte": "^6.2.1", - "@tanstack/svelte-virtual": "^3.13.16", + "@sveltejs/kit": "^2.49.4", + "@sveltejs/vite-plugin-svelte": "^6.2.4", + "@tanstack/svelte-virtual": "^3.13.18", "@trpc/client": "^11.8.1", "@types/file-saver": "^2.0.7", "@types/ms": "^0.7.34", @@ -33,11 +34,11 @@ "dexie": "^4.2.1", "eslint": "^9.39.2", "eslint-config-prettier": "^10.1.8", - "eslint-plugin-svelte": "^3.13.1", + "eslint-plugin-svelte": "^3.14.0", "eslint-plugin-tailwindcss": "^3.18.2", - 
"exifreader": "^4.33.1", + "exifreader": "^4.35.0", "file-saver": "^2.0.5", - "globals": "^16.5.0", + "globals": "^17.0.0", "heic2any": "^0.0.4", "kysely-ctl": "^0.19.0", "lru-cache": "^11.2.4", @@ -50,12 +51,11 @@ "svelte-check": "^4.3.5", "tailwindcss": "^3.4.19", "typescript": "^5.9.3", - "typescript-eslint": "^8.51.0", + "typescript-eslint": "^8.52.0", "unplugin-icons": "^22.5.0", - "vite": "^7.3.0" + "vite": "^7.3.1" }, "dependencies": { - "@fastify/busboy": "^3.2.0", "@trpc/server": "^11.8.1", "argon2": "^0.44.0", "kysely": "^0.28.9", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e4e336f..ac05c99 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,9 +8,6 @@ importers: .: dependencies: - '@fastify/busboy': - specifier: ^3.2.0 - version: 3.2.0 '@trpc/server': specifier: ^11.8.1 version: 11.8.1(typescript@5.9.3) @@ -40,26 +37,29 @@ importers: version: 4.3.5 devDependencies: '@eslint/compat': - specifier: ^2.0.0 - version: 2.0.0(eslint@9.39.2(jiti@1.21.7)) + specifier: ^2.0.1 + version: 2.0.1(eslint@9.39.2(jiti@1.21.7)) '@eslint/js': specifier: ^9.39.2 version: 9.39.2 '@iconify-json/material-symbols': - specifier: ^1.2.50 - version: 1.2.50 + specifier: ^1.2.51 + version: 1.2.51 + '@noble/hashes': + specifier: ^2.0.1 + version: 2.0.1 '@sveltejs/adapter-node': specifier: ^5.4.0 - version: 5.4.0(@sveltejs/kit@2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))) + version: 5.4.0(@sveltejs/kit@2.49.4(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0))) '@sveltejs/kit': - specifier: ^2.49.2 - version: 2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) 
+ specifier: ^2.49.4 + version: 2.49.4(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) '@sveltejs/vite-plugin-svelte': - specifier: ^6.2.1 - version: 6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) + specifier: ^6.2.4 + version: 6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) '@tanstack/svelte-virtual': - specifier: ^3.13.16 - version: 3.13.16(svelte@5.46.1) + specifier: ^3.13.18 + version: 3.13.18(svelte@5.46.1) '@trpc/client': specifier: ^11.8.1 version: 11.8.1(@trpc/server@11.8.1(typescript@5.9.3))(typescript@5.9.3) @@ -91,20 +91,20 @@ importers: specifier: ^10.1.8 version: 10.1.8(eslint@9.39.2(jiti@1.21.7)) eslint-plugin-svelte: - specifier: ^3.13.1 - version: 3.13.1(eslint@9.39.2(jiti@1.21.7))(svelte@5.46.1) + specifier: ^3.14.0 + version: 3.14.0(eslint@9.39.2(jiti@1.21.7))(svelte@5.46.1) eslint-plugin-tailwindcss: specifier: ^3.18.2 version: 3.18.2(tailwindcss@3.4.19(yaml@2.8.0)) exifreader: - specifier: ^4.33.1 - version: 4.33.1 + specifier: ^4.35.0 + version: 4.35.0 file-saver: specifier: ^2.0.5 version: 2.0.5 globals: - specifier: ^16.5.0 - version: 16.5.0 + specifier: ^17.0.0 + version: 17.0.0 heic2any: specifier: ^0.0.4 version: 0.0.4 @@ -142,14 +142,14 @@ importers: specifier: ^5.9.3 version: 5.9.3 typescript-eslint: - specifier: ^8.51.0 - version: 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + specifier: ^8.52.0 + version: 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) unplugin-icons: specifier: ^22.5.0 version: 22.5.0(svelte@5.46.1) vite: - specifier: ^7.3.0 - version: 7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0) + specifier: ^7.3.1 + version: 7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0) packages: @@ -329,8 +329,8 @@ packages: resolution: {integrity: 
sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} - '@eslint/compat@2.0.0': - resolution: {integrity: sha512-T9AfE1G1uv4wwq94ozgTGio5EUQBqAVe1X9qsQtSNVEYW6j3hvtZVm8Smr4qL1qDPFg+lOB2cL5RxTRMzq4CTA==} + '@eslint/compat@2.0.1': + resolution: {integrity: sha512-yl/JsgplclzuvGFNqwNYV4XNPhP3l62ZOP9w/47atNAdmDtIFCx6X7CSk/SlWUuBGkT4Et/5+UD+WyvX2iiIWA==} engines: {node: ^20.19.0 || ^22.13.0 || >=24} peerDependencies: eslint: ^8.40 || 9 @@ -350,8 +350,8 @@ packages: resolution: {integrity: sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/core@1.0.0': - resolution: {integrity: sha512-PRfWP+8FOldvbApr6xL7mNCw4cJcSTq4GA7tYbgq15mRb0kWKO/wEB2jr+uwjFH3sZvEZneZyCUGTxsv4Sahyw==} + '@eslint/core@1.0.1': + resolution: {integrity: sha512-r18fEAj9uCk+VjzGt2thsbOmychS+4kxI14spVNibUO2vqKX7obOG+ymZljAwuPZl+S3clPGwCwTDtrdqTiY6Q==} engines: {node: ^20.19.0 || ^22.13.0 || >=24} '@eslint/eslintrc@3.3.3': @@ -370,9 +370,6 @@ packages: resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@fastify/busboy@3.2.0': - resolution: {integrity: sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA==} - '@humanfs/core@0.19.1': resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} engines: {node: '>=18.18.0'} @@ -389,8 +386,8 @@ packages: resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} engines: {node: '>=18.18'} - '@iconify-json/material-symbols@1.2.50': - resolution: {integrity: sha512-71tjHR70h46LHtBFab3fAd2V/wPTO7JMV5lKnRn3IcF303LaFgAlO0BZeTJDcmCv9d0snRZmnoLZAJVD7/eisw==} + 
'@iconify-json/material-symbols@1.2.51': + resolution: {integrity: sha512-GkxlK8ocHi3NVVozaW62jm3qR9fNY3xX2penFtIRvoe1OtNhJ2KD4KRzv8x34pugMOAZYK8sALMcU30gDgCi1A==} '@iconify/types@2.0.0': resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==} @@ -414,6 +411,10 @@ packages: '@jridgewell/trace-mapping@0.3.31': resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + '@noble/hashes@2.0.1': + resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==} + engines: {node: '>= 20.19.0'} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -469,113 +470,128 @@ packages: rollup: optional: true - '@rollup/rollup-android-arm-eabi@4.54.0': - resolution: {integrity: sha512-OywsdRHrFvCdvsewAInDKCNyR3laPA2mc9bRYJ6LBp5IyvF3fvXbbNR0bSzHlZVFtn6E0xw2oZlyjg4rKCVcng==} + '@rollup/rollup-android-arm-eabi@4.55.1': + resolution: {integrity: sha512-9R0DM/ykwfGIlNu6+2U09ga0WXeZ9MRC2Ter8jnz8415VbuIykVuc6bhdrbORFZANDmTDvq26mJrEVTl8TdnDg==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.54.0': - resolution: {integrity: sha512-Skx39Uv+u7H224Af+bDgNinitlmHyQX1K/atIA32JP3JQw6hVODX5tkbi2zof/E69M1qH2UoN3Xdxgs90mmNYw==} + '@rollup/rollup-android-arm64@4.55.1': + resolution: {integrity: sha512-eFZCb1YUqhTysgW3sj/55du5cG57S7UTNtdMjCW7LwVcj3dTTcowCsC8p7uBdzKsZYa8J7IDE8lhMI+HX1vQvg==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.54.0': - resolution: {integrity: sha512-k43D4qta/+6Fq+nCDhhv9yP2HdeKeP56QrUUTW7E6PhZP1US6NDqpJj4MY0jBHlJivVJD5P8NxrjuobZBJTCRw==} + '@rollup/rollup-darwin-arm64@4.55.1': + resolution: {integrity: sha512-p3grE2PHcQm2e8PSGZdzIhCKbMCw/xi9XvMPErPhwO17vxtvCN5FEA2mSLgmKlCjHGMQTP6phuQTYWUnKewwGg==} cpu: [arm64] os: [darwin] - 
'@rollup/rollup-darwin-x64@4.54.0': - resolution: {integrity: sha512-cOo7biqwkpawslEfox5Vs8/qj83M/aZCSSNIWpVzfU2CYHa2G3P1UN5WF01RdTHSgCkri7XOlTdtk17BezlV3A==} + '@rollup/rollup-darwin-x64@4.55.1': + resolution: {integrity: sha512-rDUjG25C9qoTm+e02Esi+aqTKSBYwVTaoS1wxcN47/Luqef57Vgp96xNANwt5npq9GDxsH7kXxNkJVEsWEOEaQ==} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.54.0': - resolution: {integrity: sha512-miSvuFkmvFbgJ1BevMa4CPCFt5MPGw094knM64W9I0giUIMMmRYcGW/JWZDriaw/k1kOBtsWh1z6nIFV1vPNtA==} + '@rollup/rollup-freebsd-arm64@4.55.1': + resolution: {integrity: sha512-+JiU7Jbp5cdxekIgdte0jfcu5oqw4GCKr6i3PJTlXTCU5H5Fvtkpbs4XJHRmWNXF+hKmn4v7ogI5OQPaupJgOg==} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.54.0': - resolution: {integrity: sha512-KGXIs55+b/ZfZsq9aR026tmr/+7tq6VG6MsnrvF4H8VhwflTIuYh+LFUlIsRdQSgrgmtM3fVATzEAj4hBQlaqQ==} + '@rollup/rollup-freebsd-x64@4.55.1': + resolution: {integrity: sha512-V5xC1tOVWtLLmr3YUk2f6EJK4qksksOYiz/TCsFHu/R+woubcLWdC9nZQmwjOAbmExBIVKsm1/wKmEy4z4u4Bw==} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.54.0': - resolution: {integrity: sha512-EHMUcDwhtdRGlXZsGSIuXSYwD5kOT9NVnx9sqzYiwAc91wfYOE1g1djOEDseZJKKqtHAHGwnGPQu3kytmfaXLQ==} + '@rollup/rollup-linux-arm-gnueabihf@4.55.1': + resolution: {integrity: sha512-Rn3n+FUk2J5VWx+ywrG/HGPTD9jXNbicRtTM11e/uorplArnXZYsVifnPPqNNP5BsO3roI4n8332ukpY/zN7rQ==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm-musleabihf@4.54.0': - resolution: {integrity: sha512-+pBrqEjaakN2ySv5RVrj/qLytYhPKEUwk+e3SFU5jTLHIcAtqh2rLrd/OkbNuHJpsBgxsD8ccJt5ga/SeG0JmA==} + '@rollup/rollup-linux-arm-musleabihf@4.55.1': + resolution: {integrity: sha512-grPNWydeKtc1aEdrJDWk4opD7nFtQbMmV7769hiAaYyUKCT1faPRm2av8CX1YJsZ4TLAZcg9gTR1KvEzoLjXkg==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.54.0': - resolution: {integrity: sha512-NSqc7rE9wuUaRBsBp5ckQ5CVz5aIRKCwsoa6WMF7G01sX3/qHUw/z4pv+D+ahL1EIKy6Enpcnz1RY8pf7bjwng==} + '@rollup/rollup-linux-arm64-gnu@4.55.1': + resolution: 
{integrity: sha512-a59mwd1k6x8tXKcUxSyISiquLwB5pX+fJW9TkWU46lCqD/GRDe9uDN31jrMmVP3feI3mhAdvcCClhV8V5MhJFQ==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-arm64-musl@4.54.0': - resolution: {integrity: sha512-gr5vDbg3Bakga5kbdpqx81m2n9IX8M6gIMlQQIXiLTNeQW6CucvuInJ91EuCJ/JYvc+rcLLsDFcfAD1K7fMofg==} + '@rollup/rollup-linux-arm64-musl@4.55.1': + resolution: {integrity: sha512-puS1MEgWX5GsHSoiAsF0TYrpomdvkaXm0CofIMG5uVkP6IBV+ZO9xhC5YEN49nsgYo1DuuMquF9+7EDBVYu4uA==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-loong64-gnu@4.54.0': - resolution: {integrity: sha512-gsrtB1NA3ZYj2vq0Rzkylo9ylCtW/PhpLEivlgWe0bpgtX5+9j9EZa0wtZiCjgu6zmSeZWyI/e2YRX1URozpIw==} + '@rollup/rollup-linux-loong64-gnu@4.55.1': + resolution: {integrity: sha512-r3Wv40in+lTsULSb6nnoudVbARdOwb2u5fpeoOAZjFLznp6tDU8kd+GTHmJoqZ9lt6/Sys33KdIHUaQihFcu7g==} cpu: [loong64] os: [linux] - '@rollup/rollup-linux-ppc64-gnu@4.54.0': - resolution: {integrity: sha512-y3qNOfTBStmFNq+t4s7Tmc9hW2ENtPg8FeUD/VShI7rKxNW7O4fFeaYbMsd3tpFlIg1Q8IapFgy7Q9i2BqeBvA==} + '@rollup/rollup-linux-loong64-musl@4.55.1': + resolution: {integrity: sha512-MR8c0+UxAlB22Fq4R+aQSPBayvYa3+9DrwG/i1TKQXFYEaoW3B5b/rkSRIypcZDdWjWnpcvxbNaAJDcSbJU3Lw==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-ppc64-gnu@4.55.1': + resolution: {integrity: sha512-3KhoECe1BRlSYpMTeVrD4sh2Pw2xgt4jzNSZIIPLFEsnQn9gAnZagW9+VqDqAHgm1Xc77LzJOo2LdigS5qZ+gw==} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.54.0': - resolution: {integrity: sha512-89sepv7h2lIVPsFma8iwmccN7Yjjtgz0Rj/Ou6fEqg3HDhpCa+Et+YSufy27i6b0Wav69Qv4WBNl3Rs6pwhebQ==} + '@rollup/rollup-linux-ppc64-musl@4.55.1': + resolution: {integrity: sha512-ziR1OuZx0vdYZZ30vueNZTg73alF59DicYrPViG0NEgDVN8/Jl87zkAPu4u6VjZST2llgEUjaiNl9JM6HH1Vdw==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.55.1': + resolution: {integrity: sha512-uW0Y12ih2XJRERZ4jAfKamTyIHVMPQnTZcQjme2HMVDAHY4amf5u414OqNYC+x+LzRdRcnIG1YodLrrtA8xsxw==} cpu: [riscv64] os: [linux] - 
'@rollup/rollup-linux-riscv64-musl@4.54.0': - resolution: {integrity: sha512-ZcU77ieh0M2Q8Ur7D5X7KvK+UxbXeDHwiOt/CPSBTI1fBmeDMivW0dPkdqkT4rOgDjrDDBUed9x4EgraIKoR2A==} + '@rollup/rollup-linux-riscv64-musl@4.55.1': + resolution: {integrity: sha512-u9yZ0jUkOED1BFrqu3BwMQoixvGHGZ+JhJNkNKY/hyoEgOwlqKb62qu+7UjbPSHYjiVy8kKJHvXKv5coH4wDeg==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-s390x-gnu@4.54.0': - resolution: {integrity: sha512-2AdWy5RdDF5+4YfG/YesGDDtbyJlC9LHmL6rZw6FurBJ5n4vFGupsOBGfwMRjBYH7qRQowT8D/U4LoSvVwOhSQ==} + '@rollup/rollup-linux-s390x-gnu@4.55.1': + resolution: {integrity: sha512-/0PenBCmqM4ZUd0190j7J0UsQ/1nsi735iPRakO8iPciE7BQ495Y6msPzaOmvx0/pn+eJVVlZrNrSh4WSYLxNg==} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.54.0': - resolution: {integrity: sha512-WGt5J8Ij/rvyqpFexxk3ffKqqbLf9AqrTBbWDk7ApGUzaIs6V+s2s84kAxklFwmMF/vBNGrVdYgbblCOFFezMQ==} + '@rollup/rollup-linux-x64-gnu@4.55.1': + resolution: {integrity: sha512-a8G4wiQxQG2BAvo+gU6XrReRRqj+pLS2NGXKm8io19goR+K8lw269eTrPkSdDTALwMmJp4th2Uh0D8J9bEV1vg==} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.54.0': - resolution: {integrity: sha512-JzQmb38ATzHjxlPHuTH6tE7ojnMKM2kYNzt44LO/jJi8BpceEC8QuXYA908n8r3CNuG/B3BV8VR3Hi1rYtmPiw==} + '@rollup/rollup-linux-x64-musl@4.55.1': + resolution: {integrity: sha512-bD+zjpFrMpP/hqkfEcnjXWHMw5BIghGisOKPj+2NaNDuVT+8Ds4mPf3XcPHuat1tz89WRL+1wbcxKY3WSbiT7w==} cpu: [x64] os: [linux] - '@rollup/rollup-openharmony-arm64@4.54.0': - resolution: {integrity: sha512-huT3fd0iC7jigGh7n3q/+lfPcXxBi+om/Rs3yiFxjvSxbSB6aohDFXbWvlspaqjeOh+hx7DDHS+5Es5qRkWkZg==} + '@rollup/rollup-openbsd-x64@4.55.1': + resolution: {integrity: sha512-eLXw0dOiqE4QmvikfQ6yjgkg/xDM+MdU9YJuP4ySTibXU0oAvnEWXt7UDJmD4UkYialMfOGFPJnIHSe/kdzPxg==} + cpu: [x64] + os: [openbsd] + + '@rollup/rollup-openharmony-arm64@4.55.1': + resolution: {integrity: sha512-xzm44KgEP11te3S2HCSyYf5zIzWmx3n8HDCc7EE59+lTcswEWNpvMLfd9uJvVX8LCg9QWG67Xt75AuHn4vgsXw==} cpu: [arm64] os: [openharmony] - 
'@rollup/rollup-win32-arm64-msvc@4.54.0': - resolution: {integrity: sha512-c2V0W1bsKIKfbLMBu/WGBz6Yci8nJ/ZJdheE0EwB73N3MvHYKiKGs3mVilX4Gs70eGeDaMqEob25Tw2Gb9Nqyw==} + '@rollup/rollup-win32-arm64-msvc@4.55.1': + resolution: {integrity: sha512-yR6Bl3tMC/gBok5cz/Qi0xYnVbIxGx5Fcf/ca0eB6/6JwOY+SRUcJfI0OpeTpPls7f194as62thCt/2BjxYN8g==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.54.0': - resolution: {integrity: sha512-woEHgqQqDCkAzrDhvDipnSirm5vxUXtSKDYTVpZG3nUdW/VVB5VdCYA2iReSj/u3yCZzXID4kuKG7OynPnB3WQ==} + '@rollup/rollup-win32-ia32-msvc@4.55.1': + resolution: {integrity: sha512-3fZBidchE0eY0oFZBnekYCfg+5wAB0mbpCBuofh5mZuzIU/4jIVkbESmd2dOsFNS78b53CYv3OAtwqkZZmU5nA==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-gnu@4.54.0': - resolution: {integrity: sha512-dzAc53LOuFvHwbCEOS0rPbXp6SIhAf2txMP5p6mGyOXXw5mWY8NGGbPMPrs4P1WItkfApDathBj/NzMLUZ9rtQ==} + '@rollup/rollup-win32-x64-gnu@4.55.1': + resolution: {integrity: sha512-xGGY5pXj69IxKb4yv/POoocPy/qmEGhimy/FoTpTSVju3FYXUQQMFCaZZXJVidsmGxRioZAwpThl/4zX41gRKg==} cpu: [x64] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.54.0': - resolution: {integrity: sha512-hYT5d3YNdSh3mbCU1gwQyPgQd3T2ne0A3KG8KSBdav5TiBg6eInVmV+TeR5uHufiIgSFg0XsOWGW5/RhNcSvPg==} + '@rollup/rollup-win32-x64-msvc@4.55.1': + resolution: {integrity: sha512-SPEpaL6DX4rmcXtnhdrQYgzQ5W2uW3SCJch88lB2zImhJRhIIK44fkUrgIV/Q8yUNfw5oyZ5vkeQsZLhCb06lw==} cpu: [x64] os: [win32] @@ -592,41 +608,44 @@ packages: peerDependencies: '@sveltejs/kit': ^2.4.0 - '@sveltejs/kit@2.49.2': - resolution: {integrity: sha512-Vp3zX/qlwerQmHMP6x0Ry1oY7eKKRcOWGc2P59srOp4zcqyn+etJyQpELgOi4+ZSUgteX8Y387NuwruLgGXLUQ==} + '@sveltejs/kit@2.49.4': + resolution: {integrity: sha512-JFtOqDoU0DI/+QSG8qnq5bKcehVb3tCHhOG4amsSYth5/KgO4EkJvi42xSAiyKmXAAULW1/Zdb6lkgGEgSxdZg==} engines: {node: '>=18.13'} hasBin: true peerDependencies: '@opentelemetry/api': ^1.0.0 '@sveltejs/vite-plugin-svelte': ^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0 svelte: ^4.0.0 || ^5.0.0-next.0 + 
typescript: ^5.3.3 vite: ^5.0.3 || ^6.0.0 || ^7.0.0-beta.0 peerDependenciesMeta: '@opentelemetry/api': optional: true + typescript: + optional: true - '@sveltejs/vite-plugin-svelte-inspector@5.0.1': - resolution: {integrity: sha512-ubWshlMk4bc8mkwWbg6vNvCeT7lGQojE3ijDh3QTR6Zr/R+GXxsGbyH4PExEPpiFmqPhYiVSVmHBjUcVc1JIrA==} + '@sveltejs/vite-plugin-svelte-inspector@5.0.2': + resolution: {integrity: sha512-TZzRTcEtZffICSAoZGkPSl6Etsj2torOVrx6Uw0KpXxrec9Gg6jFWQ60Q3+LmNGfZSxHRCZL7vXVZIWmuV50Ig==} engines: {node: ^20.19 || ^22.12 || >=24} peerDependencies: '@sveltejs/vite-plugin-svelte': ^6.0.0-next.0 svelte: ^5.0.0 vite: ^6.3.0 || ^7.0.0 - '@sveltejs/vite-plugin-svelte@6.2.1': - resolution: {integrity: sha512-YZs/OSKOQAQCnJvM/P+F1URotNnYNeU3P2s4oIpzm1uFaqUEqRxUB0g5ejMjEb5Gjb9/PiBI5Ktrq4rUUF8UVQ==} + '@sveltejs/vite-plugin-svelte@6.2.4': + resolution: {integrity: sha512-ou/d51QSdTyN26D7h6dSpusAKaZkAiGM55/AKYi+9AGZw7q85hElbjK3kEyzXHhLSnRISHOYzVge6x0jRZ7DXA==} engines: {node: ^20.19 || ^22.12 || >=24} peerDependencies: svelte: ^5.0.0 vite: ^6.3.0 || ^7.0.0 - '@tanstack/svelte-virtual@3.13.16': - resolution: {integrity: sha512-LRDPRzAPTIiDjiCA9lhNlFnZRLj/XsNhzNRsT5JEA8hzcBmZw8avdYYVjydPAy0ObFJgG1zBAm9Dtvwqju36sg==} + '@tanstack/svelte-virtual@3.13.18': + resolution: {integrity: sha512-BHh8WkFK58eE9KzLctPQkCkvCj46LnM9tIGkpwo5Unx5YaBPf0uBJBqvSdc2jMwdT8gLXLHFHtCnSujlZP69BA==} peerDependencies: svelte: ^3.48.0 || ^4.0.0 || ^5.0.0 - '@tanstack/virtual-core@3.13.16': - resolution: {integrity: sha512-njazUC8mDkrxWmyZmn/3eXrDcP8Msb3chSr4q6a65RmwdSbMlMCdnOphv6/8mLO7O3Fuza5s4M4DclmvAO5w0w==} + '@tanstack/virtual-core@3.13.18': + resolution: {integrity: sha512-Mx86Hqu1k39icq2Zusq+Ey2J6dDWTjDvEv43PJtRCoEYTLyfaPnxIQ6iy7YAOK0NV/qOEmZQ/uCufrppZxTgcg==} '@trpc/client@11.8.1': resolution: {integrity: sha512-L/SJFGanr9xGABmuDoeXR4xAdHJmsXsiF9OuH+apecJ+8sUITzVT1EPeqp0ebqA6lBhEl5pPfg3rngVhi/h60Q==} @@ -657,8 +676,8 @@ packages: '@types/node-schedule@2.1.8': resolution: {integrity: 
sha512-k00g6Yj/oUg/CDC+MeLHUzu0+OFxWbIqrFfDiLi6OPKxTujvpv29mHGM8GtKr7B+9Vv92FcK/8mRqi1DK5f3hA==} - '@types/node@25.0.3': - resolution: {integrity: sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==} + '@types/node@25.0.6': + resolution: {integrity: sha512-NNu0sjyNxpoiW3YuVFfNz7mxSQ+S4X2G28uqg2s+CzoqoQjLPsWSbsFFyztIAqt2vb8kfEAsJNepMGPTxFDx3Q==} '@types/pg@8.16.0': resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} @@ -666,63 +685,63 @@ packages: '@types/resolve@1.20.2': resolution: {integrity: sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==} - '@typescript-eslint/eslint-plugin@8.51.0': - resolution: {integrity: sha512-XtssGWJvypyM2ytBnSnKtHYOGT+4ZwTnBVl36TA4nRO2f4PRNGz5/1OszHzcZCvcBMh+qb7I06uoCmLTRdR9og==} + '@typescript-eslint/eslint-plugin@8.52.0': + resolution: {integrity: sha512-okqtOgqu2qmZJ5iN4TWlgfF171dZmx2FzdOv2K/ixL2LZWDStL8+JgQerI2sa8eAEfoydG9+0V96m7V+P8yE1Q==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.51.0 + '@typescript-eslint/parser': ^8.52.0 eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/parser@8.51.0': - resolution: {integrity: sha512-3xP4XzzDNQOIqBMWogftkwxhg5oMKApqY0BAflmLZiFYHqyhSOxv/cd/zPQLTcCXr4AkaKb25joocY0BD1WC6A==} + '@typescript-eslint/parser@8.52.0': + resolution: {integrity: sha512-iIACsx8pxRnguSYhHiMn2PvhvfpopO9FXHyn1mG5txZIsAaB6F0KwbFnUQN3KCiG3Jcuad/Cao2FAs1Wp7vAyg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.51.0': - resolution: {integrity: sha512-Luv/GafO07Z7HpiI7qeEW5NW8HUtZI/fo/kE0YbtQEFpJRUuR0ajcWfCE5bnMvL7QQFrmT/odMe8QZww8X2nfQ==} + '@typescript-eslint/project-service@8.52.0': + resolution: {integrity: 
sha512-xD0MfdSdEmeFa3OmVqonHi+Cciab96ls1UhIF/qX/O/gPu5KXD0bY9lu33jj04fjzrXHcuvjBcBC+D3SNSadaw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/scope-manager@8.51.0': - resolution: {integrity: sha512-JhhJDVwsSx4hiOEQPeajGhCWgBMBwVkxC/Pet53EpBVs7zHHtayKefw1jtPaNRXpI9RA2uocdmpdfE7T+NrizA==} + '@typescript-eslint/scope-manager@8.52.0': + resolution: {integrity: sha512-ixxqmmCcc1Nf8S0mS0TkJ/3LKcC8mruYJPOU6Ia2F/zUUR4pApW7LzrpU3JmtePbRUTes9bEqRc1Gg4iyRnDzA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/tsconfig-utils@8.51.0': - resolution: {integrity: sha512-Qi5bSy/vuHeWyir2C8u/uqGMIlIDu8fuiYWv48ZGlZ/k+PRPHtaAu7erpc7p5bzw2WNNSniuxoMSO4Ar6V9OXw==} + '@typescript-eslint/tsconfig-utils@8.52.0': + resolution: {integrity: sha512-jl+8fzr/SdzdxWJznq5nvoI7qn2tNYV/ZBAEcaFMVXf+K6jmXvAFrgo/+5rxgnL152f//pDEAYAhhBAZGrVfwg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.51.0': - resolution: {integrity: sha512-0XVtYzxnobc9K0VU7wRWg1yiUrw4oQzexCG2V2IDxxCxhqBMSMbjB+6o91A+Uc0GWtgjCa3Y8bi7hwI0Tu4n5Q==} + '@typescript-eslint/type-utils@8.52.0': + resolution: {integrity: sha512-JD3wKBRWglYRQkAtsyGz1AewDu3mTc7NtRjR/ceTyGoPqmdS5oCdx/oZMWD5Zuqmo6/MpsYs0wp6axNt88/2EQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/types@8.51.0': - resolution: {integrity: sha512-TizAvWYFM6sSscmEakjY3sPqGwxZRSywSsPEiuZF6d5GmGD9Gvlsv0f6N8FvAAA0CD06l3rIcWNbsN1e5F/9Ag==} + '@typescript-eslint/types@8.52.0': + resolution: {integrity: sha512-LWQV1V4q9V4cT4H5JCIx3481iIFxH1UkVk+ZkGGAV1ZGcjGI9IoFOfg3O6ywz8QqCDEp7Inlg6kovMofsNRaGg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/typescript-estree@8.51.0': - resolution: {integrity: 
sha512-1qNjGqFRmlq0VW5iVlcyHBbCjPB7y6SxpBkrbhNWMy/65ZoncXCEPJxkRZL8McrseNH6lFhaxCIaX+vBuFnRng==} + '@typescript-eslint/typescript-estree@8.52.0': + resolution: {integrity: sha512-XP3LClsCc0FsTK5/frGjolyADTh3QmsLp6nKd476xNI9CsSsLnmn4f0jrzNoAulmxlmNIpeXuHYeEQv61Q6qeQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/utils@8.51.0': - resolution: {integrity: sha512-11rZYxSe0zabiKaCP2QAwRf/dnmgFgvTmeDTtZvUvXG3UuAdg/GU02NExmmIXzz3vLGgMdtrIosI84jITQOxUA==} + '@typescript-eslint/utils@8.52.0': + resolution: {integrity: sha512-wYndVMWkweqHpEpwPhwqE2lnD2DxC6WVLupU/DOt/0/v+/+iQbbzO3jOHjmBMnhu0DgLULvOaU4h4pwHYi2oRQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/visitor-keys@8.51.0': - resolution: {integrity: sha512-mM/JRQOzhVN1ykejrvwnBRV3+7yTKK8tVANVN3o1O0t0v7o+jqdVu9crPy5Y9dov15TJk/FTIgoUGHrTOVL3Zg==} + '@typescript-eslint/visitor-keys@8.52.0': + resolution: {integrity: sha512-ink3/Zofus34nmBsPjow63FP5M7IGff0RKAgqR6+CFpdk22M7aLwC9gOcLGYqr7MczLPzZVERW9hRog3O4n1sQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@xmldom/xmldom@0.9.8': @@ -787,8 +806,8 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - baseline-browser-mapping@2.9.11: - resolution: {integrity: sha512-Sg0xJUNDU1sJNGdfGWhVHX0kkZ+HWcvmVymJbj6NSgZZmW/8S9Y2HQ5euytnIgakgxN6papOAWiwDo1ctFDcoQ==} + baseline-browser-mapping@2.9.14: + resolution: {integrity: sha512-B0xUquLkiGLgHhpPBqvl7GWegWBUNuujQ6kXd/r1U38ElPT6Ok8KZ8e+FpUGEc2ZoRQUzq/aUnaKFc/svWUGSg==} hasBin: true binary-extensions@2.3.0: @@ -830,8 +849,8 @@ packages: resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==} engines: {node: '>= 6'} - caniuse-lite@1.0.30001762: - resolution: {integrity: 
sha512-PxZwGNvH7Ak8WX5iXzoK1KPZttBXNPuaOvI2ZYU7NrlM+d9Ov+TUvlLOBNGzVXAntMSMMlJPd+jY6ovrVjSmUw==} + caniuse-lite@1.0.30001764: + resolution: {integrity: sha512-9JGuzl2M+vPL+pz70gtMF9sHdMFbY9FJaQBi186cHKH3pSzDvzoUJUPV6fqiKIMyXbud9ZLg4F3Yza1vJ1+93g==} chalk@4.1.2: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} @@ -997,8 +1016,8 @@ packages: peerDependencies: eslint: '>=7.0.0' - eslint-plugin-svelte@3.13.1: - resolution: {integrity: sha512-Ng+kV/qGS8P/isbNYVE3sJORtubB+yLEcYICMkUWNaDTb0SwZni/JhAYXh/Dz/q2eThUwWY0VMPZ//KYD1n3eQ==} + eslint-plugin-svelte@3.14.0: + resolution: {integrity: sha512-Isw0GvaMm0yHxAj71edAdGFh28ufYs+6rk2KlbbZphnqZAzrH3Se3t12IFh2H9+1F/jlDhBBL4oiOJmLqmYX0g==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.1 || ^9.0.0 @@ -1064,8 +1083,8 @@ packages: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: '>=0.10.0'} - exifreader@4.33.1: - resolution: {integrity: sha512-KsVc4bRfZW255PSst5Opt5jUeLp+SD2+q6fmXQkMMkphpFCDBFjzNAvswgQa1YcMrXq+9Na6HJ6gS3wo2x7RRw==} + exifreader@4.35.0: + resolution: {integrity: sha512-qiMONyOObmwI6sIXy13vRGqlcoi9VUKr70iGI1aefP+xJsbcXp+hcyL/4J6hov/yG9UhS7Hq1OQ1hAoSEZl+RA==} exsolve@1.0.8: resolution: {integrity: sha512-LmDxfWXwcTArk8fUEnOfSZpHOJ6zOMUJKOtFLFqJLoKJetuQG874Uc7/Kki7zFLzYybmZhp1M7+98pfMqeX8yA==} @@ -1169,6 +1188,10 @@ packages: resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==} engines: {node: '>=18'} + globals@17.0.0: + resolution: {integrity: sha512-gv5BeD2EssA793rlFWVPMMCqefTlpusw6/2TbAVMy0FzcG8wKJn4O+NqJ4+XWmmwrayJgw5TzrmWjFgmz1XPqw==} + engines: {node: '>=18'} + gopd@1.2.0: resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} engines: {node: '>= 0.4'} @@ -1431,6 +1454,9 @@ packages: 
resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==} engines: {node: '>= 6'} + obug@2.1.1: + resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + ofetch@1.5.1: resolution: {integrity: sha512-2W4oUZlVaqAPAil6FUg/difl6YhqhUR7x2eZY4bQCko22UXg3hptq9KLQdqFClV+Wu85UX7hNtdGTngi/1BxcA==} @@ -1741,8 +1767,8 @@ packages: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - rollup@4.54.0: - resolution: {integrity: sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==} + rollup@4.55.1: + resolution: {integrity: sha512-wDv/Ht1BNHB4upNbK74s9usvl7hObDnvVzknxqY/E/O3X6rW1U1rV1aENEfJ54eFZDTNo7zv1f5N4edCluH7+A==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -1880,8 +1906,8 @@ packages: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} - typescript-eslint@8.51.0: - resolution: {integrity: sha512-jh8ZuM5oEh2PSdyQG9YAEM1TCGuWenLSuSUhf/irbVUNW9O5FhbFVONviN2TgMTBnUmyHv7E56rYnfLZK6TkiA==} + typescript-eslint@8.52.0: + resolution: {integrity: sha512-atlQQJ2YkO4pfTVQmQ+wvYQwexPDOIgo+RaVcD7gHgzy/IQA+XTyuxNM9M9TVXvttkF7koBHmcwisKdOAf2EcA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -1892,8 +1918,8 @@ packages: engines: {node: '>=14.17'} hasBin: true - ufo@1.6.1: - resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==} + ufo@1.6.2: + resolution: {integrity: sha512-heMioaxBcG9+Znsda5Q8sQbWnLJSl98AFDXTO80wELWEzX3hordXsTdxrIfMQoO9IY1MEnoGoPjpoKpMj+Yx0Q==} undici-types@7.16.0: resolution: {integrity: 
sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==} @@ -1941,8 +1967,8 @@ packages: resolution: {integrity: sha512-XQegIaBTVUjSHliKqcnFqYypAd4S+WCYt5NIeRs6w/UAry7z8Y9j5ZwRRL4kzq9U3sD6v+85er9FvkEaBpji2w==} hasBin: true - vite@7.3.0: - resolution: {integrity: sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==} + vite@7.3.1: + resolution: {integrity: sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: @@ -2124,9 +2150,9 @@ snapshots: '@eslint-community/regexpp@4.12.2': {} - '@eslint/compat@2.0.0(eslint@9.39.2(jiti@1.21.7))': + '@eslint/compat@2.0.1(eslint@9.39.2(jiti@1.21.7))': dependencies: - '@eslint/core': 1.0.0 + '@eslint/core': 1.0.1 optionalDependencies: eslint: 9.39.2(jiti@1.21.7) @@ -2146,7 +2172,7 @@ snapshots: dependencies: '@types/json-schema': 7.0.15 - '@eslint/core@1.0.0': + '@eslint/core@1.0.1': dependencies: '@types/json-schema': 7.0.15 @@ -2173,8 +2199,6 @@ snapshots: '@eslint/core': 0.17.0 levn: 0.4.1 - '@fastify/busboy@3.2.0': {} - '@humanfs/core@0.19.1': {} '@humanfs/node@0.16.7': @@ -2186,7 +2210,7 @@ snapshots: '@humanwhocodes/retry@0.4.3': {} - '@iconify-json/material-symbols@1.2.50': + '@iconify-json/material-symbols@1.2.51': dependencies: '@iconify/types': 2.0.0 @@ -2217,6 +2241,8 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@noble/hashes@2.0.1': {} + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 @@ -2233,9 +2259,9 @@ snapshots: '@polka/url@1.0.0-next.29': {} - '@rollup/plugin-commonjs@28.0.9(rollup@4.54.0)': + '@rollup/plugin-commonjs@28.0.9(rollup@4.55.1)': dependencies: - '@rollup/pluginutils': 5.3.0(rollup@4.54.0) + '@rollup/pluginutils': 5.3.0(rollup@4.55.1) commondir: 1.0.1 estree-walker: 2.0.2 fdir: 6.5.0(picomatch@4.0.3) @@ -2243,96 +2269,105 @@ snapshots: magic-string: 
0.30.21 picomatch: 4.0.3 optionalDependencies: - rollup: 4.54.0 + rollup: 4.55.1 - '@rollup/plugin-json@6.1.0(rollup@4.54.0)': + '@rollup/plugin-json@6.1.0(rollup@4.55.1)': dependencies: - '@rollup/pluginutils': 5.3.0(rollup@4.54.0) + '@rollup/pluginutils': 5.3.0(rollup@4.55.1) optionalDependencies: - rollup: 4.54.0 + rollup: 4.55.1 - '@rollup/plugin-node-resolve@16.0.3(rollup@4.54.0)': + '@rollup/plugin-node-resolve@16.0.3(rollup@4.55.1)': dependencies: - '@rollup/pluginutils': 5.3.0(rollup@4.54.0) + '@rollup/pluginutils': 5.3.0(rollup@4.55.1) '@types/resolve': 1.20.2 deepmerge: 4.3.1 is-module: 1.0.0 resolve: 1.22.11 optionalDependencies: - rollup: 4.54.0 + rollup: 4.55.1 - '@rollup/pluginutils@5.3.0(rollup@4.54.0)': + '@rollup/pluginutils@5.3.0(rollup@4.55.1)': dependencies: '@types/estree': 1.0.8 estree-walker: 2.0.2 picomatch: 4.0.3 optionalDependencies: - rollup: 4.54.0 + rollup: 4.55.1 - '@rollup/rollup-android-arm-eabi@4.54.0': + '@rollup/rollup-android-arm-eabi@4.55.1': optional: true - '@rollup/rollup-android-arm64@4.54.0': + '@rollup/rollup-android-arm64@4.55.1': optional: true - '@rollup/rollup-darwin-arm64@4.54.0': + '@rollup/rollup-darwin-arm64@4.55.1': optional: true - '@rollup/rollup-darwin-x64@4.54.0': + '@rollup/rollup-darwin-x64@4.55.1': optional: true - '@rollup/rollup-freebsd-arm64@4.54.0': + '@rollup/rollup-freebsd-arm64@4.55.1': optional: true - '@rollup/rollup-freebsd-x64@4.54.0': + '@rollup/rollup-freebsd-x64@4.55.1': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.54.0': + '@rollup/rollup-linux-arm-gnueabihf@4.55.1': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.54.0': + '@rollup/rollup-linux-arm-musleabihf@4.55.1': optional: true - '@rollup/rollup-linux-arm64-gnu@4.54.0': + '@rollup/rollup-linux-arm64-gnu@4.55.1': optional: true - '@rollup/rollup-linux-arm64-musl@4.54.0': + '@rollup/rollup-linux-arm64-musl@4.55.1': optional: true - '@rollup/rollup-linux-loong64-gnu@4.54.0': + '@rollup/rollup-linux-loong64-gnu@4.55.1': 
optional: true - '@rollup/rollup-linux-ppc64-gnu@4.54.0': + '@rollup/rollup-linux-loong64-musl@4.55.1': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.54.0': + '@rollup/rollup-linux-ppc64-gnu@4.55.1': optional: true - '@rollup/rollup-linux-riscv64-musl@4.54.0': + '@rollup/rollup-linux-ppc64-musl@4.55.1': optional: true - '@rollup/rollup-linux-s390x-gnu@4.54.0': + '@rollup/rollup-linux-riscv64-gnu@4.55.1': optional: true - '@rollup/rollup-linux-x64-gnu@4.54.0': + '@rollup/rollup-linux-riscv64-musl@4.55.1': optional: true - '@rollup/rollup-linux-x64-musl@4.54.0': + '@rollup/rollup-linux-s390x-gnu@4.55.1': optional: true - '@rollup/rollup-openharmony-arm64@4.54.0': + '@rollup/rollup-linux-x64-gnu@4.55.1': optional: true - '@rollup/rollup-win32-arm64-msvc@4.54.0': + '@rollup/rollup-linux-x64-musl@4.55.1': optional: true - '@rollup/rollup-win32-ia32-msvc@4.54.0': + '@rollup/rollup-openbsd-x64@4.55.1': optional: true - '@rollup/rollup-win32-x64-gnu@4.54.0': + '@rollup/rollup-openharmony-arm64@4.55.1': optional: true - '@rollup/rollup-win32-x64-msvc@4.54.0': + '@rollup/rollup-win32-arm64-msvc@4.55.1': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.55.1': + optional: true + + '@rollup/rollup-win32-x64-gnu@4.55.1': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.55.1': optional: true '@standard-schema/spec@1.1.0': {} @@ -2341,19 +2376,19 @@ snapshots: dependencies: acorn: 8.15.0 - '@sveltejs/adapter-node@5.4.0(@sveltejs/kit@2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))': + '@sveltejs/adapter-node@5.4.0(@sveltejs/kit@2.49.4(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))': dependencies: - '@rollup/plugin-commonjs': 28.0.9(rollup@4.54.0) - '@rollup/plugin-json': 
6.1.0(rollup@4.54.0) - '@rollup/plugin-node-resolve': 16.0.3(rollup@4.54.0) - '@sveltejs/kit': 2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) - rollup: 4.54.0 + '@rollup/plugin-commonjs': 28.0.9(rollup@4.55.1) + '@rollup/plugin-json': 6.1.0(rollup@4.55.1) + '@rollup/plugin-node-resolve': 16.0.3(rollup@4.55.1) + '@sveltejs/kit': 2.49.4(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) + rollup: 4.55.1 - '@sveltejs/kit@2.49.2(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))': + '@sveltejs/kit@2.49.4(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0))': dependencies: '@standard-schema/spec': 1.1.0 '@sveltejs/acorn-typescript': 1.0.8(acorn@8.15.0) - '@sveltejs/vite-plugin-svelte': 6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) + '@sveltejs/vite-plugin-svelte': 6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) '@types/cookie': 0.6.0 acorn: 8.15.0 cookie: 0.6.0 @@ -2366,35 +2401,33 @@ snapshots: set-cookie-parser: 2.7.2 sirv: 3.0.2 svelte: 5.46.1 - vite: 7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0) + vite: 7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0) + optionalDependencies: + typescript: 5.9.3 - '@sveltejs/vite-plugin-svelte-inspector@5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))': + 
'@sveltejs/vite-plugin-svelte-inspector@5.0.2(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0))': dependencies: - '@sveltejs/vite-plugin-svelte': 6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) - debug: 4.4.3 + '@sveltejs/vite-plugin-svelte': 6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) + obug: 2.1.1 svelte: 5.46.1 - vite: 7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0) - transitivePeerDependencies: - - supports-color + vite: 7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0) - '@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))': + '@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0))': dependencies: - '@sveltejs/vite-plugin-svelte-inspector': 5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) - debug: 4.4.3 + '@sveltejs/vite-plugin-svelte-inspector': 5.0.2(@sveltejs/vite-plugin-svelte@6.2.4(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)))(svelte@5.46.1)(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) deepmerge: 4.3.1 magic-string: 0.30.21 + obug: 2.1.1 svelte: 5.46.1 - vite: 7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0) - vitefu: 1.1.1(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)) - transitivePeerDependencies: - - supports-color + vite: 7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0) + vitefu: 1.1.1(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)) - '@tanstack/svelte-virtual@3.13.16(svelte@5.46.1)': + '@tanstack/svelte-virtual@3.13.18(svelte@5.46.1)': dependencies: - '@tanstack/virtual-core': 3.13.16 + '@tanstack/virtual-core': 3.13.18 svelte: 5.46.1 - 
'@tanstack/virtual-core@3.13.16': {} + '@tanstack/virtual-core@3.13.18': {} '@trpc/client@11.8.1(@trpc/server@11.8.1(typescript@5.9.3))(typescript@5.9.3)': dependencies: @@ -2417,28 +2450,28 @@ snapshots: '@types/node-schedule@2.1.8': dependencies: - '@types/node': 25.0.3 + '@types/node': 25.0.6 - '@types/node@25.0.3': + '@types/node@25.0.6': dependencies: undici-types: 7.16.0 '@types/pg@8.16.0': dependencies: - '@types/node': 25.0.3 + '@types/node': 25.0.6 pg-protocol: 1.10.3 pg-types: 2.2.0 '@types/resolve@1.20.2': {} - '@typescript-eslint/eslint-plugin@8.51.0(@typescript-eslint/parser@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/eslint-plugin@8.52.0(@typescript-eslint/parser@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.2 - '@typescript-eslint/parser': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.51.0 - '@typescript-eslint/type-utils': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/utils': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.51.0 + '@typescript-eslint/parser': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.52.0 + '@typescript-eslint/type-utils': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/utils': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.52.0 eslint: 9.39.2(jiti@1.21.7) ignore: 7.0.5 natural-compare: 1.4.0 @@ -2447,41 +2480,41 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/parser@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@typescript-eslint/scope-manager': 8.51.0 - 
'@typescript-eslint/types': 8.51.0 - '@typescript-eslint/typescript-estree': 8.51.0(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.51.0 + '@typescript-eslint/scope-manager': 8.52.0 + '@typescript-eslint/types': 8.52.0 + '@typescript-eslint/typescript-estree': 8.52.0(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.52.0 debug: 4.4.3 eslint: 9.39.2(jiti@1.21.7) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/project-service@8.51.0(typescript@5.9.3)': + '@typescript-eslint/project-service@8.52.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.51.0(typescript@5.9.3) - '@typescript-eslint/types': 8.51.0 + '@typescript-eslint/tsconfig-utils': 8.52.0(typescript@5.9.3) + '@typescript-eslint/types': 8.52.0 debug: 4.4.3 typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/scope-manager@8.51.0': + '@typescript-eslint/scope-manager@8.52.0': dependencies: - '@typescript-eslint/types': 8.51.0 - '@typescript-eslint/visitor-keys': 8.51.0 + '@typescript-eslint/types': 8.52.0 + '@typescript-eslint/visitor-keys': 8.52.0 - '@typescript-eslint/tsconfig-utils@8.51.0(typescript@5.9.3)': + '@typescript-eslint/tsconfig-utils@8.52.0(typescript@5.9.3)': dependencies: typescript: 5.9.3 - '@typescript-eslint/type-utils@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/type-utils@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.51.0 - '@typescript-eslint/typescript-estree': 8.51.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/types': 8.52.0 + '@typescript-eslint/typescript-estree': 8.52.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) debug: 4.4.3 eslint: 9.39.2(jiti@1.21.7) ts-api-utils: 2.4.0(typescript@5.9.3) @@ -2489,14 +2522,14 @@ snapshots: transitivePeerDependencies: - 
supports-color - '@typescript-eslint/types@8.51.0': {} + '@typescript-eslint/types@8.52.0': {} - '@typescript-eslint/typescript-estree@8.51.0(typescript@5.9.3)': + '@typescript-eslint/typescript-estree@8.52.0(typescript@5.9.3)': dependencies: - '@typescript-eslint/project-service': 8.51.0(typescript@5.9.3) - '@typescript-eslint/tsconfig-utils': 8.51.0(typescript@5.9.3) - '@typescript-eslint/types': 8.51.0 - '@typescript-eslint/visitor-keys': 8.51.0 + '@typescript-eslint/project-service': 8.52.0(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.52.0(typescript@5.9.3) + '@typescript-eslint/types': 8.52.0 + '@typescript-eslint/visitor-keys': 8.52.0 debug: 4.4.3 minimatch: 9.0.5 semver: 7.7.3 @@ -2506,20 +2539,20 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + '@typescript-eslint/utils@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': dependencies: '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@1.21.7)) - '@typescript-eslint/scope-manager': 8.51.0 - '@typescript-eslint/types': 8.51.0 - '@typescript-eslint/typescript-estree': 8.51.0(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.52.0 + '@typescript-eslint/types': 8.52.0 + '@typescript-eslint/typescript-estree': 8.52.0(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/visitor-keys@8.51.0': + '@typescript-eslint/visitor-keys@8.52.0': dependencies: - '@typescript-eslint/types': 8.51.0 + '@typescript-eslint/types': 8.52.0 eslint-visitor-keys: 4.2.1 '@xmldom/xmldom@0.9.8': @@ -2567,7 +2600,7 @@ snapshots: autoprefixer@10.4.23(postcss@8.5.6): dependencies: browserslist: 4.28.1 - caniuse-lite: 1.0.30001762 + caniuse-lite: 1.0.30001764 fraction.js: 5.3.4 picocolors: 1.1.1 postcss: 8.5.6 @@ -2585,7 +2618,7 @@ snapshots: balanced-match@1.0.2: {} - baseline-browser-mapping@2.9.11: {} + baseline-browser-mapping@2.9.14: {} 
binary-extensions@2.3.0: {} @@ -2604,8 +2637,8 @@ snapshots: browserslist@4.28.1: dependencies: - baseline-browser-mapping: 2.9.11 - caniuse-lite: 1.0.30001762 + baseline-browser-mapping: 2.9.14 + caniuse-lite: 1.0.30001764 electron-to-chromium: 1.5.267 node-releases: 2.0.27 update-browserslist-db: 1.2.3(browserslist@4.28.1) @@ -2634,7 +2667,7 @@ snapshots: camelcase-css@2.0.1: {} - caniuse-lite@1.0.30001762: {} + caniuse-lite@1.0.30001764: {} chalk@4.1.2: dependencies: @@ -2796,7 +2829,7 @@ snapshots: dependencies: eslint: 9.39.2(jiti@1.21.7) - eslint-plugin-svelte@3.13.1(eslint@9.39.2(jiti@1.21.7))(svelte@5.46.1): + eslint-plugin-svelte@3.14.0(eslint@9.39.2(jiti@1.21.7))(svelte@5.46.1): dependencies: '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@1.21.7)) '@jridgewell/sourcemap-codec': 1.5.5 @@ -2896,7 +2929,7 @@ snapshots: esutils@2.0.3: {} - exifreader@4.33.1: + exifreader@4.35.0: optionalDependencies: '@xmldom/xmldom': 0.9.8 @@ -3002,6 +3035,8 @@ snapshots: globals@16.5.0: {} + globals@17.0.0: {} + gopd@1.2.0: {} has-flag@4.0.0: {} @@ -3166,7 +3201,7 @@ snapshots: acorn: 8.15.0 pathe: 2.0.3 pkg-types: 1.3.1 - ufo: 1.6.1 + ufo: 1.6.2 mri@1.2.0: {} @@ -3212,11 +3247,13 @@ snapshots: object-hash@3.0.0: {} + obug@2.1.1: {} + ofetch@1.5.1: dependencies: destr: 2.0.5 node-fetch-native: 1.6.7 - ufo: 1.6.1 + ufo: 1.6.2 ohash@2.0.11: {} @@ -3432,32 +3469,35 @@ snapshots: reusify@1.1.0: {} - rollup@4.54.0: + rollup@4.55.1: dependencies: '@types/estree': 1.0.8 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.54.0 - '@rollup/rollup-android-arm64': 4.54.0 - '@rollup/rollup-darwin-arm64': 4.54.0 - '@rollup/rollup-darwin-x64': 4.54.0 - '@rollup/rollup-freebsd-arm64': 4.54.0 - '@rollup/rollup-freebsd-x64': 4.54.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.54.0 - '@rollup/rollup-linux-arm-musleabihf': 4.54.0 - '@rollup/rollup-linux-arm64-gnu': 4.54.0 - '@rollup/rollup-linux-arm64-musl': 4.54.0 - '@rollup/rollup-linux-loong64-gnu': 4.54.0 - 
'@rollup/rollup-linux-ppc64-gnu': 4.54.0 - '@rollup/rollup-linux-riscv64-gnu': 4.54.0 - '@rollup/rollup-linux-riscv64-musl': 4.54.0 - '@rollup/rollup-linux-s390x-gnu': 4.54.0 - '@rollup/rollup-linux-x64-gnu': 4.54.0 - '@rollup/rollup-linux-x64-musl': 4.54.0 - '@rollup/rollup-openharmony-arm64': 4.54.0 - '@rollup/rollup-win32-arm64-msvc': 4.54.0 - '@rollup/rollup-win32-ia32-msvc': 4.54.0 - '@rollup/rollup-win32-x64-gnu': 4.54.0 - '@rollup/rollup-win32-x64-msvc': 4.54.0 + '@rollup/rollup-android-arm-eabi': 4.55.1 + '@rollup/rollup-android-arm64': 4.55.1 + '@rollup/rollup-darwin-arm64': 4.55.1 + '@rollup/rollup-darwin-x64': 4.55.1 + '@rollup/rollup-freebsd-arm64': 4.55.1 + '@rollup/rollup-freebsd-x64': 4.55.1 + '@rollup/rollup-linux-arm-gnueabihf': 4.55.1 + '@rollup/rollup-linux-arm-musleabihf': 4.55.1 + '@rollup/rollup-linux-arm64-gnu': 4.55.1 + '@rollup/rollup-linux-arm64-musl': 4.55.1 + '@rollup/rollup-linux-loong64-gnu': 4.55.1 + '@rollup/rollup-linux-loong64-musl': 4.55.1 + '@rollup/rollup-linux-ppc64-gnu': 4.55.1 + '@rollup/rollup-linux-ppc64-musl': 4.55.1 + '@rollup/rollup-linux-riscv64-gnu': 4.55.1 + '@rollup/rollup-linux-riscv64-musl': 4.55.1 + '@rollup/rollup-linux-s390x-gnu': 4.55.1 + '@rollup/rollup-linux-x64-gnu': 4.55.1 + '@rollup/rollup-linux-x64-musl': 4.55.1 + '@rollup/rollup-openbsd-x64': 4.55.1 + '@rollup/rollup-openharmony-arm64': 4.55.1 + '@rollup/rollup-win32-arm64-msvc': 4.55.1 + '@rollup/rollup-win32-ia32-msvc': 4.55.1 + '@rollup/rollup-win32-x64-gnu': 4.55.1 + '@rollup/rollup-win32-x64-msvc': 4.55.1 fsevents: 2.3.3 run-parallel@1.2.0: @@ -3618,12 +3658,12 @@ snapshots: dependencies: prelude-ls: 1.2.1 - typescript-eslint@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): + typescript-eslint@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3): dependencies: - '@typescript-eslint/eslint-plugin': 8.51.0(@typescript-eslint/parser@8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - 
'@typescript-eslint/parser': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) - '@typescript-eslint/typescript-estree': 8.51.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.51.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/eslint-plugin': 8.52.0(@typescript-eslint/parser@8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/parser': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/typescript-estree': 8.52.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.52.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.2(jiti@1.21.7) typescript: 5.9.3 transitivePeerDependencies: @@ -3631,7 +3671,7 @@ snapshots: typescript@5.9.3: {} - ufo@1.6.1: {} + ufo@1.6.2: {} undici-types@7.16.0: {} @@ -3668,23 +3708,23 @@ snapshots: uuid@13.0.0: {} - vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0): + vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0): dependencies: esbuild: 0.27.2 fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 postcss: 8.5.6 - rollup: 4.54.0 + rollup: 4.55.1 tinyglobby: 0.2.15 optionalDependencies: - '@types/node': 25.0.3 + '@types/node': 25.0.6 fsevents: 2.3.3 jiti: 1.21.7 yaml: 2.8.0 - vitefu@1.1.1(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0)): + vitefu@1.1.1(vite@7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0)): optionalDependencies: - vite: 7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0) + vite: 7.3.1(@types/node@25.0.6)(jiti@1.21.7)(yaml@2.8.0) webpack-virtual-modules@0.6.2: {} diff --git a/src/hooks.client.ts b/src/hooks.client.ts index 99e11c9..a677d9f 100644 --- a/src/hooks.client.ts +++ b/src/hooks.client.ts @@ -1,7 +1,6 @@ import type { ClientInit } from "@sveltejs/kit"; import { cleanupDanglingInfos, getClientKey, getMasterKeys, getHmacSecrets } from "$lib/indexedDB"; import { prepareFileCache } from "$lib/modules/file"; -import { prepareOpfs } from "$lib/modules/opfs"; import { clientKeyStore, 
masterKeyStore, hmacSecretStore } from "$lib/stores"; const requestPersistentStorage = async () => { @@ -46,7 +45,6 @@ export const init: ClientInit = async () => { prepareClientKeyStore(), prepareMasterKeyStore(), prepareHmacSecretStore(), - prepareOpfs(), ]); cleanupDanglingInfos(); // Intended diff --git a/src/hooks.server.ts b/src/hooks.server.ts index 6f94a7e..c670968 100644 --- a/src/hooks.server.ts +++ b/src/hooks.server.ts @@ -8,6 +8,7 @@ import { cleanupExpiredSessionUpgradeChallenges, } from "$lib/server/db/session"; import { authenticate, setAgentInfo } from "$lib/server/middlewares"; +import { cleanupExpiredUploadSessions } from "$lib/server/services/upload"; export const init: ServerInit = async () => { await migrateDB(); @@ -16,6 +17,7 @@ export const init: ServerInit = async () => { cleanupExpiredUserClientChallenges(); cleanupExpiredSessions(); cleanupExpiredSessionUpgradeChallenges(); + cleanupExpiredUploadSessions(); }); }; diff --git a/src/lib/constants/index.ts b/src/lib/constants/index.ts new file mode 100644 index 0000000..4983846 --- /dev/null +++ b/src/lib/constants/index.ts @@ -0,0 +1,2 @@ +export * from "./serviceWorker"; +export * from "./upload"; diff --git a/src/lib/constants/serviceWorker.ts b/src/lib/constants/serviceWorker.ts new file mode 100644 index 0000000..abbaa3c --- /dev/null +++ b/src/lib/constants/serviceWorker.ts @@ -0,0 +1 @@ +export const DECRYPTED_FILE_URL_PREFIX = "/_internal/decryptedFile/"; diff --git a/src/lib/constants/upload.ts b/src/lib/constants/upload.ts new file mode 100644 index 0000000..57934d6 --- /dev/null +++ b/src/lib/constants/upload.ts @@ -0,0 +1,6 @@ +export const AES_GCM_IV_SIZE = 12; +export const AES_GCM_TAG_SIZE = 16; +export const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE; + +export const CHUNK_SIZE = 4 * 1024 * 1024; // 4 MiB +export const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD; diff --git a/src/lib/indexedDB/keyStore.ts b/src/lib/indexedDB/keyStore.ts index 
7a4c89e..86b8b79 100644 --- a/src/lib/indexedDB/keyStore.ts +++ b/src/lib/indexedDB/keyStore.ts @@ -70,12 +70,12 @@ export const storeMasterKeys = async (keys: MasterKey[]) => { }; export const getHmacSecrets = async () => { - return await keyStore.hmacSecret.toArray(); + return (await keyStore.hmacSecret.toArray()).filter(({ secret }) => secret.extractable); }; export const storeHmacSecrets = async (secrets: HmacSecret[]) => { - if (secrets.some(({ secret }) => secret.extractable)) { - throw new Error("Hmac secrets must be nonextractable"); + if (secrets.some(({ secret }) => !secret.extractable)) { + throw new Error("Hmac secrets must be extractable"); } await keyStore.hmacSecret.bulkPut(secrets); }; diff --git a/src/lib/modules/crypto/aes.ts b/src/lib/modules/crypto/aes.ts index 3c096ba..35687e6 100644 --- a/src/lib/modules/crypto/aes.ts +++ b/src/lib/modules/crypto/aes.ts @@ -1,8 +1,15 @@ -import { encodeString, decodeString, encodeToBase64, decodeFromBase64 } from "./util"; +import { AES_GCM_IV_SIZE } from "$lib/constants"; +import { + encodeString, + decodeString, + encodeToBase64, + decodeFromBase64, + concatenateBuffers, +} from "./utils"; export const generateMasterKey = async () => { return { - masterKey: await window.crypto.subtle.generateKey( + masterKey: await crypto.subtle.generateKey( { name: "AES-KW", length: 256, @@ -15,7 +22,7 @@ export const generateMasterKey = async () => { export const generateDataKey = async () => { return { - dataKey: await window.crypto.subtle.generateKey( + dataKey: await crypto.subtle.generateKey( { name: "AES-GCM", length: 256, @@ -28,9 +35,9 @@ export const generateDataKey = async () => { }; export const makeAESKeyNonextractable = async (key: CryptoKey) => { - return await window.crypto.subtle.importKey( + return await crypto.subtle.importKey( "raw", - await window.crypto.subtle.exportKey("raw", key), + await crypto.subtle.exportKey("raw", key), key.algorithm, false, key.usages, @@ -38,12 +45,12 @@ export const 
makeAESKeyNonextractable = async (key: CryptoKey) => { }; export const wrapDataKey = async (dataKey: CryptoKey, masterKey: CryptoKey) => { - return encodeToBase64(await window.crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW")); + return encodeToBase64(await crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW")); }; export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey) => { return { - dataKey: await window.crypto.subtle.unwrapKey( + dataKey: await crypto.subtle.unwrapKey( "raw", decodeFromBase64(dataKeyWrapped), masterKey, @@ -56,12 +63,12 @@ export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey }; export const wrapHmacSecret = async (hmacSecret: CryptoKey, masterKey: CryptoKey) => { - return encodeToBase64(await window.crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW")); + return encodeToBase64(await crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW")); }; export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: CryptoKey) => { return { - hmacSecret: await window.crypto.subtle.unwrapKey( + hmacSecret: await crypto.subtle.unwrapKey( "raw", decodeFromBase64(hmacSecretWrapped), masterKey, @@ -70,15 +77,15 @@ export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: Cry name: "HMAC", hash: "SHA-256", } satisfies HmacImportParams, - false, // Nonextractable + true, // Extractable ["sign", "verify"], ), }; }; export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => { - const iv = window.crypto.getRandomValues(new Uint8Array(12)); - const ciphertext = await window.crypto.subtle.encrypt( + const iv = crypto.getRandomValues(new Uint8Array(12)); + const ciphertext = await crypto.subtle.encrypt( { name: "AES-GCM", iv, @@ -86,14 +93,18 @@ export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => { dataKey, data, ); - return { ciphertext, iv: encodeToBase64(iv.buffer) }; + return { ciphertext, iv: iv.buffer }; }; 
-export const decryptData = async (ciphertext: BufferSource, iv: string, dataKey: CryptoKey) => { - return await window.crypto.subtle.decrypt( +export const decryptData = async ( + ciphertext: BufferSource, + iv: string | BufferSource, + dataKey: CryptoKey, +) => { + return await crypto.subtle.decrypt( { name: "AES-GCM", - iv: decodeFromBase64(iv), + iv: typeof iv === "string" ? decodeFromBase64(iv) : iv, } satisfies AesGcmParams, dataKey, ciphertext, @@ -102,9 +113,22 @@ export const decryptData = async (ciphertext: BufferSource, iv: string, dataKey: export const encryptString = async (plaintext: string, dataKey: CryptoKey) => { const { ciphertext, iv } = await encryptData(encodeString(plaintext), dataKey); - return { ciphertext: encodeToBase64(ciphertext), iv }; + return { ciphertext: encodeToBase64(ciphertext), iv: encodeToBase64(iv) }; }; export const decryptString = async (ciphertext: string, iv: string, dataKey: CryptoKey) => { return decodeString(await decryptData(decodeFromBase64(ciphertext), iv, dataKey)); }; + +export const encryptChunk = async (chunk: ArrayBuffer, dataKey: CryptoKey) => { + const { ciphertext, iv } = await encryptData(chunk, dataKey); + return concatenateBuffers(iv, ciphertext).buffer; +}; + +export const decryptChunk = async (encryptedChunk: ArrayBuffer, dataKey: CryptoKey) => { + return await decryptData( + encryptedChunk.slice(AES_GCM_IV_SIZE), + encryptedChunk.slice(0, AES_GCM_IV_SIZE), + dataKey, + ); +}; diff --git a/src/lib/modules/crypto/index.ts b/src/lib/modules/crypto/index.ts index e6972ba..e3c27a7 100644 --- a/src/lib/modules/crypto/index.ts +++ b/src/lib/modules/crypto/index.ts @@ -1,4 +1,4 @@ export * from "./aes"; export * from "./rsa"; export * from "./sha"; -export * from "./util"; +export * from "./utils"; diff --git a/src/lib/modules/crypto/rsa.ts b/src/lib/modules/crypto/rsa.ts index 13dfd46..78e17db 100644 --- a/src/lib/modules/crypto/rsa.ts +++ b/src/lib/modules/crypto/rsa.ts @@ -1,7 +1,7 @@ -import { encodeString, 
encodeToBase64, decodeFromBase64 } from "./util"; +import { encodeString, encodeToBase64, decodeFromBase64 } from "./utils"; export const generateEncryptionKeyPair = async () => { - const keyPair = await window.crypto.subtle.generateKey( + const keyPair = await crypto.subtle.generateKey( { name: "RSA-OAEP", modulusLength: 4096, @@ -18,7 +18,7 @@ export const generateEncryptionKeyPair = async () => { }; export const generateSigningKeyPair = async () => { - const keyPair = await window.crypto.subtle.generateKey( + const keyPair = await crypto.subtle.generateKey( { name: "RSA-PSS", modulusLength: 4096, @@ -37,7 +37,7 @@ export const generateSigningKeyPair = async () => { export const exportRSAKey = async (key: CryptoKey) => { const format = key.type === "public" ? ("spki" as const) : ("pkcs8" as const); return { - key: await window.crypto.subtle.exportKey(format, key), + key: await crypto.subtle.exportKey(format, key), format, }; }; @@ -54,14 +54,14 @@ export const importEncryptionKeyPairFromBase64 = async ( name: "RSA-OAEP", hash: "SHA-256", }; - const encryptKey = await window.crypto.subtle.importKey( + const encryptKey = await crypto.subtle.importKey( "spki", decodeFromBase64(encryptKeyBase64), algorithm, true, ["encrypt", "wrapKey"], ); - const decryptKey = await window.crypto.subtle.importKey( + const decryptKey = await crypto.subtle.importKey( "pkcs8", decodeFromBase64(decryptKeyBase64), algorithm, @@ -79,14 +79,14 @@ export const importSigningKeyPairFromBase64 = async ( name: "RSA-PSS", hash: "SHA-256", }; - const signKey = await window.crypto.subtle.importKey( + const signKey = await crypto.subtle.importKey( "pkcs8", decodeFromBase64(signKeyBase64), algorithm, true, ["sign"], ); - const verifyKey = await window.crypto.subtle.importKey( + const verifyKey = await crypto.subtle.importKey( "spki", decodeFromBase64(verifyKeyBase64), algorithm, @@ -98,17 +98,11 @@ export const importSigningKeyPairFromBase64 = async ( export const makeRSAKeyNonextractable = async 
(key: CryptoKey) => { const { key: exportedKey, format } = await exportRSAKey(key); - return await window.crypto.subtle.importKey( - format, - exportedKey, - key.algorithm, - false, - key.usages, - ); + return await crypto.subtle.importKey(format, exportedKey, key.algorithm, false, key.usages); }; export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey) => { - return await window.crypto.subtle.decrypt( + return await crypto.subtle.decrypt( { name: "RSA-OAEP", } satisfies RsaOaepParams, @@ -119,7 +113,7 @@ export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey) export const wrapMasterKey = async (masterKey: CryptoKey, encryptKey: CryptoKey) => { return encodeToBase64( - await window.crypto.subtle.wrapKey("raw", masterKey, encryptKey, { + await crypto.subtle.wrapKey("raw", masterKey, encryptKey, { name: "RSA-OAEP", } satisfies RsaOaepParams), ); @@ -131,7 +125,7 @@ export const unwrapMasterKey = async ( extractable = false, ) => { return { - masterKey: await window.crypto.subtle.unwrapKey( + masterKey: await crypto.subtle.unwrapKey( "raw", decodeFromBase64(masterKeyWrapped), decryptKey, @@ -146,7 +140,7 @@ export const unwrapMasterKey = async ( }; export const signMessageRSA = async (message: BufferSource, signKey: CryptoKey) => { - return await window.crypto.subtle.sign( + return await crypto.subtle.sign( { name: "RSA-PSS", saltLength: 32, // SHA-256 @@ -161,7 +155,7 @@ export const verifySignatureRSA = async ( signature: BufferSource, verifyKey: CryptoKey, ) => { - return await window.crypto.subtle.verify( + return await crypto.subtle.verify( { name: "RSA-PSS", saltLength: 32, // SHA-256 diff --git a/src/lib/modules/crypto/sha.ts b/src/lib/modules/crypto/sha.ts index 3acb258..286e6f2 100644 --- a/src/lib/modules/crypto/sha.ts +++ b/src/lib/modules/crypto/sha.ts @@ -1,10 +1,13 @@ +import HmacWorker from "$workers/hmac?worker"; +import type { ComputeMessage, ResultMessage } from "$workers/hmac"; + export const 
digestMessage = async (message: BufferSource) => { - return await window.crypto.subtle.digest("SHA-256", message); + return await crypto.subtle.digest("SHA-256", message); }; export const generateHmacSecret = async () => { return { - hmacSecret: await window.crypto.subtle.generateKey( + hmacSecret: await crypto.subtle.generateKey( { name: "HMAC", hash: "SHA-256", @@ -15,6 +18,24 @@ export const generateHmacSecret = async () => { }; }; -export const signMessageHmac = async (message: BufferSource, hmacSecret: CryptoKey) => { - return await window.crypto.subtle.sign("HMAC", hmacSecret, message); +export const signMessageHmac = async (message: Blob, hmacSecret: CryptoKey) => { + const stream = message.stream(); + const hmacSecretRaw = new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret)); + const worker = new HmacWorker(); + + return new Promise((resolve, reject) => { + worker.onmessage = ({ data }: MessageEvent) => { + resolve(data.result); + worker.terminate(); + }; + + worker.onerror = ({ error }) => { + reject(error); + worker.terminate(); + }; + + worker.postMessage({ stream, key: hmacSecretRaw } satisfies ComputeMessage, { + transfer: [stream, hmacSecretRaw.buffer], + }); + }); }; diff --git a/src/lib/modules/crypto/util.ts b/src/lib/modules/crypto/utils.ts similarity index 82% rename from src/lib/modules/crypto/util.ts rename to src/lib/modules/crypto/utils.ts index a3e3bc0..215eaf2 100644 --- a/src/lib/modules/crypto/util.ts +++ b/src/lib/modules/crypto/utils.ts @@ -9,8 +9,8 @@ export const decodeString = (data: ArrayBuffer) => { return textDecoder.decode(data); }; -export const encodeToBase64 = (data: ArrayBuffer) => { - return btoa(String.fromCharCode(...new Uint8Array(data))); +export const encodeToBase64 = (data: ArrayBuffer | Uint8Array) => { + return btoa(String.fromCharCode(...(data instanceof ArrayBuffer ? 
new Uint8Array(data) : data))); }; export const decodeFromBase64 = (data: string) => { diff --git a/src/lib/modules/file/download.svelte.ts b/src/lib/modules/file/download.svelte.ts index bea8316..88f1e9e 100644 --- a/src/lib/modules/file/download.svelte.ts +++ b/src/lib/modules/file/download.svelte.ts @@ -1,6 +1,7 @@ import axios from "axios"; import { limitFunction } from "p-limit"; -import { decryptData } from "$lib/modules/crypto"; +import { ENCRYPTED_CHUNK_SIZE } from "$lib/constants"; +import { decryptChunk, concatenateBuffers } from "$lib/modules/crypto"; export interface FileDownloadState { id: number; @@ -65,13 +66,21 @@ const decryptFile = limitFunction( async ( state: FileDownloadState, fileEncrypted: ArrayBuffer, - fileEncryptedIv: string, + encryptedChunkSize: number, dataKey: CryptoKey, ) => { state.status = "decrypting"; - const fileBuffer = await decryptData(fileEncrypted, fileEncryptedIv, dataKey); + const chunks: ArrayBuffer[] = []; + let offset = 0; + while (offset < fileEncrypted.byteLength) { + const nextOffset = Math.min(offset + encryptedChunkSize, fileEncrypted.byteLength); + chunks.push(await decryptChunk(fileEncrypted.slice(offset, nextOffset), dataKey)); + offset = nextOffset; + } + + const fileBuffer = concatenateBuffers(...chunks).buffer; state.status = "decrypted"; state.result = fileBuffer; return fileBuffer; @@ -79,7 +88,7 @@ const decryptFile = limitFunction( { concurrency: 4 }, ); -export const downloadFile = async (id: number, fileEncryptedIv: string, dataKey: CryptoKey) => { +export const downloadFile = async (id: number, dataKey: CryptoKey, isLegacy: boolean) => { downloadingFiles.push({ id, status: "download-pending", @@ -87,7 +96,13 @@ export const downloadFile = async (id: number, fileEncryptedIv: string, dataKey: const state = downloadingFiles.at(-1)!; try { - return await decryptFile(state, await requestFileDownload(state, id), fileEncryptedIv, dataKey); + const fileEncrypted = await requestFileDownload(state, id); + return 
await decryptFile( + state, + fileEncrypted, + isLegacy ? fileEncrypted.byteLength : ENCRYPTED_CHUNK_SIZE, + dataKey, + ); } catch (e) { state.status = "error"; throw e; diff --git a/src/lib/modules/file/thumbnail.ts b/src/lib/modules/file/thumbnail.ts index f923153..b33a4af 100644 --- a/src/lib/modules/file/thumbnail.ts +++ b/src/lib/modules/file/thumbnail.ts @@ -1,11 +1,10 @@ import { LRUCache } from "lru-cache"; import { writable, type Writable } from "svelte/store"; import { browser } from "$app/environment"; -import { decryptData } from "$lib/modules/crypto"; +import { decryptChunk } from "$lib/modules/crypto"; import type { SummarizedFileInfo } from "$lib/modules/filesystem"; import { readFile, writeFile, deleteFile, deleteDirectory } from "$lib/modules/opfs"; import { getThumbnailUrl } from "$lib/modules/thumbnail"; -import { isTRPCClientError, trpc } from "$trpc/client"; const loadedThumbnails = new LRUCache>({ max: 100 }); const loadingThumbnails = new Map>(); @@ -18,25 +17,13 @@ const fetchFromOpfs = async (fileId: number) => { }; const fetchFromServer = async (fileId: number, dataKey: CryptoKey) => { - try { - const [thumbnailEncrypted, { contentIv: thumbnailEncryptedIv }] = await Promise.all([ - fetch(`/api/file/${fileId}/thumbnail/download`), - trpc().file.thumbnail.query({ id: fileId }), - ]); - const thumbnailBuffer = await decryptData( - await thumbnailEncrypted.arrayBuffer(), - thumbnailEncryptedIv, - dataKey, - ); + const res = await fetch(`/api/file/${fileId}/thumbnail/download`); + if (!res.ok) return null; - void writeFile(`/thumbnail/file/${fileId}`, thumbnailBuffer); - return getThumbnailUrl(thumbnailBuffer); - } catch (e) { - if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") { - return null; - } - throw e; - } + const thumbnailBuffer = await decryptChunk(await res.arrayBuffer(), dataKey); + + void writeFile(`/thumbnail/file/${fileId}`, thumbnailBuffer); + return getThumbnailUrl(thumbnailBuffer); }; export const getFileThumbnail = 
(file: SummarizedFileInfo) => { diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index a632eb5..7ac15ce 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -1,23 +1,12 @@ -import axios from "axios"; import ExifReader from "exifreader"; import { limitFunction } from "p-limit"; -import { - encodeToBase64, - generateDataKey, - wrapDataKey, - encryptData, - encryptString, - digestMessage, - signMessageHmac, -} from "$lib/modules/crypto"; -import { Scheduler } from "$lib/modules/scheduler"; +import { CHUNK_SIZE } from "$lib/constants"; +import { encodeToBase64, generateDataKey, wrapDataKey, encryptString } from "$lib/modules/crypto"; +import { signMessageHmac } from "$lib/modules/crypto"; import { generateThumbnail } from "$lib/modules/thumbnail"; -import type { - FileThumbnailUploadRequest, - FileUploadRequest, - FileUploadResponse, -} from "$lib/server/schemas"; +import { uploadBlob } from "$lib/modules/upload"; import type { MasterKey, HmacSecret } from "$lib/stores"; +import { Scheduler } from "$lib/utils"; import { trpc } from "$trpc/client"; export interface FileUploadState { @@ -42,7 +31,7 @@ export type LiveFileUploadState = FileUploadState & { }; const scheduler = new Scheduler< - { fileId: number; fileBuffer: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined + { fileId: number; fileBuffer?: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined >(); let uploadingFiles: FileUploadState[] = $state([]); @@ -61,16 +50,21 @@ export const clearUploadedFiles = () => { }; const requestDuplicateFileScan = limitFunction( - async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise) => { - const fileBuffer = await file.arrayBuffer(); - const fileSigned = encodeToBase64(await signMessageHmac(fileBuffer, hmacSecret.secret)); + async ( + state: FileUploadState, + file: File, + hmacSecret: HmacSecret, + onDuplicate: () => Promise, + ) => { + state.status = 
"encryption-pending"; + const fileSigned = encodeToBase64(await signMessageHmac(file, hmacSecret.secret)); const files = await trpc().file.listByHash.query({ hskVersion: hmacSecret.version, contentHmac: fileSigned, }); if (files.length === 0 || (await onDuplicate())) { - return { fileBuffer, fileSigned }; + return { fileSigned }; } else { return {}; } @@ -110,74 +104,98 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => { return new Date(utcDate - offsetMs); }; -const encryptFile = limitFunction( - async (state: FileUploadState, file: File, fileBuffer: ArrayBuffer, masterKey: MasterKey) => { +interface FileMetadata { + parentId: "root" | number; + name: string; + createdAt?: Date; + lastModifiedAt: Date; +} + +const requestFileMetadataEncryption = limitFunction( + async ( + state: FileUploadState, + file: Blob, + fileMetadata: FileMetadata, + masterKey: MasterKey, + hmacSecret: HmacSecret, + ) => { state.status = "encrypting"; - const fileType = getFileType(file); - - let createdAt; - if (fileType.startsWith("image/")) { - createdAt = extractExifDateTime(fileBuffer); - } - const { dataKey, dataKeyVersion } = await generateDataKey(); const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); - const fileEncrypted = await encryptData(fileBuffer, dataKey); - const fileEncryptedHash = encodeToBase64(await digestMessage(fileEncrypted.ciphertext)); + const [nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnailBuffer] = + await Promise.all([ + encryptString(fileMetadata.name, dataKey), + fileMetadata.createdAt && + encryptString(fileMetadata.createdAt.getTime().toString(), dataKey), + encryptString(fileMetadata.lastModifiedAt.getTime().toString(), dataKey), + generateThumbnail(file).then((blob) => blob?.arrayBuffer()), + ]); - const nameEncrypted = await encryptString(file.name, dataKey); - const createdAtEncrypted = - createdAt && (await encryptString(createdAt.getTime().toString(), dataKey)); - const lastModifiedAtEncrypted = await 
encryptString(file.lastModified.toString(), dataKey); - - const thumbnail = await generateThumbnail(fileBuffer, fileType); - const thumbnailBuffer = await thumbnail?.arrayBuffer(); - const thumbnailEncrypted = thumbnailBuffer && (await encryptData(thumbnailBuffer, dataKey)); + const { uploadId } = await trpc().upload.startFileUpload.mutate({ + chunks: Math.ceil(file.size / CHUNK_SIZE), + parent: fileMetadata.parentId, + mekVersion: masterKey.version, + dek: dataKeyWrapped, + dekVersion: dataKeyVersion, + hskVersion: hmacSecret.version, + contentType: file.type, + name: nameEncrypted.ciphertext, + nameIv: nameEncrypted.iv, + createdAt: createdAtEncrypted?.ciphertext, + createdAtIv: createdAtEncrypted?.iv, + lastModifiedAt: lastModifiedAtEncrypted.ciphertext, + lastModifiedAtIv: lastModifiedAtEncrypted.iv, + }); state.status = "upload-pending"; - - return { - dataKeyWrapped, - dataKeyVersion, - fileType, - fileEncrypted, - fileEncryptedHash, - nameEncrypted, - createdAtEncrypted, - lastModifiedAtEncrypted, - thumbnail: thumbnailEncrypted && { plaintext: thumbnailBuffer, ...thumbnailEncrypted }, - }; + return { uploadId, thumbnailBuffer, dataKey, dataKeyVersion }; }, { concurrency: 4 }, ); const requestFileUpload = limitFunction( - async (state: FileUploadState, form: FormData, thumbnailForm: FormData | null) => { + async ( + state: FileUploadState, + uploadId: string, + file: Blob, + fileSigned: string, + thumbnailBuffer: ArrayBuffer | undefined, + dataKey: CryptoKey, + dataKeyVersion: Date, + ) => { state.status = "uploading"; - const res = await axios.post("/api/file/upload", form, { - onUploadProgress: ({ progress, rate, estimated }) => { - state.progress = progress; - state.rate = rate; - state.estimated = estimated; + await uploadBlob(uploadId, file, dataKey, { + onProgress(s) { + state.progress = s.progress; + state.rate = s.rate; }, }); - const { file }: FileUploadResponse = res.data; - if (thumbnailForm) { + const { file: fileId } = await 
trpc().upload.completeFileUpload.mutate({ + uploadId, + contentHmac: fileSigned, + }); + + if (thumbnailBuffer) { try { - await axios.post(`/api/file/${file}/thumbnail/upload`, thumbnailForm); + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); + + await uploadBlob(uploadId, new Blob([thumbnailBuffer]), dataKey); + + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); } catch (e) { - // TODO console.error(e); } } state.status = "uploaded"; - - return { fileId: file }; + return { fileId }; }, { concurrency: 1 }, ); @@ -185,8 +203,8 @@ const requestFileUpload = limitFunction( export const uploadFile = async ( file: File, parentId: "root" | number, - hmacSecret: HmacSecret, masterKey: MasterKey, + hmacSecret: HmacSecret, onDuplicate: () => Promise, ) => { uploadingFiles.push({ @@ -197,70 +215,44 @@ export const uploadFile = async ( const state = uploadingFiles.at(-1)!; return await scheduler.schedule(file.size, async () => { - state.status = "encryption-pending"; - try { - const { fileBuffer, fileSigned } = await requestDuplicateFileScan( - file, - hmacSecret, - onDuplicate, - ); - if (!fileBuffer || !fileSigned) { + const { fileSigned } = await requestDuplicateFileScan(state, file, hmacSecret, onDuplicate); + + if (!fileSigned) { state.status = "canceled"; uploadingFiles = uploadingFiles.filter((file) => file !== state); - return undefined; + return; } - const { - dataKeyWrapped, + let fileBuffer; + const fileType = getFileType(file); + const fileMetadata: FileMetadata = { + parentId, + name: file.name, + lastModifiedAt: new Date(file.lastModified), + }; + + if (fileType.startsWith("image/")) { + fileBuffer = await file.arrayBuffer(); + fileMetadata.createdAt = extractExifDateTime(fileBuffer); + } + + const blob = new Blob([file], { type: fileType }); + + const { uploadId, thumbnailBuffer, dataKey, dataKeyVersion } = + await requestFileMetadataEncryption(state, blob, 
fileMetadata, masterKey, hmacSecret); + + const { fileId } = await requestFileUpload( + state, + uploadId, + blob, + fileSigned, + thumbnailBuffer, + dataKey, dataKeyVersion, - fileType, - fileEncrypted, - fileEncryptedHash, - nameEncrypted, - createdAtEncrypted, - lastModifiedAtEncrypted, - thumbnail, - } = await encryptFile(state, file, fileBuffer, masterKey); - - const form = new FormData(); - form.set( - "metadata", - JSON.stringify({ - parent: parentId, - mekVersion: masterKey.version, - dek: dataKeyWrapped, - dekVersion: dataKeyVersion.toISOString(), - hskVersion: hmacSecret.version, - contentHmac: fileSigned, - contentType: fileType, - contentIv: fileEncrypted.iv, - name: nameEncrypted.ciphertext, - nameIv: nameEncrypted.iv, - createdAt: createdAtEncrypted?.ciphertext, - createdAtIv: createdAtEncrypted?.iv, - lastModifiedAt: lastModifiedAtEncrypted.ciphertext, - lastModifiedAtIv: lastModifiedAtEncrypted.iv, - } satisfies FileUploadRequest), ); - form.set("content", new Blob([fileEncrypted.ciphertext])); - form.set("checksum", fileEncryptedHash); - let thumbnailForm = null; - if (thumbnail) { - thumbnailForm = new FormData(); - thumbnailForm.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: thumbnail.iv, - } satisfies FileThumbnailUploadRequest), - ); - thumbnailForm.set("content", new Blob([thumbnail.ciphertext])); - } - - const { fileId } = await requestFileUpload(state, form, thumbnailForm); - return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext }; + return { fileId, fileBuffer, thumbnailBuffer }; } catch (e) { state.status = "error"; throw e; diff --git a/src/lib/modules/filesystem/file.ts b/src/lib/modules/filesystem/file.ts index 7d5feb9..d80a872 100644 --- a/src/lib/modules/filesystem/file.ts +++ b/src/lib/modules/filesystem/file.ts @@ -47,10 +47,10 @@ const cache = new FilesystemCache({ return storeToIndexedDB({ id, + isLegacy: file.isLegacy, parentId: file.parent, dataKey: metadata.dataKey, 
contentType: file.contentType, - contentIv: file.contentIv, name: metadata.name, createdAt: metadata.createdAt, lastModifiedAt: metadata.lastModifiedAt, @@ -116,9 +116,9 @@ const cache = new FilesystemCache({ return { id, exists: true as const, + isLegacy: metadataRaw.isLegacy, parentId: metadataRaw.parent, contentType: metadataRaw.contentType, - contentIv: metadataRaw.contentIv, categories, ...metadata, }; diff --git a/src/lib/modules/filesystem/types.ts b/src/lib/modules/filesystem/types.ts index 9f33113..f4ce9cf 100644 --- a/src/lib/modules/filesystem/types.ts +++ b/src/lib/modules/filesystem/types.ts @@ -28,10 +28,10 @@ export type SubDirectoryInfo = Omit>); -export type SummarizedFileInfo = Omit; +export type SummarizedFileInfo = Omit; export type CategoryFileInfo = SummarizedFileInfo & { isRecursive: boolean }; interface LocalCategoryInfo { diff --git a/src/lib/modules/http.ts b/src/lib/modules/http.ts new file mode 100644 index 0000000..4116c18 --- /dev/null +++ b/src/lib/modules/http.ts @@ -0,0 +1,22 @@ +export const parseRangeHeader = (value: string | null) => { + if (!value) return undefined; + + const firstRange = value.split(",")[0]!.trim(); + const parts = firstRange.replace(/bytes=/, "").split("-"); + return { + start: parts[0] ? parseInt(parts[0], 10) : undefined, + end: parts[1] ? 
parseInt(parts[1], 10) : undefined, + }; +}; + +export const getContentRangeHeader = (range?: { start: number; end: number; total: number }) => { + return range && { "Content-Range": `bytes ${range.start}-${range.end}/${range.total}` }; +}; + +export const parseContentDigestHeader = (value: string | null) => { + if (!value) return undefined; + + const firstDigest = value.split(",")[0]!.trim(); + const match = firstDigest.match(/^sha-256=:([A-Za-z0-9+/=]+):$/); + return match?.[1]; +}; diff --git a/src/lib/modules/key.ts b/src/lib/modules/key.ts index d5276a5..ca84477 100644 --- a/src/lib/modules/key.ts +++ b/src/lib/modules/key.ts @@ -2,7 +2,7 @@ import { z } from "zod"; import { storeClientKey } from "$lib/indexedDB"; import type { ClientKeys } from "$lib/stores"; -const serializedClientKeysSchema = z.intersection( +const SerializedClientKeysSchema = z.intersection( z.object({ generator: z.literal("ArkVault"), exportedAt: z.iso.datetime(), @@ -16,7 +16,7 @@ const serializedClientKeysSchema = z.intersection( }), ); -type SerializedClientKeys = z.infer; +type SerializedClientKeys = z.infer; type DeserializedClientKeys = { encryptKeyBase64: string; @@ -43,7 +43,7 @@ export const serializeClientKeys = ({ }; export const deserializeClientKeys = (serialized: string) => { - const zodRes = serializedClientKeysSchema.safeParse(JSON.parse(serialized)); + const zodRes = SerializedClientKeysSchema.safeParse(JSON.parse(serialized)); if (zodRes.success) { return { encryptKeyBase64: zodRes.data.encryptKey, diff --git a/src/lib/modules/opfs.ts b/src/lib/modules/opfs.ts index 41f1f72..a367aae 100644 --- a/src/lib/modules/opfs.ts +++ b/src/lib/modules/opfs.ts @@ -1,13 +1,5 @@ -let rootHandle: FileSystemDirectoryHandle | null = null; - -export const prepareOpfs = async () => { - rootHandle = await navigator.storage.getDirectory(); -}; - const getFileHandle = async (path: string, create = true) => { - if (!rootHandle) { - throw new Error("OPFS not prepared"); - } else if (path[0] !== 
"/") { + if (path[0] !== "/") { throw new Error("Path must be absolute"); } @@ -17,7 +9,7 @@ const getFileHandle = async (path: string, create = true) => { } try { - let directoryHandle = rootHandle; + let directoryHandle = await navigator.storage.getDirectory(); for (const part of parts.slice(0, -1)) { if (!part) continue; directoryHandle = await directoryHandle.getDirectoryHandle(part, { create }); @@ -34,12 +26,15 @@ const getFileHandle = async (path: string, create = true) => { } }; -export const readFile = async (path: string) => { +export const getFile = async (path: string) => { const { fileHandle } = await getFileHandle(path, false); if (!fileHandle) return null; - const file = await fileHandle.getFile(); - return await file.arrayBuffer(); + return await fileHandle.getFile(); +}; + +export const readFile = async (path: string) => { + return (await getFile(path))?.arrayBuffer() ?? null; }; export const writeFile = async (path: string, data: ArrayBuffer) => { @@ -61,9 +56,7 @@ export const deleteFile = async (path: string) => { }; const getDirectoryHandle = async (path: string) => { - if (!rootHandle) { - throw new Error("OPFS not prepared"); - } else if (path[0] !== "/") { + if (path[0] !== "/") { throw new Error("Path must be absolute"); } @@ -73,7 +66,7 @@ const getDirectoryHandle = async (path: string) => { } try { - let directoryHandle = rootHandle; + let directoryHandle = await navigator.storage.getDirectory(); let parentHandle; for (const part of parts.slice(1)) { if (!part) continue; diff --git a/src/lib/modules/thumbnail.ts b/src/lib/modules/thumbnail.ts index d9a995b..18b0745 100644 --- a/src/lib/modules/thumbnail.ts +++ b/src/lib/modules/thumbnail.ts @@ -52,7 +52,6 @@ const generateImageThumbnail = (imageUrl: string) => { .catch(reject); }; image.onerror = reject; - image.src = imageUrl; }); }; @@ -85,31 +84,27 @@ const generateVideoThumbnail = (videoUrl: string, time = 0) => { }); }; -export const generateThumbnail = async (fileBuffer: 
ArrayBuffer, fileType: string) => { +export const generateThumbnail = async (blob: Blob) => { let url; try { - if (fileType.startsWith("image/")) { - const fileBlob = new Blob([fileBuffer], { type: fileType }); - url = URL.createObjectURL(fileBlob); - + if (blob.type.startsWith("image/")) { + url = URL.createObjectURL(blob); try { return await generateImageThumbnail(url); } catch { URL.revokeObjectURL(url); url = undefined; - if (fileType === "image/heic") { + if (blob.type === "image/heic") { const { default: heic2any } = await import("heic2any"); - url = URL.createObjectURL( - (await heic2any({ blob: fileBlob, toType: "image/png" })) as Blob, - ); + url = URL.createObjectURL((await heic2any({ blob, toType: "image/png" })) as Blob); return await generateImageThumbnail(url); } else { return null; } } - } else if (fileType.startsWith("video/")) { - url = URL.createObjectURL(new Blob([fileBuffer], { type: fileType })); + } else if (blob.type.startsWith("video/")) { + url = URL.createObjectURL(blob); return await generateVideoThumbnail(url); } return null; diff --git a/src/lib/modules/upload.ts b/src/lib/modules/upload.ts new file mode 100644 index 0000000..cab51b7 --- /dev/null +++ b/src/lib/modules/upload.ts @@ -0,0 +1,183 @@ +import axios from "axios"; +import pLimit from "p-limit"; +import { ENCRYPTION_OVERHEAD, CHUNK_SIZE } from "$lib/constants"; +import { encryptChunk, digestMessage, encodeToBase64 } from "$lib/modules/crypto"; +import { BoundedQueue } from "$lib/utils"; + +interface UploadStats { + progress: number; + rate: number; +} + +interface EncryptedChunk { + index: number; + data: ArrayBuffer; + hash: string; +} + +const createSpeedMeter = (timeWindow = 3000, minInterval = 200, warmupPeriod = 500) => { + const samples: { t: number; b: number }[] = []; + let lastSpeed = 0; + let startTime: number | null = null; + + return (bytesNow?: number) => { + if (bytesNow === undefined) return lastSpeed; + + const now = performance.now(); + + // Initialize start 
time on first call + if (startTime === null) { + startTime = now; + } + + // Check if enough time has passed since the last sample + const lastSample = samples[samples.length - 1]; + if (lastSample && now - lastSample.t < minInterval) { + return lastSpeed; + } + + samples.push({ t: now, b: bytesNow }); + + // Remove old samples outside the time window + const cutoff = now - timeWindow; + while (samples.length > 2 && samples[0]!.t < cutoff) samples.shift(); + + // Need at least 2 samples to calculate speed + if (samples.length < 2) { + return lastSpeed; + } + + const first = samples[0]!; + const dt = now - first.t; + const db = bytesNow - first.b; + + if (dt >= minInterval) { + const instantSpeed = (db / dt) * 1000; + // Apply EMA for smoother speed transitions + const alpha = 0.3; + const rawSpeed = + lastSpeed === 0 ? instantSpeed : alpha * instantSpeed + (1 - alpha) * lastSpeed; + + // Apply warmup ramp to prevent initial overestimation + const elapsed = now - startTime; + const warmupWeight = Math.min(1, elapsed / warmupPeriod); + lastSpeed = rawSpeed * warmupWeight; + } + + return lastSpeed; + }; +}; + +const encryptChunkData = async ( + chunk: Blob, + dataKey: CryptoKey, +): Promise<{ data: ArrayBuffer; hash: string }> => { + const encrypted = await encryptChunk(await chunk.arrayBuffer(), dataKey); + const hash = encodeToBase64(await digestMessage(encrypted)); + return { data: encrypted, hash }; +}; + +const uploadEncryptedChunk = async ( + uploadId: string, + chunkIndex: number, + encrypted: ArrayBuffer, + hash: string, + onChunkProgress: (chunkIndex: number, loaded: number) => void, +) => { + await axios.post(`/api/upload/${uploadId}/chunks/${chunkIndex + 1}`, encrypted, { + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${hash}:`, + }, + onUploadProgress(e) { + onChunkProgress(chunkIndex, e.loaded ?? 
0); + }, + }); + + onChunkProgress(chunkIndex, encrypted.byteLength); +}; + +export const uploadBlob = async ( + uploadId: string, + blob: Blob, + dataKey: CryptoKey, + options?: { concurrency?: number; onProgress?: (s: UploadStats) => void }, +) => { + const onProgress = options?.onProgress; + const networkConcurrency = options?.concurrency ?? 4; + const maxQueueSize = 8; + + const totalChunks = Math.ceil(blob.size / CHUNK_SIZE); + const totalBytes = blob.size + totalChunks * ENCRYPTION_OVERHEAD; + + const uploadedByChunk = new Array(totalChunks).fill(0); + const speedMeter = createSpeedMeter(3000, 200); + + const emit = () => { + if (!onProgress) return; + + const uploadedBytes = uploadedByChunk.reduce((a, b) => a + b, 0); + const rate = speedMeter(uploadedBytes); + const progress = Math.min(1, uploadedBytes / totalBytes); + + onProgress({ progress, rate }); + }; + + const onChunkProgress = (idx: number, loaded: number) => { + uploadedByChunk[idx] = loaded; + emit(); + }; + + const queue = new BoundedQueue(maxQueueSize); + let encryptionError: Error | null = null; + + // Producer: encrypt chunks and push to queue + const encryptionProducer = async () => { + try { + for (let i = 0; i < totalChunks; i++) { + const chunk = blob.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE); + const { data, hash } = await encryptChunkData(chunk, dataKey); + await queue.push({ index: i, data, hash }); + } + } catch (e) { + encryptionError = e instanceof Error ? 
e : new Error(String(e)); + } finally { + queue.close(); + } + }; + + // Consumer: upload chunks from queue with concurrency limit + const uploadConsumer = async () => { + const limit = pLimit(networkConcurrency); + const activeTasks = new Set>(); + + while (true) { + const item = await queue.pop(); + if (item === null) break; + if (encryptionError) throw encryptionError; + + const task = limit(async () => { + try { + await uploadEncryptedChunk(uploadId, item.index, item.data, item.hash, onChunkProgress); + } finally { + // @ts-ignore + item.data = null; + } + }); + + activeTasks.add(task); + task.finally(() => activeTasks.delete(task)); + + if (activeTasks.size >= networkConcurrency) { + await Promise.race(activeTasks); + } + } + + await Promise.all(activeTasks); + }; + + // Run producer and consumer concurrently + await Promise.all([encryptionProducer(), uploadConsumer()]); + + onProgress?.({ progress: 1, rate: speedMeter() }); +}; diff --git a/src/lib/schemas/filesystem.ts b/src/lib/schemas/filesystem.ts new file mode 100644 index 0000000..d3a45f4 --- /dev/null +++ b/src/lib/schemas/filesystem.ts @@ -0,0 +1,4 @@ +import { z } from "zod"; + +export const DirectoryIdSchema = z.union([z.literal("root"), z.int().positive()]); +export const CategoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/schemas/index.ts b/src/lib/schemas/index.ts new file mode 100644 index 0000000..7d29e5d --- /dev/null +++ b/src/lib/schemas/index.ts @@ -0,0 +1 @@ +export * from "./filesystem"; diff --git a/src/lib/server/db/error.ts b/src/lib/server/db/error.ts index a145f14..0d61d72 100644 --- a/src/lib/server/db/error.ts +++ b/src/lib/server/db/error.ts @@ -9,6 +9,7 @@ type IntegrityErrorMessages = // File | "Directory not found" | "File not found" + | "File is not legacy" | "File not found in category" | "File already added to category" | "Invalid DEK version" diff --git a/src/lib/server/db/file.ts b/src/lib/server/db/file.ts index 472930a..d0c54cc 
100644 --- a/src/lib/server/db/file.ts +++ b/src/lib/server/db/file.ts @@ -15,8 +15,6 @@ interface Directory { encName: Ciphertext; } -export type NewDirectory = Omit; - interface File { id: number; parentId: DirectoryId; @@ -28,15 +26,13 @@ interface File { hskVersion: number | null; contentHmac: string | null; contentType: string; - encContentIv: string; + encContentIv: string | null; encContentHash: string; encName: Ciphertext; encCreatedAt: Ciphertext | null; encLastModifiedAt: Ciphertext; } -export type NewFile = Omit; - interface FileCategory { id: number; parentId: CategoryId; @@ -46,7 +42,7 @@ interface FileCategory { encName: Ciphertext; } -export const registerDirectory = async (params: NewDirectory) => { +export const registerDirectory = async (params: Omit) => { await db.transaction().execute(async (trx) => { const mek = await trx .selectFrom("master_encryption_key") @@ -214,69 +210,41 @@ export const unregisterDirectory = async (userId: number, directoryId: number) = }); }; -export const registerFile = async (params: NewFile) => { +export const registerFile = async (trx: typeof db, params: Omit) => { if ((params.hskVersion && !params.contentHmac) || (!params.hskVersion && params.contentHmac)) { throw new Error("Invalid arguments"); } - return await db.transaction().execute(async (trx) => { - const mek = await trx - .selectFrom("master_encryption_key") - .select("version") - .where("user_id", "=", params.userId) - .where("state", "=", "active") - .limit(1) - .forUpdate() - .executeTakeFirst(); - if (mek?.version !== params.mekVersion) { - throw new IntegrityError("Inactive MEK version"); - } - - if (params.hskVersion) { - const hsk = await trx - .selectFrom("hmac_secret_key") - .select("version") - .where("user_id", "=", params.userId) - .where("state", "=", "active") - .limit(1) - .forUpdate() - .executeTakeFirst(); - if (hsk?.version !== params.hskVersion) { - throw new IntegrityError("Inactive HSK version"); - } - } - - const { fileId } = await trx - 
.insertInto("file") - .values({ - parent_id: params.parentId !== "root" ? params.parentId : null, - user_id: params.userId, - path: params.path, - master_encryption_key_version: params.mekVersion, - encrypted_data_encryption_key: params.encDek, - data_encryption_key_version: params.dekVersion, - hmac_secret_key_version: params.hskVersion, - content_hmac: params.contentHmac, - content_type: params.contentType, - encrypted_content_iv: params.encContentIv, - encrypted_content_hash: params.encContentHash, - encrypted_name: params.encName, - encrypted_created_at: params.encCreatedAt, - encrypted_last_modified_at: params.encLastModifiedAt, - }) - .returning("id as fileId") - .executeTakeFirstOrThrow(); - await trx - .insertInto("file_log") - .values({ - file_id: fileId, - timestamp: new Date(), - action: "create", - new_name: params.encName, - }) - .execute(); - return { id: fileId }; - }); + const { fileId } = await trx + .insertInto("file") + .values({ + parent_id: params.parentId !== "root" ? 
params.parentId : null, + user_id: params.userId, + path: params.path, + master_encryption_key_version: params.mekVersion, + encrypted_data_encryption_key: params.encDek, + data_encryption_key_version: params.dekVersion, + hmac_secret_key_version: params.hskVersion, + content_hmac: params.contentHmac, + content_type: params.contentType, + encrypted_content_iv: params.encContentIv, + encrypted_content_hash: params.encContentHash, + encrypted_name: params.encName, + encrypted_created_at: params.encCreatedAt, + encrypted_last_modified_at: params.encLastModifiedAt, + }) + .returning("id as fileId") + .executeTakeFirstOrThrow(); + await trx + .insertInto("file_log") + .values({ + file_id: fileId, + timestamp: new Date(), + action: "create", + new_name: params.encName, + }) + .execute(); + return { id: fileId }; }; export const getAllFilesByParent = async (userId: number, parentId: DirectoryId) => { @@ -366,6 +334,16 @@ export const getAllFileIds = async (userId: number) => { return files.map(({ id }) => id); }; +export const getLegacyFileIds = async (userId: number) => { + const files = await db + .selectFrom("file") + .select("id") + .where("user_id", "=", userId) + .where("encrypted_content_iv", "is not", null) + .execute(); + return files.map(({ id }) => id); +}; + export const getAllFileIdsByContentHmac = async ( userId: number, hskVersion: number, @@ -514,6 +492,51 @@ export const unregisterFile = async (userId: number, fileId: number) => { }); }; +export const migrateFileContent = async ( + trx: typeof db, + userId: number, + fileId: number, + newPath: string, + dekVersion: Date, + encContentHash: string, +) => { + const file = await trx + .selectFrom("file") + .select(["path", "data_encryption_key_version", "encrypted_content_iv"]) + .where("id", "=", fileId) + .where("user_id", "=", userId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (!file) { + throw new IntegrityError("File not found"); + } else if (file.data_encryption_key_version.getTime() !== 
dekVersion.getTime()) { + throw new IntegrityError("Invalid DEK version"); + } else if (!file.encrypted_content_iv) { + throw new IntegrityError("File is not legacy"); + } + + await trx + .updateTable("file") + .set({ + path: newPath, + encrypted_content_iv: null, + encrypted_content_hash: encContentHash, + }) + .where("id", "=", fileId) + .where("user_id", "=", userId) + .execute(); + await trx + .insertInto("file_log") + .values({ + file_id: fileId, + timestamp: new Date(), + action: "migrate", + }) + .execute(); + return { oldPath: file.path }; +}; + export const addFileToCategory = async (fileId: number, categoryId: number) => { await db.transaction().execute(async (trx) => { try { diff --git a/src/lib/server/db/index.ts b/src/lib/server/db/index.ts index 5c21deb..140cf7d 100644 --- a/src/lib/server/db/index.ts +++ b/src/lib/server/db/index.ts @@ -5,6 +5,7 @@ export * as HskRepo from "./hsk"; export * as MediaRepo from "./media"; export * as MekRepo from "./mek"; export * as SessionRepo from "./session"; +export * as UploadRepo from "./upload"; export * as UserRepo from "./user"; export * from "./error"; diff --git a/src/lib/server/db/media.ts b/src/lib/server/db/media.ts index 209e256..3e165c0 100644 --- a/src/lib/server/db/media.ts +++ b/src/lib/server/db/media.ts @@ -6,7 +6,7 @@ interface Thumbnail { id: number; path: string; updatedAt: Date; - encContentIv: string; + encContentIv: string | null; } interface FileThumbnail extends Thumbnail { @@ -14,54 +14,53 @@ interface FileThumbnail extends Thumbnail { } export const updateFileThumbnail = async ( + trx: typeof db, userId: number, fileId: number, dekVersion: Date, path: string, - encContentIv: string, + encContentIv: string | null, ) => { - return await db.transaction().execute(async (trx) => { - const file = await trx - .selectFrom("file") - .select("data_encryption_key_version") - .where("id", "=", fileId) - .where("user_id", "=", userId) - .limit(1) - .forUpdate() - .executeTakeFirst(); - if (!file) { - 
throw new IntegrityError("File not found"); - } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) { - throw new IntegrityError("Invalid DEK version"); - } + const file = await trx + .selectFrom("file") + .select("data_encryption_key_version") + .where("id", "=", fileId) + .where("user_id", "=", userId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (!file) { + throw new IntegrityError("File not found"); + } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) { + throw new IntegrityError("Invalid DEK version"); + } - const thumbnail = await trx - .selectFrom("thumbnail") - .select("path as oldPath") - .where("file_id", "=", fileId) - .limit(1) - .forUpdate() - .executeTakeFirst(); - const now = new Date(); + const thumbnail = await trx + .selectFrom("thumbnail") + .select("path as oldPath") + .where("file_id", "=", fileId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + const now = new Date(); - await trx - .insertInto("thumbnail") - .values({ - file_id: fileId, + await trx + .insertInto("thumbnail") + .values({ + file_id: fileId, + path, + updated_at: now, + encrypted_content_iv: encContentIv, + }) + .onConflict((oc) => + oc.column("file_id").doUpdateSet({ path, updated_at: now, encrypted_content_iv: encContentIv, - }) - .onConflict((oc) => - oc.column("file_id").doUpdateSet({ - path, - updated_at: now, - encrypted_content_iv: encContentIv, - }), - ) - .execute(); - return thumbnail?.oldPath ?? null; - }); + }), + ) + .execute(); + return thumbnail?.oldPath ?? 
null; }; export const getFileThumbnail = async (userId: number, fileId: number) => { diff --git a/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts new file mode 100644 index 0000000..22676aa --- /dev/null +++ b/src/lib/server/db/migrations/1768062380-AddChunkedUpload.ts @@ -0,0 +1,74 @@ +import { Kysely, sql } from "kysely"; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export const up = async (db: Kysely) => { + // file.ts + await db.schema + .alterTable("file") + .alterColumn("encrypted_content_iv", (col) => col.dropNotNull()) + .execute(); + + // media.ts + await db.schema + .alterTable("thumbnail") + .alterColumn("encrypted_content_iv", (col) => col.dropNotNull()) + .execute(); + + // upload.ts + await db.schema + .createTable("upload_session") + .addColumn("id", "uuid", (col) => col.primaryKey()) + .addColumn("type", "text", (col) => col.notNull()) + .addColumn("user_id", "integer", (col) => col.references("user.id").notNull()) + .addColumn("path", "text", (col) => col.notNull()) + .addColumn("bitmap", "bytea", (col) => col.notNull()) + .addColumn("total_chunks", "integer", (col) => col.notNull()) + .addColumn("uploaded_chunks", "integer", (col) => + col + .generatedAlwaysAs(sql`bit_count(bitmap)`) + .stored() + .notNull(), + ) + .addColumn("expires_at", "timestamp(3)", (col) => col.notNull()) + .addColumn("parent_id", "integer", (col) => col.references("directory.id")) + .addColumn("master_encryption_key_version", "integer") + .addColumn("encrypted_data_encryption_key", "text") + .addColumn("data_encryption_key_version", "timestamp(3)") + .addColumn("hmac_secret_key_version", "integer") + .addColumn("content_type", "text") + .addColumn("encrypted_name", "json") + .addColumn("encrypted_created_at", "json") + .addColumn("encrypted_last_modified_at", "json") + .addColumn("file_id", "integer", (col) => col.references("file.id")) + .addForeignKeyConstraint( + 
"upload_session_fk01", + ["user_id", "master_encryption_key_version"], + "master_encryption_key", + ["user_id", "version"], + ) + .addForeignKeyConstraint( + "upload_session_fk02", + ["user_id", "hmac_secret_key_version"], + "hmac_secret_key", + ["user_id", "version"], + ) + .addCheckConstraint( + "upload_session_ck01", + sql`length(bitmap) = ceil(total_chunks / 8.0)::integer`, + ) + .addCheckConstraint("upload_session_ck02", sql`uploaded_chunks <= total_chunks`) + .execute(); +}; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export const down = async (db: Kysely) => { + await db.schema.dropTable("upload_session").execute(); + await db.schema + .alterTable("thumbnail") + .alterColumn("encrypted_content_iv", (col) => col.setNotNull()) + .execute(); + await db.schema + .alterTable("file") + .alterColumn("encrypted_content_iv", (col) => col.setNotNull()) + .execute(); +}; diff --git a/src/lib/server/db/migrations/index.ts b/src/lib/server/db/migrations/index.ts index f58c2d0..ca3310a 100644 --- a/src/lib/server/db/migrations/index.ts +++ b/src/lib/server/db/migrations/index.ts @@ -1,9 +1,11 @@ import * as Initial1737357000 from "./1737357000-Initial"; import * as AddFileCategory1737422340 from "./1737422340-AddFileCategory"; import * as AddThumbnail1738409340 from "./1738409340-AddThumbnail"; +import * as AddChunkedUpload1768062380 from "./1768062380-AddChunkedUpload"; export default { "1737357000-Initial": Initial1737357000, "1737422340-AddFileCategory": AddFileCategory1737422340, "1738409340-AddThumbnail": AddThumbnail1738409340, + "1768062380-AddChunkedUpload": AddChunkedUpload1768062380, }; diff --git a/src/lib/server/db/schema/category.ts b/src/lib/server/db/schema/category.ts index 2304264..ccaba95 100644 --- a/src/lib/server/db/schema/category.ts +++ b/src/lib/server/db/schema/category.ts @@ -1,5 +1,5 @@ import type { Generated } from "kysely"; -import type { Ciphertext } from "./util"; +import type { Ciphertext } from "./utils"; interface 
CategoryTable { id: Generated; diff --git a/src/lib/server/db/schema/file.ts b/src/lib/server/db/schema/file.ts index a1bf9bd..3680d1d 100644 --- a/src/lib/server/db/schema/file.ts +++ b/src/lib/server/db/schema/file.ts @@ -1,5 +1,5 @@ import type { ColumnType, Generated } from "kysely"; -import type { Ciphertext } from "./util"; +import type { Ciphertext } from "./utils"; interface DirectoryTable { id: Generated; @@ -30,7 +30,7 @@ interface FileTable { hmac_secret_key_version: number | null; content_hmac: string | null; // Base64 content_type: string; - encrypted_content_iv: string; // Base64 + encrypted_content_iv: string | null; // Base64 encrypted_content_hash: string; // Base64 encrypted_name: Ciphertext; encrypted_created_at: Ciphertext | null; @@ -41,7 +41,7 @@ interface FileLogTable { id: Generated; file_id: number; timestamp: ColumnType; - action: "create" | "rename" | "add-to-category" | "remove-from-category"; + action: "create" | "rename" | "migrate" | "add-to-category" | "remove-from-category"; new_name: Ciphertext | null; category_id: number | null; } diff --git a/src/lib/server/db/schema/index.ts b/src/lib/server/db/schema/index.ts index 4e427fb..7a13395 100644 --- a/src/lib/server/db/schema/index.ts +++ b/src/lib/server/db/schema/index.ts @@ -5,8 +5,9 @@ export * from "./hsk"; export * from "./media"; export * from "./mek"; export * from "./session"; +export * from "./upload"; export * from "./user"; -export * from "./util"; +export * from "./utils"; // eslint-disable-next-line @typescript-eslint/no-empty-object-type export interface Database {} diff --git a/src/lib/server/db/schema/media.ts b/src/lib/server/db/schema/media.ts index ebfbf29..1fef90b 100644 --- a/src/lib/server/db/schema/media.ts +++ b/src/lib/server/db/schema/media.ts @@ -7,7 +7,7 @@ interface ThumbnailTable { category_id: number | null; path: string; updated_at: Date; - encrypted_content_iv: string; // Base64 + encrypted_content_iv: string | null; // Base64 } declare module 
"./index" { diff --git a/src/lib/server/db/schema/upload.ts b/src/lib/server/db/schema/upload.ts new file mode 100644 index 0000000..5635921 --- /dev/null +++ b/src/lib/server/db/schema/upload.ts @@ -0,0 +1,30 @@ +import type { Generated } from "kysely"; +import type { Ciphertext } from "./utils"; + +interface UploadSessionTable { + id: string; + type: "file" | "thumbnail" | "migration"; + user_id: number; + path: string; + bitmap: Buffer; + total_chunks: number; + uploaded_chunks: Generated; + expires_at: Date; + + parent_id: number | null; + master_encryption_key_version: number | null; + encrypted_data_encryption_key: string | null; // Base64 + data_encryption_key_version: Date | null; + hmac_secret_key_version: number | null; + content_type: string | null; + encrypted_name: Ciphertext | null; + encrypted_created_at: Ciphertext | null; + encrypted_last_modified_at: Ciphertext | null; + file_id: number | null; +} + +declare module "./index" { + interface Database { + upload_session: UploadSessionTable; + } +} diff --git a/src/lib/server/db/schema/util.ts b/src/lib/server/db/schema/utils.ts similarity index 100% rename from src/lib/server/db/schema/util.ts rename to src/lib/server/db/schema/utils.ts diff --git a/src/lib/server/db/upload.ts b/src/lib/server/db/upload.ts new file mode 100644 index 0000000..9dd85a0 --- /dev/null +++ b/src/lib/server/db/upload.ts @@ -0,0 +1,192 @@ +import { sql } from "kysely"; +import { IntegrityError } from "./error"; +import db from "./kysely"; +import type { Ciphertext } from "./schema"; + +interface BaseUploadSession { + id: string; + userId: number; + path: string; + bitmap: Buffer; + totalChunks: number; + uploadedChunks: number; + expiresAt: Date; +} + +interface FileUploadSession extends BaseUploadSession { + type: "file"; + parentId: DirectoryId; + mekVersion: number; + encDek: string; + dekVersion: Date; + hskVersion: number | null; + contentType: string; + encName: Ciphertext; + encCreatedAt: Ciphertext | null; + 
encLastModifiedAt: Ciphertext; +} + +interface ThumbnailOrMigrationUploadSession extends BaseUploadSession { + type: "thumbnail" | "migration"; + fileId: number; + dekVersion: Date; +} + +export const createFileUploadSession = async ( + params: Omit, +) => { + await db.transaction().execute(async (trx) => { + const mek = await trx + .selectFrom("master_encryption_key") + .select("version") + .where("user_id", "=", params.userId) + .where("state", "=", "active") + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (mek?.version !== params.mekVersion) { + throw new IntegrityError("Inactive MEK version"); + } + + if (params.hskVersion) { + const hsk = await trx + .selectFrom("hmac_secret_key") + .select("version") + .where("user_id", "=", params.userId) + .where("state", "=", "active") + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (hsk?.version !== params.hskVersion) { + throw new IntegrityError("Inactive HSK version"); + } + } + + await trx + .insertInto("upload_session") + .values({ + id: params.id, + type: "file", + user_id: params.userId, + path: params.path, + bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)), + total_chunks: params.totalChunks, + expires_at: params.expiresAt, + parent_id: params.parentId !== "root" ? 
params.parentId : null, + master_encryption_key_version: params.mekVersion, + encrypted_data_encryption_key: params.encDek, + data_encryption_key_version: params.dekVersion, + hmac_secret_key_version: params.hskVersion, + content_type: params.contentType, + encrypted_name: params.encName, + encrypted_created_at: params.encCreatedAt, + encrypted_last_modified_at: params.encLastModifiedAt, + }) + .execute(); + }); +}; + +export const createThumbnailOrMigrationUploadSession = async ( + params: Omit, +) => { + await db.transaction().execute(async (trx) => { + const file = await trx + .selectFrom("file") + .select("data_encryption_key_version") + .where("id", "=", params.fileId) + .where("user_id", "=", params.userId) + .limit(1) + .forUpdate() + .executeTakeFirst(); + if (!file) { + throw new IntegrityError("File not found"); + } else if (file.data_encryption_key_version.getTime() !== params.dekVersion.getTime()) { + throw new IntegrityError("Invalid DEK version"); + } + + await trx + .insertInto("upload_session") + .values({ + id: params.id, + type: params.type, + user_id: params.userId, + path: params.path, + bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)), + total_chunks: params.totalChunks, + expires_at: params.expiresAt, + file_id: params.fileId, + data_encryption_key_version: params.dekVersion, + }) + .execute(); + }); +}; + +export const getUploadSession = async (sessionId: string, userId: number) => { + const session = await db + .selectFrom("upload_session") + .selectAll() + .where("id", "=", sessionId) + .where("user_id", "=", userId) + .where("expires_at", ">", new Date()) + .limit(1) + .executeTakeFirst(); + if (!session) { + return null; + } else if (session.type === "file") { + return { + type: "file", + id: session.id, + userId: session.user_id, + path: session.path, + bitmap: session.bitmap, + totalChunks: session.total_chunks, + uploadedChunks: session.uploaded_chunks, + expiresAt: session.expires_at, + parentId: session.parent_id ?? 
"root", + mekVersion: session.master_encryption_key_version!, + encDek: session.encrypted_data_encryption_key!, + dekVersion: session.data_encryption_key_version!, + hskVersion: session.hmac_secret_key_version, + contentType: session.content_type!, + encName: session.encrypted_name!, + encCreatedAt: session.encrypted_created_at, + encLastModifiedAt: session.encrypted_last_modified_at!, + } satisfies FileUploadSession; + } else { + return { + type: session.type, + id: session.id, + userId: session.user_id, + path: session.path, + bitmap: session.bitmap, + totalChunks: session.total_chunks, + uploadedChunks: session.uploaded_chunks, + expiresAt: session.expires_at, + fileId: session.file_id!, + dekVersion: session.data_encryption_key_version!, + } satisfies ThumbnailOrMigrationUploadSession; + } +}; + +export const markChunkAsUploaded = async (sessionId: string, chunkIndex: number) => { + await db + .updateTable("upload_session") + .set({ + bitmap: sql`set_bit(${sql.ref("bitmap")}, ${chunkIndex - 1}, 1)`, + }) + .where("id", "=", sessionId) + .execute(); +}; + +export const deleteUploadSession = async (trx: typeof db, sessionId: string) => { + await trx.deleteFrom("upload_session").where("id", "=", sessionId).execute(); +}; + +export const cleanupExpiredUploadSessions = async () => { + const sessions = await db + .deleteFrom("upload_session") + .where("expires_at", "<=", new Date()) + .returning("path") + .execute(); + return sessions.map(({ path }) => path); +}; diff --git a/src/lib/server/loadenv.ts b/src/lib/server/loadenv.ts index 3a805d8..f8fd68f 100644 --- a/src/lib/server/loadenv.ts +++ b/src/lib/server/loadenv.ts @@ -26,4 +26,5 @@ export default { }, libraryPath: env.LIBRARY_PATH || "library", thumbnailsPath: env.THUMBNAILS_PATH || "thumbnails", + uploadsPath: env.UPLOADS_PATH || "uploads", }; diff --git a/src/lib/server/modules/filesystem.ts b/src/lib/server/modules/filesystem.ts index 65cb9ec..ade7d73 100644 --- a/src/lib/server/modules/filesystem.ts +++ 
b/src/lib/server/modules/filesystem.ts @@ -1,4 +1,10 @@ -import { unlink } from "fs/promises"; +import { rm, unlink } from "fs/promises"; + +export const safeRecursiveRm = async (path: string | null | undefined) => { + if (path) { + await rm(path, { recursive: true }).catch(console.error); + } +}; export const safeUnlink = async (path: string | null | undefined) => { if (path) { diff --git a/src/lib/server/schemas/category.ts b/src/lib/server/schemas/category.ts deleted file mode 100644 index 0bb07a7..0000000 --- a/src/lib/server/schemas/category.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { z } from "zod"; - -export const categoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/server/schemas/directory.ts b/src/lib/server/schemas/directory.ts deleted file mode 100644 index dba44b9..0000000 --- a/src/lib/server/schemas/directory.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { z } from "zod"; - -export const directoryIdSchema = z.union([z.literal("root"), z.int().positive()]); diff --git a/src/lib/server/schemas/file.ts b/src/lib/server/schemas/file.ts deleted file mode 100644 index 811e590..0000000 --- a/src/lib/server/schemas/file.ts +++ /dev/null @@ -1,36 +0,0 @@ -import mime from "mime"; -import { z } from "zod"; -import { directoryIdSchema } from "./directory"; - -export const fileThumbnailUploadRequest = z.object({ - dekVersion: z.iso.datetime(), - contentIv: z.base64().nonempty(), -}); -export type FileThumbnailUploadRequest = z.input; - -export const fileUploadRequest = z.object({ - parent: directoryIdSchema, - mekVersion: z.int().positive(), - dek: z.base64().nonempty(), - dekVersion: z.iso.datetime(), - hskVersion: z.int().positive(), - contentHmac: z.base64().nonempty(), - contentType: z - .string() - .trim() - .nonempty() - .refine((value) => mime.getExtension(value) !== null), // MIME type - contentIv: z.base64().nonempty(), - name: z.base64().nonempty(), - nameIv: z.base64().nonempty(), - createdAt: 
z.base64().nonempty().optional(), - createdAtIv: z.base64().nonempty().optional(), - lastModifiedAt: z.base64().nonempty(), - lastModifiedAtIv: z.base64().nonempty(), -}); -export type FileUploadRequest = z.input; - -export const fileUploadResponse = z.object({ - file: z.int().positive(), -}); -export type FileUploadResponse = z.output; diff --git a/src/lib/server/schemas/index.ts b/src/lib/server/schemas/index.ts deleted file mode 100644 index f7a2bc1..0000000 --- a/src/lib/server/schemas/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export * from "./category"; -export * from "./directory"; -export * from "./file"; diff --git a/src/lib/server/services/file.ts b/src/lib/server/services/file.ts index 9032ffb..0d67303 100644 --- a/src/lib/server/services/file.ts +++ b/src/lib/server/services/file.ts @@ -1,126 +1,74 @@ import { error } from "@sveltejs/kit"; -import { createHash } from "crypto"; -import { createReadStream, createWriteStream } from "fs"; -import { mkdir, stat } from "fs/promises"; -import { dirname } from "path"; +import { createReadStream } from "fs"; +import { stat } from "fs/promises"; import { Readable } from "stream"; -import { pipeline } from "stream/promises"; -import { v4 as uuidv4 } from "uuid"; -import { FileRepo, MediaRepo, IntegrityError } from "$lib/server/db"; -import env from "$lib/server/loadenv"; -import { safeUnlink } from "$lib/server/modules/filesystem"; +import { FileRepo, MediaRepo } from "$lib/server/db"; -export const getFileStream = async (userId: number, fileId: number) => { +const createEncContentStream = async ( + path: string, + iv?: Buffer, + range?: { start?: number; end?: number }, +) => { + const { size: fileSize } = await stat(path); + const ivSize = iv?.byteLength ?? 0; + const totalSize = fileSize + ivSize; + + const start = range?.start ?? 0; + const end = range?.end ?? 
totalSize - 1; + if (start > end || start < 0 || end >= totalSize) { + error(416, "Invalid range"); + } + + return { + encContentStream: Readable.toWeb( + Readable.from( + (async function* () { + if (start < ivSize) { + yield iv!.subarray(start, Math.min(end + 1, ivSize)); + } + if (end >= ivSize) { + yield* createReadStream(path, { + start: Math.max(0, start - ivSize), + end: end - ivSize, + }); + } + })(), + ), + ), + range: { start, end, total: totalSize }, + }; +}; + +export const getFileStream = async ( + userId: number, + fileId: number, + range?: { start?: number; end?: number }, +) => { const file = await FileRepo.getFile(userId, fileId); if (!file) { error(404, "Invalid file id"); } - const { size } = await stat(file.path); - return { - encContentStream: Readable.toWeb(createReadStream(file.path)), - encContentSize: size, - }; + return createEncContentStream( + file.path, + file.encContentIv ? Buffer.from(file.encContentIv, "base64") : undefined, + range, + ); }; -export const getFileThumbnailStream = async (userId: number, fileId: number) => { +export const getFileThumbnailStream = async ( + userId: number, + fileId: number, + range?: { start?: number; end?: number }, +) => { const thumbnail = await MediaRepo.getFileThumbnail(userId, fileId); if (!thumbnail) { error(404, "File or its thumbnail not found"); } - const { size } = await stat(thumbnail.path); - return { - encContentStream: Readable.toWeb(createReadStream(thumbnail.path)), - encContentSize: size, - }; -}; - -export const uploadFileThumbnail = async ( - userId: number, - fileId: number, - dekVersion: Date, - encContentIv: string, - encContentStream: Readable, -) => { - const path = `${env.thumbnailsPath}/${userId}/${uuidv4()}`; - await mkdir(dirname(path), { recursive: true }); - - try { - await pipeline(encContentStream, createWriteStream(path, { flags: "wx", mode: 0o600 })); - - const oldPath = await MediaRepo.updateFileThumbnail( - userId, - fileId, - dekVersion, - path, - encContentIv, - ); 
- safeUnlink(oldPath); // Intended - } catch (e) { - await safeUnlink(path); - - if (e instanceof IntegrityError) { - if (e.message === "File not found") { - error(404, "File not found"); - } else if (e.message === "Invalid DEK version") { - error(400, "Mismatched DEK version"); - } - } - throw e; - } -}; - -export const uploadFile = async ( - params: Omit, - encContentStream: Readable, - encContentHash: Promise, -) => { - const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000); - const oneMinuteLater = new Date(Date.now() + 60 * 1000); - if (params.dekVersion <= oneDayAgo || params.dekVersion >= oneMinuteLater) { - error(400, "Invalid DEK version"); - } - - const path = `${env.libraryPath}/${params.userId}/${uuidv4()}`; - await mkdir(dirname(path), { recursive: true }); - - try { - const hashStream = createHash("sha256"); - const [, hash] = await Promise.all([ - pipeline( - encContentStream, - async function* (source) { - for await (const chunk of source) { - hashStream.update(chunk); - yield chunk; - } - }, - createWriteStream(path, { flags: "wx", mode: 0o600 }), - ), - encContentHash, - ]); - if (hashStream.digest("base64") !== hash) { - throw new Error("Invalid checksum"); - } - - const { id: fileId } = await FileRepo.registerFile({ - ...params, - path, - encContentHash: hash, - }); - return { fileId }; - } catch (e) { - await safeUnlink(path); - - if (e instanceof IntegrityError && e.message === "Inactive MEK version") { - error(400, "Invalid MEK version"); - } else if ( - e instanceof Error && - (e.message === "Invalid request body" || e.message === "Invalid checksum") - ) { - error(400, "Invalid request body"); - } - throw e; - } + return createEncContentStream( + thumbnail.path, + thumbnail.encContentIv ? 
Buffer.from(thumbnail.encContentIv, "base64") : undefined, + range, + ); }; diff --git a/src/lib/server/services/upload.ts b/src/lib/server/services/upload.ts new file mode 100644 index 0000000..d654f42 --- /dev/null +++ b/src/lib/server/services/upload.ts @@ -0,0 +1,88 @@ +import { error } from "@sveltejs/kit"; +import { createHash } from "crypto"; +import { createWriteStream } from "fs"; +import { Readable } from "stream"; +import { ENCRYPTION_OVERHEAD, ENCRYPTED_CHUNK_SIZE } from "$lib/constants"; +import { UploadRepo } from "$lib/server/db"; +import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem"; + +const chunkLocks = new Set(); + +const isChunkUploaded = (bitmap: Buffer, chunkIndex: number) => { + chunkIndex -= 1; + const byte = bitmap[Math.floor(chunkIndex / 8)]; + return !!byte && (byte & (1 << (chunkIndex % 8))) !== 0; // Postgres sucks +}; + +export const uploadChunk = async ( + userId: number, + sessionId: string, + chunkIndex: number, + encChunkStream: Readable, + encChunkHash: string, +) => { + const lockKey = `${sessionId}/${chunkIndex}`; + if (chunkLocks.has(lockKey)) { + error(409, "Chunk upload already in progress"); + } else { + chunkLocks.add(lockKey); + } + + let filePath; + + try { + const session = await UploadRepo.getUploadSession(sessionId, userId); + if (!session) { + error(404, "Invalid upload id"); + } else if (chunkIndex > session.totalChunks) { + error(400, "Invalid chunk index"); + } else if (isChunkUploaded(session.bitmap, chunkIndex)) { + error(409, "Chunk already uploaded"); + } + + const isLastChunk = chunkIndex === session.totalChunks; + filePath = `${session.path}/${chunkIndex}`; + + const hashStream = createHash("sha256"); + const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 }); + let writtenBytes = 0; + + for await (const chunk of encChunkStream) { + hashStream.update(chunk); + writeStream.write(chunk); + writtenBytes += chunk.length; + } + + await new Promise((resolve, reject) => { 
+ writeStream.end((e: any) => (e ? reject(e) : resolve())); + }); + + if (hashStream.digest("base64") !== encChunkHash) { + throw new Error("Invalid checksum"); + } else if ( + (!isLastChunk && writtenBytes !== ENCRYPTED_CHUNK_SIZE) || + (isLastChunk && (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > ENCRYPTED_CHUNK_SIZE)) + ) { + throw new Error("Invalid chunk size"); + } + + await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex); + } catch (e) { + await safeUnlink(filePath); + + if ( + e instanceof Error && + (e.message === "Invalid checksum" || e.message === "Invalid chunk size") + ) { + error(400, "Invalid request body"); + } + throw e; + } finally { + chunkLocks.delete(lockKey); + } +}; + +export const cleanupExpiredUploadSessions = async () => { + const paths = await UploadRepo.cleanupExpiredUploadSessions(); + await Promise.all(paths.map(safeRecursiveRm)); +}; diff --git a/src/lib/serviceWorker/client.ts b/src/lib/serviceWorker/client.ts new file mode 100644 index 0000000..771c15e --- /dev/null +++ b/src/lib/serviceWorker/client.ts @@ -0,0 +1,39 @@ +import { DECRYPTED_FILE_URL_PREFIX } from "$lib/constants"; +import type { FileMetadata, ServiceWorkerMessage, ServiceWorkerResponse } from "./types"; + +const PREPARE_TIMEOUT_MS = 5000; + +const getServiceWorker = async () => { + const registration = await navigator.serviceWorker.ready; + const sw = registration.active; + if (!sw) { + throw new Error("Service worker not activated"); + } + return sw; +}; + +export const prepareFileDecryption = async (id: number, metadata: FileMetadata) => { + const sw = await getServiceWorker(); + return new Promise((resolve, reject) => { + const timeout = setTimeout( + () => reject(new Error("Service worker timeout")), + PREPARE_TIMEOUT_MS, + ); + const handler = (event: MessageEvent) => { + if (event.data.type === "decryption-ready" && event.data.fileId === id) { + clearTimeout(timeout); + navigator.serviceWorker.removeEventListener("message", handler); + resolve(); 
+ } + }; + navigator.serviceWorker.addEventListener("message", handler); + + sw.postMessage({ + type: "decryption-prepare", + fileId: id, + ...metadata, + } satisfies ServiceWorkerMessage); + }); +}; + +export const getDecryptedFileUrl = (id: number) => `${DECRYPTED_FILE_URL_PREFIX}${id}`; diff --git a/src/lib/serviceWorker/index.ts b/src/lib/serviceWorker/index.ts new file mode 100644 index 0000000..d2ec230 --- /dev/null +++ b/src/lib/serviceWorker/index.ts @@ -0,0 +1,2 @@ +export * from "./client"; +export * from "./types"; diff --git a/src/lib/serviceWorker/types.ts b/src/lib/serviceWorker/types.ts new file mode 100644 index 0000000..97edd6d --- /dev/null +++ b/src/lib/serviceWorker/types.ts @@ -0,0 +1,19 @@ +export interface FileMetadata { + isLegacy: boolean; + dataKey: CryptoKey; + encContentSize: number; + contentType: string; +} + +export interface DecryptionPrepareMessage extends FileMetadata { + type: "decryption-prepare"; + fileId: number; +} + +export interface DecryptionReadyMessage { + type: "decryption-ready"; + fileId: number; +} + +export type ServiceWorkerMessage = DecryptionPrepareMessage; +export type ServiceWorkerResponse = DecryptionReadyMessage; diff --git a/src/lib/services/file.ts b/src/lib/services/file.ts index 05a92e1..b8db243 100644 --- a/src/lib/services/file.ts +++ b/src/lib/services/file.ts @@ -6,38 +6,42 @@ import { downloadFile, deleteFileThumbnailCache, } from "$lib/modules/file"; -import type { FileThumbnailUploadRequest } from "$lib/server/schemas"; +import { uploadBlob } from "$lib/modules/upload"; import { trpc } from "$trpc/client"; export const requestFileDownload = async ( fileId: number, - fileEncryptedIv: string, dataKey: CryptoKey, + isLegacy: boolean, ) => { const cache = await getFileCache(fileId); if (cache) return cache; - const fileBuffer = await downloadFile(fileId, fileEncryptedIv, dataKey); + const fileBuffer = await downloadFile(fileId, dataKey, isLegacy); storeFileCache(fileId, fileBuffer); // Intended return 
fileBuffer; }; export const requestFileThumbnailUpload = async ( fileId: number, + thumbnail: Blob, + dataKey: CryptoKey, dataKeyVersion: Date, - thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: string }, ) => { - const form = new FormData(); - form.set( - "metadata", - JSON.stringify({ - dekVersion: dataKeyVersion.toISOString(), - contentIv: thumbnailEncrypted.iv, - } satisfies FileThumbnailUploadRequest), - ); - form.set("content", new Blob([thumbnailEncrypted.ciphertext])); + try { + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); - return await fetch(`/api/file/${fileId}/thumbnail/upload`, { method: "POST", body: form }); + await uploadBlob(uploadId, thumbnail, dataKey); + + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); + return true; + } catch { + // TODO: Error Handling + return false; + } }; export const requestDeletedFilesCleanup = async () => { diff --git a/src/lib/utils/concurrency/BoundedQueue.ts b/src/lib/utils/concurrency/BoundedQueue.ts new file mode 100644 index 0000000..5970914 --- /dev/null +++ b/src/lib/utils/concurrency/BoundedQueue.ts @@ -0,0 +1,44 @@ +export class BoundedQueue { + private isClosed = false; + private reservedCount = 0; + private items: T[] = []; + + private waitersNotFull: (() => void)[] = []; + private waitersNotEmpty: (() => void)[] = []; + + constructor(private readonly maxSize: number) {} + + async push(item: T) { + if (this.isClosed) { + throw new Error("Queue closed"); + } + + while (this.reservedCount >= this.maxSize) { + await new Promise((resolve) => this.waitersNotFull.push(resolve)); + if (this.isClosed) throw new Error("Queue closed"); + } + + this.reservedCount++; + this.items.push(item); + this.waitersNotEmpty.shift()?.(); + } + + async pop() { + while (this.items.length === 0) { + if (this.isClosed) return null; + await new Promise((resolve) => this.waitersNotEmpty.push(resolve)); + } + + const item = 
this.items.shift()!; + this.reservedCount--; + this.waitersNotFull.shift()?.(); + + return item; + } + + close() { + this.isClosed = true; + while (this.waitersNotEmpty.length > 0) this.waitersNotEmpty.shift()!(); + while (this.waitersNotFull.length > 0) this.waitersNotFull.shift()!(); + } +} diff --git a/src/lib/utils/HybridPromise.ts b/src/lib/utils/concurrency/HybridPromise.ts similarity index 100% rename from src/lib/utils/HybridPromise.ts rename to src/lib/utils/concurrency/HybridPromise.ts diff --git a/src/lib/modules/scheduler.ts b/src/lib/utils/concurrency/Scheduler.ts similarity index 100% rename from src/lib/modules/scheduler.ts rename to src/lib/utils/concurrency/Scheduler.ts diff --git a/src/lib/utils/concurrency/index.ts b/src/lib/utils/concurrency/index.ts new file mode 100644 index 0000000..59fe81d --- /dev/null +++ b/src/lib/utils/concurrency/index.ts @@ -0,0 +1,3 @@ +export * from "./BoundedQueue"; +export * from "./HybridPromise"; +export * from "./Scheduler"; diff --git a/src/lib/utils/index.ts b/src/lib/utils/index.ts index 5d5b9d4..4c576d5 100644 --- a/src/lib/utils/index.ts +++ b/src/lib/utils/index.ts @@ -1,4 +1,4 @@ +export * from "./concurrency"; export * from "./format"; export * from "./gotoStateful"; -export * from "./HybridPromise"; export * from "./sort"; diff --git a/src/params/thumbnail.ts b/src/params/thumbnail.ts new file mode 100644 index 0000000..3faf298 --- /dev/null +++ b/src/params/thumbnail.ts @@ -0,0 +1,5 @@ +import type { ParamMatcher } from "@sveltejs/kit"; + +export const match: ParamMatcher = (param) => { + return param === "thumbnail"; +}; diff --git a/src/routes/(fullscreen)/file/[id]/+page.svelte b/src/routes/(fullscreen)/file/[id]/+page.svelte index 0b344bc..053d6bf 100644 --- a/src/routes/(fullscreen)/file/[id]/+page.svelte +++ b/src/routes/(fullscreen)/file/[id]/+page.svelte @@ -5,7 +5,7 @@ import { page } from "$app/state"; import { FullscreenDiv } from "$lib/components/atoms"; import { Categories, 
IconEntryButton, TopBar } from "$lib/components/molecules"; - import { getFileInfo, type FileInfo, type MaybeFileInfo } from "$lib/modules/filesystem"; + import { getFileInfo, type MaybeFileInfo } from "$lib/modules/filesystem"; import { captureVideoThumbnail } from "$lib/modules/thumbnail"; import { getFileDownloadState } from "$lib/modules/file"; import { masterKeyStore } from "$lib/stores"; @@ -17,6 +17,7 @@ requestFileDownload, requestThumbnailUpload, requestFileAdditionToCategory, + requestVideoStream, } from "./service"; import TopBarMenu from "./TopBarMenu.svelte"; @@ -37,6 +38,7 @@ let viewerType: "image" | "video" | undefined = $state(); let fileBlob: Blob | undefined = $state(); let fileBlobUrl: string | undefined = $state(); + let videoStreamUrl: string | undefined = $state(); let videoElement: HTMLVideoElement | undefined = $state(); const updateViewer = async (buffer: ArrayBuffer, contentType: string) => { @@ -95,14 +97,27 @@ untrack(() => { if (!downloadState && !isDownloadRequested) { isDownloadRequested = true; - requestFileDownload(data.id, info!.contentIv!, info!.dataKey!.key).then( - async (buffer) => { - const blob = await updateViewer(buffer, contentType); - if (!viewerType) { - FileSaver.saveAs(blob, info!.name); + + if (viewerType === "video" && !info!.isLegacy) { + requestVideoStream(data.id, info!.dataKey!.key, contentType).then((streamUrl) => { + if (streamUrl) { + videoStreamUrl = streamUrl; + } else { + requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then((buffer) => + updateViewer(buffer, contentType), + ); } - }, - ); + }); + } else { + requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then( + async (buffer) => { + const blob = await updateViewer(buffer, contentType); + if (!viewerType) { + FileSaver.saveAs(blob, info!.name); + } + }, + ); + } } }); } @@ -110,7 +125,9 @@ $effect(() => { if (info?.exists && downloadState?.status === "decrypted") { - untrack(() => !isDownloadRequested && 
updateViewer(downloadState.result!, info!.contentIv!)); + untrack( + () => !isDownloadRequested && updateViewer(downloadState.result!, info!.contentType!), + ); } }); @@ -137,6 +154,7 @@ ? info?.parentId : undefined} {fileBlob} + downloadUrl={videoStreamUrl} filename={info?.name} /> @@ -159,9 +177,10 @@ {@render viewerLoading("이미지를 불러오고 있어요.")} {/if} {:else if viewerType === "video"} - {#if fileBlobUrl} + {#if videoStreamUrl || fileBlobUrl}
- + updateThumbnail(info?.dataKey?.key!, info?.dataKey?.version!)} diff --git a/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte b/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte index a037b61..d713e8c 100644 --- a/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte +++ b/src/routes/(fullscreen)/file/[id]/TopBarMenu.svelte @@ -10,17 +10,29 @@ interface Props { directoryId?: "root" | number; + downloadUrl?: string; fileBlob?: Blob; filename?: string; isOpen: boolean; } - let { directoryId, fileBlob, filename, isOpen = $bindable() }: Props = $props(); + let { directoryId, downloadUrl, fileBlob, filename, isOpen = $bindable() }: Props = $props(); + + const handleDownload = () => { + if (fileBlob && filename) { + FileSaver.saveAs(fileBlob, filename); + } else if (downloadUrl && filename) { + // Use streaming download via Content-Disposition header + const url = new URL(downloadUrl, window.location.origin); + url.searchParams.set("download", filename); + window.open(url.toString(), "_blank"); + } + }; (isOpen = false)} /> -{#if isOpen && (directoryId || fileBlob)} +{#if isOpen && (directoryId || downloadUrl || fileBlob)}
{ - FileSaver.saveAs(fileBlob, filename); - })} + {#if fileBlob || downloadUrl} + {@render menuButton(IconCloudDownload, "다운로드", handleDownload)} {/if}
diff --git a/src/routes/(fullscreen)/file/[id]/service.ts b/src/routes/(fullscreen)/file/[id]/service.ts index 09ec86f..598418b 100644 --- a/src/routes/(fullscreen)/file/[id]/service.ts +++ b/src/routes/(fullscreen)/file/[id]/service.ts @@ -1,23 +1,41 @@ -import { encryptData } from "$lib/modules/crypto"; import { storeFileThumbnailCache } from "$lib/modules/file"; +import { prepareFileDecryption, getDecryptedFileUrl } from "$lib/serviceWorker"; import { requestFileThumbnailUpload } from "$lib/services/file"; import { trpc } from "$trpc/client"; export { requestCategoryCreation, requestFileRemovalFromCategory } from "$lib/services/category"; export { requestFileDownload } from "$lib/services/file"; +export const requestVideoStream = async ( + fileId: number, + dataKey: CryptoKey, + contentType: string, +) => { + const res = await fetch(`/api/file/${fileId}/download`, { method: "HEAD" }); + if (!res.ok) return null; + + const encContentSize = parseInt(res.headers.get("Content-Length") ?? 
"0", 10); + if (encContentSize <= 0) return null; + + try { + await prepareFileDecryption(fileId, { isLegacy: false, dataKey, encContentSize, contentType }); + return getDecryptedFileUrl(fileId); + } catch { + // TODO: Error Handling + return null; + } +}; + export const requestThumbnailUpload = async ( fileId: number, thumbnail: Blob, dataKey: CryptoKey, dataKeyVersion: Date, ) => { - const thumbnailBuffer = await thumbnail.arrayBuffer(); - const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey); - const res = await requestFileThumbnailUpload(fileId, dataKeyVersion, thumbnailEncrypted); - if (!res.ok) return false; + const res = await requestFileThumbnailUpload(fileId, thumbnail, dataKey, dataKeyVersion); + if (!res) return false; - storeFileThumbnailCache(fileId, thumbnailBuffer); // Intended + void thumbnail.arrayBuffer().then((buffer) => storeFileThumbnailCache(fileId, buffer)); return true; }; diff --git a/src/routes/(fullscreen)/settings/migration/+page.server.ts b/src/routes/(fullscreen)/settings/migration/+page.server.ts new file mode 100644 index 0000000..3e1c32a --- /dev/null +++ b/src/routes/(fullscreen)/settings/migration/+page.server.ts @@ -0,0 +1,7 @@ +import { createCaller } from "$trpc/router.server"; +import type { PageServerLoad } from "./$types"; + +export const load: PageServerLoad = async (event) => { + const files = await createCaller(event).file.listLegacy(); + return { files }; +}; diff --git a/src/routes/(fullscreen)/settings/migration/+page.svelte b/src/routes/(fullscreen)/settings/migration/+page.svelte new file mode 100644 index 0000000..4db6a80 --- /dev/null +++ b/src/routes/(fullscreen)/settings/migration/+page.svelte @@ -0,0 +1,79 @@ + + + + 암호화 마이그레이션 + + + + + {#if files.length > 0} +
+

+ 이전 버전의 ArkVault에서 업로드된 {files.length}개 파일을 다시 암호화할 수 있어요. +

+
+ {#each files as { info, state } (info.id)} + {#if info.exists} + goto(`/file/${id}`)} + onMigrateClick={requestFileMigration} + /> + {/if} + {/each} +
+
+ + + + {:else} +
+

+ {#if data.files.length === 0} + 마이그레이션할 파일이 없어요. + {:else} + 파일 목록을 불러오고 있어요. + {/if} +

+
+ {/if} +
diff --git a/src/routes/(fullscreen)/settings/migration/File.svelte b/src/routes/(fullscreen)/settings/migration/File.svelte new file mode 100644 index 0000000..d16e800 --- /dev/null +++ b/src/routes/(fullscreen)/settings/migration/File.svelte @@ -0,0 +1,52 @@ + + + + + onclick(info)} + actionButtonIcon={!state || state.status === "error" ? IconSync : undefined} + onActionButtonClick={() => onMigrateClick(info)} + actionButtonClass="text-gray-800" +> + + diff --git a/src/routes/(fullscreen)/settings/migration/service.svelte.ts b/src/routes/(fullscreen)/settings/migration/service.svelte.ts new file mode 100644 index 0000000..1bdf869 --- /dev/null +++ b/src/routes/(fullscreen)/settings/migration/service.svelte.ts @@ -0,0 +1,105 @@ +import { limitFunction } from "p-limit"; +import { SvelteMap } from "svelte/reactivity"; +import { CHUNK_SIZE } from "$lib/constants"; +import type { FileInfo } from "$lib/modules/filesystem"; +import { uploadBlob } from "$lib/modules/upload"; +import { requestFileDownload } from "$lib/services/file"; +import { Scheduler } from "$lib/utils"; +import { trpc } from "$trpc/client"; + +export type MigrationStatus = + | "queued" + | "downloading" + | "upload-pending" + | "uploading" + | "uploaded" + | "error"; + +export interface MigrationState { + status: MigrationStatus; + progress?: number; + rate?: number; +} + +const scheduler = new Scheduler(); +const states = new SvelteMap(); + +const createState = (status: MigrationStatus): MigrationState => { + const state = $state({ status }); + return state; +}; + +export const getMigrationState = (fileId: number) => { + return states.get(fileId); +}; + +export const clearMigrationStates = () => { + for (const [id, state] of states) { + if (state.status === "uploaded" || state.status === "error") { + states.delete(id); + } + } +}; + +const requestFileUpload = limitFunction( + async ( + state: MigrationState, + fileId: number, + fileBuffer: ArrayBuffer, + dataKey: CryptoKey, + dataKeyVersion: Date, + 
) => { + state.status = "uploading"; + + const { uploadId } = await trpc().upload.startMigrationUpload.mutate({ + file: fileId, + chunks: Math.ceil(fileBuffer.byteLength / CHUNK_SIZE), + dekVersion: dataKeyVersion, + }); + + await uploadBlob(uploadId, new Blob([fileBuffer]), dataKey, { + onProgress(s) { + state.progress = s.progress; + state.rate = s.rate; + }, + }); + + await trpc().upload.completeMigrationUpload.mutate({ uploadId }); + state.status = "uploaded"; + }, + { concurrency: 1 }, +); + +export const requestFileMigration = async (fileInfo: FileInfo) => { + let state = states.get(fileInfo.id); + if (state) { + if (state.status !== "error") return; + state.status = "queued"; + state.progress = undefined; + state.rate = undefined; + } else { + state = createState("queued"); + states.set(fileInfo.id, state); + } + + try { + const dataKey = fileInfo.dataKey; + if (!dataKey) { + throw new Error("Data key not available"); + } + + let fileBuffer: ArrayBuffer | undefined; + + await scheduler.schedule( + async () => { + state.status = "downloading"; + fileBuffer = await requestFileDownload(fileInfo.id, dataKey.key, true); + return fileBuffer.byteLength; + }, + () => requestFileUpload(state, fileInfo.id, fileBuffer!, dataKey.key, dataKey.version), + ); + } catch (e) { + state.status = "error"; + throw e; + } +}; diff --git a/src/routes/(fullscreen)/settings/thumbnail/File.svelte b/src/routes/(fullscreen)/settings/thumbnail/File.svelte index 4440cf2..edb7e91 100644 --- a/src/routes/(fullscreen)/settings/thumbnail/File.svelte +++ b/src/routes/(fullscreen)/settings/thumbnail/File.svelte @@ -3,7 +3,6 @@ queued: "대기 중", "generation-pending": "준비 중", generating: "생성하는 중", - "upload-pending": "업로드를 기다리는 중", uploading: "업로드하는 중", error: "실패", } as const; diff --git a/src/routes/(fullscreen)/settings/thumbnail/service.ts b/src/routes/(fullscreen)/settings/thumbnail/service.ts index 85226b0..5c4c61d 100644 --- a/src/routes/(fullscreen)/settings/thumbnail/service.ts +++ 
b/src/routes/(fullscreen)/settings/thumbnail/service.ts @@ -1,17 +1,15 @@ import { limitFunction } from "p-limit"; import { SvelteMap } from "svelte/reactivity"; -import { encryptData } from "$lib/modules/crypto"; import { storeFileThumbnailCache } from "$lib/modules/file"; import type { FileInfo } from "$lib/modules/filesystem"; -import { Scheduler } from "$lib/modules/scheduler"; -import { generateThumbnail as doGenerateThumbnail } from "$lib/modules/thumbnail"; +import { generateThumbnail } from "$lib/modules/thumbnail"; import { requestFileDownload, requestFileThumbnailUpload } from "$lib/services/file"; +import { Scheduler } from "$lib/utils"; export type GenerationStatus = | "queued" | "generation-pending" | "generating" - | "upload-pending" | "uploading" | "uploaded" | "error"; @@ -31,33 +29,27 @@ export const clearThumbnailGenerationStatuses = () => { } }; -const generateThumbnail = limitFunction( - async (fileId: number, fileBuffer: ArrayBuffer, fileType: string, dataKey: CryptoKey) => { - statuses.set(fileId, "generating"); - - const thumbnail = await doGenerateThumbnail(fileBuffer, fileType); - if (!thumbnail) return null; - - const thumbnailBuffer = await thumbnail.arrayBuffer(); - const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey); - statuses.set(fileId, "upload-pending"); - return { plaintext: thumbnailBuffer, ...thumbnailEncrypted }; - }, - { concurrency: 4 }, -); - const requestThumbnailUpload = limitFunction( - async ( - fileId: number, - dataKeyVersion: Date, - thumbnail: { plaintext: ArrayBuffer; ciphertext: ArrayBuffer; iv: string }, - ) => { - statuses.set(fileId, "uploading"); + async (fileInfo: FileInfo, fileBuffer: ArrayBuffer) => { + statuses.set(fileInfo.id, "generating"); - const res = await requestFileThumbnailUpload(fileId, dataKeyVersion, thumbnail); - if (!res.ok) return false; - statuses.set(fileId, "uploaded"); - storeFileThumbnailCache(fileId, thumbnail.plaintext); // Intended + const thumbnail = await 
generateThumbnail( + new Blob([fileBuffer], { type: fileInfo.contentType }), + ); + if (!thumbnail) return false; + + statuses.set(fileInfo.id, "uploading"); + + const res = await requestFileThumbnailUpload( + fileInfo.id, + thumbnail, + fileInfo.dataKey?.key!, + fileInfo.dataKey?.version!, + ); + if (!res) return false; + + statuses.set(fileInfo.id, "uploaded"); + void thumbnail.arrayBuffer().then((buffer) => storeFileThumbnailCache(fileInfo.id, buffer)); return true; }, { concurrency: 4 }, @@ -77,20 +69,11 @@ export const requestThumbnailGeneration = async (fileInfo: FileInfo) => { await scheduler.schedule( async () => { statuses.set(fileInfo.id, "generation-pending"); - file = await requestFileDownload(fileInfo.id, fileInfo.contentIv!, fileInfo.dataKey?.key!); + file = await requestFileDownload(fileInfo.id, fileInfo.dataKey?.key!, fileInfo.isLegacy!); return file.byteLength; }, async () => { - const thumbnail = await generateThumbnail( - fileInfo.id, - file!, - fileInfo.contentType, - fileInfo.dataKey?.key!, - ); - if ( - !thumbnail || - !(await requestThumbnailUpload(fileInfo.id, fileInfo.dataKey?.version!, thumbnail)) - ) { + if (!(await requestThumbnailUpload(fileInfo, file!))) { statuses.set(fileInfo.id, "error"); } }, diff --git a/src/routes/(main)/directory/[[id]]/+page.svelte b/src/routes/(main)/directory/[[id]]/+page.svelte index a0a4d53..f500f34 100644 --- a/src/routes/(main)/directory/[[id]]/+page.svelte +++ b/src/routes/(main)/directory/[[id]]/+page.svelte @@ -51,7 +51,7 @@ if (!files || files.length === 0) return; for (const file of files) { - requestFileUpload(file, data.id, $hmacSecretStore?.get(1)!, $masterKeyStore?.get(1)!, () => { + requestFileUpload(file, data.id, $masterKeyStore?.get(1)!, $hmacSecretStore?.get(1)!, () => { return new Promise((resolve) => { duplicatedFile = file; resolveForDuplicateFileModal = resolve; diff --git a/src/routes/(main)/directory/[[id]]/service.svelte.ts b/src/routes/(main)/directory/[[id]]/service.svelte.ts index 
f83bbaf..be6392c 100644 --- a/src/routes/(main)/directory/[[id]]/service.svelte.ts +++ b/src/routes/(main)/directory/[[id]]/service.svelte.ts @@ -81,14 +81,16 @@ export const requestDirectoryCreation = async ( export const requestFileUpload = async ( file: File, parentId: "root" | number, - hmacSecret: HmacSecret, masterKey: MasterKey, + hmacSecret: HmacSecret, onDuplicate: () => Promise, ) => { - const res = await uploadFile(file, parentId, hmacSecret, masterKey, onDuplicate); + const res = await uploadFile(file, parentId, masterKey, hmacSecret, onDuplicate); if (!res) return false; - storeFileCache(res.fileId, res.fileBuffer); // Intended + if (res.fileBuffer) { + storeFileCache(res.fileId, res.fileBuffer); // Intended + } if (res.thumbnailBuffer) { storeFileThumbnailCache(res.fileId, res.thumbnailBuffer); // Intended } diff --git a/src/routes/(main)/menu/+page.svelte b/src/routes/(main)/menu/+page.svelte index 40f4a26..2bfd3fc 100644 --- a/src/routes/(main)/menu/+page.svelte +++ b/src/routes/(main)/menu/+page.svelte @@ -5,6 +5,7 @@ import IconStorage from "~icons/material-symbols/storage"; import IconImage from "~icons/material-symbols/image"; + import IconLockReset from "~icons/material-symbols/lock-reset"; import IconPassword from "~icons/material-symbols/password"; import IconLogout from "~icons/material-symbols/logout"; @@ -41,6 +42,13 @@ > 썸네일 + goto("/settings/migration")} + icon={IconLockReset} + iconColor="text-teal-500" + > + 암호화 마이그레이션 +

보안

diff --git a/src/routes/api/file/[id]/[[thumbnail=thumbnail]]/download/+server.ts b/src/routes/api/file/[id]/[[thumbnail=thumbnail]]/download/+server.ts new file mode 100644 index 0000000..a79da41 --- /dev/null +++ b/src/routes/api/file/[id]/[[thumbnail=thumbnail]]/download/+server.ts @@ -0,0 +1,44 @@ +import { error } from "@sveltejs/kit"; +import { z } from "zod"; +import { parseRangeHeader, getContentRangeHeader } from "$lib/modules/http"; +import { authorize } from "$lib/server/modules/auth"; +import { getFileStream, getFileThumbnailStream } from "$lib/server/services/file"; +import type { RequestHandler, RouteParams } from "./$types"; + +const downloadHandler = async (locals: App.Locals, params: RouteParams, request: Request) => { + const { userId } = await authorize(locals, "activeClient"); + + const zodRes = z + .object({ + id: z.coerce.number().int().positive(), + }) + .safeParse(params); + if (!zodRes.success) error(400, "Invalid path parameters"); + const { id } = zodRes.data; + + const getStream = params.thumbnail ? getFileThumbnailStream : getFileStream; + const { encContentStream, range } = await getStream( + userId, + id, + parseRangeHeader(request.headers.get("Range")), + ); + return { + stream: encContentStream, + status: range ? 
206 : 200, + headers: { + "Accept-Ranges": "bytes", + "Content-Length": String(range.end - range.start + 1), + "Content-Type": "application/octet-stream", + ...getContentRangeHeader(range), + }, + }; +}; + +export const GET: RequestHandler = async ({ locals, params, request }) => { + const { stream, ...init } = await downloadHandler(locals, params, request); + return new Response(stream as ReadableStream, init); +}; + +export const HEAD: RequestHandler = async ({ locals, params, request }) => { + return new Response(null, await downloadHandler(locals, params, request)); +}; diff --git a/src/routes/api/file/[id]/download/+server.ts b/src/routes/api/file/[id]/download/+server.ts deleted file mode 100644 index 5040c73..0000000 --- a/src/routes/api/file/[id]/download/+server.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { error } from "@sveltejs/kit"; -import { z } from "zod"; -import { authorize } from "$lib/server/modules/auth"; -import { getFileStream } from "$lib/server/services/file"; -import type { RequestHandler } from "./$types"; - -export const GET: RequestHandler = async ({ locals, params }) => { - const { userId } = await authorize(locals, "activeClient"); - - const zodRes = z - .object({ - id: z.coerce.number().int().positive(), - }) - .safeParse(params); - if (!zodRes.success) error(400, "Invalid path parameters"); - const { id } = zodRes.data; - - const { encContentStream, encContentSize } = await getFileStream(userId, id); - return new Response(encContentStream as ReadableStream, { - headers: { - "Content-Type": "application/octet-stream", - "Content-Length": encContentSize.toString(), - }, - }); -}; diff --git a/src/routes/api/file/[id]/thumbnail/download/+server.ts b/src/routes/api/file/[id]/thumbnail/download/+server.ts deleted file mode 100644 index addd800..0000000 --- a/src/routes/api/file/[id]/thumbnail/download/+server.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { error } from "@sveltejs/kit"; -import { z } from "zod"; -import { authorize } from 
"$lib/server/modules/auth"; -import { getFileThumbnailStream } from "$lib/server/services/file"; -import type { RequestHandler } from "./$types"; - -export const GET: RequestHandler = async ({ locals, params }) => { - const { userId } = await authorize(locals, "activeClient"); - - const zodRes = z - .object({ - id: z.coerce.number().int().positive(), - }) - .safeParse(params); - if (!zodRes.success) error(400, "Invalid path parameters"); - const { id } = zodRes.data; - - const { encContentStream, encContentSize } = await getFileThumbnailStream(userId, id); - return new Response(encContentStream as ReadableStream, { - headers: { - "Content-Type": "application/octet-stream", - "Content-Length": encContentSize.toString(), - }, - }); -}; diff --git a/src/routes/api/file/[id]/thumbnail/upload/+server.ts b/src/routes/api/file/[id]/thumbnail/upload/+server.ts deleted file mode 100644 index 62dfe42..0000000 --- a/src/routes/api/file/[id]/thumbnail/upload/+server.ts +++ /dev/null @@ -1,74 +0,0 @@ -import Busboy from "@fastify/busboy"; -import { error, text } from "@sveltejs/kit"; -import { Readable, Writable } from "stream"; -import { z } from "zod"; -import { authorize } from "$lib/server/modules/auth"; -import { fileThumbnailUploadRequest, type FileThumbnailUploadRequest } from "$lib/server/schemas"; -import { uploadFileThumbnail } from "$lib/server/services/file"; -import type { RequestHandler } from "./$types"; - -export const POST: RequestHandler = async ({ locals, params, request }) => { - const { userId } = await authorize(locals, "activeClient"); - - const zodRes = z - .object({ - id: z.coerce.number().int().positive(), - }) - .safeParse(params); - if (!zodRes.success) error(400, "Invalid path parameters"); - const { id } = zodRes.data; - - const contentType = request.headers.get("Content-Type"); - if (!contentType?.startsWith("multipart/form-data") || !request.body) { - error(400, "Invalid request body"); - } - - return new Promise((resolve, reject) => { - const bb 
= Busboy({ headers: { "content-type": contentType } }); - const handler = - (f: (...args: T) => Promise) => - (...args: T) => { - f(...args).catch(reject); - }; - - let metadata: FileThumbnailUploadRequest | null = null; - let content: Readable | null = null; - bb.on( - "field", - handler(async (fieldname, val) => { - if (fieldname === "metadata") { - // Ignore subsequent metadata fields - if (!metadata) { - const zodRes = fileThumbnailUploadRequest.safeParse(JSON.parse(val)); - if (!zodRes.success) error(400, "Invalid request body"); - metadata = zodRes.data; - } - } else { - error(400, "Invalid request body"); - } - }), - ); - bb.on( - "file", - handler(async (fieldname, file) => { - if (fieldname !== "content") error(400, "Invalid request body"); - if (!metadata || content) error(400, "Invalid request body"); - content = file; - - await uploadFileThumbnail( - userId, - id, - new Date(metadata.dekVersion), - metadata.contentIv, - content, - ); - resolve(text("Thumbnail uploaded", { headers: { "Content-Type": "text/plain" } })); - }), - ); - bb.on("error", (e) => { - content?.emit("error", e) ?? 
reject(e); - }); - - request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error - }); -}; diff --git a/src/routes/api/file/upload/+server.ts b/src/routes/api/file/upload/+server.ts deleted file mode 100644 index f9cbd53..0000000 --- a/src/routes/api/file/upload/+server.ts +++ /dev/null @@ -1,108 +0,0 @@ -import Busboy from "@fastify/busboy"; -import { error, json } from "@sveltejs/kit"; -import { Readable, Writable } from "stream"; -import { authorize } from "$lib/server/modules/auth"; -import { - fileUploadRequest, - fileUploadResponse, - type FileUploadResponse, -} from "$lib/server/schemas"; -import { uploadFile } from "$lib/server/services/file"; -import type { RequestHandler } from "./$types"; - -type FileMetadata = Parameters[0]; - -const parseFileMetadata = (userId: number, json: string) => { - const zodRes = fileUploadRequest.safeParse(JSON.parse(json)); - if (!zodRes.success) error(400, "Invalid request body"); - const { - parent, - mekVersion, - dek, - dekVersion, - hskVersion, - contentHmac, - contentType, - contentIv, - name, - nameIv, - createdAt, - createdAtIv, - lastModifiedAt, - lastModifiedAtIv, - } = zodRes.data; - if ((createdAt && !createdAtIv) || (!createdAt && createdAtIv)) - error(400, "Invalid request body"); - - return { - userId, - parentId: parent, - mekVersion, - encDek: dek, - dekVersion: new Date(dekVersion), - hskVersion, - contentHmac, - contentType, - encContentIv: contentIv, - encName: { ciphertext: name, iv: nameIv }, - encCreatedAt: createdAt && createdAtIv ? 
{ ciphertext: createdAt, iv: createdAtIv } : null, - encLastModifiedAt: { ciphertext: lastModifiedAt, iv: lastModifiedAtIv }, - } satisfies FileMetadata; -}; - -export const POST: RequestHandler = async ({ locals, request }) => { - const { userId } = await authorize(locals, "activeClient"); - - const contentType = request.headers.get("Content-Type"); - if (!contentType?.startsWith("multipart/form-data") || !request.body) { - error(400, "Invalid request body"); - } - - return new Promise((resolve, reject) => { - const bb = Busboy({ headers: { "content-type": contentType } }); - const handler = - (f: (...args: T) => Promise) => - (...args: T) => { - f(...args).catch(reject); - }; - - let metadata: FileMetadata | null = null; - let content: Readable | null = null; - const checksum = new Promise((resolveChecksum, rejectChecksum) => { - bb.on( - "field", - handler(async (fieldname, val) => { - if (fieldname === "metadata") { - // Ignore subsequent metadata fields - if (!metadata) { - metadata = parseFileMetadata(userId, val); - } - } else if (fieldname === "checksum") { - // Ignore subsequent checksum fields - resolveChecksum(val); - } else { - error(400, "Invalid request body"); - } - }), - ); - bb.on( - "file", - handler(async (fieldname, file) => { - if (fieldname !== "content") error(400, "Invalid request body"); - if (!metadata || content) error(400, "Invalid request body"); - content = file; - - const { fileId } = await uploadFile(metadata, content, checksum); - resolve(json(fileUploadResponse.parse({ file: fileId } satisfies FileUploadResponse))); - }), - ); - bb.on("finish", () => rejectChecksum(new Error("Invalid request body"))); - bb.on("error", (e) => { - content?.emit("error", e) ?? 
reject(e); - rejectChecksum(e); - }); - }); - - request.body!.pipeTo(Writable.toWeb(bb)).catch(() => {}); // busboy will handle the error - }); -}; diff --git a/src/routes/api/upload/[id]/chunks/[index]/+server.ts b/src/routes/api/upload/[id]/chunks/[index]/+server.ts new file mode 100644 index 0000000..3b2e85b --- /dev/null +++ b/src/routes/api/upload/[id]/chunks/[index]/+server.ts @@ -0,0 +1,37 @@ +import { error, text } from "@sveltejs/kit"; +import { Readable } from "stream"; +import type { ReadableStream } from "stream/web"; +import { z } from "zod"; +import { parseContentDigestHeader } from "$lib/modules/http"; +import { authorize } from "$lib/server/modules/auth"; +import { uploadChunk } from "$lib/server/services/upload"; +import type { RequestHandler } from "./$types"; + +export const POST: RequestHandler = async ({ locals, params, request }) => { + const { userId } = await authorize(locals, "activeClient"); + + const zodRes = z + .object({ + id: z.uuidv4(), + index: z.coerce.number().int().positive(), + }) + .safeParse(params); + if (!zodRes.success) error(400, "Invalid path parameters"); + const { id: sessionId, index: chunkIndex } = zodRes.data; + + const encContentHash = parseContentDigestHeader(request.headers.get("Content-Digest")); + if (!encContentHash) { + error(400, "Invalid request headers"); + } else if (!request.body) { + error(400, "Invalid request body"); + } + + await uploadChunk( + userId, + sessionId, + chunkIndex, + Readable.fromWeb(request.body as ReadableStream), + encContentHash, + ); + return text("Chunk uploaded", { headers: { "Content-Type": "text/plain" } }); +}; diff --git a/src/service-worker/constants.ts b/src/service-worker/constants.ts new file mode 100644 index 0000000..4938d61 --- /dev/null +++ b/src/service-worker/constants.ts @@ -0,0 +1 @@ +export * from "../lib/constants"; diff --git a/src/service-worker/handlers/decryptFile.ts b/src/service-worker/handlers/decryptFile.ts new file mode 100644 index 0000000..9aa9717 --- 
/dev/null +++ b/src/service-worker/handlers/decryptFile.ts @@ -0,0 +1,156 @@ +import { DECRYPTED_FILE_URL_PREFIX, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "../constants"; +import { decryptChunk, getEncryptedRange, getDecryptedSize } from "../modules/crypto"; +import { parseRangeHeader, getContentRangeHeader } from "../modules/http"; +import { getFile } from "../modules/opfs"; +import { fileMetadataStore } from "../stores"; +import type { FileMetadata } from "../types"; + +const createResponse = ( + stream: ReadableStream, + isRangeRequest: boolean, + range: { start: number; end: number; total: number }, + contentType?: string, + downloadFilename?: string, +) => { + const headers: Record = { + "Accept-Ranges": "bytes", + "Content-Length": String(range.end - range.start + 1), + ...(isRangeRequest ? getContentRangeHeader(range) : {}), + }; + + if (contentType) { + headers["Content-Type"] = contentType; + } + + if (downloadFilename) { + headers["Content-Disposition"] = + `attachment; filename*=UTF-8''${encodeURIComponent(downloadFilename)}`; + } + + return new Response(stream, { + status: isRangeRequest ? 206 : 200, + headers, + }); +}; + +const streamFromOpfs = async ( + file: File, + metadata?: FileMetadata, + range?: { start?: number; end?: number }, + downloadFilename?: string, +) => { + const start = range?.start ?? 0; + const end = range?.end ?? file.size - 1; + if (start > end || start < 0 || end >= file.size) { + return new Response("Invalid range", { status: 416 }); + } + + return createResponse( + file.slice(start, end + 1).stream(), + !!range, + { start, end, total: file.size }, + metadata?.contentType, + downloadFilename, + ); +}; + +const streamFromServer = async ( + id: number, + metadata: FileMetadata, + range?: { start?: number; end?: number }, + downloadFilename?: string, +) => { + const totalSize = getDecryptedSize(metadata.encContentSize, metadata.isLegacy); + const start = range?.start ?? 0; + const end = + range?.end ?? 
+ (range && !metadata.isLegacy ? Math.min(start + CHUNK_SIZE, totalSize) : totalSize) - 1; + if (start > end || start < 0 || end >= totalSize) { + return new Response("Invalid range", { status: 416 }); + } + + const encryptedRange = getEncryptedRange(start, end, metadata.encContentSize, metadata.isLegacy); + const apiResponse = await fetch(`/api/file/${id}/download`, { + headers: { Range: `bytes=${encryptedRange.start}-${encryptedRange.end}` }, + }); + if (apiResponse.status !== 206 || !apiResponse.body) { + return new Response("Failed to fetch encrypted file", { status: 502 }); + } + + if (metadata.isLegacy) { + const fileEncrypted = await apiResponse.arrayBuffer(); + const decrypted = await decryptChunk(fileEncrypted, metadata.dataKey); + return createResponse( + new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array(decrypted.slice(start, end + 1))); + controller.close(); + }, + }), + !!range, + { start, end, total: totalSize }, + metadata.contentType, + ); + } + + const totalChunks = encryptedRange.lastChunkIndex - encryptedRange.firstChunkIndex + 1; + let currentChunkIndex = 0; + let buffer = new Uint8Array(0); + + const decryptingStream = new TransformStream({ + async transform(chunk, controller) { + const newBuffer = new Uint8Array(buffer.length + chunk.length); + newBuffer.set(buffer); + newBuffer.set(chunk, buffer.length); + buffer = newBuffer; + + while (buffer.length >= ENCRYPTED_CHUNK_SIZE && currentChunkIndex < totalChunks - 1) { + const encryptedChunk = buffer.slice(0, ENCRYPTED_CHUNK_SIZE); + buffer = buffer.slice(ENCRYPTED_CHUNK_SIZE); + + const decrypted = await decryptChunk(encryptedChunk.buffer, metadata.dataKey); + const sliceStart = currentChunkIndex === 0 ? 
start % CHUNK_SIZE : 0; + controller.enqueue(new Uint8Array(decrypted.slice(sliceStart))); + currentChunkIndex++; + } + }, + async flush(controller) { + if (buffer.length > 0) { + const decrypted = await decryptChunk(buffer.buffer, metadata.dataKey); + const sliceStart = currentChunkIndex === 0 ? start % CHUNK_SIZE : 0; + const sliceEnd = (end % CHUNK_SIZE) + 1; + controller.enqueue(new Uint8Array(decrypted.slice(sliceStart, sliceEnd))); + } + }, + }); + + return createResponse( + apiResponse.body.pipeThrough(decryptingStream), + !!range, + { start, end, total: totalSize }, + metadata.contentType, + downloadFilename, + ); +}; + +const decryptFileHandler = async (request: Request) => { + const url = new URL(request.url); + const fileId = parseInt(url.pathname.slice(DECRYPTED_FILE_URL_PREFIX.length), 10); + if (isNaN(fileId)) { + throw new Response("Invalid file id", { status: 400 }); + } + + const downloadFilename = url.searchParams.get("download") ?? undefined; + const metadata = fileMetadataStore.get(fileId); + const range = parseRangeHeader(request.headers.get("Range")); + const cache = await getFile(`/cache/${fileId}`); + if (cache) { + return streamFromOpfs(cache, metadata, range, downloadFilename); + } else if (metadata) { + return streamFromServer(fileId, metadata, range, downloadFilename); + } else { + return new Response("Decryption not prepared", { status: 400 }); + } +}; + +export default decryptFileHandler; diff --git a/src/service-worker/handlers/index.ts b/src/service-worker/handlers/index.ts new file mode 100644 index 0000000..fe5b0f9 --- /dev/null +++ b/src/service-worker/handlers/index.ts @@ -0,0 +1 @@ +export { default as decryptFile } from "./decryptFile"; diff --git a/src/service-worker/index.ts b/src/service-worker/index.ts new file mode 100644 index 0000000..2861166 --- /dev/null +++ b/src/service-worker/index.ts @@ -0,0 +1,43 @@ +/// +/// +/// +/// + +import { DECRYPTED_FILE_URL_PREFIX } from "./constants"; +import { decryptFile } from 
"./handlers"; +import { fileMetadataStore } from "./stores"; +import type { ServiceWorkerMessage, ServiceWorkerResponse } from "./types"; + +const self = globalThis.self as unknown as ServiceWorkerGlobalScope; + +self.addEventListener("message", (event) => { + const message: ServiceWorkerMessage = event.data; + switch (message.type) { + case "decryption-prepare": + fileMetadataStore.set(message.fileId, message); + event.source?.postMessage({ + type: "decryption-ready", + fileId: message.fileId, + } satisfies ServiceWorkerResponse); + break; + default: { + const exhaustive: never = message.type; + return exhaustive; + } + } +}); + +self.addEventListener("fetch", (event) => { + const url = new URL(event.request.url); + if (url.pathname.startsWith(DECRYPTED_FILE_URL_PREFIX)) { + event.respondWith(decryptFile(event.request)); + } +}); + +self.addEventListener("install", () => { + self.skipWaiting(); +}); + +self.addEventListener("activate", (event) => { + event.waitUntil(self.clients.claim()); +}); diff --git a/src/service-worker/modules/crypto.ts b/src/service-worker/modules/crypto.ts new file mode 100644 index 0000000..ed35094 --- /dev/null +++ b/src/service-worker/modules/crypto.ts @@ -0,0 +1,40 @@ +import { ENCRYPTION_OVERHEAD, CHUNK_SIZE, ENCRYPTED_CHUNK_SIZE } from "../constants"; + +export * from "../../lib/modules/crypto"; + +export const getEncryptedRange = ( + start: number, + end: number, + totalEncryptedSize: number, + isLegacy: boolean, +) => { + if (isLegacy) { + return { + firstChunkIndex: 0, + lastChunkIndex: 0, + start: 0, + end: totalEncryptedSize - 1, + }; + } + + const firstChunkIndex = Math.floor(start / CHUNK_SIZE); + const lastChunkIndex = Math.floor(end / CHUNK_SIZE); + return { + firstChunkIndex, + lastChunkIndex, + start: firstChunkIndex * ENCRYPTED_CHUNK_SIZE, + end: Math.min((lastChunkIndex + 1) * ENCRYPTED_CHUNK_SIZE - 1, totalEncryptedSize - 1), + }; +}; + +export const getDecryptedSize = (encryptedSize: number, isLegacy: boolean) => { + 
if (isLegacy) { + return encryptedSize - ENCRYPTION_OVERHEAD; + } + + const fullChunks = Math.floor(encryptedSize / ENCRYPTED_CHUNK_SIZE); + const lastChunkEncSize = encryptedSize % ENCRYPTED_CHUNK_SIZE; + return ( + fullChunks * CHUNK_SIZE + (lastChunkEncSize > 0 ? lastChunkEncSize - ENCRYPTION_OVERHEAD : 0) + ); +}; diff --git a/src/service-worker/modules/http.ts b/src/service-worker/modules/http.ts new file mode 100644 index 0000000..0d1bf5e --- /dev/null +++ b/src/service-worker/modules/http.ts @@ -0,0 +1 @@ +export * from "../../lib/modules/http"; diff --git a/src/service-worker/modules/opfs.ts b/src/service-worker/modules/opfs.ts new file mode 100644 index 0000000..0ef5769 --- /dev/null +++ b/src/service-worker/modules/opfs.ts @@ -0,0 +1 @@ +export * from "../../lib/modules/opfs"; diff --git a/src/service-worker/stores.ts b/src/service-worker/stores.ts new file mode 100644 index 0000000..22d899e --- /dev/null +++ b/src/service-worker/stores.ts @@ -0,0 +1,3 @@ +import type { FileMetadata } from "./types"; + +export const fileMetadataStore = new Map(); diff --git a/src/service-worker/types.ts b/src/service-worker/types.ts new file mode 100644 index 0000000..f04ed39 --- /dev/null +++ b/src/service-worker/types.ts @@ -0,0 +1 @@ +export * from "../lib/serviceWorker/types"; diff --git a/src/trpc/router.server.ts b/src/trpc/router.server.ts index 64d25c7..d343fa6 100644 --- a/src/trpc/router.server.ts +++ b/src/trpc/router.server.ts @@ -9,6 +9,7 @@ import { fileRouter, hskRouter, mekRouter, + uploadRouter, userRouter, } from "./routers"; @@ -20,6 +21,7 @@ export const appRouter = router({ file: fileRouter, hsk: hskRouter, mek: mekRouter, + upload: uploadRouter, user: userRouter, }); diff --git a/src/trpc/routers/category.ts b/src/trpc/routers/category.ts index a292889..34887f7 100644 --- a/src/trpc/routers/category.ts +++ b/src/trpc/routers/category.ts @@ -1,14 +1,14 @@ import { TRPCError } from "@trpc/server"; import { z } from "zod"; +import { CategoryIdSchema } 
from "$lib/schemas"; import { CategoryRepo, FileRepo, IntegrityError } from "$lib/server/db"; -import { categoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; const categoryRouter = router({ get: roleProcedure["activeClient"] .input( z.object({ - id: categoryIdSchema, + id: CategoryIdSchema, recurse: z.boolean().default(false), }), ) @@ -65,7 +65,7 @@ const categoryRouter = router({ create: roleProcedure["activeClient"] .input( z.object({ - parent: categoryIdSchema, + parent: CategoryIdSchema, mekVersion: z.int().positive(), dek: z.base64().nonempty(), dekVersion: z.date(), diff --git a/src/trpc/routers/directory.ts b/src/trpc/routers/directory.ts index 6e1e358..15f16f3 100644 --- a/src/trpc/routers/directory.ts +++ b/src/trpc/routers/directory.ts @@ -1,15 +1,15 @@ import { TRPCError } from "@trpc/server"; import { z } from "zod"; +import { DirectoryIdSchema } from "$lib/schemas"; import { FileRepo, IntegrityError } from "$lib/server/db"; import { safeUnlink } from "$lib/server/modules/filesystem"; -import { directoryIdSchema } from "$lib/server/schemas"; import { router, roleProcedure } from "../init.server"; const directoryRouter = router({ get: roleProcedure["activeClient"] .input( z.object({ - id: directoryIdSchema, + id: DirectoryIdSchema, }), ) .query(async ({ ctx, input }) => { @@ -59,7 +59,7 @@ const directoryRouter = router({ create: roleProcedure["activeClient"] .input( z.object({ - parent: directoryIdSchema, + parent: DirectoryIdSchema, mekVersion: z.int().positive(), dek: z.base64().nonempty(), dekVersion: z.date(), diff --git a/src/trpc/routers/file.ts b/src/trpc/routers/file.ts index c3f8159..d6d658c 100644 --- a/src/trpc/routers/file.ts +++ b/src/trpc/routers/file.ts @@ -19,12 +19,12 @@ const fileRouter = router({ const categories = await FileRepo.getAllFileCategories(input.id); return { + isLegacy: !!file.encContentIv, parent: file.parentId, mekVersion: file.mekVersion, dek: file.encDek, dekVersion: 
import { TRPCError } from "@trpc/server";
import { createHash } from "crypto";
import { createReadStream, createWriteStream } from "fs";
import { copyFile, mkdir } from "fs/promises";
import mime from "mime";
import { dirname } from "path";
import { v4 as uuidv4 } from "uuid";
import { z } from "zod";
import { DirectoryIdSchema } from "$lib/schemas";
import { FileRepo, MediaRepo, UploadRepo, IntegrityError } from "$lib/server/db";
import db from "$lib/server/db/kysely";
import env from "$lib/server/loadenv";
import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem";
import { router, roleProcedure } from "../init.server";

// Lifetime of an upload session; sessions older than this are considered expired.
const UPLOADS_EXPIRES = 24 * 3600 * 1000; // 24 hours

// In-memory guard against concurrent complete* calls for the same upload id.
// NOTE(review): this only protects a single process — confirm the deployment
// never runs multiple server instances, or move the lock into the DB.
const sessionLocks = new Set();

// Allocates a fresh session id and creates its staging directory under
// UPLOADS_PATH, where individual chunks are written as files named 1..N.
const generateSessionId = async () => {
  const id = uuidv4();
  const path = `${env.uploadsPath}/${id}`;
  await mkdir(path, { recursive: true });
  return { id, path };
};

/**
 * Chunked-upload session management: start/complete for new files, file
 * thumbnails, and legacy-file content migrations. Chunk bodies themselves are
 * uploaded elsewhere; "complete" assembles staged chunks and commits metadata
 * in a DB transaction, cleaning up the staging directory afterwards.
 */
const uploadRouter = router({
  // Opens an upload session for a brand-new file and records all encrypted
  // metadata up front; returns the session id used for chunk uploads.
  startFileUpload: roleProcedure["activeClient"]
    .input(
      z.object({
        chunks: z.int().positive(),
        parent: DirectoryIdSchema,
        mekVersion: z.int().positive(),
        dek: z.base64().nonempty(),
        dekVersion: z.date(),
        hskVersion: z.int().positive().optional(),
        // Content type must map to a known file extension.
        contentType: z
          .string()
          .trim()
          .nonempty()
          .refine((value) => mime.getExtension(value) !== null),
        name: z.base64().nonempty(),
        nameIv: z.base64().nonempty(),
        createdAt: z.base64().nonempty().optional(),
        createdAtIv: z.base64().nonempty().optional(),
        lastModifiedAt: z.base64().nonempty(),
        lastModifiedAtIv: z.base64().nonempty(),
      }),
    )
    .mutation(async ({ ctx, input }) => {
      // The client stamps dekVersion with its current time; only accept
      // stamps within ±1 minute of server time to bound clock skew.
      const oneMinuteAgo = new Date(Date.now() - 60 * 1000);
      const oneMinuteLater = new Date(Date.now() + 60 * 1000);
      if (input.dekVersion <= oneMinuteAgo || input.dekVersion >= oneMinuteLater) {
        throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid DEK version" });
      }

      const { id, path } = await generateSessionId();

      try {
        await UploadRepo.createFileUploadSession({
          id,
          userId: ctx.session.userId,
          path,
          totalChunks: input.chunks,
          expiresAt: new Date(Date.now() + UPLOADS_EXPIRES),
          parentId: input.parent,
          mekVersion: input.mekVersion,
          encDek: input.dek,
          dekVersion: input.dekVersion,
          hskVersion: input.hskVersion ?? null,
          contentType: input.contentType,
          encName: { ciphertext: input.name, iv: input.nameIv },
          // createdAt is optional but must come as a (ciphertext, iv) pair.
          encCreatedAt:
            input.createdAt && input.createdAtIv
              ? { ciphertext: input.createdAt, iv: input.createdAtIv }
              : null,
          encLastModifiedAt: { ciphertext: input.lastModifiedAt, iv: input.lastModifiedAtIv },
        });
        return { uploadId: id };
      } catch (e) {
        // Session row was not created — remove the staging directory.
        await safeRecursiveRm(path);

        // Translate known integrity violations into client-facing errors.
        if (e instanceof IntegrityError) {
          if (e.message === "Inactive MEK version") {
            throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid MEK version" });
          } else if (e.message === "Inactive HSK version") {
            throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid HSK version" });
          }
        }
        throw e;
      }
    }),

  // Assembles all uploaded chunks into the user's library, hashes the
  // ciphertext, registers the file, and deletes the session — atomically with
  // respect to the DB (filesystem cleanup is best-effort afterwards).
  completeFileUpload: roleProcedure["activeClient"]
    .input(
      z.object({
        uploadId: z.uuidv4(),
        // Required iff the session was opened with an hskVersion.
        contentHmac: z.base64().nonempty().optional(),
      }),
    )
    .mutation(async ({ ctx, input }) => {
      const { uploadId } = input;
      // Reject re-entrant completion of the same session (single process only).
      if (sessionLocks.has(uploadId)) {
        throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" });
      } else {
        sessionLocks.add(uploadId);
      }

      let filePath = "";

      try {
        const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
        if (session?.type !== "file") {
          throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" });
        } else if (
          // contentHmac presence must match whether an HSK was requested.
          (session.hskVersion && !input.contentHmac) ||
          (!session.hskVersion && input.contentHmac)
        ) {
          throw new TRPCError({ code: "BAD_REQUEST", message: "Invalid content HMAC" });
        } else if (session.uploadedChunks < session.totalChunks) {
          throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" });
        }

        // Final resting place: library/<user>/<random uuid>.
        filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`;
        await mkdir(dirname(filePath), { recursive: true });

        // Concatenate chunks 1..N while hashing the encrypted content.
        // "wx" fails if the target already exists; 0600 keeps it private.
        const hashStream = createHash("sha256");
        const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });

        for (let i = 1; i <= session.totalChunks; i++) {
          for await (const chunk of createReadStream(`${session.path}/${i}`)) {
            hashStream.update(chunk);
            // NOTE(review): write() return value is ignored — no backpressure;
            // large files may buffer heavily in memory. Consider stream.pipeline.
            writeStream.write(chunk);
          }
        }

        // Flush and surface any deferred write error.
        await new Promise((resolve, reject) => {
          writeStream.end((e: any) => (e ? reject(e) : resolve()));
        });

        const hash = hashStream.digest("base64");
        // Register the file and delete the session in one transaction so a
        // session can never be consumed twice.
        const fileId = await db.transaction().execute(async (trx) => {
          const { id: fileId } = await FileRepo.registerFile(trx, {
            ...session,
            userId: ctx.session.userId,
            path: filePath,
            contentHmac: input.contentHmac ?? null,
            encContentHash: hash,
            encContentIv: null, // null marks the new chunked (non-legacy) format
          });
          await UploadRepo.deleteUploadSession(trx, uploadId);
          return fileId;
        });

        await safeRecursiveRm(session.path);
        return { file: fileId };
      } catch (e) {
        // Remove the partially assembled library file on any failure.
        await safeUnlink(filePath);
        throw e;
      } finally {
        sessionLocks.delete(uploadId);
      }
    }),

  // Opens a single-chunk upload session for a file's thumbnail.
  startFileThumbnailUpload: roleProcedure["activeClient"]
    .input(
      z.object({
        file: z.int().positive(),
        dekVersion: z.date(),
      }),
    )
    .mutation(async ({ ctx, input }) => {
      const { id, path } = await generateSessionId();

      try {
        await UploadRepo.createThumbnailOrMigrationUploadSession({
          id,
          type: "thumbnail",
          userId: ctx.session.userId,
          path,
          totalChunks: 1, // Up to 4 MiB
          expiresAt: new Date(Date.now() + UPLOADS_EXPIRES),
          fileId: input.file,
          dekVersion: input.dekVersion,
        });
        return { uploadId: id };
      } catch (e) {
        await safeRecursiveRm(path);

        if (e instanceof IntegrityError) {
          if (e.message === "File not found") {
            throw new TRPCError({ code: "NOT_FOUND", message: "Invalid file id" });
          } else if (e.message === "Invalid DEK version") {
            throw new TRPCError({ code: "BAD_REQUEST", message: e.message });
          }
        }
        throw e;
      }
    }),

  // Moves the uploaded thumbnail chunk into the thumbnails store and points
  // the file's thumbnail record at it, replacing (and unlinking) any old one.
  completeFileThumbnailUpload: roleProcedure["activeClient"]
    .input(
      z.object({
        uploadId: z.uuidv4(),
      }),
    )
    .mutation(async ({ ctx, input }) => {
      const { uploadId } = input;
      if (sessionLocks.has(uploadId)) {
        throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" });
      } else {
        sessionLocks.add(uploadId);
      }

      let thumbnailPath = "";

      try {
        const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
        if (session?.type !== "thumbnail") {
          throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" });
        } else if (session.uploadedChunks < session.totalChunks) {
          throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" });
        }

        // Thumbnails are keyed by upload id under the user's directory.
        thumbnailPath = `${env.thumbnailsPath}/${ctx.session.userId}/${uploadId}`;
        await mkdir(dirname(thumbnailPath), { recursive: true });
        await copyFile(`${session.path}/1`, thumbnailPath);

        // Swap the thumbnail pointer and consume the session atomically;
        // the transaction returns the superseded thumbnail's path for cleanup.
        const oldThumbnailPath = await db.transaction().execute(async (trx) => {
          const oldPath = await MediaRepo.updateFileThumbnail(
            trx,
            ctx.session.userId,
            session.fileId,
            session.dekVersion,
            thumbnailPath,
            null,
          );
          await UploadRepo.deleteUploadSession(trx, uploadId);
          return oldPath;
        });
        await Promise.all([safeUnlink(oldThumbnailPath), safeRecursiveRm(session.path)]);
      } catch (e) {
        await safeUnlink(thumbnailPath);

        if (e instanceof IntegrityError && e.message === "Invalid DEK version") {
          // DEK rotated after this upload started
          throw new TRPCError({ code: "CONFLICT", message: e.message });
        }
        throw e;
      } finally {
        sessionLocks.delete(uploadId);
      }
    }),

  // Opens an upload session for re-uploading a legacy file's content in the
  // new chunked encryption format.
  startMigrationUpload: roleProcedure["activeClient"]
    .input(
      z.object({
        file: z.int().positive(),
        chunks: z.int().positive(),
        dekVersion: z.date(),
      }),
    )
    .mutation(async ({ ctx, input }) => {
      const { id, path } = await generateSessionId();

      try {
        await UploadRepo.createThumbnailOrMigrationUploadSession({
          id,
          type: "migration",
          userId: ctx.session.userId,
          path,
          totalChunks: input.chunks,
          expiresAt: new Date(Date.now() + UPLOADS_EXPIRES),
          fileId: input.file,
          dekVersion: input.dekVersion,
        });
        return { uploadId: id };
      } catch (e) {
        await safeRecursiveRm(path);

        if (e instanceof IntegrityError) {
          if (e.message === "File not found") {
            throw new TRPCError({ code: "NOT_FOUND", message: "Invalid file id" });
          } else if (e.message === "File is not legacy") {
            throw new TRPCError({ code: "BAD_REQUEST", message: e.message });
          }
        }
        throw e;
      }
    }),

  // Assembles the migrated (chunk-format) content, swaps it in for the legacy
  // content, and unlinks the old ciphertext. Mirrors completeFileUpload.
  completeMigrationUpload: roleProcedure["activeClient"]
    .input(
      z.object({
        uploadId: z.uuidv4(),
      }),
    )
    .mutation(async ({ ctx, input }) => {
      const { uploadId } = input;
      if (sessionLocks.has(uploadId)) {
        throw new TRPCError({ code: "CONFLICT", message: "Completion already in progress" });
      } else {
        sessionLocks.add(uploadId);
      }

      let filePath = "";

      try {
        const session = await UploadRepo.getUploadSession(uploadId, ctx.session.userId);
        if (session?.type !== "migration") {
          throw new TRPCError({ code: "NOT_FOUND", message: "Invalid upload id" });
        } else if (session.uploadedChunks < session.totalChunks) {
          throw new TRPCError({ code: "BAD_REQUEST", message: "Upload not completed" });
        }

        filePath = `${env.libraryPath}/${ctx.session.userId}/${uuidv4()}`;
        await mkdir(dirname(filePath), { recursive: true });

        // Same assemble-and-hash loop as completeFileUpload.
        // NOTE(review): duplicated logic — a shared helper would keep the two
        // code paths from drifting apart.
        const hashStream = createHash("sha256");
        const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });

        for (let i = 1; i <= session.totalChunks; i++) {
          for await (const chunk of createReadStream(`${session.path}/${i}`)) {
            hashStream.update(chunk);
            writeStream.write(chunk);
          }
        }

        await new Promise((resolve, reject) => {
          writeStream.end((e: any) => (e ? reject(e) : resolve()));
        });

        const hash = hashStream.digest("base64");
        // Point the file record at the new content and consume the session in
        // one transaction; returns the legacy content path for cleanup.
        const oldPath = await db.transaction().execute(async (trx) => {
          const { oldPath } = await FileRepo.migrateFileContent(
            trx,
            ctx.session.userId,
            session.fileId,
            filePath,
            session.dekVersion!,
            hash,
          );
          await UploadRepo.deleteUploadSession(trx, uploadId);
          return oldPath;
        });

        await Promise.all([safeUnlink(oldPath), safeRecursiveRm(session.path)]);
      } catch (e) {
        await safeUnlink(filePath);

        if (e instanceof IntegrityError && e.message === "File is not legacy") {
          // File migrated after this upload started
          throw new TRPCError({ code: "CONFLICT", message: e.message });
        }
        throw e;
      } finally {
        sessionLocks.delete(uploadId);
      }
    }),
});

export default uploadRouter;