From ae1d34fc6b80491fed154d81c2d952edcfc6841f Mon Sep 17 00:00:00 2001
From: static
Date: Mon, 5 Jan 2026 06:49:12 +0900
Subject: [PATCH] Fix a bug where network requests could be sent multiple
 times under certain conditions when loading file, category, and directory
 information
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 package.json                                  |   4 +-
 pnpm-lock.yaml                                |  28 +-
 .../components/molecules/Categories.svelte    |   2 +-
 src/lib/indexedDB/filesystem.ts               |  12 +-
 src/lib/modules/filesystem/category.ts        | 238 ++++++--------
 src/lib/modules/filesystem/directory.ts       | 179 +++++------
 src/lib/modules/filesystem/file.ts            | 296 +++++++++---------
 src/lib/modules/filesystem/internal.svelte.ts | 166 ++++++----
 src/lib/modules/filesystem/types.ts           |  20 +-
 src/lib/server/db/file.ts                     |   3 +
 src/lib/utils/index.ts                        |   1 -
 src/lib/utils/promise.ts                      |  16 -
 src/trpc/routers/category.ts                  |   1 +
 src/trpc/routers/file.ts                      |   2 +
 14 files changed, 467 insertions(+), 501 deletions(-)
 delete mode 100644 src/lib/utils/promise.ts

diff --git a/package.json b/package.json
index 7f0d51f..c16b700 100644
--- a/package.json
+++ b/package.json
@@ -22,7 +22,7 @@
     "@sveltejs/adapter-node": "^5.4.0",
     "@sveltejs/kit": "^2.49.2",
     "@sveltejs/vite-plugin-svelte": "^6.2.1",
-    "@tanstack/svelte-virtual": "^3.13.15",
+    "@tanstack/svelte-virtual": "^3.13.16",
     "@trpc/client": "^11.8.1",
     "@types/file-saver": "^2.0.7",
     "@types/ms": "^0.7.34",
@@ -64,7 +64,7 @@
     "pg": "^8.16.3",
     "superjson": "^2.2.6",
     "uuid": "^13.0.0",
-    "zod": "^4.3.4"
+    "zod": "^4.3.5"
   },
   "engines": {
     "node": "^22.0.0",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 2299ddb..e4e336f 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -36,8 +36,8 @@ importers:
         specifier: ^13.0.0
         version: 13.0.0
       zod:
-        specifier: ^4.3.4
-        version: 4.3.4
+        specifier: ^4.3.5
+        version: 4.3.5
     devDependencies:
       '@eslint/compat':
         specifier: ^2.0.0
@@ -58,8 +58,8 @@
        specifier: ^6.2.1
        version: 6.2.1(svelte@5.46.1)(vite@7.3.0(@types/node@25.0.3)(jiti@1.21.7)(yaml@2.8.0))
      '@tanstack/svelte-virtual':
-       specifier: ^3.13.15
-       version: 3.13.15(svelte@5.46.1)
+       specifier: ^3.13.16
+       version: 3.13.16(svelte@5.46.1)
      '@trpc/client':
        specifier: ^11.8.1
        version: 11.8.1(@trpc/server@11.8.1(typescript@5.9.3))(typescript@5.9.3)
@@ -620,13 +620,13 @@ packages:
       svelte: ^5.0.0
       vite: ^6.3.0 || ^7.0.0
 
-  '@tanstack/svelte-virtual@3.13.15':
-    resolution: {integrity: sha512-3PPLI3hsyT70zSZhBkSIZXIarlN+GjFNKeKr2Wk1UR7EuEVtXgNlB/Zk0sYtaeJ4CvGvldQNakOvbdETnWAgeA==}
+  '@tanstack/svelte-virtual@3.13.16':
+    resolution: {integrity: sha512-LRDPRzAPTIiDjiCA9lhNlFnZRLj/XsNhzNRsT5JEA8hzcBmZw8avdYYVjydPAy0ObFJgG1zBAm9Dtvwqju36sg==}
     peerDependencies:
       svelte: ^3.48.0 || ^4.0.0 || ^5.0.0
 
-  '@tanstack/virtual-core@3.13.15':
-    resolution: {integrity: sha512-8cG3acM2cSIm3h8WxboHARAhQAJbYUhvmadvnN8uz8aziDwrbYb9KiARni+uY2qrLh49ycn+poGoxvtIAKhjog==}
+  '@tanstack/virtual-core@3.13.16':
+    resolution: {integrity: sha512-njazUC8mDkrxWmyZmn/3eXrDcP8Msb3chSr4q6a65RmwdSbMlMCdnOphv6/8mLO7O3Fuza5s4M4DclmvAO5w0w==}
 
   '@trpc/client@11.8.1':
    resolution: {integrity: 
sha512-L/SJFGanr9xGABmuDoeXR4xAdHJmsXsiF9OuH+apecJ+8sUITzVT1EPeqp0ebqA6lBhEl5pPfg3rngVhi/h60Q==} @@ -2025,8 +2025,8 @@ packages: zimmerframe@1.1.4: resolution: {integrity: sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ==} - zod@4.3.4: - resolution: {integrity: sha512-Zw/uYiiyF6pUT1qmKbZziChgNPRu+ZRneAsMUDU6IwmXdWt5JwcUfy2bvLOCUtz5UniaN/Zx5aFttZYbYc7O/A==} + zod@4.3.5: + resolution: {integrity: sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==} snapshots: @@ -2389,12 +2389,12 @@ snapshots: transitivePeerDependencies: - supports-color - '@tanstack/svelte-virtual@3.13.15(svelte@5.46.1)': + '@tanstack/svelte-virtual@3.13.16(svelte@5.46.1)': dependencies: - '@tanstack/virtual-core': 3.13.15 + '@tanstack/virtual-core': 3.13.16 svelte: 5.46.1 - '@tanstack/virtual-core@3.13.15': {} + '@tanstack/virtual-core@3.13.16': {} '@trpc/client@11.8.1(@trpc/server@11.8.1(typescript@5.9.3))(typescript@5.9.3)': dependencies: @@ -3707,4 +3707,4 @@ snapshots: zimmerframe@1.1.4: {} - zod@4.3.4: {} + zod@4.3.5: {} diff --git a/src/lib/components/molecules/Categories.svelte b/src/lib/components/molecules/Categories.svelte index b8d52bb..72fe7de 100644 --- a/src/lib/components/molecules/Categories.svelte +++ b/src/lib/components/molecules/Categories.svelte @@ -25,7 +25,7 @@ let { categories, categoryMenuIcon, onCategoryClick, onCategoryMenuClick }: Props = $props(); - let categoriesWithName = $derived(sortEntries($state.snapshot(categories))); + let categoriesWithName = $derived(sortEntries([...categories])); {#if categoriesWithName.length > 0} diff --git a/src/lib/indexedDB/filesystem.ts b/src/lib/indexedDB/filesystem.ts index c78ce4d..1709033 100644 --- a/src/lib/indexedDB/filesystem.ts +++ b/src/lib/indexedDB/filesystem.ts @@ -13,15 +13,15 @@ interface FileInfo { contentType: string; createdAt?: Date; lastModifiedAt: Date; - categoryIds: number[]; + categoryIds?: number[]; } interface CategoryInfo { id: number; parentId: CategoryId; name: string; - files: { id: number; isRecursive: boolean }[]; - isFileRecursive: boolean; + files?: { id: number; isRecursive: boolean }[]; + isFileRecursive?: boolean; } const filesystem = new Dexie("filesystem") as Dexie & { @@ -55,7 +55,7 @@ export const getDirectoryInfo = async (id: number) => { }; export const storeDirectoryInfo = async (directoryInfo: DirectoryInfo) => { - await filesystem.directory.put(directoryInfo); + await filesystem.directory.upsert(directoryInfo.id, { ...directoryInfo }); }; export const deleteDirectoryInfo = async (id: number) => { @@ -89,7 +89,7 @@ export const bulkGetFileInfos = async (ids: number[]) => { }; export const storeFileInfo = async (fileInfo: FileInfo) => { - await filesystem.file.put(fileInfo); + await filesystem.file.upsert(fileInfo.id, { ...fileInfo }); }; export const deleteFileInfo = async (id: number) => { @@ -112,7 +112,7 @@ export const getCategoryInfo = async (id: number) => { }; export const storeCategoryInfo = async (categoryInfo: CategoryInfo) => { - await filesystem.category.put(categoryInfo); + await filesystem.category.upsert(categoryInfo.id, { ...categoryInfo }); }; export const updateCategoryInfo = async (id: number, changes: { isFileRecursive?: boolean }) => { diff --git a/src/lib/modules/filesystem/category.ts b/src/lib/modules/filesystem/category.ts index 2ca3472..47a4565 100644 --- a/src/lib/modules/filesystem/category.ts +++ b/src/lib/modules/filesystem/category.ts @@ -1,167 +1,121 @@ import * as IndexedDB from "$lib/indexedDB"; 
import { trpc, isTRPCClientError } from "$trpc/client"; import { FilesystemCache, decryptFileMetadata, decryptCategoryMetadata } from "./internal.svelte"; -import type { MaybeCategoryInfo } from "./types"; +import type { CategoryInfo, MaybeCategoryInfo } from "./types"; -const cache = new FilesystemCache>(); - -const fetchFromIndexedDB = async (id: CategoryId) => { - const [category, subCategories] = await Promise.all([ - id !== "root" ? IndexedDB.getCategoryInfo(id) : undefined, - IndexedDB.getCategoryInfos(id), - ]); - const files = category - ? await Promise.all( - category.files.map(async (file) => { - const fileInfo = await IndexedDB.getFileInfo(file.id); - return fileInfo - ? { - id: file.id, - contentType: fileInfo.contentType, - name: fileInfo.name, - createdAt: fileInfo.createdAt, - lastModifiedAt: fileInfo.lastModifiedAt, - isRecursive: file.isRecursive, - } - : undefined; - }), - ) - : undefined; - - if (id === "root") { - return { - id, - exists: true as const, - subCategories, - }; - } else if (category) { - return { - id, - exists: true as const, - name: category.name, - subCategories, - files: files!.filter((file) => !!file), - isFileRecursive: category.isFileRecursive, - }; - } -}; - -const fetchFromServer = async (id: CategoryId, masterKey: CryptoKey) => { - try { - const { - metadata, - subCategories: subCategoriesRaw, - files: filesRaw, - } = await trpc().category.get.query({ id, recurse: true }); - - void IndexedDB.deleteDanglingCategoryInfos(id, new Set(subCategoriesRaw.map(({ id }) => id))); - - const subCategories = await Promise.all( - subCategoriesRaw.map(async (category) => { - const decrypted = await decryptCategoryMetadata(category, masterKey); - const existing = await IndexedDB.getCategoryInfo(category.id); - await IndexedDB.storeCategoryInfo({ - id: category.id, - parentId: id, - name: decrypted.name, - files: existing?.files ?? [], - isFileRecursive: existing?.isFileRecursive ?? false, - }); - return { - id: category.id, - ...decrypted, - }; - }), - ); - - const existingFiles = filesRaw - ? await IndexedDB.bulkGetFileInfos(filesRaw.map((file) => file.id)) - : []; - const files = filesRaw +const cache = new FilesystemCache({ + async fetchFromIndexedDB(id) { + const [category, subCategories] = await Promise.all([ + id !== "root" ? IndexedDB.getCategoryInfo(id) : undefined, + IndexedDB.getCategoryInfos(id), + ]); + const files = category?.files ? await Promise.all( - filesRaw.map(async (file, index) => { - const decrypted = await decryptFileMetadata(file, masterKey); - const existing = existingFiles[index]; - if (existing) { - const categoryIds = file.isRecursive - ? existing.categoryIds - : Array.from(new Set([...existing.categoryIds, id as number])); - await IndexedDB.storeFileInfo({ - id: file.id, - parentId: existing.parentId, - contentType: file.contentType, - name: decrypted.name, - createdAt: decrypted.createdAt, - lastModifiedAt: decrypted.lastModifiedAt, - categoryIds, - }); - } - return { - id: file.id, - contentType: file.contentType, - isRecursive: file.isRecursive, - ...decrypted, - }; + category.files.map(async (file) => { + const fileInfo = await IndexedDB.getFileInfo(file.id); + return fileInfo + ? { + id: file.id, + parentId: fileInfo.parentId, + contentType: fileInfo.contentType, + name: fileInfo.name, + createdAt: fileInfo.createdAt, + lastModifiedAt: fileInfo.lastModifiedAt, + isRecursive: file.isRecursive, + } + : undefined; }), ) : undefined; - const decryptedMetadata = metadata - ? 
await decryptCategoryMetadata(metadata, masterKey) - : undefined; - if (id !== "root" && metadata && decryptedMetadata) { - const existingCategory = await IndexedDB.getCategoryInfo(id); - await IndexedDB.storeCategoryInfo({ - id: id as number, - parentId: metadata.parent, - name: decryptedMetadata.name, - files: - files?.map((file) => ({ - id: file.id, - isRecursive: file.isRecursive, - })) ?? - existingCategory?.files ?? - [], - isFileRecursive: existingCategory?.isFileRecursive ?? false, - }); - } - if (id === "root") { return { id, - exists: true as const, + exists: true, subCategories, }; - } else { + } else if (category) { return { id, - exists: true as const, + exists: true, + parentId: category.parentId, + name: category.name, subCategories, - files, - ...decryptedMetadata!, + files: files?.filter((file) => !!file) ?? [], + isFileRecursive: category.isFileRecursive ?? false, }; } - } catch (e) { - if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") { - await IndexedDB.deleteCategoryInfo(id as number); - return { id, exists: false as const }; + }, + + async fetchFromServer(id, cachedInfo, masterKey) { + try { + const category = await trpc().category.get.query({ id, recurse: true }); + const [subCategories, files, metadata] = await Promise.all([ + Promise.all( + category.subCategories.map(async (category) => ({ + id: category.id, + parentId: id, + ...(await decryptCategoryMetadata(category, masterKey)), + })), + ), + category.files && + Promise.all( + category.files.map(async (file) => ({ + id: file.id, + parentId: file.parent, + contentType: file.contentType, + isRecursive: file.isRecursive, + ...(await decryptFileMetadata(file, masterKey)), + })), + ), + category.metadata && decryptCategoryMetadata(category.metadata, masterKey), + ]); + + return storeToIndexedDB( + id !== "root" + ? { + id, + parentId: category.metadata!.parent, + subCategories, + files: files!, + isFileRecursive: cachedInfo?.isFileRecursive ?? 
false, + ...metadata!, + } + : { id, subCategories }, + ); + } catch (e) { + if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") { + await IndexedDB.deleteCategoryInfo(id as number); + return { id, exists: false }; + } + throw e; } - throw e; + }, +}); + +const storeToIndexedDB = (info: CategoryInfo) => { + if (info.id !== "root") { + void IndexedDB.storeCategoryInfo(info); + + // TODO: Bulk Upsert + new Map(info.files.map((file) => [file.id, file])).forEach((file) => { + void IndexedDB.storeFileInfo(file); + }); } + + // TODO: Bulk Upsert + info.subCategories.forEach((category) => { + void IndexedDB.storeCategoryInfo(category); + }); + + void IndexedDB.deleteDanglingCategoryInfos( + info.id, + new Set(info.subCategories.map(({ id }) => id)), + ); + + return { ...info, exists: true as const }; }; export const getCategoryInfo = async (id: CategoryId, masterKey: CryptoKey) => { - return await cache.get(id, async (isInitial, resolve) => { - if (isInitial) { - const info = await fetchFromIndexedDB(id); - if (info) { - resolve(info); - } - } - - const info = await fetchFromServer(id, masterKey); - if (info) { - resolve(info); - } - }); + return await cache.get(id, masterKey); }; diff --git a/src/lib/modules/filesystem/directory.ts b/src/lib/modules/filesystem/directory.ts index 8932698..3f6cab1 100644 --- a/src/lib/modules/filesystem/directory.ts +++ b/src/lib/modules/filesystem/directory.ts @@ -1,125 +1,102 @@ import * as IndexedDB from "$lib/indexedDB"; -import { monotonicResolve } from "$lib/utils"; import { trpc, isTRPCClientError } from "$trpc/client"; import { FilesystemCache, decryptDirectoryMetadata, decryptFileMetadata } from "./internal.svelte"; -import type { MaybeDirectoryInfo } from "./types"; +import type { DirectoryInfo, MaybeDirectoryInfo } from "./types"; -const cache = new FilesystemCache(); - -const fetchFromIndexedDB = async (id: DirectoryId) => { - const [directory, subDirectories, files] = await Promise.all([ - id !== "root" ? 
IndexedDB.getDirectoryInfo(id) : undefined, - IndexedDB.getDirectoryInfos(id), - IndexedDB.getFileInfos(id), - ]); - - if (id === "root") { - return { - id, - exists: true as const, - subDirectories, - files, - }; - } else if (directory) { - return { - id, - exists: true as const, - parentId: directory.parentId, - name: directory.name, - subDirectories, - files, - }; - } -}; - -const fetchFromServer = async (id: DirectoryId, masterKey: CryptoKey) => { - try { - const { - metadata, - subDirectories: subDirectoriesRaw, - files: filesRaw, - } = await trpc().directory.get.query({ id }); - - void IndexedDB.deleteDanglingDirectoryInfos(id, new Set(subDirectoriesRaw.map(({ id }) => id))); - void IndexedDB.deleteDanglingFileInfos(id, new Set(filesRaw.map(({ id }) => id))); - - const existingFiles = await IndexedDB.bulkGetFileInfos(filesRaw.map((file) => file.id)); - const [subDirectories, files, decryptedMetadata] = await Promise.all([ - Promise.all( - subDirectoriesRaw.map(async (directory) => { - const decrypted = await decryptDirectoryMetadata(directory, masterKey); - await IndexedDB.storeDirectoryInfo({ - id: directory.id, - parentId: id, - name: decrypted.name, - }); - return { - id: directory.id, - ...decrypted, - }; - }), - ), - Promise.all( - filesRaw.map(async (file, index) => { - const decrypted = await decryptFileMetadata(file, masterKey); - await IndexedDB.storeFileInfo({ - id: file.id, - parentId: id, - contentType: file.contentType, - name: decrypted.name, - createdAt: decrypted.createdAt, - lastModifiedAt: decrypted.lastModifiedAt, - categoryIds: existingFiles[index]?.categoryIds ?? [], - }); - return { - id: file.id, - contentType: file.contentType, - ...decrypted, - }; - }), - ), - metadata ? decryptDirectoryMetadata(metadata, masterKey) : undefined, +const cache = new FilesystemCache({ + async fetchFromIndexedDB(id) { + const [directory, subDirectories, files] = await Promise.all([ + id !== "root" ? IndexedDB.getDirectoryInfo(id) : undefined, + IndexedDB.getDirectoryInfos(id), + IndexedDB.getFileInfos(id), ]); - if (id !== "root" && metadata && decryptedMetadata) { - await IndexedDB.storeDirectoryInfo({ - id, - parentId: metadata.parent, - name: decryptedMetadata.name, - }); - } - if (id === "root") { return { id, - exists: true as const, + exists: true, subDirectories, files, }; - } else { + } else if (directory) { return { id, - exists: true as const, - parentId: metadata!.parent, + exists: true, + parentId: directory.parentId, + name: directory.name, subDirectories, files, - ...decryptedMetadata!, }; } - } catch (e) { - if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") { - await IndexedDB.deleteDirectoryInfo(id as number); - return { id, exists: false as const }; + }, + + async fetchFromServer(id, _cachedInfo, masterKey) { + try { + const directory = await trpc().directory.get.query({ id }); + const [subDirectories, files, metadata] = await Promise.all([ + Promise.all( + directory.subDirectories.map(async (directory) => ({ + id: directory.id, + parentId: id, + ...(await decryptDirectoryMetadata(directory, masterKey)), + })), + ), + Promise.all( + directory.files.map(async (file) => ({ + id: file.id, + parentId: id, + contentType: file.contentType, + ...(await decryptFileMetadata(file, masterKey)), + })), + ), + directory.metadata && decryptDirectoryMetadata(directory.metadata, masterKey), + ]); + + return storeToIndexedDB( + id !== "root" + ? 
{ + id, + parentId: directory.metadata!.parent, + subDirectories, + files, + ...metadata!, + } + : { id, subDirectories, files }, + ); + } catch (e) { + if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") { + await IndexedDB.deleteDirectoryInfo(id as number); + return { id, exists: false as const }; + } + throw e; } - throw e; + }, +}); + +const storeToIndexedDB = (info: DirectoryInfo) => { + if (info.id !== "root") { + void IndexedDB.storeDirectoryInfo(info); } + + // TODO: Bulk Upsert + info.subDirectories.forEach((subDirectory) => { + void IndexedDB.storeDirectoryInfo(subDirectory); + }); + + // TODO: Bulk Upsert + info.files.forEach((file) => { + void IndexedDB.storeFileInfo(file); + }); + + void IndexedDB.deleteDanglingDirectoryInfos( + info.id, + new Set(info.subDirectories.map(({ id }) => id)), + ); + void IndexedDB.deleteDanglingFileInfos(info.id, new Set(info.files.map(({ id }) => id))); + + return { ...info, exists: true as const }; }; export const getDirectoryInfo = async (id: DirectoryId, masterKey: CryptoKey) => { - return await cache.get(id, (isInitial, resolve) => - monotonicResolve( - [isInitial && fetchFromIndexedDB(id), fetchFromServer(id, masterKey)], - resolve, - ), - ); + return await cache.get(id, masterKey); }; diff --git a/src/lib/modules/filesystem/file.ts b/src/lib/modules/filesystem/file.ts index 45fef78..66ad359 100644 --- a/src/lib/modules/filesystem/file.ts +++ b/src/lib/modules/filesystem/file.ts @@ -1,175 +1,177 @@ import * as IndexedDB from "$lib/indexedDB"; -import { monotonicResolve } from "$lib/utils"; import { trpc, isTRPCClientError } from "$trpc/client"; import { FilesystemCache, decryptFileMetadata, decryptCategoryMetadata } from "./internal.svelte"; -import type { MaybeFileInfo } from "./types"; +import type { FileInfo, MaybeFileInfo } from "./types"; -const cache = new FilesystemCache(); +const cache = new FilesystemCache({ + async fetchFromIndexedDB(id) { + const file = await IndexedDB.getFileInfo(id); + const categories = file?.categoryIds + ? await Promise.all( + file.categoryIds.map(async (categoryId) => { + const category = await IndexedDB.getCategoryInfo(categoryId); + return category + ? { id: category.id, parentId: category.parentId, name: category.name } + : undefined; + }), + ) + : undefined; -const fetchFromIndexedDB = async (id: number) => { - const file = await IndexedDB.getFileInfo(id); - const categories = file - ? await Promise.all( - file.categoryIds.map(async (categoryId) => { - const category = await IndexedDB.getCategoryInfo(categoryId); - return category ? { id: category.id, name: category.name } : undefined; - }), - ) - : undefined; - - if (file) { - return { - id, - exists: true as const, - parentId: file.parentId, - contentType: file.contentType, - name: file.name, - createdAt: file.createdAt, - lastModifiedAt: file.lastModifiedAt, - categories: categories!.filter((category) => !!category), - }; - } -}; - -const bulkFetchFromIndexedDB = async (ids: number[]) => { - const files = await IndexedDB.bulkGetFileInfos(ids); - const categories = await Promise.all( - files.map(async (file) => - file - ? await Promise.all( - file.categoryIds.map(async (categoryId) => { - const category = await IndexedDB.getCategoryInfo(categoryId); - return category ? { id: category.id, name: category.name } : undefined; - }), - ) - : undefined, - ), - ); - return new Map( - files - .map((file, index) => - file - ? 
([ - file.id, - { - ...file, - exists: true, - categories: categories[index]!.filter((category) => !!category), - }, - ] as const) - : undefined, - ) - .filter((file) => !!file), - ); -}; - -const fetchFromServer = async (id: number, masterKey: CryptoKey) => { - try { - const { categories: categoriesRaw, ...metadata } = await trpc().file.get.query({ id }); - const [categories, decryptedMetadata] = await Promise.all([ - Promise.all( - categoriesRaw.map(async (category) => ({ - id: category.id, - ...(await decryptCategoryMetadata(category, masterKey)), - })), - ), - decryptFileMetadata(metadata, masterKey), - ]); - - await IndexedDB.storeFileInfo({ - id, - parentId: metadata.parent, - contentType: metadata.contentType, - name: decryptedMetadata.name, - createdAt: decryptedMetadata.createdAt, - lastModifiedAt: decryptedMetadata.lastModifiedAt, - categoryIds: categories.map((category) => category.id), - }); - - return { - id, - exists: true as const, - parentId: metadata.parent, - contentType: metadata.contentType, - contentIv: metadata.contentIv, - categories, - ...decryptedMetadata, - }; - } catch (e) { - if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") { - await IndexedDB.deleteFileInfo(id); - return { id, exists: false as const }; + if (file) { + return { + id, + exists: true, + parentId: file.parentId, + contentType: file.contentType, + name: file.name, + createdAt: file.createdAt, + lastModifiedAt: file.lastModifiedAt, + categories: categories?.filter((category) => !!category) ?? [], + }; } - throw e; - } -}; + }, -const bulkFetchFromServer = async (ids: number[], masterKey: CryptoKey) => { - const filesRaw = await trpc().file.bulkGet.query({ ids }); - const files = await Promise.all( - filesRaw.map(async (file) => { - const [categories, decryptedMetadata] = await Promise.all([ + async fetchFromServer(id, _cachedInfo, masterKey) { + try { + const file = await trpc().file.get.query({ id }); + const [categories, metadata] = await Promise.all([ Promise.all( file.categories.map(async (category) => ({ id: category.id, + parentId: category.parent, ...(await decryptCategoryMetadata(category, masterKey)), })), ), decryptFileMetadata(file, masterKey), ]); - await IndexedDB.storeFileInfo({ - id: file.id, - parentId: file.parent, - contentType: file.contentType, - name: decryptedMetadata.name, - createdAt: decryptedMetadata.createdAt, - lastModifiedAt: decryptedMetadata.lastModifiedAt, - categoryIds: categories.map((category) => category.id), - }); - return { - id: file.id, - exists: true as const, + return storeToIndexedDB({ + id, parentId: file.parent, + dataKey: metadata.dataKey, contentType: file.contentType, contentIv: file.contentIv, + name: metadata.name, + createdAt: metadata.createdAt, + lastModifiedAt: metadata.lastModifiedAt, categories, - ...decryptedMetadata, - }; - }), - ); + }); + } catch (e) { + if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") { + await IndexedDB.deleteFileInfo(id); + return { id, exists: false as const }; + } + throw e; + } + }, - const existingIds = new Set(filesRaw.map(({ id }) => id)); - return new Map([ - ...files.map((file) => [file.id, file] as const), - ...ids.filter((id) => !existingIds.has(id)).map((id) => [id, { id, exists: false }] as const), - ]); + async bulkFetchFromIndexedDB(ids) { + const files = await IndexedDB.bulkGetFileInfos([...ids]); + const categories = await Promise.all( + files.map(async (file) => + file?.categoryIds + ? 
await Promise.all( + file.categoryIds.map(async (categoryId) => { + const category = await IndexedDB.getCategoryInfo(categoryId); + return category + ? { id: category.id, parentId: category.parentId, name: category.name } + : undefined; + }), + ) + : undefined, + ), + ); + + return new Map( + files + .filter((file) => !!file) + .map((file, index) => [ + file.id, + { + ...file, + exists: true, + categories: categories[index]?.filter((category) => !!category) ?? [], + }, + ]), + ); + }, + + async bulkFetchFromServer(ids, masterKey) { + const idsArray = [...ids.keys()]; + + const filesRaw = await trpc().file.bulkGet.query({ ids: idsArray }); + const files = await Promise.all( + filesRaw.map(async ({ id, categories: categoriesRaw, ...metadataRaw }) => { + const [categories, metadata] = await Promise.all([ + Promise.all( + categoriesRaw.map(async (category) => ({ + id: category.id, + parentId: category.parent, + ...(await decryptCategoryMetadata(category, masterKey)), + })), + ), + decryptFileMetadata(metadataRaw, masterKey), + ]); + + return { + id, + exists: true as const, + parentId: metadataRaw.parent, + contentType: metadataRaw.contentType, + contentIv: metadataRaw.contentIv, + categories, + ...metadata, + }; + }), + ); + + const existingIds = new Set(filesRaw.map(({ id }) => id)); + + return new Map([ + ...bulkStoreToIndexedDB(files), + ...idsArray + .filter((id) => !existingIds.has(id)) + .map((id) => [id, { id, exists: false }] as const), + ]); + }, +}); + +const storeToIndexedDB = (info: FileInfo) => { + void IndexedDB.storeFileInfo({ + ...info, + categoryIds: info.categories.map(({ id }) => id), + }); + + info.categories.forEach((category) => { + void IndexedDB.storeCategoryInfo(category); + }); + + return { ...info, exists: true as const }; +}; + +const bulkStoreToIndexedDB = (infos: FileInfo[]) => { + // TODO: Bulk Upsert + infos.forEach((info) => { + void IndexedDB.storeFileInfo({ + ...info, + categoryIds: info.categories.map(({ id }) => id), + }); + }); + + // TODO: Bulk Upsert + new Map( + infos.flatMap(({ categories }) => categories).map((category) => [category.id, category]), + ).forEach((category) => { + void IndexedDB.storeCategoryInfo(category); + }); + + return infos.map((info) => [info.id, { ...info, exists: true }] as const); }; export const getFileInfo = async (id: number, masterKey: CryptoKey) => { - return await cache.get(id, (isInitial, resolve) => - monotonicResolve( - [isInitial && fetchFromIndexedDB(id), fetchFromServer(id, masterKey)], - resolve, - ), - ); + return await cache.get(id, masterKey); }; export const bulkGetFileInfo = async (ids: number[], masterKey: CryptoKey) => { - return await cache.bulkGet(new Set(ids), (keys, resolve) => - monotonicResolve( - [ - bulkFetchFromIndexedDB( - Array.from( - keys - .entries() - .filter(([, isInitial]) => isInitial) - .map(([key]) => key), - ), - ), - bulkFetchFromServer(Array.from(keys.keys()), masterKey), - ], - resolve, - ), - ); + return await cache.bulkGet(new Set(ids), masterKey); }; diff --git a/src/lib/modules/filesystem/internal.svelte.ts b/src/lib/modules/filesystem/internal.svelte.ts index 8b2b092..6e8d7f2 100644 --- a/src/lib/modules/filesystem/internal.svelte.ts +++ b/src/lib/modules/filesystem/internal.svelte.ts @@ -1,82 +1,120 @@ +import { untrack } from "svelte"; import { unwrapDataKey, decryptString } from "$lib/modules/crypto"; -export class FilesystemCache { - private map = new Map>(); +interface FilesystemCacheOptions { + fetchFromIndexedDB: (key: K) => Promise; + fetchFromServer: (key: K, 
cachedValue: V | undefined, masterKey: CryptoKey) => Promise; + bulkFetchFromIndexedDB?: (keys: Set) => Promise>; + bulkFetchFromServer?: ( + keys: Map, + masterKey: CryptoKey, + ) => Promise>; +} - get(key: K, loader: (isInitial: boolean, resolve: (value: RV | undefined) => void) => void) { - const info = this.map.get(key); - if (info instanceof Promise) { - return info; - } +export class FilesystemCache { + private map = new Map }>(); - const { promise, resolve } = Promise.withResolvers(); - if (!info) { - this.map.set(key, promise); - } + constructor(private readonly options: FilesystemCacheOptions) {} - loader(!info, (loadedInfo) => { - if (!loadedInfo) return; + get(key: K, masterKey: CryptoKey) { + return untrack(() => { + let state = this.map.get(key); + if (state?.promise) return state.value ?? state.promise; - const info = this.map.get(key)!; - if (info instanceof Promise) { - const state = $state(loadedInfo); - this.map.set(key, state as V); - resolve(state as V); - } else { - Object.assign(info, loadedInfo); - resolve(info); + const { promise: newPromise, resolve } = Promise.withResolvers(); + + if (!state) { + const newState = $state({}); + state = newState; + this.map.set(key, newState); } - }); - return info ?? promise; + state.promise = newPromise; + + (state.value + ? Promise.resolve(state.value) + : this.options.fetchFromIndexedDB(key).then((loadedInfo) => { + if (loadedInfo) { + state.value = loadedInfo; + resolve(state.value); + } + return loadedInfo; + }) + ) + .then((cachedInfo) => this.options.fetchFromServer(key, cachedInfo, masterKey)) + .then((loadedInfo) => { + if (state.value) { + Object.assign(state.value, loadedInfo); + } else { + state.value = loadedInfo; + } + resolve(state.value); + }) + .finally(() => { + state.promise = undefined; + }); + + return newPromise; + }); } - async bulkGet( - keys: Set, - loader: (keys: Map, resolve: (values: Map) => void) => void, - ) { - const states = new Map(); - const promises = new Map>(); - const resolvers = new Map void>(); + bulkGet(keys: Set, masterKey: CryptoKey) { + return untrack(() => { + const newPromises = new Map( + keys + .keys() + .filter((key) => this.map.get(key)?.promise === undefined) + .map((key) => [key, Promise.withResolvers()]), + ); + newPromises.forEach(({ promise }, key) => { + const state = this.map.get(key); + if (state) { + state.promise = promise; + } else { + const newState = $state({ promise }); + this.map.set(key, newState); + } + }); - keys.forEach((key) => { - const info = this.map.get(key); - if (info instanceof Promise) { - promises.set(key, info); - } else if (info) { - states.set(key, info); - } else { - const { promise, resolve } = Promise.withResolvers(); - this.map.set(key, promise); - promises.set(key, promise); - resolvers.set(key, resolve); - } - }); - - loader( - new Map([ - ...states.keys().map((key) => [key, false] as const), - ...resolvers.keys().map((key) => [key, true] as const), - ]), - (loadedInfos) => + const resolve = (loadedInfos: Map) => { loadedInfos.forEach((loadedInfo, key) => { - const info = this.map.get(key)!; - const resolve = resolvers.get(key); - if (info instanceof Promise) { - const state = $state(loadedInfo); - this.map.set(key, state as V); - resolve?.(state as V); + const state = this.map.get(key)!; + if (state.value) { + Object.assign(state.value, loadedInfo); } else { - Object.assign(info, loadedInfo); - resolve?.(info); + state.value = loadedInfo; } - }), - ); + newPromises.get(key)!.resolve(state.value); + }); + return loadedInfos; + }; - const 
newStates = await Promise.all( - promises.entries().map(async ([key, promise]) => [key, await promise] as const), - ); - return new Map([...states, ...newStates]); + this.options.bulkFetchFromIndexedDB!( + new Set(newPromises.keys().filter((key) => this.map.get(key)!.value === undefined)), + ) + .then(resolve) + .then(() => + this.options.bulkFetchFromServer!( + new Map( + newPromises.keys().map((key) => [key, { cachedValue: this.map.get(key)!.value }]), + ), + masterKey, + ), + ) + .then(resolve) + .finally(() => { + newPromises.forEach((_, key) => { + this.map.get(key)!.promise = undefined; + }); + }); + + return Promise.all( + keys + .keys() + .filter((key) => this.map.get(key)!.value === undefined) + .map((key) => this.map.get(key)!.promise!), + ).then(() => new Map(keys.keys().map((key) => [key, this.map.get(key)!.value!] as const))); + }); } } diff --git a/src/lib/modules/filesystem/types.ts b/src/lib/modules/filesystem/types.ts index 15b0e93..9f33113 100644 --- a/src/lib/modules/filesystem/types.ts +++ b/src/lib/modules/filesystem/types.ts @@ -20,11 +20,12 @@ interface RootDirectoryInfo { } export type DirectoryInfo = LocalDirectoryInfo | RootDirectoryInfo; -export type SubDirectoryInfo = Omit; export type MaybeDirectoryInfo = | (DirectoryInfo & { exists: true }) | ({ id: DirectoryId; exists: false } & AllUndefined>); +export type SubDirectoryInfo = Omit; + export interface FileInfo { id: number; parentId: DirectoryId; @@ -34,17 +35,19 @@ export interface FileInfo { name: string; createdAt?: Date; lastModifiedAt: Date; - categories: { id: number; name: string }[]; + categories: FileCategoryInfo[]; } -export type SummarizedFileInfo = Omit; -export type CategoryFileInfo = SummarizedFileInfo & { isRecursive: boolean }; export type MaybeFileInfo = | (FileInfo & { exists: true }) | ({ id: number; exists: false } & AllUndefined>); +export type SummarizedFileInfo = Omit; +export type CategoryFileInfo = SummarizedFileInfo & { isRecursive: boolean }; + interface LocalCategoryInfo { id: number; + parentId: DirectoryId; dataKey?: DataKey; name: string; subCategories: SubCategoryInfo[]; @@ -54,6 +57,7 @@ interface LocalCategoryInfo { interface RootCategoryInfo { id: "root"; + parentId?: undefined; dataKey?: undefined; name?: undefined; subCategories: SubCategoryInfo[]; @@ -62,10 +66,12 @@ interface RootCategoryInfo { } export type CategoryInfo = LocalCategoryInfo | RootCategoryInfo; +export type MaybeCategoryInfo = + | (CategoryInfo & { exists: true }) + | ({ id: CategoryId; exists: false } & AllUndefined>); + export type SubCategoryInfo = Omit< LocalCategoryInfo, "subCategories" | "files" | "isFileRecursive" >; -export type MaybeCategoryInfo = - | (CategoryInfo & { exists: true }) - | ({ id: CategoryId; exists: false } & AllUndefined>); +export type FileCategoryInfo = Omit; diff --git a/src/lib/server/db/file.ts b/src/lib/server/db/file.ts index 6a0a062..472930a 100644 --- a/src/lib/server/db/file.ts +++ b/src/lib/server/db/file.ts @@ -39,6 +39,7 @@ export type NewFile = Omit; interface FileCategory { id: number; + parentId: CategoryId; mekVersion: number; encDek: string; dekVersion: Date; @@ -445,6 +446,7 @@ export const getFilesWithCategories = async (userId: number, fileIds: number[]) encLastModifiedAt: file.encrypted_last_modified_at, categories: file.categories.map((category) => ({ id: category.id, + parentId: category.parent_id ?? 
"root", mekVersion: category.master_encryption_key_version, encDek: category.encrypted_data_encryption_key, dekVersion: new Date(category.data_encryption_key_version), @@ -548,6 +550,7 @@ export const getAllFileCategories = async (fileId: number) => { (category) => ({ id: category.id, + parentId: category.parent_id ?? "root", mekVersion: category.master_encryption_key_version, encDek: category.encrypted_data_encryption_key, dekVersion: category.data_encryption_key_version, diff --git a/src/lib/utils/index.ts b/src/lib/utils/index.ts index 9dc3631..1db9577 100644 --- a/src/lib/utils/index.ts +++ b/src/lib/utils/index.ts @@ -1,4 +1,3 @@ export * from "./format"; export * from "./gotoStateful"; -export * from "./promise"; export * from "./sort"; diff --git a/src/lib/utils/promise.ts b/src/lib/utils/promise.ts deleted file mode 100644 index 9e841c8..0000000 --- a/src/lib/utils/promise.ts +++ /dev/null @@ -1,16 +0,0 @@ -export const monotonicResolve = ( - promises: (Promise | false)[], - callback: (value: T) => void, -) => { - let latestResolvedIndex = -1; - promises - .filter((promise) => !!promise) - .forEach((promise, index) => { - promise.then((value) => { - if (index > latestResolvedIndex) { - latestResolvedIndex = index; - callback(value); - } - }); - }); -}; diff --git a/src/trpc/routers/category.ts b/src/trpc/routers/category.ts index 9b2567a..a292889 100644 --- a/src/trpc/routers/category.ts +++ b/src/trpc/routers/category.ts @@ -46,6 +46,7 @@ const categoryRouter = router({ })), files: files?.map((file) => ({ id: file.id, + parent: file.parentId, mekVersion: file.mekVersion, dek: file.encDek, dekVersion: file.dekVersion, diff --git a/src/trpc/routers/file.ts b/src/trpc/routers/file.ts index b08bbf2..c3f8159 100644 --- a/src/trpc/routers/file.ts +++ b/src/trpc/routers/file.ts @@ -33,6 +33,7 @@ const fileRouter = router({ lastModifiedAtIv: file.encLastModifiedAt.iv, categories: categories.map((category) => ({ id: category.id, + parent: category.parentId, mekVersion: category.mekVersion, dek: category.encDek, dekVersion: category.dekVersion, @@ -66,6 +67,7 @@ const fileRouter = router({ lastModifiedAtIv: file.encLastModifiedAt.iv, categories: file.categories.map((category) => ({ id: category.id, + parent: category.parentId, mekVersion: category.mekVersion, dek: category.encDek, dekVersion: category.dekVersion,