7 Commits

Author SHA1 Message Date
static
90ac5ba4c3 Merge pull request #15 from kmc7468/dev
v0.6.0
2025-12-27 14:22:26 +09:00
static
dfffa004ac Merge pull request #13 from kmc7468/dev
v0.5.1
2025-07-12 19:56:12 +09:00
static
0cd55a413d Merge pull request #12 from kmc7468/dev
v0.5.0
2025-07-12 06:01:08 +09:00
static
361d966a59 Merge pull request #10 from kmc7468/dev
v0.4.0
2025-01-30 21:06:50 +09:00
static
aef43b8bfa Merge pull request #6 from kmc7468/dev
v0.3.0
2025-01-18 13:29:09 +09:00
static
7f128cccf6 Merge pull request #5 from kmc7468/dev
v0.2.0
2025-01-13 03:53:14 +09:00
static
a198e5f6dc Merge pull request #2 from kmc7468/dev
v0.1.0
2025-01-09 06:24:31 +09:00
156 changed files with 3072 additions and 4634 deletions

View File

@@ -12,7 +12,6 @@ node_modules
/data
/library
/thumbnails
/uploads
# OS
.DS_Store

View File

@@ -12,4 +12,3 @@ USER_CLIENT_CHALLENGE_EXPIRES=
SESSION_UPGRADE_CHALLENGE_EXPIRES=
LIBRARY_PATH=
THUMBNAILS_PATH=
UPLOADS_PATH=

1
.gitignore vendored
View File

@@ -10,7 +10,6 @@ node_modules
/data
/library
/thumbnails
/uploads
# OS
.DS_Store

View File

@@ -9,7 +9,6 @@ services:
volumes:
- ./data/library:/app/data/library
- ./data/thumbnails:/app/data/thumbnails
- ./data/uploads:/app/data/uploads
environment:
# ArkVault
- DATABASE_HOST=database
@@ -21,7 +20,6 @@ services:
- SESSION_UPGRADE_CHALLENGE_EXPIRES
- LIBRARY_PATH=/app/data/library
- THUMBNAILS_PATH=/app/data/thumbnails
- UPLOADS_PATH=/app/data/uploads
# SvelteKit
- ADDRESS_HEADER=${TRUST_PROXY:+X-Forwarded-For}
- XFF_DEPTH=${TRUST_PROXY:-}

View File

@@ -1,7 +1,7 @@
{
"name": "arkvault",
"private": true,
"version": "0.8.0",
"version": "0.6.0",
"type": "module",
"scripts": {
"dev": "vite dev",
@@ -16,14 +16,12 @@
"db:migrate": "kysely migrate"
},
"devDependencies": {
"@eslint/compat": "^2.0.1",
"@eslint/js": "^9.39.2",
"@iconify-json/material-symbols": "^1.2.51",
"@noble/hashes": "^2.0.1",
"@eslint/compat": "^2.0.0",
"@iconify-json/material-symbols": "^1.2.50",
"@sveltejs/adapter-node": "^5.4.0",
"@sveltejs/kit": "^2.49.4",
"@sveltejs/vite-plugin-svelte": "^6.2.4",
"@tanstack/svelte-virtual": "^3.13.18",
"@sveltejs/kit": "^2.49.2",
"@sveltejs/vite-plugin-svelte": "^6.2.1",
"@tanstack/svelte-virtual": "^3.13.13",
"@trpc/client": "^11.8.1",
"@types/file-saver": "^2.0.7",
"@types/ms": "^0.7.34",
@@ -34,11 +32,11 @@
"dexie": "^4.2.1",
"eslint": "^9.39.2",
"eslint-config-prettier": "^10.1.8",
"eslint-plugin-svelte": "^3.14.0",
"eslint-plugin-svelte": "^3.13.1",
"eslint-plugin-tailwindcss": "^3.18.2",
"exifreader": "^4.35.0",
"exifreader": "^4.33.1",
"file-saver": "^2.0.5",
"globals": "^17.0.0",
"globals": "^16.5.0",
"heic2any": "^0.0.4",
"kysely-ctl": "^0.19.0",
"lru-cache": "^11.2.4",
@@ -51,11 +49,12 @@
"svelte-check": "^4.3.5",
"tailwindcss": "^3.4.19",
"typescript": "^5.9.3",
"typescript-eslint": "^8.52.0",
"typescript-eslint": "^8.50.1",
"unplugin-icons": "^22.5.0",
"vite": "^7.3.1"
"vite": "^7.3.0"
},
"dependencies": {
"@fastify/busboy": "^3.2.0",
"@trpc/server": "^11.8.1",
"argon2": "^0.44.0",
"kysely": "^0.28.9",
@@ -64,7 +63,7 @@
"pg": "^8.16.3",
"superjson": "^2.2.6",
"uuid": "^13.0.0",
"zod": "^4.3.5"
"zod": "^4.2.1"
},
"engines": {
"node": "^22.0.0",

625
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,7 @@
import type { ClientInit } from "@sveltejs/kit";
import { cleanupDanglingInfos, getClientKey, getMasterKeys, getHmacSecrets } from "$lib/indexedDB";
import { prepareFileCache } from "$lib/modules/file";
import { prepareOpfs } from "$lib/modules/opfs";
import { clientKeyStore, masterKeyStore, hmacSecretStore } from "$lib/stores";
const requestPersistentStorage = async () => {
@@ -45,6 +46,7 @@ export const init: ClientInit = async () => {
prepareClientKeyStore(),
prepareMasterKeyStore(),
prepareHmacSecretStore(),
prepareOpfs(),
]);
cleanupDanglingInfos(); // Intended

View File

@@ -8,7 +8,6 @@ import {
cleanupExpiredSessionUpgradeChallenges,
} from "$lib/server/db/session";
import { authenticate, setAgentInfo } from "$lib/server/middlewares";
import { cleanupExpiredUploadSessions } from "$lib/server/services/upload";
export const init: ServerInit = async () => {
await migrateDB();
@@ -17,7 +16,6 @@ export const init: ServerInit = async () => {
cleanupExpiredUserClientChallenges();
cleanupExpiredSessions();
cleanupExpiredSessionUpgradeChallenges();
cleanupExpiredUploadSessions();
});
};

View File

@@ -1,60 +0,0 @@
<script lang="ts">
import { createWindowVirtualizer } from "@tanstack/svelte-virtual";
import type { Snippet } from "svelte";
import type { ClassValue } from "svelte/elements";
interface Props {
class?: ClassValue;
count: number;
item: Snippet<[index: number]>;
itemHeight: (index: number) => number;
itemGap?: number;
placeholder?: Snippet;
}
let { class: className, count, item, itemHeight, itemGap, placeholder }: Props = $props();
let element: HTMLElement | undefined = $state();
let scrollMargin = $state(0);
let virtualizer = $derived(
createWindowVirtualizer({
count,
estimateSize: itemHeight,
gap: itemGap,
scrollMargin,
}),
);
const measureItem = (node: HTMLElement) => {
$effect(() => $virtualizer.measureElement(node));
};
$effect(() => {
if (!element) return;
const observer = new ResizeObserver(() => {
scrollMargin = Math.round(element!.getBoundingClientRect().top + window.scrollY);
});
observer.observe(element.parentElement!);
return () => observer.disconnect();
});
</script>
<div bind:this={element} class={["relative", className]}>
<div style:height="{$virtualizer.getTotalSize()}px">
{#each $virtualizer.getVirtualItems() as virtualItem (virtualItem.key)}
<div
class="absolute left-0 top-0 w-full"
style:transform="translateY({virtualItem.start - scrollMargin}px)"
data-index={virtualItem.index}
use:measureItem
>
{@render item(virtualItem.index)}
</div>
{/each}
</div>
{#if placeholder && count === 0}
{@render placeholder()}
{/if}
</div>

View File

@@ -1,24 +1,42 @@
<script lang="ts">
import { getFileThumbnail } from "$lib/modules/file";
import type { SummarizedFileInfo } from "$lib/modules/filesystem";
import type { Writable } from "svelte/store";
import type { FileInfo } from "$lib/modules/filesystem";
import { requestFileThumbnailDownload } from "$lib/services/file";
interface Props {
info: SummarizedFileInfo;
onclick?: (file: SummarizedFileInfo) => void;
info: Writable<FileInfo | null>;
onclick?: (file: FileInfo) => void;
}
let { info, onclick }: Props = $props();
let thumbnail = $derived(getFileThumbnail(info));
let thumbnail: string | undefined = $state();
$effect(() => {
if ($info) {
requestFileThumbnailDownload($info.id, $info.dataKey)
.then((thumbnailUrl) => {
thumbnail = thumbnailUrl ?? undefined;
})
.catch(() => {
// TODO: Error Handling
thumbnail = undefined;
});
} else {
thumbnail = undefined;
}
});
</script>
{#if $info}
<button
onclick={onclick && (() => setTimeout(() => onclick(info), 100))}
onclick={() => onclick?.($info)}
class="aspect-square overflow-hidden rounded transition active:scale-95 active:brightness-90"
>
{#if $thumbnail}
<img src={$thumbnail} alt={info.name} class="h-full w-full object-cover" />
{#if thumbnail}
<img src={thumbnail} alt={$info.name} class="h-full w-full object-cover" />
{:else}
<div class="h-full w-full bg-gray-100"></div>
{/if}
</button>
{/if}

View File

@@ -3,4 +3,3 @@ export * from "./buttons";
export * from "./divs";
export * from "./inputs";
export { default as Modal } from "./Modal.svelte";
export { default as RowVirtualizer } from "./RowVirtualizer.svelte";

View File

@@ -1,44 +0,0 @@
<script module lang="ts">
import type { DataKey } from "$lib/modules/filesystem";
export interface SelectedCategory {
id: number;
dataKey?: DataKey;
name: string;
}
</script>
<script lang="ts">
import type { Component } from "svelte";
import type { SvelteHTMLElements } from "svelte/elements";
import { ActionEntryButton } from "$lib/components/atoms";
import { CategoryLabel } from "$lib/components/molecules";
import type { SubCategoryInfo } from "$lib/modules/filesystem";
import { sortEntries } from "$lib/utils";
interface Props {
categories: SubCategoryInfo[];
categoryMenuIcon?: Component<SvelteHTMLElements["svg"]>;
onCategoryClick: (category: SelectedCategory) => void;
onCategoryMenuClick?: (category: SelectedCategory) => void;
}
let { categories, categoryMenuIcon, onCategoryClick, onCategoryMenuClick }: Props = $props();
let categoriesWithName = $derived(sortEntries([...categories]));
</script>
{#if categoriesWithName.length > 0}
<div class="space-y-1">
{#each categoriesWithName as category (category.id)}
<ActionEntryButton
class="h-12"
onclick={() => onCategoryClick(category)}
actionButtonIcon={categoryMenuIcon}
onActionButtonClick={() => onCategoryMenuClick?.(category)}
>
<CategoryLabel name={category.name} />
</ActionEntryButton>
{/each}
</div>
{/if}

View File

@@ -0,0 +1,63 @@
<script lang="ts">
import { untrack, type Component } from "svelte";
import type { SvelteHTMLElements } from "svelte/elements";
import { get, type Writable } from "svelte/store";
import type { CategoryInfo } from "$lib/modules/filesystem";
import { SortBy, sortEntries } from "$lib/utils";
import Category from "./Category.svelte";
import type { SelectedCategory } from "./service";
interface Props {
categories: Writable<CategoryInfo | null>[];
categoryMenuIcon?: Component<SvelteHTMLElements["svg"]>;
onCategoryClick: (category: SelectedCategory) => void;
onCategoryMenuClick?: (category: SelectedCategory) => void;
sortBy?: SortBy;
}
let {
categories,
categoryMenuIcon,
onCategoryClick,
onCategoryMenuClick,
sortBy = SortBy.NAME_ASC,
}: Props = $props();
let categoriesWithName: { name?: string; info: Writable<CategoryInfo | null> }[] = $state([]);
$effect(() => {
categoriesWithName = categories.map((category) => ({
name: get(category)?.name,
info: category,
}));
const sort = () => {
sortEntries(categoriesWithName, sortBy);
};
return untrack(() => {
sort();
const unsubscribes = categoriesWithName.map((category) =>
category.info.subscribe((value) => {
if (category.name === value?.name) return;
category.name = value?.name;
sort();
}),
);
return () => unsubscribes.forEach((unsubscribe) => unsubscribe());
});
});
</script>
{#if categoriesWithName.length > 0}
<div class="space-y-1">
{#each categoriesWithName as { info }}
<Category
{info}
menuIcon={categoryMenuIcon}
onclick={onCategoryClick}
onMenuClick={onCategoryMenuClick}
/>
{/each}
</div>
{/if}

View File

@@ -0,0 +1,43 @@
<script lang="ts">
import type { Component } from "svelte";
import type { SvelteHTMLElements } from "svelte/elements";
import type { Writable } from "svelte/store";
import { ActionEntryButton } from "$lib/components/atoms";
import { CategoryLabel } from "$lib/components/molecules";
import type { CategoryInfo } from "$lib/modules/filesystem";
import type { SelectedCategory } from "./service";
interface Props {
info: Writable<CategoryInfo | null>;
menuIcon?: Component<SvelteHTMLElements["svg"]>;
onclick: (category: SelectedCategory) => void;
onMenuClick?: (category: SelectedCategory) => void;
}
let { info, menuIcon, onclick, onMenuClick }: Props = $props();
const openCategory = () => {
const { id, dataKey, dataKeyVersion, name } = $info as CategoryInfo;
if (!dataKey || !dataKeyVersion) return; // TODO: Error handling
onclick({ id, dataKey, dataKeyVersion, name });
};
const openMenu = () => {
const { id, dataKey, dataKeyVersion, name } = $info as CategoryInfo;
if (!dataKey || !dataKeyVersion) return; // TODO: Error handling
onMenuClick!({ id, dataKey, dataKeyVersion, name });
};
</script>
{#if $info}
<ActionEntryButton
class="h-12"
onclick={openCategory}
actionButtonIcon={menuIcon}
onActionButtonClick={openMenu}
>
<CategoryLabel name={$info.name!} />
</ActionEntryButton>
{/if}

View File

@@ -0,0 +1,2 @@
export { default } from "./Categories.svelte";
export * from "./service";

View File

@@ -0,0 +1,6 @@
export interface SelectedCategory {
id: number;
dataKey: CryptoKey;
dataKeyVersion: Date;
name: string;
}

View File

@@ -1,8 +1,10 @@
<script lang="ts">
import type { Component } from "svelte";
import type { ClassValue, SvelteHTMLElements } from "svelte/elements";
import type { Writable } from "svelte/store";
import { Categories, IconEntryButton, type SelectedCategory } from "$lib/components/molecules";
import type { CategoryInfo } from "$lib/modules/filesystem";
import { getCategoryInfo, type CategoryInfo } from "$lib/modules/filesystem";
import { masterKeyStore } from "$lib/stores";
import IconAddCircle from "~icons/material-symbols/add-circle";
@@ -25,6 +27,14 @@
subCategoryCreatePosition = "bottom",
subCategoryMenuIcon,
}: Props = $props();
let subCategories: Writable<CategoryInfo | null>[] = $state([]);
$effect(() => {
subCategories = info.subCategoryIds.map((id) =>
getCategoryInfo(id, $masterKeyStore?.get(1)?.key!),
);
});
</script>
<div class={["space-y-1", className]}>
@@ -43,12 +53,14 @@
{#if subCategoryCreatePosition === "top"}
{@render subCategoryCreate()}
{/if}
{#key info}
<Categories
categories={info.subCategories}
categories={subCategories}
categoryMenuIcon={subCategoryMenuIcon}
onCategoryClick={onSubCategoryClick}
onCategoryMenuClick={onSubCategoryMenuClick}
/>
{/key}
{#if subCategoryCreatePosition === "bottom"}
{@render subCategoryCreate()}
{/if}

View File

@@ -1,7 +1,7 @@
export * from "./ActionModal.svelte";
export { default as ActionModal } from "./ActionModal.svelte";
export * from "./Categories.svelte";
export { default as Categories } from "./Categories.svelte";
export * from "./Categories";
export { default as Categories } from "./Categories";
export { default as IconEntryButton } from "./IconEntryButton.svelte";
export * from "./labels";
export { default as SubCategories } from "./SubCategories.svelte";

View File

@@ -0,0 +1,107 @@
<script lang="ts">
import { untrack } from "svelte";
import { get, type Writable } from "svelte/store";
import { CheckBox } from "$lib/components/atoms";
import { SubCategories, type SelectedCategory } from "$lib/components/molecules";
import { getFileInfo, type FileInfo, type CategoryInfo } from "$lib/modules/filesystem";
import { masterKeyStore } from "$lib/stores";
import { SortBy, sortEntries } from "$lib/utils";
import File from "./File.svelte";
import type { SelectedFile } from "./service";
import IconMoreVert from "~icons/material-symbols/more-vert";
interface Props {
info: CategoryInfo;
onFileClick: (file: SelectedFile) => void;
onFileRemoveClick: (file: SelectedFile) => void;
onSubCategoryClick: (subCategory: SelectedCategory) => void;
onSubCategoryCreateClick: () => void;
onSubCategoryMenuClick: (subCategory: SelectedCategory) => void;
sortBy?: SortBy;
isFileRecursive: boolean;
}
let {
info,
onFileClick,
onFileRemoveClick,
onSubCategoryClick,
onSubCategoryCreateClick,
onSubCategoryMenuClick,
sortBy = SortBy.NAME_ASC,
isFileRecursive = $bindable(),
}: Props = $props();
let files: { name?: string; info: Writable<FileInfo | null>; isRecursive: boolean }[] = $state(
[],
);
$effect(() => {
files =
info.files
?.filter(({ isRecursive }) => isFileRecursive || !isRecursive)
.map(({ id, isRecursive }) => {
const info = getFileInfo(id, $masterKeyStore?.get(1)?.key!);
return {
name: get(info)?.name,
info,
isRecursive,
};
}) ?? [];
const sort = () => {
sortEntries(files, sortBy);
};
return untrack(() => {
sort();
const unsubscribes = files.map((file) =>
file.info.subscribe((value) => {
if (file.name === value?.name) return;
file.name = value?.name;
sort();
}),
);
return () => unsubscribes.forEach((unsubscribe) => unsubscribe());
});
});
</script>
<div class="space-y-4">
<div class="space-y-4 bg-white p-4">
{#if info.id !== "root"}
<p class="text-lg font-bold text-gray-800">하위 카테고리</p>
{/if}
<SubCategories
{info}
{onSubCategoryClick}
{onSubCategoryCreateClick}
{onSubCategoryMenuClick}
subCategoryMenuIcon={IconMoreVert}
/>
</div>
{#if info.id !== "root"}
<div class="space-y-4 bg-white p-4">
<div class="flex items-center justify-between">
<p class="text-lg font-bold text-gray-800">파일</p>
<CheckBox bind:checked={isFileRecursive}>
<p class="font-medium">하위 카테고리의 파일</p>
</CheckBox>
</div>
<div class="space-y-1">
{#key info}
{#each files as { info, isRecursive }}
<File
{info}
onclick={onFileClick}
onRemoveClick={!isRecursive ? onFileRemoveClick : undefined}
/>
{:else}
<p class="text-gray-500 text-center">이 카테고리에 추가된 파일이 없어요.</p>
{/each}
{/key}
</div>
</div>
{/if}
</div>

View File

@@ -0,0 +1,59 @@
<script lang="ts">
import type { Writable } from "svelte/store";
import { ActionEntryButton } from "$lib/components/atoms";
import { DirectoryEntryLabel } from "$lib/components/molecules";
import type { FileInfo } from "$lib/modules/filesystem";
import { requestFileThumbnailDownload, type SelectedFile } from "./service";
import IconClose from "~icons/material-symbols/close";
interface Props {
info: Writable<FileInfo | null>;
onclick: (selectedFile: SelectedFile) => void;
onRemoveClick?: (selectedFile: SelectedFile) => void;
}
let { info, onclick, onRemoveClick }: Props = $props();
let thumbnail: string | undefined = $state();
const openFile = () => {
const { id, dataKey, dataKeyVersion, name } = $info as FileInfo;
if (!dataKey || !dataKeyVersion) return; // TODO: Error handling
onclick({ id, dataKey, dataKeyVersion, name });
};
const removeFile = () => {
const { id, dataKey, dataKeyVersion, name } = $info as FileInfo;
if (!dataKey || !dataKeyVersion) return; // TODO: Error handling
onRemoveClick!({ id, dataKey, dataKeyVersion, name });
};
$effect(() => {
if ($info) {
requestFileThumbnailDownload($info.id, $info.dataKey)
.then((thumbnailUrl) => {
thumbnail = thumbnailUrl ?? undefined;
})
.catch(() => {
// TODO: Error Handling
thumbnail = undefined;
});
} else {
thumbnail = undefined;
}
});
</script>
{#if $info}
<ActionEntryButton
class="h-12"
onclick={openFile}
actionButtonIcon={onRemoveClick && IconClose}
onActionButtonClick={removeFile}
>
<DirectoryEntryLabel type="file" {thumbnail} name={$info.name} />
</ActionEntryButton>
{/if}

View File

@@ -0,0 +1,2 @@
export { default } from "./Category.svelte";
export * from "./service";

View File

@@ -0,0 +1,8 @@
export { requestFileThumbnailDownload } from "$lib/services/file";
export interface SelectedFile {
id: number;
dataKey: CryptoKey;
dataKeyVersion: Date;
name: string;
}

View File

@@ -0,0 +1,148 @@
<script lang="ts">
import { createWindowVirtualizer } from "@tanstack/svelte-virtual";
import { untrack } from "svelte";
import { get, type Writable } from "svelte/store";
import { FileThumbnailButton } from "$lib/components/atoms";
import type { FileInfo } from "$lib/modules/filesystem";
import { formatDate, formatDateSortable, SortBy, sortEntries } from "$lib/utils";
interface Props {
files: Writable<FileInfo | null>[];
onFileClick?: (file: FileInfo) => void;
}
let { files, onFileClick }: Props = $props();
type FileEntry =
| { date?: undefined; contentType?: undefined; info: Writable<FileInfo | null> }
| { date: Date; contentType: string; info: Writable<FileInfo | null> };
type Row =
| { type: "header"; key: string; label: string }
| { type: "items"; key: string; items: FileEntry[] };
let filesWithDate: FileEntry[] = $state([]);
let rows: Row[] = $state([]);
let listElement: HTMLDivElement | undefined = $state();
const virtualizer = createWindowVirtualizer({
count: 0,
getItemKey: (index) => rows[index]!.key,
estimateSize: () => 1000, // TODO
});
const measureRow = (node: HTMLElement) => {
$virtualizer.measureElement(node);
return {
update: () => $virtualizer.measureElement(node),
};
};
$effect(() => {
filesWithDate = files.map((file) => {
const info = get(file);
if (info) {
return {
date: info.createdAt ?? info.lastModifiedAt,
contentType: info.contentType,
info: file,
};
} else {
return { info: file };
}
});
const buildRows = () => {
const map = new Map<string, FileEntry[]>();
for (const file of filesWithDate) {
if (
!file.date ||
!(file.contentType.startsWith("image/") || file.contentType.startsWith("video/"))
) {
continue;
}
const date = formatDateSortable(file.date);
const entries = map.get(date) ?? [];
entries.push(file);
map.set(date, entries);
}
const newRows: Row[] = [];
const sortedDates = Array.from(map.keys()).sort((a, b) => b.localeCompare(a));
for (const date of sortedDates) {
const entries = map.get(date)!;
sortEntries(entries, SortBy.DATE_DESC);
newRows.push({
type: "header",
key: `header-${date}`,
label: formatDate(entries[0]!.date!),
});
newRows.push({
type: "items",
key: `items-${date}`,
items: entries,
});
}
rows = newRows;
$virtualizer.setOptions({ count: rows.length });
};
return untrack(() => {
buildRows();
const unsubscribes = filesWithDate.map((file) =>
file.info.subscribe((value) => {
const newDate = value?.createdAt ?? value?.lastModifiedAt;
const newContentType = value?.contentType;
if (file.date?.getTime() === newDate?.getTime() && file.contentType === newContentType) {
return;
}
file.date = newDate;
file.contentType = newContentType;
buildRows();
}),
);
return () => unsubscribes.forEach((unsubscribe) => unsubscribe());
});
});
</script>
<div bind:this={listElement} class="relative flex flex-grow flex-col">
<div style="height: {$virtualizer.getTotalSize()}px;">
{#each $virtualizer.getVirtualItems() as virtualRow (virtualRow.key)}
{@const row = rows[virtualRow.index]!}
<div
use:measureRow
data-index={virtualRow.index}
class="absolute left-0 top-0 w-full"
style="transform: translateY({virtualRow.start}px);"
>
{#if row.type === "header"}
<p class="pb-2 font-medium">{row.label}</p>
{:else}
<div class="grid grid-cols-4 gap-1 pb-4">
{#each row.items as { info }}
<FileThumbnailButton {info} onclick={onFileClick} />
{/each}
</div>
{/if}
</div>
{/each}
</div>
{#if $virtualizer.getVirtualItems().length === 0}
<div class="flex h-full flex-grow items-center justify-center">
<p class="text-gray-500">
{#if files.length === 0}
업로드된 파일이 없어요.
{:else if filesWithDate.length === 0}
파일 목록을 불러오고 있어요.
{:else}
사진 또는 동영상이 없어요.
{/if}
</p>
</div>
{/if}
</div>

View File

@@ -1 +1,4 @@
export * from "./Category";
export { default as Category } from "./Category";
export { default as Gallery } from "./Gallery.svelte";
export * from "./modals";

View File

@@ -1,2 +0,0 @@
export * from "./serviceWorker";
export * from "./upload";

View File

@@ -1 +0,0 @@
export const DECRYPTED_FILE_URL_PREFIX = "/_internal/decryptedFile/";

View File

@@ -1,6 +0,0 @@
export const AES_GCM_IV_SIZE = 12;
export const AES_GCM_TAG_SIZE = 16;
export const ENCRYPTION_OVERHEAD = AES_GCM_IV_SIZE + AES_GCM_TAG_SIZE;
export const CHUNK_SIZE = 4 * 1024 * 1024; // 4 MiB
export const ENCRYPTED_CHUNK_SIZE = CHUNK_SIZE + ENCRYPTION_OVERHEAD;

View File

@@ -1,5 +1,7 @@
import { Dexie, type EntityTable } from "dexie";
export type DirectoryId = "root" | number;
interface DirectoryInfo {
id: number;
parentId: DirectoryId;
@@ -13,15 +15,17 @@ interface FileInfo {
contentType: string;
createdAt?: Date;
lastModifiedAt: Date;
categoryIds?: number[];
categoryIds: number[];
}
export type CategoryId = "root" | number;
interface CategoryInfo {
id: number;
parentId: CategoryId;
name: string;
files?: { id: number; isRecursive: boolean }[];
isFileRecursive?: boolean;
files: { id: number; isRecursive: boolean }[];
isFileRecursive: boolean;
}
const filesystem = new Dexie("filesystem") as Dexie & {
@@ -55,23 +59,13 @@ export const getDirectoryInfo = async (id: number) => {
};
export const storeDirectoryInfo = async (directoryInfo: DirectoryInfo) => {
await filesystem.directory.upsert(directoryInfo.id, { ...directoryInfo });
await filesystem.directory.put(directoryInfo);
};
export const deleteDirectoryInfo = async (id: number) => {
await filesystem.directory.delete(id);
};
export const deleteDanglingDirectoryInfos = async (
parentId: DirectoryId,
validIds: Set<number>,
) => {
await filesystem.directory
.where({ parentId })
.and((directory) => !validIds.has(directory.id))
.delete();
};
export const getAllFileInfos = async () => {
return await filesystem.file.toArray();
};
@@ -84,29 +78,14 @@ export const getFileInfo = async (id: number) => {
return await filesystem.file.get(id);
};
export const bulkGetFileInfos = async (ids: number[]) => {
return await filesystem.file.bulkGet(ids);
};
export const storeFileInfo = async (fileInfo: FileInfo) => {
await filesystem.file.upsert(fileInfo.id, { ...fileInfo });
await filesystem.file.put(fileInfo);
};
export const deleteFileInfo = async (id: number) => {
await filesystem.file.delete(id);
};
export const bulkDeleteFileInfos = async (ids: number[]) => {
await filesystem.file.bulkDelete(ids);
};
export const deleteDanglingFileInfos = async (parentId: DirectoryId, validIds: Set<number>) => {
await filesystem.file
.where({ parentId })
.and((file) => !validIds.has(file.id))
.delete();
};
export const getCategoryInfos = async (parentId: CategoryId) => {
return await filesystem.category.where({ parentId }).toArray();
};
@@ -116,7 +95,7 @@ export const getCategoryInfo = async (id: number) => {
};
export const storeCategoryInfo = async (categoryInfo: CategoryInfo) => {
await filesystem.category.upsert(categoryInfo.id, { ...categoryInfo });
await filesystem.category.put(categoryInfo);
};
export const updateCategoryInfo = async (id: number, changes: { isFileRecursive?: boolean }) => {
@@ -127,13 +106,6 @@ export const deleteCategoryInfo = async (id: number) => {
await filesystem.category.delete(id);
};
export const deleteDanglingCategoryInfos = async (parentId: CategoryId, validIds: Set<number>) => {
await filesystem.category
.where({ parentId })
.and((category) => !validIds.has(category.id))
.delete();
};
export const cleanupDanglingInfos = async () => {
const validDirectoryIds: number[] = [];
const validFileIds: number[] = [];

View File

@@ -70,12 +70,12 @@ export const storeMasterKeys = async (keys: MasterKey[]) => {
};
export const getHmacSecrets = async () => {
return (await keyStore.hmacSecret.toArray()).filter(({ secret }) => secret.extractable);
return await keyStore.hmacSecret.toArray();
};
export const storeHmacSecrets = async (secrets: HmacSecret[]) => {
if (secrets.some(({ secret }) => !secret.extractable)) {
throw new Error("Hmac secrets must be extractable");
if (secrets.some(({ secret }) => secret.extractable)) {
throw new Error("Hmac secrets must be nonextractable");
}
await keyStore.hmacSecret.bulkPut(secrets);
};

View File

@@ -1,15 +1,8 @@
import { AES_GCM_IV_SIZE } from "$lib/constants";
import {
encodeString,
decodeString,
encodeToBase64,
decodeFromBase64,
concatenateBuffers,
} from "./utils";
import { encodeString, decodeString, encodeToBase64, decodeFromBase64 } from "./util";
export const generateMasterKey = async () => {
return {
masterKey: await crypto.subtle.generateKey(
masterKey: await window.crypto.subtle.generateKey(
{
name: "AES-KW",
length: 256,
@@ -22,7 +15,7 @@ export const generateMasterKey = async () => {
export const generateDataKey = async () => {
return {
dataKey: await crypto.subtle.generateKey(
dataKey: await window.crypto.subtle.generateKey(
{
name: "AES-GCM",
length: 256,
@@ -35,9 +28,9 @@ export const generateDataKey = async () => {
};
export const makeAESKeyNonextractable = async (key: CryptoKey) => {
return await crypto.subtle.importKey(
return await window.crypto.subtle.importKey(
"raw",
await crypto.subtle.exportKey("raw", key),
await window.crypto.subtle.exportKey("raw", key),
key.algorithm,
false,
key.usages,
@@ -45,12 +38,12 @@ export const makeAESKeyNonextractable = async (key: CryptoKey) => {
};
export const wrapDataKey = async (dataKey: CryptoKey, masterKey: CryptoKey) => {
return encodeToBase64(await crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW"));
return encodeToBase64(await window.crypto.subtle.wrapKey("raw", dataKey, masterKey, "AES-KW"));
};
export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey) => {
return {
dataKey: await crypto.subtle.unwrapKey(
dataKey: await window.crypto.subtle.unwrapKey(
"raw",
decodeFromBase64(dataKeyWrapped),
masterKey,
@@ -63,12 +56,12 @@ export const unwrapDataKey = async (dataKeyWrapped: string, masterKey: CryptoKey
};
export const wrapHmacSecret = async (hmacSecret: CryptoKey, masterKey: CryptoKey) => {
return encodeToBase64(await crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW"));
return encodeToBase64(await window.crypto.subtle.wrapKey("raw", hmacSecret, masterKey, "AES-KW"));
};
export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: CryptoKey) => {
return {
hmacSecret: await crypto.subtle.unwrapKey(
hmacSecret: await window.crypto.subtle.unwrapKey(
"raw",
decodeFromBase64(hmacSecretWrapped),
masterKey,
@@ -77,15 +70,15 @@ export const unwrapHmacSecret = async (hmacSecretWrapped: string, masterKey: Cry
name: "HMAC",
hash: "SHA-256",
} satisfies HmacImportParams,
true, // Extractable
false, // Nonextractable
["sign", "verify"],
),
};
};
export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => {
const iv = crypto.getRandomValues(new Uint8Array(12));
const ciphertext = await crypto.subtle.encrypt(
const iv = window.crypto.getRandomValues(new Uint8Array(12));
const ciphertext = await window.crypto.subtle.encrypt(
{
name: "AES-GCM",
iv,
@@ -93,18 +86,14 @@ export const encryptData = async (data: BufferSource, dataKey: CryptoKey) => {
dataKey,
data,
);
return { ciphertext, iv: iv.buffer };
return { ciphertext, iv: encodeToBase64(iv.buffer) };
};
export const decryptData = async (
ciphertext: BufferSource,
iv: string | BufferSource,
dataKey: CryptoKey,
) => {
return await crypto.subtle.decrypt(
export const decryptData = async (ciphertext: BufferSource, iv: string, dataKey: CryptoKey) => {
return await window.crypto.subtle.decrypt(
{
name: "AES-GCM",
iv: typeof iv === "string" ? decodeFromBase64(iv) : iv,
iv: decodeFromBase64(iv),
} satisfies AesGcmParams,
dataKey,
ciphertext,
@@ -113,22 +102,9 @@ export const decryptData = async (
export const encryptString = async (plaintext: string, dataKey: CryptoKey) => {
const { ciphertext, iv } = await encryptData(encodeString(plaintext), dataKey);
return { ciphertext: encodeToBase64(ciphertext), iv: encodeToBase64(iv) };
return { ciphertext: encodeToBase64(ciphertext), iv };
};
export const decryptString = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
return decodeString(await decryptData(decodeFromBase64(ciphertext), iv, dataKey));
};
export const encryptChunk = async (chunk: ArrayBuffer, dataKey: CryptoKey) => {
const { ciphertext, iv } = await encryptData(chunk, dataKey);
return concatenateBuffers(iv, ciphertext).buffer;
};
export const decryptChunk = async (encryptedChunk: ArrayBuffer, dataKey: CryptoKey) => {
return await decryptData(
encryptedChunk.slice(AES_GCM_IV_SIZE),
encryptedChunk.slice(0, AES_GCM_IV_SIZE),
dataKey,
);
};

View File

@@ -1,4 +1,4 @@
export * from "./aes";
export * from "./rsa";
export * from "./sha";
export * from "./utils";
export * from "./util";

View File

@@ -1,7 +1,7 @@
import { encodeString, encodeToBase64, decodeFromBase64 } from "./utils";
import { encodeString, encodeToBase64, decodeFromBase64 } from "./util";
export const generateEncryptionKeyPair = async () => {
const keyPair = await crypto.subtle.generateKey(
const keyPair = await window.crypto.subtle.generateKey(
{
name: "RSA-OAEP",
modulusLength: 4096,
@@ -18,7 +18,7 @@ export const generateEncryptionKeyPair = async () => {
};
export const generateSigningKeyPair = async () => {
const keyPair = await crypto.subtle.generateKey(
const keyPair = await window.crypto.subtle.generateKey(
{
name: "RSA-PSS",
modulusLength: 4096,
@@ -37,7 +37,7 @@ export const generateSigningKeyPair = async () => {
export const exportRSAKey = async (key: CryptoKey) => {
const format = key.type === "public" ? ("spki" as const) : ("pkcs8" as const);
return {
key: await crypto.subtle.exportKey(format, key),
key: await window.crypto.subtle.exportKey(format, key),
format,
};
};
@@ -54,14 +54,14 @@ export const importEncryptionKeyPairFromBase64 = async (
name: "RSA-OAEP",
hash: "SHA-256",
};
const encryptKey = await crypto.subtle.importKey(
const encryptKey = await window.crypto.subtle.importKey(
"spki",
decodeFromBase64(encryptKeyBase64),
algorithm,
true,
["encrypt", "wrapKey"],
);
const decryptKey = await crypto.subtle.importKey(
const decryptKey = await window.crypto.subtle.importKey(
"pkcs8",
decodeFromBase64(decryptKeyBase64),
algorithm,
@@ -79,14 +79,14 @@ export const importSigningKeyPairFromBase64 = async (
name: "RSA-PSS",
hash: "SHA-256",
};
const signKey = await crypto.subtle.importKey(
const signKey = await window.crypto.subtle.importKey(
"pkcs8",
decodeFromBase64(signKeyBase64),
algorithm,
true,
["sign"],
);
const verifyKey = await crypto.subtle.importKey(
const verifyKey = await window.crypto.subtle.importKey(
"spki",
decodeFromBase64(verifyKeyBase64),
algorithm,
@@ -98,11 +98,17 @@ export const importSigningKeyPairFromBase64 = async (
export const makeRSAKeyNonextractable = async (key: CryptoKey) => {
const { key: exportedKey, format } = await exportRSAKey(key);
return await crypto.subtle.importKey(format, exportedKey, key.algorithm, false, key.usages);
return await window.crypto.subtle.importKey(
format,
exportedKey,
key.algorithm,
false,
key.usages,
);
};
export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey) => {
return await crypto.subtle.decrypt(
return await window.crypto.subtle.decrypt(
{
name: "RSA-OAEP",
} satisfies RsaOaepParams,
@@ -113,7 +119,7 @@ export const decryptChallenge = async (challenge: string, decryptKey: CryptoKey)
export const wrapMasterKey = async (masterKey: CryptoKey, encryptKey: CryptoKey) => {
return encodeToBase64(
await crypto.subtle.wrapKey("raw", masterKey, encryptKey, {
await window.crypto.subtle.wrapKey("raw", masterKey, encryptKey, {
name: "RSA-OAEP",
} satisfies RsaOaepParams),
);
@@ -125,7 +131,7 @@ export const unwrapMasterKey = async (
extractable = false,
) => {
return {
masterKey: await crypto.subtle.unwrapKey(
masterKey: await window.crypto.subtle.unwrapKey(
"raw",
decodeFromBase64(masterKeyWrapped),
decryptKey,
@@ -140,7 +146,7 @@ export const unwrapMasterKey = async (
};
export const signMessageRSA = async (message: BufferSource, signKey: CryptoKey) => {
return await crypto.subtle.sign(
return await window.crypto.subtle.sign(
{
name: "RSA-PSS",
saltLength: 32, // SHA-256
@@ -155,7 +161,7 @@ export const verifySignatureRSA = async (
signature: BufferSource,
verifyKey: CryptoKey,
) => {
return await crypto.subtle.verify(
return await window.crypto.subtle.verify(
{
name: "RSA-PSS",
saltLength: 32, // SHA-256

View File

@@ -1,13 +1,10 @@
import HmacWorker from "$workers/hmac?worker";
import type { ComputeMessage, ResultMessage } from "$workers/hmac";
export const digestMessage = async (message: BufferSource) => {
return await crypto.subtle.digest("SHA-256", message);
return await window.crypto.subtle.digest("SHA-256", message);
};
export const generateHmacSecret = async () => {
return {
hmacSecret: await crypto.subtle.generateKey(
hmacSecret: await window.crypto.subtle.generateKey(
{
name: "HMAC",
hash: "SHA-256",
@@ -18,24 +15,6 @@ export const generateHmacSecret = async () => {
};
};
export const signMessageHmac = async (message: Blob, hmacSecret: CryptoKey) => {
const stream = message.stream();
const hmacSecretRaw = new Uint8Array(await crypto.subtle.exportKey("raw", hmacSecret));
const worker = new HmacWorker();
return new Promise<Uint8Array>((resolve, reject) => {
worker.onmessage = ({ data }: MessageEvent<ResultMessage>) => {
resolve(data.result);
worker.terminate();
};
worker.onerror = ({ error }) => {
reject(error);
worker.terminate();
};
worker.postMessage({ stream, key: hmacSecretRaw } satisfies ComputeMessage, {
transfer: [stream, hmacSecretRaw.buffer],
});
});
export const signMessageHmac = async (message: BufferSource, hmacSecret: CryptoKey) => {
return await window.crypto.subtle.sign("HMAC", hmacSecret, message);
};

View File

@@ -9,8 +9,8 @@ export const decodeString = (data: ArrayBuffer) => {
return textDecoder.decode(data);
};
export const encodeToBase64 = (data: ArrayBuffer | Uint8Array) => {
return btoa(String.fromCharCode(...(data instanceof ArrayBuffer ? new Uint8Array(data) : data)));
export const encodeToBase64 = (data: ArrayBuffer) => {
return btoa(String.fromCharCode(...new Uint8Array(data)));
};
export const decodeFromBase64 = (data: string) => {

View File

@@ -1,12 +1,15 @@
import { LRUCache } from "lru-cache";
import {
getFileCacheIndex as getFileCacheIndexFromIndexedDB,
storeFileCacheIndex,
deleteFileCacheIndex,
type FileCacheIndex,
} from "$lib/indexedDB";
import { readFile, writeFile, deleteFile } from "$lib/modules/opfs";
import { readFile, writeFile, deleteFile, deleteDirectory } from "$lib/modules/opfs";
import { getThumbnailUrl } from "$lib/modules/thumbnail";
const fileCacheIndex = new Map<number, FileCacheIndex>();
const loadedThumbnails = new LRUCache<number, string>({ max: 100 });
export const prepareFileCache = async () => {
for (const cache of await getFileCacheIndexFromIndexedDB()) {
@@ -48,3 +51,30 @@ export const deleteFileCache = async (fileId: number) => {
await deleteFile(`/cache/${fileId}`);
await deleteFileCacheIndex(fileId);
};
export const getFileThumbnailCache = async (fileId: number) => {
const thumbnail = loadedThumbnails.get(fileId);
if (thumbnail) return thumbnail;
const thumbnailBuffer = await readFile(`/thumbnail/file/${fileId}`);
if (!thumbnailBuffer) return null;
const thumbnailUrl = getThumbnailUrl(thumbnailBuffer);
loadedThumbnails.set(fileId, thumbnailUrl);
return thumbnailUrl;
};
/** Persists a thumbnail to OPFS and refreshes the in-memory LRU entry. */
export const storeFileThumbnailCache = async (fileId: number, thumbnailBuffer: ArrayBuffer) => {
  await writeFile(`/thumbnail/file/${fileId}`, thumbnailBuffer);
  const url = getThumbnailUrl(thumbnailBuffer);
  loadedThumbnails.set(fileId, url);
};

/** Drops a single file's thumbnail from both the LRU and OPFS. */
export const deleteFileThumbnailCache = async (fileId: number) => {
  loadedThumbnails.delete(fileId);
  await deleteFile(`/thumbnail/file/${fileId}`);
};

/** Clears every cached file thumbnail: the LRU and the OPFS directory. */
export const deleteAllFileThumbnailCaches = async () => {
  loadedThumbnails.clear();
  await deleteDirectory("/thumbnail/file");
};

View File

@@ -1,110 +0,0 @@
import axios from "axios";
import { limitFunction } from "p-limit";
import { ENCRYPTED_CHUNK_SIZE } from "$lib/constants";
import { decryptChunk, concatenateBuffers } from "$lib/modules/crypto";
export interface FileDownloadState {
id: number;
status:
| "download-pending"
| "downloading"
| "decryption-pending"
| "decrypting"
| "decrypted"
| "canceled"
| "error";
progress?: number;
rate?: number;
estimated?: number;
result?: ArrayBuffer;
}
type LiveFileDownloadState = FileDownloadState & {
status: "download-pending" | "downloading" | "decryption-pending" | "decrypting";
};
let downloadingFiles: FileDownloadState[] = $state([]);
// Statuses that mean a download is still in flight.
const LIVE_DOWNLOAD_STATUSES: ReadonlySet<string> = new Set([
  "download-pending",
  "downloading",
  "decryption-pending",
  "decrypting",
]);

/** Narrows a download status to the subset that is still in progress. */
export const isFileDownloading = (
  status: FileDownloadState["status"],
): status is LiveFileDownloadState["status"] => LIVE_DOWNLOAD_STATUSES.has(status);

/** Finds the in-flight download state for a file, if any. */
export const getFileDownloadState = (fileId: number) =>
  downloadingFiles.find((file) => isFileDownloading(file.status) && file.id === fileId);

/** All downloads that are still in progress. */
export const getDownloadingFiles = () =>
  downloadingFiles.filter((file) => isFileDownloading(file.status));

/** Removes finished/failed/canceled entries, keeping only in-flight ones. */
export const clearDownloadedFiles = () => {
  downloadingFiles = downloadingFiles.filter((file) => isFileDownloading(file.status));
};
// Fetches the encrypted file body. p-limit's { concurrency: 1 } serializes
// downloads so one transfer at a time uses the connection.
// Mutates `state` in place to drive the UI through downloading -> decryption-pending.
const requestFileDownload = limitFunction(
  async (state: FileDownloadState, id: number) => {
    state.status = "downloading";
    const res = await axios.get(`/api/file/${id}/download`, {
      responseType: "arraybuffer",
      // axios progress fields may be undefined early in the transfer.
      onDownloadProgress: ({ progress, rate, estimated }) => {
        state.progress = progress;
        state.rate = rate;
        state.estimated = estimated;
      },
    });
    const fileEncrypted: ArrayBuffer = res.data;
    // Hand off to the decryption stage.
    state.status = "decryption-pending";
    return fileEncrypted;
  },
  { concurrency: 1 },
);
// Decrypts a downloaded body chunk-by-chunk; up to four files may decrypt
// concurrently while the single download slot stays free.
const decryptFile = limitFunction(
  async (
    state: FileDownloadState,
    fileEncrypted: ArrayBuffer,
    encryptedChunkSize: number,
    dataKey: CryptoKey,
  ) => {
    state.status = "decrypting";
    const total = fileEncrypted.byteLength;
    const plainChunks: ArrayBuffer[] = [];
    for (let start = 0; start < total; ) {
      const end = Math.min(start + encryptedChunkSize, total);
      const encryptedSlice = fileEncrypted.slice(start, end);
      plainChunks.push(await decryptChunk(encryptedSlice, dataKey));
      start = end;
    }
    const fileBuffer = concatenateBuffers(...plainChunks).buffer;
    state.status = "decrypted";
    state.result = fileBuffer;
    return fileBuffer;
  },
  { concurrency: 4 },
);
// Queues a download and resolves with the decrypted bytes.
// isLegacy: legacy files were encrypted as one single chunk, so the whole
// body is decrypted as one chunk instead of ENCRYPTED_CHUNK_SIZE slices.
export const downloadFile = async (id: number, dataKey: CryptoKey, isLegacy: boolean) => {
  downloadingFiles.push({
    id,
    status: "download-pending",
  });
  // Re-read via .at(-1) so mutations go through the pushed element
  // (presumably the $state reactive proxy — NOTE(review): confirm).
  const state = downloadingFiles.at(-1)!;
  try {
    const fileEncrypted = await requestFileDownload(state, id);
    return await decryptFile(
      state,
      fileEncrypted,
      isLegacy ? fileEncrypted.byteLength : ENCRYPTED_CHUNK_SIZE,
      dataKey,
    );
  } catch (e) {
    // Mark failed, then rethrow so callers can surface the error.
    state.status = "error";
    throw e;
  }
};

View File

@@ -0,0 +1,84 @@
import axios from "axios";
import { limitFunction } from "p-limit";
import { writable, type Writable } from "svelte/store";
import { decryptData } from "$lib/modules/crypto";
import { fileDownloadStatusStore, type FileDownloadStatus } from "$lib/stores";
// Downloads the encrypted body, reporting progress through a Svelte writable
// status store. p-limit's { concurrency: 1 } serializes the network stage.
const requestFileDownload = limitFunction(
  async (status: Writable<FileDownloadStatus>, id: number) => {
    status.update((value) => {
      value.status = "downloading";
      return value;
    });
    const res = await axios.get(`/api/file/${id}/download`, {
      responseType: "arraybuffer",
      // Progress fields may be undefined early in the transfer.
      onDownloadProgress: ({ progress, rate, estimated }) => {
        status.update((value) => {
          value.progress = progress;
          value.rate = rate;
          value.estimated = estimated;
          return value;
        });
      },
    });
    const fileEncrypted: ArrayBuffer = res.data;
    status.update((value) => {
      value.status = "decryption-pending";
      return value;
    });
    return fileEncrypted;
  },
  { concurrency: 1 },
);
// Decrypts a fully-downloaded body in one decryptData call (single IV);
// up to four decryptions may run concurrently.
const decryptFile = limitFunction(
  async (
    status: Writable<FileDownloadStatus>,
    fileEncrypted: ArrayBuffer,
    fileEncryptedIv: string,
    dataKey: CryptoKey,
  ) => {
    status.update((value) => {
      value.status = "decrypting";
      return value;
    });
    const fileBuffer = await decryptData(fileEncrypted, fileEncryptedIv, dataKey);
    status.update((value) => {
      value.status = "decrypted";
      value.result = fileBuffer;
      return value;
    });
    return fileBuffer;
  },
  { concurrency: 4 },
);
// Queues a download, registers its status store in the global list, and
// resolves with the decrypted bytes. On failure the status flips to "error"
// and the error is rethrown to the caller.
export const downloadFile = async (id: number, fileEncryptedIv: string, dataKey: CryptoKey) => {
  const status = writable<FileDownloadStatus>({
    id,
    status: "download-pending",
  });
  fileDownloadStatusStore.update((value) => {
    value.push(status);
    return value;
  });
  try {
    return await decryptFile(
      status,
      await requestFileDownload(status, id),
      fileEncryptedIv,
      dataKey,
    );
  } catch (e) {
    status.update((value) => {
      value.status = "error";
      return value;
    });
    throw e;
  }
};

View File

@@ -1,4 +1,3 @@
export * from "./cache";
export * from "./download.svelte";
export * from "./thumbnail";
export * from "./upload.svelte";
export * from "./download";
export * from "./upload";

View File

@@ -1,77 +0,0 @@
import { LRUCache } from "lru-cache";
import { writable, type Writable } from "svelte/store";
import { browser } from "$app/environment";
import { decryptChunk } from "$lib/modules/crypto";
import type { SummarizedFileInfo } from "$lib/modules/filesystem";
import { readFile, writeFile, deleteFile, deleteDirectory } from "$lib/modules/opfs";
import { getThumbnailUrl } from "$lib/modules/thumbnail";
const loadedThumbnails = new LRUCache<number, Writable<string>>({ max: 100 });
const loadingThumbnails = new Map<number, Writable<string | undefined>>();
// Loads a thumbnail from the OPFS cache; resolves to undefined on a miss.
const fetchFromOpfs = async (fileId: number) => {
  const buffer = await readFile(`/thumbnail/file/${fileId}`);
  return buffer ? getThumbnailUrl(buffer) : undefined;
};
// Fetches and decrypts a thumbnail from the server, returning its object URL,
// or null when the server has none. The OPFS write-back is fire-and-forget.
const fetchFromServer = async (fileId: number, dataKey: CryptoKey) => {
  const res = await fetch(`/api/file/${fileId}/thumbnail/download`);
  if (!res.ok) {
    return null;
  }
  const encrypted = await res.arrayBuffer();
  const thumbnailBuffer = await decryptChunk(encrypted, dataKey);
  void writeFile(`/thumbnail/file/${fileId}`, thumbnailBuffer);
  return getThumbnailUrl(thumbnailBuffer);
};
// Resolves a thumbnail store for image/video files; undefined on the server
// or for non-visual content types. Concurrent calls for the same file share
// one in-flight store via loadingThumbnails (request de-duplication).
export const getFileThumbnail = (file: SummarizedFileInfo) => {
  if (
    !browser ||
    !(file.contentType.startsWith("image/") || file.contentType.startsWith("video/"))
  ) {
    return undefined;
  }
  const thumbnail = loadedThumbnails.get(file.id);
  if (thumbnail) return thumbnail;
  let loadingThumbnail = loadingThumbnails.get(file.id);
  if (loadingThumbnail) return loadingThumbnail;
  loadingThumbnail = writable(undefined);
  loadingThumbnails.set(file.id, loadingThumbnail);
  // Try OPFS first; on miss, fall back to the server (needs the data key).
  fetchFromOpfs(file.id)
    .then((thumbnail) => thumbnail ?? (file.dataKey && fetchFromServer(file.id, file.dataKey.key)))
    .then((thumbnail) => {
      if (thumbnail) {
        loadingThumbnail.set(thumbnail);
        // Promote the now-resolved store to the LRU of loaded thumbnails.
        loadedThumbnails.set(file.id, loadingThumbnail as Writable<string>);
      }
      // The load attempt is over either way; a later call may retry on miss.
      loadingThumbnails.delete(file.id);
    });
  return loadingThumbnail;
};
export const storeFileThumbnailCache = async (fileId: number, thumbnailBuffer: ArrayBuffer) => {
await writeFile(`/thumbnail/file/${fileId}`, thumbnailBuffer);
const oldThumbnail = loadedThumbnails.get(fileId);
if (oldThumbnail) {
oldThumbnail.set(getThumbnailUrl(thumbnailBuffer));
} else {
loadedThumbnails.set(fileId, writable(getThumbnailUrl(thumbnailBuffer)));
}
};
/** Evicts one file's thumbnail from memory, then from OPFS. */
export const deleteFileThumbnailCache = async (fileId: number) => {
  loadedThumbnails.delete(fileId);
  await deleteFile(`/thumbnail/file/${fileId}`);
};

/** Evicts every cached thumbnail, both in memory and on disk. */
export const deleteAllFileThumbnailCaches = async () => {
  loadedThumbnails.clear();
  await deleteDirectory(`/thumbnail/file`);
};

View File

@@ -1,261 +0,0 @@
import ExifReader from "exifreader";
import { limitFunction } from "p-limit";
import { CHUNK_SIZE } from "$lib/constants";
import { encodeToBase64, generateDataKey, wrapDataKey, encryptString } from "$lib/modules/crypto";
import { signMessageHmac } from "$lib/modules/crypto";
import { Scheduler } from "$lib/modules/scheduler";
import { generateThumbnail } from "$lib/modules/thumbnail";
import { uploadBlob } from "$lib/modules/upload";
import type { MasterKey, HmacSecret } from "$lib/stores";
import { trpc } from "$trpc/client";
export interface FileUploadState {
name: string;
parentId: DirectoryId;
status:
| "queued"
| "encryption-pending"
| "encrypting"
| "upload-pending"
| "uploading"
| "uploaded"
| "canceled"
| "error";
progress?: number;
rate?: number;
estimated?: number;
}
export type LiveFileUploadState = FileUploadState & {
status: "queued" | "encryption-pending" | "encrypting" | "upload-pending" | "uploading";
};
const scheduler = new Scheduler<
{ fileId: number; fileBuffer?: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined
>();
let uploadingFiles: FileUploadState[] = $state([]);
const isFileUploading = (status: FileUploadState["status"]) =>
["queued", "encryption-pending", "encrypting", "upload-pending", "uploading"].includes(status);
export const getUploadingFiles = (parentId?: DirectoryId) => {
return uploadingFiles.filter(
(file) =>
(parentId === undefined || file.parentId === parentId) && isFileUploading(file.status),
);
};
export const clearUploadedFiles = () => {
uploadingFiles = uploadingFiles.filter((file) => isFileUploading(file.status));
};
// HMACs the file content and asks the server for files with the same HMAC.
// Returns { fileSigned } to proceed with the upload, or {} when duplicates
// exist and the user declined (onDuplicate resolved false).
const requestDuplicateFileScan = limitFunction(
  async (
    state: FileUploadState,
    file: File,
    hmacSecret: HmacSecret,
    onDuplicate: () => Promise<boolean>,
  ) => {
    state.status = "encryption-pending";
    // The content HMAC identifies exact-duplicate files server-side.
    const fileSigned = encodeToBase64(await signMessageHmac(file, hmacSecret.secret));
    const files = await trpc().file.listByHash.query({
      hskVersion: hmacSecret.version,
      contentHmac: fileSigned,
    });
    if (files.length === 0 || (await onDuplicate())) {
      return { fileSigned };
    } else {
      return {};
    }
  },
  { concurrency: 1 },
);
/**
 * Resolves a file's MIME type. Browsers leave `File.type` empty for formats
 * they don't recognize (HEIC in particular), so fall back to the extension.
 * Fix: the extension check is now case-insensitive, so "IMG_0001.HEIC"
 * resolves instead of throwing.
 * @throws Error when neither the browser nor the extension identifies the type.
 */
const getFileType = (file: File) => {
  if (file.type) return file.type;
  if (file.name.toLowerCase().endsWith(".heic")) return "image/heic";
  throw new Error("Unknown file type");
};
/**
 * Extracts the capture time from EXIF ("YYYY:MM:DD HH:MM:SS" plus an optional
 * "+/-HH:MM" offset). Returns undefined when absent or unparsable.
 *
 * Fix: the previous truthiness check (`!hour || !minute || !second`) rejected
 * valid timestamps containing a 0 component (midnight, or any ":00" minute or
 * second). Only missing/NaN parts are rejected now; zero date fields are
 * still rejected since month/day 0 are invalid.
 */
const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
  const exif = ExifReader.load(fileBuffer);
  const dateTimeOriginal = exif["DateTimeOriginal"]?.description;
  const offsetTimeOriginal = exif["OffsetTimeOriginal"]?.description;
  if (!dateTimeOriginal) return undefined;
  const [date, time] = dateTimeOriginal.split(" ");
  if (!date || !time) return undefined;
  const [year, month, day] = date.split(":").map(Number);
  const [hour, minute, second] = time.split(":").map(Number);
  if (
    year === undefined || Number.isNaN(year) || year === 0 ||
    month === undefined || Number.isNaN(month) || month === 0 ||
    day === undefined || Number.isNaN(day) || day === 0 ||
    hour === undefined || Number.isNaN(hour) ||
    minute === undefined || Number.isNaN(minute) ||
    second === undefined || Number.isNaN(second)
  ) {
    return undefined;
  }
  if (!offsetTimeOriginal) {
    // No timezone information: assume the device's local timezone.
    return new Date(year, month - 1, day, hour, minute, second);
  }
  const offsetSign = offsetTimeOriginal[0] === "+" ? 1 : -1;
  const [offsetHour, offsetMinute] = offsetTimeOriginal.slice(1).split(":").map(Number);
  const utcDate = Date.UTC(year, month - 1, day, hour, minute, second);
  const offsetMs = offsetSign * ((offsetHour ?? 0) * 60 + (offsetMinute ?? 0)) * 60 * 1000;
  return new Date(utcDate - offsetMs);
};
interface FileMetadata {
parentId: "root" | number;
name: string;
createdAt?: Date;
lastModifiedAt: Date;
}
// Generates a per-file data key, encrypts the file's metadata with it, and
// registers the upload with the server; returns the server-issued uploadId.
// The thumbnail is generated in parallel with the metadata encryption.
const requestFileMetadataEncryption = limitFunction(
  async (
    state: FileUploadState,
    file: Blob,
    fileMetadata: FileMetadata,
    masterKey: MasterKey,
    hmacSecret: HmacSecret,
  ) => {
    state.status = "encrypting";
    const { dataKey, dataKeyVersion } = await generateDataKey();
    // The data key itself is wrapped with the master key before leaving the client.
    const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
    const [nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnailBuffer] =
      await Promise.all([
        encryptString(fileMetadata.name, dataKey),
        // createdAt is optional; the && short-circuits to undefined when absent.
        fileMetadata.createdAt &&
          encryptString(fileMetadata.createdAt.getTime().toString(), dataKey),
        encryptString(fileMetadata.lastModifiedAt.getTime().toString(), dataKey),
        generateThumbnail(file).then((blob) => blob?.arrayBuffer()),
      ]);
    const { uploadId } = await trpc().upload.startFileUpload.mutate({
      chunks: Math.ceil(file.size / CHUNK_SIZE),
      parent: fileMetadata.parentId,
      mekVersion: masterKey.version,
      dek: dataKeyWrapped,
      dekVersion: dataKeyVersion,
      hskVersion: hmacSecret.version,
      contentType: file.type,
      name: nameEncrypted.ciphertext,
      nameIv: nameEncrypted.iv,
      createdAt: createdAtEncrypted?.ciphertext,
      createdAtIv: createdAtEncrypted?.iv,
      lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
      lastModifiedAtIv: lastModifiedAtEncrypted.iv,
    });
    state.status = "upload-pending";
    return { uploadId, thumbnailBuffer, dataKey, dataKeyVersion };
  },
  { concurrency: 4 },
);
// Uploads the encrypted blob, finalizes the upload, then best-effort uploads
// the thumbnail (failures are logged, never fatal). One upload at a time.
const requestFileUpload = limitFunction(
  async (
    state: FileUploadState,
    uploadId: string,
    file: Blob,
    fileSigned: string,
    thumbnailBuffer: ArrayBuffer | undefined,
    dataKey: CryptoKey,
    dataKeyVersion: Date,
  ) => {
    state.status = "uploading";
    await uploadBlob(uploadId, file, dataKey, {
      onProgress(s) {
        state.progress = s.progress;
        state.rate = s.rate;
      },
    });
    const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
      uploadId,
      contentHmac: fileSigned,
    });
    if (thumbnailBuffer) {
      try {
        // The thumbnail is its own upload session, encrypted with the same data key.
        const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({
          file: fileId,
          dekVersion: dataKeyVersion,
        });
        await uploadBlob(uploadId, new Blob([thumbnailBuffer]), dataKey);
        await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId });
      } catch (e) {
        // Best-effort: the file upload itself has already succeeded.
        console.error(e);
      }
    }
    state.status = "uploaded";
    return { fileId };
  },
  { concurrency: 1 },
);
// Top-level upload pipeline: duplicate scan -> metadata encryption ->
// chunked upload. Resolves undefined when canceled as a declined duplicate.
export const uploadFile = async (
  file: File,
  parentId: "root" | number,
  masterKey: MasterKey,
  hmacSecret: HmacSecret,
  onDuplicate: () => Promise<boolean>,
) => {
  uploadingFiles.push({
    name: file.name,
    parentId,
    status: "queued",
  });
  // Re-read via .at(-1) so mutations go through the pushed element
  // (presumably the $state reactive proxy — NOTE(review): confirm).
  const state = uploadingFiles.at(-1)!;
  // schedule() receives the file size — presumably for size-aware admission;
  // see Scheduler for the actual policy.
  return await scheduler.schedule(file.size, async () => {
    try {
      const { fileSigned } = await requestDuplicateFileScan(state, file, hmacSecret, onDuplicate);
      if (!fileSigned) {
        // Duplicate declined: mark canceled and drop the entry entirely.
        state.status = "canceled";
        uploadingFiles = uploadingFiles.filter((file) => file !== state);
        return;
      }
      let fileBuffer;
      const fileType = getFileType(file);
      const fileMetadata: FileMetadata = {
        parentId,
        name: file.name,
        lastModifiedAt: new Date(file.lastModified),
      };
      if (fileType.startsWith("image/")) {
        // Images only: buffer the file once to mine the EXIF capture time.
        fileBuffer = await file.arrayBuffer();
        fileMetadata.createdAt = extractExifDateTime(fileBuffer);
      }
      const blob = new Blob([file], { type: fileType });
      const { uploadId, thumbnailBuffer, dataKey, dataKeyVersion } =
        await requestFileMetadataEncryption(state, blob, fileMetadata, masterKey, hmacSecret);
      const { fileId } = await requestFileUpload(
        state,
        uploadId,
        blob,
        fileSigned,
        thumbnailBuffer,
        dataKey,
        dataKeyVersion,
      );
      return { fileId, fileBuffer, thumbnailBuffer };
    } catch (e) {
      state.status = "error";
      throw e;
    }
  });
};

View File

@@ -0,0 +1,264 @@
import axios from "axios";
import ExifReader from "exifreader";
import { limitFunction } from "p-limit";
import { writable, type Writable } from "svelte/store";
import {
encodeToBase64,
generateDataKey,
wrapDataKey,
encryptData,
encryptString,
digestMessage,
signMessageHmac,
} from "$lib/modules/crypto";
import { generateThumbnail } from "$lib/modules/thumbnail";
import type {
FileThumbnailUploadRequest,
FileUploadRequest,
FileUploadResponse,
} from "$lib/server/schemas";
import {
fileUploadStatusStore,
type MasterKey,
type HmacSecret,
type FileUploadStatus,
} from "$lib/stores";
import { trpc } from "$trpc/client";
// Reads the whole file into memory, HMACs it, and checks the server for an
// existing file with the same content HMAC. Returns {} when the user declines
// to upload a duplicate; otherwise returns the buffer and its HMAC.
const requestDuplicateFileScan = limitFunction(
  async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise<boolean>) => {
    const fileBuffer = await file.arrayBuffer();
    const fileSigned = encodeToBase64(await signMessageHmac(fileBuffer, hmacSecret.secret));
    const files = await trpc().file.listByHash.query({
      hskVersion: hmacSecret.version,
      contentHmac: fileSigned,
    });
    if (files.length === 0 || (await onDuplicate())) {
      return { fileBuffer, fileSigned };
    } else {
      return {};
    }
  },
  { concurrency: 1 },
);
/**
 * Resolves a file's MIME type, falling back to the extension for HEIC, which
 * browsers typically report with an empty `File.type`.
 * Fix: the extension check is now case-insensitive so "PHOTO.HEIC" resolves
 * instead of throwing.
 * @throws Error when neither the browser nor the extension identifies the type.
 */
const getFileType = (file: File) => {
  if (file.type) return file.type;
  if (file.name.toLowerCase().endsWith(".heic")) return "image/heic";
  throw new Error("Unknown file type");
};
/**
 * Extracts the capture time from EXIF ("YYYY:MM:DD HH:MM:SS" plus an optional
 * "+/-HH:MM" offset). Returns undefined when absent or unparsable.
 *
 * Fix: the previous truthiness check rejected valid timestamps with a 0
 * component (midnight, or any ":00" minute/second). Only missing/NaN parts
 * are rejected now; zero date fields remain invalid and are still rejected.
 */
const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
  const exif = ExifReader.load(fileBuffer);
  const dateTimeOriginal = exif["DateTimeOriginal"]?.description;
  const offsetTimeOriginal = exif["OffsetTimeOriginal"]?.description;
  if (!dateTimeOriginal) return undefined;
  const [date, time] = dateTimeOriginal.split(" ");
  if (!date || !time) return undefined;
  const [year, month, day] = date.split(":").map(Number);
  const [hour, minute, second] = time.split(":").map(Number);
  if (
    year === undefined || Number.isNaN(year) || year === 0 ||
    month === undefined || Number.isNaN(month) || month === 0 ||
    day === undefined || Number.isNaN(day) || day === 0 ||
    hour === undefined || Number.isNaN(hour) ||
    minute === undefined || Number.isNaN(minute) ||
    second === undefined || Number.isNaN(second)
  ) {
    return undefined;
  }
  if (!offsetTimeOriginal) {
    // No timezone information: assume the device's local timezone.
    return new Date(year, month - 1, day, hour, minute, second);
  }
  const offsetSign = offsetTimeOriginal[0] === "+" ? 1 : -1;
  const [offsetHour, offsetMinute] = offsetTimeOriginal.slice(1).split(":").map(Number);
  const utcDate = Date.UTC(year, month - 1, day, hour, minute, second);
  const offsetMs = offsetSign * ((offsetHour ?? 0) * 60 + (offsetMinute ?? 0)) * 60 * 1000;
  return new Date(utcDate - offsetMs);
};
// Encrypts the file, its metadata, and an optional thumbnail with a freshly
// generated data key; also computes the ciphertext hash used as a checksum.
const encryptFile = limitFunction(
  async (
    status: Writable<FileUploadStatus>,
    file: File,
    fileBuffer: ArrayBuffer,
    masterKey: MasterKey,
  ) => {
    status.update((value) => {
      value.status = "encrypting";
      return value;
    });
    const fileType = getFileType(file);
    let createdAt;
    if (fileType.startsWith("image/")) {
      // Capture time comes from EXIF, for images only.
      createdAt = extractExifDateTime(fileBuffer);
    }
    const { dataKey, dataKeyVersion } = await generateDataKey();
    // The per-file data key is wrapped with the master key before upload.
    const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
    const fileEncrypted = await encryptData(fileBuffer, dataKey);
    // Hash of the ciphertext, sent as the "checksum" form field later.
    const fileEncryptedHash = encodeToBase64(await digestMessage(fileEncrypted.ciphertext));
    const nameEncrypted = await encryptString(file.name, dataKey);
    const createdAtEncrypted =
      createdAt && (await encryptString(createdAt.getTime().toString(), dataKey));
    const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey);
    const thumbnail = await generateThumbnail(fileBuffer, fileType);
    const thumbnailBuffer = await thumbnail?.arrayBuffer();
    const thumbnailEncrypted = thumbnailBuffer && (await encryptData(thumbnailBuffer, dataKey));
    status.update((value) => {
      value.status = "upload-pending";
      return value;
    });
    return {
      dataKeyWrapped,
      dataKeyVersion,
      fileType,
      fileEncrypted,
      fileEncryptedHash,
      nameEncrypted,
      createdAtEncrypted,
      lastModifiedAtEncrypted,
      // The plaintext thumbnail is kept so the caller can cache it locally.
      thumbnail: thumbnailEncrypted && { plaintext: thumbnailBuffer, ...thumbnailEncrypted },
    };
  },
  { concurrency: 4 },
);
// POSTs the multipart upload form, then best-effort uploads the thumbnail
// form; one upload runs at a time.
const requestFileUpload = limitFunction(
  async (status: Writable<FileUploadStatus>, form: FormData, thumbnailForm: FormData | null) => {
    status.update((value) => {
      value.status = "uploading";
      return value;
    });
    const res = await axios.post("/api/file/upload", form, {
      // Progress fields may be undefined early in the transfer.
      onUploadProgress: ({ progress, rate, estimated }) => {
        status.update((value) => {
          value.progress = progress;
          value.rate = rate;
          value.estimated = estimated;
          return value;
        });
      },
    });
    const { file }: FileUploadResponse = res.data;
    if (thumbnailForm) {
      try {
        await axios.post(`/api/file/${file}/thumbnail/upload`, thumbnailForm);
      } catch (e) {
        // TODO: surface thumbnail upload failures to the user; best-effort
        // for now since the file itself is already uploaded.
        console.error(e);
      }
    }
    status.update((value) => {
      value.status = "uploaded";
      return value;
    });
    return { fileId: file };
  },
  { concurrency: 1 },
);
// Single-shot upload pipeline: duplicate scan -> in-memory encryption ->
// multipart POST of file (and optional thumbnail) forms.
// Resolves undefined when the user declines uploading a duplicate.
export const uploadFile = async (
  file: File,
  parentId: "root" | number,
  hmacSecret: HmacSecret,
  masterKey: MasterKey,
  onDuplicate: () => Promise<boolean>,
): Promise<
  { fileId: number; fileBuffer: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined
> => {
  const status = writable<FileUploadStatus>({
    name: file.name,
    parentId,
    status: "encryption-pending",
  });
  fileUploadStatusStore.update((value) => {
    value.push(status);
    return value;
  });
  try {
    const { fileBuffer, fileSigned } = await requestDuplicateFileScan(
      file,
      hmacSecret,
      onDuplicate,
    );
    if (!fileBuffer || !fileSigned) {
      // Duplicate declined: mark canceled and remove the status entry.
      status.update((value) => {
        value.status = "canceled";
        return value;
      });
      fileUploadStatusStore.update((value) => {
        value = value.filter((v) => v !== status);
        return value;
      });
      return undefined;
    }
    const {
      dataKeyWrapped,
      dataKeyVersion,
      fileType,
      fileEncrypted,
      fileEncryptedHash,
      nameEncrypted,
      createdAtEncrypted,
      lastModifiedAtEncrypted,
      thumbnail,
    } = await encryptFile(status, file, fileBuffer, masterKey);
    // Multipart body: JSON metadata + encrypted content + ciphertext checksum.
    const form = new FormData();
    form.set(
      "metadata",
      JSON.stringify({
        parent: parentId,
        mekVersion: masterKey.version,
        dek: dataKeyWrapped,
        dekVersion: dataKeyVersion.toISOString(),
        hskVersion: hmacSecret.version,
        contentHmac: fileSigned,
        contentType: fileType,
        contentIv: fileEncrypted.iv,
        name: nameEncrypted.ciphertext,
        nameIv: nameEncrypted.iv,
        createdAt: createdAtEncrypted?.ciphertext,
        createdAtIv: createdAtEncrypted?.iv,
        lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
        lastModifiedAtIv: lastModifiedAtEncrypted.iv,
      } satisfies FileUploadRequest),
    );
    form.set("content", new Blob([fileEncrypted.ciphertext]));
    form.set("checksum", fileEncryptedHash);
    let thumbnailForm = null;
    if (thumbnail) {
      thumbnailForm = new FormData();
      thumbnailForm.set(
        "metadata",
        JSON.stringify({
          dekVersion: dataKeyVersion.toISOString(),
          contentIv: thumbnail.iv,
        } satisfies FileThumbnailUploadRequest),
      );
      thumbnailForm.set("content", new Blob([thumbnail.ciphertext]));
    }
    const { fileId } = await requestFileUpload(status, form, thumbnailForm);
    return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext };
  } catch (e) {
    status.update((value) => {
      value.status = "error";
      return value;
    });
    throw e;
  }
};

View File

@@ -0,0 +1,370 @@
import { TRPCClientError } from "@trpc/client";
import { get, writable, type Writable } from "svelte/store";
import {
getDirectoryInfos as getDirectoryInfosFromIndexedDB,
getDirectoryInfo as getDirectoryInfoFromIndexedDB,
storeDirectoryInfo,
deleteDirectoryInfo,
getFileInfos as getFileInfosFromIndexedDB,
getFileInfo as getFileInfoFromIndexedDB,
storeFileInfo,
deleteFileInfo,
getCategoryInfos as getCategoryInfosFromIndexedDB,
getCategoryInfo as getCategoryInfoFromIndexedDB,
storeCategoryInfo,
updateCategoryInfo as updateCategoryInfoInIndexedDB,
deleteCategoryInfo,
type DirectoryId,
type CategoryId,
} from "$lib/indexedDB";
import { unwrapDataKey, decryptString } from "$lib/modules/crypto";
import { trpc } from "$trpc/client";
export type DirectoryInfo =
| {
id: "root";
parentId?: undefined;
dataKey?: undefined;
dataKeyVersion?: undefined;
name?: undefined;
subDirectoryIds: number[];
fileIds: number[];
}
| {
id: number;
parentId: DirectoryId;
dataKey?: CryptoKey;
dataKeyVersion?: Date;
name: string;
subDirectoryIds: number[];
fileIds: number[];
};
export interface FileInfo {
id: number;
parentId: DirectoryId;
dataKey?: CryptoKey;
dataKeyVersion?: Date;
contentType: string;
contentIv?: string;
name: string;
createdAt?: Date;
lastModifiedAt: Date;
categoryIds: number[];
}
export type CategoryInfo =
| {
id: "root";
dataKey?: undefined;
dataKeyVersion?: undefined;
name?: undefined;
subCategoryIds: number[];
files?: undefined;
isFileRecursive?: undefined;
}
| {
id: number;
dataKey?: CryptoKey;
dataKeyVersion?: Date;
name: string;
subCategoryIds: number[];
files: { id: number; isRecursive: boolean }[];
isFileRecursive: boolean;
};
const directoryInfoStore = new Map<DirectoryId, Writable<DirectoryInfo | null>>();
const fileInfoStore = new Map<number, Writable<FileInfo | null>>();
const categoryInfoStore = new Map<CategoryId, Writable<CategoryInfo | null>>();
// Seeds the directory store from IndexedDB. Skips when the store already has
// data so a stale cache never overwrites fresher server data.
const fetchDirectoryInfoFromIndexedDB = async (
  id: DirectoryId,
  info: Writable<DirectoryInfo | null>,
) => {
  if (get(info)) return;
  const [directory, subDirectories, files] = await Promise.all([
    // "root" has no metadata row of its own, only children.
    id !== "root" ? getDirectoryInfoFromIndexedDB(id) : undefined,
    getDirectoryInfosFromIndexedDB(id),
    getFileInfosFromIndexedDB(id),
  ]);
  const subDirectoryIds = subDirectories.map(({ id }) => id);
  const fileIds = files.map(({ id }) => id);
  if (id === "root") {
    info.set({ id, subDirectoryIds, fileIds });
  } else {
    // Without a cached row a non-root entry can't be built; leave unset.
    if (!directory) return;
    info.set({
      id,
      parentId: directory.parentId,
      name: directory.name,
      subDirectoryIds,
      fileIds,
    });
  }
};
// Fetches and decrypts directory metadata from the server, updates the store,
// and mirrors the decrypted name into IndexedDB for the next load.
const fetchDirectoryInfoFromServer = async (
  id: DirectoryId,
  info: Writable<DirectoryInfo | null>,
  masterKey: CryptoKey,
) => {
  let data;
  try {
    data = await trpc().directory.get.query({ id });
  } catch (e) {
    // NOT_FOUND means the directory is gone remotely: null the store and
    // evict the stale IndexedDB row.
    if (e instanceof TRPCClientError && e.data?.code === "NOT_FOUND") {
      info.set(null);
      await deleteDirectoryInfo(id as number);
      return;
    }
    throw new Error("Failed to fetch directory information");
  }
  const { metadata, subDirectories: subDirectoryIds, files: fileIds } = data;
  if (id === "root") {
    // Root carries no encrypted metadata of its own.
    info.set({ id, subDirectoryIds, fileIds });
  } else {
    // metadata is present for non-root ids, hence the non-null assertions.
    const { dataKey } = await unwrapDataKey(metadata!.dek, masterKey);
    const name = await decryptString(metadata!.name, metadata!.nameIv, dataKey);
    info.set({
      id,
      parentId: metadata!.parent,
      dataKey,
      dataKeyVersion: new Date(metadata!.dekVersion),
      name,
      subDirectoryIds,
      fileIds,
    });
    // Only plaintext fields are cached locally; the data key is not.
    await storeDirectoryInfo({ id, parentId: metadata!.parent, name });
  }
};
// Cache-then-network: IndexedDB seeds the store, the server then overwrites.
const fetchDirectoryInfo = async (
  id: DirectoryId,
  info: Writable<DirectoryInfo | null>,
  masterKey: CryptoKey,
) => {
  await fetchDirectoryInfoFromIndexedDB(id, info);
  await fetchDirectoryInfoFromServer(id, info, masterKey);
};

/**
 * Returns the memoized store for a directory and kicks off a background
 * refresh; the store holds null until any data is available.
 */
export const getDirectoryInfo = (id: DirectoryId, masterKey: CryptoKey) => {
  // TODO: MEK rotation
  const existing = directoryInfoStore.get(id);
  const info = existing ?? writable<DirectoryInfo | null>(null);
  if (!existing) {
    directoryInfoStore.set(id, info);
  }
  fetchDirectoryInfo(id, info, masterKey); // Intended fire-and-forget
  return info;
};
// Seeds the file store from IndexedDB, but only when nothing is loaded yet —
// cached data must never clobber server data already in the store.
const fetchFileInfoFromIndexedDB = async (id: number, info: Writable<FileInfo | null>) => {
  if (get(info)) return;
  const cached = await getFileInfoFromIndexedDB(id);
  if (cached) {
    info.set(cached);
  }
};

// Dates are stored encrypted as epoch-millisecond strings.
const decryptDate = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
  const millis = await decryptString(ciphertext, iv, dataKey);
  return new Date(parseInt(millis, 10));
};
// Fetches and decrypts a file's metadata, updates the store, and mirrors the
// decrypted fields into IndexedDB. NOT_FOUND evicts the local copy instead.
const fetchFileInfoFromServer = async (
  id: number,
  info: Writable<FileInfo | null>,
  masterKey: CryptoKey,
) => {
  let metadata;
  try {
    metadata = await trpc().file.get.query({ id });
  } catch (e) {
    if (e instanceof TRPCClientError && e.data?.code === "NOT_FOUND") {
      info.set(null);
      await deleteFileInfo(id);
      return;
    }
    throw new Error("Failed to fetch file information");
  }
  const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
  const name = await decryptString(metadata.name, metadata.nameIv, dataKey);
  // createdAt is optional; only decrypt when both ciphertext and IV exist.
  const createdAt =
    metadata.createdAt && metadata.createdAtIv
      ? await decryptDate(metadata.createdAt, metadata.createdAtIv, dataKey)
      : undefined;
  const lastModifiedAt = await decryptDate(
    metadata.lastModifiedAt,
    metadata.lastModifiedAtIv,
    dataKey,
  );
  info.set({
    id,
    parentId: metadata.parent,
    dataKey,
    dataKeyVersion: new Date(metadata.dekVersion),
    contentType: metadata.contentType,
    contentIv: metadata.contentIv,
    name,
    createdAt,
    lastModifiedAt,
    categoryIds: metadata.categories,
  });
  // Persist decrypted metadata locally; the data key is not written.
  await storeFileInfo({
    id,
    parentId: metadata.parent,
    name,
    contentType: metadata.contentType,
    createdAt,
    lastModifiedAt,
    categoryIds: metadata.categories,
  });
};
/**
 * Populates the file store: cached IndexedDB copy first, then the
 * authoritative server copy.
 */
const fetchFileInfo = async (id: number, info: Writable<FileInfo | null>, masterKey: CryptoKey) => {
  await fetchFileInfoFromIndexedDB(id, info);
  await fetchFileInfoFromServer(id, info, masterKey);
};
export const getFileInfo = (fileId: number, masterKey: CryptoKey) => {
// TODO: MEK rotation
let info = fileInfoStore.get(fileId);
if (!info) {
info = writable(null);
fileInfoStore.set(fileId, info);
}
fetchFileInfo(fileId, info, masterKey); // Intended
return info;
};
/**
 * Seeds the category store from IndexedDB when it has no value yet. The root
 * category carries only its children; other categories need a cached row.
 */
const fetchCategoryInfoFromIndexedDB = async (
  id: CategoryId,
  info: Writable<CategoryInfo | null>,
) => {
  if (get(info)) return;
  const [category, subCategories] = await Promise.all([
    id !== "root" ? getCategoryInfoFromIndexedDB(id) : undefined,
    getCategoryInfosFromIndexedDB(id),
  ]);
  const subCategoryIds = subCategories.map((subCategory) => subCategory.id);
  if (id === "root") {
    info.set({ id, subCategoryIds });
    return;
  }
  if (!category) return;
  info.set({
    id,
    name: category.name,
    subCategoryIds,
    files: category.files,
    isFileRecursive: category.isFileRecursive,
  });
};
/**
 * Fetches category metadata and its (recursive) file list from the server,
 * decrypts both with the category's DEK, publishes the result to the store,
 * and mirrors it into IndexedDB. A NOT_FOUND response clears the store and
 * the local cache instead.
 */
const fetchCategoryInfoFromServer = async (
  id: CategoryId,
  info: Writable<CategoryInfo | null>,
  masterKey: CryptoKey,
) => {
  let data;
  try {
    data = await trpc().category.get.query({ id });
  } catch (e) {
    if (e instanceof TRPCClientError && e.data?.code === "NOT_FOUND") {
      // Category no longer exists server-side; drop the cached copy too.
      info.set(null);
      await deleteCategoryInfo(id as number);
      return;
    }
    throw new Error("Failed to fetch category information");
  }
  const { metadata, subCategories } = data;
  if (id === "root") {
    // The root category has no metadata of its own, only children.
    info.set({ id, subCategoryIds: subCategories });
  } else {
    const { dataKey } = await unwrapDataKey(metadata!.dek, masterKey);
    const name = await decryptString(metadata!.name, metadata!.nameIv, dataKey);
    let files;
    try {
      files = await trpc().category.files.query({ id, recurse: true });
    } catch {
      throw new Error("Failed to fetch category files");
    }
    const filesMapped = files.map(({ file, isRecursive }) => ({ id: file, isRecursive }));
    let isFileRecursive: boolean | undefined = undefined;
    // update() runs its callback synchronously, so isFileRecursive is assigned
    // before the storeCategoryInfo call below reads it (hence the `!`).
    info.update((value) => {
      const newValue = {
        // Default for first-time loads; an existing value's flag wins via the
        // spread of `value` on the next line.
        isFileRecursive: false,
        ...value,
        id,
        dataKey,
        dataKeyVersion: new Date(metadata!.dekVersion),
        name,
        subCategoryIds: subCategories,
        files: filesMapped,
      };
      isFileRecursive = newValue.isFileRecursive;
      return newValue;
    });
    await storeCategoryInfo({
      id,
      parentId: metadata!.parent,
      name,
      files: filesMapped,
      isFileRecursive: isFileRecursive!,
    });
  }
};
/**
 * Populates the category store: cached IndexedDB copy first, then the
 * authoritative server copy.
 */
const fetchCategoryInfo = async (
  id: CategoryId,
  info: Writable<CategoryInfo | null>,
  masterKey: CryptoKey,
) => {
  await fetchCategoryInfoFromIndexedDB(id, info);
  await fetchCategoryInfoFromServer(id, info, masterKey);
};
export const getCategoryInfo = (categoryId: CategoryId, masterKey: CryptoKey) => {
// TODO: MEK rotation
let info = categoryInfoStore.get(categoryId);
if (!info) {
info = writable(null);
categoryInfoStore.set(categoryId, info);
}
fetchCategoryInfo(categoryId, info, masterKey); // Intended
return info;
};
/**
 * Applies a partial category change to IndexedDB first, then mirrors it into
 * the in-memory store if one exists for this category.
 */
export const updateCategoryInfo = async (
  categoryId: number,
  changes: { isFileRecursive?: boolean },
) => {
  await updateCategoryInfoInIndexedDB(categoryId, changes);
  const store = categoryInfoStore.get(categoryId);
  store?.update((value) => {
    if (value && changes.isFileRecursive !== undefined) {
      value.isFileRecursive = changes.isFileRecursive;
    }
    return value;
  });
};

View File

@@ -1,121 +0,0 @@
import * as IndexedDB from "$lib/indexedDB";
import { trpc, isTRPCClientError } from "$trpc/client";
import { FilesystemCache, decryptFileMetadata, decryptCategoryMetadata } from "./internal.svelte";
import type { CategoryInfo, MaybeCategoryInfo } from "./types";
// Shared cache of category info keyed by CategoryId ("root" | number).
const cache = new FilesystemCache<CategoryId, MaybeCategoryInfo>({
  // Loads the locally cached copy: the category row, its direct children, and
  // the summarized info of every attached file. Returns undefined when the
  // category is unknown locally.
  async fetchFromIndexedDB(id) {
    const [category, subCategories] = await Promise.all([
      id !== "root" ? IndexedDB.getCategoryInfo(id) : undefined,
      IndexedDB.getCategoryInfos(id),
    ]);
    // Resolve each attached file id to its cached info; files missing from
    // IndexedDB resolve to undefined and are filtered out below.
    const files = category?.files
      ? await Promise.all(
          category.files.map(async (file) => {
            const fileInfo = await IndexedDB.getFileInfo(file.id);
            return fileInfo
              ? {
                  id: file.id,
                  parentId: fileInfo.parentId,
                  contentType: fileInfo.contentType,
                  name: fileInfo.name,
                  createdAt: fileInfo.createdAt,
                  lastModifiedAt: fileInfo.lastModifiedAt,
                  isRecursive: file.isRecursive,
                }
              : undefined;
          }),
        )
      : undefined;
    if (id === "root") {
      // The root category carries only its children.
      return {
        id,
        exists: true,
        subCategories,
      };
    } else if (category) {
      return {
        id,
        exists: true,
        parentId: category.parentId,
        name: category.name,
        subCategories,
        files: files?.filter((file) => !!file) ?? [],
        isFileRecursive: category.isFileRecursive ?? false,
      };
    }
  },
  // Fetches and decrypts the authoritative copy, then mirrors it to IndexedDB.
  // A NOT_FOUND answer deletes the local row and yields a tombstone.
  async fetchFromServer(id, cachedInfo, masterKey) {
    try {
      const category = await trpc().category.get.query({ id, recurse: true });
      const [subCategories, files, metadata] = await Promise.all([
        Promise.all(
          category.subCategories.map(async (category) => ({
            id: category.id,
            parentId: id,
            ...(await decryptCategoryMetadata(category, masterKey)),
          })),
        ),
        // `files` and `metadata` are absent for the root category.
        category.files &&
          Promise.all(
            category.files.map(async (file) => ({
              id: file.id,
              parentId: file.parent,
              contentType: file.contentType,
              isRecursive: file.isRecursive,
              ...(await decryptFileMetadata(file, masterKey)),
            })),
          ),
        category.metadata && decryptCategoryMetadata(category.metadata, masterKey),
      ]);
      return storeToIndexedDB(
        id !== "root"
          ? {
              id,
              parentId: category.metadata!.parent,
              subCategories,
              files: files!,
              // isFileRecursive is a client-only flag; keep the cached value.
              isFileRecursive: cachedInfo?.isFileRecursive ?? false,
              ...metadata!,
            }
          : { id, subCategories },
      );
    } catch (e) {
      if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") {
        await IndexedDB.deleteCategoryInfo(id as number);
        return { id, exists: false };
      }
      throw e;
    }
  },
});
/**
 * Mirrors a freshly fetched category into IndexedDB (fire-and-forget writes)
 * and returns the info marked as existing.
 */
const storeToIndexedDB = (info: CategoryInfo) => {
  if (info.id !== "root") {
    void IndexedDB.storeCategoryInfo(info);
    // TODO: Bulk Upsert
    const uniqueFiles = new Map(info.files.map((file) => [file.id, file]));
    for (const file of uniqueFiles.values()) {
      void IndexedDB.storeFileInfo(file);
    }
  }
  // TODO: Bulk Upsert
  for (const subCategory of info.subCategories) {
    void IndexedDB.storeCategoryInfo(subCategory);
  }
  const subCategoryIds = new Set(info.subCategories.map((subCategory) => subCategory.id));
  void IndexedDB.deleteDanglingCategoryInfos(info.id, subCategoryIds);
  return { ...info, exists: true as const };
};
/** Returns reactive category info for `id`, refreshing it in the background. */
export const getCategoryInfo = (id: CategoryId, masterKey: CryptoKey) => cache.get(id, masterKey);

View File

@@ -1,102 +0,0 @@
import * as IndexedDB from "$lib/indexedDB";
import { trpc, isTRPCClientError } from "$trpc/client";
import { FilesystemCache, decryptDirectoryMetadata, decryptFileMetadata } from "./internal.svelte";
import type { DirectoryInfo, MaybeDirectoryInfo } from "./types";
// Shared cache of directory info keyed by DirectoryId ("root" | number).
const cache = new FilesystemCache<DirectoryId, MaybeDirectoryInfo>({
  // Loads the locally cached copy: the directory row plus its direct children
  // and files. Returns undefined when the directory is unknown locally.
  async fetchFromIndexedDB(id) {
    const [directory, subDirectories, files] = await Promise.all([
      id !== "root" ? IndexedDB.getDirectoryInfo(id) : undefined,
      IndexedDB.getDirectoryInfos(id),
      IndexedDB.getFileInfos(id),
    ]);
    if (id === "root") {
      // The root directory has no row of its own.
      return {
        id,
        exists: true,
        subDirectories,
        files,
      };
    } else if (directory) {
      return {
        id,
        exists: true,
        parentId: directory.parentId,
        name: directory.name,
        subDirectories,
        files,
      };
    }
  },
  // Fetches and decrypts the authoritative copy, then mirrors it to IndexedDB.
  // A NOT_FOUND answer deletes the local row and yields a tombstone.
  async fetchFromServer(id, _cachedInfo, masterKey) {
    try {
      const directory = await trpc().directory.get.query({ id });
      const [subDirectories, files, metadata] = await Promise.all([
        Promise.all(
          directory.subDirectories.map(async (directory) => ({
            id: directory.id,
            parentId: id,
            ...(await decryptDirectoryMetadata(directory, masterKey)),
          })),
        ),
        Promise.all(
          directory.files.map(async (file) => ({
            id: file.id,
            parentId: id,
            contentType: file.contentType,
            ...(await decryptFileMetadata(file, masterKey)),
          })),
        ),
        // The root directory has no metadata of its own.
        directory.metadata && decryptDirectoryMetadata(directory.metadata, masterKey),
      ]);
      return storeToIndexedDB(
        id !== "root"
          ? {
              id,
              parentId: directory.metadata!.parent,
              subDirectories,
              files,
              ...metadata!,
            }
          : { id, subDirectories, files },
      );
    } catch (e) {
      if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") {
        await IndexedDB.deleteDirectoryInfo(id as number);
        return { id, exists: false as const };
      }
      throw e;
    }
  },
});
/**
 * Mirrors a freshly fetched directory listing into IndexedDB (fire-and-forget
 * writes), prunes rows no longer present, and returns the info marked as
 * existing.
 */
const storeToIndexedDB = (info: DirectoryInfo) => {
  if (info.id !== "root") {
    void IndexedDB.storeDirectoryInfo(info);
  }
  // TODO: Bulk Upsert
  for (const subDirectory of info.subDirectories) {
    void IndexedDB.storeDirectoryInfo(subDirectory);
  }
  // TODO: Bulk Upsert
  for (const file of info.files) {
    void IndexedDB.storeFileInfo(file);
  }
  const subDirectoryIds = new Set(info.subDirectories.map((subDirectory) => subDirectory.id));
  void IndexedDB.deleteDanglingDirectoryInfos(info.id, subDirectoryIds);
  void IndexedDB.deleteDanglingFileInfos(info.id, new Set(info.files.map((file) => file.id)));
  return { ...info, exists: true as const };
};
/** Returns reactive directory info for `id`, refreshing it in the background. */
export const getDirectoryInfo = (id: DirectoryId, masterKey: CryptoKey) => cache.get(id, masterKey);

View File

@@ -1,177 +0,0 @@
import * as IndexedDB from "$lib/indexedDB";
import { trpc, isTRPCClientError } from "$trpc/client";
import { FilesystemCache, decryptFileMetadata, decryptCategoryMetadata } from "./internal.svelte";
import type { FileInfo, MaybeFileInfo } from "./types";
// Shared cache of file info keyed by numeric file id.
const cache = new FilesystemCache<number, MaybeFileInfo>({
  // Loads the locally cached file row and resolves its category ids to cached
  // category summaries. Returns undefined when the file is unknown locally.
  async fetchFromIndexedDB(id) {
    const file = await IndexedDB.getFileInfo(id);
    const categories = file?.categoryIds
      ? await Promise.all(
          file.categoryIds.map(async (categoryId) => {
            const category = await IndexedDB.getCategoryInfo(categoryId);
            return category
              ? { id: category.id, parentId: category.parentId, name: category.name }
              : undefined;
          }),
        )
      : undefined;
    if (file) {
      return {
        id,
        exists: true,
        parentId: file.parentId,
        contentType: file.contentType,
        name: file.name,
        createdAt: file.createdAt,
        lastModifiedAt: file.lastModifiedAt,
        // Drop categories that were missing from IndexedDB.
        categories: categories?.filter((category) => !!category) ?? [],
      };
    }
  },
  // Fetches and decrypts the authoritative copy, then mirrors it to IndexedDB.
  // A NOT_FOUND answer deletes the local row and yields a tombstone.
  async fetchFromServer(id, _cachedInfo, masterKey) {
    try {
      const file = await trpc().file.get.query({ id });
      const [categories, metadata] = await Promise.all([
        Promise.all(
          file.categories.map(async (category) => ({
            id: category.id,
            parentId: category.parent,
            ...(await decryptCategoryMetadata(category, masterKey)),
          })),
        ),
        decryptFileMetadata(file, masterKey),
      ]);
      return storeToIndexedDB({
        id,
        isLegacy: file.isLegacy,
        parentId: file.parent,
        dataKey: metadata.dataKey,
        contentType: file.contentType,
        name: metadata.name,
        createdAt: metadata.createdAt,
        lastModifiedAt: metadata.lastModifiedAt,
        categories,
      });
    } catch (e) {
      if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") {
        await IndexedDB.deleteFileInfo(id);
        return { id, exists: false as const };
      }
      throw e;
    }
  },
  // Bulk variant of fetchFromIndexedDB; includes only files found locally.
  async bulkFetchFromIndexedDB(ids) {
    const files = await IndexedDB.bulkGetFileInfos([...ids]);
    // `categories[i]` is aligned with `files[i]` (both parallel the input ids).
    const categories = await Promise.all(
      files.map(async (file) =>
        file?.categoryIds
          ? await Promise.all(
              file.categoryIds.map(async (categoryId) => {
                const category = await IndexedDB.getCategoryInfo(categoryId);
                return category
                  ? { id: category.id, parentId: category.parentId, name: category.name }
                  : undefined;
              }),
            )
          : undefined,
      ),
    );
    // FIX: the previous implementation filtered `files` before mapping, so the
    // map callback's index counted the *filtered* array while `categories` is
    // parallel to the *unfiltered* one — misaligning categories whenever a
    // file was missing locally. Iterate with the original index instead.
    const result = new Map<number, MaybeFileInfo>();
    files.forEach((file, index) => {
      if (!file) return;
      result.set(file.id, {
        ...file,
        exists: true,
        categories: categories[index]?.filter((category) => !!category) ?? [],
      });
    });
    return result;
  },
  // Bulk variant of fetchFromServer; deleted ids become tombstones and their
  // local rows are removed.
  async bulkFetchFromServer(ids, masterKey) {
    const idsArray = [...ids.keys()];
    const filesRaw = await trpc().file.bulkGet.query({ ids: idsArray });
    const files = await Promise.all(
      filesRaw.map(async ({ id, categories: categoriesRaw, ...metadataRaw }) => {
        const [categories, metadata] = await Promise.all([
          Promise.all(
            categoriesRaw.map(async (category) => ({
              id: category.id,
              parentId: category.parent,
              ...(await decryptCategoryMetadata(category, masterKey)),
            })),
          ),
          decryptFileMetadata(metadataRaw, masterKey),
        ]);
        return {
          id,
          exists: true as const,
          isLegacy: metadataRaw.isLegacy,
          parentId: metadataRaw.parent,
          contentType: metadataRaw.contentType,
          categories,
          ...metadata,
        };
      }),
    );
    // Ids the server no longer knows about are purged locally.
    const existingIds = new Set(filesRaw.map(({ id }) => id));
    const deletedIds = idsArray.filter((id) => !existingIds.has(id));
    void IndexedDB.bulkDeleteFileInfos(deletedIds);
    return new Map<number, MaybeFileInfo>([
      ...bulkStoreToIndexedDB(files),
      ...deletedIds.map((id) => [id, { id, exists: false }] as const),
    ]);
  },
});
/**
 * Mirrors a freshly fetched file (and its category summaries) into IndexedDB
 * with fire-and-forget writes, and returns the info marked as existing.
 */
const storeToIndexedDB = (info: FileInfo) => {
  const categoryIds = info.categories.map((category) => category.id);
  void IndexedDB.storeFileInfo({ ...info, categoryIds });
  for (const category of info.categories) {
    void IndexedDB.storeCategoryInfo(category);
  }
  return { ...info, exists: true as const };
};
/**
 * Mirrors a batch of files into IndexedDB and returns Map-ready entries of
 * the infos marked as existing. Shared categories are deduplicated by id.
 */
const bulkStoreToIndexedDB = (infos: FileInfo[]) => {
  // TODO: Bulk Upsert
  for (const info of infos) {
    void IndexedDB.storeFileInfo({
      ...info,
      categoryIds: info.categories.map((category) => category.id),
    });
  }
  // TODO: Bulk Upsert
  const uniqueCategories = new Map(
    infos.flatMap((info) => info.categories).map((category) => [category.id, category]),
  );
  for (const category of uniqueCategories.values()) {
    void IndexedDB.storeCategoryInfo(category);
  }
  return infos.map((info) => [info.id, { ...info, exists: true }] as const);
};
/** Returns reactive file info for `id`, refreshing it in the background. */
export const getFileInfo = (id: number, masterKey: CryptoKey) => cache.get(id, masterKey);
/** Bulk variant of getFileInfo; duplicate ids are collapsed before querying. */
export const bulkGetFileInfo = (ids: number[], masterKey: CryptoKey) =>
  cache.bulkGet(new Set(ids), masterKey);

View File

@@ -1,172 +0,0 @@
import { untrack } from "svelte";
import { unwrapDataKey, decryptString } from "$lib/modules/crypto";
// Data-source callbacks a FilesystemCache instance is configured with.
interface FilesystemCacheOptions<K, V> {
  // Returns the locally cached value, or undefined when not cached.
  fetchFromIndexedDB: (key: K) => Promise<V | undefined>;
  // Fetches the authoritative value; receives the cached value (if any) so
  // client-only fields can be preserved.
  fetchFromServer: (key: K, cachedValue: V | undefined, masterKey: CryptoKey) => Promise<V>;
  // Optional bulk variants; required by bulkGet() (called with `!` there).
  bulkFetchFromIndexedDB?: (keys: Set<K>) => Promise<Map<K, V>>;
  bulkFetchFromServer?: (
    keys: Map<K, { cachedValue: V | undefined }>,
    masterKey: CryptoKey,
  ) => Promise<Map<K, V>>;
}
/**
 * Two-tier (IndexedDB → server) cache with Svelte-rune reactivity. Each key
 * maps to a `$state` proxy whose `value` is updated in place so existing
 * consumers re-render, plus an in-flight `promise` that deduplicates
 * concurrent refreshes of the same key.
 */
export class FilesystemCache<K, V extends object> {
  private map = new Map<K, { value?: V; promise?: Promise<V> }>();

  constructor(private readonly options: FilesystemCacheOptions<K, V>) {}

  /**
   * Returns the cached value immediately when available (while refreshing in
   * the background), otherwise a promise for the first loaded value.
   */
  get(key: K, masterKey: CryptoKey) {
    return untrack(() => {
      let state = this.map.get(key);
      // A refresh is already in flight: reuse it instead of starting another.
      if (state?.promise) return state.value ?? state.promise;
      const { promise: newPromise, resolve } = Promise.withResolvers<V>();
      if (!state) {
        const newState = $state({});
        state = newState;
        this.map.set(key, newState);
      }
      (state.value
        ? Promise.resolve(state.value)
        : this.options.fetchFromIndexedDB(key).then((loadedInfo) => {
            if (loadedInfo) {
              state.value = loadedInfo;
              // Resolve early so callers see the local copy without waiting
              // for the server round-trip; the server result merges in later.
              resolve(state.value);
            }
            return loadedInfo;
          })
      )
        .then((cachedInfo) => this.options.fetchFromServer(key, cachedInfo, masterKey))
        .then((loadedInfo) => {
          if (state.value) {
            // Mutate the existing $state proxy so reactivity is preserved.
            Object.assign(state.value, loadedInfo);
          } else {
            state.value = loadedInfo;
          }
          resolve(state.value);
        })
        .finally(() => {
          // NOTE(review): on a fetch failure the promise is cleared but never
          // rejected — confirm callers tolerate a promise that never settles.
          state.promise = undefined;
        });
      state.promise = newPromise;
      return state.value ?? newPromise;
    });
  }

  /**
   * Bulk counterpart of get(): refreshes every key (deduplicating ones that
   * already have an in-flight refresh) and returns either a complete
   * key→value Map or a promise for it when some keys have no value yet.
   */
  bulkGet(keys: Set<K>, masterKey: CryptoKey) {
    return untrack(() => {
      // Only keys without an in-flight refresh get a new resolver.
      const newPromises = new Map(
        keys
          .keys()
          .filter((key) => this.map.get(key)?.promise === undefined)
          .map((key) => [key, Promise.withResolvers<V>()]),
      );
      newPromises.forEach(({ promise }, key) => {
        const state = this.map.get(key);
        if (state) {
          state.promise = promise;
        } else {
          const newState = $state({ promise });
          this.map.set(key, newState);
        }
      });
      // Merges loaded values into the per-key states and resolves their
      // promises; used for both the IndexedDB and the server pass.
      const resolve = (loadedInfos: Map<K, V>) => {
        loadedInfos.forEach((loadedInfo, key) => {
          const state = this.map.get(key)!;
          if (state.value) {
            Object.assign(state.value, loadedInfo);
          } else {
            state.value = loadedInfo;
          }
          newPromises.get(key)!.resolve(state.value);
        });
        return loadedInfos;
      };
      this.options.bulkFetchFromIndexedDB!(
        new Set(newPromises.keys().filter((key) => this.map.get(key)!.value === undefined)),
      )
        .then(resolve)
        .then(() =>
          this.options.bulkFetchFromServer!(
            new Map(
              newPromises.keys().map((key) => [key, { cachedValue: this.map.get(key)!.value }]),
            ),
            masterKey,
          ),
        )
        .then(resolve)
        .finally(() => {
          newPromises.forEach((_, key) => {
            this.map.get(key)!.promise = undefined;
          });
        });
      // Keys that still have no value gate the returned promise.
      const bottleneckPromises = Array.from(
        keys
          .keys()
          .filter((key) => this.map.get(key)!.value === undefined)
          .map((key) => this.map.get(key)!.promise!),
      );
      const makeResult = () =>
        new Map(keys.keys().map((key) => [key, this.map.get(key)!.value!] as const));
      return bottleneckPromises.length > 0
        ? Promise.all(bottleneckPromises).then(makeResult)
        : makeResult();
    });
  }
}
/**
 * Unwraps the directory's DEK with the master key and decrypts its name.
 * Returns the data key (with its version) alongside the plaintext name.
 */
export const decryptDirectoryMetadata = async (
  metadata: { dek: string; dekVersion: Date; name: string; nameIv: string },
  masterKey: CryptoKey,
) => {
  const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
  return {
    dataKey: { key: dataKey, version: metadata.dekVersion },
    name: await decryptString(metadata.name, metadata.nameIv, dataKey),
  };
};
/** Decrypts a millisecond-epoch timestamp string and converts it to a Date. */
const decryptDate = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
  const epochMillis = await decryptString(ciphertext, iv, dataKey);
  return new Date(Number.parseInt(epochMillis, 10));
};
/**
 * Unwraps the file's DEK and decrypts its name and timestamps. `createdAt`
 * is optional; when absent it stays undefined. The independent fields are
 * decrypted concurrently.
 */
export const decryptFileMetadata = async (
  metadata: {
    dek: string;
    dekVersion: Date;
    name: string;
    nameIv: string;
    createdAt?: string;
    createdAtIv?: string;
    lastModifiedAt: string;
    lastModifiedAtIv: string;
  },
  masterKey: CryptoKey,
) => {
  const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
  // Kick off all decryptions before awaiting any of them.
  const namePromise = decryptString(metadata.name, metadata.nameIv, dataKey);
  const createdAtPromise = metadata.createdAt
    ? decryptDate(metadata.createdAt, metadata.createdAtIv!, dataKey)
    : undefined;
  const lastModifiedAtPromise = decryptDate(
    metadata.lastModifiedAt,
    metadata.lastModifiedAtIv,
    dataKey,
  );
  return {
    dataKey: { key: dataKey, version: metadata.dekVersion },
    name: await namePromise,
    createdAt: await createdAtPromise,
    lastModifiedAt: await lastModifiedAtPromise,
  };
};
// Category metadata has the same encrypted shape as directory metadata.
export const decryptCategoryMetadata = decryptDirectoryMetadata;

View File

@@ -1,77 +0,0 @@
// A data key together with the DEK version it was unwrapped at.
export type DataKey = { key: CryptoKey; version: Date };

// Maps every property of T to an optional `undefined`; used so the
// `exists: false` tombstone variants stay mutually exclusive with the real
// info shapes in a discriminated union.
type AllUndefined<T> = { [K in keyof T]?: undefined };

// A non-root directory. `dataKey` is optional (absent until decrypted).
interface LocalDirectoryInfo {
  id: number;
  parentId: DirectoryId;
  dataKey?: DataKey;
  name: string;
  subDirectories: SubDirectoryInfo[];
  files: SummarizedFileInfo[];
}

// The synthetic root directory: no parent, key, or name of its own.
interface RootDirectoryInfo {
  id: "root";
  parentId?: undefined;
  dataKey?: undefined;
  name?: undefined;
  subDirectories: SubDirectoryInfo[];
  files: SummarizedFileInfo[];
}

export type DirectoryInfo = LocalDirectoryInfo | RootDirectoryInfo;

// Lookup result: the info (exists: true) or a tombstone (exists: false).
export type MaybeDirectoryInfo =
  | (DirectoryInfo & { exists: true })
  | ({ id: DirectoryId; exists: false } & AllUndefined<Omit<DirectoryInfo, "id">>);

// Child-directory summary as embedded in a parent listing.
export type SubDirectoryInfo = Omit<LocalDirectoryInfo, "subDirectories" | "files">;

export interface FileInfo {
  id: number;
  // NOTE(review): presumably flags files stored in a pre-chunked format —
  // confirm against the server schema before relying on it.
  isLegacy?: boolean;
  parentId: DirectoryId;
  dataKey?: DataKey;
  contentType: string;
  name: string;
  createdAt?: Date;
  lastModifiedAt: Date;
  categories: FileCategoryInfo[];
}

export type MaybeFileInfo =
  | (FileInfo & { exists: true })
  | ({ id: number; exists: false } & AllUndefined<Omit<FileInfo, "id">>);

// File summary without its category list (used in directory listings).
export type SummarizedFileInfo = Omit<FileInfo, "categories">;

// File as listed under a category; isRecursive marks inherited membership.
export type CategoryFileInfo = SummarizedFileInfo & { isRecursive: boolean };

// A non-root category. isFileRecursive is the client-side view preference.
interface LocalCategoryInfo {
  id: number;
  parentId: DirectoryId;
  dataKey?: DataKey;
  name: string;
  subCategories: SubCategoryInfo[];
  files: CategoryFileInfo[];
  isFileRecursive: boolean;
}

// The synthetic root category: only its children are meaningful.
interface RootCategoryInfo {
  id: "root";
  parentId?: undefined;
  dataKey?: undefined;
  name?: undefined;
  subCategories: SubCategoryInfo[];
  files?: undefined;
  isFileRecursive?: undefined;
}

export type CategoryInfo = LocalCategoryInfo | RootCategoryInfo;

export type MaybeCategoryInfo =
  | (CategoryInfo & { exists: true })
  | ({ id: CategoryId; exists: false } & AllUndefined<Omit<CategoryInfo, "id">>);

// Child-category summary as embedded in a parent listing.
export type SubCategoryInfo = Omit<
  LocalCategoryInfo,
  "subCategories" | "files" | "isFileRecursive"
>;

// Category summary attached to a file (no key material).
export type FileCategoryInfo = Omit<SubCategoryInfo, "dataKey">;

View File

@@ -1,22 +0,0 @@
/**
 * Parses an HTTP `Range` header, honoring only the first range. Open-ended
 * ("500-") and suffix ("-500") forms yield undefined for the missing bound.
 * Returns undefined when the header is absent.
 */
export const parseRangeHeader = (value: string | null) => {
  if (!value) return undefined;
  const [rawStart, rawEnd] = value
    .split(",")[0]!
    .trim()
    .replace(/bytes=/, "")
    .split("-");
  return {
    start: rawStart ? parseInt(rawStart, 10) : undefined,
    end: rawEnd ? parseInt(rawEnd, 10) : undefined,
  };
};
/**
 * Builds a `Content-Range` header object for a byte range, or undefined when
 * no range was requested.
 */
export const getContentRangeHeader = (range?: { start: number; end: number; total: number }) => {
  if (!range) return range;
  const { start, end, total } = range;
  return { "Content-Range": `bytes ${start}-${end}/${total}` };
};
/**
 * Extracts the base64 digest from a `Content-Digest` header's first entry.
 * Only the `sha-256=:...:` structured-field form is accepted; anything else
 * yields undefined.
 */
export const parseContentDigestHeader = (value: string | null) => {
  if (!value) return undefined;
  const firstDigest = value.split(",")[0]!.trim();
  return /^sha-256=:([A-Za-z0-9+/=]+):$/.exec(firstDigest)?.[1];
};

View File

@@ -2,7 +2,7 @@ import { z } from "zod";
import { storeClientKey } from "$lib/indexedDB";
import type { ClientKeys } from "$lib/stores";
const SerializedClientKeysSchema = z.intersection(
const serializedClientKeysSchema = z.intersection(
z.object({
generator: z.literal("ArkVault"),
exportedAt: z.iso.datetime(),
@@ -16,7 +16,7 @@ const SerializedClientKeysSchema = z.intersection(
}),
);
type SerializedClientKeys = z.infer<typeof SerializedClientKeysSchema>;
type SerializedClientKeys = z.infer<typeof serializedClientKeysSchema>;
type DeserializedClientKeys = {
encryptKeyBase64: string;
@@ -43,7 +43,7 @@ export const serializeClientKeys = ({
};
export const deserializeClientKeys = (serialized: string) => {
const zodRes = SerializedClientKeysSchema.safeParse(JSON.parse(serialized));
const zodRes = serializedClientKeysSchema.safeParse(JSON.parse(serialized));
if (zodRes.success) {
return {
encryptKeyBase64: zodRes.data.encryptKey,

View File

@@ -1,5 +1,13 @@
let rootHandle: FileSystemDirectoryHandle | null = null;
export const prepareOpfs = async () => {
rootHandle = await navigator.storage.getDirectory();
};
const getFileHandle = async (path: string, create = true) => {
if (path[0] !== "/") {
if (!rootHandle) {
throw new Error("OPFS not prepared");
} else if (path[0] !== "/") {
throw new Error("Path must be absolute");
}
@@ -9,7 +17,7 @@ const getFileHandle = async (path: string, create = true) => {
}
try {
let directoryHandle = await navigator.storage.getDirectory();
let directoryHandle = rootHandle;
for (const part of parts.slice(0, -1)) {
if (!part) continue;
directoryHandle = await directoryHandle.getDirectoryHandle(part, { create });
@@ -26,15 +34,12 @@ const getFileHandle = async (path: string, create = true) => {
}
};
export const getFile = async (path: string) => {
export const readFile = async (path: string) => {
const { fileHandle } = await getFileHandle(path, false);
if (!fileHandle) return null;
return await fileHandle.getFile();
};
export const readFile = async (path: string) => {
return (await getFile(path))?.arrayBuffer() ?? null;
const file = await fileHandle.getFile();
return await file.arrayBuffer();
};
export const writeFile = async (path: string, data: ArrayBuffer) => {
@@ -56,7 +61,9 @@ export const deleteFile = async (path: string) => {
};
const getDirectoryHandle = async (path: string) => {
if (path[0] !== "/") {
if (!rootHandle) {
throw new Error("OPFS not prepared");
} else if (path[0] !== "/") {
throw new Error("Path must be absolute");
}
@@ -66,7 +73,7 @@ const getDirectoryHandle = async (path: string) => {
}
try {
let directoryHandle = await navigator.storage.getDirectory();
let directoryHandle = rootHandle;
let parentHandle;
for (const part of parts.slice(1)) {
if (!part) continue;

View File

@@ -1,48 +0,0 @@
/**
 * Limits the aggregate memory footprint of concurrent async tasks. Each task
 * declares an estimated usage; tasks wait in a FIFO queue while the sum of
 * running estimates is at or above the limit. Estimation itself is serialized
 * (one task estimates at a time) so the accounting stays consistent.
 */
export class Scheduler<T = void> {
  // True while one task is computing its memory estimate.
  private isEstimating = false;
  // Sum of the estimates of all currently running tasks.
  private memoryUsage = 0;
  // Resolvers of tasks waiting for their turn to estimate.
  private queue: (() => void)[] = [];

  constructor(public readonly memoryLimit = 100 * 1024 * 1024 /* 100 MiB */) {}

  // Wakes the next queued task when no estimate is in flight and there is
  // budget left.
  private next() {
    if (!this.isEstimating && this.memoryUsage < this.memoryLimit) {
      const resolve = this.queue.shift();
      if (resolve) {
        this.isEstimating = true;
        resolve();
      }
    }
  }

  /**
   * Runs `task` once the memory budget allows.
   * @param estimateMemoryUsage Fixed byte count, or an (async) callback that
   *   computes one; callbacks run serialized with other estimations.
   * @param task The work to run; its estimate is released when it settles.
   * @returns The task's result.
   */
  async schedule(
    estimateMemoryUsage: number | (() => number | Promise<number>),
    task: () => Promise<T>,
  ) {
    if (this.isEstimating || this.memoryUsage >= this.memoryLimit) {
      // Wait until next() wakes us (which also claims the estimation slot).
      await new Promise<void>((resolve) => {
        this.queue.push(resolve);
      });
    } else {
      this.isEstimating = true;
    }
    let taskMemoryUsage = 0;
    try {
      taskMemoryUsage =
        typeof estimateMemoryUsage === "number" ? estimateMemoryUsage : await estimateMemoryUsage();
      this.memoryUsage += taskMemoryUsage;
    } finally {
      // Release the estimation slot even if the estimate callback threw.
      this.isEstimating = false;
      this.next();
    }
    try {
      return await task();
    } finally {
      this.memoryUsage -= taskMemoryUsage;
      this.next();
    }
  }
}

View File

@@ -52,6 +52,7 @@ const generateImageThumbnail = (imageUrl: string) => {
.catch(reject);
};
image.onerror = reject;
image.src = imageUrl;
});
};
@@ -84,27 +85,31 @@ const generateVideoThumbnail = (videoUrl: string, time = 0) => {
});
};
export const generateThumbnail = async (blob: Blob) => {
export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: string) => {
let url;
try {
if (blob.type.startsWith("image/")) {
url = URL.createObjectURL(blob);
if (fileType.startsWith("image/")) {
const fileBlob = new Blob([fileBuffer], { type: fileType });
url = URL.createObjectURL(fileBlob);
try {
return await generateImageThumbnail(url);
} catch {
URL.revokeObjectURL(url);
url = undefined;
if (blob.type === "image/heic") {
if (fileType === "image/heic") {
const { default: heic2any } = await import("heic2any");
url = URL.createObjectURL((await heic2any({ blob, toType: "image/png" })) as Blob);
url = URL.createObjectURL(
(await heic2any({ blob: fileBlob, toType: "image/png" })) as Blob,
);
return await generateImageThumbnail(url);
} else {
return null;
}
}
} else if (blob.type.startsWith("video/")) {
url = URL.createObjectURL(blob);
} else if (fileType.startsWith("video/")) {
url = URL.createObjectURL(new Blob([fileBuffer], { type: fileType }));
return await generateVideoThumbnail(url);
}
return null;

View File

@@ -1,102 +0,0 @@
import axios from "axios";
import pLimit from "p-limit";
import { ENCRYPTION_OVERHEAD, CHUNK_SIZE } from "$lib/constants";
import { encryptChunk, digestMessage, encodeToBase64 } from "$lib/modules/crypto";
// Progress snapshot reported to the caller during an upload.
interface UploadStats {
  // Completed fraction of the total encrypted byte count, clamped to [0, 1].
  progress: number;
  // Smoothed upload rate in bytes per second.
  rate: number;
}
/**
 * Returns a throughput meter. Call it with the cumulative byte count to record
 * a sample and get the rate (bytes/s) over the recent window; call it with no
 * argument to read the last computed rate without sampling.
 */
const createSpeedMeter = (timeWindow = 1500) => {
  const samples: { t: number; b: number }[] = [];
  let lastSpeed = 0;
  return (bytesNow?: number) => {
    if (!bytesNow) return lastSpeed;
    const now = performance.now();
    samples.push({ t: now, b: bytesNow });
    // Drop samples older than the window, always keeping at least two points.
    const cutoff = now - timeWindow;
    while (samples.length > 2 && samples[0]!.t < cutoff) samples.shift();
    const oldest = samples[0]!;
    const elapsedMs = now - oldest.t;
    const deltaBytes = bytesNow - oldest.b;
    lastSpeed = elapsedMs > 0 ? (deltaBytes / elapsedMs) * 1000 : 0;
    return lastSpeed;
  };
};
/**
 * Encrypts one chunk, posts it with a sha-256 Content-Digest, and reports
 * progress through onChunkProgress (final call uses the exact encrypted size).
 */
const uploadChunk = async (
  uploadId: string,
  chunkIndex: number,
  chunk: Blob,
  dataKey: CryptoKey,
  onChunkProgress: (chunkIndex: number, loaded: number) => void,
) => {
  const plaintext = await chunk.arrayBuffer();
  const chunkEncrypted = await encryptChunk(plaintext, dataKey);
  const digest = encodeToBase64(await digestMessage(chunkEncrypted));
  await axios.post(`/api/upload/${uploadId}/chunks/${chunkIndex}`, chunkEncrypted, {
    headers: {
      "Content-Type": "application/octet-stream",
      "Content-Digest": `sha-256=:${digest}:`,
    },
    onUploadProgress(e) {
      onChunkProgress(chunkIndex, e.loaded ?? 0);
    },
  });
  // Settle the chunk at its exact encrypted byte count.
  onChunkProgress(chunkIndex, chunkEncrypted.byteLength);
};
/**
 * Uploads a blob as 1-based encrypted chunks with bounded concurrency,
 * reporting smoothed progress/rate through options.onProgress.
 */
export const uploadBlob = async (
  uploadId: string,
  blob: Blob,
  dataKey: CryptoKey,
  options?: { concurrency?: number; onProgress?: (s: UploadStats) => void },
) => {
  const onProgress = options?.onProgress;
  const totalChunks = Math.ceil(blob.size / CHUNK_SIZE);
  // Each encrypted chunk carries a fixed overhead on top of its plaintext.
  const totalBytes = blob.size + totalChunks * ENCRYPTION_OVERHEAD;
  // NOTE: chunk indices arrive 1-based from uploadChunk, so slot 0 stays 0 and
  // the array grows by one; the reduce-based total below is still correct.
  const uploadedByChunk = new Array<number>(totalChunks).fill(0);
  const speedMeter = createSpeedMeter(1500);
  const emit = () => {
    if (!onProgress) return;
    const uploadedBytes = uploadedByChunk.reduce((sum, bytes) => sum + bytes, 0);
    const rate = speedMeter(uploadedBytes);
    onProgress({ progress: Math.min(1, uploadedBytes / totalBytes), rate });
  };
  const onChunkProgress = (chunkIndex: number, loaded: number) => {
    uploadedByChunk[chunkIndex] = loaded;
    emit();
  };
  const limit = pLimit(options?.concurrency ?? 4);
  const tasks = Array.from({ length: totalChunks }, (_, i) =>
    limit(() =>
      uploadChunk(
        uploadId,
        i + 1,
        blob.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE),
        dataKey,
        onChunkProgress,
      ),
    ),
  );
  await Promise.all(tasks);
  onProgress?.({ progress: 1, rate: speedMeter() });
};

View File

@@ -1,4 +0,0 @@
import { z } from "zod";

// Shared identifier schemas: either the literal "root" or a positive integer
// database id.
export const DirectoryIdSchema = z.union([z.literal("root"), z.int().positive()]);
export const CategoryIdSchema = z.union([z.literal("root"), z.int().positive()]);

View File

@@ -1 +0,0 @@
export * from "./filesystem";

View File

@@ -2,6 +2,8 @@ import { IntegrityError } from "./error";
import db from "./kysely";
import type { Ciphertext } from "./schema";
export type CategoryId = "root" | number;
interface Category {
id: number;
parentId: CategoryId;

View File

@@ -9,7 +9,6 @@ type IntegrityErrorMessages =
// File
| "Directory not found"
| "File not found"
| "File is not legacy"
| "File not found in category"
| "File already added to category"
| "Invalid DEK version"

View File

@@ -1,10 +1,11 @@
import { sql } from "kysely";
import { jsonArrayFrom } from "kysely/helpers/postgres";
import { sql, type NotNull } from "kysely";
import pg from "pg";
import { IntegrityError } from "./error";
import db from "./kysely";
import type { Ciphertext } from "./schema";
export type DirectoryId = "root" | number;
interface Directory {
id: number;
parentId: DirectoryId;
@@ -15,6 +16,8 @@ interface Directory {
encName: Ciphertext;
}
export type NewDirectory = Omit<Directory, "id">;
interface File {
id: number;
parentId: DirectoryId;
@@ -26,23 +29,16 @@ interface File {
hskVersion: number | null;
contentHmac: string | null;
contentType: string;
encContentIv: string | null;
encContentIv: string;
encContentHash: string;
encName: Ciphertext;
encCreatedAt: Ciphertext | null;
encLastModifiedAt: Ciphertext;
}
interface FileCategory {
id: number;
parentId: CategoryId;
mekVersion: number;
encDek: string;
dekVersion: Date;
encName: Ciphertext;
}
export type NewFile = Omit<File, "id">;
export const registerDirectory = async (params: Omit<Directory, "id">) => {
export const registerDirectory = async (params: NewDirectory) => {
await db.transaction().execute(async (trx) => {
const mek = await trx
.selectFrom("master_encryption_key")
@@ -210,11 +206,38 @@ export const unregisterDirectory = async (userId: number, directoryId: number) =
});
};
export const registerFile = async (trx: typeof db, params: Omit<File, "id">) => {
export const registerFile = async (params: NewFile) => {
if ((params.hskVersion && !params.contentHmac) || (!params.hskVersion && params.contentHmac)) {
throw new Error("Invalid arguments");
}
return await db.transaction().execute(async (trx) => {
const mek = await trx
.selectFrom("master_encryption_key")
.select("version")
.where("user_id", "=", params.userId)
.where("state", "=", "active")
.limit(1)
.forUpdate()
.executeTakeFirst();
if (mek?.version !== params.mekVersion) {
throw new IntegrityError("Inactive MEK version");
}
if (params.hskVersion) {
const hsk = await trx
.selectFrom("hmac_secret_key")
.select("version")
.where("user_id", "=", params.userId)
.where("state", "=", "active")
.limit(1)
.forUpdate()
.executeTakeFirst();
if (hsk?.version !== params.hskVersion) {
throw new IntegrityError("Inactive HSK version");
}
}
const { fileId } = await trx
.insertInto("file")
.values({
@@ -245,6 +268,7 @@ export const registerFile = async (trx: typeof db, params: Omit<File, "id">) =>
})
.execute();
return { id: fileId };
});
};
export const getAllFilesByParent = async (userId: number, parentId: DirectoryId) => {
@@ -282,51 +306,39 @@ export const getAllFilesByCategory = async (
recurse: boolean,
) => {
const files = await db
.withRecursive("category_tree", (db) =>
.withRecursive("cte", (db) =>
db
.selectFrom("category")
.select(["id", sql<number>`0`.as("depth")])
.leftJoin("file_category", "category.id", "file_category.category_id")
.select(["id", "parent_id", "user_id", "file_category.file_id"])
.select(sql<number>`0`.as("depth"))
.where("id", "=", categoryId)
.where("user_id", "=", userId)
.$if(recurse, (qb) =>
qb.unionAll((db) =>
db
.selectFrom("category")
.innerJoin("category_tree", "category.parent_id", "category_tree.id")
.select(["category.id", sql<number>`depth + 1`.as("depth")]),
.leftJoin("file_category", "category.id", "file_category.category_id")
.innerJoin("cte", "category.parent_id", "cte.id")
.select([
"category.id",
"category.parent_id",
"category.user_id",
"file_category.file_id",
])
.select(sql<number>`cte.depth + 1`.as("depth")),
),
),
)
.selectFrom("category_tree")
.innerJoin("file_category", "category_tree.id", "file_category.category_id")
.innerJoin("file", "file_category.file_id", "file.id")
.selectFrom("cte")
.select(["file_id", "depth"])
.selectAll("file")
.distinctOn("file_id")
.where("user_id", "=", userId)
.where("file_id", "is not", null)
.$narrowType<{ file_id: NotNull }>()
.orderBy("file_id")
.orderBy("depth")
.execute();
return files.map(
(file) =>
({
id: file.file_id,
parentId: file.parent_id ?? "root",
userId: file.user_id,
path: file.path,
mekVersion: file.master_encryption_key_version,
encDek: file.encrypted_data_encryption_key,
dekVersion: file.data_encryption_key_version,
hskVersion: file.hmac_secret_key_version,
contentHmac: file.content_hmac,
contentType: file.content_type,
encContentIv: file.encrypted_content_iv,
encContentHash: file.encrypted_content_hash,
encName: file.encrypted_name,
encCreatedAt: file.encrypted_created_at,
encLastModifiedAt: file.encrypted_last_modified_at,
isRecursive: file.depth > 0,
}) satisfies File & { isRecursive: boolean },
);
return files.map(({ file_id, depth }) => ({ id: file_id, isRecursive: depth > 0 }));
};
export const getAllFileIds = async (userId: number) => {
@@ -334,16 +346,6 @@ export const getAllFileIds = async (userId: number) => {
return files.map(({ id }) => id);
};
// Ids of the user's files whose encrypted_content_iv column is still set —
// the condition this module treats as "legacy" (see migrateFileContent).
export const getLegacyFileIds = async (userId: number) => {
  const rows = await db
    .selectFrom("file")
    .select("id")
    .where("user_id", "=", userId)
    .where("encrypted_content_iv", "is not", null)
    .execute();
  return rows.map((row) => row.id);
};
export const getAllFileIdsByContentHmac = async (
userId: number,
hskVersion: number,
@@ -388,52 +390,6 @@ export const getFile = async (userId: number, fileId: number) => {
: null;
};
/**
 * Fetches the given files (scoped to userId) together with every category each
 * file belongs to, mapping snake_case columns to the camelCase File shape.
 * Files not owned by userId are silently omitted from the result.
 */
export const getFilesWithCategories = async (userId: number, fileIds: number[]) => {
  const files = await db
    .selectFrom("file")
    .selectAll()
    .select((eb) =>
      jsonArrayFrom(
        eb
          .selectFrom("file_category")
          .innerJoin("category", "file_category.category_id", "category.id")
          .where("file_category.file_id", "=", eb.ref("file.id"))
          .selectAll("category"),
      ).as("categories"),
    )
    // id = ANY(fileIds) — matches any id in the array in a single query
    .where("id", "=", (eb) => eb.fn.any(eb.val(fileIds)))
    .where("user_id", "=", userId)
    .execute();
  return files.map(
    (file) =>
      ({
        id: file.id,
        parentId: file.parent_id ?? "root", // null parent = root directory
        userId: file.user_id,
        path: file.path,
        mekVersion: file.master_encryption_key_version,
        encDek: file.encrypted_data_encryption_key,
        dekVersion: file.data_encryption_key_version,
        hskVersion: file.hmac_secret_key_version,
        contentHmac: file.content_hmac,
        contentType: file.content_type,
        encContentIv: file.encrypted_content_iv,
        encContentHash: file.encrypted_content_hash,
        encName: file.encrypted_name,
        encCreatedAt: file.encrypted_created_at,
        encLastModifiedAt: file.encrypted_last_modified_at,
        categories: file.categories.map((category) => ({
          id: category.id,
          parentId: category.parent_id ?? "root",
          mekVersion: category.master_encryption_key_version,
          encDek: category.encrypted_data_encryption_key,
          // jsonArrayFrom serializes dates to strings, so revive them here
          dekVersion: new Date(category.data_encryption_key_version),
          encName: category.encrypted_name,
        })),
      }) satisfies File & { categories: FileCategory[] },
  );
};
export const setFileEncName = async (
userId: number,
fileId: number,
@@ -492,51 +448,6 @@ export const unregisterFile = async (userId: number, fileId: number) => {
});
};
/**
 * Finalizes migration of a legacy file's content: swaps the stored path,
 * clears the legacy whole-content IV, records the new content hash, and
 * appends a "migrate" entry to the file log.
 * Takes trx (typed as typeof db); the forUpdate row lock only protects the
 * read-check-update sequence when this actually runs inside a transaction.
 * Returns the superseded path so the caller can delete the old blob.
 * @throws IntegrityError when the file is missing, the DEK version is stale,
 *         or encrypted_content_iv is already null (not a legacy file).
 */
export const migrateFileContent = async (
  trx: typeof db,
  userId: number,
  fileId: number,
  newPath: string,
  dekVersion: Date,
  encContentHash: string,
) => {
  const file = await trx
    .selectFrom("file")
    .select(["path", "data_encryption_key_version", "encrypted_content_iv"])
    .where("id", "=", fileId)
    .where("user_id", "=", userId)
    .limit(1)
    .forUpdate()
    .executeTakeFirst();
  if (!file) {
    throw new IntegrityError("File not found");
  } else if (file.data_encryption_key_version.getTime() !== dekVersion.getTime()) {
    throw new IntegrityError("Invalid DEK version");
  } else if (!file.encrypted_content_iv) {
    throw new IntegrityError("File is not legacy");
  }
  await trx
    .updateTable("file")
    .set({
      path: newPath,
      encrypted_content_iv: null, // null IV marks the file as migrated
      encrypted_content_hash: encContentHash,
    })
    .where("id", "=", fileId)
    .where("user_id", "=", userId)
    .execute();
  await trx
    .insertInto("file_log")
    .values({
      file_id: fileId,
      timestamp: new Date(),
      action: "migrate",
    })
    .execute();
  return { oldPath: file.path };
};
export const addFileToCategory = async (fileId: number, categoryId: number) => {
await db.transaction().execute(async (trx) => {
try {
@@ -565,21 +476,10 @@ export const addFileToCategory = async (fileId: number, categoryId: number) => {
export const getAllFileCategories = async (fileId: number) => {
const categories = await db
.selectFrom("file_category")
.innerJoin("category", "file_category.category_id", "category.id")
.selectAll("category")
.select("category_id")
.where("file_id", "=", fileId)
.execute();
return categories.map(
(category) =>
({
id: category.id,
parentId: category.parent_id ?? "root",
mekVersion: category.master_encryption_key_version,
encDek: category.encrypted_data_encryption_key,
dekVersion: category.data_encryption_key_version,
encName: category.encrypted_name,
}) satisfies FileCategory,
);
return categories.map(({ category_id }) => ({ id: category_id }));
};
export const removeFileFromCategory = async (fileId: number, categoryId: number) => {

View File

@@ -5,7 +5,6 @@ export * as HskRepo from "./hsk";
export * as MediaRepo from "./media";
export * as MekRepo from "./mek";
export * as SessionRepo from "./session";
export * as UploadRepo from "./upload";
export * as UserRepo from "./user";
export * from "./error";

View File

@@ -6,7 +6,7 @@ interface Thumbnail {
id: number;
path: string;
updatedAt: Date;
encContentIv: string | null;
encContentIv: string;
}
interface FileThumbnail extends Thumbnail {
@@ -14,13 +14,13 @@ interface FileThumbnail extends Thumbnail {
}
export const updateFileThumbnail = async (
trx: typeof db,
userId: number,
fileId: number,
dekVersion: Date,
path: string,
encContentIv: string | null,
encContentIv: string,
) => {
return await db.transaction().execute(async (trx) => {
const file = await trx
.selectFrom("file")
.select("data_encryption_key_version")
@@ -61,6 +61,7 @@ export const updateFileThumbnail = async (
)
.execute();
return thumbnail?.oldPath ?? null;
});
};
export const getFileThumbnail = async (userId: number, fileId: number) => {

View File

@@ -1,74 +0,0 @@
import { Kysely, sql } from "kysely";
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const up = async (db: Kysely<any>) => {
  // file.ts — relax encrypted_content_iv to nullable (cleared when a file is migrated)
  await db.schema
    .alterTable("file")
    .alterColumn("encrypted_content_iv", (col) => col.dropNotNull())
    .execute();
  // media.ts — same relaxation for thumbnail rows
  await db.schema
    .alterTable("thumbnail")
    .alterColumn("encrypted_content_iv", (col) => col.dropNotNull())
    .execute();
  // upload.ts — session table tracking chunked uploads in progress
  await db.schema
    .createTable("upload_session")
    .addColumn("id", "uuid", (col) => col.primaryKey())
    .addColumn("type", "text", (col) => col.notNull())
    .addColumn("user_id", "integer", (col) => col.references("user.id").notNull())
    .addColumn("path", "text", (col) => col.notNull())
    .addColumn("bitmap", "bytea", (col) => col.notNull())
    .addColumn("total_chunks", "integer", (col) => col.notNull())
    // Derived column: number of set bits in the bitmap, i.e. chunks received.
    .addColumn("uploaded_chunks", "integer", (col) =>
      col
        .generatedAlwaysAs(sql`bit_count(bitmap)`)
        .stored()
        .notNull(),
    )
    .addColumn("expires_at", "timestamp(3)", (col) => col.notNull())
    // The columns below are nullable; which are populated depends on the session type.
    .addColumn("parent_id", "integer", (col) => col.references("directory.id"))
    .addColumn("master_encryption_key_version", "integer")
    .addColumn("encrypted_data_encryption_key", "text")
    .addColumn("data_encryption_key_version", "timestamp(3)")
    .addColumn("hmac_secret_key_version", "integer")
    .addColumn("content_type", "text")
    .addColumn("encrypted_name", "json")
    .addColumn("encrypted_created_at", "json")
    .addColumn("encrypted_last_modified_at", "json")
    .addColumn("file_id", "integer", (col) => col.references("file.id"))
    .addForeignKeyConstraint(
      "upload_session_fk01",
      ["user_id", "master_encryption_key_version"],
      "master_encryption_key",
      ["user_id", "version"],
    )
    .addForeignKeyConstraint(
      "upload_session_fk02",
      ["user_id", "hmac_secret_key_version"],
      "hmac_secret_key",
      ["user_id", "version"],
    )
    // Bitmap must be exactly large enough to hold total_chunks bits.
    .addCheckConstraint(
      "upload_session_ck01",
      sql`length(bitmap) = ceil(total_chunks / 8.0)::integer`,
    )
    .addCheckConstraint("upload_session_ck02", sql`uploaded_chunks <= total_chunks`)
    .execute();
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const down = async (db: Kysely<any>) => {
  // Reverse of up(): drop the session table, then restore the NOT NULL
  // constraints on the content-IV columns.
  // NOTE(review): setNotNull will fail if any row holds NULL; no backfill is
  // performed here — confirm that is acceptable for this rollback.
  await db.schema.dropTable("upload_session").execute();
  await db.schema
    .alterTable("thumbnail")
    .alterColumn("encrypted_content_iv", (col) => col.setNotNull())
    .execute();
  await db.schema
    .alterTable("file")
    .alterColumn("encrypted_content_iv", (col) => col.setNotNull())
    .execute();
};

View File

@@ -1,11 +1,9 @@
import * as Initial1737357000 from "./1737357000-Initial";
import * as AddFileCategory1737422340 from "./1737422340-AddFileCategory";
import * as AddThumbnail1738409340 from "./1738409340-AddThumbnail";
import * as AddChunkedUpload1768062380 from "./1768062380-AddChunkedUpload";
export default {
"1737357000-Initial": Initial1737357000,
"1737422340-AddFileCategory": AddFileCategory1737422340,
"1738409340-AddThumbnail": AddThumbnail1738409340,
"1768062380-AddChunkedUpload": AddChunkedUpload1768062380,
};

View File

@@ -1,5 +1,5 @@
import type { Generated } from "kysely";
import type { Ciphertext } from "./utils";
import type { Ciphertext } from "./util";
interface CategoryTable {
id: Generated<number>;

View File

@@ -1,5 +1,5 @@
import type { ColumnType, Generated } from "kysely";
import type { Ciphertext } from "./utils";
import type { Ciphertext } from "./util";
interface DirectoryTable {
id: Generated<number>;
@@ -30,7 +30,7 @@ interface FileTable {
hmac_secret_key_version: number | null;
content_hmac: string | null; // Base64
content_type: string;
encrypted_content_iv: string | null; // Base64
encrypted_content_iv: string; // Base64
encrypted_content_hash: string; // Base64
encrypted_name: Ciphertext;
encrypted_created_at: Ciphertext | null;
@@ -41,7 +41,7 @@ interface FileLogTable {
id: Generated<number>;
file_id: number;
timestamp: ColumnType<Date, Date, never>;
action: "create" | "rename" | "migrate" | "add-to-category" | "remove-from-category";
action: "create" | "rename" | "add-to-category" | "remove-from-category";
new_name: Ciphertext | null;
category_id: number | null;
}

View File

@@ -5,9 +5,8 @@ export * from "./hsk";
export * from "./media";
export * from "./mek";
export * from "./session";
export * from "./upload";
export * from "./user";
export * from "./utils";
export * from "./util";
// eslint-disable-next-line @typescript-eslint/no-empty-object-type
export interface Database {}

View File

@@ -7,7 +7,7 @@ interface ThumbnailTable {
category_id: number | null;
path: string;
updated_at: Date;
encrypted_content_iv: string | null; // Base64
encrypted_content_iv: string; // Base64
}
declare module "./index" {

View File

@@ -1,30 +0,0 @@
import type { Generated } from "kysely";
import type { Ciphertext } from "./utils";
// Row shape of the upload_session table: one chunked upload in progress.
interface UploadSessionTable {
  id: string; // UUID primary key
  type: "file" | "thumbnail" | "migration";
  user_id: number;
  path: string; // on-disk staging location for the uploaded chunks
  bitmap: Buffer; // one bit per chunk; a set bit marks a received chunk
  total_chunks: number;
  uploaded_chunks: Generated<number>; // stored generated column: bit_count(bitmap)
  expires_at: Date;
  // The remaining columns are nullable because only some session types use
  // them: parent/crypto/name metadata for "file" sessions, file_id plus the
  // DEK version for "thumbnail"/"migration" sessions.
  parent_id: number | null;
  master_encryption_key_version: number | null;
  encrypted_data_encryption_key: string | null; // Base64
  data_encryption_key_version: Date | null;
  hmac_secret_key_version: number | null;
  content_type: string | null;
  encrypted_name: Ciphertext | null;
  encrypted_created_at: Ciphertext | null;
  encrypted_last_modified_at: Ciphertext | null;
  file_id: number | null;
}
// Merge the table into the shared Database interface.
declare module "./index" {
  interface Database {
    upload_session: UploadSessionTable;
  }
}

View File

@@ -1,192 +0,0 @@
import { sql } from "kysely";
import { IntegrityError } from "./error";
import db from "./kysely";
import type { Ciphertext } from "./schema";
// Fields common to every chunked-upload session.
interface BaseUploadSession {
  id: string; // UUID
  userId: number;
  path: string; // on-disk staging directory for the chunks
  bitmap: Buffer; // one bit per chunk; a set bit marks a received chunk
  totalChunks: number;
  uploadedChunks: number; // derived from the bitmap (generated column)
  expiresAt: Date;
}
// Session uploading a brand-new file: carries the full crypto/name metadata
// that will become the file row once the upload completes.
interface FileUploadSession extends BaseUploadSession {
  type: "file";
  parentId: DirectoryId;
  mekVersion: number; // MEK version that wraps encDek
  encDek: string; // Base64
  dekVersion: Date;
  hskVersion: number | null;
  contentType: string;
  encName: Ciphertext;
  encCreatedAt: Ciphertext | null;
  encLastModifiedAt: Ciphertext;
}
// Session replacing content for an existing file (thumbnail or migration):
// only needs the target file and the DEK version to validate against.
interface ThumbnailOrMigrationUploadSession extends BaseUploadSession {
  type: "thumbnail" | "migration";
  fileId: number;
  dekVersion: Date;
}
/**
 * Creates a chunked-upload session for a brand-new file.
 * Verifies — under forUpdate row locks — that the client's MEK version (and
 * HSK version, when supplied) is still the user's active one, then inserts
 * the session row with an all-zero bitmap sized to one bit per chunk.
 * @throws IntegrityError on a stale MEK or HSK version.
 */
export const createFileUploadSession = async (
  params: Omit<FileUploadSession, "type" | "bitmap" | "uploadedChunks">,
) => {
  await db.transaction().execute(async (trx) => {
    const mek = await trx
      .selectFrom("master_encryption_key")
      .select("version")
      .where("user_id", "=", params.userId)
      .where("state", "=", "active")
      .limit(1)
      .forUpdate()
      .executeTakeFirst();
    if (mek?.version !== params.mekVersion) {
      throw new IntegrityError("Inactive MEK version");
    }
    if (params.hskVersion) {
      const hsk = await trx
        .selectFrom("hmac_secret_key")
        .select("version")
        .where("user_id", "=", params.userId)
        .where("state", "=", "active")
        .limit(1)
        .forUpdate()
        .executeTakeFirst();
      if (hsk?.version !== params.hskVersion) {
        throw new IntegrityError("Inactive HSK version");
      }
    }
    await trx
      .insertInto("upload_session")
      .values({
        id: params.id,
        type: "file",
        user_id: params.userId,
        path: params.path,
        // Zeroed bitmap: no chunks received yet; ceil(totalChunks / 8) bytes.
        bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)),
        total_chunks: params.totalChunks,
        expires_at: params.expiresAt,
        parent_id: params.parentId !== "root" ? params.parentId : null,
        master_encryption_key_version: params.mekVersion,
        encrypted_data_encryption_key: params.encDek,
        data_encryption_key_version: params.dekVersion,
        hmac_secret_key_version: params.hskVersion,
        content_type: params.contentType,
        encrypted_name: params.encName,
        encrypted_created_at: params.encCreatedAt,
        encrypted_last_modified_at: params.encLastModifiedAt,
      })
      .execute();
  });
};
/**
 * Creates a chunked-upload session that targets an existing file (thumbnail
 * replacement or content migration). Locks the target file row (forUpdate)
 * and checks the client's DEK version against the stored one before insert.
 * @throws IntegrityError when the file is missing or the DEK version differs.
 */
export const createThumbnailOrMigrationUploadSession = async (
  params: Omit<ThumbnailOrMigrationUploadSession, "bitmap" | "uploadedChunks">,
) => {
  await db.transaction().execute(async (trx) => {
    const file = await trx
      .selectFrom("file")
      .select("data_encryption_key_version")
      .where("id", "=", params.fileId)
      .where("user_id", "=", params.userId)
      .limit(1)
      .forUpdate()
      .executeTakeFirst();
    if (!file) {
      throw new IntegrityError("File not found");
    } else if (file.data_encryption_key_version.getTime() !== params.dekVersion.getTime()) {
      throw new IntegrityError("Invalid DEK version");
    }
    await trx
      .insertInto("upload_session")
      .values({
        id: params.id,
        type: params.type,
        user_id: params.userId,
        path: params.path,
        // Zeroed bitmap: no chunks received yet; ceil(totalChunks / 8) bytes.
        bitmap: Buffer.alloc(Math.ceil(params.totalChunks / 8)),
        total_chunks: params.totalChunks,
        expires_at: params.expiresAt,
        file_id: params.fileId,
        data_encryption_key_version: params.dekVersion,
      })
      .execute();
  });
};
/**
 * Loads a live (non-expired) upload session owned by userId, or null.
 * The non-null assertions below rely on insert-time invariants: "file"
 * sessions always populate the crypto/name columns, and "thumbnail"/
 * "migration" sessions always populate file_id and the DEK version (see the
 * create* functions above).
 */
export const getUploadSession = async (sessionId: string, userId: number) => {
  const session = await db
    .selectFrom("upload_session")
    .selectAll()
    .where("id", "=", sessionId)
    .where("user_id", "=", userId)
    .where("expires_at", ">", new Date())
    .limit(1)
    .executeTakeFirst();
  if (!session) {
    return null;
  } else if (session.type === "file") {
    return {
      type: "file",
      id: session.id,
      userId: session.user_id,
      path: session.path,
      bitmap: session.bitmap,
      totalChunks: session.total_chunks,
      uploadedChunks: session.uploaded_chunks,
      expiresAt: session.expires_at,
      parentId: session.parent_id ?? "root", // null parent = root directory
      mekVersion: session.master_encryption_key_version!,
      encDek: session.encrypted_data_encryption_key!,
      dekVersion: session.data_encryption_key_version!,
      hskVersion: session.hmac_secret_key_version,
      contentType: session.content_type!,
      encName: session.encrypted_name!,
      encCreatedAt: session.encrypted_created_at,
      encLastModifiedAt: session.encrypted_last_modified_at!,
    } satisfies FileUploadSession;
  } else {
    return {
      type: session.type,
      id: session.id,
      userId: session.user_id,
      path: session.path,
      bitmap: session.bitmap,
      totalChunks: session.total_chunks,
      uploadedChunks: session.uploaded_chunks,
      expiresAt: session.expires_at,
      fileId: session.file_id!,
      dekVersion: session.data_encryption_key_version!,
    } satisfies ThumbnailOrMigrationUploadSession;
  }
};
// Marks chunk `chunkIndex` (1-based) as received by setting its bit in the
// session's bitmap via Postgres set_bit.
export const markChunkAsUploaded = async (sessionId: string, chunkIndex: number) => {
  const bitIndex = chunkIndex - 1; // bitmap bits are 0-based
  await db
    .updateTable("upload_session")
    .set({ bitmap: sql`set_bit(${sql.ref("bitmap")}, ${bitIndex}, 1)` })
    .where("id", "=", sessionId)
    .execute();
};
// Removes one upload session row. Takes trx (typed as typeof db) so callers
// can run the deletion inside their own transaction.
export const deleteUploadSession = async (trx: typeof db, sessionId: string) => {
  await trx.deleteFrom("upload_session").where("id", "=", sessionId).execute();
};
// Deletes every expired upload session and returns the staging paths of the
// deleted rows so the caller can remove them from disk.
export const cleanupExpiredUploadSessions = async () => {
  const expired = await db
    .deleteFrom("upload_session")
    .where("expires_at", "<=", new Date())
    .returning("path")
    .execute();
  return expired.map((row) => row.path);
};

View File

@@ -26,5 +26,4 @@ export default {
},
libraryPath: env.LIBRARY_PATH || "library",
thumbnailsPath: env.THUMBNAILS_PATH || "thumbnails",
uploadsPath: env.UPLOADS_PATH || "uploads",
};

View File

@@ -1,10 +1,4 @@
import { rm, unlink } from "fs/promises";
// Recursively deletes `path`, swallowing (but logging) any failure.
// Nullish paths are a no-op so callers can pass optional cleanup targets.
export const safeRecursiveRm = async (path: string | null | undefined) => {
  if (!path) return;
  try {
    await rm(path, { recursive: true });
  } catch (e) {
    console.error(e);
  }
};
import { unlink } from "fs/promises";
export const safeUnlink = async (path: string | null | undefined) => {
if (path) {

View File

@@ -0,0 +1,3 @@
import { z } from "zod";
// A category reference: the literal "root" or a positive integer category id.
export const categoryIdSchema = z.union([z.literal("root"), z.int().positive()]);

View File

@@ -0,0 +1,3 @@
import { z } from "zod";
// A directory reference: the literal "root" or a positive integer directory id.
export const directoryIdSchema = z.union([z.literal("root"), z.int().positive()]);

View File

@@ -0,0 +1,36 @@
import mime from "mime";
import { z } from "zod";
import { directoryIdSchema } from "./directory";
// "metadata" part of a thumbnail upload: DEK version plus the thumbnail's IV.
export const fileThumbnailUploadRequest = z.object({
  dekVersion: z.iso.datetime(),
  contentIv: z.base64().nonempty(),
});
export type FileThumbnailUploadRequest = z.input<typeof fileThumbnailUploadRequest>;
// "metadata" part of a file upload: destination, key versions, and the
// client-side-encrypted attributes (all Base64 ciphertext + IV pairs).
export const fileUploadRequest = z.object({
  parent: directoryIdSchema,
  mekVersion: z.int().positive(),
  dek: z.base64().nonempty(), // encrypted DEK
  dekVersion: z.iso.datetime(),
  hskVersion: z.int().positive(),
  contentHmac: z.base64().nonempty(),
  contentType: z
    .string()
    .trim()
    .nonempty()
    .refine((value) => mime.getExtension(value) !== null), // MIME type — must map to a known extension
  contentIv: z.base64().nonempty(),
  name: z.base64().nonempty(),
  nameIv: z.base64().nonempty(),
  createdAt: z.base64().nonempty().optional(),
  createdAtIv: z.base64().nonempty().optional(),
  lastModifiedAt: z.base64().nonempty(),
  lastModifiedAtIv: z.base64().nonempty(),
});
export type FileUploadRequest = z.input<typeof fileUploadRequest>;
// Upload response: the id of the newly registered file.
export const fileUploadResponse = z.object({
  file: z.int().positive(),
});
export type FileUploadResponse = z.output<typeof fileUploadResponse>;

View File

@@ -1,4 +1,3 @@
export * from "./category";
export * from "./directory";
export * from "./file";
export * from "./types";

View File

@@ -1,74 +1,126 @@
import { error } from "@sveltejs/kit";
import { createReadStream } from "fs";
import { stat } from "fs/promises";
import { createHash } from "crypto";
import { createReadStream, createWriteStream } from "fs";
import { mkdir, stat } from "fs/promises";
import { dirname } from "path";
import { Readable } from "stream";
import { FileRepo, MediaRepo } from "$lib/server/db";
import { pipeline } from "stream/promises";
import { v4 as uuidv4 } from "uuid";
import { FileRepo, MediaRepo, IntegrityError } from "$lib/server/db";
import env from "$lib/server/loadenv";
import { safeUnlink } from "$lib/server/modules/filesystem";
/**
 * Builds a web ReadableStream over an encrypted file on disk. When `iv` is
 * given (records that still carry a whole-content IV), the stream presents
 * [iv ∥ file bytes] as one logical byte sequence, and `range` offsets are
 * interpreted against that combined length.
 * Throws a 416 SvelteKit error for an out-of-bounds or inverted range.
 * Returns the stream plus the resolved inclusive range and total size.
 */
const createEncContentStream = async (
  path: string,
  iv?: Buffer,
  range?: { start?: number; end?: number },
) => {
  const { size: fileSize } = await stat(path);
  const ivSize = iv?.byteLength ?? 0;
  const totalSize = fileSize + ivSize;
  const start = range?.start ?? 0;
  const end = range?.end ?? totalSize - 1; // inclusive end, HTTP-range style
  if (start > end || start < 0 || end >= totalSize) {
    error(416, "Invalid range");
  }
  return {
    encContentStream: Readable.toWeb(
      Readable.from(
        (async function* () {
          // First, the slice of the IV prefix that falls inside the range.
          if (start < ivSize) {
            yield iv!.subarray(start, Math.min(end + 1, ivSize));
          }
          // Then the file bytes, with offsets shifted back by the IV length.
          if (end >= ivSize) {
            yield* createReadStream(path, {
              start: Math.max(0, start - ivSize),
              end: end - ivSize,
            });
          }
        })(),
      ),
    ),
    range: { start, end, total: totalSize },
  };
};
export const getFileStream = async (
userId: number,
fileId: number,
range?: { start?: number; end?: number },
) => {
export const getFileStream = async (userId: number, fileId: number) => {
const file = await FileRepo.getFile(userId, fileId);
if (!file) {
error(404, "Invalid file id");
}
return createEncContentStream(
file.path,
file.encContentIv ? Buffer.from(file.encContentIv, "base64") : undefined,
range,
);
const { size } = await stat(file.path);
return {
encContentStream: Readable.toWeb(createReadStream(file.path)),
encContentSize: size,
};
};
export const getFileThumbnailStream = async (
userId: number,
fileId: number,
range?: { start?: number; end?: number },
) => {
export const getFileThumbnailStream = async (userId: number, fileId: number) => {
const thumbnail = await MediaRepo.getFileThumbnail(userId, fileId);
if (!thumbnail) {
error(404, "File or its thumbnail not found");
}
return createEncContentStream(
thumbnail.path,
thumbnail.encContentIv ? Buffer.from(thumbnail.encContentIv, "base64") : undefined,
range,
);
const { size } = await stat(thumbnail.path);
return {
encContentStream: Readable.toWeb(createReadStream(thumbnail.path)),
encContentSize: size,
};
};
/**
 * Streams an encrypted thumbnail to a fresh path under the user's thumbnails
 * directory, then points the file's thumbnail record at it. The replaced
 * thumbnail file (if any) is deleted fire-and-forget; on failure the freshly
 * written file is removed and IntegrityErrors become HTTP errors.
 */
export const uploadFileThumbnail = async (
  userId: number,
  fileId: number,
  dekVersion: Date,
  encContentIv: string,
  encContentStream: Readable,
) => {
  const path = `${env.thumbnailsPath}/${userId}/${uuidv4()}`;
  await mkdir(dirname(path), { recursive: true });
  try {
    // "wx" fails if the path already exists; 0o600 keeps the file owner-only.
    await pipeline(encContentStream, createWriteStream(path, { flags: "wx", mode: 0o600 }));
    const oldPath = await MediaRepo.updateFileThumbnail(
      userId,
      fileId,
      dekVersion,
      path,
      encContentIv,
    );
    safeUnlink(oldPath); // Intended: not awaited — best-effort cleanup of the replaced blob
  } catch (e) {
    await safeUnlink(path);
    if (e instanceof IntegrityError) {
      if (e.message === "File not found") {
        error(404, "File not found");
      } else if (e.message === "Invalid DEK version") {
        error(400, "Mismatched DEK version");
      }
    }
    throw e; // unmapped errors propagate as 500s
  }
};
/**
 * Streams encrypted file content to a fresh path under the user's library
 * directory, verifies its SHA-256 against the client-supplied hash, and
 * registers the file row. Returns the new file id.
 * On any failure, the partially written blob is removed and known error
 * conditions are mapped to 400 responses.
 * @param encContentHash promise resolving to the expected Base64 SHA-256 of
 *        the encrypted content — supplied asynchronously by the caller.
 */
export const uploadFile = async (
  params: Omit<FileRepo.NewFile, "path" | "encContentHash">,
  encContentStream: Readable,
  encContentHash: Promise<string>,
) => {
  // Reject DEK versions that are implausibly old or set in the future.
  const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
  const oneMinuteLater = new Date(Date.now() + 60 * 1000);
  if (params.dekVersion <= oneDayAgo || params.dekVersion >= oneMinuteLater) {
    error(400, "Invalid DEK version");
  }
  const path = `${env.libraryPath}/${params.userId}/${uuidv4()}`;
  await mkdir(dirname(path), { recursive: true });
  try {
    const hashStream = createHash("sha256");
    // Tee the stream through the hasher while writing to disk, and resolve
    // the expected hash in parallel with the transfer.
    const [, hash] = await Promise.all([
      pipeline(
        encContentStream,
        async function* (source) {
          for await (const chunk of source) {
            hashStream.update(chunk);
            yield chunk;
          }
        },
        createWriteStream(path, { flags: "wx", mode: 0o600 }),
      ),
      encContentHash,
    ]);
    if (hashStream.digest("base64") !== hash) {
      throw new Error("Invalid checksum");
    }
    const { id: fileId } = await FileRepo.registerFile({
      ...params,
      path,
      encContentHash: hash,
    });
    return { fileId };
  } catch (e) {
    // Best-effort removal of the partially written blob before mapping errors.
    await safeUnlink(path);
    if (e instanceof IntegrityError && e.message === "Inactive MEK version") {
      error(400, "Invalid MEK version");
    } else if (
      e instanceof Error &&
      (e.message === "Invalid request body" || e.message === "Invalid checksum")
    ) {
      error(400, "Invalid request body");
    }
    throw e;
  }
};

View File

@@ -1,88 +0,0 @@
import { error } from "@sveltejs/kit";
import { createHash } from "crypto";
import { createWriteStream } from "fs";
import { Readable } from "stream";
import { ENCRYPTION_OVERHEAD, ENCRYPTED_CHUNK_SIZE } from "$lib/constants";
import { UploadRepo } from "$lib/server/db";
import { safeRecursiveRm, safeUnlink } from "$lib/server/modules/filesystem";
// In-process guard ("sessionId/chunkIndex" keys) preventing two concurrent
// uploads of the same chunk of the same session.
const chunkLocks = new Set<string>();
// True when the 1-based chunkIndex is already marked in the session bitmap.
// Bit n (0-based) lives in byte n>>3 at mask 1<<(n&7); reads past the end of
// the buffer count as "not uploaded".
const isChunkUploaded = (bitmap: Buffer, chunkIndex: number) => {
  const bit = chunkIndex - 1;
  const byte = bitmap[bit >> 3] ?? 0;
  return (byte & (1 << (bit & 7))) !== 0;
};
/**
 * Receives one encrypted chunk for an upload session: takes an in-process
 * lock on (session, chunk), validates the session and chunk index, writes the
 * chunk to its staging file, verifies its SHA-256 and size, and marks the
 * chunk as uploaded in the session bitmap. The staged file is deleted on any
 * failure, and the lock is always released.
 */
export const uploadChunk = async (
  userId: number,
  sessionId: string,
  chunkIndex: number,
  encChunkStream: Readable,
  encChunkHash: string,
) => {
  const lockKey = `${sessionId}/${chunkIndex}`;
  if (chunkLocks.has(lockKey)) {
    error(409, "Chunk upload already in progress");
  } else {
    chunkLocks.add(lockKey);
  }
  let filePath;
  try {
    const session = await UploadRepo.getUploadSession(sessionId, userId);
    if (!session) {
      error(404, "Invalid upload id");
    } else if (chunkIndex > session.totalChunks) {
      error(400, "Invalid chunk index");
    } else if (isChunkUploaded(session.bitmap, chunkIndex)) {
      error(409, "Chunk already uploaded");
    }
    const isLastChunk = chunkIndex === session.totalChunks;
    filePath = `${session.path}/${chunkIndex}`;
    const hashStream = createHash("sha256");
    // "wx" fails if the chunk file already exists; 0o600 keeps it owner-only.
    const writeStream = createWriteStream(filePath, { flags: "wx", mode: 0o600 });
    let writtenBytes = 0;
    // NOTE(review): writeStream.write's return value is ignored, so no
    // backpressure is applied — large chunks may buffer in memory; confirm
    // chunk sizes are bounded enough for this to be acceptable.
    for await (const chunk of encChunkStream) {
      hashStream.update(chunk);
      writeStream.write(chunk);
      writtenBytes += chunk.length;
    }
    // Flush and close the file before validating what was written.
    await new Promise<void>((resolve, reject) => {
      writeStream.end((e: any) => (e ? reject(e) : resolve()));
    });
    if (hashStream.digest("base64") !== encChunkHash) {
      throw new Error("Invalid checksum");
    } else if (
      // Every chunk but the last must be exactly ENCRYPTED_CHUNK_SIZE; the
      // last may be shorter but must exceed the encryption overhead.
      (!isLastChunk && writtenBytes !== ENCRYPTED_CHUNK_SIZE) ||
      (isLastChunk && (writtenBytes <= ENCRYPTION_OVERHEAD || writtenBytes > ENCRYPTED_CHUNK_SIZE))
    ) {
      throw new Error("Invalid chunk size");
    }
    await UploadRepo.markChunkAsUploaded(sessionId, chunkIndex);
  } catch (e) {
    await safeUnlink(filePath);
    if (
      e instanceof Error &&
      (e.message === "Invalid checksum" || e.message === "Invalid chunk size")
    ) {
      error(400, "Invalid request body");
    }
    throw e;
  } finally {
    chunkLocks.delete(lockKey);
  }
};
// Deletes all expired upload sessions and removes their on-disk staging
// directories (best-effort, in parallel).
export const cleanupExpiredUploadSessions = async () => {
  const stalePaths = await UploadRepo.cleanupExpiredUploadSessions();
  await Promise.all(stalePaths.map((path) => safeRecursiveRm(path)));
};

View File

@@ -1,39 +0,0 @@
import { DECRYPTED_FILE_URL_PREFIX } from "$lib/constants";
import type { FileMetadata, ServiceWorkerMessage, ServiceWorkerResponse } from "./types";
// How long to wait for the service worker to acknowledge a prepare message.
const PREPARE_TIMEOUT_MS = 5000;
// Resolves the currently active service worker, failing fast when the ready
// registration has no active worker.
const getServiceWorker = async () => {
  const { active } = await navigator.serviceWorker.ready;
  if (!active) {
    throw new Error("Service worker not activated");
  }
  return active;
};
/**
 * Hands the service worker the metadata it needs to decrypt file `id`, and
 * resolves once the worker replies with a matching "decryption-ready"
 * message. Rejects after PREPARE_TIMEOUT_MS if no reply arrives.
 */
export const prepareFileDecryption = async (id: number, metadata: FileMetadata) => {
  const sw = await getServiceWorker();
  return new Promise<void>((resolve, reject) => {
    const handler = (event: MessageEvent<ServiceWorkerResponse>) => {
      if (event.data.type === "decryption-ready" && event.data.fileId === id) {
        clearTimeout(timeout);
        navigator.serviceWorker.removeEventListener("message", handler);
        resolve();
      }
    };
    // Fix: the original never removed `handler` on timeout, so every timed-out
    // call leaked a permanent "message" listener on navigator.serviceWorker.
    const timeout = setTimeout(() => {
      navigator.serviceWorker.removeEventListener("message", handler);
      reject(new Error("Service worker timeout"));
    }, PREPARE_TIMEOUT_MS);
    navigator.serviceWorker.addEventListener("message", handler);
    sw.postMessage({
      type: "decryption-prepare",
      fileId: id,
      ...metadata,
    } satisfies ServiceWorkerMessage);
  });
};
// Virtual URL under which the decrypted content of file `id` is served.
export const getDecryptedFileUrl = (id: number) => `${DECRYPTED_FILE_URL_PREFIX}${id}`;

View File

@@ -1,2 +0,0 @@
export * from "./client";
export * from "./types";

View File

@@ -1,19 +0,0 @@
// Metadata the service worker needs to decrypt one file's content.
export interface FileMetadata {
  isLegacy: boolean; // presumably selects a legacy decryption path in the SW — confirm there
  dataKey: CryptoKey;
  encContentSize: number; // size of the encrypted content in bytes
  contentType: string; // MIME type of the decrypted content
}
// Client → SW: prepare decryption state for `fileId`.
export interface DecryptionPrepareMessage extends FileMetadata {
  type: "decryption-prepare";
  fileId: number;
}
// SW → client: decryption state for `fileId` is ready.
export interface DecryptionReadyMessage {
  type: "decryption-ready";
  fileId: number;
}
export type ServiceWorkerMessage = DecryptionPrepareMessage;
export type ServiceWorkerResponse = DecryptionReadyMessage;

View File

@@ -1,5 +1,6 @@
import { TRPCClientError } from "@trpc/client";
import { encodeToBase64, decryptChallenge, signMessageRSA } from "$lib/modules/crypto";
import { trpc, isTRPCClientError } from "$trpc/client";
import { trpc } from "$trpc/client";
export const requestSessionUpgrade = async (
encryptKeyBase64: string,
@@ -15,7 +16,7 @@ export const requestSessionUpgrade = async (
sigPubKey: verifyKeyBase64,
}));
} catch (e) {
if (isTRPCClientError(e) && e.data?.code === "FORBIDDEN") {
if (e instanceof TRPCClientError && e.data?.code === "FORBIDDEN") {
return [false, "Unregistered client"] as const;
}
return [false] as const;
@@ -30,7 +31,7 @@ export const requestSessionUpgrade = async (
force,
});
} catch (e) {
if (isTRPCClientError(e) && e.data?.code === "CONFLICT") {
if (e instanceof TRPCClientError && e.data?.code === "CONFLICT") {
return [false, "Already logged in"] as const;
}
return [false] as const;

View File

@@ -1,47 +1,70 @@
import { getAllFileInfos } from "$lib/indexedDB/filesystem";
import { decryptData } from "$lib/modules/crypto";
import {
getFileCache,
storeFileCache,
deleteFileCache,
downloadFile,
getFileThumbnailCache,
storeFileThumbnailCache,
deleteFileThumbnailCache,
downloadFile,
} from "$lib/modules/file";
import { uploadBlob } from "$lib/modules/upload";
import { getThumbnailUrl } from "$lib/modules/thumbnail";
import type { FileThumbnailUploadRequest } from "$lib/server/schemas";
import { trpc } from "$trpc/client";
export const requestFileDownload = async (
fileId: number,
fileEncryptedIv: string,
dataKey: CryptoKey,
isLegacy: boolean,
) => {
const cache = await getFileCache(fileId);
if (cache) return cache;
const fileBuffer = await downloadFile(fileId, dataKey, isLegacy);
const fileBuffer = await downloadFile(fileId, fileEncryptedIv, dataKey);
storeFileCache(fileId, fileBuffer); // Intended
return fileBuffer;
};
export const requestFileThumbnailUpload = async (
fileId: number,
thumbnail: Blob,
dataKey: CryptoKey,
dataKeyVersion: Date,
thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: string },
) => {
const form = new FormData();
form.set(
"metadata",
JSON.stringify({
dekVersion: dataKeyVersion.toISOString(),
contentIv: thumbnailEncrypted.iv,
} satisfies FileThumbnailUploadRequest),
);
form.set("content", new Blob([thumbnailEncrypted.ciphertext]));
return await fetch(`/api/file/${fileId}/thumbnail/upload`, { method: "POST", body: form });
};
export const requestFileThumbnailDownload = async (fileId: number, dataKey?: CryptoKey) => {
const cache = await getFileThumbnailCache(fileId);
if (cache || !dataKey) return cache;
let thumbnailInfo;
try {
const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({
file: fileId,
dekVersion: dataKeyVersion,
});
await uploadBlob(uploadId, thumbnail, dataKey);
await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId });
return true;
thumbnailInfo = await trpc().file.thumbnail.query({ id: fileId });
} catch {
// TODO: Error Handling
return false;
return null;
}
const { contentIv: thumbnailEncryptedIv } = thumbnailInfo;
const res = await fetch(`/api/file/${fileId}/thumbnail/download`);
if (!res.ok) return null;
const thumbnailEncrypted = await res.arrayBuffer();
const thumbnailBuffer = await decryptData(thumbnailEncrypted, thumbnailEncryptedIv, dataKey);
storeFileThumbnailCache(fileId, thumbnailBuffer); // Intended
return getThumbnailUrl(thumbnailBuffer);
};
export const requestDeletedFilesCleanup = async () => {

View File

@@ -1,3 +1,4 @@
import { TRPCClientError } from "@trpc/client";
import { storeMasterKeys } from "$lib/indexedDB";
import {
encodeToBase64,
@@ -10,7 +11,7 @@ import {
} from "$lib/modules/crypto";
import { requestSessionUpgrade } from "$lib/services/auth";
import { masterKeyStore, type ClientKeys } from "$lib/stores";
import { trpc, isTRPCClientError } from "$trpc/client";
import { trpc } from "$trpc/client";
export const requestClientRegistration = async (
encryptKeyBase64: string,
@@ -111,7 +112,10 @@ export const requestInitialMasterKeyAndHmacSecretRegistration = async (
mekSig: await signMasterKeyWrapped(masterKeyWrapped, 1, signKey),
});
} catch (e) {
if (isTRPCClientError(e) && (e.data?.code === "FORBIDDEN" || e.data?.code === "CONFLICT")) {
if (
e instanceof TRPCClientError &&
(e.data?.code === "FORBIDDEN" || e.data?.code === "CONFLICT")
) {
return true;
}
// TODO: Error Handling

49
src/lib/stores/file.ts Normal file
View File

@@ -0,0 +1,49 @@
import { writable, type Writable } from "svelte/store";
export interface FileUploadStatus {
name: string;
parentId: "root" | number;
status:
| "encryption-pending"
| "encrypting"
| "upload-pending"
| "uploading"
| "uploaded"
| "canceled"
| "error";
progress?: number;
rate?: number;
estimated?: number;
}
export interface FileDownloadStatus {
id: number;
status:
| "download-pending"
| "downloading"
| "decryption-pending"
| "decrypting"
| "decrypted"
| "canceled"
| "error";
progress?: number;
rate?: number;
estimated?: number;
result?: ArrayBuffer;
}
export const fileUploadStatusStore = writable<Writable<FileUploadStatus>[]>([]);
export const fileDownloadStatusStore = writable<Writable<FileDownloadStatus>[]>([]);
export const isFileUploading = (
status: FileUploadStatus["status"],
): status is "encryption-pending" | "encrypting" | "upload-pending" | "uploading" => {
return ["encryption-pending", "encrypting", "upload-pending", "uploading"].includes(status);
};
export const isFileDownloading = (
status: FileDownloadStatus["status"],
): status is "download-pending" | "downloading" | "decryption-pending" | "decrypting" => {
return ["download-pending", "downloading", "decryption-pending", "decrypting"].includes(status);
};

View File

@@ -1 +1,2 @@
export * from "./file";
export * from "./key";

View File

@@ -1,2 +0,0 @@
type DirectoryId = "root" | number;
type CategoryId = "root" | number;

View File

@@ -1,93 +0,0 @@
type MaybePromise<T> = T | Promise<T> | HybridPromise<T>;
type HybridPromiseState<T> =
| { mode: "sync"; status: "fulfilled"; value: T }
| { mode: "sync"; status: "rejected"; reason: unknown }
| { mode: "async"; promise: Promise<T> };
export class HybridPromise<T> implements PromiseLike<T> {
private isConsumed = false;
private constructor(private readonly state: HybridPromiseState<T>) {
if (state.mode === "sync" && state.status === "rejected") {
queueMicrotask(() => {
if (!this.isConsumed) {
throw state.reason;
}
});
}
}
isSync(): boolean {
return this.state.mode === "sync";
}
toPromise(): Promise<T> {
this.isConsumed = true;
if (this.state.mode === "async") return this.state.promise;
return this.state.status === "fulfilled"
? Promise.resolve(this.state.value)
: Promise.reject(this.state.reason);
}
static resolve<T>(value: MaybePromise<T>): HybridPromise<T> {
if (value instanceof HybridPromise) return value;
return new HybridPromise(
value instanceof Promise
? { mode: "async", promise: value }
: { mode: "sync", status: "fulfilled", value },
);
}
static reject<T = never>(reason?: unknown): HybridPromise<T> {
return new HybridPromise({ mode: "sync", status: "rejected", reason });
}
then<TResult1 = T, TResult2 = never>(
onfulfilled?: ((value: T) => MaybePromise<TResult1>) | null | undefined,
onrejected?: ((reason: unknown) => MaybePromise<TResult2>) | null | undefined,
): HybridPromise<TResult1 | TResult2> {
this.isConsumed = true;
if (this.state.mode === "async") {
return new HybridPromise({
mode: "async",
promise: this.state.promise.then(onfulfilled, onrejected) as any,
});
}
try {
if (this.state.status === "fulfilled") {
if (!onfulfilled) return HybridPromise.resolve(this.state.value as any);
return HybridPromise.resolve(onfulfilled(this.state.value));
} else {
if (!onrejected) return HybridPromise.reject(this.state.reason);
return HybridPromise.resolve(onrejected(this.state.reason));
}
} catch (e) {
return HybridPromise.reject(e);
}
}
catch<TResult = never>(
onrejected?: ((reason: unknown) => MaybePromise<TResult>) | null | undefined,
): HybridPromise<T | TResult> {
return this.then<T, TResult>(null, onrejected);
}
finally(onfinally?: (() => void) | null | undefined): HybridPromise<T> {
this.isConsumed = true;
if (this.state.mode === "async") {
return new HybridPromise({ mode: "async", promise: this.state.promise.finally(onfinally) });
}
try {
onfinally?.();
return new HybridPromise(this.state);
} catch (e) {
return HybridPromise.reject(e);
}
}
}

View File

@@ -1,4 +1,3 @@
export * from "./format";
export * from "./gotoStateful";
export * from "./HybridPromise";
export * from "./sort";

View File

@@ -32,7 +32,7 @@ const sortByDateAsc: SortFunc = ({ date: a }, { date: b }) => {
const sortByDateDesc: SortFunc = (a, b) => -sortByDateAsc(a, b);
export const sortEntries = <T extends SortEntry>(entries: T[], sortBy = SortBy.NAME_ASC) => {
export const sortEntries = <T extends SortEntry>(entries: T[], sortBy: SortBy) => {
let sortFunc: SortFunc;
switch (sortBy) {
@@ -48,12 +48,10 @@ export const sortEntries = <T extends SortEntry>(entries: T[], sortBy = SortBy.N
case SortBy.DATE_DESC:
sortFunc = sortByDateDesc;
break;
default: {
default:
const exhaustive: never = sortBy;
sortFunc = exhaustive;
}
}
entries.sort(sortFunc);
return entries;
};

View File

@@ -1,5 +0,0 @@
import type { ParamMatcher } from "@sveltejs/kit";
export const match: ParamMatcher = (param) => {
return param === "thumbnail";
};

View File

@@ -1,15 +1,19 @@
<script lang="ts">
import FileSaver from "file-saver";
import { untrack } from "svelte";
import { get, type Writable } from "svelte/store";
import { goto } from "$app/navigation";
import { page } from "$app/state";
import { FullscreenDiv } from "$lib/components/atoms";
import { Categories, IconEntryButton, TopBar } from "$lib/components/molecules";
import { getFileInfo, type MaybeFileInfo } from "$lib/modules/filesystem";
import {
getFileInfo,
getCategoryInfo,
type FileInfo,
type CategoryInfo,
} from "$lib/modules/filesystem";
import { captureVideoThumbnail } from "$lib/modules/thumbnail";
import { getFileDownloadState } from "$lib/modules/file";
import { masterKeyStore } from "$lib/stores";
import { HybridPromise } from "$lib/utils";
import { fileDownloadStatusStore, isFileDownloading, masterKeyStore } from "$lib/stores";
import AddToCategoryBottomSheet from "./AddToCategoryBottomSheet.svelte";
import DownloadStatus from "./DownloadStatus.svelte";
import {
@@ -17,7 +21,6 @@
requestFileDownload,
requestThumbnailUpload,
requestFileAdditionToCategory,
requestVideoStream,
} from "./service";
import TopBarMenu from "./TopBarMenu.svelte";
@@ -28,17 +31,23 @@
let { data } = $props();
let info: MaybeFileInfo | undefined = $state();
let downloadState = $derived(getFileDownloadState(data.id));
let info: Writable<FileInfo | null> | undefined = $state();
let categories: Writable<CategoryInfo | null>[] = $state([]);
let isMenuOpen = $state(false);
let isAddToCategoryBottomSheetOpen = $state(false);
let downloadStatus = $derived(
$fileDownloadStatusStore.find((statusStore) => {
const { id, status } = get(statusStore);
return id === data.id && isFileDownloading(status);
}),
);
let isDownloadRequested = $state(false);
let viewerType: "image" | "video" | undefined = $state();
let fileBlob: Blob | undefined = $state();
let fileBlobUrl: string | undefined = $state();
let videoStreamUrl: string | undefined = $state();
let videoElement: HTMLVideoElement | undefined = $state();
const updateViewer = async (buffer: ArrayBuffer, contentType: string) => {
@@ -67,27 +76,28 @@
const addToCategory = async (categoryId: number) => {
await requestFileAdditionToCategory(data.id, categoryId);
isAddToCategoryBottomSheetOpen = false;
void getFileInfo(data.id, $masterKeyStore?.get(1)?.key!); // TODO: FIXME
info = getFileInfo(data.id, $masterKeyStore?.get(1)?.key!); // TODO: FIXME
};
const removeFromCategory = async (categoryId: number) => {
await requestFileRemovalFromCategory(data.id, categoryId);
void getFileInfo(data.id, $masterKeyStore?.get(1)?.key!); // TODO: FIXME
info = getFileInfo(data.id, $masterKeyStore?.get(1)?.key!); // TODO: FIXME
};
$effect(() => {
HybridPromise.resolve(getFileInfo(data.id, $masterKeyStore?.get(1)?.key!)).then((result) => {
if (data.id === result.id) {
info = result;
}
});
info = getFileInfo(data.id, $masterKeyStore?.get(1)?.key!);
isDownloadRequested = false;
viewerType = undefined;
});
$effect(() => {
if (info?.dataKey) {
const contentType = info.contentType;
categories =
$info?.categoryIds.map((id) => getCategoryInfo(id, $masterKeyStore?.get(1)?.key!)) ?? [];
});
$effect(() => {
if ($info && $info.dataKey && $info.contentIv) {
const contentType = $info.contentType;
if (contentType.startsWith("image")) {
viewerType = "image";
} else if (contentType.startsWith("video")) {
@@ -95,38 +105,23 @@
}
untrack(() => {
if (!downloadState && !isDownloadRequested) {
if (!downloadStatus && !isDownloadRequested) {
isDownloadRequested = true;
if (viewerType === "video" && !info!.isLegacy) {
requestVideoStream(data.id, info!.dataKey!.key, contentType).then((streamUrl) => {
if (streamUrl) {
videoStreamUrl = streamUrl;
} else {
requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then((buffer) =>
updateViewer(buffer, contentType),
);
}
});
} else {
requestFileDownload(data.id, info!.dataKey!.key, info!.isLegacy!).then(
async (buffer) => {
requestFileDownload(data.id, $info.contentIv!, $info.dataKey!).then(async (buffer) => {
const blob = await updateViewer(buffer, contentType);
if (!viewerType) {
FileSaver.saveAs(blob, info!.name);
}
},
);
FileSaver.saveAs(blob, $info.name);
}
});
}
});
}
});
$effect(() => {
if (info?.exists && downloadState?.status === "decrypted") {
if ($info && $downloadStatus?.status === "decrypted") {
untrack(
() => !isDownloadRequested && updateViewer(downloadState.result!, info!.contentType!),
() => !isDownloadRequested && updateViewer($downloadStatus.result!, $info.contentType),
);
}
});
@@ -138,7 +133,7 @@
<title>파일</title>
</svelte:head>
<TopBar title={info?.name}>
<TopBar title={$info?.name}>
<!-- svelte-ignore a11y_no_static_element_interactions -->
<!-- svelte-ignore a11y_click_events_have_key_events -->
<div onclick={(e) => e.stopPropagation()}>
@@ -150,21 +145,16 @@
</button>
<TopBarMenu
bind:isOpen={isMenuOpen}
directoryId={["category", "gallery"].includes(page.url.searchParams.get("from") ?? "")
? info?.parentId
: undefined}
directoryId={page.url.searchParams.get("from") === "category" ? $info?.parentId : undefined}
{fileBlob}
downloadUrl={videoStreamUrl}
filename={info?.name}
filename={$info?.name}
/>
</div>
</TopBar>
<FullscreenDiv>
<div class="space-y-4 pb-4">
{#if downloadState}
<DownloadStatus state={downloadState} />
{/if}
{#if info && viewerType}
<DownloadStatus status={downloadStatus} />
{#if $info && viewerType}
<div class="flex w-full justify-center">
{#snippet viewerLoading(message: string)}
<p class="text-gray-500">{message}</p>
@@ -172,18 +162,18 @@
{#if viewerType === "image"}
{#if fileBlobUrl}
<img src={fileBlobUrl} alt={info.name} onerror={convertHeicToJpeg} />
<img src={fileBlobUrl} alt={$info.name} onerror={convertHeicToJpeg} />
{:else}
{@render viewerLoading("이미지를 불러오고 있어요.")}
{/if}
{:else if viewerType === "video"}
{#if videoStreamUrl || fileBlobUrl}
{#if fileBlobUrl}
<div class="flex flex-col space-y-2">
<video bind:this={videoElement} src={videoStreamUrl ?? fileBlobUrl} controls muted
></video>
<!-- svelte-ignore a11y_media_has_caption -->
<video bind:this={videoElement} src={fileBlobUrl} controls muted></video>
<IconEntryButton
icon={IconCamera}
onclick={() => updateThumbnail(info?.dataKey?.key!, info?.dataKey?.version!)}
onclick={() => updateThumbnail($info.dataKey!, $info.dataKeyVersion!)}
class="w-full"
>
이 장면을 썸네일로 설정하기
@@ -199,7 +189,7 @@
<p class="text-lg font-bold">카테고리</p>
<div class="space-y-1">
<Categories
categories={info?.categories ?? []}
{categories}
categoryMenuIcon={IconClose}
onCategoryClick={({ id }) => goto(`/category/${id}`)}
onCategoryMenuClick={({ id }) => removeFromCategory(id)}

View File

@@ -1,10 +1,10 @@
<script lang="ts">
import type { Writable } from "svelte/store";
import { BottomDiv, BottomSheet, Button, FullscreenDiv } from "$lib/components/atoms";
import { SubCategories } from "$lib/components/molecules";
import { CategoryCreateModal } from "$lib/components/organisms";
import { getCategoryInfo, type MaybeCategoryInfo } from "$lib/modules/filesystem";
import { getCategoryInfo, type CategoryInfo } from "$lib/modules/filesystem";
import { masterKeyStore } from "$lib/stores";
import { HybridPromise } from "$lib/utils";
import { requestCategoryCreation } from "./service";
interface Props {
@@ -14,36 +14,32 @@
let { onAddToCategoryClick, isOpen = $bindable() }: Props = $props();
let categoryInfo: MaybeCategoryInfo | undefined = $state();
let category: Writable<CategoryInfo | null> | undefined = $state();
let isCategoryCreateModalOpen = $state(false);
$effect(() => {
if (isOpen) {
HybridPromise.resolve(getCategoryInfo("root", $masterKeyStore?.get(1)?.key!)).then(
(result) => (categoryInfo = result),
);
category = getCategoryInfo("root", $masterKeyStore?.get(1)?.key!);
}
});
</script>
{#if categoryInfo?.exists}
{#if $category}
<BottomSheet bind:isOpen class="flex flex-col">
<FullscreenDiv>
<SubCategories
class="py-4"
info={categoryInfo}
info={$category}
onSubCategoryClick={({ id }) =>
HybridPromise.resolve(getCategoryInfo(id, $masterKeyStore?.get(1)?.key!)).then(
(result) => (categoryInfo = result),
)}
(category = getCategoryInfo(id, $masterKeyStore?.get(1)?.key!))}
onSubCategoryCreateClick={() => (isCategoryCreateModalOpen = true)}
subCategoryCreatePosition="top"
/>
{#if categoryInfo.id !== "root"}
{#if $category.id !== "root"}
<BottomDiv>
<Button onclick={() => onAddToCategoryClick(categoryInfo!.id as number)} class="w-full">
{categoryInfo!.name} 카테고리에 추가하기
<Button onclick={() => onAddToCategoryClick($category.id)} class="w-full">
카테고리에 추가하기
</Button>
</BottomDiv>
{/if}
@@ -54,8 +50,8 @@
<CategoryCreateModal
bind:isOpen={isCategoryCreateModalOpen}
onCreateClick={async (name: string) => {
if (await requestCategoryCreation(name, categoryInfo!.id, $masterKeyStore?.get(1)!)) {
void getCategoryInfo(categoryInfo!.id, $masterKeyStore?.get(1)?.key!); // TODO: FIXME
if (await requestCategoryCreation(name, $category!.id, $masterKeyStore?.get(1)!)) {
category = getCategoryInfo($category!.id, $masterKeyStore?.get(1)?.key!); // TODO: FIXME
return true;
}
return false;

View File

@@ -1,31 +1,32 @@
<script lang="ts">
import { isFileDownloading, type FileDownloadState } from "$lib/modules/file";
import type { Writable } from "svelte/store";
import { isFileDownloading, type FileDownloadStatus } from "$lib/stores";
import { formatNetworkSpeed } from "$lib/utils";
interface Props {
state: FileDownloadState;
status?: Writable<FileDownloadStatus>;
}
let { state }: Props = $props();
let { status }: Props = $props();
</script>
{#if isFileDownloading(state.status)}
{#if $status && isFileDownloading($status.status)}
<div class="w-full rounded-xl bg-gray-100 p-3">
<p class="font-medium">
{#if state.status === "download-pending"}
{#if $status.status === "download-pending"}
다운로드를 기다리는 중
{:else if state.status === "downloading"}
{:else if $status.status === "downloading"}
다운로드하는 중
{:else if state.status === "decryption-pending"}
{:else if $status.status === "decryption-pending"}
복호화를 기다리는 중
{:else if state.status === "decrypting"}
{:else if $status.status === "decrypting"}
복호화하는 중
{/if}
</p>
<p class="text-xs">
{#if state.status === "downloading"}
{#if $status.status === "downloading"}
전송됨
{Math.floor((state.progress ?? 0) * 100)}% · {formatNetworkSpeed((state.rate ?? 0) * 8)}
{Math.floor(($status.progress ?? 0) * 100)}% · {formatNetworkSpeed(($status.rate ?? 0) * 8)}
{/if}
</p>
</div>

View File

@@ -10,29 +10,17 @@
interface Props {
directoryId?: "root" | number;
downloadUrl?: string;
fileBlob?: Blob;
filename?: string;
isOpen: boolean;
}
let { directoryId, downloadUrl, fileBlob, filename, isOpen = $bindable() }: Props = $props();
const handleDownload = () => {
if (fileBlob && filename) {
FileSaver.saveAs(fileBlob, filename);
} else if (downloadUrl && filename) {
// Use streaming download via Content-Disposition header
const url = new URL(downloadUrl, window.location.origin);
url.searchParams.set("download", filename);
window.open(url.toString(), "_blank");
}
};
let { directoryId, fileBlob, filename, isOpen = $bindable() }: Props = $props();
</script>
<svelte:window onclick={() => (isOpen = false)} />
{#if isOpen && (directoryId || downloadUrl || fileBlob)}
{#if isOpen && (directoryId || fileBlob)}
<div
class="absolute right-2 top-full z-20 space-y-1 rounded-lg bg-white px-1 py-2 shadow-2xl"
transition:fly={{ y: -8, duration: 200 }}
@@ -61,8 +49,10 @@
),
)}
{/if}
{#if fileBlob || downloadUrl}
{@render menuButton(IconCloudDownload, "다운로드", handleDownload)}
{#if fileBlob}
{@render menuButton(IconCloudDownload, "다운로드", () => {
FileSaver.saveAs(fileBlob, filename);
})}
{/if}
</div>
</div>

View File

@@ -1,41 +1,23 @@
import { encryptData } from "$lib/modules/crypto";
import { storeFileThumbnailCache } from "$lib/modules/file";
import { prepareFileDecryption, getDecryptedFileUrl } from "$lib/serviceWorker";
import { requestFileThumbnailUpload } from "$lib/services/file";
import { trpc } from "$trpc/client";
export { requestCategoryCreation, requestFileRemovalFromCategory } from "$lib/services/category";
export { requestFileDownload } from "$lib/services/file";
export const requestVideoStream = async (
fileId: number,
dataKey: CryptoKey,
contentType: string,
) => {
const res = await fetch(`/api/file/${fileId}/download`, { method: "HEAD" });
if (!res.ok) return null;
const encContentSize = parseInt(res.headers.get("Content-Length") ?? "0", 10);
if (encContentSize <= 0) return null;
try {
await prepareFileDecryption(fileId, { isLegacy: false, dataKey, encContentSize, contentType });
return getDecryptedFileUrl(fileId);
} catch {
// TODO: Error Handling
return null;
}
};
export const requestThumbnailUpload = async (
fileId: number,
thumbnail: Blob,
dataKey: CryptoKey,
dataKeyVersion: Date,
) => {
const res = await requestFileThumbnailUpload(fileId, thumbnail, dataKey, dataKeyVersion);
if (!res) return false;
const thumbnailBuffer = await thumbnail.arrayBuffer();
const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey);
const res = await requestFileThumbnailUpload(fileId, dataKeyVersion, thumbnailEncrypted);
if (!res.ok) return false;
void thumbnail.arrayBuffer().then((buffer) => storeFileThumbnailCache(fileId, buffer));
storeFileThumbnailCache(fileId, thumbnailBuffer); // Intended
return true;
};

View File

@@ -1,31 +1,19 @@
<script lang="ts">
import { onMount } from "svelte";
import { get } from "svelte/store";
import { FullscreenDiv } from "$lib/components/atoms";
import { TopBar } from "$lib/components/molecules";
import {
getDownloadingFiles,
clearDownloadedFiles,
type FileDownloadState,
} from "$lib/modules/file";
import { bulkGetFileInfo, type MaybeFileInfo } from "$lib/modules/filesystem";
import { masterKeyStore } from "$lib/stores";
import { fileDownloadStatusStore, isFileDownloading } from "$lib/stores";
import File from "./File.svelte";
let downloadingFiles: { info: MaybeFileInfo; state: FileDownloadState }[] = $state([]);
onMount(async () => {
const states = getDownloadingFiles();
const infos = await bulkGetFileInfo(
states.map(({ id }) => id),
$masterKeyStore?.get(1)?.key!,
let downloadingFiles = $derived(
$fileDownloadStatusStore.filter((status) => isFileDownloading(get(status).status)),
);
downloadingFiles = states.map((state) => ({
info: infos.get(state.id)!,
state,
}));
});
$effect(() => clearDownloadedFiles);
$effect(() => () => {
$fileDownloadStatusStore = $fileDownloadStatusStore.filter((status) =>
isFileDownloading(get(status).status),
);
});
</script>
<svelte:head>
@@ -35,10 +23,8 @@
<TopBar />
<FullscreenDiv>
<div class="space-y-2 pb-4">
{#each downloadingFiles as { info, state } (info.id)}
{#if info.exists}
<File {info} {state} />
{/if}
{#each downloadingFiles as status}
<File {status} />
{/each}
</div>
</FullscreenDiv>

View File

@@ -1,6 +1,7 @@
<script lang="ts">
import type { FileDownloadState } from "$lib/modules/file";
import type { SummarizedFileInfo } from "$lib/modules/filesystem";
import { get, type Writable } from "svelte/store";
import { getFileInfo, type FileInfo } from "$lib/modules/filesystem";
import { masterKeyStore, type FileDownloadStatus } from "$lib/stores";
import { formatNetworkSpeed } from "$lib/utils";
import IconCloud from "~icons/material-symbols/cloud";
@@ -11,49 +12,56 @@
import IconError from "~icons/material-symbols/error";
interface Props {
info: SummarizedFileInfo;
state: FileDownloadState;
status: Writable<FileDownloadStatus>;
}
let { info, state }: Props = $props();
let { status }: Props = $props();
let fileInfo: Writable<FileInfo | null> | undefined = $state();
$effect(() => {
fileInfo = getFileInfo(get(status).id, $masterKeyStore?.get(1)?.key!);
});
</script>
{#if $fileInfo}
<div class="flex h-14 items-center gap-x-4 p-2">
<div class="flex-shrink-0 text-lg text-gray-600">
{#if state.status === "download-pending"}
{#if $status.status === "download-pending"}
<IconCloud />
{:else if state.status === "downloading"}
{:else if $status.status === "downloading"}
<IconCloudDownload />
{:else if state.status === "decryption-pending"}
{:else if $status.status === "decryption-pending"}
<IconLock />
{:else if state.status === "decrypting"}
{:else if $status.status === "decrypting"}
<IconLockClock />
{:else if state.status === "decrypted"}
{:else if $status.status === "decrypted"}
<IconCheckCircle class="text-green-500" />
{:else if state.status === "error"}
{:else if $status.status === "error"}
<IconError class="text-red-500" />
{/if}
</div>
<div class="flex-grow overflow-hidden">
<p title={info.name} class="truncate font-medium">
{info.name}
<p title={$fileInfo.name} class="truncate font-medium">
{$fileInfo.name}
</p>
<p class="text-xs text-gray-800">
{#if state.status === "download-pending"}
{#if $status.status === "download-pending"}
다운로드를 기다리는 중
{:else if state.status === "downloading"}
{:else if $status.status === "downloading"}
전송됨
{Math.floor((state.progress ?? 0) * 100)}% ·
{formatNetworkSpeed((state.rate ?? 0) * 8)}
{:else if state.status === "decryption-pending"}
{Math.floor(($status.progress ?? 0) * 100)}% ·
{formatNetworkSpeed(($status.rate ?? 0) * 8)}
{:else if $status.status === "decryption-pending"}
복호화를 기다리는 중
{:else if state.status === "decrypting"}
{:else if $status.status === "decrypting"}
복호화하는 중
{:else if state.status === "decrypted"}
{:else if $status.status === "decrypted"}
다운로드 완료
{:else if state.status === "error"}
{:else if $status.status === "error"}
다운로드 실패
{/if}
</p>
</div>
</div>
{/if}

View File

@@ -1,12 +1,19 @@
<script lang="ts">
import { get } from "svelte/store";
import { FullscreenDiv } from "$lib/components/atoms";
import { TopBar } from "$lib/components/molecules";
import { getUploadingFiles, clearUploadedFiles } from "$lib/modules/file";
import { fileUploadStatusStore, isFileUploading } from "$lib/stores";
import File from "./File.svelte";
const uploadingFiles = getUploadingFiles();
let uploadingFiles = $derived(
$fileUploadStatusStore.filter((status) => isFileUploading(get(status).status)),
);
$effect(() => clearUploadedFiles);
$effect(() => () => {
$fileUploadStatusStore = $fileUploadStatusStore.filter((status) =>
isFileUploading(get(status).status),
);
});
</script>
<svelte:head>
@@ -16,8 +23,8 @@
<TopBar />
<FullscreenDiv>
<div class="space-y-2 pb-4">
{#each uploadingFiles as file}
<File state={file} />
{#each uploadingFiles as status}
<File {status} />
{/each}
</div>
</FullscreenDiv>

View File

@@ -1,5 +1,6 @@
<script lang="ts">
import type { FileUploadState } from "$lib/modules/file";
import type { Writable } from "svelte/store";
import type { FileUploadStatus } from "$lib/stores";
import { formatNetworkSpeed } from "$lib/utils";
import IconPending from "~icons/material-symbols/pending";
@@ -10,47 +11,45 @@
import IconError from "~icons/material-symbols/error";
interface Props {
state: FileUploadState;
status: Writable<FileUploadStatus>;
}
let { state }: Props = $props();
let { status }: Props = $props();
</script>
<div class="flex h-14 items-center gap-x-4 p-2">
<div class="flex-shrink-0 text-lg text-gray-600">
{#if state.status === "queued" || state.status === "encryption-pending"}
{#if $status.status === "encryption-pending"}
<IconPending />
{:else if state.status === "encrypting"}
{:else if $status.status === "encrypting"}
<IconLockClock />
{:else if state.status === "upload-pending"}
{:else if $status.status === "upload-pending"}
<IconCloud />
{:else if state.status === "uploading"}
{:else if $status.status === "uploading"}
<IconCloudUpload />
{:else if state.status === "uploaded"}
{:else if $status.status === "uploaded"}
<IconCloudDone class="text-blue-500" />
{:else if state.status === "error"}
{:else if $status.status === "error"}
<IconError class="text-red-500" />
{/if}
</div>
<div class="flex-grow overflow-hidden">
<p title={state.name} class="truncate font-medium">
{state.name}
<p title={$status.name} class="truncate font-medium">
{$status.name}
</p>
<p class="text-xs text-gray-800">
{#if state.status === "queued"}
대기 중
{:else if state.status === "encryption-pending"}
{#if $status.status === "encryption-pending"}
준비 중
{:else if state.status === "encrypting"}
{:else if $status.status === "encrypting"}
암호화하는 중
{:else if state.status === "upload-pending"}
{:else if $status.status === "upload-pending"}
업로드를 기다리는 중
{:else if state.status === "uploading"}
{:else if $status.status === "uploading"}
전송됨
{Math.floor((state.progress ?? 0) * 100)}% · {formatNetworkSpeed((state.rate ?? 0) * 8)}
{:else if state.status === "uploaded"}
{Math.floor(($status.progress ?? 0) * 100)}% · {formatNetworkSpeed(($status.rate ?? 0) * 8)}
{:else if $status.status === "uploaded"}
업로드 완료
{:else if state.status === "error"}
{:else if $status.status === "error"}
업로드 실패
{/if}
</p>

View File

@@ -1,7 +0,0 @@
import { createCaller } from "$trpc/router.server";
import type { PageServerLoad } from "./$types";
export const load: PageServerLoad = async (event) => {
const files = await createCaller(event).file.list();
return { files };
};

Some files were not shown because too many files have changed in this diff Show More