3 Commits

20 changed files with 540 additions and 268 deletions

@@ -32,6 +32,7 @@
"autoprefixer": "^10.4.23",
"axios": "^1.13.2",
"dexie": "^4.2.1",
"es-hangul": "^2.3.8",
"eslint": "^9.39.2",
"eslint-config-prettier": "^10.1.8",
"eslint-plugin-svelte": "^3.14.0",

pnpm-lock.yaml (generated)

@@ -84,6 +84,9 @@ importers:
dexie:
specifier: ^4.2.1
version: 4.2.1
es-hangul:
specifier: ^2.3.8
version: 2.3.8
eslint:
specifier: ^9.39.2
version: 9.39.2(jiti@1.21.7)
@@ -989,6 +992,9 @@ packages:
resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
engines: {node: '>= 0.4'}
es-hangul@2.3.8:
resolution: {integrity: sha512-VrJuqYBC7W04aKYjCnswomuJNXQRc0q33SG1IltVrRofi2YEE6FwVDPlsEJIdKbHwsOpbBL/mk9sUaBxVpbd+w==}
es-object-atoms@1.1.1:
resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==}
engines: {node: '>= 0.4'}
@@ -2781,6 +2787,8 @@ snapshots:
es-errors@1.3.0: {}
es-hangul@2.3.8: {}
es-object-atoms@1.1.1:
dependencies:
es-errors: 1.3.0

@@ -1,7 +1,6 @@
import { untrack } from "svelte";
import { unwrapDataKey, decryptString } from "$lib/modules/crypto";
interface FilesystemCacheOptions<K, V> {
export interface FilesystemCacheOptions<K, V> {
fetchFromIndexedDB: (key: K) => Promise<V | undefined>;
fetchFromServer: (key: K, cachedValue: V | undefined, masterKey: CryptoKey) => Promise<V>;
bulkFetchFromIndexedDB?: (keys: Set<K>) => Promise<Map<K, V>>;
@@ -16,7 +15,11 @@ export class FilesystemCache<K, V extends object> {
constructor(private readonly options: FilesystemCacheOptions<K, V>) {}
get(key: K, masterKey: CryptoKey) {
get(
key: K,
masterKey: CryptoKey,
options?: { fetchFromServer?: FilesystemCacheOptions<K, V>["fetchFromServer"] },
) {
return untrack(() => {
let state = this.map.get(key);
if (state?.promise) return state.value ?? state.promise;
@@ -39,7 +42,9 @@ export class FilesystemCache<K, V extends object> {
return loadedInfo;
})
)
.then((cachedInfo) => this.options.fetchFromServer(key, cachedInfo, masterKey))
.then((cachedInfo) =>
(options?.fetchFromServer ?? this.options.fetchFromServer)(key, cachedInfo, masterKey),
)
.then((loadedInfo) => {
if (state.value) {
Object.assign(state.value, loadedInfo);
@@ -121,52 +126,3 @@ export class FilesystemCache<K, V extends object> {
});
}
}
export const decryptDirectoryMetadata = async (
metadata: { dek: string; dekVersion: Date; name: string; nameIv: string },
masterKey: CryptoKey,
) => {
const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
const name = await decryptString(metadata.name, metadata.nameIv, dataKey);
return {
dataKey: { key: dataKey, version: metadata.dekVersion },
name,
};
};
const decryptDate = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
return new Date(parseInt(await decryptString(ciphertext, iv, dataKey), 10));
};
export const decryptFileMetadata = async (
metadata: {
dek: string;
dekVersion: Date;
name: string;
nameIv: string;
createdAt?: string;
createdAtIv?: string;
lastModifiedAt: string;
lastModifiedAtIv: string;
},
masterKey: CryptoKey,
) => {
const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
const [name, createdAt, lastModifiedAt] = await Promise.all([
decryptString(metadata.name, metadata.nameIv, dataKey),
metadata.createdAt
? decryptDate(metadata.createdAt, metadata.createdAtIv!, dataKey)
: undefined,
decryptDate(metadata.lastModifiedAt, metadata.lastModifiedAtIv, dataKey),
]);
return {
dataKey: { key: dataKey, version: metadata.dekVersion },
name,
createdAt,
lastModifiedAt,
};
};
export const decryptCategoryMetadata = decryptDirectoryMetadata;
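
FilesystemCache.get now accepts an optional per-call fetchFromServer override, which the search and migration services later in this diff rely on. A minimal sketch of the call shape; the deep import path, the decryptName helper, and serverPayload are illustrative assumptions, not part of the change:

import { FilesystemCache } from "$lib/modules/filesystem/FilesystemCache.svelte";

// Sketch values only; in the real code the cache is constructed with full options.
declare const cache: FilesystemCache<number, { name: string }>;
declare const masterKey: CryptoKey;
declare const serverPayload: { name: string; nameIv: string };
declare function decryptName(payload: { name: string; nameIv: string }, key: CryptoKey): Promise<string>;

// The override applies to this call only; without it, get() falls back to the
// fetchFromServer supplied when the cache was constructed.
const info = cache.get(42, masterKey, {
  fetchFromServer: async (_id, cachedValue, key) => ({
    ...cachedValue,
    name: await decryptName(serverPayload, key),
  }),
});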

@@ -1,6 +1,7 @@
import * as IndexedDB from "$lib/indexedDB";
import { trpc, isTRPCClientError } from "$trpc/client";
import { FilesystemCache, decryptFileMetadata, decryptCategoryMetadata } from "./internal.svelte";
import { decryptFileMetadata, decryptCategoryMetadata } from "./common";
import { FilesystemCache } from "./FilesystemCache.svelte";
import type { CategoryInfo, MaybeCategoryInfo } from "./types";
const cache = new FilesystemCache<CategoryId, MaybeCategoryInfo>({

@@ -0,0 +1,50 @@
import { unwrapDataKey, decryptString } from "$lib/modules/crypto";
export const decryptDirectoryMetadata = async (
metadata: { dek: string; dekVersion: Date; name: string; nameIv: string },
masterKey: CryptoKey,
) => {
const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
const name = await decryptString(metadata.name, metadata.nameIv, dataKey);
return {
dataKey: { key: dataKey, version: metadata.dekVersion },
name,
};
};
const decryptDate = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
return new Date(parseInt(await decryptString(ciphertext, iv, dataKey), 10));
};
export const decryptFileMetadata = async (
metadata: {
dek: string;
dekVersion: Date;
name: string;
nameIv: string;
createdAt?: string;
createdAtIv?: string;
lastModifiedAt: string;
lastModifiedAtIv: string;
},
masterKey: CryptoKey,
) => {
const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
const [name, createdAt, lastModifiedAt] = await Promise.all([
decryptString(metadata.name, metadata.nameIv, dataKey),
metadata.createdAt
? decryptDate(metadata.createdAt, metadata.createdAtIv!, dataKey)
: undefined,
decryptDate(metadata.lastModifiedAt, metadata.lastModifiedAtIv, dataKey),
]);
return {
dataKey: { key: dataKey, version: metadata.dekVersion },
name,
createdAt,
lastModifiedAt,
};
};
export const decryptCategoryMetadata = decryptDirectoryMetadata;
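
The decrypt helpers keep their signatures after moving into this common module and are re-exported from the filesystem barrel (see the index.ts hunk below). A small usage sketch; the payload values are placeholders, and the optional createdAt fields are simply omitted:

import { decryptFileMetadata } from "$lib/modules/filesystem";

declare const masterKey: CryptoKey;
declare const payload: {
  dek: string;
  dekVersion: Date;
  name: string;
  nameIv: string;
  lastModifiedAt: string;
  lastModifiedAtIv: string;
};

// createdAt comes back undefined when no encrypted created-at pair is present.
const { dataKey, name, createdAt, lastModifiedAt } = await decryptFileMetadata(payload, masterKey);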

@@ -1,6 +1,7 @@
import * as IndexedDB from "$lib/indexedDB";
import { trpc, isTRPCClientError } from "$trpc/client";
import { FilesystemCache, decryptDirectoryMetadata, decryptFileMetadata } from "./internal.svelte";
import { decryptDirectoryMetadata, decryptFileMetadata } from "./common";
import { FilesystemCache, type FilesystemCacheOptions } from "./FilesystemCache.svelte";
import type { DirectoryInfo, MaybeDirectoryInfo } from "./types";
const cache = new FilesystemCache<DirectoryId, MaybeDirectoryInfo>({
@@ -97,6 +98,12 @@ const storeToIndexedDB = (info: DirectoryInfo) => {
return { ...info, exists: true as const };
};
export const getDirectoryInfo = (id: DirectoryId, masterKey: CryptoKey) => {
return cache.get(id, masterKey);
export const getDirectoryInfo = (
id: DirectoryId,
masterKey: CryptoKey,
options?: {
fetchFromServer?: FilesystemCacheOptions<DirectoryId, MaybeDirectoryInfo>["fetchFromServer"];
},
) => {
return cache.get(id, masterKey, options);
};

@@ -1,6 +1,7 @@
import * as IndexedDB from "$lib/indexedDB";
import { trpc, isTRPCClientError } from "$trpc/client";
import { FilesystemCache, decryptFileMetadata, decryptCategoryMetadata } from "./internal.svelte";
import { decryptFileMetadata, decryptCategoryMetadata } from "./common";
import { FilesystemCache, type FilesystemCacheOptions } from "./FilesystemCache.svelte";
import type { FileInfo, MaybeFileInfo } from "./types";
const cache = new FilesystemCache<number, MaybeFileInfo>({
@@ -168,8 +169,12 @@ const bulkStoreToIndexedDB = (infos: FileInfo[]) => {
return infos.map((info) => [info.id, { ...info, exists: true }] as const);
};
export const getFileInfo = (id: number, masterKey: CryptoKey) => {
return cache.get(id, masterKey);
export const getFileInfo = (
id: number,
masterKey: CryptoKey,
options?: { fetchFromServer?: FilesystemCacheOptions<number, MaybeFileInfo>["fetchFromServer"] },
) => {
return cache.get(id, masterKey, options);
};
export const bulkGetFileInfo = (ids: number[], masterKey: CryptoKey) => {

@@ -1,4 +1,5 @@
export * from "./category";
export * from "./common";
export * from "./directory";
export * from "./file";
export * from "./types";

@@ -367,14 +367,77 @@ export const getAllFileIds = async (userId: number) => {
return files.map(({ id }) => id);
};
export const getLegacyFileIds = async (userId: number) => {
export const getLegacyFiles = async (userId: number, limit: number = 100) => {
const files = await db
.selectFrom("file")
.select("id")
.selectAll()
.where("user_id", "=", userId)
.where("encrypted_content_iv", "is not", null)
.limit(limit)
.execute();
return files.map(({ id }) => id);
return files.map(
(file) =>
({
id: file.id,
parentId: file.parent_id ?? "root",
userId: file.user_id,
path: file.path,
mekVersion: file.master_encryption_key_version,
encDek: file.encrypted_data_encryption_key,
dekVersion: file.data_encryption_key_version,
hskVersion: file.hmac_secret_key_version,
contentHmac: file.content_hmac,
contentType: file.content_type,
encContentIv: file.encrypted_content_iv,
encContentHash: file.encrypted_content_hash,
encName: file.encrypted_name,
encCreatedAt: file.encrypted_created_at,
encLastModifiedAt: file.encrypted_last_modified_at,
}) satisfies File,
);
};
export const getFilesWithoutThumbnail = async (userId: number, limit: number = 100) => {
const files = await db
.selectFrom("file")
.selectAll()
.where("user_id", "=", userId)
.where((eb) =>
eb.or([eb("content_type", "like", "image/%"), eb("content_type", "like", "video/%")]),
)
.where((eb) =>
eb.not(
eb.exists(
eb
.selectFrom("thumbnail")
.select("thumbnail.id")
.whereRef("thumbnail.file_id", "=", "file.id")
.limit(1),
),
),
)
.limit(limit)
.execute();
return files.map(
(file) =>
({
id: file.id,
parentId: file.parent_id ?? "root",
userId: file.user_id,
path: file.path,
mekVersion: file.master_encryption_key_version,
encDek: file.encrypted_data_encryption_key,
dekVersion: file.data_encryption_key_version,
hskVersion: file.hmac_secret_key_version,
contentHmac: file.content_hmac,
contentType: file.content_type,
encContentIv: file.encrypted_content_iv,
encContentHash: file.encrypted_content_hash,
encName: file.encrypted_name,
encCreatedAt: file.encrypted_created_at,
encLastModifiedAt: file.encrypted_last_modified_at,
}) satisfies File,
);
};
export const getAllFileIdsByContentHmac = async (
@@ -475,95 +538,99 @@ export const searchFiles = async (
excludeCategoryIds: number[];
},
) => {
const ctes: string[] = [];
const conditions: string[] = [];
if (filters.parentId === "root") {
conditions.push(`user_id = ${userId}`);
} else {
ctes.push(`
directory_tree AS (
SELECT id FROM directory WHERE user_id = ${userId} AND id = ${filters.parentId}
UNION ALL
SELECT d.id FROM directory d INNER JOIN directory_tree dt ON d.parent_id = dt.id
)`);
conditions.push(`parent_id IN (SELECT id FROM directory_tree)`);
}
filters.includeCategoryIds.forEach((categoryId, index) => {
ctes.push(`
include_category_tree_${index} AS (
SELECT id FROM category WHERE user_id = ${userId} AND id = ${categoryId}
UNION ALL
SELECT c.id FROM category c INNER JOIN include_category_tree_${index} ct ON c.parent_id = ct.id
)`);
conditions.push(`
EXISTS(
SELECT 1 FROM file_category
WHERE file_id = file.id
AND EXISTS (SELECT 1 FROM include_category_tree_${index} ct WHERE ct.id = category_id)
)`);
});
if (filters.excludeCategoryIds.length > 0) {
ctes.push(`
exclude_category_tree AS (
SELECT id FROM category WHERE user_id = ${userId} AND id IN (${filters.excludeCategoryIds.join(",")})
UNION ALL
SELECT c.id FROM category c INNER JOIN exclude_category_tree ct ON c.parent_id = ct.id
)`);
conditions.push(`
NOT EXISTS(
SELECT 1 FROM file_category
WHERE file_id = id
AND EXISTS (SELECT 1 FROM exclude_category_tree ct WHERE ct.id = category_id)
)`);
}
const query = `
${ctes.length > 0 ? `WITH RECURSIVE ${ctes.join(",")}` : ""}
SELECT * FROM file
WHERE ${conditions.join(" AND ")}
`;
const { rows } = await sql
.raw<{
id: number;
parent_id: number | null;
user_id: number;
path: string;
master_encryption_key_version: number;
encrypted_data_encryption_key: string;
data_encryption_key_version: Date;
hmac_secret_key_version: number;
content_hmac: string;
content_type: string;
encrypted_content_iv: string;
encrypted_content_hash: string;
encrypted_name: Ciphertext;
encrypted_created_at: Ciphertext | null;
encrypted_last_modified_at: Ciphertext;
}>(query)
.execute(db);
return rows.map(
(file) =>
({
id: file.id,
parentId: file.parent_id ?? "root",
userId: file.user_id,
path: file.path,
mekVersion: file.master_encryption_key_version,
encDek: file.encrypted_data_encryption_key,
dekVersion: file.data_encryption_key_version,
hskVersion: file.hmac_secret_key_version,
contentHmac: file.content_hmac,
contentType: file.content_type,
encContentIv: file.encrypted_content_iv,
encContentHash: file.encrypted_content_hash,
encName: file.encrypted_name,
encCreatedAt: file.encrypted_created_at,
encLastModifiedAt: file.encrypted_last_modified_at,
}) satisfies File,
);
const baseQuery = db
.withRecursive("directory_tree", (db) =>
db
.selectFrom("directory")
.select("id")
.where("user_id", "=", userId)
.where((eb) => eb.val(filters.parentId !== "root")) // directory_tree will be empty if parentId is "root"
.$if(filters.parentId !== "root", (qb) => qb.where("id", "=", filters.parentId as number))
.unionAll(
db
.selectFrom("directory as d")
.innerJoin("directory_tree as dt", "d.parent_id", "dt.id")
.select("d.id"),
),
)
.withRecursive("include_category_tree", (db) =>
db
.selectFrom("category")
.select(["id", "id as root_id"])
.where("id", "=", (eb) => eb.fn.any(eb.val(filters.includeCategoryIds)))
.where("user_id", "=", userId)
.unionAll(
db
.selectFrom("category as c")
.innerJoin("include_category_tree as ct", "c.parent_id", "ct.id")
.select(["c.id", "ct.root_id"]),
),
)
.withRecursive("exclude_category_tree", (db) =>
db
.selectFrom("category")
.select("id")
.where("id", "=", (eb) => eb.fn.any(eb.val(filters.excludeCategoryIds)))
.where("user_id", "=", userId)
.unionAll((db) =>
db
.selectFrom("category as c")
.innerJoin("exclude_category_tree as ct", "c.parent_id", "ct.id")
.select("c.id"),
),
)
.selectFrom("file")
.selectAll("file")
.$if(filters.parentId === "root", (qb) => qb.where("user_id", "=", userId)) // directory_tree isn't used if parentId is "root"
.$if(filters.parentId !== "root", (qb) =>
qb.where("parent_id", "in", (eb) => eb.selectFrom("directory_tree").select("id")),
)
.where((eb) =>
eb.not(
eb.exists(
eb
.selectFrom("file_category")
.whereRef("file_id", "=", "file.id")
.where("category_id", "in", (eb) =>
eb.selectFrom("exclude_category_tree").select("id"),
),
),
),
);
const files =
filters.includeCategoryIds.length > 0
? await baseQuery
.innerJoin("file_category", "file.id", "file_category.file_id")
.innerJoin(
"include_category_tree",
"file_category.category_id",
"include_category_tree.id",
)
.groupBy("file.id")
.having(
(eb) => eb.fn.count("include_category_tree.root_id").distinct(),
"=",
filters.includeCategoryIds.length,
)
.execute()
: await baseQuery.execute();
return files.map((file) => ({
id: file.id,
parentId: file.parent_id ?? ("root" as const),
userId: file.user_id,
path: file.path,
mekVersion: file.master_encryption_key_version,
encDek: file.encrypted_data_encryption_key,
dekVersion: file.data_encryption_key_version,
hskVersion: file.hmac_secret_key_version,
contentHmac: file.content_hmac,
contentType: file.content_type,
encContentIv: file.encrypted_content_iv,
encContentHash: file.encrypted_content_hash,
encName: file.encrypted_name,
encCreatedAt: file.encrypted_created_at,
encLastModifiedAt: file.encrypted_last_modified_at,
}));
};
export const setFileEncName = async (
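
The hand-built SQL strings (with userId and category ids interpolated directly) are replaced by Kysely's query builder, so every value is bound as a parameter. To spot-check the generated recursive CTEs without hitting the database, a builder can be compiled; a minimal sketch, with the schema typed as any and table names taken from the diff:

import { Kysely } from "kysely";

declare const db: Kysely<any>;

const compiled = db
  .withRecursive("directory_tree", (qb) =>
    qb
      .selectFrom("directory")
      .select("id")
      .where("user_id", "=", 1)
      .unionAll((eb) =>
        eb
          .selectFrom("directory as d")
          .innerJoin("directory_tree as dt", "d.parent_id", "dt.id")
          .select("d.id"),
      ),
  )
  .selectFrom("file")
  .selectAll("file")
  .where("parent_id", "in", (eb) => eb.selectFrom("directory_tree").select("id"))
  .compile();

console.log(compiled.sql);        // the WITH RECURSIVE ... SELECT statement Kysely generates
console.log(compiled.parameters); // [1] — the user id is a bound parameter, not string-interpolated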

@@ -83,27 +83,3 @@ export const getFileThumbnail = async (userId: number, fileId: number) => {
} satisfies FileThumbnail)
: null;
};
export const getMissingFileThumbnails = async (userId: number, limit: number = 100) => {
const files = await db
.selectFrom("file")
.select("id")
.where("user_id", "=", userId)
.where((eb) =>
eb.or([eb("content_type", "like", "image/%"), eb("content_type", "like", "video/%")]),
)
.where((eb) =>
eb.not(
eb.exists(
eb
.selectFrom("thumbnail")
.select("thumbnail.id")
.whereRef("thumbnail.file_id", "=", "file.id")
.limit(1),
),
),
)
.limit(limit)
.execute();
return files.map(({ id }) => id);
};

@@ -90,4 +90,42 @@ export class HybridPromise<T> implements PromiseLike<T> {
return HybridPromise.reject(e);
}
}
static all<T extends readonly unknown[] | []>(
maybePromises: T,
): HybridPromise<{ -readonly [P in keyof T]: HybridAwaited<T[P]> }> {
const length = maybePromises.length;
if (length === 0) {
return HybridPromise.resolve([] as any);
}
const hps = Array.from(maybePromises).map((p) => HybridPromise.resolve(p));
if (hps.some((hp) => !hp.isSync())) {
return new HybridPromise({
mode: "async",
promise: Promise.all(hps.map((hp) => hp.toPromise())) as any,
});
}
try {
return HybridPromise.resolve(
Array.from(
hps.map((hp) => {
if (hp.state.mode === "sync") {
if (hp.state.status === "fulfilled") {
return hp.state.value;
} else {
throw hp.state.reason;
}
}
}),
) as any,
);
} catch (e) {
return HybridPromise.reject(e);
}
}
}
export type HybridAwaited<T> =
T extends HybridPromise<infer U> ? U : T extends Promise<infer U> ? U : T;
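
HybridPromise.all mirrors Promise.all, but when every input settles synchronously it assembles the result tuple directly instead of wrapping Promise.all, so callers can stay on the synchronous fast path. A usage sketch, assuming the $lib/utils barrel export shown below:

import { HybridPromise } from "$lib/utils";

// All inputs are plain values or synchronously resolved, so no real Promise.all is involved.
const allSync = HybridPromise.all([HybridPromise.resolve(1), 2, "three"] as const);
allSync.then(([a, b, c]) => console.log(a + b, c)); // 3 "three"

// One genuine Promise in the mix routes the whole combination through Promise.all.
const mixed = HybridPromise.all([HybridPromise.resolve(1), Promise.resolve(2)] as const);
mixed.then(([a, b]) => console.log(a + b)); // 3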

@@ -1,4 +1,5 @@
export * from "./concurrency";
export * from "./format";
export * from "./gotoStateful";
export * from "./search";
export * from "./sort";

src/lib/utils/search.ts (new file)

@@ -0,0 +1,28 @@
import { disassemble, getChoseong } from "es-hangul";
const normalize = (s: string) => {
return s.normalize("NFC").toLowerCase().replace(/\s/g, "");
};
const extractHangul = (s: string) => {
return s.replace(/[^가-힣ㄱ-ㅎㅏ-ㅣ]/g, "");
};
const hangulSearch = (original: string, query: string) => {
original = extractHangul(original);
query = extractHangul(query);
if (!original || !query) return false;
return (
disassemble(original).includes(disassemble(query)) ||
getChoseong(original).includes(getChoseong(query))
);
};
export const searchString = (original: string, query: string) => {
original = normalize(original);
query = normalize(query);
if (!original || !query) return false;
return original.includes(query) || hangulSearch(original, query);
};
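
searchString combines a whitespace- and case-insensitive substring check with two Hangul-aware matches from es-hangul: jamo-level matching via disassemble and initial-consonant matching via getChoseong. A few illustrative calls, assuming the $lib/utils barrel export added in this change:

import { searchString } from "$lib/utils";

searchString("Vacation Photos", "photos"); // true — plain match, case and spaces ignored
searchString("바다 사진", "바닷"); // true — "바닷" disassembles to ㅂㅏㄷㅏㅅ, a prefix of the disassembled name
searchString("바다 사진", "ㅂㄷㅅㅈ"); // true if getChoseong keeps bare consonants, i.e. initial-consonant search
searchString("바다 사진", "하늘"); // false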

@@ -1,4 +1,7 @@
<script lang="ts">
import type { Snapshot } from "@sveltejs/kit";
import superjson, { type SuperJSONResult } from "superjson";
import { untrack } from "svelte";
import { slide } from "svelte/transition";
import { goto } from "$app/navigation";
import { Chip, FullscreenDiv, RowVirtualizer } from "$lib/components/atoms";
@@ -8,7 +11,7 @@
type MaybeDirectoryInfo,
} from "$lib/modules/filesystem";
import { masterKeyStore } from "$lib/stores";
import { HybridPromise, sortEntries } from "$lib/utils";
import { HybridPromise, searchString, sortEntries } from "$lib/utils";
import Directory from "./Directory.svelte";
import File from "./File.svelte";
import SearchBar from "./SearchBar.svelte";
@@ -17,15 +20,24 @@
let { data } = $props();
interface SearchFilters {
name: string;
includeImages: boolean;
includeVideos: boolean;
includeDirectories: boolean;
searchInDirectory: boolean;
categories: SearchFilter["categories"];
}
let directoryInfo: MaybeDirectoryInfo | undefined = $state();
let filters = $state({
let filters: SearchFilters = $state({
name: "",
includeImages: false,
includeVideos: false,
includeDirectories: false,
searchInDirectory: false,
categories: [] as SearchFilter["categories"],
categories: [],
});
let hasCategoryFilter = $derived(filters.categories.length > 0);
let hasAnyFilter = $derived(
@@ -36,29 +48,40 @@
filters.name.trim().length > 0,
);
let isRestoredFromSnapshot = $state(false);
let serverResult: SearchResult | undefined = $state();
let result = $derived.by(() => {
if (!serverResult) return [];
const nameFilter = filters.name.trim().toLowerCase();
const nameFilter = filters.name.trim();
const hasTypeFilter =
filters.includeImages || filters.includeVideos || filters.includeDirectories;
const directories =
!hasTypeFilter || filters.includeDirectories ? serverResult.directories : [];
!hasTypeFilter || filters.includeDirectories
? serverResult.directories.map((directory) => ({
type: "directory" as const,
...directory,
}))
: [];
const files =
!hasTypeFilter || filters.includeImages || filters.includeVideos
? serverResult.files.filter(
({ contentType }) =>
!hasTypeFilter ||
(filters.includeImages && contentType.startsWith("image/")) ||
(filters.includeVideos && contentType.startsWith("video/")),
)
? serverResult.files
.filter(
({ contentType }) =>
!hasTypeFilter ||
(filters.includeImages && contentType.startsWith("image/")) ||
(filters.includeVideos && contentType.startsWith("video/")),
)
.map((file) => ({
type: "file" as const,
...file,
}))
: [];
return sortEntries(
[...directories, ...files].filter(
({ name }) => !nameFilter || name.toLowerCase().includes(nameFilter),
({ name }) => !nameFilter || searchString(name, nameFilter),
),
);
});
@@ -81,6 +104,20 @@
}
};
export const snapshot: Snapshot<{
filters: SearchFilters;
serverResult: SuperJSONResult;
}> = {
capture() {
return { filters, serverResult: superjson.serialize(serverResult) };
},
restore(value) {
filters = value.filters;
serverResult = superjson.deserialize(value.serverResult, { inPlace: true });
isRestoredFromSnapshot = true;
},
};
$effect(() => {
if (data.directoryId) {
HybridPromise.resolve(getDirectoryInfo(data.directoryId, $masterKeyStore?.get(1)?.key!)).then(
@@ -96,6 +133,16 @@
});
$effect(() => {
// Svelte sucks
hasAnyFilter;
filters.searchInDirectory;
filters.categories.length;
if (untrack(() => isRestoredFromSnapshot)) {
isRestoredFromSnapshot = false;
return;
}
if (hasAnyFilter) {
requestSearch(
{

@@ -1,8 +1,13 @@
import type { DataKey, LocalCategoryInfo } from "$lib/modules/filesystem";
import {
decryptDirectoryMetadata,
decryptFileMetadata,
} from "$lib/modules/filesystem/internal.svelte";
getDirectoryInfo,
getFileInfo,
type LocalDirectoryInfo,
type FileInfo,
type LocalCategoryInfo,
} from "$lib/modules/filesystem";
import { HybridPromise } from "$lib/utils";
import { trpc } from "$trpc/client";
export interface SearchFilter {
@@ -10,28 +15,9 @@ export interface SearchFilter {
categories: { info: LocalCategoryInfo; type: "include" | "exclude" }[];
}
interface SearchedDirectory {
type: "directory";
id: number;
parentId: DirectoryId;
dataKey?: DataKey;
name: string;
}
interface SearchedFile {
type: "file";
id: number;
parentId: DirectoryId;
dataKey?: DataKey;
contentType: string;
name: string;
createdAt?: Date;
lastModifiedAt: Date;
}
export interface SearchResult {
directories: SearchedDirectory[];
files: SearchedFile[];
directories: LocalDirectoryInfo[];
files: FileInfo[];
}
export const requestSearch = async (filter: SearchFilter, masterKey: CryptoKey) => {
@@ -45,51 +31,47 @@ export const requestSearch = async (filter: SearchFilter, masterKey: CryptoKey)
.map(({ info }) => info.id),
});
// TODO: FIXME
const [directories, files] = await Promise.all([
Promise.all(
directoriesRaw.map(async (dir) => {
const metadata = await decryptDirectoryMetadata(
{ dek: dir.dek, dekVersion: dir.dekVersion, name: dir.name, nameIv: dir.nameIv },
masterKey,
);
return {
type: "directory" as const,
id: dir.id,
parentId: dir.parent,
dataKey: metadata.dataKey,
name: metadata.name,
};
}),
const [directories, files] = await HybridPromise.all([
HybridPromise.all(
directoriesRaw.map((directory) =>
HybridPromise.resolve(
getDirectoryInfo(directory.id, masterKey, {
async fetchFromServer(id, cachedInfo, masterKey) {
const metadata = await decryptDirectoryMetadata(directory, masterKey);
return {
subDirectories: [],
files: [],
...cachedInfo,
id: id as number,
exists: true,
parentId: directory.parent,
...metadata,
};
},
}),
),
),
),
Promise.all(
filesRaw.map(async (file) => {
const metadata = await decryptFileMetadata(
{
dek: file.dek,
dekVersion: file.dekVersion,
name: file.name,
nameIv: file.nameIv,
createdAt: file.createdAt,
createdAtIv: file.createdAtIv,
lastModifiedAt: file.lastModifiedAt,
lastModifiedAtIv: file.lastModifiedAtIv,
},
masterKey,
);
return {
type: "file" as const,
id: file.id,
parentId: file.parent,
dataKey: metadata.dataKey,
contentType: file.contentType,
name: metadata.name,
createdAt: metadata.createdAt,
lastModifiedAt: metadata.lastModifiedAt,
};
}),
HybridPromise.all(
filesRaw.map((file) =>
HybridPromise.resolve(
getFileInfo(file.id, masterKey, {
async fetchFromServer(id, cachedInfo, masterKey) {
const metadata = await decryptFileMetadata(file, masterKey);
return {
categories: [],
...cachedInfo,
id: id as number,
exists: true,
parentId: file.parent,
contentType: file.contentType,
...metadata,
};
},
}),
),
),
),
]);
return { directories, files };
return { directories, files } as SearchResult;
};

@@ -3,11 +3,16 @@
import { goto } from "$app/navigation";
import { BottomDiv, Button, FullscreenDiv } from "$lib/components/atoms";
import { TopBar } from "$lib/components/molecules";
import { bulkGetFileInfo, type MaybeFileInfo } from "$lib/modules/filesystem";
import type { MaybeFileInfo } from "$lib/modules/filesystem";
import { masterKeyStore } from "$lib/stores";
import { sortEntries } from "$lib/utils";
import File from "./File.svelte";
import { getMigrationState, clearMigrationStates, requestFileMigration } from "./service.svelte";
import {
getMigrationState,
clearMigrationStates,
requestLegacyFiles,
requestFileMigration,
} from "./service.svelte";
let { data } = $props();
@@ -30,9 +35,7 @@
};
onMount(async () => {
fileInfos = sortEntries(
Array.from((await bulkGetFileInfo(data.files, $masterKeyStore?.get(1)?.key!)).values()),
);
fileInfos = sortEntries(await requestLegacyFiles(data.files, $masterKeyStore?.get(1)?.key!));
});
$effect(() => clearMigrationStates);

@@ -1,11 +1,17 @@
import { limitFunction } from "p-limit";
import { SvelteMap } from "svelte/reactivity";
import { CHUNK_SIZE } from "$lib/constants";
import type { FileInfo } from "$lib/modules/filesystem";
import {
decryptFileMetadata,
getFileInfo,
type FileInfo,
type MaybeFileInfo,
} from "$lib/modules/filesystem";
import { uploadBlob } from "$lib/modules/upload";
import { requestFileDownload } from "$lib/services/file";
import { Scheduler } from "$lib/utils";
import { HybridPromise, Scheduler } from "$lib/utils";
import { trpc } from "$trpc/client";
import type { RouterOutputs } from "$trpc/router.server";
export type MigrationStatus =
| "queued"
@@ -24,6 +30,35 @@ export interface MigrationState {
const scheduler = new Scheduler();
const states = new SvelteMap<number, MigrationState>();
export const requestLegacyFiles = async (
filesRaw: RouterOutputs["file"]["listLegacy"],
masterKey: CryptoKey,
) => {
const files = await HybridPromise.all(
filesRaw.map((file) =>
HybridPromise.resolve(
getFileInfo(file.id, masterKey, {
async fetchFromServer(id, cachedInfo, masterKey) {
const metadata = await decryptFileMetadata(file, masterKey);
return {
categories: [],
...cachedInfo,
id: id as number,
exists: true,
isLegacy: file.isLegacy,
parentId: file.parent,
contentType: file.contentType,
...metadata,
};
},
}),
),
),
);
return files as MaybeFileInfo[];
};
const createState = (status: MigrationStatus): MigrationState => {
const state = $state({ status });
return state;

@@ -4,13 +4,14 @@
import { BottomDiv, Button, FullscreenDiv } from "$lib/components/atoms";
import { IconEntryButton, TopBar } from "$lib/components/molecules";
import { deleteAllFileThumbnailCaches } from "$lib/modules/file";
import { bulkGetFileInfo, type MaybeFileInfo } from "$lib/modules/filesystem";
import type { MaybeFileInfo } from "$lib/modules/filesystem";
import { masterKeyStore } from "$lib/stores";
import { sortEntries } from "$lib/utils";
import File from "./File.svelte";
import {
getThumbnailGenerationStatus,
clearThumbnailGenerationStatuses,
requestMissingThumbnailFiles,
requestThumbnailGeneration,
type GenerationStatus,
} from "./service";
@@ -42,7 +43,7 @@
onMount(async () => {
fileInfos = sortEntries(
Array.from((await bulkGetFileInfo(data.files, $masterKeyStore?.get(1)?.key!)).values()),
await requestMissingThumbnailFiles(data.files, $masterKeyStore?.get(1)?.key!),
);
});

@@ -1,10 +1,16 @@
import { limitFunction } from "p-limit";
import { SvelteMap } from "svelte/reactivity";
import { storeFileThumbnailCache } from "$lib/modules/file";
import type { FileInfo } from "$lib/modules/filesystem";
import {
decryptFileMetadata,
getFileInfo,
type FileInfo,
type MaybeFileInfo,
} from "$lib/modules/filesystem";
import { generateThumbnail } from "$lib/modules/thumbnail";
import { requestFileDownload, requestFileThumbnailUpload } from "$lib/services/file";
import { Scheduler } from "$lib/utils";
import { HybridPromise, Scheduler } from "$lib/utils";
import type { RouterOutputs } from "$trpc/router.server";
export type GenerationStatus =
| "queued"
@@ -29,6 +35,35 @@ export const clearThumbnailGenerationStatuses = () => {
}
};
export const requestMissingThumbnailFiles = async (
filesRaw: RouterOutputs["file"]["listWithoutThumbnail"],
masterKey: CryptoKey,
) => {
const files = await HybridPromise.all(
filesRaw.map((file) =>
HybridPromise.resolve(
getFileInfo(file.id, masterKey, {
async fetchFromServer(id, cachedInfo, masterKey) {
const metadata = await decryptFileMetadata(file, masterKey);
return {
categories: [],
...cachedInfo,
id: id as number,
exists: true,
isLegacy: file.isLegacy,
parentId: file.parent,
contentType: file.contentType,
...metadata,
};
},
}),
),
),
);
return files as MaybeFileInfo[];
};
const requestThumbnailUpload = limitFunction(
async (fileInfo: FileInfo, fileBuffer: ArrayBuffer) => {
statuses.set(fileInfo.id, "generating");

@@ -97,11 +97,41 @@ const fileRouter = router({
}),
listWithoutThumbnail: roleProcedure["activeClient"].query(async ({ ctx }) => {
return await MediaRepo.getMissingFileThumbnails(ctx.session.userId);
const files = await FileRepo.getFilesWithoutThumbnail(ctx.session.userId);
return files.map((file) => ({
id: file.id,
isLegacy: !!file.encContentIv,
parent: file.parentId,
mekVersion: file.mekVersion,
dek: file.encDek,
dekVersion: file.dekVersion,
contentType: file.contentType,
name: file.encName.ciphertext,
nameIv: file.encName.iv,
createdAt: file.encCreatedAt?.ciphertext,
createdAtIv: file.encCreatedAt?.iv,
lastModifiedAt: file.encLastModifiedAt.ciphertext,
lastModifiedAtIv: file.encLastModifiedAt.iv,
}));
}),
listLegacy: roleProcedure["activeClient"].query(async ({ ctx }) => {
return await FileRepo.getLegacyFileIds(ctx.session.userId);
const files = await FileRepo.getLegacyFiles(ctx.session.userId);
return files.map((file) => ({
id: file.id,
isLegacy: true,
parent: file.parentId,
mekVersion: file.mekVersion,
dek: file.encDek,
dekVersion: file.dekVersion,
contentType: file.contentType,
name: file.encName.ciphertext,
nameIv: file.encName.iv,
createdAt: file.encCreatedAt?.ciphertext,
createdAtIv: file.encCreatedAt?.iv,
lastModifiedAt: file.encLastModifiedAt.ciphertext,
lastModifiedAtIv: file.encLastModifiedAt.iv,
}));
}),
rename: roleProcedure["activeClient"]