Mirror of https://github.com/kmc7468/arkvault.git, synced 2026-02-04 08:06:56 +00:00
Optimize network calls for querying files with missing thumbnails and querying legacy files
@@ -1,7 +1,6 @@
 import { untrack } from "svelte";
-import { unwrapDataKey, decryptString } from "$lib/modules/crypto";

-interface FilesystemCacheOptions<K, V> {
+export interface FilesystemCacheOptions<K, V> {
   fetchFromIndexedDB: (key: K) => Promise<V | undefined>;
   fetchFromServer: (key: K, cachedValue: V | undefined, masterKey: CryptoKey) => Promise<V>;
   bulkFetchFromIndexedDB?: (keys: Set<K>) => Promise<Map<K, V>>;
@@ -16,7 +15,11 @@ export class FilesystemCache<K, V extends object> {

   constructor(private readonly options: FilesystemCacheOptions<K, V>) {}

-  get(key: K, masterKey: CryptoKey) {
+  get(
+    key: K,
+    masterKey: CryptoKey,
+    options?: { fetchFromServer?: FilesystemCacheOptions<K, V>["fetchFromServer"] },
+  ) {
     return untrack(() => {
       let state = this.map.get(key);
       if (state?.promise) return state.value ?? state.promise;
@@ -39,7 +42,9 @@ export class FilesystemCache<K, V extends object> {
             return loadedInfo;
           })
         )
-        .then((cachedInfo) => this.options.fetchFromServer(key, cachedInfo, masterKey))
+        .then((cachedInfo) =>
+          (options?.fetchFromServer ?? this.options.fetchFromServer)(key, cachedInfo, masterKey),
+        )
         .then((loadedInfo) => {
           if (state.value) {
             Object.assign(state.value, loadedInfo);
@@ -121,52 +126,3 @@ export class FilesystemCache<K, V extends object> {
     });
   }
 }
-
-export const decryptDirectoryMetadata = async (
-  metadata: { dek: string; dekVersion: Date; name: string; nameIv: string },
-  masterKey: CryptoKey,
-) => {
-  const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
-  const name = await decryptString(metadata.name, metadata.nameIv, dataKey);
-
-  return {
-    dataKey: { key: dataKey, version: metadata.dekVersion },
-    name,
-  };
-};
-
-const decryptDate = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
-  return new Date(parseInt(await decryptString(ciphertext, iv, dataKey), 10));
-};
-
-export const decryptFileMetadata = async (
-  metadata: {
-    dek: string;
-    dekVersion: Date;
-    name: string;
-    nameIv: string;
-    createdAt?: string;
-    createdAtIv?: string;
-    lastModifiedAt: string;
-    lastModifiedAtIv: string;
-  },
-  masterKey: CryptoKey,
-) => {
-  const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
-  const [name, createdAt, lastModifiedAt] = await Promise.all([
-    decryptString(metadata.name, metadata.nameIv, dataKey),
-    metadata.createdAt
-      ? decryptDate(metadata.createdAt, metadata.createdAtIv!, dataKey)
-      : undefined,
-    decryptDate(metadata.lastModifiedAt, metadata.lastModifiedAtIv, dataKey),
-  ]);
-
-  return {
-    dataKey: { key: dataKey, version: metadata.dekVersion },
-    name,
-    createdAt,
-    lastModifiedAt,
-  };
-};
-
-export const decryptCategoryMetadata = decryptDirectoryMetadata;
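
The new optional third argument to get() is what the rest of this commit builds on: a caller that already holds a decrypted server payload can supply its own fetchFromServer for that single lookup, so the cache entry is filled without another request. A minimal usage sketch, not part of the commit (the relative imports and the declared values are illustrative assumptions):

import { FilesystemCache } from "./FilesystemCache.svelte";
import type { MaybeFileInfo } from "./types";

declare const cache: FilesystemCache<number, MaybeFileInfo>;
declare const masterKey: CryptoKey;
// Data that some bulk/list endpoint already returned and decrypted.
declare const prefetched: { id: number; name: string };

// Per-call override: IndexedDB is still consulted first, but the "server" step
// just hands back the data we already have instead of issuing a network request.
const info = cache.get(prefetched.id, masterKey, {
  fetchFromServer: async (id, cachedInfo) =>
    ({ ...cachedInfo, id, exists: true, name: prefetched.name }) as MaybeFileInfo,
});
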
@@ -1,6 +1,7 @@
 import * as IndexedDB from "$lib/indexedDB";
 import { trpc, isTRPCClientError } from "$trpc/client";
-import { FilesystemCache, decryptFileMetadata, decryptCategoryMetadata } from "./internal.svelte";
+import { decryptFileMetadata, decryptCategoryMetadata } from "./common";
+import { FilesystemCache } from "./FilesystemCache.svelte";
 import type { CategoryInfo, MaybeCategoryInfo } from "./types";

 const cache = new FilesystemCache<CategoryId, MaybeCategoryInfo>({
src/lib/modules/filesystem/common.ts (new file, +50 lines)
@@ -0,0 +1,50 @@
+import { unwrapDataKey, decryptString } from "$lib/modules/crypto";
+
+export const decryptDirectoryMetadata = async (
+  metadata: { dek: string; dekVersion: Date; name: string; nameIv: string },
+  masterKey: CryptoKey,
+) => {
+  const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
+  const name = await decryptString(metadata.name, metadata.nameIv, dataKey);
+
+  return {
+    dataKey: { key: dataKey, version: metadata.dekVersion },
+    name,
+  };
+};
+
+const decryptDate = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
+  return new Date(parseInt(await decryptString(ciphertext, iv, dataKey), 10));
+};
+
+export const decryptFileMetadata = async (
+  metadata: {
+    dek: string;
+    dekVersion: Date;
+    name: string;
+    nameIv: string;
+    createdAt?: string;
+    createdAtIv?: string;
+    lastModifiedAt: string;
+    lastModifiedAtIv: string;
+  },
+  masterKey: CryptoKey,
+) => {
+  const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
+  const [name, createdAt, lastModifiedAt] = await Promise.all([
+    decryptString(metadata.name, metadata.nameIv, dataKey),
+    metadata.createdAt
+      ? decryptDate(metadata.createdAt, metadata.createdAtIv!, dataKey)
+      : undefined,
+    decryptDate(metadata.lastModifiedAt, metadata.lastModifiedAtIv, dataKey),
+  ]);
+
+  return {
+    dataKey: { key: dataKey, version: metadata.dekVersion },
+    name,
+    createdAt,
+    lastModifiedAt,
+  };
+};
+
+export const decryptCategoryMetadata = decryptDirectoryMetadata;
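
A short sketch of how the shared helpers in common.ts are called, illustrative only (the raw payload mirrors the parameter type above; the values are placeholders):

import { decryptFileMetadata } from "$lib/modules/filesystem/common";

declare const masterKey: CryptoKey;
declare const raw: {
  dek: string;
  dekVersion: Date;
  name: string;
  nameIv: string;
  lastModifiedAt: string;
  lastModifiedAtIv: string;
};

// Unwraps the file's DEK with the master key, then decrypts the name and timestamps.
const { dataKey, name, lastModifiedAt } = await decryptFileMetadata(raw, masterKey);
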
@@ -1,6 +1,7 @@
 import * as IndexedDB from "$lib/indexedDB";
 import { trpc, isTRPCClientError } from "$trpc/client";
-import { FilesystemCache, decryptDirectoryMetadata, decryptFileMetadata } from "./internal.svelte";
+import { decryptDirectoryMetadata, decryptFileMetadata } from "./common";
+import { FilesystemCache, type FilesystemCacheOptions } from "./FilesystemCache.svelte";
 import type { DirectoryInfo, MaybeDirectoryInfo } from "./types";

 const cache = new FilesystemCache<DirectoryId, MaybeDirectoryInfo>({
@@ -97,6 +98,12 @@ const storeToIndexedDB = (info: DirectoryInfo) => {
   return { ...info, exists: true as const };
 };

-export const getDirectoryInfo = (id: DirectoryId, masterKey: CryptoKey) => {
-  return cache.get(id, masterKey);
+export const getDirectoryInfo = (
+  id: DirectoryId,
+  masterKey: CryptoKey,
+  options?: {
+    fetchFromServer?: FilesystemCacheOptions<DirectoryId, MaybeDirectoryInfo>["fetchFromServer"];
+  },
+) => {
+  return cache.get(id, masterKey, options);
 };
@@ -1,6 +1,7 @@
 import * as IndexedDB from "$lib/indexedDB";
 import { trpc, isTRPCClientError } from "$trpc/client";
-import { FilesystemCache, decryptFileMetadata, decryptCategoryMetadata } from "./internal.svelte";
+import { decryptFileMetadata, decryptCategoryMetadata } from "./common";
+import { FilesystemCache, type FilesystemCacheOptions } from "./FilesystemCache.svelte";
 import type { FileInfo, MaybeFileInfo } from "./types";

 const cache = new FilesystemCache<number, MaybeFileInfo>({
@@ -168,8 +169,12 @@ const bulkStoreToIndexedDB = (infos: FileInfo[]) => {
   return infos.map((info) => [info.id, { ...info, exists: true }] as const);
 };

-export const getFileInfo = (id: number, masterKey: CryptoKey) => {
-  return cache.get(id, masterKey);
+export const getFileInfo = (
+  id: number,
+  masterKey: CryptoKey,
+  options?: { fetchFromServer?: FilesystemCacheOptions<number, MaybeFileInfo>["fetchFromServer"] },
+) => {
+  return cache.get(id, masterKey, options);
 };

 export const bulkGetFileInfo = (ids: number[], masterKey: CryptoKey) => {
@@ -1,4 +1,5 @@
 export * from "./category";
+export * from "./common";
 export * from "./directory";
 export * from "./file";
 export * from "./types";
@@ -367,14 +367,77 @@ export const getAllFileIds = async (userId: number) => {
   return files.map(({ id }) => id);
 };

-export const getLegacyFileIds = async (userId: number) => {
+export const getLegacyFiles = async (userId: number, limit: number = 100) => {
   const files = await db
     .selectFrom("file")
-    .select("id")
+    .selectAll()
     .where("user_id", "=", userId)
     .where("encrypted_content_iv", "is not", null)
+    .limit(limit)
     .execute();
-  return files.map(({ id }) => id);
+  return files.map(
+    (file) =>
+      ({
+        id: file.id,
+        parentId: file.parent_id ?? "root",
+        userId: file.user_id,
+        path: file.path,
+        mekVersion: file.master_encryption_key_version,
+        encDek: file.encrypted_data_encryption_key,
+        dekVersion: file.data_encryption_key_version,
+        hskVersion: file.hmac_secret_key_version,
+        contentHmac: file.content_hmac,
+        contentType: file.content_type,
+        encContentIv: file.encrypted_content_iv,
+        encContentHash: file.encrypted_content_hash,
+        encName: file.encrypted_name,
+        encCreatedAt: file.encrypted_created_at,
+        encLastModifiedAt: file.encrypted_last_modified_at,
+      }) satisfies File,
+  );
+};
+
+export const getFilesWithoutThumbnail = async (userId: number, limit: number = 100) => {
+  const files = await db
+    .selectFrom("file")
+    .selectAll()
+    .where("user_id", "=", userId)
+    .where((eb) =>
+      eb.or([eb("content_type", "like", "image/%"), eb("content_type", "like", "video/%")]),
+    )
+    .where((eb) =>
+      eb.not(
+        eb.exists(
+          eb
+            .selectFrom("thumbnail")
+            .select("thumbnail.id")
+            .whereRef("thumbnail.file_id", "=", "file.id")
+            .limit(1),
+        ),
+      ),
+    )
+    .limit(limit)
+    .execute();
+  return files.map(
+    (file) =>
+      ({
+        id: file.id,
+        parentId: file.parent_id ?? "root",
+        userId: file.user_id,
+        path: file.path,
+        mekVersion: file.master_encryption_key_version,
+        encDek: file.encrypted_data_encryption_key,
+        dekVersion: file.data_encryption_key_version,
+        hskVersion: file.hmac_secret_key_version,
+        contentHmac: file.content_hmac,
+        contentType: file.content_type,
+        encContentIv: file.encrypted_content_iv,
+        encContentHash: file.encrypted_content_hash,
+        encName: file.encrypted_name,
+        encCreatedAt: file.encrypted_created_at,
+        encLastModifiedAt: file.encrypted_last_modified_at,
+      }) satisfies File,
+  );
 };

 export const getAllFileIdsByContentHmac = async (
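
To see what the new anti-join in getFilesWithoutThumbnail actually sends to the database, the same builder can be compiled without executing it. A debugging sketch, not part of the commit (it assumes a Kysely db instance with the same file and thumbnail tables; the SQL in the comment is an approximation, and real quoting and placeholders depend on the dialect):

import type { Kysely } from "kysely";

declare const db: Kysely<any>;
declare const userId: number;

const compiled = db
  .selectFrom("file")
  .selectAll()
  .where("user_id", "=", userId)
  .where((eb) =>
    eb.not(
      eb.exists(
        eb
          .selectFrom("thumbnail")
          .select("thumbnail.id")
          .whereRef("thumbnail.file_id", "=", "file.id")
          .limit(1),
      ),
    ),
  )
  .limit(100)
  .compile();

// compiled.sql is roughly:
//   select * from "file" where "user_id" = $1
//     and not exists (select "thumbnail"."id" from "thumbnail"
//                     where "thumbnail"."file_id" = "file"."id" limit $2)
//   limit $3
console.log(compiled.sql, compiled.parameters);
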
@@ -83,27 +83,3 @@ export const getFileThumbnail = async (userId: number, fileId: number) => {
       } satisfies FileThumbnail)
     : null;
 };
-
-export const getMissingFileThumbnails = async (userId: number, limit: number = 100) => {
-  const files = await db
-    .selectFrom("file")
-    .select("id")
-    .where("user_id", "=", userId)
-    .where((eb) =>
-      eb.or([eb("content_type", "like", "image/%"), eb("content_type", "like", "video/%")]),
-    )
-    .where((eb) =>
-      eb.not(
-        eb.exists(
-          eb
-            .selectFrom("thumbnail")
-            .select("thumbnail.id")
-            .whereRef("thumbnail.file_id", "=", "file.id")
-            .limit(1),
-        ),
-      ),
-    )
-    .limit(limit)
-    .execute();
-  return files.map(({ id }) => id);
-};
@@ -90,4 +90,42 @@ export class HybridPromise<T> implements PromiseLike<T> {
       return HybridPromise.reject(e);
     }
   }
+
+  static all<T extends readonly unknown[] | []>(
+    maybePromises: T,
+  ): HybridPromise<{ -readonly [P in keyof T]: HybridAwaited<T[P]> }> {
+    const length = maybePromises.length;
+    if (length === 0) {
+      return HybridPromise.resolve([] as any);
+    }
+
+    const hps = Array.from(maybePromises).map((p) => HybridPromise.resolve(p));
+    if (hps.some((hp) => !hp.isSync())) {
+      return new HybridPromise({
+        mode: "async",
+        promise: Promise.all(hps.map((hp) => hp.toPromise())) as any,
+      });
+    }
+
+    try {
+      return HybridPromise.resolve(
+        Array.from(
+          hps.map((hp) => {
+            if (hp.state.mode === "sync") {
+              if (hp.state.status === "fulfilled") {
+                return hp.state.value;
+              } else {
+                throw hp.state.reason;
+              }
+            }
+          }),
+        ) as any,
+      );
+    } catch (e) {
+      return HybridPromise.reject(e);
+    }
+  }
 }
+
+export type HybridAwaited<T> =
+  T extends HybridPromise<infer U> ? U : T extends Promise<infer U> ? U : T;
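
The reason for adding a static all() rather than using Promise.all is the synchronous fast path: when every input is already settled synchronously (for example, every cache entry was seeded), the combined result is available in the same tick. A small usage sketch, illustrative only (it assumes HybridPromise.resolve wraps plain values synchronously, which is what the implementation above relies on):

import { HybridPromise } from "$lib/utils";

// Every input settles synchronously, so the combined HybridPromise is synchronous too.
const cached = HybridPromise.all([HybridPromise.resolve(1), HybridPromise.resolve("a")]);

// HybridPromise implements PromiseLike, so awaiting works in either mode.
const [n, s] = await cached;

// A single genuinely asynchronous input makes all() fall back to Promise.all internally.
const mixed = HybridPromise.all([HybridPromise.resolve(n), Promise.resolve(s)]);
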
@@ -58,15 +58,25 @@
       filters.includeImages || filters.includeVideos || filters.includeDirectories;

     const directories =
-      !hasTypeFilter || filters.includeDirectories ? serverResult.directories : [];
+      !hasTypeFilter || filters.includeDirectories
+        ? serverResult.directories.map((directory) => ({
+            type: "directory" as const,
+            ...directory,
+          }))
+        : [];
     const files =
       !hasTypeFilter || filters.includeImages || filters.includeVideos
-        ? serverResult.files.filter(
+        ? serverResult.files
+            .filter(
               ({ contentType }) =>
                 !hasTypeFilter ||
                 (filters.includeImages && contentType.startsWith("image/")) ||
                 (filters.includeVideos && contentType.startsWith("video/")),
             )
+            .map((file) => ({
+              type: "file" as const,
+              ...file,
+            }))
         : [];

     return sortEntries(
@@ -1,8 +1,13 @@
-import type { DataKey, LocalCategoryInfo } from "$lib/modules/filesystem";
 import {
   decryptDirectoryMetadata,
   decryptFileMetadata,
-} from "$lib/modules/filesystem/internal.svelte";
+  getDirectoryInfo,
+  getFileInfo,
+  type LocalDirectoryInfo,
+  type FileInfo,
+  type LocalCategoryInfo,
+} from "$lib/modules/filesystem";
+import { HybridPromise } from "$lib/utils";
 import { trpc } from "$trpc/client";

 export interface SearchFilter {
@@ -10,28 +15,9 @@ export interface SearchFilter {
   categories: { info: LocalCategoryInfo; type: "include" | "exclude" }[];
 }

-interface SearchedDirectory {
-  type: "directory";
-  id: number;
-  parentId: DirectoryId;
-  dataKey?: DataKey;
-  name: string;
-}
-
-interface SearchedFile {
-  type: "file";
-  id: number;
-  parentId: DirectoryId;
-  dataKey?: DataKey;
-  contentType: string;
-  name: string;
-  createdAt?: Date;
-  lastModifiedAt: Date;
-}
-
 export interface SearchResult {
-  directories: SearchedDirectory[];
-  files: SearchedFile[];
+  directories: LocalDirectoryInfo[];
+  files: FileInfo[];
 }

 export const requestSearch = async (filter: SearchFilter, masterKey: CryptoKey) => {
@@ -45,51 +31,47 @@ export const requestSearch = async (filter: SearchFilter, masterKey: CryptoKey)
       .map(({ info }) => info.id),
   });

-  // TODO: FIXME
-  const [directories, files] = await Promise.all([
-    Promise.all(
-      directoriesRaw.map(async (dir) => {
-        const metadata = await decryptDirectoryMetadata(
-          { dek: dir.dek, dekVersion: dir.dekVersion, name: dir.name, nameIv: dir.nameIv },
-          masterKey,
-        );
-        return {
-          type: "directory" as const,
-          id: dir.id,
-          parentId: dir.parent,
-          dataKey: metadata.dataKey,
-          name: metadata.name,
-        };
-      }),
-    ),
-    Promise.all(
-      filesRaw.map(async (file) => {
-        const metadata = await decryptFileMetadata(
-          {
-            dek: file.dek,
-            dekVersion: file.dekVersion,
-            name: file.name,
-            nameIv: file.nameIv,
-            createdAt: file.createdAt,
-            createdAtIv: file.createdAtIv,
-            lastModifiedAt: file.lastModifiedAt,
-            lastModifiedAtIv: file.lastModifiedAtIv,
-          },
-          masterKey,
-        );
-        return {
-          type: "file" as const,
-          id: file.id,
-          parentId: file.parent,
-          dataKey: metadata.dataKey,
-          contentType: file.contentType,
-          name: metadata.name,
-          createdAt: metadata.createdAt,
-          lastModifiedAt: metadata.lastModifiedAt,
-        };
-      }),
-    ),
+  const [directories, files] = await HybridPromise.all([
+    HybridPromise.all(
+      directoriesRaw.map((directory) =>
+        HybridPromise.resolve(
+          getDirectoryInfo(directory.id, masterKey, {
+            async fetchFromServer(id, cachedInfo, masterKey) {
+              const metadata = await decryptDirectoryMetadata(directory, masterKey);
+              return {
+                subDirectories: [],
+                files: [],
+                ...cachedInfo,
+                id: id as number,
+                exists: true,
+                parentId: directory.parent,
+                ...metadata,
+              };
+            },
+          }),
+        ),
+      ),
+    ),
+    HybridPromise.all(
+      filesRaw.map((file) =>
+        HybridPromise.resolve(
+          getFileInfo(file.id, masterKey, {
+            async fetchFromServer(id, cachedInfo, masterKey) {
+              const metadata = await decryptFileMetadata(file, masterKey);
+              return {
+                categories: [],
+                ...cachedInfo,
+                id: id as number,
+                exists: true,
+                parentId: file.parent,
+                contentType: file.contentType,
+                ...metadata,
+              };
+            },
+          }),
+        ),
+      ),
+    ),
   ]);
-
-  return { directories, files } satisfies SearchResult;
+  return { directories, files } as SearchResult;
 };
@@ -3,11 +3,16 @@
   import { goto } from "$app/navigation";
   import { BottomDiv, Button, FullscreenDiv } from "$lib/components/atoms";
   import { TopBar } from "$lib/components/molecules";
-  import { bulkGetFileInfo, type MaybeFileInfo } from "$lib/modules/filesystem";
+  import type { MaybeFileInfo } from "$lib/modules/filesystem";
   import { masterKeyStore } from "$lib/stores";
   import { sortEntries } from "$lib/utils";
   import File from "./File.svelte";
-  import { getMigrationState, clearMigrationStates, requestFileMigration } from "./service.svelte";
+  import {
+    getMigrationState,
+    clearMigrationStates,
+    requestLegacyFiles,
+    requestFileMigration,
+  } from "./service.svelte";

   let { data } = $props();

@@ -30,9 +35,7 @@
   };

   onMount(async () => {
-    fileInfos = sortEntries(
-      Array.from((await bulkGetFileInfo(data.files, $masterKeyStore?.get(1)?.key!)).values()),
-    );
+    fileInfos = sortEntries(await requestLegacyFiles(data.files, $masterKeyStore?.get(1)?.key!));
   });

   $effect(() => clearMigrationStates);
@@ -1,11 +1,17 @@
 import { limitFunction } from "p-limit";
 import { SvelteMap } from "svelte/reactivity";
 import { CHUNK_SIZE } from "$lib/constants";
-import type { FileInfo } from "$lib/modules/filesystem";
+import {
+  decryptFileMetadata,
+  getFileInfo,
+  type FileInfo,
+  type MaybeFileInfo,
+} from "$lib/modules/filesystem";
 import { uploadBlob } from "$lib/modules/upload";
 import { requestFileDownload } from "$lib/services/file";
-import { Scheduler } from "$lib/utils";
+import { HybridPromise, Scheduler } from "$lib/utils";
 import { trpc } from "$trpc/client";
+import type { RouterOutputs } from "$trpc/router.server";

 export type MigrationStatus =
   | "queued"
@@ -24,6 +30,35 @@ export interface MigrationState {
 const scheduler = new Scheduler();
 const states = new SvelteMap<number, MigrationState>();

+export const requestLegacyFiles = async (
+  filesRaw: RouterOutputs["file"]["listLegacy"],
+  masterKey: CryptoKey,
+) => {
+  const files = await HybridPromise.all(
+    filesRaw.map((file) =>
+      HybridPromise.resolve(
+        getFileInfo(file.id, masterKey, {
+          async fetchFromServer(id, cachedInfo, masterKey) {
+            const metadata = await decryptFileMetadata(file, masterKey);
+            return {
+              categories: [],
+              ...cachedInfo,
+              id: id as number,
+              exists: true,
+              isLegacy: file.isLegacy,
+              parentId: file.parent,
+              contentType: file.contentType,
+              ...metadata,
+            };
+          },
+        }),
+      ),
+    ),
+  );
+
+  return files as MaybeFileInfo[];
+};
+
 const createState = (status: MigrationStatus): MigrationState => {
   const state = $state({ status });
   return state;
@@ -4,13 +4,14 @@
   import { BottomDiv, Button, FullscreenDiv } from "$lib/components/atoms";
   import { IconEntryButton, TopBar } from "$lib/components/molecules";
   import { deleteAllFileThumbnailCaches } from "$lib/modules/file";
-  import { bulkGetFileInfo, type MaybeFileInfo } from "$lib/modules/filesystem";
+  import type { MaybeFileInfo } from "$lib/modules/filesystem";
   import { masterKeyStore } from "$lib/stores";
   import { sortEntries } from "$lib/utils";
   import File from "./File.svelte";
   import {
     getThumbnailGenerationStatus,
     clearThumbnailGenerationStatuses,
+    requestMissingThumbnailFiles,
     requestThumbnailGeneration,
     type GenerationStatus,
   } from "./service";
@@ -42,7 +43,7 @@

   onMount(async () => {
     fileInfos = sortEntries(
-      Array.from((await bulkGetFileInfo(data.files, $masterKeyStore?.get(1)?.key!)).values()),
+      await requestMissingThumbnailFiles(data.files, $masterKeyStore?.get(1)?.key!),
     );
   });

@@ -1,10 +1,16 @@
 import { limitFunction } from "p-limit";
 import { SvelteMap } from "svelte/reactivity";
 import { storeFileThumbnailCache } from "$lib/modules/file";
-import type { FileInfo } from "$lib/modules/filesystem";
+import {
+  decryptFileMetadata,
+  getFileInfo,
+  type FileInfo,
+  type MaybeFileInfo,
+} from "$lib/modules/filesystem";
 import { generateThumbnail } from "$lib/modules/thumbnail";
 import { requestFileDownload, requestFileThumbnailUpload } from "$lib/services/file";
-import { Scheduler } from "$lib/utils";
+import { HybridPromise, Scheduler } from "$lib/utils";
+import type { RouterOutputs } from "$trpc/router.server";

 export type GenerationStatus =
   | "queued"
@@ -29,6 +35,35 @@ export const clearThumbnailGenerationStatuses = () => {
   }
 };

+export const requestMissingThumbnailFiles = async (
+  filesRaw: RouterOutputs["file"]["listWithoutThumbnail"],
+  masterKey: CryptoKey,
+) => {
+  const files = await HybridPromise.all(
+    filesRaw.map((file) =>
+      HybridPromise.resolve(
+        getFileInfo(file.id, masterKey, {
+          async fetchFromServer(id, cachedInfo, masterKey) {
+            const metadata = await decryptFileMetadata(file, masterKey);
+            return {
+              categories: [],
+              ...cachedInfo,
+              id: id as number,
+              exists: true,
+              isLegacy: file.isLegacy,
+              parentId: file.parent,
+              contentType: file.contentType,
+              ...metadata,
+            };
+          },
+        }),
+      ),
+    ),
+  );
+
+  return files as MaybeFileInfo[];
+};
+
 const requestThumbnailUpload = limitFunction(
   async (fileInfo: FileInfo, fileBuffer: ArrayBuffer) => {
     statuses.set(fileInfo.id, "generating");
@@ -97,11 +97,41 @@ const fileRouter = router({
   }),

   listWithoutThumbnail: roleProcedure["activeClient"].query(async ({ ctx }) => {
-    return await MediaRepo.getMissingFileThumbnails(ctx.session.userId);
+    const files = await FileRepo.getFilesWithoutThumbnail(ctx.session.userId);
+    return files.map((file) => ({
+      id: file.id,
+      isLegacy: !!file.encContentIv,
+      parent: file.parentId,
+      mekVersion: file.mekVersion,
+      dek: file.encDek,
+      dekVersion: file.dekVersion,
+      contentType: file.contentType,
+      name: file.encName.ciphertext,
+      nameIv: file.encName.iv,
+      createdAt: file.encCreatedAt?.ciphertext,
+      createdAtIv: file.encCreatedAt?.iv,
+      lastModifiedAt: file.encLastModifiedAt.ciphertext,
+      lastModifiedAtIv: file.encLastModifiedAt.iv,
+    }));
   }),

   listLegacy: roleProcedure["activeClient"].query(async ({ ctx }) => {
-    return await FileRepo.getLegacyFileIds(ctx.session.userId);
+    const files = await FileRepo.getLegacyFiles(ctx.session.userId);
+    return files.map((file) => ({
+      id: file.id,
+      isLegacy: true,
+      parent: file.parentId,
+      mekVersion: file.mekVersion,
+      dek: file.encDek,
+      dekVersion: file.dekVersion,
+      contentType: file.contentType,
+      name: file.encName.ciphertext,
+      nameIv: file.encName.iv,
+      createdAt: file.encCreatedAt?.ciphertext,
+      createdAtIv: file.encCreatedAt?.iv,
+      lastModifiedAt: file.encLastModifiedAt.ciphertext,
+      lastModifiedAtIv: file.encLastModifiedAt.iv,
+    }));
   }),

   rename: roleProcedure["activeClient"]
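
Both list endpoints previously returned bare file IDs; they now return the encrypted metadata the client uses to seed its cache directly. Roughly, each element has the shape below (a sketch inferred from the mapping above; field names come from the code, the scalar types are assumptions):

// Illustrative shape of one listLegacy / listWithoutThumbnail element.
type ListedFile = {
  id: number;
  isLegacy: boolean;
  parent: number | "root"; // assumed: parentId falls back to "root"
  mekVersion: number; // assumed numeric key version
  dek: string; // wrapped data-encryption key
  dekVersion: Date;
  contentType: string;
  name: string; // ciphertext
  nameIv: string;
  createdAt?: string; // ciphertext, optional
  createdAtIv?: string;
  lastModifiedAt: string; // ciphertext
  lastModifiedAtIv: string;
};
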