Mirror of https://github.com/kmc7468/arkvault.git
Synced 2026-02-04 16:16:55 +00:00
Optimize network calls on the directory page
@@ -1,6 +1,6 @@
<script lang="ts">
import { createWindowVirtualizer } from "@tanstack/svelte-virtual";
import { untrack, type Snippet } from "svelte";
import type { Snippet } from "svelte";
import type { ClassValue } from "svelte/elements";

interface Props {
@@ -15,7 +15,7 @@

const virtualizer = $derived(
createWindowVirtualizer({
count: untrack(() => count),
count,
estimateSize: itemHeight,
}),
);
@@ -23,8 +23,6 @@
const measureItem = (node: HTMLElement) => {
$effect(() => $virtualizer.measureElement(node));
};

$effect(() => $virtualizer.setOptions({ count }));
</script>

<div class={["relative", className]}>
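Note (not part of the commit): reading the hunks above, the new code drops untrack and the manual $effect(() => $virtualizer.setOptions({ count })) call, passing count straight into the $derived expression so the virtualizer options follow count reactively. A minimal sketch of the resulting pattern, with the props and their types assumed rather than taken from the real component:

<script lang="ts">
  import { createWindowVirtualizer } from "@tanstack/svelte-virtual";

  // Hypothetical props; the actual component also receives children, class, etc.
  let { count, itemHeight }: { count: number; itemHeight: (index: number) => number } = $props();

  // `count` is read inside the $derived expression, so it is tracked as a dependency
  // and the virtualizer is rebuilt whenever the item count changes.
  const virtualizer = $derived(
    createWindowVirtualizer({
      count,
      estimateSize: itemHeight,
    }),
  );
</script>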
@@ -1,7 +1,5 @@
import { Dexie, type EntityTable } from "dexie";

export type DirectoryId = "root" | number;

interface DirectoryInfo {
id: number;
parentId: DirectoryId;
@@ -18,8 +16,6 @@ interface FileInfo {
categoryIds: number[];
}

export type CategoryId = "root" | number;

interface CategoryInfo {
id: number;
parentId: CategoryId;
@@ -1,3 +1,3 @@
export * from "./cache";
export * from "./download";
export * from "./upload";
export * from "./upload.svelte";
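Note (not part of the commit): renaming the re-export from ./upload to ./upload.svelte reflects the module being moved to upload.svelte.ts; the .svelte.ts suffix is what allows it to use Svelte 5 runes, such as the module-level $state array seen in the next file.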
@@ -1,7 +1,6 @@
import axios from "axios";
import ExifReader from "exifreader";
import { limitFunction } from "p-limit";
import { writable, type Writable } from "svelte/store";
import {
encodeToBase64,
generateDataKey,
@@ -17,14 +16,45 @@ import type {
FileUploadRequest,
FileUploadResponse,
} from "$lib/server/schemas";
import {
fileUploadStatusStore,
type MasterKey,
type HmacSecret,
type FileUploadStatus,
} from "$lib/stores";
import type { MasterKey, HmacSecret } from "$lib/stores";
import { trpc } from "$trpc/client";

export interface FileUploadState {
name: string;
parentId: DirectoryId;
status:
| "encryption-pending"
| "encrypting"
| "upload-pending"
| "uploading"
| "uploaded"
| "canceled"
| "error";
progress?: number;
rate?: number;
estimated?: number;
}

export type LiveFileUploadState = FileUploadState & {
status: "encryption-pending" | "encrypting" | "upload-pending" | "uploading";
};

let uploadingFiles: FileUploadState[] = $state([]);

const isFileUploading = (status: FileUploadState["status"]) =>
["encryption-pending", "encrypting", "upload-pending", "uploading"].includes(status);

export const getUploadingFiles = (parentId?: DirectoryId) => {
return uploadingFiles.filter(
(file): file is LiveFileUploadState =>
(parentId === undefined || file.parentId === parentId) && isFileUploading(file.status),
);
};

export const clearUploadedFiles = () => {
uploadingFiles = uploadingFiles.filter((file) => isFileUploading(file.status));
};

const requestDuplicateFileScan = limitFunction(
async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise<boolean>) => {
const fileBuffer = await file.arrayBuffer();
@@ -76,16 +106,8 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
};

const encryptFile = limitFunction(
async (
status: Writable<FileUploadStatus>,
file: File,
fileBuffer: ArrayBuffer,
masterKey: MasterKey,
) => {
status.update((value) => {
value.status = "encrypting";
return value;
});
async (state: FileUploadState, file: File, fileBuffer: ArrayBuffer, masterKey: MasterKey) => {
state.status = "encrypting";

const fileType = getFileType(file);

@@ -109,10 +131,7 @@ const encryptFile = limitFunction(
const thumbnailBuffer = await thumbnail?.arrayBuffer();
const thumbnailEncrypted = thumbnailBuffer && (await encryptData(thumbnailBuffer, dataKey));

status.update((value) => {
value.status = "upload-pending";
return value;
});
state.status = "upload-pending";

return {
dataKeyWrapped,
@@ -130,20 +149,14 @@ const encryptFile = limitFunction(
);

const requestFileUpload = limitFunction(
async (status: Writable<FileUploadStatus>, form: FormData, thumbnailForm: FormData | null) => {
status.update((value) => {
value.status = "uploading";
return value;
});
async (state: FileUploadState, form: FormData, thumbnailForm: FormData | null) => {
state.status = "uploading";

const res = await axios.post("/api/file/upload", form, {
onUploadProgress: ({ progress, rate, estimated }) => {
status.update((value) => {
value.progress = progress;
value.rate = rate;
value.estimated = estimated;
return value;
});
state.progress = progress;
state.rate = rate;
state.estimated = estimated;
},
});
const { file }: FileUploadResponse = res.data;
@@ -157,10 +170,7 @@ const requestFileUpload = limitFunction(
}
}

status.update((value) => {
value.status = "uploaded";
return value;
});
state.status = "uploaded";

return { fileId: file };
},
@@ -176,15 +186,12 @@ export const uploadFile = async (
): Promise<
{ fileId: number; fileBuffer: ArrayBuffer; thumbnailBuffer?: ArrayBuffer } | undefined
> => {
const status = writable<FileUploadStatus>({
uploadingFiles.push({
name: file.name,
parentId,
status: "encryption-pending",
});
fileUploadStatusStore.update((value) => {
value.push(status);
return value;
});
const state = uploadingFiles.at(-1)!;

try {
const { fileBuffer, fileSigned } = await requestDuplicateFileScan(
@@ -193,14 +200,8 @@ export const uploadFile = async (
onDuplicate,
);
if (!fileBuffer || !fileSigned) {
status.update((value) => {
value.status = "canceled";
return value;
});
fileUploadStatusStore.update((value) => {
value = value.filter((v) => v !== status);
return value;
});
state.status = "canceled";
uploadingFiles = uploadingFiles.filter((file) => file !== state);
return undefined;
}

@@ -214,7 +215,7 @@ export const uploadFile = async (
createdAtEncrypted,
lastModifiedAtEncrypted,
thumbnail,
} = await encryptFile(status, file, fileBuffer, masterKey);
} = await encryptFile(state, file, fileBuffer, masterKey);

const form = new FormData();
form.set(
@@ -252,13 +253,10 @@ export const uploadFile = async (
thumbnailForm.set("content", new Blob([thumbnail.ciphertext]));
}

const { fileId } = await requestFileUpload(status, form, thumbnailForm);
const { fileId } = await requestFileUpload(state, form, thumbnailForm);
return { fileId, fileBuffer, thumbnailBuffer: thumbnail?.plaintext };
} catch (e) {
status.update((value) => {
value.status = "error";
return value;
});
state.status = "error";
throw e;
}
};
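Note (not part of the commit): since uploadingFiles is now a module-level $state array of plain FileUploadState objects, consumers no longer subscribe to a store of writable stores; they call getUploadingFiles(...) in a reactive context, and direct mutations such as state.progress = progress are picked up automatically. A rough sketch of a consuming component, with the import path and props assumed:

<script lang="ts">
  // Hypothetical import path for the upload.svelte.ts module shown above.
  import { getUploadingFiles } from "$lib/modules/file/upload.svelte";

  let { directoryId }: { directoryId: DirectoryId } = $props();

  // Re-evaluated whenever uploads for this directory are added, removed, or change status.
  const uploads = $derived(getUploadingFiles(directoryId));
</script>

{#each uploads as upload (upload)}
  <p>{upload.name}: {upload.status} ({Math.round((upload.progress ?? 0) * 100)}%)</p>
{/each}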
@@ -1,10 +1,5 @@
import { get, writable, type Writable } from "svelte/store";
import {
getDirectoryInfos as getDirectoryInfosFromIndexedDB,
getDirectoryInfo as getDirectoryInfoFromIndexedDB,
storeDirectoryInfo,
deleteDirectoryInfo,
getFileInfos as getFileInfosFromIndexedDB,
getFileInfo as getFileInfoFromIndexedDB,
storeFileInfo,
deleteFileInfo,
@@ -13,32 +8,10 @@ import {
storeCategoryInfo,
updateCategoryInfo as updateCategoryInfoInIndexedDB,
deleteCategoryInfo,
type DirectoryId,
type CategoryId,
} from "$lib/indexedDB";
import { unwrapDataKey, decryptString } from "$lib/modules/crypto";
import { trpc, isTRPCClientError } from "$trpc/client";

export type DirectoryInfo =
| {
id: "root";
parentId?: undefined;
dataKey?: undefined;
dataKeyVersion?: undefined;
name?: undefined;
subDirectoryIds: number[];
fileIds: number[];
}
| {
id: number;
parentId: DirectoryId;
dataKey?: CryptoKey;
dataKeyVersion?: Date;
name: string;
subDirectoryIds: number[];
fileIds: number[];
};

export interface FileInfo {
id: number;
parentId: DirectoryId;
@@ -72,98 +45,9 @@ export type CategoryInfo =
isFileRecursive: boolean;
};

const directoryInfoStore = new Map<DirectoryId, Writable<DirectoryInfo | null>>();
const fileInfoStore = new Map<number, Writable<FileInfo | null>>();
const categoryInfoStore = new Map<CategoryId, Writable<CategoryInfo | null>>();
const fetchDirectoryInfoFromIndexedDB = async (
id: DirectoryId,
info: Writable<DirectoryInfo | null>,
) => {
if (get(info)) return;

const [directory, subDirectories, files] = await Promise.all([
id !== "root" ? getDirectoryInfoFromIndexedDB(id) : undefined,
getDirectoryInfosFromIndexedDB(id),
getFileInfosFromIndexedDB(id),
]);
const subDirectoryIds = subDirectories.map(({ id }) => id);
const fileIds = files.map(({ id }) => id);

if (id === "root") {
info.set({ id, subDirectoryIds, fileIds });
} else {
if (!directory) return;
info.set({
id,
parentId: directory.parentId,
name: directory.name,
subDirectoryIds,
fileIds,
});
}
};

const fetchDirectoryInfoFromServer = async (
id: DirectoryId,
info: Writable<DirectoryInfo | null>,
masterKey: CryptoKey,
) => {
let data;
try {
data = await trpc().directory.get.query({ id });
} catch (e) {
if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") {
info.set(null);
await deleteDirectoryInfo(id as number);
return;
}
throw new Error("Failed to fetch directory information");
}

const { metadata, subDirectories: subDirectoryIds, files: fileIds } = data;

if (id === "root") {
info.set({ id, subDirectoryIds, fileIds });
} else {
const { dataKey } = await unwrapDataKey(metadata!.dek, masterKey);
const name = await decryptString(metadata!.name, metadata!.nameIv, dataKey);

info.set({
id,
parentId: metadata!.parent,
dataKey,
dataKeyVersion: new Date(metadata!.dekVersion),
name,
subDirectoryIds,
fileIds,
});
await storeDirectoryInfo({ id, parentId: metadata!.parent, name });
}
};

const fetchDirectoryInfo = async (
id: DirectoryId,
info: Writable<DirectoryInfo | null>,
masterKey: CryptoKey,
) => {
await fetchDirectoryInfoFromIndexedDB(id, info);
await fetchDirectoryInfoFromServer(id, info, masterKey);
};

export const getDirectoryInfo = (id: DirectoryId, masterKey: CryptoKey) => {
// TODO: MEK rotation

let info = directoryInfoStore.get(id);
if (!info) {
info = writable(null);
directoryInfoStore.set(id, info);
}

fetchDirectoryInfo(id, info, masterKey); // Intended
return info;
};

const fetchFileInfoFromIndexedDB = async (id: number, info: Writable<FileInfo | null>) => {
if (get(info)) return;
src/lib/modules/filesystem2.svelte.ts (new file, 191 lines)
@@ -0,0 +1,191 @@
import {
  getDirectoryInfos as getDirectoryInfosFromIndexedDB,
  getDirectoryInfo as getDirectoryInfoFromIndexedDB,
  storeDirectoryInfo,
  deleteDirectoryInfo,
  getFileInfos as getFileInfosFromIndexedDB,
  getFileInfo as getFileInfoFromIndexedDB,
  storeFileInfo,
  deleteFileInfo,
  getCategoryInfos as getCategoryInfosFromIndexedDB,
  getCategoryInfo as getCategoryInfoFromIndexedDB,
  storeCategoryInfo,
  updateCategoryInfo as updateCategoryInfoInIndexedDB,
  deleteCategoryInfo,
} from "$lib/indexedDB";
import { unwrapDataKey, decryptString } from "$lib/modules/crypto";
import { monotonicResolve } from "$lib/utils";
import { trpc, isTRPCClientError } from "$trpc/client";

type DataKey = { key: CryptoKey; version: Date };

interface LocalDirectoryInfo {
  id: number;
  parentId: DirectoryId;
  dataKey?: DataKey;
  name: string;
  subDirectories: SubDirectoryInfo[];
  files: SummarizedFileInfo[];
}

interface RootDirectoryInfo {
  id: "root";
  parentId?: undefined;
  dataKey?: undefined;
  dataKeyVersion?: undefined;
  name?: undefined;
  subDirectories: SubDirectoryInfo[];
  files: SummarizedFileInfo[];
}

export type DirectoryInfo = LocalDirectoryInfo | RootDirectoryInfo;
export type SubDirectoryInfo = Omit<LocalDirectoryInfo, "parentId" | "subDirectories" | "files">;

interface FileInfo {
  id: number;
  parentId: DirectoryId;
  dataKey?: DataKey;
  contentType: string;
  contentIv: string | undefined;
  name: string;
  createdAt?: Date;
  lastModifiedAt: Date;
  categories: { id: number; name: string }[];
}

export type SummarizedFileInfo = Omit<FileInfo, "parentId" | "contentIv" | "categories">;

interface LocalCategoryInfo {
  id: number;
  dataKey: DataKey | undefined;
  name: string;
  subCategories: Omit<LocalCategoryInfo, "subCategories" | "files" | "isFileRecursive">[];
  files: { id: number; name: string; isRecursive: boolean }[];
  isFileRecursive: boolean;
}

interface RootCategoryInfo {
  id: "root";
  dataKey?: undefined;
  name?: undefined;
  subCategories: Omit<LocalCategoryInfo, "subCategories" | "files" | "isFileRecursive">[];
  files?: undefined;
}

export type CategoryInfo = LocalCategoryInfo | RootCategoryInfo;

const directoryInfoCache = new Map<DirectoryId, DirectoryInfo | Promise<DirectoryInfo>>();

export const getDirectoryInfo = async (id: DirectoryId, masterKey: CryptoKey) => {
  const info = directoryInfoCache.get(id);
  if (info instanceof Promise) {
    return info;
  }

  const { promise, resolve } = Promise.withResolvers<DirectoryInfo>();
  if (!info) {
    directoryInfoCache.set(id, promise);
  }

  monotonicResolve(
    [!info && fetchDirectoryInfoFromIndexedDB(id), fetchDirectoryInfoFromServer(id, masterKey)],
    (directoryInfo) => {
      let info = directoryInfoCache.get(id);
      if (info instanceof Promise) {
        const state = $state(directoryInfo);
        directoryInfoCache.set(id, state);
        resolve(state);
      } else {
        Object.assign(info!, directoryInfo);
        resolve(info!);
      }
    },
  );
  return info ?? promise;
};

const fetchDirectoryInfoFromIndexedDB = async (
  id: DirectoryId,
): Promise<DirectoryInfo | undefined> => {
  const [directory, subDirectories, files] = await Promise.all([
    id !== "root" ? getDirectoryInfoFromIndexedDB(id) : undefined,
    getDirectoryInfosFromIndexedDB(id),
    getFileInfosFromIndexedDB(id),
  ]);

  if (id === "root") {
    return { id, subDirectories, files };
  } else if (directory) {
    return { id, parentId: directory.parentId, name: directory.name, subDirectories, files };
  }
};

const fetchDirectoryInfoFromServer = async (
  id: DirectoryId,
  masterKey: CryptoKey,
): Promise<DirectoryInfo | undefined> => {
  try {
    const {
      metadata,
      subDirectories: subDirectoriesRaw,
      files: filesRaw,
    } = await trpc().directory.get.query({ id });
    const [subDirectories, files] = await Promise.all([
      Promise.all(
        subDirectoriesRaw.map(async (directory) => {
          const { dataKey } = await unwrapDataKey(directory.dek, masterKey);
          const name = await decryptString(directory.name, directory.nameIv, dataKey);
          return {
            id: directory.id,
            dataKey: { key: dataKey, version: directory.dekVersion },
            name,
          };
        }),
      ),
      Promise.all(
        filesRaw.map(async (file) => {
          const { dataKey } = await unwrapDataKey(file.dek, masterKey);
          const [name, createdAt, lastModifiedAt] = await Promise.all([
            decryptString(file.name, file.nameIv, dataKey),
            file.createdAt ? decryptDate(file.createdAt, file.createdAtIv!, dataKey) : undefined,
            decryptDate(file.lastModifiedAt, file.lastModifiedAtIv, dataKey),
          ]);
          return {
            id: file.id,
            dataKey: { key: dataKey, version: file.dekVersion },
            contentType: file.contentType,
            name,
            createdAt,
            lastModifiedAt,
          };
        }),
      ),
    ]);

    if (id === "root") {
      return { id, subDirectories, files };
    } else {
      const { dataKey } = await unwrapDataKey(metadata!.dek, masterKey);
      const name = await decryptString(metadata!.name, metadata!.nameIv, dataKey);
      return {
        id,
        parentId: metadata!.parent,
        dataKey: { key: dataKey, version: metadata!.dekVersion },
        name,
        subDirectories,
        files,
      };
    }
  } catch (e) {
    if (isTRPCClientError(e) && e.data?.code === "NOT_FOUND") {
      directoryInfoCache.delete(id);
      await deleteDirectoryInfo(id as number);
      return;
    }
    throw new Error("Failed to fetch directory information");
  }
};

const decryptDate = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
  return new Date(parseInt(await decryptString(ciphertext, iv, dataKey), 10));
};
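Note (not part of the commit): this file is where the network-call optimization lands. getDirectoryInfo keeps one reactive object (or in-flight promise) per directory in directoryInfoCache, and the single directory.get query now carries each child's wrapped DEK and encrypted name, so the page can decrypt subdirectory and file names from one response instead of issuing a request per entry; the IndexedDB result can render first, and the later server response is merged into the same $state object via monotonicResolve. A rough sketch of a consuming page, with the component shape assumed:

<script lang="ts">
  import { getDirectoryInfo } from "$lib/modules/filesystem2.svelte";

  // Hypothetical props; the real page presumably derives these from the route and key store.
  let { directoryId, masterKey }: { directoryId: DirectoryId; masterKey: CryptoKey } = $props();
</script>

{#await getDirectoryInfo(directoryId, masterKey) then info}
  <!-- `info` is the cached $state object, so the later server refresh updates these lists in place. -->
  {#each info.subDirectories as directory (directory.id)}
    <p>{directory.name}</p>
  {/each}
  {#each info.files as file (file.id)}
    <p>{file.name}</p>
  {/each}
{/await}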
@@ -2,8 +2,6 @@ import { IntegrityError } from "./error";
import db from "./kysely";
import type { Ciphertext } from "./schema";

export type CategoryId = "root" | number;

interface Category {
id: number;
parentId: CategoryId;
@@ -4,8 +4,6 @@ import { IntegrityError } from "./error";
import db from "./kysely";
import type { Ciphertext } from "./schema";

export type DirectoryId = "root" | number;

interface Directory {
id: number;
parentId: DirectoryId;
@@ -1,21 +1,5 @@
import { writable, type Writable } from "svelte/store";

export interface FileUploadStatus {
name: string;
parentId: "root" | number;
status:
| "encryption-pending"
| "encrypting"
| "upload-pending"
| "uploading"
| "uploaded"
| "canceled"
| "error";
progress?: number;
rate?: number;
estimated?: number;
}

export interface FileDownloadStatus {
id: number;
status:
@@ -32,16 +16,8 @@ export interface FileDownloadStatus {
result?: ArrayBuffer;
}

export const fileUploadStatusStore = writable<Writable<FileUploadStatus>[]>([]);

export const fileDownloadStatusStore = writable<Writable<FileDownloadStatus>[]>([]);

export const isFileUploading = (
status: FileUploadStatus["status"],
): status is "encryption-pending" | "encrypting" | "upload-pending" | "uploading" => {
return ["encryption-pending", "encrypting", "upload-pending", "uploading"].includes(status);
};

export const isFileDownloading = (
status: FileDownloadStatus["status"],
): status is "download-pending" | "downloading" | "decryption-pending" | "decrypting" => {
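Note (not part of the commit): FileUploadStatus, fileUploadStatusStore, and the store-based isFileUploading guard are not lost; their rune-based replacements (FileUploadState, the uploadingFiles $state array, and a local isFileUploading helper) now live in upload.svelte.ts above, leaving this stores module with only the download-side state.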
src/lib/types/filesystem.d.ts (new file, vendored, 2 lines)
@@ -0,0 +1,2 @@
type DirectoryId = "root" | number;
type CategoryId = "root" | number;
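Note (not part of the commit): being an ambient .d.ts file, these declarations make DirectoryId and CategoryId available project-wide without imports, which is presumably why the equivalent exported type aliases are removed from $lib/indexedDB and the server-side category/directory modules in the hunks above.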
@@ -1,3 +1,4 @@
export * from "./format";
export * from "./gotoStateful";
export * from "./promise";
export * from "./sort";
src/lib/utils/promise.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
export const monotonicResolve = <T>(
  promises: (Promise<T | undefined> | false)[],
  callback: (value: T) => void,
) => {
  let latestResolvedIndex = -1;

  promises.forEach((promise, index) => {
    if (!promise) return;
    promise.then((value) => {
      if (value !== undefined && index > latestResolvedIndex) {
        latestResolvedIndex = index;
        callback(value);
      }
    });
  });
};
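Note (not part of the commit): monotonicResolve is a small stale-while-revalidate helper; sources may settle in any order, but a value is only delivered if it comes from a later entry in the list than the last one delivered, so a slow cache read can never clobber a fresher server response. A tiny usage sketch with made-up sources:

// Hypothetical sources: a fast local cache and a slower server fetch.
const fromCache: Promise<string | undefined> = Promise.resolve("cached name");
const fromServer = new Promise<string | undefined>((resolve) =>
  setTimeout(() => resolve("fresh name"), 100),
);

// Logs "cached name" then "fresh name". If the server had answered first,
// the cache result would be dropped because its index is lower.
monotonicResolve([fromCache, fromServer], (name) => console.log(name));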
@@ -32,7 +32,7 @@ const sortByDateAsc: SortFunc = ({ date: a }, { date: b }) => {

const sortByDateDesc: SortFunc = (a, b) => -sortByDateAsc(a, b);

export const sortEntries = <T extends SortEntry>(entries: T[], sortBy: SortBy) => {
export const sortEntries = <T extends SortEntry>(entries: T[], sortBy = SortBy.NAME_ASC) => {
let sortFunc: SortFunc;

switch (sortBy) {
@@ -54,4 +54,5 @@ export const sortEntries = <T extends SortEntry>(entries: T[], sortBy: SortBy) =
}

entries.sort(sortFunc);
return entries;
};
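Note (not part of the commit): with sortBy defaulting to SortBy.NAME_ASC, callers such as the directory page can now write sortEntries(entries) and get name-ascending order without passing an explicit sort mode.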