Merge branch 'dev' into add-file-category
src/lib/indexedDB/cacheIndex.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
import { Dexie, type EntityTable } from "dexie";

export interface FileCacheIndex {
  fileId: number;
  cachedAt: Date;
  lastRetrievedAt: Date;
  size: number;
}

const cacheIndex = new Dexie("cacheIndex") as Dexie & {
  fileCache: EntityTable<FileCacheIndex, "fileId">;
};

cacheIndex.version(1).stores({
  fileCache: "fileId",
});

export const getFileCacheIndex = async () => {
  return await cacheIndex.fileCache.toArray();
};

export const storeFileCacheIndex = async (fileCacheIndex: FileCacheIndex) => {
  await cacheIndex.fileCache.put(fileCacheIndex);
};

export const deleteFileCacheIndex = async (fileId: number) => {
  await cacheIndex.fileCache.delete(fileId);
};
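For orientation, a minimal sketch of exercising this index API (the file id and size are hypothetical):

import { getFileCacheIndex, storeFileCacheIndex, deleteFileCacheIndex } from "$lib/indexedDB";

// Hypothetical entry: file 42, cached just now, 1 MiB on disk.
const now = new Date();
await storeFileCacheIndex({ fileId: 42, cachedAt: now, lastRetrievedAt: now, size: 1024 * 1024 });

const entries = await getFileCacheIndex(); // FileCacheIndex[], one row per cached file
await deleteFileCacheIndex(42);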
src/lib/indexedDB/filesystem.ts (new file, 86 lines)
@@ -0,0 +1,86 @@
import { Dexie, type EntityTable } from "dexie";

export type DirectoryId = "root" | number;

interface DirectoryInfo {
  id: number;
  parentId: DirectoryId;
  name: string;
}

interface FileInfo {
  id: number;
  parentId: DirectoryId;
  name: string;
  contentType: string;
  createdAt?: Date;
  lastModifiedAt: Date;
}

const filesystem = new Dexie("filesystem") as Dexie & {
  directory: EntityTable<DirectoryInfo, "id">;
  file: EntityTable<FileInfo, "id">;
};

filesystem.version(1).stores({
  directory: "id, parentId",
  file: "id, parentId",
});

export const getDirectoryInfos = async (parentId: DirectoryId) => {
  return await filesystem.directory.where({ parentId }).toArray();
};

export const getDirectoryInfo = async (id: number) => {
  return await filesystem.directory.get(id);
};

export const storeDirectoryInfo = async (directoryInfo: DirectoryInfo) => {
  await filesystem.directory.put(directoryInfo);
};

export const deleteDirectoryInfo = async (id: number) => {
  await filesystem.directory.delete(id);
};

export const getFileInfos = async (parentId: DirectoryId) => {
  return await filesystem.file.where({ parentId }).toArray();
};

export const getFileInfo = async (id: number) => {
  return await filesystem.file.get(id);
};

export const storeFileInfo = async (fileInfo: FileInfo) => {
  await filesystem.file.put(fileInfo);
};

export const deleteFileInfo = async (id: number) => {
  await filesystem.file.delete(id);
};

export const cleanupDanglingInfos = async () => {
  const validDirectoryIds: number[] = [];
  const validFileIds: number[] = [];
  const queue: DirectoryId[] = ["root"];

  while (true) {
    const directoryId = queue.shift();
    if (!directoryId) break;

    const [subDirectories, files] = await Promise.all([
      filesystem.directory.where({ parentId: directoryId }).toArray(),
      filesystem.file.where({ parentId: directoryId }).toArray(),
    ]);
    subDirectories.forEach(({ id }) => {
      validDirectoryIds.push(id);
      queue.push(id);
    });
    files.forEach(({ id }) => validFileIds.push(id));
  }

  await Promise.all([
    filesystem.directory.where("id").noneOf(validDirectoryIds).delete(),
    filesystem.file.where("id").noneOf(validFileIds).delete(),
  ]);
};
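cleanupDanglingInfos walks breadth-first from "root" and deletes every directory or file record that is no longer reachable; a sketch of when it might be invoked (hypothetical wiring):

import { deleteDirectoryInfo, cleanupDanglingInfos } from "$lib/indexedDB";

// Hypothetical: the server reported directory 7 as deleted, so drop the local
// record, then sweep away descendants that are now unreachable from "root".
await deleteDirectoryInfo(7);
await cleanupDanglingInfos();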
src/lib/indexedDB/index.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
export * from "./cacheIndex";
export * from "./filesystem";
export * from "./keyStore";
(deleted file, 98 lines)
@@ -1,98 +0,0 @@
import { writable, type Writable } from "svelte/store";
import { callGetApi } from "$lib/hooks";
import { unwrapDataKey, decryptString } from "$lib/modules/crypto";
import type { DirectoryInfoResponse, FileInfoResponse } from "$lib/server/schemas";
import {
  directoryInfoStore,
  fileInfoStore,
  type DirectoryInfo,
  type FileInfo,
} from "$lib/stores/file";

const fetchDirectoryInfo = async (
  directoryId: "root" | number,
  masterKey: CryptoKey,
  infoStore: Writable<DirectoryInfo | null>,
) => {
  const res = await callGetApi(`/api/directory/${directoryId}`);
  if (!res.ok) throw new Error("Failed to fetch directory information");
  const { metadata, subDirectories, files }: DirectoryInfoResponse = await res.json();

  let newInfo: DirectoryInfo;
  if (directoryId === "root") {
    newInfo = {
      id: "root",
      subDirectoryIds: subDirectories,
      fileIds: files,
    };
  } else {
    const { dataKey } = await unwrapDataKey(metadata!.dek, masterKey);
    newInfo = {
      id: directoryId,
      dataKey,
      dataKeyVersion: new Date(metadata!.dekVersion),
      name: await decryptString(metadata!.name, metadata!.nameIv, dataKey),
      subDirectoryIds: subDirectories,
      fileIds: files,
    };
  }

  infoStore.update(() => newInfo);
};

export const getDirectoryInfo = (directoryId: "root" | number, masterKey: CryptoKey) => {
  // TODO: MEK rotation

  let info = directoryInfoStore.get(directoryId);
  if (!info) {
    info = writable(null);
    directoryInfoStore.set(directoryId, info);
  }

  fetchDirectoryInfo(directoryId, masterKey, info);
  return info;
};

const decryptDate = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
  return new Date(parseInt(await decryptString(ciphertext, iv, dataKey), 10));
};

const fetchFileInfo = async (
  fileId: number,
  masterKey: CryptoKey,
  infoStore: Writable<FileInfo | null>,
) => {
  const res = await callGetApi(`/api/file/${fileId}`);
  if (!res.ok) throw new Error("Failed to fetch file information");
  const metadata: FileInfoResponse = await res.json();

  const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);
  const newInfo: FileInfo = {
    id: fileId,
    dataKey,
    dataKeyVersion: new Date(metadata.dekVersion),
    contentType: metadata.contentType,
    contentIv: metadata.contentIv,
    name: await decryptString(metadata.name, metadata.nameIv, dataKey),
    createdAt:
      metadata.createdAt && metadata.createdAtIv
        ? await decryptDate(metadata.createdAt, metadata.createdAtIv, dataKey)
        : undefined,
    lastModifiedAt: await decryptDate(metadata.lastModifiedAt, metadata.lastModifiedAtIv, dataKey),
  };

  infoStore.update(() => newInfo);
};

export const getFileInfo = (fileId: number, masterKey: CryptoKey) => {
  // TODO: MEK rotation

  let info = fileInfoStore.get(fileId);
  if (!info) {
    info = writable(null);
    fileInfoStore.set(fileId, info);
  }

  fetchFileInfo(fileId, masterKey, info);
  return info;
};
src/lib/modules/file/cache.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
import {
  getFileCacheIndex as getFileCacheIndexFromIndexedDB,
  storeFileCacheIndex,
  deleteFileCacheIndex,
  type FileCacheIndex,
} from "$lib/indexedDB";
import { readFile, writeFile, deleteFile } from "$lib/modules/opfs";

const fileCacheIndex = new Map<number, FileCacheIndex>();

export const prepareFileCache = async () => {
  for (const cache of await getFileCacheIndexFromIndexedDB()) {
    fileCacheIndex.set(cache.fileId, cache);
  }
};

export const getFileCacheIndex = () => {
  return Array.from(fileCacheIndex.values());
};

export const getFileCache = async (fileId: number) => {
  const cacheIndex = fileCacheIndex.get(fileId);
  if (!cacheIndex) return null;

  cacheIndex.lastRetrievedAt = new Date();
  storeFileCacheIndex(cacheIndex); // Intended: fire-and-forget write-back of lastRetrievedAt
  return await readFile(`/cache/${fileId}`);
};

export const storeFileCache = async (fileId: number, fileBuffer: ArrayBuffer) => {
  const now = new Date();
  await writeFile(`/cache/${fileId}`, fileBuffer);

  const cacheIndex: FileCacheIndex = {
    fileId,
    cachedAt: now,
    lastRetrievedAt: now,
    size: fileBuffer.byteLength,
  };
  fileCacheIndex.set(fileId, cacheIndex);
  await storeFileCacheIndex(cacheIndex);
};

export const deleteFileCache = async (fileId: number) => {
  if (!fileCacheIndex.has(fileId)) return;

  fileCacheIndex.delete(fileId);
  await deleteFile(`/cache/${fileId}`);
  await deleteFileCacheIndex(fileId);
};
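A sketch of the intended read-through pattern, assuming prepareOpfs and prepareFileCache already ran at startup (the file id and fetch helper are hypothetical):

import { prepareFileCache, getFileCache, storeFileCache } from "$lib/modules/file";

await prepareFileCache(); // hydrate the in-memory index from IndexedDB

let buffer = await getFileCache(42); // OPFS hit, or null on a miss
if (!buffer) {
  buffer = await fetchAndDecrypt(42); // hypothetical stand-in for the download path
  await storeFileCache(42, buffer);
}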
src/lib/modules/file/download.ts (new file, 84 lines)
@@ -0,0 +1,84 @@
import axios from "axios";
import { limitFunction } from "p-limit";
import { writable, type Writable } from "svelte/store";
import { decryptData } from "$lib/modules/crypto";
import { fileDownloadStatusStore, type FileDownloadStatus } from "$lib/stores";

const requestFileDownload = limitFunction(
  async (status: Writable<FileDownloadStatus>, id: number) => {
    status.update((value) => {
      value.status = "downloading";
      return value;
    });

    const res = await axios.get(`/api/file/${id}/download`, {
      responseType: "arraybuffer",
      onDownloadProgress: ({ progress, rate, estimated }) => {
        status.update((value) => {
          value.progress = progress;
          value.rate = rate;
          value.estimated = estimated;
          return value;
        });
      },
    });
    const fileEncrypted: ArrayBuffer = res.data;

    status.update((value) => {
      value.status = "decryption-pending";
      return value;
    });
    return fileEncrypted;
  },
  { concurrency: 1 },
);

const decryptFile = limitFunction(
  async (
    status: Writable<FileDownloadStatus>,
    fileEncrypted: ArrayBuffer,
    fileEncryptedIv: string,
    dataKey: CryptoKey,
  ) => {
    status.update((value) => {
      value.status = "decrypting";
      return value;
    });

    const fileBuffer = await decryptData(fileEncrypted, fileEncryptedIv, dataKey);

    status.update((value) => {
      value.status = "decrypted";
      value.result = fileBuffer;
      return value;
    });
    return fileBuffer;
  },
  { concurrency: 4 },
);

export const downloadFile = async (id: number, fileEncryptedIv: string, dataKey: CryptoKey) => {
  const status = writable<FileDownloadStatus>({
    id,
    status: "download-pending",
  });
  fileDownloadStatusStore.update((value) => {
    value.push(status);
    return value;
  });

  try {
    return await decryptFile(
      status,
      await requestFileDownload(status, id),
      fileEncryptedIv,
      dataKey,
    );
  } catch (e) {
    status.update((value) => {
      value.status = "error";
      return value;
    });
    throw e;
  }
};
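downloadFile resolves to the decrypted buffer and mirrors progress into fileDownloadStatusStore; a minimal consumer sketch (the id, IV, and key are hypothetical):

import { get } from "svelte/store";
import { downloadFile } from "$lib/modules/file";
import { fileDownloadStatusStore } from "$lib/stores";

const buffer = await downloadFile(42, contentIv, dataKey); // hypothetical inputs

// Each store entry is itself a writable status store, one per transfer.
get(fileDownloadStatusStore).forEach((status) =>
  status.subscribe((s) => console.log(s.id, s.status, s.progress)),
);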
src/lib/modules/file/index.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
export * from "./cache";
export * from "./download";
export * from "./upload";
src/lib/modules/file/upload.ts (new file, 231 lines)
@@ -0,0 +1,231 @@
import axios from "axios";
import ExifReader from "exifreader";
import { limitFunction } from "p-limit";
import { writable, type Writable } from "svelte/store";
import {
  encodeToBase64,
  generateDataKey,
  wrapDataKey,
  encryptData,
  encryptString,
  digestMessage,
  signMessageHmac,
} from "$lib/modules/crypto";
import type {
  DuplicateFileScanRequest,
  DuplicateFileScanResponse,
  FileUploadRequest,
} from "$lib/server/schemas";
import {
  fileUploadStatusStore,
  type MasterKey,
  type HmacSecret,
  type FileUploadStatus,
} from "$lib/stores";

const requestDuplicateFileScan = limitFunction(
  async (file: File, hmacSecret: HmacSecret, onDuplicate: () => Promise<boolean>) => {
    const fileBuffer = await file.arrayBuffer();
    const fileSigned = encodeToBase64(await signMessageHmac(fileBuffer, hmacSecret.secret));

    const res = await axios.post("/api/file/scanDuplicates", {
      hskVersion: hmacSecret.version,
      contentHmac: fileSigned,
    } satisfies DuplicateFileScanRequest);
    const { files }: DuplicateFileScanResponse = res.data;

    if (files.length === 0 || (await onDuplicate())) {
      return { fileBuffer, fileSigned };
    } else {
      return {};
    }
  },
  { concurrency: 1 },
);

const getFileType = (file: File) => {
  if (file.type) return file.type;
  if (file.name.endsWith(".heic")) return "image/heic";
  throw new Error("Unknown file type");
};

const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
  const exif = ExifReader.load(fileBuffer);
  const dateTimeOriginal = exif["DateTimeOriginal"]?.description;
  const offsetTimeOriginal = exif["OffsetTimeOriginal"]?.description;
  if (!dateTimeOriginal) return undefined;

  const [date, time] = dateTimeOriginal.split(" ");
  if (!date || !time) return undefined;

  const [year, month, day] = date.split(":").map(Number);
  const [hour, minute, second] = time.split(":").map(Number);
  // Reject missing or non-numeric parts without rejecting legitimate zeros (e.g. midnight).
  if (
    year === undefined || month === undefined || day === undefined ||
    hour === undefined || minute === undefined || second === undefined ||
    [year, month, day, hour, minute, second].some(Number.isNaN)
  ) {
    return undefined;
  }

  if (!offsetTimeOriginal) {
    // No timezone information; assume the local timezone.
    return new Date(year, month - 1, day, hour, minute, second);
  }

  const offsetSign = offsetTimeOriginal[0] === "+" ? 1 : -1;
  const [offsetHour, offsetMinute] = offsetTimeOriginal.slice(1).split(":").map(Number);

  const utcDate = Date.UTC(year, month - 1, day, hour, minute, second);
  const offsetMs = offsetSign * ((offsetHour ?? 0) * 60 + (offsetMinute ?? 0)) * 60 * 1000;
  return new Date(utcDate - offsetMs);
};

const encryptFile = limitFunction(
  async (
    status: Writable<FileUploadStatus>,
    file: File,
    fileBuffer: ArrayBuffer,
    masterKey: MasterKey,
  ) => {
    status.update((value) => {
      value.status = "encrypting";
      return value;
    });

    const fileType = getFileType(file);

    let createdAt;
    if (fileType.startsWith("image/")) {
      createdAt = extractExifDateTime(fileBuffer);
    }

    const { dataKey, dataKeyVersion } = await generateDataKey();
    const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);

    const fileEncrypted = await encryptData(fileBuffer, dataKey);
    const fileEncryptedHash = encodeToBase64(await digestMessage(fileEncrypted.ciphertext));

    const nameEncrypted = await encryptString(file.name, dataKey);
    const createdAtEncrypted =
      createdAt && (await encryptString(createdAt.getTime().toString(), dataKey));
    const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey);

    status.update((value) => {
      value.status = "upload-pending";
      return value;
    });

    return {
      dataKeyWrapped,
      dataKeyVersion,
      fileType,
      fileEncrypted,
      fileEncryptedHash,
      nameEncrypted,
      createdAtEncrypted,
      lastModifiedAtEncrypted,
    };
  },
  { concurrency: 4 },
);

const requestFileUpload = limitFunction(
  async (status: Writable<FileUploadStatus>, form: FormData) => {
    status.update((value) => {
      value.status = "uploading";
      return value;
    });

    await axios.post("/api/file/upload", form, {
      onUploadProgress: ({ progress, rate, estimated }) => {
        status.update((value) => {
          value.progress = progress;
          value.rate = rate;
          value.estimated = estimated;
          return value;
        });
      },
    });

    status.update((value) => {
      value.status = "uploaded";
      return value;
    });
  },
  { concurrency: 1 },
);

export const uploadFile = async (
  file: File,
  parentId: "root" | number,
  hmacSecret: HmacSecret,
  masterKey: MasterKey,
  onDuplicate: () => Promise<boolean>,
) => {
  const status = writable<FileUploadStatus>({
    name: file.name,
    parentId,
    status: "encryption-pending",
  });
  fileUploadStatusStore.update((value) => {
    value.push(status);
    return value;
  });

  try {
    const { fileBuffer, fileSigned } = await requestDuplicateFileScan(
      file,
      hmacSecret,
      onDuplicate,
    );
    if (!fileBuffer || !fileSigned) {
      status.update((value) => {
        value.status = "canceled";
        return value;
      });
      fileUploadStatusStore.update((value) => {
        value = value.filter((v) => v !== status);
        return value;
      });
      return false;
    }

    const {
      dataKeyWrapped,
      dataKeyVersion,
      fileType,
      fileEncrypted,
      fileEncryptedHash,
      nameEncrypted,
      createdAtEncrypted,
      lastModifiedAtEncrypted,
    } = await encryptFile(status, file, fileBuffer, masterKey);

    const form = new FormData();
    form.set(
      "metadata",
      JSON.stringify({
        parent: parentId,
        mekVersion: masterKey.version,
        dek: dataKeyWrapped,
        dekVersion: dataKeyVersion.toISOString(),
        hskVersion: hmacSecret.version,
        contentHmac: fileSigned,
        contentType: fileType,
        contentIv: fileEncrypted.iv,
        name: nameEncrypted.ciphertext,
        nameIv: nameEncrypted.iv,
        createdAt: createdAtEncrypted?.ciphertext,
        createdAtIv: createdAtEncrypted?.iv,
        lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
        lastModifiedAtIv: lastModifiedAtEncrypted.iv,
      } as FileUploadRequest),
    );
    form.set("content", new Blob([fileEncrypted.ciphertext]));
    form.set("checksum", fileEncryptedHash);

    await requestFileUpload(status, form);
    return true;
  } catch (e) {
    status.update((value) => {
      value.status = "error";
      return value;
    });
    throw e;
  }
};
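To make the offset arithmetic in extractExifDateTime concrete, a worked example with hypothetical tag values:

// DateTimeOriginal   = "2024:07:04 09:30:00"
// OffsetTimeOriginal = "+09:00"
// utcDate  = Date.UTC(2024, 6, 4, 9, 30, 0)   // 09:30 read as if it were UTC
// offsetMs = +1 * (9 * 60 + 0) * 60 * 1000    // nine hours
// result   = new Date(utcDate - offsetMs)     // 2024-07-04T00:30:00.000Z
// i.e. 09:30 local time at UTC+9 is 00:30 UTC, as expected.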
src/lib/modules/filesystem.ts (new file, 208 lines)
@@ -0,0 +1,208 @@
import { get, writable, type Writable } from "svelte/store";
import { callGetApi } from "$lib/hooks";
import {
  getDirectoryInfos as getDirectoryInfosFromIndexedDB,
  getDirectoryInfo as getDirectoryInfoFromIndexedDB,
  storeDirectoryInfo,
  deleteDirectoryInfo,
  getFileInfos as getFileInfosFromIndexedDB,
  getFileInfo as getFileInfoFromIndexedDB,
  storeFileInfo,
  deleteFileInfo,
  type DirectoryId,
} from "$lib/indexedDB";
import { unwrapDataKey, decryptString } from "$lib/modules/crypto";
import type { DirectoryInfoResponse, FileInfoResponse } from "$lib/server/schemas";

export type DirectoryInfo =
  | {
      id: "root";
      dataKey?: undefined;
      dataKeyVersion?: undefined;
      name?: undefined;
      subDirectoryIds: number[];
      fileIds: number[];
    }
  | {
      id: number;
      dataKey?: CryptoKey;
      dataKeyVersion?: Date;
      name: string;
      subDirectoryIds: number[];
      fileIds: number[];
    };

export interface FileInfo {
  id: number;
  dataKey?: CryptoKey;
  dataKeyVersion?: Date;
  contentType: string;
  contentIv?: string;
  name: string;
  createdAt?: Date;
  lastModifiedAt: Date;
}

const directoryInfoStore = new Map<DirectoryId, Writable<DirectoryInfo | null>>();
const fileInfoStore = new Map<number, Writable<FileInfo | null>>();

const fetchDirectoryInfoFromIndexedDB = async (
  id: DirectoryId,
  info: Writable<DirectoryInfo | null>,
) => {
  if (get(info)) return;

  const [directory, subDirectories, files] = await Promise.all([
    id !== "root" ? getDirectoryInfoFromIndexedDB(id) : undefined,
    getDirectoryInfosFromIndexedDB(id),
    getFileInfosFromIndexedDB(id),
  ]);
  const subDirectoryIds = subDirectories.map(({ id }) => id);
  const fileIds = files.map(({ id }) => id);

  if (id === "root") {
    info.set({ id, subDirectoryIds, fileIds });
  } else {
    if (!directory) return;
    info.set({ id, name: directory.name, subDirectoryIds, fileIds });
  }
};

const fetchDirectoryInfoFromServer = async (
  id: DirectoryId,
  info: Writable<DirectoryInfo | null>,
  masterKey: CryptoKey,
) => {
  const res = await callGetApi(`/api/directory/${id}`);
  if (res.status === 404) {
    info.set(null);
    await deleteDirectoryInfo(id as number);
    return;
  } else if (!res.ok) {
    throw new Error("Failed to fetch directory information");
  }

  const {
    metadata,
    subDirectories: subDirectoryIds,
    files: fileIds,
  }: DirectoryInfoResponse = await res.json();

  if (id === "root") {
    info.set({ id, subDirectoryIds, fileIds });
  } else {
    const { dataKey } = await unwrapDataKey(metadata!.dek, masterKey);
    const name = await decryptString(metadata!.name, metadata!.nameIv, dataKey);

    info.set({
      id,
      dataKey,
      dataKeyVersion: new Date(metadata!.dekVersion),
      name,
      subDirectoryIds,
      fileIds,
    });
    await storeDirectoryInfo({ id, parentId: metadata!.parent, name });
  }
};

const fetchDirectoryInfo = async (
  id: DirectoryId,
  info: Writable<DirectoryInfo | null>,
  masterKey: CryptoKey,
) => {
  await fetchDirectoryInfoFromIndexedDB(id, info);
  await fetchDirectoryInfoFromServer(id, info, masterKey);
};

export const getDirectoryInfo = (id: DirectoryId, masterKey: CryptoKey) => {
  // TODO: MEK rotation

  let info = directoryInfoStore.get(id);
  if (!info) {
    info = writable(null);
    directoryInfoStore.set(id, info);
  }

  fetchDirectoryInfo(id, info, masterKey);
  return info;
};

const fetchFileInfoFromIndexedDB = async (id: number, info: Writable<FileInfo | null>) => {
  if (get(info)) return;

  const file = await getFileInfoFromIndexedDB(id);
  if (!file) return;

  info.set(file);
};

const decryptDate = async (ciphertext: string, iv: string, dataKey: CryptoKey) => {
  return new Date(parseInt(await decryptString(ciphertext, iv, dataKey), 10));
};

const fetchFileInfoFromServer = async (
  id: number,
  info: Writable<FileInfo | null>,
  masterKey: CryptoKey,
) => {
  const res = await callGetApi(`/api/file/${id}`);
  if (res.status === 404) {
    info.set(null);
    await deleteFileInfo(id);
    return;
  } else if (!res.ok) {
    throw new Error("Failed to fetch file information");
  }

  const metadata: FileInfoResponse = await res.json();
  const { dataKey } = await unwrapDataKey(metadata.dek, masterKey);

  const name = await decryptString(metadata.name, metadata.nameIv, dataKey);
  const createdAt =
    metadata.createdAt && metadata.createdAtIv
      ? await decryptDate(metadata.createdAt, metadata.createdAtIv, dataKey)
      : undefined;
  const lastModifiedAt = await decryptDate(
    metadata.lastModifiedAt,
    metadata.lastModifiedAtIv,
    dataKey,
  );

  info.set({
    id,
    dataKey,
    dataKeyVersion: new Date(metadata.dekVersion),
    contentType: metadata.contentType,
    contentIv: metadata.contentIv,
    name,
    createdAt,
    lastModifiedAt,
  });
  await storeFileInfo({
    id,
    parentId: metadata.parent,
    name,
    contentType: metadata.contentType,
    createdAt,
    lastModifiedAt,
  });
};

const fetchFileInfo = async (id: number, info: Writable<FileInfo | null>, masterKey: CryptoKey) => {
  await fetchFileInfoFromIndexedDB(id, info);
  await fetchFileInfoFromServer(id, info, masterKey);
};

export const getFileInfo = (fileId: number, masterKey: CryptoKey) => {
  // TODO: MEK rotation

  let info = fileInfoStore.get(fileId);
  if (!info) {
    info = writable(null);
    fileInfoStore.set(fileId, info);
  }

  fetchFileInfo(fileId, info, masterKey);
  return info;
};
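getDirectoryInfo is offline-first: the returned store is filled from IndexedDB immediately (when empty) and then refreshed from the server; a minimal subscriber sketch (assumes a masterKey was unwrapped earlier in the session):

import { getDirectoryInfo } from "$lib/modules/filesystem";

const info = getDirectoryInfo("root", masterKey); // masterKey: CryptoKey, hypothetical
const unsubscribe = info.subscribe((value) => {
  if (!value) return; // null until the IndexedDB or server fetch lands
  console.log(value.subDirectoryIds, value.fileIds);
});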
src/lib/modules/opfs.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
let rootHandle: FileSystemDirectoryHandle | null = null;

export const prepareOpfs = async () => {
  rootHandle = await navigator.storage.getDirectory();
};

const getFileHandle = async (path: string, create = true) => {
  if (!rootHandle) {
    throw new Error("OPFS not prepared");
  } else if (path[0] !== "/") {
    throw new Error("Path must be absolute");
  }

  const parts = path.split("/");
  if (parts.length <= 1) {
    throw new Error("Invalid path");
  }

  try {
    let directoryHandle = rootHandle;
    for (const part of parts.slice(0, -1)) {
      if (!part) continue;
      directoryHandle = await directoryHandle.getDirectoryHandle(part, { create });
    }

    const filename = parts[parts.length - 1]!;
    const fileHandle = await directoryHandle.getFileHandle(filename, { create });
    return { parentHandle: directoryHandle, filename, fileHandle };
  } catch (e) {
    if (e instanceof DOMException && e.name === "NotFoundError") {
      return {};
    }
    throw e;
  }
};

export const readFile = async (path: string) => {
  const { fileHandle } = await getFileHandle(path, false);
  if (!fileHandle) return null;

  const file = await fileHandle.getFile();
  return await file.arrayBuffer();
};

export const writeFile = async (path: string, data: ArrayBuffer) => {
  const { fileHandle } = await getFileHandle(path);
  const writable = await fileHandle!.createWritable();

  try {
    await writable.write(data);
  } finally {
    await writable.close();
  }
};

export const deleteFile = async (path: string) => {
  const { parentHandle, filename } = await getFileHandle(path, false);
  if (!parentHandle) return;

  await parentHandle.removeEntry(filename);
};
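Round-tripping a buffer through these helpers looks like this (the path is hypothetical; prepareOpfs must run first):

import { prepareOpfs, writeFile, readFile, deleteFile } from "$lib/modules/opfs";

await prepareOpfs();

const data = new TextEncoder().encode("hello").buffer as ArrayBuffer;
await writeFile("/cache/demo", data); // intermediate directories are created on the way down
const roundTripped = await readFile("/cache/demo"); // ArrayBuffer, or null if missing
await deleteFile("/cache/demo");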
src/lib/modules/util.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
const pad2 = (num: number) => num.toString().padStart(2, "0");

export const formatDate = (date: Date) => {
  const year = date.getFullYear();
  const month = date.getMonth() + 1;
  const day = date.getDate();
  return `${year}. ${month}. ${day}.`;
};

export const formatDateTime = (date: Date) => {
  const dateFormatted = formatDate(date);
  const hours = date.getHours();
  const minutes = date.getMinutes();
  return `${dateFormatted} ${pad2(hours)}:${pad2(minutes)}`;
};

export const formatFileSize = (size: number) => {
  if (size < 1024) return `${size} B`;
  if (size < 1024 * 1024) return `${(size / 1024).toFixed(1)} KiB`;
  if (size < 1024 * 1024 * 1024) return `${(size / 1024 / 1024).toFixed(1)} MiB`;
  return `${(size / 1024 / 1024 / 1024).toFixed(1)} GiB`;
};

export const formatNetworkSpeed = (speed: number) => {
  if (speed < 1000) return `${speed} bps`;
  if (speed < 1000 * 1000) return `${(speed / 1000).toFixed(1)} kbps`;
  if (speed < 1000 * 1000 * 1000) return `${(speed / 1000 / 1000).toFixed(1)} Mbps`;
  return `${(speed / 1000 / 1000 / 1000).toFixed(1)} Gbps`;
};
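For reference, sample outputs under these rules (file sizes use binary prefixes, network speeds decimal):

formatFileSize(512);              // "512 B"
formatFileSize(1536);             // "1.5 KiB"
formatFileSize(5 * 1024 * 1024);  // "5.0 MiB"
formatNetworkSpeed(2_500_000);    // "2.5 Mbps"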
@@ -27,6 +27,7 @@ export interface NewFileParams {
   contentHmac: string | null;
   contentType: string;
   encContentIv: string;
+  encContentHash: string;
   encName: string;
   encNameIv: string;
   encCreatedAt: string | null;

@@ -130,14 +131,15 @@ export const unregisterDirectory = async (userId: number, directoryId: number) =
   return await db.transaction(
     async (tx) => {
       const unregisterFiles = async (parentId: number) => {
-        const files = await tx
+        return await tx
           .delete(file)
           .where(and(eq(file.userId, userId), eq(file.parentId, parentId)))
-          .returning({ path: file.path });
-        return files.map(({ path }) => path);
+          .returning({ id: file.id, path: file.path });
       };
-      const unregisterDirectoryRecursively = async (directoryId: number): Promise<string[]> => {
-        const filePaths = await unregisterFiles(directoryId);
+      const unregisterDirectoryRecursively = async (
+        directoryId: number,
+      ): Promise<{ id: number; path: string }[]> => {
+        const files = await unregisterFiles(directoryId);
         const subDirectories = await tx
           .select({ id: directory.id })
           .from(directory)

@@ -150,7 +152,7 @@ export const unregisterDirectory = async (userId: number, directoryId: number) =
       if (deleteRes.changes === 0) {
         throw new IntegrityError("Directory not found");
       }
-      return filePaths.concat(...subDirectoryFilePaths);
+      return files.concat(...subDirectoryFilePaths);
     };
     return await unregisterDirectoryRecursively(directoryId);
   },

@@ -198,11 +200,12 @@ export const registerFile = async (params: NewFileParams) => {
       userId: params.userId,
       mekVersion: params.mekVersion,
       hskVersion: params.hskVersion,
-      contentHmac: params.contentHmac,
-      contentType: params.contentType,
       encDek: params.encDek,
       dekVersion: params.dekVersion,
+      contentHmac: params.contentHmac,
+      contentType: params.contentType,
       encContentIv: params.encContentIv,
+      encContentHash: params.encContentHash,
       encName: { ciphertext: params.encName, iv: params.encNameIv },
       encCreatedAt:
         params.encCreatedAt && params.encCreatedAtIv
@@ -61,6 +61,7 @@ export const file = sqliteTable(
     contentHmac: text("content_hmac"), // Base64
     contentType: text("content_type").notNull(),
     encContentIv: text("encrypted_content_iv").notNull(), // Base64
+    encContentHash: text("encrypted_content_hash").notNull(), // Base64
     encName: ciphertext("encrypted_name").notNull(),
     encCreatedAt: ciphertext("encrypted_created_at"),
     encLastModifiedAt: ciphertext("encrypted_last_modified_at").notNull(),
@@ -1,4 +1,5 @@
 import { sqliteTable, text, integer, primaryKey, foreignKey } from "drizzle-orm/sqlite-core";
+import { client } from "./client";
 import { mek } from "./mek";
 import { user } from "./user";

@@ -32,7 +33,7 @@ export const hskLog = sqliteTable(
     hskVersion: integer("hmac_secret_key_version").notNull(),
     timestamp: integer("timestamp", { mode: "timestamp_ms" }).notNull(),
     action: text("action", { enum: ["create"] }).notNull(),
-    actionBy: integer("action_by").references(() => user.id),
+    actionBy: integer("action_by").references(() => client.id),
   },
   (t) => ({
     ref: foreignKey({
@@ -3,6 +3,7 @@ import { z } from "zod";
|
||||
export const directoryInfoResponse = z.object({
|
||||
metadata: z
|
||||
.object({
|
||||
parent: z.union([z.enum(["root"]), z.number().int().positive()]),
|
||||
mekVersion: z.number().int().positive(),
|
||||
dek: z.string().base64().nonempty(),
|
||||
dekVersion: z.string().datetime(),
|
||||
@@ -15,6 +16,11 @@ export const directoryInfoResponse = z.object({
|
||||
});
|
||||
export type DirectoryInfoResponse = z.infer<typeof directoryInfoResponse>;
|
||||
|
||||
export const directoryDeleteResponse = z.object({
|
||||
deletedFiles: z.number().int().positive().array(),
|
||||
});
|
||||
export type DirectoryDeleteResponse = z.infer<typeof directoryDeleteResponse>;
|
||||
|
||||
export const directoryRenameRequest = z.object({
|
||||
dekVersion: z.string().datetime(),
|
||||
name: z.string().base64().nonempty(),
|
||||
@@ -23,7 +29,7 @@ export const directoryRenameRequest = z.object({
|
||||
export type DirectoryRenameRequest = z.infer<typeof directoryRenameRequest>;
|
||||
|
||||
export const directoryCreateRequest = z.object({
|
||||
parentId: z.union([z.enum(["root"]), z.number().int().positive()]),
|
||||
parent: z.union([z.enum(["root"]), z.number().int().positive()]),
|
||||
mekVersion: z.number().int().positive(),
|
||||
dek: z.string().base64().nonempty(),
|
||||
dekVersion: z.string().datetime(),
|
||||
|
||||
@@ -2,6 +2,7 @@ import mime from "mime";
|
||||
import { z } from "zod";
|
||||
|
||||
export const fileInfoResponse = z.object({
|
||||
parent: z.union([z.enum(["root"]), z.number().int().positive()]),
|
||||
mekVersion: z.number().int().positive(),
|
||||
dek: z.string().base64().nonempty(),
|
||||
dekVersion: z.string().datetime(),
|
||||
@@ -38,7 +39,7 @@ export const duplicateFileScanResponse = z.object({
|
||||
export type DuplicateFileScanResponse = z.infer<typeof duplicateFileScanResponse>;
|
||||
|
||||
export const fileUploadRequest = z.object({
|
||||
parentId: z.union([z.enum(["root"]), z.number().int().positive()]),
|
||||
parent: z.union([z.enum(["root"]), z.number().int().positive()]),
|
||||
mekVersion: z.number().int().positive(),
|
||||
dek: z.string().base64().nonempty(),
|
||||
dekVersion: z.string().datetime(),
|
||||
|
||||
@@ -19,9 +19,9 @@ export const getDirectoryInformation = async (userId: number, directoryId: "root

   const directories = await getAllDirectoriesByParent(userId, directoryId);
   const files = await getAllFilesByParent(userId, directoryId);

   return {
     metadata: directory && {
       parentId: directory.parentId ?? ("root" as const),
       mekVersion: directory.mekVersion,
       encDek: directory.encDek,
       dekVersion: directory.dekVersion,

@@ -34,8 +34,13 @@ export const getDirectoryInformation = async (userId: number, directoryId: "root

 export const deleteDirectory = async (userId: number, directoryId: number) => {
   try {
-    const filePaths = await unregisterDirectory(userId, directoryId);
-    filePaths.map((path) => unlink(path)); // Intended
+    const files = await unregisterDirectory(userId, directoryId);
+    return {
+      files: files.map(({ id, path }) => {
+        unlink(path); // Intended: fire-and-forget deletion of the on-disk blob
+        return id;
+      }),
+    };
   } catch (e) {
     if (e instanceof IntegrityError && e.message === "Directory not found") {
       error(404, "Invalid directory id");
@@ -1,8 +1,10 @@
 import { error } from "@sveltejs/kit";
+import { createHash } from "crypto";
 import { createReadStream, createWriteStream } from "fs";
 import { mkdir, stat, unlink } from "fs/promises";
 import { dirname } from "path";
-import { Readable, Writable } from "stream";
+import { Readable } from "stream";
+import { pipeline } from "stream/promises";
 import { v4 as uuidv4 } from "uuid";
 import { IntegrityError } from "$lib/server/db/error";
 import {

@@ -22,6 +24,7 @@ export const getFileInformation = async (userId: number, fileId: number) => {
   }

   return {
+    parentId: file.parentId ?? ("root" as const),
     mekVersion: file.mekVersion,
     encDek: file.encDek,
     dekVersion: file.dekVersion,

@@ -93,12 +96,13 @@ const safeUnlink = async (path: string) => {
 };

 export const uploadFile = async (
-  params: Omit<NewFileParams, "path">,
-  encContentStream: ReadableStream<Uint8Array>,
+  params: Omit<NewFileParams, "path" | "encContentHash">,
+  encContentStream: Readable,
+  encContentHash: Promise<string>,
 ) => {
-  const oneMinuteAgo = new Date(Date.now() - 60 * 1000);
+  const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
   const oneMinuteLater = new Date(Date.now() + 60 * 1000);
-  if (params.dekVersion <= oneMinuteAgo || params.dekVersion >= oneMinuteLater) {
+  if (params.dekVersion <= oneDayAgo || params.dekVersion >= oneMinuteLater) {
     error(400, "Invalid DEK version");
   }

@@ -106,20 +110,39 @@ export const uploadFile = async (
   await mkdir(dirname(path), { recursive: true });

   try {
-    await encContentStream.pipeTo(
-      Writable.toWeb(createWriteStream(path, { flags: "wx", mode: 0o600 })),
-    );
+    const hashStream = createHash("sha256");
+    const [_, hash] = await Promise.all([
+      pipeline(
+        encContentStream,
+        async function* (source) {
+          for await (const chunk of source) {
+            hashStream.update(chunk);
+            yield chunk;
+          }
+        },
+        createWriteStream(path, { flags: "wx", mode: 0o600 }),
+      ),
+      encContentHash,
+    ]);
+    if (hashStream.digest("base64") != hash) {
+      throw new Error("Invalid checksum");
+    }

     await registerFile({
       ...params,
       path,
+      encContentHash: hash,
     });
   } catch (e) {
     await safeUnlink(path);

-    if (e instanceof IntegrityError) {
-      if (e.message === "Inactive MEK version") {
-        error(400, "Invalid MEK version");
-      }
+    if (e instanceof IntegrityError && e.message === "Inactive MEK version") {
+      error(400, "Invalid MEK version");
+    } else if (
+      e instanceof Error &&
+      (e.message === "Invalid request body" || e.message === "Invalid checksum")
+    ) {
+      error(400, "Invalid request body");
+    }
     throw e;
   }
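This server-side hash pairs with the client's form.set("checksum", fileEncryptedHash) in upload.ts. Assuming digestMessage is a SHA-256 digest (its definition is not shown in this diff), the two base64 values line up; a sketch of that assumption on the client:

// Sketch only: assumes digestMessage ≈ crypto.subtle.digest("SHA-256", ...).
const digest = await crypto.subtle.digest("SHA-256", fileEncrypted.ciphertext);
const checksum = btoa(String.fromCharCode(...new Uint8Array(digest))); // 32 bytes, safe to spread
// The server recomputes createHash("sha256") over the streamed chunks and
// rejects the upload with "Invalid checksum" when the digests differ.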
@@ -1,34 +1,49 @@
-import type { Writable } from "svelte/store";
+import { writable, type Writable } from "svelte/store";
 
-export type DirectoryInfo =
-  | {
-      id: "root";
-      dataKey?: undefined;
-      dataKeyVersion?: undefined;
-      name?: undefined;
-      subDirectoryIds: number[];
-      fileIds: number[];
-    }
-  | {
-      id: number;
-      dataKey: CryptoKey;
-      dataKeyVersion: Date;
-      name: string;
-      subDirectoryIds: number[];
-      fileIds: number[];
-    };
-
-export interface FileInfo {
-  id: number;
-  dataKey: CryptoKey;
-  dataKeyVersion: Date;
-  contentType: string;
-  contentIv: string;
+export interface FileUploadStatus {
   name: string;
-  createdAt?: Date;
-  lastModifiedAt: Date;
+  parentId: "root" | number;
+  status:
+    | "encryption-pending"
+    | "encrypting"
+    | "upload-pending"
+    | "uploading"
+    | "uploaded"
+    | "canceled"
+    | "error";
+  progress?: number;
+  rate?: number;
+  estimated?: number;
 }
 
-export const directoryInfoStore = new Map<"root" | number, Writable<DirectoryInfo | null>>();
+export interface FileDownloadStatus {
+  id: number;
+  status:
+    | "download-pending"
+    | "downloading"
+    | "decryption-pending"
+    | "decrypting"
+    | "decrypted"
+    | "canceled"
+    | "error";
+  progress?: number;
+  rate?: number;
+  estimated?: number;
+  result?: ArrayBuffer;
+}
 
-export const fileInfoStore = new Map<number, Writable<FileInfo | null>>();
+export const fileUploadStatusStore = writable<Writable<FileUploadStatus>[]>([]);
+
+export const fileDownloadStatusStore = writable<Writable<FileDownloadStatus>[]>([]);
+
+export const isFileUploading = (
+  status: FileUploadStatus["status"],
+): status is "encryption-pending" | "encrypting" | "upload-pending" | "uploading" => {
+  return ["encryption-pending", "encrypting", "upload-pending", "uploading"].includes(status);
+};
+
+export const isFileDownloading = (
+  status: FileDownloadStatus["status"],
+): status is "download-pending" | "downloading" | "decryption-pending" | "decrypting" => {
+  return ["download-pending", "downloading", "decryption-pending", "decrypting"].includes(status);
+};
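Each store holds one writable status store per transfer, so consumers filter with the type guards; a minimal sketch (hypothetical consumer):

import { get } from "svelte/store";
import { fileUploadStatusStore, isFileUploading } from "$lib/stores";

const active = get(fileUploadStatusStore).filter((s) => isFileUploading(get(s).status));
console.log(`${active.length} upload(s) in progress`);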