파일 업로드 로직 리팩토링

This commit is contained in:
static
2026-01-12 12:02:20 +09:00
parent 27e90ef4d7
commit b636d75ea0
6 changed files with 193 additions and 398 deletions

View File

@@ -1,21 +1,18 @@
import ExifReader from "exifreader"; import ExifReader from "exifreader";
import pLimit, { limitFunction } from "p-limit"; import { limitFunction } from "p-limit";
import { CHUNK_SIZE } from "$lib/constants"; import { CHUNK_SIZE } from "$lib/constants";
import { import {
encodeToBase64, encodeToBase64,
generateDataKey, generateDataKey,
wrapDataKey, wrapDataKey,
encryptData,
encryptString, encryptString,
encryptChunk,
digestMessage,
createHmacStream, createHmacStream,
} from "$lib/modules/crypto"; } from "$lib/modules/crypto";
import { Scheduler } from "$lib/modules/scheduler"; import { Scheduler } from "$lib/modules/scheduler";
import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail"; import { generateThumbnail } from "$lib/modules/thumbnail";
import { uploadBlob } from "$lib/modules/upload";
import type { MasterKey, HmacSecret } from "$lib/stores"; import type { MasterKey, HmacSecret } from "$lib/stores";
import { trpc } from "$trpc/client"; import { trpc } from "$trpc/client";
import type { RouterInputs } from "$trpc/router.server";
export interface FileUploadState { export interface FileUploadState {
name: string; name: string;
@@ -114,295 +111,83 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => {
return new Date(utcDate - offsetMs); return new Date(utcDate - offsetMs);
}; };
const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { const requestFileUpload2 = async (
const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = [];
let offset = 0;
while (offset < fileBuffer.byteLength) {
const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength);
const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey);
chunksEncrypted.push({
chunkEncrypted: chunkEncrypted,
chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)),
});
offset = nextOffset;
}
return chunksEncrypted;
};
const encryptImageFile = limitFunction(
async (state: FileUploadState, file: File, masterKey: MasterKey) => {
state.status = "encrypting";
const fileBuffer = await file.arrayBuffer();
const createdAt = extractExifDateTime(fileBuffer);
const { dataKey, dataKeyVersion } = await generateDataKey();
const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
const chunksEncrypted = await encryptChunks(fileBuffer, dataKey);
const nameEncrypted = await encryptString(file.name, dataKey);
const createdAtEncrypted =
createdAt && (await encryptString(createdAt.getTime().toString(), dataKey));
const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey);
const thumbnail = await generateThumbnail(fileBuffer, getFileType(file));
const thumbnailBuffer = await thumbnail?.arrayBuffer();
const thumbnailEncrypted = thumbnailBuffer && (await encryptData(thumbnailBuffer, dataKey));
state.status = "upload-pending";
return {
dataKeyWrapped,
dataKeyVersion,
chunksEncrypted,
nameEncrypted,
createdAtEncrypted,
lastModifiedAtEncrypted,
thumbnail: thumbnailEncrypted && { plaintext: thumbnailBuffer, ...thumbnailEncrypted },
};
},
{ concurrency: 4 },
);
const uploadThumbnail = async (
fileId: number,
thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer },
dataKeyVersion: Date,
) => {
const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({
file: fileId,
dekVersion: dataKeyVersion,
});
const ivAndCiphertext = new Uint8Array(
thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength,
);
ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0);
ivAndCiphertext.set(
new Uint8Array(thumbnailEncrypted.ciphertext),
thumbnailEncrypted.iv.byteLength,
);
const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext));
const response = await fetch(`/api/upload/${uploadId}/chunks/0`, {
method: "POST",
headers: {
"Content-Type": "application/octet-stream",
"Content-Digest": `sha-256=:${chunkHash}:`,
},
body: ivAndCiphertext,
});
if (!response.ok) {
throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`);
}
await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId });
};
const requestImageFileUpload = limitFunction(
async (
state: FileUploadState,
metadata: RouterInputs["upload"]["startFileUpload"],
chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[],
fileSigned: string | undefined,
thumbnailData: { ciphertext: ArrayBuffer; iv: ArrayBuffer; plaintext: ArrayBuffer } | null,
dataKeyVersion: Date,
) => {
state.status = "uploading";
const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata);
const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0);
let uploadedBytes = 0;
const startTime = Date.now();
for (let i = 0; i < chunksEncrypted.length; i++) {
const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!;
const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, {
method: "POST",
headers: {
"Content-Type": "application/octet-stream",
"Content-Digest": `sha-256=:${chunkEncryptedHash}:`,
},
body: chunkEncrypted,
});
if (!response.ok) {
throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
}
uploadedBytes += chunkEncrypted.byteLength;
const elapsed = (Date.now() - startTime) / 1000;
const rate = uploadedBytes / elapsed;
const remaining = totalBytes - uploadedBytes;
const estimated = rate > 0 ? remaining / rate : undefined;
state.progress = uploadedBytes / totalBytes;
state.rate = rate;
state.estimated = estimated;
}
const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
uploadId,
contentHmac: fileSigned,
});
if (thumbnailData) {
try {
await uploadThumbnail(fileId, thumbnailData, dataKeyVersion);
} catch (e) {
// TODO: Error handling for thumbnail upload
console.error(e);
}
}
state.status = "uploaded";
return { fileId, thumbnailBuffer: thumbnailData?.plaintext };
},
{ concurrency: 1 },
);
const requestFileUpload = async (
state: FileUploadState, state: FileUploadState,
file: File, file: Blob,
fileSigned: string,
fileMetadata: {
parentId: "root" | number;
name: string;
createdAt?: Date;
lastModifiedAt: Date;
},
masterKey: MasterKey, masterKey: MasterKey,
hmacSecret: HmacSecret, hmacSecret: HmacSecret,
fileSigned: string,
parentId: DirectoryId,
) => { ) => {
state.status = "uploading"; state.status = "encrypting";
const fileType = getFileType(file);
const { dataKey, dataKeyVersion } = await generateDataKey(); const { dataKey, dataKeyVersion } = await generateDataKey();
const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key);
const nameEncrypted = await encryptString(file.name, dataKey); const [nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnailBuffer] =
const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); await Promise.all([
encryptString(fileMetadata.name, dataKey),
fileMetadata.createdAt && encryptString(fileMetadata.createdAt.getTime().toString(), dataKey),
encryptString(fileMetadata.lastModifiedAt.getTime().toString(), dataKey),
generateThumbnail(file).then((blob) => blob?.arrayBuffer()),
]);
const totalChunks = Math.ceil(file.size / CHUNK_SIZE); const { uploadId } = await trpc().upload.startFileUpload.mutate({
const metadata = { chunks: Math.ceil(file.size / CHUNK_SIZE),
chunks: totalChunks, parent: fileMetadata.parentId,
parent: parentId,
mekVersion: masterKey.version, mekVersion: masterKey.version,
dek: dataKeyWrapped, dek: dataKeyWrapped,
dekVersion: dataKeyVersion, dekVersion: dataKeyVersion,
hskVersion: hmacSecret.version, hskVersion: hmacSecret.version,
contentType: fileType, contentType: file.type,
name: nameEncrypted.ciphertext, name: nameEncrypted.ciphertext,
nameIv: nameEncrypted.iv, nameIv: nameEncrypted.iv,
createdAt: createdAtEncrypted?.ciphertext,
createdAtIv: createdAtEncrypted?.iv,
lastModifiedAt: lastModifiedAtEncrypted.ciphertext, lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
lastModifiedAtIv: lastModifiedAtEncrypted.iv, lastModifiedAtIv: lastModifiedAtEncrypted.iv,
}; });
const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); state.status = "uploading";
const reader = file.stream().getReader(); await uploadBlob(uploadId, file, dataKey, {
const limit = pLimit(4); onProgress(s) {
let buffer = new Uint8Array(0); state.progress = s.progress;
let chunkIndex = 0; state.rate = s.rateBps;
const uploadPromises: Promise<void>[] = []; },
});
const totalBytes = file.size;
let uploadedBytes = 0;
const startTime = Date.now();
const uploadChunk = async (
index: number,
encryptedChunk: ArrayBuffer,
chunkHash: string,
originalChunkSize: number,
) => {
const response = await fetch(`/api/upload/${uploadId}/chunks/${index}`, {
method: "POST",
headers: {
"Content-Type": "application/octet-stream",
"Content-Digest": `sha-256=:${chunkHash}:`,
},
body: encryptedChunk,
});
if (!response.ok) {
throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
}
uploadedBytes += originalChunkSize;
const elapsed = (Date.now() - startTime) / 1000;
const rate = uploadedBytes / elapsed;
const remaining = totalBytes - uploadedBytes;
const estimated = rate > 0 ? remaining / rate : undefined;
state.progress = uploadedBytes / totalBytes;
state.rate = rate;
state.estimated = estimated;
};
while (true) {
const { done, value } = await reader.read();
if (done && buffer.length === 0) break;
if (value) {
const newBuffer = new Uint8Array(buffer.length + value.length);
newBuffer.set(buffer);
newBuffer.set(value, buffer.length);
buffer = newBuffer;
}
while (buffer.length >= CHUNK_SIZE || (done && buffer.length > 0)) {
const chunkSize = Math.min(CHUNK_SIZE, buffer.length);
const chunk = buffer.slice(0, chunkSize);
buffer = buffer.slice(chunkSize);
const encryptedChunk = await encryptChunk(chunk.buffer.slice(0, chunk.byteLength), dataKey);
const chunkHash = encodeToBase64(await digestMessage(encryptedChunk));
const currentIndex = chunkIndex++;
uploadPromises.push(
limit(() => uploadChunk(currentIndex, encryptedChunk, chunkHash, chunkSize)),
);
}
if (done) break;
}
await Promise.all(uploadPromises);
const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ const { file: fileId } = await trpc().upload.completeFileUpload.mutate({
uploadId, uploadId,
contentHmac: fileSigned, contentHmac: fileSigned,
}); });
if (fileType.startsWith("video/")) { if (thumbnailBuffer) {
try { const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({
const thumbnail = await generateThumbnailFromFile(file); file: fileId,
if (thumbnail) { dekVersion: dataKeyVersion,
const thumbnailBuffer = await thumbnail.arrayBuffer(); });
const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey);
await uploadThumbnail(fileId, thumbnailEncrypted, dataKeyVersion); await uploadBlob(uploadId, new Blob([thumbnailBuffer]), dataKey);
}
} catch (e) { await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId });
// Thumbnail upload failure is not critical
console.error(e);
}
} }
state.status = "uploaded"; state.status = "uploaded";
return { fileId }; return { fileId, thumbnailBuffer };
}; };
export const uploadFile = async ( export const uploadFile = async (
file: File, file: File,
parentId: "root" | number, parentId: "root" | number,
hmacSecret: HmacSecret,
masterKey: MasterKey, masterKey: MasterKey,
hmacSecret: HmacSecret,
onDuplicate: () => Promise<boolean>, onDuplicate: () => Promise<boolean>,
) => { ) => {
uploadingFiles.push({ uploadingFiles.push({
@@ -426,51 +211,37 @@ export const uploadFile = async (
const fileType = getFileType(file); const fileType = getFileType(file);
if (fileType.startsWith("image/")) { if (fileType.startsWith("image/")) {
const fileBuffer = await file.arrayBuffer(); const fileBuffer = await file.arrayBuffer();
const { const fileCreatedAt = extractExifDateTime(fileBuffer);
dataKeyWrapped,
dataKeyVersion,
chunksEncrypted,
nameEncrypted,
createdAtEncrypted,
lastModifiedAtEncrypted,
thumbnail,
} = await encryptImageFile(state, file, masterKey);
const metadata = { const { fileId, thumbnailBuffer } = await requestFileUpload2(
chunks: chunksEncrypted.length,
parent: parentId,
mekVersion: masterKey.version,
dek: dataKeyWrapped,
dekVersion: dataKeyVersion,
hskVersion: hmacSecret.version,
contentType: fileType,
name: nameEncrypted.ciphertext,
nameIv: nameEncrypted.iv,
createdAt: createdAtEncrypted?.ciphertext,
createdAtIv: createdAtEncrypted?.iv,
lastModifiedAt: lastModifiedAtEncrypted.ciphertext,
lastModifiedAtIv: lastModifiedAtEncrypted.iv,
};
const { fileId, thumbnailBuffer } = await requestImageFileUpload(
state, state,
metadata, new Blob([fileBuffer], { type: fileType }),
chunksEncrypted,
fileSigned, fileSigned,
thumbnail ?? null, {
dataKeyVersion, parentId,
); name: file.name,
return { fileId, fileBuffer, thumbnailBuffer }; createdAt: fileCreatedAt,
} else { lastModifiedAt: new Date(file.lastModified),
const { fileId } = await requestFileUpload( },
state,
file,
masterKey, masterKey,
hmacSecret, hmacSecret,
fileSigned,
parentId,
); );
return { fileId };
return { fileId, fileBuffer, thumbnailBuffer };
} else {
const { fileId, thumbnailBuffer } = await requestFileUpload2(
state,
file,
fileSigned,
{
parentId,
name: file.name,
lastModifiedAt: new Date(file.lastModified),
},
masterKey,
hmacSecret,
);
return { fileId, thumbnailBuffer };
} }
} catch (e) { } catch (e) {
state.status = "error"; state.status = "error";

View File

@@ -52,7 +52,6 @@ const generateImageThumbnail = (imageUrl: string) => {
.catch(reject); .catch(reject);
}; };
image.onerror = reject; image.onerror = reject;
image.src = imageUrl; image.src = imageUrl;
}); });
}; };
@@ -85,31 +84,27 @@ const generateVideoThumbnail = (videoUrl: string, time = 0) => {
}); });
}; };
export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: string) => { export const generateThumbnail = async (blob: Blob) => {
let url; let url;
try { try {
if (fileType.startsWith("image/")) { if (blob.type.startsWith("image/")) {
const fileBlob = new Blob([fileBuffer], { type: fileType }); url = URL.createObjectURL(blob);
url = URL.createObjectURL(fileBlob);
try { try {
return await generateImageThumbnail(url); return await generateImageThumbnail(url);
} catch { } catch {
URL.revokeObjectURL(url); URL.revokeObjectURL(url);
url = undefined; url = undefined;
if (fileType === "image/heic") { if (blob.type === "image/heic") {
const { default: heic2any } = await import("heic2any"); const { default: heic2any } = await import("heic2any");
url = URL.createObjectURL( url = URL.createObjectURL((await heic2any({ blob, toType: "image/png" })) as Blob);
(await heic2any({ blob: fileBlob, toType: "image/png" })) as Blob,
);
return await generateImageThumbnail(url); return await generateImageThumbnail(url);
} else { } else {
return null; return null;
} }
} }
} else if (fileType.startsWith("video/")) { } else if (blob.type.startsWith("video/")) {
url = URL.createObjectURL(new Blob([fileBuffer], { type: fileType })); url = URL.createObjectURL(blob);
return await generateVideoThumbnail(url); return await generateVideoThumbnail(url);
} }
return null; return null;
@@ -122,22 +117,6 @@ export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: strin
} }
}; };
export const generateThumbnailFromFile = async (file: File) => {
if (!file.type.startsWith("video/")) return null;
let url;
try {
url = URL.createObjectURL(file);
return await generateVideoThumbnail(url);
} catch {
return null;
} finally {
if (url) {
URL.revokeObjectURL(url);
}
}
};
export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => { export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => {
return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`; return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`;
}; };

103
src/lib/modules/upload.ts Normal file
View File

@@ -0,0 +1,103 @@
import axios from "axios";
import pLimit from "p-limit";
import { ENCRYPTION_OVERHEAD, CHUNK_SIZE } from "$lib/constants";
import { encryptChunk, digestMessage, encodeToBase64 } from "$lib/modules/crypto";
// Progress statistics reported to callers while a blob upload is in flight.
type UploadStats = {
  progress: number; // 0..1 (measured against the estimated post-encryption size)
  rateBps: number; // bytes/sec
  uploadedBytes: number;
  totalBytes: number;
};
/**
 * Creates a sliding-window throughput meter.
 *
 * Each call records the cumulative byte count at the current time and
 * returns the average transfer rate in bytes/sec over roughly the last
 * `windowMs` milliseconds. Returns 0 until measurable time has elapsed.
 */
function createSpeedMeter(windowMs = 1500) {
  const history: { t: number; b: number }[] = [];
  return (bytesNow: number): number => {
    const now = performance.now();
    history.push({ t: now, b: bytesNow });
    // Evict samples that fell out of the window, but always keep at least
    // two so a rate can still be computed after a quiet stretch.
    const cutoff = now - windowMs;
    while (history.length > 2 && history[0]!.t < cutoff) {
      history.shift();
    }
    const oldest = history[0]!;
    const elapsedMs = now - oldest.t;
    if (elapsedMs <= 0) return 0;
    return ((bytesNow - oldest.b) / elapsedMs) * 1000;
  };
}
/**
 * Encrypts a single chunk and POSTs it to the upload endpoint.
 *
 * Progress for this chunk is reported through `onChunkProgress` in
 * encrypted bytes as the request body is transmitted; a final report with
 * the exact encrypted size is issued once the request has completed.
 */
const uploadChunk = async (
  uploadId: string,
  chunkIndex: number,
  chunk: Blob,
  dataKey: CryptoKey,
  onChunkProgress: (chunkIndex: number, loaded: number) => void,
) => {
  const plaintext = await chunk.arrayBuffer();
  const encrypted = await encryptChunk(plaintext, dataKey);
  const encryptedHash = encodeToBase64(await digestMessage(encrypted));

  await axios.post(`/api/upload/${uploadId}/chunks/${chunkIndex}`, encrypted, {
    headers: {
      "Content-Type": "application/octet-stream",
      "Content-Digest": `sha-256=:${encryptedHash}:`,
    },
    onUploadProgress: (event) => {
      onChunkProgress(chunkIndex, event.loaded ?? 0);
    },
  });

  // axios may not fire a final progress event; report the exact total.
  onChunkProgress(chunkIndex, encrypted.byteLength);
};
/**
 * Uploads a blob to an open upload session, chunk by chunk.
 *
 * The blob is split into CHUNK_SIZE slices, each encrypted with `dataKey`
 * and uploaded with bounded concurrency (default 4). Progress is measured
 * against the estimated post-encryption size (blob size plus a fixed
 * per-chunk overhead), and a final 100% report is emitted once every chunk
 * has completed.
 */
export const uploadBlob = async (
  uploadId: string,
  blob: Blob,
  dataKey: CryptoKey,
  options?: { concurrency?: number; onProgress?: (s: UploadStats) => void },
) => {
  const onProgress = options?.onProgress;
  const totalChunks = Math.ceil(blob.size / CHUNK_SIZE);
  // Estimated size on the wire: plaintext plus per-chunk encryption overhead.
  const totalBytes = blob.size + totalChunks * ENCRYPTION_OVERHEAD;

  const bytesPerChunk = new Array<number>(totalChunks).fill(0);
  const measureRate = createSpeedMeter(1500);

  const reportChunkProgress = (chunkIndex: number, loaded: number) => {
    bytesPerChunk[chunkIndex] = loaded;
    if (!onProgress) return;
    const uploadedBytes = bytesPerChunk.reduce((sum, bytes) => sum + bytes, 0);
    onProgress({
      progress: Math.min(1, uploadedBytes / totalBytes),
      rateBps: measureRate(uploadedBytes),
      uploadedBytes,
      totalBytes,
    });
  };

  const limit = pLimit(options?.concurrency ?? 4);
  const tasks: Promise<void>[] = [];
  for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
    const slice = blob.slice(chunkIndex * CHUNK_SIZE, (chunkIndex + 1) * CHUNK_SIZE);
    tasks.push(limit(() => uploadChunk(uploadId, chunkIndex, slice, dataKey, reportChunkProgress)));
  }
  await Promise.all(tasks);

  // Completion correction: report exactly 100% regardless of estimate drift.
  onProgress?.({
    progress: 1,
    rateBps: 0,
    uploadedBytes: totalBytes,
    totalBytes,
  });
};

View File

@@ -1,10 +1,9 @@
import { limitFunction } from "p-limit"; import { limitFunction } from "p-limit";
import { SvelteMap } from "svelte/reactivity"; import { SvelteMap } from "svelte/reactivity";
import { CHUNK_SIZE } from "$lib/constants"; import { CHUNK_SIZE } from "$lib/constants";
import { encodeToBase64, encryptChunk, digestMessage } from "$lib/modules/crypto";
import { deleteFileCache } from "$lib/modules/file";
import type { FileInfo } from "$lib/modules/filesystem"; import type { FileInfo } from "$lib/modules/filesystem";
import { Scheduler } from "$lib/modules/scheduler"; import { Scheduler } from "$lib/modules/scheduler";
import { uploadBlob } from "$lib/modules/upload";
import { requestFileDownload } from "$lib/services/file"; import { requestFileDownload } from "$lib/services/file";
import { trpc } from "$trpc/client"; import { trpc } from "$trpc/client";
@@ -45,81 +44,28 @@ export const clearMigrationStates = () => {
} }
}; };
const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = [];
let offset = 0;
while (offset < fileBuffer.byteLength) {
const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength);
const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey);
chunksEncrypted.push({
chunkEncrypted: chunkEncrypted,
chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)),
});
offset = nextOffset;
}
return chunksEncrypted;
};
const uploadMigrationChunks = limitFunction( const uploadMigrationChunks = limitFunction(
async ( async (state: MigrationState, fileId: number, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
state: MigrationState,
fileId: number,
chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[],
) => {
state.status = "uploading"; state.status = "uploading";
const { uploadId } = await trpc().upload.startMigrationUpload.mutate({ const { uploadId } = await trpc().upload.startMigrationUpload.mutate({
file: fileId, file: fileId,
chunks: chunksEncrypted.length, chunks: Math.ceil(fileBuffer.byteLength / CHUNK_SIZE),
}); });
const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0); await uploadBlob(uploadId, new Blob([fileBuffer]), dataKey, {
let uploadedBytes = 0; onProgress(s) {
const startTime = Date.now(); state.progress = s.progress;
state.rate = s.rateBps;
for (let i = 0; i < chunksEncrypted.length; i++) { },
const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!; });
const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, {
method: "POST",
headers: {
"Content-Type": "application/octet-stream",
"Content-Digest": `sha-256=:${chunkEncryptedHash}:`,
},
body: chunkEncrypted,
});
if (!response.ok) {
throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`);
}
uploadedBytes += chunkEncrypted.byteLength;
const elapsed = (Date.now() - startTime) / 1000;
const rate = uploadedBytes / elapsed;
state.progress = uploadedBytes / totalBytes;
state.rate = rate;
}
await trpc().upload.completeMigrationUpload.mutate({ uploadId }); await trpc().upload.completeMigrationUpload.mutate({ uploadId });
}, },
{ concurrency: 1 }, { concurrency: 1 },
); );
const encryptFile = limitFunction( export const requestFileMigration = async (fileInfo: FileInfo) => {
async (state: MigrationState, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => {
state.status = "encrypting";
const chunksEncrypted = await encryptChunks(fileBuffer, dataKey);
state.status = "upload-pending";
return chunksEncrypted;
},
{ concurrency: 4 },
);
export const requestFileMigration = async (fileInfo: FileInfo & { exists: true }) => {
let state = states.get(fileInfo.id); let state = states.get(fileInfo.id);
if (state) { if (state) {
if (state.status !== "error") return; if (state.status !== "error") return;
@@ -148,12 +94,8 @@ export const requestFileMigration = async (fileInfo: FileInfo & { exists: true }
}, },
async () => { async () => {
state.status = "encryption-pending"; state.status = "encryption-pending";
const chunksEncrypted = await encryptFile(state, fileBuffer!, dataKey);
await uploadMigrationChunks(state, fileInfo.id, chunksEncrypted); await uploadMigrationChunks(state, fileInfo.id, fileBuffer!, dataKey);
// Clear file cache since the file format has changed
await deleteFileCache(fileInfo.id);
state.status = "completed"; state.status = "completed";
}, },

View File

@@ -51,7 +51,7 @@
if (!files || files.length === 0) return; if (!files || files.length === 0) return;
for (const file of files) { for (const file of files) {
requestFileUpload(file, data.id, $hmacSecretStore?.get(1)!, $masterKeyStore?.get(1)!, () => { requestFileUpload(file, data.id, $masterKeyStore?.get(1)!, $hmacSecretStore?.get(1)!, () => {
return new Promise((resolve) => { return new Promise((resolve) => {
duplicatedFile = file; duplicatedFile = file;
resolveForDuplicateFileModal = resolve; resolveForDuplicateFileModal = resolve;

View File

@@ -81,11 +81,11 @@ export const requestDirectoryCreation = async (
export const requestFileUpload = async ( export const requestFileUpload = async (
file: File, file: File,
parentId: "root" | number, parentId: "root" | number,
hmacSecret: HmacSecret,
masterKey: MasterKey, masterKey: MasterKey,
hmacSecret: HmacSecret,
onDuplicate: () => Promise<boolean>, onDuplicate: () => Promise<boolean>,
) => { ) => {
const res = await uploadFile(file, parentId, hmacSecret, masterKey, onDuplicate); const res = await uploadFile(file, parentId, masterKey, hmacSecret, onDuplicate);
if (!res) return false; if (!res) return false;
if (res.fileBuffer) { if (res.fileBuffer) {