From b636d75ea08554ec6b413043b8da86e11c3c2f32 Mon Sep 17 00:00:00 2001 From: static Date: Mon, 12 Jan 2026 12:02:20 +0900 Subject: [PATCH] =?UTF-8?q?=ED=8C=8C=EC=9D=BC=20=EC=97=85=EB=A1=9C?= =?UTF-8?q?=EB=93=9C=20=EB=A1=9C=EC=A7=81=20=EB=A6=AC=ED=8C=A9=ED=86=A0?= =?UTF-8?q?=EB=A7=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/lib/modules/file/upload.svelte.ts | 367 ++++-------------- src/lib/modules/thumbnail.ts | 35 +- src/lib/modules/upload.ts | 103 +++++ .../settings/migration/service.svelte.ts | 80 +--- .../(main)/directory/[[id]]/+page.svelte | 2 +- .../(main)/directory/[[id]]/service.svelte.ts | 4 +- 6 files changed, 193 insertions(+), 398 deletions(-) create mode 100644 src/lib/modules/upload.ts diff --git a/src/lib/modules/file/upload.svelte.ts b/src/lib/modules/file/upload.svelte.ts index 4bea638..9e9f784 100644 --- a/src/lib/modules/file/upload.svelte.ts +++ b/src/lib/modules/file/upload.svelte.ts @@ -1,21 +1,18 @@ import ExifReader from "exifreader"; -import pLimit, { limitFunction } from "p-limit"; +import { limitFunction } from "p-limit"; import { CHUNK_SIZE } from "$lib/constants"; import { encodeToBase64, generateDataKey, wrapDataKey, - encryptData, encryptString, - encryptChunk, - digestMessage, createHmacStream, } from "$lib/modules/crypto"; import { Scheduler } from "$lib/modules/scheduler"; -import { generateThumbnail, generateThumbnailFromFile } from "$lib/modules/thumbnail"; +import { generateThumbnail } from "$lib/modules/thumbnail"; +import { uploadBlob } from "$lib/modules/upload"; import type { MasterKey, HmacSecret } from "$lib/stores"; import { trpc } from "$trpc/client"; -import type { RouterInputs } from "$trpc/router.server"; export interface FileUploadState { name: string; @@ -114,295 +111,83 @@ const extractExifDateTime = (fileBuffer: ArrayBuffer) => { return new Date(utcDate - offsetMs); }; -const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { - const 
chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = []; - let offset = 0; - - while (offset < fileBuffer.byteLength) { - const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength); - const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey); - chunksEncrypted.push({ - chunkEncrypted: chunkEncrypted, - chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)), - }); - offset = nextOffset; - } - - return chunksEncrypted; -}; - -const encryptImageFile = limitFunction( - async (state: FileUploadState, file: File, masterKey: MasterKey) => { - state.status = "encrypting"; - - const fileBuffer = await file.arrayBuffer(); - const createdAt = extractExifDateTime(fileBuffer); - - const { dataKey, dataKeyVersion } = await generateDataKey(); - const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); - const chunksEncrypted = await encryptChunks(fileBuffer, dataKey); - - const nameEncrypted = await encryptString(file.name, dataKey); - const createdAtEncrypted = - createdAt && (await encryptString(createdAt.getTime().toString(), dataKey)); - const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); - - const thumbnail = await generateThumbnail(fileBuffer, getFileType(file)); - const thumbnailBuffer = await thumbnail?.arrayBuffer(); - const thumbnailEncrypted = thumbnailBuffer && (await encryptData(thumbnailBuffer, dataKey)); - - state.status = "upload-pending"; - - return { - dataKeyWrapped, - dataKeyVersion, - chunksEncrypted, - nameEncrypted, - createdAtEncrypted, - lastModifiedAtEncrypted, - thumbnail: thumbnailEncrypted && { plaintext: thumbnailBuffer, ...thumbnailEncrypted }, - }; - }, - { concurrency: 4 }, -); - -const uploadThumbnail = async ( - fileId: number, - thumbnailEncrypted: { ciphertext: ArrayBuffer; iv: ArrayBuffer }, - dataKeyVersion: Date, -) => { - const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ - file: 
fileId, - dekVersion: dataKeyVersion, - }); - - const ivAndCiphertext = new Uint8Array( - thumbnailEncrypted.iv.byteLength + thumbnailEncrypted.ciphertext.byteLength, - ); - ivAndCiphertext.set(new Uint8Array(thumbnailEncrypted.iv), 0); - ivAndCiphertext.set( - new Uint8Array(thumbnailEncrypted.ciphertext), - thumbnailEncrypted.iv.byteLength, - ); - - const chunkHash = encodeToBase64(await digestMessage(ivAndCiphertext)); - - const response = await fetch(`/api/upload/${uploadId}/chunks/0`, { - method: "POST", - headers: { - "Content-Type": "application/octet-stream", - "Content-Digest": `sha-256=:${chunkHash}:`, - }, - body: ivAndCiphertext, - }); - - if (!response.ok) { - throw new Error(`Thumbnail upload failed: ${response.status} ${response.statusText}`); - } - - await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); -}; - -const requestImageFileUpload = limitFunction( - async ( - state: FileUploadState, - metadata: RouterInputs["upload"]["startFileUpload"], - chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[], - fileSigned: string | undefined, - thumbnailData: { ciphertext: ArrayBuffer; iv: ArrayBuffer; plaintext: ArrayBuffer } | null, - dataKeyVersion: Date, - ) => { - state.status = "uploading"; - - const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); - - const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0); - let uploadedBytes = 0; - const startTime = Date.now(); - - for (let i = 0; i < chunksEncrypted.length; i++) { - const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!; - - const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, { - method: "POST", - headers: { - "Content-Type": "application/octet-stream", - "Content-Digest": `sha-256=:${chunkEncryptedHash}:`, - }, - body: chunkEncrypted, - }); - - if (!response.ok) { - throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`); - } - - uploadedBytes += 
chunkEncrypted.byteLength; - - const elapsed = (Date.now() - startTime) / 1000; - const rate = uploadedBytes / elapsed; - const remaining = totalBytes - uploadedBytes; - const estimated = rate > 0 ? remaining / rate : undefined; - - state.progress = uploadedBytes / totalBytes; - state.rate = rate; - state.estimated = estimated; - } - - const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ - uploadId, - contentHmac: fileSigned, - }); - - if (thumbnailData) { - try { - await uploadThumbnail(fileId, thumbnailData, dataKeyVersion); - } catch (e) { - // TODO: Error handling for thumbnail upload - console.error(e); - } - } - - state.status = "uploaded"; - - return { fileId, thumbnailBuffer: thumbnailData?.plaintext }; - }, - { concurrency: 1 }, -); - -const requestFileUpload = async ( +const requestFileUpload2 = async ( state: FileUploadState, - file: File, + file: Blob, + fileSigned: string, + fileMetadata: { + parentId: "root" | number; + name: string; + createdAt?: Date; + lastModifiedAt: Date; + }, masterKey: MasterKey, hmacSecret: HmacSecret, - fileSigned: string, - parentId: DirectoryId, ) => { - state.status = "uploading"; + state.status = "encrypting"; - const fileType = getFileType(file); const { dataKey, dataKeyVersion } = await generateDataKey(); const dataKeyWrapped = await wrapDataKey(dataKey, masterKey.key); - const nameEncrypted = await encryptString(file.name, dataKey); - const lastModifiedAtEncrypted = await encryptString(file.lastModified.toString(), dataKey); + const [nameEncrypted, createdAtEncrypted, lastModifiedAtEncrypted, thumbnailBuffer] = + await Promise.all([ + encryptString(fileMetadata.name, dataKey), + fileMetadata.createdAt && encryptString(fileMetadata.createdAt.getTime().toString(), dataKey), + encryptString(fileMetadata.lastModifiedAt.getTime().toString(), dataKey), + generateThumbnail(file).then((blob) => blob?.arrayBuffer()), + ]); - const totalChunks = Math.ceil(file.size / CHUNK_SIZE); - const metadata = { - 
chunks: totalChunks, - parent: parentId, + const { uploadId } = await trpc().upload.startFileUpload.mutate({ + chunks: Math.ceil(file.size / CHUNK_SIZE), + parent: fileMetadata.parentId, mekVersion: masterKey.version, dek: dataKeyWrapped, dekVersion: dataKeyVersion, hskVersion: hmacSecret.version, - contentType: fileType, + contentType: file.type, name: nameEncrypted.ciphertext, nameIv: nameEncrypted.iv, + createdAt: createdAtEncrypted?.ciphertext, + createdAtIv: createdAtEncrypted?.iv, lastModifiedAt: lastModifiedAtEncrypted.ciphertext, lastModifiedAtIv: lastModifiedAtEncrypted.iv, - }; + }); - const { uploadId } = await trpc().upload.startFileUpload.mutate(metadata); + state.status = "uploading"; - const reader = file.stream().getReader(); - const limit = pLimit(4); - let buffer = new Uint8Array(0); - let chunkIndex = 0; - const uploadPromises: Promise[] = []; - - const totalBytes = file.size; - let uploadedBytes = 0; - const startTime = Date.now(); - - const uploadChunk = async ( - index: number, - encryptedChunk: ArrayBuffer, - chunkHash: string, - originalChunkSize: number, - ) => { - const response = await fetch(`/api/upload/${uploadId}/chunks/${index}`, { - method: "POST", - headers: { - "Content-Type": "application/octet-stream", - "Content-Digest": `sha-256=:${chunkHash}:`, - }, - body: encryptedChunk, - }); - - if (!response.ok) { - throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`); - } - - uploadedBytes += originalChunkSize; - const elapsed = (Date.now() - startTime) / 1000; - const rate = uploadedBytes / elapsed; - const remaining = totalBytes - uploadedBytes; - const estimated = rate > 0 ? 
remaining / rate : undefined; - - state.progress = uploadedBytes / totalBytes; - state.rate = rate; - state.estimated = estimated; - }; - - while (true) { - const { done, value } = await reader.read(); - if (done && buffer.length === 0) break; - - if (value) { - const newBuffer = new Uint8Array(buffer.length + value.length); - newBuffer.set(buffer); - newBuffer.set(value, buffer.length); - buffer = newBuffer; - } - - while (buffer.length >= CHUNK_SIZE || (done && buffer.length > 0)) { - const chunkSize = Math.min(CHUNK_SIZE, buffer.length); - const chunk = buffer.slice(0, chunkSize); - buffer = buffer.slice(chunkSize); - - const encryptedChunk = await encryptChunk(chunk.buffer.slice(0, chunk.byteLength), dataKey); - const chunkHash = encodeToBase64(await digestMessage(encryptedChunk)); - const currentIndex = chunkIndex++; - - uploadPromises.push( - limit(() => uploadChunk(currentIndex, encryptedChunk, chunkHash, chunkSize)), - ); - } - - if (done) break; - } - - await Promise.all(uploadPromises); + await uploadBlob(uploadId, file, dataKey, { + onProgress(s) { + state.progress = s.progress; + state.rate = s.rateBps; + }, + }); const { file: fileId } = await trpc().upload.completeFileUpload.mutate({ uploadId, contentHmac: fileSigned, }); - if (fileType.startsWith("video/")) { - try { - const thumbnail = await generateThumbnailFromFile(file); - if (thumbnail) { - const thumbnailBuffer = await thumbnail.arrayBuffer(); - const thumbnailEncrypted = await encryptData(thumbnailBuffer, dataKey); + if (thumbnailBuffer) { + const { uploadId } = await trpc().upload.startFileThumbnailUpload.mutate({ + file: fileId, + dekVersion: dataKeyVersion, + }); - await uploadThumbnail(fileId, thumbnailEncrypted, dataKeyVersion); - } - } catch (e) { - // Thumbnail upload failure is not critical - console.error(e); - } + await uploadBlob(uploadId, new Blob([thumbnailBuffer]), dataKey); + + await trpc().upload.completeFileThumbnailUpload.mutate({ uploadId }); } state.status = "uploaded"; - 
return { fileId }; + return { fileId, thumbnailBuffer }; }; export const uploadFile = async ( file: File, parentId: "root" | number, - hmacSecret: HmacSecret, masterKey: MasterKey, + hmacSecret: HmacSecret, onDuplicate: () => Promise, ) => { uploadingFiles.push({ @@ -426,51 +211,37 @@ export const uploadFile = async ( const fileType = getFileType(file); if (fileType.startsWith("image/")) { const fileBuffer = await file.arrayBuffer(); - const { - dataKeyWrapped, - dataKeyVersion, - chunksEncrypted, - nameEncrypted, - createdAtEncrypted, - lastModifiedAtEncrypted, - thumbnail, - } = await encryptImageFile(state, file, masterKey); + const fileCreatedAt = extractExifDateTime(fileBuffer); - const metadata = { - chunks: chunksEncrypted.length, - parent: parentId, - mekVersion: masterKey.version, - dek: dataKeyWrapped, - dekVersion: dataKeyVersion, - hskVersion: hmacSecret.version, - contentType: fileType, - name: nameEncrypted.ciphertext, - nameIv: nameEncrypted.iv, - createdAt: createdAtEncrypted?.ciphertext, - createdAtIv: createdAtEncrypted?.iv, - lastModifiedAt: lastModifiedAtEncrypted.ciphertext, - lastModifiedAtIv: lastModifiedAtEncrypted.iv, - }; - - const { fileId, thumbnailBuffer } = await requestImageFileUpload( + const { fileId, thumbnailBuffer } = await requestFileUpload2( state, - metadata, - chunksEncrypted, + new Blob([fileBuffer], { type: fileType }), fileSigned, - thumbnail ?? 
null, - dataKeyVersion, - ); - return { fileId, fileBuffer, thumbnailBuffer }; - } else { - const { fileId } = await requestFileUpload( - state, - file, + { + parentId, + name: file.name, + createdAt: fileCreatedAt, + lastModifiedAt: new Date(file.lastModified), + }, masterKey, hmacSecret, - fileSigned, - parentId, ); - return { fileId }; + + return { fileId, fileBuffer, thumbnailBuffer }; + } else { + const { fileId, thumbnailBuffer } = await requestFileUpload2( + state, + file, + fileSigned, + { + parentId, + name: file.name, + lastModifiedAt: new Date(file.lastModified), + }, + masterKey, + hmacSecret, + ); + return { fileId, thumbnailBuffer }; } } catch (e) { state.status = "error"; diff --git a/src/lib/modules/thumbnail.ts b/src/lib/modules/thumbnail.ts index 75b0168..18b0745 100644 --- a/src/lib/modules/thumbnail.ts +++ b/src/lib/modules/thumbnail.ts @@ -52,7 +52,7 @@ const generateImageThumbnail = (imageUrl: string) => { .catch(reject); }; image.onerror = reject; image.src = imageUrl; }); }; @@ -85,31 +84,27 @@ const generateVideoThumbnail = (videoUrl: string, time = 0) => { }); }; -export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: string) => { +export const generateThumbnail = async (blob: Blob) => { let url; try { - if (fileType.startsWith("image/")) { - const fileBlob = new Blob([fileBuffer], { type: fileType }); - url = URL.createObjectURL(fileBlob); - + if (blob.type.startsWith("image/")) { + url = URL.createObjectURL(blob); try { return await generateImageThumbnail(url); } catch { URL.revokeObjectURL(url); url = undefined; - if (fileType === "image/heic") { + if (blob.type === "image/heic") { const { default: heic2any } = await import("heic2any"); - url = URL.createObjectURL( - (await heic2any({ blob: fileBlob, toType: "image/png" })) as Blob, - ); + url = URL.createObjectURL((await heic2any({ blob, toType: "image/png" })) as Blob); return await generateImageThumbnail(url); } else { return null; } } - } else if
(fileType.startsWith("video/")) { - url = URL.createObjectURL(new Blob([fileBuffer], { type: fileType })); + } else if (blob.type.startsWith("video/")) { + url = URL.createObjectURL(blob); return await generateVideoThumbnail(url); } return null; @@ -122,22 +117,6 @@ export const generateThumbnail = async (fileBuffer: ArrayBuffer, fileType: strin } }; -export const generateThumbnailFromFile = async (file: File) => { - if (!file.type.startsWith("video/")) return null; - - let url; - try { - url = URL.createObjectURL(file); - return await generateVideoThumbnail(url); - } catch { - return null; - } finally { - if (url) { - URL.revokeObjectURL(url); - } - } -}; - export const getThumbnailUrl = (thumbnailBuffer: ArrayBuffer) => { return `data:image/webp;base64,${encodeToBase64(thumbnailBuffer)}`; }; diff --git a/src/lib/modules/upload.ts b/src/lib/modules/upload.ts new file mode 100644 index 0000000..231b54b --- /dev/null +++ b/src/lib/modules/upload.ts @@ -0,0 +1,103 @@ +import axios from "axios"; +import pLimit from "p-limit"; +import { ENCRYPTION_OVERHEAD, CHUNK_SIZE } from "$lib/constants"; +import { encryptChunk, digestMessage, encodeToBase64 } from "$lib/modules/crypto"; + +type UploadStats = { + progress: number; // 0..1 (암호화 후 기준) + rateBps: number; // bytes/sec + uploadedBytes: number; + totalBytes: number; +}; + +function createSpeedMeter(windowMs = 1500) { + const samples: Array<{ t: number; b: number }> = []; + return (bytesNow: number) => { + const now = performance.now(); + samples.push({ t: now, b: bytesNow }); + const cutoff = now - windowMs; + while (samples.length > 2 && samples[0]!.t < cutoff) samples.shift(); + + const first = samples[0]!; + const dt = now - first.t; + const db = bytesNow - first.b; + return dt > 0 ? 
(db / dt) * 1000 : 0; + }; +} + +const uploadChunk = async ( + uploadId: string, + chunkIndex: number, + chunk: Blob, + dataKey: CryptoKey, + onChunkProgress: (chunkIndex: number, loaded: number) => void, +) => { + const chunkEncrypted = await encryptChunk(await chunk.arrayBuffer(), dataKey); + const chunkEncryptedHash = encodeToBase64(await digestMessage(chunkEncrypted)); + + await axios.post(`/api/upload/${uploadId}/chunks/${chunkIndex}`, chunkEncrypted, { + headers: { + "Content-Type": "application/octet-stream", + "Content-Digest": `sha-256=:${chunkEncryptedHash}:`, + }, + onUploadProgress(e) { + onChunkProgress(chunkIndex, e.loaded ?? 0); + }, + }); + + onChunkProgress(chunkIndex, chunkEncrypted.byteLength); +}; + +export const uploadBlob = async ( + uploadId: string, + blob: Blob, + dataKey: CryptoKey, + options?: { concurrency?: number; onProgress?: (s: UploadStats) => void }, +) => { + const onProgress = options?.onProgress; + + const totalChunks = Math.ceil(blob.size / CHUNK_SIZE); + const totalBytes = blob.size + totalChunks * ENCRYPTION_OVERHEAD; + + const uploadedByChunk = new Array(totalChunks).fill(0); + const speedMeter = createSpeedMeter(1500); + + const emit = () => { + if (!onProgress) return; + + const uploadedBytes = uploadedByChunk.reduce((a, b) => a + b, 0); + const rateBps = speedMeter(uploadedBytes); + const progress = Math.min(1, uploadedBytes / totalBytes); + + onProgress({ progress, rateBps, uploadedBytes, totalBytes }); + }; + + const onChunkProgress = (idx: number, loaded: number) => { + uploadedByChunk[idx] = loaded; + emit(); + }; + + const limit = pLimit(options?.concurrency ?? 
4); + + await Promise.all( + Array.from({ length: totalChunks }, (_, chunkIndex) => + limit(() => + uploadChunk( + uploadId, + chunkIndex, + blob.slice(chunkIndex * CHUNK_SIZE, (chunkIndex + 1) * CHUNK_SIZE), + dataKey, + onChunkProgress, + ), + ), + ), + ); + + // 완료 보정 + onProgress?.({ + progress: 1, + rateBps: 0, + uploadedBytes: totalBytes, + totalBytes, + }); +}; diff --git a/src/routes/(fullscreen)/settings/migration/service.svelte.ts b/src/routes/(fullscreen)/settings/migration/service.svelte.ts index d59f46e..9d08db2 100644 --- a/src/routes/(fullscreen)/settings/migration/service.svelte.ts +++ b/src/routes/(fullscreen)/settings/migration/service.svelte.ts @@ -1,10 +1,9 @@ import { limitFunction } from "p-limit"; import { SvelteMap } from "svelte/reactivity"; import { CHUNK_SIZE } from "$lib/constants"; -import { encodeToBase64, encryptChunk, digestMessage } from "$lib/modules/crypto"; -import { deleteFileCache } from "$lib/modules/file"; import type { FileInfo } from "$lib/modules/filesystem"; import { Scheduler } from "$lib/modules/scheduler"; +import { uploadBlob } from "$lib/modules/upload"; import { requestFileDownload } from "$lib/services/file"; import { trpc } from "$trpc/client"; @@ -45,81 +44,28 @@ export const clearMigrationStates = () => { } }; -const encryptChunks = async (fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { - const chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[] = []; - let offset = 0; - - while (offset < fileBuffer.byteLength) { - const nextOffset = Math.min(offset + CHUNK_SIZE, fileBuffer.byteLength); - const chunkEncrypted = await encryptChunk(fileBuffer.slice(offset, nextOffset), dataKey); - chunksEncrypted.push({ - chunkEncrypted: chunkEncrypted, - chunkEncryptedHash: encodeToBase64(await digestMessage(chunkEncrypted)), - }); - offset = nextOffset; - } - - return chunksEncrypted; -}; - const uploadMigrationChunks = limitFunction( - async ( - state: MigrationState, - fileId: number, - 
chunksEncrypted: { chunkEncrypted: ArrayBuffer; chunkEncryptedHash: string }[], - ) => { + async (state: MigrationState, fileId: number, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { state.status = "uploading"; const { uploadId } = await trpc().upload.startMigrationUpload.mutate({ file: fileId, - chunks: chunksEncrypted.length, + chunks: Math.ceil(fileBuffer.byteLength / CHUNK_SIZE), }); - const totalBytes = chunksEncrypted.reduce((sum, c) => sum + c.chunkEncrypted.byteLength, 0); - let uploadedBytes = 0; - const startTime = Date.now(); - - for (let i = 0; i < chunksEncrypted.length; i++) { - const { chunkEncrypted, chunkEncryptedHash } = chunksEncrypted[i]!; - - const response = await fetch(`/api/upload/${uploadId}/chunks/${i}`, { - method: "POST", - headers: { - "Content-Type": "application/octet-stream", - "Content-Digest": `sha-256=:${chunkEncryptedHash}:`, - }, - body: chunkEncrypted, - }); - - if (!response.ok) { - throw new Error(`Chunk upload failed: ${response.status} ${response.statusText}`); - } - - uploadedBytes += chunkEncrypted.byteLength; - - const elapsed = (Date.now() - startTime) / 1000; - const rate = uploadedBytes / elapsed; - - state.progress = uploadedBytes / totalBytes; - state.rate = rate; - } + await uploadBlob(uploadId, new Blob([fileBuffer]), dataKey, { + onProgress(s) { + state.progress = s.progress; + state.rate = s.rateBps; + }, + }); await trpc().upload.completeMigrationUpload.mutate({ uploadId }); }, { concurrency: 1 }, ); -const encryptFile = limitFunction( - async (state: MigrationState, fileBuffer: ArrayBuffer, dataKey: CryptoKey) => { - state.status = "encrypting"; - const chunksEncrypted = await encryptChunks(fileBuffer, dataKey); - state.status = "upload-pending"; - return chunksEncrypted; - }, - { concurrency: 4 }, -); - -export const requestFileMigration = async (fileInfo: FileInfo & { exists: true }) => { +export const requestFileMigration = async (fileInfo: FileInfo) => { let state = states.get(fileInfo.id); if (state) 
{ if (state.status !== "error") return; @@ -148,12 +94,8 @@ export const requestFileMigration = async (fileInfo: FileInfo & { exists: true } }, async () => { state.status = "encryption-pending"; - const chunksEncrypted = await encryptFile(state, fileBuffer!, dataKey); - await uploadMigrationChunks(state, fileInfo.id, chunksEncrypted); - - // Clear file cache since the file format has changed - await deleteFileCache(fileInfo.id); + await uploadMigrationChunks(state, fileInfo.id, fileBuffer!, dataKey); state.status = "completed"; }, diff --git a/src/routes/(main)/directory/[[id]]/+page.svelte b/src/routes/(main)/directory/[[id]]/+page.svelte index a0a4d53..f500f34 100644 --- a/src/routes/(main)/directory/[[id]]/+page.svelte +++ b/src/routes/(main)/directory/[[id]]/+page.svelte @@ -51,7 +51,7 @@ if (!files || files.length === 0) return; for (const file of files) { - requestFileUpload(file, data.id, $hmacSecretStore?.get(1)!, $masterKeyStore?.get(1)!, () => { + requestFileUpload(file, data.id, $masterKeyStore?.get(1)!, $hmacSecretStore?.get(1)!, () => { return new Promise((resolve) => { duplicatedFile = file; resolveForDuplicateFileModal = resolve; diff --git a/src/routes/(main)/directory/[[id]]/service.svelte.ts b/src/routes/(main)/directory/[[id]]/service.svelte.ts index ccd5b14..be6392c 100644 --- a/src/routes/(main)/directory/[[id]]/service.svelte.ts +++ b/src/routes/(main)/directory/[[id]]/service.svelte.ts @@ -81,11 +81,11 @@ export const requestDirectoryCreation = async ( export const requestFileUpload = async ( file: File, parentId: "root" | number, - hmacSecret: HmacSecret, masterKey: MasterKey, + hmacSecret: HmacSecret, onDuplicate: () => Promise, ) => { - const res = await uploadFile(file, parentId, hmacSecret, masterKey, onDuplicate); + const res = await uploadFile(file, parentId, masterKey, hmacSecret, onDuplicate); if (!res) return false; if (res.fileBuffer) {