From 7a15fbb4651c2fee32fb4c1ee2c9d7f12323feb0 Mon Sep 17 00:00:00 2001
From: Elias Schneider
Date: Thu, 4 Apr 2024 20:55:45 +0200
Subject: [PATCH] fix: memory leak while uploading files by disabling base64
 encoding of chunks

---
 backend/src/file/file.controller.ts           |  5 +-
 .../src/components/upload/EditableUpload.tsx  | 53 ++++++++-----------
 frontend/src/pages/upload/index.tsx           | 49 ++++++++---------
 frontend/src/services/share.service.ts        |  6 +--
 4 files changed, 46 insertions(+), 67 deletions(-)

diff --git a/backend/src/file/file.controller.ts b/backend/src/file/file.controller.ts
index 5ca7651f..c11b70c2 100644
--- a/backend/src/file/file.controller.ts
+++ b/backend/src/file/file.controller.ts
@@ -27,17 +27,14 @@ export class FileController {
   @UseGuards(CreateShareGuard, ShareOwnerGuard)
   async create(
     @Query() query: any,
-    @Body() body: string,
     @Param("shareId") shareId: string,
   ) {

     const { id, name, chunkIndex, totalChunks } = query;

     // Data can be empty if the file is empty
-    const data = body.toString().split(",")[1] ?? "";
-
     return await this.fileService.create(
-      data,
+      body,
       { index: parseInt(chunkIndex), total: parseInt(totalChunks) },
       { id, name },
       shareId,
diff --git a/frontend/src/components/upload/EditableUpload.tsx b/frontend/src/components/upload/EditableUpload.tsx
index fd1c65eb..7846b661 100644
--- a/frontend/src/components/upload/EditableUpload.tsx
+++ b/frontend/src/components/upload/EditableUpload.tsx
@@ -1,22 +1,19 @@
 import { Button, Group } from "@mantine/core";
-import { useModals } from "@mantine/modals";
 import { cleanNotifications } from "@mantine/notifications";
 import { AxiosError } from "axios";
+import { useRouter } from "next/router";
 import pLimit from "p-limit";
-import { useEffect, useMemo, useState } from "react";
+import { useEffect, useMemo, useRef, useState } from "react";
 import { FormattedMessage } from "react-intl";
 import Dropzone from "../../components/upload/Dropzone";
 import FileList from "../../components/upload/FileList";
-import showCompletedUploadModal from "../../components/upload/modals/showCompletedUploadModal";
 import useConfig from "../../hooks/config.hook";
 import useTranslate from "../../hooks/useTranslate.hook";
 import shareService from "../../services/share.service";
 import { FileListItem, FileMetaData, FileUpload } from "../../types/File.type";
 import toast from "../../utils/toast.util";
-import { useRouter } from "next/router";

 const promiseLimit = pLimit(3);
-const chunkSize = 10 * 1024 * 1024; // 10MB
 let errorToastShown = false;

 const EditableUpload = ({
@@ -33,6 +30,8 @@
   const router = useRouter();
   const config = useConfig();

+  const chunkSize = useRef(parseInt(config.get("share.chunkSize")));
+
   const [existingFiles, setExistingFiles] =
     useState<Array<FileMetaData>>(savedFiles);
   const [uploadingFiles, setUploadingFiles] = useState<FileUpload[]>([]);
@@ -66,7 +65,7 @@
     const fileUploadPromises = files.map(async (file, fileIndex) =>
       // Limit the number of concurrent uploads to 3
       promiseLimit(async () => {
-        let fileId: string;
+        let fileId: string | undefined;

         const setFileProgress = (progress: number) => {
           setUploadingFiles((files) =>
@@ -81,38 +80,30 @@

         setFileProgress(1);

-        let chunks = Math.ceil(file.size / chunkSize);
+        let chunks = Math.ceil(file.size / chunkSize.current);
         // If the file is 0 bytes, we still need to upload 1 chunk
         if (chunks == 0) chunks++;

         for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
-          const from = chunkIndex * chunkSize;
-          const to = from + chunkSize;
+          const from = chunkIndex * chunkSize.current;
+          const to = from + chunkSize.current;
           const blob = file.slice(from, to);

           try {
-            await new Promise((resolve, reject) => {
-              const reader = new FileReader();
-              reader.onload = async (event) =>
-                await shareService
-                  .uploadFile(
-                    shareId,
-                    event,
-                    {
-                      id: fileId,
-                      name: file.name,
-                    },
-                    chunkIndex,
-                    chunks,
-                  )
-                  .then((response) => {
-                    fileId = response.id;
-                    resolve(response);
-                  })
-                  .catch(reject);
-
-              reader.readAsDataURL(blob);
-            });
+            await shareService
+              .uploadFile(
+                shareId,
+                blob,
+                {
+                  id: fileId,
+                  name: file.name,
+                },
+                chunkIndex,
+                chunks,
+              )
+              .then((response) => {
+                fileId = response.id;
+              });

             setFileProgress(((chunkIndex + 1) / chunks) * 100);
           } catch (e) {
diff --git a/frontend/src/pages/upload/index.tsx b/frontend/src/pages/upload/index.tsx
index 4ab894d1..9c56fd4f 100644
--- a/frontend/src/pages/upload/index.tsx
+++ b/frontend/src/pages/upload/index.tsx
@@ -3,7 +3,7 @@ import { useModals } from "@mantine/modals";
 import { cleanNotifications } from "@mantine/notifications";
 import { AxiosError } from "axios";
 import pLimit from "p-limit";
-import { useEffect, useState } from "react";
+import { useEffect, useRef, useState } from "react";
 import { FormattedMessage } from "react-intl";
 import Meta from "../../components/Meta";
 import Dropzone from "../../components/upload/Dropzone";
@@ -19,7 +19,6 @@ import { CreateShare, Share } from "../../types/share.type";
 import toast from "../../utils/toast.util";

 const promiseLimit = pLimit(3);
-const chunkSize = 10 * 1024 * 1024; // 10MB
 let errorToastShown = false;
 let createdShare: Share;

@@ -38,6 +37,8 @@
   const [files, setFiles] = useState<FileUpload[]>([]);
   const [isUploading, setisUploading] = useState(false);

+  const chunkSize = useRef(parseInt(config.get("share.chunkSize")));
+
   maxShareSize ??= parseInt(config.get("share.maxSize"));

   const uploadFiles = async (share: CreateShare, files: FileUpload[]) => {
@@ -54,7 +55,7 @@
     const fileUploadPromises = files.map(async (file, fileIndex) =>
       // Limit the number of concurrent uploads to 3
       promiseLimit(async () => {
-        let fileId: string;
+        let fileId;

         const setFileProgress = (progress: number) => {
           setFiles((files) =>
@@ -69,38 +70,30 @@

         setFileProgress(1);

-        let chunks = Math.ceil(file.size / chunkSize);
+        let chunks = Math.ceil(file.size / chunkSize.current);
         // If the file is 0 bytes, we still need to upload 1 chunk
         if (chunks == 0) chunks++;

         for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
-          const from = chunkIndex * chunkSize;
-          const to = from + chunkSize;
+          const from = chunkIndex * chunkSize.current;
+          const to = from + chunkSize.current;
           const blob = file.slice(from, to);

           try {
-            await new Promise((resolve, reject) => {
-              const reader = new FileReader();
-              reader.onload = async (event) =>
-                await shareService
-                  .uploadFile(
-                    createdShare.id,
-                    event,
-                    {
-                      id: fileId,
-                      name: file.name,
-                    },
-                    chunkIndex,
-                    chunks,
-                  )
-                  .then((response) => {
-                    fileId = response.id;
-                    resolve(response);
-                  })
-                  .catch(reject);
-
-              reader.readAsDataURL(blob);
-            });
+            await shareService
+              .uploadFile(
+                createdShare.id,
+                blob,
+                {
+                  id: fileId,
+                  name: file.name,
+                },
+                chunkIndex,
+                chunks,
+              )
+              .then((response) => {
+                fileId = response.id;
+              });

             setFileProgress(((chunkIndex + 1) / chunks) * 100);
           } catch (e) {
diff --git a/frontend/src/services/share.service.ts b/frontend/src/services/share.service.ts
index e7ea73a5..8732dca9 100644
--- a/frontend/src/services/share.service.ts
+++ b/frontend/src/services/share.service.ts
@@ -77,7 +77,7 @@ const removeFile = async (shareId: string, fileId: string) => {

 const uploadFile = async (
   shareId: string,
-  readerEvent: ProgressEvent<FileReader>,
+  chunk: Blob,
   file: {
     id?: string;
     name: string;
@@ -85,10 +85,8 @@
   chunkIndex: number,
   totalChunks: number,
 ): Promise<FileMetaData> => {
-  const data = readerEvent.target!.result;
-
   return (
-    await api.post(`shares/${shareId}/files`, data, {
+    await api.post(`shares/${shareId}/files`, chunk, {
       headers: { "Content-Type": "application/octet-stream" },
       params: {
         id: file.id,
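
The shape of the fix, reduced to a standalone sketch: before, every chunk took a detour through FileReader.readAsDataURL(), which materializes the chunk as a base64 data-URL string (about a third larger than the raw bytes) before it is posted; the commit message attributes the memory leak to this base64 round-trip. After, the Blob returned by file.slice() is handed to the HTTP client as-is and sent as application/octet-stream. The sketch below illustrates the new path using fetch; the uploadChunk name and the endpoint parameter are placeholders for this illustration, not identifiers from the patch:

// Hypothetical helper, for illustration only: uploads one chunk of `file`
// without base64-encoding it. `endpoint` stands in for the patch's
// `shares/${shareId}/files` route.
async function uploadChunk(
  endpoint: string,
  file: File,
  chunkIndex: number,
  chunkSize: number,
): Promise<Response> {
  const from = chunkIndex * chunkSize;
  // Blob.slice() returns a lightweight view into the file; no bytes are
  // copied into JavaScript memory at this point.
  const blob = file.slice(from, from + chunkSize);
  // Posting the Blob directly lets the browser stream the raw bytes: no
  // FileReader, no base64 string, and no "data:...;base64," prefix for the
  // server to split off.
  return fetch(endpoint, {
    method: "POST",
    headers: { "Content-Type": "application/octet-stream" },
    body: blob,
  });
}

The controller change is the server-side half of the same idea: the request body now arrives as raw bytes instead of a data-URL string, so the base64 extraction (body.toString().split(",")[1]) is dropped and the chunk is forwarded to fileService.create unchanged. This presumes the backend parses application/octet-stream bodies into a raw Buffer (e.g. via express.raw or NestJS raw-body support), which is not shown in this patch.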