Commit

fix: memory leak while uploading files by disabling base64 encoding of chunks

stonith404 committed Apr 4, 2024
1 parent 0bfbaea commit 7a15fbb
Showing 4 changed files with 46 additions and 67 deletions.
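
The gist of the fix: before this commit, every chunk was run through `FileReader.readAsDataURL`, so each slice was duplicated in memory as a base64 data-URL string (roughly a third larger than the raw bytes) before being posted. After the commit, the raw `Blob` slice is sent as `application/octet-stream`. A minimal sketch of the two approaches, using `fetch` instead of the project's axios wrapper (illustrative only, not the commit's exact code):

```ts
// Before: each chunk was base64-encoded through a FileReader, so a 10 MB slice
// briefly existed twice in memory (the Blob plus a ~13.3 MB data-URL string).
function uploadChunkBase64(url: string, blob: Blob): Promise<Response> {
  return new Promise<Response>((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => {
      const dataUrl = reader.result as string; // "data:...;base64,AAAA..."
      const payload = dataUrl.split(",")[1] ?? ""; // keep only the base64 part
      fetch(url, { method: "POST", body: payload }).then(resolve, reject);
    };
    reader.onerror = reject;
    reader.readAsDataURL(blob);
  });
}

// After: the raw Blob slice is posted as an octet-stream; the browser sends the
// bytes directly without materialising a base64 copy.
function uploadChunkRaw(url: string, blob: Blob): Promise<Response> {
  return fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/octet-stream" },
    body: blob,
  });
}
```
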
5 changes: 1 addition & 4 deletions backend/src/file/file.controller.ts
@@ -27,17 +27,14 @@ export class FileController {
@UseGuards(CreateShareGuard, ShareOwnerGuard)
async create(
@Query() query: any,

@Body() body: string,
@Param("shareId") shareId: string,
) {
const { id, name, chunkIndex, totalChunks } = query;

// Data can be empty if the file is empty
const data = body.toString().split(",")[1] ?? "";

return await this.fileService.create(
data,
body,
{ index: parseInt(chunkIndex), total: parseInt(totalChunks) },
{ id, name },
shareId,
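
On the backend, the controller now hands the request body straight to `fileService.create` instead of splitting a base64 data URL on the comma. The commit does not show how the raw `application/octet-stream` body is parsed; one way such a body could reach `@Body()` in a NestJS app is Express's raw body parser. This is purely an assumption, not necessarily Pingvin Share's actual setup:

```ts
// main.ts: hypothetical bootstrap; the real Pingvin Share setup may differ.
import { NestFactory } from "@nestjs/core";
import * as express from "express";
import { AppModule } from "./app.module";

async function bootstrap() {
  const app = await NestFactory.create(AppModule);

  // Expose raw application/octet-stream bodies to the controller as a Buffer,
  // so no base64 round-trip is needed for file chunks.
  // The 25mb limit is chosen arbitrarily for this sketch.
  app.use(express.raw({ type: "application/octet-stream", limit: "25mb" }));

  await app.listen(3000);
}

bootstrap();
```
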
53 changes: 22 additions & 31 deletions frontend/src/components/upload/EditableUpload.tsx
@@ -1,22 +1,19 @@
import { Button, Group } from "@mantine/core";
import { useModals } from "@mantine/modals";
import { cleanNotifications } from "@mantine/notifications";
import { AxiosError } from "axios";
import { useRouter } from "next/router";
import pLimit from "p-limit";
import { useEffect, useMemo, useState } from "react";
import { useEffect, useMemo, useRef, useState } from "react";
import { FormattedMessage } from "react-intl";
import Dropzone from "../../components/upload/Dropzone";
import FileList from "../../components/upload/FileList";
import showCompletedUploadModal from "../../components/upload/modals/showCompletedUploadModal";
import useConfig from "../../hooks/config.hook";
import useTranslate from "../../hooks/useTranslate.hook";
import shareService from "../../services/share.service";
import { FileListItem, FileMetaData, FileUpload } from "../../types/File.type";
import toast from "../../utils/toast.util";
import { useRouter } from "next/router";

const promiseLimit = pLimit(3);
const chunkSize = 10 * 1024 * 1024; // 10MB
let errorToastShown = false;

const EditableUpload = ({
@@ -33,6 +30,8 @@ const EditableUpload = ({
const router = useRouter();
const config = useConfig();

const chunkSize = useRef(parseInt(config.get("share.chunkSize")));

const [existingFiles, setExistingFiles] =
useState<Array<FileMetaData & { deleted?: boolean }>>(savedFiles);
const [uploadingFiles, setUploadingFiles] = useState<FileUpload[]>([]);
@@ -66,7 +65,7 @@
const fileUploadPromises = files.map(async (file, fileIndex) =>
// Limit the number of concurrent uploads to 3
promiseLimit(async () => {
let fileId: string;
let fileId: string | undefined;

const setFileProgress = (progress: number) => {
setUploadingFiles((files) =>
@@ -81,38 +80,30 @@

setFileProgress(1);

let chunks = Math.ceil(file.size / chunkSize);
let chunks = Math.ceil(file.size / chunkSize.current);

// If the file is 0 bytes, we still need to upload 1 chunk
if (chunks == 0) chunks++;

for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
const from = chunkIndex * chunkSize;
const to = from + chunkSize;
const from = chunkIndex * chunkSize.current;
const to = from + chunkSize.current;
const blob = file.slice(from, to);
try {
await new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = async (event) =>
await shareService
.uploadFile(
shareId,
event,
{
id: fileId,
name: file.name,
},
chunkIndex,
chunks,
)
.then((response) => {
fileId = response.id;
resolve(response);
})
.catch(reject);

reader.readAsDataURL(blob);
});
await shareService
.uploadFile(
shareId,
blob,
{
id: fileId,
name: file.name,
},
chunkIndex,
chunks,
)
.then((response) => {
fileId = response.id;
});

setFileProgress(((chunkIndex + 1) / chunks) * 100);
} catch (e) {
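
Both frontend components now take the chunk size from the server configuration (`share.chunkSize`) instead of a hard-coded 10 MB constant, and keep it in a `useRef` so it stays stable across re-renders. A small illustrative hook along those lines; the config key comes from the diff, but the 10 MB fallback is an assumption, not part of the commit:

```ts
import { useRef } from "react";

// Illustrative only: read the configured chunk size once and keep it stable
// across re-renders. The 10 MB fallback is an assumption, not part of the commit.
function useChunkSize(config: { get: (key: string) => string }): number {
  const chunkSize = useRef(
    Number.parseInt(config.get("share.chunkSize"), 10) || 10 * 1024 * 1024,
  );
  return chunkSize.current;
}
```
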
49 changes: 21 additions & 28 deletions frontend/src/pages/upload/index.tsx
@@ -3,7 +3,7 @@ import { useModals } from "@mantine/modals";
import { cleanNotifications } from "@mantine/notifications";
import { AxiosError } from "axios";
import pLimit from "p-limit";
import { useEffect, useState } from "react";
import { useEffect, useRef, useState } from "react";
import { FormattedMessage } from "react-intl";
import Meta from "../../components/Meta";
import Dropzone from "../../components/upload/Dropzone";
@@ -19,7 +19,6 @@ import { CreateShare, Share } from "../../types/share.type";
import toast from "../../utils/toast.util";

const promiseLimit = pLimit(3);
const chunkSize = 10 * 1024 * 1024; // 10MB
let errorToastShown = false;
let createdShare: Share;

@@ -38,6 +37,8 @@ const Upload = ({
const [files, setFiles] = useState<FileUpload[]>([]);
const [isUploading, setisUploading] = useState(false);

const chunkSize = useRef(parseInt(config.get("share.chunkSize")));

maxShareSize ??= parseInt(config.get("share.maxSize"));

const uploadFiles = async (share: CreateShare, files: FileUpload[]) => {
@@ -54,7 +55,7 @@
const fileUploadPromises = files.map(async (file, fileIndex) =>
// Limit the number of concurrent uploads to 3
promiseLimit(async () => {
let fileId: string;
let fileId;

const setFileProgress = (progress: number) => {
setFiles((files) =>
@@ -69,38 +70,30 @@

setFileProgress(1);

let chunks = Math.ceil(file.size / chunkSize);
let chunks = Math.ceil(file.size / chunkSize.current);

// If the file is 0 bytes, we still need to upload 1 chunk
if (chunks == 0) chunks++;

for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
const from = chunkIndex * chunkSize;
const to = from + chunkSize;
const from = chunkIndex * chunkSize.current;
const to = from + chunkSize.current;
const blob = file.slice(from, to);
try {
await new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = async (event) =>
await shareService
.uploadFile(
createdShare.id,
event,
{
id: fileId,
name: file.name,
},
chunkIndex,
chunks,
)
.then((response) => {
fileId = response.id;
resolve(response);
})
.catch(reject);

reader.readAsDataURL(blob);
});
await shareService
.uploadFile(
createdShare.id,
blob,
{
id: fileId,
name: file.name,
},
chunkIndex,
chunks,
)
.then((response) => {
fileId = response.id;
});

setFileProgress(((chunkIndex + 1) / chunks) * 100);
} catch (e) {
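
The chunking loop itself is unchanged in spirit: the file is cut with `File.slice`, and a 0-byte file still produces one empty chunk so the backend creates the file record. A standalone sketch of that logic (simplified; retries and progress reporting omitted):

```ts
// Standalone sketch of the chunking logic, not the component's exact code.
async function uploadInChunks(
  file: File,
  chunkSize: number,
  send: (chunk: Blob, index: number, total: number) => Promise<void>,
): Promise<void> {
  // A 0-byte file still needs one (empty) chunk so the backend creates the file.
  const chunks = Math.max(1, Math.ceil(file.size / chunkSize));

  for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
    const from = chunkIndex * chunkSize;
    const blob = file.slice(from, from + chunkSize); // slice clamps at end of file
    await send(blob, chunkIndex, chunks);
  }
}
```
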
6 changes: 2 additions & 4 deletions frontend/src/services/share.service.ts
@@ -77,18 +77,16 @@ const removeFile = async (shareId: string, fileId: string) => {

const uploadFile = async (
shareId: string,
readerEvent: ProgressEvent<FileReader>,
chunk: Blob,
file: {
id?: string;
name: string;
},
chunkIndex: number,
totalChunks: number,
): Promise<FileUploadResponse> => {
const data = readerEvent.target!.result;

return (
await api.post(`shares/${shareId}/files`, data, {
await api.post(`shares/${shareId}/files`, chunk, {
headers: { "Content-Type": "application/octet-stream" },
params: {
id: file.id,
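
For callers, the service change boils down to passing the `Blob` slice instead of a `FileReader` event. A hypothetical call site for the updated signature (function and variable names assumed for illustration):

```ts
import shareService from "../../services/share.service";

// Hypothetical call site for the new uploadFile signature: the Blob slice is
// passed straight through, with no FileReader and no base64 round-trip.
async function sendChunk(
  shareId: string,
  file: File,
  fileId: string | undefined,
  chunkIndex: number,
  totalChunks: number,
  chunkSize: number,
): Promise<string> {
  const from = chunkIndex * chunkSize;
  const blob = file.slice(from, from + chunkSize);

  const response = await shareService.uploadFile(
    shareId,
    blob,
    { id: fileId, name: file.name },
    chunkIndex,
    totalChunks,
  );

  // The backend assigns the file id on the first chunk; reuse it for the rest.
  return response.id;
}
```
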
