From 507948e0f82d14cfcb8b32cf6cdc988b43450c4a Mon Sep 17 00:00:00 2001
From: fenos
Date: Mon, 18 Dec 2023 10:33:46 +0000
Subject: [PATCH] address review comments

---
 packages/server/README.md                   |   3 +-
 packages/server/src/handlers/BaseHandler.ts |  63 ++++------
 .../server/src/handlers/OptionsHandler.ts   |   2 +-
 packages/server/src/types.ts                |   2 +-
 test/e2e.test.ts                            | 115 +++++++++++++++++-
 5 files changed, 144 insertions(+), 41 deletions(-)

diff --git a/packages/server/README.md b/packages/server/README.md
index 7d163ab4..50aa39b5 100644
--- a/packages/server/README.md
+++ b/packages/server/README.md
@@ -62,7 +62,8 @@ The route to accept requests (`string`).
 
 #### `options.maxSize`
 
-Max file size (in bytes) allowed when uploading (`number`)
+Max file size (in bytes) allowed when uploading (`number` | `(req, id: string | null) => Promise<number> | number`).
+When a function is provided, the `id` will be `null` during the `OPTIONS` request.
 
 #### `options.relativeLocation`
 
diff --git a/packages/server/src/handlers/BaseHandler.ts b/packages/server/src/handlers/BaseHandler.ts
index e60b30d6..d27d5968 100644
--- a/packages/server/src/handlers/BaseHandler.ts
+++ b/packages/server/src/handlers/BaseHandler.ts
@@ -139,21 +139,20 @@ export class BaseHandler extends EventEmitter {
     context: CancellationContext
   ) {
     return new Promise(async (resolve, reject) => {
+      // Abort early if the operation has been cancelled.
       if (context.signal.aborted) {
         reject(ERRORS.ABORTED)
         return
       }
 
+      // Create a PassThrough stream as a proxy to manage the request stream.
+      // This allows for aborting the write process without affecting the incoming request stream.
       const proxy = new PassThrough()
       addAbortSignal(context.signal, proxy)
 
       proxy.on('error', (err) => {
         req.unpipe(proxy)
-        if (err.name === 'AbortError') {
-          reject(ERRORS.ABORTED)
-        } else {
-          reject(err)
-        }
+        reject(err.name === 'AbortError' ? ERRORS.ABORTED : err)
       })
 
       req.on('error', (err) => {
@@ -162,6 +161,9 @@
         }
       })
 
+      // Pipe the request stream through the proxy. We use the proxy instead of the request stream directly
+      // to ensure that errors in the pipeline do not cause the request stream to be destroyed,
+      // which would result in a socket hangup error for the client.
       stream
         .pipeline(req.pipe(proxy), new StreamLimiter(maxFileSize), async (stream) => {
           return this.store.write(stream as StreamLimiter, id, offset)
@@ -171,7 +173,7 @@
     })
   }
 
-  getConfiguredMaxSize(req: http.IncomingMessage, id: string) {
+  getConfiguredMaxSize(req: http.IncomingMessage, id: string | null) {
     if (typeof this.options.maxSize === 'function') {
       return this.options.maxSize(req, id)
     }
@@ -195,48 +197,35 @@
     const length = parseInt(req.headers['content-length'] || '0', 10)
     const offset = file.offset
 
-    const hasContentLengthSet = length > 0
+    const hasContentLengthSet = req.headers['content-length'] !== undefined
     const hasConfiguredMaxSizeSet = configuredMaxSize > 0
 
-    // Check if the upload fits into the file's size when the size is not deferred.
-    if (!file.sizeIsDeferred && offset + length > (file.size || 0)) {
-      throw ERRORS.ERR_SIZE_EXCEEDED
-    }
-
-    // For deferred size uploads, if it's not a chunked transfer, check against the configured maximum size.
-    if (
-      file.sizeIsDeferred &&
-      hasContentLengthSet &&
-      hasConfiguredMaxSizeSet &&
-      offset + length > configuredMaxSize
-    ) {
-      throw ERRORS.ERR_SIZE_EXCEEDED
-    }
-
-    // Calculate the maximum allowable size based on the file's total size and current offset.
-    let maxSize = (file.size || 0) - offset
-
-    // Handle deferred size uploads where no Content-Length is provided (possible with chunked transfers).
     if (file.sizeIsDeferred) {
+      // For deferred size uploads, if it's not a chunked transfer, check against the configured maximum size.
+      if (
+        hasContentLengthSet &&
+        hasConfiguredMaxSizeSet &&
+        offset + length > configuredMaxSize
+      ) {
+        throw ERRORS.ERR_SIZE_EXCEEDED
+      }
+
       if (hasConfiguredMaxSizeSet) {
-        // Limit the upload to not exceed the maximum configured upload size.
-        maxSize = configuredMaxSize - offset
+        return configuredMaxSize - offset
       } else {
-        // Allow arbitrary sizes if no upload limit is configured.
-        maxSize = Number.MAX_SAFE_INTEGER
+        return Number.MAX_SAFE_INTEGER
       }
     }
 
-    // Override maxSize with the Content-Length if it's provided.
-    if (hasContentLengthSet) {
-      maxSize = length
+    // Check if the upload fits into the file's size when the size is not deferred.
+    if (offset + length > (file.size || 0)) {
+      throw ERRORS.ERR_SIZE_EXCEEDED
     }
 
-    // Enforce the server-configured maximum size limit, if applicable.
-    if (hasConfiguredMaxSizeSet && maxSize > configuredMaxSize) {
-      maxSize = configuredMaxSize
+    if (hasConfiguredMaxSizeSet) {
+      return Math.min((file.size || 0) - offset, configuredMaxSize - offset)
     }
 
-    return maxSize
+    return (file.size || 0) - offset
   }
 }
diff --git a/packages/server/src/handlers/OptionsHandler.ts b/packages/server/src/handlers/OptionsHandler.ts
index 6efd27e8..944318cd 100644
--- a/packages/server/src/handlers/OptionsHandler.ts
+++ b/packages/server/src/handlers/OptionsHandler.ts
@@ -7,7 +7,7 @@ import type http from 'node:http'
 // the Tus-Version header. It MAY include the Tus-Extension and Tus-Max-Size headers.
 export class OptionsHandler extends BaseHandler {
   async send(req: http.IncomingMessage, res: http.ServerResponse) {
-    const maxSize = await this.getConfiguredMaxSize(req, '')
+    const maxSize = await this.getConfiguredMaxSize(req, null)
 
     if (maxSize) {
       res.setHeader('Tus-Max-Size', maxSize)
diff --git a/packages/server/src/types.ts b/packages/server/src/types.ts
index 6420c378..65420349 100644
--- a/packages/server/src/types.ts
+++ b/packages/server/src/types.ts
@@ -16,7 +16,7 @@ export type ServerOptions = {
    */
   maxSize?:
     | number
-    | ((req: http.IncomingMessage, uploadId: string) => Promise<number> | number)
+    | ((req: http.IncomingMessage, uploadId: string | null) => Promise<number> | number)
 
   /**
    * Return a relative URL as the `Location` header.
diff --git a/test/e2e.test.ts b/test/e2e.test.ts
index 4e22beec..5c1527a3 100644
--- a/test/e2e.test.ts
+++ b/test/e2e.test.ts
@@ -451,6 +451,119 @@ describe('EndToEnd', () => {
     after(() => {
       listener.close()
     })
+
+    it('should not allow uploading with fixed length more than the defined MaxFileSize', async () => {
+      const body = Buffer.alloc(1024 * 1024 * 2)
+      const chunkSize = (1024 * 1024 * 2) / 4
+      // purposely set this to 1MB even though we will try uploading 2MB across multiple PATCH requests
+      const uploadLength = 1024 * 1024
+
+      const res = await agent
+        .post(STORE_PATH)
+        .set('Tus-Resumable', TUS_RESUMABLE)
+        .set('Upload-Length', uploadLength.toString())
+        .set('Upload-Metadata', TEST_METADATA)
+        .expect(201)
+
+      assert.equal('location' in res.headers, true)
+      assert.equal(res.headers['tus-resumable'], TUS_RESUMABLE)
+
+      const uploadId = res.headers.location.split('/').pop()
+
+      const uploadChunk = async (body: Buffer, offset = 0) => {
+        const res = await agent
+          .patch(`${STORE_PATH}/${uploadId}`)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .set('Upload-Offset', offset.toString())
+          .set('Content-Type', 'application/offset+octet-stream')
+          .send(body)
+          .expect(204)
+          .expect('Tus-Resumable', TUS_RESUMABLE)
+
+        return parseInt(res.headers['upload-offset'] || '0', 10)
+      }
+
+      let offset = 0
+      offset = await uploadChunk(body.subarray(offset, chunkSize)) // 512KB
+      offset = await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1MB
+
+      try {
+        // this request should fail since it exceeds the 1MB mark
+        await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1.5MB
+        throw new Error('failed test')
+      } catch (e) {
+        assert.equal(e instanceof Error, true)
+        assert.equal(
+          e.message.includes('got 413 "Payload Too Large"'),
+          true,
+          `wrong message received "${e.message}"`
+        )
+      }
+    })
+
+    it('should not allow uploading with fixed length more than the defined MaxFileSize using chunked encoding', async () => {
+      const body = Buffer.alloc(1024 * 1024 * 2)
+      const chunkSize = (1024 * 1024 * 2) / 4
+      // purposely set this to 1MB even though we will try uploading 2MB via transfer-encoding: chunked
+      const uploadLength = 1024 * 1024
+
+      const res = await agent
+        .post(STORE_PATH)
+        .set('Tus-Resumable', TUS_RESUMABLE)
+        .set('Upload-Length', uploadLength.toString())
+        .set('Upload-Metadata', TEST_METADATA)
+        .expect(201)
+
+      assert.equal('location' in res.headers, true)
+      assert.equal(res.headers['tus-resumable'], TUS_RESUMABLE)
+
+      const uploadId = res.headers.location.split('/').pop()
+      const address = listener.address() as AddressInfo
+      // Options for the HTTP request.
+      // Transfer-Encoding: chunked doesn't seem to be supported by superagent, so use http.request directly.
+      const options = {
+        hostname: 'localhost',
+        port: address.port,
+        path: `${STORE_PATH}/${uploadId}`,
+        method: 'PATCH',
+        headers: {
+          'Tus-Resumable': TUS_RESUMABLE,
+          'Upload-Offset': '0',
+          'Content-Type': 'application/offset+octet-stream',
+          'Transfer-Encoding': 'chunked',
+        },
+      }
+
+      const {res: patchResp, body: resBody} = await new Promise<{
+        res: http.IncomingMessage
+        body: string
+      }>((resolve, reject) => {
+        const req = http.request(options, (res) => {
+          let responseBody = ''
+          res.on('data', (chunk) => {
+            responseBody += chunk.toString()
+          })
+          res.on('end', () => {
+            resolve({res, body: responseBody})
+          })
+        })
+
+        req.on('error', (e) => {
+          reject(e)
+        })
+
+        req.write(body.subarray(0, chunkSize))
+        req.write(body.subarray(chunkSize, chunkSize * 2))
+        req.write(body.subarray(chunkSize * 2, chunkSize * 3))
+        req.end()
+      })
+
+      assert.equal(patchResp.statusCode, 413)
+      assert.equal(resBody, 'Maximum size exceeded\n')
+    })
+
     it('should not allow uploading with deferred length more than the defined MaxFileSize', async () => {
       const res = await agent
         .post(STORE_PATH)
@@ -489,9 +602,9 @@
       let offset = 0
       offset = await uploadChunk(body.subarray(offset, chunkSize)) // 500Kb
       offset = await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1MB
-      // this request should fail since it exceeds the 1MB mark
 
       try {
+        // this request should fail since it exceeds the 1MB mark
         await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1.5MB
         throw new Error('failed test')
       } catch (e) {
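
Usage note: the sketch below shows how the function form of `maxSize` added above could be wired up. It assumes the `@tus/server` `Server` and the `@tus/file-store` `FileStore`; `lookupQuotaFor` is a hypothetical stand-in for application logic and is not part of this patch.

  import type http from 'node:http'
  import {Server} from '@tus/server'
  import {FileStore} from '@tus/file-store'

  // Hypothetical per-upload quota lookup standing in for real application logic.
  async function lookupQuotaFor(req: http.IncomingMessage, id: string): Promise<number> {
    return 100 * 1024 * 1024 // e.g. a 100MB quota for this client
  }

  const server = new Server({
    path: '/files',
    datastore: new FileStore({directory: './files'}),
    // With this patch, `id` is the upload id when one exists (e.g. on PATCH)
    // and `null` on OPTIONS, where the result feeds the Tus-Max-Size header.
    maxSize: (req, id) => {
      if (id === null) {
        return 1024 * 1024 * 1024 // global cap advertised on OPTIONS
      }
      return lookupQuotaFor(req, id)
    },
  })

  server.listen({port: 1080})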
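To make the rewritten `calculateMaxBodySize` branches concrete, here is an illustration-only restatement of the logic with the numbers the tests use (a 1MB limit, 512KB chunks); it is not additional server API. The fixed-length path can reject up front from the declared `Content-Length`, while the chunked path has no length to inspect and can only be caught mid-stream by the limiter.

  import assert from 'node:assert'

  // Simplified mirror of the patched calculateMaxBodySize, for illustration only.
  type UploadLike = {offset: number; size?: number; sizeIsDeferred: boolean}

  function maxBodySize(
    file: UploadLike,
    length: number,
    hasContentLengthSet: boolean,
    configuredMaxSize: number
  ): number {
    const offset = file.offset
    const hasConfiguredMaxSizeSet = configuredMaxSize > 0

    if (file.sizeIsDeferred) {
      // Deferred size with a known Content-Length: reject up front.
      if (hasContentLengthSet && hasConfiguredMaxSizeSet && offset + length > configuredMaxSize) {
        throw new Error('ERR_SIZE_EXCEEDED')
      }
      // Otherwise cap at the configured limit, or allow arbitrary sizes.
      return hasConfiguredMaxSizeSet ? configuredMaxSize - offset : Number.MAX_SAFE_INTEGER
    }

    // Known upload size: reject writes past the declared Upload-Length.
    if (offset + length > (file.size || 0)) {
      throw new Error('ERR_SIZE_EXCEEDED')
    }

    return hasConfiguredMaxSizeSet
      ? Math.min((file.size || 0) - offset, configuredMaxSize - offset)
      : (file.size || 0) - offset
  }

  const MB = 1024 * 1024

  // First test: after two 512KB chunks of a 1MB upload, a third PATCH declaring
  // Content-Length 512KB at offset 1MB is rejected, surfaced as 413 "Payload Too Large".
  assert.throws(() => maxBodySize({offset: MB, size: MB, sizeIsDeferred: false}, MB / 2, true, MB))

  // Second test: chunked encoding carries no Content-Length, so nothing is rejected
  // up front; the body is capped at 1MB and the limiter trips mid-stream,
  // producing 413 "Maximum size exceeded".
  assert.equal(maxBodySize({offset: 0, size: MB, sizeIsDeferred: false}, 0, false, MB), MB)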
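The write path above pipes the request through a PassThrough proxy and a `StreamLimiter` sized by `maxBodySize` before reaching `store.write`. `StreamLimiter`'s source is not part of this diff; the sketch below shows roughly the shape such a limiter could take to produce the mid-stream failure the chunked-encoding test asserts. This is a hedged sketch under that assumption, not the package's actual implementation.

  import {Transform, type TransformCallback} from 'node:stream'

  // Sketch of a size-capping Transform in the spirit of StreamLimiter; the real
  // implementation may differ. The error message mirrors the response body
  // asserted in the chunked-encoding test.
  class SizeLimiterSketch extends Transform {
    private bytesSeen = 0

    constructor(private readonly maxBytes: number) {
      super()
    }

    _transform(chunk: Buffer, _encoding: BufferEncoding, callback: TransformCallback) {
      this.bytesSeen += chunk.length
      if (this.bytesSeen > this.maxBytes) {
        // Erroring the transform tears down the pipeline; the proxy PassThrough
        // in writeToStore is unpiped so the client socket is not destroyed.
        callback(new Error('Maximum size exceeded'))
        return
      }
      callback(null, chunk)
    }
  }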