Commit: address review comments
fenos committed Dec 19, 2023
1 parent 0b9f118 commit 507948e
Showing 5 changed files with 144 additions and 41 deletions.
3 changes: 2 additions & 1 deletion packages/server/README.md
@@ -62,7 +62,8 @@ The route to accept requests (`string`).
 
 #### `options.maxSize`
 
-Max file size (in bytes) allowed when uploading (`number`)
+Max file size (in bytes) allowed when uploading (`number` | `(req, id: string | null) => Promise<number> | number`).
+When a function is provided, the `id` will be `null` during the OPTIONS request.
 
 #### `options.relativeLocation`
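For context, a sketch of how the new function form could be configured; the `Server`/`FileStore` setup follows the package READMEs, while `lookupUserLimit` is a hypothetical helper:

```ts
import {Server} from '@tus/server'
import {FileStore} from '@tus/file-store'

// Hypothetical per-upload limit lookup; replace with your own logic.
declare function lookupUserLimit(id: string): Promise<number>

const server = new Server({
  path: '/files',
  datastore: new FileStore({directory: './files'}),
  // During an OPTIONS request there is no upload yet, so `id` is `null`;
  // fall back to a global 1 GiB cap in that case.
  maxSize: async (req, id) => {
    if (id === null) return 1024 * 1024 * 1024
    return lookupUserLimit(id)
  },
})
```

The `OptionsHandler` change further down passes `null` for exactly this reason.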
63 changes: 26 additions & 37 deletions packages/server/src/handlers/BaseHandler.ts
@@ -139,21 +139,20 @@ export class BaseHandler extends EventEmitter {
     context: CancellationContext
   ) {
     return new Promise<number>(async (resolve, reject) => {
+      // Abort early if the operation has been cancelled.
       if (context.signal.aborted) {
         reject(ERRORS.ABORTED)
         return
       }
 
+      // Create a PassThrough stream as a proxy to manage the request stream.
+      // This allows for aborting the write process without affecting the incoming request stream.
       const proxy = new PassThrough()
       addAbortSignal(context.signal, proxy)
 
       proxy.on('error', (err) => {
         req.unpipe(proxy)
-        if (err.name === 'AbortError') {
-          reject(ERRORS.ABORTED)
-        } else {
-          reject(err)
-        }
+        reject(err.name === 'AbortError' ? ERRORS.ABORTED : err)
       })
 
       req.on('error', (err) => {
@@ -162,6 +161,9 @@ export class BaseHandler extends EventEmitter {
         }
       })
 
+      // Pipe the request stream through the proxy. We use the proxy instead of the request stream directly
+      // to ensure that errors in the pipeline do not cause the request stream to be destroyed,
+      // which would result in a socket hangup error for the client.
       stream
         .pipeline(req.pipe(proxy), new StreamLimiter(maxFileSize), async (stream) => {
           return this.store.write(stream as StreamLimiter, id, offset)
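As an aside, the proxy pattern used in this hunk can be sketched in isolation. This is a simplified illustration of the idea rather than the handler's actual code; `destination` stands in for the store's write stream:

```ts
import http from 'node:http'
import stream, {PassThrough, addAbortSignal} from 'node:stream'

// Simplified sketch: write the request through a PassThrough proxy so that
// an abort (or a downstream error) destroys only the proxy, never the
// incoming request stream and its underlying socket.
function writeThroughProxy(
  req: http.IncomingMessage,
  destination: stream.Writable,
  signal: AbortSignal
): Promise<void> {
  return new Promise((resolve, reject) => {
    const proxy = new PassThrough()
    addAbortSignal(signal, proxy) // an abort tears down the proxy only

    proxy.on('error', (err) => {
      req.unpipe(proxy) // keep the socket usable for an error response
      reject(err)
    })

    stream.pipeline(req.pipe(proxy), destination, (err) => {
      if (err) reject(err)
      else resolve()
    })
  })
}
```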
@@ -171,7 +173,7 @@ export class BaseHandler extends EventEmitter {
       })
   }
 
-  getConfiguredMaxSize(req: http.IncomingMessage, id: string) {
+  getConfiguredMaxSize(req: http.IncomingMessage, id: string | null) {
     if (typeof this.options.maxSize === 'function') {
       return this.options.maxSize(req, id)
     }
@@ -195,48 +197,35 @@ export class BaseHandler extends EventEmitter {
     const length = parseInt(req.headers['content-length'] || '0', 10)
     const offset = file.offset
 
-    const hasContentLengthSet = length > 0
+    const hasContentLengthSet = req.headers['content-length'] !== undefined
     const hasConfiguredMaxSizeSet = configuredMaxSize > 0
 
-    // Check if the upload fits into the file's size when the size is not deferred.
-    if (!file.sizeIsDeferred && offset + length > (file.size || 0)) {
-      throw ERRORS.ERR_SIZE_EXCEEDED
-    }
-
-    // For deferred size uploads, if it's not a chunked transfer, check against the configured maximum size.
-    if (
-      file.sizeIsDeferred &&
-      hasContentLengthSet &&
-      hasConfiguredMaxSizeSet &&
-      offset + length > configuredMaxSize
-    ) {
-      throw ERRORS.ERR_SIZE_EXCEEDED
-    }
-
-    // Calculate the maximum allowable size based on the file's total size and current offset.
-    let maxSize = (file.size || 0) - offset
-
-    // Handle deferred size uploads where no Content-Length is provided (possible with chunked transfers).
     if (file.sizeIsDeferred) {
+      // For deferred size uploads, if it's not a chunked transfer, check against the configured maximum size.
+      if (
+        hasContentLengthSet &&
+        hasConfiguredMaxSizeSet &&
+        offset + length > configuredMaxSize
+      ) {
+        throw ERRORS.ERR_SIZE_EXCEEDED
+      }
+
       if (hasConfiguredMaxSizeSet) {
-        // Limit the upload to not exceed the maximum configured upload size.
-        maxSize = configuredMaxSize - offset
+        return configuredMaxSize - offset
       } else {
-        // Allow arbitrary sizes if no upload limit is configured.
-        maxSize = Number.MAX_SAFE_INTEGER
+        return Number.MAX_SAFE_INTEGER
       }
     }
 
-    // Override maxSize with the Content-Length if it's provided.
-    if (hasContentLengthSet) {
-      maxSize = length
+    // Check if the upload fits into the file's size when the size is not deferred.
+    if (offset + length > (file.size || 0)) {
+      throw ERRORS.ERR_SIZE_EXCEEDED
    }
 
-    // Enforce the server-configured maximum size limit, if applicable.
-    if (hasConfiguredMaxSizeSet && maxSize > configuredMaxSize) {
-      maxSize = configuredMaxSize
+    if (hasConfiguredMaxSizeSet) {
+      return Math.min((file.size || 0) - offset, configuredMaxSize - offset)
    }
 
-    return maxSize
+    return (file.size || 0) - offset
   }
 }
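To make the rewritten control flow easier to follow, here is a hedged standalone restatement with one worked case; `maxBodySize` is illustrative only, not the exported API:

```ts
// Illustrative restatement of the new calculateMaxBodySize branching.
function maxBodySize(
  offset: number,
  fileSize: number | undefined, // undefined => upload length is deferred
  contentLength: number | undefined, // undefined => chunked transfer
  configuredMax: number // 0 => no server-wide limit configured
): number {
  const length = contentLength ?? 0

  if (fileSize === undefined) {
    // Deferred length: only the configured limit can bound the request,
    // and only when a Content-Length was actually sent.
    if (contentLength !== undefined && configuredMax > 0 && offset + length > configuredMax) {
      throw new Error('ERR_SIZE_EXCEEDED')
    }
    return configuredMax > 0 ? configuredMax - offset : Number.MAX_SAFE_INTEGER
  }

  // Known length: the declared body must fit within the remaining file size.
  if (offset + length > fileSize) {
    throw new Error('ERR_SIZE_EXCEEDED')
  }
  return configuredMax > 0
    ? Math.min(fileSize - offset, configuredMax - offset)
    : fileSize - offset
}

// Worked case: 1 MiB upload, 512 KiB already written, 1 MiB server limit,
// client declares a 512 KiB body:
// maxBodySize(524288, 1048576, 524288, 1048576) === 524288
```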
2 changes: 1 addition & 1 deletion packages/server/src/handlers/OptionsHandler.ts
@@ -7,7 +7,7 @@ import type http from 'node:http'
 // the Tus-Version header. It MAY include the Tus-Extension and Tus-Max-Size headers.
 export class OptionsHandler extends BaseHandler {
   async send(req: http.IncomingMessage, res: http.ServerResponse) {
-    const maxSize = await this.getConfiguredMaxSize(req, '')
+    const maxSize = await this.getConfiguredMaxSize(req, null)
 
     if (maxSize) {
       res.setHeader('Tus-Max-Size', maxSize)
2 changes: 1 addition & 1 deletion packages/server/src/types.ts
@@ -16,7 +16,7 @@ export type ServerOptions = {
    */
   maxSize?:
     | number
-    | ((req: http.IncomingMessage, uploadId: string) => Promise<number> | number)
+    | ((req: http.IncomingMessage, uploadId: string | null) => Promise<number> | number)
 
   /**
    * Return a relative URL as the `Location` header.
115 changes: 114 additions & 1 deletion test/e2e.test.ts
@@ -451,6 +451,119 @@ describe('EndToEnd', () => {
     after(() => {
       listener.close()
     })
+
+    it('should not allow uploading with fixed length more than the defined MaxFileSize', async () => {
+      const body = Buffer.alloc(1024 * 1024 * 2)
+      const chunkSize = (1024 * 1024 * 2) / 4
+      // purposely set this to 1MB even though we will try to upload 2MB
+      const uploadLength = 1024 * 1024
+
+      const res = await agent
+        .post(STORE_PATH)
+        .set('Tus-Resumable', TUS_RESUMABLE)
+        .set('Upload-Length', uploadLength.toString())
+        .set('Upload-Metadata', TEST_METADATA)
+        .expect(201)
+
+      assert.equal('location' in res.headers, true)
+      assert.equal(res.headers['tus-resumable'], TUS_RESUMABLE)
+
+      const uploadId = res.headers.location.split('/').pop()
+
+      const uploadChunk = async (body: Buffer, offset = 0) => {
+        const res = await agent
+          .patch(`${STORE_PATH}/${uploadId}`)
+          .set('Tus-Resumable', TUS_RESUMABLE)
+          .set('Upload-Offset', offset.toString())
+          .set('Content-Type', 'application/offset+octet-stream')
+          .send(body)
+          .expect(204)
+          .expect('Tus-Resumable', TUS_RESUMABLE)
+
+        return parseInt(res.headers['upload-offset'] || '0', 10)
+      }
+
+      let offset = 0
+      offset = await uploadChunk(body.subarray(offset, chunkSize)) // 512KB
+      offset = await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1MB
+
+      try {
+        // this request should fail since it exceeds the 1MB mark
+        await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1.5MB
+        throw new Error('failed test')
+      } catch (e) {
+        assert.equal(e instanceof Error, true)
+        assert.equal(
+          e.message.includes('got 413 "Payload Too Large"'),
+          true,
+          `wrong message received "${e.message}"`
+        )
+      }
+    })
+
+    it('should not allow uploading with fixed length more than the defined MaxFileSize using chunked encoding', async () => {
+      const body = Buffer.alloc(1024 * 1024 * 2)
+      const chunkSize = (1024 * 1024 * 2) / 4
+      // purposely set this to 1MB even though we will try to upload 2MB via transfer-encoding: chunked
+      const uploadLength = 1024 * 1024
+
+      const res = await agent
+        .post(STORE_PATH)
+        .set('Tus-Resumable', TUS_RESUMABLE)
+        .set('Upload-Length', uploadLength.toString())
+        .set('Upload-Metadata', TEST_METADATA)
+        .expect(201)
+
+      assert.equal('location' in res.headers, true)
+      assert.equal(res.headers['tus-resumable'], TUS_RESUMABLE)
+
+      const uploadId = res.headers.location.split('/').pop()
+      const address = listener.address() as AddressInfo
+      // Options for the HTTP request.
+      // transfer-encoding doesn't seem to be supported by superagent
+      const options = {
+        hostname: 'localhost',
+        port: address.port,
+        path: `${STORE_PATH}/${uploadId}`,
+        method: 'PATCH',
+        headers: {
+          'Tus-Resumable': TUS_RESUMABLE,
+          'Upload-Offset': '0',
+          'Content-Type': 'application/offset+octet-stream',
+          'Transfer-Encoding': 'chunked',
+        },
+      }
+
+      const {res: patchResp, body: resBody} = await new Promise<{
+        res: http.IncomingMessage
+        body: string
+      }>((resolve, reject) => {
+        const req = http.request(options, (res) => {
+          let body = ''
+          res.on('data', (chunk) => {
+            body += chunk.toString()
+          })
+          res.on('end', () => {
+            resolve({res, body})
+          })
+        })
+
+        req.on('error', (e) => {
+          reject(e)
+        })
+
+        req.write(body.subarray(0, chunkSize))
+        req.write(body.subarray(chunkSize, chunkSize * 2))
+        req.write(body.subarray(chunkSize * 2, chunkSize * 3))
+        req.end()
+      })
+
+      assert.equal(patchResp.statusCode, 413)
+      assert.equal(resBody, 'Maximum size exceeded\n')
+    })
+
     it('should not allow uploading with deferred length more than the defined MaxFileSize', async () => {
       const res = await agent
         .post(STORE_PATH)
@@ -489,9 +602,9 @@ describe('EndToEnd', () => {
       let offset = 0
       offset = await uploadChunk(body.subarray(offset, chunkSize)) // 512KB
       offset = await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1MB
-      // this request should fail since it exceeds the 1MB mark
 
       try {
+        // this request should fail since it exceeds the 1MB mark
        await uploadChunk(body.subarray(offset, offset + chunkSize), offset) // 1.5MB
        throw new Error('failed test')
       } catch (e) {
