Skip to content

Commit

Permalink
feat: export saveRequestFiles to context (#34)
Browse files Browse the repository at this point in the history
allow controllers to use file mode in dynamic logic
  • Loading branch information
fengmk2 committed May 20, 2019
1 parent c5ca3ea commit 0d26aa0
Show file tree
Hide file tree
Showing 9 changed files with 179 additions and 111 deletions.
5 changes: 3 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -228,8 +228,9 @@ module.exports = class extends Controller {
// process file or upload to cloud storage
result = await ctx.oss.put('egg-multipart-test/' + file.filename, file.filepath);
} finally {
// need to remove the tmp files
await ctx.cleanupRequestFiles();
// remove tmp files without blocking the request's response;
// cleanupRequestFiles never throws, even if removing a tmp file fails with an I/O error
ctx.cleanupRequestFiles();
}
console.log(result);
}
Expand Down
106 changes: 104 additions & 2 deletions app/extend/context.js
Original file line number Diff line number Diff line change
@@ -1,8 +1,15 @@
'use strict';

const rimraf = require('mz-modules/rimraf');
const parse = require('co-busboy');
const Readable = require('stream').Readable;
const path = require('path');
const uuid = require('uuid');
const parse = require('co-busboy');
const sendToWormhole = require('stream-wormhole');
const moment = require('moment');
const fs = require('mz/fs');
const mkdirp = require('mz-modules/mkdirp');
const pump = require('mz-modules/pump');
const rimraf = require('mz-modules/rimraf');

class EmptyStream extends Readable {
_read() {
Expand All @@ -12,6 +19,14 @@ class EmptyStream extends Readable {

const HAS_CONSUMED = Symbol('Context#multipartHasConsumed');

/**
 * Reject the current upload with a `413 Payload Too Large` style error.
 * @param {String} code - machine readable error code, e.g. `Request_fileSize_limit`
 * @param {String} message - human readable description of which limit was hit
 * @throws {Error} always throws (rejects); never resolves normally
 */
async function limit(code, message) {
  // always a 413: the request exceeded one of the configured multipart limits
  throw Object.assign(new Error(message), { code, status: 413 });
}

module.exports = {
/**
* clean up request tmp files helper
Expand All @@ -35,6 +50,93 @@ module.exports = {
}
},

/**
* save request multipart data and files to `ctx.request`
* @function Context#saveRequestFiles
*/
async saveRequestFiles() {
const ctx = this;
let storedir;

const requestBody = {};
const requestFiles = [];

const parts = ctx.multipart({ autoFields: false });
let part;
do {
try {
part = await parts();
} catch (err) {
await ctx.cleanupRequestFiles(requestFiles);
throw err;
}

if (!part) break;

if (part.length) {
ctx.coreLogger.debug('[egg-multipart:storeMultipart] handle value part: %j', part);
const fieldnameTruncated = part[2];
const valueTruncated = part[3];
if (valueTruncated) {
await ctx.cleanupRequestFiles(requestFiles);
return await limit('Request_fieldSize_limit', 'Reach fieldSize limit');
}
if (fieldnameTruncated) {
await ctx.cleanupRequestFiles(requestFiles);
return await limit('Request_fieldNameSize_limit', 'Reach fieldNameSize limit');
}

// arrays are busboy fields
requestBody[part[0]] = part[1];
continue;
}

// otherwise, it's a stream
const meta = {
field: part.fieldname,
filename: part.filename,
encoding: part.encoding,
mime: part.mime,
};
// keep same property name as file stream
// https://github.com/cojs/busboy/blob/master/index.js#L114
meta.fieldname = meta.field;
meta.transferEncoding = meta.encoding;
meta.mimeType = meta.mime;

ctx.coreLogger.debug('[egg-multipart:storeMultipart] handle stream part: %j', meta);
// empty part, ignore it
if (!part.filename) {
await sendToWormhole(part);
continue;
}

if (!storedir) {
// ${tmpdir}/YYYY/MM/DD/HH
storedir = path.join(ctx.app.config.multipart.tmpdir, moment().format('YYYY/MM/DD/HH'));
const exists = await fs.exists(storedir);
if (!exists) {
await mkdirp(storedir);
}
}
const filepath = path.join(storedir, uuid.v4() + path.extname(meta.filename));
const target = fs.createWriteStream(filepath);
await pump(part, target);
// https://github.com/mscdex/busboy/blob/master/lib/types/multipart.js#L221
meta.filepath = filepath;
requestFiles.push(meta);

// https://github.com/mscdex/busboy/blob/master/lib/types/multipart.js#L221
if (part.truncated) {
await ctx.cleanupRequestFiles(requestFiles);
return await limit('Request_fileSize_limit', 'Reach fileSize limit');
}
} while (part != null);

ctx.request.body = requestBody;
ctx.request.files = requestFiles;
},

/**
* create multipart.parts instance, to get separated files.
* @function Context#multipart
Expand Down
96 changes: 1 addition & 95 deletions app/middleware/multipart.js
Original file line number Diff line number Diff line change
@@ -1,105 +1,11 @@
'use strict';

const path = require('path');
const fs = require('mz/fs');
const uuid = require('uuid');
const mkdirp = require('mz-modules/mkdirp');
const pump = require('mz-modules/pump');
const sendToWormhole = require('stream-wormhole');
const moment = require('moment');

module.exports = options => {
async function limit(code, message) {
// throw 413 error
const err = new Error(message);
err.code = code;
err.status = 413;
throw err;
}

return async function multipart(ctx, next) {
if (!ctx.is('multipart')) return next();
if (options.fileModeMatch && !options.fileModeMatch.test(ctx.path)) return next();

let storedir;

const requestBody = {};
const requestFiles = [];

const parts = ctx.multipart({ autoFields: false });
let part;
do {
try {
part = await parts();
} catch (err) {
await ctx.cleanupRequestFiles(requestFiles);
throw err;
}

if (!part) break;

if (part.length) {
ctx.coreLogger.debug('[egg-multipart:storeMultipart] handle value part: %j', part);
const fieldnameTruncated = part[2];
const valueTruncated = part[3];
if (valueTruncated) {
await ctx.cleanupRequestFiles(requestFiles);
return await limit('Request_fieldSize_limit', 'Reach fieldSize limit');
}
if (fieldnameTruncated) {
await ctx.cleanupRequestFiles(requestFiles);
return await limit('Request_fieldNameSize_limit', 'Reach fieldNameSize limit');
}

// arrays are busboy fields
requestBody[part[0]] = part[1];
continue;
}

// otherwise, it's a stream
const meta = {
field: part.fieldname,
filename: part.filename,
encoding: part.encoding,
mime: part.mime,
};
// keep same property name as file stream
// https://github.com/cojs/busboy/blob/master/index.js#L114
meta.fieldname = meta.field;
meta.transferEncoding = meta.encoding;
meta.mimeType = meta.mime;

ctx.coreLogger.debug('[egg-multipart:storeMultipart] handle stream part: %j', meta);
// empty part, ignore it
if (!part.filename) {
await sendToWormhole(part);
continue;
}

if (!storedir) {
// ${tmpdir}/YYYY/MM/DD/HH
storedir = path.join(options.tmpdir, moment().format('YYYY/MM/DD/HH'));
const exists = await fs.exists(storedir);
if (!exists) {
await mkdirp(storedir);
}
}
const filepath = path.join(storedir, uuid.v4() + path.extname(meta.filename));
const target = fs.createWriteStream(filepath);
await pump(part, target);
// https://github.com/mscdex/busboy/blob/master/lib/types/multipart.js#L221
meta.filepath = filepath;
requestFiles.push(meta);

// https://github.com/mscdex/busboy/blob/master/lib/types/multipart.js#L221
if (part.truncated) {
await ctx.cleanupRequestFiles(requestFiles);
return await limit('Request_fileSize_limit', 'Reach fileSize limit');
}
} while (part != null);

ctx.request.body = requestBody;
ctx.request.files = requestFiles;
await ctx.saveRequestFiles();
return next();
};
};
2 changes: 0 additions & 2 deletions app/schedule/clean_tmpdir.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,6 @@ module.exports = app => {
type: 'worker',
cron: app.config.multipart.cleanSchedule.cron,
immediate: false,
// disable on stream mode and not set fileModeMatch
disable: app.config.multipart.mode === 'stream' && !app.config.multipart.fileModeMatch,
};
}

Expand Down
14 changes: 6 additions & 8 deletions index.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,19 +31,12 @@ interface MultipartOptions {

/** A readable file stream from a multipart request, with busboy metadata attached. */
interface MultipartFileStream extends Readable {
  /** other parsed form fields — presumably from co-busboy autoFields mode; confirm against caller */
  fields: any;

  /** original filename sent by the client */
  filename: string;

  /** form field name of the file input */
  fieldname: string;

  /** MIME type reported by the client */
  mime: string;

  /** alias of `mime` — kept to match busboy's file stream property name */
  mimeType: string;

  /** alias of `encoding` — kept to match busboy's file stream property name */
  transferEncoding: string;

  /** transfer encoding of the part, e.g. `7bit` */
  encoding: string;

  /** true when the stream was cut off by the configured fileSize limit */
  truncated: boolean;
}

Expand Down Expand Up @@ -72,6 +65,12 @@ declare module 'egg' {
*/
cleanupRequestFiles(files?: EggFile[]): Promise<void>;

/**
* save request multipart data and files to `ctx.request`
* @return {Promise<void>}
*/
saveRequestFiles(): Promise<void>;

/**
* create multipart.parts instance, to get separated files.
* @param {MultipartOptions} options
Expand All @@ -85,7 +84,6 @@ declare module 'egg' {
* @return {Promise<MultipartFileStream>}
*/
getFileStream(options?: MultipartOptions): Promise<MultipartFileStream>

}

interface Request {
Expand Down
9 changes: 9 additions & 0 deletions test/fixtures/apps/fileModeMatch/app/controller/save.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
'use strict';

module.exports = async ctx => {
await ctx.saveRequestFiles();
ctx.body = {
body: ctx.request.body,
files: ctx.request.files,
};
};
1 change: 1 addition & 0 deletions test/fixtures/apps/fileModeMatch/app/router.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@
module.exports = app => {
app.post('/upload', app.controller.upload);
app.post('/upload_file', app.controller.upload);
app.post('/save', app.controller.save);
};
4 changes: 2 additions & 2 deletions test/multipart.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -42,10 +42,10 @@ describe('test/multipart.test.js', () => {
}
});

// clean_tmpdir is no longer disabled in stream mode, so the schedule worker
// must always appear in the schedule logger output.
it('should always register clean_tmpdir schedule in stream mode', () => {
  const logger = app.loggers.scheduleLogger;
  const content = fs.readFileSync(logger.options.file, 'utf8');
  assert(/\[egg-schedule\]: register schedule .+clean_tmpdir\.js/.test(content));
});

it('should upload with csrf', function* () {
Expand Down
53 changes: 53 additions & 0 deletions test/stream-mode-with-filematch.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,59 @@ describe('test/stream-mode-with-filematch.test.js', () => {
assert.deepStrictEqual(data, { body: {} });
});

it('should allow to call saveRequestFiles on controller', async () => {
  const form = formstream();
  form.field('foo', 'fengmk2').field('love', 'egg');
  form.file('file1', __filename, 'foooooooo.js');
  form.file('file2', __filename);
  // empty file should be ignored by saveRequestFiles
  form.buffer('file3', Buffer.from(''), '', 'application/octet-stream');
  form.file('bigfile', path.join(__dirname, 'fixtures', 'bigfile.js'));
  // other form fields
  form.field('work', 'with Node.js');

  const res = await urllib.request(host + '/save', {
    method: 'POST',
    headers: form.headers(),
    stream: form,
  });

  assert(res.status === 200);
  const data = JSON.parse(res.data);
  assert.deepStrictEqual(data.body, { foo: 'fengmk2', love: 'egg', work: 'with Node.js' });

  // the empty file3 part is dropped, so only three files survive
  const expected = [
    [ 'file1', 'foooooooo.js' ],
    [ 'file2', 'stream-mode-with-filematch.test.js' ],
    [ 'bigfile', 'bigfile.js' ],
  ];
  assert(data.files.length === 3);
  expected.forEach(([ field, filename ], index) => {
    const file = data.files[index];
    assert(file.field === field);
    assert(file.filename === filename);
    assert(file.encoding === '7bit');
    assert(file.mime === 'application/javascript');
    assert(file.filepath.startsWith(app.config.multipart.tmpdir));
  });
});

it('should 400 when request is not multipart', async () => {
  // POST a plain JSON body: ctx.multipart() rejects non-multipart content types
  const { status, data } = await urllib.request(host + '/save', {
    method: 'POST',
    data: { foo: 'bar' },
    dataType: 'json',
  });
  assert(status === 400);
  assert.deepStrictEqual(data, {
    message: 'Content-Type must be multipart/*',
  });
});

it('should register clean_tmpdir schedule', () => {
// [egg-schedule]: register schedule /hello/egg-multipart/app/schedule/clean_tmpdir.js
const logger = app.loggers.scheduleLogger;
Expand Down

0 comments on commit 0d26aa0

Please sign in to comment.