Skip to content

Commit

Permalink
Refactor attachment.js with ES6 features (haraka#2260)
Browse files Browse the repository at this point in the history
  • Loading branch information
GuroKung authored and msimerson committed Oct 24, 2017
1 parent 5a5f266 commit bd68bd5
Showing 1 changed file with 69 additions and 69 deletions.
138 changes: 69 additions & 69 deletions plugins/attachment.js
Expand Up @@ -3,7 +3,7 @@
// attachment

const fs = require('fs');
const spawn = require('child_process').spawn;
const { spawn } = require('child_process');
const path = require('path');
const crypto = require('crypto');
const utils = require('haraka-utils');
Expand All @@ -22,8 +22,7 @@ exports.register = function () {
}
catch (e) {
archives_disabled = true;
this.logwarn('This plugin requires the \'tmp\' module to extract ' +
'filenames from archive files');
this.logwarn(`This plugin requires the 'tmp' module to extract filenames from archive files`);
}
this.load_attachment_ini();
this.register_hook('data_post', 'wait_for_attachment_hooks');
Expand All @@ -33,7 +32,7 @@ exports.register = function () {
exports.load_attachment_ini = function () {
const plugin = this;

plugin.cfg = plugin.config.get('attachment.ini', function () {
plugin.cfg = plugin.config.get('attachment.ini', () => {
plugin.load_attachment_ini();
});

Expand All @@ -46,10 +45,10 @@ exports.load_attachment_ini = function () {
exports.find_bsdtar_path = function (cb) {
let found = false;
let i = 0;
['/bin', '/usr/bin', '/usr/local/bin'].forEach(function (dir) {
['/bin', '/usr/bin', '/usr/local/bin'].forEach((dir) => {
if (found) return;
i++;
fs.stat(dir + '/bsdtar', function (err, stats) {
fs.stat(`${dir}/bsdtar`, (err, stats) => {
i--;
if (found) return;
if (err) {
Expand All @@ -65,15 +64,14 @@ exports.find_bsdtar_path = function (cb) {

exports.hook_init_master = exports.hook_init_child = function (next) {
const plugin = this;
plugin.find_bsdtar_path(function (err, dir) {
plugin.find_bsdtar_path((err, dir) => {
if (err) {
archives_disabled = true;
plugin.logwarn('This plugin requires the \'bsdtar\' binary ' +
'to extract filenames from archive files');
plugin.logwarn(`This plugin requires the 'bsdtar' binary to extract filenames from archive files`);
}
else {
plugin.logdebug('found bsdtar in ' + dir);
bsdtar_path = dir + '/bsdtar';
plugin.logdebug(`found bsdtar in ${dir}`);
bsdtar_path = `${dir}/bsdtar`;
}
return next();
});
Expand Down Expand Up @@ -112,6 +110,7 @@ exports.unarchive_recursive = function (connection, f, archive_file_name, cb) {
function do_cb (err, files2) {
if (timer) clearTimeout(timer);
if (done_cb) return;

done_cb = true;
deleteTempFiles();
return cb(err, files2);
Expand All @@ -120,9 +119,9 @@ exports.unarchive_recursive = function (connection, f, archive_file_name, cb) {
function deleteTempFiles () {
tmpfiles.forEach(function (t) {
fs.close(t[0], function () {
connection.logdebug(plugin, 'closed fd: ' + t[0]);
connection.logdebug(plugin, `closed fd: ${t[0]}`);
fs.unlink(t[1], function () {
connection.logdebug(plugin, 'deleted tempfile: ' + t[1]);
connection.logdebug(plugin, `deleted tempfile: ${t[1]}`);
});
});
});
Expand All @@ -141,59 +140,60 @@ exports.unarchive_recursive = function (connection, f, archive_file_name, cb) {
'cwd': '/tmp',
'env': { 'LANG': 'C' },
});

// Start timer
let t1_timeout = false;
const t1_timer = setTimeout(function () {
const t1_timer = setTimeout(() => {
t1_timeout = true;
bsdtar.kill();
return do_cb(new Error('bsdtar timed out'));
}, plugin.cfg.timeout);

let lines = "";
bsdtar.stdout.on('data', function (data) {
bsdtar.stdout.on('data', (data) => {
lines += data;
});

let stderr = "";
bsdtar.stderr.on('data', function (data) {
bsdtar.stderr.on('data', (data) => {
stderr += data;
});
bsdtar.on('exit', function (code, signal) {

bsdtar.on('exit', (code, signal) => {
count--;
if (t1_timeout) return;
clearTimeout(t1_timer);
if (code && code > 0) {
// Error was returned
return do_cb(new Error('bsdtar returned error code: ' + code +
' error=' + stderr.replace(/\r?\n/,' ')));
return do_cb(new Error(`bsdtar returned error code: ${code} error=${stderr.replace(/\r?\n/, ' ')}`));
}
if (signal) {
// Process terminated due to signal
return do_cb(new Error('bsdtar terminated by signal: ' + signal));
return do_cb(new Error(`bsdtar terminated by signal: ${signal}`));
}
// Process filenames
const fl = lines.split(/\r?\n/);
for (let i=0; i<fl.length; i++) {
const file = fl[i];
// Skip any blank lines
if (!file) continue;
connection.logdebug(plugin, 'file: ' + file + ' depth=' + depth);
files.push((prefix ? prefix + '/' : '') + file);
connection.logdebug(plugin, `file: ${file} depth=${depth}`);
files.push((prefix ? `${prefix}/` : '') + file);
const extn = path.extname(file.toLowerCase());
if (plugin.archive_exts.indexOf(extn) === -1 &&
plugin.archive_exts.indexOf(extn.substring(1)) === -1)
{
// Not an archive file extension
continue;
}
connection.logdebug(plugin, 'need to extract file: ' + file);
connection.logdebug(plugin, `need to extract file: ${file}`);
count++;
depth++;
(function (file, depth) {
tmp.file(function (err, tmpfile, fd) {
((file, depth) => {
tmp.file((err, tmpfile, fd) => {
count--;
if (err) return do_cb(err.message);
connection.logdebug(plugin, 'created tmp file: ' + tmpfile +
'(fd=' + fd + ') for file ' +
(prefix ? prefix + '/' : '') + file);
connection.logdebug(plugin, `created tmp file: ${tmpfile}(fd=${fd}) for file ${prefix ? `${prefix}/` : ''}${file}`);
tmpfiles.push([fd, tmpfile]);
// Extract this file from the archive
count++;
Expand All @@ -208,30 +208,30 @@ exports.unarchive_recursive = function (connection, f, archive_file_name, cb) {
);
// Start timer
let t2_timeout = false;
const t2_timer = setTimeout(function () {
const t2_timer = setTimeout(() => {
t2_timeout = true;
return do_cb(new Error('bsdtar timed out extracting file '
+ file));
return do_cb(new Error(`bsdtar timed out extracting file ${file}`));
}, plugin.cfg.timeout);

// Create WriteStream for this file
const tws = fs.createWriteStream(tmpfile, { "fd": fd });
err = "";
cmd.stderr.on('data', function (data) {

cmd.stderr.on('data', (data) => {
err += data;
});
cmd.on('exit', function (code, signal) {

cmd.on('exit', (code, signal) => {
count--;
if (t2_timeout) return;
clearTimeout(t2_timer);
if (code && code > 0) {
// Error was returned
return do_cb(new Error('bsdtar returned error code: '
+ code + ' error=' + err.replace(/\r?\n/,' ')));
return do_cb(new Error(`bsdtar returned error code: ${code} error=${err.replace(/\r?\n/,' ')}`));
}
if (signal) {
// Process terminated due to signal
return do_cb(new Error('bsdtar terminated by signal: '
+ signal));
return do_cb(new Error(`bsdtar terminated by signal: ${signal}`));
}
// Recurse
return listFiles(tmpfile, (prefix ? prefix + '/' : '') +
Expand All @@ -241,15 +241,14 @@ exports.unarchive_recursive = function (connection, f, archive_file_name, cb) {
});
})(file, depth);
}
connection.loginfo(plugin, 'finish: count=' + count +
' depth=' + depth);
connection.loginfo(plugin, `finish: count=${count} depth=${depth}`);
if (count === 0) {
return do_cb(null, files);
}
});
}

timer = setTimeout(function () {
timer = setTimeout(() => {
return do_cb(new Error('timeout unpacking attachments'));
}, plugin.cfg.timeout);

Expand All @@ -270,7 +269,7 @@ exports.start_attachment = function (connection, ctype, filename, body, stream)
// Parse Content-Type
let ct;
if ((ct = ctype.match(/^([^/]+\/[^;\r\n ]+)/)) && ct[1]) {
connection.logdebug(plugin, 'found content type: ' + ct[1]);
connection.logdebug(plugin, `found content type: ${ct[1]}`);
txn.notes.attachment_ctypes.push(ct[1]);
}

Expand All @@ -288,16 +287,17 @@ exports.start_attachment = function (connection, ctype, filename, body, stream)
const md5 = crypto.createHash('md5');
let digest;
let bytes = 0;
stream.on('data', function (data) {

stream.on('data', (data) => {
md5.update(data);
bytes += data.length;
});
stream.once('end', function () {

stream.once('end', () => {
stream.pause();

digest = md5.digest('hex');
connection.loginfo(plugin, 'file="' + filename + '" ctype="' + ctype +
'" md5=' + digest + ' bytes=' + bytes);
connection.loginfo(plugin, `file="${filename}" ctype="${ctype}" md5=${digest} bytes=${bytes}`);
txn.notes.attachments.push({
ctype: ((ct && ct[1]) ? ct[1].toLowerCase() : 'unknown/unknown'),
filename: (filename ? filename : ''),
Expand All @@ -307,54 +307,54 @@ exports.start_attachment = function (connection, ctype, filename, body, stream)
});

if (!filename) return;
connection.logdebug(plugin, 'found attachment file: ' + filename);
connection.logdebug(plugin, `found attachment file: ${filename}`);
// See if filename extension matches archive extension list
// We check with the dot prefixed and without
if (archives_disabled || (plugin.archive_exts.indexOf(fileext) === -1 &&
plugin.archive_exts.indexOf(fileext.substring(1)) === -1))
{
return;
}
connection.logdebug(plugin, 'found ' + fileext + ' on archive list');
connection.logdebug(plugin, `found ${fileext} on archive list`);
txn.notes.attachment_count++;
stream.connection = connection;
stream.pause();
tmp.file(function (err, fn, fd) {
tmp.file((err, fn, fd) => {
function cleanup () {
fs.close(fd, function () {
connection.logdebug(plugin, 'closed fd: ' + fd);
connection.logdebug(plugin, `closed fd: ${fd}`);
fs.unlink(fn, function () {
connection.logdebug(plugin, 'unlinked: ' + fn);
connection.logdebug(plugin, `unlinked: ${fn}`);
});
});
stream.resume();
}
if (err) {
txn.notes.attachment_result = [ DENYSOFT, err.message ];
connection.logerror(plugin, 'Error writing tempfile: ' +
err.message);
connection.logerror(plugin, `Error writing tempfile: ${err.message}`);
txn.notes.attachment_count--;
cleanup();
stream.resume();
return next();
}
connection.logdebug(plugin, 'Got tmpfile: attachment="' +
filename + '" tmpfile="' + fn +
'" fd=' + fd);
connection.logdebug(plugin, `Got tmpfile: attachment="${filename}" tmpfile="${fn}" fd=${fd}`);

const ws = fs.createWriteStream(fn);
stream.pipe(ws);
stream.resume();
ws.on('error', function (error) {

ws.on('error', (error) => {
txn.notes.attachment_count--;
txn.notes.attachment_result = [ DENYSOFT, error.message ];
connection.logerror(plugin, 'stream error: ' + error.message);
connection.logerror(plugin, `stream error: ${error.message}`);
cleanup();
return next();
});
ws.on('close', function () {

ws.on('close', () => {
connection.logdebug(plugin, 'end of stream reached');
connection.pause();
plugin.unarchive_recursive(connection, fn, filename, function (error, files) {
plugin.unarchive_recursive(connection, fn, filename, (error, files) => {
txn.notes.attachment_count--;
cleanup();
if (err) {
Expand Down Expand Up @@ -396,7 +396,7 @@ exports.hook_data = function (next, connection) {
txn.notes.attachment_ctypes = [];
txn.notes.attachment_files = [];
txn.notes.attachment_archive_files = [];
txn.attachment_hooks(function (ctype, filename, body, stream) {
txn.attachment_hooks((ctype, filename, body, stream) => {
plugin.start_attachment(connection, ctype, filename, body, stream);
});
return next();
Expand Down Expand Up @@ -434,38 +434,38 @@ exports.check_attachments = function (next, connection) {
const body = txn.body;
let body_ct;
if (body && (body_ct = /^([^/]+\/[^;\r\n ]+)/.exec(body.header.get('content-type')))) {
connection.logdebug(this, 'found content type: ' + body_ct[1]);
connection.logdebug(this, `found content type: ${body_ct[1]}`);
ctypes.push(body_ct[1]);
}
// MIME parts
if (body && body.children) {
for (let c=0; c<body.children.length; c++) {
let child_ct;
if (body.children[c] && (child_ct = /^([^/]+\/[^;\r\n ]+)/.exec(body.children[c].header.get('content-type')))) {
connection.logdebug(this, 'found content type: ' + child_ct[1]);
connection.logdebug(this, `found content type: ${child_ct[1]}`);
ctypes.push(child_ct[1]);
}
}
}

const ctypes_result = this.check_items_against_regexps(ctypes, ctype_config);
if (ctypes_result) {
connection.loginfo(this, 'match ctype="' + ctypes_result[0] + '" regexp=/' + ctypes_result[1] + '/');
return next(DENY, 'Message contains unacceptable content type (' + ctypes_result[0] + ')');
connection.loginfo(this, `match ctype="${ctypes_result[0]}" regexp=/${ctypes_result[1]}/`);
return next(DENY, `Message contains unacceptable content type (${ctypes_result[0]})`);
}

const files = txn.notes.attachment_files;
const files_result = this.check_items_against_regexps(files, file_config);
if (files_result) {
connection.loginfo(this, 'match file="' + files_result[0] + '" regexp=/' + files_result[1] + '/');
return next(DENY, 'Message contains unacceptable attachment (' + files_result[0] + ')');
connection.loginfo(this, `match file="${files_result[0]}" regexp=/${files_result[1]}/`);
return next(DENY, `Message contains unacceptable attachment (${files_result[0]})`);
}

const archive_files = txn.notes.attachment_archive_files;
const archives_result = this.check_items_against_regexps(archive_files, archive_config);
if (archives_result) {
connection.loginfo(this, 'match file="' + archives_result[0] + '" regexp=/' + archives_result[1] + '/');
return next(DENY, 'Message contains unacceptable attachment (' + archives_result[0] + ')');
connection.loginfo(this, `match file="${archives_result[0]}" regexp=/${archives_result[1]}/`);
return next(DENY, `Message contains unacceptable attachment (${archives_result[0]})`);
}

return next();
Expand All @@ -481,7 +481,7 @@ exports.check_items_against_regexps = function (items, regexps) {
reg = new RegExp(regexps[r], 'i');
}
catch (e) {
this.logerror('skipping invalid regexp: /' + regexps[r] + '/ (' + e + ')');
this.logerror(`skipping invalid regexp: /${regexps[r]}/ (${e})`);
}
if (reg) {
for (let i=0; i < items.length; i++) {
Expand Down

0 comments on commit bd68bd5

Please sign in to comment.