Commit dd7abbe

The local storage backend's tests are passing. Next I'll implement the S3 backend.

Tom Boutell committed Dec 9, 2012

Showing 7 changed files with 285 additions and 0 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -0,0 +1 @@
node_modules
40 changes: 40 additions & 0 deletions README.md
@@ -0,0 +1,40 @@
uploadfs
========

Copies files to a web-accessible location and provides a consistent way to get the URLs that correspond to those files. Includes both S3-based and local filesystem-based backends. The API offers the same conveniences with both backends:

* Parent directories are created automatically as needed
* Content types are inferred from file extensions
* Files are automatically marked as being readable via the web when using S3

You can also remove a file if needed.

There is no API to retrieve information about existing files. This is intentional. Constantly manipulating directory information is much slower in the cloud than on a local filesystem, and you should not become reliant on it. Your code should maintain its own database of file information if needed, for instance in a MongoDB collection (a sketch appears after the examples below).

The copyIn method takes a local filename and copies it to a path in uploadfs. Note that Express conveniently handles file uploads by dropping them in a temporary local file for the duration of the request.

Usage:

var uploadfs = require('uploadfs');
uploadfs.init({
  backend: 'local',
  uploadsPath: __dirname + '/public/uploads',
  uploadsUrl: 'http://yoursite.com/uploads'
}, function(e) { ... });

app.post('/profile', function(req, res) {
uploadfs.copyIn(req.files.photo.path, '/profiles/me.jpg', function(e) {
if (e) {
res.send('An error occurred: ' + e);
} else {
res.send('<h1>All is well. Here is the image.</h1>' +
  '<img src="' + uploadfs.getUrl() + '/profiles/me.jpg" />');
}
});
});

Note the use of uploadfs.getUrl() to determine the URL of the uploaded image. Use this method consistently and your code will find the file in the right place regardless of the backend chosen.

Removing a file:

uploadfs.remove('/profiles/me.jpg', function(e) { ... });
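
If you do want to track information about uploaded files yourself, as suggested above, here is a minimal sketch that records each upload in a MongoDB collection. The files collection, its field names, and the insert() call from the MongoDB driver are illustrative assumptions, not part of uploadfs:

    // Assumes "files" is a MongoDB collection opened elsewhere in your app
    uploadfs.copyIn(req.files.photo.path, '/profiles/me.jpg', function(e) {
      if (e) {
        return res.send('An error occurred: ' + e);
      }
      // Record the uploadfs path so the file can be found again later
      files.insert({ path: '/profiles/me.jpg', uploadedAt: new Date() }, function(e) {
        if (e) {
          return res.send('Could not record the upload: ' + e);
        }
        res.send('Uploaded: <img src="' + uploadfs.getUrl() + '/profiles/me.jpg" />');
      });
    });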

That's it. That should be all you need. If not, I'll add things.

Tom Boutell, @boutell, tom@punkave.com
112 changes: 112 additions & 0 deletions local.js
@@ -0,0 +1,112 @@
// Local filesystem-based backend for uploadfs. See also
// s3.js. The main difference between this backend and just using
// the local filesystem directly is that it creates parent
// folders automatically when they are discovered to be missing,
// and it encourages you to write code that will still work
// when you switch to the S3 backend.

var mkdirp = require('mkdirp');
var dirname = require('path').dirname;
var fs = require('fs');

var uploadsPath;
var uploadsUrl;

var self = module.exports = {
init: function(options, callback) {
uploadsPath = options.uploadsPath;
if (!uploadsPath) {
return callback('uploadsPath not set');
}
uploadsUrl = options.uploadsUrl;
if (!uploadsUrl) {
return callback('uploadsUrl not set');
}
return callback(null);
},

copyIn: function(localPath, path, options, callback) {
var uploadPath = uploadsPath + path;
// Other people's implementations of fs.copy() lack
// error handling, let's be thorough and also implement
// a retry that does mkdirp() for consistency with S3
var sin = fs.createReadStream(localPath);
var sout = fs.createWriteStream(uploadPath);

sin.on('error', function(e) {
errorCleanup();
return callback(e);
});

sout.on('error', function(e) {
// If the destination folder doesn't exist yet,
// retry the whole thing after recursively creating
// the folder and its parents as needed, avoiding the
// overhead of checking for folders in the majority
// of cases where they already exist. Try this up to
// 5 times to guard against rare race conditions with
// the rmdir mechanism (see remove()).
if ((e.code === 'ENOENT') && ((!options.afterMkdirp) || (options.afterMkdirp <= 5))) {
mkdirp(dirname(uploadPath), function (e) {
if (e) {
return callback(e);
}
options.afterMkdirp = options.afterMkdirp ? (options.afterMkdirp + 1) : 1;
return self.copyIn(localPath, path, options, callback);
});
return;
}
errorCleanup();
return callback(e);
});

sout.on('close', function() {
return callback();
});

// Carry out the actual copying
sin.pipe(sout);

function errorCleanup() {
// These are async methods, provide callbacks although
// we don't really have any practical steps to take if
// we somehow can't clean up after an error has
// already been caught
sin.destroy(function(e) { });
sout.destroy(function(e) { });
// This will fail if we weren't able to write to
// uploadPath in the first place; don't get excited
fs.unlink(uploadPath, function(e) { });
}
},

remove: function(path, callback) {
var uploadPath = uploadsPath + path;
fs.unlink(uploadPath, callback);
    // After a random interval to prevent a slamming scenario,
    // attempt to remove the folder. If it is empty, this will
    // succeed. In that case, try again with the parent folder until
    // we run out of parents. This will eventually purge all empty
    // subdirectories once all files have been removed.
removeDirectoryLaterIfEmpty(dirname(path));
function removeDirectoryLaterIfEmpty(path) {
// Don't remove the main upload dir
if (path.length <= 1) {
return;
}
setTimeout(function() {
fs.rmdir(uploadsPath + path, function(e) {
if (!e) {
removeDirectoryLaterIfEmpty(dirname(path));
}
});
}, Math.random() * 1000 + 1000);
}
},

getUrl: function() {
return uploadsUrl;
},

};

24 changes: 24 additions & 0 deletions package.json
@@ -0,0 +1,24 @@
{
"name": "uploadfs",
"version": "0.1.0",
"description": "Store files in a web-accessible location. Includes both S3-based and local filesystem-based backends.",
"main": "uploadfs.js",
"scripts": {
"test": "node test.js"
},
"repository": {
"type": "git",
"url": "git@github.com:boutell/uploadfs.git"
},
"keywords": [
"upload",
"files",
"s3",
"storage"
],
"author": "Thomas Boutell",
"license": "MIT",
"dependencies": {
"mkdirp": "~0.3.4"
}
}
74 changes: 74 additions & 0 deletions test.js
@@ -0,0 +1,74 @@
var uploadfs = require('./uploadfs.js');
var fs = require('fs');

console.log('Initializing uploadfs');

uploadfs.init({ backend: 'local', uploadsPath: __dirname + '/test', uploadsUrl: 'http://localhost:3000/test' }, function(e) {
if (e) {
console.log('uploadfs.init failed:');
console.log(e);
process.exit(1);
}
testCopyIn();
});

function testCopyIn() {
console.log('testing copyIn');
uploadfs.copyIn('test.txt', '/one/two/three/test.txt', function(e) {
if (e) {
console.log('testCopyIn failed:');
console.log(e);
process.exit(1);
}
var content = fs.readFileSync('test/one/two/three/test.txt', 'utf8');
var original = fs.readFileSync('test.txt', 'utf8');
if (content !== original) {
console.log('testCopyIn did not copy the file faithfully.');
process.exit(1);
}
testRemove();
});
}

function testRemove() {
console.log('testing remove');
uploadfs.remove('/one/two/three/test.txt', function(e) {
if (e) {
console.log('testRemove failed:');
console.log(e);
process.exit(1);
}
if (fs.existsSync('test/one/two/three/test.txt')) {
console.log('testRemove did not remove the file.');
process.exit(1);
}
testRmdir();
});
}

function testRmdir() {
console.log('testing the automatic empty folder cleanup mechanism');
console.log('Waiting for the automatic empty folder cleanup mechanism to finish.');
setTimeout(function() {
if (fs.existsSync('test/one')) {
console.log('testRmdir saw that test/one still existed.');
process.exit(1);
}
testGetUrl();
}, 10000);
}

function testGetUrl() {
console.log('testing getUrl');
var url = uploadfs.getUrl();
if (url + '/one/two/three/test.txt' !== 'http://localhost:3000/test/one/two/three/test.txt') {
console.log('testGetUrl did not return the expected URL.');
process.exit(1);
}
success();
}

function success() {
console.log('All tests passing.');
process.exit(0);
}
1 change: 1 addition & 0 deletions test.txt
@@ -0,0 +1 @@
This is a test text file.
33 changes: 33 additions & 0 deletions uploadfs.js
@@ -0,0 +1,33 @@
var backend;

var self = module.exports = {
init: function(options, callback) {
if (!options.backend) {
return callback("backend must be specified");
}
// Load standard backends, by name
if (typeof(options.backend) === 'string') {
options.backend = require(__dirname + '/' + options.backend + '.js');
}
// Custom backends can be passed as objects
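    // (a custom backend must provide the same interface as local.js:
    // init(options, callback), copyIn(localPath, path, options, callback),
    // getUrl() and remove(path, callback))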
backend = options.backend;
return backend.init(options, callback);
},

copyIn: function(localPath, path, options, callback) {
if (typeof(options) === 'function') {
callback = options;
options = {};
}
return backend.copyIn(localPath, path, options, callback);
},

getUrl: function(options, callback) {
return backend.getUrl(options, callback);
},

remove: function(path, callback) {
return backend.remove(path, callback);
}
};
