diff --git a/.gitignore b/.gitignore index 2523ef93..79f0b46d 100644 --- a/.gitignore +++ b/.gitignore @@ -7,10 +7,11 @@ # Samples /samples/Node.js/node_modules/ +/samples/Node.js-Skipper/node_modules/ # Editors .idea # Tests sauce_connect.log -/coverage \ No newline at end of file +/coverage diff --git a/samples/Node.js-Skipper/README.md b/samples/Node.js-Skipper/README.md new file mode 100644 index 00000000..c9de4f72 --- /dev/null +++ b/samples/Node.js-Skipper/README.md @@ -0,0 +1,19 @@ +# Sample code for Node.js + +This sample is written for [Node.js](http://nodejs.org/) and requires [Express](http://expressjs.com/) to make the sample code cleaner. + +To install and run: + + cd samples/Node.js-Skipper + npm install + node app.js + +Then browse to [localhost:3000](http://localhost:3000). + +File chunks will be uploaded to samples/Node.js-Skipper/tmp directory. + +## Enabling Cross-domain Uploads + +If you would like to load the flow.js library from one domain and have your Node.js reside on another, you must allow 'Access-Control-Allow-Origin' from '*'. Please remember, there are some potential security risks with enabling this functionality. If you would still like to implement cross-domain uploads, open app.js and uncomment lines 24-31 and uncomment line 17. + +Then in public/index.html, on line 49, update the target with your server's address. 
For example: target:'http://www.example.com/upload' diff --git a/samples/Node.js-Skipper/app.js b/samples/Node.js-Skipper/app.js new file mode 100644 index 00000000..2965a718 --- /dev/null +++ b/samples/Node.js-Skipper/app.js @@ -0,0 +1,58 @@ +process.env.TMPDIR = 'tmp'; // to avoid the EXDEV rename error, see http://stackoverflow.com/q/21071303/76173 + +var express = require('express'); +var skipper = require('skipper')(); +var flow = require('./flow-node.js')('tmp'); +var app = express(); +app.use(skipper); + +// Configure access control allow origin header stuff +var ACCESS_CONTROLL_ALLOW_ORIGIN = false; + +// Host most stuff in the public folder +app.use(express.static(__dirname + '/public')); +app.use(express.static(__dirname + '/../../src')); + +// Handle uploads through Flow.js +app.post('/upload', function(req, res) { + flow.post(req, function(status, filename, original_filename, identifier) { + console.log('POST', status, original_filename, identifier); + if (ACCESS_CONTROLL_ALLOW_ORIGIN) { + res.header("Access-Control-Allow-Origin", "*"); + } + res.status(status).send(); + }); +}); + + +app.options('/upload', function(req, res){ + console.log('OPTIONS'); + if (ACCESS_CONTROLL_ALLOW_ORIGIN) { + res.header("Access-Control-Allow-Origin", "*"); + } + res.status(200).send(); +}); + +// Handle status checks on chunks through Flow.js +app.get('/upload', function(req, res) { + flow.get(req, function(status, filename, original_filename, identifier) { + console.log('GET', status); + if (ACCESS_CONTROLL_ALLOW_ORIGIN) { + res.header("Access-Control-Allow-Origin", "*"); + } + + if (status == 'found') { + status = 200; + } else { + status = 204; + } + + res.status(status).send(); + }); +}); + +app.get('/download/:identifier', function(req, res) { + flow.write(req.params.identifier, res); +}); + +app.listen(3000); diff --git a/samples/Node.js-Skipper/flow-node.js b/samples/Node.js-Skipper/flow-node.js new file mode 100644 index 00000000..9d66ff46 --- /dev/null +++ 
b/samples/Node.js-Skipper/flow-node.js @@ -0,0 +1,243 @@ +var fs = require('fs'), + path = require('path'), + util = require('util'), + Stream = require('stream').Stream; + +module.exports = flow = function(temporaryFolder) +{ + var $ = this; + $.temporaryFolder = temporaryFolder; + $.maxFileSize = null; + $.fileParameterName = 'file'; + + try { + fs.mkdirSync($.temporaryFolder); + } + catch (e) + {} + + function cleanIdentifier(identifier) + { + return identifier.replace(/[^0-9A-Za-z_-]/g, ''); + } + + function getChunkFilename(chunkNumber, identifier) + { + // Clean up the identifier + identifier = cleanIdentifier(identifier); + // What would the file name be? + return path.resolve($.temporaryFolder, './flow-' + identifier + '.' + chunkNumber); + } + + function validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, fileSize) + { + // Clean up the identifier + identifier = cleanIdentifier(identifier); + + // Check if the request is sane + if (chunkNumber === 0 || chunkSize === 0 || totalSize === 0 || identifier.length === 0 || filename.length === 0) + { + return 'non_flow_request'; + } + var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1); + if (chunkNumber > numberOfChunks) + { + return 'invalid_flow_request1'; + } + + // Is the file too big? 
+ if ($.maxFileSize && totalSize > $.maxFileSize) + { + return 'invalid_flow_request2'; + } + + if (typeof(fileSize) != 'undefined') + { + if (chunkNumber < numberOfChunks && fileSize != chunkSize) + { + // The chunk in the POST request isn't the correct size + return 'invalid_flow_request3'; + } + if (numberOfChunks > 1 && chunkNumber == numberOfChunks && fileSize != ((totalSize % chunkSize) + parseInt(chunkSize))) + { + // The chunks in the POST is the last one, and the fil is not the correct size + return 'invalid_flow_request4'; + } + if (numberOfChunks == 1 && fileSize != totalSize) + { + // The file is only a single chunk, and the data size does not fit + return 'invalid_flow_request5'; + } + } + return 'valid'; + } + + //'found', filename, original_filename, identifier + //'not_found', null, null, null + $.get = function(req, callback) + { + var chunkNumber = req.param('flowChunkNumber', 0); + var chunkSize = req.param('flowChunkSize', 0); + var totalSize = req.param('flowTotalSize', 0); + var identifier = req.param('flowIdentifier', ""); + var filename = req.param('flowFilename', ""); + + if (validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename) == 'valid') + { + var chunkFilename = getChunkFilename(chunkNumber, identifier); + fs.exists(chunkFilename, function(exists) + { + if (exists) + { + callback('found', chunkFilename, filename, identifier); + } else { + callback('not_found', null, null, null); + } + }); + } else { + callback('not_found', null, null, null); + } + }; + + //'partly_done', filename, original_filename, identifier + //'done', filename, original_filename, identifier + //'invalid_flow_request', null, null, null + //'non_flow_request', null, null, null + $.post = function(req, callback) + { + + var fields = req.body; + var file = req.file($.fileParameterName); + var stream = req.file($.fileParameterName)._files[0].stream; + + var chunkNumber = fields.flowChunkNumber; + var chunkSize = fields.flowChunkSize; + var totalSize = 
fields.flowTotalSize; + var identifier = cleanIdentifier(fields.flowIdentifier); + var filename = fields.flowFilename; + + if (file._files.length === 0 || !stream.byteCount) + { + callback('invalid_flow_request', null, null, null); + return; + } + + var original_filename = stream.filename; + var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, stream.byteCount); + if (validation == 'valid') + { + + var chunkFilename = getChunkFilename(chunkNumber, identifier); + + // Save the chunk by skipper file upload api + file.upload({saveAs:chunkFilename},function(err, uploadedFiles){ + // Do we have all the chunks? + var currentTestChunk = 1; + var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1); + var testChunkExists = function() + { + fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists) + { + if (exists) + { + currentTestChunk++; + if (currentTestChunk > numberOfChunks) + { + callback('done', filename, original_filename, identifier); + } else { + // Recursion + testChunkExists(); + } + } else { + callback('partly_done', filename, original_filename, identifier); + } + }); + }; + testChunkExists(); + }); + } else { + callback(validation, filename, original_filename, identifier); + } + }; + + // Pipe chunks directly in to an existsing WritableStream + // r.write(identifier, response); + // r.write(identifier, response, {end:false}); + // + // var stream = fs.createWriteStream(filename); + // r.write(identifier, stream); + // stream.on('data', function(data){...}); + // stream.on('finish', function(){...}); + $.write = function(identifier, writableStream, options) + { + options = options || {}; + options.end = (typeof options.end == 'undefined' ? 
true : options.end); + + // Iterate over each chunk + var pipeChunk = function(number) + { + var chunkFilename = getChunkFilename(number, identifier); + fs.exists(chunkFilename, function(exists) + { + + if (exists) + { + // If the chunk with the current number exists, + // then create a ReadStream from the file + // and pipe it to the specified writableStream. + var sourceStream = fs.createReadStream(chunkFilename); + sourceStream.pipe(writableStream, { + end: false + }); + sourceStream.on('end', function() + { + // When the chunk is fully streamed, + // jump to the next one + pipeChunk(number + 1); + }); + } else { + // When all the chunks have been piped, end the stream + if (options.end) writableStream.end(); + if (options.onDone) options.onDone(); + } + }); + }; + pipeChunk(1); + }; + + $.clean = function(identifier, options) + { + options = options || {}; + + // Iterate over each chunk + var pipeChunkRm = function(number) + { + + var chunkFilename = getChunkFilename(number, identifier); + + //console.log('removing pipeChunkRm ', number, 'chunkFilename', chunkFilename); + fs.exists(chunkFilename, function(exists) + { + if (exists) + { + + console.log('exist removing ', chunkFilename); + fs.unlink(chunkFilename, function(err) + { + if (err && options.onError) options.onError(err); + }); + + pipeChunkRm(number + 1); + + } else { + + if (options.onDone) options.onDone(); + + } + }); + }; + pipeChunkRm(1); + }; + + return $; +}; diff --git a/samples/Node.js-Skipper/package.json b/samples/Node.js-Skipper/package.json new file mode 100644 index 00000000..313e7e71 --- /dev/null +++ b/samples/Node.js-Skipper/package.json @@ -0,0 +1,6 @@ +{ + "dependencies": { + "express": "^4.3.1", + "skipper": "^0.5.5" + } +} diff --git a/samples/Node.js-Skipper/public/cancel.png b/samples/Node.js-Skipper/public/cancel.png new file mode 100644 index 00000000..f5a10aba Binary files /dev/null and b/samples/Node.js-Skipper/public/cancel.png differ diff --git 
a/samples/Node.js-Skipper/public/index.html b/samples/Node.js-Skipper/public/index.html new file mode 100644 index 00000000..0d5741fa --- /dev/null +++ b/samples/Node.js-Skipper/public/index.html @@ -0,0 +1,200 @@ + + +
+It's a JavaScript library providing multiple simultaneous, stable and resumable uploads via the HTML5 File API.
+ +The library is designed to introduce fault-tolerance into the upload of large files through HTTP. This is done by splitting each file into small chunks; whenever the upload of a chunk fails, uploading is retried until the procedure completes. This allows uploads to automatically resume after a network connection is lost either locally or to the server. Additionally, it allows for users to pause and resume uploads without losing state.
+ +Flow.js relies on the HTML5 File API and the ability to chunk files into smaller pieces. Currently, this means that support is limited to Firefox 4+ and Chrome 11+.
+ +