Add nodejs with skipper sample #119

Open
wants to merge 1 commit into base: master
3 changes: 2 additions & 1 deletion .gitignore
@@ -7,10 +7,11 @@

# Samples
/samples/Node.js/node_modules/
/samples/Node.js-Skipper/node_modules/

# Editors
.idea

# Tests
sauce_connect.log
/coverage
/coverage
19 changes: 19 additions & 0 deletions samples/Node.js-Skipper/README.md
@@ -0,0 +1,19 @@
# Sample code for Node.js with Skipper

This sample is written for [Node.js](http://nodejs.org/) and uses [Express](http://expressjs.com/) to keep the sample code clean, with [Skipper](https://github.com/balderdashy/skipper) handling the multipart upload streams.

To install and run:

cd samples/Node.js-Skipper
npm install
node app.js

Then browse to [localhost:3000](http://localhost:3000).

File chunks will be uploaded to the samples/Node.js-Skipper/tmp directory, one flow-&lt;identifier&gt;.&lt;chunkNumber&gt; file per chunk.

## Enabling Cross-domain Uploads

If you would like to load the flow.js library from one domain and have your Node.js server reside on another, you must send the 'Access-Control-Allow-Origin: *' header. Please remember, there are some potential security risks in enabling this functionality. If you would still like to implement cross-domain uploads, open app.js and set ACCESS_CONTROL_ALLOW_ORIGIN to true.

Then in public/index.html, on line 49, update the target with your server's address. For example: `target: 'http://www.example.com/upload'`
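
For reference, a minimal client-side configuration for a cross-domain target might look like the sketch below (the URL is a placeholder; `testChunks` simply enables the GET probe this server already supports):

    // Sketch: flow.js client pointed at a remote upload endpoint.
    // The target URL is a placeholder for your server's address.
    var flow = new Flow({
        target: 'http://www.example.com/upload',
        testChunks: true // issue GET /upload before re-uploading a chunk
    });

    flow.assignBrowse(document.getElementById('browseButton'));
    flow.on('fileAdded', function(file, event) {
        flow.upload();
    });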
58 changes: 58 additions & 0 deletions samples/Node.js-Skipper/app.js
@@ -0,0 +1,58 @@
process.env.TMPDIR = 'tmp'; // to avoid the EXDEV rename error, see http://stackoverflow.com/q/21071303/76173

var express = require('express');
var skipper = require('skipper')();
var flow = require('./flow-node.js')('tmp');
var app = express();
app.use(skipper);

// Toggle the Access-Control-Allow-Origin header for cross-domain uploads
var ACCESS_CONTROL_ALLOW_ORIGIN = false;

// Serve static assets from the public folder and the flow.js source
app.use(express.static(__dirname + '/public'));
app.use(express.static(__dirname + '/../../src'));

// Handle uploads through Flow.js
app.post('/upload', function(req, res) {
flow.post(req, function(status, filename, original_filename, identifier) {
console.log('POST', status, original_filename, identifier);
if (ACCESS_CONTROL_ALLOW_ORIGIN) {
res.header("Access-Control-Allow-Origin", "*");
}
// Map the status strings reported by flow-node.js onto HTTP status codes
res.status(status === 'done' || status === 'partly_done' ? 200 : 400).send();
});
});


app.options('/upload', function(req, res){
console.log('OPTIONS');
if (ACCESS_CONTROL_ALLOW_ORIGIN) {
res.header("Access-Control-Allow-Origin", "*");
}
res.status(200).send();
});

// Handle status checks on chunks through Flow.js
app.get('/upload', function(req, res) {
flow.get(req, function(status, filename, original_filename, identifier) {
console.log('GET', status);
if (ACCESS_CONTROL_ALLOW_ORIGIN) {
res.header("Access-Control-Allow-Origin", "*");
}

if (status == 'found') {
status = 200;
} else {
status = 204;
}

res.status(status).send();
});
});

app.get('/download/:identifier', function(req, res) {
flow.write(req.params.identifier, res);
});

app.listen(3000);
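
For a quick sanity check without the browser UI, the chunk-status route can be probed directly; a sketch with illustrative parameter values:

    // Ask the server whether chunk 1 of a (hypothetical) upload already exists.
    var http = require('http');

    var query = 'flowChunkNumber=1&flowChunkSize=1048576' +
        '&flowTotalSize=1048576&flowIdentifier=1048576-examplebin' +
        '&flowFilename=example.bin';

    http.get('http://localhost:3000/upload?' + query, function(res) {
        // 200 if the chunk is already on disk, 204 if it still needs uploading
        console.log('chunk status:', res.statusCode);
    });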
243 changes: 243 additions & 0 deletions samples/Node.js-Skipper/flow-node.js
@@ -0,0 +1,243 @@
var fs = require('fs'),
path = require('path'),
util = require('util'),
Stream = require('stream').Stream;

module.exports = function flow(temporaryFolder)
{
var $ = this;
$.temporaryFolder = temporaryFolder;
$.maxFileSize = null;
$.fileParameterName = 'file';

try {
fs.mkdirSync($.temporaryFolder);
}
catch (e)
{}

function cleanIdentifier(identifier)
{
return identifier.replace(/[^0-9A-Za-z_-]/g, '');
}

function getChunkFilename(chunkNumber, identifier)
{
// Clean up the identifier
identifier = cleanIdentifier(identifier);
// What would the file name be?
return path.resolve($.temporaryFolder, './flow-' + identifier + '.' + chunkNumber);
}

function validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, fileSize)
{
// Clean up the identifier
identifier = cleanIdentifier(identifier);

// Check if the request is sane
// Loose equality is intentional here: the numeric values arrive as strings
if (chunkNumber == 0 || chunkSize == 0 || totalSize == 0 || identifier.length === 0 || filename.length === 0)
{
return 'non_flow_request';
}
var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1);
if (chunkNumber > numberOfChunks)
{
return 'invalid_flow_request1';
}

// Is the file too big?
if ($.maxFileSize && totalSize > $.maxFileSize)
{
return 'invalid_flow_request2';
}

if (typeof(fileSize) != 'undefined')
{
if (chunkNumber < numberOfChunks && fileSize != chunkSize)
{
// The chunk in the POST request isn't the correct size
return 'invalid_flow_request3';
}
if (numberOfChunks > 1 && chunkNumber == numberOfChunks && fileSize != ((totalSize % chunkSize) + parseInt(chunkSize, 10)))
{
// The chunk in the POST request is the last one, and the file is not the correct size
return 'invalid_flow_request4';
}
if (numberOfChunks == 1 && fileSize != totalSize)
{
// The file is only a single chunk, and the data size does not fit
return 'invalid_flow_request5';
}
}
return 'valid';
}

//'found', filename, original_filename, identifier
//'not_found', null, null, null
$.get = function(req, callback)
{
var chunkNumber = req.param('flowChunkNumber', 0);
var chunkSize = req.param('flowChunkSize', 0);
var totalSize = req.param('flowTotalSize', 0);
var identifier = req.param('flowIdentifier', "");
var filename = req.param('flowFilename', "");

if (validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename) == 'valid')
{
var chunkFilename = getChunkFilename(chunkNumber, identifier);
fs.exists(chunkFilename, function(exists)
{
if (exists)
{
callback('found', chunkFilename, filename, identifier);
} else {
callback('not_found', null, null, null);
}
});
} else {
callback('not_found', null, null, null);
}
};

//'partly_done', filename, original_filename, identifier
//'done', filename, original_filename, identifier
//'invalid_flow_request', null, null, null
//'non_flow_request', null, null, null
$.post = function(req, callback)
{

var fields = req.body;
var file = req.file($.fileParameterName);

// Check for a usable upload before touching the stream: when no file
// is attached, _files is empty and _files[0].stream would throw
if (!file || file._files.length === 0)
{
callback('invalid_flow_request', null, null, null);
return;
}
var stream = file._files[0].stream;

var chunkNumber = fields.flowChunkNumber;
var chunkSize = fields.flowChunkSize;
var totalSize = fields.flowTotalSize;
var identifier = cleanIdentifier(fields.flowIdentifier);
var filename = fields.flowFilename;

if (!stream.byteCount)
{
callback('invalid_flow_request', null, null, null);
return;
}

var original_filename = stream.filename;
var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, stream.byteCount);
if (validation == 'valid')
{

var chunkFilename = getChunkFilename(chunkNumber, identifier);

// Save the chunk through the Skipper file upload API
file.upload({saveAs: chunkFilename}, function(err, uploadedFiles){
if (err)
{
callback('invalid_flow_request', null, null, null);
return;
}
// Do we have all the chunks?
var currentTestChunk = 1;
var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1);
var testChunkExists = function()
{
fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists)
{
if (exists)
{
currentTestChunk++;
if (currentTestChunk > numberOfChunks)
{
callback('done', filename, original_filename, identifier);
} else {
// Recursion
testChunkExists();
}
} else {
callback('partly_done', filename, original_filename, identifier);
}
});
};
testChunkExists();
});
} else {
callback(validation, filename, original_filename, identifier);
}
};

// Pipe chunks directly into an existing WritableStream
// r.write(identifier, response);
// r.write(identifier, response, {end:false});
//
// var stream = fs.createWriteStream(filename);
// r.write(identifier, stream);
// stream.on('finish', function(){...});
$.write = function(identifier, writableStream, options)
{
options = options || {};
options.end = (typeof options.end == 'undefined' ? true : options.end);

// Iterate over each chunk
var pipeChunk = function(number)
{
var chunkFilename = getChunkFilename(number, identifier);
fs.exists(chunkFilename, function(exists)
{

if (exists)
{
// If the chunk with the current number exists,
// then create a ReadStream from the file
// and pipe it to the specified writableStream.
var sourceStream = fs.createReadStream(chunkFilename);
sourceStream.pipe(writableStream, {
end: false
});
sourceStream.on('end', function()
{
// When the chunk is fully streamed,
// jump to the next one
pipeChunk(number + 1);
});
} else {
// When all the chunks have been piped, end the stream
if (options.end) writableStream.end();
if (options.onDone) options.onDone();
}
});
};
pipeChunk(1);
};

$.clean = function(identifier, options)
{
options = options || {};

// Iterate over each chunk
var pipeChunkRm = function(number)
{

var chunkFilename = getChunkFilename(number, identifier);

fs.exists(chunkFilename, function(exists)
{
if (exists)
{

console.log('removing chunk file', chunkFilename);
fs.unlink(chunkFilename, function(err)
{
if (err && options.onError) options.onError(err);
});

pipeChunkRm(number + 1);

} else {

if (options.onDone) options.onDone();

}
});
};
pipeChunkRm(1);
};

return $;
};
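
The module can also be driven outside Express, for example to reassemble a finished upload and then delete its chunks; a sketch using a placeholder identifier:

    // Reassemble all chunks for one identifier into a single file,
    // then remove the chunk files. The identifier value is a placeholder.
    var fs = require('fs');
    var flow = require('./flow-node.js')('tmp');

    var out = fs.createWriteStream('reassembled.bin');
    flow.write('1048576-examplebin', out, {
        onDone: function() {
            flow.clean('1048576-examplebin', {
                onDone: function() { console.log('chunks removed'); },
                onError: function(err) { console.error(err); }
            });
        }
    });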
6 changes: 6 additions & 0 deletions samples/Node.js-Skipper/package.json
@@ -0,0 +1,6 @@
{
"dependencies": {
"express": "^4.3.1",
"skipper": "^0.5.5"
}
}
Binary file added samples/Node.js-Skipper/public/cancel.png