
added throttling to streaming examples; updated readme

1 parent 4dcc504 commit b9340df1b656db35942f02168f1eb41470b531c1 grippy committed Mar 29, 2010
Showing with 39 additions and 15 deletions.
  1. +10 −4 README
  2. +28 −10 app.js
  3. +1 −1 aws/s3.js
14 README
@@ -46,15 +46,21 @@ node app.js --env=[env option (defaults to dev)]
/save_then_stream_upload (stores in memory, writes to disk, and then streams the file to s3.)
/stream_save_stream_upload (streams the file to disk and then streams the file to s3.)
-Note: chunked encoding isn't supported by s3.
+== Notes ==
+
+-Chunked encoding isn't supported by s3.
+
+-I run into the following error when trying to stream large files (>10MB) to s3. Not exactly sure what the problem is.
+
+ TypeError: Cannot call method 'flush' of undefined
+ at Client.<anonymous> (http:514:20)
+ at node.js:810:9
== Future Improvements ==
-The views are baked into the controller action responses. I may consider porting this to http://github.com/visionmedia/express
--The /save_then_stream_upload action should stream the file to disk using the node fs.createWriteStream api.
-
--The uploading assumes the filedata is binary.
+-Uploading to s3 assumes the filedata is binary.
== Wax It, Flip It, Rub It Down ==
If you find this app useful, please tweak it to your heart's content. If you feel like sharing your modifications, please do.
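
The large-file error noted in the README above is what the throttling added to app.js in this commit works around. A rough standalone sketch of that pause/write/resume pattern, assuming a hypothetical upload object exposing write() and close() (the repo's s3 Stream would be one such object) and a delayMs value you would tune per the "optimal delay times" comment:

    var fs = require('fs');

    function throttledPipe(path, upload, delayMs) {
      var file = fs.createReadStream(path);
      file.addListener('data', function (chunk) {
        file.pause();                    // stop reading while this chunk is flushed
        upload.write(chunk);
        setTimeout(function () {         // resume after the configured delay
          file.resume();
        }, delayMs);
      });
      file.addListener('end', function () {
        upload.close();                  // finish the upload once the file is read
      });
      file.addListener('error', function (err) {
        throw err;
      });
    }

Something like throttledPipe(path, stream, 200) mirrors the 200ms delay used in the handlers below.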
38 app.js
@@ -7,7 +7,7 @@ var http = require("http"),
helper = require("./lib/helper"),
s3 = require("./aws/s3"),
config = require('./config'),
- b64 = require('./aws/crypto/base64');
+ b64 = require('./aws/crypto/base64')
/* globals */
var env = null
@@ -390,14 +390,22 @@ function stream_upload(req, res) {
filetype = part.headers['content-type'];
var args = {'bucket': params['b'], 'file':{'name': filename, 'content_type': filetype}};
// log('start stream...')
- stream.open(args)
+ stream.open(args, function (resp) {
+        // data is only emitted on this response if there is an error. todo: sniff the error message
+ resp.addListener("data", function (chunk) {
+ inspect(chunk)
+ });
+ // the end is always reached
+ resp.addListener("end", function() {
+ log('this is the end')
+ });
+ })
}
});
mp.addListener("body", function(chunk) {
if (name && filename != undefined) {
- // log('stream.write...')
- stream.write(chunk)
+ stream.write(chunk);
} else {
params[name] += chunk;
}
@@ -441,6 +449,8 @@ function stream_upload(req, res) {
/*
* Stores the file in memory, write it to disk, and then streams to s3
+* Attempts to throttle the put stream to allow for large files...
+* Be advised this isn't perfect. You'll want to test the optimal delay times.
*/
function save_then_stream_upload(req, res) {
@@ -474,7 +484,9 @@ function save_then_stream_upload(req, res) {
throw err;
})
file.addListener('data', function(data) {
+ file.pause()
stream.write(data)
+ setTimeout(function(){file.resume()}, 200)
})
file.addListener('end', function(){
stream.close()
@@ -507,7 +519,9 @@ function save_then_stream_upload(req, res) {
}
/*
-* Streams the upload to disk and streams to s3
+* Streams the upload to disk and streams to s3.
+* Attempts to throttle the put stream to allow for large files...
+* Be advised this isn't perfect. You'll want to test the optimal delay times.
*/
function stream_disk_stream_upload(req, res) {
@@ -530,15 +544,15 @@ function stream_disk_stream_upload(req, res) {
if (part.filename != undefined) {
filename = dt + '-' + part.filename.replace(' ', '-');
filetype = part.headers['content-type'];
- log('stream to disk...')
+ //log('stream to disk...')
path = s3.config.upload_directory + filename;
disk = helper.disk(path)
}
});
mp.addListener("body", function(chunk) {
if (name && filename != undefined) {
- log('disk.write...')
+ //log('disk.write...')
disk.write(chunk)
} else {
params[name] += chunk;
@@ -548,7 +562,7 @@ function stream_disk_stream_upload(req, res) {
mp.addListener("partEnd", function(part) {
if (part.name == 'upload-file') {
- log('partend: ' + part.name)
+ // log('partend: ' + part.name)
disk.close()
}
});
@@ -563,10 +577,12 @@ function stream_disk_stream_upload(req, res) {
args = {'bucket': params['b'], 'file':{'name': filename, 'content_type': filetype}};
stream.open(args, function (resp) {
// log here for error messages
- resp.addListener("data", function (chunk) {});
+ resp.addListener("data", function (chunk) {
+ inspect(chunk)
+ });
// the end is always reached
resp.addListener("end", function() {
- sys.puts("this is end... my only friend, the end.");
+ log("this is end... my only friend, the end.");
});
})
@@ -575,7 +591,9 @@ function stream_disk_stream_upload(req, res) {
throw err;
})
file.addListener('data', function(data) {
+ file.pause()
stream.write(data)
+ setTimeout(function(){file.resume()}, 200)
})
file.addListener('end', function(){
stream.close()
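
The "sniff the error message" todo above could be handled by buffering the response body and checking for S3's XML <Error> element, since S3 reports failures as an XML document. A sketch only: resp is assumed to be the response object handed to the stream.open callback, and <Error>, <Code>, and <Message> are the standard S3 error fields.

    function sniffS3Error(resp, callback) {
      var body = '';
      resp.addListener('data', function (chunk) {
        body += chunk;                                   // s3 only sends a body here on failure
      });
      resp.addListener('end', function () {
        if (body.indexOf('<Error>') !== -1) {
          var code = (body.match(/<Code>([^<]*)<\/Code>/) || [])[1];
          var message = (body.match(/<Message>([^<]*)<\/Message>/) || [])[1];
          callback({'code': code, 'message': message});  // surface the parsed error
        } else {
          callback(null);                                // clean upload
        }
      });
    }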
2 aws/s3.js
@@ -179,13 +179,13 @@ Stream.prototype.write = function(chunk){
// log('chunk.length:' + chunk.length.toString())
this.length += chunk.length;
this.request.write(chunk, 'binary')
+ chunk=null;
}
Stream.prototype.close = function(args) {
// replace the content-length placeholder
this.request.output[0] = this.request.output[0].replace('<content-length>', this.length);
this.request.close();
- // inspect(this.request)
}
Stream.prototype.unixtime = function(){
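
The '<content-length>' placeholder patched in close() above exists because S3 rejects chunked transfer encoding, so the total byte count has to land in the headers before the request is flushed. When the source is already a file on disk, an alternative sketch is to stat the file first and set Content-Length up front; host, key, and headers (including the signed Authorization header) are assumed to come from the caller, and this uses the stock http.request and pipe APIs of a later node rather than the repo's client:

    var fs = require('fs'),
        http = require('http');

    function putWithKnownLength(path, host, key, headers, done) {
      fs.stat(path, function (err, stats) {
        if (err) return done(err);
        headers['Content-Length'] = stats.size;      // known up front, so no chunked encoding
        var req = http.request(
          {host: host, method: 'PUT', path: '/' + key, headers: headers},
          function (resp) { done(null, resp); }
        );
        fs.createReadStream(path).pipe(req);         // pipe handles pause/resume (backpressure)
      });
    }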
