Skip to content
Browse files

Adding deps and continuous support.

  • Loading branch information...
1 parent 2420eb4 commit 6397cbf39a5e26c60c7fc3a83b2458399750af21 @mikeal committed Aug 25, 2011
Showing with 3,253 additions and 20 deletions.
  1. +86 −20 main.js
  2. +2 −0 node_modules/follow/.gitignore
  3. +111 −0 node_modules/follow/README.md
  4. +15 −0 node_modules/follow/api.js
  5. +137 −0 node_modules/follow/changes.js
  6. +53 −0 node_modules/follow/cli.js
  7. +450 −0 node_modules/follow/feed.js
  8. +30 −0 node_modules/follow/lib.js
  9. +13 −0 node_modules/follow/package.json
  10. +4 −0 node_modules/formidable/.gitignore
  11. +9 −0 node_modules/formidable/Makefile
  12. +258 −0 node_modules/formidable/Readme.md
  13. +3 −0 node_modules/formidable/TODO
  14. +70 −0 node_modules/formidable/benchmark/bench-multipart-parser.js
  15. +43 −0 node_modules/formidable/example/post.js
  16. +48 −0 node_modules/formidable/example/upload.js
  17. +1 −0 node_modules/formidable/index.js
  18. +61 −0 node_modules/formidable/lib/file.js
  19. +349 −0 node_modules/formidable/lib/incoming_form.js
  20. +3 −0 node_modules/formidable/lib/index.js
  21. +303 −0 node_modules/formidable/lib/multipart_parser.js
  22. +25 −0 node_modules/formidable/lib/querystring_parser.js
  23. +6 −0 node_modules/formidable/lib/util.js
  24. +15 −0 node_modules/formidable/package.json
  25. +24 −0 node_modules/formidable/test/common.js
  26. +72 −0 node_modules/formidable/test/fixture/multipart.js
  27. +80 −0 node_modules/formidable/test/integration/test-multipart-parser.js
  28. +104 −0 node_modules/formidable/test/simple/test-file.js
  29. +710 −0 node_modules/formidable/test/simple/test-incoming-form.js
  30. +50 −0 node_modules/formidable/test/simple/test-multipart-parser.js
  31. +45 −0 node_modules/formidable/test/simple/test-querystring-parser.js
  32. +72 −0 node_modules/formidable/test/system/test-multi-video-upload.js
  33. +1 −0 node_modules/request
View
106 main.js
@@ -1,8 +1,9 @@
var request = require('request')
, events = require('events')
, util = require('util')
+ , follow = require('follow')
+ , formidable = require('formidable')
, r = request.defaults({json:true})
- // , m = request.defaults({headers:{'accept':'multipart/related'}})
;
function requests () {
@@ -16,7 +17,7 @@ function requests () {
(function (i) {
r(args[i], function (e, resp, body) {
if (e) errors[i] = e
- if (resp.statusCode !== 200) errors[i] = new Error("status is not 200.")
+ else if (resp.statusCode !== 200) errors[i] = new Error("status is not 200.")
results.push([i, body])
if (results.length === args.length) {
var fullresults = [errors.length ? errors : null]
@@ -36,6 +37,64 @@ function Replicator (options) {
// events.EventEmitter.prototype.call(this)
}
util.inherits(Replicator, events.EventEmitter)
+Replicator.prototype.pushDoc = function (id, rev, cb) {
+ var options = this
+ , headers = {'accept':"multipart/related,application/json"}
+ ;
+
+ if (!cb) cb = function () {}
+
+ if (options.filter && options.filter(id, rev) === false) return cb({id:id, rev:rev, filter:false})
+
+ if (!options.mutation) {
+ request
+ .get({url: options.from + encodeURIComponent(id) + '?attachments=true&revs=true&rev=' + rev, headers:headers})
+ .pipe(request.put(options.to + encodeURIComponent(id) + '?new_edits=false&rev=' + rev, function (e, resp, b) {
+ if (e) {
+ cb({error:e, id:id, rev:rev, body:b})
+ } else if (resp.statusCode > 199 && resp.statusCode < 300) {
+ cb({id:id, rev:rev, success:true, resp:resp, body:b})
+ } else {
+ cb({error:"status code is not 201.", id:id, resp:resp, body:b})
+ }
+
+ }))
+ } else {
+ var form = new formidable.IncomingForm();
+ request.get(
+ { uri: options.from + encodeURIComponent(id) + '?attachments=true&revs=true&rev=' + rev
+ , onResponse: function (e, resp) {
+ // form.parse(resp)
+ }
+ }, function (e, resp, body) {
+ console.log(resp.statusCode)
+ console.log(resp.headers)
+ // console.error(body)
+ }
+ )
+ // form.parse(,
+ // function(err, fields, files) {
+ // options.mutate(err, fields, files)
+ // }
+ // )
+
+ //
+ // .pipe(request.put(options.to + id + '?new_edits=false&rev=' + rev, function (e, resp, b) {
+ // if (e) {
+ // options.emit("failed", e)
+ // results[id] = {error:e}
+ // } else if (resp.statusCode === 201) {
+ // options.emit("pushed", resp, b)
+ // results[id] = {rev:rev, success:true}
+ // } else {
+ // options.emit("failed", resp, b)
+ // results[id] = {error:"status code is not 201.", resp:resp, body:b}
+ // }
+ // cb(e, resp b)
+ // }))
+ }
+}
+
Replicator.prototype.push = function (cb) {
var options = this
if (options.from[options.from.length - 1] !== '/') options.from += '/'
@@ -49,39 +108,45 @@ Replicator.prototype.push = function (cb) {
if (e) throw e
if (resp.statusCode !== 200) throw new Error("status is not 200.")
var byid = {}
+ options.since = body.results[body.results.length - 1].seq
body.results.forEach(function (change) {
byid[change.id] = change.changes.map(function (r) {return r.rev})
})
-
r.post({url:options.to + '_missing_revs', json:byid}, function (e, resp, body) {
var results = {}
, counter = 0
;
body = body.missing_revs
for (var id in body) {
(function (id) {
+ if (!id) return;
body[id].forEach(function (rev) {
counter++
- request
- .get(options.from + id + '?attachments=true&revs=true&rev=' + rev)
- .pipe(request.put(options.to + id + '?new_edits=false&rev=' + rev, function (e, resp, b) {
- if (e) {
- options.emit("failed", e)
- results[id] = {error:e}
- } else if (resp.statusCode === 201) {
- options.emit("pushed", resp, b)
- results[id] = {rev:rev, success:true}
- } else {
- options.emit("failed", resp, b)
- results[id] = {error:"status code is not 201.", resp:resp, body:b}
- }
- if (counter-- === 0) cb(null, results)
- }))
+ options.pushDoc(id, rev, function (obj) {
+ if (obj.error) options.emit('failed', obj)
+ else options.emit('pushed', obj)
+ counter--
+ if (counter === 0) cb(results)
+ })
})
})(id)
}
- if (Object.keys(body).length === 0) cb(null, {})
+ if (Object.keys(body).length === 0) cb({})
+ })
+ })
+ })
+}
+Replicator.prototype.continuous = function () {
+ var options = this
+ options.push(function () {
+ follow({db:options.from, since:options.since}, function (e, change) {
+ if (e) return
+ change.changes.forEach(function (o) {
+ options.pushDoc(change.id, o.rev, function (obj) {
+ if (obj.error) options.emit('failed', obj)
+ else options.emit('pushed', obj)
+ })
})
})
})
@@ -98,4 +163,5 @@ function replicate (from, to) {
return rep
}
-replicate("http://mikeal.iriscouch.com/hoodies", "http://mikeal.iriscouch.com/blank")
+module.exports = replicate
+replicate.Replicator = Replicator
View
2 node_modules/follow/.gitignore
@@ -0,0 +1,2 @@
+.*.swp
+node_modules
View
111 node_modules/follow/README.md
@@ -0,0 +1,111 @@
+# Follow: CouchDB changes notifier for NodeJS
+
+Follow (upper-case *F*) comes from an internal Iris Couch project used in production for over a year.
+
+## Objective
+
+The API must be very simple: notify me every time a change happens in the DB. Also, never fail.
+
+If an error occurs, Follow will internally retry without notifying your code.
+
+Specifically, this should be possible:
+
+1. Begin a changes feed. Get a couple of change callbacks
+2. Shut down CouchDB
+3. Go home. Have a nice weekend. Come back on Monday.
+4. Start CouchDB with a different IP address
+5. Make a couple of changes
+6. Update DNS so the domain points to the new IP
+7. Once DNS propagates, get a couple more change callbacks
+
+## Failure Mode
+
+If CouchDB permanently crashes, there is a choice of failure modes:
+
+* **Default:** Simply never call back with a change again
+* **Optional:** Specify an *inactivity* timeout. If no changes happen by the timeout, Follow will signal an error.
+
+## Very Simple API
+
+This looks much like the `request` package.
+
+ var follow_couchdb = require('follow_couchdb');
+ follow_couchdb("https://example.iriscouch.com/boogie", function(error, change) {
+ if(!error) {
+ console.log("Got change number " + change.seq + ": " + change.id);
+ }
+ })
+
+The `error` parameter to the callback will basically always be `null`.
+
+The first argument can be an object, useful to include the documents in the feed.
+
+ follow_couchdb({db:"https://example.iriscouch.com/boogie", include_docs:true}, function(error, change) {
+ if(!error) {
+ console.log("Change " + change.seq + " has " + Object.keys(change.doc).length + " fields");
+ }
+ })
+
+### follow_couchdb(options, callback)
+
+The first argument is an options object. The only required option is `db`. Instead of an object, you can use a string to indicate the `db` value.
+
+All of the CouchDB _changes options are allowed. See http://guide.couchdb.org/draft/notifications.html.
+
+* `db` | Fully-qualified URL of a couch database. (Basic auth URLs are ok.)
+* `since` | The sequence number to start from. Use `"now"` to start from the latest change in the DB.
+* `heartbeat` | Milliseconds within which CouchDB must respond (default: **30000** or 30 seconds)
+* `feed` | **Optional but only "continuous" is allowed**
+* `filter` |
+ * **Either** a path to design document filter, e.g. `app/important`
+ * **Or** a Javascript `function(doc, req) { ... }` which should return true or false
+
+Besides the CouchDB options, more are available:
+
+* `headers` | Object with HTTP headers to add to the request
+* `inactivity_ms` | Maximum time to wait between **changes**. Omitting this means no maximum.
+
+## Object API
+
+The main API is a thin wrapper around the EventEmitter API.
+
+ var follow_couchdb = require('follow_couchdb');
+
+ var opts = {}; // Same options parameters as before
+ var feed = new follow_couchdb.Feed(opts);
+
+ // You can also set values directly.
+ feed.db = "http://example.iriscouch.com/boogie";
+ feed.since = 3;
+ feed.heartbeat = 30 * 1000
+ feed.inactivity_ms = 86400 * 1000;
+
+ feed.filter = function(doc, req) {
+ // req.query is the parameters from the _changes request.
+ console.log('Filtering for query: ' + JSON.stringify(req.query));
+
+ if(doc.stinky || doc.ugly)
+ return false;
+ return true;
+ }
+
+ feed.on('change', function(change) {
+ console.log('Doc ' + change.id + ' in change ' + change.seq + ' is neither stinky nor ugly.');
+ })
+
+ feed.on('error', function(er) {
+ console.error('Since Follow always retries on errors, this must be serious');
+ throw er;
+ })
+
+ feed.follow();
+
+## Error conditions
+
+Follow is happy to retry over and over, for all eternity. It will only emit an error if it thinks your whole application might be in trouble.
+
+* *DB confirmation* failed: Follow confirms the DB with a preliminary query, which must reply properly.
+* *Your inactivity timer* expired: This is a last-ditch way to detect possible errors. What if couch is sending heartbeats just fine, but nothing has changed for 24 hours? You know that for your app, 24 hours with no change is impossible. Maybe your filter has a bug? Maybe you queried the wrong DB? Whatever the reason, Follow will emit an error.
+* JSON parse error, which should be impossible from CouchDB
+* Invalid change object format, which should be impossible from CouchDB
+* Internal error, if the internal state seems wrong, e.g. cancelling a timeout that already expired, etc. Follow tries to fail early.
View
15 node_modules/follow/api.js
@@ -0,0 +1,15 @@
+// The changes_couchdb API
+//
+
+var feed = require('./feed');
+
+function follow_feed(opts, cb) {
+ var ch_feed = new feed.Feed(opts);
+ ch_feed.on('error' , function(er) { return cb && cb(er) });
+ ch_feed.on('change', function(ch) { return cb && cb(null, ch) });
+ ch_feed.follow();
+ return ch_feed;
+}
+
+module.exports = follow_feed;
+module.exports.Feed = feed.Feed;
View
137 node_modules/follow/changes.js
@@ -0,0 +1,137 @@
+// Follow a CouchDB changes feed.
+//
+
+var lib = require('../lib')
+ , request = require('request')
+ , events = require('events')
+ ;
+
+var LOG = lib.logging.getLogger('couch_changes');
+LOG.setLevel(process.env.changes_level || "info");
+
+function scrub_creds(url) {
+ return url.replace(/^(https?:\/\/)[^:]+:[^@]+@(.*)$/, '$1$2'); // Scrub username and password
+}
+
+// Start a changes request.
+exports.follow = function(opts, cb) {
+ LOG.debug('Beginning changes request: ' + scrub_creds(opts.db));
+
+ var req_opts = {uri:opts.db};
+ var heartbeat_ms = opts.heartbeat || 2500
+ , timeout_ms = heartbeat_ms * 3;
+
+ if(req_opts.uri[req_opts.uri.length - 1] != '/')
+ req_opts.uri += '/';
+ req_opts.uri += '_changes?feed=continuous&heartbeat=' + heartbeat_ms;
+
+ // include_docs default is TRUE unless explicitly asked to be disabled.
+ if(opts.include_docs !== false)
+ req_opts.uri += '&include_docs=true';
+
+ Object.keys(opts).forEach(function(key) {
+ if(key == 'db' || key == 'heartbeat') {
+ // No copy.
+ } else if(key == 'filter' && typeof opts[key] == 'function') {
+ var old_cb = cb, filter = opts[key];
+ cb = function(change) {
+ if(old_cb) {
+ var req = {query: JSON.parse(JSON.stringify(opts))};
+ if(filter(change.doc, req))
+ old_cb(change);
+ else {
+ //LOG.debug("FILTERED\n" + require('sys').inspect(change));
+ }
+ }
+ }
+ } else {
+ req_opts.uri += lib.sprintf('&%s=%s', key, opts[key]);
+ }
+ })
+
+ var seq = opts.since || 0;
+ var timeout_id, retried = false;
+ var retry = function(delay) {
+ if(retried) {
+ LOG.warn("This retry already ran, aborting");
+ return;
+ } else {
+ retried = true;
+ }
+ LOG.debug('Retrying from seq: ' + seq + (delay ? (" after " + delay + "s") : ""));
+ clearTimeout(timeout_id);
+
+ // No-op the existing stream in case anything else comes in later.
+ if(req_opts.client) {
+ req_opts.client.end();
+ LOG.debug('Closed old changes client');
+ }
+ req_opts.responseBodyStream.write = function(chunk) { LOG.warn("Ignoring old change feed " + req_opts.uri + ": " + JSON.stringify(chunk)) };
+ req_opts.responseBodyStream.end = function() { LOG.warn("Closing old change feed: " + req_opts.uri) };
+
+ var new_opts = JSON.parse(JSON.stringify(opts));
+ new_opts.since = seq;
+
+ if(delay)
+ setTimeout(function() { exports.follow(new_opts, cb); }, delay * 1000);
+ else
+ exports.follow(new_opts, cb);
+ }
+
+ var timeout_id;
+ var timed_out = function() {
+ LOG.error('Retry on heartbeat timeout after ' + timeout_ms + 'ms');
+ retry();
+ }
+
+ var buf = '';
+ req_opts.responseBodyStream = new events.EventEmitter;
+ req_opts.responseBodyStream.write = function(chunk) {
+ buf += (chunk || '');
+
+ clearTimeout(timeout_id);
+ timeout_id = setTimeout(timed_out, timeout_ms);
+
+ // Buf could have 0, 1, or many JSON objects in it.
+ var offset, json;
+ while((offset = buf.indexOf("\n")) >= 0) {
+ json = buf.substr(0, offset);
+ buf = buf.substr(offset + 1);
+
+ if(json == '') {
+ // This is a heartbeat.
+ } else {
+ //LOG.debug('JSON: ' + json);
+ var change = JSON.parse(json);
+ //LOG.debug('Object:\n' + require('sys').inspect(change));
+
+ seq = change.seq;
+ if(!seq) {
+ LOG.fatal("seq was not defined in change: " + json);
+ throw new Error("seq was not defined in change: " + json);
+ }
+
+ cb && cb(change);
+ }
+ }
+ }
+
+ req_opts.responseBodyStream.end = function() {
+ LOG.error("A changes feed should never end. Restarting from last known sequence: " + seq);
+ retry();
+ }
+
+ var clean_req = JSON.parse(JSON.stringify(req_opts));
+ clean_req.uri = scrub_creds(clean_req.uri);
+ LOG.debug('Requesting: ' + JSON.stringify(clean_req));
+
+ request(req_opts, function(er, res, bodyStream) {
+ LOG.debug('Called back: ' + require('sys').inspect({er:er, res:res, bodyStream:bodyStream}));
+ if(er) {
+ LOG.error('Error requesting "' + JSON.stringify(req_opts.uri), er);
+ retry(10);
+ }
+ })
+
+ timeout_id = setTimeout(timed_out, timeout_ms);
+}
View
53 node_modules/follow/cli.js
@@ -0,0 +1,53 @@
+#!/usr/bin/env node
+// The changes_couchdb command-line interface.
+//
+
+var lib = require('./lib')
+ , couch_changes = require('./api')
+ ;
+
+function usage() {
+ console.log([ 'usage: changes_couchdb <URL>'
+ , ''
+ ].join("\n"));
+}
+
+var db = process.argv[2];
+if(! /^https?:\/\//.test(db))
+ db = 'http://' + db;
+
+console.log('Watching:', db);
+
+var feed = new couch_changes.Feed();
+feed.db = db;
+feed.since = (process.env.since === 'now') ? 'now' : parseInt(process.env.since || '0');
+feed.heartbeat = parseInt(process.env.heartbeat || '3000');
+
+if(process.env.host)
+ feed.headers.host = process.env.host;
+
+if(process.env.inactivity)
+ feed.inactivity_ms = parseInt(process.env.inactivity);
+
+feed.filter = function(doc, req) {
+ // This is a local filter. It runs on the client side.
+ return true;
+}
+
+feed.on('change', function(change) {
+ console.log('Change:' + JSON.stringify(change));
+})
+
+feed.on('error', function(er) {
+ //console.error(er);
+ console.error('Changes error ============\n' + er.stack);
+ setTimeout(function() { process.exit(0) }, 100);
+})
+
+process.on('uncaughtException', function(er) {
+ console.log('========= UNCAUGHT EXCEPTION; This is bad');
+ console.log(er.stack);
+ setTimeout(function() { process.exit(1) }, 100);
+})
+
+feed.follow();
View
450 node_modules/follow/feed.js
@@ -0,0 +1,450 @@
+// Core routines for event emitters
+//
+
+var lib = require('./lib')
+ , url = require('url')
+ , util = require('util')
+ , request = require('request')
+ , querystring = require('querystring')
+ ;
+
+var DEFAULT_HEARTBEAT = 30000;
+var HEARTBEAT_TIMEOUT_COEFFICIENT = 1.25; // E.g. heartbeat 1000ms would trigger a timeout after 1250ms of no heartbeat.
+var DEFAULT_MAX_RETRY_SECONDS = 60 * 60;
+var INITIAL_RETRY_DELAY = 1000;
+
+var FEED_PARAMETERS = ['since', 'limit', 'feed', 'heartbeat', 'filter', 'include_docs'];
+
+var SUPER_CLASS = require('events').EventEmitter;
+//var SUPER_CLASS = require('stream').Stream;
+
+function Feed (opts) {
+ var self = this;
+ SUPER_CLASS.call(self);
+
+ self.feed = 'continuous';
+ self.heartbeat = DEFAULT_HEARTBEAT;
+ self.max_retry_seconds = DEFAULT_MAX_RETRY_SECONDS;
+ self.inactivity_ms = null;
+
+ self.headers = {};
+ self.request = {}; // Extra options for potentially future versions of request. The caller can supply them.
+
+ self.since = 0;
+ self.retry_delay = INITIAL_RETRY_DELAY; // ms
+
+ opts = opts || {};
+ if(typeof opts === 'string')
+ opts = {'db': opts};
+ Object.keys(opts).forEach(function(key) {
+ self[key] = opts[key];
+ })
+
+ self.pending = { request : null
+ , activity_at : null
+ , data : null
+ };
+
+ /*
+ // Debugging help
+ var events = ['drain', 'error', 'close', 'pipe', 'data', 'end', 'fd'];
+
+ events.forEach(function(ev) {
+ //self.on(ev, function() { return self['on_' + ev].apply(self, arguments) });
+ self.on(ev, function() {
+ var args = Array.prototype.slice.call(arguments);
+ self.log.debug('EVENT ' + ev + ' ' + lib.JS(args));
+ })
+ })
+ */
+
+} // Feed
+util.inherits(Feed, SUPER_CLASS);
+
+Feed.prototype.start =
+Feed.prototype.follow = function follow_feed() {
+ var self = this;
+
+ if(!self.db)
+ throw new Error('Database URL required');
+
+ if(self.feed !== 'continuous')
+ throw new Error('The only valid feed option is "continuous"');
+
+ if(typeof self.heartbeat !== 'number')
+ throw new Error('Required "heartbeat" value');
+
+ var parsed = url.parse(self.db);
+ self.log = lib.log4js().getLogger(parsed.hostname + parsed.pathname);
+ self.log.setLevel(process.env.changes_level || "info");
+
+ return self.confirm();
+}
+
+Feed.prototype.confirm = function confirm_feed() {
+ var self = this;
+
+ self.db_safe = lib.scrub_creds(self.db);
+
+ self.log.debug('Checking database: ' + self.db_safe);
+ self.emit('confirm');
+
+ var confirm_timeout = self.heartbeat * 3; // Give it time to look up the name, connect, etc.
+ var timeout_id = setTimeout(function() {
+ return self.die(new Error('Timeout confirming database: ' + self.db_safe));
+ }, confirm_timeout);
+
+ var headers = lib.JP(lib.JS(self.headers));
+ headers.accept = 'application/json';
+
+ request({uri:self.db, headers:headers}, function(er, resp, body) {
+ clearTimeout(timeout_id);
+
+ if(er)
+ return self.die(er);
+
+ var db;
+ try {
+ db = JSON.parse(body)
+ } catch(json_er) {
+ return self.emit('error', json_er)
+ }
+
+ if(!db.db_name || !db.instance_start_time)
+ return self.emit('error', new Error('Bad DB response: ' + body));
+
+ self.log.debug('Confirmed db: ' + self.db_safe);
+
+ if(self.since === 'now') {
+ self.since = db.update_seq;
+ self.log.debug('Query since "now" will start at ' + self.since);
+ }
+
+ return self.query();
+ })
+}
+
+Feed.prototype.query = function query_feed() {
+ var self = this;
+
+ var query_params = {};
+ FEED_PARAMETERS.forEach(function(key) {
+ if(key in self)
+ query_params[key] = self[key];
+ })
+
+ if(typeof query_params.filter !== 'string')
+ delete query_params.filter;
+
+ var feed_url = self.db + '/_changes?' + querystring.stringify(query_params);
+
+ self.headers.accept = self.headers.accept || 'application/json';
+ var req = { method : 'GET'
+ , uri : feed_url
+ , headers: self.headers
+ , encoding: 'utf-8'
+ }
+
+ req.changes_query = query_params;
+ Object.keys(self.request).forEach(function(key) {
+ req[key] = self.request[key];
+ })
+
+ var now = new Date;
+ self.log.debug('Query at ' + lib.JP(lib.JS(now)) + ': ' + lib.scrub_creds(feed_url));
+
+ var timeout_id, timed_out = false;
+ var in_flight, timeout_id, timed_out = false;
+
+ var timed_out = false;
+ function on_timeout() {
+ self.log.debug('Request timeout: ' + in_flight.id());
+ timed_out = true;
+ return self.retry();
+ }
+
+ function on_response(er, resp) {
+ clearTimeout(timeout_id);
+
+ if(timed_out) {
+ self.log.debug('Ignoring late response: ' + in_flight.id());
+ return destroy_response(resp);
+ }
+
+ if(er) {
+ self.log.debug('Request error ' + in_flight.id() + ': ' + er.stack);
+ destroy_response(resp);
+ return self.retry();
+ }
+
+ if(resp.statusCode !== 200) {
+ self.log.debug('Bad changes response' + in_flight.id() + ': ' + resp.statusCode);
+ destroy_response(resp);
+ return self.retry();
+ }
+
+ self.log.debug('Good response: ' + in_flight.id());
+ self.retry_delay = INITIAL_RETRY_DELAY;
+ return self.prep(in_flight);
+ }
+
+ req.onResponse = on_response;
+ timeout_id = setTimeout(on_timeout, self.heartbeat);
+ in_flight = request(req);
+ in_flight.created_at = now;
+ in_flight.id = function() { return lib.JP(lib.JS(this.created_at)) };
+
+ // Shorten the timestamp, used for debugging.
+ //in_flight.id = function() { return /\.(\d\d\d)Z$/.exec(lib.JP(lib.JS(this.created_at)))[1] };
+
+ return self.emit('query');
+}
+
+Feed.prototype.prep = function prep_request(req) {
+ var self = this;
+
+ var now = new Date;
+ self.pending.request = req;
+ self.pending.activity_at = now;
+ self.pending.data = "";
+ self.pending.wait_timer = null;
+
+ function handler_for(ev) {
+ var name = 'on_couch_' + ev;
+ var inner_handler = self[name];
+
+ function handle_confirmed_req_event() {
+ if(self.pending.request === req)
+ return inner_handler.apply(self, arguments);
+
+ if(!req.created_at)
+ return self.die(new Error("Received data from unknown request")); // Pretty sure this is impossible.
+
+ var s_to_now = (new Date() - req.created_at) / 1000;
+ var s_to_req = '[no req]';
+ if(self.pending.request)
+ s_to_req = (self.pending.request.created_at - req.created_at) / 1000;
+
+ var msg = ': ' + req.id() + ' to_req=' + s_to_req + 's, to_now=' + s_to_now + 's';
+
+ if(ev === 'end') {
+ return self.log.debug('Old END' + msg);
+ return destroy_req(req);
+ }
+
+ if(ev === 'data') {
+ self.log.debug('Old DATA' + msg);
+ return destroy_req(req);
+ }
+
+ self.log.warn('Old "'+ev+'"' + msg);
+ }
+
+ return handle_confirmed_req_event;
+ }
+
+ var handlers = ['data', 'end', 'error'];
+ handlers.forEach(function(ev) {
+ req.on(ev, handler_for(ev));
+ })
+
+ // The inactivity timer is for time between *changes*, or time between the
+ // initial connection and the first change. Therefore it goes here.
+ self.change_at = now;
+ if(self.inactivity_ms)
+ self.inactivity_timer = setTimeout(function() { self.on_inactivity() }, self.inactivity_ms);
+
+ return self.wait();
+}
+
+Feed.prototype.wait = function wait_for_event() {
+ var self = this;
+ self.emit('wait');
+
+ if(self.pending.wait_timer)
+ return self.die(new Error('wait() called but there is already a wait_timer: ' + self.pending.wait_timer));
+
+ var timeout_ms = self.heartbeat * HEARTBEAT_TIMEOUT_COEFFICIENT;
+ var msg = 'Req ' + self.pending.request.id() + ' timeout=' + timeout_ms;
+ if(self.inactivity_ms)
+ msg += ', inactivity=' + self.inactivity_ms;
+ msg += ': ' + self.db_safe;
+
+ self.log.debug(msg);
+ self.pending.wait_timer = setTimeout(function() { self.on_timeout() }, timeout_ms);
+}
+
+Feed.prototype.on_couch_data = function on_couch_data(data, req) {
+ var self = this;
+
+ if(! self.pending.wait_timer)
+ return self.die(new Error('Cannot find timeout timer during incoming data'));
+
+ clearTimeout(self.pending.wait_timer);
+ self.pending.wait_timer = null;
+ self.pending.activity_at = new Date;
+
+ self.log.debug('Data from ' + self.pending.request.id());
+
+ // Buf could have 0, 1, or many JSON objects in it.
+ var buf = self.pending.data + data;
+ var offset, json, change;
+
+ while((offset = buf.indexOf("\n")) >= 0) {
+ json = buf.substr(0, offset);
+ buf = buf.substr(offset + 1);
+
+ if(json == '') {
+ self.log.debug('Heartbeat: ' + self.pending.request.id());
+ } else {
+ //self.log.debug('JSON: ' + json);
+ try {
+ change = JSON.parse(json);
+ } catch(er) {
+ return self.die(er);
+ }
+
+ //self.log.debug('Object:\n' + util.inspect(change));
+
+ seq = change.seq;
+ if(!seq)
+ return self.die(new Error('Change has no .seq field: ' + json));
+
+ self.on_change(change);
+ }
+ }
+
+ self.pending.data = buf;
+ self.wait();
+}
+
+Feed.prototype.on_timeout = function on_timeout() {
+ var self = this;
+
+ var now = new Date;
+ var elapsed_ms = now - self.pending.activity_at;
+ self.log.warn('Closing req ' + self.pending.request.id() + ' for timeout after ' + elapsed_ms + 'ms; heartbeat=' + self.heartbeat);
+
+ return destroy_req(self.pending.request);
+ //return self.retry();
+}
+
+Feed.prototype.retry = function retry() {
+ var self = this;
+
+ clearTimeout(self.pending.wait_timer);
+ self.pending.wait_timer = null;
+
+ self.log.info('Retrying since=' + self.since + ' after ' + self.retry_delay + 'ms: ' + self.db_safe);
+ self.emit('retry');
+
+ setTimeout(function() { self.query() }, self.retry_delay);
+
+ var max_retry_ms = self.max_retry_seconds * 1000;
+ self.retry_delay *= 2;
+ if(self.retry_delay > max_retry_ms)
+ self.retry_delay = max_retry_ms;
+}
+
+Feed.prototype.on_couch_end = function on_couch_end() {
+ var self = this;
+
+ self.log.debug('Changes feed ended ' + self.pending.request.id());
+ self.pending.request = null;
+ return self.retry();
+}
+
+Feed.prototype.on_couch_error = function on_couch_error(er) {
+ var self = this;
+
+ self.log.debug('Changes query eror: ' + lib.JS(er.stack));
+ return self.retry();
+}
+
+Feed.prototype.die = function(er) {
+ var self = this;
+
+ self.log.fatal('Fatal error: ' + er.stack);
+
+ self.emit('error', er);
+
+ var req = self.pending.request;
+ self.pending.request = null;
+ if(req) {
+ self.log.debug('Destroying req ' + req.id());
+ destroy_req(req);
+ }
+
+ //throw er;
+}
+
+Feed.prototype.on_change = function on_change(change) {
+ var self = this;
+
+ if(!change.seq)
+ return self.die(new Error('No seq value in change: ' + lib.JS(change)));
+
+ if(change.seq <= self.since) {
+ self.log.debug('Bad seq value ' + change.seq + ' since=' + self.since);
+ return destroy_req(self.pending.request);
+ }
+
+ if(typeof self.filter !== 'function')
+ return self.on_good_change(change);
+
+ var req = { 'query': lib.JDUP(self.pending.request.changes_query) };
+ var f_change = lib.JDUP(change); // Don't let the filter mutate the real data.
+ var result = self.filter.apply(null, [f_change, req]);
+ result = (result && true) || false;
+ if(result) {
+ self.log.debug('Builtin filter PASS for change: ' + change.seq);
+ return self.on_good_change(change);
+ } else
+ self.log.debug('Builtin filter FAIL for change: ' + change.seq);
+}
+
+Feed.prototype.on_good_change = function on_good_change(change) {
+ var self = this;
+
+ if(self.inactivity_ms && !self.inactivity_timer)
+ return self.die(new Error('Cannot find inactivity timer during change'));
+
+ clearTimeout(self.inactivity_timer);
+ self.inactivity_timer = null;
+ if(self.inactivity_ms)
+ self.inactivity_timer = setTimeout(function() { self.on_inactivity() }, self.inactivity_ms);
+
+ self.change_at = new Date;
+ self.since = change.seq;
+ self.emit('change', change);
+}
+
+Feed.prototype.on_inactivity = function on_inactivity() {
+ var self = this;
+ var now = new Date;
+ var elapsed_ms = now - self.change_at;
+ var elapsed_s = elapsed_ms / 1000;
+
+ return self.die(new Error('Req ' + self.pending.request.id() + ' made no changes for ' + elapsed_s + 's'));
+}
+
+module.exports = { "Feed" : Feed
+ };
+
+
+/*
+ * Utilities
+ */
+
+function destroy_req(req) {
+ if(req)
+ return destroy_response(req.response);
+}
+
+function destroy_response(response) {
+ if(!response)
+ return;
+
+ response.connection.end();
+ response.connection.destroy();
+}
View
30 node_modules/follow/lib.js
@@ -0,0 +1,30 @@
+exports.scrub_creds = function scrub_creds(url) {
+ return url.replace(/^(https?:\/\/)[^:]+:[^@]+@(.*)$/, '$1$2'); // Scrub username and password
+}
+
+exports.JP = JSON.parse;
+exports.JS = JSON.stringify;
+exports.JDUP = function(obj) { return JSON.parse(JSON.stringify(obj)) };
+
+// Wrap log4js so it will not be a dependency.
+var VERBOSE = (process.env.verbose === 'true');
+
+var noop = function() {};
+var noops = { "trace": noop
+ , "debug": VERBOSE ? console.log : noop
+ , "info" : VERBOSE ? console.info : noop
+ , "warn" : VERBOSE ? console.warn : noop
+ , "error": VERBOSE ? console.error : noop
+ , "fatal": VERBOSE ? console.error : noop
+
+ , "setLevel": noop
+ }
+
+try {
+ exports.log4js = require('log4js');
+} catch(e) {
+ exports.log4js = function() {
+ return { 'getLogger': function() { return noops }
+ }
+ }
+}
View
13 node_modules/follow/package.json
@@ -0,0 +1,13 @@
+{ "name": "follow"
+, "version": "0.1.0"
+, "author": { "name": "Jason Smith"
+ , "email": "jhs@iriscouch.com" }
+, "description": "Extremely robust, fault-tolerant CouchDB changes follower"
+, "homepage": "http://github.com/iriscouch/follow"
+, "repository": { "type": "git"
+ , "url": "git://github.com/iriscouch/follow" }
+, "engines": [ "node" ]
+, "dependencies" : { "request" : ">=1.9.6"
+ }
+, "main": "api"
+}
View
4 node_modules/formidable/.gitignore
@@ -0,0 +1,4 @@
+/test/tmp
+*.upload
+*.un~
+/node_modules
View
9 node_modules/formidable/Makefile
@@ -0,0 +1,9 @@
+SHELL := /bin/bash
+
+test:
+ @find test/{simple,integration,system}/test-*.js | xargs -n 1 -t node
+
+clean:
+ rm test/tmp/*
+
+.PHONY: test clean
View
258 node_modules/formidable/Readme.md
@@ -0,0 +1,258 @@
+# Formidable
+
+## Purpose
+
+A node.js module for parsing form data, especially file uploads.
+
+## Current status
+
+This module was developed for [Transloadit](http://transloadit.com/), a service focused on uploading
+and encoding images and videos. It has been battle-tested against hundreds of GB of file uploads from
+a big variety of clients and is considered production-ready.
+
+## Features
+
+* Fast (~500mb/sec), non-buffering multipart parser
+* Automatically writing file uploads to disk
+* Low memory footprint
+* Graceful error handling
+* Very high test coverage
+
+## Changelog
+
+### v1.0.2
+
+* Exclude node\_modules folder from git
+* Implement new `'aborted'` event
+* Fix files in example folder to work with recent node versions
+* Make gently a devDependency
+
+[See Commits](https://github.com/felixge/node-formidable/compare/v1.0.1...v1.0.2)
+
+### v1.0.1
+
+* Fix package.json to refer to proper main directory. (#68, Dean Landolt)
+
+[See Commits](https://github.com/felixge/node-formidable/compare/v1.0.0...v1.0.1)
+
+### v1.0.0
+
+* Add support for multipart boundaries that are quoted strings. (Jeff Craig)
+
+This marks the beginning of the development on version 2.0, which will include
+several architectural improvements.
+
+[See Commits](https://github.com/felixge/node-formidable/compare/v0.9.11...v1.0.0)
+
+### v0.9.11
+
+* Emit `'progress'` event when receiving data, regardless of parsing it. (Tim Koschützki)
+* Use [W3C FileAPI Draft](http://dev.w3.org/2006/webapi/FileAPI/) properties for File class
+
+**Important:** The old property names of the File class will be removed in a
+future release.
+
+[See Commits](https://github.com/felixge/node-formidable/compare/v0.9.10...v0.9.11)
+
+### Older releases
+
+These releases were done before starting to maintain the above Changelog:
+
+* [v0.9.10](https://github.com/felixge/node-formidable/compare/v0.9.9...v0.9.10)
+* [v0.9.9](https://github.com/felixge/node-formidable/compare/v0.9.8...v0.9.9)
+* [v0.9.8](https://github.com/felixge/node-formidable/compare/v0.9.7...v0.9.8)
+* [v0.9.7](https://github.com/felixge/node-formidable/compare/v0.9.6...v0.9.7)
+* [v0.9.6](https://github.com/felixge/node-formidable/compare/v0.9.5...v0.9.6)
+* [v0.9.5](https://github.com/felixge/node-formidable/compare/v0.9.4...v0.9.5)
+* [v0.9.4](https://github.com/felixge/node-formidable/compare/v0.9.3...v0.9.4)
+* [v0.9.3](https://github.com/felixge/node-formidable/compare/v0.9.2...v0.9.3)
+* [v0.9.2](https://github.com/felixge/node-formidable/compare/v0.9.1...v0.9.2)
+* [v0.9.1](https://github.com/felixge/node-formidable/compare/v0.9.0...v0.9.1)
+* [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0)
+* [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0)
+* [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0)
+* [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0)
+* [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0)
+* [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0)
+* [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0)
+* [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0)
+* [v0.1.0](https://github.com/felixge/node-formidable/commits/v0.1.0)
+
+## Installation
+
+Via [npm](http://github.com/isaacs/npm):
+
+ npm install formidable@latest
+
+Manually:
+
+ git clone git://github.com/felixge/node-formidable.git formidable
+ vim my.js
+ # var formidable = require('./formidable');
+
+Note: Formidable requires [gently](http://github.com/felixge/node-gently) to run the unit tests, but you won't need it for just using the library.
+
+## Example
+
+Parse an incoming file upload.
+
+ var formidable = require('formidable'),
+ http = require('http'),
+
+ sys = require('sys');
+
+ http.createServer(function(req, res) {
+ if (req.url == '/upload' && req.method.toLowerCase() == 'post') {
+ // parse a file upload
+ var form = new formidable.IncomingForm();
+ form.parse(req, function(err, fields, files) {
+ res.writeHead(200, {'content-type': 'text/plain'});
+ res.write('received upload:\n\n');
+ res.end(sys.inspect({fields: fields, files: files}));
+ });
+ return;
+ }
+
+ // show a file upload form
+ res.writeHead(200, {'content-type': 'text/html'});
+ res.end(
+ '<form action="/upload" enctype="multipart/form-data" method="post">'+
+ '<input type="text" name="title"><br>'+
+ '<input type="file" name="upload" multiple="multiple"><br>'+
+ '<input type="submit" value="Upload">'+
+ '</form>'
+ );
+ });
+
+## API
+
+### formidable.IncomingForm
+
+#### new formidable.IncomingForm()
+
+Creates a new incoming form.
+
+#### incomingForm.encoding = 'utf-8'
+
+The encoding to use for incoming form fields.
+
+#### incomingForm.uploadDir = '/tmp'
+
+The directory for placing file uploads in. You can later on move them using `fs.rename()`.
+
+#### incomingForm.keepExtensions = false
+
+If you want the files written to `incomingForm.uploadDir` to include the extensions of the original files, set this property to `true`.
+
+#### incomingForm.type
+
+Either 'multipart' or 'urlencoded' depending on the incoming request.
+
+#### incomingForm.maxFieldsSize = 2 * 1024 * 1024
+
+Limits the amount of memory a field (not file) can allocate in bytes.
+If this value is exceeded, an `'error'` event is emitted. The default
+size is 2MB.
+
+#### incomingForm.bytesReceived
+
+The amount of bytes received for this form so far.
+
+#### incomingForm.bytesExpected
+
+The expected number of bytes in this form.
+
+#### incomingForm.parse(request, [cb])
+
+Parses an incoming node.js `request` containing form data. If `cb` is provided, all fields and files are collected and passed to the callback:
+
+ incomingForm.parse(req, function(err, fields, files) {
+ // ...
+ });
+
+#### incomingForm.onPart(part)
+
+You may overwrite this method if you are interested in directly accessing the multipart stream. Doing so will disable any `'field'` / `'file'` events processing which would occur otherwise, making you fully responsible for handling the processing.
+
+ incomingForm.onPart = function(part) {
+ part.addListener('data', function() {
+ // ...
+ });
+ }
+
+If you want to use formidable to only handle certain parts for you, you can do so:
+
+ incomingForm.onPart = function(part) {
+ if (!part.filename) {
+ // let formidable handle all non-file parts
+ incomingForm.handlePart(part);
+ }
+ }
+
+Check the code in this method for further inspiration.
+
+#### Event: 'progress' (bytesReceived, bytesExpected)
+
+Emitted after each incoming chunk of data that has been parsed. Can be used to roll your own progress bar.
+
+#### Event: 'field' (name, value)
+
+Emitted whenever a field / value pair has been received.
+
+#### Event: 'fileBegin' (name, file)
+
+Emitted whenever a new file is detected in the upload stream. Use this event if
+you want to stream the file to somewhere else while buffering the upload on
+the file system.
+
+#### Event: 'file' (name, file)
+
+Emitted whenever a field / file pair has been received. `file` is an instance of `File`.
+
+#### Event: 'error' (err)
+
+Emitted when there is an error processing the incoming form. A request that experiences an error is automatically paused, you will have to manually call `request.resume()` if you want the request to continue firing `'data'` events.
+
+#### Event: 'aborted'
+
+Emitted when the request was aborted by the user. Right now this can be due to a 'timeout' or 'close' event on the socket. In the future there will be a separate 'timeout' event (needs a change in the node core).
+
+#### Event: 'end' ()
+
+Emitted when the entire request has been received, and all contained files have finished flushing to disk. This is a great place for you to send your response.
+
+### formidable.File
+
+#### file.size = 0
+
+The size of the uploaded file in bytes. If the file is still being uploaded (see `'fileBegin'` event), this property says how many bytes of the file have been written to disk so far.
+
+#### file.path = null
+
+The path this file is being written to. You can modify this in the `'fileBegin'` event in
+case you are unhappy with the way formidable generates a temporary path for your files.
+
+#### file.name = null
+
+The name this file had according to the uploading client.
+
+#### file.type = null
+
+The mime type of this file, according to the uploading client.
+
+#### file.lastModifiedDate = null
+
+A date object (or `null`) containing the time this file was last written to. Mostly
+here for compatibility with the [W3C File API Draft](http://dev.w3.org/2006/webapi/FileAPI/).
+
+## License
+
+Formidable is licensed under the MIT license.
+
+## Ports
+
+* [multipart-parser](http://github.com/FooBarWidget/multipart-parser): a C++ parser based on formidable
+
+## Credits
+
+* [Ryan Dahl](http://twitter.com/ryah) for his work on [http-parser](http://github.com/ry/http-parser) which heavily inspired multipart_parser.js
View
3 node_modules/formidable/TODO
@@ -0,0 +1,3 @@
+- Better bufferMaxSize handling approach
+- Add tests for JSON parser pull request and merge it
+- Implement QuerystringParser the same way as MultipartParser
View
70 node_modules/formidable/benchmark/bench-multipart-parser.js
@@ -0,0 +1,70 @@
+require('../test/common');
+var multipartParser = require('../lib/multipart_parser'),
+ MultipartParser = multipartParser.MultipartParser,
+ parser = new MultipartParser(),
+ Buffer = require('buffer').Buffer,
+ boundary = '-----------------------------168072824752491622650073',
+ mb = 100,
+ buffer = createMultipartBuffer(boundary, mb * 1024 * 1024),
+ callbacks =
+ { partBegin: -1,
+ partEnd: -1,
+ headerField: -1,
+ headerValue: -1,
+ partData: -1,
+ end: -1,
+ };
+
+
+parser.initWithBoundary(boundary);
+parser.onHeaderField = function() {
+ callbacks.headerField++;
+};
+
+parser.onHeaderValue = function() {
+ callbacks.headerValue++;
+};
+
+parser.onPartBegin = function() {
+ callbacks.partBegin++;
+};
+
+parser.onPartData = function() {
+ callbacks.partData++;
+};
+
+parser.onPartEnd = function() {
+ callbacks.partEnd++;
+};
+
+parser.onEnd = function() {
+ callbacks.end++;
+};
+
+var start = +new Date(),
+ nparsed = parser.write(buffer),
+ duration = +new Date - start,
+ mbPerSec = (mb / (duration / 1000)).toFixed(2);
+
+console.log(mbPerSec+' mb/sec');
+
+assert.equal(nparsed, buffer.length);
+
+function createMultipartBuffer(boundary, size) {
+ var head =
+ '--'+boundary+'\r\n'
+ + 'content-disposition: form-data; name="field1"\r\n'
+ + '\r\n'
+ , tail = '\r\n--'+boundary+'--\r\n'
+ , buffer = new Buffer(size);
+
+ buffer.write(head, 'ascii', 0);
+ buffer.write(tail, 'ascii', buffer.length - tail.length);
+ return buffer;
+}
+
+process.on('exit', function() {
+ for (var k in callbacks) {
+ assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]);
+ }
+});
View
43 node_modules/formidable/example/post.js
@@ -0,0 +1,43 @@
+require('../test/common');
+var http = require('http'),
+ util = require('util'),
+ formidable = require('formidable'),
+ server;
+
+server = http.createServer(function(req, res) {
+ if (req.url == '/') {
+ res.writeHead(200, {'content-type': 'text/html'});
+ res.end(
+ '<form action="/post" method="post">'+
+ '<input type="text" name="title"><br>'+
+ '<input type="text" name="data[foo][]"><br>'+
+ '<input type="submit" value="Submit">'+
+ '</form>'
+ );
+ } else if (req.url == '/post') {
+ var form = new formidable.IncomingForm(),
+ fields = [];
+
+ form
+ .on('error', function(err) {
+ res.writeHead(200, {'content-type': 'text/plain'});
+ res.end('error:\n\n'+util.inspect(err));
+ })
+ .on('field', function(field, value) {
+ console.log(field, value);
+ fields.push([field, value]);
+ })
+ .on('end', function() {
+ console.log('-> post done');
+ res.writeHead(200, {'content-type': 'text/plain'});
+ res.end('received fields:\n\n '+util.inspect(fields));
+ });
+ form.parse(req);
+ } else {
+ res.writeHead(404, {'content-type': 'text/plain'});
+ res.end('404');
+ }
+});
+server.listen(TEST_PORT);
+
+console.log('listening on http://localhost:'+TEST_PORT+'/');
View
48 node_modules/formidable/example/upload.js
@@ -0,0 +1,48 @@
+require('../test/common');
+var http = require('http'),
+ util = require('util'),
+ formidable = require('formidable'),
+ server;
+
+server = http.createServer(function(req, res) {
+ if (req.url == '/') {
+ res.writeHead(200, {'content-type': 'text/html'});
+ res.end(
+ '<form action="/upload" enctype="multipart/form-data" method="post">'+
+ '<input type="text" name="title"><br>'+
+ '<input type="file" name="upload" multiple="multiple"><br>'+
+ '<input type="submit" value="Upload">'+
+ '</form>'
+ );
+ } else if (req.url == '/upload') {
+ var form = new formidable.IncomingForm(),
+ files = [],
+ fields = [];
+
+ form.uploadDir = TEST_TMP;
+
+ form
+ .on('field', function(field, value) {
+ console.log(field, value);
+ fields.push([field, value]);
+ })
+ .on('file', function(field, file) {
+ console.log(field, file);
+ files.push([field, file]);
+ })
+ .on('end', function() {
+ console.log('-> upload done');
+ res.writeHead(200, {'content-type': 'text/plain'});
+ res.write('received fields:\n\n '+util.inspect(fields));
+ res.write('\n\n');
+ res.end('received files:\n\n '+util.inspect(files));
+ });
+ form.parse(req);
+ } else {
+ res.writeHead(404, {'content-type': 'text/plain'});
+ res.end('404');
+ }
+});
+server.listen(TEST_PORT);
+
+console.log('listening on http://localhost:'+TEST_PORT+'/');
View
1 node_modules/formidable/index.js
@@ -0,0 +1 @@
+module.exports = require('./lib/formidable');
View
61 node_modules/formidable/lib/file.js
@@ -0,0 +1,61 @@
+if (global.GENTLY) require = GENTLY.hijack(require);
+
+var util = require('./util'),
+ WriteStream = require('fs').WriteStream,
+ EventEmitter = require('events').EventEmitter;
+
+function File(properties) {
+ EventEmitter.call(this);
+
+ this.size = 0;
+ this.path = null;
+ this.name = null;
+ this.type = null;
+ this.lastModifiedDate = null;
+
+ this._writeStream = null;
+
+ for (var key in properties) {
+ this[key] = properties[key];
+ }
+
+ this._backwardsCompatibility();
+}
+module.exports = File;
+util.inherits(File, EventEmitter);
+
+// @todo Next release: Show error messages when accessing these
+File.prototype._backwardsCompatibility = function() {
+ var self = this;
+ this.__defineGetter__('length', function() {
+ return self.size;
+ });
+ this.__defineGetter__('filename', function() {
+ return self.name;
+ });
+ this.__defineGetter__('mime', function() {
+ return self.type;
+ });
+};
+
+File.prototype.open = function() {
+ this._writeStream = new WriteStream(this.path);
+};
+
+File.prototype.write = function(buffer, cb) {
+ var self = this;
+ this._writeStream.write(buffer, function() {
+ self.lastModifiedDate = new Date();
+ self.size += buffer.length;
+ self.emit('progress', self.size);
+ cb();
+ });
+};
+
+File.prototype.end = function(cb) {
+ var self = this;
+ this._writeStream.end(function() {
+ self.emit('end');
+ cb();
+ });
+};
View
349 node_modules/formidable/lib/incoming_form.js
@@ -0,0 +1,349 @@
+if (global.GENTLY) require = GENTLY.hijack(require);
+
+var util = require('./util'),
+ path = require('path'),
+ File = require('./file'),
+ MultipartParser = require('./multipart_parser').MultipartParser,
+ QuerystringParser = require('./querystring_parser').QuerystringParser,
+ StringDecoder = require('string_decoder').StringDecoder,
+ EventEmitter = require('events').EventEmitter;
+
+function IncomingForm() {
+ if (!(this instanceof IncomingForm)) return new IncomingForm;
+ EventEmitter.call(this);
+
+ this.error = null;
+ this.ended = false;
+
+ this.maxFieldsSize = 2 * 1024 * 1024;
+ this.keepExtensions = false;
+ this.uploadDir = '/tmp';
+ this.encoding = 'utf-8';
+ this.headers = null;
+ this.type = null;
+
+ this.bytesReceived = null;
+ this.bytesExpected = null;
+
+ this._parser = null;
+ this._flushing = 0;
+ this._fieldsSize = 0;
+};
+util.inherits(IncomingForm, EventEmitter);
+exports.IncomingForm = IncomingForm;
+
+IncomingForm.prototype.parse = function(req, cb) {
+ this.pause = function() {
+ try {
+ req.pause();
+ } catch (err) {
+ // the stream was destroyed
+ if (!this.ended) {
+ // before it was completed, crash & burn
+ this._error(err);
+ }
+ return false;
+ }
+ return true;
+ };
+
+ this.resume = function() {
+ try {
+ req.resume();
+ } catch (err) {
+ // the stream was destroyed
+ if (!this.ended) {
+ // before it was completed, crash & burn
+ this._error(err);
+ }
+ return false;
+ }
+
+ return true;
+ };
+
+ this.writeHeaders(req.headers);
+
+ var self = this;
+ req
+ .on('error', function(err) {
+ self._error(err);
+ })
+ .on('aborted', function() {
+ self.emit('aborted');
+ })
+ .on('data', function(buffer) {
+ self.write(buffer);
+ })
+ .on('end', function() {
+ if (self.error) {
+ return;
+ }
+
+ var err = self._parser.end();
+ if (err) {
+ self._error(err);
+ }
+ });
+
+ if (cb) {
+ var fields = {}, files = {};
+ this
+ .on('field', function(name, value) {
+ fields[name] = value;
+ })
+ .on('file', function(name, file) {
+ files[name] = file;
+ })
+ .on('error', function(err) {
+ cb(err, fields, files);
+ })
+ .on('end', function() {
+ cb(null, fields, files);
+ });
+ }
+
+ return this;
+};
+
+IncomingForm.prototype.writeHeaders = function(headers) {
+ this.headers = headers;
+ this._parseContentLength();
+ this._parseContentType();
+};
+
+IncomingForm.prototype.write = function(buffer) {
+ if (!this._parser) {
+ this._error(new Error('unintialized parser'));
+ return;
+ }
+
+ this.bytesReceived += buffer.length;
+ this.emit('progress', this.bytesReceived, this.bytesExpected);
+
+ var bytesParsed = this._parser.write(buffer);
+ if (bytesParsed !== buffer.length) {
+ this._error(new Error('parser error, '+bytesParsed+' of '+buffer.length+' bytes parsed'));
+ }
+
+ return bytesParsed;
+};
+
+IncomingForm.prototype.pause = function() {
+ // this does nothing, unless overwritten in IncomingForm.parse
+ return false;
+};
+
+IncomingForm.prototype.resume = function() {
+ // this does nothing, unless overwritten in IncomingForm.parse
+ return false;
+};
+
+IncomingForm.prototype.onPart = function(part) {
+ // this method can be overwritten by the user
+ this.handlePart(part);
+};
+
+IncomingForm.prototype.handlePart = function(part) {
+ var self = this;
+
+ if (!part.filename) {
+ var value = ''
+ , decoder = new StringDecoder(this.encoding);
+
+ part.on('data', function(buffer) {
+ self._fieldsSize += buffer.length;
+ if (self._fieldsSize > self.maxFieldsSize) {
+ self._error(new Error('maxFieldsSize exceeded, received '+self._fieldsSize+' bytes of field data'));
+ return;
+ }
+ value += decoder.write(buffer);
+ });
+
+ part.on('end', function() {
+ self.emit('field', part.name, value);
+ });
+ return;
+ }
+
+ this._flushing++;
+
+ var file = new File({
+ path: this._uploadPath(part.filename),
+ name: part.filename,
+ type: part.mime,
+ });
+
+ this.emit('fileBegin', part.name, file);
+
+ file.open();
+
+ part.on('data', function(buffer) {
+ self.pause();
+ file.write(buffer, function() {
+ self.resume();
+ });
+ });
+
+ part.on('end', function() {
+ file.end(function() {
+ self._flushing--;
+ self.emit('file', part.name, file);
+ self._maybeEnd();
+ });
+ });
+};
+
+IncomingForm.prototype._parseContentType = function() {
+ if (!this.headers['content-type']) {
+ this._error(new Error('bad content-type header, no content-type'));
+ return;
+ }
+
+ if (this.headers['content-type'].match(/urlencoded/i)) {
+ this._initUrlencoded();
+ return;
+ }
+
+ if (this.headers['content-type'].match(/multipart/i)) {
+ var m;
+ if (m = this.headers['content-type'].match(/boundary=(?:"([^"]+)"|([^;]+))/i)) {
+ this._initMultipart(m[1] || m[2]);
+ } else {
+ this._error(new Error('bad content-type header, no multipart boundary'));
+ }
+ return;
+ }
+
+ this._error(new Error('bad content-type header, unknown content-type: '+this.headers['content-type']));
+};
+
+IncomingForm.prototype._error = function(err) {
+ if (this.error) {
+ return;
+ }
+
+ this.error = err;
+ this.pause();
+ this.emit('error', err);
+};
+
+IncomingForm.prototype._parseContentLength = function() {
+ if (this.headers['content-length']) {
+ this.bytesReceived = 0;
+ this.bytesExpected = parseInt(this.headers['content-length'], 10);
+ }
+};
+
+IncomingForm.prototype._newParser = function() {
+ return new MultipartParser();
+};
+
+IncomingForm.prototype._initMultipart = function(boundary) {
+ this.type = 'multipart';
+
+ var parser = new MultipartParser(),
+ self = this,
+ headerField,
+ headerValue,
+ part;
+
+ parser.initWithBoundary(boundary);
+
+ parser.onPartBegin = function() {
+ part = new EventEmitter();
+ part.headers = {};
+ part.name = null;
+ part.filename = null;
+ part.mime = null;
+ headerField = '';
+ headerValue = '';
+ };
+
+ parser.onHeaderField = function(b, start, end) {
+ headerField += b.toString(self.encoding, start, end);
+ };
+
+ parser.onHeaderValue = function(b, start, end) {
+ headerValue += b.toString(self.encoding, start, end);
+ };
+
+ parser.onHeaderEnd = function() {
+ headerField = headerField.toLowerCase();
+ part.headers[headerField] = headerValue;
+
+ var m;
+ if (headerField == 'content-disposition') {
+ if (m = headerValue.match(/name="([^"]+)"/i)) {
+ part.name = m[1];
+ }
+
+ if (m = headerValue.match(/filename="([^;]+)"/i)) {
+ part.filename = m[1].substr(m[1].lastIndexOf('\\') + 1);
+ }
+ } else if (headerField == 'content-type') {
+ part.mime = headerValue;
+ }
+
+ headerField = '';
+ headerValue = '';
+ };
+
+ parser.onHeadersEnd = function() {
+ self.onPart(part);
+ };
+
+ parser.onPartData = function(b, start, end) {
+ part.emit('data', b.slice(start, end));
+ };
+
+ parser.onPartEnd = function() {
+ part.emit('end');
+ };
+
+ parser.onEnd = function() {
+ self.ended = true;
+ self._maybeEnd();
+ };
+
+ this._parser = parser;
+};
+
+IncomingForm.prototype._initUrlencoded = function() {
+ this.type = 'urlencoded';
+
+ var parser = new QuerystringParser()
+ , self = this;
+
+ parser.onField = function(key, val) {
+ self.emit('field', key, val);
+ };
+
+ parser.onEnd = function() {
+ self.ended = true;
+ self._maybeEnd();
+ };
+
+ this._parser = parser;
+};
+
+IncomingForm.prototype._uploadPath = function(filename) {
+ var name = '';
+ for (var i = 0; i < 32; i++) {
+ name += Math.floor(Math.random() * 16).toString(16);
+ }
+
+ if (this.keepExtensions) {
+ name += path.extname(filename);
+ }
+
+ return path.join(this.uploadDir, name);
+};
+
+IncomingForm.prototype._maybeEnd = function() {
+ if (!this.ended || this._flushing) {
+ return;
+ }
+
+ this.emit('end');
+};
View
3 node_modules/formidable/lib/index.js
@@ -0,0 +1,3 @@
+var IncomingForm = require('./incoming_form').IncomingForm;
+IncomingForm.IncomingForm = IncomingForm;
+module.exports = IncomingForm;
View
303 node_modules/formidable/lib/multipart_parser.js
@@ -0,0 +1,303 @@
+var Buffer = require('buffer').Buffer,
+ s = 0,
+ S =
+ { PARSER_UNINITIALIZED: s++,
+ START: s++,
+ START_BOUNDARY: s++,
+ HEADER_FIELD_START: s++,
+ HEADER_FIELD: s++,
+ HEADER_VALUE_START: s++,
+ HEADER_VALUE: s++,
+ HEADER_VALUE_ALMOST_DONE: s++,
+ HEADERS_ALMOST_DONE: s++,
+ PART_DATA_START: s++,
+ PART_DATA: s++,
+ PART_END: s++,
+ END: s++,
+ },
+
+ f = 1,
+ F =
+ { PART_BOUNDARY: f,
+ LAST_BOUNDARY: f *= 2,
+ },
+
+ LF = 10,
+ CR = 13,
+ SPACE = 32,
+ HYPHEN = 45,
+ COLON = 58,
+ A = 97,
+ Z = 122,
+
+ lower = function(c) {
+ return c | 0x20;
+ };
+
+for (var s in S) {
+ exports[s] = S[s];
+}
+
+function MultipartParser() {
+ this.boundary = null;
+ this.boundaryChars = null;
+ this.lookbehind = null;
+ this.state = S.PARSER_UNINITIALIZED;
+
+ this.index = null;
+ this.flags = 0;
+};
+exports.MultipartParser = MultipartParser;
+
+MultipartParser.prototype.initWithBoundary = function(str) {
+ this.boundary = new Buffer(str.length+4);
+ this.boundary.write('\r\n--', 'ascii', 0);
+ this.boundary.write(str, 'ascii', 4);
+ this.lookbehind = new Buffer(this.boundary.length+8);
+ this.state = S.START;
+
+ this.boundaryChars = {};
+ for (var i = 0; i < this.boundary.length; i++) {
+ this.boundaryChars[this.boundary[i]] = true;
+ }
+};
+
+MultipartParser.prototype.write = function(buffer) {
+ var self = this,
+ i = 0,
+ len = buffer.length,
+ prevIndex = this.index,
+ index = this.index,
+ state = this.state,
+ flags = this.flags,
+ lookbehind = this.lookbehind,
+ boundary = this.boundary,
+ boundaryChars = this.boundaryChars,
+ boundaryLength = this.boundary.length,
+ boundaryEnd = boundaryLength - 1,
+ bufferLength = buffer.length,
+ c,
+ cl,
+
+ mark = function(name) {
+ self[name+'Mark'] = i;
+ },
+ clear = function(name) {
+ delete self[name+'Mark'];
+ },
+ callback = function(name, buffer, start, end) {
+ if (start !== undefined && start === end) {
+ return;
+ }
+
+ var callbackSymbol = 'on'+name.substr(0, 1).toUpperCase()+name.substr(1);
+ if (callbackSymbol in self) {
+ self[callbackSymbol](buffer, start, end);
+ }
+ },
+ dataCallback = function(name, clear) {
+ var markSymbol = name+'Mark';
+ if (!(markSymbol in self)) {
+ return;
+ }
+
+ if (!clear) {
+ callback(name, buffer, self[markSymbol], buffer.length);
+ self[markSymbol] = 0;
+ } else {
+ callback(name, buffer, self[markSymbol], i);
+ delete self[markSymbol];
+ }
+ };
+
+ for (i = 0; i < len; i++) {
+ c = buffer[i];
+ switch (state) {
+ case S.PARSER_UNINITIALIZED:
+ return i;
+ case S.START:
+ index = 0;
+ state = S.START_BOUNDARY;
+ case S.START_BOUNDARY:
+ if (index == boundary.length - 2) {
+ if (c != CR) {
+ return i;
+ }
+ index++;
+ break;
+ } else if (index - 1 == boundary.length - 2) {
+ if (c != LF) {
+ return i;
+ }
+ index = 0;
+ callback('partBegin');
+ state = S.HEADER_FIELD_START;
+ break;
+ }
+
+ if (c != boundary[index+2]) {
+ return i;
+ }
+ index++;
+ break;
+ case S.HEADER_FIELD_START:
+ state = S.HEADER_FIELD;
+ mark('headerField');
+ index = 0;
+ case S.HEADER_FIELD:
+ if (c == CR) {
+ clear('headerField');
+ state = S.HEADERS_ALMOST_DONE;
+ break;
+ }
+
+ index++;
+ if (c == HYPHEN) {
+ break;
+ }
+
+ if (c == COLON) {
+ if (index == 1) {
+ // empty header field
+ return i;
+ }
+ dataCallback('headerField', true);
+ state = S.HEADER_VALUE_START;
+ break;
+ }
+
+ cl = lower(c);
+ if (cl < A || cl > Z) {
+ return i;
+ }
+ break;
+ case S.HEADER_VALUE_START:
+ if (c == SPACE) {
+ break;
+ }
+
+ mark('headerValue');
+ state = S.HEADER_VALUE;
+ case S.HEADER_VALUE:
+ if (c == CR) {
+ dataCallback('headerValue', true);
+ callback('headerEnd');
+ state = S.HEADER_VALUE_ALMOST_DONE;
+ }
+ break;
+ case S.HEADER_VALUE_ALMOST_DONE:
+ if (c != LF) {
+ return i;
+ }
+ state = S.HEADER_FIELD_START;
+ break;
+ case S.HEADERS_ALMOST_DONE:
+ if (c != LF) {
+ return i;
+ }
+
+ callback('headersEnd');
+ state = S.PART_DATA_START;
+ break;
+ case S.PART_DATA_START:
+ state = S.PART_DATA
+ mark('partData');
+ case S.PART_DATA:
+ prevIndex = index;
+
+ if (index == 0) {
+ // boyer-moore derrived algorithm to safely skip non-boundary data
+ while (i + boundaryLength <= bufferLength) {
+ if (buffer[i + boundaryEnd] in boundaryChars) {
+ break;
+ }
+
+ i += boundaryLength;
+ }
+ c = buffer[i];
+ }
+
+ if (index < boundary.length) {
+ if (boundary[index] == c) {
+ if (index == 0) {
+ dataCallback('partData', true);
+ }
+ index++;
+ } else {
+ index = 0;
+ }
+ } else if (index == boundary.length) {
+ index++;
+ if (c == CR) {
+ // CR = part boundary
+ flags |= F.PART_BOUNDARY;
+ } else if (c == HYPHEN) {
+ // HYPHEN = end boundary
+ flags |= F.LAST_BOUNDARY;
+ } else {
+ index = 0;
+ }
+ } else if (index - 1 == boundary.length) {
+ if (flags & F.PART_BOUNDARY) {
+ index = 0;
+ if (c == LF) {
+ // unset the PART_BOUNDARY flag
+ flags &= ~F.PART_BOUNDARY;
+ callback('partEnd');
+ callback('partBegin');
+ state = S.HEADER_FIELD_START;
+ break;
+ }
+ } else if (flags & F.LAST_BOUNDARY) {
+ if (c == HYPHEN) {
+ callback('partEnd');
+ callback('end');
+ state = S.END;
+ } else {
+ index = 0;
+ }
+ } else {
+ index = 0;
+ }
+ }
+
+ if (index > 0) {
+ // when matching a possible boundary, keep a lookbehind reference
+ // in case it turns out to be a false lead
+ lookbehind[index-1] = c;
+ } else if (prevIndex > 0) {
+ // if our boundary turned out to be rubbish, the captured lookbehind
+ // belongs to partData
+ callback('partData', lookbehind, 0, prevIndex);
+ prevIndex = 0;
+ mark('partData');
+
+ // reconsider the current character even so it interrupted the sequence
+ // it could be the beginning of a new sequence
+ i--;
+ }
+
+ break;
+ case S.END:
+ break;
+ default:
+ return i;
+ }
+ }
+
+ dataCallback('headerField');
+ dataCallback('headerValue');
+ dataCallback('partData');
+
+ this.index = index;
+ this.state = state;
+ this.flags = flags;
+
+ return len;
+};
+
+MultipartParser.prototype.end = function() {
+ if (this.state != S.END) {
+ return new Error('MultipartParser.end(): stream ended unexpectedly');
+ }
+};
View
25 node_modules/formidable/lib/querystring_parser.js
@@ -0,0 +1,25 @@
+if (global.GENTLY) require = GENTLY.hijack(require);
+
+// This is a buffering parser, not quite as nice as the multipart one.
+// If I find time I'll rewrite this to be fully streaming as well
+var querystring = require('querystring');
+
+function QuerystringParser() {
+ this.buffer = '';
+};
+exports.QuerystringParser = QuerystringParser;
+
+QuerystringParser.prototype.write = function(buffer) {
+ this.buffer += buffer.toString('ascii');
+ return buffer.length;
+};
+
+QuerystringParser.prototype.end = function() {
+ var fields = querystring.parse(this.buffer);
+ for (var field in fields) {
+ this.onField(field, fields[field]);
+ }
+ this.buffer = '';
+
+ this.onEnd();
+};
View
6 node_modules/formidable/lib/util.js
@@ -0,0 +1,6 @@
+// Backwards compatibility ...
+try {
+ module.exports = require('util');
+} catch (e) {
+ module.exports = require('sys');
+}
View
15 node_modules/formidable/package.json
@@ -0,0 +1,15 @@
+{
+ "name": "formidable",
+ "version": "1.0.2",
+ "dependencies": {},
+ "devDependencies": {
+ "gently": ">=0.7.0"
+ },
+ "directories": {
+ "lib": "./lib"
+ },
+ "main": "./lib/index",
+ "engines": {
+ "node": "*"
+ }
+}
View
24 node_modules/formidable/test/common.js
@@ -0,0 +1,24 @@
+var path = require('path'),
+ fs = require('fs');
+
+try {
+ global.Gently = require('gently');
+} catch (e) {
+ throw new Error('this test suite requires node-gently');
+}
+
+exports.lib = path.join(__dirname, '../lib');
+
+global.GENTLY = new Gently();
+
+global.assert = require('assert');
+global.TEST_PORT = 13532;
+