From c4cdb6b51bac5d6d1b3ffae6b4dccd5ce1ac3dc9 Mon Sep 17 00:00:00 2001 From: Orlando Vazquez Date: Thu, 18 Aug 2011 01:17:08 -0700 Subject: [PATCH] Initial commit sdc-convertvm tool --- bin/sdc-convertvm.js | 4 + doc/design.md | 1 + lib/cli.js | 438 ++++ lib/dataset_manifest.js | 24 + lib/disk_image.js | 139 ++ .../.npm/async/0.1.8/package/.gitmodules | 9 + node_modules/.npm/async/0.1.8/package/LICENSE | 19 + .../.npm/async/0.1.8/package/Makefile | 21 + .../.npm/async/0.1.8/package/README.md | 962 ++++++++ .../async/0.1.8/package/deps/nodeunit.css | 70 + .../.npm/async/0.1.8/package/deps/nodeunit.js | 1966 +++++++++++++++++ .../async/0.1.8/package/dist/async.min.js | 1 + .../.npm/async/0.1.8/package/index.js | 3 + .../.npm/async/0.1.8/package/lib/async.js | 623 ++++++ .../.npm/async/0.1.8/package/nodelint.cfg | 4 + .../.npm/async/0.1.8/package/package.json | 16 + .../async/0.1.8/package/test/test-async.js | 1321 +++++++++++ .../.npm/async/0.1.8/package/test/test.html | 24 + node_modules/.npm/async/active | 1 + .../.npm/node-uuid/1.2.0/package/LICENSE.md | 3 + .../.npm/node-uuid/1.2.0/package/README.md | 100 + .../.npm/node-uuid/1.2.0/package/package.json | 12 + .../1.2.0/package/test/benchmark-native.c | 34 + .../node-uuid/1.2.0/package/test/benchmark.js | 27 + .../node-uuid/1.2.0/package/test/test.html | 14 + .../.npm/node-uuid/1.2.0/package/test/test.js | 83 + .../.npm/node-uuid/1.2.0/package/uuid.js | 80 + node_modules/.npm/node-uuid/active | 1 + .../.npm/optparse/1.0.1/package/README.md | 161 ++ node_modules/.npm/optparse/1.0.1/package/TODO | 1 + .../1.0.1/package/examples/browser-test.html | 75 + .../1.0.1/package/examples/nodejs-test.js | 90 + .../optparse/1.0.1/package/lib/optparse.js | 309 +++ .../.npm/optparse/1.0.1/package/package.json | 8 + .../.npm/optparse/1.0.1/package/seed.yml | 5 + node_modules/.npm/optparse/active | 1 + .../.npm/sax/0.2.3/dependents/xml2js@0.1.9 | 1 + node_modules/.npm/sax/0.2.3/package/LICENSE | 19 + node_modules/.npm/sax/0.2.3/package/README.md | 171 ++ .../sax/0.2.3/package/examples/example.js | 41 + .../sax/0.2.3/package/examples/not-pretty.xml | 8 + .../0.2.3/package/examples/pretty-print.js | 72 + .../sax/0.2.3/package/examples/strict.dtd | 870 ++++++++ .../0.2.3/package/examples/switch-bench.js | 45 + .../.npm/sax/0.2.3/package/examples/test.html | 15 + .../.npm/sax/0.2.3/package/examples/test.xml | 1254 +++++++++++ .../.npm/sax/0.2.3/package/lib/sax.js | 782 +++++++ .../.npm/sax/0.2.3/package/package.json | 7 + .../sax/0.2.3/package/test/buffer-overrun.js | 25 + .../sax/0.2.3/package/test/cdata-chunked.js | 11 + .../sax/0.2.3/package/test/cdata-end-split.js | 15 + .../sax/0.2.3/package/test/cdata-fake-end.js | 28 + .../sax/0.2.3/package/test/cdata-multiple.js | 15 + .../.npm/sax/0.2.3/package/test/cdata.js | 10 + .../.npm/sax/0.2.3/package/test/index.js | 73 + .../.npm/sax/0.2.3/package/test/issue-23.js | 43 + .../.npm/sax/0.2.3/package/test/issue-30.js | 24 + .../sax/0.2.3/package/test/parser-position.js | 27 + .../package/test/self-closing-child-strict.js | 40 + .../0.2.3/package/test/self-closing-child.js | 40 + .../0.2.3/package/test/self-closing-tag.js | 25 + .../sax/0.2.3/package/test/stray-ending.js | 17 + .../package/test/trailing-non-whitespace.js | 17 + .../.npm/sax/0.2.3/package/test/unquoted.js | 17 + node_modules/.npm/sax/active | 1 + .../.npm/xml2js/0.1.9/dependson/sax@0.2.3 | 1 + .../xml2js/0.1.9/node_modules/sax/index.js | 20 + .../0.1.9/node_modules/sax/package.json.js | 89 + .../.npm/xml2js/0.1.9/node_modules/sax/sax.js | 20 + 
.../.npm/xml2js/0.1.9/package/.gitignore | 1 + .../.npm/xml2js/0.1.9/package/Cakefile | 12 + .../.npm/xml2js/0.1.9/package/LICENSE | 19 + .../.npm/xml2js/0.1.9/package/README.md | 41 + .../.npm/xml2js/0.1.9/package/lib/xml2js.js | 96 + .../.npm/xml2js/0.1.9/package/package.json | 29 + .../xml2js/0.1.9/package/src/xml2js.coffee | 78 + .../0.1.9/package/test/fixtures/sample.xml | 21 + .../0.1.9/package/test/xml2js.test.coffee | 60 + node_modules/.npm/xml2js/active | 1 + .../.npm/zfs/0.1.3/package/.gitmodules | 3 + node_modules/.npm/zfs/0.1.3/package/README.md | 75 + node_modules/.npm/zfs/0.1.3/package/index.js | 1 + .../zfs/0.1.3/package/lib/async_testing.js | 357 +++ .../.npm/zfs/0.1.3/package/lib/zfs.js | 375 ++++ .../.npm/zfs/0.1.3/package/package.json | 12 + .../.npm/zfs/0.1.3/package/tests/test_zfs.js | 274 +++ node_modules/.npm/zfs/active | 1 + node_modules/async | 1 + node_modules/async@0.1.8/async.js | 20 + node_modules/async@0.1.8/index.js | 20 + node_modules/async@0.1.8/package.json.js | 102 + node_modules/node-uuid | 1 + node_modules/node-uuid@1.2.0/index.js | 20 + node_modules/node-uuid@1.2.0/package.json.js | 99 + node_modules/optparse | 1 + node_modules/optparse@1.0.1/index.js | 20 + node_modules/optparse@1.0.1/package.json.js | 100 + node_modules/sax | 1 + node_modules/sax@0.2.3/index.js | 20 + node_modules/sax@0.2.3/package.json.js | 105 + node_modules/xml2js | 1 + node_modules/xml2js@0.1.9/index.js | 20 + node_modules/xml2js@0.1.9/package.json.js | 138 ++ node_modules/zfs | 1 + node_modules/zfs@0.1.3/async_testing.js | 20 + node_modules/zfs@0.1.3/index.js | 20 + node_modules/zfs@0.1.3/package.json.js | 94 + node_modules/zfs@0.1.3/zfs.js | 20 + package.json | 13 + 109 files changed, 12786 insertions(+) create mode 100755 bin/sdc-convertvm.js create mode 100644 doc/design.md create mode 100644 lib/cli.js create mode 100644 lib/dataset_manifest.js create mode 100644 lib/disk_image.js create mode 100644 node_modules/.npm/async/0.1.8/package/.gitmodules create mode 100644 node_modules/.npm/async/0.1.8/package/LICENSE create mode 100644 node_modules/.npm/async/0.1.8/package/Makefile create mode 100644 node_modules/.npm/async/0.1.8/package/README.md create mode 100644 node_modules/.npm/async/0.1.8/package/deps/nodeunit.css create mode 100644 node_modules/.npm/async/0.1.8/package/deps/nodeunit.js create mode 100644 node_modules/.npm/async/0.1.8/package/dist/async.min.js create mode 100644 node_modules/.npm/async/0.1.8/package/index.js create mode 100644 node_modules/.npm/async/0.1.8/package/lib/async.js create mode 100644 node_modules/.npm/async/0.1.8/package/nodelint.cfg create mode 100644 node_modules/.npm/async/0.1.8/package/package.json create mode 100644 node_modules/.npm/async/0.1.8/package/test/test-async.js create mode 100644 node_modules/.npm/async/0.1.8/package/test/test.html create mode 120000 node_modules/.npm/async/active create mode 100644 node_modules/.npm/node-uuid/1.2.0/package/LICENSE.md create mode 100644 node_modules/.npm/node-uuid/1.2.0/package/README.md create mode 100644 node_modules/.npm/node-uuid/1.2.0/package/package.json create mode 100644 node_modules/.npm/node-uuid/1.2.0/package/test/benchmark-native.c create mode 100644 node_modules/.npm/node-uuid/1.2.0/package/test/benchmark.js create mode 100644 node_modules/.npm/node-uuid/1.2.0/package/test/test.html create mode 100644 node_modules/.npm/node-uuid/1.2.0/package/test/test.js create mode 100644 node_modules/.npm/node-uuid/1.2.0/package/uuid.js create mode 120000 node_modules/.npm/node-uuid/active create 
mode 100644 node_modules/.npm/optparse/1.0.1/package/README.md create mode 100644 node_modules/.npm/optparse/1.0.1/package/TODO create mode 100644 node_modules/.npm/optparse/1.0.1/package/examples/browser-test.html create mode 100644 node_modules/.npm/optparse/1.0.1/package/examples/nodejs-test.js create mode 100755 node_modules/.npm/optparse/1.0.1/package/lib/optparse.js create mode 100644 node_modules/.npm/optparse/1.0.1/package/package.json create mode 100644 node_modules/.npm/optparse/1.0.1/package/seed.yml create mode 120000 node_modules/.npm/optparse/active create mode 120000 node_modules/.npm/sax/0.2.3/dependents/xml2js@0.1.9 create mode 100644 node_modules/.npm/sax/0.2.3/package/LICENSE create mode 100644 node_modules/.npm/sax/0.2.3/package/README.md create mode 100644 node_modules/.npm/sax/0.2.3/package/examples/example.js create mode 100644 node_modules/.npm/sax/0.2.3/package/examples/not-pretty.xml create mode 100644 node_modules/.npm/sax/0.2.3/package/examples/pretty-print.js create mode 100644 node_modules/.npm/sax/0.2.3/package/examples/strict.dtd create mode 100755 node_modules/.npm/sax/0.2.3/package/examples/switch-bench.js create mode 100644 node_modules/.npm/sax/0.2.3/package/examples/test.html create mode 100644 node_modules/.npm/sax/0.2.3/package/examples/test.xml create mode 100644 node_modules/.npm/sax/0.2.3/package/lib/sax.js create mode 100644 node_modules/.npm/sax/0.2.3/package/package.json create mode 100644 node_modules/.npm/sax/0.2.3/package/test/buffer-overrun.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/cdata-chunked.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/cdata-end-split.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/cdata-fake-end.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/cdata-multiple.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/cdata.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/index.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/issue-23.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/issue-30.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/parser-position.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/self-closing-child-strict.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/self-closing-child.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/self-closing-tag.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/stray-ending.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/trailing-non-whitespace.js create mode 100644 node_modules/.npm/sax/0.2.3/package/test/unquoted.js create mode 120000 node_modules/.npm/sax/active create mode 120000 node_modules/.npm/xml2js/0.1.9/dependson/sax@0.2.3 create mode 100755 node_modules/.npm/xml2js/0.1.9/node_modules/sax/index.js create mode 100644 node_modules/.npm/xml2js/0.1.9/node_modules/sax/package.json.js create mode 100755 node_modules/.npm/xml2js/0.1.9/node_modules/sax/sax.js create mode 100644 node_modules/.npm/xml2js/0.1.9/package/.gitignore create mode 100644 node_modules/.npm/xml2js/0.1.9/package/Cakefile create mode 100644 node_modules/.npm/xml2js/0.1.9/package/LICENSE create mode 100644 node_modules/.npm/xml2js/0.1.9/package/README.md create mode 100644 node_modules/.npm/xml2js/0.1.9/package/lib/xml2js.js create mode 100644 node_modules/.npm/xml2js/0.1.9/package/package.json create mode 100644 node_modules/.npm/xml2js/0.1.9/package/src/xml2js.coffee create mode 100644 
node_modules/.npm/xml2js/0.1.9/package/test/fixtures/sample.xml create mode 100644 node_modules/.npm/xml2js/0.1.9/package/test/xml2js.test.coffee create mode 120000 node_modules/.npm/xml2js/active create mode 100644 node_modules/.npm/zfs/0.1.3/package/.gitmodules create mode 100644 node_modules/.npm/zfs/0.1.3/package/README.md create mode 120000 node_modules/.npm/zfs/0.1.3/package/index.js create mode 100644 node_modules/.npm/zfs/0.1.3/package/lib/async_testing.js create mode 100644 node_modules/.npm/zfs/0.1.3/package/lib/zfs.js create mode 100644 node_modules/.npm/zfs/0.1.3/package/package.json create mode 100755 node_modules/.npm/zfs/0.1.3/package/tests/test_zfs.js create mode 120000 node_modules/.npm/zfs/active create mode 120000 node_modules/async create mode 100755 node_modules/async@0.1.8/async.js create mode 100755 node_modules/async@0.1.8/index.js create mode 100644 node_modules/async@0.1.8/package.json.js create mode 120000 node_modules/node-uuid create mode 100755 node_modules/node-uuid@1.2.0/index.js create mode 100644 node_modules/node-uuid@1.2.0/package.json.js create mode 120000 node_modules/optparse create mode 100755 node_modules/optparse@1.0.1/index.js create mode 100644 node_modules/optparse@1.0.1/package.json.js create mode 120000 node_modules/sax create mode 100755 node_modules/sax@0.2.3/index.js create mode 100644 node_modules/sax@0.2.3/package.json.js create mode 120000 node_modules/xml2js create mode 100755 node_modules/xml2js@0.1.9/index.js create mode 100644 node_modules/xml2js@0.1.9/package.json.js create mode 120000 node_modules/zfs create mode 100755 node_modules/zfs@0.1.3/async_testing.js create mode 100755 node_modules/zfs@0.1.3/index.js create mode 100644 node_modules/zfs@0.1.3/package.json.js create mode 100755 node_modules/zfs@0.1.3/zfs.js create mode 100644 package.json diff --git a/bin/sdc-convertvm.js b/bin/sdc-convertvm.js new file mode 100755 index 0000000..1368b9a --- /dev/null +++ b/bin/sdc-convertvm.js @@ -0,0 +1,4 @@ +#!/usr/bin/env node +var CLI = require('../lib/cli'); +var app = new CLI(); +app.start(); diff --git a/doc/design.md b/doc/design.md new file mode 100644 index 0000000..b37663f --- /dev/null +++ b/doc/design.md @@ -0,0 +1 @@ +` diff --git a/lib/cli.js b/lib/cli.js new file mode 100644 index 0000000..9c2ab71 --- /dev/null +++ b/lib/cli.js @@ -0,0 +1,438 @@ +var DiskImage = require('./disk_image'); +var async = require('async'); +var execFile = require('child_process').execFile; +var fs = require('fs'); +var optparse = require('optparse'); +var path = require('path'); +var util = require('util'); +var xml2js = require('xml2js'); +var crypto = require('crypto'); +var fs = require('fs'); +var uuid = require('node-uuid'); + +var DatasetManifest = require('./dataset_manifest'); + +var CLI = module.exports = function () {} + +CLI.prototype.parseOptions = function () { + var self = this; + var switches + = [ ['-h', '--help', 'This help message.'] + , ['-n', '--ds-name VALUE', 'Short name for the dataset.'] + , ['-v', '--ds-version VALUE', 'Semantic version of dataset.'] + , ['-d', '--ds-description VALUE', 'Short description of dataset (to max. 
of 255 bytes).'] + , ['-u', '--assets-url VALUE', 'Assets location url'] + ]; + + var options = this.options = {}; + var parser = new optparse.OptionParser(switches); + parser.banner + = [ "Usage:" + , " " + [process.argv[0], process.argv[1], "[options] [output-directory]"].join(' ') + ].join("\n"); + + parser.on(2, function (value) { + options.input = value; + }); + + parser.on(3, function (value) { + options.outputDir = value; + }); + + parser.on('help', function () { + self.displayHelp(parser.toString()); + }); + + parser.on('ds-name', function (ds_name) { + options.ds_name = ds_name; + }); + + parser.on('ds-version', function (ds_version) { + options.ds_version = ds_version; + }); + + parser.on('ds-uuid', function (ds_uuid) { + options.ds_uuid = ds_uuid; + }); + + parser.on('assets-url', function (name, assets_url) { + options.assets_url = assets_url; + }); + + var args = parser.parse(process.argv); + + // name is mandatory + + if (!options.input) { + self.displayHelp(parser.toString()); + process.exit(1); + } + + self.ovfFilename = options.input; + if (!options.outputDir) { + options.outputDir = '.'; + } + + if (!options.assets_url) { + options.assets_url = 'http://10.99.99.6/datasets' + } + + return options; +} + +CLI.prototype.start = function () { + var self = this; + var options = this.parseOptions(); + + // create/verify directory + async.waterfall + ( [ self.mkdir.bind(self) + , self.verifyFiles.bind(self) + , self.convertOvfToJson.bind(self) + , self.createDatasetManifest.bind(self) + , self.createDiskImages.bind(self) + , self.populateFiles.bind(self) + , self.writeDatasetManifest.bind(self) + ] + , function (error) { + console.dir(error); + console.log("All done!"); + } + ); +} + +CLI.prototype.writeDatasetManifest = function (callback) { + var self = this; + var dsmfilename + = path.join(self.options.outputDir, self.manifest.name) + '.dsmanifest'; + console.log(dsmfilename); + fs.writeFile + ( dsmfilename + , self.manifest.toJson()+"\n" + , function (error) { + return callback(error); + } + ); +} + +CLI.prototype.mkdir = function (callback) { + var self = this; + console.log("Output directory: " + self.options.outputDir); + path.exists(self.options.outputDir, function (exists) { + if (exists) { + callback(); + } + else { + fs.mkdir(self.options.outputDir, 0755, function (error) { + if (error) { + throw new Error(error.toString); + } + callback(); + }); + } + }); +} + +CLI.prototype.displayHelp = function (help) { + console.error(help+"\n"); + process.exit(0); +} + +CLI.prototype.verifyFiles = function (callback) { + var self = this; + + self.fileDigests = {}; + var manifestFilename = (path.dirname(self.options.input) + + '/' + + path.basename( self.options.input + , path.extname(self.options.input)) + + '.mf'); + + path.exists(manifestFilename, function (exists) { + if (exists) { + fs.readFile(manifestFilename, function(err, data) { + var lines = data.toString().split(/\n+/); + async.forEach + ( lines + , function (line, callback) { + if (!line) return; + var m = line.match(/([^)]+)\((.+?)\)\s*=\s*(.*)/); + var digest = m[1]; + var filename = m[2]; + sha1file(filename, function (error, computedDigest) { + if (digest !== computedDigest) { + console.error("Digest mismatch for file " + filename); + process.exit(1); + } + fileDigests[filename] = computedDigest; + callback(); + }); + } + , function (error) { + callback(); + } + ); + }); + } + else { + return callback(); + } + }); +} + +CLI.prototype.createDatasetManifest = function (callback) { + var self = this; + + var 
manifest; + var ovf = self.ovf; + + this.manifest = manifest = new DatasetManifest(); + + // Scan hardware and gather the following information. + // - cpu type + // - nic driver + // - harddisk driver + + var VirtualSystem = ovf.VirtualSystem; + + self.parseDisks(ovf); + self.parseVirtualSytemSection(ovf); + self.parseNetworkSection(ovf); + + manifest.name = this.options.ds_name || VirtualSystem['@']['ovf:id']; + manifest.requirements = {}; + manifest.type = 'vmimage'; + manifest.uuid = this.options.ds_uuid || uuid().toLowerCase(); + manifest.version = this.options.ds_version || '1.0.0'; + + return callback(); +} + +CLI.prototype.populateFiles = function (callback) { + var self = this; + + console.dir(self.files); + + async.forEach + ( Object.keys(self.files) + , function (file, callback) { + var outputFile = path.join(self.options.outputDir + , replaceFilenameExtension(self.files[file].href, '.zfs.bz2')); + + var record = { + path: replaceFilenameExtension(self.files[file].href, '.zfs.bz2') + }; + + record.url = self.options.assets_url + '/' + record.path; + async.waterfall + ( [ function (callback) { + fs.stat(self.files[file].path, function (error, stat) { + record.size = stat.size; + callback(); + }); + } + , function (callback) { + console.log("Verifying file " + outputFile); + sha1file(outputFile, function (error, sha) { + self.fileDigests[file] = record.sha = sha; + callback(); + }); + } + ] + , function (error) { + self.manifest.files.push(record); + return callback(error); + } + ) + } + , function (error) { + return callback(error); + } + ); +} + +function sha1file (filename, callback) { + var shasum = crypto.createHash('sha1'); + var s = fs.ReadStream(filename); + s.on('data', function(d) { + shasum.update(d); + }); + + s.on('end', function() { + var d = shasum.digest('hex'); + return callback(null, d); + }); +} + +CLI.prototype.parseVirtualSytemSection = function (obj, callback) { + var self = this; + var VirtualHardwareSection = obj.VirtualSystem.VirtualHardwareSection; + + var Items = + + VirtualHardwareSection.Item.forEach(function (hw) { + switch (Number(hw['rasd:ResourceType'])) { + // NIC + case 10: + break; + } + }); +} + +CLI.prototype.parseNetworkSection = function (obj) { + var self = this; + var NetworkSection = obj.NetworkSection; + var Network; + + if (NetworkSection.Network) { + if (Array.isArray(NetworkSection.Network)) { + Network = NetworkSection.Network; + } + else { + Network = [ NetworkSection.Network ]; + } + + var nets = []; + var count = 0; + + Network.forEach(function (n) { + nets.push({ name: 'net'+count++, description: n.Description }); + }); + } + + // Scan Networks + if (nets.length) { + self.manifest.networks = nets; + } +} + +CLI.prototype.getAsArray = function (section, key) { + if (Array.isArray(section[key])) { + return section[key]; + } + else { + return [ section[key] ]; + } +} + +CLI.prototype.parseDisks = function () { + var self = this; + var ovf = self.ovf; + var files = self.files = {}; + var disks = self.disks = {}; + + var dirname = path.dirname(self.ovfFilename); + + var Files = self.getAsArray(ovf.References, 'File'); + var Disks = self.getAsArray(ovf.DiskSection, 'Disk'); + + Files.forEach(function (File) { + var file + = files[File['@']['ovf:id']] + = { size: File['@']['ovf:size'] + , id: File['@']['ovf:id'] + }; + var href = File['@']['ovf:href']; + var m = href.match(/^(\w+):\/\//); + if (m) { + throw new Error ("OVF disk referenced file with unsupported href type: " + m[1]); + } + file.path = path.join(dirname, href); + file.href 
= href; + file.outputFile = path.join(self.options.outputDir, replaceFilenameExtension(href, '.zfs.bz2')); + }); + + Disks.forEach(function (Disk) { + var disk = disks[Disk['@']['ovf:diskId']] + = { capacityBytes: Disk['@']['ovf:capacity'] + , file: files[Disk['@']['ovf:fileRef']] + }; + + var format = Disk['@']['ovf:format']; + + if (format.match(/vmdk\.html/i)) { + disk.format = 'vmdk'; + } + + var allocUnits = Disk['@']['ovf:capacityAllocationUnits']; + if (allocUnits) { + var m = allocUnits.match(/^byte * (\d+)\^(\d+)$/); + if (m) { + disk.capacityBytes *= Math.pow(Number(m[1]), Number(m[2])); + } + else { + console.error("Waning: Couldn't make sense of capacityAllocationUnits: " + + allocUnits); + } + } + }); +} + +CLI.prototype.convertOvfToJson = function (callback) { + var self = this; + var parser = new xml2js.Parser(); + parser.addListener('end', function (ovf) { + self.ovf = ovf; + return callback(); + }); + var filename = self.ovfFilename; + fs.readFile(filename, function(err, data) { + parser.parseString(data); + }); +} + +CLI.prototype.createDiskImages = function (callback) { + var self = this; + async.forEachSeries + ( Object.keys(self.disks) + , function (diskId, callback) { + var disk = self.disks[diskId]; + console.dir(self.disks[disk]); + var diskImage = new DiskImage(); + + console.dir(disk); + var opts + = { inputFile: disk.file.path + , outputFile: disk.file.outputFile + , capacityBytes: disk.capacityBytes + , zpool: self.options.zpool + , format: disk.format + }; + console.dir(opts); + + diskImage.convertToZfsStream(opts, function (error) { + console.dir(arguments); + console.log("Done converting " + disk); + callback(); + }); + } + , function () { + return callback(); + } + ); +} + +function replaceFilenameExtension (filename, newExt) { + return ( path.join + ( path.dirname(filename) + , path.basename + ( filename + , path.extname(filename) + ) + newExt + ) + ); +} + +var OVF = function () {} + +OVF.prototype.parse = function (opts) { + if (opts.file) { + + } + else if (opts.xml) { + + } +} + +OVF.prototype.parseXml = function (opts) { +} diff --git a/lib/dataset_manifest.js b/lib/dataset_manifest.js new file mode 100644 index 0000000..a40f791 --- /dev/null +++ b/lib/dataset_manifest.js @@ -0,0 +1,24 @@ +var DatasetManifest = module.exports = function () { + var self = this; + var simpleKeys = 'name type uuid version files'.split(' '); + + this.manifest = { files: [], requirements: { networks : [] }}; + var requirements = this.manifest.requirements; + + simpleKeys.forEach(function (i) { + self.__defineSetter__(i, function (v) { + this.manifest[i] = v; + }); + self.__defineGetter__(i, function () { + return this.manifest[i]; + }); + }); + + self.__defineSetter__('networks', function (v) { + requirements.networks = v; + }); +} + +DatasetManifest.prototype.toJson = function () { + return JSON.stringify(this.manifest, null, ' '); +} diff --git a/lib/disk_image.js b/lib/disk_image.js new file mode 100644 index 0000000..aeacbc8 --- /dev/null +++ b/lib/disk_image.js @@ -0,0 +1,139 @@ +var execFile = require('child_process').execFile; +var exec = require('child_process').exec; +var spawn = require('child_process').spawn; +var zfs = require('zfs').zfs; +var async = require('async'); + +/* + * Get DiskSection.Disk @populatedSize + * @ovf:capacity + * @ovf:capacityAllocationUnits + * + * 1. zfs create -V $capacity_in_bytes zones/$zvol + * 2. qemu-img convert -f vmdk -O host_device + * 3. zfs snapshot/send + * 4. 
zfs destroy +*/ + +var DiskImage = module.exports = function () {} + + +DiskImage.prototype.convertToZfsStream = function (opts, callback) { + var self = this; + + opts.zpool = opts.zpool || 'zones'; + + if (!opts.inputFile) { + throw new Error("Missing option to DiskImage: inputFile"); + } + if (!opts.outputFile) { + throw new Error("Missing option to DiskImage: outputFile"); + } + + if (!opts.format) { + throw new Error("No disk format specified"); + } + + if (['vmdk'].indexOf(opts.format) === -1) { + throw new Error("Unsupported disk format: " + opts.format); + } + + this.capacityBytes = opts.capacityBytes; + this.zvolName = opts.zpool + '/convert-image-'+randstr(); + this.zvolSnapshotName = this.zvolName + '@qemu-img-convert'; + this.zvolDskPath = '/dev/zvol/dsk/' + this.zvolName; + this.inputFile = opts.inputFile; + this.outputFile = opts.outputFile; + + async.waterfall + ( [ self._createZvol.bind(self) + , self._vmdkToZvol.bind(self) + , self._snapshotZvol.bind(self) + , self._zfsSendSnapshot.bind(self) + ] + , function (error) { + if (error) { + console.error(error); + } + console.log("All done!"); + zfs.destroyAll + ( self.zvolName + , function (error, stdout, stderr) { + if (callback) return callback(); + } + ); + } + ); +} + +DiskImage.prototype._createZvol = function (callback) { + var self = this; + + execFile + ( '/usr/sbin/zfs' + , [ 'create', '-V', self.capacityBytes, self.zvolName ] + , {} + , function (error, stdout, stderr) { + if (error) { + return callback(new Error(stderr.toString())); + } + return callback(); + } + ); +} + +DiskImage.prototype._vmdkToZvol = function (callback) { + var self = this; + var child = + spawn + ( '/smartdc/bin/qemu-img' + , [ 'convert', '-f', 'vmdk', '-O', 'host_device' + , self.inputFile, self.zvolDskPath + ] + ); + + child.stdout.on('data', function (data) { + console.log(data.toString()); + }); + + child.stderr.on('data', function (data) { + console.error(data.toString()); + }); + + child.on('exit', function (code) { + if (code) { + return callback(new Error(stderr.toString())); + } + return callback(); + }); + +} + +DiskImage.prototype._snapshotZvol = function (callback) { + var self = this; + + zfs.snapshot(self.zvolName + '@qemu-img-convert', function (error) { + if (error) { + return callback(new Error(stderr.toString())); + } + return callback(); + }); +} + +DiskImage.prototype._zfsSendSnapshot = function (callback) { + var self = this; + exec + ( '/usr/sbin/zfs send ' + self.zvolSnapshotName + ' | bzip2 > ' + self.outputFile + , {} + , function (error, stdout, stderr) { + if (error) { + return callback(new Error(stderr.toString())); + } + return callback(); + } + ); +} + +function randstr () { + return Math.floor(Math.random() * 0xffffffff).toString(16); +}; diff --git a/node_modules/.npm/async/0.1.8/package/.gitmodules b/node_modules/.npm/async/0.1.8/package/.gitmodules new file mode 100644 index 0000000..a9aae98 --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/.gitmodules @@ -0,0 +1,9 @@ +[submodule "deps/nodeunit"] + path = deps/nodeunit + url = git://github.com/caolan/nodeunit.git +[submodule "deps/UglifyJS"] + path = deps/UglifyJS + url = https://github.com/mishoo/UglifyJS.git +[submodule "deps/nodelint"] + path = deps/nodelint + url = https://github.com/tav/nodelint.git diff --git a/node_modules/.npm/async/0.1.8/package/LICENSE b/node_modules/.npm/async/0.1.8/package/LICENSE new file mode 100644 index 0000000..b7f9d50 --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 
2010 Caolan McMahon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/.npm/async/0.1.8/package/Makefile b/node_modules/.npm/async/0.1.8/package/Makefile new file mode 100644 index 0000000..00f07ea --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/Makefile @@ -0,0 +1,21 @@ +PACKAGE = asyncjs +NODEJS = $(if $(shell test -f /usr/bin/nodejs && echo "true"),nodejs,node) + +BUILDDIR = dist + +all: build + +build: $(wildcard lib/*.js) + mkdir -p $(BUILDDIR) + uglifyjs lib/async.js > $(BUILDDIR)/async.min.js + +test: + nodeunit test + +clean: + rm -rf $(BUILDDIR) + +lint: + nodelint --config nodelint.cfg lib/async.js + +.PHONY: test build all diff --git a/node_modules/.npm/async/0.1.8/package/README.md b/node_modules/.npm/async/0.1.8/package/README.md new file mode 100644 index 0000000..196740a --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/README.md @@ -0,0 +1,962 @@ +# Async.js + +Async is a utility module which provides straight-forward, powerful functions +for working with asynchronous JavaScript. Although originally designed for +use with [node.js](http://nodejs.org), it can also be used directly in the +browser. + +Async provides around 20 functions that include the usual 'functional' +suspects (map, reduce, filter, forEach…) as well as some common patterns +for asynchronous flow control (parallel, series, waterfall…). All these +functions assume you follow the node.js convention of providing a single +callback as the last argument of your async function. + + +## Quick Examples + + async.map(['file1','file2','file3'], fs.stat, function(err, results){ + // results is now an array of stats for each file + }); + + async.filter(['file1','file2','file3'], path.exists, function(results){ + // results now equals an array of the existing files + }); + + async.parallel([ + function(){ ... }, + function(){ ... } + ], callback); + + async.series([ + function(){ ... }, + function(){ ... } + ]); + +There are many more functions available so take a look at the docs below for a +full list. This module aims to be comprehensive, so if you feel anything is +missing please create a GitHub issue for it. + + +## Download + +Releases are available for download from +[GitHub](http://github.com/caolan/async/downloads). 
+Alternatively, you can install using Node Package Manager (npm): + + npm install async + + +__Development:__ [async.js](https://github.com/caolan/async/raw/master/lib/async.js) - 17.5kb Uncompressed + +__Production:__ [async.min.js](https://github.com/caolan/async/raw/master/dist/async.min.js) - 1.7kb Packed and Gzipped + + +## In the Browser + +So far its been tested in IE6, IE7, IE8, FF3.6 and Chrome 5. Usage: + + + + + +## Documentation + +### Collections + +* [forEach](#forEach) +* [map](#map) +* [filter](#filter) +* [reject](#reject) +* [reduce](#reduce) +* [detect](#detect) +* [sortBy](#sortBy) +* [some](#some) +* [every](#every) +* [concat](#concat) + +### Flow Control + +* [series](#series) +* [parallel](#parallel) +* [whilst](#whilst) +* [until](#until) +* [waterfall](#waterfall) +* [queue](#queue) +* [auto](#auto) +* [iterator](#iterator) +* [apply](#apply) +* [nextTick](#nextTick) + +### Utils + +* [memoize](#memoize) +* [log](#log) +* [dir](#dir) +* [noConflict](#noConflict) + + +## Collections + + +### forEach(arr, iterator, callback) + +Applies an iterator function to each item in an array, in parallel. +The iterator is called with an item from the list and a callback for when it +has finished. If the iterator passes an error to this callback, the main +callback for the forEach function is immediately called with the error. + +Note, that since this function applies the iterator to each item in parallel +there is no guarantee that the iterator functions will complete in order. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A function to apply to each item in the array. + The iterator is passed a callback which must be called once it has completed. +* callback(err) - A callback which is called after all the iterator functions + have finished, or an error has occurred. + +__Example__ + + // assuming openFiles is an array of file names and saveFile is a function + // to save the modified contents of that file: + + async.forEach(openFiles, saveFile, function(err){ + // if any of the saves produced an error, err would equal that error + }); + +--------------------------------------- + + +### forEachSeries(arr, iterator, callback) + +The same as forEach only the iterator is applied to each item in the array in +series. The next iterator is only called once the current one has completed +processing. This means the iterator functions will complete in order. + + +--------------------------------------- + + +### map(arr, iterator, callback) + +Produces a new array of values by mapping each value in the given array through +the iterator function. The iterator is called with an item from the array and a +callback for when it has finished processing. The callback takes 2 arguments, +an error and the transformed item from the array. If the iterator passes an +error to this callback, the main callback for the map function is immediately +called with the error. + +Note, that since this function applies the iterator to each item in parallel +there is no guarantee that the iterator functions will complete in order, however +the results array will be in the same order as the original array. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A function to apply to each item in the array. + The iterator is passed a callback which must be called once it has completed + with an error (which can be null) and a transformed item. 
+* callback(err, results) - A callback which is called after all the iterator + functions have finished, or an error has occurred. Results is an array of the + transformed items from the original array. + +__Example__ + + async.map(['file1','file2','file3'], fs.stat, function(err, results){ + // results is now an array of stats for each file + }); + +--------------------------------------- + + +### mapSeries(arr, iterator, callback) + +The same as map only the iterator is applied to each item in the array in +series. The next iterator is only called once the current one has completed +processing. The results array will be in the same order as the original. + + +--------------------------------------- + + +### filter(arr, iterator, callback) + +__Alias:__ select + +Returns a new array of all the values which pass an async truth test. +_The callback for each iterator call only accepts a single argument of true or +false, it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like path.exists. This operation is +performed in parallel, but the results array will be in the same order as the +original. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A truth test to apply to each item in the array. + The iterator is passed a callback which must be called once it has completed. +* callback(results) - A callback which is called after all the iterator + functions have finished. + +__Example__ + + async.filter(['file1','file2','file3'], path.exists, function(results){ + // results now equals an array of the existing files + }); + +--------------------------------------- + + +### filterSeries(arr, iterator, callback) + +__alias:__ selectSeries + +The same as filter only the iterator is applied to each item in the array in +series. The next iterator is only called once the current one has completed +processing. The results array will be in the same order as the original. + +--------------------------------------- + + +### reject(arr, iterator, callback) + +The opposite of filter. Removes values that pass an async truth test. + +--------------------------------------- + + +### rejectSeries(arr, iterator, callback) + +The same as filter, only the iterator is applied to each item in the array +in series. + + +--------------------------------------- + + +### reduce(arr, memo, iterator, callback) + +__aliases:__ inject, foldl + +Reduces a list of values into a single value using an async iterator to return +each successive step. Memo is the initial state of the reduction. This +function only operates in series. For performance reasons, it may make sense to +split a call to this function into a parallel map, then use the normal +Array.prototype.reduce on the results. This function is for situations where +each step in the reduction needs to be async, if you can get the data before +reducing it then its probably a good idea to do so. + +__Arguments__ + +* arr - An array to iterate over. +* memo - The initial state of the reduction. +* iterator(memo, item, callback) - A function applied to each item in the + array to produce the next step in the reduction. The iterator is passed a + callback which accepts an optional error as its first argument, and the state + of the reduction as the second. If an error is passed to the callback, the + reduction is stopped and the main callback is immediately called with the + error. 
+* callback(err, result) - A callback which is called after all the iterator + functions have finished. Result is the reduced value. + +__Example__ + + async.reduce([1,2,3], 0, function(memo, item, callback){ + // pointless async: + process.nextTick(function(){ + callback(null, memo + item) + }); + }, function(err, result){ + // result is now equal to the last value of memo, which is 6 + }); + +--------------------------------------- + + +### reduceRight(arr, memo, iterator, callback) + +__Alias:__ foldr + +Same as reduce, only operates on the items in the array in reverse order. + + +--------------------------------------- + + +### detect(arr, iterator, callback) + +Returns the first value in a list that passes an async truth test. The +iterator is applied in parallel, meaning the first iterator to return true will +fire the detect callback with that result. That means the result might not be +the first item in the original array (in terms of order) that passes the test. + +If order within the original array is important then look at detectSeries. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A truth test to apply to each item in the array. + The iterator is passed a callback which must be called once it has completed. +* callback(result) - A callback which is called as soon as any iterator returns + true, or after all the iterator functions have finished. Result will be + the first item in the array that passes the truth test (iterator) or the + value undefined if none passed. + +__Example__ + + async.detect(['file1','file2','file3'], path.exists, function(result){ + // result now equals the first file in the list that exists + }); + +--------------------------------------- + + +### detectSeries(arr, iterator, callback) + +The same as detect, only the iterator is applied to each item in the array +in series. This means the result is always the first in the original array (in +terms of array order) that passes the truth test. + + +--------------------------------------- + + +### sortBy(arr, iterator, callback) + +Sorts a list by the results of running each value through an async iterator. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A function to apply to each item in the array. + The iterator is passed a callback which must be called once it has completed + with an error (which can be null) and a value to use as the sort criteria. +* callback(err, results) - A callback which is called after all the iterator + functions have finished, or an error has occurred. Results is the items from + the original array sorted by the values returned by the iterator calls. + +__Example__ + + async.sortBy(['file1','file2','file3'], function(file, callback){ + fs.stat(file, function(err, stats){ + callback(err, stats.mtime); + }); + }, function(err, results){ + // results is now the original array of files sorted by + // modified date + }); + + +--------------------------------------- + + +### some(arr, iterator, callback) + +__Alias:__ any + +Returns true if at least one element in the array satisfies an async test. +_The callback for each iterator call only accepts a single argument of true or +false, it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like path.exists. Once any iterator +call returns true, the main callback is immediately called. + +__Arguments__ + +* arr - An array to iterate over. 
+* iterator(item, callback) - A truth test to apply to each item in the array. + The iterator is passed a callback which must be called once it has completed. +* callback(result) - A callback which is called as soon as any iterator returns + true, or after all the iterator functions have finished. Result will be + either true or false depending on the values of the async tests. + +__Example__ + + async.some(['file1','file2','file3'], path.exists, function(result){ + // if result is true then at least one of the files exists + }); + +--------------------------------------- + + +### every(arr, iterator, callback) + +__Alias:__ all + +Returns true if every element in the array satisfies an async test. +_The callback for each iterator call only accepts a single argument of true or +false, it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like path.exists. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A truth test to apply to each item in the array. + The iterator is passed a callback which must be called once it has completed. +* callback(result) - A callback which is called after all the iterator + functions have finished. Result will be either true or false depending on + the values of the async tests. + +__Example__ + + async.every(['file1','file2','file3'], path.exists, function(result){ + // if result is true then every file exists + }); + +--------------------------------------- + + +### concat(arr, iterator, callback) + +Applies an iterator to each item in a list, concatenating the results. Returns the +concatenated list. The iterators are called in parallel, and the results are +concatenated as they return. There is no guarantee that the results array will +be returned in the original order of the arguments passed to the iterator function. + +__Arguments__ + +* arr - An array to iterate over +* iterator(item, callback) - A function to apply to each item in the array. + The iterator is passed a callback which must be called once it has completed + with an error (which can be null) and an array of results. +* callback(err, results) - A callback which is called after all the iterator + functions have finished, or an error has occurred. Results is an array containing + the concatenated results of the iterator function. + +__Example__ + + async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){ + // files is now a list of filenames that exist in the 3 directories + }); + +--------------------------------------- + + +### concatSeries(arr, iterator, callback) + +Same as async.concat, but executes in series instead of parallel. + + +## Flow Control + + +### series(tasks, [callback]) + +Run an array of functions in series, each one running once the previous +function has completed. If any functions in the series pass an error to its +callback, no more functions are run and the callback for the series is +immediately called with the value of the error. Once the tasks have completed, +the results are passed to the final callback as an array. + +It is also possible to use an object instead of an array. Each property will be +run as a function and the results will be passed to the final callback as an object +instead of an array. This can be a more readable way of handling results from +async.series. + + +__Arguments__ + +* tasks - An array or object containing functions to run, each function is passed + a callback it must call on completion. 
+* callback(err, results) - An optional callback to run once all the functions + have completed. This function gets an array of all the arguments passed to + the callbacks used in the array. + +__Example__ + + async.series([ + function(callback){ + // do some stuff ... + callback(null, 'one'); + }, + function(callback){ + // do some more stuff ... + callback(null, 'two'); + }, + ], + // optional callback + function(err, results){ + // results is now equal to ['one', 'two'] + }); + + + // an example using an object instead of an array + async.series({ + one: function(callback){ + setTimeout(function(){ + callback(null, 1); + }, 200); + }, + two: function(callback){ + setTimeout(function(){ + callback(null, 2); + }, 100); + }, + }, + function(err, results) { + // results is now equals to: {one: 1, two: 2} + }); + + +--------------------------------------- + + +### parallel(tasks, [callback]) + +Run an array of functions in parallel, without waiting until the previous +function has completed. If any of the functions pass an error to its +callback, the main callback is immediately called with the value of the error. +Once the tasks have completed, the results are passed to the final callback as an +array. + +It is also possible to use an object instead of an array. Each property will be +run as a function and the results will be passed to the final callback as an object +instead of an array. This can be a more readable way of handling results from +async.parallel. + + +__Arguments__ + +* tasks - An array or object containing functions to run, each function is passed a + callback it must call on completion. +* callback(err, results) - An optional callback to run once all the functions + have completed. This function gets an array of all the arguments passed to + the callbacks used in the array. + +__Example__ + + async.parallel([ + function(callback){ + setTimeout(function(){ + callback(null, 'one'); + }, 200); + }, + function(callback){ + setTimeout(function(){ + callback(null, 'two'); + }, 100); + }, + ], + // optional callback + function(err, results){ + // in this case, the results array will equal ['two','one'] + // because the functions were run in parallel and the second + // function had a shorter timeout before calling the callback. + }); + + + // an example using an object instead of an array + async.parallel({ + one: function(callback){ + setTimeout(function(){ + callback(null, 1); + }, 200); + }, + two: function(callback){ + setTimeout(function(){ + callback(null, 2); + }, 100); + }, + }, + function(err, results) { + // results is now equals to: {one: 1, two: 2} + }); + + +--------------------------------------- + + +### whilst(test, fn, callback) + +Repeatedly call fn, while test returns true. Calls the callback when stopped, +or an error occurs. + +__Arguments__ + +* test() - synchronous truth test to perform before each execution of fn. +* fn(callback) - A function to call each time the test passes. The function is + passed a callback which must be called once it has completed with an optional + error as the first argument. +* callback(err) - A callback which is called after the test fails and repeated + execution of fn has stopped. + +__Example__ + + var count = 0; + + async.whilst( + function () { return count < 5; }, + function (callback) { + count++; + setTimeout(callback, 1000); + }, + function (err) { + // 5 seconds have passed + } + }); + + +--------------------------------------- + + +### until(test, fn, callback) + +Repeatedly call fn, until test returns true. 
Calls the callback when stopped, +or an error occurs. + +The inverse of async.whilst. + + +--------------------------------------- + + +### waterfall(tasks, [callback]) + +Runs an array of functions in series, each passing their results to the next in +the array. However, if any of the functions pass an error to the callback, the +next function is not executed and the main callback is immediately called with +the error. + +__Arguments__ + +* tasks - An array of functions to run, each function is passed a callback it + must call on completion. +* callback(err) - An optional callback to run once all the functions have + completed. This function gets passed any error that may have occurred. + +__Example__ + + async.waterfall([ + function(callback){ + callback(null, 'one', 'two'); + }, + function(arg1, arg2, callback){ + callback(null, 'three'); + }, + function(arg1, callback){ + // arg1 now equals 'three' + callback(null, 'done'); + } + ]); + + +--------------------------------------- + + +### queue(worker, concurrency) + +Creates a queue object with the specified concurrency. Tasks added to the +queue will be processed in parallel (up to the concurrency limit). If all +workers are in progress, the task is queued until one is available. Once +a worker has completed a task, the task's callback is called. + +__Arguments__ + +* worker(task, callback) - An asynchronous function for processing a queued + task. +* concurrency - An integer for determining how many worker functions should be + run in parallel. + +__Queue objects__ + +The queue object returned by this function has the following properties and +methods: + +* length() - a function returning the number of items waiting to be processed. +* concurrency - an integer for determining how many worker functions should be + run in parallel. This property can be changed after a queue is created to + alter the concurrency on-the-fly. +* push(task, [callback]) - add a new task to the queue, the callback is called + once the worker has finished processing the task. + +__Example__ + + // create a queue object with concurrency 2 + + var q = async.queue(function (task, callback) { + console.log('hello ' + task.name). + callback(); + }, 2); + + + // add some items to the queue + + q.push({name: 'foo'}, function (err) { + console.log('finished processing foo'); + }); + q.push({name: 'bar'}, function (err) { + console.log('finished processing bar'); + }); + + +--------------------------------------- + + +### auto(tasks, [callback]) + +Determines the best order for running functions based on their requirements. +Each function can optionally depend on other functions being completed first, +and each function is run as soon as its requirements are satisfied. If any of +the functions pass and error to their callback, that function will not complete +(so any other functions depending on it will not run) and the main callback +will be called immediately with the error. + +__Arguments__ + +* tasks - An object literal containing named functions or an array of + requirements, with the function itself the last item in the array. The key + used for each function or array is used when specifying requirements. The + syntax is easier to understand by looking at the example. +* callback(err) - An optional callback which is called when all the tasks have + been completed. The callback may receive an error as an argument. 
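+Before the fuller example below, a minimal sketch of the dependency-array form
+(hypothetical task names; it assumes dependent tasks also receive a results
+object as a second argument, which is worth confirming against the vendored
+lib/async.js before relying on it in 0.1.8):
+
+    var async = require('async');
+
+    async.auto({
+        get_data: function(callback){
+            // pretend something asynchronous produced this value
+            callback(null, 'some data');
+        },
+        make_folder: function(callback){
+            callback(null, '/tmp/example');
+        },
+        write_file: ['get_data', 'make_folder', function(callback, results){
+            // runs only after get_data and make_folder have completed;
+            // results holds whatever those tasks passed to their callbacks
+            callback(null, results.make_folder + '/data.txt');
+        }]
+    }, function(err){
+        // called once every task has finished, or as soon as one errors
+    });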
+ +__Example__ + + async.auto({ + get_data: function(callback){ + // async code to get some data + }, + make_folder: function(callback){ + // async code to create a directory to store a file in + // this is run at the same time as getting the data + }, + write_file: ['get_data', 'make_folder', function(callback){ + // once there is some data and the directory exists, + // write the data to a file in the directory + }], + email_link: ['write_file', function(callback){ + // once the file is written let's email a link to it... + }] + }); + +This is a fairly trivial example, but to do this using the basic parallel and +series functions would look like this: + + async.parallel([ + function(callback){ + // async code to get some data + }, + function(callback){ + // async code to create a directory to store a file in + // this is run at the same time as getting the data + } + ], + function(results){ + async.series([ + function(callback){ + // once there is some data and the directory exists, + // write the data to a file in the directory + }, + email_link: ['write_file', function(callback){ + // once the file is written let's email a link to it... + } + ]); + }); + +For a complicated series of async tasks using the auto function makes adding +new tasks much easier and makes the code more readable. + + +--------------------------------------- + + +### iterator(tasks) + +Creates an iterator function which calls the next function in the array, +returning a continuation to call the next one after that. Its also possible to +'peek' the next iterator by doing iterator.next(). + +This function is used internally by the async module but can be useful when +you want to manually control the flow of functions in series. + +__Arguments__ + +* tasks - An array of functions to run, each function is passed a callback it + must call on completion. + +__Example__ + + var iterator = async.iterator([ + function(){ sys.p('one'); }, + function(){ sys.p('two'); }, + function(){ sys.p('three'); } + ]); + + node> var iterator2 = iterator(); + 'one' + node> var iterator3 = iterator2(); + 'two' + node> iterator3(); + 'three' + node> var nextfn = iterator2.next(); + node> nextfn(); + 'three' + + +--------------------------------------- + + +### apply(function, arguments..) + +Creates a continuation function with some arguments already applied, a useful +shorthand when combined with other flow control functions. Any arguments +passed to the returned function are added to the arguments originally passed +to apply. + +__Arguments__ + +* function - The function you want to eventually apply all arguments to. +* arguments... - Any number of arguments to automatically apply when the + continuation is called. + +__Example__ + + // using apply + + async.parallel([ + async.apply(fs.writeFile, 'testfile1', 'test1'), + async.apply(fs.writeFile, 'testfile2', 'test2'), + ]); + + + // the same process without using apply + + async.parallel([ + function(callback){ + fs.writeFile('testfile1', 'test1', callback); + }, + function(callback){ + fs.writeFile('testfile2', 'test2', callback); + }, + ]); + +It's possible to pass any number of additional arguments when calling the +continuation: + + node> var fn = async.apply(sys.puts, 'one'); + node> fn('two', 'three'); + one + two + three + +--------------------------------------- + + +### nextTick(callback) + +Calls the callback on a later loop around the event loop. 
In node.js this just +calls process.nextTick, in the browser it falls back to setTimeout(callback, 0), +which means other higher priority events may precede the execution of the callback. + +This is used internally for browser-compatibility purposes. + +__Arguments__ + +* callback - The function to call on a later loop around the event loop. + +__Example__ + + var call_order = []; + async.nextTick(function(){ + call_order.push('two'); + // call_order now equals ['one','two] + }); + call_order.push('one') + + +## Utils + + +### memoize(fn, [hasher]) + +Caches the results of an async function. When creating a hash to store function +results against, the callback is omitted from the hash and an optional hash +function can be used. + +__Arguments__ + +* fn - the function you to proxy and cache results from. +* hasher - an optional function for generating a custom hash for storing + results, it has all the arguments applied to it apart from the callback, and + must be synchronous. + +__Example__ + + var slow_fn = function (name, callback) { + // do something + callback(null, result); + }; + var fn = async.memoize(slow_fn); + + // fn can now be used as if it were slow_fn + fn('some name', function () { + // callback + }); + + + +### log(function, arguments) + +Logs the result of an async function to the console. Only works in node.js or +in browsers that support console.log and console.error (such as FF and Chrome). +If multiple arguments are returned from the async function, console.log is +called on each argument in order. + +__Arguments__ + +* function - The function you want to eventually apply all arguments to. +* arguments... - Any number of arguments to apply to the function. + +__Example__ + + var hello = function(name, callback){ + setTimeout(function(){ + callback(null, 'hello ' + name); + }, 1000); + }; + + node> async.log(hello, 'world'); + 'hello world' + + +--------------------------------------- + + +### dir(function, arguments) + +Logs the result of an async function to the console using console.dir to +display the properties of the resulting object. Only works in node.js or +in browsers that support console.dir and console.error (such as FF and Chrome). +If multiple arguments are returned from the async function, console.dir is +called on each argument in order. + +__Arguments__ + +* function - The function you want to eventually apply all arguments to. +* arguments... - Any number of arguments to apply to the function. + +__Example__ + + var hello = function(name, callback){ + setTimeout(function(){ + callback(null, {hello: name}); + }, 1000); + }; + + node> async.dir(hello, 'world'); + {hello: 'world'} + + +--------------------------------------- + + +### noConflict() + +Changes the value of async back to its original value, returning a reference to the +async object. diff --git a/node_modules/.npm/async/0.1.8/package/deps/nodeunit.css b/node_modules/.npm/async/0.1.8/package/deps/nodeunit.css new file mode 100644 index 0000000..274434a --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/deps/nodeunit.css @@ -0,0 +1,70 @@ +/*! 
+ * Styles taken from qunit.css + */ + +h1#nodeunit-header, h1.nodeunit-header { + padding: 15px; + font-size: large; + background-color: #06b; + color: white; + font-family: 'trebuchet ms', verdana, arial; + margin: 0; +} + +h1#nodeunit-header a { + color: white; +} + +h2#nodeunit-banner { + height: 2em; + border-bottom: 1px solid white; + background-color: #eee; + margin: 0; + font-family: 'trebuchet ms', verdana, arial; +} +h2#nodeunit-banner.pass { + background-color: green; +} +h2#nodeunit-banner.fail { + background-color: red; +} + +h2#nodeunit-userAgent, h2.nodeunit-userAgent { + padding: 10px; + background-color: #eee; + color: black; + margin: 0; + font-size: small; + font-weight: normal; + font-family: 'trebuchet ms', verdana, arial; + font-size: 10pt; +} + +div#nodeunit-testrunner-toolbar { + background: #eee; + border-top: 1px solid black; + padding: 10px; + font-family: 'trebuchet ms', verdana, arial; + margin: 0; + font-size: 10pt; +} + +ol#nodeunit-tests { + font-family: 'trebuchet ms', verdana, arial; + font-size: 10pt; +} +ol#nodeunit-tests li strong { + cursor:pointer; +} +ol#nodeunit-tests .pass { + color: green; +} +ol#nodeunit-tests .fail { + color: red; +} + +p#nodeunit-testresult { + margin-left: 1em; + font-size: 10pt; + font-family: 'trebuchet ms', verdana, arial; +} diff --git a/node_modules/.npm/async/0.1.8/package/deps/nodeunit.js b/node_modules/.npm/async/0.1.8/package/deps/nodeunit.js new file mode 100644 index 0000000..5957184 --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/deps/nodeunit.js @@ -0,0 +1,1966 @@ +/*! + * Nodeunit + * https://github.com/caolan/nodeunit + * Copyright (c) 2010 Caolan McMahon + * MIT Licensed + * + * json2.js + * http://www.JSON.org/json2.js + * Public Domain. + * NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + */ +nodeunit = (function(){ +/* + http://www.JSON.org/json2.js + 2010-11-17 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. 
+ return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. + + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. + + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, strict: false, regexp: false */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. 
We create the +// methods in a closure to avoid creating global variables. + +if (!this.JSON) { + this.JSON = {}; +} + +(function () { + "use strict"; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + if (typeof Date.prototype.toJSON !== 'function') { + + Date.prototype.toJSON = function (key) { + + return isFinite(this.valueOf()) ? + this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z' : null; + }; + + String.prototype.toJSON = + Number.prototype.toJSON = + Boolean.prototype.toJSON = function (key) { + return this.valueOf(); + }; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? + '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' ? c : + '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : + '"' + string + '"'; + } + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key]; + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + return quote(value); + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; + } + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. 
+ + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 ? '[]' : + gap ? '[\n' + gap + + partial.join(',\n' + gap) + '\n' + + mind + ']' : + '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. + + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + k = rep[i]; + if (typeof k === 'string') { + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { + +// Otherwise, iterate through all of the keys in the object. + + for (k in value) { + if (Object.hasOwnProperty.call(value, k)) { + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 ? '{}' : + gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + + mind + '}' : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. + + return str('', {'': value}); + }; + } + + +// If the JSON object does not yet have a parse method, give it one. + + if (typeof JSON.parse !== 'function') { + JSON.parse = function (text, reviver) { + +// The parse method takes a text and an optional reviver function, and returns +// a JavaScript value if the text is a valid JSON text. + + var j; + + function walk(holder, key) { + +// The walk method is used to recursively walk the resulting structure so +// that modifications can be made. + + var k, v, value = holder[key]; + if (value && typeof value === 'object') { + for (k in value) { + if (Object.hasOwnProperty.call(value, k)) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + } + } + } + return reviver.call(holder, key, value); + } + + +// Parsing happens in four stages. In the first stage, we replace certain +// Unicode characters with escape sequences. 
JavaScript handles many characters +// incorrectly, either silently deleting them, or treating them as line endings. + + text = String(text); + cx.lastIndex = 0; + if (cx.test(text)) { + text = text.replace(cx, function (a) { + return '\\u' + + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }); + } + +// In the second stage, we run the text against regular expressions that look +// for non-JSON patterns. We are especially concerned with '()' and 'new' +// because they can cause invocation, and '=' because it can cause mutation. +// But just to be safe, we want to reject all unexpected forms. + +// We split the second stage into 4 regexp operations in order to work around +// crippling inefficiencies in IE's and Safari's regexp engines. First we +// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we +// replace all simple value tokens with ']' characters. Third, we delete all +// open brackets that follow a colon or comma or that begin the text. Finally, +// we look to see that the remaining characters are only whitespace or ']' or +// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval. + + if (/^[\],:{}\s]*$/ +.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@') +.replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']') +.replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) { + +// In the third stage we use the eval function to compile the text into a +// JavaScript structure. The '{' operator is subject to a syntactic ambiguity +// in JavaScript: it can begin a block or an object literal. We wrap the text +// in parens to eliminate the ambiguity. + + j = eval('(' + text + ')'); + +// In the optional fourth stage, we recursively walk the new structure, passing +// each name/value pair to a reviver function for possible transformation. + + return typeof reviver === 'function' ? + walk({'': j}, '') : j; + } + +// If the text is not JSON parseable, then a SyntaxError is thrown. 
+ + throw new SyntaxError('JSON.parse'); + }; + } +}()); +var assert = this.assert = {}; +var types = {}; +var core = {}; +var nodeunit = {}; +var reporter = {}; +/*global setTimeout: false, console: false */ +(function () { + + var async = {}; + + // global on the server, window in the browser + var root = this, + previous_async = root.async; + + if (typeof module !== 'undefined' && module.exports) { + module.exports = async; + } + else { + root.async = async; + } + + async.noConflict = function () { + root.async = previous_async; + return async; + }; + + //// cross-browser compatiblity functions //// + + var _forEach = function (arr, iterator) { + if (arr.forEach) { + return arr.forEach(iterator); + } + for (var i = 0; i < arr.length; i += 1) { + iterator(arr[i], i, arr); + } + }; + + var _map = function (arr, iterator) { + if (arr.map) { + return arr.map(iterator); + } + var results = []; + _forEach(arr, function (x, i, a) { + results.push(iterator(x, i, a)); + }); + return results; + }; + + var _reduce = function (arr, iterator, memo) { + if (arr.reduce) { + return arr.reduce(iterator, memo); + } + _forEach(arr, function (x, i, a) { + memo = iterator(memo, x, i, a); + }); + return memo; + }; + + var _keys = function (obj) { + if (Object.keys) { + return Object.keys(obj); + } + var keys = []; + for (var k in obj) { + if (obj.hasOwnProperty(k)) { + keys.push(k); + } + } + return keys; + }; + + var _indexOf = function (arr, item) { + if (arr.indexOf) { + return arr.indexOf(item); + } + for (var i = 0; i < arr.length; i += 1) { + if (arr[i] === item) { + return i; + } + } + return -1; + }; + + //// exported async module functions //// + + //// nextTick implementation with browser-compatible fallback //// + async.nextTick = function (fn) { + if (typeof process === 'undefined' || !(process.nextTick)) { + setTimeout(fn, 0); + } + else { + process.nextTick(fn); + } + }; + + async.forEach = function (arr, iterator, callback) { + if (!arr.length) { + return callback(); + } + var completed = 0; + _forEach(arr, function (x) { + iterator(x, function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + if (completed === arr.length) { + callback(); + } + } + }); + }); + }; + + async.forEachSeries = function (arr, iterator, callback) { + if (!arr.length) { + return callback(); + } + var completed = 0; + var iterate = function () { + iterator(arr[completed], function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + if (completed === arr.length) { + callback(); + } + else { + iterate(); + } + } + }); + }; + iterate(); + }; + + + var doParallel = function (fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [async.forEach].concat(args)); + }; + }; + var doSeries = function (fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [async.forEachSeries].concat(args)); + }; + }; + + + var _asyncMap = function (eachfn, arr, iterator, callback) { + var results = []; + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + eachfn(arr, function (x, callback) { + iterator(x.value, function (err, v) { + results[x.index] = v; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + }; + async.map = doParallel(_asyncMap); + async.mapSeries = doSeries(_asyncMap); + + + // reduce only has a series version, as doing reduce in parallel won't + // work in many situations. 
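+    // For illustration, a typical reduce call looks like the following (the
+    // iterator receives the running memo and the current item, and passes the
+    // new memo to its callback):
+    //
+    //     async.reduce([1, 2, 3], 0, function (memo, item, callback) {
+    //         callback(null, memo + item);
+    //     }, function (err, result) {
+    //         // result is 6
+    //     });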
+ async.reduce = function (arr, memo, iterator, callback) { + async.forEachSeries(arr, function (x, callback) { + iterator(memo, x, function (err, v) { + memo = v; + callback(err); + }); + }, function (err) { + callback(err, memo); + }); + }; + // inject alias + async.inject = async.reduce; + // foldl alias + async.foldl = async.reduce; + + async.reduceRight = function (arr, memo, iterator, callback) { + var reversed = _map(arr, function (x) { + return x; + }).reverse(); + async.reduce(reversed, memo, iterator, callback); + }; + // foldr alias + async.foldr = async.reduceRight; + + var _filter = function (eachfn, arr, iterator, callback) { + var results = []; + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + eachfn(arr, function (x, callback) { + iterator(x.value, function (v) { + if (v) { + results.push(x); + } + callback(); + }); + }, function (err) { + callback(_map(results.sort(function (a, b) { + return a.index - b.index; + }), function (x) { + return x.value; + })); + }); + }; + async.filter = doParallel(_filter); + async.filterSeries = doSeries(_filter); + // select alias + async.select = async.filter; + async.selectSeries = async.filterSeries; + + var _reject = function (eachfn, arr, iterator, callback) { + var results = []; + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + eachfn(arr, function (x, callback) { + iterator(x.value, function (v) { + if (!v) { + results.push(x); + } + callback(); + }); + }, function (err) { + callback(_map(results.sort(function (a, b) { + return a.index - b.index; + }), function (x) { + return x.value; + })); + }); + }; + async.reject = doParallel(_reject); + async.rejectSeries = doSeries(_reject); + + var _detect = function (eachfn, arr, iterator, main_callback) { + eachfn(arr, function (x, callback) { + iterator(x, function (result) { + if (result) { + main_callback(x); + } + else { + callback(); + } + }); + }, function (err) { + main_callback(); + }); + }; + async.detect = doParallel(_detect); + async.detectSeries = doSeries(_detect); + + async.some = function (arr, iterator, main_callback) { + async.forEach(arr, function (x, callback) { + iterator(x, function (v) { + if (v) { + main_callback(true); + main_callback = function () {}; + } + callback(); + }); + }, function (err) { + main_callback(false); + }); + }; + // any alias + async.any = async.some; + + async.every = function (arr, iterator, main_callback) { + async.forEach(arr, function (x, callback) { + iterator(x, function (v) { + if (!v) { + main_callback(false); + main_callback = function () {}; + } + callback(); + }); + }, function (err) { + main_callback(true); + }); + }; + // all alias + async.all = async.every; + + async.sortBy = function (arr, iterator, callback) { + async.map(arr, function (x, callback) { + iterator(x, function (err, criteria) { + if (err) { + callback(err); + } + else { + callback(null, {value: x, criteria: criteria}); + } + }); + }, function (err, results) { + if (err) { + return callback(err); + } + else { + var fn = function (left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 
1 : 0; + }; + callback(null, _map(results.sort(fn), function (x) { + return x.value; + })); + } + }); + }; + + async.auto = function (tasks, callback) { + callback = callback || function () {}; + var keys = _keys(tasks); + if (!keys.length) { + return callback(null); + } + + var completed = []; + + var listeners = []; + var addListener = function (fn) { + listeners.unshift(fn); + }; + var removeListener = function (fn) { + for (var i = 0; i < listeners.length; i += 1) { + if (listeners[i] === fn) { + listeners.splice(i, 1); + return; + } + } + }; + var taskComplete = function () { + _forEach(listeners, function (fn) { + fn(); + }); + }; + + addListener(function () { + if (completed.length === keys.length) { + callback(null); + } + }); + + _forEach(keys, function (k) { + var task = (tasks[k] instanceof Function) ? [tasks[k]]: tasks[k]; + var taskCallback = function (err) { + if (err) { + callback(err); + // stop subsequent errors hitting callback multiple times + callback = function () {}; + } + else { + completed.push(k); + taskComplete(); + } + }; + var requires = task.slice(0, Math.abs(task.length - 1)) || []; + var ready = function () { + return _reduce(requires, function (a, x) { + return (a && _indexOf(completed, x) !== -1); + }, true); + }; + if (ready()) { + task[task.length - 1](taskCallback); + } + else { + var listener = function () { + if (ready()) { + removeListener(listener); + task[task.length - 1](taskCallback); + } + }; + addListener(listener); + } + }); + }; + + async.waterfall = function (tasks, callback) { + if (!tasks.length) { + return callback(); + } + callback = callback || function () {}; + var wrapIterator = function (iterator) { + return function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + var args = Array.prototype.slice.call(arguments, 1); + var next = iterator.next(); + if (next) { + args.push(wrapIterator(next)); + } + else { + args.push(callback); + } + async.nextTick(function () { + iterator.apply(null, args); + }); + } + }; + }; + wrapIterator(async.iterator(tasks))(); + }; + + async.parallel = function (tasks, callback) { + callback = callback || function () {}; + if (tasks.constructor === Array) { + async.map(tasks, function (fn, callback) { + if (fn) { + fn(function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + callback.call(null, err, args || null); + }); + } + }, callback); + } + else { + var results = {}; + async.forEach(_keys(tasks), function (k, callback) { + tasks[k](function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + results[k] = args; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + + async.series = function (tasks, callback) { + callback = callback || function () {}; + if (tasks.constructor === Array) { + async.mapSeries(tasks, function (fn, callback) { + if (fn) { + fn(function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + callback.call(null, err, args || null); + }); + } + }, callback); + } + else { + var results = {}; + async.forEachSeries(_keys(tasks), function (k, callback) { + tasks[k](function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + results[k] = args; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + + async.iterator = function (tasks) { + var 
makeCallback = function (index) { + var fn = function () { + if (tasks.length) { + tasks[index].apply(null, arguments); + } + return fn.next(); + }; + fn.next = function () { + return (index < tasks.length - 1) ? makeCallback(index + 1): null; + }; + return fn; + }; + return makeCallback(0); + }; + + async.apply = function (fn) { + var args = Array.prototype.slice.call(arguments, 1); + return function () { + return fn.apply( + null, args.concat(Array.prototype.slice.call(arguments)) + ); + }; + }; + + var _concat = function (eachfn, arr, fn, callback) { + var r = []; + eachfn(arr, function (x, cb) { + fn(x, function (err, y) { + r = r.concat(y || []); + cb(err); + }); + }, function (err) { + callback(err, r); + }); + }; + async.concat = doParallel(_concat); + async.concatSeries = doSeries(_concat); + + async.whilst = function (test, iterator, callback) { + if (test()) { + iterator(function (err) { + if (err) { + return callback(err); + } + async.whilst(test, iterator, callback); + }); + } + else { + callback(); + } + }; + + async.until = function (test, iterator, callback) { + if (!test()) { + iterator(function (err) { + if (err) { + return callback(err); + } + async.until(test, iterator, callback); + }); + } + else { + callback(); + } + }; + + async.queue = function (worker, concurrency) { + var workers = 0; + var tasks = []; + var q = { + concurrency: concurrency, + push: function (data, callback) { + tasks.push({data: data, callback: callback}); + async.nextTick(q.process); + }, + process: function () { + if (workers < q.concurrency && tasks.length) { + var task = tasks.splice(0, 1)[0]; + workers += 1; + worker(task.data, function () { + workers -= 1; + if (task.callback) { + task.callback.apply(task, arguments); + } + q.process(); + }); + } + }, + length: function () { + return tasks.length; + } + }; + return q; + }; + + var _console_fn = function (name) { + return function (fn) { + var args = Array.prototype.slice.call(arguments, 1); + fn.apply(null, args.concat([function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (typeof console !== 'undefined') { + if (err) { + if (console.error) { + console.error(err); + } + } + else if (console[name]) { + _forEach(args, function (x) { + console[name](x); + }); + } + } + }])); + }; + }; + async.log = _console_fn('log'); + async.dir = _console_fn('dir'); + /*async.info = _console_fn('info'); + async.warn = _console_fn('warn'); + async.error = _console_fn('error');*/ + +}()); +(function(exports){ +/** + * This file is based on the node.js assert module, but with some small + * changes for browser-compatibility + * THIS FILE SHOULD BE BROWSER-COMPATIBLE JS! + */ + + +/** + * Added for browser compatibility + */ + +var _keys = function(obj){ + if(Object.keys) return Object.keys(obj); + var keys = []; + for(var k in obj){ + if(obj.hasOwnProperty(k)) keys.push(k); + } + return keys; +}; + + + +// http://wiki.commonjs.org/wiki/Unit_Testing/1.0 +// +// THIS IS NOT TESTED NOR LIKELY TO WORK OUTSIDE V8! 
+// +// Originally from narwhal.js (http://narwhaljs.org) +// Copyright (c) 2009 Thomas Robinson <280north.com> +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to +// deal in the Software without restriction, including without limitation the +// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +// sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +var pSlice = Array.prototype.slice; + +// 1. The assert module provides functions that throw +// AssertionError's when particular conditions are not met. The +// assert module must conform to the following interface. + +var assert = exports; + +// 2. The AssertionError is defined in assert. +// new assert.AssertionError({message: message, actual: actual, expected: expected}) + +assert.AssertionError = function AssertionError (options) { + this.name = "AssertionError"; + this.message = options.message; + this.actual = options.actual; + this.expected = options.expected; + this.operator = options.operator; + var stackStartFunction = options.stackStartFunction || fail; + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, stackStartFunction); + } +}; +// code from util.inherits in node +assert.AssertionError.super_ = Error; + + +// EDITED FOR BROWSER COMPATIBILITY: replaced Object.create call +// TODO: test what effect this may have +var ctor = function () { this.constructor = assert.AssertionError; }; +ctor.prototype = Error.prototype; +assert.AssertionError.prototype = new ctor(); + + +assert.AssertionError.prototype.toString = function() { + if (this.message) { + return [this.name+":", this.message].join(' '); + } else { + return [ this.name+":" + , JSON.stringify(this.expected ) + , this.operator + , JSON.stringify(this.actual) + ].join(" "); + } +}; + +// assert.AssertionError instanceof Error + +assert.AssertionError.__proto__ = Error.prototype; + +// At present only the three keys mentioned above are used and +// understood by the spec. Implementations or sub modules can pass +// other keys to the AssertionError's constructor - they will be +// ignored. + +// 3. All of the following functions must throw an AssertionError +// when a corresponding condition is not met, with a message that +// may be undefined if not provided. All assertion methods provide +// both the actual and expected values to the assertion error for +// display purposes. + +function fail(actual, expected, message, operator, stackStartFunction) { + throw new assert.AssertionError({ + message: message, + actual: actual, + expected: expected, + operator: operator, + stackStartFunction: stackStartFunction + }); +} + +// EXTENSION! allows for well behaved errors defined elsewhere. +assert.fail = fail; + +// 4. 
Pure assertion tests whether a value is truthy, as determined +// by !!guard. +// assert.ok(guard, message_opt); +// This statement is equivalent to assert.equal(true, guard, +// message_opt);. To test strictly for the value true, use +// assert.strictEqual(true, guard, message_opt);. + +assert.ok = function ok(value, message) { + if (!!!value) fail(value, true, message, "==", assert.ok); +}; + +// 5. The equality assertion tests shallow, coercive equality with +// ==. +// assert.equal(actual, expected, message_opt); + +assert.equal = function equal(actual, expected, message) { + if (actual != expected) fail(actual, expected, message, "==", assert.equal); +}; + +// 6. The non-equality assertion tests for whether two objects are not equal +// with != assert.notEqual(actual, expected, message_opt); + +assert.notEqual = function notEqual(actual, expected, message) { + if (actual == expected) { + fail(actual, expected, message, "!=", assert.notEqual); + } +}; + +// 7. The equivalence assertion tests a deep equality relation. +// assert.deepEqual(actual, expected, message_opt); + +assert.deepEqual = function deepEqual(actual, expected, message) { + if (!_deepEqual(actual, expected)) { + fail(actual, expected, message, "deepEqual", assert.deepEqual); + } +}; + +function _deepEqual(actual, expected) { + // 7.1. All identical values are equivalent, as determined by ===. + if (actual === expected) { + return true; + // 7.2. If the expected value is a Date object, the actual value is + // equivalent if it is also a Date object that refers to the same time. + } else if (actual instanceof Date && expected instanceof Date) { + return actual.getTime() === expected.getTime(); + + // 7.3. Other pairs that do not both pass typeof value == "object", + // equivalence is determined by ==. + } else if (typeof actual != 'object' && typeof expected != 'object') { + return actual == expected; + + // 7.4. For all other Object pairs, including Array objects, equivalence is + // determined by having the same number of owned properties (as verified + // with Object.prototype.hasOwnProperty.call), the same set of keys + // (although not necessarily the same order), equivalent values for every + // corresponding key, and an identical "prototype" property. Note: this + // accounts for both named and indexed properties on Arrays. + } else { + return objEquiv(actual, expected); + } +} + +function isUndefinedOrNull (value) { + return value === null || value === undefined; +} + +function isArguments (object) { + return Object.prototype.toString.call(object) == '[object Arguments]'; +} + +function objEquiv (a, b) { + if (isUndefinedOrNull(a) || isUndefinedOrNull(b)) + return false; + // an identical "prototype" property. + if (a.prototype !== b.prototype) return false; + //~~~I've managed to break Object.keys through screwy arguments passing. + // Converting to array solves the problem. 
+ if (isArguments(a)) { + if (!isArguments(b)) { + return false; + } + a = pSlice.call(a); + b = pSlice.call(b); + return _deepEqual(a, b); + } + try{ + var ka = _keys(a), + kb = _keys(b), + key, i; + } catch (e) {//happens when one is a string literal and the other isn't + return false; + } + // having the same number of owned properties (keys incorporates hasOwnProperty) + if (ka.length != kb.length) + return false; + //the same set of keys (although not necessarily the same order), + ka.sort(); + kb.sort(); + //~~~cheap key test + for (i = ka.length - 1; i >= 0; i--) { + if (ka[i] != kb[i]) + return false; + } + //equivalent values for every corresponding key, and + //~~~possibly expensive deep test + for (i = ka.length - 1; i >= 0; i--) { + key = ka[i]; + if (!_deepEqual(a[key], b[key] )) + return false; + } + return true; +} + +// 8. The non-equivalence assertion tests for any deep inequality. +// assert.notDeepEqual(actual, expected, message_opt); + +assert.notDeepEqual = function notDeepEqual(actual, expected, message) { + if (_deepEqual(actual, expected)) { + fail(actual, expected, message, "notDeepEqual", assert.notDeepEqual); + } +}; + +// 9. The strict equality assertion tests strict equality, as determined by ===. +// assert.strictEqual(actual, expected, message_opt); + +assert.strictEqual = function strictEqual(actual, expected, message) { + if (actual !== expected) { + fail(actual, expected, message, "===", assert.strictEqual); + } +}; + +// 10. The strict non-equality assertion tests for strict inequality, as determined by !==. +// assert.notStrictEqual(actual, expected, message_opt); + +assert.notStrictEqual = function notStrictEqual(actual, expected, message) { + if (actual === expected) { + fail(actual, expected, message, "!==", assert.notStrictEqual); + } +}; + +function _throws (shouldThrow, block, err, message) { + var exception = null, + threw = false, + typematters = true; + + message = message || ""; + + //handle optional arguments + if (arguments.length == 3) { + if (typeof(err) == "string") { + message = err; + typematters = false; + } + } else if (arguments.length == 2) { + typematters = false; + } + + try { + block(); + } catch (e) { + threw = true; + exception = e; + } + + if (shouldThrow && !threw) { + fail( "Missing expected exception" + + (err && err.name ? " ("+err.name+")." : '.') + + (message ? " " + message : "") + ); + } + if (!shouldThrow && threw && typematters && exception instanceof err) { + fail( "Got unwanted exception" + + (err && err.name ? " ("+err.name+")." : '.') + + (message ? " " + message : "") + ); + } + if ((shouldThrow && threw && typematters && !(exception instanceof err)) || + (!shouldThrow && threw)) { + throw exception; + } +}; + +// 11. Expected to throw an error: +// assert.throws(block, Error_opt, message_opt); + +assert.throws = function(block, /*optional*/error, /*optional*/message) { + _throws.apply(this, [true].concat(pSlice.call(arguments))); +}; + +// EXTENSION! This is annoying to write outside this module. +assert.doesNotThrow = function(block, /*optional*/error, /*optional*/message) { + _throws.apply(this, [false].concat(pSlice.call(arguments))); +}; + +assert.ifError = function (err) { if (err) {throw err;}}; +})(assert); +(function(exports){ +/*! + * Nodeunit + * Copyright (c) 2010 Caolan McMahon + * MIT Licensed + * + * THIS FILE SHOULD BE BROWSER-COMPATIBLE JS! 
+ * Only code on that line will be removed, its mostly to avoid requiring code + * that is node specific + */ + +/** + * Module dependencies + */ + + + +/** + * Creates assertion objects representing the result of an assert call. + * Accepts an object or AssertionError as its argument. + * + * @param {object} obj + * @api public + */ + +exports.assertion = function (obj) { + return { + method: obj.method || '', + message: obj.message || (obj.error && obj.error.message) || '', + error: obj.error, + passed: function () { + return !this.error; + }, + failed: function () { + return Boolean(this.error); + } + }; +}; + +/** + * Creates an assertion list object representing a group of assertions. + * Accepts an array of assertion objects. + * + * @param {Array} arr + * @param {Number} duration + * @api public + */ + +exports.assertionList = function (arr, duration) { + var that = arr || []; + that.failures = function () { + var failures = 0; + for (var i=0; i(' + + '' + assertions.failures() + ', ' + + '' + assertions.passes() + ', ' + + assertions.length + + ')'; + test.className = assertions.failures() ? 'fail': 'pass'; + test.appendChild(strong); + + var aList = document.createElement('ol'); + aList.style.display = 'none'; + test.onclick = function () { + var d = aList.style.display; + aList.style.display = (d == 'none') ? 'block': 'none'; + }; + for (var i=0; i' + (a.error.stack || a.error) + ''; + li.className = 'fail'; + } + else { + li.innerHTML = a.message || a.method || 'no message'; + li.className = 'pass'; + } + aList.appendChild(li); + } + test.appendChild(aList); + tests.appendChild(test); + }, + done: function (assertions) { + var end = new Date().getTime(); + var duration = end - start; + + var failures = assertions.failures(); + banner.className = failures ? 'fail': 'pass'; + + result.innerHTML = 'Tests completed in ' + duration + + ' milliseconds.
' + + assertions.passes() + ' assertions of ' + + '' + assertions.length + ' passed, ' + + assertions.failures() + ' failed.'; + } + }); +}; +})(reporter); +nodeunit = core; +nodeunit.assert = assert; +nodeunit.reporter = reporter; +nodeunit.run = reporter.run; +return nodeunit; })(); diff --git a/node_modules/.npm/async/0.1.8/package/dist/async.min.js b/node_modules/.npm/async/0.1.8/package/dist/async.min.js new file mode 100644 index 0000000..c25ae30 --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/dist/async.min.js @@ -0,0 +1 @@ +/*global setTimeout: false, console: false */(function(){var a={};var b=this,c=b.async;typeof module!=="undefined"&&module.exports?module.exports=a:b.async=a,a.noConflict=function(){b.async=c;return a};var d=function(a,b){if(a.forEach)return a.forEach(b);for(var c=0;cd?1:0};d(null,e(b.sort(c),function(a){return a.value}))})},a.auto=function(a,b){b=b||function(){};var c=g(a);if(!c.length)return b(null);var e=[];var i=[];var j=function(a){i.unshift(a)};var k=function(a){for(var b=0;b b ? 1 : 0; + }; + callback(null, _map(results.sort(fn), function (x) { + return x.value; + })); + } + }); + }; + + async.auto = function (tasks, callback) { + callback = callback || function () {}; + var keys = _keys(tasks); + if (!keys.length) { + return callback(null); + } + + var completed = []; + + var listeners = []; + var addListener = function (fn) { + listeners.unshift(fn); + }; + var removeListener = function (fn) { + for (var i = 0; i < listeners.length; i += 1) { + if (listeners[i] === fn) { + listeners.splice(i, 1); + return; + } + } + }; + var taskComplete = function () { + _forEach(listeners, function (fn) { + fn(); + }); + }; + + addListener(function () { + if (completed.length === keys.length) { + callback(null); + } + }); + + _forEach(keys, function (k) { + var task = (tasks[k] instanceof Function) ? 
[tasks[k]]: tasks[k]; + var taskCallback = function (err) { + if (err) { + callback(err); + // stop subsequent errors hitting callback multiple times + callback = function () {}; + } + else { + completed.push(k); + taskComplete(); + } + }; + var requires = task.slice(0, Math.abs(task.length - 1)) || []; + var ready = function () { + return _reduce(requires, function (a, x) { + return (a && _indexOf(completed, x) !== -1); + }, true); + }; + if (ready()) { + task[task.length - 1](taskCallback); + } + else { + var listener = function () { + if (ready()) { + removeListener(listener); + task[task.length - 1](taskCallback); + } + }; + addListener(listener); + } + }); + }; + + async.waterfall = function (tasks, callback) { + if (!tasks.length) { + return callback(); + } + callback = callback || function () {}; + var wrapIterator = function (iterator) { + return function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + var args = Array.prototype.slice.call(arguments, 1); + var next = iterator.next(); + if (next) { + args.push(wrapIterator(next)); + } + else { + args.push(callback); + } + async.nextTick(function () { + iterator.apply(null, args); + }); + } + }; + }; + wrapIterator(async.iterator(tasks))(); + }; + + async.parallel = function (tasks, callback) { + callback = callback || function () {}; + if (tasks.constructor === Array) { + async.map(tasks, function (fn, callback) { + if (fn) { + fn(function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + callback.call(null, err, args); + }); + } + }, callback); + } + else { + var results = {}; + async.forEach(_keys(tasks), function (k, callback) { + tasks[k](function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + results[k] = args; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + + async.series = function (tasks, callback) { + callback = callback || function () {}; + if (tasks.constructor === Array) { + async.mapSeries(tasks, function (fn, callback) { + if (fn) { + fn(function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + callback.call(null, err, args); + }); + } + }, callback); + } + else { + var results = {}; + async.forEachSeries(_keys(tasks), function (k, callback) { + tasks[k](function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + results[k] = args; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + + async.iterator = function (tasks) { + var makeCallback = function (index) { + var fn = function () { + if (tasks.length) { + tasks[index].apply(null, arguments); + } + return fn.next(); + }; + fn.next = function () { + return (index < tasks.length - 1) ? 
makeCallback(index + 1): null; + }; + return fn; + }; + return makeCallback(0); + }; + + async.apply = function (fn) { + var args = Array.prototype.slice.call(arguments, 1); + return function () { + return fn.apply( + null, args.concat(Array.prototype.slice.call(arguments)) + ); + }; + }; + + var _concat = function (eachfn, arr, fn, callback) { + var r = []; + eachfn(arr, function (x, cb) { + fn(x, function (err, y) { + r = r.concat(y || []); + cb(err); + }); + }, function (err) { + callback(err, r); + }); + }; + async.concat = doParallel(_concat); + async.concatSeries = doSeries(_concat); + + async.whilst = function (test, iterator, callback) { + if (test()) { + iterator(function (err) { + if (err) { + return callback(err); + } + async.whilst(test, iterator, callback); + }); + } + else { + callback(); + } + }; + + async.until = function (test, iterator, callback) { + if (!test()) { + iterator(function (err) { + if (err) { + return callback(err); + } + async.until(test, iterator, callback); + }); + } + else { + callback(); + } + }; + + async.queue = function (worker, concurrency) { + var workers = 0; + var tasks = []; + var q = { + concurrency: concurrency, + push: function (data, callback) { + tasks.push({data: data, callback: callback}); + async.nextTick(q.process); + }, + process: function () { + if (workers < q.concurrency && tasks.length) { + var task = tasks.splice(0, 1)[0]; + workers += 1; + worker(task.data, function () { + workers -= 1; + if (task.callback) { + task.callback.apply(task, arguments); + } + q.process(); + }); + } + }, + length: function () { + return tasks.length; + } + }; + return q; + }; + + var _console_fn = function (name) { + return function (fn) { + var args = Array.prototype.slice.call(arguments, 1); + fn.apply(null, args.concat([function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (typeof console !== 'undefined') { + if (err) { + if (console.error) { + console.error(err); + } + } + else if (console[name]) { + _forEach(args, function (x) { + console[name](x); + }); + } + } + }])); + }; + }; + async.log = _console_fn('log'); + async.dir = _console_fn('dir'); + /*async.info = _console_fn('info'); + async.warn = _console_fn('warn'); + async.error = _console_fn('error');*/ + + async.memoize = function (fn, hasher) { + var memo = {}; + hasher = hasher || function (x) { + return x; + }; + return function () { + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + var key = hasher.apply(null, args); + if (key in memo) { + callback.apply(null, memo[key]); + } + else { + fn.apply(null, args.concat([function () { + memo[key] = arguments; + callback.apply(null, arguments); + }])); + } + }; + }; + +}()); diff --git a/node_modules/.npm/async/0.1.8/package/nodelint.cfg b/node_modules/.npm/async/0.1.8/package/nodelint.cfg new file mode 100644 index 0000000..457a967 --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/nodelint.cfg @@ -0,0 +1,4 @@ +var options = { + indent: 4, + onevar: false +}; diff --git a/node_modules/.npm/async/0.1.8/package/package.json b/node_modules/.npm/async/0.1.8/package/package.json new file mode 100644 index 0000000..eda4068 --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/package.json @@ -0,0 +1,16 @@ +{ "name": "async" +, "description": "Higher-order functions and common patterns for asynchronous code" +, "main": "./index" +, "author": "Caolan McMahon" +, "version": "0.1.8" +, "repository" : + { "type" : "git" + , "url" : "http://github.com/caolan/async.git" + } +, "bugs" : { 
"web" : "http://github.com/caolan/async/issues" } +, "licenses" : + [ { "type" : "MIT" + , "url" : "http://github.com/caolan/async/raw/master/LICENSE" + } + ] +} diff --git a/node_modules/.npm/async/0.1.8/package/test/test-async.js b/node_modules/.npm/async/0.1.8/package/test/test-async.js new file mode 100644 index 0000000..4f5648f --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/test/test-async.js @@ -0,0 +1,1321 @@ +var async = require('../lib/async'); + + +exports['auto'] = function(test){ + var callOrder = []; + var testdata = [{test: 'test'}]; + async.auto({ + task1: ['task2', function(callback){ + setTimeout(function(){ + callOrder.push('task1'); + callback(); + }, 25); + }], + task2: function(callback){ + setTimeout(function(){ + callOrder.push('task2'); + callback(); + }, 50); + }, + task3: ['task2', function(callback){ + callOrder.push('task3'); + callback(); + }], + task4: ['task1', 'task2', function(callback){ + callOrder.push('task4'); + callback(); + }] + }, + function(err){ + test.same(callOrder, ['task2','task3','task1','task4']); + test.done(); + }); +}; + +exports['auto empty object'] = function(test){ + async.auto({}, function(err){ + test.done(); + }); +}; + +exports['auto error'] = function(test){ + test.expect(1); + async.auto({ + task1: function(callback){ + callback('testerror'); + }, + task2: ['task1', function(callback){ + test.ok(false, 'task2 should not be called'); + callback(); + }], + task3: function(callback){ + callback('testerror2'); + } + }, + function(err){ + test.equals(err, 'testerror'); + }); + setTimeout(test.done, 100); +}; + +exports['auto no callback'] = function(test){ + async.auto({ + task1: function(callback){callback();}, + task2: ['task1', function(callback){callback(); test.done();}] + }); +}; + +exports['waterfall'] = function(test){ + test.expect(6); + var call_order = []; + async.waterfall([ + function(callback){ + call_order.push('fn1'); + setTimeout(function(){callback(null, 'one', 'two');}, 0); + }, + function(arg1, arg2, callback){ + call_order.push('fn2'); + test.equals(arg1, 'one'); + test.equals(arg2, 'two'); + setTimeout(function(){callback(null, arg1, arg2, 'three');}, 25); + }, + function(arg1, arg2, arg3, callback){ + call_order.push('fn3'); + test.equals(arg1, 'one'); + test.equals(arg2, 'two'); + test.equals(arg3, 'three'); + callback(null, 'four'); + }, + function(arg4, callback){ + call_order.push('fn4'); + test.same(call_order, ['fn1','fn2','fn3','fn4']); + callback(null, 'test'); + } + ], function(err){ + test.done(); + }); +}; + +exports['waterfall empty array'] = function(test){ + async.waterfall([], function(err){ + test.done(); + }); +}; + +exports['waterfall no callback'] = function(test){ + async.waterfall([ + function(callback){callback();}, + function(callback){callback(); test.done();} + ]); +}; + +exports['waterfall async'] = function(test){ + var call_order = []; + async.waterfall([ + function(callback){ + call_order.push(1); + callback(); + call_order.push(2); + }, + function(callback){ + call_order.push(3); + callback(); + }, + function(){ + test.same(call_order, [1,2,3]); + test.done(); + } + ]); +}; + +exports['waterfall error'] = function(test){ + test.expect(1); + async.waterfall([ + function(callback){ + callback('error'); + }, + function(callback){ + test.ok(false, 'next function should not be called'); + callback(); + } + ], function(err){ + test.equals(err, 'error'); + }); + setTimeout(test.done, 50); +}; + +exports['waterfall multiple callback calls'] = function(test){ + var call_order = []; 
+ var arr = [ + function(callback){ + call_order.push(1); + // call the callback twice. this should call function 2 twice + callback(null, 'one', 'two'); + callback(null, 'one', 'two'); + }, + function(arg1, arg2, callback){ + call_order.push(2); + callback(null, arg1, arg2, 'three'); + }, + function(arg1, arg2, arg3, callback){ + call_order.push(3); + callback(null, 'four'); + }, + function(arg4){ + call_order.push(4); + arr[3] = function(){ + call_order.push(4); + test.same(call_order, [1,2,2,3,3,4,4]); + test.done(); + }; + } + ]; + async.waterfall(arr); +}; + + +exports['parallel'] = function(test){ + var call_order = []; + async.parallel([ + function(callback){ + setTimeout(function(){ + call_order.push(1); + callback(null, 1); + }, 25); + }, + function(callback){ + setTimeout(function(){ + call_order.push(2); + callback(null, 2); + }, 50); + }, + function(callback){ + setTimeout(function(){ + call_order.push(3); + callback(null, 3,3); + }, 15); + } + ], + function(err, results){ + test.equals(err, null); + test.same(call_order, [3,1,2]); + test.same(results, [1,2,[3,3]]); + test.done(); + }); +}; + +exports['parallel empty array'] = function(test){ + async.parallel([], function(err, results){ + test.equals(err, null); + test.same(results, []); + test.done(); + }); +}; + +exports['parallel error'] = function(test){ + async.parallel([ + function(callback){ + callback('error', 1); + }, + function(callback){ + callback('error2', 2); + } + ], + function(err, results){ + test.equals(err, 'error'); + }); + setTimeout(test.done, 100); +}; + +exports['parallel no callback'] = function(test){ + async.parallel([ + function(callback){callback();}, + function(callback){callback(); test.done();}, + ]); +}; + +exports['parallel object'] = function(test){ + var call_order = []; + async.parallel({ + one: function(callback){ + setTimeout(function(){ + call_order.push(1); + callback(null, 1); + }, 25); + }, + two: function(callback){ + setTimeout(function(){ + call_order.push(2); + callback(null, 2); + }, 50); + }, + three: function(callback){ + setTimeout(function(){ + call_order.push(3); + callback(null, 3,3); + }, 15); + } + }, + function(err, results){ + test.equals(err, null); + test.same(call_order, [3,1,2]); + test.same(results, { + one: 1, + two: 2, + three: [3,3] + }); + test.done(); + }); +}; + +exports['series'] = function(test){ + var call_order = []; + async.series([ + function(callback){ + setTimeout(function(){ + call_order.push(1); + callback(null, 1); + }, 25); + }, + function(callback){ + setTimeout(function(){ + call_order.push(2); + callback(null, 2); + }, 50); + }, + function(callback){ + setTimeout(function(){ + call_order.push(3); + callback(null, 3,3); + }, 15); + } + ], + function(err, results){ + test.equals(err, null); + test.same(results, [1,2,[3,3]]); + test.same(call_order, [1,2,3]); + test.done(); + }); +}; + +exports['series empty array'] = function(test){ + async.series([], function(err, results){ + test.equals(err, null); + test.same(results, []); + test.done(); + }); +}; + +exports['series error'] = function(test){ + test.expect(1); + async.series([ + function(callback){ + callback('error', 1); + }, + function(callback){ + test.ok(false, 'should not be called'); + callback('error2', 2); + } + ], + function(err, results){ + test.equals(err, 'error'); + }); + setTimeout(test.done, 100); +}; + +exports['series no callback'] = function(test){ + async.series([ + function(callback){callback();}, + function(callback){callback(); test.done();}, + ]); +}; + +exports['series 
object'] = function(test){ + var call_order = []; + async.series({ + one: function(callback){ + setTimeout(function(){ + call_order.push(1); + callback(null, 1); + }, 25); + }, + two: function(callback){ + setTimeout(function(){ + call_order.push(2); + callback(null, 2); + }, 50); + }, + three: function(callback){ + setTimeout(function(){ + call_order.push(3); + callback(null, 3,3); + }, 15); + } + }, + function(err, results){ + test.equals(err, null); + test.same(results, { + one: 1, + two: 2, + three: [3,3] + }); + test.same(call_order, [1,2,3]); + test.done(); + }); +}; + +exports['iterator'] = function(test){ + var call_order = []; + var iterator = async.iterator([ + function(){call_order.push(1);}, + function(arg1){ + test.equals(arg1, 'arg1'); + call_order.push(2); + }, + function(arg1, arg2){ + test.equals(arg1, 'arg1'); + test.equals(arg2, 'arg2'); + call_order.push(3); + } + ]); + iterator(); + test.same(call_order, [1]); + var iterator2 = iterator(); + test.same(call_order, [1,1]); + var iterator3 = iterator2('arg1'); + test.same(call_order, [1,1,2]); + var iterator4 = iterator3('arg1', 'arg2'); + test.same(call_order, [1,1,2,3]); + test.equals(iterator4, undefined); + test.done(); +}; + +exports['iterator empty array'] = function(test){ + var iterator = async.iterator([]); + test.equals(iterator(), undefined); + test.equals(iterator.next(), undefined); + test.done(); +}; + +exports['iterator.next'] = function(test){ + var call_order = []; + var iterator = async.iterator([ + function(){call_order.push(1);}, + function(arg1){ + test.equals(arg1, 'arg1'); + call_order.push(2); + }, + function(arg1, arg2){ + test.equals(arg1, 'arg1'); + test.equals(arg2, 'arg2'); + call_order.push(3); + } + ]); + var fn = iterator.next(); + var iterator2 = fn('arg1'); + test.same(call_order, [2]); + iterator2('arg1','arg2'); + test.same(call_order, [2,3]); + test.equals(iterator2.next(), undefined); + test.done(); +}; + +exports['forEach'] = function(test){ + var args = []; + async.forEach([1,3,2], function(x, callback){ + setTimeout(function(){ + args.push(x); + callback(); + }, x*25); + }, function(err){ + test.same(args, [1,2,3]); + test.done(); + }); +}; + +exports['forEach empty array'] = function(test){ + test.expect(1); + async.forEach([], function(x, callback){ + test.ok(false, 'iterator should not be called'); + callback(); + }, function(err){ + test.ok(true, 'should call callback'); + }); + setTimeout(test.done, 25); +}; + +exports['forEach error'] = function(test){ + test.expect(1); + async.forEach([1,2,3], function(x, callback){ + callback('error'); + }, function(err){ + test.equals(err, 'error'); + }); + setTimeout(test.done, 50); +}; + +exports['forEachSeries'] = function(test){ + var args = []; + async.forEachSeries([1,3,2], function(x, callback){ + setTimeout(function(){ + args.push(x); + callback(); + }, x*25); + }, function(err){ + test.same(args, [1,3,2]); + test.done(); + }); +}; + +exports['forEachSeries empty array'] = function(test){ + test.expect(1); + async.forEachSeries([], function(x, callback){ + test.ok(false, 'iterator should not be called'); + callback(); + }, function(err){ + test.ok(true, 'should call callback'); + }); + setTimeout(test.done, 25); +}; + +exports['forEachSeries error'] = function(test){ + test.expect(2); + var call_order = []; + async.forEachSeries([1,2,3], function(x, callback){ + call_order.push(x); + callback('error'); + }, function(err){ + test.same(call_order, [1]); + test.equals(err, 'error'); + }); + setTimeout(test.done, 50); +}; + 
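+// The forEach tests above start every iterator at once, so completion order
+// follows the timeouts and args comes back as [1,2,3]; the forEachSeries tests
+// run one iterator at a time, so args keeps the input order [1,3,2].
+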
+exports['map'] = function(test){ + var call_order = []; + async.map([1,3,2], function(x, callback){ + setTimeout(function(){ + call_order.push(x); + callback(null, x*2); + }, x*25); + }, function(err, results){ + test.same(call_order, [1,2,3]); + test.same(results, [2,6,4]); + test.done(); + }); +}; + +exports['map original untouched'] = function(test){ + var a = [1,2,3]; + async.map(a, function(x, callback){ + callback(null, x*2); + }, function(err, results){ + test.same(results, [2,4,6]); + test.same(a, [1,2,3]); + test.done(); + }); +}; + +exports['map error'] = function(test){ + test.expect(1); + async.map([1,2,3], function(x, callback){ + callback('error'); + }, function(err, results){ + test.equals(err, 'error'); + }); + setTimeout(test.done, 50); +}; + +exports['mapSeries'] = function(test){ + var call_order = []; + async.mapSeries([1,3,2], function(x, callback){ + setTimeout(function(){ + call_order.push(x); + callback(null, x*2); + }, x*25); + }, function(err, results){ + test.same(call_order, [1,3,2]); + test.same(results, [2,6,4]); + test.done(); + }); +}; + +exports['mapSeries error'] = function(test){ + test.expect(1); + async.mapSeries([1,2,3], function(x, callback){ + callback('error'); + }, function(err, results){ + test.equals(err, 'error'); + }); + setTimeout(test.done, 50); +}; + +exports['reduce'] = function(test){ + var call_order = []; + async.reduce([1,2,3], 0, function(a, x, callback){ + call_order.push(x); + callback(null, a + x); + }, function(err, result){ + test.equals(result, 6); + test.same(call_order, [1,2,3]); + test.done(); + }); +}; + +exports['reduce async with non-reference memo'] = function(test){ + async.reduce([1,3,2], 0, function(a, x, callback){ + setTimeout(function(){callback(null, a + x)}, Math.random()*100); + }, function(err, result){ + test.equals(result, 6); + test.done(); + }); +}; + +exports['reduce error'] = function(test){ + test.expect(1); + async.reduce([1,2,3], 0, function(a, x, callback){ + callback('error'); + }, function(err, result){ + test.equals(err, 'error'); + }); + setTimeout(test.done, 50); +}; + +exports['inject alias'] = function(test){ + test.equals(async.inject, async.reduce); + test.done(); +}; + +exports['foldl alias'] = function(test){ + test.equals(async.foldl, async.reduce); + test.done(); +}; + +exports['reduceRight'] = function(test){ + var call_order = []; + var a = [1,2,3]; + async.reduceRight(a, 0, function(a, x, callback){ + call_order.push(x); + callback(null, a + x); + }, function(err, result){ + test.equals(result, 6); + test.same(call_order, [3,2,1]); + test.same(a, [1,2,3]); + test.done(); + }); +}; + +exports['foldr alias'] = function(test){ + test.equals(async.foldr, async.reduceRight); + test.done(); +}; + +exports['filter'] = function(test){ + async.filter([3,1,2], function(x, callback){ + setTimeout(function(){callback(x % 2);}, x*25); + }, function(results){ + test.same(results, [3,1]); + test.done(); + }); +}; + +exports['filter original untouched'] = function(test){ + var a = [3,1,2]; + async.filter(a, function(x, callback){ + callback(x % 2); + }, function(results){ + test.same(results, [3,1]); + test.same(a, [3,1,2]); + test.done(); + }); +}; + +exports['filterSeries'] = function(test){ + async.filterSeries([3,1,2], function(x, callback){ + setTimeout(function(){callback(x % 2);}, x*25); + }, function(results){ + test.same(results, [3,1]); + test.done(); + }); +}; + +exports['select alias'] = function(test){ + test.equals(async.select, async.filter); + test.done(); +}; + +exports['selectSeries 
alias'] = function(test){ + test.equals(async.selectSeries, async.filterSeries); + test.done(); +}; + +exports['reject'] = function(test){ + async.reject([3,1,2], function(x, callback){ + setTimeout(function(){callback(x % 2);}, x*25); + }, function(results){ + test.same(results, [2]); + test.done(); + }); +}; + +exports['reject original untouched'] = function(test){ + var a = [3,1,2]; + async.reject(a, function(x, callback){ + callback(x % 2); + }, function(results){ + test.same(results, [2]); + test.same(a, [3,1,2]); + test.done(); + }); +}; + +exports['rejectSeries'] = function(test){ + async.rejectSeries([3,1,2], function(x, callback){ + setTimeout(function(){callback(x % 2);}, x*25); + }, function(results){ + test.same(results, [2]); + test.done(); + }); +}; + +exports['some true'] = function(test){ + async.some([3,1,2], function(x, callback){ + setTimeout(function(){callback(x === 1);}, 0); + }, function(result){ + test.equals(result, true); + test.done(); + }); +}; + +exports['some false'] = function(test){ + async.some([3,1,2], function(x, callback){ + setTimeout(function(){callback(x === 10);}, 0); + }, function(result){ + test.equals(result, false); + test.done(); + }); +}; + +exports['some early return'] = function(test){ + var call_order = []; + async.some([1,2,3], function(x, callback){ + setTimeout(function(){ + call_order.push(x); + callback(x === 1); + }, x*25); + }, function(result){ + call_order.push('callback'); + }); + setTimeout(function(){ + test.same(call_order, [1,'callback',2,3]); + test.done(); + }, 100); +}; + +exports['any alias'] = function(test){ + test.equals(async.any, async.some); + test.done(); +}; + +exports['every true'] = function(test){ + async.every([1,2,3], function(x, callback){ + setTimeout(function(){callback(true);}, 0); + }, function(result){ + test.equals(result, true); + test.done(); + }); +}; + +exports['every false'] = function(test){ + async.every([1,2,3], function(x, callback){ + setTimeout(function(){callback(x % 2);}, 0); + }, function(result){ + test.equals(result, false); + test.done(); + }); +}; + +exports['every early return'] = function(test){ + var call_order = []; + async.every([1,2,3], function(x, callback){ + setTimeout(function(){ + call_order.push(x); + callback(x === 1); + }, x*25); + }, function(result){ + call_order.push('callback'); + }); + setTimeout(function(){ + test.same(call_order, [1,2,'callback',3]); + test.done(); + }, 100); +}; + +exports['all alias'] = function(test){ + test.equals(async.all, async.every); + test.done(); +}; + +exports['detect'] = function(test){ + var call_order = []; + async.detect([3,2,1], function(x, callback){ + setTimeout(function(){ + call_order.push(x); + callback(x == 2); + }, x*25); + }, function(result){ + call_order.push('callback'); + test.equals(result, 2); + }); + setTimeout(function(){ + test.same(call_order, [1,2,'callback',3]); + test.done(); + }, 100); +}; + +exports['detectSeries'] = function(test){ + var call_order = []; + async.detectSeries([3,2,1], function(x, callback){ + setTimeout(function(){ + call_order.push(x); + callback(x == 2); + }, x*25); + }, function(result){ + call_order.push('callback'); + test.equals(result, 2); + }); + setTimeout(function(){ + test.same(call_order, [3,2,'callback']); + test.done(); + }, 200); +}; + +exports['sortBy'] = function(test){ + async.sortBy([{a:1},{a:15},{a:6}], function(x, callback){ + setTimeout(function(){callback(null, x.a);}, 0); + }, function(err, result){ + test.same(result, [{a:1},{a:6},{a:15}]); + test.done(); + }); +}; + 
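The filter, some/every, and detect tests above also pin down a callback convention: the truth test calls back with a single boolean (no error argument), and the final callback receives only the result. A minimal sketch, again assuming the vendored async 0.1.8:

    var async = require('async');

    // filter keeps the elements whose truth test calls back truthy;
    // results are returned in the original input order.
    async.filter([3, 1, 2], function (x, keep) {
      keep(x % 2);
    }, function (odds) {
      console.log(odds); // [3, 1]
    });

    // detect calls back with the first element that passes the test.
    async.detect([3, 2, 1], function (x, found) {
      found(x === 2);
    }, function (result) {
      console.log(result); // 2
    });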
+exports['apply'] = function(test){ + test.expect(6); + var fn = function(){ + test.same(Array.prototype.slice.call(arguments), [1,2,3,4]) + }; + async.apply(fn, 1, 2, 3, 4)(); + async.apply(fn, 1, 2, 3)(4); + async.apply(fn, 1, 2)(3, 4); + async.apply(fn, 1)(2, 3, 4); + async.apply(fn)(1, 2, 3, 4); + test.equals( + async.apply(function(name){return 'hello ' + name}, 'world')(), + 'hello world' + ); + test.done(); +}; + + +// generates tests for console functions such as async.log +var console_fn_tests = function(name){ + + if (typeof console !== 'undefined') { + exports[name] = function(test){ + test.expect(5); + var fn = function(arg1, callback){ + test.equals(arg1, 'one'); + setTimeout(function(){callback(null, 'test');}, 0); + }; + var fn_err = function(arg1, callback){ + test.equals(arg1, 'one'); + setTimeout(function(){callback('error');}, 0); + }; + var _console_fn = console[name]; + var _error = console.error; + console[name] = function(val){ + test.equals(val, 'test'); + test.equals(arguments.length, 1); + console.error = function(val){ + test.equals(val, 'error'); + console[name] = _console_fn; + console.error = _error; + test.done(); + }; + async[name](fn_err, 'one'); + }; + async[name](fn, 'one'); + }; + + exports[name + ' with multiple result params'] = function(test){ + var fn = function(callback){callback(null,'one','two','three');}; + var _console_fn = console[name]; + var called_with = []; + console[name] = function(x){ + called_with.push(x); + }; + async[name](fn); + test.same(called_with, ['one','two','three']); + console[name] = _console_fn; + test.done(); + }; + } + + // browser-only test + exports[name + ' without console.' + name] = function(test){ + if (typeof window !== 'undefined') { + var _console = window.console; + window.console = undefined; + var fn = function(callback){callback(null, 'val');}; + var fn_err = function(callback){callback('error');}; + async[name](fn); + async[name](fn_err); + window.console = _console; + } + test.done(); + }; + +}; + +console_fn_tests('log'); +console_fn_tests('dir'); +/*console_fn_tests('info'); +console_fn_tests('warn'); +console_fn_tests('error');*/ + +exports['nextTick'] = function(test){ + var call_order = []; + async.nextTick(function(){call_order.push('two');}); + call_order.push('one'); + setTimeout(function(){ + test.same(call_order, ['one','two']); + test.done(); + }, 50); +}; + +exports['nextTick in the browser'] = function(test){ + if (typeof process !== 'undefined') { + // skip this test in node + return test.done(); + } + test.expect(1); + + var call_order = []; + async.nextTick(function(){call_order.push('two');}); + + call_order.push('one'); + setTimeout(function(){ + if (typeof process !== 'undefined') { + process.nextTick = _nextTick; + } + test.same(call_order, ['one','two']); + }, 50); + setTimeout(test.done, 100); +}; + +exports['noConflict - node only'] = function(test){ + if (typeof process !== 'undefined') { + // node only test + test.expect(3); + var fs = require('fs'); + var filename = __dirname + '/../lib/async.js'; + fs.readFile(filename, function(err, content){ + if(err) return test.done(); + var Script = process.binding('evals').Script; + + var s = new Script(content, filename); + var s2 = new Script( + content + 'this.async2 = this.async.noConflict();', + filename + ); + + var sandbox1 = {async: 'oldvalue'}; + s.runInNewContext(sandbox1); + test.ok(sandbox1.async); + + var sandbox2 = {async: 'oldvalue'}; + s2.runInNewContext(sandbox2); + test.equals(sandbox2.async, 'oldvalue'); + 
test.ok(sandbox2.async2); + + test.done(); + }); + } + else test.done(); +}; + +exports['concat'] = function(test){ + var call_order = []; + var iterator = function (x, cb) { + setTimeout(function(){ + call_order.push(x); + var r = []; + while (x > 0) { + r.push(x); + x--; + } + cb(null, r); + }, x*25); + }; + async.concat([1,3,2], iterator, function(err, results){ + test.same(results, [1,2,1,3,2,1]); + test.same(call_order, [1,2,3]); + test.ok(!err); + test.done(); + }); +}; + +exports['concat error'] = function(test){ + var iterator = function (x, cb) { + cb(new Error('test error')); + }; + async.concat([1,2,3], iterator, function(err, results){ + test.ok(err); + test.done(); + }); +}; + +exports['concatSeries'] = function(test){ + var call_order = []; + var iterator = function (x, cb) { + setTimeout(function(){ + call_order.push(x); + var r = []; + while (x > 0) { + r.push(x); + x--; + } + cb(null, r); + }, x*25); + }; + async.concatSeries([1,3,2], iterator, function(err, results){ + test.same(results, [1,3,2,1,2,1]); + test.same(call_order, [1,3,2]); + test.ok(!err); + test.done(); + }); +}; + +exports['until'] = function (test) { + var call_order = []; + + var count = 0; + async.until( + function () { + call_order.push(['test', count]); + return (count == 5); + }, + function (cb) { + call_order.push(['iterator', count]); + count++; + cb(); + }, + function (err) { + test.same(call_order, [ + ['test', 0], + ['iterator', 0], ['test', 1], + ['iterator', 1], ['test', 2], + ['iterator', 2], ['test', 3], + ['iterator', 3], ['test', 4], + ['iterator', 4], ['test', 5], + ]); + test.equals(count, 5); + test.done(); + } + ); +}; + +exports['whilst'] = function (test) { + var call_order = []; + + var count = 0; + async.whilst( + function () { + call_order.push(['test', count]); + return (count < 5); + }, + function (cb) { + call_order.push(['iterator', count]); + count++; + cb(); + }, + function (err) { + test.same(call_order, [ + ['test', 0], + ['iterator', 0], ['test', 1], + ['iterator', 1], ['test', 2], + ['iterator', 2], ['test', 3], + ['iterator', 3], ['test', 4], + ['iterator', 4], ['test', 5], + ]); + test.equals(count, 5); + test.done(); + } + ); +}; + +exports['queue'] = function (test) { + var call_order = [], + delays = [40,20,60,20]; + + // worker1: --1-4 + // worker2: -2---3 + // order of completion: 2,1,4,3 + + var q = async.queue(function (task, callback) { + setTimeout(function () { + call_order.push('process ' + task); + callback('error', 'arg'); + }, delays.splice(0,1)[0]); + }, 2); + + q.push(1, function (err, arg) { + test.equal(err, 'error'); + test.equal(arg, 'arg'); + test.equal(q.length(), 1); + call_order.push('callback ' + 1); + }); + q.push(2, function (err, arg) { + test.equal(err, 'error'); + test.equal(arg, 'arg'); + test.equal(q.length(), 2); + call_order.push('callback ' + 2); + }); + q.push(3, function (err, arg) { + test.equal(err, 'error'); + test.equal(arg, 'arg'); + test.equal(q.length(), 0); + call_order.push('callback ' + 3); + }); + q.push(4, function (err, arg) { + test.equal(err, 'error'); + test.equal(arg, 'arg'); + test.equal(q.length(), 0); + call_order.push('callback ' + 4); + }); + test.equal(q.length(), 4); + test.equal(q.concurrency, 2); + + setTimeout(function () { + test.same(call_order, [ + 'process 2', 'callback 2', + 'process 1', 'callback 1', + 'process 4', 'callback 4', + 'process 3', 'callback 3' + ]); + test.equal(q.concurrency, 2); + test.equal(q.length(), 0); + test.done(); + }, 200); +}; + +exports['queue changing concurrency'] = 
function (test) { + var call_order = [], + delays = [40,20,60,20]; + + // worker1: --1-2---3-4 + // order of completion: 1,2,3,4 + + var q = async.queue(function (task, callback) { + setTimeout(function () { + call_order.push('process ' + task); + callback('error', 'arg'); + }, delays.splice(0,1)[0]); + }, 2); + + q.push(1, function (err, arg) { + test.equal(err, 'error'); + test.equal(arg, 'arg'); + test.equal(q.length(), 3); + call_order.push('callback ' + 1); + }); + q.push(2, function (err, arg) { + test.equal(err, 'error'); + test.equal(arg, 'arg'); + test.equal(q.length(), 2); + call_order.push('callback ' + 2); + }); + q.push(3, function (err, arg) { + test.equal(err, 'error'); + test.equal(arg, 'arg'); + test.equal(q.length(), 1); + call_order.push('callback ' + 3); + }); + q.push(4, function (err, arg) { + test.equal(err, 'error'); + test.equal(arg, 'arg'); + test.equal(q.length(), 0); + call_order.push('callback ' + 4); + }); + test.equal(q.length(), 4); + test.equal(q.concurrency, 2); + q.concurrency = 1; + + setTimeout(function () { + test.same(call_order, [ + 'process 1', 'callback 1', + 'process 2', 'callback 2', + 'process 3', 'callback 3', + 'process 4', 'callback 4' + ]); + test.equal(q.concurrency, 1); + test.equal(q.length(), 0); + test.done(); + }, 250); +}; + +exports['queue push without callback'] = function (test) { + var call_order = [], + delays = [40,20,60,20]; + + // worker1: --1-4 + // worker2: -2---3 + // order of completion: 2,1,4,3 + + var q = async.queue(function (task, callback) { + setTimeout(function () { + call_order.push('process ' + task); + callback('error', 'arg'); + }, delays.splice(0,1)[0]); + }, 2); + + q.push(1); + q.push(2); + q.push(3); + q.push(4); + + setTimeout(function () { + test.same(call_order, [ + 'process 2', + 'process 1', + 'process 4', + 'process 3' + ]); + test.done(); + }, 200); +}; + +exports['memoize'] = function (test) { + test.expect(4); + var call_order = []; + + var fn = function (arg1, arg2, callback) { + call_order.push(['fn', arg1, arg2]); + callback(null, arg1 + arg2); + }; + + var fn2 = async.memoize(fn); + fn2(1, 2, function (err, result) { + test.equal(result, 3); + }); + fn2(1, 2, function (err, result) { + test.equal(result, 3); + }); + fn2(2, 2, function (err, result) { + test.equal(result, 4); + }); + + test.same(call_order, [['fn',1,2], ['fn',2,2]]); + test.done(); +}; + +exports['memoize error'] = function (test) { + test.expect(1); + var testerr = new Error('test'); + var fn = function (arg1, arg2, callback) { + callback(testerr, arg1 + arg2); + }; + async.memoize(fn)(1, 2, function (err, result) { + test.equal(err, testerr); + }); + test.done(); +}; + +exports['memoize custom hash function'] = function (test) { + test.expect(2); + var testerr = new Error('test'); + + var fn = function (arg1, arg2, callback) { + callback(testerr, arg1 + arg2); + }; + var fn2 = async.memoize(fn, function () { + return 'custom hash'; + }); + fn2(1, 2, function (err, result) { + test.equal(result, 3); + }); + fn2(2, 2, function (err, result) { + test.equal(result, 3); + }); + test.done(); +}; + +// Issue 10 on github: https://github.com/caolan/async/issues#issue/10 +exports['falsy return values in series'] = function (test) { + function taskFalse(callback) { + process.nextTick(function() { + callback(null, false); + }); + }; + function taskUndefined(callback) { + process.nextTick(function() { + callback(null, undefined); + }); + }; + function taskEmpty(callback) { + process.nextTick(function() { + callback(null); + }); + }; + 
function taskNull(callback) { + process.nextTick(function() { + callback(null, null); + }); + }; + async.series( + [taskFalse, taskUndefined, taskEmpty, taskNull], + function(err, results) { + test.same(results, [false, undefined, undefined, null]); + test.strictEqual(results[0], false); + test.strictEqual(results[1], undefined); + test.strictEqual(results[2], undefined); + test.strictEqual(results[3], null); + test.done(); + } + ); +}; + +// Issue 10 on github: https://github.com/caolan/async/issues#issue/10 +exports['falsy return values in parallel'] = function (test) { + function taskFalse(callback) { + process.nextTick(function() { + callback(null, false); + }); + }; + function taskUndefined(callback) { + process.nextTick(function() { + callback(null, undefined); + }); + }; + function taskEmpty(callback) { + process.nextTick(function() { + callback(null); + }); + }; + function taskNull(callback) { + process.nextTick(function() { + callback(null, null); + }); + }; + async.parallel( + [taskFalse, taskUndefined, taskEmpty, taskNull], + function(err, results) { + test.same(results, [false, undefined, undefined, null]); + test.strictEqual(results[0], false); + test.strictEqual(results[1], undefined); + test.strictEqual(results[2], undefined); + test.strictEqual(results[3], null); + test.done(); + } + ); +}; diff --git a/node_modules/.npm/async/0.1.8/package/test/test.html b/node_modules/.npm/async/0.1.8/package/test/test.html new file mode 100644 index 0000000..2450e2d --- /dev/null +++ b/node_modules/.npm/async/0.1.8/package/test/test.html @@ -0,0 +1,24 @@ + + + Async.js Test Suite + + + + + + + + +

+ + + diff --git a/node_modules/.npm/async/active b/node_modules/.npm/async/active new file mode 120000 index 0000000..07f041c --- /dev/null +++ b/node_modules/.npm/async/active @@ -0,0 +1 @@ +./0.1.8 \ No newline at end of file diff --git a/node_modules/.npm/node-uuid/1.2.0/package/LICENSE.md b/node_modules/.npm/node-uuid/1.2.0/package/LICENSE.md new file mode 100644 index 0000000..bcdddf9 --- /dev/null +++ b/node_modules/.npm/node-uuid/1.2.0/package/LICENSE.md @@ -0,0 +1,3 @@ +Copyright (c) 2010 Robert Kieffer + +Dual licensed under the [MIT](http://en.wikipedia.org/wiki/MIT_License) and [GPL](http://en.wikipedia.org/wiki/GNU_General_Public_License) licenses. diff --git a/node_modules/.npm/node-uuid/1.2.0/package/README.md b/node_modules/.npm/node-uuid/1.2.0/package/README.md new file mode 100644 index 0000000..c006750 --- /dev/null +++ b/node_modules/.npm/node-uuid/1.2.0/package/README.md @@ -0,0 +1,100 @@ +# node-uuid + +Simple, fast generation of RFC4122[RFC4122(v4)](http://www.ietf.org/rfc/rfc4122.txt) UUIDS. It runs in node.js and all major browsers. + +## Installation + + npm install node-uuid + +### In browser + + + +### In node.js + + var uuid = require('node-uuid'); + +## Usage + +### Generate a String UUID + + var id = uuid(); // -> '92329D39-6F5C-4520-ABFC-AAB64544E172' + +### Generate a Binary UUID + + // Simple form - allocates a Buffer/Array for you + var buf = uuid('binary'); + // node.js -> + // browser -> [8, 80, 5, 200, 156, 178, 76, 7, 172, 7, 209, 79, 185, 245, 4, 81] + + // Provide your own Buffer or Array + var buf = new Array(16); + uuid('binary', buf); // -> [8, 80, 5, 200, 156, 178, 76, 7, 172, 7, 209, 79, 185, 245, 4, 81] + var buf = new Buffer(16); + uuid('binary', buf); // -> + + // Provide your own Buffer/Array, plus specify offset + // (e.g. here we fill an array with 3 uuids) + var buf = new Buffer(16 \* 3); + uuid('binary', id, 0); + uuid('binary', id, 16); + uuid('binary', id, 32); + +## Testing + +test/test.js generates performance data (similar to test/benchmark.js). It also verifies the syntax of 100K string UUIDs, and logs the distribution of hex digits found therein. For example: + + - - - Performance Data - - - + uuid(): 1052631 uuids/second + uuid('binary'): 680272 uuids/second + uuid('binary', buffer): 2702702 uuids/second + + - - - Distribution of Hex Digits (% deviation from ideal) - - - + 0 |================================| 187705 (0.11%) + 1 |================================| 187880 (0.2%) + 2 |================================| 186875 (-0.33%) + 3 |================================| 186847 (-0.35%) + 4 |==================================================| 287433 (-0.02%) + 5 |================================| 187910 (0.22%) + 6 |================================| 188172 (0.36%) + 7 |================================| 187350 (-0.08%) + 8 |====================================| 211994 (-0.24%) + 9 |====================================| 212664 (0.08%) + A |=====================================| 213185 (0.32%) + B |=====================================| 212877 (0.18%) + C |================================| 187445 (-0.03%) + D |================================| 186737 (-0.41%) + E |================================| 187155 (-0.18%) + F |================================| 187771 (0.14%) + +Note that the increased values for 4 and 8-B are expected as part of the RFC4122 syntax (and are accounted for in the deviation calculation). 
BTW, if someone wants to do the calculation to determine what a statistically significant deviation would be, I'll gladly add that to the test. + +### In browser + + Open test/test.html + +### In node.js + + > node test/test.js + +node.js users can also run the node-uuid .vs. uuid.js benchmark: + + > node test/benchmark.js + +## Performance + +### In node.js + +node-uuid is designed to be fast. That said, the target platform is node.js, where it is screaming fast. Here's what I get on my 2.66GHz Macbook Pro for the test/benchmark.js script: + + nodeuuid(): 1126126 uuids/second + nodeuuid('binary'): 782472 uuids/second + nodeuuid('binary', buffer): 2688172 uuids/second + uuidjs(): 620347 uuids/second + uuidjs('binary'): 1275510 uuids/second + +The uuidjs() entries are for Nikhil Marathe's [uuidjs module](https://bitbucket.org/nikhilm/uuidjs), and are provided for comparison. uuidjs is a wrapper around the native libuuid library. + +### In browser + +node-uuid performance varies dramatically across browsers. For comprehensive test results, please [checkout the JSPerf tests](http://jsperf.com/node-uuid-performance). diff --git a/node_modules/.npm/node-uuid/1.2.0/package/package.json b/node_modules/.npm/node-uuid/1.2.0/package/package.json new file mode 100644 index 0000000..bf70062 --- /dev/null +++ b/node_modules/.npm/node-uuid/1.2.0/package/package.json @@ -0,0 +1,12 @@ +{ + "name" : "node-uuid", + "description" : "Simple, fast generation of RFC4122(v4) UUIDs.", + "url" : "http://github.com/broofa/node-uuid", + "keywords" : ["uuid", "guid", "rfc4122"], + "author" : "Robert Kieffer ", + "contributors" : [], + "dependencies" : [], + "lib" : ".", + "main" : "./uuid.js", + "version" : "1.2.0" +} diff --git a/node_modules/.npm/node-uuid/1.2.0/package/test/benchmark-native.c b/node_modules/.npm/node-uuid/1.2.0/package/test/benchmark-native.c new file mode 100644 index 0000000..dbfc75f --- /dev/null +++ b/node_modules/.npm/node-uuid/1.2.0/package/test/benchmark-native.c @@ -0,0 +1,34 @@ +/* +Test performance of native C UUID generation + +To Compile: cc -luuid benchmark-native.c -o benchmark-native +*/ + +#include +#include +#include +#include + +int main() { + uuid_t myid; + char buf[36+1]; + int i; + struct timeval t; + double start, finish; + + gettimeofday(&t, NULL); + start = t.tv_sec + t.tv_usec/1e6; + + int n = 2e5; + for (i = 0; i < n; i++) { + uuid_generate(myid); + uuid_unparse(myid, buf); + } + + gettimeofday(&t, NULL); + finish = t.tv_sec + t.tv_usec/1e6; + double dur = finish - start; + + printf("%d uuids/sec", (int)(n/dur)); + return 0; +} diff --git a/node_modules/.npm/node-uuid/1.2.0/package/test/benchmark.js b/node_modules/.npm/node-uuid/1.2.0/package/test/benchmark.js new file mode 100644 index 0000000..2505dc4 --- /dev/null +++ b/node_modules/.npm/node-uuid/1.2.0/package/test/benchmark.js @@ -0,0 +1,27 @@ +var nodeuuid = require('../uuid'), + uuidjs = require('uuid').generate, + N = 5e5; + +function rate(msg, t) { + console.log(msg + ': ' + + (N / (Date.now() - t) * 1e3 | 0) + + ' uuids/second'); +} + +// node-uuid - string form +for (var i = 0, t = Date.now(); i < N; i++) nodeuuid(); +rate('nodeuuid()', t); + +for (var i = 0, t = Date.now(); i < N; i++) nodeuuid('binary'); +rate('nodeuuid(\'binary\')', t); + +var buffer = new nodeuuid.BufferClass(16); +for (var i = 0, t = Date.now(); i < N; i++) nodeuuid('binary', buffer); +rate('nodeuuid(\'binary\', buffer)', t); + +// node-uuid - string form +for (var i = 0, t = Date.now(); i < N; i++) uuidjs(); +rate('uuidjs()', t); + +for 
(var i = 0, t = Date.now(); i < N; i++) uuidjs('binary'); +rate('uuidjs(\'binary\')', t); diff --git a/node_modules/.npm/node-uuid/1.2.0/package/test/test.html b/node_modules/.npm/node-uuid/1.2.0/package/test/test.html new file mode 100644 index 0000000..89e0f2c --- /dev/null +++ b/node_modules/.npm/node-uuid/1.2.0/package/test/test.html @@ -0,0 +1,14 @@ + + + + + + + + + diff --git a/node_modules/.npm/node-uuid/1.2.0/package/test/test.js b/node_modules/.npm/node-uuid/1.2.0/package/test/test.js new file mode 100644 index 0000000..5037566 --- /dev/null +++ b/node_modules/.npm/node-uuid/1.2.0/package/test/test.js @@ -0,0 +1,83 @@ +if (typeof(uuid) == 'undefined') { + uuid = require('../uuid'); +} + +var UUID_FORMAT = /[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89a-fAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}/; +var N = 1e5; + +function log(msg) { + if (typeof(document) != 'undefined') { + document.write('
' + msg + '
'); + } + if (typeof(console) != 'undefined') { + console.log(msg); + } +} + +function rate(msg, t) { + log(msg + ': ' + (N / (Date.now() - t) * 1e3 | 0) + ' uuids/second'); +} + +// Perf tests +log('- - - Performance Data - - -'); +for (var i = 0, t = Date.now(); i < N; i++) uuid(); +rate('uuid()', t); +for (var i = 0, t = Date.now(); i < N; i++) uuid('binary'); +rate('uuid(\'binary\')', t); +var buf = new uuid.BufferClass(16); +for (var i = 0, t = Date.now(); i < N; i++) uuid('binary', buf); +rate('uuid(\'binary\', buffer)', t); + +var counts = {}, max = 0; + +var b = new uuid.BufferClass(16); +for (var i = 0; i < N; i++) { + id = uuid(); + if (!UUID_FORMAT.test(id)) { + throw Error(id + ' is not a valid UUID string'); + } + + if (id != uuid.unparse(uuid.parse(id))) { + throw Error(id + ' does not parse/unparse'); + } + + // Count digits for our randomness check + var digits = id.replace(/-/g, '').split(''); + for (var j = digits.length-1; j >= 0; j--) { + var c = digits[j]; + max = Math.max(max, counts[c] = (counts[c] || 0) + 1); + } +} + +// Get %'age an actual value differs from the ideal value +function divergence(actual, ideal) { + return Math.round(100*100*(actual - ideal)/ideal)/100; +} + +log('
- - - Distribution of Hex Digits (% deviation from ideal) - - -'); + +// Check randomness +for (var i = 0; i < 16; i++) { + var c = i.toString(16); + var bar = '', n = counts[c], p = Math.round(n/max*100|0); + + // 1-3,5-8, and D-F: 1:16 odds over 30 digits + var ideal = N*30/16; + if (i == 4) { + // 4: 1:1 odds on 1 digit, plus 1:16 odds on 30 digits + ideal = N*(1 + 30/16); + } else if (i >= 8 && i <= 11) { + // 8-B: 1:4 odds on 1 digit, plus 1:16 odds on 30 digits + ideal = N*(1/4 + 30/16); + } else { + // Otherwise: 1:16 odds on 30 digits + ideal = N*30/16; + } + var d = divergence(n, ideal); + + // Draw bar using UTF squares (just for grins) + var s = n/max*50 | 0; + while (s--) bar += '='; + + log(c + ' |' + bar + '| ' + counts[c] + ' (' + d + '%)'); +} diff --git a/node_modules/.npm/node-uuid/1.2.0/package/uuid.js b/node_modules/.npm/node-uuid/1.2.0/package/uuid.js new file mode 100644 index 0000000..fdf6c54 --- /dev/null +++ b/node_modules/.npm/node-uuid/1.2.0/package/uuid.js @@ -0,0 +1,80 @@ +(function() { + /* + * Generate a RFC4122(v4) UUID + * + * Documentation at https://github.com/broofa/node-uuid + */ + + // Use node.js Buffer class if available, otherwise use the Array class + var BufferClass = typeof(Buffer) == 'function' ? Buffer : Array; + + // Buffer used for generating string uuids + var _buf = new BufferClass(16); + + // Cache number <-> hex string for octet values + var toString = []; + var toNumber = {}; + for (var i = 0; i < 256; i++) { + toString[i] = (i + 0x100).toString(16).substr(1); + toNumber[toString[i]] = i; + } + + function parse(s) { + var buf = new BufferClass(16); + var i = 0, ton = toNumber; + s.toLowerCase().replace(/[0-9a-f][0-9a-f]/g, function(octet) { + buf[i++] = toNumber[octet]; + }); + return buf; + } + + function unparse(buf) { + var tos = toString, b = buf; + return tos[b[0]] + tos[b[1]] + tos[b[2]] + tos[b[3]] + '-' + + tos[b[4]] + tos[b[5]] + '-' + + tos[b[6]] + tos[b[7]] + '-' + + tos[b[8]] + tos[b[9]] + '-' + + tos[b[10]] + tos[b[11]] + tos[b[12]] + + tos[b[13]] + tos[b[14]] + tos[b[15]]; + } + + var b32 = 0x100000000, ff = 0xff; + function uuid(fmt, buf, offset) { + var b = fmt != 'binary' ? _buf : (buf ? buf : new BufferClass(16)); + var i = buf && offset || 0; + + var r = Math.random()*b32; + b[i++] = r & ff; + b[i++] = r>>>8 & ff; + b[i++] = r>>>16 & ff; + b[i++] = r>>>24 & ff; + r = Math.random()*b32; + b[i++] = r & ff; + b[i++] = r>>>8 & ff; + b[i++] = r>>>16 & 0x0f | 0x40; // See RFC4122 sect. 4.1.3 + b[i++] = r>>>24 & ff; + r = Math.random()*b32; + b[i++] = r & 0x3f | 0x80; // See RFC4122 sect. 4.4 + b[i++] = r>>>8 & ff; + b[i++] = r>>>16 & ff; + b[i++] = r>>>24 & ff; + r = Math.random()*b32; + b[i++] = r & ff; + b[i++] = r>>>8 & ff; + b[i++] = r>>>16 & ff; + b[i++] = r>>>24 & ff; + + return fmt === undefined ? unparse(b) : b; + }; + + uuid.parse = parse; + uuid.unparse = unparse; + uuid.BufferClass = BufferClass; + + if (typeof(module) != 'undefined') { + module.exports = uuid; + } else { + // In browser? 
Set as top-level function + this.uuid = uuid; + } +})(); diff --git a/node_modules/.npm/node-uuid/active b/node_modules/.npm/node-uuid/active new file mode 120000 index 0000000..d395782 --- /dev/null +++ b/node_modules/.npm/node-uuid/active @@ -0,0 +1 @@ +./1.2.0 \ No newline at end of file diff --git a/node_modules/.npm/optparse/1.0.1/package/README.md b/node_modules/.npm/optparse/1.0.1/package/README.md new file mode 100644 index 0000000..d08ccf1 --- /dev/null +++ b/node_modules/.npm/optparse/1.0.1/package/README.md @@ -0,0 +1,161 @@ +optparse-js +=========== + +Optparse-js is a command line option parser for Javascript. It's slightly based on Ruby's implementation optparse but with some differences (different languages has different needs) such as custom parsers. + +All examples in this readme is using [Node.js](http://nodejs.org/). How ever, the library works with all kinds of Javascript implementations. + + +QUICK START +----------- + +The library defines one class, the OptionParser class. The class constructor takes one single argument, a list with a set of rules. Here is a quick example: + + // Import the sys library + var sys = require('sys'); + + // Import the optparse library. + var optparse = require('optparse'); + + // Define an option called ´´help´´. We give it a quick alias named ´´-h´´ + // and a quick help text. + var switches = [ + ['-h', '--help', 'Shows help sections'] + ]; + + // Create a new OptionParser. + var parser = new optparse.OptionParser(switches); + + // Hook the help option. The callback will be executed when the OptionParser + // hits the switch ´´-h´´ or ´´--help´´. Each representatio + parser.on('help', function() { + sys.puts('Help'); + }); + + + +DEFINING RULES +-------------- +The OptionParser constructor takes an Array with rules. Each rule is represented by an array (tuple) of two or three values. A typical rule definition may look like this: + + ['-h', '--help', 'Print this help'] + + +The first value is optional, and represents an alias for the long-named switch (the second value, in this case ´´--help´´). + +The second argument is the actual rule. The rule must start with a double dash followed by a switch name (in this case ´help´). The OptionParser also supports special option arguments. Define an option argument in the rule by adding a named argument after the leading double dash and switch name (E.G '--port-number PORT_NUMBER'). The argument is then parsed to the option handler. To define an optional option argument, just add a braces around argument in the rule (E.G '--port-number [PORT_NUMBER]). The OptionParser also supports filter. More on that in in the section called ´Option Filters´. + +The third argument is an optional rule description. + + +OPTION FILTERS +-------------- +Filters is a neat feature that let you filter option arguments. The OptionParser itself as already a set of built-in common filter's. These are: + +- NUMBER, supports both decimal and hexadecimal numbers. +- DATE, filters arguments that matches YYYY-MM-DD. +- EMAIL, filters arguments that matches my@email.com. + +It's simple to use any of the filter above in your rule-set. 
Here is a quick example how to filter number: + + var rules = [ + ['--first-option NUMBER', 'Takes a number as argument'], + ['--second-option [NUMBER]', 'Takes an optional number as argument'] + ] + +You can add your own set of filter by calling the *parser_instance.filter* method: + + parser.filter('single_char', function(value) { + if(value.length != 1) throw "Filter mismatch."; + return value; + }); + + +OPTION PARSER +------------- +The OptionParser class has the following properties and methods: + +### string banner +An optional usage banner. This text is included when calling ´´toString´´. Default value is: "Usage: [Options]". + + +### string options_title +An optional title for the options list. This text is included when calling ´´toString´´. Default value is: "Available options:". + + +### function on(switch_or_arg_index, callback) +Add's a callback for a switch or an argument (defined by index). Switch hooks MUST be typed witout the leading ´´--´´. This example show how to hook a switch: + + parser.on('help', function(optional_argument) { + // Show help section + }); + +And this example show how to hook an argument (an option without the leading - or --): + + parser.on(0, function(opt) { + puts('The first non-switch option is:' + opt); + }); + +It's also possible to define a default handler. The default handler is called when no rule's are meet. Here is an example how to add a ´default handler´: + + parser.on(function(opt) { + puts('No handler was defined for option:' + opt); + }); + +Use the wildcard handler to build a custom ´´on´´ handler. + + parser.on('*', function(opt, value) { + puts('option=' + opt + ', value=' + value); + }); + +### function filter(name, callback) +Adds a new filter extension to the OptionParser instance. The first argument is the name of the filter (trigger). The second argument is the actual filter See the ´OPTION FILTERS´ section for more info. + +It's possible to override the default filters by passing the value "_DEFAULT" to the ´´name´´ argument. The name of the filter is automatically transformed into +upper case. + + +### function halt([callback]) +Interrupt's further parsing. This function should be called from an ´on´ -callbacks, to cancel the parsing. This can be useful when the program should ignore all other arguments (when displaying help or version information). + +The function also takes an optional callback argument. If the callback argument is specified, a ´halt´ callback will be added (instead of executing the ´halt´ command). + +Here is an example how to add an ´on_halt´ callback: + + parser.halt(function() { + puts('An option callback interupted the parser'); + }); + + +### function parse(arguments) +Start's parsing of arguments. This should be the last thing you do. + + +### function options() +Returns an Array with all defined option rules + + +### function toString() +Returns a string representation of this OptionParser instance (a formatted help section). + + +MORE EXAMPLES +------------- +See examples/nodejs-test.js and examples/browser-test-html for more info how to +use the script. + + +SUGGESTIONS +----------- +All comments in how to improve this library is very welcome. Feel free post suggestions to the [Issue tracker](http://github.com/jfd/optparse-js/issues), or even better, fork the repository to implement your own features. + + +LICENSE +------- +Released under a MIT-style license. 
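Tying the sections above together, here is a minimal end-to-end sketch (the switch names are illustrative only, and only the API documented in this README is used):

    var optparse = require('optparse');

    var switches = [
      ['-h', '--help', 'Shows this help section'],
      ['-p', '--port NUMBER', 'Port number, checked by the built-in NUMBER filter']
    ];

    var parser = new optparse.OptionParser(switches);
    parser.banner = 'Usage: example.js [options]';

    parser.on('help', function () {
      console.log(parser.toString()); // formatted help built from the rules
      parser.halt();                  // ignore any remaining arguments
    });

    // Wildcard handler: fires for switches without a dedicated handler,
    // e.g. --port, receiving the (already filtered) argument value.
    parser.on('*', function (opt, value) {
      console.log('option ' + opt + ' = ' + value);
    });

    // Handler for the first non-switch argument.
    parser.on(0, function (value) {
      console.log('first non-switch argument: ' + value);
    });

    // Skip the node binary and script path before parsing.
    parser.parse(process.argv.slice(2));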
+ + +COPYRIGHT +--------- +Copyright (c) 2009 Johan Dahlberg + diff --git a/node_modules/.npm/optparse/1.0.1/package/TODO b/node_modules/.npm/optparse/1.0.1/package/TODO new file mode 100644 index 0000000..a1b6050 --- /dev/null +++ b/node_modules/.npm/optparse/1.0.1/package/TODO @@ -0,0 +1 @@ +- Support for Argument lists (for switches) \ No newline at end of file diff --git a/node_modules/.npm/optparse/1.0.1/package/examples/browser-test.html b/node_modules/.npm/optparse/1.0.1/package/examples/browser-test.html new file mode 100644 index 0000000..2d8f6d3 --- /dev/null +++ b/node_modules/.npm/optparse/1.0.1/package/examples/browser-test.html @@ -0,0 +1,75 @@ + + + + + optparse.js example + + + + + + + \ No newline at end of file diff --git a/node_modules/.npm/optparse/1.0.1/package/examples/nodejs-test.js b/node_modules/.npm/optparse/1.0.1/package/examples/nodejs-test.js new file mode 100644 index 0000000..7f35ed3 --- /dev/null +++ b/node_modules/.npm/optparse/1.0.1/package/examples/nodejs-test.js @@ -0,0 +1,90 @@ +// Import the optparse script +require.paths.unshift(__dirname); //make local paths accessible + +var optparse = require('lib/optparse'); + +var sys= require('sys'); + +// Define some options +var SWITCHES = [ + ['-i', '--include-file FILE', "Includes a file"], + ['-p', '--print [MESSAGE]', "Prints an optional message on screen"], + ['-d', '--debug', "Enables debug mode"], + ['-H', '--help', "Shows this help section"], + ['--date DATE', "A date. A date is expected E.G. 2009-01-14"], + ['--number NUMBER', "A Number. Supported formats are 123, 123.123, 0xA123"], + ['--other NAME', "No handler defined for this option. Will be handled by the wildcard handler."], +]; + +// Create a new OptionParser with defined switches +var parser = new optparse.OptionParser(SWITCHES), print_summary = true, + first_arg; +parser.banner = 'Usage: nodejs-test.js [options]'; + +// Internal variable to store options. +var options = { + debug: true, + files: [], + number: undefined, + date: undefined +}; + +// Handle the first argument (switches excluded) +parser.on(0, function(value) { + first_arg = value; +}); + +// Handle the --include-file switch +parser.on('include-file', function(value) { + options.files.push(value); +}); + +// Handle the --print switch +parser.on('print', function(value) { + sys.puts('PRINT: ' + (value || 'No message entered')); +}); + +// Handle the --date switch +parser.on('date', function(value) { + options.date = value; +}); + +// Handle the --number switch +parser.on('number', function(value) { + options.number = value; +}); + +// Handle the --debug switch +parser.on('debug', function() { + options.debug = true; +}); + +// Handle the --help switch +parser.on('help', function() { + sys.puts(parser.toString()); + print_summary = false; +}); + +// Set a default handler +parser.on('*', function(opt, value) { + sys.puts('wild handler for ' + opt + ', value=' + value); +}); + +// Parse command line arguments +parser.parse(process.ARGV); + +if(print_summary) { + sys.puts("First non-switch argument is: " + first_arg); + + // Output all files that was included. + sys.puts("No of files to include: " + options.files.length); + for(var i = 0; i < options.files.length; i++) { + sys.puts("File [" + (i + 1) + "]:" + options.files[i]); + } + + // Is debug-mode enabled? 
+ sys.puts("Debug mode is set to: " + options.debug); + + sys.puts("Number value is: " + options.number); + sys.puts("Date value is: " + options.date); +} \ No newline at end of file diff --git a/node_modules/.npm/optparse/1.0.1/package/lib/optparse.js b/node_modules/.npm/optparse/1.0.1/package/lib/optparse.js new file mode 100755 index 0000000..37e3ee8 --- /dev/null +++ b/node_modules/.npm/optparse/1.0.1/package/lib/optparse.js @@ -0,0 +1,309 @@ +// Optparse.js 1.0.2 - Option Parser for Javascript +// +// Copyright (c) 2009 Johan Dahlberg +// +// See README.md for license. +// +var optparse = {}; +try{ optparse = exports } catch(e) {}; // Try to export the lib for node.js +(function(self) { +var VERSION = '1.0.2'; +var LONG_SWITCH_RE = /^--\w/; +var SHORT_SWITCH_RE = /^-\w/; +var NUMBER_RE = /^(0x[A-Fa-f0-9]+)|([0-9]+\.[0-9]+)|(\d+)$/; +var DATE_RE = /^\d{4}-(0[0-9]|1[0,1,2])-([0,1,2][0-9]|3[0,1])$/; +var EMAIL_RE = /^([0-9a-zA-Z]+([_.-]?[0-9a-zA-Z]+)*@[0-9a-zA-Z]+[0-9,a-z,A-Z,.,-]*(.){1}[a-zA-Z]{2,4})+$/; +var EXT_RULE_RE = /(\-\-[\w_-]+)\s+([\w\[\]_-]+)|(\-\-[\w_-]+)/; +var ARG_OPTIONAL_RE = /\[(.+)\]/; + +// The default switch argument filter to use, when argument name doesnt match +// any other names. +var DEFAULT_FILTER = '_DEFAULT'; +var PREDEFINED_FILTERS = {}; + +// The default switch argument filter. Parses the argument as text. +function filter_text(value) { + return value; +} + +// Switch argument filter that expects an integer, HEX or a decimal value. An +// exception is throwed if the criteria is not matched. +// Valid input formats are: 0xFFFFFFF, 12345 and 1234.1234 +function filter_number(value) { + var m = NUMBER_RE(value); + if(m == null) throw OptError('Expected a number representative'); + if(m[1]) { + // The number is in HEX format. Convert into a number, then return it + return parseInt(m[1], 16); + } else { + // The number is in regular- or decimal form. Just run in through + // the float caster. + return parseFloat(m[2] || m[3]); + } +}; + +// Switch argument filter that expects a Date expression. The date string MUST be +// formated as: "yyyy-mm-dd" An exception is throwed if the criteria is not +// matched. An DATE object is returned on success. +function filter_date(value) { + var m = DATE_RE(value); + if(m == null) throw OptError('Expected a date representation in the "yyyy-mm-dd" format.'); + return new Date(parseInt(m[0]), parseInt(m[1]), parseInt(m[2])); +}; + +// Switch argument filter that expects an email address. An exception is throwed +// if the criteria doesn`t match. +function filter_email(value) { + var m = EMAIL_RE(value); + if(m == null) throw OptError('Excpeted an email address.'); + return m[1]; +} + +// Register all predefined filters. This dict is used by each OptionParser +// instance, when parsing arguments. Custom filters can be added to the parser +// instance by calling the "add_filter" -method. +PREDEFINED_FILTERS[DEFAULT_FILTER] = filter_text; +PREDEFINED_FILTERS['TEXT'] = filter_text; +PREDEFINED_FILTERS['NUMBER'] = filter_number; +PREDEFINED_FILTERS['DATE'] = filter_date; +PREDEFINED_FILTERS['EMAIL'] = filter_email; + +// Buildes rules from a switches collection. The switches collection is defined +// when constructing a new OptionParser object. +function build_rules(filters, arr) { + var rules = []; + for(var i=0; i> value means that the switch does +// not take anargument. 
+function build_rule(filters, short, expr, desc) { + var optional, filter; + var m = expr.match(EXT_RULE_RE); + if(m == null) throw OptError('The switch is not well-formed.'); + var long = m[1] || m[3]; + if(m[2] != undefined) { + // A switch argument is expected. Check if the argument is optional, + // then find a filter that suites. + var optional_match = ARG_OPTIONAL_RE(m[2]); + var filter_name = optional_match === null ? m[2] : optional_match[1]; + optional = optional_match !== null; + filter = filters[filter_name]; + if(filter === undefined) filter = filters[DEFAULT_FILTER]; + } + return { + name: long.substr(2), + short: short, + long: long, + decl: expr, + desc: desc, + optional_arg: optional, + filter: filter + } +} + +// Loop's trough all elements of an array and check if there is valid +// options expression within. An valid option is a token that starts +// double dashes. E.G. --my_option +function contains_expr(arr) { + if(!arr || !arr.length) return false; + var l = arr.length; + while(l-- > 0) if(LONG_SWITCH_RE(arr[l])) return true; + return false; +} + +// Extends destination object with members of source object +function extend(dest, src) { + var result = dest; + for(var n in src) { + result[n] = src[n]; + } + return result; +} + +// Appends spaces to match specified number of chars +function spaces(arg1, arg2) { + var l, builder = []; + if(arg1.constructor === Number) { + l = arg1; + } else { + if(arg1.length == arg2) return arg1; + l = arg2 - arg1.length; + builder.push(arg1); + } + while(l-- > 0) builder.push(' '); + return builder.join(''); +} + +// Create a new Parser object that can be used to parse command line arguments. +// +// +function Parser(rules) { + return new OptionParser(rules); +} + +// Creates an error object with specified error message. +function OptError(msg) { + return new function() { + this.msg = msg; + this.toString = function() { + return this.msg; + } + } +} + +function OptionParser(rules) { + this.banner = 'Usage: [Options]'; + this.options_title = 'Available options:' + this._rules = rules; + this._halt = false; + this.filters = extend({}, PREDEFINED_FILTERS); + this.on_args = {}; + this.on_switches = {}; + this.on_halt = function() {}; + this.default_handler = function() {}; +} + +OptionParser.prototype = { + + // Adds args and switchs handler. + on: function(value, fn) { + if(value.constructor === Function ) { + this.default_handler = value; + } else if(value.constructor === Number) { + this.on_args[value] = fn; + } else { + this.on_switches[value] = fn; + } + }, + + // Adds a custom filter to the parser. It's possible to override the + // default filter by passing the value "_DEFAULT" to the ´´name´´ + // argument. The name of the filter is automatically transformed into + // upper case. + filter: function(name, fn) { + this.filters[name.toUpperCase()] = fn; + }, + + // Parses specified args. Returns remaining arguments. + parse: function(args) { + var result = [], callback; + var rules = build_rules(this.filters, this._rules); + var tokens = args.concat([]); + while((token = tokens.shift()) && this._halt == false) { + if(LONG_SWITCH_RE(token) || SHORT_SWITCH_RE(token)) { + var arg = undefined; + // The token is a long or a short switch. Get the corresponding + // rule, filter and handle it. Pass the switch to the default + // handler if no rule matched. 
+ for(var i = 0; i < rules.length; i++) { + var rule = rules[i]; + if(rule.long == token || rule.short == token) { + if(rule.filter !== undefined) { + arg = tokens.shift(); + if(!LONG_SWITCH_RE(arg) && !SHORT_SWITCH_RE(arg)) { + try { + arg = rule.filter(arg); + } catch(e) { + throw OptError(token + ': ' + e.toString()); + } + } else if(rule.optional_arg) { + tokens.unshift(arg); + } else { + throw OptError('Expected switch argument.'); + } + } + callback = this.on_switches[rule.name]; + if (!callback) callback = this.on_switches['*']; + if(callback) callback.apply(this, [rule.name, arg]); + break; + } + } + if(i == rules.length) this.default_handler.apply(this, [token]); + } else { + // Did not match long or short switch. Parse the token as a + // normal argument. + callback = this.on_args[result.length]; + result.push(token); + if(callback) callback.apply(this, [token]); + } + } + return this._halt ? this.on_halt.apply(this, []) : result; + }, + + // Returns an Array with all defined option rules + options: function() { + return build_rules(this.filters, this._rules); + }, + + // Add an on_halt callback if argument ´´fn´´ is specified. on_switch handlers can + // call instance.halt to abort the argument parsing. This can be useful when + // displaying help or version information. + halt: function(fn) { + this._halt = fn === undefined + if(fn) this.on_halt = fn; + }, + + // Returns a string representation of this OptionParser instance. + toString: function() { + var builder = [this.banner, '', this.options_title], + shorts = false, longest = 0, rule; + var rules = build_rules(this.filters, this._rules); + for(var i = 0; i < rules.length; i++) { + rule = rules[i]; + // Quick-analyze the options. + if(rule.short) shorts = true; + if(rule.decl.length > longest) longest = rule.decl.length; + } + for(var i = 0; i < rules.length; i++) { + var text; + rule = rules[i]; + if(shorts) { + if(rule.short) text = spaces(2) + rule.short + ', '; + else text = spaces(6); + } + text += spaces(rule.decl, longest) + spaces(3); + text += rule.desc; + builder.push(text); + } + return builder.join('\n'); + } +} + +self.VERSION = VERSION; +self.OptionParser = OptionParser; + +})(optparse); \ No newline at end of file diff --git a/node_modules/.npm/optparse/1.0.1/package/package.json b/node_modules/.npm/optparse/1.0.1/package/package.json new file mode 100644 index 0000000..41ec5ea --- /dev/null +++ b/node_modules/.npm/optparse/1.0.1/package/package.json @@ -0,0 +1,8 @@ +{ + "name": "optparse", + "author": "Johan Dahlberg", + "description": "Command-line option parser", + "keywords": ["option", "parser", "command-line", "cli", "terminal"], + "version": "1.0.1", + "main": "./lib/optparse" +} diff --git a/node_modules/.npm/optparse/1.0.1/package/seed.yml b/node_modules/.npm/optparse/1.0.1/package/seed.yml new file mode 100644 index 0000000..fd5ac11 --- /dev/null +++ b/node_modules/.npm/optparse/1.0.1/package/seed.yml @@ -0,0 +1,5 @@ +--- + name: optparse + description: Command-line option parser + tags: option parser command-line cli terminal + version: 1.0.1 diff --git a/node_modules/.npm/optparse/active b/node_modules/.npm/optparse/active new file mode 120000 index 0000000..5486dc8 --- /dev/null +++ b/node_modules/.npm/optparse/active @@ -0,0 +1 @@ +./1.0.1 \ No newline at end of file diff --git a/node_modules/.npm/sax/0.2.3/dependents/xml2js@0.1.9 b/node_modules/.npm/sax/0.2.3/dependents/xml2js@0.1.9 new file mode 120000 index 0000000..22b87b0 --- /dev/null +++ 
b/node_modules/.npm/sax/0.2.3/dependents/xml2js@0.1.9 @@ -0,0 +1 @@ +./../../../xml2js/0.1.9 \ No newline at end of file diff --git a/node_modules/.npm/sax/0.2.3/package/LICENSE b/node_modules/.npm/sax/0.2.3/package/LICENSE new file mode 100644 index 0000000..0681b11 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2010 Isaac Z. Schlueter + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/.npm/sax/0.2.3/package/README.md b/node_modules/.npm/sax/0.2.3/package/README.md new file mode 100644 index 0000000..81301dd --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/README.md @@ -0,0 +1,171 @@ +# sax js + +A sax-style parser for XML and HTML. + +Designed with [node](http://nodejs.org/) in mind, but should work fine in the +browser or other CommonJS implementations. + +## What This Is + +* A very simple tool to parse through an XML string. +* A stepping stone to a streaming HTML parser. +* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML docs. + +## What This Is (probably) Not + +* An HTML Parser - That's the goal, but this isn't it. It's just XML for now. +* A DOM Builder - You can use it to build an object model out of XML, but it doesn't + do that out of the box. +* XSLT - No DOM, no querying. +* 100% Compliant with (some other SAX implementation) - Most SAX implementations are + in Java and do a lot more than this does. +* An XML Validator - It does a little validation when in strict mode, but not much. +* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic masochism. +* A DTD-aware Thing - Fetching DTDs is a much bigger job. + +## Regarding `Hello, world!').close(); + + // stream usage + // takes the same options as the parser + var saxStream = require("sax").createStream(strict, options) + saxStream.on("error", function (e) { + // unhandled errors will throw, since this is a proper node + // event emitter. + console.error("error!", e) + // clear the error + this._parser.error = null + this._parser.resume() + }) + saxStream.on("opentag", function (node) { + // same object as above + }) + // pipe is supported, and it's readable/writable + // same chunks coming in also go out. + fs.createReadStream("file.xml") + .pipe(saxStream) + .pipe(fs.createReadStream("file-copy.xml")) + + + +## Arguments + +Pass the following arguments to the parser function. All are optional. + +`strict` - Boolean. Whether or not to be a jerk. Default: `false`. + +`opt` - Object bag of settings regarding string formatting. 
All default to `false`. +Settings supported: + +* `trim` - Boolean. Whether or not to trim text and comment nodes. +* `normalize` - Boolean. If true, then turn any whitespace into a single space. +* `lowercasetags` - Boolean. If true, then lowercase tags in loose mode, rather + than uppercasing them. + +## Methods + +`write` - Write bytes onto the stream. You don't have to do this all at once. You +can keep writing as much as you want. + +`close` - Close the stream. Once closed, no more data may be written until it is +done processing the buffer, which is signaled by the `end` event. + +`resume` - To gracefully handle errors, assign a listener to the `error` event. Then, +when the error is taken care of, you can call `resume` to continue parsing. Otherwise, +the parser will not continue while in an error state. + +## Members + +At all times, the parser object will have the following members: + +`line`, `column`, `position` - Indications of the position in the XML document where +the parser currently is looking. + +`closed` - Boolean indicating whether or not the parser can be written to. If it's +`true`, then wait for the `ready` event to write again. + +`strict` - Boolean indicating whether or not the parser is a jerk. + +`opt` - Any options passed into the constructor. + +And a bunch of other stuff that you probably shouldn't touch. + +## Events + +All events emit with a single argument. To listen to an event, assign a function to +`on`. Functions get executed in the this-context of the parser object. +The list of supported events are also in the exported `EVENTS` array. + +When using the stream interface, assign handlers using the EventEmitter +`on` function in the normal fashion. + +`error` - Indication that something bad happened. The error will be hanging out on +`parser.error`, and must be deleted before parsing can continue. By listening to +this event, you can keep an eye on that kind of stuff. Note: this happens *much* +more in strict mode. Argument: instance of `Error`. + +`text` - Text node. Argument: string of text. + +`doctype` - The ``. Argument: object with +`name` and `body` members. Attributes are not parsed, as processing instructions +have implementation dependent semantics. + +`sgmldeclaration` - Random SGML declarations. Stuff like `` would trigger +this kind of event. This is a weird thing to support, so it might go away at some +point. SAX isn't intended to be used to parse SGML, after all. + +`opentag` - An opening tag. Argument: object with `name` and `attributes`. In +non-strict mode, tag names are uppercased. + +`closetag` - A closing tag. In loose mode, tags are auto-closed if their parent +closes. In strict mode, well-formedness is enforced. Note that self-closing tags +will have `closeTag` emitted immediately after `openTag`. Argument: tag name. + +`attribute` - An attribute node. Argument: object with `name` and `value`. + +`comment` - A comment node. Argument: the string of the comment. 
+ +`opencdata` - The opening tag of a ``) of a ` + something blerm a bit down here diff --git a/node_modules/.npm/sax/0.2.3/package/examples/pretty-print.js b/node_modules/.npm/sax/0.2.3/package/examples/pretty-print.js new file mode 100644 index 0000000..0a40ef0 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/examples/pretty-print.js @@ -0,0 +1,72 @@ +var printer = require("../lib/sax").parser(false, {lowercasetags:true, trim:true}), + sys = require("sys"), + fs = require("fs"); + +function entity (str) { + return str.replace('"', '"'); +} + +printer.tabstop = 2; +printer.level = 0; +printer.indent = function () { + sys.print("\n"); + for (var i = this.level; i > 0; i --) { + for (var j = this.tabstop; j > 0; j --) { + sys.print(" "); + } + } +} +printer.onopentag = function (tag) { + this.indent(); + this.level ++; + sys.print("<"+tag.name); + for (var i in tag.attributes) { + sys.print(" "+i+"=\""+entity(tag.attributes[i])+"\""); + } + sys.print(">"); +} +printer.ontext = printer.ondoctype = function (text) { + this.indent(); + sys.print(text); +} +printer.onclosetag = function (tag) { + this.level --; + this.indent(); + sys.print(""); +} +printer.oncdata = function (data) { + this.indent(); + sys.print(""); +} +printer.oncomment = function (comment) { + this.indent(); + sys.print(""); +} +printer.onerror = function (error) { + sys.debug(error); + throw error; +} + +if (!process.argv[2]) { + throw new Error("Please provide an xml file to prettify\n"+ + "TODO: read from stdin or take a file"); +} +var xmlfile = require("path").join(process.cwd(), process.argv[2]); +fs.open(xmlfile, "r", 0666, function (er, fd) { + if (er) throw er; + (function R () { + fs.read(fd, 1024, null, "utf8", function (er, data, bytesRead) { + if (er) throw er; + if (data) { + printer.write(data); + R(); + } else { + fs.close(fd); + printer.close(); + } + }); + })(); +}); + + + diff --git a/node_modules/.npm/sax/0.2.3/package/examples/strict.dtd b/node_modules/.npm/sax/0.2.3/package/examples/strict.dtd new file mode 100644 index 0000000..b274559 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/examples/strict.dtd @@ -0,0 +1,870 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +%HTMLlat1; + + +%HTMLsymbol; + + +%HTMLspecial; + + + + + + + + + + + + + +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/node_modules/.npm/sax/0.2.3/package/examples/switch-bench.js b/node_modules/.npm/sax/0.2.3/package/examples/switch-bench.js new file mode 100755 index 0000000..4d3cf14 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/examples/switch-bench.js @@ -0,0 +1,45 @@ +#!/usr/local/bin/node-bench + +var Promise = require("events").Promise; + +var xml = require("posix").cat("test.xml").wait(), + path = require("path"), + sax = require("../lib/sax"), + saxT = require("../lib/sax-trampoline"), + + parser = sax.parser(false, {trim:true}), + parserT = saxT.parser(false, {trim:true}), + + sys = 
require("sys"); + + +var count = exports.stepsPerLap = 500, + l = xml.length, + runs = 0; +exports.countPerLap = 1000; +exports.compare = { + "switch" : function () { + // sys.debug("switch runs: "+runs++); + // for (var x = 0; x < l; x += 1000) { + // parser.write(xml.substr(x, 1000)) + // } + // for (var i = 0; i < count; i ++) { + parser.write(xml); + parser.close(); + // } + // done(); + }, + trampoline : function () { + // sys.debug("trampoline runs: "+runs++); + // for (var x = 0; x < l; x += 1000) { + // parserT.write(xml.substr(x, 1000)) + // } + // for (var i = 0; i < count; i ++) { + parserT.write(xml); + parserT.close(); + // } + // done(); + }, +}; + +sys.debug("rock and roll..."); \ No newline at end of file diff --git a/node_modules/.npm/sax/0.2.3/package/examples/test.html b/node_modules/.npm/sax/0.2.3/package/examples/test.html new file mode 100644 index 0000000..61f8f1a --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/examples/test.html @@ -0,0 +1,15 @@ + + + + + testing the parser + + + +

hello + + + + diff --git a/node_modules/.npm/sax/0.2.3/package/examples/test.xml b/node_modules/.npm/sax/0.2.3/package/examples/test.xml new file mode 100644 index 0000000..801292d --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/examples/test.xml @@ -0,0 +1,1254 @@ + + +]> + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. 
+  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. 
+  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + + Some Text + + + + + + + are ok in here. ]]> + + Pre-Text & Inlined text Post-text. +  + + \ No newline at end of file diff --git a/node_modules/.npm/sax/0.2.3/package/lib/sax.js b/node_modules/.npm/sax/0.2.3/package/lib/sax.js new file mode 100644 index 0000000..67b434d --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/lib/sax.js @@ -0,0 +1,782 @@ +// wrapper for non-node envs +;(function (sax) { + +sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } +sax.SAXParser = SAXParser +sax.SAXStream = SAXStream +sax.createStream = createStream + +// When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. +// When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), +// since that's the earliest that a buffer overrun could occur. This way, checks are +// as rare as required, but as often as necessary to ensure never crossing this bound. +// Furthermore, buffers are only tested at most once per write(), so passing a very +// large string into write() might have undesirable effects, but this is manageable by +// the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme +// edge case, result in creating at most one complete copy of the string passed in. +// Set to Infinity to have unlimited buffers. 
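+// Illustrative usage sketch (not part of the library itself): a caller that
+// expects unusually large text or CDATA nodes can raise the limit before
+// constructing a parser, e.g.
+//   var sax = require("sax")
+//   sax.MAX_BUFFER_LENGTH = 1024 * 1024   // allow ~1MB of buffered node data
+//   var parser = sax.parser(true)
+// With the default limit, an oversized textNode or cdata buffer is flushed
+// early, and any other oversized buffer raises a "Max buffer length exceeded"
+// error (see checkBufferLength below).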
+sax.MAX_BUFFER_LENGTH = 64 * 1024 + +var buffers = [ + "comment", "sgmlDecl", "textNode", "tagName", "doctype", + "procInstName", "procInstBody", "entity", "attribName", + "attribValue", "cdata" +] + +function SAXParser (strict, opt) { + clearBuffers(this) + this.q = this.c = "" + this.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + this.opt = opt || {} + this.tagCase = this.opt.lowercasetags ? "toLowerCase" : "toUpperCase" + this.tags = [] + this.closed = this.closedRoot = this.sawRoot = false + this.tag = this.error = null + this.strict = !!strict + this.state = S.BEGIN + this.ENTITIES = Object.create(sax.ENTITIES) + + // mostly just for error reporting + this.position = this.line = this.column = 0 + emit(this, "onready") +} + +function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + , maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i ++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case "textNode": + closeText(parser) + break + + case "cdata": + emitNode(parser, "oncdata", parser.cdata) + parser.cdata = "" + break + + default: + error(parser, "Max buffer length exceeded: "+buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. + parser.bufferCheckPosition = (sax.MAX_BUFFER_LENGTH - maxActual) + + parser.position +} + +function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i ++) { + parser[buffers[i]] = "" + } +} + +SAXParser.prototype = { + end : function () { end(this) }, + write : write, + resume : function () { this.error = null; return this }, + close : function () { return this.write(null) }, +} + +try { + var Stream = require("stream").Stream +} catch (ex) { + var Stream = function () {} +} + + +var streamWraps = + [ "opentag" + , "closetag" + , "text" + , "attribute" + , "error" + , "doctype" + , "processinginstruction" + , "sgmldeclaration" + , "comment" + , "opencdata" + , "cdata" + , "closecdata" + , "ready" + ] + +function createStream (strict, opt) { + return new SAXStream(strict, opt) +} + +function SAXStream (strict, opt) { + Stream.apply(me) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + + var me = this + + this._parser.onend = function () { + me.emit("end") + } + + this._parser.onerror = function (er) { + me.emit("error", er) + } + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, "on" + ev, { + get: function () { return me._parser["on" + ev] }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + return me._parser["on"+ev] = h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) +} + +SAXStream.prototype = Object.create(Stream.prototype, + { constructor: { value: SAXStream } }) + +SAXStream.prototype.write = function (data) { + this._parser.write(data.toString()) + this.emit("data", data) + return true +} + +SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) this._parser.write(chunk.toString()) + this._parser.end() + return true +} + +SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser["on"+ev] && streamWraps.indexOf(ev) !== -1) { + me._parser["on"+ev] = function () { + var args = arguments.length === 1 ? 
[arguments[0]] + : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) +} + + + +// character classes and tokens +var whitespace = "\r\n\t " + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + , number = "0124356789" + , letter = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + // (Letter | "_" | ":") + , nameStart = letter+"_:" + , nameBody = nameStart+number+"-." + , quote = "'\"" + , entity = number+letter+"#" + , CDATA = "[CDATA[" + , DOCTYPE = "DOCTYPE" + +function is (charclass, c) { return charclass.indexOf(c) !== -1 } +function not (charclass, c) { return !is(charclass, c) } + +var S = 0 +sax.STATE = +{ BEGIN : S++ +, TEXT : S++ // general stuff +, TEXT_ENTITY : S++ // & and such. +, OPEN_WAKA : S++ // < +, SGML_DECL : S++ // +} + +sax.ENTITIES = +{ "apos" : "'" +, "quot" : '"' +, "amp" : "&" +, "gt" : ">" +, "lt" : "<" +} + +for (var S in sax.STATE) sax.STATE[sax.STATE[S]] = S + +// shorthand +S = sax.STATE +sax.EVENTS = [ // for discoverability. + "text", "processinginstruction", "sgmldeclaration", + "doctype", "comment", "attribute", "opentag", "closetag", + "opencdata", "cdata", "closecdata", "error", "end", "ready" ] + +function emit (parser, event, data) { + parser[event] && parser[event](data) +} + +function emitNode (parser, nodeType, data) { + if (parser.textNode) closeText(parser) + emit(parser, nodeType, data) +} + +function closeText (parser) { + parser.textNode = textopts(parser.opt, parser.textNode) + if (parser.textNode) emit(parser, "ontext", parser.textNode) + parser.textNode = "" +} + +function textopts (opt, text) { + if (opt.trim) text = text.trim() + if (opt.normalize) text = text.replace(/\s+/g, " ") + return text +} + +function error (parser, er) { + closeText(parser) + er += "\nLine: "+parser.line+ + "\nColumn: "+parser.column+ + "\nChar: "+parser.c + er = new Error(er) + parser.error = er + emit(parser, "onerror", er) + return parser +} + +function end (parser) { + if (parser.state !== S.TEXT) error(parser, "Unexpected end") + closeText(parser) + parser.c = "" + parser.closed = true + emit(parser, "onend") + SAXParser.call(parser, parser.strict, parser.opt) + return parser +} + +function strictFail (parser, message) { + if (parser.strict) error(parser, message) +} + +function newTag (parser) { + if (!parser.strict) parser.tagName = parser.tagName[parser.tagCase]() + parser.tag = { name : parser.tagName, attributes : {} } +} + +function openTag (parser, selfClosing) { + parser.sawRoot = true + parser.tags.push(parser.tag) + emitNode(parser, "onopentag", parser.tag) + if (!selfClosing) { + parser.tag = null + parser.tagName = "" + parser.state = S.TEXT + } + parser.attribName = parser.attribValue = "" +} + +function closeTag (parser) { + if (!parser.tagName) { + strictFail(parser, "Weird empty close tag.") + parser.textNode += "" + parser.state = S.TEXT + return + } + // first make sure that the closing tag actually exists. + // will close everything, otherwise. + var t = parser.tags.length + var tagName = parser.tagName + if (!parser.strict) tagName = tagName[parser.tagCase]() + var closeTo = tagName + while (t --) { + var close = parser.tags[t] + if (close.name !== closeTo) { + // fail the first time in strict mode + strictFail(parser, "Unexpected close tag") + } else break + } + + // didn't find it. we already failed for strict, so just abort. 
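+  // (e.g. "<a><b></x>" in loose mode: "</x>" matches no open tag, so it is
+  //  re-emitted as literal text rather than closing <b> or <a>)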
+ if (t < 0) { + strictFail(parser, "Unmatched closing tag: "+parser.tagName) + parser.textNode += "" + parser.state = S.TEXT + return + } + parser.tagName = tagName + var s = parser.tags.length + while (s --> t) { + parser.tag = parser.tags.pop() + parser.tagName = parser.tag.name + emitNode(parser, "onclosetag", parser.tagName) + } + if (t === 0) parser.closedRoot = true + parser.tagName = parser.attribValue = parser.attribName = "" + parser.tag = null + parser.state = S.TEXT +} + +function parseEntity (parser) { + var entity = parser.entity.toLowerCase(), num, numStr = "" + if (parser.ENTITIES[entity]) return parser.ENTITIES[entity] + if (entity.charAt(0) === "#") { + if (entity.charAt(1) === "x") { + entity = entity.slice(2) + num = parseInt(entity, 16), numStr = num.toString(16) + } else { + entity = entity.slice(1) + num = parseInt(entity, 10), numStr = num.toString(10) + } + } + if (numStr.toLowerCase() !== entity) { + strictFail(parser, "Invalid character entity") + return "&"+parser.entity + ";" + } + return String.fromCharCode(num) +} + +function write (chunk) { + var parser = this + if (this.error) throw this.error + if (parser.closed) return error(parser, + "Cannot write after close. Assign an onready handler.") + if (chunk === null) return end(parser) + var i = 0, c = "" + while (parser.c = c = chunk.charAt(i++)) { + parser.position ++ + if (c === "\n") { + parser.line ++ + parser.column = 0 + } else parser.column ++ + switch (parser.state) { + + case S.BEGIN: + if (c === "<") parser.state = S.OPEN_WAKA + else if (not(whitespace,c)) { + // have to process this as a text node. + // weird, but happens. + strictFail(parser, "Non-whitespace before first tag.") + parser.textNode = c + state = S.TEXT + } + continue + + case S.TEXT: + if (parser.sawRoot && !parser.closedRoot) { + var starti = i-1 + while (c && c!=="<" && c!=="&") { + c = chunk.charAt(i++) + if (c) { + parser.position ++ + if (c === "\n") { + parser.line ++ + parser.column = 0 + } else parser.column ++ + } + } + parser.textNode += chunk.substring(starti, i-1) + } + if (c === "<") parser.state = S.OPEN_WAKA + else { + if (not(whitespace, c) && (!parser.sawRoot || parser.closedRoot)) + strictFail("Text data outside of root node.") + if (c === "&") parser.state = S.TEXT_ENTITY + else parser.textNode += c + } + continue + + case S.OPEN_WAKA: + // either a /, ?, !, or text is coming next. + if (c === "!") { + parser.state = S.SGML_DECL + parser.sgmlDecl = "" + } else if (is(whitespace, c)) { + // wait for it... 
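+        // (whitespace right after "<" is tolerated: the parser stays in this
+        // state and lets the next non-whitespace character decide what the
+        // "<" starts)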
+ } else if (is(nameStart,c)) { + parser.state = S.OPEN_TAG + parser.tagName = c + } else if (c === "/") { + parser.state = S.CLOSE_TAG + parser.tagName = "" + } else if (c === "?") { + parser.state = S.PROC_INST + parser.procInstName = parser.procInstBody = "" + } else { + strictFail(parser, "Unencoded <") + parser.textNode += "<" + c + parser.state = S.TEXT + } + continue + + case S.SGML_DECL: + if ((parser.sgmlDecl+c).toUpperCase() === CDATA) { + emitNode(parser, "onopencdata") + parser.state = S.CDATA + parser.sgmlDecl = "" + parser.cdata = "" + } else if (parser.sgmlDecl+c === "--") { + parser.state = S.COMMENT + parser.comment = "" + parser.sgmlDecl = "" + } else if ((parser.sgmlDecl+c).toUpperCase() === DOCTYPE) { + parser.state = S.DOCTYPE + if (parser.doctype || parser.sawRoot) strictFail(parser, + "Inappropriately located doctype declaration") + parser.doctype = "" + parser.sgmlDecl = "" + } else if (c === ">") { + emitNode(parser, "onsgmldeclaration", parser.sgmlDecl) + parser.sgmlDecl = "" + parser.state = S.TEXT + } else if (is(quote, c)) { + parser.state = S.SGML_DECL_QUOTED + parser.sgmlDecl += c + } else parser.sgmlDecl += c + continue + + case S.SGML_DECL_QUOTED: + if (c === parser.q) { + parser.state = S.SGML_DECL + parser.q = "" + } + parser.sgmlDecl += c + continue + + case S.DOCTYPE: + if (c === ">") { + parser.state = S.TEXT + emitNode(parser, "ondoctype", parser.doctype) + parser.doctype = true // just remember that we saw it. + } else { + parser.doctype += c + if (c === "[") parser.state = S.DOCTYPE_DTD + else if (is(quote, c)) { + parser.state = S.DOCTYPE_QUOTED + parser.q = c + } + } + continue + + case S.DOCTYPE_QUOTED: + parser.doctype += c + if (c === parser.q) { + parser.q = "" + parser.state = S.DOCTYPE + } + continue + + case S.DOCTYPE_DTD: + parser.doctype += c + if (c === "]") parser.state = S.DOCTYPE + else if (is(quote,c)) { + parser.state = S.DOCTYPE_DTD_QUOTED + parser.q = c + } + continue + + case S.DOCTYPE_DTD_QUOTED: + parser.doctype += c + if (c === parser.q) { + parser.state = S.DOCTYPE_DTD + parser.q = "" + } + continue + + case S.COMMENT: + if (c === "-") parser.state = S.COMMENT_ENDING + else parser.comment += c + continue + + case S.COMMENT_ENDING: + if (c === "-") { + parser.state = S.COMMENT_ENDED + parser.comment = textopts(parser.opt, parser.comment) + if (parser.comment) emitNode(parser, "oncomment", parser.comment) + parser.comment = "" + } else { + parser.comment += "-" + c + parser.state = S.COMMENT + } + continue + + case S.COMMENT_ENDED: + if (c !== ">") { + strictFail(parser, "Malformed comment") + // allow in non-strict mode, + // which is a comment of " blah -- bloo " + parser.comment += "--" + c + parser.state = S.COMMENT + } else parser.state = S.TEXT + continue + + case S.CDATA: + if (c === "]") parser.state = S.CDATA_ENDING + else parser.cdata += c + continue + + case S.CDATA_ENDING: + if (c === "]") parser.state = S.CDATA_ENDING_2 + else { + parser.cdata += "]" + c + parser.state = S.CDATA + } + continue + + case S.CDATA_ENDING_2: + if (c === ">") { + if (parser.cdata) emitNode(parser, "oncdata", parser.cdata) + emitNode(parser, "onclosecdata") + parser.cdata = "" + parser.state = S.TEXT + } else if (c === "]") { + parser.cdata += "]" + } else { + parser.cdata += "]]" + c + parser.state = S.CDATA + } + continue + + case S.PROC_INST: + if (c === "?") parser.state = S.PROC_INST_ENDING + else if (is(whitespace, c)) parser.state = S.PROC_INST_BODY + else parser.procInstName += c + continue + + case S.PROC_INST_BODY: + if 
(!parser.procInstBody && is(whitespace, c)) continue + else if (c === "?") parser.state = S.PROC_INST_ENDING + else if (is(quote, c)) { + parser.state = S.PROC_INST_QUOTED + parser.q = c + parser.procInstBody += c + } else parser.procInstBody += c + continue + + case S.PROC_INST_ENDING: + if (c === ">") { + emitNode(parser, "onprocessinginstruction", { + name : parser.procInstName, + body : parser.procInstBody + }) + parser.procInstName = parser.procInstBody = "" + parser.state = S.TEXT + } else { + parser.procInstBody += "?" + c + parser.state = S.PROC_INST_BODY + } + continue + + case S.PROC_INST_QUOTED: + parser.procInstBody += c + if (c === parser.q) { + parser.state = S.PROC_INST_BODY + parser.q = "" + } + continue + + case S.OPEN_TAG: + if (is(nameBody, c)) parser.tagName += c + else { + newTag(parser) + if (c === ">") openTag(parser) + else if (c === "/") parser.state = S.OPEN_TAG_SLASH + else { + if (not(whitespace, c)) strictFail( + parser, "Invalid character in tag name") + parser.state = S.ATTRIB + } + } + continue + + case S.OPEN_TAG_SLASH: + if (c === ">") { + openTag(parser, true) + closeTag(parser) + } else { + strictFail(parser, "Forward-slash in opening tag not followed by >") + parser.state = S.ATTRIB + } + continue + + case S.ATTRIB: + // haven't read the attribute name yet. + if (is(whitespace, c)) continue + else if (c === ">") openTag(parser) + else if (c === "/") parser.state = S.OPEN_TAG_SLASH + else if (is(nameStart, c)) { + parser.attribName = c + parser.attribValue = "" + parser.state = S.ATTRIB_NAME + } else strictFail(parser, "Invalid attribute name") + continue + + case S.ATTRIB_NAME: + if (c === "=") parser.state = S.ATTRIB_VALUE + else if (is(whitespace, c)) parser.state = S.ATTRIB_NAME_SAW_WHITE + else if (is(nameBody, c)) parser.attribName += c + else strictFail(parser, "Invalid attribute name") + continue + + case S.ATTRIB_NAME_SAW_WHITE: + if (c === "=") parser.state = S.ATTRIB_VALUE + else if (is(whitespace, c)) continue + else { + strictFail(parser, "Attribute without value") + parser.tag.attributes[parser.attribName] = "" + parser.attribValue = "" + emitNode(parser, "onattribute", + { name : parser.attribName, value : "" }) + parser.attribName = "" + if (c === ">") openTag(parser) + else if (is(nameStart, c)) { + parser.attribName = c + parser.state = S.ATTRIB_NAME + } else { + strictFail(parser, "Invalid attribute name") + parser.state = S.ATTRIB + } + } + continue + + case S.ATTRIB_VALUE: + if (is(whitespace, c)) continue + else if (is(quote, c)) { + parser.q = c + parser.state = S.ATTRIB_VALUE_QUOTED + } else { + strictFail(parser, "Unquoted attribute value") + parser.state = S.ATTRIB_VALUE_UNQUOTED + parser.attribValue = c + } + continue + + case S.ATTRIB_VALUE_QUOTED: + if (c !== parser.q) { + if (c === "&") parser.state = S.ATTRIB_VALUE_ENTITY_Q + else parser.attribValue += c + continue + } + parser.tag.attributes[parser.attribName] = parser.attribValue + emitNode(parser, "onattribute", { + name:parser.attribName, value:parser.attribValue}) + parser.attribName = parser.attribValue = "" + parser.q = "" + parser.state = S.ATTRIB + continue + + case S.ATTRIB_VALUE_UNQUOTED: + if (not(whitespace+">",c)) { + if (c === "&") parser.state = S.ATTRIB_VALUE_ENTITY_U + else parser.attribValue += c + continue + } + parser.tag.attributes[parser.attribName] = parser.attribValue + emitNode(parser, "onattribute", + { name: parser.attribName, value: parser.attribValue}) + parser.attribName = parser.attribValue = "" + if (c === ">") openTag(parser) + else 
parser.state = S.ATTRIB + continue + + case S.CLOSE_TAG: + if (!parser.tagName) { + if (is(whitespace, c)) continue + else if (not(nameStart, c)) strictFail(parser, + "Invalid tagname in closing tag.") + else parser.tagName = c + } + else if (c === ">") closeTag(parser) + else if (is(nameBody, c)) parser.tagName += c + else { + if (not(whitespace, c)) strictFail(parser, + "Invalid tagname in closing tag") + parser.state = S.CLOSE_TAG_SAW_WHITE + } + continue + + case S.CLOSE_TAG_SAW_WHITE: + if (is(whitespace, c)) continue + if (c === ">") closeTag(parser) + else strictFail("Invalid characters in closing tag") + continue + + case S.TEXT_ENTITY: + case S.ATTRIB_VALUE_ENTITY_Q: + case S.ATTRIB_VALUE_ENTITY_U: + switch(parser.state) { + case S.TEXT_ENTITY: + var returnState = S.TEXT, buffer = "textNode" + break + + case S.ATTRIB_VALUE_ENTITY_Q: + var returnState = S.ATTRIB_VALUE_QUOTED, buffer = "attribValue" + break + + case S.ATTRIB_VALUE_ENTITY_U: + var returnState = S.ATTRIB_VALUE_UNQUOTED, buffer = "attribValue" + break + } + if (c === ";") { + parser[buffer] += parseEntity(parser) + parser.entity = "" + parser.state = returnState + } + else if (is(entity, c)) parser.entity += c + else { + strictFail("Invalid character entity") + parser[buffer] += "&" + parser.entity + parser.entity = "" + parser.state = returnState + } + continue + + default: + throw new Error(parser, "Unknown state: " + parser.state) + } + } // while + // cdata blocks can get very big under normal conditions. emit and move on. + // if (parser.state === S.CDATA && parser.cdata) { + // emitNode(parser, "oncdata", parser.cdata) + // parser.cdata = "" + // } + if (parser.position >= parser.bufferCheckPosition) checkBufferLength(parser) + return parser +} + +})(typeof exports === "undefined" ? sax = {} : exports) diff --git a/node_modules/.npm/sax/0.2.3/package/package.json b/node_modules/.npm/sax/0.2.3/package/package.json new file mode 100644 index 0000000..caa2207 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/package.json @@ -0,0 +1,7 @@ +{ "name" : "sax" +, "author" : "Isaac Z. Schlueter " +, "version" : "0.2.3" +, "main" : "lib/sax" +, "license" : "MIT" +, "scripts" : { "test" : "node test/index.js" } +} diff --git a/node_modules/.npm/sax/0.2.3/package/test/buffer-overrun.js b/node_modules/.npm/sax/0.2.3/package/test/buffer-overrun.js new file mode 100644 index 0000000..8d12fac --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/buffer-overrun.js @@ -0,0 +1,25 @@ +// set this really low so that I don't have to put 64 MB of xml in here. 
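+// (the original sax.MAX_BUFFER_LENGTH is stashed in `bl` and restored at the
+// bottom of this file, so the tiny limit does not leak into the other tests
+// that test/index.js runs)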
+var sax = require("../lib/sax") +var bl = sax.MAX_BUFFER_LENGTH +sax.MAX_BUFFER_LENGTH = 5; + +require(__dirname).test({ + expect : [ + ["error", "Max buffer length exceeded: tagName\nLine: 0\nColumn: 15\nChar: "], + ["error", "Max buffer length exceeded: tagName\nLine: 0\nColumn: 30\nChar: "], + ["error", "Max buffer length exceeded: tagName\nLine: 0\nColumn: 45\nChar: "], + ["opentag", { + "name": "ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ", + "attributes": {} + }], + ["text", "yo"], + ["closetag", "ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ"] + ] +}).write("") + .write("yo") + .write("") + .close(); +sax.MAX_BUFFER_LENGTH = bl diff --git a/node_modules/.npm/sax/0.2.3/package/test/cdata-chunked.js b/node_modules/.npm/sax/0.2.3/package/test/cdata-chunked.js new file mode 100644 index 0000000..ccd5ee6 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/cdata-chunked.js @@ -0,0 +1,11 @@ + +require(__dirname).test({ + expect : [ + ["opentag", {"name": "R","attributes": {}}], + ["opencdata", undefined], + ["cdata", " this is character data  "], + ["closecdata", undefined], + ["closetag", "R"] + ] +}).write("").close(); + diff --git a/node_modules/.npm/sax/0.2.3/package/test/cdata-end-split.js b/node_modules/.npm/sax/0.2.3/package/test/cdata-end-split.js new file mode 100644 index 0000000..b41bd00 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/cdata-end-split.js @@ -0,0 +1,15 @@ + +require(__dirname).test({ + expect : [ + ["opentag", {"name": "R","attributes": {}}], + ["opencdata", undefined], + ["cdata", " this is "], + ["closecdata", undefined], + ["closetag", "R"] + ] +}) + .write("") + .write("") + .close(); + diff --git a/node_modules/.npm/sax/0.2.3/package/test/cdata-fake-end.js b/node_modules/.npm/sax/0.2.3/package/test/cdata-fake-end.js new file mode 100644 index 0000000..07aeac4 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/cdata-fake-end.js @@ -0,0 +1,28 @@ + +var p = require(__dirname).test({ + expect : [ + ["opentag", {"name": "R","attributes": {}}], + ["opencdata", undefined], + ["cdata", "[[[[[[[[]]]]]]]]"], + ["closecdata", undefined], + ["closetag", "R"] + ] +}) +var x = "" +for (var i = 0; i < x.length ; i ++) { + p.write(x.charAt(i)) +} +p.close(); + + +var p2 = require(__dirname).test({ + expect : [ + ["opentag", {"name": "R","attributes": {}}], + ["opencdata", undefined], + ["cdata", "[[[[[[[[]]]]]]]]"], + ["closecdata", undefined], + ["closetag", "R"] + ] +}) +var x = "" +p2.write(x).close(); diff --git a/node_modules/.npm/sax/0.2.3/package/test/cdata-multiple.js b/node_modules/.npm/sax/0.2.3/package/test/cdata-multiple.js new file mode 100644 index 0000000..dab2015 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/cdata-multiple.js @@ -0,0 +1,15 @@ + +require(__dirname).test({ + expect : [ + ["opentag", {"name": "R","attributes": {}}], + ["opencdata", undefined], + ["cdata", " this is "], + ["closecdata", undefined], + ["opencdata", undefined], + ["cdata", "character data  "], + ["closecdata", undefined], + ["closetag", "R"] + ] +}).write("").write("").close(); + diff --git a/node_modules/.npm/sax/0.2.3/package/test/cdata.js b/node_modules/.npm/sax/0.2.3/package/test/cdata.js new file mode 100644 index 0000000..0f09cce --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/cdata.js @@ -0,0 +1,10 @@ +require(__dirname).test({ + xml : "", + expect : [ + ["opentag", {"name": "R","attributes": {}}], + ["opencdata", undefined], + ["cdata", " this is character data  "], + ["closecdata", undefined], 
+ ["closetag", "R"] + ] +}); diff --git a/node_modules/.npm/sax/0.2.3/package/test/index.js b/node_modules/.npm/sax/0.2.3/package/test/index.js new file mode 100644 index 0000000..7ee08df --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/index.js @@ -0,0 +1,73 @@ + +var sys = require("sys"), + assert = require("assert"), + fs = require("fs"), + path = require("path"), + sax = require("../lib/sax"); + +exports.sax = sax; + +// handy way to do simple unit tests +// if the options contains an xml string, it'll be written and the parser closed. +// otherwise, it's assumed that the test will write and close. +exports.test = function test (options) { + var xml = options.xml, + parser = sax.parser(options.strict, options.opt), + expect = options.expect, + e = 0; + sax.EVENTS.forEach(function (ev) { + parser["on" + ev] = function (n) { + if (e >= expect.length && (ev === "end" || ev === "ready")) return; + assert.ok( e < expect.length, + "expectation #"+e+" "+sys.inspect(expect[e])+"\n"+ + "Unexpected event: "+ev+" "+(n ? sys.inspect(n) : "")); + var inspected = n instanceof Error ? "\n"+ n.message : sys.inspect(n) + assert.equal(ev, expect[e][0], + "expectation #"+e+"\n"+ + "Didn't get expected event\n"+ + "expect: "+expect[e][0] + " " +sys.inspect(expect[e][1])+"\n"+ + "actual: "+ev+" "+inspected+"\n"); + if (ev === "error") assert.equal(n.message, expect[e][1]); + else assert.deepEqual(n, expect[e][1], + "expectation #"+e+"\n"+ + "Didn't get expected argument\n"+ + "expect: "+expect[e][0] + " " +sys.inspect(expect[e][1])+"\n"+ + "actual: "+ev+" "+inspected+"\n"); + e++; + if (ev === "error") parser.resume(); + }; + }); + if (xml) parser.write(xml).close(); + return parser; +} + +if (module === require.main) { + var running = true, + failures = 0; + function fail (file, er) { + sys.error("Failed: "+file); + sys.error(er.stack || er.message); + failures ++; + } + + fs.readdir(__dirname, function (error, files) { + files = files.filter(function (file) { + return (/\.js$/.exec(file) && file !== 'index.js') + }) + var n = files.length + , i = 0 + console.log("0.." + n) + files.forEach(function (file) { + // run this test. + try { + require(path.resolve(__dirname, file)) + console.log("ok " + (++i) + " - " + file) + } catch (er) { + console.log("not ok "+ (++i) + " - " + file) + fail(file, er) + } + }) + if (!failures) return console.log("#all pass"); + else return console.error(failures + " failure" + (failures > 1 ? 
"s" : "")); + }); +} diff --git a/node_modules/.npm/sax/0.2.3/package/test/issue-23.js b/node_modules/.npm/sax/0.2.3/package/test/issue-23.js new file mode 100644 index 0000000..e7991b2 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/issue-23.js @@ -0,0 +1,43 @@ + +require(__dirname).test + ( { xml : + ""+ + ""+ + "653724009"+ + "-1"+ + "01pG0000002KoSUIA0"+ + "-1"+ + "CalendarController"+ + "true"+ + ""+ + "" + + , expect : + [ [ "opentag", { name: "COMPILECLASSESRESPONSE", attributes: {} } ] + , [ "opentag", { name : "RESULT", attributes: {} } ] + , [ "opentag", { name: "BODYCRC", attributes: {} } ] + , [ "text", "653724009" ] + , [ "closetag", "BODYCRC" ] + , [ "opentag", { name: "COLUMN", attributes: {} } ] + , [ "text", "-1" ] + , [ "closetag", "COLUMN" ] + , [ "opentag", { name: "ID", attributes: {} } ] + , [ "text", "01pG0000002KoSUIA0" ] + , [ "closetag", "ID" ] + , [ "opentag", {name: "LINE", attributes: {} } ] + , [ "text", "-1" ] + , [ "closetag", "LINE" ] + , [ "opentag", {name: "NAME", attributes: {} } ] + , [ "text", "CalendarController" ] + , [ "closetag", "NAME" ] + , [ "opentag", {name: "SUCCESS", attributes: {} } ] + , [ "text", "true" ] + , [ "closetag", "SUCCESS" ] + , [ "closetag", "RESULT" ] + , [ "closetag", "COMPILECLASSESRESPONSE" ] + ] + , strict : false + , opt : {} + } + ) + diff --git a/node_modules/.npm/sax/0.2.3/package/test/issue-30.js b/node_modules/.npm/sax/0.2.3/package/test/issue-30.js new file mode 100644 index 0000000..c2cc809 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/issue-30.js @@ -0,0 +1,24 @@ +// https://github.com/isaacs/sax-js/issues/33 +require(__dirname).test + ( { xml : "\n"+ + "\n"+ + "\n"+ + "" + + , expect : + [ [ "opentag", { name: "xml", attributes: {} } ] + , [ "text", "\n" ] + , [ "comment", " \n comment with a single dash- in it\n" ] + , [ "text", "\n" ] + , [ "opentag", { name: "data", attributes: {} } ] + , [ "closetag", "data" ] + , [ "text", "\n" ] + , [ "closetag", "xml" ] + ] + , strict : true + , opt : {} + } + ) + diff --git a/node_modules/.npm/sax/0.2.3/package/test/parser-position.js b/node_modules/.npm/sax/0.2.3/package/test/parser-position.js new file mode 100644 index 0000000..84637dc --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/parser-position.js @@ -0,0 +1,27 @@ +var sax = require("../lib/sax"), + assert = require("assert") + +function testPosition(chunks, expectedEvents) { + var parser = sax.parser(); + expectedEvents.forEach(function(expectation) { + parser['on' + expectation[0]] = function() { + assert.equal(parser.position, expectation[1]); + } + }); + chunks.forEach(function(chunk) { + parser.write(chunk); + }); +}; + +testPosition(['

<div>abcdefgh</div>'],
+             [ ['opentag', 5]
+             , ['text', 19]
+             , ['closetag', 19]
+             ]);
+
+testPosition(['<div>abcde','fgh</div>
'], + [ ['opentag', 5] + , ['text', 19] + , ['closetag', 19] + ]); + diff --git a/node_modules/.npm/sax/0.2.3/package/test/self-closing-child-strict.js b/node_modules/.npm/sax/0.2.3/package/test/self-closing-child-strict.js new file mode 100644 index 0000000..ce9c045 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/self-closing-child-strict.js @@ -0,0 +1,40 @@ + +require(__dirname).test({ + xml : + ""+ + "" + + "" + + "" + + "" + + "=(|)" + + "" + + "", + expect : [ + ["opentag", { + "name": "root", + "attributes": {} + }], + ["opentag", { + "name": "child", + "attributes": {} + }], + ["opentag", { + "name": "haha", + "attributes": {} + }], + ["closetag", "haha"], + ["closetag", "child"], + ["opentag", { + "name": "monkey", + "attributes": {} + }], + ["text", "=(|)"], + ["closetag", "monkey"], + ["closetag", "root"], + ["end"], + ["ready"] + ], + strict : true, + opt : {} +}); + diff --git a/node_modules/.npm/sax/0.2.3/package/test/self-closing-child.js b/node_modules/.npm/sax/0.2.3/package/test/self-closing-child.js new file mode 100644 index 0000000..bc6b52b --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/self-closing-child.js @@ -0,0 +1,40 @@ + +require(__dirname).test({ + xml : + ""+ + "" + + "" + + "" + + "" + + "=(|)" + + "" + + "", + expect : [ + ["opentag", { + "name": "ROOT", + "attributes": {} + }], + ["opentag", { + "name": "CHILD", + "attributes": {} + }], + ["opentag", { + "name": "HAHA", + "attributes": {} + }], + ["closetag", "HAHA"], + ["closetag", "CHILD"], + ["opentag", { + "name": "MONKEY", + "attributes": {} + }], + ["text", "=(|)"], + ["closetag", "MONKEY"], + ["closetag", "ROOT"], + ["end"], + ["ready"] + ], + strict : false, + opt : {} +}); + diff --git a/node_modules/.npm/sax/0.2.3/package/test/self-closing-tag.js b/node_modules/.npm/sax/0.2.3/package/test/self-closing-tag.js new file mode 100644 index 0000000..b2c5736 --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/self-closing-tag.js @@ -0,0 +1,25 @@ + +require(__dirname).test({ + xml : + " "+ + " "+ + " "+ + " "+ + "=(|) "+ + ""+ + " ", + expect : [ + ["opentag", {name:"ROOT", attributes:{}}], + ["opentag", {name:"HAHA", attributes:{}}], + ["closetag", "HAHA"], + ["opentag", {name:"HAHA", attributes:{}}], + ["closetag", "HAHA"], + // ["opentag", {name:"HAHA", attributes:{}}], + // ["closetag", "HAHA"], + ["opentag", {name:"MONKEY", attributes:{}}], + ["text", "=(|)"], + ["closetag", "MONKEY"], + ["closetag", "ROOT"] + ], + opt : { trim : true } +}); \ No newline at end of file diff --git a/node_modules/.npm/sax/0.2.3/package/test/stray-ending.js b/node_modules/.npm/sax/0.2.3/package/test/stray-ending.js new file mode 100644 index 0000000..6b0aa7f --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/stray-ending.js @@ -0,0 +1,17 @@ +// stray ending tags should just be ignored in non-strict mode. 
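+// (strict mode would instead report the unmatched close tag as an error)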
+// https://github.com/isaacs/sax-js/issues/32 +require(__dirname).test + ( { xml : + "" + , expect : + [ [ "opentag", { name: "A", attributes: {} } ] + , [ "opentag", { name: "B", attributes: {} } ] + , [ "text", "" ] + , [ "closetag", "B" ] + , [ "closetag", "A" ] + ] + , strict : false + , opt : {} + } + ) + diff --git a/node_modules/.npm/sax/0.2.3/package/test/trailing-non-whitespace.js b/node_modules/.npm/sax/0.2.3/package/test/trailing-non-whitespace.js new file mode 100644 index 0000000..3e1fb2e --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/trailing-non-whitespace.js @@ -0,0 +1,17 @@ + +require(__dirname).test({ + xml : "Welcome, to monkey land", + expect : [ + ["opentag", { + "name": "SPAN", + "attributes": {} + }], + ["text", "Welcome,"], + ["closetag", "SPAN"], + ["text", " to monkey land"], + ["end"], + ["ready"] + ], + strict : false, + opt : {} +}); diff --git a/node_modules/.npm/sax/0.2.3/package/test/unquoted.js b/node_modules/.npm/sax/0.2.3/package/test/unquoted.js new file mode 100644 index 0000000..79f1d0b --- /dev/null +++ b/node_modules/.npm/sax/0.2.3/package/test/unquoted.js @@ -0,0 +1,17 @@ +// unquoted attributes should be ok in non-strict mode +// https://github.com/isaacs/sax-js/issues/31 +require(__dirname).test + ( { xml : + "" + , expect : + [ [ "attribute", { name: "class", value: "test" } ] + , [ "attribute", { name: "hello", value: "world" } ] + , [ "opentag", { name: "SPAN", + attributes: { class: "test", hello: "world" } } ] + , [ "closetag", "SPAN" ] + ] + , strict : false + , opt : {} + } + ) + diff --git a/node_modules/.npm/sax/active b/node_modules/.npm/sax/active new file mode 120000 index 0000000..945532c --- /dev/null +++ b/node_modules/.npm/sax/active @@ -0,0 +1 @@ +./0.2.3 \ No newline at end of file diff --git a/node_modules/.npm/xml2js/0.1.9/dependson/sax@0.2.3 b/node_modules/.npm/xml2js/0.1.9/dependson/sax@0.2.3 new file mode 120000 index 0000000..aa67612 --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/dependson/sax@0.2.3 @@ -0,0 +1 @@ +./../../../sax/0.2.3 \ No newline at end of file diff --git a/node_modules/.npm/xml2js/0.1.9/node_modules/sax/index.js b/node_modules/.npm/xml2js/0.1.9/node_modules/sax/index.js new file mode 100755 index 0000000..86482be --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/node_modules/sax/index.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! +var dep = require('path').join(__dirname, "./../../../../sax/0.2.3/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../../../../sax/0.2.3/package/lib/sax" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/node_modules/.npm/xml2js/0.1.9/node_modules/sax/package.json.js b/node_modules/.npm/xml2js/0.1.9/node_modules/sax/package.json.js new file mode 100644 index 0000000..daa881b --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/node_modules/sax/package.json.js @@ -0,0 +1,89 @@ +module.exports = { + "name": "sax", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me" + }, + "version": "0.2.3", + "license": "MIT", + "scripts": { + "test": "node test/index.js" + }, + "_id": "sax@0.2.3", + "engines": { + "node": "*" + }, + "_engineSupported": true, + "_npmVersion": "0.2.18", + "_nodeVersion": "v0.4.9", + "directories": { + "lib": "./lib" + }, + "modules": { + "sax.js": "lib/sax.js", + "index.js": "lib/sax" + }, + "files": [ + "" + ], + "_defaultsLoaded": true, + "_npmConfig": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "binroot": "/root/nfs/sdc-import-vm/node_modules/.bin", + "manroot": null, + "must-install": false, + "showlevel": 1, + "registry": "http://registry.npmjs.org/", + "argv": { + "remain": [ + "xml2js", + "sax@>=0.1.1" + ], + "cooked": [ + "bundle", + "install", + "xml2js" + ], + "original": [ + "bundle", + "install", + "xml2js" + ] + }, + "email": "ovazquez@gmail.com", + "tar": "gtar", + "auto-activate": "always", + "auto-deactivate": true, + "browser": "open", + "color": true, + "description": true, + "dev": false, + "dotnpm": ".npm", + "force": false, + "globalconfig": "/usr/etc/npmrc", + "gzipbin": "gzip", + "listopts": "", + "logfd": 2, + "loglevel": "info", + "node-version": "v0.4.9", + "onload-script": false, + "outfd": 1, + "proxy": null, + "rebuild-bundle": true, + "recursive": false, + "tag": "latest", + "tmproot": "/tmp", + "update-dependents": true, + "userconfig": "/root/.npmrc" + }, + "_env": {}, + "_npmPaths": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "dir": "/root/nfs/sdc-import-vm/node_modules/.npm", + "cache": "/root/nfs/sdc-import-vm/node_modules/.npm/.cache", + "tmp": "/tmp/npm-1313565244904", + "package": "/root/nfs/sdc-import-vm/node_modules/.npm/sax/0.2.3/package", + "modules": "/root/nfs/sdc-import-vm/node_modules/.npm/xml2js/0.1.9/node_modules/sax", + "dependencies": "/root/nfs/sdc-import-vm/node_modules/.npm/sax/0.2.3/node_modules" + } +} diff --git a/node_modules/.npm/xml2js/0.1.9/node_modules/sax/sax.js b/node_modules/.npm/xml2js/0.1.9/node_modules/sax/sax.js new file mode 100755 index 0000000..86482be --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/node_modules/sax/sax.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! 
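+// (this shim temporarily unshifts the bundled module paths onto require.paths,
+// loads the real lib/sax, then tries to remove the paths it added on the way
+// out; note the bundle cleanup below calls slice rather than splice, so that
+// entry is not actually removed)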
+var dep = require('path').join(__dirname, "./../../../../sax/0.2.3/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../../../../sax/0.2.3/package/lib/sax" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/node_modules/.npm/xml2js/0.1.9/package/.gitignore b/node_modules/.npm/xml2js/0.1.9/package/.gitignore new file mode 100644 index 0000000..1377554 --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/package/.gitignore @@ -0,0 +1 @@ +*.swp diff --git a/node_modules/.npm/xml2js/0.1.9/package/Cakefile b/node_modules/.npm/xml2js/0.1.9/package/Cakefile new file mode 100644 index 0000000..1143442 --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/package/Cakefile @@ -0,0 +1,12 @@ +{spawn, exec} = require 'child_process' + +task 'build', 'continually build the JavaScript code', -> + coffee = spawn 'coffee', ['-cw', '-o', 'lib', 'src'] + coffee.stdout.on 'data', (data) -> console.log data.toString().trim() + +task 'doc', 'rebuild the Docco documentation', -> + exec([ + 'docco src/xml2js.coffee' + ].join(' && '), (err) -> + throw err if err + ) diff --git a/node_modules/.npm/xml2js/0.1.9/package/LICENSE b/node_modules/.npm/xml2js/0.1.9/package/LICENSE new file mode 100644 index 0000000..83c3140 --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/package/LICENSE @@ -0,0 +1,19 @@ +Copyright 2010, 2011. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/.npm/xml2js/0.1.9/package/README.md b/node_modules/.npm/xml2js/0.1.9/package/README.md new file mode 100644 index 0000000..6bb83bc --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/package/README.md @@ -0,0 +1,41 @@ +node-xml2js +=========== + +Description +----------- + +Simple XML to JavaScript object converter. Uses [sax-js](http://github.com/isaacs/sax-js/). + +See the tests for examples until docs are written. + +Note: If you're looking for a full DOM parser, you probably want +[JSDom](http://github.com/tmpvar/jsdom). + +Installation +------------ + +Simplest way to install `xml2js` is to use [npm](http://npmjs.org), just `npm +install xml2js` which will download xml2js and all dependencies. 
+ +Simple usage +----------- + + var sys = require('sys'), + fs = require('fs'), + xml2js = require('xml2js'); + + var parser = new xml2js.Parser(); + parser.addListener('end', function(result) { + console.log(sys.inspect(result)); + console.log('Done.'); + }); + fs.readFile(__dirname + '/foo.xml', function(err, data) { + parser.parseString(data); + }); + +Running tests, development +-------------------------- + +The development requirements are handled by npm, you just need to install +them. We also have a number of unittests, they can be run using `zap` +directly from the project root. diff --git a/node_modules/.npm/xml2js/0.1.9/package/lib/xml2js.js b/node_modules/.npm/xml2js/0.1.9/package/lib/xml2js.js new file mode 100644 index 0000000..68c8f10 --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/package/lib/xml2js.js @@ -0,0 +1,96 @@ +(function() { + var events, sax; + var __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }, __hasProp = Object.prototype.hasOwnProperty, __extends = function(child, parent) { + for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } + function ctor() { this.constructor = child; } + ctor.prototype = parent.prototype; + child.prototype = new ctor; + child.__super__ = parent.prototype; + return child; + }; + sax = require('sax'); + events = require('events'); + exports.Parser = (function() { + __extends(Parser, events.EventEmitter); + function Parser(opts) { + this.parseString = __bind(this.parseString, this); var key, options, stack, value; + options = { + explicitCharkey: false, + trim: true, + normalize: true + }; + for (key in opts) { + if (!__hasProp.call(opts, key)) continue; + value = opts[key]; + options[key] = value; + } + this.saxParser = sax.parser(true, { + trim: false, + normalize: false + }); + this.EXPLICIT_CHARKEY = options.explicitCharkey; + this.resultObject = null; + stack = []; + this.saxParser.onopentag = __bind(function(node) { + var key, obj, _ref; + obj = {}; + obj["#"] = ""; + _ref = node.attributes; + for (key in _ref) { + if (!__hasProp.call(_ref, key)) continue; + if (!("@" in obj)) { + obj["@"] = {}; + } + obj["@"][key] = node.attributes[key]; + } + obj["#name"] = node.name; + return stack.push(obj); + }, this); + this.saxParser.onclosetag = __bind(function() { + var nodeName, obj, old, s; + obj = stack.pop(); + nodeName = obj["#name"]; + delete obj["#name"]; + s = stack[stack.length - 1]; + if (obj["#"].match(/^\s*$/)) { + delete obj["#"]; + } else { + if (options.trim) { + obj["#"] = obj["#"].trim(); + } + if (options.normalize) { + obj["#"] = obj["#"].replace(/\s{2,}/g, " ").trim(); + } + if (Object.keys(obj).length === 1 && "#" in obj && !this.EXPLICIT_CHARKEY) { + obj = obj["#"]; + } + } + if (stack.length > 0) { + if (!(nodeName in s)) { + return s[nodeName] = obj; + } else if (s[nodeName] instanceof Array) { + return s[nodeName].push(obj); + } else { + old = s[nodeName]; + s[nodeName] = [old]; + return s[nodeName].push(obj); + } + } else { + this.resultObject = obj; + return this.emit("end", this.resultObject); + } + }, this); + this.saxParser.ontext = this.saxParser.oncdata = __bind(function(text) { + var s; + s = stack[stack.length - 1]; + if (s) { + return s["#"] += text; + } + }, this); + } + Parser.prototype.parseString = function(str) { + return this.saxParser.write(str.toString()); + }; + return Parser; + })(); +}).call(this); diff --git a/node_modules/.npm/xml2js/0.1.9/package/package.json b/node_modules/.npm/xml2js/0.1.9/package/package.json new file 
mode 100644 index 0000000..6584d86 --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/package/package.json @@ -0,0 +1,29 @@ +{ + "name" : "xml2js", + "description" : "Simple XML to JavaScript object converter.", + "keywords" : ["xml", "json"], + "homepage" : "https://github.com/Leonidas-from-XIV/node-xml2js", + "version" : "0.1.9", + "author" : "Marek Kubica (http://xivilization.net)", + "contributors" : [ + "maqr (https://github.com/maqr)", + "Ben Weaver (http://benweaver.com/)", + "Jae Kwon (https://github.com/jaekwon)", + "Jim Robert" + ], + "main" : "./lib/xml2js", + "directories" : { + "lib": "./lib" + }, + "repository" : { + "type" : "git", + "url" : "https://github.com/Leonidas-from-XIV/node-xml2js.git" + }, + "dependencies" : { + "sax" : ">=0.1.1" + }, + "devDependencies" : { + "coffee-script" : ">=1.0.1", + "zap" : ">=0.2.3" + } +} diff --git a/node_modules/.npm/xml2js/0.1.9/package/src/xml2js.coffee b/node_modules/.npm/xml2js/0.1.9/package/src/xml2js.coffee new file mode 100644 index 0000000..f9b4378 --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/package/src/xml2js.coffee @@ -0,0 +1,78 @@ +sax = require 'sax' +events = require 'events' + +class exports.Parser extends events.EventEmitter + constructor: (opts) -> + + # default options. for compatibility's sake set to some + # sub-optimal settings. might change in the future. + options = + explicitCharkey: false + trim: true + # normalize implicates trimming, just so you know + normalize: true + # overwrite them with the specified options, if any + options[key] = value for own key, value of opts + + # make the SAX parser. tried trim and normalize, but they are not + # very helpful + @saxParser = sax.parser true, { + trim: false, + normalize: false + } + # always use the '#' key, even if there are no subkeys + # setting this property by and is deprecated, yet still supported. 
+ # better pass it as explicitCharkey option to the constructor + @EXPLICIT_CHARKEY = options.explicitCharkey + @resultObject = null + stack = [] + + @saxParser.onopentag = (node) => + obj = {} + obj["#"] = "" + for own key of node.attributes + if "@" not of obj + obj["@"] = {} + obj["@"][key] = node.attributes[key] + + # need a place to store the node name + obj["#name"] = node.name + stack.push obj + + @saxParser.onclosetag = => + obj = stack.pop() + nodeName = obj["#name"] + delete obj["#name"] + + s = stack[stack.length - 1] + # remove the '#' key altogether if it's blank + if obj["#"].match(/^\s*$/) + delete obj["#"] + else + obj["#"] = obj["#"].trim() if options.trim + obj["#"] = obj["#"].replace(/\s{2,}/g, " ").trim() if options.normalize + # also do away with '#' key altogether, if there's no subkeys + # unless EXPLICIT_CHARKEY is set + if Object.keys(obj).length == 1 and "#" of obj and not @EXPLICIT_CHARKEY + obj = obj["#"] + + if stack.length > 0 + if nodeName not of s + s[nodeName] = obj + else if s[nodeName] instanceof Array + s[nodeName].push obj + else + old = s[nodeName] + s[nodeName] = [old] + s[nodeName].push obj + else + @resultObject = obj + @emit "end", @resultObject + + @saxParser.ontext = @saxParser.oncdata = (text) => + s = stack[stack.length - 1] + if s + s["#"] += text + + parseString: (str) => + @saxParser.write str.toString() diff --git a/node_modules/.npm/xml2js/0.1.9/package/test/fixtures/sample.xml b/node_modules/.npm/xml2js/0.1.9/package/test/fixtures/sample.xml new file mode 100644 index 0000000..dd5ec76 --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/package/test/fixtures/sample.xml @@ -0,0 +1,21 @@ + + Character data here! + + + + Line One + Line Two + + + + This Foo(1) is + Foo(2) + character + Foo(3) + data! + Foo(4) + + Qux. + Quux. + + diff --git a/node_modules/.npm/xml2js/0.1.9/package/test/xml2js.test.coffee b/node_modules/.npm/xml2js/0.1.9/package/test/xml2js.test.coffee new file mode 100644 index 0000000..2768418 --- /dev/null +++ b/node_modules/.npm/xml2js/0.1.9/package/test/xml2js.test.coffee @@ -0,0 +1,60 @@ +# use zap to run tests, it also detects CoffeeScript files +xml2js = require '../lib/xml2js' +fs = require 'fs' +sys = require 'sys' +assert = require 'assert' + +skeleton = (options, checks) -> + (test) -> + x2js = new xml2js.Parser(options) + x2js.addListener 'end', (r) -> + checks(r) + test.finish() + fs.readFile __dirname + '/fixtures/sample.xml', (err, data) -> + x2js.parseString data + +module.exports = + 'test parse with defaults': skeleton(undefined, (r) -> + console.log 'Result object: ' + sys.inspect(r, false, 10) + assert.equal r['chartest']['@']['desc'], 'Test for CHARs' + assert.equal r['chartest']['#'], 'Character data here!' + assert.equal r['cdatatest']['@']['desc'], 'Test for CDATA' + assert.equal r['cdatatest']['@']['misc'], 'true' + assert.equal r['cdatatest']['#'], 'CDATA here!' + assert.equal r['nochartest']['@']['desc'], 'No data' + assert.equal r['nochartest']['@']['misc'], 'false' + assert.equal r['listtest']['item'][0]['#'], 'This is character data!' + assert.equal r['listtest']['item'][0]['subitem'][0], 'Foo(1)' + assert.equal r['listtest']['item'][0]['subitem'][1], 'Foo(2)' + assert.equal r['listtest']['item'][0]['subitem'][2], 'Foo(3)' + assert.equal r['listtest']['item'][0]['subitem'][3], 'Foo(4)' + assert.equal r['listtest']['item'][1], 'Qux.' 
+ assert.equal r['listtest']['item'][2], 'Quux.') + + 'test parse with explicitCharkey': skeleton({explicitCharkey: true}, (r) -> + assert.equal r['chartest']['@']['desc'], 'Test for CHARs' + assert.equal r['chartest']['#'], 'Character data here!' + assert.equal r['cdatatest']['@']['desc'], 'Test for CDATA' + assert.equal r['cdatatest']['@']['misc'], 'true' + assert.equal r['cdatatest']['#'], 'CDATA here!' + assert.equal r['nochartest']['@']['desc'], 'No data' + assert.equal r['nochartest']['@']['misc'], 'false' + assert.equal r['listtest']['item'][0]['#'], 'This is character data!' + assert.equal r['listtest']['item'][0]['subitem'][0]['#'], 'Foo(1)' + assert.equal r['listtest']['item'][0]['subitem'][1]['#'], 'Foo(2)' + assert.equal r['listtest']['item'][0]['subitem'][2]['#'], 'Foo(3)' + assert.equal r['listtest']['item'][0]['subitem'][3]['#'], 'Foo(4)' + assert.equal r['listtest']['item'][1]['#'], 'Qux.' + assert.equal r['listtest']['item'][2]['#'], 'Quux.') + + 'test default text handling': skeleton(undefined, (r) -> + assert.equal r['whitespacetest']['#'], 'Line One Line Two') + + 'test disable trimming': skeleton({trim: false}, (r) -> + assert.equal r['whitespacetest']['#'], 'Line One Line Two') + + 'test disable normalize': skeleton({normalize: false}, (r) -> + assert.equal r['whitespacetest']['#'], 'Line One\n Line Two') + + 'test disable normalize and trim': skeleton({normalize: false, trim: false}, (r) -> + assert.equal r['whitespacetest']['#'], '\n Line One\n Line Two\n ') diff --git a/node_modules/.npm/xml2js/active b/node_modules/.npm/xml2js/active new file mode 120000 index 0000000..b05eb8c --- /dev/null +++ b/node_modules/.npm/xml2js/active @@ -0,0 +1 @@ +./0.1.9 \ No newline at end of file diff --git a/node_modules/.npm/zfs/0.1.3/package/.gitmodules b/node_modules/.npm/zfs/0.1.3/package/.gitmodules new file mode 100644 index 0000000..83e38ed --- /dev/null +++ b/node_modules/.npm/zfs/0.1.3/package/.gitmodules @@ -0,0 +1,3 @@ +[submodule "async-testing"] + path = async-testing + url = http://github.com/bentomas/node-async-testing.git diff --git a/node_modules/.npm/zfs/0.1.3/package/README.md b/node_modules/.npm/zfs/0.1.3/package/README.md new file mode 100644 index 0000000..112263c --- /dev/null +++ b/node_modules/.npm/zfs/0.1.3/package/README.md @@ -0,0 +1,75 @@ +NAME +---- + +node-zfs - Node.js ZFS interface + +SYNOPSIS +-------- + + // list datasets + zfs.list(function (err, fields, data) { + // ... + }); + + // list snapshots + zfs.list_snapshots(function (err, fields, data) { + // ... + }); + + // create a dataset + zfs.create('mydataset', function (err) { + // ... + }); + + // destroy a dataset or snapshot + zfs.destroy('mydataset', function (err) { + // ... + }); + + // recursively destroy a dataset + zfs.destroyAll('mydataset', function (err) { + // ... + }); + + // rollback a snapshot + zfs.rollback('mydataset@backup', function (err) { + // ... + }); + + // clone a dataset + zfs.clone('mydataset@backup', 'mynewdataset', function (err) { + // ... + }); + + // set dataset properties + zfs.set('mydataset', { 'test:key1': 'value' + , 'test:key2': 'value' }, function (err) { + // ... + }); + + // get dataset properties + zfs.get('mydataset', [ 'test:key1', 'test:key2' ], + function (err, properties) { + // ... + }); + +DESCRIPTION +----------- + +The node-zfs library provides a thin, evented wrapper around common ZFS +commands. + +ENVIRONMENT +----------- + +The library was developed on an OpenSolaris snv_111b system.
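Putting the SYNOPSIS calls together, a rough end-to-end sketch might create a dataset, set a user property on it, and read that property back, chaining each step through the previous callback. This is an illustrative sketch rather than part of the package's documentation: the dataset name `mypool/mydataset` and the `test:owner` property are made up, and `require('zfs').zfs` assumes the module's exported `zfs` object (the library also exports `zpool`).

    var zfs = require('zfs').zfs;      // assumed import of the exported `zfs` object

    var name = 'mypool/mydataset';     // hypothetical dataset name

    // each wrapper reports failure through the first callback argument
    zfs.create(name, function (err) {
        if (err) throw err;
        zfs.set(name, { 'test:owner': 'demo' }, function (err) {
            if (err) throw err;
            zfs.get(name, [ 'test:owner' ], function (err, properties) {
                if (err) throw err;
                console.log('test:owner = ' + properties['test:owner']);
            });
        });
    });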
+ +AUTHOR +------ + +Orlando Vazquez + +SEE ALSO +-------- + +zfs(1M), zpool(1M) diff --git a/node_modules/.npm/zfs/0.1.3/package/index.js b/node_modules/.npm/zfs/0.1.3/package/index.js new file mode 120000 index 0000000..c4d801a --- /dev/null +++ b/node_modules/.npm/zfs/0.1.3/package/index.js @@ -0,0 +1 @@ +zfs.js \ No newline at end of file diff --git a/node_modules/.npm/zfs/0.1.3/package/lib/async_testing.js b/node_modules/.npm/zfs/0.1.3/package/lib/async_testing.js new file mode 100644 index 0000000..ee29587 --- /dev/null +++ b/node_modules/.npm/zfs/0.1.3/package/lib/async_testing.js @@ -0,0 +1,357 @@ +var sys = require('sys'), + assert = require('assert'), + events = require('events'); + +var AssertWrapper = exports.AssertWrapper = function(test) { + var test = this.__test = test; + var assertion_functions = [ + 'ok', + 'equal', + 'notEqual', + 'deepEqual', + 'notDeepEqual', + 'strictEqual', + 'notStrictEqual', + 'throws', + 'doesNotThrow' + ]; + + assertion_functions.forEach(function(func_name) { + this[func_name] = function() { + try { + assert[func_name].apply(null, arguments); + test.__numAssertions++; + } + catch(err) { + if( err instanceof assert.AssertionError ) { + test.failed(err); + } + } + } + }, this); +}; + +var Test = function(name, func, suite) { + events.EventEmitter.call(this); + + this.assert = new AssertWrapper(this); + this.numAssertionsExpected = null; + + this.__name = name; + this.__phase = 'setup'; + this.__func = func; + this.__suite = suite; + this.__finishedCallback = null; + this.__numAssertions = 0; + this.__finished = false; + this.__failure = null; + this.__symbol = '.'; +}; +sys.inherits(Test, events.EventEmitter); + +Test.prototype.run = function() { + var self = this; + + try { + this.__phase = 'test'; + this.__func(this.assert, function() { self.finish(); }, this); + } + catch(err) { + if( this.listeners('uncaughtException').length > 0 ) { + this.emit('uncaughtException',err); + } + else { + this.failed(err); + } + } + + // they didn't ask for the finish function so assume it is synchronous + if( this.__func.length < 2 ) { + this.finish(); + } +}; +Test.prototype.finish = function() { + if( !this.__finished ) { + this.__finished = true; + + if( this.__failure === null && this.numAssertionsExpected !== null ) { + try { + var message = this.numAssertionsExpected + (this.numAssertionsExpected == 1 ? 
' assertion was ' : ' assertions were ') + + 'expected but ' + this.__numAssertions + ' fired'; + assert.equal(this.numAssertionsExpected, this.__numAssertions, message); + } + catch(err) { + this.__failure = err; + this.__symbol = 'F'; + } + } + + if( this.__finishedCallback ) { + this.__finishedCallback(this.__numAssertions); + } + } +}; +Test.prototype.failureString = function() { + var output = ''; + + if( this.__symbol == 'F' ) { + output += ' test "' + this.__name + '" failed: \n'; + } + else { + output += ' test "' + this.__name + '" threw an error'; + if( this.__phase !== 'test' ) { + output += ' during ' + this.__phase; + } + output += ': \n'; + } + + if( this.__failure.stack ) { + this.__failure.stack.split("\n").forEach(function(line) { + output += ' ' + line + '\n'; + }); + + } + else { + output += ' '+this.__failure; + } + + return output; +}; +Test.prototype.failed = function(err) { + this.__failure = err; + if( err instanceof assert.AssertionError ) { + this.__symbol = 'F'; + } + else { + this.__symbol = 'E'; + } + + if( !this.__finished ) { + this.finish(); + } +}; + +var TestSuite = exports.TestSuite = function(name) { + this.name = name; + this.wait = true; + this.tests = []; + this.numAssertions = 0; + this.numFinishedTests = 0; + this.numFailedTests = 0; + this.finished = false; + this.callback = null; + + this._setup = null; + this._teardown = null; + + var suite = this; + process.addListener('exit', function() { + if( !suite.wait ) { + suite.finish(); + } + }); + + // I'm having trouble doing instance of tests to see if something + // is a test suite, so i'll add a property nothing is likely to have + this.nodeAsyncTesting = 42; +}; +TestSuite.prototype.finish = function() { + if( this.finished ) { + return; + } + + this.finished = true; + + var failures = []; + this.tests.forEach(function(t) { + if( !t.__finished ) { + t.finish(); + } + if( t.__failure !== null ) { + this.numFailedTests++; + failures.push(t); + } + },this); + + + output = '\n'; + output += this.tests.length + ' test' + (this.tests.length == 1 ? '' : 's') + '; '; + output += failures.length + ' failure' + (failures.length == 1 ? '' : 's') + '; '; + output += this.numAssertions + ' assertion' + (this.numAssertions == 1 ? 
'' : 's') + ' '; + sys.error(output); + + sys.error(''); + failures.forEach(function(t) { + sys.error(t.failureString()); + }); + + if( this.callback ) { + this.callback(); + } +}; + +TestSuite.prototype.setup = function(func) { + this._setup = func; + return this; +}; +TestSuite.prototype.teardown = function(func) { + this._teardown = func; + return this; +}; +TestSuite.prototype.waitForTests = function(yesOrNo) { + if(typeof yesOrNo == 'undefined') { + yesOrNo = true; + } + this.wait = yesOrNo; + return this; +}; +TestSuite.prototype.addTests = function(tests) { + for( var testName in tests ) { + var t = new Test(testName, tests[testName], this); + this.tests.push(t); + }; + + return this; +}; +TestSuite.prototype.runTests = function(callback) { + if( callback ) { + this.callback = callback; + } + sys.error('Running "' + this.name + '"'); + this.runTest(0); +}; +TestSuite.prototype.runTest = function(testIndex) { + if( testIndex >= this.tests.length ) { + return; + } + + var t = this.tests[testIndex]; + t.__finishedCallback = finishedCallback; + var suite = this; + + var wait = suite.wait; + + if(wait) { + // if we are waiting then let's assume we are only running one test at + // a time, so we can catch all errors + var errorListener = function(err) { + if( t.listeners('uncaughtException').length > 0 ) { + t.emit('uncaughtException',err); + } + else { + t.failed(err); + } + }; + process.addListener('uncaughtException', errorListener); + + var exitListener = function() { + sys.error("\n\nOoops! The process exited in the middle of the test '" + t.__name + "'\nDid you forget to finish it?\n"); + }; + process.addListener('exit', exitListener); + } + else { + sys.error(' Starting test "' + this.__name + '"'); + } + + try { + if(this._setup) { + if( this._setup.length == 0 ) { + this._setup.call(t); + afterSetup(); + } + else { + this._setup.call(t, afterSetup, t); + } + } + else { + afterSetup(); + } + } + catch(err) { + t.failed(err); + } + + function afterSetup() { + t.run(); + + if( !wait ) { + suite.runTest(testIndex+1); + } + } + + function finishedCallback(numAssertions) { + var teardownCallback = function() { + suite.numAssertions += numAssertions; + suite.numFinishedTests++; + + if( wait ) { + process.binding('stdio').writeError(t.__symbol); + process.removeListener('uncaughtException', errorListener); + process.removeListener('exit', exitListener); + suite.runTest(testIndex+1); + } + + if( suite.numFinishedTests == suite.tests.length ) { + suite.finish(); + } + } + + try { + if(suite._teardown) { + t.__phase = 'teardown'; + if( suite._teardown.length == 0 ) { + suite._teardown.call(t); + teardownCallback(); + } + else { + suite._teardown.call(t, teardownCallback, t); + } + } + else { + teardownCallback(); + } + } + catch(err) { + t.failed(err); + teardownCallback(); + } + } +}; + +exports.runSuites = function(module, callback) { + var suites = []; + + for( var suiteName in module ) { + var suite = module[suiteName]; + + if(suite && suite.nodeAsyncTesting == 42) { + suite.name = suiteName; + suites.push(suite); + } + } + + var stats = { + numSuites: 0, + numFailed: 0 + }; + + function runNextSuite() { + if( suites.length < 1 ) { + return callback ? 
callback(stats) : null; + } + var suite = suites.shift(); + suite.runTests(function() { + if( suites.length > 0 ) { + sys.error('----------------------------------\n'); + } + stats.numSuites++; + if( suite.numFailedTests > 0 ) { + stats.numFailed++; + } + runNextSuite(); + }); + } + + sys.puts(''); + runNextSuite(); +}; diff --git a/node_modules/.npm/zfs/0.1.3/package/lib/zfs.js b/node_modules/.npm/zfs/0.1.3/package/lib/zfs.js new file mode 100644 index 0000000..fd3c457 --- /dev/null +++ b/node_modules/.npm/zfs/0.1.3/package/lib/zfs.js @@ -0,0 +1,375 @@ +var sys = require('sys') + , cp = require('child_process'); + +var execFile = cp.execFile + , spawn = cp.spawn + , puts = sys.puts + , inspect = sys.inspect; + +/** + * ZFS utilities paths + */ + +var ZPOOL_PATH = '/sbin/zpool' + , ZFS_PATH = '/sbin/zfs'; + +exports.zpool = zpool = function () { } + +// if zfs commands take longer than timeoutDuration it's an error +timeoutDuration = exports.timeoutDuration = 5000; + +zpool.listFields_ = + [ 'name', 'size', 'allocated', 'free', 'cap', 'health', 'altroot' ]; + +zpool.list = function () { + var pool, callback; + switch (arguments.length) { + case 1: + callback = arguments[0]; + break; + case 2: + pool = arguments[0]; + callback = arguments[1]; + break; + default: + throw Error('Invalid arguments'); + } + var args = ['list', '-H', '-o', zpool.listFields_.join(',')]; + if (pool) args.push(pool); + + execFile(ZPOOL_PATH, args, { timeout: timeoutDuration }, + function (error, stdout, stderr) { + stdout = stdout.trim(); + if (error) { + return callback(stderr.toString()); + } + if (stdout == "no pools available\n") { + callback(error, zfs.listFields_, []); + return; + } + lines = parseTabSeperatedTable(stdout); + callback(null, zpool.listFields_, lines); + }); +}; + +function parseTabSeperatedTable(data) { + var i, l, lines = data.split("\n"); + for (i=0, l=lines.length; i < l; i++) { + lines[i] = lines[i].split("\t"); + } + return lines; +} + +exports.zfs = zfs = function () {} + +zfs.create = function (name, callback) { + if (arguments.length != 2) { + throw Error('Invalid arguments'); + } + execFile + ( ZFS_PATH + , ['create', name] + , { timeout: timeoutDuration } + , function (error, stdout, stderr) { + if (error) { + return callback(new Error(stderr.toString())); + } + callback(); + } + ); +} + +zfs.set = function (name, properties, callback) { + if (arguments.length != 3) { + throw Error('Invalid arguments'); + } + + var keys = Object.keys(properties); + + // loop over and set all the properties using chained callbacks + (function () { + var next = arguments.callee; + if (!keys.length) { + callback(); + return; + } + var key = keys.pop(); + + execFile(ZFS_PATH, ['set', key + '=' + properties[key], name], + { timeout: timeoutDuration }, + function (error, stdout, stderr) { + if (error) { + return callback(new Error(stderr.toString())); + } + next(); // loop by calling enclosing function + }); + })(); +} + +zfsGetRegex = new RegExp("^([^\t]+)\t([^\t]+)\t(.+)"); +zfs.get = function (name, propNames, callback) { + if (arguments.length != 3) { + throw Error("Invalid arguments"); + } + + execFile(ZFS_PATH, + ['get', '-H', '-o', 'source,property,value', propNames.join(','), name], + { timeout: timeoutDuration }, + function (error, stdout, stderr) { + if (error) { + return callback(new Error(stderr.toString())); + } + var properties = {}; + + // Populate the properties hash with regexp match groups from each line. 
+ // Break on first empty line + var lines = stdout.split("\n"); + var i,l,m; + for (i=0,l=lines.length;i 0, "zfs list is empty"); + assert.ok(list.some(function (i) { return i[0] === name; }), + "zfs dataset doesn't exist"); + callback(); + }); +} + +function assertDatasetDoesNotExist(assert, name, callback) { + var listFunc = name.indexOf('@') === -1 + ? zfs.list + : zfs.list_snapshots; + listFunc(name, function (err, fields, list) { + assert.ok(err, "Should get an error message here"); + assert.ok(err.toString().match(/does not exist/), "received unexpected error message " + err.msg); + assert.ok(!list, "zfs list is empty"); + callback(); + }); +} + +var zfsName = process.argv[2] || 'nodezfstest/test'; +var zpoolName = zfsName.split('/')[0]; +var testFilename = '/' + zfsName + '/mytestfile'; +var testData = "Dancing is forbidden!"; +var testDataModified = "Chicken arise! Arise chicken! Arise!"; +var suite = new TestSuite("node-zfs unit tests"); + +var tests = [ + { 'pre check': + function (assert, finished) { + zpool.list(function (err, fields, list) { + assert.ok(list, 'zpools list was empty or did not have a value'); + assert.ok(list.length > 0, "zpool list is empty"); + assert.ok( + list.some(function (i) { return i[0] == zpoolName; }), + "zpool doesn't exist"); + + zfs.list(function (err, fields, list) { + assert.ok(list, 'zfs list was empty or did not have a value'); + assert.ok(list.length > 0, "zfs list is empty"); + assert.ok( + !list.some(function (i) { return i[0] == zfsName; }), + "zfs dataset already exists"); + + finished(); + }); + }); + } + } +, { 'create a dataset': + function (assert, finished) { + zfs.create(zfsName, function () { + assertDatasetExists(assert, zfsName, function() { + fs.writeFile(testFilename, testData, function (error) { + if (error) throw error; + finished(); + }); + }); + }); + } + } +, { "set'ing a property": + function (assert, finished) { + var properties = { 'test:property1': "foo\tbix\tqube" + , 'test:property2': 'baz' + }; + zfs.set(zfsName, properties, finished); + } + } +, { "get'ing a property": + function (assert, finished) { + zfs.get(zfsName, + ['test:property1', 'test:property2'], + function (err, properties) { + assert.ok(properties, "Didn't get any properties back"); + assert.equal(properties['test:property1'], "foo\tbix\tqube", + "Property 'test:property1' should be 'foo'"); + assert.equal(properties['test:property2'], 'baz', + "Property 'test:property2' should be 'baz'"); + finished(); + }); + } + } +, { "snapshot a dataset": + function (assert, finished) { + var snapshotName = zfsName + '@mysnapshot'; + zfs.snapshot(snapshotName, function (error, stdout, stderr) { + if (error) throw error; + assertDatasetExists(assert, snapshotName, function () { + // check that the snapshot appears in the `list_snapshots` list + zfs.list_snapshots(function (err, fields, lines) { + assert.ok( + lines.some(function (i) { return i[0] === snapshotName; }), + "snapshot didn't appear in list of snapshots"); + + // check that the snapshot didn't appear in the `list` list + zfs.list(function (err, fields, lines) { + assert.ok( + !lines.some(function (i) { return i[0] === snapshotName; }), + "snapshot appeared in `list` command"); + finished(); + }); + }); + }); + }); + } + } +, { 'recursive list of datasets': + function (assert, finished) { + zfs.list(zfsName, { recursive: true, type: 'all' }, function (error, fields, list) { + console.dir(list); + assert.equal(list.length, 2, "The list should have 2 elements"); + assert.ok(inList(zfsName, list), "Should 
have found our dataset"); + assert.ok(inList(zfsName + '@mysnapshot', list), "Should have found child dataset"); + finished(); + function inList(needle, haystack) { + return haystack.some(function (i) { + return needle == i[0]; + }); + } + }); + } + } +, { 'send a snapshot to a file': + function(assert, finished) { + var snapshotName = zfsName + '@mysnapshot'; + var snapshotFilename = '/tmp/node-zfs-test-snapshot.zfs'; + zfs.send(snapshotName, snapshotFilename, function () { + path.exists(snapshotFilename, function (exists) { + assert.ok(exists, "Snapshot file should exist"); + finished(); + }); + }); + } + } +, { 'receive a snapshot from a file': + function(assert, finished) { + var datasetName = zfsName + '/from_receive'; + var snapshotFilename = '/tmp/node-zfs-test-snapshot.zfs'; + zfs.receive(datasetName, snapshotFilename, function (err) { + if (err) throw err; + assertDatasetExists(assert, datasetName, function () { + path.exists('/'+datasetName+'/mytestfile', function (exists) { + assert.ok(exists, "My file should still exist"); + + fs.readFile('/'+datasetName+'/mytestfile', function (err, str) { + if (err) throw err; + assert.equal(str.toString(), testData); + finished(); + }); + }); + }); + }); + } + } +, { 'rolling back a snapshot': + function (assert, finished) { + var snapshotName = zfsName + '@mysnapshot'; + fs.writeFile(testFilename, testDataModified, + function (error) { + if (error) throw error; + fs.readFile(testFilename, function (err, str) { + if (err) throw err; + assert.equal(str.toString(), testDataModified); + zfs.rollback(snapshotName, function (err, stdout, stderr) { + if (err) throw err; + fs.readFile(testFilename, function (err, str) { + assert.equal(str.toString(), testData); + finished(); + }); + }); + }); + }); + } + } +, { 'clone a dataset': + function (assert, finished) { + var snapshotName = zfsName + '@mysnapshot'; + var cloneName = zpoolName + '/' + 'myclone'; + zfs.clone(snapshotName, cloneName, function (err, stdout, stderr) { + assertDatasetExists(assert, cloneName, finished); + }); + } + } +, { 'destroy a clone': + function (assert, finished) { + var cloneName = zpoolName + '/' + 'myclone'; + assertDatasetExists(assert, cloneName, function () { + zfs.destroy(cloneName, function (err, stdout, stderr) { + assertDatasetDoesNotExist(assert, cloneName, finished); + }); + }); + } + } +, { "destroying a snapshot": + function (assert, finished) { + var snapshotName = zfsName + '@mysnapshot'; + assertDatasetExists(assert, snapshotName, function () { + zfs.destroy(snapshotName, function (err, stdout, stderr) { + assertDatasetDoesNotExist(assert, snapshotName, finished); + }); + }); + } + } +, { 'destroy a dataset': + function (assert, finished) { + zfs.destroyAll(zfsName, function (err, stdout, stderr) { + assertDatasetDoesNotExist(assert, zfsName, finished); + }); + } + } +, { 'destroy all datasets': + function (assert, finished) { + zfs.destroyAll(zfsName, function (err, stdout, stderr) { + assertDatasetDoesNotExist(assert, zfsName, finished); + }); + } + } +, { "list errors": + function (assert, finished) { + var datasetName = 'thisprobably/doesnotexist'; + assertDatasetDoesNotExist(assert, datasetName, function () { + zfs.list(datasetName, function (err, fields, list) { + assert.ok(err); + assert.ok(err.toString().match(/does not exist/), + 'Could list snashot that should not exist'); + finished(); + }); + }); + } + } +, { "delete errors": + function (assert, finished) { + var datasetName = 'thisprobably/doesnotexist'; + assertDatasetDoesNotExist(assert, 
datasetName, function () { + zfs.destroy(datasetName, function (err, stdout, stderr) { + assert.ok(err, "Expected an error deleting nonexistant dataset"); + assert.ok(err.toString().match(/does not exist/), + 'Error message did not indicate that dataset does not exist'); + finished(); + }); + }); + } + } +]; + +var testCount = tests.length; + +// order matters in our tests +for (i in tests) { + suite.addTests(tests[i]); +} + +suite.runTests(); diff --git a/node_modules/.npm/zfs/active b/node_modules/.npm/zfs/active new file mode 120000 index 0000000..f476a1d --- /dev/null +++ b/node_modules/.npm/zfs/active @@ -0,0 +1 @@ +./0.1.3 \ No newline at end of file diff --git a/node_modules/async b/node_modules/async new file mode 120000 index 0000000..29309cc --- /dev/null +++ b/node_modules/async @@ -0,0 +1 @@ +./async@0.1.8 \ No newline at end of file diff --git a/node_modules/async@0.1.8/async.js b/node_modules/async@0.1.8/async.js new file mode 100755 index 0000000..0ab0dcb --- /dev/null +++ b/node_modules/async@0.1.8/async.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! +var dep = require('path').join(__dirname, "./../.npm/async/0.1.8/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../.npm/async/0.1.8/package/lib/async" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/node_modules/async@0.1.8/index.js b/node_modules/async@0.1.8/index.js new file mode 100755 index 0000000..0b597b8 --- /dev/null +++ b/node_modules/async@0.1.8/index.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! 
+var dep = require('path').join(__dirname, "./../.npm/async/0.1.8/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../.npm/async/0.1.8/package/index" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/node_modules/async@0.1.8/package.json.js b/node_modules/async@0.1.8/package.json.js new file mode 100644 index 0000000..3d88afc --- /dev/null +++ b/node_modules/async@0.1.8/package.json.js @@ -0,0 +1,102 @@ +module.exports = { + "name": "async", + "description": "Higher-order functions and common patterns for asynchronous code", + "author": { + "name": "Caolan McMahon" + }, + "version": "0.1.8", + "repository": { + "type": "git", + "url": "http://github.com/caolan/async.git" + }, + "bugs": { + "url": "http://github.com/caolan/async/issues" + }, + "licenses": [ + { + "type": "MIT", + "url": "http://github.com/caolan/async/raw/master/LICENSE" + } + ], + "_id": "async@0.1.8", + "engines": { + "node": "*" + }, + "_engineSupported": true, + "_npmVersion": "0.2.18", + "_nodeVersion": "v0.4.9", + "directories": { + "lib": "./lib" + }, + "modules": { + "async.js": "lib/async.js", + "index.js": "./index" + }, + "files": [ + "" + ], + "_defaultsLoaded": true, + "dist": { + "shasum": "aeeba6a28d21747ad445fb1915eed286060d79e6" + }, + "_bundledDeps": [], + "_resolvedDeps": [], + "_npmConfig": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "binroot": "/root/nfs/sdc-import-vm/node_modules/.bin", + "manroot": null, + "must-install": false, + "showlevel": 1, + "argv": { + "remain": [ + "/root/nfs/async/" + ], + "cooked": [ + "bundle", + "install", + "/root/nfs/async/" + ], + "original": [ + "bundle", + "install", + "/root/nfs/async/" + ] + }, + "email": "ovazquez@gmail.com", + "tar": "gtar", + "auto-activate": "always", + "auto-deactivate": true, + "browser": "open", + "color": true, + "description": true, + "dev": false, + "dotnpm": ".npm", + "force": false, + "globalconfig": "/usr/etc/npmrc", + "gzipbin": "gzip", + "listopts": "", + "logfd": 2, + "loglevel": "info", + "node-version": "v0.4.9", + "onload-script": false, + "outfd": 1, + "proxy": null, + "rebuild-bundle": true, + "recursive": false, + "registry": "http://registry.npmjs.org/", + "tag": "latest", + "tmproot": "/tmp", + "update-dependents": true, + "userconfig": "/root/.npmrc" + }, + "_env": {}, + "_npmPaths": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "dir": "/root/nfs/sdc-import-vm/node_modules/.npm", + "cache": "/root/nfs/sdc-import-vm/node_modules/.npm/.cache", + "tmp": "/tmp/npm-1313565209572", + "package": "/root/nfs/sdc-import-vm/node_modules/.npm/async/0.1.8/package", + "modules": "/root/nfs/sdc-import-vm/node_modules/async@0.1.8", + "dependencies": "/root/nfs/sdc-import-vm/node_modules/.npm/async/0.1.8/node_modules" + } +} diff --git a/node_modules/node-uuid b/node_modules/node-uuid new file mode 120000 index 0000000..95c69b5 --- /dev/null +++ b/node_modules/node-uuid @@ -0,0 +1 @@ +./node-uuid@1.2.0 \ No newline at end of file diff --git a/node_modules/node-uuid@1.2.0/index.js b/node_modules/node-uuid@1.2.0/index.js new file mode 100755 index 0000000..3091ea9 --- /dev/null +++ 
b/node_modules/node-uuid@1.2.0/index.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! +var dep = require('path').join(__dirname, "./../.npm/node-uuid/1.2.0/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../.npm/node-uuid/1.2.0/package/uuid" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/node_modules/node-uuid@1.2.0/package.json.js b/node_modules/node-uuid@1.2.0/package.json.js new file mode 100644 index 0000000..6b6af6f --- /dev/null +++ b/node_modules/node-uuid@1.2.0/package.json.js @@ -0,0 +1,99 @@ +module.exports = { + "name": "node-uuid", + "description": "Simple, fast generation of RFC4122(v4) UUIDs.", + "url": "http://github.com/broofa/node-uuid", + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "author": { + "name": "Robert Kieffer", + "email": "robert@broofa.com" + }, + "contributors": [], + "dependencies": {}, + "lib": ".", + "version": "1.2.0", + "_id": "node-uuid@1.2.0", + "devDependencies": {}, + "engines": { + "node": "*" + }, + "_engineSupported": true, + "_npmVersion": "0.2.18", + "_nodeVersion": "v0.4.9", + "_defaultsLoaded": true, + "dist": { + "shasum": "3ea8135493b747c21fcfc7c015a2eae496b76d7b", + "tarball": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.2.0.tgz" + }, + "scripts": {}, + "directories": {}, + "files": [ + "" + ], + "_bundledDeps": [], + "_resolvedDeps": [], + "modules": { + "index.js": "./uuid" + }, + "_npmConfig": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "binroot": "/root/nfs/sdc-import-vm/node_modules/.bin", + "manroot": null, + "must-install": false, + "showlevel": 1, + "registry": "http://registry.npmjs.org/", + "argv": { + "remain": [ + "node-uuid" + ], + "cooked": [ + "bundle", + "install", + "node-uuid" + ], + "original": [ + "bundle", + "install", + "node-uuid" + ] + }, + "email": "ovazquez@gmail.com", + "tar": "gtar", + "auto-activate": "always", + "auto-deactivate": true, + "browser": "open", + "color": true, + "description": true, + "dev": false, + "dotnpm": ".npm", + "force": false, + "globalconfig": "/usr/etc/npmrc", + "gzipbin": "gzip", + "listopts": "", + "logfd": 2, + "loglevel": "info", + "node-version": "v0.4.9", + "onload-script": false, + "outfd": 1, + "proxy": null, + "rebuild-bundle": true, + "recursive": false, + "tag": "latest", + "tmproot": "/tmp", + "update-dependents": true, + "userconfig": "/root/.npmrc" + }, + "_env": {}, + "_npmPaths": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "dir": "/root/nfs/sdc-import-vm/node_modules/.npm", + "cache": "/root/nfs/sdc-import-vm/node_modules/.npm/.cache", + "tmp": "/tmp/npm-1313565265150", + "package": "/root/nfs/sdc-import-vm/node_modules/.npm/node-uuid/1.2.0/package", + "modules": "/root/nfs/sdc-import-vm/node_modules/node-uuid@1.2.0", + "dependencies": "/root/nfs/sdc-import-vm/node_modules/.npm/node-uuid/1.2.0/node_modules" + } +} diff --git a/node_modules/optparse b/node_modules/optparse new file mode 120000 index 0000000..5c8ff79 --- /dev/null +++ b/node_modules/optparse @@ -0,0 +1 @@ +./optparse@1.0.1 \ No newline at end of file diff --git a/node_modules/optparse@1.0.1/index.js 
b/node_modules/optparse@1.0.1/index.js new file mode 100755 index 0000000..c37fd58 --- /dev/null +++ b/node_modules/optparse@1.0.1/index.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! +var dep = require('path').join(__dirname, "./../.npm/optparse/1.0.1/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../.npm/optparse/1.0.1/package/lib/optparse" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/node_modules/optparse@1.0.1/package.json.js b/node_modules/optparse@1.0.1/package.json.js new file mode 100644 index 0000000..f39bf07 --- /dev/null +++ b/node_modules/optparse@1.0.1/package.json.js @@ -0,0 +1,100 @@ +module.exports = { + "name": "optparse", + "author": { + "name": "Johan Dahlberg" + }, + "description": "Command-line option parser", + "keywords": [ + "option", + "parser", + "command-line", + "cli", + "terminal" + ], + "version": "1.0.1", + "_id": "optparse@1.0.1", + "engines": { + "node": "*" + }, + "_engineSupported": true, + "_npmVersion": "0.2.18", + "_nodeVersion": "v0.4.9", + "_defaultsLoaded": true, + "dist": { + "shasum": "ba690f38b3187b416b20306cd0a6a4b797202df8", + "bin": { + "0.5-darwin-10.7.0": { + "shasum": "69a049c96fcdf2126733214cd7535e1485d9c67a", + "tarball": "http://registry.npmjs.org/optparse/-/optparse-1.0.1-0.5-darwin-10.7.0.tgz" + } + }, + "tarball": "http://registry.npmjs.org/optparse/-/optparse-1.0.1.tgz" + }, + "directories": {}, + "files": [ + "" + ], + "_bundledDeps": [], + "_resolvedDeps": [], + "modules": { + "index.js": "./lib/optparse" + }, + "_npmConfig": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "binroot": "/root/nfs/sdc-import-vm/node_modules/.bin", + "manroot": null, + "must-install": false, + "showlevel": 1, + "registry": "http://registry.npmjs.org/", + "argv": { + "remain": [ + "optparse" + ], + "cooked": [ + "bundle", + "install", + "optparse" + ], + "original": [ + "bundle", + "install", + "optparse" + ] + }, + "email": "ovazquez@gmail.com", + "tar": "gtar", + "auto-activate": "always", + "auto-deactivate": true, + "browser": "open", + "color": true, + "description": true, + "dev": false, + "dotnpm": ".npm", + "force": false, + "globalconfig": "/usr/etc/npmrc", + "gzipbin": "gzip", + "listopts": "", + "logfd": 2, + "loglevel": "info", + "node-version": "v0.4.9", + "onload-script": false, + "outfd": 1, + "proxy": null, + "rebuild-bundle": true, + "recursive": false, + "tag": "latest", + "tmproot": "/tmp", + "update-dependents": true, + "userconfig": "/root/.npmrc" + }, + "_env": {}, + "_npmPaths": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "dir": "/root/nfs/sdc-import-vm/node_modules/.npm", + "cache": "/root/nfs/sdc-import-vm/node_modules/.npm/.cache", + "tmp": "/tmp/npm-1313565254752", + "package": "/root/nfs/sdc-import-vm/node_modules/.npm/optparse/1.0.1/package", + "modules": "/root/nfs/sdc-import-vm/node_modules/optparse@1.0.1", + "dependencies": "/root/nfs/sdc-import-vm/node_modules/.npm/optparse/1.0.1/node_modules" + } +} diff --git a/node_modules/sax b/node_modules/sax new file mode 120000 index 0000000..3307e15 --- /dev/null +++ b/node_modules/sax @@ 
-0,0 +1 @@ +./sax@0.2.3 \ No newline at end of file diff --git a/node_modules/sax@0.2.3/index.js b/node_modules/sax@0.2.3/index.js new file mode 100755 index 0000000..3f9cd2b --- /dev/null +++ b/node_modules/sax@0.2.3/index.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! +var dep = require('path').join(__dirname, "./../.npm/sax/0.2.3/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../.npm/sax/0.2.3/package/lib/sax" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/node_modules/sax@0.2.3/package.json.js b/node_modules/sax@0.2.3/package.json.js new file mode 100644 index 0000000..5c3f767 --- /dev/null +++ b/node_modules/sax@0.2.3/package.json.js @@ -0,0 +1,105 @@ +module.exports = { + "name": "sax", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me" + }, + "version": "0.2.3", + "license": "MIT", + "scripts": { + "test": "node test/index.js" + }, + "_npmJsonOpts": { + "file": "/Users/isaacs/.npm/sax/0.2.3/package/package.json", + "wscript": false, + "contributors": false, + "serverjs": false + }, + "_id": "sax@0.2.3", + "dependencies": {}, + "devDependencies": {}, + "engines": { + "node": "*" + }, + "_engineSupported": true, + "_npmVersion": "0.2.18", + "_nodeVersion": "v0.4.9", + "_defaultsLoaded": true, + "dist": { + "shasum": "8a049a2090f2f5e67b27d1034525f04f315564a5", + "tarball": "http://registry.npmjs.org/sax/-/sax-0.2.3.tgz" + }, + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "directories": {}, + "files": [ + "" + ], + "_bundledDeps": [], + "_resolvedDeps": [], + "modules": { + "index.js": "lib/sax" + }, + "_npmConfig": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "binroot": "/root/nfs/sdc-import-vm/node_modules/.bin", + "manroot": null, + "must-install": false, + "showlevel": 1, + "registry": "http://registry.npmjs.org/", + "argv": { + "remain": [ + "sax" + ], + "cooked": [ + "bundle", + "install", + "sax" + ], + "original": [ + "bundle", + "install", + "sax" + ] + }, + "email": "ovazquez@gmail.com", + "tar": "gtar", + "auto-activate": "always", + "auto-deactivate": true, + "browser": "open", + "color": true, + "description": true, + "dev": false, + "dotnpm": ".npm", + "force": false, + "globalconfig": "/usr/etc/npmrc", + "gzipbin": "gzip", + "listopts": "", + "logfd": 2, + "loglevel": "info", + "node-version": "v0.4.9", + "onload-script": false, + "outfd": 1, + "proxy": null, + "rebuild-bundle": true, + "recursive": false, + "tag": "latest", + "tmproot": "/tmp", + "update-dependents": true, + "userconfig": "/root/.npmrc" + }, + "_env": {}, + "_npmPaths": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "dir": "/root/nfs/sdc-import-vm/node_modules/.npm", + "cache": "/root/nfs/sdc-import-vm/node_modules/.npm/.cache", + "tmp": "/tmp/npm-1313565227234", + "package": "/root/nfs/sdc-import-vm/node_modules/.npm/sax/0.2.3/package", + "modules": "/root/nfs/sdc-import-vm/node_modules/sax@0.2.3", + "dependencies": "/root/nfs/sdc-import-vm/node_modules/.npm/sax/0.2.3/node_modules" + } +} diff --git a/node_modules/xml2js b/node_modules/xml2js new file mode 120000 index 
0000000..cab450e --- /dev/null +++ b/node_modules/xml2js @@ -0,0 +1 @@ +./xml2js@0.1.9 \ No newline at end of file diff --git a/node_modules/xml2js@0.1.9/index.js b/node_modules/xml2js@0.1.9/index.js new file mode 100755 index 0000000..f54eef1 --- /dev/null +++ b/node_modules/xml2js@0.1.9/index.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! +var dep = require('path').join(__dirname, "./../.npm/xml2js/0.1.9/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../.npm/xml2js/0.1.9/package/lib/xml2js" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/node_modules/xml2js@0.1.9/package.json.js b/node_modules/xml2js@0.1.9/package.json.js new file mode 100644 index 0000000..2182c1f --- /dev/null +++ b/node_modules/xml2js@0.1.9/package.json.js @@ -0,0 +1,138 @@ +module.exports = { + "name": "xml2js", + "description": "Simple XML to JavaScript object converter.", + "keywords": [ + "xml", + "json" + ], + "homepage": "https://github.com/Leonidas-from-XIV/node-xml2js", + "version": "0.1.9", + "author": { + "name": "Marek Kubica", + "email": "marek@xivilization.net", + "url": "http://xivilization.net" + }, + "contributors": [ + { + "name": "maqr", + "email": "maqr.lollerskates@gmail.com", + "url": "https://github.com/maqr" + }, + { + "name": "Ben Weaver", + "url": "http://benweaver.com/" + }, + { + "name": "Jae Kwon", + "url": "https://github.com/jaekwon" + }, + { + "name": "Jim Robert" + } + ], + "directories": { + "lib": "./lib" + }, + "repository": { + "type": "git", + "url": "git://github.com/Leonidas-from-XIV/node-xml2js.git" + }, + "dependencies": { + "sax": ">=0.1.1" + }, + "devDependencies": { + "coffee-script": ">=1.0.1", + "zap": ">=0.2.3" + }, + "_npmJsonOpts": { + "file": "/home/marek/.npm/xml2js/0.1.9/package/package.json", + "wscript": false, + "contributors": false, + "serverjs": false + }, + "_id": "xml2js@0.1.9", + "engines": { + "node": "*" + }, + "_engineSupported": true, + "_npmVersion": "0.2.18", + "_nodeVersion": "v0.4.9", + "_defaultsLoaded": true, + "dist": { + "shasum": "79c857cf7474f6be604534a33f85cee72d450e3f", + "tarball": "http://registry.npmjs.org/xml2js/-/xml2js-0.1.9.tgz" + }, + "scripts": {}, + "files": [ + "" + ], + "_bundledDeps": [], + "_resolvedDeps": [ + { + "name": "sax", + "version": "0.2.3" + } + ], + "modules": { + "index.js": "./lib/xml2js" + }, + "_npmConfig": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "binroot": "/root/nfs/sdc-import-vm/node_modules/.bin", + "manroot": null, + "must-install": false, + "showlevel": 1, + "registry": "http://registry.npmjs.org/", + "argv": { + "remain": [ + "xml2js", + "sax@>=0.1.1" + ], + "cooked": [ + "bundle", + "install", + "xml2js" + ], + "original": [ + "bundle", + "install", + "xml2js" + ] + }, + "email": "ovazquez@gmail.com", + "tar": "gtar", + "auto-activate": "always", + "auto-deactivate": true, + "browser": "open", + "color": true, + "description": true, + "dev": false, + "dotnpm": ".npm", + "force": false, + "globalconfig": "/usr/etc/npmrc", + "gzipbin": "gzip", + "listopts": "", + "logfd": 2, + "loglevel": "info", + "node-version": "v0.4.9", 
+ "onload-script": false, + "outfd": 1, + "proxy": null, + "rebuild-bundle": true, + "recursive": false, + "tag": "latest", + "tmproot": "/tmp", + "update-dependents": true, + "userconfig": "/root/.npmrc" + }, + "_env": {}, + "_npmPaths": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "dir": "/root/nfs/sdc-import-vm/node_modules/.npm", + "cache": "/root/nfs/sdc-import-vm/node_modules/.npm/.cache", + "tmp": "/tmp/npm-1313565244904", + "package": "/root/nfs/sdc-import-vm/node_modules/.npm/xml2js/0.1.9/package", + "modules": "/root/nfs/sdc-import-vm/node_modules/xml2js@0.1.9", + "dependencies": "/root/nfs/sdc-import-vm/node_modules/.npm/xml2js/0.1.9/node_modules" + } +} diff --git a/node_modules/zfs b/node_modules/zfs new file mode 120000 index 0000000..da30ea1 --- /dev/null +++ b/node_modules/zfs @@ -0,0 +1 @@ +./zfs@0.1.3 \ No newline at end of file diff --git a/node_modules/zfs@0.1.3/async_testing.js b/node_modules/zfs@0.1.3/async_testing.js new file mode 100755 index 0000000..89652e8 --- /dev/null +++ b/node_modules/zfs@0.1.3/async_testing.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! +var dep = require('path').join(__dirname, "./../.npm/zfs/0.1.3/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../.npm/zfs/0.1.3/package/lib/async_testing" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/node_modules/zfs@0.1.3/index.js b/node_modules/zfs@0.1.3/index.js new file mode 100755 index 0000000..ecf8e08 --- /dev/null +++ b/node_modules/zfs@0.1.3/index.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! 
+var dep = require('path').join(__dirname, "./../.npm/zfs/0.1.3/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../.npm/zfs/0.1.3/package/lib/zfs" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/node_modules/zfs@0.1.3/package.json.js b/node_modules/zfs@0.1.3/package.json.js new file mode 100644 index 0000000..550d558 --- /dev/null +++ b/node_modules/zfs@0.1.3/package.json.js @@ -0,0 +1,94 @@ +module.exports = { + "name": "zfs", + "description": "Node library for interacting with ZFS utilities", + "version": "0.1.3", + "author": { + "name": "Joyent", + "url": "joyent.com" + }, + "directories": { + "lib": "lib" + }, + "scripts": { + "test": "./run-tests" + }, + "_id": "zfs@0.1.3", + "engines": { + "node": "*" + }, + "_engineSupported": true, + "_npmVersion": "0.2.18", + "_nodeVersion": "v0.4.9", + "modules": { + "zfs.js": "lib/zfs.js", + "async_testing.js": "lib/async_testing.js", + "index.js": "lib/zfs" + }, + "files": [ + "" + ], + "_defaultsLoaded": true, + "dist": { + "shasum": "32a3e89e69a3353f31612dc7ba88530a3074689f" + }, + "_bundledDeps": [], + "_resolvedDeps": [], + "_npmConfig": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "binroot": "/root/nfs/sdc-import-vm/node_modules/.bin", + "manroot": null, + "must-install": false, + "showlevel": 1, + "argv": { + "remain": [ + "/root/nfs/node_zfs" + ], + "cooked": [ + "bundle", + "install", + "/root/nfs/node_zfs" + ], + "original": [ + "bundle", + "install", + "/root/nfs/node_zfs" + ] + }, + "email": "ovazquez@gmail.com", + "tar": "gtar", + "auto-activate": "always", + "auto-deactivate": true, + "browser": "open", + "color": true, + "description": true, + "dev": false, + "dotnpm": ".npm", + "force": false, + "globalconfig": "/usr/etc/npmrc", + "gzipbin": "gzip", + "listopts": "", + "logfd": 2, + "loglevel": "info", + "node-version": "v0.4.9", + "onload-script": false, + "outfd": 1, + "proxy": null, + "rebuild-bundle": true, + "recursive": false, + "registry": "http://registry.npmjs.org/", + "tag": "latest", + "tmproot": "/tmp", + "update-dependents": true, + "userconfig": "/root/.npmrc" + }, + "_env": {}, + "_npmPaths": { + "root": "/root/nfs/sdc-import-vm/node_modules", + "dir": "/root/nfs/sdc-import-vm/node_modules/.npm", + "cache": "/root/nfs/sdc-import-vm/node_modules/.npm/.cache", + "tmp": "/tmp/npm-1313565205110", + "package": "/root/nfs/sdc-import-vm/node_modules/.npm/zfs/0.1.3/package", + "modules": "/root/nfs/sdc-import-vm/node_modules/zfs@0.1.3", + "dependencies": "/root/nfs/sdc-import-vm/node_modules/.npm/zfs/0.1.3/node_modules" + } +} diff --git a/node_modules/zfs@0.1.3/zfs.js b/node_modules/zfs@0.1.3/zfs.js new file mode 100755 index 0000000..ecf8e08 --- /dev/null +++ b/node_modules/zfs@0.1.3/zfs.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +// generated by npm, please don't touch! 
+var dep = require('path').join(__dirname, "./../.npm/zfs/0.1.3/node_modules") +var depMet = require.paths.indexOf(dep) !== -1 +var bundle = dep.replace(/node_modules$/, 'package/node_modules') +var bundleMet = require.paths.indexOf(bundle) !== -1 +var from = "./../.npm/zfs/0.1.3/package/lib/zfs" + +if (!depMet) require.paths.unshift(dep) +if (!bundleMet) require.paths.unshift(bundle) +module.exports = require(from) + +if (!depMet) { + var i = require.paths.indexOf(dep) + if (i !== -1) require.paths.splice(i, 1) +} +if (!bundleMet) { + var i = require.paths.indexOf(bundle) + if (i !== -1) require.paths.slice(i, 1) +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..247291b --- /dev/null +++ b/package.json @@ -0,0 +1,13 @@ +{ + "name": "sdc-convertvm", + "description": "SmartDC VM Conversion Tools", + "version": "1.0.0", + "author": "Joyent (joyent.com)", + "main": "bin/sdc-convertvm", + "directories": { + "lib": "lib", + "bin": "bin", + "node_modules": "node_modules" + }, + "bin": { "sdc-convertvm": "./bin/sdc-convertvm.js" } +}