This repository has been archived by the owner on Jul 8, 2021. It is now read-only.
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
4 changed files
with
308 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
*.swp |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
Dependencies
------------ | ||
|
||
* node-compress http://github.com/egorich239/node-compress | ||
* |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,168 @@ | ||
// Hoisted declarations for every module-level binding in this file, plus
// the CoffeeScript-compiler runtime helpers.
var Asset, Buffer, Gzip, Package, Parallel, coffeescript, compiler, compressGzip, fs, makePackage, path, resolveContents, watch;
// Cached Array#slice, used to turn `arguments` objects into real arrays.
var __slice = Array.prototype.slice;
// Returns a wrapper that calls `func` with `this` fixed to `obj` (or a
// fresh empty object when `obj` is falsy). When `args` is given, those
// values are prepended to the call-site arguments; otherwise the raw
// `arguments` object is forwarded untouched.
var __bind = function(func, obj, args) {
  return function() {
    var callArgs = __slice.call(arguments, 0);
    var finalArgs = args ? args.concat(callArgs) : arguments;
    return func.apply(obj || {}, finalArgs);
  };
};
Gzip = require('node-compress').Gzip; | ||
coffeescript = null; | ||
compiler = require('closure-compiler').compile; | ||
Buffer = require('buffer').Buffer; | ||
Parallel = require('parallel').Parallel; | ||
fs = require('fs'); | ||
path = require('path'); | ||
/**
 * Package: bundles a set of input assets into one output file.
 *
 * @param output  Path of the file the bundle is written to.
 * @param input   A path (string) or array of paths to include.
 * @param options Object with `compress`, `compile`, `watch` flags and a
 *                `type` extension tag (default 'js'). Each flag defaults
 *                to ON: it is enabled when set to `true` or left out
 *                (null/undefined), and disabled for any other value
 *                (false, 0, 'yes', ...), matching the original semantics.
 */
var Package = function(output, input, options) {
  this.filename = output;
  this.contents = input;
  // `x === true || x == null` replaces the original's
  // `x === true || !(typeof (_a = x) !== "undefined" && _a !== null) ? true : false`
  // construction; `== null` deliberately matches both null and undefined.
  this.compress = options.compress === true || options.compress == null;
  this.compile = options.compile === true || options.compile == null;
  this.watch = options.watch === true || options.watch == null;
  this.type = options.type ? options.type : 'js';
  return this;
};

// Append one item to the package contents, coercing non-array contents
// (e.g. a single path string) into a fresh empty array first.
Package.prototype.add = function(item) {
  if (!(this.contents instanceof Array)) {
    this.contents = [];
  }
  return this.contents.push(item);
};

// Resolve the configured paths into Assets, keep only those whose type
// matches this package's type, build the bundle, and optionally begin
// watching for changes.
Package.prototype.serve = function() {
  return resolveContents(this.contents, __bind(function(files, dirs) {
    var matching = [];
    files.forEach(__bind(function(asset) {
      if (asset.type === this.type) {
        matching.push(asset);
      }
    }, this));
    this.contents = matching;
    this.dirs = dirs;
    makePackage(this);
    if (this.watch === true) {
      return watch(this);
    }
  }, this));
};
|
||
// Public API: expose the Package constructor to requiring modules.
exports.Package = Package;
/**
 * Build a package: read every asset in parallel, concatenate the sources,
 * optionally compile (closure-compiler) and/or gzip the result, then
 * write it to pkg.filename as binary.
 *
 * Fixes over the original:
 *  - `package` is a reserved word in strict-mode / ES-module JavaScript;
 *    the parameter is renamed `pkg` (positional, callers unaffected).
 *  - The compile/compress/write helpers were only created when pkg.type
 *    was 'js' or 'coffee', so any other type crashed on an undefined
 *    `write` once reading finished; the helpers are now always defined
 *    and only the closure-compiler step is gated on type.
 *  - A failed fs.readFile left `data` undefined and crashed on
 *    data.toString(); unreadable assets are now skipped.
 *
 * @param pkg Package-like object with `contents` (array of Assets, each
 *            carrying a `path`), `type`, `compile`, `compress`, and
 *            `filename` fields.
 */
var makePackage = function(pkg) {
  var isJsLike, read_task, result, runCompiler, runGzip, write;
  // Final stage: flush the (possibly transformed) bundle to disk.
  write = function(data) {
    return fs.writeFile(pkg.filename, data, 'binary');
  };
  // Gzip the bundle, then write it.
  runGzip = function(data) {
    return compressGzip(data, function(zipped) {
      return write(zipped);
    });
  };
  // Minify via closure-compiler, then hand off to compression or writing.
  runCompiler = function(data) {
    return compiler(data, function(minified) {
      return pkg.compress ? runGzip(minified) : write(minified);
    });
  };
  // closure-compiler only understands JavaScript (or compiled CoffeeScript).
  isJsLike = pkg.type === 'js' || pkg.type === 'coffee';
  result = '';
  read_task = new Parallel();
  pkg.contents.forEach(function(asset) {
    return read_task.add(asset.path, [fs.readFile, asset.path]);
  });
  // Parallel invokes the callback once per finished read, then a final
  // time with filename === null when every job is done (observed contract
  // of the `parallel` module -- confirm against its docs).
  return read_task.run(function(filename, err, data) {
    if (filename !== null) {
      if (err) {
        // Unreadable asset: skip it rather than crash on undefined data.
        return null;
      }
      return result += data.toString() + "\n";
    }
    if (pkg.type === 'coffee') {
      // no_wrap keeps the concatenated sources in one shared scope.
      result = require('coffee-script').compile(result, {
        no_wrap: true
      });
    }
    if (pkg.compile && isJsLike) {
      return runCompiler(result);
    } else if (pkg.compress) {
      return runGzip(result);
    } else {
      return write(result);
    }
  });
};
/**
 * Asset: one entry discovered while resolving package contents.
 *
 * @param pathname Filesystem path of the entry.
 * @param dir      Pass `true` to mark the asset as a directory; any other
 *                 value creates a file asset.
 *
 * File assets additionally carry `type`: the extension without its
 * leading dot (e.g. 'js', 'coffee'), used to filter bundle contents.
 * The original duplicated the `this.path` assignment in both branches;
 * it is hoisted here (same behavior, no repetition).
 */
var Asset = function(pathname, dir) {
  this.path = pathname;
  this.dir = dir === true;
  if (!this.dir) {
    this.type = path.extname(pathname).slice(1);
  }
  return this;
};
|
||
/**
 * Resolve a package's raw contents (a path or array of paths) into Asset
 * objects, splitting plain files from directories, then invoke
 * callback(files, dirs).
 *
 * Directories are expanded one level only (non-recursive); unreadable
 * paths are skipped silently (best-effort).
 */
resolveContents = function(input, callback) {
  var lookup_task, results;
  // Accept a single path string as a convenience; normalize to an array.
  'string' === typeof input ? (input = [input]) : null;
  lookup_task = new Parallel();
  input.forEach(function(pathname) {
    return lookup_task.add(pathname, [fs.stat, pathname]);
  });
  results = [];
  // Parallel fires the callback once per finished stat, then a final time
  // with name === null when all jobs are done (observed contract of the
  // `parallel` module -- confirm against its docs).
  return lookup_task.run(function(name, err, stats) {
    var dirs, files;
    if (name === null) {
      // All stat calls finished: split the collected assets.
      dirs = [];
      files = [];
      results.forEach(function(asset) {
        if (asset.dir === true) {
          return dirs.push(asset);
        } else {
          return files.push(asset);
        }
      });
      if (dirs.length > 0) {
        // Second pass: expand each directory one level via readdir.
        lookup_task = new Parallel();
        dirs.forEach(function(dir) {
          return lookup_task.add(dir.path, [fs.readdir, dir.path]);
        });
        lookup_task.run(function(dir, err, paths) {
          if (dir === null) {
            // Every readdir finished: deliver the final partition.
            callback(files, dirs);
          } else if (err) {
            // Unreadable directory: skip it silently.
            return null;
          } else {
            // NOTE(review): entries found here are treated as plain files;
            // nested directories are never stat'd or recursed into.
            paths.forEach(function(pathname) {
              return files.push(new Asset(path.join(dir, pathname)));
            });
          }
        });
      } else {
        callback(files, dirs);
      }
    } else {
      if (err) {
        // Missing/unreadable path: skip it silently.
        return null;
      }
      stats.isDirectory() ? results.push(new Asset(name, true)) : results.push(new Asset(name));
    }
  });
};
/**
 * Gzip-compress a binary string and pass the compressed bytes to
 * `callback`. Throws on any compression error.
 */
var compressGzip = function(data, callback) {
  // Copy the binary string into a Buffer sized to its exact byte length.
  var buf = new Buffer(Buffer.byteLength(data, 'binary'));
  buf.write(data, 'binary', 0);
  var gzip = new Gzip();
  // node-compress hands back the output in two chunks: the payload from
  // write() and the trailing flush from close(); the callback receives
  // their concatenation.
  return gzip.write(buf, function(err, head) {
    if (err) {
      throw err;
    }
    return gzip.close(function(closeErr, tail) {
      if (closeErr) {
        throw closeErr;
      }
      return callback(head + tail);
    });
  });
};
// Placeholder: intended to rebuild the package when its files or
// directories change. Currently a no-op.
watch = function(files, dirs) {};
// TODO: Watch files
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,134 @@ | ||
Gzip: require('node-compress').Gzip | ||
coffeescript: null | ||
compiler: require('closure-compiler').compile | ||
Buffer: require('buffer').Buffer | ||
Parallel: require('parallel').Parallel | ||
fs: require 'fs' | ||
path: require 'path' | ||
|
||
# Package: bundles a set of input assets into one output file.
# NOTE: pre-1.0 CoffeeScript syntax -- statement-level `key: value` is
# assignment and `@name: value` sets an instance property.
class Package
  # output  - path the bundle is written to
  # input   - a path (string) or array of paths to include
  # options - compress / compile / watch flags (each defaults to ON: only
  #           `true` or a missing option enables it, any other value
  #           disables) plus `type`, the asset extension to bundle
  #           (default 'js')
  constructor: (output, input, options) ->
    @filename: output
    @contents: input

    @compress: if options.compress is true or !options.compress? then true
    else false

    @compile: if options.compile is true or !options.compile? then true
    else false

    if options.watch is true or !options.watch?
      @watch: true
    else @watch: false

    @type: if options.type then options.type else 'js'

  # Append one item, coercing non-array contents (e.g. a single path
  # string) into a fresh empty array first.
  add: (item) ->
    if false is @contents instanceof Array
      @contents: []

    @contents.push item

  # Resolve the configured paths into Assets, keep only those whose type
  # matches, build the bundle, and optionally start watching.
  serve: ->
    resolveContents @contents, (files, dirs) =>
      contents: []
      files.forEach (asset) =>
        if asset.type is @type
          contents.push asset

      @contents: contents
      @dirs: dirs

      makePackage @

      if @watch is true
        watch @

# Public API: expose the constructor to requiring modules.
exports.Package: Package
|
||
# Build a package: read every asset in parallel, concatenate, optionally
# compile and/or compress, then write the result to package.filename.
# NOTE(review): `package` is a reserved word in strict-mode JavaScript;
# consider renaming the parameter when this is next touched.
makePackage: (package) ->
  read_task: new Parallel()
  package.contents.forEach (asset) ->
    read_task.add asset.path, [fs.readFile, asset.path]
  result: ''
  # Parallel calls back once per finished read, then a final time with
  # filename null when all jobs are done (observed contract -- confirm
  # against the `parallel` module's docs).
  read_task.run (filename, err, data) ->
    if filename is null
      if package.type is 'coffee'
        # no_wrap keeps the concatenated sources in one shared scope.
        result: require('coffee-script').compile result, {
          no_wrap: true
        }

      if package.compile then compile result
      else if package.compress then compress result
      else write result
    else
      # NOTE(review): `result: + data...` reads as assigning unary-plus of
      # the string (NaN), yet the compiled JS in this commit shows
      # `result += ...` -- confirm what the era's compiler actually emits.
      result: + data.toString() + "\n"
  # The helpers below are only created for js/coffee packages; any other
  # type would hit an undefined `compile`/`compress`/`write` above.
  if package.type is 'js' or package.type is 'coffee'
    compile: (data) ->
      compiler data, (data) ->
        if package.compress then compress data
        else write data
    compress: (data) ->
      compressGzip data, (data) ->
        write data
    write: (data) ->
      fs.writeFile package.filename, data, 'binary'
|
||
# Asset: one entry discovered while resolving package contents.
# Directory assets carry only dir/path; file assets additionally get
# `type`, the extension without its leading dot (e.g. 'js', 'coffee').
class Asset
  constructor: (pathname, dir) ->
    if dir is true
      @dir: true
      @path: pathname
    else
      @dir: false
      @path: pathname
      @type: path.extname(pathname).slice 1
|
||
# Resolve a path or array of paths into Asset objects, splitting plain
# files from directories, then invoke callback(files, dirs).
# Directories are expanded one level only (entries are treated as plain
# files; no recursion). Unreadable paths are skipped silently.
resolveContents: (input, callback) ->
  # Accept a single path string as a convenience.
  if 'string' is typeof input
    input: [input]

  lookup_task: new Parallel()

  input.forEach (pathname) ->
    lookup_task.add pathname, [fs.stat, pathname]

  results: []

  # Runs once per stat result, then a final time with name null.
  lookup_task.run (name, err, stats) ->
    if name is null
      dirs: []
      files: []
      results.forEach (asset) ->
        if asset.dir is true then dirs.push asset
        else files.push asset

      if dirs.length > 0
        # Second pass: expand each directory one level via readdir.
        lookup_task: new Parallel()
        dirs.forEach (dir) -> lookup_task.add dir.path, [fs.readdir, dir.path]
        lookup_task.run (dir, err, paths) ->
          if dir is null
            callback files, dirs
          else if err then return
          else
            paths.forEach (pathname) -> files.push new Asset path.join dir, pathname
      else
        callback files, dirs
    else
      if err then return
      if stats.isDirectory() then results.push new Asset name, true
      else results.push new Asset name
|
||
# Gzip a binary string and hand callback the concatenation of the two
# chunks node-compress produces (write payload + close/flush tail).
# Throws on any compression error.
compressGzip: (data, callback) ->
  # Copy the binary string into a Buffer sized to its exact byte length.
  buffer: new Buffer Buffer.byteLength data, 'binary'
  buffer.write data, 'binary', 0

  gzip: new Gzip()
  gzip.write buffer, (err, data) ->
    if err then throw err
    gzip.close (err, data2) ->
      if err then throw err
      callback data + data2
|
||
# Placeholder: intended to rebuild packages when files/dirs change.
# Currently a no-op.
watch: (files, dirs) ->
  # TODO: Watch files