
Work in progress on rewrite to use minitask for parallel processing

1 parent d75c11d commit fbf99cf6a7ff74836757968177bbf1ed44f2a191 @mixu committed Oct 16, 2013
@@ -0,0 +1,82 @@
+## Support for template pre-compilation
+
+I've included examples for the following:
+
+- CoffeeScript (directly via `coffee`)
+- doT (directly via `dottojs`)
+- EJS (plugin)
+- Handlebars (directly via `handlebars`)
+- Jade (directly via `jade`)
+- Mustache (plugin)
+- Underscore templates (plugin)
+
+These are generally triggered by passing a JSON descriptor to the `--command` option:
+
+`--command <json>` / `.set('command', [ { expr: new RegExp('\\.foo$'), cmd: '<cmd>' }, ... ])`: Advanced usage. You can apply a command to files matching a particular extension or regular expression.
+
+JSON options:
+
+- `cmd`: The command to run.
+- `ext`: The command is run on files matching this extension.
+- `expr`: The command is run on files matching this regular expression (passed to `new RegExp` and matched).
+- `wrap`: Optional wrapping. Right now, only `exports` is supported; it wraps the result in `module.exports = <result>;`.
+
+For example, for CoffeeScript (e.g. files matching `.coffee`):
+
+ --command '{ "ext":".coffee", "cmd": "coffee --compile --stdio" }'
+
+Or for Jade (e.g. files matching `.jade`):
+
+ --command '{ "ext":".jade", "wrap": "exports", "cmd": "jade --client --no-debug" }'
+
+Sadly, some commands are not unixy: they don't support pipes. For those commands, you can use the automatic placeholders `<input>` and `<output>`. This alters how `glue` runs: it replaces the `<input>` string with the actual filename and `<output>` with a path inside a temporary directory; the resulting file is read into the build and then removed.
+
+For example, for Handlebars (e.g. files matching `.hbs`):
+
+ --command '{ "ext":".hbs", "wrap": "exports", "cmd": "handlebars <input> --simple" }'
+
+Handlebars requires an input file but does not need an output file (it can write to stdout but cannot read from stdin).
+
+For example, for doT (e.g. files matching `.dot`):
+
+ --command-dot "dottojs -s <input> -d <output>"
+
+## Plugins for pre-compilation
+
+What about [templating libraries](http://garann.github.io/template-chooser/) that don't have built-in precompilation support? For these you can point a command at a small plugin script that does the compilation; the core of each plugin is a single compile call, as in the snippets below (a fuller sketch follows them).
+
+EJS:
+
+ --command-ejs "~/precompile-ejs.js"
+
+ console.log(ejs.compile(str, { client: true }));
+
+underscore.js templates:
+
+ console.log(_.template(str));
+
+Mustache.js:
+
+ console.log(Mustache.compile(stringTemplate));
+
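+A complete plugin is just a thin script around one of these calls. Here is a minimal, hypothetical sketch for the EJS case (the file name, and the assumption that the template arrives on stdin and the compiled source goes to stdout, are mine; `.toString()` is used so the function source rather than `[Function]` is printed):
+
+    #!/usr/bin/env node
+    // precompile-ejs.js (sketch): read an EJS template from stdin, compile it
+    // to a standalone client-side function and print its source to stdout.
+    var ejs = require('ejs');
+    var str = '';
+    process.stdin.setEncoding('utf8');
+    process.stdin.resume();
+    process.stdin.on('data', function(chunk) { str += chunk; });
+    process.stdin.on('end', function() {
+      console.log(ejs.compile(str, { client: true }).toString());
+    });
+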
+## Using packages from other package managers
+
+AMD/RequireJS to CommonJS conversion:
+
+Component:
+
+Bower:
+
+## Exporting to AMD etc
+
+UMD support documentation
+
+## Generating obfuscated server-side code
+
+## --no-json
+
+By default, `.json` files are included; this matches how Node's native `require` implementation works.
+
+This means that something like `var config = require('./config.json')` works.
+
+Specify `--no-json` to exclude JSON files from the build.
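+
+For example (a hedged sketch; `--include` and `--out` are the existing CLI flags, shown only for context):
+
+    gluejs --include ./lib --no-json --out build.js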
@@ -29,15 +29,25 @@ API.prototype.include = function(filepath) {
API.prototype.render = function(dest) {
if(typeof dest == 'function') {
var capture = new Capture();
- packageCommonJs(this.files, this.options, capture, function() {
+
+ capture.on('error', function(err) {
+ console.error('Error in the capture stream: ', err);
+ console.trace();
+ });
+
+ capture.once('finish', function() {
dest(null, capture.get());
});
+
+ packageCommonJs(this.files, this.options, capture, function() {
+ // NOP
+ });
} else if(dest.write) {
// writable stream
packageCommonJs(this.files, this.options, dest, function() {
- if(dest !== process.stdout) {
- dest.end();
- }
+ // if(dest !== process.stdout) {
+ // dest.end();
+ // }
});
}
};
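With this change, the callback fires on the Capture stream's 'finish' event rather than inside packageCommonJs' completion callback. For context, a hedged sketch of driving the callback form (the require path and constructor export are assumptions):

    var API = require('./lib/api.js'); // assumed path to the module above
    var build = new API();
    build.include('./lib');            // queue files for the build
    build.render(function(err, txt) {  // callback form: txt comes from capture.get()
      if (err) throw err;
      console.log(txt);
    });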
@@ -1,24 +1,20 @@
// use readable-stream to use Node 0.10.x streams in Node 0.8.x
-var Transform = require('readable-stream').Transform;
+var Writable = require('readable-stream').Writable,
+ util = require('util');
function Wrap(options) {
- Transform.call(this, options);
+ Writable.call(this, options);
this.buffer = '';
}
-// this is just the recommended boilerplate from the Node core docs
-Wrap.prototype = Object.create(Transform.prototype, { constructor: { value: Wrap }});
+util.inherits(Wrap, Writable);
-Wrap.prototype._transform = function(chunk, encoding, done) {
+Wrap.prototype._write = function(chunk, encoding, done) {
// marked cannot stream input, so we need to accumulate it here.
this.buffer += chunk;
done();
};
-Wrap.prototype._flush = function(done) {
- done();
-};
-
Wrap.prototype.get = function() {
return this.buffer;
};
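For context, a hedged sketch of how this Writable is meant to be consumed (the require path is an assumption; any readable stream can be the source):

    var fs = require('fs'),
        Wrap = require('./wrap.js');   // assumed path to the module above

    var wrap = new Wrap();
    // 'finish' fires once the source has ended and every chunk has been written
    wrap.once('finish', function() {
      console.log(wrap.get());         // the fully accumulated input as a string
    });
    fs.createReadStream('./some-file.txt').pipe(wrap);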
@@ -0,0 +1,80 @@
+var path = require('path');
+
+var spawn = require('../../file-tasks/spawn.js'),
+ wrapCommonJs = require('../../file-tasks/wrap-commonjs-web.js'),
+ wrapJson = require('../../file-tasks/wrap-json-web.js'),
+ wrapExports = require('../../file-tasks/wrap-exports-web.js');
+
+module.exports = function(options) {
+ var result = [],
+ useDefaults = true;
+
+ // 1st: any custom commands (in array order)
+ // 2nd: any wrapping
+ // 3rd: any reporting
+
+ // the expected result is one of (sync(input), async(input, done), fn() { return stream | child process } )
+ // getFileTasks will call the function once with the item as the param
+ // --> TODO in the future might want to just combine these two as the syntax is a bit awkward
+
+ if (Array.isArray(options.command)) {
+ result = options.command;
+ } else if(options.command) {
+ // "simple mode": one --command which only applies to .js files
+ result.push({
+ ext: '.js',
+ task: function(item) {
+ // extra level of nesting is annoying, but it avoids having to instantiate the task resources immediately
+ return function() {
+ return spawn({
+ name: item.name, // full path
+ task: options.command
+ });
+ };
+ }
+ });
+ }
+
+ var exportVariableName = options['export'] || 'foo';
+
+ if (useDefaults) {
+ // default task for wrapping .js
+ result.push({
+ ext: '.js',
+ task: function(item, packageObj) {
+ var relname = path.relative(packageObj.basepath, item.name);
+ return function() {
+ return wrapCommonJs({
+ 'source-url': options['source-url'],
+ 'name': (packageObj.name ? exportVariableName+'/' + packageObj.name + '/' : exportVariableName+'/') + relname
+ });
+ };
+ }
+ });
+ // default task for wrapping .json
+ result.push({
+ ext: '.json',
+ task: function() {
+ return function() {
+ return wrapJson({ });
+ };
+ }
+ });
+ // if we are reporting, add the stream size capture task at the end
+ // so we can report on results (e.g. of minification)
+ if (options.command && options.report) {
+ result.push({
+ ext: '.js',
+      // TODO: streamSize is not required above and innerCurrent is not
+      // defined in this scope yet; this block still needs to be wired up.
+      task: function(item, packageObj) {
+        return function() {
+          return streamSize({
+            onDone: function(size) {
+              packageObj.files[innerCurrent].sizeAfter = size;
+            }
+          });
+        };
+      }
+ });
+ }
+ // this would be a good place to add more default tasks
+ }
+ return result;
+};
@@ -0,0 +1,19 @@
+module.exports = function(file, pkg, commands) {
+ var result = [];
+ if(typeof file.name !== 'string' ||
+ !Array.isArray(commands)) {
+    throw new Error('Invalid params to getFileCommands: ' + JSON.stringify(file) + ', ' + JSON.stringify(commands));
+ }
+
+ // task selection from commands
+ commands.forEach(function(command) {
+ // for now, only support ext (note: can be ".test.js" or some other longer postfix)
+ if(file.name.substr(file.name.length - command.ext.length).toLowerCase() == command.ext) {
+ // the task is a function (file, package) which returns another function (used in the task)
+ result.push(command.task(file, pkg));
+ }
+ });
+ return result;
+};
+
+
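Taken together, a hedged sketch of how these two helpers could be wired up (the require paths and file names are guesses based on the function names; `uglifyjs` is just an example command):

    var getCommands = require('./get-commands.js'),           // assumed file name
        getFileCommands = require('./get-file-commands.js');  // assumed file name

    // build the command list once per run: custom commands first, then the
    // default CommonJS / JSON wrapping tasks
    var commands = getCommands({ command: 'uglifyjs', report: false });

    // then, for each file in the package, resolve the task functions to run
    var pkg = { name: '', basepath: '/src', files: [] };
    var tasks = getFileCommands({ name: '/src/index.js' }, pkg, commands);
    // each entry in `tasks` is a function that returns a stream or child process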