diff --git a/lib/build/node-sass.js b/lib/build/node-sass.js
index 38e706f..6790b74 100644
--- a/lib/build/node-sass.js
+++ b/lib/build/node-sass.js
@@ -10,16 +10,18 @@ var path = require('path'),
visit = require('rework-visit'),
convert = require('convert-source-map'),
SourceMapConsumer = require('source-map').SourceMapConsumer,
- mime = require('mime');
+ mime = require('mime'),
+ crypto = require('crypto'),
+ defaults = require('lodash.defaults');
/**
* Search for the relative file reference from the startPath up to the process
* working directory, avoiding any other directories with a package.json or bower.json.
* @param {string} startPath The location of the uri declaration and the place to start the search from
* @param {string} uri The content of the url() statement, expected to be a relative file path
- * @returns {string} dataURI of the file where found or undefined otherwise
+ * @returns {string} the full file path of the file where found or null otherwise
*/
-function encodeRelativeURL(startPath, uri) {
+function findFile(startPath, uri) {
/**
* Test whether the given directory is the root of its own package
@@ -66,7 +68,7 @@ function encodeRelativeURL(startPath, uri) {
var isWorking;
do {
pathToRoot.push(absoluteStart);
- isWorking = (absoluteStart !== process.cwd()) && notPackage(absoluteStart);
+ isWorking = (absoluteStart !== process.cwd()) && notPackage(absoluteStart);
absoluteStart = path.resolve(absoluteStart, '..');
} while (isWorking);
@@ -83,16 +85,36 @@ function encodeRelativeURL(startPath, uri) {
// file exists so convert to a dataURI and end
if (fs.existsSync(fullPath)) {
- var type = mime.lookup(fullPath);
- var contents = fs.readFileSync(fullPath);
- var base64 = new Buffer(contents).toString('base64');
- return 'data:' + type + ';base64,' + base64;
+ return fullPath;
}
// enqueue subdirectories that are not packages and are not in the root path
else {
enqueue(queue, basePath);
}
}
+
+ // not found
+ return null;
+ }
+}
+
+/**
+ * Search for the relative file reference from the startPath up to the process
+ * working directory, avoiding any other directories with a package.json or bower.json,
+ * and encode as base64 data URI.
+ * @param {string} startPath The location of the uri declaration and the place to start the search from
+ * @param {string} uri The content of the url() statement, expected to be a relative file path
+ * @returns {string} data URI of the file where found or null otherwise
+ */
+function embedRelativeURL(startPath, uri) {
+ var fullPath = findFile(startPath, uri);
+ if (fullPath) {
+ var type = mime.lookup(fullPath),
+ contents = fs.readFileSync(fullPath),
+        base64 = contents.toString('base64');
+ return 'data:' + type + ';base64,' + base64;
+ } else {
+ return null;
}
}
@@ -103,107 +125,129 @@ function encodeRelativeURL(startPath, uri) {
* @param {Array.} [libraryPaths] Any number of library path strings
* @returns {stream.Through} A through stream that performs the operation of a gulp stream
*/
-module.exports = function (bannerWidth, libraryPaths) {
- var output = [ ];
- var libList = (libraryPaths || [ ]).filter(function isString(value) {
- return (typeof value === 'string');
+module.exports = function (options) {
+  options = defaults(options || {}, {
+ libraryPaths: [],
+ embedAssets : false
});
+
+ var output = [],
+ libList = options.libraryPaths.filter(function isString(value) {
+ return (typeof value === 'string');
+ });
+
return through.obj(function (file, encoding, done) {
var stream = this;
// setup parameters
- var sourcePath = file.path.replace(path.basename(file.path), '');
- var sourceName = path.basename(file.path, path.extname(file.path));
- var mapName = sourceName + '.css.map';
- var sourceMapConsumer;
+ var sourcePath = path.dirname(file.path),
+ compiledName = path.basename(file.path, path.extname(file.path)) + '.css',
+ mapName = compiledName + '.map',
+ sourceMapConsumer;
/**
* Push file contents to the output stream.
- * @param {string} ext The extension for the file, including dot
- * @param {string|object?} contents The contents for the file or fields to assign to it
+ * @param {string} filename The filename of the file, including extension
+ * @param {Buffer|string|object} [contents] Optional contents for the file or fields to assign to it
* @return {vinyl.File} The file that has been pushed to the stream
*/
- function pushResult(ext, contents) {
+ function pushResult(filename, contents) {
var pending = new gutil.File({
- cwd: file.cwd,
- base: file.base,
- path: sourcePath + sourceName + ext,
- contents: (typeof contents === 'string') ? new Buffer(contents) : null
+ cwd : file.cwd,
+ base : file.base,
+ path : path.join(sourcePath, filename),
+ contents: Buffer.isBuffer(contents) ? contents : (typeof contents === 'string') ? new Buffer(contents) : null
});
- if (typeof contents === 'object') {
- for (var key in contents) {
- pending[key] = contents[key];
- }
- }
stream.push(pending);
return pending;
}
/**
- * Plugin for css rework that follows SASS transpilation
- * @param {object} stylesheet AST for the CSS output from SASS
+ * Create a plugin for css rework that performs rewriting of url() sources
+ * @param {function({string}, {string}):{string}} uriRewriter A method that rewrites uris
*/
- function reworkPlugin(stylesheet) {
-
- // visit each node (selector) in the stylesheet recursively using the official utility method
- // each node may have multiple declarations
- visit(stylesheet, function visitor(declarations) {
- declarations
- .forEach(eachDeclaration);
- });
-
- /**
- * Process a declaration from the syntax tree.
- * @param declaration
- */
- function eachDeclaration(declaration) {
- var URL_STATEMENT_REGEX = /(url\s*\()\s*(?:(['"])((?:(?!\2).)*)(\2)|([^'"](?:(?!\)).)*[^'"]))\s*(\))/g;
-
- // reverse the original source-map to find the original sass file
- var cssStart = declaration.position.start;
- var sassStart = sourceMapConsumer.originalPositionFor({
- line : cssStart.line,
- column: cssStart.column
+ function rewriteUriPlugin(uriRewriter) {
+ return function reworkPlugin(stylesheet) {
+
+ // visit each node (selector) in the stylesheet recursively using the official utility method
+ // each node may have multiple declarations
+ visit(stylesheet, function visitor(declarations) {
+ declarations
+ .forEach(eachDeclaration);
});
- if (!sassStart.source) {
- throw new Error('failed to decode node-sass source map'); // this can occur with regressions in libsass
- }
- var sassDir = path.dirname(sassStart.source);
-
- // allow multiple url() values in the declaration
- // split by url statements and process the content
- // additional capture groups are needed to match quotations correctly
- // escaped quotations are not considered
- declaration.value = declaration.value
- .split(URL_STATEMENT_REGEX)
- .map(eachSplitOrGroup)
- .join('');
/**
- * Encode the content portion of url() statements.
- * There are 4 capture groups in the split making every 5th unmatched.
- * @param {string} token A single split item
- * @param i The index of the item in the split
- * @returns {string} Every 3 or 5 items is an encoded url everything else is as is
+ * Process a declaration from the syntax tree.
+ * @param declaration
*/
- function eachSplitOrGroup(token, i) {
-
- // we can get groups as undefined under certain match circumstances
- var initialised = token || '';
-
- // the content of the url() statement is either in group 3 or group 5
- var mod = i % 7;
- if ((mod === 3) || (mod === 5)) {
-
- // remove query string or hash suffix
- var uri = initialised.split(/[?#]/g).shift();
- return uri && encodeRelativeURL(sassDir, uri) || initialised;
+ function eachDeclaration(declaration) {
+ var URL_STATEMENT_REGEX = /(url\s*\()\s*(?:(['"])((?:(?!\2).)*)(\2)|([^'"](?:(?!\)).)*[^'"]))\s*(\))/g;
+
+ // reverse the original source-map to find the original sass file
+ var cssStart = declaration.position.start;
+ var sassStart = sourceMapConsumer.originalPositionFor({
+ line : cssStart.line,
+ column: cssStart.column
+ });
+ if (!sassStart.source) {
+ throw new Error('failed to decode node-sass source map'); // this can occur with regressions in libsass
}
- // everything else, including parentheses and quotation (where present) and media statements
- else {
- return initialised;
+ var sassDir = path.dirname(sassStart.source);
+
+ // allow multiple url() values in the declaration
+ // split by url statements and process the content
+ // additional capture groups are needed to match quotations correctly
+ // escaped quotations are not considered
+ declaration.value = declaration.value
+ .split(URL_STATEMENT_REGEX)
+ .map(eachSplitOrGroup)
+ .join('');
+
+ /**
+ * Encode the content portion of url() statements.
+ * There are 4 capture groups in the split making every 5th unmatched.
+ * @param {string} token A single split item
+ * @param i The index of the item in the split
+ * @returns {string} Every 3 or 5 items is an encoded url everything else is as is
+ */
+ function eachSplitOrGroup(token, i) {
+
+ // we can get groups as undefined under certain match circumstances
+ var initialised = token || '';
+
+ // the content of the url() statement is either in group 3 or group 5
+ var mod = i % 7;
+ if ((mod === 3) || (mod === 5)) {
+
+ // remove query string or hash suffix
+ var uri = initialised.split(/[?#]/g).shift();
+ return uri && uriRewriter(sassDir, uri) || initialised;
+ }
+ // everything else, including parentheses and quotation (where present) and media statements
+ else {
+ return initialised;
+ }
}
}
+ };
+ }
+
+ /**
+ * A URI re-writer function that pushes the file to the output stream and rewrites the URI accordingly.
+ * @param {string} startPath The location of the uri declaration and the place to start the search from
+ * @param {string} uri The content of the url() statement, expected to be a relative file path
+ * @returns {string} the new URL of the output file where found or null otherwise
+ */
+ function pushAssetToOutput(startPath, uri) {
+ var fullPath = findFile(startPath, uri);
+ if (fullPath) {
+ var contents = fs.readFileSync(fullPath),
+ hash = crypto.createHash('md5').update(contents).digest('hex'),
+ filename = ['.', compiledName + '.assets', hash + path.extname(fullPath)].join('/');
+ pushResult(filename, contents);
+ return filename;
+ } else {
+ return null;
}
}
@@ -232,12 +276,13 @@ module.exports = function (bannerWidth, libraryPaths) {
);
// rework css
- var reworked = rework(cssWithMap, '')
- .use(reworkPlugin)
- .toString({
- sourcemap : true,
- sourcemapAsObject: true
- });
+ var plugin = rewriteUriPlugin(options.embedAssets ? embedRelativeURL : pushAssetToOutput),
+ reworked = rework(cssWithMap, '')
+ .use(plugin)
+ .toString({
+ sourcemap : true,
+ sourcemapAsObject: true
+ });
// adjust overall sourcemap
delete reworked.map.file;
@@ -247,8 +292,8 @@ module.exports = function (bannerWidth, libraryPaths) {
});
// write stream output
- pushResult('.css', reworked.code + '\n/*# sourceMappingURL=' + mapName + ' */');
- pushResult('.css.map', JSON.stringify(reworked.map, null, 2));
+ pushResult(compiledName, reworked.code + '\n/*# sourceMappingURL=' + mapName + ' */');
+ pushResult(mapName, JSON.stringify(reworked.map, null, 2));
done();
}
@@ -257,19 +302,19 @@ module.exports = function (bannerWidth, libraryPaths) {
* @param {string} error The error text from node-sass
*/
function errorHandler(error) {
- var analysis = /(.*)\:(\d+)\:\s*error\:\s*(.*)/.exec(error);
- var resolved = path.resolve(analysis[1]);
- var filename = [ '.scss', '.css']
- .map(function (ext) {
- return resolved + ext;
- })
- .filter(function (fullname) {
- return fs.existsSync(fullname);
- })
- .pop();
- var message = analysis ?
- ((filename || resolved) + ':' + analysis[2] + ':0: ' + analysis[3] + '\n') :
- ('TODO parse this error\n' + error + '\n');
+    var analysis = /(.*)\:(\d+)\:\s*error\:\s*(.*)/.exec(error),
+        resolved = analysis && path.resolve(analysis[1]),
+        filename = analysis && ['.scss', '.css']
+          .map(function (ext) {
+            return resolved + ext;
+          })
+          .filter(function (fullname) {
+            return fs.existsSync(fullname);
+          })
+          .pop(),
+        message = analysis ?
+          ((filename || resolved) + ':' + analysis[2] + ':0: ' + analysis[3] + '\n') :
+          ('TODO parse this error\n' + error + '\n');
if (output.indexOf(message) < 0) {
output.push(message);
}
@@ -291,7 +336,7 @@ module.exports = function (bannerWidth, libraryPaths) {
error : error,
includePaths: libList,
outputStyle : 'compressed',
- stats : { },
+ stats : {},
sourceMap : map
});
}
@@ -305,10 +350,10 @@ module.exports = function (bannerWidth, libraryPaths) {
// display the output buffer with padding before and after and between each item
if (output.length) {
- var width = Number(bannerWidth) || 0;
- var hr = new Array(width + 1); // this is a good trick to repeat a character N times
- var start = (width > 0) ? (hr.join('\u25BC') + '\n') : '';
- var stop = (width > 0) ? (hr.join('\u25B2') + '\n') : '';
+ var WIDTH = 80,
+ hr = new Array(WIDTH + 1), // this is a good trick to repeat a character N times
+ start = (WIDTH > 0) ? (hr.join('\u25BC') + '\n') : '',
+ stop = (WIDTH > 0) ? (hr.join('\u25B2') + '\n') : '';
process.stdout.write(start + '\n' + output.join('\n') + '\n' + stop);
}
done();
diff --git a/package.json b/package.json
index 2c38d8f..2d9a591 100644
--- a/package.json
+++ b/package.json
@@ -111,7 +111,7 @@
},
"devDependencies": {
"angularity-helloworld-es5": "angularity/angularity-helloworld-es5#ci-build-0.2.0-E",
- "angularity-todo-es5": "angularity/angularity-todo-es5#ci-build-0.2.0-F",
+ "angularity-todo-es5": "angularity/angularity-todo-es5#ci-build-0.3.1-A",
"autodocs": "^0.6.8",
"jasmine-diff-matchers": "~2.0.0",
"jasmine-node": "2.0.0-beta4",
diff --git a/tasks/css.js b/tasks/css.js
index fb4e43b..8a545d9 100644
--- a/tasks/css.js
+++ b/tasks/css.js
@@ -9,19 +9,19 @@ function setUpTaskCss(context) {
}
var taskDefinition = {
- name: 'css',
- description: 'The "css" task performs a one time build of the SASS composition root(s).',
+ name : 'css',
+ description : 'The "css" task performs a one time build of the SASS composition root(s).',
prerequisiteTasks: ['help'],
- checks: [],
- options: [],
- onInit: function onInitCssTask() {
- var gulp = context.gulp,
- runSequence = context.runSequence,
- rimraf = require('gulp-rimraf');
+ checks : [],
+ options : [],
+ onInit : function onInitCssTask() {
+ var gulp = context.gulp,
+ runSequence = context.runSequence,
+ rimraf = require('gulp-rimraf');
- var nodeSass = require('../lib/build/node-sass'),
- hr = require('../lib/util/hr'),
- streams = require('../lib/config/streams');
+ var nodeSass = require('../lib/build/node-sass'),
+ hr = require('../lib/util/hr'),
+ streams = require('../lib/config/streams');
gulp.task('css', function (done) {
console.log(hr('-', 80, 'css'));
@@ -41,12 +41,14 @@ function setUpTaskCss(context) {
// compile sass with the previously discovered lib paths
gulp.task('css:build', function () {
return streams.scssApp()
- .pipe(nodeSass(80, [streams.BOWER, streams.NODE]))
+ .pipe(nodeSass({
+ libraryPaths: [streams.BOWER, streams.NODE]
+ }))
.pipe(gulp.dest(streams.BUILD));
});
},
- onRun: function onRunCssTask() {
- var gulp = context.gulp;
+ onRun : function onRunCssTask() {
+ var gulp = context.gulp;
gulp.start(taskDefinition.name);
}
};
diff --git a/tasks/release.js b/tasks/release.js
index 85ef41c..70dea9d 100644
--- a/tasks/release.js
+++ b/tasks/release.js
@@ -56,7 +56,7 @@ function setUpTaskRelease(context) {
});
gulp.task('release:adjacent', function () {
- return gulp.src([streams.BUILD + '/*.js*', streams.BUILD + '/*.css*', streams.BUILD + '/assets/**'])
+ return gulp.src([streams.BUILD + '/*.js*', streams.BUILD + '/*.css*', streams.BUILD + '/*assets/**'])
.pipe(semiflat(streams.BUILD))
.pipe(gulp.dest(streams.RELEASE_BUNDLE));
});
diff --git a/test/specs/tasks/css-task.js b/test/specs/tasks/css-task.js
index 211d8aa..9d717cd 100644
--- a/test/specs/tasks/css-task.js
+++ b/test/specs/tasks/css-task.js
@@ -1,6 +1,8 @@
'use strict';
-var diffMatchers = require('jasmine-diff-matchers');
+var diffMatchers = require('jasmine-diff-matchers'),
+ fs = require('fs'),
+ path = require('path');
var helper = require('./../../helpers/angularity-test'),
matchers = require('./../../helpers/jasmine-matchers');
@@ -10,11 +12,25 @@ var BUILD_FOLDER = 'app-build';
function expectations(testCase) {
var workingBuildFile = helper.getConcatenation(testCase.cwd, BUILD_FOLDER);
var sourceBuildFile = helper.getConcatenation(testCase.sourceDir, testCase.subdir, BUILD_FOLDER);
+
+ // general
expect(testCase.stdout).toBeTask('css');
+
+ // build output
expect(testCase.cwd).toHaveExpectedCssExcept();
expect(workingBuildFile('index.css')).diffFilePatch(sourceBuildFile('index.css'));
-// expect(workingBuildFile('index.css.map')).diffFilePatch(sourceBuildFile('index.css.map'));
-// TODO @bholloway solve repeatability of .map files
+ // expect(workingBuildFile('index.css.map')).diffFilePatch(sourceBuildFile('index.css.map'));
+ // TODO @bholloway solve repeatability of .map files
+
+ // css assets
+ var assetDir = path.join.apply(path, sourceBuildFile('index.css.assets'));
+ if (fs.existsSync(assetDir)) {
+ fs.readdirSync(assetDir)
+ .forEach(function eachAsset(filename) {
+ var relative = path.join('index.css.assets', filename);
+ expect(workingBuildFile(relative)).diffFilePatch(sourceBuildFile(relative));
+ });
+ }
}
function customMatchers() {