265 changes: 155 additions & 110 deletions lib/build/node-sass.js
@@ -10,16 +10,18 @@ var path = require('path'),
visit = require('rework-visit'),
convert = require('convert-source-map'),
SourceMapConsumer = require('source-map').SourceMapConsumer,
mime = require('mime');
mime = require('mime'),
crypto = require('crypto'),
defaults = require('lodash.defaults');

/**
* Search for the relative file reference from the <code>startPath</code> up to the process
* working directory, avoiding any other directories with a <code>package.json</code> or <code>bower.json</code>.
* @param {string} startPath The location of the uri declaration and the place to start the search from
* @param {string} uri The content of the url() statement, expected to be a relative file path
* @returns {string} dataURI of the file where found or <code>undefined</code> otherwise
* @returns {string} the full file path of the file where found or <code>null</code> otherwise
*/
function encodeRelativeURL(startPath, uri) {
function findFile(startPath, uri) {

/**
* Test whether the given directory is the root of its own package
@@ -66,7 +68,7 @@ function encodeRelativeURL(startPath, uri) {
var isWorking;
do {
pathToRoot.push(absoluteStart);
isWorking = (absoluteStart !== process.cwd()) && notPackage(absoluteStart);
isWorking = (absoluteStart !== process.cwd()) && notPackage(absoluteStart);
absoluteStart = path.resolve(absoluteStart, '..');
} while (isWorking);

@@ -83,16 +85,36 @@ function encodeRelativeURL(startPath, uri) {

// file exists so convert to a dataURI and end
if (fs.existsSync(fullPath)) {
var type = mime.lookup(fullPath);
var contents = fs.readFileSync(fullPath);
var base64 = new Buffer(contents).toString('base64');
return 'data:' + type + ';base64,' + base64;
return fullPath;
}
// enqueue subdirectories that are not packages and are not in the root path
else {
enqueue(queue, basePath);
}
}

// not found
return null;
}
}

/**
* Search for the relative file reference from the <code>startPath</code> up to the process
* working directory, avoiding any other directories with a <code>package.json</code> or <code>bower.json</code>,
* and encode as base64 data URI.
* @param {string} startPath The location of the uri declaration and the place to start the search from
* @param {string} uri The content of the url() statement, expected to be a relative file path
* @returns {string} data URI of the file where found or <code>null</code> otherwise
*/
function embedRelativeURL(startPath, uri) {
var fullPath = findFile(startPath, uri);
if (fullPath) {
var type = mime.lookup(fullPath),
contents = fs.readFileSync(fullPath),
base64 = new Buffer(contents).toString('base64');
return 'data:' + type + ';base64,' + base64;
} else {
return null;
}
}
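
(Illustrative note, not part of the diff: the refactor splits file lookup out of encoding so that other URI re-writers can reuse findFile. Assuming a file images/logo.png sitting next to the sources under a hypothetical /project/app/css, the two helpers behave roughly as follows.)

    findFile('/project/app/css', 'images/logo.png');         // '/project/app/css/images/logo.png', or null if not found
    embedRelativeURL('/project/app/css', 'images/logo.png'); // 'data:image/png;base64,iVBORw0KG...', or null if not found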

@@ -103,107 +125,129 @@ function encodeRelativeURL(startPath, uri) {
* @param {Array.<string>} [libraryPaths] Any number of library path strings
* @returns {stream.Through} A through stream that performs the operation of a gulp stream
*/
module.exports = function (bannerWidth, libraryPaths) {
var output = [ ];
var libList = (libraryPaths || [ ]).filter(function isString(value) {
return (typeof value === 'string');
module.exports = function (options) {
defaults(options, {
libraryPaths: [],
embedAssets : false
});

var output = [],
libList = options.libraryPaths.filter(function isString(value) {
return (typeof value === 'string');
});

return through.obj(function (file, encoding, done) {
var stream = this;

// setup parameters
var sourcePath = file.path.replace(path.basename(file.path), '');
var sourceName = path.basename(file.path, path.extname(file.path));
var mapName = sourceName + '.css.map';
var sourceMapConsumer;
var sourcePath = path.dirname(file.path),
compiledName = path.basename(file.path, path.extname(file.path)) + '.css',
mapName = compiledName + '.map',
sourceMapConsumer;

/**
* Push file contents to the output stream.
* @param {string} ext The extension for the file, including dot
* @param {string|object?} contents The contents for the file or fields to assign to it
* @param {string} filename The filename of the file, including extension
* @param {Buffer|string|object} [contents] Optional contents for the file or fields to assign to it
* @return {vinyl.File} The file that has been pushed to the stream
*/
function pushResult(ext, contents) {
function pushResult(filename, contents) {
var pending = new gutil.File({
cwd: file.cwd,
base: file.base,
path: sourcePath + sourceName + ext,
contents: (typeof contents === 'string') ? new Buffer(contents) : null
cwd : file.cwd,
base : file.base,
path : path.join(sourcePath, filename),
contents: Buffer.isBuffer(contents) ? contents : (typeof contents === 'string') ? new Buffer(contents) : null
});
if (typeof contents === 'object') {
for (var key in contents) {
pending[key] = contents[key];
}
}
stream.push(pending);
return pending;
}

/**
* Plugin for css rework that follows SASS transpilation
* @param {object} stylesheet AST for the CSS output from SASS
* Create a plugin for css rework that performs rewriting of url() sources
* @param {function({string}, {string}):{string}} uriRewriter A method that rewrites uris
*/
function reworkPlugin(stylesheet) {

// visit each node (selector) in the stylesheet recursively using the official utility method
// each node may have multiple declarations
visit(stylesheet, function visitor(declarations) {
declarations
.forEach(eachDeclaration);
});

/**
* Process a declaration from the syntax tree.
* @param declaration
*/
function eachDeclaration(declaration) {
var URL_STATEMENT_REGEX = /(url\s*\()\s*(?:(['"])((?:(?!\2).)*)(\2)|([^'"](?:(?!\)).)*[^'"]))\s*(\))/g;

// reverse the original source-map to find the original sass file
var cssStart = declaration.position.start;
var sassStart = sourceMapConsumer.originalPositionFor({
line : cssStart.line,
column: cssStart.column
function rewriteUriPlugin(uriRewriter) {
return function reworkPlugin(stylesheet) {

// visit each node (selector) in the stylesheet recursively using the official utility method
// each node may have multiple declarations
visit(stylesheet, function visitor(declarations) {
declarations
.forEach(eachDeclaration);
});
if (!sassStart.source) {
throw new Error('failed to decode node-sass source map'); // this can occur with regressions in libsass
}
var sassDir = path.dirname(sassStart.source);

// allow multiple url() values in the declaration
// split by url statements and process the content
// additional capture groups are needed to match quotations correctly
// escaped quotations are not considered
declaration.value = declaration.value
.split(URL_STATEMENT_REGEX)
.map(eachSplitOrGroup)
.join('');

/**
* Encode the content portion of <code>url()</code> statements.
* There are 4 capture groups in the split making every 5th unmatched.
* @param {string} token A single split item
* @param i The index of the item in the split
* @returns {string} Every 3 or 5 items is an encoded url everything else is as is
* Process a declaration from the syntax tree.
* @param declaration
*/
function eachSplitOrGroup(token, i) {

// we can get groups as undefined under certain match circumstances
var initialised = token || '';

// the content of the url() statement is either in group 3 or group 5
var mod = i % 7;
if ((mod === 3) || (mod === 5)) {

// remove query string or hash suffix
var uri = initialised.split(/[?#]/g).shift();
return uri && encodeRelativeURL(sassDir, uri) || initialised;
function eachDeclaration(declaration) {
var URL_STATEMENT_REGEX = /(url\s*\()\s*(?:(['"])((?:(?!\2).)*)(\2)|([^'"](?:(?!\)).)*[^'"]))\s*(\))/g;

// reverse the original source-map to find the original sass file
var cssStart = declaration.position.start;
var sassStart = sourceMapConsumer.originalPositionFor({
line : cssStart.line,
column: cssStart.column
});
if (!sassStart.source) {
throw new Error('failed to decode node-sass source map'); // this can occur with regressions in libsass
}
// everything else, including parentheses and quotation (where present) and media statements
else {
return initialised;
var sassDir = path.dirname(sassStart.source);

// allow multiple url() values in the declaration
// split by url statements and process the content
// additional capture groups are needed to match quotations correctly
// escaped quotations are not considered
declaration.value = declaration.value
.split(URL_STATEMENT_REGEX)
.map(eachSplitOrGroup)
.join('');

/**
* Encode the content portion of <code>url()</code> statements.
* There are 4 capture groups in the split making every 5th unmatched.
* @param {string} token A single split item
* @param i The index of the item in the split
* @returns {string} Every 3 or 5 items is an encoded url everything else is as is
*/
function eachSplitOrGroup(token, i) {

// we can get groups as undefined under certain match circumstances
var initialised = token || '';

// the content of the url() statement is either in group 3 or group 5
var mod = i % 7;
if ((mod === 3) || (mod === 5)) {

// remove query string or hash suffix
var uri = initialised.split(/[?#]/g).shift();
return uri && uriRewriter(sassDir, uri) || initialised;
}
// everything else, including parentheses and quotation (where present) and media statements
else {
return initialised;
}
}
}
};
}

/**
* A URI re-writer function that pushes the file to the output stream and rewrites the URI accordingly.
* @param {string} startPath The location of the uri declaration and the place to start the search from
* @param {string} uri The content of the url() statement, expected to be a relative file path
* @returns {string} the new URL of the output file where found or <code>null</code> otherwise
*/
function pushAssetToOutput(startPath, uri) {
var fullPath = findFile(startPath, uri);
if (fullPath) {
var contents = fs.readFileSync(fullPath),
hash = crypto.createHash('md5').update(contents).digest('hex'),
filename = ['.', compiledName + '.assets', hash + path.extname(fullPath)].join('/');
pushResult(filename, contents);
return filename;
} else {
return null;
}
}
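
(Illustrative sketch, not part of the diff: how pushAssetToOutput derives the emitted filename. Only Node's built-in crypto, fs and path modules are used, as above; 'images/logo.png' and the compiled name 'app.css' stand in for findFile's result and the surrounding compiledName and are hypothetical.)

    var crypto = require('crypto'),
        fs     = require('fs'),
        path   = require('path');

    var fullPath = 'images/logo.png',                                        // result of findFile()
        contents = fs.readFileSync(fullPath),
        hash     = crypto.createHash('md5').update(contents).digest('hex'),  // content hash, so unchanged assets keep a stable name
        filename = ['.', 'app.css' + '.assets', hash + path.extname(fullPath)].join('/');
    // filename is of the form './app.css.assets/<32-char-md5-hex>.png'; the same string is returned
    // to the rework plugin, so the url() in the compiled CSS points at the asset pushed to the stream.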

@@ -232,12 +276,13 @@ module.exports = function (bannerWidth, libraryPaths) {
);

// rework css
var reworked = rework(cssWithMap, '')
.use(reworkPlugin)
.toString({
sourcemap : true,
sourcemapAsObject: true
});
var plugin = rewriteUriPlugin(options.embedAssets ? embedRelativeURL : pushAssetToOutput),
reworked = rework(cssWithMap, '')
.use(plugin)
.toString({
sourcemap : true,
sourcemapAsObject: true
});

// adjust overall sourcemap
delete reworked.map.file;
@@ -247,8 +292,8 @@ module.exports = function (bannerWidth, libraryPaths) {
});

// write stream output
pushResult('.css', reworked.code + '\n/*# sourceMappingURL=' + mapName + ' */');
pushResult('.css.map', JSON.stringify(reworked.map, null, 2));
pushResult(compiledName, reworked.code + '\n/*# sourceMappingURL=' + mapName + ' */');
pushResult(mapName, JSON.stringify(reworked.map, null, 2));
done();
}

@@ -257,19 +302,19 @@ module.exports = function (bannerWidth, libraryPaths) {
* @param {string} error The error text from node-sass
*/
function errorHandler(error) {
var analysis = /(.*)\:(\d+)\:\s*error\:\s*(.*)/.exec(error);
var resolved = path.resolve(analysis[1]);
var filename = [ '.scss', '.css']
.map(function (ext) {
return resolved + ext;
})
.filter(function (fullname) {
return fs.existsSync(fullname);
})
.pop();
var message = analysis ?
((filename || resolved) + ':' + analysis[2] + ':0: ' + analysis[3] + '\n') :
('TODO parse this error\n' + error + '\n');
var analysis = /(.*)\:(\d+)\:\s*error\:\s*(.*)/.exec(error),
resolved = path.resolve(analysis[1]),
filename = ['.scss', '.css']
.map(function (ext) {
return resolved + ext;
})
.filter(function (fullname) {
return fs.existsSync(fullname);
})
.pop(),
message = analysis ?
((filename || resolved) + ':' + analysis[2] + ':0: ' + analysis[3] + '\n') :
('TODO parse this error\n' + error + '\n');
if (output.indexOf(message) < 0) {
output.push(message);
}
@@ -291,7 +336,7 @@ module.exports = function (bannerWidth, libraryPaths) {
error : error,
includePaths: libList,
outputStyle : 'compressed',
stats : { },
stats : {},
sourceMap : map
});
}
@@ -305,10 +350,10 @@ module.exports = function (bannerWidth, libraryPaths) {

// display the output buffer with padding before and after and between each item
if (output.length) {
var width = Number(bannerWidth) || 0;
var hr = new Array(width + 1); // this is a good trick to repeat a character N times
var start = (width > 0) ? (hr.join('\u25BC') + '\n') : '';
var stop = (width > 0) ? (hr.join('\u25B2') + '\n') : '';
var WIDTH = 80,
hr = new Array(WIDTH + 1), // this is a good trick to repeat a character N times
start = (WIDTH > 0) ? (hr.join('\u25BC') + '\n') : '',
stop = (WIDTH > 0) ? (hr.join('\u25B2') + '\n') : '';
process.stdout.write(start + '\n' + output.join('\n') + '\n' + stop);
}
done();
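
(Illustrative usage sketch, not part of the diff: the module now takes a single options object instead of the old (bannerWidth, libraryPaths) signature. The require path follows the file path shown above; the gulp task name, globs and option values below are hypothetical.)

    var gulp     = require('gulp'),
        nodeSass = require('./lib/build/node-sass');

    gulp.task('css', function () {
      return gulp.src('app/**/*.scss')
        .pipe(nodeSass({
          libraryPaths: ['bower_components'],   // forwarded to node-sass as includePaths
          embedAssets : false                   // false: push hashed assets to the stream; true: inline as base64 data URIs
        }))
        .pipe(gulp.dest('build'));
    });
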
2 changes: 1 addition & 1 deletion package.json
@@ -111,7 +111,7 @@
},
"devDependencies": {
"angularity-helloworld-es5": "angularity/angularity-helloworld-es5#ci-build-0.2.0-E",
"angularity-todo-es5": "angularity/angularity-todo-es5#ci-build-0.2.0-F",
"angularity-todo-es5": "angularity/angularity-todo-es5#ci-build-0.3.1-A",
"autodocs": "^0.6.8",
"jasmine-diff-matchers": "~2.0.0",
"jasmine-node": "2.0.0-beta4",