Switch to winston for logging and much more.

* Switch to winston for logging.
* Move fs_utils classes to separate files.
* Make jade templates work.
commit 3d16cf27d6cf7f7262583b7523b2bcb49d095c6e 1 parent c89ba5b
@paulmillr authored
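
The fs_utils split replaces the single src/fs_utils.coffee with a src/fs_utils/ directory whose index.coffee re-exports the renamed classes, so the existing fs_utils = require './fs_utils' line in brunch.coffee keeps resolving. A minimal sketch of the new require surface:

    # require './fs_utils' now resolves to src/fs_utils/index.coffee.
    {FileWatcher, FileWriter, SourceFile, SourceFileList} = require './fs_utils'

    # Renames relative to the deleted src/fs_utils.coffee:
    #   FSWatcher -> FileWatcher, File -> SourceFile, FileList -> SourceFileList
    fileList = new SourceFileList
    watcher  = new FileWatcher ['app', 'vendor']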
3  package.json
@@ -34,7 +34,8 @@
"mkdirp": "0.2.1",
"ncp": "0.2.3",
"growl": "1.4.1",
- "express": "2.5.1"
+ "express": "2.5.1",
+ "winston": "0.5.10"
},
"devDependencies": {
"mocha": "0.14.0",
41 src/brunch.coffee
@@ -4,8 +4,9 @@ fs = require 'fs'
mkdirp = require 'mkdirp'
{ncp} = require 'ncp'
sysPath = require 'path'
-fs_utils = require './fs_utils'
helpers = require './helpers'
+global.helpers = helpers
+fs_utils = require './fs_utils'
loadPlugins = (config, callback) ->
cwd = sysPath.resolve config.rootPath
@@ -42,15 +43,18 @@ watchApplication = (persistent, rootPath, config, callback) ->
helpers.startServer config.server.port, config.buildPath if config.server.run
directories = ['app', 'vendor'].map (dir) -> sysPath.join rootPath, dir
- fileList = new fs_utils.FileList
+ fileList = new fs_utils.SourceFileList
loadPlugins config, (error, plugins) ->
- return helpers.logError error if error?
- addToFileList = (disableWrapping) -> (path) ->
+ return logger.error error if error?
+ start = null
+ addToFileList = (isPluginHelper) -> (path) ->
+ start = Date.now()
+ logger.log 'debug', "File '#{path}' was changed"
compiler = plugins.filter(isCompilerFor path)[0]
return unless compiler
- file = new fs_utils.File path, compiler
- file.disableWrapping = disableWrapping if disableWrapping
+ file = new fs_utils.SourceFile path, compiler
+ file.isPluginHelper = yes if isPluginHelper
fileList.add file
plugins.forEach (plugin) ->
@@ -62,15 +66,16 @@ watchApplication = (persistent, rootPath, config, callback) ->
includePathes.forEach addToFileList yes
writer = new fs_utils.FileWriter config, plugins
- watcher = (new fs_utils.FSWatcher directories)
+ watcher = (new fs_utils.FileWatcher directories)
.on('change', addToFileList no)
.on('remove', (path) -> fileList.remove path)
fileList.on 'resetTimer', -> writer.write fileList
writer.on 'write', (result) ->
assetPath = sysPath.join rootPath, 'app', 'assets'
ncp assetPath, config.buildPath, (error) ->
- helpers.logError "Asset compilation error: #{error}" if error?
- helpers.log "compiled."
+ logger.error "Asset compilation failed: #{error}" if error?
+ logger.info "compiled."
+ logger.log 'debug', "compilation time: #{Date.now() - start}ms"
watcher.close() unless persistent
callback null, result
watcher
@@ -78,17 +83,17 @@ watchApplication = (persistent, rootPath, config, callback) ->
generateFile = (path, data, callback) ->
parentDir = sysPath.dirname path
write = ->
- helpers.log "create #{path}"
+ logger.info "create #{path}"
fs.writeFile path, data, callback
sysPath.exists parentDir, (exists) ->
return write() if exists
- helpers.log "invoke #{parentDir}"
+ logger.info "create #{parentDir}"
mkdirp parentDir, (parseInt 755, 8), (error) ->
- return helpers.logError if error?
+ return logger.error if error?
write()
destroyFile = (path, callback) ->
- helpers.log "destroy #{path}"
+ logger.info "destroy #{path}"
fs.unlink path, callback
generateOrDestroy = (generate, options, callback) ->
@@ -141,7 +146,7 @@ generateOrDestroy = (generate, options, callback) ->
exports.install = (rootPath, callback = (->)) ->
prevDir = process.cwd()
process.chdir rootPath
- helpers.log 'Installing packages...'
+ logger.info 'Installing packages...'
exec 'npm install', (error, stdout, stderr) ->
process.chdir prevDir
callback stderr, stdout
@@ -154,12 +159,12 @@ exports.new = (options, callback = (->)) ->
template ?= sysPath.join __dirname, '..', 'template', 'base'
sysPath.exists rootPath, (exists) ->
if exists
- return helpers.logError "Directory '#{rootPath}' already exists"
+ return logger.error "Directory '#{rootPath}' already exists"
mkdirp rootPath, (parseInt 755, 8), (error) ->
- return helpers.logError error if error?
+ return logger.error error if error?
ncp template, rootPath, (error) ->
- return helpers.logError error if error?
- helpers.log 'Created brunch directory layout'
+ return logger.error error if error?
+ logger.info 'Created brunch directory layout'
exports.install rootPath, callback
# Build application once and execute callback.
319 src/fs_utils.coffee
@@ -1,319 +0,0 @@
-async = require 'async'
-{EventEmitter} = require 'events'
-fs = require 'fs'
-mkdirp = require 'mkdirp'
-sysPath = require 'path'
-util = require 'util'
-helpers = require './helpers'
-
-# The definition would be added on top of every filewriter .js file.
-requireDefinition = '''
-(function(/*! Brunch !*/) {
- if (!this.require) {
- var modules = {}, cache = {}, require = function(name, root) {
- var module = cache[name], path = expand(root, name), fn;
- if (module) {
- return module;
- } else if (fn = modules[path] || modules[path = expand(path, './index')]) {
- module = {id: name, exports: {}};
- try {
- cache[name] = module.exports;
- fn(module.exports, function(name) {
- return require(name, dirname(path));
- }, module);
- return cache[name] = module.exports;
- } catch (err) {
- delete cache[name];
- throw err;
- }
- } else {
- throw 'module \\'' + name + '\\' not found';
- }
- }, expand = function(root, name) {
- var results = [], parts, part;
- if (/^\\.\\.?(\\/|$)/.test(name)) {
- parts = [root, name].join('/').split('/');
- } else {
- parts = name.split('/');
- }
- for (var i = 0, length = parts.length; i < length; i++) {
- part = parts[i];
- if (part == '..') {
- results.pop();
- } else if (part != '.' && part != '') {
- results.push(part);
- }
- }
- return results.join('/');
- }, dirname = function(path) {
- return path.split('/').slice(0, -1).join('/');
- };
- this.require = function(name) {
- return require(name, '');
- };
- this.require.brunch = true;
- this.require.define = function(bundle) {
- for (var key in bundle)
- modules[key] = bundle[key];
- };
- }
-}).call(this);
-'''
-
-pluralize = (word) -> word + 's'
-
-dePluralize = (word) -> word[0..word.length - 1]
-
-# Creates file if it doesn't exist and writes data to it.
-# Would also create a parent directories if they don't exist.
-#
-# path - path to file that would be written.
-# data - data to be written
-# callback(error, path, data) - would be executed on error or on
-# successful write.
-#
-# Example
-#
-# writeFile 'test.txt', 'data', (error) -> console.log error if error?
-#
-writeFile = (path, data, callback) ->
- write = (callback) -> fs.writeFile path, data, callback
- write (error) ->
- return callback null, path, data unless error?
- mkdirp (sysPath.dirname path), (parseInt 755, 8), (error) ->
- return callback error if error?
- write (error) ->
- callback error, path, data
-
-exports.File = class File
- constructor: (@path, @compiler) ->
- @type = @compiler.compilerType
- @data = ''
- @disableWrapping = !(/^vendor/.test @path)
-
- # Defines a requirejs module in scripts & templates.
- # This allows brunch users to use `require 'module/name'` in browsers.
- #
- # path - path to file, contents of which will be wrapped.
- # source - file contents.
- #
- # Returns a wrapped string.
- _wrap: (data) ->
- if @type in ['javascript', 'template'] and @disableWrapping
- moduleName = JSON.stringify(
- @path.replace(/^app\//, '').replace(/\.\w*$/, '')
- )
- """
- (this.require.define({
- #{moduleName}: function(exports, require, module) {
- #{data}
- }
- }));\n
- """
- else
- data
-
- compile: (callback) ->
- fs.readFile @path, (error, data) =>
- return callback error if error?
- @compiler.compile data.toString(), @path, (error, result) =>
- @data = @_wrap result if result?
- callback error, result
-
-exports.FileList = class FileList extends EventEmitter
- constructor: ->
- @files = []
-
- resetTimer: ->
- clearTimeout @timer if @timer?
- @timer = setTimeout (=> @emit 'resetTimer'), 150
-
- get: (searchFunction) ->
- (@files.filter searchFunction)[0]
-
- add: (file) ->
- @files = @files.concat [file]
- compilerName = file.compiler.constructor.name
- file.compile (error, result) =>
- if error?
- return helpers.logError "#{compilerName} error in '#{file.path}':
-#{error}"
- @resetTimer()
-
- remove: (path) ->
- removed = @get (file) -> file.path is path
- @files = @files.filter (file) -> file isnt removed
- delete removed
- @resetTimer()
-
-class GeneratedFile
- constructor: (@path, @sourceFiles, @config) ->
- @type = if (@sourceFiles.some (file) -> file.type is 'javascript')
- 'javascript'
- else
- 'stylesheet'
-
- _extractOrder: (files, config) ->
- types = files.map (file) -> pluralize file.type
- arrays = (value.order for own key, value of config.files when key in types)
- arrays.reduce (memo, array) ->
- array or= {}
- {
- before: memo.before.concat(array.before or []),
- after: memo.after.concat(array.after or [])
- }
- , {before: [], after: []}
-
- # Collects content from a list of files and wraps it with
- # require.js module definition if needed.
- joinSourceFiles: ->
- files = @sourceFiles
- pathes = files.map (file) -> file.path
- order = @_extractOrder files, @config
- sourceFiles = (helpers.sort pathes, order).map (file) ->
- files[pathes.indexOf file]
- data = ''
- data += requireDefinition if @type is 'javascript'
- data += sourceFiles.map((file) -> file.data).join ''
- data
-
- minify: (data, callback) ->
- if @minifier?.minify?
- @minifier.minify data, @path, callback
- else
- callback null, data
-
- write: (callback) ->
- @minify @joinSourceFiles(), (error, data) =>
- writeFile @path, data, callback
-
-# A simple file changes watcher.
-#
-# files - array of directories that would be watched.
-#
-# Example
-#
-# (new FSWatcher ['app', 'vendor'])
-# .on 'change', (file) ->
-# console.log 'File %s was changed', file
-#
-class exports.FSWatcher extends EventEmitter
- # RegExp that would filter invalid files (dotfiles, emacs caches etc).
- invalid: /^(\.|#)/
-
- constructor: (files) ->
- @watched = {}
- @_handle file for file in files
-
- _getWatchedDir: (directory) ->
- @watched[directory] ?= []
-
- _watch: (item, callback) ->
- parent = @_getWatchedDir sysPath.dirname item
- basename = sysPath.basename item
- # Prevent memory leaks.
- return if basename in parent
- parent.push basename
- fs.watchFile item, persistent: yes, interval: 100, (curr, prev) =>
- if curr.mtime.getTime() isnt prev.mtime.getTime()
- callback? item
-
- _handleFile: (file) ->
- emit = (file) =>
- @emit 'change', file
- emit file
- @_watch file, emit
-
- _handleDir: (directory) ->
- read = (directory) =>
- fs.readdir directory, (error, current) =>
- return helpers.logError error if error?
- return unless current
- previous = @_getWatchedDir directory
- previous
- .filter (file) ->
- file not in current
- .forEach (file) =>
- @emit 'remove', sysPath.join directory, file
-
- current
- .filter (file) ->
- file not in previous
- .forEach (file) =>
- @_handle sysPath.join directory, file
- read directory
- @_watch directory, read
-
- _handle: (file) ->
- return if @invalid.test sysPath.basename file
- fs.realpath file, (error, path) =>
- return helpers.logError error if error?
- fs.stat file, (error, stats) =>
- return helpers.logError error if error?
- @_handleFile file if stats.isFile()
- @_handleDir file if stats.isDirectory()
-
- on: ->
- super
- this
-
- # Removes all listeners from watched files.
- close: ->
- for directory, files of @watched
- for file in files
- fs.unwatchFile sysPath.join directory, file
- @watched = {}
- this
-
-class exports.FileWriter extends EventEmitter
- constructor: (@config, @plugins) ->
- @destFiles = []
- @_initFilesConfig @config.files
-
- _initFilesConfig: (filesConfig) ->
- config = filesConfig
- Object.keys(config).forEach (type) =>
- data = config[type]
- if typeof data.joinTo is 'string'
- object = {}
- object[data.joinTo] = /.+/
- data.joinTo = object
- Object.keys(data.joinTo).forEach (destinationPath) =>
- regExpOrFunction = data.joinTo[destinationPath]
- data.joinTo[destinationPath] = if regExpOrFunction instanceof RegExp
- (string) ->
- regExpOrFunction.test string
- else
- regExpOrFunction
- config
-
- _getDestinationPathes: (file) ->
- pathes = []
- data = @config.files[pluralize file.type]
- for own destinationPath, tester of data.joinTo when tester file.path
- pathes.push destinationPath
- if pathes.length > 0 then pathes else null
-
- _getFiles: (fileList, minifiers) ->
- map = {}
- fileList.files.forEach (file) =>
- pathes = @_getDestinationPathes file
- return unless pathes?
- pathes.forEach (path) =>
- map[path] ?= []
- map[path].push file
- files = []
- for generatedFilePath, sourceFiles of map
- generatedFilePath = sysPath.join @config.buildPath, generatedFilePath
- file = new GeneratedFile generatedFilePath, sourceFiles, @config
- for minifier in minifiers when minifier.minifierType is file.type
- file.minifier = minifier
- files.push file
- files
-
- write: (fileList) =>
- files = @_getFiles fileList, @plugins.filter (plugin) -> !!plugin.minify
- write = (file, callback) -> file.write callback
- async.forEach files, write, (error, results) =>
- return helpers.logError "write error. #{error}" if error?
- @emit 'write', results
27 src/fs_utils/common.coffee
@@ -0,0 +1,27 @@
+fs = require 'fs'
+mkdirp = require 'mkdirp'
+sysPath = require 'path'
+
+exports.pluralize = (word) ->
+ word + 's'
+
+# Creates file if it doesn't exist and writes data to it.
+# Would also create a parent directories if they don't exist.
+#
+# path - path to file that would be written.
+# data - data to be written
+# callback(error, path, data) - would be executed on error or on
+# successful write.
+#
+# Example
+#
+# writeFile 'test.txt', 'data', (error) -> console.log error if error?
+#
+exports.writeFile = (path, data, callback) ->
+ write = (callback) -> fs.writeFile path, data, callback
+ write (error) ->
+ return callback null, path, data unless error?
+ mkdirp (sysPath.dirname path), (parseInt 755, 8), (error) ->
+ return callback error if error?
+ write (error) ->
+ callback error, path, data
71 src/fs_utils/file_watcher.coffee
@@ -0,0 +1,71 @@
+{EventEmitter} = require 'events'
+fs = require 'fs'
+sysPath = require 'path'
+
+class exports.FileWatcher extends EventEmitter
+ # RegExp that would filter invalid files (dotfiles, emacs caches etc).
+ invalid: /^(\.|#)/
+
+ constructor: (files) ->
+ @watched = {}
+ @_handle file for file in files
+
+ _getWatchedDir: (directory) ->
+ @watched[directory] ?= []
+
+ _watch: (item, callback) ->
+ parent = @_getWatchedDir sysPath.dirname item
+ basename = sysPath.basename item
+ # Prevent memory leaks.
+ return if basename in parent
+ parent.push basename
+ fs.watchFile item, persistent: yes, interval: 100, (curr, prev) =>
+ if curr.mtime.getTime() isnt prev.mtime.getTime()
+ callback? item
+
+ _handleFile: (file) ->
+ emit = (file) =>
+ @emit 'change', file
+ emit file
+ @_watch file, emit
+
+ _handleDir: (directory) ->
+ read = (directory) =>
+ fs.readdir directory, (error, current) =>
+ return logger.error error if error?
+ return unless current
+ previous = @_getWatchedDir directory
+ previous
+ .filter (file) ->
+ file not in current
+ .forEach (file) =>
+ @emit 'remove', sysPath.join directory, file
+
+ current
+ .filter (file) ->
+ file not in previous
+ .forEach (file) =>
+ @_handle sysPath.join directory, file
+ read directory
+ @_watch directory, read
+
+ _handle: (file) ->
+ return if @invalid.test sysPath.basename file
+ fs.realpath file, (error, path) =>
+ return logger.error error if error?
+ fs.stat file, (error, stats) =>
+ return logger.error error if error?
+ @_handleFile file if stats.isFile()
+ @_handleDir file if stats.isDirectory()
+
+ on: ->
+ super
+ this
+
+ # Removes all listeners from watched files.
+ close: ->
+ for directory, files of @watched
+ for file in files
+ fs.unwatchFile sysPath.join directory, file
+ @watched = {}
+ this
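
The watcher is a straight move of the deleted FSWatcher class from src/fs_utils.coffee; the old file's usage example did not survive the move. A minimal usage sketch, watching the same directories brunch.coffee passes in; the log messages are illustrative:

    {FileWatcher} = require './fs_utils/file_watcher'

    watcher = (new FileWatcher ['app', 'vendor'])
      .on('change', (path) -> console.log "File '#{path}' was changed")
      .on('remove', (path) -> console.log "File '#{path}' was removed")

    # watcher.close() stops the fs.watchFile polling for every watched entry.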
58 src/fs_utils/file_writer.coffee
@@ -0,0 +1,58 @@
+async = require 'async'
+{EventEmitter} = require 'events'
+sysPath = require 'path'
+common = require './common'
+{GeneratedFile} = require './generated_file'
+
+class exports.FileWriter extends EventEmitter
+ constructor: (@config, @plugins) ->
+ @destFiles = []
+ @_initFilesConfig @config.files
+
+ _initFilesConfig: (filesConfig) ->
+ config = filesConfig
+ Object.keys(config).forEach (type) =>
+ data = config[type]
+ if typeof data.joinTo is 'string'
+ object = {}
+ object[data.joinTo] = /.+/
+ data.joinTo = object
+ Object.keys(data.joinTo).forEach (destinationPath) =>
+ regExpOrFunction = data.joinTo[destinationPath]
+ data.joinTo[destinationPath] = if regExpOrFunction instanceof RegExp
+ (string) ->
+ regExpOrFunction.test string
+ else
+ regExpOrFunction
+ config
+
+ _getDestinationPathes: (file) ->
+ pathes = []
+ data = @config.files[common.pluralize file.type]
+ for own destinationPath, tester of data.joinTo when tester file.path
+ pathes.push destinationPath
+ if pathes.length > 0 then pathes else null
+
+ _getFiles: (fileList, minifiers) ->
+ map = {}
+ fileList.files.forEach (file) =>
+ pathes = @_getDestinationPathes file
+ return unless pathes?
+ pathes.forEach (path) =>
+ map[path] ?= []
+ map[path].push file
+ files = []
+ for generatedFilePath, sourceFiles of map
+ generatedFilePath = sysPath.join @config.buildPath, generatedFilePath
+ file = new GeneratedFile generatedFilePath, sourceFiles, @config
+ for minifier in minifiers when minifier.minifierType is file.type
+ file.minifier = minifier
+ files.push file
+ files
+
+ write: (fileList) =>
+ files = @_getFiles fileList, @plugins.filter (plugin) -> !!plugin.minify
+ write = (file, callback) -> file.write callback
+ async.forEach files, write, (error, results) =>
+ return logger.error "write error. #{error}" if error?
+ @emit 'write', results
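
For context, _initFilesConfig accepts two shapes of joinTo and normalizes both into a map of destination path to tester function. The destination paths and patterns below are illustrative, not taken from this commit:

    # Shorthand: a bare string means "join every matching source into this one file";
    # _initFilesConfig rewrites it to {'javascripts/app.js': /.+/}.
    config = files:
      javascripts:
        joinTo: 'javascripts/app.js'

    # Full form: destination path -> RegExp (or predicate) over the source path.
    # RegExps get wrapped into (string) -> boolean testers for _getDestinationPathes.
    config = files:
      javascripts:
        joinTo:
          'javascripts/app.js': /^app/
          'javascripts/vendor.js': /^vendor/
      stylesheets:
        joinTo: 'stylesheets/app.css'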
150 src/fs_utils/generated_file.coffee
@@ -0,0 +1,150 @@
+common = require './common'
+
+# The definition would be added on top of every filewriter .js file.
+requireDefinition = '''
+(function(/*! Brunch !*/) {
+ if (!this.require) {
+ var modules = {}, cache = {}, require = function(name, root) {
+ var module = cache[name], path = expand(root, name), fn;
+ if (module) {
+ return module;
+ } else if (fn = modules[path] || modules[path = expand(path, './index')]) {
+ module = {id: name, exports: {}};
+ try {
+ cache[name] = module.exports;
+ fn(module.exports, function(name) {
+ return require(name, dirname(path));
+ }, module);
+ return cache[name] = module.exports;
+ } catch (err) {
+ delete cache[name];
+ throw err;
+ }
+ } else {
+ throw 'module \\'' + name + '\\' not found';
+ }
+ }, expand = function(root, name) {
+ var results = [], parts, part;
+ if (/^\\.\\.?(\\/|$)/.test(name)) {
+ parts = [root, name].join('/').split('/');
+ } else {
+ parts = name.split('/');
+ }
+ for (var i = 0, length = parts.length; i < length; i++) {
+ part = parts[i];
+ if (part == '..') {
+ results.pop();
+ } else if (part != '.' && part != '') {
+ results.push(part);
+ }
+ }
+ return results.join('/');
+ }, dirname = function(path) {
+ return path.split('/').slice(0, -1).join('/');
+ };
+ this.require = function(name) {
+ return require(name, '');
+ };
+ this.require.brunch = true;
+ this.require.define = function(bundle) {
+ for (var key in bundle)
+ modules[key] = bundle[key];
+ };
+ }
+}).call(this);
+'''
+
+# Sorts by pattern.
+#
+# Examples
+#
+# sort ['b.coffee', 'c.coffee', 'a.coffee'],
+# before: ['a.coffee'], after: ['b.coffee']
+# # => ['a.coffee', 'c.coffee', 'b.coffee']
+#
+sortByConfig = (files, config) ->
+ return files if typeof config isnt 'object'
+ config.before ?= []
+ config.after ?= []
+ # Clone data to a new array.
+ [files...]
+ .sort (a, b) ->
+ # Try to find items in config.before.
+ # Item that config.after contains would have bigger sorting index.
+ indexOfA = config.before.indexOf a
+ indexOfB = config.before.indexOf b
+ [hasA, hasB] = [(indexOfA isnt -1), (indexOfB isnt -1)]
+ if hasA and not hasB
+ -1
+ else if not hasA and hasB
+ 1
+ else if hasA and hasB
+ indexOfA - indexOfB
+ else
+ # Items wasn't found in config.before, try to find then in
+ # config.after.
+ # Item that config.after contains would have lower sorting index.
+ indexOfA = config.after.indexOf a
+ indexOfB = config.after.indexOf b
+ [hasA, hasB] = [(indexOfA isnt -1), (indexOfB isnt -1)]
+ if hasA and not hasB
+ 1
+ else if not hasA and hasB
+ -1
+ else if hasA and hasB
+ indexOfA - indexOfB
+ else
+ # If item path starts with 'vendor', it has bigger priority.
+ aIsVendor = (a.indexOf 'vendor') is 0
+ bIsVendor = (b.indexOf 'vendor') is 0
+ if aIsVendor and not bIsVendor
+ -1
+ else if not aIsVendor and bIsVendor
+ 1
+ else
+ # All conditions were false, we don't care about order of
+ # these two items.
+ 0
+
+class exports.GeneratedFile
+ constructor: (@path, @sourceFiles, @config) ->
+ @type = if (@sourceFiles.some (file) -> file.type is 'javascript')
+ 'javascript'
+ else
+ 'stylesheet'
+
+ _extractOrder: (files, config) ->
+ types = files.map (file) -> common.pluralize file.type
+ arrays = (value.order for own key, value of config.files when key in types)
+ arrays.reduce (memo, array) ->
+ array or= {}
+ {
+ before: memo.before.concat(array.before or []),
+ after: memo.after.concat(array.after or [])
+ }
+ , {before: [], after: []}
+
+ # Collects content from a list of files and wraps it with
+ # require.js module definition if needed.
+ joinSourceFiles: ->
+ files = @sourceFiles
+ pathes = files.map (file) -> file.path
+ order = @_extractOrder files, @config
+ sourceFiles = (sortByConfig pathes, order).map (file) ->
+ files[pathes.indexOf file]
+ data = ''
+ data += requireDefinition if @type is 'javascript'
+ data += sourceFiles.map((file) -> file.data).join ''
+ data
+
+ minify: (data, callback) ->
+ if @minifier?.minify?
+ @minifier.minify data, @path, callback
+ else
+ callback null, data
+
+ write: (callback) ->
+ files = (@sourceFiles.map (file) -> file.path).join(', ')
+ logger.log 'debug', "Writing files '#{files}' to '#{@path}'"
+ @minify @joinSourceFiles(), (error, data) =>
+ common.writeFile @path, data, callback
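
sortByConfig is driven by the optional order section of each config.files entry, which _extractOrder merges across types into a single {before, after} pair. An illustrative fragment (the file names are made up):

    config = files:
      javascripts:
        joinTo: 'javascripts/app.js'
        order:
          before: ['vendor/scripts/console-helper.js']
          after: ['test/vendor/test-helper.js']

    # For a bundle of javascript files, _extractOrder collapses this to
    #   {before: ['vendor/scripts/console-helper.js'], after: ['test/vendor/test-helper.js']}
    # and sortByConfig then emits "before" entries first, "after" entries last,
    # with remaining vendor/ paths sorted ahead of app code.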
6 src/fs_utils/index.coffee
@@ -0,0 +1,6 @@
+{FileWatcher} = require './file_watcher'
+{FileWriter} = require './file_writer'
+{SourceFile} = require './source_file'
+{SourceFileList} = require './source_file_list'
+
+module.exports = {FileWriter, FileWatcher, SourceFile, SourceFileList}
45 src/fs_utils/source_file.coffee
@@ -0,0 +1,45 @@
+fs = require 'fs'
+sysPath = require 'path'
+
+pluginHelperCounter = 0
+
+class exports.SourceFile
+ constructor: (@path, @compiler) ->
+ @type = @compiler.compilerType
+ @data = ''
+ @isPluginHelper = no
+
+ # Defines a requirejs module in scripts & templates.
+ # This allows brunch users to use `require 'module/name'` in browsers.
+ #
+ # path - path to file, contents of which will be wrapped.
+ # source - file contents.
+ #
+ # Returns a wrapped string.
+ _wrap: (data) ->
+ if @isPluginHelper
+ pluginHelperCounter += 1
+ fileName = "brunch_#{pluginHelperCounter}_#{sysPath.basename @path}"
+ @path = sysPath.join 'vendor', 'scripts', fileName
+ data
+ else
+ if @type in ['javascript', 'template'] and !(/^vendor/.test @path)
+ moduleName = JSON.stringify(
+ @path.replace(/^app\//, '').replace(/\.\w*$/, '')
+ )
+ """
+ (this.require.define({
+ #{moduleName}: function(exports, require, module) {
+ #{data}
+ }
+ }));\n
+ """
+ else
+ data
+
+ compile: (callback) ->
+ fs.readFile @path, (error, data) =>
+ return callback error if error?
+ @compiler.compile data.toString(), @path, (error, result) =>
+ @data = @_wrap result if result?
+ callback error, result
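
A small sketch of the wrapping behaviour; the pass-through compiler and the app/models/user.js path are hypothetical stand-ins, since real compilers come from brunch plugins and the file has to exist on disk for compile to read it:

    {SourceFile} = require './fs_utils/source_file'

    # A stand-in compiler that passes JavaScript through unchanged.
    identityCompiler =
      compilerType: 'javascript'
      compile: (data, path, callback) -> callback null, data

    file = new SourceFile 'app/models/user.js', identityCompiler
    file.compile (error) ->
      throw error if error?
      console.log file.data
      # (this.require.define({
      #   "models/user": function(exports, require, module) {
      #     ...contents of app/models/user.js...
      #   }
      # }));
      #
      # Files flagged with isPluginHelper skip the wrapper and are renamed to
      # vendor/scripts/brunch_<n>_<basename> instead, so they count as vendor code.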
30 src/fs_utils/source_file_list.coffee
@@ -0,0 +1,30 @@
+{EventEmitter} = require 'events'
+
+class exports.SourceFileList extends EventEmitter
+ RESET_TIME: 100
+
+ constructor: ->
+ @files = []
+
+ resetTimer: ->
+ clearTimeout @timer if @timer?
+ @timer = setTimeout (=> @emit 'resetTimer'), @RESET_TIME
+
+ get: (searchFunction) ->
+ (@files.filter searchFunction)[0]
+
+ add: (file) ->

Inline review comment on the add: method:

I believe we should remove the previous version of the file first, right? Something like:

    @files = @files.filter (f) -> f.path isnt file.path

Or change the FileWatcher on 'change' handler to not simply add the file, but to look for an existing file and call compile() on it.

@paulmillr (Owner): You're right, my bad.

+ @files = @files.concat [file]
+ compilerName = file.compiler.constructor.name
+ file.compile (error, result) =>
+ logger.log 'debug', "Compiling file '#{file.path}'"
+ if error?
+ return logger.error "#{compilerName} failed in '#{file.path}' --
+#{error}"
+ @resetTimer()
+
+ remove: (path) ->
+ removed = @get (file) -> file.path is path
+ @files = @files.filter (file) -> file isnt removed
+ delete removed
+ @resetTimer()
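
Following the inline review comment earlier in this file's diff (add simply appends, so a changed file ends up in the list twice), a sketch of the suggested fix, which is not part of this commit, dropping the stale entry by path before appending:

    add: (file) ->
      # Drop any previously compiled entry for the same path first.
      @files = @files.filter (f) -> f.path isnt file.path
      @files = @files.concat [file]
      compilerName = file.compiler.constructor.name
      file.compile (error, result) =>
        logger.log 'debug', "Compiling file '#{file.path}'"
        if error?
          return logger.error "#{compilerName} failed in '#{file.path}' -- #{error}"
        @resetTimer()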
99 src/helpers.coffee
@@ -2,11 +2,39 @@ coffeescript = require 'coffee-script'
express = require 'express'
growl = require 'growl'
sysPath = require 'path'
+winston = require 'winston'
+util = require 'util'
require.extensions['.coffee'] ?= (module, filename) ->
content = coffeescript.compile fs.readFileSync filename, 'utf8', {filename}
module._compile content, filename
+class ConsoleGrowlTransport extends winston.transports.Console
+ constructor: ->
+ super
+ @super = ConsoleGrowlTransport.__super__
+
+ log: (level, msg, meta, callback) ->
+ args = arguments
+ notify = (notifyCallback) ->
+ if level is 'error'
+ growl msg, title: 'Brunch error', notifyCallback
+ else
+ notifyCallback()
+ notify =>
+ @super.log.apply this, args
+
+exports.logger = logger = new winston.Logger transports: [
+ new ConsoleGrowlTransport {
+ colorize: 'true',
+ timestamp: 'true'
+ }
+]
+
+debug = process.env.BRUNCH_DEBUG is '1'
+logger.setLevels winston.config.syslog.levels unless debug
+global.logger = logger
+
# Extends the object with properties from another object.
# Example
#
@@ -59,89 +87,24 @@ formatDate = (color = 'none') ->
exports.isTesting = ->
no
-exports.log = (text, color = 'green', isError = no) ->
- stream = if isError then process.stderr else process.stdout
- # TODO: log stdout on testing output end.
- output = "#{formatDate(color)} #{text}\n"
- stream.write output, 'utf8' unless exports.isTesting()
- growl text, title: 'Brunch error' if isError
-
-exports.logError = (text) ->
- exports.log text, 'red', yes
-
-exports.logDebug = (args...) ->
- console.log (formatDate 'green'), args...
-
exports.exit = ->
if exports.isTesting()
- exports.logError 'Terminated process'
+ logger.error 'Terminated process'
else
process.exit 0
-# Sorts by pattern.
-#
-# Examples
-#
-# sort ['b.coffee', 'c.coffee', 'a.coffee'],
-# before: ['a.coffee'], after: ['b.coffee']
-# # => ['a.coffee', 'c.coffee', 'b.coffee']
-#
-exports.sort = (files, config) ->
- return files if typeof config isnt 'object'
- config.before ?= []
- config.after ?= []
- # Clone data to a new array.
- [files...]
- .sort (a, b) ->
- # Try to find items in config.before.
- # Item that config.after contains would have bigger sorting index.
- indexOfA = config.before.indexOf a
- indexOfB = config.before.indexOf b
- [hasA, hasB] = [(indexOfA isnt -1), (indexOfB isnt -1)]
- if hasA and not hasB
- -1
- else if not hasA and hasB
- 1
- else if hasA and hasB
- indexOfA - indexOfB
- else
- # Items wasn't found in config.before, try to find then in
- # config.after.
- # Item that config.after contains would have lower sorting index.
- indexOfA = config.after.indexOf a
- indexOfB = config.after.indexOf b
- [hasA, hasB] = [(indexOfA isnt -1), (indexOfB isnt -1)]
- if hasA and not hasB
- 1
- else if not hasA and hasB
- -1
- else if hasA and hasB
- indexOfA - indexOfB
- else
- # If item path starts with 'vendor', it has bigger priority.
- aIsVendor = (a.indexOf 'vendor') is 0
- bIsVendor = (b.indexOf 'vendor') is 0
- if aIsVendor and not bIsVendor
- -1
- else if not aIsVendor and bIsVendor
- 1
- else
- # All conditions were false, we don't care about order of
- # these two items.
- 0
-
exports.startServer = (port = 3333, path = '.') ->
try
server = require sysPath.resolve 'server.coffee'
server.startServer port, path, express, this
catch error
- exports.logError "[Brunch]: couldn\'t load server.coffee. #{error}"
+ logger.error "couldn\'t load server.coffee. #{error}"
exports.exit()
exports.loadConfig = (configPath) ->
try
{config} = require sysPath.resolve configPath
catch error
- exports.logError "[Brunch]: couldn\'t load config.coffee. #{error}"
+ logger.error "couldn\'t load config.coffee. #{error}"
exports.exit()
config
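
Net effect for the rest of the codebase: a single winston logger, also installed as global.logger, whose error level additionally fires a Growl notification, with debug output controlled through the BRUNCH_DEBUG switch above. A short usage sketch; the messages echo call sites elsewhere in this commit and the command line is illustrative:

    # Debug-level output is meant to be toggled from the environment, e.g.
    #   BRUNCH_DEBUG=1 brunch watch
    {logger} = require './helpers'   # the same object as global.logger

    logger.info 'Installing packages...'                # timestamped console line
    logger.error "couldn't load config.coffee. ENOENT"  # console line plus Growl popup
    logger.log 'debug', 'compilation time: 42ms'        # visibility depends on the setLevels call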
